From 9022d16951468c75a918e9c17b9741a359e125c9 Mon Sep 17 00:00:00 2001 From: evilhero Date: Sun, 29 Dec 2019 20:20:41 -0500 Subject: [PATCH 01/26] LOG:(#2383) added logging line to import results to show search results prior to adding to db --- mylar/webserve.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mylar/webserve.py b/mylar/webserve.py index ca749ab4..cbcbad17 100644 --- a/mylar/webserve.py +++ b/mylar/webserve.py @@ -4707,6 +4707,7 @@ class WebInterface(object): "issues": sres['issues'], "ogcname": ogcname, "comicyear": sres['comicyear']} + logger.fdebug('search_values: [%s]/%s' % (cVal, nVal)) myDB.upsert("searchresults", nVal, cVal) logger.info('[IMPORT] There is more than one result that might be valid - normally this is due to the filename(s) not having enough information for me to use (ie. no volume label/year). Manual intervention is required.') #force the status here just in case From 85f295976df90cbb6f6f478da44e726d6567286c Mon Sep 17 00:00:00 2001 From: evilhero Date: Sun, 29 Dec 2019 22:31:50 -0500 Subject: [PATCH 02/26] =?UTF-8?q?FIX:=20fix=20for=20=C2=BD=20symbol=20caus?= =?UTF-8?q?ing=20a=20traceback=20error=20for=20some=20series?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- mylar/filechecker.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mylar/filechecker.py b/mylar/filechecker.py index 0fcffe67..5eb9cf65 100755 --- a/mylar/filechecker.py +++ b/mylar/filechecker.py @@ -1131,7 +1131,8 @@ class FileChecker(object): if alt_series is not None: if 'XCV' in alt_series: alt_series = re.sub('XCV', x, alt_series,1) - elif 'XCV' in alt_issue: + if alt_issue is not None: + if 'XCV' in alt_issue: alt_issue = re.sub('XCV', x, alt_issue,1) series_name = re.sub('c11', '+', series_name) From a834df2179928527420187af9d136703788d695e Mon Sep 17 00:00:00 2001 From: Barbeque Sauce Date: Tue, 31 Dec 2019 10:24:20 -0500 Subject: [PATCH 03/26] IMP: WebViewer 1.0 --- data/css/webviewerstyle.css | 330 ++++++ data/interfaces/default/comicdetails.html | 1 + data/interfaces/default/header.html | 16 + data/interfaces/default/read.html | 114 ++ data/js/jscolor.min.js | 10 + lib/pathlib.py | 1279 +++++++++++++++++++++ mylar/webserve.py | 11 + mylar/webviewer.py | 163 +++ 8 files changed, 1924 insertions(+) create mode 100644 data/css/webviewerstyle.css create mode 100644 data/interfaces/default/header.html create mode 100644 data/interfaces/default/read.html create mode 100644 data/js/jscolor.min.js create mode 100644 lib/pathlib.py create mode 100644 mylar/webviewer.py diff --git a/data/css/webviewerstyle.css b/data/css/webviewerstyle.css new file mode 100644 index 00000000..a75ec81c --- /dev/null +++ b/data/css/webviewerstyle.css @@ -0,0 +1,330 @@ +html { +} +header, +main, +footer { + background: #757575; +} +body { + color: #FFFFFF; + background: #757575; +} +#breadcrumbs { + top: 0; + background: #BDBDBD; +} +#comic-info { + display: inline; +} +#dash_dashboard { + padding-left: 8px; + display: inline-block; +} +#dash_library { + padding-left: 8px; + display: inline-block; +} +#dash_settings { + padding-left: 8px; + display: inline-block; +} +#directory-card-content { + display:flex; + color: #FFFFFF; + background-color: #BDBDBD; + text-position: center; +} +#footer { + background: #757575; +} +#theme-settings .dropdown-content li>a, .dropdown-content li>span, .select-dropdown li.disabled, .select-dropdown li.disabled>span, .select-dropdown li.optgroup { + background: #757575; + color: #FFFFFF; +} +#log-modal { + 
background: #BDBDBD; + color: #FFFFFF; + width: 100%; + height: 100%; +} +#logo-wrapper { + background: #757575; +} +i { + color: #FFFFFF; +} +#nav-dropdown { + background-color: #BDBDBD; +} +#nav-dropdown a { + color: #FFFFFF; +} +#page-settings-text { + color: #FFFFFF; +} +#page-settings-right-text i { + color: #FFFFFF; +} +#page-settings-left-text i { + color: #FFFFFF; +} +#pagination-num a { + color: #FFFFFF; +} +#pagination-num.active { + background-color: #BDBDBD; +} +#read-link { + color: #FFFFFF; +} +#search-modal { + background: #BDBDBD; + color: #FFFFFF; +} +#search-modal input { + margin: 0 auto; +} +#settings-arrow { + color: #BDBDBD; +} +#settings-button { + background: #BDBDBD; + color: #FFFFFF; +} +#settings-help { + position: absolute; + z-index:5000; + color: #FFFFFF; + background: #BDBDBD; +} +#size-height-button { + background: #BDBDBD; + color: #FFFFFF; +} +#size-width-button { + background: #BDBDBD; + color: #FFFFFF; +} +#size-normal-button { + background: #BDBDBD; + color: #FFFFFF; +} +#summary-pane { + background: #BDBDBD; + color: #FFFFFF; +} +#tab-row { + background: #757575; + color: #FFFFFF; +} +.btn-flat { + background: #BDBDBD; + color: #FFFFFF; +} +.card { + position: relative; + background-color: #BDBDBD; + box-shadow: 1px 1px 10px 1px rgba(0, 0, 0, 0.7); +} +.card-image { + position: relative; + height: 350px; + overflow: hidden; +} +.card-image img { + position: absolute; + height: 100%; + width: 100%; + top: 0; + bottom: 0; + left: 0; + right: 0; +} +.card-content { + height: auto; + background: #BDBDBD; + color: #FFFFFF; + text-align: center; +} +.card-content a { + color: #FFFFFF; +} +.center-cols > .col { + float:none; /* disable the float */ + display: inline-block; /* make blocks inline-block */ + text-align: initial; /* restore text-align to default */ + vertical-align: top; +} +.dimmed { + background-color: rgba(0, 0, 0, 0.7); +} +.dropdown-button { + background-color: #BDBDBD; + color: #FFFFFF; +} +.input-field { + color: #FFFFFF; +} +.input-field label { + color: #FFFFFF; +} +.input-field input[type=text]:focus + label { + color: #FFFFFF; +} +.input-field input[type=text]:focus { + border-bottom: 1px solid #FFFFFF; + box-shadow: 0 1px 0 0 #FFFFFF; +} +.input-field input[type=password]:focus + label { + color: #FFFFFF; +} +.input-field input[type=password]:focus { + border-bottom: 1px solid #FFFFFF; + box-shadow: 0 1px 0 0 #FFFFFF; +} +.nav-wrapper .input-field input[type="search"] { + height: auto; + color: #FFFFFF; + background: #BDBDBD; +} +.nav-wrapper .input-field input[type="search"]:focus { + background: #BDBDBD; + color: #FFFFFF; + box-shadow: 0 1px 0 0 #BDBDBD; +} +.nav-wrapper .input-field input[type="search"]:focus ~ .material-icons.icon-close { + right: 2rem; +} +.nav-wrapper .dropdown-button { + position:absolute; + left: 0; + top: 0; +} +.overlay-settings { + position: fixed; + width: 100%; + background-color: rgba(0, 0, 0, 0); + color: #000; + z-index:4000; + min-height: 1px; +} +.overlay-settings-text { + color: #FFFFFF; + height: 900px; + line-height: 900px; + text-align: center; +} +.page-left { + position: fixed; + left: 0px; + top: 0px; + height: 100%; + width: 33.33%; +} +.page-settings { + position: fixed; + left: 33.33%; + top: 0px; + height: 100%; + width: 33.33%; +} +.page-right { + position: fixed; + right: 0px; + top: 0px; + height: 100%; + width: 33.33%; +} +.reader-overlay { + position: absolute; + background-color:#757575; + width:100%; + height:100%; + top:0px; + left:0px; + z-index:1000; + max-width:100%; +} 
+.reader-page-wide { + width:100%; +} +.reader-page-high { + position: absolute; + margin: auto; + top: 0px; + bottom: 0px; + left: 0; + right: 0; + height: 100%; + max-width:100%; +} +.reader-page-norm { + position: absolute; + margin-left: auto; + margin-right: auto; + top: 0px; + bottom: 0px; + left: 0; + right: 0; + max-width:100%; +} +.settings-span-left { + display: inline-block; + left: 0px; + vertical-align: middle; + line-height: normal; +} +.settings-span-center { + display: inline-block; + text-align: center; + vertical-align: middle; + line-height: normal; +} +.settings-span-right { + display: inline-block; + right: 0px; + vertical-align: middle; + line-height: normal; +} +.tabs .tab a { + background: #757575; + color: #FFFFFF; +} +.tabs .tab a:hover { + color: #BDBDBD; +} +.tabs .indicator { + background-color: #FFFFFF; +} + +@media only screen and (min-width : 601px) and (max-width : 1260px) { + .toast { + border-radius: 0; + } +} + +@media only screen and (min-width : 1261px) { + .toast { + border-radius: 0; + } +} + +@media only screen and (min-width : 601px) and (max-width : 1260px) { + #toast-container { + bottom: 0; + top: 90%; + right: 50%; + transform: translate(50%, 0); + white-space: nowrap; + } +} + +@media only screen and (min-width : 1261px) { + #toast-container { + bottom: 0; + top: 90%; + right: 50%; + transform: translate(50%, 0); + white-space: nowrap; + } +} \ No newline at end of file diff --git a/data/interfaces/default/comicdetails.html b/data/interfaces/default/comicdetails.html index c71df227..577e830f 100755 --- a/data/interfaces/default/comicdetails.html +++ b/data/interfaces/default/comicdetails.html @@ -431,6 +431,7 @@ %> %if linky: + %if linky.endswith('.cbz'):
diff --git a/data/interfaces/default/header.html b/data/interfaces/default/header.html new file mode 100644 index 00000000..b45a7fd5 --- /dev/null +++ b/data/interfaces/default/header.html @@ -0,0 +1,16 @@ +<%page args="jscolor=False"/> + + Mylar WebViewer + + + + + + + + + + % if jscolor is True: + + % endif + \ No newline at end of file diff --git a/data/interfaces/default/read.html b/data/interfaces/default/read.html new file mode 100644 index 00000000..316e6643 --- /dev/null +++ b/data/interfaces/default/read.html @@ -0,0 +1,114 @@ +<%! + import mylar +%> +<% + now_page = pages[current_page] +%> + + + <%include file="header.html" /> + + +
+% if (current_page + 1) == 1: + help_outline +% endif +
+
+
+ + + +
+
+ + + +
+
+ + + +
+
+
+
+% if size == "wide": + +% elif size == "high": + +% elif size == "norm": + +% else: + +% endif +
+
+ + + + \ No newline at end of file diff --git a/data/js/jscolor.min.js b/data/js/jscolor.min.js new file mode 100644 index 00000000..2a7a788b --- /dev/null +++ b/data/js/jscolor.min.js @@ -0,0 +1,10 @@ +/** + * jscolor - JavaScript Color Picker + * + * @link http://jscolor.com + * @license For open source use: GPLv3 + * For commercial use: JSColor Commercial License + * @author Jan Odvarko + * + * See usage examples at http://jscolor.com/examples/ + */"use strict";window.jscolor||(window.jscolor=function(){var e={register:function(){e.attachDOMReadyEvent(e.init),e.attachEvent(document,"mousedown",e.onDocumentMouseDown),e.attachEvent(document,"touchstart",e.onDocumentTouchStart),e.attachEvent(window,"resize",e.onWindowResize)},init:function(){e.jscolor.lookupClass&&e.jscolor.installByClassName(e.jscolor.lookupClass)},tryInstallOnElements:function(t,n){var r=new RegExp("(^|\\s)("+n+")(\\s*(\\{[^}]*\\})|\\s|$)","i");for(var i=0;is[u]?-r[u]+n[u]+i[u]/2>s[u]/2&&n[u]+i[u]-o[u]>=0?n[u]+i[u]-o[u]:n[u]:n[u],-r[a]+n[a]+i[a]+o[a]-l+l*f>s[a]?-r[a]+n[a]+i[a]/2>s[a]/2&&n[a]+i[a]-l-l*f>=0?n[a]+i[a]-l-l*f:n[a]+i[a]-l+l*f:n[a]+i[a]-l+l*f>=0?n[a]+i[a]-l+l*f:n[a]+i[a]-l-l*f];var h=c[u],p=c[a],d=t.fixed?"fixed":"absolute",v=(c[0]+o[0]>n[0]||c[0]2)switch(e.mode.charAt(2).toLowerCase()){case"s":return"s";case"v":return"v"}return null},onDocumentMouseDown:function(t){t||(t=window.event);var n=t.target||t.srcElement;n._jscLinkedInstance?n._jscLinkedInstance.showOnClick&&n._jscLinkedInstance.show():n._jscControlName?e.onControlPointerStart(t,n,n._jscControlName,"mouse"):e.picker&&e.picker.owner&&e.picker.owner.hide()},onDocumentTouchStart:function(t){t||(t=window.event);var n=t.target||t.srcElement;n._jscLinkedInstance?n._jscLinkedInstance.showOnClick&&n._jscLinkedInstance.show():n._jscControlName?e.onControlPointerStart(t,n,n._jscControlName,"touch"):e.picker&&e.picker.owner&&e.picker.owner.hide()},onWindowResize:function(t){e.redrawPosition()},onParentScroll:function(t){e.picker&&e.picker.owner&&e.picker.owner.hide()},_pointerMoveEvent:{mouse:"mousemove",touch:"touchmove"},_pointerEndEvent:{mouse:"mouseup",touch:"touchend"},_pointerOrigin:null,_capturedTarget:null,onControlPointerStart:function(t,n,r,i){var s=n._jscInstance;e.preventDefault(t),e.captureTarget(n);var o=function(s,o){e.attachGroupEvent("drag",s,e._pointerMoveEvent[i],e.onDocumentPointerMove(t,n,r,i,o)),e.attachGroupEvent("drag",s,e._pointerEndEvent[i],e.onDocumentPointerEnd(t,n,r,i))};o(document,[0,0]);if(window.parent&&window.frameElement){var u=window.frameElement.getBoundingClientRect(),a=[-u.left,-u.top];o(window.parent.window.document,a)}var f=e.getAbsPointerPos(t),l=e.getRelPointerPos(t);e._pointerOrigin={x:f.x-l.x,y:f.y-l.y};switch(r){case"pad":switch(e.getSliderComponent(s)){case"s":s.hsv[1]===0&&s.fromHSV(null,100,null);break;case"v":s.hsv[2]===0&&s.fromHSV(null,null,100)}e.setPad(s,t,0,0);break;case"sld":e.setSld(s,t,0)}e.dispatchFineChange(s)},onDocumentPointerMove:function(t,n,r,i,s){return function(t){var i=n._jscInstance;switch(r){case"pad":t||(t=window.event),e.setPad(i,t,s[0],s[1]),e.dispatchFineChange(i);break;case"sld":t||(t=window.event),e.setSld(i,t,s[1]),e.dispatchFineChange(i)}}},onDocumentPointerEnd:function(t,n,r,i){return function(t){var r=n._jscInstance;e.detachGroupEvents("drag"),e.releaseTarget(),e.dispatchChange(r)}},dispatchChange:function(t){t.valueElement&&e.isElementType(t.valueElement,"input")&&e.fireEvent(t.valueElement,"change")},dispatchFineChange:function(e){if(e.onFineChange){var t;typeof 
e.onFineChange=="string"?t=new Function(e.onFineChange):t=e.onFineChange,t.call(e)}},setPad:function(t,n,r,i){var s=e.getAbsPointerPos(n),o=r+s.x-e._pointerOrigin.x-t.padding-t.insetWidth,u=i+s.y-e._pointerOrigin.y-t.padding-t.insetWidth,a=o*(360/(t.width-1)),f=100-u*(100/(t.height-1));switch(e.getPadYComponent(t)){case"s":t.fromHSV(a,f,null,e.leaveSld);break;case"v":t.fromHSV(a,null,f,e.leaveSld)}},setSld:function(t,n,r){var i=e.getAbsPointerPos(n),s=r+i.y-e._pointerOrigin.y-t.padding-t.insetWidth,o=100-s*(100/(t.height-1));switch(e.getSliderComponent(t)){case"s":t.fromHSV(null,o,null,e.leavePad);break;case"v":t.fromHSV(null,null,o,e.leavePad)}},_vmlNS:"jsc_vml_",_vmlCSS:"jsc_vml_css_",_vmlReady:!1,initVML:function(){if(!e._vmlReady){var t=document;t.namespaces[e._vmlNS]||t.namespaces.add(e._vmlNS,"urn:schemas-microsoft-com:vml");if(!t.styleSheets[e._vmlCSS]){var n=["shape","shapetype","group","background","path","formulas","handles","fill","stroke","shadow","textbox","textpath","imagedata","line","polyline","curve","rect","roundrect","oval","arc","image"],r=t.createStyleSheet();r.owningElement.id=e._vmlCSS;for(var i=0;i=3&&(s=r[0].match(i))&&(o=r[1].match(i))&&(u=r[2].match(i))){var a=parseFloat((s[1]||"0")+(s[2]||"")),f=parseFloat((o[1]||"0")+(o[2]||"")),l=parseFloat((u[1]||"0")+(u[2]||""));return this.fromRGB(a,f,l,t),!0}}return!1},this.toString=function(){return(256|Math.round(this.rgb[0])).toString(16).substr(1)+(256|Math.round(this.rgb[1])).toString(16).substr(1)+(256|Math.round(this.rgb[2])).toString(16).substr(1)},this.toHEXString=function(){return"#"+this.toString().toUpperCase()},this.toRGBString=function(){return"rgb("+Math.round(this.rgb[0])+","+Math.round(this.rgb[1])+","+Math.round(this.rgb[2])+")"},this.isLight=function(){return.213*this.rgb[0]+.715*this.rgb[1]+.072*this.rgb[2]>127.5},this._processParentElementsInDOM=function(){if(this._linkedElementsProcessed)return;this._linkedElementsProcessed=!0;var t=this.targetElement;do{var n=e.getStyle(t);n&&n.position.toLowerCase()==="fixed"&&(this.fixed=!0),t!==this.targetElement&&(t._jscEventsAttached||(e.attachEvent(t,"scroll",e.onParentScroll),t._jscEventsAttached=!0))}while((t=t.parentNode)&&!e.isElementType(t,"body"))};if(typeof t=="string"){var h=t,p=document.getElementById(h);p?this.targetElement=p:e.warn("Could not find target element with ID '"+h+"'")}else t?this.targetElement=t:e.warn("Invalid target element: '"+t+"'");if(this.targetElement._jscLinkedInstance){e.warn("Cannot link jscolor twice to the same element. 
Skipping.");return}this.targetElement._jscLinkedInstance=this,this.valueElement=e.fetchElement(this.valueElement),this.styleElement=e.fetchElement(this.styleElement);var d=this,v=this.container?e.fetchElement(this.container):document.getElementsByTagName("body")[0],m=3;if(e.isElementType(this.targetElement,"button"))if(this.targetElement.onclick){var g=this.targetElement.onclick;this.targetElement.onclick=function(e){return g.call(this,e),!1}}else this.targetElement.onclick=function(){return!1};if(this.valueElement&&e.isElementType(this.valueElement,"input")){var y=function(){d.fromString(d.valueElement.value,e.leaveValue),e.dispatchFineChange(d)};e.attachEvent(this.valueElement,"keyup",y),e.attachEvent(this.valueElement,"input",y),e.attachEvent(this.valueElement,"blur",c),this.valueElement.setAttribute("autocomplete","off")}this.styleElement&&(this.styleElement._jscOrigStyle={backgroundImage:this.styleElement.style.backgroundImage,backgroundColor:this.styleElement.style.backgroundColor,color:this.styleElement.style.color}),this.value?this.fromString(this.value)||this.exportColor():this.importColor()}};return e.jscolor.lookupClass="jscolor",e.jscolor.installByClassName=function(t){var n=document.getElementsByTagName("input"),r=document.getElementsByTagName("button");e.tryInstallOnElements(n,t),e.tryInstallOnElements(r,t)},e.register(),e.jscolor}()); \ No newline at end of file diff --git a/lib/pathlib.py b/lib/pathlib.py new file mode 100644 index 00000000..65eb76b5 --- /dev/null +++ b/lib/pathlib.py @@ -0,0 +1,1279 @@ +import fnmatch +import functools +import io +import ntpath +import os +import posixpath +import re +import sys +import time +from collections import Sequence +from contextlib import contextmanager +from errno import EINVAL, ENOENT +from operator import attrgetter +from stat import S_ISDIR, S_ISLNK, S_ISREG, S_ISSOCK, S_ISBLK, S_ISCHR, S_ISFIFO +try: + from urllib import quote as urlquote, quote as urlquote_from_bytes +except ImportError: + from urllib.parse import quote as urlquote, quote_from_bytes as urlquote_from_bytes + + +try: + intern = intern +except NameError: + intern = sys.intern +try: + basestring = basestring +except NameError: + basestring = str + +supports_symlinks = True +try: + import nt +except ImportError: + nt = None +else: + if sys.getwindowsversion()[:2] >= (6, 0) and sys.version_info >= (3, 2): + from nt import _getfinalpathname + else: + supports_symlinks = False + _getfinalpathname = None + + +__all__ = [ + "PurePath", "PurePosixPath", "PureWindowsPath", + "Path", "PosixPath", "WindowsPath", + ] + +# +# Internals +# + +_py2 = sys.version_info < (3,) +_py2_fs_encoding = 'ascii' + +def _py2_fsencode(parts): + # py2 => minimal unicode support + return [part.encode(_py2_fs_encoding) if isinstance(part, unicode) + else part for part in parts] + +def _is_wildcard_pattern(pat): + # Whether this pattern needs actual matching using fnmatch, or can + # be looked up directly as a file. + return "*" in pat or "?" 
in pat or "[" in pat + + +class _Flavour(object): + """A flavour implements a particular (platform-specific) set of path + semantics.""" + + def __init__(self): + self.join = self.sep.join + + def parse_parts(self, parts): + if _py2: + parts = _py2_fsencode(parts) + parsed = [] + sep = self.sep + altsep = self.altsep + drv = root = '' + it = reversed(parts) + for part in it: + if not part: + continue + if altsep: + part = part.replace(altsep, sep) + drv, root, rel = self.splitroot(part) + if sep in rel: + for x in reversed(rel.split(sep)): + if x and x != '.': + parsed.append(intern(x)) + else: + if rel and rel != '.': + parsed.append(intern(rel)) + if drv or root: + if not drv: + # If no drive is present, try to find one in the previous + # parts. This makes the result of parsing e.g. + # ("C:", "/", "a") reasonably intuitive. + for part in it: + drv = self.splitroot(part)[0] + if drv: + break + break + if drv or root: + parsed.append(drv + root) + parsed.reverse() + return drv, root, parsed + + def join_parsed_parts(self, drv, root, parts, drv2, root2, parts2): + """ + Join the two paths represented by the respective + (drive, root, parts) tuples. Return a new (drive, root, parts) tuple. + """ + if root2: + if not drv2 and drv: + return drv, root2, [drv + root2] + parts2[1:] + elif drv2: + if drv2 == drv or self.casefold(drv2) == self.casefold(drv): + # Same drive => second path is relative to the first + return drv, root, parts + parts2[1:] + else: + # Second path is non-anchored (common case) + return drv, root, parts + parts2 + return drv2, root2, parts2 + + +class _WindowsFlavour(_Flavour): + # Reference for Windows paths can be found at + # http://msdn.microsoft.com/en-us/library/aa365247%28v=vs.85%29.aspx + + sep = '\\' + altsep = '/' + has_drv = True + pathmod = ntpath + + is_supported = (nt is not None) + + drive_letters = ( + set(chr(x) for x in range(ord('a'), ord('z') + 1)) | + set(chr(x) for x in range(ord('A'), ord('Z') + 1)) + ) + ext_namespace_prefix = '\\\\?\\' + + reserved_names = ( + set(['CON', 'PRN', 'AUX', 'NUL']) | + set(['COM%d' % i for i in range(1, 10)]) | + set(['LPT%d' % i for i in range(1, 10)]) + ) + + # Interesting findings about extended paths: + # - '\\?\c:\a', '//?/c:\a' and '//?/c:/a' are all supported + # but '\\?\c:/a' is not + # - extended paths are always absolute; "relative" extended paths will + # fail. + + def splitroot(self, part, sep=sep): + first = part[0:1] + second = part[1:2] + if (second == sep and first == sep): + # XXX extended paths should also disable the collapsing of "." + # components (according to MSDN docs). + prefix, part = self._split_extended_path(part) + first = part[0:1] + second = part[1:2] + else: + prefix = '' + third = part[2:3] + if (second == sep and first == sep and third != sep): + # is a UNC path: + # vvvvvvvvvvvvvvvvvvvvv root + # \\machine\mountpoint\directory\etc\... 
+ # directory ^^^^^^^^^^^^^^ + index = part.find(sep, 2) + if index != -1: + index2 = part.find(sep, index + 1) + # a UNC path can't have two slashes in a row + # (after the initial two) + if index2 != index + 1: + if index2 == -1: + index2 = len(part) + if prefix: + return prefix + part[1:index2], sep, part[index2+1:] + else: + return part[:index2], sep, part[index2+1:] + drv = root = '' + if second == ':' and first in self.drive_letters: + drv = part[:2] + part = part[2:] + first = third + if first == sep: + root = first + part = part.lstrip(sep) + return prefix + drv, root, part + + def casefold(self, s): + return s.lower() + + def casefold_parts(self, parts): + return [p.lower() for p in parts] + + def resolve(self, path): + s = str(path) + if not s: + return os.getcwd() + if _getfinalpathname is not None: + return self._ext_to_normal(_getfinalpathname(s)) + # Means fallback on absolute + return None + + def _split_extended_path(self, s, ext_prefix=ext_namespace_prefix): + prefix = '' + if s.startswith(ext_prefix): + prefix = s[:4] + s = s[4:] + if s.startswith('UNC\\'): + prefix += s[:3] + s = '\\' + s[3:] + return prefix, s + + def _ext_to_normal(self, s): + # Turn back an extended path into a normal DOS-like path + return self._split_extended_path(s)[1] + + def is_reserved(self, parts): + # NOTE: the rules for reserved names seem somewhat complicated + # (e.g. r"..\NUL" is reserved but not r"foo\NUL"). + # We err on the side of caution and return True for paths which are + # not considered reserved by Windows. + if not parts: + return False + if parts[0].startswith('\\\\'): + # UNC paths are never reserved + return False + return parts[-1].partition('.')[0].upper() in self.reserved_names + + def make_uri(self, path): + # Under Windows, file URIs use the UTF-8 encoding. + drive = path.drive + if len(drive) == 2 and drive[1] == ':': + # It's a path on a local drive => 'file:///c:/a/b' + rest = path.as_posix()[2:].lstrip('/') + return 'file:///%s/%s' % ( + drive, urlquote_from_bytes(rest.encode('utf-8'))) + else: + # It's a path on a network drive => 'file://host/share/a/b' + return 'file:' + urlquote_from_bytes(path.as_posix().encode('utf-8')) + + +class _PosixFlavour(_Flavour): + sep = '/' + altsep = '' + has_drv = False + pathmod = posixpath + + is_supported = (os.name != 'nt') + + def splitroot(self, part, sep=sep): + if part and part[0] == sep: + stripped_part = part.lstrip(sep) + # According to POSIX path resolution: + # http://pubs.opengroup.org/onlinepubs/009695399/basedefs/xbd_chap04.html#tag_04_11 + # "A pathname that begins with two successive slashes may be + # interpreted in an implementation-defined manner, although more + # than two leading slashes shall be treated as a single slash". + if len(part) - len(stripped_part) == 2: + return '', sep * 2, stripped_part + else: + return '', sep, stripped_part + else: + return '', '', part + + def casefold(self, s): + return s + + def casefold_parts(self, parts): + return parts + + def resolve(self, path): + sep = self.sep + accessor = path._accessor + seen = {} + def _resolve(path, rest): + if rest.startswith(sep): + path = '' + + for name in rest.split(sep): + if not name or name == '.': + # current dir + continue + if name == '..': + # parent dir + path, _, _ = path.rpartition(sep) + continue + newpath = path + sep + name + if newpath in seen: + # Already seen this path + path = seen[newpath] + if path is not None: + # use cached value + continue + # The symlink is not resolved, so we must have a symlink loop. 
+ raise RuntimeError("Symlink loop from %r" % newpath) + # Resolve the symbolic link + try: + target = accessor.readlink(newpath) + except OSError as e: + if e.errno != EINVAL: + raise + # Not a symlink + path = newpath + else: + seen[newpath] = None # not resolved symlink + path = _resolve(path, target) + seen[newpath] = path # resolved symlink + + return path + # NOTE: according to POSIX, getcwd() cannot contain path components + # which are symlinks. + base = '' if path.is_absolute() else os.getcwd() + return _resolve(base, str(path)) or sep + + def is_reserved(self, parts): + return False + + def make_uri(self, path): + # We represent the path using the local filesystem encoding, + # for portability to other applications. + bpath = bytes(path) + return 'file://' + urlquote_from_bytes(bpath) + + +_windows_flavour = _WindowsFlavour() +_posix_flavour = _PosixFlavour() + + +class _Accessor: + """An accessor implements a particular (system-specific or not) way of + accessing paths on the filesystem.""" + + +class _NormalAccessor(_Accessor): + + def _wrap_strfunc(strfunc): + @functools.wraps(strfunc) + def wrapped(pathobj, *args): + return strfunc(str(pathobj), *args) + return staticmethod(wrapped) + + def _wrap_binary_strfunc(strfunc): + @functools.wraps(strfunc) + def wrapped(pathobjA, pathobjB, *args): + return strfunc(str(pathobjA), str(pathobjB), *args) + return staticmethod(wrapped) + + stat = _wrap_strfunc(os.stat) + + lstat = _wrap_strfunc(os.lstat) + + open = _wrap_strfunc(os.open) + + listdir = _wrap_strfunc(os.listdir) + + chmod = _wrap_strfunc(os.chmod) + + if hasattr(os, "lchmod"): + lchmod = _wrap_strfunc(os.lchmod) + else: + def lchmod(self, pathobj, mode): + raise NotImplementedError("lchmod() not available on this system") + + mkdir = _wrap_strfunc(os.mkdir) + + unlink = _wrap_strfunc(os.unlink) + + rmdir = _wrap_strfunc(os.rmdir) + + rename = _wrap_binary_strfunc(os.rename) + + if sys.version_info >= (3, 3): + replace = _wrap_binary_strfunc(os.replace) + + if nt: + if supports_symlinks: + symlink = _wrap_binary_strfunc(os.symlink) + else: + def symlink(a, b, target_is_directory): + raise NotImplementedError("symlink() not available on this system") + else: + # Under POSIX, os.symlink() takes two args + @staticmethod + def symlink(a, b, target_is_directory): + return os.symlink(str(a), str(b)) + + utime = _wrap_strfunc(os.utime) + + # Helper for resolve() + def readlink(self, path): + return os.readlink(path) + + +_normal_accessor = _NormalAccessor() + + +# +# Globbing helpers +# + +@contextmanager +def _cached(func): + try: + func.__cached__ + yield func + except AttributeError: + cache = {} + def wrapper(*args): + try: + return cache[args] + except KeyError: + value = cache[args] = func(*args) + return value + wrapper.__cached__ = True + try: + yield wrapper + finally: + cache.clear() + +def _make_selector(pattern_parts): + pat = pattern_parts[0] + child_parts = pattern_parts[1:] + if pat == '**': + cls = _RecursiveWildcardSelector + elif '**' in pat: + raise ValueError("Invalid pattern: '**' can only be an entire path component") + elif _is_wildcard_pattern(pat): + cls = _WildcardSelector + else: + cls = _PreciseSelector + return cls(pat, child_parts) + +if hasattr(functools, "lru_cache"): + _make_selector = functools.lru_cache()(_make_selector) + + +class _Selector: + """A selector matches a specific glob pattern part against the children + of a given path.""" + + def __init__(self, child_parts): + self.child_parts = child_parts + if child_parts: + self.successor = 
_make_selector(child_parts) + else: + self.successor = _TerminatingSelector() + + def select_from(self, parent_path): + """Iterate over all child paths of `parent_path` matched by this + selector. This can contain parent_path itself.""" + path_cls = type(parent_path) + is_dir = path_cls.is_dir + exists = path_cls.exists + listdir = parent_path._accessor.listdir + return self._select_from(parent_path, is_dir, exists, listdir) + + +class _TerminatingSelector: + + def _select_from(self, parent_path, is_dir, exists, listdir): + yield parent_path + + +class _PreciseSelector(_Selector): + + def __init__(self, name, child_parts): + self.name = name + _Selector.__init__(self, child_parts) + + def _select_from(self, parent_path, is_dir, exists, listdir): + if not is_dir(parent_path): + return + path = parent_path._make_child_relpath(self.name) + if exists(path): + for p in self.successor._select_from(path, is_dir, exists, listdir): + yield p + + +class _WildcardSelector(_Selector): + + def __init__(self, pat, child_parts): + self.pat = re.compile(fnmatch.translate(pat)) + _Selector.__init__(self, child_parts) + + def _select_from(self, parent_path, is_dir, exists, listdir): + if not is_dir(parent_path): + return + cf = parent_path._flavour.casefold + for name in listdir(parent_path): + casefolded = cf(name) + if self.pat.match(casefolded): + path = parent_path._make_child_relpath(name) + for p in self.successor._select_from(path, is_dir, exists, listdir): + yield p + + +class _RecursiveWildcardSelector(_Selector): + + def __init__(self, pat, child_parts): + _Selector.__init__(self, child_parts) + + def _iterate_directories(self, parent_path, is_dir, listdir): + yield parent_path + for name in listdir(parent_path): + path = parent_path._make_child_relpath(name) + if is_dir(path): + for p in self._iterate_directories(path, is_dir, listdir): + yield p + + def _select_from(self, parent_path, is_dir, exists, listdir): + if not is_dir(parent_path): + return + with _cached(listdir) as listdir: + yielded = set() + try: + successor_select = self.successor._select_from + for starting_point in self._iterate_directories(parent_path, is_dir, listdir): + for p in successor_select(starting_point, is_dir, exists, listdir): + if p not in yielded: + yield p + yielded.add(p) + finally: + yielded.clear() + + +# +# Public API +# + +class _PathParents(Sequence): + """This object provides sequence-like access to the logical ancestors + of a path. Don't try to construct it yourself.""" + __slots__ = ('_pathcls', '_drv', '_root', '_parts') + + def __init__(self, path): + # We don't store the instance to avoid reference cycles + self._pathcls = type(path) + self._drv = path._drv + self._root = path._root + self._parts = path._parts + + def __len__(self): + if self._drv or self._root: + return len(self._parts) - 1 + else: + return len(self._parts) + + def __getitem__(self, idx): + if idx < 0 or idx >= len(self): + raise IndexError(idx) + return self._pathcls._from_parsed_parts(self._drv, self._root, + self._parts[:-idx - 1]) + + def __repr__(self): + return "<{0}.parents>".format(self._pathcls.__name__) + + +class PurePath(object): + """PurePath represents a filesystem path and offers operations which + don't imply any actual filesystem I/O. Depending on your system, + instantiating a PurePath will return either a PurePosixPath or a + PureWindowsPath object. You can also instantiate either of these classes + directly, regardless of your system. 
+ """ + __slots__ = ( + '_drv', '_root', '_parts', + '_str', '_hash', '_pparts', '_cached_cparts', + ) + + def __new__(cls, *args): + """Construct a PurePath from one or several strings and or existing + PurePath objects. The strings and path objects are combined so as + to yield a canonicalized path, which is incorporated into the + new PurePath object. + """ + if cls is PurePath: + cls = PureWindowsPath if os.name == 'nt' else PurePosixPath + return cls._from_parts(args) + + def __reduce__(self): + # Using the parts tuple helps share interned path parts + # when pickling related paths. + return (self.__class__, tuple(self._parts)) + + @classmethod + def _parse_args(cls, args): + # This is useful when you don't want to create an instance, just + # canonicalize some constructor arguments. + parts = [] + for a in args: + if isinstance(a, PurePath): + parts += a._parts + elif isinstance(a, basestring): + parts.append(a) + else: + raise TypeError( + "argument should be a path or str object, not %r" + % type(a)) + return cls._flavour.parse_parts(parts) + + @classmethod + def _from_parts(cls, args, init=True): + # We need to call _parse_args on the instance, so as to get the + # right flavour. + self = object.__new__(cls) + drv, root, parts = self._parse_args(args) + self._drv = drv + self._root = root + self._parts = parts + if init: + self._init() + return self + + @classmethod + def _from_parsed_parts(cls, drv, root, parts, init=True): + self = object.__new__(cls) + self._drv = drv + self._root = root + self._parts = parts + if init: + self._init() + return self + + @classmethod + def _format_parsed_parts(cls, drv, root, parts): + if drv or root: + return drv + root + cls._flavour.join(parts[1:]) + else: + return cls._flavour.join(parts) + + def _init(self): + # Overriden in concrete Path + pass + + def _make_child(self, args): + drv, root, parts = self._parse_args(args) + drv, root, parts = self._flavour.join_parsed_parts( + self._drv, self._root, self._parts, drv, root, parts) + return self._from_parsed_parts(drv, root, parts) + + def __str__(self): + """Return the string representation of the path, suitable for + passing to system calls.""" + try: + return self._str + except AttributeError: + self._str = self._format_parsed_parts(self._drv, self._root, + self._parts) or '.' + return self._str + + def as_posix(self): + """Return the string representation of the path with forward (/) + slashes.""" + f = self._flavour + return str(self).replace(f.sep, '/') + + def __bytes__(self): + """Return the bytes representation of the path. 
This is only + recommended to use under Unix.""" + if sys.version_info < (3, 2): + raise NotImplementedError("needs Python 3.2 or later") + return os.fsencode(str(self)) + + def __repr__(self): + return "{0}({1!r})".format(self.__class__.__name__, self.as_posix()) + + def as_uri(self): + """Return the path as a 'file' URI.""" + if not self.is_absolute(): + raise ValueError("relative path can't be expressed as a file URI") + return self._flavour.make_uri(self) + + @property + def _cparts(self): + # Cached casefolded parts, for hashing and comparison + try: + return self._cached_cparts + except AttributeError: + self._cached_cparts = self._flavour.casefold_parts(self._parts) + return self._cached_cparts + + def __eq__(self, other): + if not isinstance(other, PurePath): + return NotImplemented + return self._cparts == other._cparts and self._flavour is other._flavour + + def __ne__(self, other): + return not self == other + + def __hash__(self): + try: + return self._hash + except AttributeError: + self._hash = hash(tuple(self._cparts)) + return self._hash + + def __lt__(self, other): + if not isinstance(other, PurePath) or self._flavour is not other._flavour: + return NotImplemented + return self._cparts < other._cparts + + def __le__(self, other): + if not isinstance(other, PurePath) or self._flavour is not other._flavour: + return NotImplemented + return self._cparts <= other._cparts + + def __gt__(self, other): + if not isinstance(other, PurePath) or self._flavour is not other._flavour: + return NotImplemented + return self._cparts > other._cparts + + def __ge__(self, other): + if not isinstance(other, PurePath) or self._flavour is not other._flavour: + return NotImplemented + return self._cparts >= other._cparts + + drive = property(attrgetter('_drv'), + doc="""The drive prefix (letter or UNC path), if any.""") + + root = property(attrgetter('_root'), + doc="""The root of the path, if any.""") + + @property + def anchor(self): + """The concatenation of the drive and root, or ''.""" + anchor = self._drv + self._root + return anchor + + @property + def name(self): + """The final path component, if any.""" + parts = self._parts + if len(parts) == (1 if (self._drv or self._root) else 0): + return '' + return parts[-1] + + @property + def suffix(self): + """The final component's last suffix, if any.""" + name = self.name + i = name.rfind('.') + if 0 < i < len(name) - 1: + return name[i:] + else: + return '' + + @property + def suffixes(self): + """A list of the final component's suffixes, if any.""" + name = self.name + if name.endswith('.'): + return [] + name = name.lstrip('.') + return ['.' + suffix for suffix in name.split('.')[1:]] + + @property + def stem(self): + """The final path component, minus its last suffix.""" + name = self.name + i = name.rfind('.') + if 0 < i < len(name) - 1: + return name[:i] + else: + return name + + def with_name(self, name): + """Return a new path with the file name changed.""" + if not self.name: + raise ValueError("%r has an empty name" % (self,)) + return self._from_parsed_parts(self._drv, self._root, + self._parts[:-1] + [name]) + + def with_suffix(self, suffix): + """Return a new path with the file suffix changed (or added, if none).""" + # XXX if suffix is None, should the current suffix be removed? 
+ drv, root, parts = self._flavour.parse_parts((suffix,)) + if drv or root or len(parts) != 1: + raise ValueError("Invalid suffix %r" % (suffix)) + suffix = parts[0] + if not suffix.startswith('.'): + raise ValueError("Invalid suffix %r" % (suffix)) + name = self.name + if not name: + raise ValueError("%r has an empty name" % (self,)) + old_suffix = self.suffix + if not old_suffix: + name = name + suffix + else: + name = name[:-len(old_suffix)] + suffix + return self._from_parsed_parts(self._drv, self._root, + self._parts[:-1] + [name]) + + def relative_to(self, *other): + """Return the relative path to another path identified by the passed + arguments. If the operation is not possible (because this is not + a subpath of the other path), raise ValueError. + """ + # For the purpose of this method, drive and root are considered + # separate parts, i.e.: + # Path('c:/').relative_to('c:') gives Path('/') + # Path('c:/').relative_to('/') raise ValueError + if not other: + raise TypeError("need at least one argument") + parts = self._parts + drv = self._drv + root = self._root + if root: + abs_parts = [drv, root] + parts[1:] + else: + abs_parts = parts + to_drv, to_root, to_parts = self._parse_args(other) + if to_root: + to_abs_parts = [to_drv, to_root] + to_parts[1:] + else: + to_abs_parts = to_parts + n = len(to_abs_parts) + cf = self._flavour.casefold_parts + if (root or drv) if n == 0 else cf(abs_parts[:n]) != cf(to_abs_parts): + formatted = self._format_parsed_parts(to_drv, to_root, to_parts) + raise ValueError("{!r} does not start with {!r}" + .format(str(self), str(formatted))) + return self._from_parsed_parts('', root if n == 1 else '', + abs_parts[n:]) + + @property + def parts(self): + """An object providing sequence-like access to the + components in the filesystem path.""" + # We cache the tuple to avoid building a new one each time .parts + # is accessed. XXX is this necessary? + try: + return self._pparts + except AttributeError: + self._pparts = tuple(self._parts) + return self._pparts + + def joinpath(self, *args): + """Combine this path with one or several arguments, and return a + new path representing either a subpath (if all arguments are relative + paths) or a totally different path (if one of the arguments is + anchored). + """ + return self._make_child(args) + + def __truediv__(self, key): + return self._make_child((key,)) + + def __rtruediv__(self, key): + return self._from_parts([key] + self._parts) + + if sys.version_info < (3,): + __div__ = __truediv__ + __rdiv__ = __rtruediv__ + + @property + def parent(self): + """The logical parent of the path.""" + drv = self._drv + root = self._root + parts = self._parts + if len(parts) == 1 and (drv or root): + return self + return self._from_parsed_parts(drv, root, parts[:-1]) + + @property + def parents(self): + """A sequence of this path's logical parents.""" + return _PathParents(self) + + def is_absolute(self): + """True if the path is absolute (has both a root and, if applicable, + a drive).""" + if not self._root: + return False + return not self._flavour.has_drv or bool(self._drv) + + def is_reserved(self): + """Return True if the path contains one of the special names reserved + by the system, if any.""" + return self._flavour.is_reserved(self._parts) + + def match(self, path_pattern): + """ + Return True if this path matches the given pattern. 
+ """ + cf = self._flavour.casefold + path_pattern = cf(path_pattern) + drv, root, pat_parts = self._flavour.parse_parts((path_pattern,)) + if not pat_parts: + raise ValueError("empty pattern") + if drv and drv != cf(self._drv): + return False + if root and root != cf(self._root): + return False + parts = self._cparts + if drv or root: + if len(pat_parts) != len(parts): + return False + pat_parts = pat_parts[1:] + elif len(pat_parts) > len(parts): + return False + for part, pat in zip(reversed(parts), reversed(pat_parts)): + if not fnmatch.fnmatchcase(part, pat): + return False + return True + + +class PurePosixPath(PurePath): + _flavour = _posix_flavour + __slots__ = () + + +class PureWindowsPath(PurePath): + _flavour = _windows_flavour + __slots__ = () + + +# Filesystem-accessing classes + + +class Path(PurePath): + __slots__ = ( + '_accessor', + ) + + def __new__(cls, *args, **kwargs): + if cls is Path: + cls = WindowsPath if os.name == 'nt' else PosixPath + self = cls._from_parts(args, init=False) + if not self._flavour.is_supported: + raise NotImplementedError("cannot instantiate %r on your system" + % (cls.__name__,)) + self._init() + return self + + def _init(self, + # Private non-constructor arguments + template=None, + ): + if template is not None: + self._accessor = template._accessor + else: + self._accessor = _normal_accessor + + def _make_child_relpath(self, part): + # This is an optimization used for dir walking. `part` must be + # a single part relative to this path. + parts = self._parts + [part] + return self._from_parsed_parts(self._drv, self._root, parts) + + def _opener(self, name, flags, mode=0o666): + # A stub for the opener argument to built-in open() + return self._accessor.open(self, flags, mode) + + def _raw_open(self, flags, mode=0o777): + """ + Open the file pointed by this path and return a file descriptor, + as os.open() does. + """ + return self._accessor.open(self, flags, mode) + + # Public API + + @classmethod + def cwd(cls): + """Return a new path pointing to the current working directory + (as returned by os.getcwd()). + """ + return cls(os.getcwd()) + + def iterdir(self): + """Iterate over the files in this directory. Does not yield any + result for the special paths '.' and '..'. + """ + for name in self._accessor.listdir(self): + if name in ('.', '..'): + # Yielding a path object for these makes little sense + continue + yield self._make_child_relpath(name) + + def glob(self, pattern): + """Iterate over this subtree and yield all existing files (of any + kind, including directories) matching the given pattern. + """ + pattern = self._flavour.casefold(pattern) + drv, root, pattern_parts = self._flavour.parse_parts((pattern,)) + if drv or root: + raise NotImplementedError("Non-relative patterns are unsupported") + selector = _make_selector(tuple(pattern_parts)) + for p in selector.select_from(self): + yield p + + def rglob(self, pattern): + """Recursively yield all existing files (of any kind, including + directories) matching the given pattern, anywhere in this subtree. + """ + pattern = self._flavour.casefold(pattern) + drv, root, pattern_parts = self._flavour.parse_parts((pattern,)) + if drv or root: + raise NotImplementedError("Non-relative patterns are unsupported") + selector = _make_selector(("**",) + tuple(pattern_parts)) + for p in selector.select_from(self): + yield p + + def absolute(self): + """Return an absolute version of this path. This function works + even if the path doesn't point to anything. + + No normalization is done, i.e. all '.' 
and '..' will be kept along. + Use resolve() to get the canonical path to a file. + """ + # XXX untested yet! + if self.is_absolute(): + return self + # FIXME this must defer to the specific flavour (and, under Windows, + # use nt._getfullpathname()) + obj = self._from_parts([os.getcwd()] + self._parts, init=False) + obj._init(template=self) + return obj + + def resolve(self): + """ + Make the path absolute, resolving all symlinks on the way and also + normalizing it (for example turning slashes into backslashes under + Windows). + """ + s = self._flavour.resolve(self) + if s is None: + # No symlink resolution => for consistency, raise an error if + # the path doesn't exist or is forbidden + self.stat() + s = str(self.absolute()) + # Now we have no symlinks in the path, it's safe to normalize it. + normed = self._flavour.pathmod.normpath(s) + obj = self._from_parts((normed,), init=False) + obj._init(template=self) + return obj + + def stat(self): + """ + Return the result of the stat() system call on this path, like + os.stat() does. + """ + return self._accessor.stat(self) + + def owner(self): + """ + Return the login name of the file owner. + """ + import pwd + return pwd.getpwuid(self.stat().st_uid).pw_name + + def group(self): + """ + Return the group name of the file gid. + """ + import grp + return grp.getgrgid(self.stat().st_gid).gr_name + + def open(self, mode='r', buffering=-1, encoding=None, + errors=None, newline=None): + """ + Open the file pointed by this path and return a file object, as + the built-in open() function does. + """ + if sys.version_info >= (3, 3): + return io.open(str(self), mode, buffering, encoding, errors, newline, + opener=self._opener) + else: + return io.open(str(self), mode, buffering, encoding, errors, newline) + + def touch(self, mode=0o666, exist_ok=True): + """ + Create this file with the given access mode, if it doesn't exist. + """ + if exist_ok: + # First try to bump modification time + # Implementation note: GNU touch uses the UTIME_NOW option of + # the utimensat() / futimens() functions. + t = time.time() + try: + self._accessor.utime(self, (t, t)) + except OSError: + # Avoid exception chaining + pass + else: + return + flags = os.O_CREAT | os.O_WRONLY + if not exist_ok: + flags |= os.O_EXCL + fd = self._raw_open(flags, mode) + os.close(fd) + + def mkdir(self, mode=0o777, parents=False): + if not parents: + self._accessor.mkdir(self, mode) + else: + try: + self._accessor.mkdir(self, mode) + except OSError as e: + if e.errno != ENOENT: + raise + self.parent.mkdir(parents=True) + self._accessor.mkdir(self, mode) + + def chmod(self, mode): + """ + Change the permissions of the path, like os.chmod(). + """ + self._accessor.chmod(self, mode) + + def lchmod(self, mode): + """ + Like chmod(), except if the path points to a symlink, the symlink's + permissions are changed, rather than its target's. + """ + self._accessor.lchmod(self, mode) + + def unlink(self): + """ + Remove this file or link. + If the path is a directory, use rmdir() instead. + """ + self._accessor.unlink(self) + + def rmdir(self): + """ + Remove this directory. The directory must be empty. + """ + self._accessor.rmdir(self) + + def lstat(self): + """ + Like stat(), except if the path points to a symlink, the symlink's + status information is returned, rather than its target's. + """ + return self._accessor.lstat(self) + + def rename(self, target): + """ + Rename this path to the given path. 
+ """ + self._accessor.rename(self, target) + + def replace(self, target): + """ + Rename this path to the given path, clobbering the existing + destination if it exists. + """ + if sys.version_info < (3, 3): + raise NotImplementedError("replace() is only available " + "with Python 3.3 and later") + self._accessor.replace(self, target) + + def symlink_to(self, target, target_is_directory=False): + """ + Make this path a symlink pointing to the given path. + Note the order of arguments (self, target) is the reverse of os.symlink's. + """ + self._accessor.symlink(target, self, target_is_directory) + + # Convenience functions for querying the stat results + + def exists(self): + """ + Whether this path exists. + """ + try: + self.stat() + except OSError as e: + if e.errno != ENOENT: + raise + return False + return True + + def is_dir(self): + """ + Whether this path is a directory. + """ + try: + return S_ISDIR(self.stat().st_mode) + except OSError as e: + if e.errno != ENOENT: + raise + # Path doesn't exist or is a broken symlink + # (see https://bitbucket.org/pitrou/pathlib/issue/12/) + return False + + def is_file(self): + """ + Whether this path is a regular file (also True for symlinks pointing + to regular files). + """ + try: + return S_ISREG(self.stat().st_mode) + except OSError as e: + if e.errno != ENOENT: + raise + # Path doesn't exist or is a broken symlink + # (see https://bitbucket.org/pitrou/pathlib/issue/12/) + return False + + def is_symlink(self): + """ + Whether this path is a symbolic link. + """ + try: + return S_ISLNK(self.lstat().st_mode) + except OSError as e: + if e.errno != ENOENT: + raise + # Path doesn't exist + return False + + def is_block_device(self): + """ + Whether this path is a block device. + """ + try: + return S_ISBLK(self.stat().st_mode) + except OSError as e: + if e.errno != ENOENT: + raise + # Path doesn't exist or is a broken symlink + # (see https://bitbucket.org/pitrou/pathlib/issue/12/) + return False + + def is_char_device(self): + """ + Whether this path is a character device. + """ + try: + return S_ISCHR(self.stat().st_mode) + except OSError as e: + if e.errno != ENOENT: + raise + # Path doesn't exist or is a broken symlink + # (see https://bitbucket.org/pitrou/pathlib/issue/12/) + return False + + def is_fifo(self): + """ + Whether this path is a FIFO. + """ + try: + return S_ISFIFO(self.stat().st_mode) + except OSError as e: + if e.errno != ENOENT: + raise + # Path doesn't exist or is a broken symlink + # (see https://bitbucket.org/pitrou/pathlib/issue/12/) + return False + + def is_socket(self): + """ + Whether this path is a socket. 
+ """ + try: + return S_ISSOCK(self.stat().st_mode) + except OSError as e: + if e.errno != ENOENT: + raise + # Path doesn't exist or is a broken symlink + # (see https://bitbucket.org/pitrou/pathlib/issue/12/) + return False + + +class PosixPath(Path, PurePosixPath): + __slots__ = () + +class WindowsPath(Path, PureWindowsPath): + __slots__ = () diff --git a/mylar/webserve.py b/mylar/webserve.py index cbcbad17..984c587d 100644 --- a/mylar/webserve.py +++ b/mylar/webserve.py @@ -6426,3 +6426,14 @@ class WebInterface(object): download_specific_release.exposed = True + def read_comic(self, ish_id, page_num, size): + from mylar.webviewer import WebViewer + wv = WebViewer() + page_num = int(page_num) + #cherrypy.session['ishid'] = ish_id + data = wv.read_comic(ish_id, page_num, size) + #data = wv.read_comic(ish_id) + return data + read_comic.exposed = True + + \ No newline at end of file diff --git a/mylar/webviewer.py b/mylar/webviewer.py new file mode 100644 index 00000000..c9cebe5b --- /dev/null +++ b/mylar/webviewer.py @@ -0,0 +1,163 @@ +import os +import re +import cherrypy +import stat +import zipfile +from lib.rarfile import rarfile + +import mylar + +from PIL import Image +from mylar import logger, db, importer, mb, search, filechecker, helpers, updater, parseit, weeklypull, librarysync, moveit, Failed, readinglist, config +from mylar.webserve import serve_template + +class WebViewer(object): + + def __init__(self): + self.ish_id = None + self.page_num = None + self.kwargs = None + self.data = None + + if not os.path.exists(os.path.join(mylar.DATA_DIR, 'sessions')): + os.makedirs(os.path.abspath(os.path.join(mylar.DATA_DIR, 'sessions'))) + + updatecherrypyconf = { + 'tools.gzip.on': True, + 'tools.gzip.mime_types': ['text/*', 'application/*', 'image/*'], + 'tools.sessions.timeout': 1440, + 'tools.sessions.storage_class': cherrypy.lib.sessions.FileSession, + 'tools.sessions.storage_path': os.path.join(mylar.DATA_DIR, "sessions"), + 'request.show_tracebacks': False, + } + if mylar.CONFIG.HTTP_PASSWORD is None: + updatecherrypyconf.update({ + 'tools.sessions.on': True, + }) + + cherrypy.config.update(updatecherrypyconf) + cherrypy.engine.signals.subscribe() + cherrypy.engine.timeout_monitor.unsubscribe() + + def read_comic(self, ish_id = None, page_num = None, size = None): + logger.debug("WebReader Requested, looking for ish_id %s and page_num %s" % (ish_id, page_num)) + if size == None: + user_size_pref = 'wide' + else: + user_size_pref = size + + try: + ish_id + except: + logger.warn("WebReader: ish_id not set!") + + myDB = db.DBConnection() + comic = myDB.selectone('select comics.ComicLocation, issues.Location from comics, issues where comics.comicid = issues.comicid and issues.issueid = ?' , [ish_id]).fetchone() + if comic is None: + logger.warn("WebReader: ish_id %s requested but not in the database!" 
% ish_id) + raise cherrypy.HTTPRedirect("home") +# cherrypy.config.update() + comic_path = os.path.join(comic['ComicLocation'], comic['Location']) + logger.debug("WebReader found ish_id %s at %s" % (ish_id, comic_path)) + +# cherrypy.session['ish_id'].load() +# if 'sizepref' not in cherrypy.session: +# cherrypy.session['sizepref'] = user_size_pref +# user_size_pref = cherrypy.session['sizepref'] +# logger.debug("WebReader setting user_size_pref to %s" % user_size_pref) + + scanner = ComicScanner() + image_list = scanner.reading_images(ish_id) + logger.debug("Image list contains %s pages" % (len(image_list))) + if len(image_list) == 0: + logger.debug("Unpacking ish_id %s from comic_path %s" % (ish_id, comic_path)) + scanner.user_unpack_comic(ish_id, comic_path) + else: + logger.debug("ish_id %s already unpacked." % ish_id) + + num_pages = len(image_list) + logger.debug("Found %s pages for ish_id %s from comic_path %s" % (num_pages, ish_id, comic_path)) + + if num_pages == 0: + image_list = ['images/skipped_icon.png'] + + cookie_comic = re.sub(r'\W+', '', comic_path) + cookie_comic = "wv_" + cookie_comic.decode('unicode_escape') + logger.debug("about to drop a cookie for " + cookie_comic + " which represents " + comic_path) + cookie_check = cherrypy.request.cookie + if cookie_comic not in cookie_check: + logger.debug("Cookie Creation") + cookie_path = '/' + cookie_maxage = '2419200' + cookie_set = cherrypy.response.cookie + cookie_set['cookie_comic'] = 0 + cookie_set['cookie_comic']['path'] = cookie_path.decode('unicode_escape') + cookie_set['cookie_comic']['max-age'] = cookie_maxage.decode('unicode_escape') + next_page = page_num + 1 + prev_page = page_num - 1 + else: + logger.debug("Cookie Read") + page_num = int(cherrypy.request.cookie['cookie_comic'].value) + logger.debug("Cookie Set To %d" % page_num) + next_page = page_num + 1 + prev_page = page_num - 1 + + logger.info("Reader Served") + logger.debug("Serving comic " + comic['Location'] + " page number " + str(page_num)) + + return serve_template(templatename="read.html", pages=image_list, current_page=page_num, np=next_page, pp=prev_page, nop=num_pages, size=user_size_pref, cc=cookie_comic, comicpath=comic_path, ish_id=ish_id) + + def up_size_pref(self, pref): + cherrypy.session.load() + cherrypy.session['sizepref'] = pref + cherrypy.session.save() + return + +class ComicScanner(object): + + # This method will handle scanning the directories and returning a list of them all. 
+ def dir_scan(self): + logger.debug("Dir Scan Requested") + full_paths = [] + full_paths.append(mylar.CONFIG.DESTINATION_DIR) + for root, dirs, files in os.walk(mylar.CONFIG.DESTINATION_DIR): + full_paths.extend(os.path.join(root, d) for d in dirs) + + logger.info("Dir Scan Completed") + logger.info("%i Dirs Found" % (len(full_paths))) + return full_paths + + def user_unpack_comic(self, ish_id, comic_path): + logger.info("%s unpack requested" % comic_path) + for root, dirs, files in os.walk(os.path.join(mylar.CONFIG.CACHE_DIR, "webviewer", ish_id), topdown=False): + for f in files: + os.chmod(os.path.join(root, f), stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) # 0777 + os.remove(os.path.join(root, f)) + for root, dirs, files in os.walk(os.path.join(mylar.CONFIG.CACHE_DIR, "webviewer", ish_id), topdown=False): + for d in dirs: + os.chmod(os.path.join(root, d), stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) # 0777 + os.rmdir(os.path.join(root, d)) + if comic_path.endswith(".cbr"): + opened_rar = rarfile.RarFile(comic_path) + opened_rar.extractall(os.path.join(mylar.CONFIG.CACHE_DIR, "webviewer", ish_id)) + elif comic_path.endswith(".cbz"): + opened_zip = zipfile.ZipFile(comic_path) + opened_zip.extractall(os.path.join(mylar.CONFIG.CACHE_DIR, "webviewer", ish_id)) + return + + # This method will return a list of .jpg files in their numberical order to be fed into the reading view. + def reading_images(self, ish_id): + logger.debug("Image List Requested") + image_list = [] + image_src = os.path.join(mylar.CONFIG.CACHE_DIR, "webviewer", ish_id) + image_loc = os.path.join(mylar.CONFIG.HTTP_ROOT, 'cache', "webviewer", ish_id) + for root, dirs, files in os.walk(image_src): + for f in files: + if f.endswith((".png", ".gif", ".bmp", ".dib", ".jpg", ".jpeg", ".jpe", ".jif", ".jfif", ".jfi", ".tiff", ".tif")): + image_list.append( os.path.join(image_loc, f) ) + image_list.sort() + logger.debug("Image List Created") + return image_list + + + From 5132dde4adfc33b14196eeceb5a488e2842b5c4d Mon Sep 17 00:00:00 2001 From: Barbeque Sauce Date: Tue, 31 Dec 2019 10:24:51 -0500 Subject: [PATCH 04/26] Update .gitignore --- .gitignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index ea4c0e5a..b859575a 100644 --- a/.gitignore +++ b/.gitignore @@ -11,4 +11,5 @@ Thumbs.db ehtumbs.db Thumbs.db lib/comictaggerlib/ct_settings/ -settings.json \ No newline at end of file +settings.json +.DS_Store From 4f8943b5b5680f47097c4048ebd342ca66c21a01 Mon Sep 17 00:00:00 2001 From: Barbeque Sauce Date: Tue, 31 Dec 2019 10:31:33 -0500 Subject: [PATCH 05/26] IMP: Icon for comicdetails page --- data/interfaces/default/images/readabook.png | Bin 0 -> 8523 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 data/interfaces/default/images/readabook.png diff --git a/data/interfaces/default/images/readabook.png b/data/interfaces/default/images/readabook.png new file mode 100644 index 0000000000000000000000000000000000000000..441c234979dffad35754844ef6872fe6c8015b07 GIT binary patch literal 8523 zcmeHtc|4Tu+xImy%#dY5Sz}a#$Qr^Rlx2qOdu2s?D-sexgU3P=cLh9zB5a&(An0zc*lLI{7$f359QH%Ed$&!3fodqx z!x@5-)gg%bI|Oa4r%pgnygmd?1w#;_2!iBdOIhv~;05}qlY=dIg1;XJTGPQpG?wTe z2cE=#ee9n{?*kmgNwzj#NrSVzGkJmWW#iuxZ@qgsOhnQ)OG}fb-5!dZRV|m%n|V~{ zmqDo1_s;3PXTF=z+BE%srr=O(^p&`+j$h9{D2|UW4Y``NbBEzX5^4|C{w#XOt~{a$ zO^9yI*-p~<`E-7Pk-wc3)N3{z*E|2KX6606$vMaL>*HqQKT><=hm!NS3Hh`h_Woe0 
zSiC-_DAn3|S6Vvy?HTsE0d!oa}}lqgmOA$L>H(8k!G8Cn+Ai6%xhq;*VGIo`|KA~J{Q5KXy$zr=>l(0+~C2MrgUyP&0$X=a_TJr0XGoS{QvRSMA6ZZxypk*}NF8-aEWl5e2V#e82`tNG z_Ds~QcHoz|is9oVx-NqJG;)m)4G9$aPbN%)=mmu;oQ}yWkFn<;4xEC&PYCa)b5jfR z(Pa~~H-nvXJ*ujt8ny@QGKwKgzl}GKbzFF~gA~$&^F^pw~y3~#eK`T(4WmxGOc*8=2v6mNKWiKgxVuGwkX)<1akZizD<#ePobvuQ0k=Onp+bV9% z9+v`K<$0O+7UDca{BXQXBSjTdc{4$=MGTYPV|JNRfAJyUS{VYlDLJFbT^V<`%v0d- z;G>kC9HIim-P_I{MvOa{?AWTigXi+$jc1eN2?~lMlB(FE#S)!_oM@U1o`=V7d5Eh% zfV`eb-cQIBqh_;FfNA%LKNOXfMLRLV`^Y_RPBTF)+nelA&Ounm-1+^!x3M`(DpLNM za4-7WC9*AH3-*xvJLm1n-{#?lX46e?#NEV_#Hqz|S?t4=C3*G}ejb}!ELl>dsLExj zbiJgPicamd3r+jAEpm00oT-^)X#Y;skqWZ& z^U0x7DNnH)Ga%9bN&)Jhw1p$&EpGbmu_ov*$pKv0w9U0HxUMi4LQx<*IH6vEjexug zAF>sL9Vt6KGGZ{Y$dU>r_vMGiRjeb~8J!eUPik#NiJJ<5pqxy7%I8`|nyLr~+7S$` zYyL2VYPx$&1y0@ZdtyTa+rAFN=&0wYMl)Z~flw@15)mV)bQ!0*}gi835J1I07qheEwkcDL`LGZqmFFU}IOD zPVx(%)W{{B0pQe8)S6R~fX1Okmnl7mblaLtiy+A%=tg^$NUew^Taya%Nwl@MDF&0y z)Q+PK*qPjzGk~f$ow!K0o+=9Klc+uepgywelOQU`our_qRB{n+AD`rtgZJowY;d5N zo=__RxXb~c3p-;hx#saP9J=LTa)S)G9G9~-QH?K{zUmkTxqa--0xYZl-q@}@``Q4JdvOrW_EXh0PLw5Bd=XHx#Jmo83@ zi?IQonn3c)QUK#%%WdYCjb-GGWnlIk!aD5R0aDF{ki$1OxTWnPE$Sby{nwiCjWyx_ z!zxgSSo4(ekC+}vwLf$}0FDO0M=#L5u-(m$An*B$Lmrm@ur+{)i-6?@tDaB!j*A~; zfcd+i@_RFxWa}5U>HkhWV0`5fcq6!h@9&Ox{$C{jK+nQCdRR6){Ie%C#8kN*{eO^8 z7ynE0-VHq*jr)h|JzW+;Q|6y#rwRWyI9Y33bKxJFRX6(72&>8i7*_U{+Rfw*?N|cm zF-#_z2q}UL&kboMJwPqxP!$bg4B@+oadgI zou+qpywlA+zzx%gr2bf*2NUSCWUjw6AMcEmpm z=X5m%qWx}yzzF+<(#|FCpDG$N71SO>L*89XN9K{+o?2xHYj1wHqt*a#BSb7f2o)V1 zj4%Vi&O{~9bC{F4WLw^-IJM?2whq*vsm!f_0TM76`@A7EHo!v-#MKMsjkp?NF&sfv zso`KOFt0$3jJo@p?asbEVb8Vfg{rk$sjU?yd57k7x2<|hfp}iX1{|)&DSL#0WTT}l zYLQ{%shEv``f?zEY80yG1_(pbg0DuQw+c) z*$Hjr>ZZq*Kmr;K^^ffa8_^kIRU9%^t7W}HzemY{1gJO@#_3591bLbVB5vxtMQjAH zdN=Q4MrRbTdfA#f2-J}y4qi%$h-w!ZY7@I{!k&RnM*9iP06m-rIVFBjG({KGwj28{ z0HVJDm%X?o=?T!EMCAoA>em-Re#ZesWLMM!J5VZK)dNJ~Xvj{`2()zm;dT~c>}ddW z>7I0wGmwp{gL-qw`+oq_x|a&)(K|r|ojwHyRR~vvA*j{J7_=<(31r_iko*e@snbC9 zX9Ixjv%sl{hNN3Uy`V4HbVtEdFbnujgLY-B{mjqaeh zoqge3h6nGioIphPU6eeETKtM;R3)!^;%15Nf+d`544B z2EAA9J1U0y9Qa%GFUH4eZvboO7+BmN$MCGz1v>Qq82~E*79QkeYQtqo_PsjRbn}bC zSmu6EcR#&GrHN>RNbo;>iE?@>0+`hPf8sEMvZyrjJY z#q-wcHdDEczJsY<it6WtI=>M7JTzC_xWZh_V7*zg7uo z_TzDUa&RJLXH-1`wq#oY^^p`GE+fulT2Ws`1qI05X>)A)W@H`DJHSY70S$s}{dWV{ z74*S1#~?S!Ikhrn{&^s0Q~IXW+zE9&X>0<~L0r9muYYD`$2AolQkfy5<($CgNmMxR zkoHb+1e_A?uwD%gHLmTTw%V#O|4yb(XW}Sz;oL>jSv}In3B+2FpzGf`J)JAjNG-XD z8>%}nFivCkiDluLXKoAam9l#4|8u||_N|nYBzZhgmPpAE*t~I!*U;BXzL7Whq%!p8 zND2DLrZ!xXrM@|9J3?Q@RB{4!4IP+6CPty=&#EaO!NnTfsjD`mo$Pa-%v;KnM6UX{ z4`Vh!Om=R($9iiKoa_0n{N#Jz@OyNjPwvFk0Z6#=}2JD`vR|(U`m@4 z8<9a8!e_@Tl<4>#GZ{fNEINt}Z=7OkP#cgD4tgmb_y~xRst##fMxXCW)|rH@n%+3E z9btz9QwBp4st7bR8M{Kg$yQ)|JS&#fV3|?nkEUX1tt1R$pNNI{%q8-{C@mD#=u|$b z5ha(`E7=0&Dpeo0ip54=U$bP?|6H4>V(bO&OsL7V`?bLek>}xT_N$31LbwoaXQ)9=*zn9y}p zF`8Omxmp&($f#=0)2*ubz75;9wWZX^Nnew7<9xEHKs{1y9Y@osRGKO}$@r5w$xu>v zHiO3vKd$fhl@Sb>_$cp4h#Pi+yNTw3Av%R6@jM6(8b?FX6bk z^4}XH88&CM7KPv7P!V>W!2}Bco~r=8xRalHBN#P#>9|tXA*;Wp$EmlT#U&?hP0HaO*kar`#(wi75v<`8Oj|HqLwX41YYePa>C!Ce4eyXj^Y`@Ycv?cw~DJHgNPo zeJ^_7!9hFyeXKu228CB8Vl5|ESNEFJDVN5jVIvMTT`@8%u;JBzlj}o&^rAW1U2mw@ zA#C1l>4D~#UA>qzC4noubRt~x=uUnFJDXwsUi}NPQ6~b`fOgS|G6-Be$qKbe|*`67wq7Am7fU<6k-Z`1FygTxV8SIef z(*4Qu{P_J=fmKds+8)NhLKNrz+7kl{jhB#WQx33rlX6Se{r+vK9Rh#G%h{X-vmsQe z)X>iqPIQvoq(@)hd@aDSUmhCtH>a6X6J-*A418l>Aex*Y9O=}9)-Sij4Qu50YPZPT zTKaSdZ0;B^{y+j}%hH~sIpxa?;M808OVpB<53cXIbNfl>CsfL2fzi)7OsBFS0FImK zJnl1V$|>8No=>q%jqlE4r(#l0NJl0_R8P*NpPp6;elN1ttHZ`rT(A4S(O`I%$p45od;PRL8%J%PcC$^8x*un%jk~5i@rL}hE6Elr()sdtd 
zO?3rFeB<*7>!@oA>?X$U`PB0QPf>?Z1YfQvuBAE2g|7G*QZ*j1<;Fbje^Suec$X5?At+WTQIj)!uDZH4$Sx`7G^3{cht@z&ZCKLzS=3 zH-8*Z7g7Cd5KJ|eNG6B09DX6>-o(c$);yXf{$z?*^RY^%Ip zrWH_W>m5PKu#n58G%0*Gxb=$adO^U2BAxhK((Ce{II1$|D*9QH@26jB~{GI9(d;*TOnf+@ekSMaeU z>$>wrvd6}8*(0zDm54mC?R$F|qjtqPo9yC|`UD(jmh?e zLt4Y-a72Wy$xdb(7zc@g3xzFDLm@orMIOSM&Jb7l&)5Ix;Xh9BpJ4d^n+dXW$>peh z*pvvNPAjnqg|G82!s=UsaPAgMhVOTsEb=Bi;y3{$|=(ePg&p@)aT+$2Zkn zArT36h+r9h$~RNNq7U3NdeZPt*t55&?AbuZdcuQ@!8%-0TeU{u$jQT$j2DFn zYv=a|iZx9YOqod*Hw7z6Lp|l;^3XLQ70YrJ)O-38(qP@Xhz&b~IWYx`m9`=GIuD^?rhqQm;R3LL?(S##+5l1mzIaw-FN)3?>32p~e3-NPQ5sPf8kW+&3ld^GuR zhS7d66v|ujM{1*Vg+3o7IJ?h25EW>^hwUs?czIkQa3X@ysIK7Cug@lY5Fa{+>YxzO zP-K$&i%pSc`csYODLcp-u%fDGDHcgO#2_JX2H` zc~!>B+V^44dI{qrmPDse!x&0jTkMCh{&$SH<)0Nnek6MFZEVYV3gO5Cv~O?@A!h{ML)Ow+Ux?R?uw)0h55ws=eajV+6P;ls6N~A`uGPlA45Bma|3q8t zkIz^She0>3$=tS%3$>=F*lhdd4*o$vjbi6VITk_*BZQs;KLvzAd-M&=bPV?B80vc& zm=N?03Hp0=^oE=(u>R3sE0 tc|44ur>94WiPehvY`2-oJ6zK`NkuNOmr^xAzC1{V!%*asdDU literal 0 HcmV?d00001 From 00d7107d40c5b20c2f976501b600460b9d4263f3 Mon Sep 17 00:00:00 2001 From: evilhero Date: Fri, 10 Jan 2020 10:29:45 -0500 Subject: [PATCH 06/26] FIX:(#2383) Quick patch to bandaid the incorrect writing of a dict to the searchresults table of the db --- mylar/webserve.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/mylar/webserve.py b/mylar/webserve.py index 984c587d..7d45c3d2 100644 --- a/mylar/webserve.py +++ b/mylar/webserve.py @@ -4692,13 +4692,17 @@ class WebInterface(object): if len(search_matches) > 1: # if we matched on more than one series above, just save those results instead of the entire search result set. for sres in search_matches: + if type(sres['haveit']) == dict: + imp_cid = sres['haveit']['comicid'] + else: + imp_cid = sres['haveit'] cVal = {"SRID": SRID, "comicid": sres['comicid']} #should store ogcname in here somewhere to account for naming conversions above. nVal = {"Series": ComicName, "results": len(search_matches), "publisher": sres['publisher'], - "haveit": sres['haveit'], + "haveit": imp_cid, "name": sres['name'], "deck": sres['deck'], "url": sres['url'], @@ -4707,7 +4711,7 @@ class WebInterface(object): "issues": sres['issues'], "ogcname": ogcname, "comicyear": sres['comicyear']} - logger.fdebug('search_values: [%s]/%s' % (cVal, nVal)) + #logger.fdebug('search_values: [%s]/%s' % (cVal, nVal)) myDB.upsert("searchresults", nVal, cVal) logger.info('[IMPORT] There is more than one result that might be valid - normally this is due to the filename(s) not having enough information for me to use (ie. no volume label/year). Manual intervention is required.') #force the status here just in case @@ -4719,13 +4723,17 @@ class WebInterface(object): # store the search results for series that returned more than one result for user to select later / when they want. # should probably assign some random numeric for an id to reference back at some point. for sres in sresults: + if type(sres['haveit']) == dict: + imp_cid = sres['haveit']['comicid'] + else: + imp_cid = sres['haveit'] cVal = {"SRID": SRID, "comicid": sres['comicid']} #should store ogcname in here somewhere to account for naming conversions above. 
nVal = {"Series": ComicName, "results": len(sresults), "publisher": sres['publisher'], - "haveit": sres['haveit'], + "haveit": imp_cid, "name": sres['name'], "deck": sres['deck'], "url": sres['url'], @@ -6435,5 +6443,3 @@ class WebInterface(object): #data = wv.read_comic(ish_id) return data read_comic.exposed = True - - \ No newline at end of file From 6ffb9387355515f25f465dedae39739e3adcf411 Mon Sep 17 00:00:00 2001 From: evilhero Date: Mon, 13 Jan 2020 12:05:27 -0500 Subject: [PATCH 07/26] FIX: fix for incorrect error variable reference during a failed search --- mylar/search.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mylar/search.py b/mylar/search.py index eb13a953..0c2616a6 100755 --- a/mylar/search.py +++ b/mylar/search.py @@ -762,7 +762,7 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa break except requests.exceptions.RequestException as e: logger.warn('General Error fetching data from %s: %s' % (nzbprov, e)) - if e.r.status_code == 503: + if str(r.status_code) == '503': #HTTP Error 503 logger.warn('Aborting search due to Provider unavailability') foundc['status'] = False From 931cbed1c61a957bf919f4a212f6bc96ce9b1440 Mon Sep 17 00:00:00 2001 From: evilhero <909424+evilhero@users.noreply.github.com> Date: Mon, 20 Jan 2020 13:40:38 -0500 Subject: [PATCH 08/26] FIX:(#2405) When searching annuals would incorrectly grab the wrong issue under certain conditions, FIX: fixed the issue number detection when parsing nzb results due to the use of decimal places as spacers being used by some providers --- mylar/filechecker.py | 9 +++++++-- mylar/search.py | 12 ++++++------ 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/mylar/filechecker.py b/mylar/filechecker.py index 5eb9cf65..58b640e8 100755 --- a/mylar/filechecker.py +++ b/mylar/filechecker.py @@ -272,6 +272,11 @@ class FileChecker(object): logger.fdebug('[SARC] Removed Reading Order sequence from subname. Now set to : %s' % modfilename) #make sure all the brackets are properly spaced apart + if modfilename.find('\s') == -1: + #if no spaces exist, assume decimals being used as spacers (ie. nzb name) + modspacer = '.' 
+ else: + modspacer = ' ' m = re.findall('[^()]+', modfilename) cnt = 1 #2019-12-24----fixed to accomodate naming convention like Amazing Mary Jane (2019) 002.cbr, and to account for brackets properly @@ -279,10 +284,10 @@ class FileChecker(object): while cnt < len(m): #logger.fdebug('[m=%s] modfilename.find: %s' % (m[cnt], modfilename[modfilename.find('('+m[cnt]+')')+len(m[cnt])+2])) #logger.fdebug('mod_1: %s' % modfilename.find('('+m[cnt]+')')) - if modfilename[modfilename.find('('+m[cnt]+')')-1] != ' ' and modfilename.find('('+m[cnt]+')') != -1: + if modfilename[modfilename.find('('+m[cnt]+')')-1] != modspacer and modfilename.find('('+m[cnt]+')') != -1: #logger.fdebug('before_space: %s' % modfilename[modfilename.find('('+m[cnt]+')')-1]) #logger.fdebug('after_space: %s' % modfilename[modfilename.find('('+m[cnt]+')')+len(m[cnt])+2]) - modfilename = '%s%s%s' % (modfilename[:modfilename.find('('+m[cnt]+')')], ' ', modfilename[modfilename.find('('+m[cnt]+')'):]) + modfilename = '%s%s%s' % (modfilename[:modfilename.find('('+m[cnt]+')')], modspacer, modfilename[modfilename.find('('+m[cnt]+')'):]) cnt+=1 except Exception as e: #logger.warn('[ERROR] %s' % e) diff --git a/mylar/search.py b/mylar/search.py index 0c2616a6..940005b4 100755 --- a/mylar/search.py +++ b/mylar/search.py @@ -1109,7 +1109,7 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa logger.fdebug('Cleaned up title to : %s' % cleantitle) #send it to the parser here. - p_comic = filechecker.FileChecker(file=ComicTitle) + p_comic = filechecker.FileChecker(file=ComicTitle, watchcomic=ComicName) parsed_comic = p_comic.listFiles() logger.fdebug('parsed_info: %s' % parsed_comic) @@ -1376,18 +1376,18 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa intIss = 1000 else: intIss = 9999999999 - if parsed_comic['issue_number'] is not None: - logger.fdebug("issue we found for is : %s" % parsed_comic['issue_number']) - comintIss = helpers.issuedigits(parsed_comic['issue_number']) + if filecomic['justthedigits'] is not None: + logger.fdebug("issue we found for is : %s" % filecomic['justthedigits']) + comintIss = helpers.issuedigits(filecomic['justthedigits']) logger.fdebug("integer value of issue we have found : %s" % comintIss) else: comintIss = 11111111111 #do this so that we don't touch the actual value but just use it for comparisons - if parsed_comic['issue_number'] is None: + if filecomic['justthedigits'] is None: pc_in = None else: - pc_in = helpers.issuedigits(parsed_comic['issue_number']) + pc_in = helpers.issuedigits(filecomic['justthedigits']) #issue comparison now as well if int(intIss) == int(comintIss) or all([cmloopit == 4, findcomiciss is None, pc_in is None]) or all([cmloopit == 4, findcomiciss is None, pc_in == 1]): nowrite = False From 52bd27cb6b1377b45e154da238e60e819798c1e8 Mon Sep 17 00:00:00 2001 From: evilhero <909424+evilhero@users.noreply.github.com> Date: Tue, 21 Jan 2020 16:42:29 -0500 Subject: [PATCH 09/26] FIX:(#2404) When changing directory path for a specific series, if the old path didn't exist it would log an error and not update the current page --- mylar/webserve.py | 31 +++++++++++++++++++++---------- 1 file changed, 21 insertions(+), 10 deletions(-) diff --git a/mylar/webserve.py b/mylar/webserve.py index 7d45c3d2..ddf2f19a 100644 --- a/mylar/webserve.py +++ b/mylar/webserve.py @@ -5295,28 +5295,39 @@ class WebInterface(object): newValues['AlternateFileName'] = str(alt_filename) #force the check/creation of directory com_location here + 
updatedir = True if any([mylar.CONFIG.CREATE_FOLDERS is True, os.path.isdir(orig_location)]): if os.path.isdir(str(com_location)): logger.info(u"Validating Directory (" + str(com_location) + "). Already exists! Continuing...") else: - if orig_location != com_location: + if orig_location != com_location and os.path.isdir(orig_location) is True: logger.fdebug('Renaming existing location [%s] to new location: %s' % (orig_location, com_location)) try: os.rename(orig_location, com_location) except Exception as e: - logger.warn('Unable to rename existing directory: %s' % e) - return + if 'No such file or directory' in e: + checkdirectory = filechecker.validateAndCreateDirectory(com_location, True) + if not checkdirectory: + logger.warn('Error trying to validate/create directory. Aborting this process at this time.') + updatedir = False + else: + logger.warn('Unable to rename existing directory: %s' % e) + updatedir = False else: - logger.fdebug("Updated Directory doesn't exist! - attempting to create now.") + if orig_location != com_location and os.path.isdir(orig_location) is False: + logger.fdebug("Original Directory (%s) doesn't exist! - attempting to create new directory (%s)" % (orig_location, com_location)) + else: + logger.fdebug("Updated Directory doesn't exist! - attempting to create now.") checkdirectory = filechecker.validateAndCreateDirectory(com_location, True) if not checkdirectory: logger.warn('Error trying to validate/create directory. Aborting this process at this time.') - return - - newValues['ComicLocation'] = com_location - - myDB.upsert("comics", newValues, controlValueDict) - logger.fdebug('Updated Series options!') + updatedir = False + else: + logger.info('[Create directories False] Not creating physical directory, but updating series location in dB to: %s' % com_location) + if updatedir is True: + newValues['ComicLocation'] = com_location + myDB.upsert("comics", newValues, controlValueDict) + logger.fdebug('Updated Series options!') raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID) comic_config.exposed = True From 251c59808cfbb4882af2abb30b84437246dba5ad Mon Sep 17 00:00:00 2001 From: evilhero <909424+evilhero@users.noreply.github.com> Date: Mon, 27 Jan 2020 21:37:23 -0500 Subject: [PATCH 10/26] FIX:(#2408) filenames containing a '@' would not be recognized during a recheck/refresh --- mylar/filechecker.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mylar/filechecker.py b/mylar/filechecker.py index 58b640e8..4b7b7668 100755 --- a/mylar/filechecker.py +++ b/mylar/filechecker.py @@ -378,13 +378,14 @@ class FileChecker(object): ret_sf1 = ' '.join(sf) #here we should account for some characters that get stripped out due to the regex's - #namely, unique characters - known so far: +, & + #namely, unique characters - known so far: +, &, @ #c11 = '\+' #f11 = '\&' #g11 = '\'' ret_sf1 = re.sub('\+', 'c11', ret_sf1).strip() ret_sf1 = re.sub('\&', 'f11', ret_sf1).strip() ret_sf1 = re.sub('\'', 'g11', ret_sf1).strip() + ret_sf1 = re.sub('\@', 'h11', ret_sf1).strip() #split_file = re.findall('(?imu)\([\w\s-]+\)|[-+]?\d*\.\d+|\d+[\s]COVERS+|\d{4}-\d{2}-\d{2}|\d+[(th|nd|rd|st)]+|\d+|[\w-]+|#?\d\.\d+|#[\.-]\w+|#[\d*\.\d+|\w+\d+]+|#(? 
Date: Thu, 9 Jan 2020 12:28:58 -0500 Subject: [PATCH 11/26] IMP: OPDS page size as configuration option (#2393) --- Mylar.py | 1 + data/interfaces/default/config.html | 4 ++++ mylar/config.py | 1 + mylar/opds.py | 2 +- mylar/webserve.py | 3 ++- 5 files changed, 9 insertions(+), 2 deletions(-) diff --git a/Mylar.py b/Mylar.py index b4b86877..a313f50f 100755 --- a/Mylar.py +++ b/Mylar.py @@ -308,6 +308,7 @@ def main(): 'opds_authentication': mylar.CONFIG.OPDS_AUTHENTICATION, 'opds_username': mylar.CONFIG.OPDS_USERNAME, 'opds_password': mylar.CONFIG.OPDS_PASSWORD, + 'opds_pagesize': mylar.CONFIG.OPDS_PAGESIZE, } # Try to start the server. diff --git a/data/interfaces/default/config.html b/data/interfaces/default/config.html index 6e467066..14bf7981 100644 --- a/data/interfaces/default/config.html +++ b/data/interfaces/default/config.html @@ -244,6 +244,10 @@
Access the OPDS server at http://mylarhost/opds/ - keep in mind your scheme (http or https), your hostname, port, and any http_root you may have set.
+
+ + +
<% opds_notes = "Require authentication for OPDS. If checked\nyou will need to provide a username/password.\nThe service user name will work (if set). Additionally,\nyou can provide a user with only OPDS access below.\nNOTE: If this is not checked, OPDS will be available\nwithout a password." diff --git a/mylar/config.py b/mylar/config.py index 41accffc..374dad14 100644 --- a/mylar/config.py +++ b/mylar/config.py @@ -368,6 +368,7 @@ _CONFIG_DEFINITIONS = OrderedDict({ 'OPDS_USERNAME': (str, 'OPDS', None), 'OPDS_PASSWORD': (str, 'OPDS', None), 'OPDS_METAINFO': (bool, 'OPDS', False), + 'OPDS_PAGESIZE': (int, 'OPDS', 30), }) diff --git a/mylar/opds.py b/mylar/opds.py index 299ba194..e2bb4faf 100644 --- a/mylar/opds.py +++ b/mylar/opds.py @@ -39,7 +39,7 @@ class OPDS(object): def __init__(self): self.cmd = None - self.PAGE_SIZE=30 + self.PAGE_SIZE=mylar.CONFIG.OPDS_PAGESIZE self.img = None self.issue_id = None self.file = None diff --git a/mylar/webserve.py b/mylar/webserve.py index ddf2f19a..c1c2e03a 100644 --- a/mylar/webserve.py +++ b/mylar/webserve.py @@ -5149,6 +5149,7 @@ class WebInterface(object): "opds_username": mylar.CONFIG.OPDS_USERNAME, "opds_password": mylar.CONFIG.OPDS_PASSWORD, "opds_metainfo": helpers.checked(mylar.CONFIG.OPDS_METAINFO), + "opds_pagesize": mylar.CONFIG.OPDS_PAGESIZE, "dlstats": dlprovstats, "dltotals": freq_tot, "alphaindex": mylar.CONFIG.ALPHAINDEX @@ -5397,7 +5398,7 @@ class WebInterface(object): 'lowercase_filenames', 'autowant_upcoming', 'autowant_all', 'comic_cover_local', 'alternate_latest_series_covers', 'cvinfo', 'snatchedtorrent_notify', 'prowl_enabled', 'prowl_onsnatch', 'pushover_enabled', 'pushover_onsnatch', 'boxcar_enabled', 'boxcar_onsnatch', 'pushbullet_enabled', 'pushbullet_onsnatch', 'telegram_enabled', 'telegram_onsnatch', 'slack_enabled', 'slack_onsnatch', - 'email_enabled', 'email_enc', 'email_ongrab', 'email_onpost', 'opds_enable', 'opds_authentication', 'opds_metainfo', 'enable_ddl', 'deluge_pause'] #enable_public + 'email_enabled', 'email_enc', 'email_ongrab', 'email_onpost', 'opds_enable', 'opds_authentication', 'opds_metainfo', 'opds_pagesize', 'enable_ddl', 'deluge_pause'] #enable_public for checked_config in checked_configs: if checked_config not in kwargs: From ebc86eb8c1a17de5cf16eded77d94f7e6fd27093 Mon Sep 17 00:00:00 2001 From: Barbeque Sauce Date: Thu, 9 Jan 2020 13:22:52 -0500 Subject: [PATCH 12/26] FIX: throw error if PIL or pillow are not installed --- mylar/webviewer.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/mylar/webviewer.py b/mylar/webviewer.py index c9cebe5b..e5976ca1 100644 --- a/mylar/webviewer.py +++ b/mylar/webviewer.py @@ -7,7 +7,12 @@ from lib.rarfile import rarfile import mylar -from PIL import Image +try: + from PIL import Image +except ImportError: + logger.debug("WebReader Requested, but PIL or pillow libraries must be installed. 
Please execute 'pip install pillow', then restart Mylar.") + return serve_template(templatename="index.html", title="Home", comics=comics, alphaindex=mylar.CONFIG.ALPHAINDEX) + from mylar import logger, db, importer, mb, search, filechecker, helpers, updater, parseit, weeklypull, librarysync, moveit, Failed, readinglist, config from mylar.webserve import serve_template From 52fb8dc1c37140ac372d0c9e45607f1dc629a0fb Mon Sep 17 00:00:00 2001 From: Bart274 Date: Fri, 10 Jan 2020 10:31:08 +0100 Subject: [PATCH 13/26] add download info to Snatched notification for Telegram --- mylar/search.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mylar/search.py b/mylar/search.py index 940005b4..427a3087 100755 --- a/mylar/search.py +++ b/mylar/search.py @@ -2751,7 +2751,7 @@ def notify_snatch(sent_to, comicname, comyear, IssueNumber, nzbprov, pack): if mylar.CONFIG.TELEGRAM_ENABLED and mylar.CONFIG.TELEGRAM_ONSNATCH: logger.info(u"Sending Telegram notification") telegram = notifiers.TELEGRAM() - telegram.notify(snline + " - " + snatched_name) + telegram.notify(snline + "%s - %s - Mylar %s" % (snline, snatched_name, sent_to)) if mylar.CONFIG.SLACK_ENABLED and mylar.CONFIG.SLACK_ONSNATCH: logger.info(u"Sending Slack notification") slack = notifiers.SLACK() From 295ab9e6381aee4e83ae3f9681ba5053f1239abf Mon Sep 17 00:00:00 2001 From: evilhero <909424+evilhero@users.noreply.github.com> Date: Mon, 27 Jan 2020 21:58:05 -0500 Subject: [PATCH 14/26] removed extra snline reference" --- mylar/search.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mylar/search.py b/mylar/search.py index 427a3087..92bb6742 100755 --- a/mylar/search.py +++ b/mylar/search.py @@ -2751,7 +2751,7 @@ def notify_snatch(sent_to, comicname, comyear, IssueNumber, nzbprov, pack): if mylar.CONFIG.TELEGRAM_ENABLED and mylar.CONFIG.TELEGRAM_ONSNATCH: logger.info(u"Sending Telegram notification") telegram = notifiers.TELEGRAM() - telegram.notify(snline + "%s - %s - Mylar %s" % (snline, snatched_name, sent_to)) + telegram.notify("%s - %s - Mylar %s" % (snline, snatched_name, sent_to)) if mylar.CONFIG.SLACK_ENABLED and mylar.CONFIG.SLACK_ONSNATCH: logger.info(u"Sending Slack notification") slack = notifiers.SLACK() From 45dc95d3045f582d15ccf310491298952c418151 Mon Sep 17 00:00:00 2001 From: Barbeque Sauce Date: Mon, 13 Jan 2020 14:50:13 -0500 Subject: [PATCH 15/26] FIX: timeout_monitor was removed from cherrypy in 12.0 --- mylar/webviewer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mylar/webviewer.py b/mylar/webviewer.py index e5976ca1..e94c8821 100644 --- a/mylar/webviewer.py +++ b/mylar/webviewer.py @@ -34,6 +34,7 @@ class WebViewer(object): 'tools.sessions.storage_class': cherrypy.lib.sessions.FileSession, 'tools.sessions.storage_path': os.path.join(mylar.DATA_DIR, "sessions"), 'request.show_tracebacks': False, + 'engine.timeout_monitor.on': False, } if mylar.CONFIG.HTTP_PASSWORD is None: updatecherrypyconf.update({ @@ -42,7 +43,6 @@ class WebViewer(object): cherrypy.config.update(updatecherrypyconf) cherrypy.engine.signals.subscribe() - cherrypy.engine.timeout_monitor.unsubscribe() def read_comic(self, ish_id = None, page_num = None, size = None): logger.debug("WebReader Requested, looking for ish_id %s and page_num %s" % (ish_id, page_num)) From 7df84f51ac876d4e3b15d5b35ed44076eefe5984 Mon Sep 17 00:00:00 2001 From: Bart274 Date: Fri, 31 Jan 2020 12:40:22 +0100 Subject: [PATCH 16/26] Typo in log message --- mylar/importer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
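For the Telegram change in PATCH 13 and its correction in PATCH 14 above, a standalone sketch of the resulting snatch notification call, assuming the standard Telegram Bot API endpoint (the TELEGRAM_API constant and the helper name below are assumptions, not taken from the patches):

    import requests

    # Assumed endpoint pattern behind the notifier's self.TELEGRAM_API.
    TELEGRAM_API = 'https://api.telegram.org/bot%s/%s'

    def send_snatch_notification(token, chat_id, snline, snatched_name, sent_to):
        # One formatted line, matching the corrected call from PATCH 14
        # (PATCH 13 accidentally prepended snline twice).
        message = '%s - %s - Mylar %s' % (snline, snatched_name, sent_to)
        payload = {'chat_id': chat_id, 'text': message}
        try:
            response = requests.post(TELEGRAM_API % (token, 'sendMessage'), json=payload, verify=True)
        except requests.exceptions.RequestException:
            return False
        return response.status_code == 200

Later in this series, PATCH 17 adds an optional image to the same notifier via sendPhoto, PATCH 18 folds the two branches together, and PATCH 19 falls back to this plain-text call when the photo post fails.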
diff --git a/mylar/importer.py b/mylar/importer.py index 43480379..ce1b67cf 100644 --- a/mylar/importer.py +++ b/mylar/importer.py @@ -176,7 +176,7 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No logger.info('Corrected year of ' + str(SeriesYear) + ' to corrected year for series that was manually entered previously of ' + str(csyear)) SeriesYear = csyear - logger.info('Sucessfully retrieved details for ' + comic['ComicName']) + logger.info('Successfully retrieved details for ' + comic['ComicName']) #since the weekly issue check could return either annuals or issues, let's initialize it here so it carries through properly. weeklyissue_check = [] From d96265d008c991375d520e3dfea77ded7b05d5d0 Mon Sep 17 00:00:00 2001 From: Bart274 Date: Fri, 31 Jan 2020 14:13:44 +0100 Subject: [PATCH 17/26] send imageurl to notifiers --- mylar/PostProcessor.py | 16 ++++++++++++---- mylar/notifiers.py | 26 ++++++++++++++++++-------- 2 files changed, 30 insertions(+), 12 deletions(-) diff --git a/mylar/PostProcessor.py b/mylar/PostProcessor.py index a0b29ac9..acc99bb4 100755 --- a/mylar/PostProcessor.py +++ b/mylar/PostProcessor.py @@ -1942,8 +1942,12 @@ class PostProcessor(object): logger.info('%s Post-Processing completed for: [ %s #%s ] %s' % (module, comicname, issuenumber, grab_dst)) self._log(u"Post Processing SUCCESSFUL! ") + imageUrl = myDB.select('SELECT ImageURL from issues WHERE IssueID=?', [issueid]) + if imageUrl: + imageUrl = imageUrl[0][0] + try: - self.sendnotify(comicname, issueyear=None, issuenumOG=issuenumber, annchk=annchk, module=module) + self.sendnotify(comicname, issueyear=None, issuenumOG=issuenumber, annchk=annchk, module=module, imageUrl=imageUrl) except: pass @@ -2771,7 +2775,11 @@ class PostProcessor(object): # self.sendnotify(series, issueyear, dispiss, annchk, module) # return self.queue.put(self.valreturn) - self.sendnotify(series, issueyear, dispiss, annchk, module) + imageUrl = myDB.select('SELECT ImageURL from issues WHERE IssueID=?', [issueid]) + if imageUrl: + imageUrl = imageUrl[0][0] + + self.sendnotify(series, issueyear, dispiss, annchk, module, imageUrl) logger.info('%s Post-Processing completed for: %s %s' % (module, series, dispiss)) self._log(u"Post Processing SUCCESSFUL! 
") @@ -2784,7 +2792,7 @@ class PostProcessor(object): return self.queue.put(self.valreturn) - def sendnotify(self, series, issueyear, issuenumOG, annchk, module): + def sendnotify(self, series, issueyear, issuenumOG, annchk, module, imageUrl): if issueyear is None: prline = '%s %s' % (series, issuenumOG) @@ -2812,7 +2820,7 @@ class PostProcessor(object): if mylar.CONFIG.TELEGRAM_ENABLED: telegram = notifiers.TELEGRAM() - telegram.notify(prline2) + telegram.notify(prline2, imageUrl) if mylar.CONFIG.SLACK_ENABLED: slack = notifiers.SLACK() diff --git a/mylar/notifiers.py b/mylar/notifiers.py index 663c2a5b..b7d81f5f 100644 --- a/mylar/notifiers.py +++ b/mylar/notifiers.py @@ -340,15 +340,25 @@ class TELEGRAM: else: self.token = test_token - def notify(self, message): - # Construct message - payload = {'chat_id': self.userid, 'text': message} + def notify(self, message, imageurl=None): + if imageurl: + # Construct message + payload = {'chat_id': self.userid, 'caption': message, 'photo': imageurl} - # Send message to user using Telegram's Bot API - try: - response = requests.post(self.TELEGRAM_API % (self.token, "sendMessage"), json=payload, verify=True) - except Exception, e: - logger.info(u'Telegram notify failed: ' + str(e)) + # Send message to user using Telegram's Bot API + try: + response = requests.post(self.TELEGRAM_API % (self.token, "sendPhoto"), json=payload, verify=True) + except Exception, e: + logger.info(u'Telegram notify failed: ' + str(e)) + else: + # Construct message + payload = {'chat_id': self.userid, 'text': message} + + # Send message to user using Telegram's Bot API + try: + response = requests.post(self.TELEGRAM_API % (self.token, "sendMessage"), json=payload, verify=True) + except Exception, e: + logger.info(u'Telegram notify failed: ' + str(e)) # Error logging sent_successfuly = True From 7f44f4a278cfa08300256c870f5ad4a3c1af003d Mon Sep 17 00:00:00 2001 From: Bart274 Date: Fri, 31 Jan 2020 14:38:38 +0100 Subject: [PATCH 18/26] cleanup of code --- mylar/notifiers.py | 30 +++++++++++++----------------- 1 file changed, 13 insertions(+), 17 deletions(-) diff --git a/mylar/notifiers.py b/mylar/notifiers.py index b7d81f5f..509168ed 100644 --- a/mylar/notifiers.py +++ b/mylar/notifiers.py @@ -340,25 +340,21 @@ class TELEGRAM: else: self.token = test_token - def notify(self, message, imageurl=None): - if imageurl: + def notify(self, message, imageUrl=None): + # Construct message + payload = {'chat_id': self.userid, 'text': message} + sendMethod = "sendMessage" + + if imageUrl: # Construct message - payload = {'chat_id': self.userid, 'caption': message, 'photo': imageurl} + payload = {'chat_id': self.userid, 'caption': message, 'photo': imageUrl} + sendMethod = "sendPhoto" - # Send message to user using Telegram's Bot API - try: - response = requests.post(self.TELEGRAM_API % (self.token, "sendPhoto"), json=payload, verify=True) - except Exception, e: - logger.info(u'Telegram notify failed: ' + str(e)) - else: - # Construct message - payload = {'chat_id': self.userid, 'text': message} - - # Send message to user using Telegram's Bot API - try: - response = requests.post(self.TELEGRAM_API % (self.token, "sendMessage"), json=payload, verify=True) - except Exception, e: - logger.info(u'Telegram notify failed: ' + str(e)) + # Send message to user using Telegram's Bot API + try: + response = requests.post(self.TELEGRAM_API % (self.token, sendMethod), json=payload, verify=True) + except Exception as e: + logger.info('Telegram notify failed: ' + str(e)) # Error logging 
sent_successfuly = True From 563439acdf42359976e2e980b587f256d9f65aa5 Mon Sep 17 00:00:00 2001 From: Bart274 Date: Fri, 31 Jan 2020 15:41:12 +0100 Subject: [PATCH 19/26] fallback to default text notification in case of errors --- mylar/notifiers.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/mylar/notifiers.py b/mylar/notifiers.py index 509168ed..52ce656b 100644 --- a/mylar/notifiers.py +++ b/mylar/notifiers.py @@ -357,13 +357,16 @@ class TELEGRAM: logger.info('Telegram notify failed: ' + str(e)) # Error logging - sent_successfuly = True + sent_successfully = True if not response.status_code == 200: logger.info(u'Could not send notification to TelegramBot (token=%s). Response: [%s]' % (self.token, response.text)) - sent_successfuly = False + sent_successfully = False + + if not sent_successfully and sendMethod != "sendMessage": + return self.notify(message) logger.info(u"Telegram notifications sent.") - return sent_successfuly + return sent_successfully def test_notify(self): return self.notify('Test Message: Release the Ninjas!') From f49f563e301239880a42c209484eaf1899d39654 Mon Sep 17 00:00:00 2001 From: evilhero <909424+evilhero@users.noreply.github.com> Date: Thu, 6 Feb 2020 22:07:45 -0500 Subject: [PATCH 20/26] FIX: fixes DDL error when search results contain an unexpected parsing result --- mylar/getcomics.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mylar/getcomics.py b/mylar/getcomics.py index eb483c45..49b87c51 100644 --- a/mylar/getcomics.py +++ b/mylar/getcomics.py @@ -124,7 +124,7 @@ class GC(object): option_find = f.find("p", {"style": "text-align: center;"}) i = 0 - while i <= 2: + while (i <= 2 and option_find is not None): option_find = option_find.findNext(text=True) if 'Year' in option_find: year = option_find.findNext(text=True) From e604c3d2166daab87321244542cda53aae085779 Mon Sep 17 00:00:00 2001 From: Barbeque Sauce Date: Thu, 20 Feb 2020 18:51:22 -0500 Subject: [PATCH 21/26] Fix: save state across page loads --- data/interfaces/default/weeklypull.html | 2 ++ 1 file changed, 2 insertions(+) diff --git a/data/interfaces/default/weeklypull.html b/data/interfaces/default/weeklypull.html index cc462308..f840f14b 100755 --- a/data/interfaces/default/weeklypull.html +++ b/data/interfaces/default/weeklypull.html @@ -369,6 +369,8 @@ "sInfoFiltered":"(filtered from _MAX_ total issues)", "sSearch": ""}, "bStateSave": true, + "StateSave": true, + "StateDuration": 0, "iDisplayLength": 25, "sPaginationType": "full_numbers", "aaSorting": [[0, 'asc']] From 9627a344bab330f7fb549d0e99e660513a22a5b3 Mon Sep 17 00:00:00 2001 From: AJ Slater Date: Fri, 21 Feb 2020 21:25:39 -0800 Subject: [PATCH 22/26] implement torznab test --- data/interfaces/default/config.html | 19 +++++++++++---- mylar/config.py | 8 +++---- mylar/helpers.py | 36 +++++++++++++++++++++++++++++ mylar/webserve.py | 28 ++++++++++++++++++++-- 4 files changed, 80 insertions(+), 11 deletions(-) diff --git a/data/interfaces/default/config.html b/data/interfaces/default/config.html index 14bf7981..fb594f54 100644 --- a/data/interfaces/default/config.html +++ b/data/interfaces/default/config.html @@ -897,6 +897,10 @@ else: torznab_enabled = "" + if torznab[2] == '1' or torznab[2] == 1: + torznab_verify = "checked" + else: + torznab_verify = "" %>
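PATCH 22 widens each stored torznab entry from five fields to six by inserting the TLS-verify flag after the host, and the config.py hunks later in this patch regroup the flat comma-joined string in chunks of six to match. A small sketch of that layout and parsing, with made-up sample values (the parse_extra_torznabs name is illustrative; the real method is Config.get_extra_torznabs):

    def parse_extra_torznabs(raw):
        # Regroup the flat 'name, host, verify, apikey, category, enabled' string
        # in chunks of six, mirroring get_extra_torznabs() after this patch.
        return list(zip(*[iter(raw.split(', '))] * 6))

    sample = 'ExampleTorz, https://torznab.example/api, 1, abcdef123456, 8020, 1'
    for name, host, verify, apikey, category, enabled in parse_extra_torznabs(sample):
        use_tls_verify = (verify == '1')   # third field is the new verify flag
        is_enabled = (enabled == '1')      # sixth field gates the provider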
@@ -908,13 +912,17 @@
+
+ + +
- +
- +
@@ -2183,7 +2191,7 @@ $("#add_torznab").click(function() { var intId = $("#torznab_providers > div").size() + deletedTorznabs + 1; - var torformfields = $("
"); + var torformfields = $("
"); var tortestButton = $("
"); var torremoveButton = $("
"); torremoveButton.click(function() { @@ -2326,10 +2334,11 @@ $(".torznabtest").click(function () { var torznab = this.attributes["name"].value.replace('torznab_test', ''); - var imagechk = document.getElementById("tornabstatus"+torznab); + var imagechk = document.getElementById("torznabstatus"+torznab); var name = document.getElementById("torznab_name"+torznab).value; var host = document.getElementById("torznab_host"+torznab).value; - var apikey = document.getElementById("torznab_api"+torznab).value; + var ssl = document.getElementById("torznab_verify"+torznab).checked; + var apikey = document.getElementById("torznab_apikey"+torznab).value; $.get("testtorznab", { name: name, host: host, ssl: ssl, apikey: apikey }, function(data){ diff --git a/mylar/config.py b/mylar/config.py index 374dad14..3fd428b3 100644 --- a/mylar/config.py +++ b/mylar/config.py @@ -556,7 +556,7 @@ class Config(object): if self.CONFIG_VERSION < 8: print('Checking for existing torznab configuration...') if not any([self.TORZNAB_NAME is None, self.TORZNAB_HOST is None, self.TORZNAB_APIKEY is None, self.TORZNAB_CATEGORY is None]): - torznabs =[(self.TORZNAB_NAME, self.TORZNAB_HOST, self.TORZNAB_APIKEY, self.TORZNAB_CATEGORY, str(int(self.ENABLE_TORZNAB)))] + torznabs =[(self.TORZNAB_NAME, self.TORZNAB_HOST, self.TORZNAB_VERIFY, self.TORZNAB_APIKEY, self.TORZNAB_CATEGORY, str(int(self.ENABLE_TORZNAB)))] setattr(self, 'EXTRA_TORZNABS', torznabs) config.set('Torznab', 'EXTRA_TORZNABS', str(torznabs)) print('Successfully converted existing torznab for multiple configuration allowance. Removing old references.') @@ -564,9 +564,9 @@ class Config(object): print('No existing torznab configuration found. Just removing config references at this point..') config.remove_option('Torznab', 'torznab_name') config.remove_option('Torznab', 'torznab_host') + config.remove_option('Torznab', 'torznab_verify') config.remove_option('Torznab', 'torznab_apikey') config.remove_option('Torznab', 'torznab_category') - config.remove_option('Torznab', 'torznab_verify') print('Successfully removed outdated config entries.') if self.newconfig < 9: #rejig rtorrent settings due to change. 
@@ -1112,7 +1112,7 @@ class Config(object): return extra_newznabs def get_extra_torznabs(self): - extra_torznabs = zip(*[iter(self.EXTRA_TORZNABS.split(', '))]*5) + extra_torznabs = zip(*[iter(self.EXTRA_TORZNABS.split(', '))]*6) return extra_torznabs def provider_sequence(self): @@ -1155,7 +1155,7 @@ class Config(object): if self.ENABLE_TORZNAB: for ets in self.EXTRA_TORZNABS: - if str(ets[4]) == '1': # if torznabs are enabled + if str(ets[5]) == '1': # if torznabs are enabled if ets[0] == "": et_name = ets[1] else: diff --git a/mylar/helpers.py b/mylar/helpers.py index 101102ea..501e180e 100755 --- a/mylar/helpers.py +++ b/mylar/helpers.py @@ -3657,6 +3657,42 @@ def newznab_test(name, host, ssl, apikey): logger.info('[ERROR:%s] - %s' % (code, description)) return False +def torznab_test(name, host, ssl, apikey): + from xml.dom.minidom import parseString, Element + params = {'t': 'search', + 'apikey': apikey, + 'o': 'xml'} + + if host[-1:] == '/': + host = host[:-1] + headers = {'User-Agent': str(mylar.USER_AGENT)} + logger.info('host: %s' % host) + try: + r = requests.get(host, params=params, headers=headers, verify=bool(ssl)) + except Exception as e: + logger.warn('Unable to connect: %s' % e) + return + else: + try: + data = parseString(r.content) + except Exception as e: + logger.warn('[WARNING] Error attempting to test: %s' % e) + + try: + error_code = data.getElementsByTagName('error')[0].attributes['code'].value + except Exception as e: + logger.info('Connected - Status code returned: %s' % r.status_code) + if r.status_code == 200: + return True + else: + logger.warn('Received response - Status code returned: %s' % r.status_code) + return False + + code = error_code + description = data.getElementsByTagName('error')[0].attributes['description'].value + logger.info('[ERROR:%s] - %s' % (code, description)) + return False + def get_free_space(folder): min_threshold = 100000000 #threshold for minimum amount of freespace available (#100mb) if platform.system() == "Windows": diff --git a/mylar/webserve.py b/mylar/webserve.py index c1c2e03a..f24ed40f 100644 --- a/mylar/webserve.py +++ b/mylar/webserve.py @@ -5017,7 +5017,7 @@ class WebInterface(object): "dognzb_verify": helpers.checked(mylar.CONFIG.DOGNZB_VERIFY), "experimental": helpers.checked(mylar.CONFIG.EXPERIMENTAL), "enable_torznab": helpers.checked(mylar.CONFIG.ENABLE_TORZNAB), - "extra_torznabs": sorted(mylar.CONFIG.EXTRA_TORZNABS, key=itemgetter(4), reverse=True), + "extra_torznabs": sorted(mylar.CONFIG.EXTRA_TORZNABS, key=itemgetter(5), reverse=True), "newznab": helpers.checked(mylar.CONFIG.NEWZNAB), "extra_newznabs": sorted(mylar.CONFIG.EXTRA_NEWZNABS, key=itemgetter(5), reverse=True), "enable_ddl": helpers.checked(mylar.CONFIG.ENABLE_DDL), @@ -5323,10 +5323,12 @@ class WebInterface(object): if not checkdirectory: logger.warn('Error trying to validate/create directory. 
Aborting this process at this time.') updatedir = False + else: logger.info('[Create directories False] Not creating physical directory, but updating series location in dB to: %s' % com_location) if updatedir is True: newValues['ComicLocation'] = com_location + myDB.upsert("comics", newValues, controlValueDict) logger.fdebug('Updated Series options!') raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID) @@ -5447,6 +5449,10 @@ class WebInterface(object): if torznab_name == "": continue torznab_host = helpers.clean_url(kwargs['torznab_host' + torznab_number]) + try: + torznab_verify = kwargs['torznab_verify' + torznab_number] + except: + torznab_verify = 0 torznab_api = kwargs['torznab_apikey' + torznab_number] torznab_category = kwargs['torznab_category' + torznab_number] try: @@ -5456,7 +5462,7 @@ class WebInterface(object): del kwargs[kwarg] - mylar.CONFIG.EXTRA_TORZNABS.append((torznab_name, torznab_host, torznab_api, torznab_category, torznab_enabled)) + mylar.CONFIG.EXTRA_TORZNABS.append((torznab_name, torznab_host, torznab_verify, torznab_api, torznab_category, torznab_enabled)) mylar.CONFIG.process_kwargs(kwargs) @@ -6027,6 +6033,24 @@ class WebInterface(object): return 'Error - failed running test for %s' % name testnewznab.exposed = True + def testtorznab(self, name, host, ssl, apikey): + logger.fdebug('ssl/verify: %s' % ssl) + if 'ssl' == '0' or ssl == '1': + ssl = bool(int(ssl)) + else: + if ssl == 'false': + ssl = False + else: + ssl = True + result = helpers.torznab_test(name, host, ssl, apikey) + if result is True: + logger.info('Successfully tested %s [%s] - valid api response received' % (name, host)) + return 'Successfully tested %s!' % name + else: + print result + logger.warn('Testing failed to %s [HOST:%s][SSL:%s]' % (name, host, bool(ssl))) + return 'Error - failed running test for %s' % name + testtorznab.exposed = True def orderThis(self, **kwargs): return From 8ae3d744669fc8391780653ba8b7633ef69f825b Mon Sep 17 00:00:00 2001 From: evilhero <909424+evilhero@users.noreply.github.com> Date: Sun, 23 Feb 2020 13:32:37 -0500 Subject: [PATCH 23/26] (#2425) import errno --- mylar/config.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mylar/config.py b/mylar/config.py index 3fd428b3..33361821 100644 --- a/mylar/config.py +++ b/mylar/config.py @@ -11,6 +11,7 @@ import re import ConfigParser import mylar from mylar import logger, helpers, encrypted +import errno config = ConfigParser.SafeConfigParser() From d691c05de488beedb520a1d7c7923daa17d47694 Mon Sep 17 00:00:00 2001 From: AJ Slater Date: Fri, 21 Feb 2020 21:33:42 -0800 Subject: [PATCH 24/26] remove extra s's, which makes a png download successsfully --- data/interfaces/default/config.html | 24 ++++++++++++------------ mylar/PostProcessor.py | 2 +- mylar/filechecker.py | 2 +- 3 files changed, 14 insertions(+), 14 deletions(-) diff --git a/data/interfaces/default/config.html b/data/interfaces/default/config.html index fb594f54..fd0f6c77 100644 --- a/data/interfaces/default/config.html +++ b/data/interfaces/default/config.html @@ -416,7 +416,7 @@
- +
@@ -484,7 +484,7 @@
- +
@@ -635,7 +635,7 @@ Automatically start torrent on successful loading within rtorrent client
- +
@@ -694,7 +694,7 @@
- +
@@ -736,7 +736,7 @@
- +
@@ -1351,7 +1351,7 @@
- +
@@ -1379,7 +1379,7 @@
- +
@@ -1399,7 +1399,7 @@
- +
@@ -1431,7 +1431,7 @@ Send to all subscribers of the channel with this tag (Optional)
- +
@@ -1454,7 +1454,7 @@
- +
@@ -1474,7 +1474,7 @@
- +
@@ -1526,7 +1526,7 @@ Notify when comics are post processed?
- +
diff --git a/mylar/PostProcessor.py b/mylar/PostProcessor.py index acc99bb4..0777d9eb 100755 --- a/mylar/PostProcessor.py +++ b/mylar/PostProcessor.py @@ -2525,7 +2525,7 @@ class PostProcessor(object): rem_issueid = nfilename[xyb+3:yyb] logger.fdebug('issueid: %s' % rem_issueid) nfilename = '%s %s'.strip() % (nfilename[:xyb], nfilename[yyb+3:]) - logger.fdebug('issueid information [%s] removed successsfully: %s' % (rem_issueid, nfilename)) + logger.fdebug('issueid information [%s] removed successfully: %s' % (rem_issueid, nfilename)) self._log("New Filename: %s" % nfilename) logger.fdebug('%s New Filename: %s' % (module, nfilename)) diff --git a/mylar/filechecker.py b/mylar/filechecker.py index 4b7b7668..85dab904 100755 --- a/mylar/filechecker.py +++ b/mylar/filechecker.py @@ -340,7 +340,7 @@ class FileChecker(object): issueid = modfilename[x+3:y] logger.fdebug('issueid: %s' % issueid) modfilename = '%s %s'.strip() % (modfilename[:x], modfilename[y+3:]) - logger.fdebug('issueid %s removed successsfully: %s' % (issueid, modfilename)) + logger.fdebug('issueid %s removed successfully: %s' % (issueid, modfilename)) #here we take a snapshot of the current modfilename, the intent is that we will remove characters that match #as we discover them - namely volume, issue #, years, etc From 2e7d4b500fab48178e8464b1d84794ea2efce166 Mon Sep 17 00:00:00 2001 From: AJ Slater Date: Fri, 21 Feb 2020 22:25:15 -0800 Subject: [PATCH 25/26] fix turnitoff being called before its defined --- data/interfaces/default/base.html | 2 +- data/interfaces/default/manage.html | 16 ++++++++++++---- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/data/interfaces/default/base.html b/data/interfaces/default/base.html index 66f125c0..fefaca5b 100755 --- a/data/interfaces/default/base.html +++ b/data/interfaces/default/base.html @@ -26,6 +26,7 @@ ${next.headIncludes()} + <% @@ -109,7 +110,6 @@ Back to top - diff --git a/data/interfaces/default/manage.html b/data/interfaces/default/manage.html index 0b1044dc..d07b8d6b 100755 --- a/data/interfaces/default/manage.html +++ b/data/interfaces/default/manage.html @@ -32,17 +32,23 @@ %if mylar.IMPORT_STATUS == 'Import completed.':
%else:
%endif %else:
%endif @@ -277,7 +283,9 @@ }; function turnitoff() { CheckEnabled = false; - clearInterval(ImportTimer); + if (typeof ImportTimer !== 'undefined') { + clearInterval(ImportTimer); + }; }; function turniton() { if (CheckEnabled == false) { From 70d8944c80663bc3dc00373bfdaeb0fce59ca360 Mon Sep 17 00:00:00 2001 From: evilhero <909424+evilhero@users.noreply.github.com> Date: Sun, 23 Feb 2020 14:13:21 -0500 Subject: [PATCH 26/26] notification of repo freeze --- README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.md b/README.md index 82591ea8..67dfdc8b 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,8 @@ ## ![Mylar Logo](https://github.com/evilhero/mylar/blob/master/data/images/mylarlogo.png) Mylar +## Note that feature development has stopped as we have moved to [Mylar3](https://github.com/mylar3/mylar3). +## This means only critical bug errors will get addressed until such time as we decide not to continue supporting this version. EOL is still to be decided. + Mylar is an automated Comic Book (cbr/cbz) downloader program for use with NZB and torrents written in python. It supports SABnzbd, NZBGET, and many torrent clients in addition to DDL. It will allow you to monitor weekly pull-lists for items belonging to user-specific series to download, as well as being able to monitor story-arcs. Support for TPB's and GN's is also now available.