diff --git a/mylar/PostProcessor.py b/mylar/PostProcessor.py
index 70853f01..43cc4e9c 100755
--- a/mylar/PostProcessor.py
+++ b/mylar/PostProcessor.py
@@ -361,7 +361,7 @@ class PostProcessor(object):
         if mylar.USE_SABNZBD==1:
             if self.nzb_name != 'Manual Run':
                 logger.fdebug('%s Using SABnzbd' % module)
-                logger.fdebug('%s NZB name as passed from NZBGet: %s' % (module, self.nzb_name))
+                logger.fdebug('%s NZB name as passed from SABnzbd: %s' % (module, self.nzb_name))
 
         if self.nzb_name == 'Manual Run':
             logger.fdebug('%s Manual Run Post-Processing enabled.' % module)
@@ -403,7 +403,9 @@ class PostProcessor(object):
             filelist = flc.listFiles()
             if filelist['comiccount'] == 0:  # is None:
                 logger.warn('There were no files located - check the debugging logs if you think this is in error.')
-                return
+                self.valreturn.append({"self.log": self.log,
+                                       "mode": 'stop'})
+                return self.queue.put(self.valreturn)
             logger.info('I have located %s files that I should be able to post-process. Continuing...' % filelist['comiccount'])
         else:
             if all([self.comicid is None, '_' not in self.issueid]):
@@ -769,6 +771,7 @@ class PostProcessor(object):
                     if datematch == "True":
                         #need to reset this to False here so that the True doesn't carry down and avoid the year checks due to the True
                         datematch = "False"
+                        lonevol = False
                         # if we get to here, we need to do some more comparisons just to make sure we have the right volume
                         # first we chk volume label if it exists, then we drop down to issue year
                         # if the above both don't exist, and there's more than one series on the watchlist (or the series is > v1)
@@ -790,6 +793,7 @@ class PostProcessor(object):
                         elif len(watchvals) > 1 and int(tmp_watchmatch_vol) >= 1:
                             if int(tmp_watchmatch_vol) == int(tmp_watchlist_vol):
                                 logger.fdebug('%s[ISSUE-VERIFY][SeriesYear-Volume MATCH] Volume label of series Year of %s matched to volume label of %s' % (module, watch_values['ComicVersion'], watchmatch['series_volume']))
+                                lonevol = True
                             else:
                                 logger.fdebug('%s[ISSUE-VERIFY][SeriesYear-Volume FAILURE] Volume label of Series Year of %s DID NOT match to volume label of %s' % (module, watch_values['ComicVersion'], watchmatch['series_volume']))
                                 datematch = "False"
@@ -799,6 +803,7 @@ class PostProcessor(object):
                                 datematch = "False"
                         elif len(watchvals) == 1 and int(tmp_watchlist_vol) == 1:
                             logger.fdebug('%s[ISSUE-VERIFY][Lone Volume MATCH] Volume label of %s indicates only volume for this series on your watchlist.' % (module, watch_values['ComicVersion']))
+                            lonevol = True
                         elif int(tmp_watchlist_vol) > 1:
                             logger.fdebug('%s[ISSUE-VERIFY][Lone Volume FAILURE] Volume label of %s indicates that there is more than one volume for this series, but the one on your watchlist has no volume label set.' % (module, watch_values['ComicVersion']))
                             datematch = "False"
@@ -817,6 +822,9 @@ class PostProcessor(object):
                             else:
                                 logger.fdebug('%s[ISSUE-VERIFY][Issue Year MATCH] Modified Issue Year of %s is a match to the year found in the filename of : %s' % (module, issyr, watchmatch['issue_year']))
                                 datematch = 'True'
+                        elif datematch == 'False' and watchmatch['issue_year'] is None and lonevol is True:
+                            logger.fdebug('%s[LONE-VOLUME/NO YEAR][MATCH] Only Volume on watchlist matches, no year present in filename. Assuming match based on volume and title.' % module)
+                            datematch = 'True'
 
                         if datematch == 'True':
                             if watchmatch['sub']:
@@ -1606,12 +1614,16 @@ class PostProcessor(object):
 
                 logger.info('%s No matches for Manual Run ... exiting.' % module)
                 if mylar.APILOCK is True:
                     mylar.APILOCK = False
-                return
+                self.valreturn.append({"self.log": self.log,
+                                       "mode": 'stop'})
+                return self.queue.put(self.valreturn)
             elif len(manual_arclist) > 0 and len(manual_list) == 0:
                 logger.info('%s Manual post-processing completed for %s story-arc issues.' % (module, len(manual_arclist)))
                 if mylar.APILOCK is True:
                     mylar.APILOCK = False
-                return
+                self.valreturn.append({"self.log": self.log,
+                                       "mode": 'stop'})
+                return self.queue.put(self.valreturn)
             elif len(manual_arclist) > 0:
                 logger.info('%s Manual post-processing completed for %s story-arc issues.' % (module, len(manual_arclist)))
@@ -1668,7 +1680,9 @@ class PostProcessor(object):
                     logger.info('%s Manual post-processing completed for %s issues [FAILED: %s]' % (module, i, self.failed_files))
                 if mylar.APILOCK is True:
                     mylar.APILOCK = False
-                return
+                self.valreturn.append({"self.log": self.log,
+                                       "mode": 'stop'})
+                return self.queue.put(self.valreturn)
 
             else:
                 pass
@@ -1891,7 +1905,9 @@ class PostProcessor(object):
                 except Exception as e:
                     logger.error('%s Failed to %s %s: %s' % (module, mylar.CONFIG.FILE_OPTS, grab_src, e))
                     self._log("Failed to %s %s: %s" % (mylar.CONFIG.FILE_OPTS, grab_src, e))
-                    return
+                    self.valreturn.append({"self.log": self.log,
+                                           "mode": 'stop'})
+                    return self.queue.put(self.valreturn)
 
                 #tidyup old path
                 if any([mylar.CONFIG.FILE_OPTS == 'move', mylar.CONFIG.FILE_OPTS == 'copy']):
@@ -1969,10 +1985,14 @@ class PostProcessor(object):
             #loop through the hits here.
             if len(manual_list) == 0 and len(manual_arclist) == 0:
                 logger.info('%s No matches for Manual Run ... exiting.' % module)
-                return
+                self.valreturn.append({"self.log": self.log,
+                                       "mode": 'stop'})
+                return self.queue.put(self.valreturn)
             elif len(manual_arclist) > 0 and len(manual_list) == 0:
                 logger.info('%s Manual post-processing completed for %s story-arc issues.' % (module, len(manual_arclist)))
-                return
+                self.valreturn.append({"self.log": self.log,
+                                       "mode": 'stop'})
+                return self.queue.put(self.valreturn)
             elif len(manual_arclist) > 0:
                 logger.info('%s Manual post-processing completed for %s story-arc issues.' % (module, len(manual_arclist)))
                 i = 0
@@ -2018,7 +2038,9 @@ class PostProcessor(object):
                     logger.info('%s Manual post-processing completed for %s issues.' % (module, i))
                 else:
                     logger.info('%s Manual post-processing completed for %s issues [FAILED: %s]' % (module, i, self.failed_files))
-                return
+                self.valreturn.append({"self.log": self.log,
+                                       "mode": 'stop'})
+                return self.queue.put(self.valreturn)
 
         else:
             comicid = issuenzb['ComicID']
diff --git a/mylar/__init__.py b/mylar/__init__.py
index 5cf2150a..161f0871 100644
--- a/mylar/__init__.py
+++ b/mylar/__init__.py
@@ -102,6 +102,7 @@ CONFIG = None
 CONFIG_FILE = None
 CV_HEADERS = None
 CVURL = None
+EXPURL = None
 DEMURL = None
 WWTURL = None
 WWT_CF_COOKIEVALUE = None
@@ -166,7 +167,7 @@ def initialize(config_file):
 
     global CONFIG, _INITIALIZED, QUIET, CONFIG_FILE, OS_DETECT, MAINTENANCE, CURRENT_VERSION, LATEST_VERSION, COMMITS_BEHIND, INSTALL_TYPE, IMPORTLOCK, PULLBYFILE, INKDROPS_32P, \
            DONATEBUTTON, CURRENT_WEEKNUMBER, CURRENT_YEAR, UMASK, USER_AGENT, SNATCHED_QUEUE, NZB_QUEUE, PP_QUEUE, SEARCH_QUEUE, DDL_QUEUE, PULLNEW, COMICSORT, WANTED_TAB_OFF, CV_HEADERS, \
-           IMPORTBUTTON, IMPORT_FILES, IMPORT_TOTALFILES, IMPORT_CID_COUNT, IMPORT_PARSED_COUNT, IMPORT_FAILURE_COUNT, CHECKENABLED, CVURL, DEMURL, WWTURL, WWT_CF_COOKIEVALUE, \
+           IMPORTBUTTON, IMPORT_FILES, IMPORT_TOTALFILES, IMPORT_CID_COUNT, IMPORT_PARSED_COUNT, IMPORT_FAILURE_COUNT, CHECKENABLED, CVURL, DEMURL, EXPURL, WWTURL, WWT_CF_COOKIEVALUE, \
            DDLPOOL, NZBPOOL, SNPOOL, PPPOOL, SEARCHPOOL, \
            USE_SABNZBD, USE_NZBGET, USE_BLACKHOLE, USE_RTORRENT, USE_UTORRENT, USE_QBITTORRENT, USE_DELUGE, USE_TRANSMISSION, USE_WATCHDIR, SAB_PARAMS, \
            PROG_DIR, DATA_DIR, CMTAGGER_PATH, DOWNLOAD_APIKEY, LOCAL_IP, STATIC_COMICRN_VERSION, STATIC_APC_VERSION, KEYS_32P, AUTHKEY_32P, FEED_32P, FEEDINFO_32P, \
@@ -250,6 +251,9 @@ def initialize(config_file):
         WWTURL = 'https://worldwidetorrents.to/'
         DEMURL = 'https://www.demonoid.pw/'
 
+        #set the default URL for nzbindex
+        EXPURL = 'https://nzbindex.nl/'
+
         if CONFIG.LOCMOVE:
             helpers.updateComicLocation()
 
diff --git a/mylar/api.py b/mylar/api.py
index 9cf6ad44..2e438731 100644
--- a/mylar/api.py
+++ b/mylar/api.py
@@ -17,7 +17,7 @@
 # along with Mylar. If not, see <http://www.gnu.org/licenses/>.
 
 import mylar
-from mylar import db, mb, importer, search, process, versioncheck, logger, webserve, helpers
+from mylar import db, mb, importer, search, process, versioncheck, logger, webserve, helpers, encrypted
 import simplejson as simplejson
 import json
 import cherrypy
@@ -31,7 +31,7 @@ from cherrypy.lib.static import serve_file, serve_download
 import datetime
 
 cmd_list = ['getIndex', 'getComic', 'getUpcoming', 'getWanted', 'getHistory',
-            'getLogs', 'clearLogs','findComic', 'addComic', 'delComic',
+            'getLogs', 'getAPI', 'clearLogs','findComic', 'addComic', 'delComic',
             'pauseComic', 'resumeComic', 'refreshComic', 'addIssue',
             'queueIssue', 'unqueueIssue', 'forceSearch', 'forceProcess', 'getVersion',
             'checkGithub','shutdown', 'restart', 'update',
@@ -56,44 +56,46 @@ class Api(object):
 
     def checkParams(self, *args, **kwargs):
 
-        if 'apikey' not in kwargs:
-            self.data = self._error_with_message('Missing api key')
-            return
-
         if 'cmd' not in kwargs:
             self.data = self._error_with_message('Missing parameter: cmd')
             return
 
-        if not mylar.CONFIG.API_ENABLED:
-            if kwargs['apikey'] != mylar.DOWNLOAD_APIKEY:
-                self.data = self._error_with_message('API not enabled')
-                return
-
-        if kwargs['apikey'] != mylar.CONFIG.API_KEY and all([kwargs['apikey'] != mylar.DOWNLOAD_APIKEY, mylar.DOWNLOAD_APIKEY != None]):
-            self.data = self._error_with_message('Incorrect API key')
+        if 'apikey' not in kwargs and ('apikey' not in kwargs and kwargs['cmd'] != 'getAPI'):
+            self.data = self._error_with_message('Missing api key')
             return
+        elif kwargs['cmd'] == 'getAPI':
+            self.apitype = 'normal'
         else:
-            if kwargs['apikey'] == mylar.CONFIG.API_KEY:
-                self.apitype = 'normal'
-            elif kwargs['apikey'] == mylar.DOWNLOAD_APIKEY:
-                self.apitype = 'download'
-            logger.fdebug('Matched to key. Api set to : ' + self.apitype + ' mode.')
-            self.apikey = kwargs.pop('apikey')
+            if not mylar.CONFIG.API_ENABLED:
+                if kwargs['apikey'] != mylar.DOWNLOAD_APIKEY:
+                    self.data = self._error_with_message('API not enabled')
+                    return
 
-        if not([mylar.CONFIG.API_KEY, mylar.DOWNLOAD_APIKEY]):
-            self.data = self._error_with_message('API key not generated')
-            return
+            if kwargs['apikey'] != mylar.CONFIG.API_KEY and all([kwargs['apikey'] != mylar.DOWNLOAD_APIKEY, mylar.DOWNLOAD_APIKEY != None]):
+                self.data = self._error_with_message('Incorrect API key')
+                return
+            else:
+                if kwargs['apikey'] == mylar.CONFIG.API_KEY:
+                    self.apitype = 'normal'
+                elif kwargs['apikey'] == mylar.DOWNLOAD_APIKEY:
+                    self.apitype = 'download'
+                logger.fdebug('Matched to key. Api set to : ' + self.apitype + ' mode.')
+                self.apikey = kwargs.pop('apikey')
 
-        if self.apitype:
-            if self.apitype == 'normal' and len(mylar.CONFIG.API_KEY) != 32:
+            if not([mylar.CONFIG.API_KEY, mylar.DOWNLOAD_APIKEY]):
+                self.data = self._error_with_message('API key not generated')
+                return
+
+            if self.apitype:
+                if self.apitype == 'normal' and len(mylar.CONFIG.API_KEY) != 32:
+                    self.data = self._error_with_message('API key not generated correctly')
+                    return
+                if self.apitype == 'download' and len(mylar.DOWNLOAD_APIKEY) != 32:
+                    self.data = self._error_with_message('Download API key not generated correctly')
+                    return
+            else:
                 self.data = self._error_with_message('API key not generated correctly')
                 return
-            if self.apitype == 'download' and len(mylar.DOWNLOAD_APIKEY) != 32:
-                self.data = self._error_with_message('Download API key not generated correctly')
-                return
-        else:
-            self.data = self._error_with_message('API key not generated correctly')
-            return
 
         if kwargs['cmd'] not in cmd_list:
             self.data = self._error_with_message('Unknown command: %s' % kwargs['cmd'])
             return
@@ -149,6 +151,37 @@ class Api(object):
         cherrypy.response.headers['Content-Type'] = "application/json"
         return simplejson.dumps(error)
 
+    def _getAPI(self, **kwargs):
+        if 'username' not in kwargs:
+            self.data = self._error_with_message('Missing parameter: username')
+            return
+        else:
+            username = kwargs['username']
+
+        if 'password' not in kwargs:
+            self.data = self._error_with_message('Missing parameter: password')
+            return
+        else:
+            password = kwargs['password']
+
+        if any([mylar.CONFIG.HTTP_USERNAME is None, mylar.CONFIG.HTTP_PASSWORD is None]):
+            self.data = self._error_with_message('Unable to use this command - username & password MUST be enabled.')
+            return
+
+        ht_user = mylar.CONFIG.HTTP_USERNAME
+        edc = encrypted.Encryptor(mylar.CONFIG.HTTP_PASSWORD)
+        ed_chk = edc.decrypt_it()
+        if mylar.CONFIG.ENCRYPT_PASSWORDS is True:
+            if username == ht_user and all([ed_chk['status'] is True, ed_chk['password'] == password]):
+                self.data = {'apikey': mylar.CONFIG.API_KEY}
+            else:
+                self.data = self._error_with_message('Incorrect username or password.')
+        else:
+            if username == ht_user and password == mylar.CONFIG.HTTP_PASSWORD:
+                self.data = {'apikey': mylar.CONFIG.API_KEY}
+            else:
+                self.data = self._error_with_message('Incorrect username or password.')
+
     def _getIndex(self, **kwargs):
         self.data = self._dic_from_query('SELECT * from comics order by ComicSortName COLLATE NOCASE')
         return
diff --git a/mylar/auth32p.py b/mylar/auth32p.py
index a25e486b..f5ee6ada 100644
--- a/mylar/auth32p.py
+++ b/mylar/auth32p.py
@@ -54,7 +54,7 @@ class info32p(object):
         self.method = None
 
         if any([mylar.CONFIG.MODE_32P is True, self.test is True]):
-            lses = self.LoginSession(mylar.CONFIG.USERNAME_32P, mylar.CONFIG.PASSWORD_32P)
+            lses = self.LoginSession(self.username_32p, self.password_32p)
             if not lses.login():
                 if not self.test:
                     logger.error('%s [LOGIN FAILED] Disabling 32P provider until login error(s) can be fixed in order to avoid temporary bans.' % self.module)
diff --git a/mylar/findcomicfeed.py b/mylar/findcomicfeed.py
index 44ca4bb6..8ce6046f 100755
--- a/mylar/findcomicfeed.py
+++ b/mylar/findcomicfeed.py
@@ -98,10 +98,10 @@ def Startit(searchName, searchIssue, searchYear, ComicVersion, IssDateFix, bookt
         if mylar.CONFIG.PREFERRED_QUALITY == 1: joinSearch = joinSearch + " .cbr"
         elif mylar.CONFIG.PREFERRED_QUALITY == 2: joinSearch = joinSearch + " .cbz"
 
-        feeds.append(feedparser.parse("http://beta.nzbindex.com/search/rss?q=%s&max=50&minage=0%s&hidespam=1&hidepassword=1&sort=agedesc%s&complete=0&hidecross=0&hasNFO=0&poster=&g[]=85" % (joinSearch, max_age, size_constraints)))
+        feeds.append(feedparser.parse(mylar.EXPURL + "search/rss?q=%s&max=50&minage=0%s&hidespam=1&hidepassword=1&sort=agedesc%s&complete=0&hidecross=0&hasNFO=0&poster=&g[]=85" % (joinSearch, max_age, size_constraints)))
        time.sleep(5)
 
        if mylar.CONFIG.ALTEXPERIMENTAL:
-           feeds.append(feedparser.parse("http://beta.nzbindex.com/search/rss?q=%s&max=50&minage=0%s&hidespam=1&hidepassword=1&sort=agedesc%s&complete=0&hidecross=0&hasNFO=0&poster=&g[]=86" % (joinSearch, max_age, size_constraints)))
+           feeds.append(feedparser.parse(mylar.EXPURL + "search/rss?q=%s&max=50&minage=0%s&hidespam=1&hidepassword=1&sort=agedesc%s&complete=0&hidecross=0&hasNFO=0&poster=&g[]=86" % (joinSearch, max_age, size_constraints)))
            time.sleep(5)
 
        i+=1
diff --git a/mylar/helpers.py b/mylar/helpers.py
index 35a0b0ac..8c663646 100755
--- a/mylar/helpers.py
+++ b/mylar/helpers.py
@@ -3009,7 +3009,10 @@ def weekly_info(week=None, year=None, current=None):
 
     if weekdst is not None:
         if mylar.CONFIG.WEEKFOLDER_FORMAT == 0:
-            weekfold = os.path.join(weekdst, str( str(weekinfo['year']) + '-' + str(weeknumber) ))
+            weekn = weeknumber
+            if len(str(weekn)) == 1:
+                weekn = '%s%s' % ('0', str(weekn))
+            weekfold = os.path.join(weekdst, '%s-%s' % (weekinfo['year'], weekn))
         else:
             weekfold = os.path.join(weekdst, str( str(weekinfo['midweek']) ))
     else:
diff --git a/mylar/importer.py b/mylar/importer.py
index 1c964f9b..43480379 100644
--- a/mylar/importer.py
+++ b/mylar/importer.py
@@ -238,9 +238,10 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
         comicIssues = str(int(comic['ComicIssues']) + 1)
 
     if mylar.CONFIG.ALTERNATE_LATEST_SERIES_COVERS is False:
+        cimage = os.path.join(mylar.CONFIG.CACHE_DIR, str(comicid) + '.jpg')
         PRComicImage = os.path.join('cache', str(comicid) + ".jpg")
         ComicImage = helpers.replacetheslash(PRComicImage)
-        if os.path.isfile(PRComicImage) is True:
+        if os.path.isfile(cimage) is True:
             logger.fdebug('Cover already exists for series. Not redownloading.')
         else:
             covercheck = helpers.getImage(comicid, comic['ComicImage'])
@@ -252,11 +253,11 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
         if all([mylar.CONFIG.COMIC_COVER_LOCAL is True, os.path.isdir(comlocation) is True, os.path.isfile(os.path.join(comlocation, 'cover.jpg')) is False]):
             try:
                 comiclocal = os.path.join(comlocation, 'cover.jpg')
-                shutil.copyfile(PRComicImage, comiclocal)
+                shutil.copyfile(cimage, comiclocal)
                 if mylar.CONFIG.ENFORCE_PERMS:
                     filechecker.setperms(comiclocal)
             except IOError as e:
-                logger.error('Unable to save cover (' + str(comiclocal) + ') into series directory (' + str(comlocation) + ') at this time.')
+                logger.error('Unable to save cover (%s) into series directory (%s) at this time.' % (cimage, comiclocal))
 
     else:
         ComicImage = None
@@ -1589,14 +1590,15 @@ def annual_check(ComicName, SeriesYear, comicid, issuetype, issuechk, annualslis
 
 def image_it(comicid, latestissueid, comlocation, ComicImage):
     #alternate series covers download latest image...
-    imageurl = mylar.cv.getComic(comicid, 'image', issueid=latestissueid)
+    cimage = os.path.join(mylar.CONFIG.CACHE_DIR, str(comicid) + '.jpg')
+    imageurl = mylar.cv.getComic(comicid, 'image', issueid=latestissueid)
     covercheck = helpers.getImage(comicid, imageurl['image'])
     if covercheck == 'retry':
         logger.fdebug('Attempting to retrieve a different comic image for this particular issue.')
         if imageurl['image_alt'] is not None:
             covercheck = helpers.getImage(comicid, imageurl['image_alt'])
         else:
-            if not os.path.isfile(os.path.join(mylar.CACHE_DIR, str(comicid) + '.jpg')):
+            if not os.path.isfile(cimage):
                 logger.fdebug('Failed to retrieve issue image, possibly because not available. Reverting back to series image.')
                 covercheck = helpers.getImage(comicid, ComicImage)
     PRComicImage = os.path.join('cache', str(comicid) + ".jpg")
@@ -1606,13 +1608,13 @@ def image_it(comicid, latestissueid, comlocation, ComicImage):
     if all([mylar.CONFIG.COMIC_COVER_LOCAL is True, os.path.isdir(comlocation) is True, os.path.isfile(os.path.join(comlocation, 'cover.jpg'))]):
         try:
             comiclocal = os.path.join(comlocation, 'cover.jpg')
-            shutil.copyfile(PRComicImage, comiclocal)
+            shutil.copyfile(cimage, comiclocal)
             if mylar.CONFIG.ENFORCE_PERMS:
                 filechecker.setperms(comiclocal)
         except IOError as e:
-            logger.error('[%s] Error saving cover into series directory (%s) at this time' % (e, comiclocal))
+            logger.error('[%s] Error saving cover (%s) into series directory (%s) at this time' % (e, cimage, comiclocal))
         except Exception as e:
-            logger.error('[%s] Unable to save cover into series directory (%s) at this time' % (e, comiclocal))
+            logger.error('[%s] Unable to save cover (%s) into series directory (%s) at this time' % (e, cimage, comiclocal))
 
     myDB = db.DBConnection()
     myDB.upsert('comics', {'ComicImage': ComicImage}, {'ComicID': comicid})
diff --git a/mylar/updater.py b/mylar/updater.py
index 964c38b0..91ec96d7 100755
--- a/mylar/updater.py
+++ b/mylar/updater.py
@@ -1173,7 +1173,7 @@ def forceRescan(ComicID, archive=None, module=None, recheck=False):
 
                         #this will detect duplicate filenames within the same directory.
                         for di in issuedupechk:
-                            if di['fcdigit'] == fcdigit:
+                            if di['fcdigit'] == fcdigit and di['issueid'] == reiss['IssueID']:
                                 #base off of config - base duplication keep on filesize or file-type (or both)
                                 logger.fdebug('[DUPECHECK] Duplicate issue detected [' + di['filename'] + '] [' + tmpfc['ComicFilename'] + ']')
                                 # mylar.CONFIG.DUPECONSTRAINT = 'filesize' / 'filetype-cbr' / 'filetype-cbz'
diff --git a/mylar/weeklypull.py b/mylar/weeklypull.py
index 0d754ed8..d6bdbfda 100755
--- a/mylar/weeklypull.py
+++ b/mylar/weeklypull.py
@@ -1325,16 +1325,7 @@ def weekly_singlecopy(comicid, issuenum, file, path, weekinfo):
     module = '[WEEKLY-PULL COPY]'
 
     if mylar.CONFIG.WEEKFOLDER:
-        if mylar.CONFIG.WEEKFOLDER_LOC:
-            weekdst = mylar.CONFIG.WEEKFOLDER_LOC
-        else:
-            weekdst = mylar.CONFIG.DESTINATION_DIR
-
-        if mylar.CONFIG.WEEKFOLDER_FORMAT == 0:
-            desdir = os.path.join(weekdst, str( str(weekinfo['year']) + '-' + str(weekinfo['weeknumber']) ))
-        else:
-            desdir = os.path.join(weekdst, str( str(weekinfo['midweek']) ))
-
+        desdir = weekinfo['week_folder']
         dircheck = mylar.filechecker.validateAndCreateDirectory(desdir, True, module=module)
        if dircheck:
            pass
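
Note (not part of the patch): the api.py hunks above add a 'getAPI' command that exchanges the web-UI username/password for the configured API key, so client scripts no longer need the key hard-coded. The sketch below is a hypothetical client for that command, not code from this change set. It assumes Mylar is reachable at http://localhost:8090 with no http_root prefix, that the API is mounted at /api, and that the command's dict response is serialised to JSON like other commands; BASE_URL, USERNAME and PASSWORD are placeholders to adjust for your own install.

# Hypothetical usage sketch for the new 'getAPI' command (assumed host/port,
# assumed /api mount point). Requires http_username/http_password to be set
# in Mylar's config, as enforced by _getAPI in the patch above.
import requests

BASE_URL = 'http://localhost:8090/api'   # placeholder: your Mylar host/port/http_root
USERNAME = 'mylar-user'                  # placeholder: your http_username
PASSWORD = 'mylar-password'              # placeholder: your http_password

# Step 1: trade the login credentials for the API key (no apikey needed for getAPI).
resp = requests.get(BASE_URL, params={'cmd': 'getAPI',
                                      'username': USERNAME,
                                      'password': PASSWORD})
resp.raise_for_status()
payload = resp.json()
if not isinstance(payload, dict) or 'apikey' not in payload:
    raise SystemExit('getAPI did not return an apikey: %r' % payload)
apikey = payload['apikey']

# Step 2: use the returned key for any normal command, e.g. getIndex.
index = requests.get(BASE_URL, params={'apikey': apikey, 'cmd': 'getIndex'}).json()
print('Series on watchlist: %d' % len(index))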