mirror of https://github.com/evilhero/mylar
IMP: (#1122) Added provider OMGWTFNZBS (results untested)
FIX: (#1124) Experimental search results had a 1/9 (or similar) parts counter in the header, which would error out when trying to create the nzb due to an invalid path
FIX: When importing a series with Move Files disabled, the import process would error out
FIX: On the Import page, selecting Save Changes without Scanning Library would leave the pop-up notification up forever
FIX: When searching for a series such as A-Force, 16,000+ results would come back and the search would appear to lock up due to the excessive number of results - such titles are now handled better, and overall search results are limited to the first 1000 hits
FIX: When clicking 'Add Series' from the pull-list, a new series with no issues published yet would return 0 results from CV even though the series exists
FIX: (#1120) Story arcs with special characters would error during a manual post-processing run
FIX: (#1125) If the year is fuzzied for a given series when searching, it would error out with invalid date comparisons
This commit is contained in:
parent 9af9913819
commit caa8654b4a
@@ -491,6 +491,22 @@
 				</div>
 			</fieldset>

+			<fieldset>
+				<div class="row checkbox left clearfix">
+					<input id="useomgwtfnzbs" type="checkbox" onclick="initConfigCheckbox($(this));" name="omgwtfnzbs" value="1" ${config['use_omgwtfnzbs']} /><legend>OMGWTFNZBS</legend>
+				</div>
+				<div class="config">
+					<div class="row">
+						<label>OMGWTFNZBS UserName</label>
+						<input type="text" name="omgwtfnzbs_username" value="${config['omgwtfnzbs_username']}" size="15" >
+					</div>
+					<div class="row">
+						<label>OMGWTFNZBS API</label>
+						<input type="text" name="omgwtfnzbs_apikey" value="${config['omgwtfnzbs_api']}" size="36">
+					</div>
+				</div>
+			</fieldset>
+
 			<fieldset>
 				<div class="row checkbox left clearfix">
 					<input id="useexperimental" type="checkbox" onclick="initConfigCheckbox($(this));" name="experimental" value="1" ${config['use_experimental']} /><legend>Use Experimental Search</legend>
@@ -1364,6 +1380,7 @@
 		initConfigCheckbox("#enable_torznab");
 		initConfigCheckbox("#usenzbsu");
 		initConfigCheckbox("#usedognzb");
+		initConfigCheckbox("#useomgwtfnzbs");
 		initConfigCheckbox("#enable_torrents");
 		initConfigCheckbox("#torrent_local");
 		initConfigCheckbox("#torrent_seedbox");
@@ -735,8 +735,11 @@ class PostProcessor(object):
             if 'S' in sandwich:
                 self._log("One-off STORYARC mode enabled for Post-Processing for " + str(sarc))
                 logger.info(module + ' One-off STORYARC mode enabled for Post-Processing for ' + str(sarc))
+                arcdir = helpers.filesafe(sarc)
+                if mylar.REPLACE_SPACES:
+                    arcdir = arcdir.replace(' ', mylar.REPLACE_CHAR)
                 if mylar.STORYARCDIR:
-                    storyarcd = os.path.join(mylar.DESTINATION_DIR, "StoryArcs", sarc)
+                    storyarcd = os.path.join(mylar.DESTINATION_DIR, "StoryArcs", arcdir)
                     self._log("StoryArc Directory set to : " + storyarcd)
                     logger.info(module + ' Story Arc Directory set to : ' + storyarcd)
                 else:
@@ -247,6 +247,10 @@ NZBSU_APIKEY = None
 DOGNZB = False
 DOGNZB_APIKEY = None

+OMGWTFNZBS = False
+OMGWTFNZBS_USERNAME = None
+OMGWTFNZBS_APIKEY = None
+
 NEWZNAB = False
 NEWZNAB_NAME = None
 NEWZNAB_HOST = None
@@ -409,7 +413,7 @@ def initialize():
        CURRENT_VERSION, LATEST_VERSION, CHECK_GITHUB, CHECK_GITHUB_ON_STARTUP, CHECK_GITHUB_INTERVAL, GIT_USER, GIT_BRANCH, USER_AGENT, DESTINATION_DIR, MULTIPLE_DEST_DIRS, CREATE_FOLDERS, DELETE_REMOVE_DIR, \
        DOWNLOAD_DIR, USENET_RETENTION, SEARCH_INTERVAL, NZB_STARTUP_SEARCH, INTERFACE, DUPECONSTRAINT, AUTOWANT_ALL, AUTOWANT_UPCOMING, ZERO_LEVEL, ZERO_LEVEL_N, COMIC_COVER_LOCAL, HIGHCOUNT, \
        LIBRARYSCAN, LIBRARYSCAN_INTERVAL, DOWNLOAD_SCAN_INTERVAL, NZB_DOWNLOADER, USE_SABNZBD, SAB_HOST, SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_PRIORITY, SAB_TO_MYLAR, SAB_DIRECTORY, USE_BLACKHOLE, BLACKHOLE_DIR, ADD_COMICS, COMIC_DIR, IMP_MOVE, IMP_RENAME, IMP_METADATA, \
-       USE_NZBGET, NZBGET_HOST, NZBGET_PORT, NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_PRIORITY, NZBGET_DIRECTORY, NZBSU, NZBSU_UID, NZBSU_APIKEY, DOGNZB, DOGNZB_APIKEY, \
+       USE_NZBGET, NZBGET_HOST, NZBGET_PORT, NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_PRIORITY, NZBGET_DIRECTORY, NZBSU, NZBSU_UID, NZBSU_APIKEY, DOGNZB, DOGNZB_APIKEY, OMGWTFNZBS, OMGWTFNZBS_USERNAME, OMGWTFNZBS_APIKEY, \
        NEWZNAB, NEWZNAB_NAME, NEWZNAB_HOST, NEWZNAB_APIKEY, NEWZNAB_UID, NEWZNAB_ENABLED, EXTRA_NEWZNABS, NEWZNAB_EXTRA, \
        ENABLE_TORZNAB, TORZNAB_NAME, TORZNAB_HOST, TORZNAB_APIKEY, TORZNAB_CATEGORY, \
        EXPERIMENTAL, ALTEXPERIMENTAL, \
@@ -431,6 +435,7 @@ def initialize():
     CheckSection('NZBGet')
     CheckSection('NZBsu')
     CheckSection('DOGnzb')
+    CheckSection('OMGWTFNZBS')
     CheckSection('Experimental')
     CheckSection('Newznab')
     CheckSection('Torznab')
@@ -736,6 +741,13 @@ def initialize():
        PR.append('dognzb')
        PR_NUM +=1

+   OMGWTFNZBS = bool(check_setting_int(CFG, 'OMGWTFNZBS', 'omgwtfnzbs', 0))
+   OMGWTFNZBS_USERNAME = check_setting_str(CFG, 'OMGWTFNZBS', 'omgwtfnzbs_username', '')
+   OMGWTFNZBS_APIKEY = check_setting_str(CFG, 'OMGWTFNZBS', 'omgwtfnzbs_apikey', '')
+   if OMGWTFNZBS:
+       PR.append('OMGWTFNZBS')
+       PR_NUM +=1
+
    EXPERIMENTAL = bool(check_setting_int(CFG, 'Experimental', 'experimental', 0))
    ALTEXPERIMENTAL = bool(check_setting_int(CFG, 'Experimental', 'altexperimental', 1))
    if EXPERIMENTAL:
@@ -1359,6 +1371,11 @@ def config_write():
     new_config['DOGnzb']['dognzb'] = int(DOGNZB)
     new_config['DOGnzb']['dognzb_apikey'] = DOGNZB_APIKEY

+    new_config['OMGWTFNZBS'] = {}
+    new_config['OMGWTFNZBS']['omgwtfnzbs'] = int(OMGWTFNZBS)
+    new_config['OMGWTFNZBS']['omgwtfnzbs_username'] = OMGWTFNZBS_USERNAME
+    new_config['OMGWTFNZBS']['omgwtfnzbs_apikey'] = OMGWTFNZBS_APIKEY
+
     new_config['Experimental'] = {}
     new_config['Experimental']['experimental'] = int(EXPERIMENTAL)
     new_config['Experimental']['altexperimental'] = int(ALTEXPERIMENTAL)
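For reference, the two hunks above read and write the same OMGWTFNZBS block in config.ini. A file produced by config_write() would contain roughly the following (section and key names are taken straight from the code above; the values are placeholders, not real credentials):

    [OMGWTFNZBS]
    omgwtfnzbs = 1
    omgwtfnzbs_username = someuser
    omgwtfnzbs_apikey = 0123456789abcdef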
@@ -1755,7 +1755,7 @@ def torrent_create(site, linkid, alt=None):
     if alt is None:
         url = 'http://torcache.net/torrent/' + str(linkid) + '.torrent'
     else:
-        url = 'http://torrage.com/torrent/' + str(linkid) + '.torrent'
+        url = 'http://torrage.com/' + str(linkid) + '.torrent'

     return url
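A quick illustration of the corrected helper, with 'HASH' standing in for a real 40-character info-hash: torrage.com serves torrents at /<hash>.torrent rather than /torrent/<hash>.torrent, which is all this one-line change fixes.

    # hypothetical usage; any non-None alt selects the torrage URL
    helpers.torrent_create('KAT', 'HASH')            # -> http://torcache.net/torrent/HASH.torrent
    helpers.torrent_create('KAT', 'HASH', alt=True)  # -> http://torrage.com/HASH.torrent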
@@ -1790,6 +1790,48 @@ def parse_32pfeed(rssfeedline):

    return KEYS_32P

+#def file_ops(path,dst):
+#    # path = source path + filename
+#    # dst = destination path + filename
+
+#    #get the crc of the file prior to the operation and then compare after to ensure it's complete.
+#    crc_check = mylar.filechecker.crc(path)
+
+#    #will be either copy / move
+#    if mylar.FILE_OPS == 'copy':
+#        shutil.copy( path , dst )
+#        if crc_check == mylar.filechecker.crc(dst):
+#            return True
+#        else:
+#            return False
+#    elif mylar.FILE_OPS == 'move':
+#        shutil.move( path , dst )
+#        if crc_check == mylar.filechecker.crc(dst):
+#            return True
+#        else:
+#            return False
+
+#    elif mylar.FILE_OPS == 'hardlink':
+#        import sys
+
+#        # Open a file
+#        fd = os.open( path, os.O_RDWR|os.O_CREAT )
+#        os.close( fd )
+
+#        # Now create another copy of the above file.
+#        os.link( path, dst )
+
+#        print "Created hard link successfully!!"
+#        return True
+#    elif mylar.FILE_OPS == 'softlink':
+#        try:
+#            os.symlink( path,dst )
+#        except:
+#            print 'Unable to create symlink.'
+#            return False
+#        return True
+#    else:
+#        return False
+
 from threading import Thread
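The block above lands fully commented out, so nothing calls it yet. As a rough idea of what it sketches, here is a minimal runnable version; mylar.FILE_OPS and mylar.filechecker.crc() are taken from the comments as assumptions, and zlib.crc32 is used below only as a stand-in checksum:

    import os
    import shutil
    import zlib

    def _crc(path):
        # stand-in checksum; the commented block defers to mylar.filechecker.crc()
        with open(path, 'rb') as f:
            return zlib.crc32(f.read()) & 0xffffffff

    def file_ops(path, dst, op='copy'):
        # copy or move with a before/after checksum comparison;
        # link modes skip the checksum since no data is rewritten
        crc_check = _crc(path)
        if op == 'copy':
            shutil.copy(path, dst)
            return crc_check == _crc(dst)
        elif op == 'move':
            shutil.move(path, dst)
            return crc_check == _crc(dst)
        elif op == 'hardlink':
            os.link(path, dst)
            return True
        elif op == 'softlink':
            try:
                os.symlink(path, dst)
            except OSError:
                return False
            return True
        return False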
@@ -559,7 +559,7 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
             moveit.movefiles(comicid, comlocation, ogcname)
         else:
             logger.info('Mass import - Moving not Enabled. Setting Archived Status for import.')
-            moveit.archivefiles(comicid, ogcname)
+            moveit.archivefiles(comicid, comlocation, ogcname)

     #check for existing files...
     statbefore = myDB.selectone("SELECT * FROM issues WHERE ComicID=? AND Issue_Number=?", [comicid, str(latestiss)]).fetchone()
@@ -652,8 +652,9 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
     results = myDB.select("SELECT * from importresults WHERE ComicName=?", [ogcname])
     if results is not None:
         for result in results:
-            controlValue = {"impID": result['impid']}
+            controlValue = {"ComicName": ogcname}
             newValue = {"Status": "Imported",
+                        "SRID": result['SRID'],
                         "ComicID": comicid}
             myDB.upsert("importresults", newValue, controlValue)
@@ -46,6 +46,9 @@ if platform.python_version() == '2.7.6':
 def pullsearch(comicapi, comicquery, offset, explicit, type):
     u_comicquery = urllib.quote(comicquery.encode('utf-8').strip())
     u_comicquery = u_comicquery.replace(" ", "%20")
+    if '-' in u_comicquery:
+        #cause titles like A-Force will return 16,000+ results otherwise
+        u_comicquery = '%22' + u_comicquery + '%22'

     if explicit == 'all' or explicit == 'loose':
         PULLURL = mylar.CVURL + 'search?api_key=' + str(comicapi) + '&resources=' + str(type) + '&query=' + u_comicquery + '&field_list=id,name,start_year,first_issue,site_detail_url,count_of_issues,image,publisher,deck,description&format=xml&page=' + str(offset)
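The effect of the hyphen guard, shown standalone: wrapping the query in %22 makes ComicVine treat 'A-Force' as a quoted phrase instead of matching every title containing 'a' or 'force'.

    import urllib

    comicquery = 'A-Force'
    u_comicquery = urllib.quote(comicquery.encode('utf-8').strip())
    if '-' in u_comicquery:
        u_comicquery = '%22' + u_comicquery + '%22'
    print u_comicquery  # %22A-Force%22 - sent to CV as a quoted phrase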
@@ -135,6 +138,9 @@ def findComic(name, mode, issue, limityear=None, explicit=None, type=None):
     logger.fdebug("there are " + str(totalResults) + " search results...")
     if not totalResults:
         return False
+    if int(totalResults) > 1000:
+        logger.warn('Search returned more than 1000 hits [' + str(totalResults) + ']. Only displaying first 1000 results - use more specifics or the exact ComicID if required.')
+        totalResults = 1000
     countResults = 0
     while (countResults < int(totalResults)):
         #logger.fdebug("querying " + str(countResults))
@@ -1,5 +1,5 @@
 import mylar
-from mylar import db, logger, helpers
+from mylar import db, logger, helpers, updater
 import os
 import shutil
@@ -44,14 +44,25 @@ def movefiles(comicid, comlocation, ogcname, imported=None):
         myDB.upsert("importresults", newValue, controlValue)
     return

-def archivefiles(comicid, ogcname):
+def archivefiles(comicid, ogdir, ogcname):
     myDB = db.DBConnection()
     # if move files isn't enabled, let's set all found comics to Archive status :)
     result = myDB.select("SELECT * FROM importresults WHERE ComicName=?", [ogcname])
-    if result is None: pass
+    if result is None:
+        pass
     else:
-        ogdir = result['Location']
-        origdir = os.path.join(os.path.dirname(ogdir))
+        scandir = []
+        for res in result:
+            if any([os.path.dirname(res['ComicLocation']) in x for x in scandir]):
+                pass
+            else:
+                scandir.append(os.path.dirname(res['ComicLocation']))
+
-        updater.forceRescan(comicid, archive=origdir) #send to rescanner with archive mode turned on
+        for sdir in scandir:
+            logger.info('Updating issue information and setting status to Archived for location: ' + sdir)
+            updater.forceRescan(comicid, archive=sdir) #send to rescanner with archive mode turned on
+
+        logger.info('Now scanning in files.')
+        updater.forceRescan(comicid)

     return
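The loop above collects each distinct parent directory from the import results before rescanning, so a directory holding many issues is only rescanned once. In isolation (the paths are made up):

    import os

    comic_locations = ['/comics/A-Force/A-Force 001.cbz',
                       '/comics/A-Force/A-Force 002.cbz',
                       '/unsorted/A-Force 003.cbz']
    scandir = []
    for loc in comic_locations:
        if any([os.path.dirname(loc) in x for x in scandir]):
            pass
        else:
            scandir.append(os.path.dirname(loc))
    print scandir  # ['/comics/A-Force', '/unsorted']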
@@ -282,6 +282,10 @@ def nzbs(provider=None, forcerss=False):
         num_items = "&num=100" if forcerss else "" # default is 25
         _parse_feed('dognzb', 'https://dognzb.cr/rss.cfm?r=' + mylar.DOGNZB_APIKEY + '&t=7030' + num_items)

+    if mylar.OMGWTFNZBS == 1:
+        num_items = "&num=100" if forcerss else "" # default is 25
+        _parse_feed('omgwtfnzbs', 'http://api.omgwtfnzbs.org/rss?t=7030&dl=1&i=' + (mylar.OMGWTFNZBS_USERNAME or '1') + '&r=' + mylar.OMGWTFNZBS_APIKEY + num_items)
+
     for newznab_host in newznab_hosts:
         site = newznab_host[0].rstrip()
         (newznabuid, _, newznabcat) = (newznab_host[3] or '').partition('#')
@@ -751,19 +755,26 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site):
     #           'User-Agent': str(mylar.USER_AGENT)}

     elif site == 'KAT':
-        stfind = linkit.find('?')
-        if stfind == -1:
-            kat_referrer = helpers.torrent_create('KAT', linkit)
+        #stfind = linkit.find('?')
+        #if stfind == -1:
+        #    kat_referrer = helpers.torrent_create('KAT', linkit)
+        #else:
+        #    kat_referrer = linkit[:stfind]
+
+        url = helpers.torrent_create('KAT', linkit)
+
+        if url.startswith('https'):
+            kat_referrer = 'https://torcache.net/'
         else:
-            kat_referrer = linkit[:stfind]
+            kat_referrer = 'http://torcache.net/'

        #logger.fdebug('KAT Referer set to :' + kat_referrer)

         headers = {'Accept-encoding': 'gzip',
-                   'User-Agent': str(mylar.USER_AGENT)}
-                   #'Referer': kat_referrer}
+                   'User-Agent': str(mylar.USER_AGENT),
+                   'Referer': kat_referrer}

-        url = helpers.torrent_create('KAT', linkit)
         logger.fdebug('Grabbing torrent from url:' + str(url))

         payload = None
         verify = False
@@ -105,6 +105,10 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
     if mylar.DOGNZB == 1:
         nzbprovider.append('dognzb')
         nzbp+=1
+    if mylar.OMGWTFNZBS == 1:
+        nzbprovider.append('omgwtfnzbs')
+        nzbp+=1
+
     # --------
     # Xperimental
     if mylar.EXPERIMENTAL == 1:
@@ -287,6 +291,8 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
         apikey = mylar.NZBSU_APIKEY
     elif nzbprov == 'dognzb':
         apikey = mylar.DOGNZB_APIKEY
+    elif nzbprov == 'omgwtfnzbs':
+        apikey = mylar.OMGWTFNZBS_APIKEY
     elif nzbprov == 'experimental':
         apikey = 'none'
     elif nzbprov == 'newznab':
@@ -495,6 +501,8 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
         findurl = "https://api.dognzb.cr/api?t=search&q=" + str(comsearch) + "&o=xml&cat=7030"
     elif nzbprov == 'nzb.su':
         findurl = "https://api.nzb.su/api?t=search&q=" + str(comsearch) + "&o=xml&cat=7030"
+    elif nzbprov == 'omgwtfnzbs':
+        findurl = "https://api.omgwtfnzbs.org/xml/?search=" + str(comsearch) + "&user=" + mylar.OMGWTFNZBS_USERNAME + "&o=xml&cat=7030"
     elif nzbprov == 'newznab':
         #let's make sure the host has a '/' at the end, if not add it.
         if host_newznab[len(host_newznab) -1:len(host_newznab)] != '/':
@@ -511,7 +519,10 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
         apikey = mylar.TORZNAB_APIKEY
     if nzbprov != 'nzbx':
         # helper function to replace apikey here so we avoid logging it ;)
-        findurl = findurl + "&apikey=" + str(apikey)
+        if nzbprov == 'omgwtfnzbs':
+            findurl = findurl + "&api=" + str(apikey)
+        else:
+            findurl = findurl + "&apikey=" + str(apikey)
         logsearch = helpers.apiremove(str(findurl), 'nzb')
         logger.fdebug("search-url: " + str(logsearch))
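Worth noting for anyone adding another provider: omgwtfnzbs authenticates with &user= and &api= rather than the newznab-style &apikey=, which is why the branch above exists. Roughly (placeholder key and query):

    nzbprov = 'omgwtfnzbs'
    key = '0123456789abcdef'  # placeholder, not a real key
    findurl = 'https://api.omgwtfnzbs.org/xml/?search=A-Force&user=someuser&o=xml&cat=7030'
    if nzbprov == 'omgwtfnzbs':
        findurl += '&api=' + key     # omgwtfnzbs style
    else:
        findurl += '&apikey=' + key  # newznab/dognzb/nzb.su style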
@@ -664,6 +675,8 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa

     if UseFuzzy == "1":
         logger.fdebug('Year has been fuzzied for this series, ignoring store date comparison entirely.')
+        postdate_int = None
+        issuedate_int = None
     else:

         #use store date instead of publication date for comparisons since publication date is usually +2 months
@@ -1174,11 +1187,11 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
     #if the found volume is a vol.0, up it to vol.1 (since there is no V0)
     if F_ComicVersion == '0':
         #need to convert dates to just be yyyy-mm-dd and do comparison, time operator in the below calc as well which probably throws off some accuracy.
-        if postdate_int >= issuedate_int and nzbprov == '32P':
-            logger.fdebug('32P torrent discovery. Store date (' + str(stdate) + ') is before posting date (' + str(pubdate) + '), forcing volume label to be the same as series label (0-Day Enforcement): v' + str(F_ComicVersion) + ' --> v' + str(S_ComicVersion))
-            F_ComicVersion = D_ComicVersion
-        else:
-            F_ComicVersion = '1'
+        F_ComicVersion = '1'
+        if postdate_int is not None:
+            if postdate_int >= issuedate_int and nzbprov == '32P':
+                logger.fdebug('32P torrent discovery. Store date (' + str(stdate) + ') is before posting date (' + str(pubdate) + '), forcing volume label to be the same as series label (0-Day Enforcement): v' + str(F_ComicVersion) + ' --> v' + str(S_ComicVersion))
+                F_ComicVersion = D_ComicVersion

     logger.fdebug("FCVersion: " + str(F_ComicVersion))
     logger.fdebug("DCVersion: " + str(D_ComicVersion))
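Together with the earlier UseFuzzy change, this guard is what closes #1125: a fuzzied year sets postdate_int/issuedate_int to None, and comparing None against a date integer is meaningless, so the comparison is skipped and the volume simply defaults to v1. A tiny self-contained restatement (the function name, the explicit issuedate_int check, and the yyyymmdd integers are illustrative only):

    def pick_volume(postdate_int, issuedate_int, nzbprov, d_version):
        # None dates mean the year was fuzzied - skip the comparison entirely
        f_version = '1'
        if postdate_int is not None and issuedate_int is not None:
            if postdate_int >= issuedate_int and nzbprov == '32P':
                f_version = d_version  # 0-day enforcement for 32P
        return f_version

    print pick_volume(None, None, '32P', '2')          # '1' - fuzzied year, no comparison
    print pick_volume(20150601, 20150501, '32P', '2')  # '2' - 0-day enforcement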
@@ -1670,6 +1683,11 @@ def nzbname_create(provider, title=None, info=None):
         nzbname = re.sub('\s+', ' ', nzbname) #make sure we remove the extra spaces.
         logger.fdebug('[SEARCHER] nzbname (\s): ' + nzbname)
         nzbname = re.sub(' ', '.', nzbname)
+        #remove the [1/9] parts or whatever kinda crap (usually in experimental results)
+        pattern = re.compile(r'\W\d{1,3}\/\d{1,3}\W')
+        match = pattern.search(nzbname)
+        if match:
+            nzbname = re.sub(match.group(), '', nzbname).strip()
         logger.fdebug('[SEARCHER] end nzbname: ' + nzbname)

     logger.fdebug("nzbname used for post-processing:" + nzbname)
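The new pattern strips part-counter noise like (1/9) that experimental results carry in their subject lines (fix #1124). In isolation, with a made-up subject; note that the committed line passes match.group() to re.sub() unescaped, which is fine for plain delimiters, so re.escape() below is a defensive addition of this sketch rather than what the commit does:

    import re

    # made-up experimental subject, already space->dot converted
    nzbname = 'A-Force.001.(2015).(1/9).cbr'
    pattern = re.compile(r'\W\d{1,3}\/\d{1,3}\W')
    match = pattern.search(nzbname)
    if match:
        nzbname = re.sub(re.escape(match.group()), '', nzbname).strip()
    print nzbname  # A-Force.001.(2015)..cbr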
@@ -1757,6 +1775,14 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
         down_url = urljoin(link, str(mylar.DOGNZB_APIKEY))
         verify = False

+    elif nzbprov == 'omgwtfnzbs':
+        #omgwtfnzbs.
+        down_url = 'https://api.omgwtfnzbs.org/sn.php?'
+        payload = {'id': str(nzbid),
+                   'user': str(mylar.OMGWTFNZBS_USERNAME),
+                   'api': str(mylar.OMGWTFNZBS_APIKEY)}
+        verify = True
+
     else:
         #experimental - direct link.
         down_url = link
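searcher() later hands down_url/payload/verify to the common download request; a minimal sketch of what the resulting call looks like for this provider (requests-style, placeholder credentials; the actual downstream call in mylar is an assumption here). verify=True keeps certificate checking on for the https endpoint:

    import requests

    down_url = 'https://api.omgwtfnzbs.org/sn.php?'
    payload = {'id': '123456',             # nzbid extracted from the search result
               'user': 'someuser',         # mylar.OMGWTFNZBS_USERNAME
               'api': '0123456789abcdef'}  # mylar.OMGWTFNZBS_APIKEY
    r = requests.get(down_url, params=payload, verify=True)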
@@ -2314,6 +2340,10 @@ def generate_id(nzbprov, link):
         url_parts = urlparse.urlparse(link)
         path_parts = url_parts[2].rpartition('/')
         nzbid = path_parts[0].rsplit('/', 1)[1]
+    elif nzbprov == 'omgwtfnzbs':
+        url_parts = urlparse.urlparse(link)
+        path_parts = url_parts[4].split('&')
+        nzbid = path_parts[0].rsplit('=',1)[1]
     elif nzbprov == 'newznab':
         #if in format of http://newznab/getnzb/<id>.nzb&i=1&r=apikey
         tmpid = urlparse.urlparse(link)[4] #param 4 is the query string from the url.
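How the query-string split above recovers the id, shown with a hypothetical download link (urlparse index 4 is the query string, and the id is assumed to be the first parameter, as in the link shape shown):

    import urlparse

    link = 'https://api.omgwtfnzbs.org/sn.php?id=123456&user=someuser&api=0123456789abcdef'
    url_parts = urlparse.urlparse(link)
    path_parts = url_parts[4].split('&')   # ['id=123456', 'user=someuser', ...]
    nzbid = path_parts[0].rsplit('=', 1)[1]
    print nzbid  # 123456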
@@ -808,7 +808,9 @@ def forceRescan(ComicID, archive=None, module=None):
             comiccnt += int(tmpv['comiccount'])
             fca.append(tmpv)
     else:
-        fca.append(filechecker.listFiles(dir=archive, watchcomic=rescan['ComicName'], Publisher=rescan['ComicPublisher'], AlternateSearch=rescan['AlternateSearch']))
+        files_arc = filechecker.listFiles(dir=archive, watchcomic=rescan['ComicName'], Publisher=rescan['ComicPublisher'], AlternateSearch=rescan['AlternateSearch'])
+        fca.append(files_arc)
+        comiccnt = int(files_arc['comiccount'])
     fcb = []
     fc = {}
     #if len(fca) > 0:
@@ -1205,11 +1207,12 @@ def forceRescan(ComicID, archive=None, module=None):
                 controlValueDict = {"IssueID": iss_id}

                 #if Archived, increase the 'Have' count.
-                #if archive:
-                #    issStatus = "Archived"
-
-                if haveissue == "yes":
+                if archive:
+                    issStatus = "Archived"
+                else:
                     issStatus = "Downloaded"
+
+                if haveissue == "yes":
                     newValueDict = {"Location": isslocation,
                                     "ComicSize": issSize,
                                     "Status": issStatus
@@ -194,6 +194,9 @@ class WebInterface(object):
         if len(name) == 0:
             raise cherrypy.HTTPRedirect("home")
         if type == 'comic' and mode == 'pullseries':
+            if issue == 0:
+                #if it's an issue 0, CV doesn't have any data populated yet - so bump it up one to at least get the current results.
+                issue = 1
             searchresults, explicit = mb.findComic(name, mode, issue=issue)
         elif type == 'comic' and mode == 'series':
             if name.startswith('4050-'):
@@ -2865,8 +2868,11 @@ class WebInterface(object):
         mylar.IMP_METADATA = imp_metadata
         mylar.config_write()
         #thread the scan.
-        if scan == '1': scan = True
-        else: scan = False
+        if scan == '1':
+            scan = True
+        else:
+            scan = False
+            return

         thread_ = threading.Thread(target=librarysync.scanLibrary, name="LibraryScan", args=[scan, queue])
         thread_.start()
@@ -3253,6 +3259,9 @@ class WebInterface(object):
                     "nzbsu_api": mylar.NZBSU_APIKEY,
                     "use_dognzb": helpers.checked(mylar.DOGNZB),
                     "dognzb_api": mylar.DOGNZB_APIKEY,
+                    "use_omgwtfnzbs": helpers.checked(mylar.OMGWTFNZBS),
+                    "omgwtfnzbs_username": mylar.OMGWTFNZBS_USERNAME,
+                    "omgwtfnzbs_api": mylar.OMGWTFNZBS_APIKEY,
                     "use_experimental": helpers.checked(mylar.EXPERIMENTAL),
                     "enable_torznab": helpers.checked(mylar.ENABLE_TORZNAB),
                     "torznab_name": mylar.TORZNAB_NAME,
@@ -3555,7 +3564,7 @@ class WebInterface(object):
     def configUpdate(self, comicvine_api=None, http_host='0.0.0.0', http_username=None, http_port=8090, http_password=None, enable_https=0, https_cert=None, https_key=None, api_enabled=0, api_key=None, launch_browser=0, auto_update=0, logverbose=0, annuals_on=0, max_logsize=None, download_scan_interval=None, nzb_search_interval=None, nzb_startup_search=0, libraryscan_interval=None,
                      nzb_downloader=0, sab_host=None, sab_username=None, sab_apikey=None, sab_password=None, sab_category=None, sab_priority=None, sab_directory=None, sab_to_mylar=0, log_dir=None, log_level=0, blackhole_dir=None,
                      nzbget_host=None, nzbget_port=None, nzbget_username=None, nzbget_password=None, nzbget_category=None, nzbget_priority=None, nzbget_directory=None,
-                     usenet_retention=None, nzbsu=0, nzbsu_uid=None, nzbsu_apikey=None, dognzb=0, dognzb_apikey=None, newznab=0, newznab_host=None, newznab_name=None, newznab_apikey=None, newznab_uid=None, newznab_enabled=0,
+                     usenet_retention=None, nzbsu=0, nzbsu_uid=None, nzbsu_apikey=None, dognzb=0, dognzb_apikey=None, omgwtfnzbs=0, omgwtfnzbs_username=None, omgwtfnzbs_apikey=None, newznab=0, newznab_host=None, newznab_name=None, newznab_apikey=None, newznab_uid=None, newznab_enabled=0,
                      enable_torznab=0, torznab_name=None, torznab_host=None, torznab_apikey=None, torznab_category=None, experimental=0, check_folder=None, enable_check_folder=0,
                      enable_meta=0, cmtagger_path=None, ct_tag_cr=0, ct_tag_cbl=0, ct_cbz_overwrite=0, unrar_cmd=None, enable_rss=0, rss_checkinterval=None, failed_download_handling=0, failed_auto=0, enable_torrent_search=0, enable_kat=0, enable_32p=0, mode_32p=0, rssfeed_32p=None, passkey_32p=None, username_32p=None, password_32p=None, snatchedtorrent_notify=0,
                      enable_torrents=0, minseeds=0, torrent_local=0, local_watchdir=None, torrent_seedbox=0, seedbox_watchdir=None, seedbox_user=None, seedbox_pass=None, seedbox_host=None, seedbox_port=None,
@@ -3611,6 +3620,9 @@ class WebInterface(object):
         mylar.NZBSU_APIKEY = nzbsu_apikey
         mylar.DOGNZB = dognzb
         mylar.DOGNZB_APIKEY = dognzb_apikey
+        mylar.OMGWTFNZBS = omgwtfnzbs
+        mylar.OMGWTFNZBS_USERNAME = omgwtfnzbs_username
+        mylar.OMGWTFNZBS_APIKEY = omgwtfnzbs_apikey
         mylar.ENABLE_TORZNAB = enable_torznab
         mylar.TORZNAB_NAME = torznab_name
         mylar.TORZNAB_HOST = torznab_host