IMP: (#1089) Torznab provider option (currently allows only one Torznab entry - use Jackett), IMP: (#1095) Will now retain the reading order taken from ComicVine - previously reordering results could be inconsistent, FIX: Better handling of INH & NOW issues within the issue number when file-checking/post-processing, FIX: Manually post-processing annuals would fail, FIX: Download button on Weekly tab will now download issues that are in a Downloaded status as intended, FIX: NotifyMyAndroid Test button now works, FIX: Updated KAT default url to use kat.cr, FIX: Removed some unnecessary logging lines when performing the weekly pull refresh, IMP: Some moving/restructuring of items in the rss/search modules to allow for future WIP items.

This commit is contained in:
evilhero 2015-08-11 17:56:22 -04:00
parent ec31c9ec9d
commit 72f915edb1
13 changed files with 280 additions and 263 deletions

View File

@ -548,6 +548,27 @@
</div>
</fieldset>
</div>
<div class="row checkbox left clearfix">
<input type="checkbox" id="enable_torznab" onclick="initConfigCheckbox($(this));" name="enable_torznab" value=1 ${config['enable_torznab']} /><label>Enable Torznab</label>
</div>
<div class="config">
<div class="row">
<label>Torznab Name</label>
<input type="text" name="torznab_name" value="${config['torznab_name']}" size="30">
</div>
<div class="row">
<label>Torznab Host</label>
<input type="text" name="torznab_host" value="${config['torznab_host']}" size="30">
</div>
<div class="row">
<label>Torznab API</label>
<input type="text" name="torznab_apikey" value="${config['torznab_apikey']}" size="36">
</div>
<div class="row">
<label>Torznab Category</label>
<input type="text" name="torznab_category" value="${config['torznab_category']}" size="12">
</div>
</div>
</div>
</fieldset>
@ -1340,6 +1361,7 @@
initConfigCheckbox("#enable_api");
initConfigCheckbox("#sab_to_mylar");
initConfigCheckbox("#usenewznab");
initConfigCheckbox("#enable_torznab");
initConfigCheckbox("#usenzbsu");
initConfigCheckbox("#usedognzb");
initConfigCheckbox("#enable_torrents");
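The checkbox and the four text fields above map directly onto the new [Torznab] block that config_write() emits later in this commit. A hedged sketch of the resulting config.ini section (all values are hypothetical; the host mirrors the Jackett example noted in the search module):

[Torznab]
enable_torznab = 1
torznab_name = Jackett
torznab_host = http://localhost:9117/api/iptorrents
torznab_apikey = 0123456789abcdef
torznab_category = 8000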

View File

@ -108,7 +108,12 @@
<%def name="javascriptIncludes()">
<script src="js/libs/jquery.dataTables.min.js"></script>
<script type="text/javascript">
function addAction() {
$('#weekfolder').append('<input type="hidden" name="filename" value=True />');
};
$("#menu_link_scan").click(function() {
addAction();
$('#MassDownload').submit();
return true;
});

View File

@ -311,10 +311,10 @@ class PostProcessor(object):
logger.fdebug(module + ' Bi-Annual detected.')
fcdigit = helpers.issuedigits(re.sub('biannual', '', str(biannchk)).strip())
else:
logger.fdebug(module + ' Annual detected.')
fcdigit = helpers.issuedigits(re.sub('annual', '', str(temploc.lower())).strip())
logger.fdebug(module + ' Annual detected [' + str(fcdigit) +']. ComicID assigned as ' + str(cs['ComicID']))
annchk = "yes"
issuechk = myDB.selectone("SELECT * from issues WHERE ComicID=? AND Int_IssueNumber=?", [cs['ComicID'], fcdigit]).fetchone()
issuechk = myDB.selectone("SELECT * from annuals WHERE ComicID=? AND Int_IssueNumber=?", [cs['ComicID'], fcdigit]).fetchone()
else:
fcdigit = helpers.issuedigits(temploc)
issuechk = myDB.selectone("SELECT * from issues WHERE ComicID=? AND Int_IssueNumber=?", [cs['ComicID'], fcdigit]).fetchone()
@ -450,7 +450,6 @@ class PostProcessor(object):
annchk = "yes"
issuechk = myDB.selectone("SELECT * from readinglist WHERE ComicID=? AND Int_IssueNumber=?", [v[i]['WatchValues']['ComicID'], fcdigit]).fetchone()
else:
logger.info('Issue Number :' + str(temploc))
fcdigit = helpers.issuedigits(temploc)
issuechk = myDB.selectone("SELECT * from readinglist WHERE ComicID=? AND Int_IssueNumber=?", [v[i]['WatchValues']['ComicID'], fcdigit]).fetchone()

View File

@ -256,11 +256,11 @@ NEWZNAB_ENABLED = False
EXTRA_NEWZNABS = []
NEWZNAB_EXTRA = None
RAW = False
RAW_PROVIDER = None
RAW_USERNAME = None
RAW_PASSWORD = None
RAW_GROUPS = None
ENABLE_TORZNAB = False
TORZNAB_NAME = None
TORZNAB_HOST = None
TORZNAB_APIKEY = None
TORZNAB_CATEGORY = None
EXPERIMENTAL = False
ALTEXPERIMENTAL = False
@ -411,7 +411,8 @@ def initialize():
LIBRARYSCAN, LIBRARYSCAN_INTERVAL, DOWNLOAD_SCAN_INTERVAL, NZB_DOWNLOADER, USE_SABNZBD, SAB_HOST, SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_PRIORITY, SAB_TO_MYLAR, SAB_DIRECTORY, USE_BLACKHOLE, BLACKHOLE_DIR, ADD_COMICS, COMIC_DIR, IMP_MOVE, IMP_RENAME, IMP_METADATA, \
USE_NZBGET, NZBGET_HOST, NZBGET_PORT, NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_PRIORITY, NZBGET_DIRECTORY, NZBSU, NZBSU_UID, NZBSU_APIKEY, DOGNZB, DOGNZB_APIKEY, \
NEWZNAB, NEWZNAB_NAME, NEWZNAB_HOST, NEWZNAB_APIKEY, NEWZNAB_UID, NEWZNAB_ENABLED, EXTRA_NEWZNABS, NEWZNAB_EXTRA, \
RAW, RAW_PROVIDER, RAW_USERNAME, RAW_PASSWORD, RAW_GROUPS, EXPERIMENTAL, ALTEXPERIMENTAL, \
ENABLE_TORZNAB, TORZNAB_NAME, TORZNAB_HOST, TORZNAB_APIKEY, TORZNAB_CATEGORY, \
EXPERIMENTAL, ALTEXPERIMENTAL, \
ENABLE_META, CMTAGGER_PATH, CT_TAG_CR, CT_TAG_CBL, CT_CBZ_OVERWRITE, UNRAR_CMD, UPDATE_ENDED, INDIE_PUB, BIGGIE_PUB, IGNORE_HAVETOTAL, SNATCHED_HAVETOTAL, PROVIDER_ORDER, \
dbUpdateScheduler, searchScheduler, RSSScheduler, WeeklyScheduler, VersionScheduler, FolderMonitorScheduler, \
ENABLE_TORRENTS, MINSEEDS, TORRENT_LOCAL, LOCAL_WATCHDIR, TORRENT_SEEDBOX, SEEDBOX_HOST, SEEDBOX_PORT, SEEDBOX_USER, SEEDBOX_PASS, SEEDBOX_WATCHDIR, \
@ -430,9 +431,9 @@ def initialize():
CheckSection('NZBGet')
CheckSection('NZBsu')
CheckSection('DOGnzb')
CheckSection('Raw')
CheckSection('Experimental')
CheckSection('Newznab')
CheckSection('Torznab')
CheckSection('Torrents')
# Set global variables based on config file or use defaults
try:
@ -735,18 +736,21 @@ def initialize():
PR.append('dognzb')
PR_NUM +=1
RAW = bool(check_setting_int(CFG, 'Raw', 'raw', 0))
RAW_PROVIDER = check_setting_str(CFG, 'Raw', 'raw_provider', '')
RAW_USERNAME = check_setting_str(CFG, 'Raw', 'raw_username', '')
RAW_PASSWORD = check_setting_str(CFG, 'Raw', 'raw_password', '')
RAW_GROUPS = check_setting_str(CFG, 'Raw', 'raw_groups', '')
EXPERIMENTAL = bool(check_setting_int(CFG, 'Experimental', 'experimental', 0))
ALTEXPERIMENTAL = bool(check_setting_int(CFG, 'Experimental', 'altexperimental', 1))
if EXPERIMENTAL:
PR.append('Experimental')
PR_NUM +=1
ENABLE_TORZNAB = bool(check_setting_int(CFG, 'Torznab', 'enable_torznab', 0))
TORZNAB_NAME = check_setting_str(CFG, 'Torznab', 'torznab_name', '')
TORZNAB_HOST = check_setting_str(CFG, 'Torznab', 'torznab_host', '')
TORZNAB_APIKEY = check_setting_str(CFG, 'Torznab', 'torznab_apikey', '')
TORZNAB_CATEGORY = check_setting_str(CFG, 'Torznab', 'torznab_category', '')
if ENABLE_TORZNAB:
PR.append('Torznab')
PR_NUM +=1
#print 'PR_NUM::' + str(PR_NUM)
NEWZNAB = bool(check_setting_int(CFG, 'Newznab', 'newznab', 0))
@ -1359,6 +1363,13 @@ def config_write():
new_config['Experimental']['experimental'] = int(EXPERIMENTAL)
new_config['Experimental']['altexperimental'] = int(ALTEXPERIMENTAL)
new_config['Torznab'] = {}
new_config['Torznab']['enable_torznab'] = int(ENABLE_TORZNAB)
new_config['Torznab']['torznab_name'] = TORZNAB_NAME
new_config['Torznab']['torznab_host'] = TORZNAB_HOST
new_config['Torznab']['torznab_apikey'] = TORZNAB_APIKEY
new_config['Torznab']['torznab_category'] = TORZNAB_CATEGORY
new_config['Newznab'] = {}
new_config['Newznab']['newznab'] = int(NEWZNAB)
@ -1400,13 +1411,6 @@ def config_write():
new_config['PUSHBULLET']['pushbullet_deviceid'] = PUSHBULLET_DEVICEID
new_config['PUSHBULLET']['pushbullet_onsnatch'] = int(PUSHBULLET_ONSNATCH)
new_config['Raw'] = {}
new_config['Raw']['raw'] = int(RAW)
new_config['Raw']['raw_provider'] = RAW_PROVIDER
new_config['Raw']['raw_username'] = RAW_USERNAME
new_config['Raw']['raw_password'] = RAW_PASSWORD
new_config['Raw']['raw_groups'] = RAW_GROUPS
new_config.write()
def start():

View File

@ -107,7 +107,13 @@ def getComic(comicid, type, issueid=None, arc=None, arcid=None, arclist=None, co
if comicid is None:
#if comicid is None, it's coming from the story arc search results.
id = arcid
islist = arclist
#since the arclist holds the issueids, and the pertinent reading order - we need to strip out the reading order so this works.
aclist = ''
for ac in arclist.split('|'):
aclist += ac[:ac.find(',')] + '|'
if aclist.endswith('|'):
aclist = aclist[:-1]
islist = aclist
else:
id = comicid
islist = None
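For illustration, the arclist handed over from the story-arc search now carries the ComicVine reading order appended to each issueid (see the storyarcinfo() change later in this commit), so the loop above strips it back out before the API call. A quick sketch with hypothetical ids:

arclist = '12345,1|67890,2|13579,3'   # 'issueid,readingorder' pairs from storyarcinfo()

aclist = ''
for ac in arclist.split('|'):
    aclist += ac[:ac.find(',')] + '|'
if aclist.endswith('|'):
    aclist = aclist[:-1]

print(aclist)   # -> 12345|67890|13579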

View File

@ -61,8 +61,8 @@ def listFiles(dir, watchcomic, Publisher, AlternateSearch=None, manual=None, sar
'\@']
issue_exceptions = ['AU',
'.INH',
'.NOW',
'INH',
'NOW',
'AI',
'A',
'B',
@ -632,7 +632,7 @@ def listFiles(dir, watchcomic, Publisher, AlternateSearch=None, manual=None, sar
findtitlepos = subname.find('-')
if charpos != 0:
logger.fdebug('[FILECHECKER] detected ' + str(len(charpos)) + ' special characters')
logger.fdebug('[FILECHECKER] detected ' + str(len(charpos)) + ' special characters')
for i, j in enumerate(charpos):
logger.fdebug('i,j:' + str(i) + ',' + str(j))
logger.fdebug(str(len(subname)) + ' - subname: ' + subname)
@ -700,7 +700,10 @@ def listFiles(dir, watchcomic, Publisher, AlternateSearch=None, manual=None, sar
justthedigits_1 = 'Annual ' + justthedigits_1
logger.fdebug('[FILECHECKER] after title removed from SUBNAME [' + justthedigits_1 + ']')
exceptionmatch = [x for x in issue_exceptions if x.lower() in justthedigits_1.lower()]
if exceptionmatch:
logger.fdebug('[FILECHECKER] Remapping to accommodate ' + str(exceptionmatch))
digitchk = 0
titlechk = False
if digitchk:
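A minimal sketch of how the reworked exception tokens behave against a filename fragment (the fragment and the shortened list are hypothetical); dropping the leading dots lets both '1.INH' and '1INH' style numbering match:

issue_exceptions = ['AU', 'INH', 'NOW', 'AI', 'A', 'B']
justthedigits_1 = '1.INH'
exceptionmatch = [x for x in issue_exceptions if x.lower() in justthedigits_1.lower()]
print(exceptionmatch)   # -> ['INH']  (the old '.INH' token only matched when the dot was present)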

View File

@ -1742,7 +1742,7 @@ def create_https_certificates(ssl_cert, ssl_key):
return True
def torrent_create(site, linkid, alt=None):
if site == '32P':
if site == '32P' or site == 'TOR':
pass
elif site == 'KAT':
if 'http' in linkid:

View File

@ -377,15 +377,17 @@ def storyarcinfo(xmlid):
issuedom = arcdom.getElementsByTagName('issue')
isc = 0
arclist = ''
ordernum = 1
for isd in issuedom:
zeline = isd.getElementsByTagName('id')
isdlen = len( zeline )
isb = 0
while ( isb < isdlen):
if isc == 0:
arclist = str(zeline[isb].firstChild.wholeText).strip()
arclist = str(zeline[isb].firstChild.wholeText).strip() + ',' + str(ordernum)
else:
arclist += '|' + str(zeline[isb].firstChild.wholeText).strip()
arclist += '|' + str(zeline[isb].firstChild.wholeText).strip() + ',' + str(ordernum)
ordernum+=1
isb+=1
isc+=1

View File

@ -88,25 +88,26 @@ class NMA:
def __init__(self):
self.NMA_URL = "https://www.notifymyandroid.com/publicapi/notify"
self.apikey = mylar.NMA_APIKEY
self.priority = mylar.NMA_PRIORITY
self._session = requests.Session()
def _send(self, data, module):
url_data = urllib.urlencode(data)
url = 'https://www.notifymyandroid.com/publicapi/notify'
r = self._session.post(self.NMA_URL, data=data)
req = urllib2.Request(url, url_data)
try:
handle = urllib2.urlopen(req)
except Exception, e:
logger.warn(module + ' Error opening NotifyMyAndroid url: ' % e)
return
response = handle.read().decode(mylar.SYS_ENCODING)
return response
logger.info('[NMA] Status code returned: ' + str(r.status_code))
if r.status_code == 200:
logger.info(module + ' NotifyMyAndroid notifications sent.')
return True
elif r.status_code >= 400 and r.status_code < 500:
logger.error(module + ' NotifyMyAndroid request failed: %s' % r.content)
return False
else:
logger.error(module + ' NotifyMyAndroid notification failed serverside.')
return False
def notify(self, snline=None, prline=None, prline2=None, snatched_nzb=None, sent_to=None, prov=None, module=None):
@ -125,7 +126,7 @@ class NMA:
event = prline
description = prline2
data = {'apikey': apikey, 'application': 'Mylar', 'event': event, 'description': description, 'priority': priority}
data = {'apikey': apikey, 'application': 'Mylar', 'event': event.encode('utf-8'), 'description': description.encode('utf-8'), 'priority': priority}
logger.info(module + ' Sending notification request to NotifyMyAndroid')
request = self._send(data, module)
@ -134,7 +135,11 @@ class NMA:
logger.warn(module + ' Error sending notification request to NotifyMyAndroid')
def test_notify(self):
self.notify(prline='Test Message',prline2='ZOMG Lazors Pewpewpew!')
event = 'Test Message'
description = 'ZOMG Lazors PewPewPew!'
data = {'apikey': self.apikey, 'application': 'Mylar', 'event': event.encode('utf-8'), 'description': description.encode('utf-8'), 'priority': 2}
return self._send(data,'[NOTIFIER]')
# 2013-04-01 Added Pushover.net notifications, based on copy of Prowl class above.
# No extra care has been put into API friendliness at the moment (read: https://pushover.net/api#friendly)
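A hedged, standalone sketch of the requests-based flow the rewritten _send() now uses (the API key is hypothetical):

import requests

data = {'apikey': '0123456789abcdef',
        'application': 'Mylar',
        'event': 'Test Message'.encode('utf-8'),
        'description': 'ZOMG Lazors PewPewPew!'.encode('utf-8'),
        'priority': 2}
r = requests.Session().post('https://www.notifymyandroid.com/publicapi/notify', data=data)
print(r.status_code)   # 200 on success, 4xx on a bad request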

View File

@ -47,7 +47,7 @@ def torrents(pickfeed=None, seriesname=None, issue=None, feedinfo=None):
kat_url = mylar.KAT_PROXY + '/'
else:
#switched to https.
kat_url = 'https://kat.ph/'
kat_url = 'https://kat.cr/'
if pickfeed == 'KAT':
#we need to cycle through both categories (comics & other) - so we loop.
@ -66,7 +66,8 @@ def torrents(pickfeed=None, seriesname=None, issue=None, feedinfo=None):
feeddata = []
myDB = db.DBConnection()
torthekat = []
katinfo = {}
torthe32p = []
torinfo = {}
while (lp < loopit):
if lp == 0 and loopit == 2:
@ -109,9 +110,10 @@ def torrents(pickfeed=None, seriesname=None, issue=None, feedinfo=None):
logger.error('invalid pickfeed denoted...')
return
#logger.info('feed URL: ' + str(feed))
#logger.info('[' + str(pickfeed) + '] feed URL: ' + str(feed))
feedme = feedparser.parse(feed)
if pickfeed != '4':
feedme = feedparser.parse(feed)
if pickfeed == "3" or pickfeed == "6" or pickfeed == "2" or pickfeed == "5":
picksite = 'KAT'
@ -120,99 +122,114 @@ def torrents(pickfeed=None, seriesname=None, issue=None, feedinfo=None):
i = 0
for entry in feedme['entries']:
if pickfeed == "3" or pickfeed == "6":
tmpsz = feedme.entries[i].enclosures[0]
feeddata.append({
'site': picksite,
'title': feedme.entries[i].title,
'link': tmpsz['url'],
'pubdate': feedme.entries[i].updated,
'size': tmpsz['length']
})
if pickfeed == '4':
for entry in searchresults['entries']:
justdigits = entry['file_size'] #size not available in follow-list rss feed
seeddigits = entry['seeders'] #number of seeders not available in follow-list rss feed
elif pickfeed == "2" or pickfeed == "5":
tmpsz = feedme.entries[i].enclosures[0]
torthekat.append({
'site': picksite,
'title': feedme.entries[i].title,
'link': tmpsz['url'],
'pubdate': feedme.entries[i].updated,
'size': tmpsz['length']
})
if int(seeddigits) >= int(mylar.MINSEEDS):
torthe32p.append({
'site': picksite,
'title': entry['torrent_seriesname'].lstrip() + ' ' + entry['torrent_seriesvol'] + ' #' + entry['torrent_seriesiss'],
'volume': entry['torrent_seriesvol'], # not stored by mylar yet.
'issue': entry['torrent_seriesiss'], # not stored by mylar yet.
'link': entry['torrent_id'], #just the id for the torrent
'pubdate': entry['pubdate'],
'size': entry['file_size'],
'seeders': entry['seeders'],
'files': entry['num_files']
})
i += 1
else:
for entry in feedme['entries']:
if pickfeed == "3" or pickfeed == "6":
tmpsz = feedme.entries[i].enclosures[0]
torthekat.append({
'site': picksite,
'title': feedme.entries[i].title,
'link': tmpsz['url'],
'pubdate': feedme.entries[i].updated,
'size': tmpsz['length']
})
elif pickfeed == "1" or pickfeed == "4" or int(pickfeed) > 7:
if pickfeed == "1" or int(pickfeed) > 7:
tmpdesc = feedme.entries[i].description
st_pub = feedme.entries[i].title.find('(')
st_end = feedme.entries[i].title.find(')')
pub = feedme.entries[i].title[st_pub +1:st_end] # +1 to not include (
#logger.fdebug('publisher: ' + re.sub("'",'', pub).strip()) #publisher sometimes is given within quotes for some reason, strip 'em.
vol_find = feedme.entries[i].title.find('vol.')
series = feedme.entries[i].title[st_end +1:vol_find].strip()
#logger.fdebug('series title: ' + series)
iss_st = feedme.entries[i].title.find(' - ', vol_find)
vol = re.sub('\.', '', feedme.entries[i].title[vol_find:iss_st]).strip()
#logger.fdebug('volume #: ' + str(vol))
issue = feedme.entries[i].title[iss_st +3:].strip()
#logger.fdebug('issue # : ' + str(issue))
#break it down to get the Size since it's available on THIS 32P feed only so far.
#when it becomes available in the new feeds, this will be working, for now it just nulls out.
sizestart = tmpdesc.find('Size:')
justdigits = 0
if sizestart >= 0:
sizeend = tmpdesc.find('Leechers:')
sizestart +=5 # to get to the end of the word 'Size:'
tmpsize = tmpdesc[sizestart:sizeend].strip()
fdigits = re.sub("[^0123456789\.]", "", tmpsize).strip()
if '.' in fdigits:
decfind = fdigits.find('.')
wholenum = fdigits[:decfind]
decnum = fdigits[decfind +1:]
else:
wholenum = fdigits
decnum = 0
if 'MB' in tmpsize:
wholebytes = int(wholenum) * 1048576
wholedecimal = (int(decnum) * 1048576) / 100
justdigits = wholebytes + wholedecimal
else:
#it's 'GB' then
wholebytes = (int(wholenum) * 1024) * 1048576
wholedecimal = ((int(decnum) * 1024) * 1048576) / 100
justdigits = wholebytes + wholedecimal
#this is not currently working for 32p
#Get the # of seeders.
#seedstart = tmpdesc.find('Seeders:')
#seedend = tmpdesc.find('Added:')
#seedstart +=8 # to get to the end of the word 'Seeders:'
#tmpseed = tmpdesc[seedstart:seedend].strip()
#seeddigits = re.sub("[^0123456789\.]", "", tmpseed).strip()
seeddigits = 0
else:
justdigits = None #size not available in follow-list rss feed
seeddigits = 0 #number of seeders not available in follow-list rss feed
if int(mylar.MINSEEDS) >= int(seeddigits):
link = feedme.entries[i].link
linkst = link.find('&id')
linken = link.find('&', linkst +1)
if linken == -1:
linken = len(link)
newlink = re.sub('&id=', '', link[linkst:linken]).strip()
feeddata.append({
elif pickfeed == "2" or pickfeed == "5":
tmpsz = feedme.entries[i].enclosures[0]
torthekat.append({
'site': picksite,
'title': series.lstrip() + ' ' + vol + ' #' + issue,
'volume': vol, # not stored by mylar yet.
'issue': issue, # not stored by mylar yet.
'link': newlink, #just the id for the torrent
'title': feedme.entries[i].title,
'link': tmpsz['url'],
'pubdate': feedme.entries[i].updated,
'size': justdigits
'size': tmpsz['length']
})
i += 1
elif pickfeed == "1" or int(pickfeed) > 7:
if pickfeed == "1" or int(pickfeed) > 7:
tmpdesc = feedme.entries[i].description
st_pub = feedme.entries[i].title.find('(')
st_end = feedme.entries[i].title.find(')')
pub = feedme.entries[i].title[st_pub +1:st_end] # +1 to not include (
#logger.fdebug('publisher: ' + re.sub("'",'', pub).strip()) #publisher sometimes is given within quotes for some reason, strip 'em.
vol_find = feedme.entries[i].title.find('vol.')
series = feedme.entries[i].title[st_end +1:vol_find].strip()
#logger.fdebug('series title: ' + series)
iss_st = feedme.entries[i].title.find(' - ', vol_find)
vol = re.sub('\.', '', feedme.entries[i].title[vol_find:iss_st]).strip()
#logger.fdebug('volume #: ' + str(vol))
issue = feedme.entries[i].title[iss_st +3:].strip()
#logger.fdebug('issue # : ' + str(issue))
#break it down to get the Size since it's available on THIS 32P feed only so far.
#when it becomes available in the new feeds, this will be working, for now it just nulls out.
sizestart = tmpdesc.find('Size:')
justdigits = 0
if sizestart >= 0:
sizeend = tmpdesc.find('Leechers:')
sizestart +=5 # to get to the end of the word 'Size:'
tmpsize = tmpdesc[sizestart:sizeend].strip()
fdigits = re.sub("[^0123456789\.]", "", tmpsize).strip()
if '.' in fdigits:
decfind = fdigits.find('.')
wholenum = fdigits[:decfind]
decnum = fdigits[decfind +1:]
else:
wholenum = fdigits
decnum = 0
if 'MB' in tmpsize:
wholebytes = int(wholenum) * 1048576
wholedecimal = (int(decnum) * 1048576) / 100
justdigits = wholebytes + wholedecimal
else:
#it's 'GB' then
wholebytes = (int(wholenum) * 1024) * 1048576
wholedecimal = ((int(decnum) * 1024) * 1048576) / 100
justdigits = wholebytes + wholedecimal
#this is not currently working for 32p
#Get the # of seeders.
#seedstart = tmpdesc.find('Seeders:')
#seedend = tmpdesc.find('Added:')
#seedstart +=8 # to get to the end of the word 'Seeders:'
#tmpseed = tmpdesc[seedstart:seedend].strip()
#seeddigits = re.sub("[^0123456789\.]", "", tmpseed).strip()
seeddigits = 0
if int(mylar.MINSEEDS) >= int(seeddigits):
link = feedme.entries[i].link
linkst = link.find('&id')
linken = link.find('&', linkst +1)
if linken == -1:
linken = len(link)
newlink = re.sub('&id=', '', link[linkst:linken]).strip()
feeddata.append({
'site': picksite,
'title': series.lstrip() + ' ' + vol + ' #' + issue,
'volume': vol, # not stored by mylar yet.
'issue': issue, # not stored by mylar yet.
'link': newlink, #just the id for the torrent
'pubdate': feedme.entries[i].updated,
'size': justdigits
})
i += 1
if feedtype is None:
logger.info('[' + picksite + '] there were ' + str(i) + ' results..')
@ -223,10 +240,15 @@ def torrents(pickfeed=None, seriesname=None, issue=None, feedinfo=None):
lp += 1
if not seriesname:
#rss search results
rssdbupdate(feeddata, totalcount, 'torrent')
else:
katinfo['entries'] = torthekat
return katinfo
#backlog (parsing) search results
if pickfeed == '4':
torinfo['entries'] = torthe32p
else:
torinfo['entries'] = torthekat
return torinfo
return
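Callers doing a backlog (parsing) search now receive the torinfo dict built above instead of the KAT-only katinfo; a rough usage sketch (series name and issue are hypothetical):

from mylar import rsscheck

results = rsscheck.torrents(pickfeed='KAT', seriesname='Invincible', issue='100')
if results:
    for entry in results['entries']:
        print(entry['site'] + ' | ' + entry['title'] + ' | ' + str(entry['size']))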
@ -731,7 +753,7 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site):
headers = None #{'Accept-encoding': 'gzip',
# 'User-Agent': str(mylar.USER_AGENT)}
else:
elif site == 'KAT':
stfind = linkit.find('?')
if stfind == -1:
kat_referrer = helpers.torrent_create('KAT', linkit)
@ -749,6 +771,16 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site):
payload = None
verify = False
else:
headers = {'Accept-encoding': 'gzip',
'User-Agent': str(mylar.USER_AGENT)}
#'Referer': kat_referrer}
url = linkit #helpers.torrent_create('TOR', linkit)
payload = None
verify = False
if not verify:
#32P throws back an insecure warning because it can't validate against the CA. The below suppresses the warning just for 32P instead of displaying it.
#disable SSL warnings - too many 'warning' messages about invalid certificates

View File

@ -92,6 +92,9 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
if mylar.ENABLE_KAT:
torprovider.append('kat')
torp+=1
if mylar.ENABLE_TORZNAB:
torprovider.append('torznab')
torp+=1
##nzb provider selection##
##'dognzb' or 'nzb.su' or 'experimental'
nzbprovider = []
@ -199,6 +202,8 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
searchprov = '32P'
elif prov_order[prov_count] == 'kat':
searchprov = 'KAT'
elif prov_order[prov_count] == 'torznab':
searchprov = 'Torznab'
elif 'newznab' in prov_order[prov_count]:
#this is for newznab
searchprov = 'newznab'
@ -231,6 +236,9 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
if findit == 'yes': break
else:
if searchprov == '32P':
logger.fdebug('32P backlog searching is not currently supported.')
break
findit = NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, searchprov, prov_count, IssDateFix, IssueID, UseFuzzy, newznab_host, ComicVersion=ComicVersion, SARC=SARC, IssueArcID=IssueArcID, ComicID=ComicID, issuetitle=issuetitle, unaltered_ComicName=unaltered_ComicName)
if findit == 'yes':
logger.fdebug("findit = found!")
@ -251,7 +259,7 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
if searchprov == 'newznab':
searchprov = newznab_host[0].rstrip()
logger.info('Could not find Issue ' + IssueNumber + ' of ' + ComicName + '(' + str(SeriesYear) + ') using ' + str(searchprov))
logger.info('Could not find Issue ' + IssueNumber + ' of ' + ComicName + '(' + str(SeriesYear) + ') using ' + str(searchprov) + ' [' + str(searchmode) + ']')
prov_count+=1
#torprtmp+=1 #torprtmp-=1
@ -263,10 +271,12 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
helpers.incr_snatched(ComicID)
return findit, searchprov
else:
if manualsearch is None:
if searchprov == '32P':
pass
elif manualsearch is None:
logger.info('Finished searching via :' + str(searchmode) + '. Issue not found - status kept as Wanted.')
else:
logger.info('Could not find issue doing a manual search via : ' + str(searchmode))
logger.fdebug('Could not find issue doing a manual search via : ' + str(searchmode))
i+=1
return findit, 'None'
@ -471,14 +481,9 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
else:
#32P is redundant now since only RSS works
# - just getting it ready for when it's not redundant :)
if nzbprov == '':
bb = "no results"
elif nzbprov == '32P':
#cmname = re.sub("%20", " ", str(comsrc))
#bb = rsscheck.torrents(pickfeed='4', seriesname=cmname, issue=mod_isssearch)
#rss = "no"
#logger.info('bb returned: ' + str(bb))
if nzbprov == '' or nzbprov == '32P':
bb = "no results"
rss = "no"
elif nzbprov == 'KAT':
cmname = re.sub("%20", " ", str(comsrc))
logger.fdebug("Sending request to [KAT] for " + str(cmname) + " : " + str(mod_isssearch))
@ -496,6 +501,14 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
host_newznab_fix = str(host_newznab) + "/"
else: host_newznab_fix = host_newznab
findurl = str(host_newznab_fix) + "api?t=search&q=" + str(comsearch) + "&o=xml&cat=" + str(category_newznab)
elif nzbprov == 'Torznab':
if mylar.TORZNAB_HOST.endswith('/'):
#http://localhost:9117/api/iptorrents
torznab_fix = mylar.TORZNAB_HOST[:-1]
else:
torznab_fix = mylar.TORZNAB_HOST
findurl = str(torznab_fix) + "?t=search&q=" + str(comsearch) + "&o=xml&cat=" + str(mylar.TORZNAB_CATEGORY)
apikey = mylar.TORZNAB_APIKEY
if nzbprov != 'nzbx':
# helper function to replace apikey here so we avoid logging it ;)
findurl = findurl + "&apikey=" + str(apikey)
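For reference, with a hypothetical Jackett endpoint and category the Torznab URL assembled above comes out roughly as follows (search term and category are made up; the host mirrors the comment in the code):

torznab_host = 'http://localhost:9117/api/iptorrents/'   # hypothetical Jackett endpoint
torznab_fix = torznab_host[:-1] if torznab_host.endswith('/') else torznab_host
findurl = torznab_fix + "?t=search&q=" + "batman" + "&o=xml&cat=" + "8000"
findurl = findurl + "&apikey=" + "<torznab_apikey>"
print(findurl)
# http://localhost:9117/api/iptorrents?t=search&q=batman&o=xml&cat=8000&apikey=<torznab_apikey>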
@ -504,7 +517,7 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
### IF USENET_RETENTION is set, honour it
### For newznab sites, that means appending "&maxage=<whatever>" on the URL
if mylar.USENET_RETENTION != None:
if mylar.USENET_RETENTION != None and nzbprov != 'Torznab':
findurl = findurl + "&maxage=" + str(mylar.USENET_RETENTION)
# Add a user-agent
@ -1695,7 +1708,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
nzbid = generate_id(nzbprov, link)
if link and (nzbprov != 'KAT' and nzbprov != '32P'):
if link and (nzbprov != 'KAT' and nzbprov != '32P' and nzbprov != 'Torznab'):
#generate nzbid here.
@ -1876,7 +1889,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
#blackhole
sent_to = None
if mylar.USE_BLACKHOLE and nzbprov != '32P' and nzbprov != 'KAT':
if mylar.USE_BLACKHOLE and nzbprov != '32P' and nzbprov != 'KAT' and nzbprov != 'Torznab':
logger.fdebug("using blackhole directory at : " + str(mylar.BLACKHOLE_DIR))
if os.path.exists(mylar.BLACKHOLE_DIR):
#copy the nzb from nzbpath to blackhole dir.
@ -1891,7 +1904,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
#end blackhole
#torrents (32P & KAT)
elif nzbprov == '32P' or nzbprov == 'KAT':
elif nzbprov == '32P' or nzbprov == 'KAT' or nzbprov == 'Torznab':
logger.fdebug("sending .torrent to watchdir.")
logger.fdebug("ComicName:" + ComicName)
logger.fdebug("link:" + link)
@ -1998,7 +2011,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
elif mylar.SAB_TO_MYLAR:
#if sab & mylar are on different machines, check to see if they are local or external IP's provided for host.
if mylar.HTTP_HOST == 'localhost' or mylar.HTTP_HOST == '0.0.0.0':
if mylar.HTTP_HOST == 'localhost' or mylar.HTTP_HOST == '0.0.0.0' or mylar.HTTP_HOST.startswith('10.') or mylar.HTTP_HOST.startswith('192.') or mylar.HTTP_HOST.startswith('172.'):
#if mylar's local, use the local IP already assigned to LOCAL_IP.
mylar_host = proto + str(mylar.LOCAL_IP) + ':' + str(mylar.HTTP_PORT) + hroot
else:
@ -2313,5 +2326,13 @@ def generate_id(nzbprov, link):
st = tmpid.find('&id')
end = tmpid.find('&', st +1)
nzbid = re.sub('&id=', '', tmpid[st:end]).strip()
elif nzbprov == 'Torznab':
if mylar.TORZNAB_HOST.endswith('/'):
tmphost = mylar.TORZNAB_HOST + 'download/'
else:
tmphost = mylar.TORZNAB_HOST + '/download/'
tmpline = re.sub(tmphost, '', link).strip()
tmpidend = tmpline.find('/')
nzbid = tmpline[:tmpidend]
return nzbid
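A hedged illustration of the Torznab id extraction above, assuming the download link follows the host + '/download/<id>/...' shape implied by the code (all values hypothetical):

torznab_host = 'http://localhost:9117/api/iptorrents'
link = torznab_host + '/download/abc123/file.torrent'
tmphost = torznab_host + '/download/'
tmpline = link.replace(tmphost, '').strip()   # 'abc123/file.torrent'
nzbid = tmpline[:tmpline.find('/')]
print(nzbid)   # -> abc123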

View File

@ -395,12 +395,13 @@ class WebInterface(object):
logger.warn('API Limit has been reached. Aborting update at this time.')
return
logger.fdebug(module + ' Arcresults: ' + str(arc_results))
logger.fdebug('arclist: ' + str(arclist))
if len(arc_results) > 0:
import random
issuedata = []
if storyarcissues is None:
storyarcissues = len(arc_results)
storyarcissues = len(arc_results['issuechoice'])
if arcid is None:
storyarcid = str(random.randint(1000,9999)) + str(storyarcissues)
else:
@ -436,102 +437,16 @@ class WebInterface(object):
int_issnum = helpers.issuedigits(issnum)
# if issnum.isdigit():
# int_issnum = int(issnum) * 1000
# else:
# if 'a.i.' in issnum.lower() or 'ai' in issnum.lower():
# issnum = re.sub('\.', '', issnum)
# #int_issnum = (int(issnum[:-2]) * 1000) + ord('a') + ord('i')
# if 'au' in issnum.lower():
# int_issnum = (int(issnum[:-2]) * 1000) + ord('a') + ord('u')
# elif 'inh' in issnum.lower():
# int_issnum = (int(issnum[:-4]) * 1000) + ord('i') + ord('n') + ord('h')
# elif 'now' in issnum.lower():
# int_issnum = (int(issnum[:-4]) * 1000) + ord('n') + ord('o') + ord('w')
# elif u'\xbd' in issnum:
# int_issnum = .5 * 1000
# logger.fdebug(module + ' 1/2 issue detected :' + issnum + ' === ' + str(int_issnum))
# elif u'\xbc' in issnum:
# int_issnum = .25 * 1000
# elif u'\xbe' in issnum:
# int_issnum = .75 * 1000
# elif u'\u221e' in issnum:
# #issnum = utf-8 will encode the infinity symbol without any help
# int_issnum = 9999999999 * 1000 # set 9999999999 for integer value of issue
# elif '.' in issnum or ',' in issnum:
# if ',' in issnum: issnum = re.sub(',', '.', issnum)
# issst = str(issnum).find('.')
# #logger.fdebug("issst:" + str(issst))
# if issst == 0:
# issb4dec = 0
# else:
# issb4dec = str(issnum)[:issst]
# #logger.fdebug("issb4dec:" + str(issb4dec))
# #if the length of decimal is only 1 digit, assume it's a tenth
# decis = str(issnum)[issst +1:]
# #logger.fdebug("decis:" + str(decis))
# if len(decis) == 1:
# decisval = int(decis) * 10
# issaftdec = str(decisval)
# elif len(decis) == 2:
# decisval = int(decis)
# issaftdec = str(decisval)
# else:
# decisval = decis
# issaftdec = str(decisval)
# try:
# int_issnum = (int(issb4dec) * 1000) + (int(issaftdec) * 10)
# except ValueError:
# logger.error(module + ' This has no issue # for me to get - Either a Graphic Novel or one-shot.')
# updater.no_searchresults(comicid)
# return
# else:
# try:
# x = float(issnum)
# #validity check
# if x < 0:
# logger.fdebug(module + ' I have encountered a negative issue #: ' + str(issnum) + '. Trying to accomodate.')
# logger.fdebug(module + ' value of x is : ' + str(x))
# int_issnum = (int(x) *1000) - 1
# else: raise ValueError
# except ValueError, e:
# x = 0
# tstord = None
# issno = None
# invchk = "false"
# while (x < len(issnum)):
# if issnum[x].isalpha():
# #take first occurance of alpha in string and carry it through
# tstord = issnum[x:].rstrip()
# issno = issnum[:x].rstrip()
# try:
# isschk = float(issno)
# except ValueError, e:
# if len(issnum) == 1 and issnum.isalpha():
# logger.fdebug(module + ' Detected lone alpha issue. Attempting to figure this out.')
# break
# logger.fdebug(module + ' Invalid numeric for issue - cannot be found. Ignoring.')
# issno = None
# tstord = None
# invchk = "true"
# break
# x+=1
# if tstord is not None and issno is not None:
# a = 0
# ordtot = 0
# if len(issnum) == 1 and issnum.isalpha():
# int_issnum = ord(tstord.lower())
# else:
# while (a < len(tstord)):
# ordtot += ord(tstord[a].lower()) #lower-case the letters for simplicty
# a+=1
# int_issnum = (int(issno) * 1000) + ordtot
# elif invchk == "true":
# logger.fdebug(module + ' This does not have an issue # that I can parse properly.')
# return
# else:
# logger.error(module + ' ' + str(issnum) + ' This has an alpha-numeric in the issue # which I cannot account for.')
# return
#verify the reading order if present.
findorder = arclist.find(issid)
if findorder != -1:
ros = arclist.find('|',findorder+1)
roslen = arclist[findorder:ros]
rosre = re.sub(issid,'', roslen)
readingorder = int(re.sub('[\,\|]','', rosre).strip())
else:
readingorder = 0
logger.info('[' + str(readingorder) + '] issueid:' + str(issid) + ' - findorder#:' + str(findorder))
issuedata.append({"ComicID": comicid,
"IssueID": issid,
@ -542,7 +457,7 @@ class WebInterface(object):
"Issue_Number": issnum,
"IssueDate": issdate,
"ReleaseDate": storedate,
"ReadingOrder": n +1,
"ReadingOrder": readingorder, #n +1,
"Int_IssueNumber": int_issnum})
n+=1
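A short sketch of the reading-order lookup above, using a hypothetical arclist in the 'issueid,readingorder' format built by storyarcinfo():

import re

arclist = '12345,1|67890,2|13579,3'
issid = '67890'

findorder = arclist.find(issid)
if findorder != -1:
    ros = arclist.find('|', findorder + 1)
    roslen = arclist[findorder:ros]              # '67890,2'
    rosre = re.sub(issid, '', roslen)            # ',2'
    readingorder = int(re.sub('[\,\|]', '', rosre).strip())
else:
    readingorder = 0

print(readingorder)   # -> 2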
@ -3335,6 +3250,11 @@ class WebInterface(object):
"use_dognzb": helpers.checked(mylar.DOGNZB),
"dognzb_api": mylar.DOGNZB_APIKEY,
"use_experimental": helpers.checked(mylar.EXPERIMENTAL),
"enable_torznab": helpers.checked(mylar.ENABLE_TORZNAB),
"torznab_name": mylar.TORZNAB_NAME,
"torznab_host": mylar.TORZNAB_HOST,
"torznab_apikey": mylar.TORZNAB_APIKEY,
"torznab_category": mylar.TORZNAB_CATEGORY,
"use_newznab": helpers.checked(mylar.NEWZNAB),
"newznab_host": mylar.NEWZNAB_HOST,
"newznab_name": mylar.NEWZNAB_NAME,
@ -3632,7 +3552,7 @@ class WebInterface(object):
nzb_downloader=0, sab_host=None, sab_username=None, sab_apikey=None, sab_password=None, sab_category=None, sab_priority=None, sab_directory=None, sab_to_mylar=0, log_dir=None, log_level=0, blackhole_dir=None,
nzbget_host=None, nzbget_port=None, nzbget_username=None, nzbget_password=None, nzbget_category=None, nzbget_priority=None, nzbget_directory=None,
usenet_retention=None, nzbsu=0, nzbsu_uid=None, nzbsu_apikey=None, dognzb=0, dognzb_apikey=None, newznab=0, newznab_host=None, newznab_name=None, newznab_apikey=None, newznab_uid=None, newznab_enabled=0,
raw=0, raw_provider=None, raw_username=None, raw_password=None, raw_groups=None, experimental=0, check_folder=None, enable_check_folder=0,
enable_torznab=0, torznab_name=None, torznab_host=None, torznab_apikey=None, torznab_category=None, experimental=0, check_folder=None, enable_check_folder=0,
enable_meta=0, cmtagger_path=None, ct_tag_cr=0, ct_tag_cbl=0, ct_cbz_overwrite=0, unrar_cmd=None, enable_rss=0, rss_checkinterval=None, failed_download_handling=0, failed_auto=0, enable_torrent_search=0, enable_kat=0, enable_32p=0, mode_32p=0, rssfeed_32p=None, passkey_32p=None, username_32p=None, password_32p=None, snatchedtorrent_notify=0,
enable_torrents=0, minseeds=0, torrent_local=0, local_watchdir=None, torrent_seedbox=0, seedbox_watchdir=None, seedbox_user=None, seedbox_pass=None, seedbox_host=None, seedbox_port=None,
prowl_enabled=0, prowl_onsnatch=0, prowl_keys=None, prowl_priority=None, nma_enabled=0, nma_apikey=None, nma_priority=0, nma_onsnatch=0, pushover_enabled=0, pushover_onsnatch=0, pushover_apikey=None, pushover_userkey=None, pushover_priority=None, boxcar_enabled=0, boxcar_onsnatch=0, boxcar_token=None,
@ -3663,10 +3583,8 @@ class WebInterface(object):
mylar.SEARCH_DELAY = search_delay
mylar.NZB_DOWNLOADER = int(nzb_downloader)
if tsab:
logger.fdebug('the truth will set you free.')
self.SABtest(sab_host, sab_username, sab_password, sab_apikey)
else:
logger.fdebug('failure of the truth.')
mylar.SAB_HOST = sab_host
mylar.SAB_USERNAME = sab_username
mylar.SAB_PASSWORD = sab_password
@ -3689,11 +3607,11 @@ class WebInterface(object):
mylar.NZBSU_APIKEY = nzbsu_apikey
mylar.DOGNZB = dognzb
mylar.DOGNZB_APIKEY = dognzb_apikey
mylar.RAW = raw
mylar.RAW_PROVIDER = raw_provider
mylar.RAW_USERNAME = raw_username
mylar.RAW_PASSWORD = raw_password
mylar.RAW_GROUPS = raw_groups
mylar.ENABLE_TORZNAB = enable_torznab
mylar.TORZNAB_NAME = torznab_name
mylar.TORZNAB_HOST = torznab_host
mylar.TORZNAB_APIKEY = torznab_apikey
mylar.TORZNAB_CATEGORY = torznab_category
mylar.EXPERIMENTAL = experimental
mylar.NEWZNAB = newznab
#mylar.NEWZNAB_HOST = newznab_host
@ -4128,7 +4046,7 @@ class WebInterface(object):
def testNMA(self):
nma = notifiers.NMA()
result = nma.test_notify()
if result:
if result == True:
return "Successfully sent NMA test - check to make sure it worked"
else:
return "Error sending test message to NMA"

View File

@ -531,7 +531,7 @@ def pullitcheck(comic1off_name=None, comic1off_id=None, forcecheck=None, futurep
st_year = latestdate[:st_date]
year = '20' + st_year
latestdate = str(year) + '-' + str(st_remainder)
logger.fdebug('year set to: ' + latestdate)
#logger.fdebug('year set to: ' + latestdate)
else:
logger.fdebug("invalid date " + str(latestdate) + " appending 01 for day for continuation.")
latest_day = '01'