FIX: TPSE switching to magnets only - the chosen torrent client will now be sent the magnet link
IMP: Telegram & Pushover notifications now use the on-screen data for test connections instead of always reading from the cached config
IMP: SABnzbd test connection now uses the on-screen data instead of always reading from the cached config
FIX: Quick fix for the torrent auto-snatcher trying to snatch single issues when the directory only contains the one file
FIX: On 32P, when doing an api search and then verifying the posting date of an issue against its store date, the posting date was taken as a string while being treated as an integer, causing mismatches for snatches (usually on multi-volume series)

evilhero 2017-06-08 20:23:59 -04:00
parent 61d5590796
commit 3d91a5e521
14 changed files with 420 additions and 216 deletions
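
Several of the changes below normalize a magnet link to its info hash before handing it to a torrent client; the same urn:btih regex and base32-to-hex conversion appears in search.py, the rTorrent wrapper and the qBittorrent client further down. A minimal standalone sketch of that conversion, in the project's Python 2 style, assuming a standard urn:btih magnet URI:

    import re
    from base64 import b16encode, b32decode

    def magnet_to_infohash(magnet_uri):
        # btih is either 40-char hex or 32-char base32; clients expect upper-case hex
        info_hash = re.findall(r"urn:btih:([\w]{32,40})", magnet_uri)[0]
        if len(info_hash) == 32:
            info_hash = b16encode(b32decode(info_hash))
        return info_hash.upper()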

View File

@ -274,16 +274,16 @@
<fieldset id="sabnzbd_options">
<div class="row">
<label>SABnzbd Host:</label>
<input type="text" name="sab_host" value="${config['sab_host']}" size="30">
<input type="text" name="sab_host" id="sab_host" value="${config['sab_host']}" size="30">
<small>usually http://localhost:8080</small>
</div>
<div class="row">
<label>SABnzbd Username</label>
<input type="text" name="sab_username" value="${config['sab_user']}" size="20">
<input type="text" name="sab_username" id="sab_username" value="${config['sab_user']}" size="20">
</div>
<div class="row">
<label>SABnzbd Password:</label>
<input type="password" name="sab_password" value="${config['sab_pass']| h}" size="20">
<input type="password" name="sab_password" id="sab_password" value="${config['sab_pass']| h}" size="20">
</div>
<div Class="row">
<div class="populatesab">
@ -1201,10 +1201,10 @@
</div>
<div id="pushoveroptions">
<div class="row">
<label>API key</label><input type="text" title="Leave blank if you don't have your own API (recommended to get your own)" name="pushover_apikey" value="${config['pushover_apikey']}" size="50">
<label>API key</label><input type="text" title="Leave blank if you don't have your own API (recommended to get your own)" name="pushover_apikey" id="pushover_apikey" value="${config['pushover_apikey']}" size="50">
</div>
<div class="row">
<label>User key</label><input type="text" name="pushover_userkey" value="${config['pushover_userkey']}" size="50">
<label>User key</label><input type="text" name="pushover_userkey" id="pushover_userkey" value="${config['pushover_userkey']}" size="50">
</div>
<div class="row checkbox">
<input type="checkbox" name="pushover_onsnatch" value="1" ${config['pushover_onsnatch']} /><label>Notify on snatch?</label>
@ -1213,9 +1213,10 @@
<label>Priority (-2,-1,0,1 or 2):</label>
<input type="text" name="pushover_priority" value="${config['pushover_priority']}" size="2">
</div>
<div class="row">
<input type="button" value="Test Pushover" id="pushover_test" />
</div>
<div align="center" class="row">
<input type="button" value="Test Pushover" id="pushover_test" style="float:center" /></br>
<input type="text" name="pushoverstatus" style="text-align:center; font-size:11px;" id="pushoverstatus" size="55" DISABLED />
</div>
</div>
</fieldset>
@ -1272,16 +1273,17 @@
</div>
<div id="telegramoptions">
<div class="row">
<label>Userid</label><input type="text" name="telegram_userid" value="${config['telegram_userid']}" size="50">
<label>Userid</label><input type="text" name="telegram_userid" id="telegram_userid" value="${config['telegram_userid']}" size="50">
</div>
<div class="row">
<label>Token</label><input type="text" name="telegram_token" value="${config['telegram_token']}" size="50">
<label>Token</label><input type="text" name="telegram_token" id="telegram_token" value="${config['telegram_token']}" size="50">
</div>
<div class="row checkbox">
<input type="checkbox" name="telegram_onsnatch" value="1" ${config['telegram_onsnatch']} /><label>Notify on snatch?</label>
</div>
<div class="row">
<input type="button" value="Test Telegram" id="telegram_test" />
<div align="center" class="row">
<input type="button" value="Test Telegram" id="telegram_test" style="float:center" /></br>
<input type="text" name="telegramstatus" style="text-align:center; font-size:11px;" id="telegramstatus" size="55" DISABLED />
</div>
</div>
</fieldset>
@ -1650,6 +1652,7 @@
$('#api_key').val(data);
});
});
$("#test_32p").click(function(){
$.get('test_32p',
function(data){
@ -1660,16 +1663,25 @@
$('#status32p').val(data);
});
});
$("#test_sab").click(function(){
$.get('SABtest',
$('#test_sab').click(function () {
var sabhost = document.getElementById('sab_host').value;
var sabuser = document.getElementById('sab_username').value;
var sabpass = document.getElementById('sab_password').value;
var sabapi = document.getElementById('sab_apikey').value;
$.get("SABtest",
{ sabhost: sabhost, sabusername: sabuser, sabpassword: sabpass, sabapikey: sabapi },
function(data){
if (data.error != undefined) {
alert(data.error);
return;
}
$('#sabstatus').val(data);
$('#ajaxMsg').html("<div class='msg'><span class='ui-icon ui-icon-check'></span>"+data+"</div>");
});
$('#ajaxMsg').addClass('success').fadeIn().delay(3000).fadeOut();
});
if ($("#enable_https").is(":checked"))
{
$("#https_options").show();
@ -1706,9 +1718,12 @@
$("#perms_options").slideUp();
}
});
$('#sab_apikey').click(function(){ $('#sab_apikey').select() });
$("#find_sabapi").click(function(){
var sabhost = document.getElementById('sab_host').value;
var sabuser = document.getElementById('sab_username').value;
var sabpass = document.getElementById('sab_password').value;
$.get('findsabAPI',
{ sabhost: sabhost, sabusername: sabuser, sabpassword: sabpass },
function(data){
if (data.error != undefined) {
alert(data.error);
@ -1731,9 +1746,11 @@
formfields.append("</div>");
$("#add_newznab").before(formfields);
});
function addAction() {
$('#autoadd').append('<input type="hidden" name="tsab" value=1 />');
};
$('#nma_test').click(function () {
var apikey = document.getElementById('nma_apikey').value;
$.get("testNMA",
@ -1756,8 +1773,18 @@
});
$('#pushover_test').click(function () {
var apikey = document.getElementById('pushover_apikey').value;
var userkey = document.getElementById('pushover_userkey').value;
$.get("testpushover",
function (data) { $('#ajaxMsg').html("<div class='msg'><span class='ui-icon ui-icon-check'></span>"+data+"</div>"); });
{ apikey: apikey, userkey: userkey },
function(data){
if (data.error != undefined) {
alert(data.error);
return;
}
$('#pushoverstatus').val(data);
$('#ajaxMsg').html("<div class='msg'><span class='ui-icon ui-icon-check'></span>"+data+"</div>");
});
$('#ajaxMsg').addClass('success').fadeIn().delay(3000).fadeOut();
});
@ -1783,8 +1810,18 @@
});
$('#telegram_test').click(function () {
var userid = document.getElementById('telegram_userid').value;
var token = document.getElementById('telegram_token').value;
$.get("testtelegram",
function (data) { $('#ajaxMsg').html("<div class='msg'><span class='ui-icon ui-icon-check'></span>"+data+"</div>"); });
{ userid: userid, token: token },
function(data){
if (data.error != undefined) {
alert(data.error);
return;
}
$('#telegramstatus').val(data);
$('#ajaxMsg').html("<div class='msg'><span class='ui-icon ui-icon-check'></span>"+data+"</div>");
});
$('#ajaxMsg').addClass('success').fadeIn().delay(3000).fadeOut();
});

View File

@ -129,6 +129,48 @@ class RTorrent:
return(func_name)
def load_magnet(self, magneturl, info_hash, start=False, verbose=False, verify_load=True, verify_retries=3):
p = self._get_conn()
info_hash = info_hash.upper()
func_name = self._get_load_function("url", start, verbose)
# load magnet
getattr(p, func_name)(magneturl)
if verify_load:
i = 0
torrent = None
while i < verify_retries:
for tor in self.get_torrents():
if tor.info_hash != info_hash:
continue
else:
torrent = tor
break
if torrent is not None:
break
time.sleep(1)
i += 1
# Resolve magnet to torrent
torrent.start()
assert info_hash in [t.info_hash for t in self.torrents],\
"Adding magnet was unsuccessful."
i = 0
while i < verify_retries:
for torrent in self.get_torrents():
if torrent.info_hash == info_hash:
if str(info_hash) not in str(torrent.name):
time.sleep(1)
i += 1
return(torrent)
def load_torrent(self, torrent, start=False, verbose=False, verify_load=True, verify_retries=3):
"""
Loads torrent into rTorrent (with various enhancements)

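On the caller side the new load_magnet takes the magnet URI plus its upper-case hex info hash, mirroring the rTorrent client wrapper later in this diff. A minimal sketch, where conn is assumed to be an authenticated lib.rtorrent RTorrent connection:

    import re

    def send_magnet(conn, magnet_link):
        # the hash is compared against tor.info_hash during verify_load, so it must be upper-case hex
        torrent_hash = re.findall(r"urn:btih:([\w]{32,40})", magnet_link)[0].upper()
        torrent = conn.load_magnet(magnet_link, torrent_hash, verify_load=True)
        if torrent is None:
            # the magnet never resolved to metadata within verify_retries
            raise RuntimeError('magnet failed to load into rTorrent')
        return torrent
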
View File

@ -2673,7 +2673,7 @@ def torrentinfo(issueid=None, torrent_hash=None, download=False, monitor=False):
if torrent_files > 1:
downlocation = torrent_folder
else:
downlocation = os.path.join(torrent_folder, torrent_info['name'])
downlocation = torrent_info['files'][0] #os.path.join(torrent_folder, torrent_info['name'])
downlocation = re.sub("'", "\\'", downlocation)
downlocation = re.sub("&", "\\&", downlocation)

View File

@ -205,14 +205,22 @@ class NMA:
# No extra care has been put into API friendliness at the moment (read: https://pushover.net/api#friendly)
class PUSHOVER:
def __init__(self):
def __init__(self, test_apikey=None, test_userkey=None):
self.PUSHOVER_URL = 'https://api.pushover.net/1/messages.json'
self.enabled = mylar.PUSHOVER_ENABLED
if mylar.PUSHOVER_APIKEY is None or mylar.PUSHOVER_APIKEY == 'None':
self.apikey = 'a1KZ1L7d8JKdrtHcUR6eFoW2XGBmwG'
if test_apikey is None:
if mylar.PUSHOVER_APIKEY is None or mylar.PUSHOVER_APIKEY == 'None':
self.apikey = 'a1KZ1L7d8JKdrtHcUR6eFoW2XGBmwG'
else:
self.apikey = mylar.PUSHOVER_APIKEY
else:
self.apikey = mylar.PUSHOVER_APIKEY
self.userkey = mylar.PUSHOVER_USERKEY
self.apikey = test_apikey
if test_userkey is None:
self.userkey = mylar.PUSHOVER_USERKEY
else:
self.userkey = test_userkey
self.priority = mylar.PUSHOVER_PRIORITY
self._session = requests.Session()
@ -400,10 +408,16 @@ class PUSHBULLET:
return self.notify(prline='Test Message', prline2='Release the Ninjas!')
class TELEGRAM:
def __init__(self):
self.token = mylar.TELEGRAM_TOKEN
self.userid = mylar.TELEGRAM_USERID
def __init__(self, test_userid=None, test_token=None):
self.TELEGRAM_API = "https://api.telegram.org/bot%s/%s"
if test_userid is None:
self.userid = mylar.TELEGRAM_USERID
else:
self.userid = test_userid
if test_token is None:
self.token = mylar.TELEGRAM_TOKEN
else:
self.token = test_token
def notify(self, message, status):
if not mylar.TELEGRAM_ENABLED:

View File

@ -232,10 +232,13 @@ def torrents(pickfeed=None, seriesname=None, issue=None, feedinfo=None):
except AttributeError:
logger.warn('Unable to retrieve results - probably just hitting it too fast...')
continue
id = urlparse.urlparse(feedme.entries[i].link).path.rpartition('/')[0]
torthetpse.append({
'site': picksite,
'title': feedme.entries[i].title,
'link': re.sub('.torrent', '', str(urlparse.urlparse(tmpenc['url'])[2].rpartition('/')[2])).strip(),
'id': re.sub('/', '', id).strip(), #make sure to remove the leading '/'
'link': tmpenc['url'], #should change this to magnet
'pubdate': feedme.entries[i].updated,
'size': tmpenc['length']
})
@ -851,26 +854,30 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site):
# 'User-Agent': str(mylar.USER_AGENT)}
elif site == 'TPSE':
url = helpers.torrent_create('TPSE', linkit)
pass
#linkit should be the magnet link since it's TPSE
#url = linkit
if url.startswith('https'):
tpse_referrer = 'https://torrentproject.se/'
else:
tpse_referrer = 'http://torrentproject.se/'
#url = helpers.torrent_create('TPSE', linkit)
try:
scraper = cfscrape.create_scraper()
cf_cookievalue, cf_user_agent = scraper.get_tokens(url)
headers = {'Accept-encoding': 'gzip',
'User-Agent': cf_user_agent}
#if url.startswith('https'):
# tpse_referrer = 'https://torrentproject.se/'
#else:
# tpse_referrer = 'http://torrentproject.se/'
except Exception, e:
return "fail"
#try:
# scraper = cfscrape.create_scraper()
# cf_cookievalue, cf_user_agent = scraper.get_tokens(url)
# headers = {'Accept-encoding': 'gzip',
# 'User-Agent': cf_user_agent}
logger.fdebug('Grabbing torrent from url:' + str(url))
#except Exception, e:
# return "fail"
payload = None
verify = False
#logger.fdebug('Grabbing torrent from url:' + str(url))
#payload = None
#verify = False
elif site == 'DEM':
url = helpers.torrent_create('DEM', linkit)
@ -915,104 +922,112 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site):
payload = None
verify = False
if not verify:
#32P throws back an insecure warning because it can't validate against the CA. The below suppresses the message just for 32P instead of being displayed.
#disable SSL warnings - too many 'warning' messages about invalid certificates
try:
from requests.packages.urllib3 import disable_warnings
disable_warnings()
except ImportError:
#this is probably not necessary and redundant, but leaving in for the time being.
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings()
if site != 'TPSE':
if not verify:
#32P throws back an insecure warning because it can't validate against the CA. The below suppresses the message just for 32P instead of being displayed.
#disable SSL warnings - too many 'warning' messages about invalid certificates
try:
from urllib3.exceptions import InsecureRequestWarning
urllib3.disable_warnings()
from requests.packages.urllib3 import disable_warnings
disable_warnings()
except ImportError:
logger.warn('[EPIC FAILURE] Cannot load the requests module')
return "fail"
try:
scraper = cfscrape.create_scraper()
if cf_cookievalue:
r = scraper.get(url, params=payload, cookies=cf_cookievalue, verify=verify, stream=True, headers=headers)
else:
r = scraper.get(url, params=payload, verify=verify, stream=True, headers=headers)
#r = requests.get(url, params=payload, verify=verify, stream=True, headers=headers)
except Exception, e:
logger.warn('Error fetching data from %s (%s): %s' % (site, url, e))
if site == '32P':
logger.info('[TOR2CLIENT-32P] Retrying with 32P')
if mylar.MODE_32P == 1:
logger.info('[TOR2CLIENT-32P] Attempting to re-authenticate against 32P and poll new keys as required.')
feed32p = auth32p.info32p(reauthenticate=True)
feedinfo = feed32p.authenticate()
if feedinfo == "disable":
mylar.ENABLE_32P = 0
mylar.config_write()
return "fail"
logger.debug('[TOR2CLIENT-32P] Creating CF Scraper')
scraper = cfscrape.create_scraper()
logger.debug('[TOR2CLIENT-32P] payload: %s \n verify %s \n headers %s \n', payload, verify, headers)
#this is probably not necessary and redundant, but leaving in for the time being.
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings()
try:
r = scraper.get(url, params=payload, verify=verify, allow_redirects=True)
except Exception, e:
logger.warn('[TOR2CLIENT-32P] Unable to GET %s (%s): %s' % (site, url, e))
from urllib3.exceptions import InsecureRequestWarning
urllib3.disable_warnings()
except ImportError:
logger.warn('[EPIC FAILURE] Cannot load the requests module')
return "fail"
try:
scraper = cfscrape.create_scraper()
if cf_cookievalue:
r = scraper.get(url, params=payload, cookies=cf_cookievalue, verify=verify, stream=True, headers=headers)
else:
r = scraper.get(url, params=payload, verify=verify, stream=True, headers=headers)
#r = requests.get(url, params=payload, verify=verify, stream=True, headers=headers)
except Exception, e:
logger.warn('Error fetching data from %s (%s): %s' % (site, url, e))
if site == '32P':
logger.info('[TOR2CLIENT-32P] Retrying with 32P')
if mylar.MODE_32P == 1:
logger.info('[TOR2CLIENT-32P] Attempting to re-authenticate against 32P and poll new keys as required.')
feed32p = auth32p.info32p(reauthenticate=True)
feedinfo = feed32p.authenticate()
if feedinfo == "disable":
mylar.ENABLE_32P = 0
mylar.config_write()
return "fail"
logger.debug('[TOR2CLIENT-32P] Creating CF Scraper')
scraper = cfscrape.create_scraper()
logger.debug('[TOR2CLIENT-32P] payload: %s \n verify %s \n headers %s \n', payload, verify, headers)
try:
r = scraper.get(url, params=payload, verify=verify, allow_redirects=True)
except Exception, e:
logger.warn('[TOR2CLIENT-32P] Unable to GET %s (%s): %s' % (site, url, e))
return "fail"
else:
logger.warn('[TOR2CLIENT-32P] Unable to authenticate using existing RSS Feed given. Make sure that you have provided a CURRENT feed from 32P')
return "fail"
else:
logger.warn('[TOR2CLIENT-32P] Unable to authenticate using existing RSS Feed given. Make sure that you have provided a CURRENT feed from 32P')
logger.info('blah: ' + str(r.status_code))
return "fail"
else:
logger.info('blah: ' + str(r.status_code))
if any([site == 'TPSE', site == 'DEM', site == 'WWT']) and any([str(r.status_code) == '403', str(r.status_code) == '404', str(r.status_code) == '503']):
if str(r.status_code) != '503':
logger.warn('Unable to download from ' + site + ' [' + str(r.status_code) + ']')
#retry with the alternate torrent link.
url = helpers.torrent_create(site, linkit, True)
logger.fdebug('Trying alternate url: ' + str(url))
try:
r = requests.get(url, params=payload, verify=verify, stream=True, headers=headers)
except Exception, e:
return "fail"
else:
logger.warn('Cloudflare protection online for ' + site + '. Attempting to bypass...')
try:
scraper = cfscrape.create_scraper()
cf_cookievalue, cf_user_agent = cfscrape.get_cookie_string(url)
headers = {'Accept-encoding': 'gzip',
'User-Agent': cf_user_agent}
r = scraper.get(url, verify=verify, cookies=cf_cookievalue, stream=True, headers=headers)
except Exception, e:
return "fail"
if str(r.status_code) != '200':
logger.warn('Unable to download torrent from ' + site + ' [Status Code returned: ' + str(r.status_code) + ']')
return "fail"
if any([site == 'TPSE', site == 'DEM', site == 'WWT']) and any([str(r.status_code) == '403', str(r.status_code) == '404', str(r.status_code) == '503']):
if str(r.status_code) != '503':
logger.warn('Unable to download from ' + site + ' [' + str(r.status_code) + ']')
#retry with the alternate torrent link.
url = helpers.torrent_create(site, linkit, True)
logger.fdebug('Trying alternate url: ' + str(url))
try:
r = requests.get(url, params=payload, verify=verify, stream=True, headers=headers)
if any([site == 'TPSE', site == 'DEM', site == 'WWT']):
if r.headers.get('Content-Encoding') == 'gzip':
buf = StringIO(r.content)
f = gzip.GzipFile(fileobj=buf)
except Exception, e:
return "fail"
else:
logger.warn('Cloudflare protection online for ' + site + '. Attempting to bypass...')
try:
scraper = cfscrape.create_scraper()
cf_cookievalue, cf_user_agent = cfscrape.get_cookie_string(url)
headers = {'Accept-encoding': 'gzip',
'User-Agent': cf_user_agent}
r = scraper.get(url, verify=verify, cookies=cf_cookievalue, stream=True, headers=headers)
except Exception, e:
return "fail"
with open(filepath, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
if str(r.status_code) != '200':
logger.warn('Unable to download torrent from ' + site + ' [Status Code returned: ' + str(r.status_code) + ']')
return "fail"
logger.fdebug('[' + site + '] Saved torrent file to : ' + filepath)
else:
#tpse is magnet links only...
filepath = linkit
if any([site == 'TPSE', site == 'DEM', site == 'WWT']):
if r.headers.get('Content-Encoding') == 'gzip':
buf = StringIO(r.content)
f = gzip.GzipFile(fileobj=buf)
with open(filepath, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
logger.fdebug('[' + site + '] Saved torrent file to : ' + filepath)
if mylar.USE_UTORRENT:
uTC = utorrent.utorrentclient()
ti = uTC.addfile(filepath, filename)
if site == 'TPSE':
ti = uTC.addurl(linkit)
else:
ti = uTC.addfile(filepath, filename)
if ti == 'fail':
return ti
else:

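The net effect of the TPSE branch above is that no .torrent file is fetched at all: the magnet link is passed straight through as the "filepath" and uTorrent receives it via the new addurl call instead of addfile. A condensed sketch of just that hand-off, with the client instance passed in as an assumption:

    def hand_off_to_utorrent(uTC, site, linkit, filepath, filename):
        # uTC: a utorrentclient instance as defined further down in this diff (assumption)
        if site == 'TPSE':
            # TPSE is magnet links only - skip the download and send the link itself
            return uTC.addurl(linkit)
        return uTC.addfile(filepath, filename)
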
View File

@ -10,22 +10,22 @@ from decimal import Decimal
from HTMLParser import HTMLParseError
from time import strptime
def sabnzbd():
SAB_USERNAME = mylar.SAB_USERNAME
SAB_PASSWORD = mylar.SAB_PASSWORD
SAB_HOST = mylar.SAB_HOST #'http://localhost:8085/'
if SAB_USERNAME is None or SAB_PASSWORD is None:
def sabnzbd(sabhost=mylar.SAB_HOST, sabusername=mylar.SAB_USERNAME, sabpassword=mylar.SAB_PASSWORD):
#SAB_USERNAME = mylar.SAB_USERNAME
#SAB_PASSWORD = mylar.SAB_PASSWORD
#SAB_HOST = mylar.SAB_HOST #'http://localhost:8085/'
if sabusername is None or sabpassword is None:
logger.fdebug('No Username / Password specified for SABnzbd. Unable to auto-retrieve SAB API')
if 'https' not in SAB_HOST:
sabhost = re.sub('http://', '', SAB_HOST)
if 'https' not in sabhost:
sabhost = re.sub('http://', '', sabhost)
sabhttp = 'http://'
else:
sabhost = re.sub('https://', '', SAB_HOST)
sabhost = re.sub('https://', '', sabhost)
sabhttp = 'https://'
if not sabhost.endswith('/'):
#sabhost = sabhost[:len(sabhost)-1].rstrip()
sabhost = sabhost + '/'
sabline = sabhttp + SAB_USERNAME + ':' + SAB_PASSWORD + '@' + sabhost
sabline = sabhttp + sabusername + ':' + sabpassword + '@' + sabhost
r = requests.get(sabline + 'config/general/')
soup = BeautifulSoup(r.content)
#lenlinks = len(cntlinks)

View File

@ -34,6 +34,7 @@ import urllib2
import email.utils
import datetime
import shutil
from base64 import b16encode, b32decode
from operator import itemgetter
from wsgiref.handlers import format_date_time
@ -848,9 +849,10 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
else:
stdate = StoreDate
postdate_int = None
if nzbprov == '32P' and rss == 'no':
postdate_int = pubdate
else:
if any([postdate_int is None, type(postdate_int) != int]) or not all([nzbprov == '32P', rss == 'no']):
# convert it to a tuple
dateconv = email.utils.parsedate_tz(pubdate)
dateconv2 = datetime.datetime(*dateconv[:6])
@ -1214,7 +1216,7 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
F_ComicVersion = '1'
if postdate_int is not None:
if postdate_int >= issuedate_int and nzbprov == '32P':
logger.fdebug('32P torrent discovery. Store date (' + str(stdate) + ') is before posting date (' + str(pubdate) + '), forcing volume label to be the same as series label (0-Day Enforcement): v' + str(F_ComicVersion) + ' --> v' + str(S_ComicVersion))
logger.fdebug('32P torrent discovery. Posting date (' + str(pubdate) + ') is after store date (' + str(stdate) + '), forcing volume label to be the same as series label (0-Day Enforcement): v' + str(F_ComicVersion) + ' --> v' + str(S_ComicVersion))
F_ComicVersion = D_ComicVersion
logger.fdebug("FCVersion: " + str(F_ComicVersion))
@ -1613,6 +1615,7 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
logger.error('[NZBPROVIDER = NONE] Encountered an error using given provider with requested information: ' + comicinfo + '. You have a blank entry most likely in your newznabs, fix it & restart Mylar')
continue
#generate the send-to and actually send the nzb / torrent.
logger.info('entry: %s' % entry)
searchresult = searcher(nzbprov, nzbname, comicinfo, entry['link'], IssueID, ComicID, tmpprov, newznab=newznab_host)
if searchresult == 'downloadchk-fail':
@ -2741,14 +2744,21 @@ def generate_id(nzbprov, link):
#32P just has the torrent id stored.
nzbid = link
elif any([nzbprov == 'TPSE', nzbprov == 'WWT', nzbprov == 'DEM']):
if 'http' not in link and any([nzbprov == 'WWT', nzbprov == 'DEM']):
nzbid = link
if nzbprov == 'TPSE':
#TPSE is magnet links only.
info_hash = re.findall("urn:btih:([\w]{32,40})", link)[0]
if len(info_hash) == 32:
info_hash = b16encode(b32decode(info_hash))
nzbid = info_hash.upper()
else:
#for users that already have the cache in place.
url_parts = urlparse.urlparse(link)
path_parts = url_parts[2].rpartition('/')
nzbtempid = path_parts[2]
nzbid = re.sub('.torrent', '', nzbtempid).rstrip()
if 'http' not in link and any([nzbprov == 'WWT', nzbprov == 'DEM']):
nzbid = link
else:
#for users that already have the cache in place.
url_parts = urlparse.urlparse(link)
path_parts = url_parts[2].rpartition('/')
nzbtempid = path_parts[2]
nzbid = re.sub('.torrent', '', nzbtempid).rstrip()
elif nzbprov == 'nzb.su':
nzbid = os.path.splitext(link)[0].rsplit('/', 1)[1]
elif nzbprov == 'dognzb':

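The 32P fix above comes down to no longer trusting that a 32P api result already carries an integer posting date; the pubdate string is run through the normal date conversion before being compared with the store date. A minimal sketch of that conversion along the lines of the email.utils route used in search.py (the final epoch step is an assumption, as that part of the hunk is truncated):

    import email.utils
    import datetime
    import time

    def pubdate_to_int(pubdate):
        # e.g. 'Fri, 09 Jun 2017 00:23:59 +0000' -> integer seconds since the epoch
        dateconv = email.utils.parsedate_tz(pubdate)
        dateconv2 = datetime.datetime(*dateconv[:6])
        return int(time.mktime(dateconv2.timetuple()))
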
View File

@ -60,7 +60,13 @@ class RTorrent(object):
if filepath:
loadit = self.client.load_torrent(filepath)
if loadit:
torrent_hash = self.get_the_hash(filepath)
if filepath.startswith('magnet'):
torrent_hash = re.findall("urn:btih:([\w]{32,40})", filepath)[0]
if len(torrent_hash) == 32:
torrent_hash = b16encode(b32decode(torrent_hash)).lower()
torrent_hash = torrent_hash.upper()
else:
torrent_hash = self.get_the_hash(filepath)
else:
return

View File

@ -95,40 +95,47 @@ class TorrentClient(object):
if self.client.connected is True:
logger.info('Checking if Torrent Exists!')
torrentcontent = open(filepath, 'rb').read()
hash = str.lower(self.get_the_hash(filepath)) # Deluge expects a lower case hash
if not filepath.startswith('magnet'):
torrentcontent = open(filepath, 'rb').read()
hash = str.lower(self.get_the_hash(filepath)) # Deluge expects a lower case hash
logger.debug('Torrent Hash (load_torrent): "' + hash + '"')
logger.debug('FileName (load_torrent): ' + str(os.path.basename(filepath)))
logger.debug('Torrent Hash (load_torrent): "' + hash + '"')
logger.debug('FileName (load_torrent): ' + str(os.path.basename(filepath)))
#Check if torrent already added
if self.find_torrent(str.lower(hash)):
logger.info('load_torrent: Torrent already exists!')
#should set something here to denote that it's already loaded, and then the failed download checker not run so it doesn't download
#multiple copies of the same issues that's already downloaded
#Check if torrent already added
if self.find_torrent(str.lower(hash)):
logger.info('load_torrent: Torrent already exists!')
#should set something here to denote that it's already loaded, and then the failed download checker not run so it doesn't download
#multiple copies of the same issues that's already downloaded
else:
logger.info('Torrent not added yet, trying to add it now!')
try:
torrent_id = self.client.call('core.add_torrent_file', str(os.path.basename(filepath)), base64.encodestring(torrentcontent), '')
except Exception as e:
logger.debug('Torrent not added')
return False
else:
logger.info('Torrent not added yet, trying to add it now!')
try:
torrent_id = self.client.call('core.add_torrent_file', str(os.path.basename(filepath)), base64.encodestring(torrentcontent), '')
torrent_id = self.client.call('core.add_torrent_magnet', str(filepath), {})
except Exception as e:
logger.debug('Torrent not added')
return False
# If label enabled put label on torrent in Deluge
if torrent_id and mylar.DELUGE_LABEL:
logger.info ('Setting label to ' + mylar.DELUGE_LABEL)
# If label enabled put label on torrent in Deluge
if torrent_id and mylar.DELUGE_LABEL:
logger.info ('Setting label to ' + mylar.DELUGE_LABEL)
try:
self.client.call('label.set_torrent', torrent_id, mylar.DELUGE_LABEL)
except:
#if label isn't set, let's try and create one.
try:
self.client.call('label.add', mylar.DELUGE_LABEL)
self.client.call('label.set_torrent', torrent_id, mylar.DELUGE_LABEL)
except:
#if label isn't set, let's try and create one.
try:
self.client.call('label.add', mylar.DELUGE_LABEL)
self.client.call('label.set_torrent', torrent_id, mylar.DELUGE_LABEL)
except:
logger.warn('Unable to set label - Either try to create it manually within Deluge, and/or ensure there are no spaces, capitalization or special characters in label')
else:
logger.info('Successfully set label to ' + mylar.DELUGE_LABEL)
logger.warn('Unable to set label - Either try to create it manually within Deluge, and/or ensure there are no spaces, capitalization or special characters in label')
else:
logger.info('Successfully set label to ' + mylar.DELUGE_LABEL)
try:
torrent_info = self.get_torrent(torrent_id)

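The Deluge change splits the add call by source type: a magnet link goes through core.add_torrent_magnet with an empty options dict, while a .torrent file is still read and submitted base64-encoded through core.add_torrent_file. A minimal sketch of just that branch, assuming an already-connected Deluge RPC client exposing the same call interface used above:

    import os
    import base64

    def add_to_deluge(client, filepath):
        if filepath.startswith('magnet'):
            # magnets carry no local file content, so only the link and empty options are sent
            return client.call('core.add_torrent_magnet', str(filepath), {})
        torrentcontent = open(filepath, 'rb').read()
        return client.call('core.add_torrent_file',
                           str(os.path.basename(filepath)),
                           base64.encodestring(torrentcontent), '')
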
View File

@ -1,6 +1,7 @@
import os
import mylar
import base64
from base64 import b16encode, b32decode
import re
import time
from mylar import logger, helpers
@ -53,15 +54,23 @@ class TorrentClient(object):
def load_torrent(self, filepath):
logger.info('filepath to torrent file set to : ' + filepath)
if not filepath.startswith('magnet'):
logger.info('filepath to torrent file set to : ' + filepath)
if self.client._is_authenticated is True:
logger.info('Checking if Torrent Exists!')
hash = self.get_the_hash(filepath)
if filepath.startswith('magnet'):
torrent_hash = re.findall("urn:btih:([\w]{32,40})", filepath)[0]
if len(torrent_hash) == 32:
torrent_hash = b16encode(b32decode(torrent_hash)).lower()
hash = torrent_hash.upper()
logger.debug('Magnet (load_torrent) initiating')
else:
hash = self.get_the_hash(filepath)
logger.debug('FileName (load_torrent): ' + str(os.path.basename(filepath)))
logger.debug('Torrent Hash (load_torrent): "' + hash + '"')
logger.debug('FileName (load_torrent): ' + str(os.path.basename(filepath)))
#Check if torrent already added
@ -72,14 +81,23 @@ class TorrentClient(object):
#multiple copies of the same issues that's already downloaded
else:
logger.info('Torrent not added yet, trying to add it now!')
try:
torrent_content = open(filepath, 'rb')
tid = self.client.download_from_file(torrent_content, category=str(mylar.QBITTORRENT_LABEL))
except Exception as e:
logger.debug('Torrent not added')
return {'status': False}
if filepath.startswith('magnet'):
try:
tid = self.client.download_from_link(filepath, category=str(mylar.QBITTORRENT_LABEL))
except Exception as e:
logger.debug('Torrent not added')
return {'status': False}
else:
logger.debug('Successfully submitted for add as a magnet. Verifying item is now on client.')
else:
logger.debug('Successfully submitted for add. Verifying item is now on client.')
try:
torrent_content = open(filepath, 'rb')
tid = self.client.download_from_file(torrent_content, category=str(mylar.QBITTORRENT_LABEL))
except Exception as e:
logger.debug('Torrent not added')
return {'status': False}
else:
logger.debug('Successfully submitted for add via file. Verifying item is now on client.')
if mylar.QBITTORRENT_STARTONLOAD:
logger.info('attempting to start')

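qBittorrent gets the same split: magnet links are submitted by URL through download_from_link, regular .torrent files are uploaded through download_from_file, both tagged with the configured label. A condensed sketch using the same client methods as the hunk above, with the authenticated client object passed in as an assumption:

    def add_to_qbittorrent(client, filepath, label):
        if filepath.startswith('magnet'):
            # magnet links are added by URL
            return client.download_from_link(filepath, category=str(label))
        # .torrent files are uploaded as file content
        torrent_content = open(filepath, 'rb')
        return client.download_from_file(torrent_content, category=str(label))
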
View File

@ -1,4 +1,5 @@
import os
import re
from urlparse import urlparse
from lib.rtorrent import RTorrent
@ -97,17 +98,37 @@ class TorrentClient(object):
def load_torrent(self, filepath):
start = bool(mylar.RTORRENT_STARTONLOAD)
logger.info('filepath to torrent file set to : ' + filepath)
torrent = self.conn.load_torrent(filepath, verify_load=True)
if not torrent:
return False
if filepath.startswith('magnet'):
logger.info('torrent magnet link set to : ' + filepath)
torrent_hash = re.findall('urn:btih:([\w]{32,40})', filepath)[0].upper()
# Send request to rTorrent
try:
#cannot verify_load magnet as it might take a very very long time for it to retrieve metadata
torrent = self.conn.load_magnet(filepath, torrent_hash, verify_load=True)
if not torrent:
logger.error('Unable to find the torrent, did it fail to load?')
return False
except Exception as err:
logger.error('Failed to send magnet to rTorrent: %s', err)
return False
else:
logger.info('Torrent successfully loaded into rtorrent using magnet link as source.')
else:
logger.info('filepath to torrent file set to : ' + filepath)
try:
torrent = self.conn.load_torrent(filepath, verify_load=True)
if not torrent:
logger.error('Unable to find the torrent, did it fail to load?')
return False
except Exception as err:
logger.error('Failed to send torrent to rTorrent: %s', err)
return False
#we can cherrypick the torrents here if required and if it's a pack (0day instance)
#torrent.get_files() will return list of files in torrent
#f.set_priority(0,1,2)
for f in torrent.get_files():
logger.info('torrent_get_files: %s' % f)
#for f in torrent.get_files():
# logger.info('torrent_get_files: %s' % f)
# f.set_priority(0) #set them to not download just to see if this works...
#torrent.updated_priorities()

View File

@ -77,8 +77,12 @@ class TorrentClient(object):
down_dir = mylar.CHECK_FOLDER
else:
down_dir = mylar.TRANSMISSION_DIRECTORY
torrent = self.conn.add_torrent('file://%s' % filepath,
download_dir=down_dir)
if filepath.startswith('magnet'):
torrent = self.conn.add_torrent('%s' % filepath,
download_dir=down_dir)
else:
torrent = self.conn.add_torrent('file://%s' % filepath,
download_dir=down_dir)
torrent.start()
return self.get_torrent(torrent)

View File

@ -91,6 +91,29 @@ class utorrentclient(object):
else:
return 'fail'
def addurl(self, url):
params = {'action': 'add-url', 'token': self.token, 's': url}
try:
r = requests.post(url=self.utorrent_url, auth=self.auth, cookies=self.cookies, params=params)
except requests.exceptions.RequestException as err:
logger.debug('URL: ' + str(self.utorrent_url))
logger.debug('Error sending to uTorrent Client. uTorrent responded with error: ' + str(err))
return 'fail'
# (to-do) verify the hash in order to ensure it's loaded here
if str(r.status_code) == '200':
logger.info('Successfully added torrent to uTorrent client.')
hash = self.calculate_torrent_hash(link=url)
if mylar.UTORRENT_LABEL:
try:
self.setlabel(hash)
except:
logger.warn('Unable to set label for torrent.')
return hash
else:
return 'fail'
def setlabel(self, hash):
params = {'token': self.token, 'action': 'setprops', 'hash': hash, 's': 'label', 'v': str(mylar.UTORRENT_LABEL)}
r = requests.post(url=self.utorrent_url, auth=self.auth, cookies=self.cookies, params=params)

View File

@ -4827,35 +4827,40 @@ class WebInterface(object):
configUpdate.exposed = True
def SABtest(self):
sab_host = mylar.SAB_HOST
sab_username = mylar.SAB_USERNAME
sab_password = mylar.SAB_PASSWORD
sab_apikey = mylar.SAB_APIKEY
def SABtest(self, sabhost=None, sabusername=None, sabpassword=None, sabapikey=None):
logger.info('here')
if sabhost is None:
sabhost = mylar.SAB_HOST
if sabusername is None:
sabusername = mylar.SAB_USERNAME
if sabpassword is None:
sabpassword = mylar.SAB_PASSWORD
if sabapikey is None:
sabapikey = mylar.SAB_APIKEY
logger.fdebug('testing SABnzbd connection')
logger.fdebug('sabhost: ' + str(sab_host))
logger.fdebug('sab_username: ' + str(sab_username))
logger.fdebug('sab_password: ' + str(sab_password))
logger.fdebug('sab_apikey: ' + str(sab_apikey))
logger.fdebug('sabhost: ' + str(sabhost))
logger.fdebug('sabusername: ' + str(sabusername))
logger.fdebug('sabpassword: ' + str(sabpassword))
logger.fdebug('sabapikey: ' + str(sabapikey))
if mylar.USE_SABNZBD:
import requests
from xml.dom.minidom import parseString, Element
#if user/pass given, we can auto-fill the API ;)
if sab_username is None or sab_password is None:
if sabusername is None or sabpassword is None:
logger.error('No Username / Password provided for SABnzbd credentials. Unable to test API key')
return "Invalid Username/Password provided"
logger.fdebug('testing connection to SABnzbd @ ' + sab_host)
if sab_host.endswith('/'):
sabhost = sab_host
logger.fdebug('testing connection to SABnzbd @ ' + sabhost)
if sabhost.endswith('/'):
sabhost = sabhost
else:
sabhost = sab_host + '/'
sabhost = sabhost + '/'
querysab = sabhost + 'api'
payload = {'mode': 'get_config',
'section': 'misc',
'output': 'xml',
'apikey': sab_apikey}
'apikey': sabapikey}
if sabhost.startswith('https'):
verify = True
@ -4865,7 +4870,7 @@ class WebInterface(object):
try:
r = requests.get(querysab, params=payload, verify=verify)
except Exception, e:
logger.warn('Error fetching data from %s: %s' % (sab_host, e))
logger.warn('Error fetching data from %s: %s' % (sabhost, e))
if requests.exceptions.SSLError:
logger.warn('Cannot verify ssl certificate. Attempting to authenticate with no ssl-certificate verification.')
try:
@ -4879,7 +4884,7 @@ class WebInterface(object):
try:
r = requests.get(querysab, params=payload, verify=verify)
except Exception, e:
logger.warn('Error fetching data from %s: %s' % (sab_host, e))
logger.warn('Error fetching data from %s: %s' % (sabhost, e))
return 'Unable to retrieve data from SABnzbd'
else:
return 'Unable to retrieve data from SABnzbd'
@ -4912,11 +4917,11 @@ class WebInterface(object):
'name': 'http://www.example.com/example.nzb',
'nzbname': 'NiceName',
'output': 'xml',
'apikey': sab_apikey}
'apikey': sabapikey}
try:
r = requests.get(querysab, params=payload, verify=verify)
except Exception, e:
logger.warn('Error fetching data from %s: %s' % (sab_host, e))
logger.warn('Error fetching data from %s: %s' % (sabhost, e))
return 'Unable to retrieve data from SABnzbd'
dom = parseString(r.content)
@ -4934,8 +4939,8 @@ class WebInterface(object):
if qd == False: return "Invalid APIKey provided."
#test which apikey provided
if q_nzbkey != sab_apikey:
if q_apikey != sab_apikey:
if q_nzbkey != sabapikey:
if q_apikey != sabapikey:
logger.error('APIKey provided does not match with SABnzbd')
return "Invalid APIKey provided"
else:
@ -4990,9 +4995,9 @@ class WebInterface(object):
getComicArtwork.exposed = True
def findsabAPI(self):
def findsabAPI(self, sabhost=None, sabusername=None, sabpassword=None):
import sabparse
sabapi = sabparse.sabnzbd()
sabapi = sabparse.sabnzbd(sabhost, sabusername, sabpassword)
logger.info('SAB NZBKey found as : ' + str(sabapi) + '. You still have to save the config to retain this setting.')
mylar.SAB_APIKEY = sabapi
return sabapi
@ -5212,12 +5217,13 @@ class WebInterface(object):
return "Error sending test message to Boxcar"
testboxcar.exposed = True
def testpushover(self):
pushover = notifiers.PUSHOVER()
def testpushover(self, apikey, userkey):
pushover = notifiers.PUSHOVER(test_apikey=apikey, test_userkey=userkey)
result = pushover.test_notify()
if result == True:
return "Successfully sent PushOver test - check to make sure it worked"
else:
logger.warn('Test variables used [APIKEY: %s][USERKEY: %s]' % (apikey, userkey))
return "Error sending test message to Pushover"
testpushover.exposed = True
@ -5231,12 +5237,13 @@ class WebInterface(object):
return result['message']
testpushbullet.exposed = True
def testtelegram(self):
telegram = notifiers.TELEGRAM()
def testtelegram(self, userid, token):
telegram = notifiers.TELEGRAM(test_userid=userid, test_token=token)
result = telegram.test_notify()
if result == True:
return "Successfully sent Telegram test - check to make sure it worked"
else:
logger.warn('Test variables used [USERID: %s][TOKEN: %s]' % (userid, token))
return "Error sending test message to Telegram"
testtelegram.exposed = True
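
For reference, the jQuery test handlers at the top of this diff and these exposed methods meet over plain GET query parameters, which CherryPy's default dispatcher maps onto keyword arguments. A minimal sketch of that round trip with a trimmed-down handler (the real testpushover builds a notifiers.PUSHOVER with the supplied keys, as shown above):

    import cherrypy

    class WebInterface(object):
        def testpushover(self, apikey=None, userkey=None):
            # GET /testpushover?apikey=...&userkey=... arrives here as keyword arguments
            if not apikey or not userkey:
                return "Error sending test message to Pushover"
            return "Successfully sent PushOver test - check to make sure it worked"
        testpushover.exposed = True

    if __name__ == '__main__':
        cherrypy.quickstart(WebInterface())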