mirror of https://github.com/evilhero/mylar
FIX:(#1627) Fix for torrents using local watchdir as an option erroring out after a snatch, FIX: Added auto_snatch_script location field to allow for separate auto_snatch script location in addition to other custom on-snatch script, IMP: torrent hashes were not being saved to snatch db for future reference in some cases
This commit is contained in:
parent
7804a0086a
commit
003d3bad7a
|
@ -376,6 +376,7 @@ TORRENT_DOWNLOADER = None #0 = watchfolder, #1 = uTorrent, #2 = rTorrent, #3 =
|
|||
MINSEEDS = 0
|
||||
|
||||
AUTO_SNATCH = False
|
||||
AUTO_SNATCH_SCRIPT = None
|
||||
ALLOW_PACKS = False
|
||||
|
||||
USE_WATCHDIR = False
|
||||
|
@ -495,7 +496,7 @@ def initialize():
|
|||
|
||||
with INIT_LOCK:
|
||||
global __INITIALIZED__, DBCHOICE, DBUSER, DBPASS, DBNAME, DYNAMIC_UPDATE, COMICVINE_API, DEFAULT_CVAPI, CVAPI_RATE, CV_HEADERS, BLACKLISTED_PUBLISHERS, FULL_PATH, PROG_DIR, VERBOSE, DAEMON, UPCOMING_SNATCHED, COMICSORT, DATA_DIR, CONFIG_FILE, CFG, CONFIG_VERSION, LOG_DIR, CACHE_DIR, MAX_LOGSIZE, OLDCONFIG_VERSION, OS_DETECT, \
|
||||
SNATCHED_QUEUE, SNPOOL, AUTO_SNATCH, WANTED_TAB_OFF, LOCAL_IP, EXT_IP, HTTP_PORT, HTTP_HOST, HTTP_USERNAME, HTTP_PASSWORD, HTTP_ROOT, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, HTTPS_CHAIN, HTTPS_FORCE_ON, HOST_RETURN, API_ENABLED, API_KEY, DOWNLOAD_APIKEY, LAUNCH_BROWSER, GIT_PATH, SAFESTART, NOWEEKLY, AUTO_UPDATE, \
|
||||
SNATCHED_QUEUE, SNPOOL, AUTO_SNATCH, AUTO_SNATCH_SCRIPT, WANTED_TAB_OFF, LOCAL_IP, EXT_IP, HTTP_PORT, HTTP_HOST, HTTP_USERNAME, HTTP_PASSWORD, HTTP_ROOT, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, HTTPS_CHAIN, HTTPS_FORCE_ON, HOST_RETURN, API_ENABLED, API_KEY, DOWNLOAD_APIKEY, LAUNCH_BROWSER, GIT_PATH, SAFESTART, NOWEEKLY, AUTO_UPDATE, \
|
||||
IMPORT_STATUS, IMPORT_FILES, IMPORT_TOTALFILES, IMPORT_CID_COUNT, IMPORT_PARSED_COUNT, IMPORT_FAILURE_COUNT, CHECKENABLED, \
|
||||
CURRENT_VERSION, LATEST_VERSION, CHECK_GITHUB, CHECK_GITHUB_ON_STARTUP, CHECK_GITHUB_INTERVAL, GIT_USER, GIT_BRANCH, USER_AGENT, DESTINATION_DIR, MULTIPLE_DEST_DIRS, CREATE_FOLDERS, DELETE_REMOVE_DIR, \
|
||||
DOWNLOAD_DIR, USENET_RETENTION, SEARCH_INTERVAL, NZB_STARTUP_SEARCH, INTERFACE, DUPECONSTRAINT, DDUMP, DUPLICATE_DUMP, AUTOWANT_ALL, AUTOWANT_UPCOMING, ZERO_LEVEL, ZERO_LEVEL_N, COMIC_COVER_LOCAL, HIGHCOUNT, \
|
||||
|
@ -767,6 +768,7 @@ def initialize():
|
|||
|
||||
ENABLE_TORRENT_SEARCH = bool(check_setting_int(CFG, 'Torrents', 'enable_torrent_search', 0))
|
||||
AUTO_SNATCH = bool(check_setting_int(CFG, 'Torrents', 'auto_snatch', 0))
|
||||
AUTO_SNATCH_SCRIPT = check_setting_str(CFG, 'Torrents', 'auto_snatch_script', '')
|
||||
ENABLE_TPSE = bool(check_setting_int(CFG, 'Torrents', 'enable_tpse', 0))
|
||||
TPSE_PROXY = check_setting_str(CFG, 'Torrents', 'tpse_proxy', '')
|
||||
TPSE_VERIFY = bool(check_setting_int(CFG, 'Torrents', 'tpse_verify', 1))
|
||||
|
@ -1559,6 +1561,7 @@ def config_write():
|
|||
new_config['Torrents'] = {}
|
||||
new_config['Torrents']['enable_torrents'] = int(ENABLE_TORRENTS)
|
||||
new_config['Torrents']['auto_snatch'] = int(AUTO_SNATCH)
|
||||
new_config['Torrents']['auto_snatch_script'] = AUTO_SNATCH_SCRIPT
|
||||
new_config['Torrents']['minseeds'] = int(MINSEEDS)
|
||||
new_config['Torrents']['torrent_local'] = int(TORRENT_LOCAL)
|
||||
new_config['Torrents']['local_watchdir'] = LOCAL_WATCHDIR
|
||||
|
|
|
@ -2615,7 +2615,7 @@ def torrentinfo(issueid=None, torrent_hash=None, download=False):
|
|||
|
||||
import shlex, subprocess
|
||||
logger.info('Torrent is completed and status is currently Snatched. Attempting to auto-retrieve.')
|
||||
with open(mylar.SNATCH_SCRIPT, 'r') as f:
|
||||
with open(mylar.AUTO_SNATCH_SCRIPT, 'r') as f:
|
||||
first_line = f.readline()
|
||||
|
||||
if mylar.SNATCH_SCRIPT.endswith('.sh'):
|
||||
|
@ -2625,7 +2625,7 @@ def torrentinfo(issueid=None, torrent_hash=None, download=False):
|
|||
else:
|
||||
shell_cmd = sys.executable
|
||||
|
||||
curScriptName = shell_cmd + ' ' + str(mylar.SNATCH_SCRIPT).decode("string_escape")
|
||||
curScriptName = shell_cmd + ' ' + str(mylar.AUTO_SNATCH_SCRIPT).decode("string_escape")
|
||||
if torrent_files > 1:
|
||||
downlocation = torrent_folder
|
||||
else:
|
||||
|
@ -2772,13 +2772,18 @@ def script_env(mode, vars):
|
|||
os.environ['mylar_release_hash'] = vars['torrentinfo']['hash']
|
||||
os.environ['mylar_release_name'] = vars['torrentinfo']['name']
|
||||
os.environ['mylar_release_folder'] = vars['torrentinfo']['folder']
|
||||
os.environ['mylar_release_label'] = vars['torrentinfo']['label']
|
||||
if 'label' in vars['torrentinfo']:
|
||||
os.environ['mylar_release_label'] = vars['torrentinfo']['label']
|
||||
os.environ['mylar_release_filesize'] = str(vars['torrentinfo']['total_filesize'])
|
||||
os.environ['mylar_release_start'] = str(vars['torrentinfo']['time_started'])
|
||||
try:
|
||||
os.environ['mylar_release_files'] = "|".join(vars['torrentinfo']['files'])
|
||||
except TypeError:
|
||||
os.environ['mylar_release_files'] = "|".join(json.dumps(vars['torrentinfo']['files']))
|
||||
if 'time_started' in vars['torrentinfo']:
|
||||
os.environ['mylar_release_start'] = str(vars['torrentinfo']['time_started'])
|
||||
if 'filepath' in vars['torrentinfo']:
|
||||
os.environ['mylar_torrent_file'] = str(vars['torrentinfo']['filepath'])
|
||||
else:
|
||||
try:
|
||||
os.environ['mylar_release_files'] = "|".join(vars['torrentinfo']['files'])
|
||||
except TypeError:
|
||||
os.environ['mylar_release_files'] = "|".join(json.dumps(vars['torrentinfo']['files']))
|
||||
elif 'nzbinfo' in vars:
|
||||
os.environ['mylar_release_id'] = vars['nzbinfo']['id']
|
||||
os.environ['mylar_release_nzbname'] = vars['nzbinfo']['nzbname']
|
||||
|
@ -2843,6 +2848,18 @@ def script_env(mode, vars):
|
|||
else:
|
||||
return True
|
||||
|
||||
def get_the_hash(filepath):
    """Compute the info-hash of a .torrent file.

    Reads the bencoded metainfo at ``filepath``, extracts its ``info``
    dictionary, and returns the upper-cased SHA1 hex digest of the
    re-encoded info dict -- the hash by which trackers and torrent
    clients identify the torrent.

    Parameters:
        filepath: path to a .torrent file on disk.

    Returns:
        dict with a single key ``'hash'`` mapping to the upper-case
        40-char hex digest string.
    """
    # Local imports, matching the file's existing style for optional deps.
    # (Dropped the original's unused StringIO import.)
    import hashlib
    import bencode

    # Use a context manager so the file handle is always released;
    # the original opened the file and never closed it.
    with open(filepath, "rb") as torrent_file:
        metainfo = bencode.decode(torrent_file.read())

    info = metainfo['info']
    thehash = hashlib.sha1(bencode.encode(info)).hexdigest().upper()
    logger.info('Hash of file : ' + thehash)
    return {'hash': thehash}
|
||||
|
||||
|
||||
def file_ops(path,dst,arc=False,one_off=False):
|
||||
# # path = source path + filename
|
||||
# # dst = destination path + filename
|
||||
|
|
|
@ -1087,7 +1087,12 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site):
|
|||
|
||||
elif mylar.USE_WATCHDIR:
|
||||
if mylar.TORRENT_LOCAL:
|
||||
return "pass"
|
||||
#get the hash so it doesn't mess up...
|
||||
torrent_info = helpers.get_the_hash(filepath)
|
||||
torrent_info['clientmode'] = 'watchdir'
|
||||
torrent_info['link'] = linkit
|
||||
torrent_info['filepath'] = filepath
|
||||
return torrent_info
|
||||
else:
|
||||
tssh = ftpsshup.putfile(filepath, filename)
|
||||
return tssh
|
||||
|
|
|
@ -1452,7 +1452,7 @@ class WebInterface(object):
|
|||
# file check to see if issue exists and update 'have' count
|
||||
if IssueID is not None:
|
||||
logger.info("passing to updater.")
|
||||
return updater.foundsearch(ComicID, IssueID, mode=mode, provider=prov)
|
||||
return updater.foundsearch(ComicID, IssueID, mode=mode, provider=prov, hash=foundcom['info']['t_hash'])
|
||||
if manualsearch:
|
||||
# if it's a manual search, return to null here so the thread will die and not cause http redirect errors.
|
||||
return
|
||||
|
|
Loading…
Reference in New Issue