diff --git a/data/interfaces/default/config.html b/data/interfaces/default/config.html index 124e019f..94340788 100755 --- a/data/interfaces/default/config.html +++ b/data/interfaces/default/config.html @@ -402,13 +402,15 @@ - - Watchdir   - uTorrent   - rTorrent   - Transmission - Deluge - +
+ Watchdir   + uTorrent   + rTorrent
+ Transmission   + Deluge   + qBittorrent +
+
@@ -560,6 +562,36 @@ Label to be used on the torrents
+ +
+
+ + +
+
+ + +
+
+ + +
+
+ +
+ Label to be used on the torrents +
+
+ +
+ Folder path where torrents will be assigned to +
+
+ + Automatically start torrent on successful loading within qBittorrent client +
+
+ @@ -913,7 +945,7 @@
- +
@@ -921,9 +953,19 @@ enter in the absolute path to the script
-
+
- + +
+
+
+
+
+ enter the absolute path to the script
+
+
+
+
@@ -1520,29 +1562,34 @@ if ($("#torrent_downloader_watchlist").is(":checked")) { - $("#utorrent_options,#rtorrent_options,#transmission_options,#deluge_options").hide(); + $("#utorrent_options,#rtorrent_options,#transmission_options,#deluge_options,#qbittorrent_options").hide(); $("#watchlist_options").show(); } if ($("#torrent_downloader_utorrent").is(":checked")) { - $("#watchlist_options,#rtorrent_options,#transmission_options,#deluge_options").hide(); + $("#watchlist_options,#rtorrent_options,#transmission_options,#deluge_options,#qbittorrent_options").hide(); $("#utorrent_options").show(); } - if ($("#torrent_downloader_rtorrent").is(":checked")) + if ($("#torrent_downloader_rtorrent").is(":checked")) { - $("#utorrent_options,#watchlist_options,#transmission_options,#deluge_options").hide(); + $("#utorrent_options,#watchlist_options,#transmission_options,#deluge_options,#qbittorrent_options").hide(); $("#rtorrent_options").show(); } if ($("#torrent_downloader_transmission").is(":checked")) { - $("#utorrent_options,#rtorrent_options,#watchlist_options,#deluge_options").hide(); + $("#utorrent_options,#rtorrent_options,#watchlist_options,#deluge_options,#qbittorrent_options").hide(); $("#transmission_options").show(); } - if ($("#torrent_downloader_deluge").is(":checked")) + if ($("#torrent_downloader_deluge").is(":checked")) { - $("#utorrent_options,#rtorrent_options,#watchlist_options,#transmission_options").hide(); + $("#utorrent_options,#rtorrent_options,#watchlist_options,#transmission_options,#qbittorrent_options").hide(); $("#deluge_options").show(); } + if ($("#torrent_downloader_qbittorrent").is(":checked")) + { + $("#utorrent_options,#rtorrent_options,#watchlist_options,#transmission_options,#deluge_options").hide(); + $("#qbittorrent_options").show(); + } $('input[type=radio]').change(function(){ if ($("#nzb_downloader_sabnzbd").is(":checked")) @@ -1559,24 +1606,28 @@ } if ($("#torrent_downloader_watchlist").is(":checked")) { - $("#utorrent_options,#rtorrent_options,#transmission_options,#deluge_options").fadeOut("fast", function() { $("#watchlist_options").fadeIn() }); + $("#utorrent_options,#rtorrent_options,#transmission_options,#deluge_options,#qbittorrent_options").fadeOut("fast", function() { $("#watchlist_options").fadeIn() }); } if ($("#torrent_downloader_utorrent").is(":checked")) { - $("#watchlist_options,#rtorrent_options,#transmission_options,#deluge_options").fadeOut("fast", function() { $("#utorrent_options").fadeIn() }); + $("#watchlist_options,#rtorrent_options,#transmission_options,#deluge_options,#qbittorrent_options").fadeOut("fast", function() { $("#utorrent_options").fadeIn() }); } if ($("#torrent_downloader_rtorrent").is(":checked")) { - $("#utorrent_options,#watchlist_options,#transmission_options,#deluge_options").fadeOut("fast", function() { $("#rtorrent_options").fadeIn() }); + $("#utorrent_options,#watchlist_options,#transmission_options,#deluge_options,#qbittorrent_options").fadeOut("fast", function() { $("#rtorrent_options").fadeIn() }); } if ($("#torrent_downloader_transmission").is(":checked")) { - $("#utorrent_options,#rtorrent_options,#watchlist_options,#deluge_options").fadeOut("fast", function() { $("#transmission_options").fadeIn() }); + $("#utorrent_options,#rtorrent_options,#watchlist_options,#deluge_options,#qbittorrent_options").fadeOut("fast", function() { $("#transmission_options").fadeIn() }); } if ($("#torrent_downloader_deluge").is(":checked")) { - 
$("#utorrent_options,#rtorrent_options,#watchlist_options,#transmission_options").fadeOut("fast", function() { $("#deluge_options").fadeIn() }); + $("#utorrent_options,#rtorrent_options,#watchlist_options,#transmission_options,#qbittorrent_options").fadeOut("fast", function() { $("#deluge_options").fadeIn() }); } + if ($("#torrent_downloader_qbittorrent").is(":checked")) + { + $("#utorrent_options,#rtorrent_options,#watchlist_options,#transmission_options,#deluge_options").fadeOut("fast", function() { $("#qbittorrent_options").fadeIn() }); + } }); var deletedNewznabs = 0; @@ -1749,6 +1800,7 @@ initConfigCheckbox("#post_processing"); initConfigCheckbox("#enable_check_folder"); initConfigCheckbox("#enable_pre_scripts"); + initConfigCheckbox("#enable_snatch_script"); initConfigCheckbox("#enable_extra_scripts"); } $(document).ready(function() { diff --git a/data/interfaces/default/history.html b/data/interfaces/default/history.html index f863b824..a018c736 100755 --- a/data/interfaces/default/history.html +++ b/data/interfaces/default/history.html @@ -63,12 +63,8 @@ ${item['ComicName']} ${item['Issue_Number']} ${item['Status']} - %if item['Provider'] == '32P' or item['Provider'] == 'KAT' or item['Provider'] == 'CBT': - - %else: - %if item['Status'] != 'Downloaded' and item['Status'] != 'Post-Processed': - (${item['Provider']}) - %endif + %if item['Status'] == 'Snatched': + (${item['Provider']}) %endif [retry] diff --git a/lib/qbittorrent/LICENSE b/lib/qbittorrent/LICENSE new file mode 100644 index 00000000..839352c9 --- /dev/null +++ b/lib/qbittorrent/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Vikas + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/lib/qbittorrent/__init__.py b/lib/qbittorrent/__init__.py new file mode 100644 index 00000000..85cbb99e --- /dev/null +++ b/lib/qbittorrent/__init__.py @@ -0,0 +1,2 @@ +from .client import Client + diff --git a/lib/qbittorrent/client.py b/lib/qbittorrent/client.py new file mode 100644 index 00000000..73d8d753 --- /dev/null +++ b/lib/qbittorrent/client.py @@ -0,0 +1,633 @@ +import requests +import json + + +class LoginRequired(Exception): + def __str__(self): + return 'Please login first.' 
+
+
+class Client(object):
+    """class to interact with qBittorrent WEB API"""
+    def __init__(self, url):
+        if not url.endswith('/'):
+            url += '/'
+        self.url = url
+
+        session = requests.Session()
+        check_prefs = session.get(url+'query/preferences')
+
+        if check_prefs.status_code == 200:
+            self._is_authenticated = True
+            self.session = session
+
+        elif check_prefs.status_code == 404:
+            self._is_authenticated = False
+            raise RuntimeError("""
+                This wrapper only supports qBittorrent applications
+                with version higher than 3.1.x.
+                Please use the latest qBittorrent release.
+                """)
+
+        else:
+            self._is_authenticated = False
+
+    def _get(self, endpoint, **kwargs):
+        """
+        Method to perform GET request on the API.
+
+        :param endpoint: Endpoint of the API.
+        :param kwargs: Other keyword arguments for requests.
+
+        :return: Response of the GET request.
+        """
+        return self._request(endpoint, 'get', **kwargs)
+
+    def _post(self, endpoint, data, **kwargs):
+        """
+        Method to perform POST request on the API.
+
+        :param endpoint: Endpoint of the API.
+        :param data: POST DATA for the request.
+        :param kwargs: Other keyword arguments for requests.
+
+        :return: Response of the POST request.
+        """
+        return self._request(endpoint, 'post', data, **kwargs)
+
+    def _request(self, endpoint, method, data=None, **kwargs):
+        """
+        Method to handle both GET and POST requests.
+
+        :param endpoint: Endpoint of the API.
+        :param method: Method of HTTP request.
+        :param data: POST DATA for the request.
+        :param kwargs: Other keyword arguments.
+
+        :return: Response for the request.
+        """
+        final_url = self.url + endpoint
+
+        if not self._is_authenticated:
+            raise LoginRequired
+
+        rq = self.session
+        if method == 'get':
+            request = rq.get(final_url, **kwargs)
+        else:
+            request = rq.post(final_url, data, **kwargs)
+
+        request.raise_for_status()
+        request.encoding = 'utf_8'
+
+        if len(request.text) == 0:
+            data = json.loads('{}')
+        else:
+            try:
+                data = json.loads(request.text)
+            except ValueError:
+                data = request.text
+
+        return data
+
+    def login(self, username='admin', password='admin'):
+        """
+        Method to authenticate the qBittorrent Client.
+
+        Declares a class attribute named ``session`` which
+        stores the authenticated session if the login is correct.
+        Else, shows the login error.
+
+        :param username: Username.
+        :param password: Password.
+
+        :return: Response to login request to the API.
+        """
+        self.session = requests.Session()
+        login = self.session.post(self.url+'login',
+                                  data={'username': username,
+                                        'password': password})
+        if login.text == 'Ok.':
+            self._is_authenticated = True
+        else:
+            return login.text
+
+    def logout(self):
+        """
+        Logout the current session.
+        """
+        response = self._get('logout')
+        self._is_authenticated = False
+        return response
+
+    @property
+    def qbittorrent_version(self):
+        """
+        Get qBittorrent version.
+        """
+        return self._get('version/qbittorrent')
+
+    @property
+    def api_version(self):
+        """
+        Get WEB API version.
+        """
+        return self._get('version/api')
+
+    @property
+    def api_min_version(self):
+        """
+        Get minimum WEB API version.
+        """
+        return self._get('version/api_min')
+
+    def shutdown(self):
+        """
+        Shutdown qBittorrent.
+        """
+        return self._get('command/shutdown')
+
+    def torrents(self, **filters):
+        """
+        Returns a list of torrents matching the supplied filters.
+
+        :param filter: Current status of the torrents.
+        :param category: Fetch all torrents with the supplied label.
+        :param sort: Sort torrents by.
+        :param reverse: Enable reverse sorting.
+        :param limit: Limit the number of torrents returned.
+        :param offset: Set offset (if less than 0, offset from end).
+
+        :return: list() of torrents with matching filter.
+        """
+        params = {}
+        for name, value in filters.items():
+            # make sure that old 'status' argument still works
+            name = 'filter' if name == 'status' else name
+            params[name] = value
+
+        return self._get('query/torrents', params=params)
+
+    def get_torrent(self, infohash):
+        """
+        Get details of the torrent.
+
+        :param infohash: INFO HASH of the torrent.
+        """
+        return self._get('query/propertiesGeneral/' + infohash.lower())
+
+    def get_torrent_trackers(self, infohash):
+        """
+        Get trackers for the torrent.
+
+        :param infohash: INFO HASH of the torrent.
+        """
+        return self._get('query/propertiesTrackers/' + infohash.lower())
+
+    def get_torrent_webseeds(self, infohash):
+        """
+        Get webseeds for the torrent.
+
+        :param infohash: INFO HASH of the torrent.
+        """
+        return self._get('query/propertiesWebSeeds/' + infohash.lower())
+
+    def get_torrent_files(self, infohash):
+        """
+        Get list of files for the torrent.
+
+        :param infohash: INFO HASH of the torrent.
+        """
+        return self._get('query/propertiesFiles/' + infohash.lower())
+
+    @property
+    def global_transfer_info(self):
+        """
+        Get JSON data of the global transfer info of qBittorrent.
+        """
+        return self._get('query/transferInfo')
+
+    @property
+    def preferences(self):
+        """
+        Get the current qBittorrent preferences.
+        Can also be used to assign individual preferences.
+        For setting multiple preferences at once,
+        see ``set_preferences`` method.
+
+        Note: Even if this is a ``property``,
+        to fetch the current preferences dict, you are required
+        to call it like a bound method.
+
+        Wrong::
+
+            qb.preferences
+
+        Right::
+
+            qb.preferences()
+
+        """
+        prefs = self._get('query/preferences')
+
+        class Proxy(Client):
+            """
+            Proxy class to allow assignment of individual preferences.
+            This class overrides some methods to ease things.
+
+            Because of this, settings can be assigned like::
+
+                In [5]: prefs = qb.preferences()
+
+                In [6]: prefs['autorun_enabled']
+                Out[6]: True
+
+                In [7]: prefs['autorun_enabled'] = False
+
+                In [8]: prefs['autorun_enabled']
+                Out[8]: False
+
+            """
+
+            def __init__(self, url, prefs, auth, session):
+                super(Proxy, self).__init__(url)
+                self.prefs = prefs
+                self._is_authenticated = auth
+                self.session = session
+
+            def __getitem__(self, key):
+                return self.prefs[key]
+
+            def __setitem__(self, key, value):
+                kwargs = {key: value}
+                return self.set_preferences(**kwargs)
+
+            def __call__(self):
+                return self.prefs
+
+        return Proxy(self.url, prefs, self._is_authenticated, self.session)
+
+    def sync(self, rid=0):
+        """
+        Sync the torrents by supplied LAST RESPONSE ID.
+        Read more @ http://git.io/vEgXr
+
+        :param rid: Response ID of last request.
+        """
+        return self._get('sync/maindata', params={'rid': rid})
+
+    def download_from_link(self, link, **kwargs):
+        """
+        Download torrent using a link.
+
+        :param link: URL Link or list of.
+        :param savepath: Path to download the torrent.
+        :param category: Label or Category of the torrent(s).
+
+        :return: Empty JSON data.
+        """
+        # old:new format
+        old_arg_map = {'save_path': 'savepath'}  # , 'label': 'category'}
+
+        # convert old option names to new option names
+        options = kwargs.copy()
+        for old_arg, new_arg in old_arg_map.items():
+            if options.get(old_arg) and not options.get(new_arg):
+                options[new_arg] = options[old_arg]
+
+        options['urls'] = link
+
+        # workaround to send multipart/formdata request
+        # http://stackoverflow.com/a/23131823/4726598
+        dummy_file = {'_dummy': (None, '_dummy')}
+
+        return self._post('command/download', data=options, files=dummy_file)
+
+    def download_from_file(self, file_buffer, **kwargs):
+        """
+        Download torrent using a file.
+
+        :param file_buffer: Single file() buffer or list of.
+        :param save_path: Path to download the torrent.
+        :param label: Label of the torrent(s).
+
+        :return: Empty JSON data.
+        """
+        if isinstance(file_buffer, list):
+            torrent_files = {}
+            for i, f in enumerate(file_buffer):
+                torrent_files.update({'torrents%s' % i: f})
+        else:
+            torrent_files = {'torrents': file_buffer}
+
+        data = kwargs.copy()
+
+        if data.get('save_path'):
+            data.update({'savepath': data['save_path']})
+        return self._post('command/upload', data=data, files=torrent_files)
+
+    def add_trackers(self, infohash, trackers):
+        """
+        Add trackers to a torrent.
+
+        :param infohash: INFO HASH of torrent.
+        :param trackers: Trackers.
+        """
+        data = {'hash': infohash.lower(),
+                'urls': trackers}
+        return self._post('command/addTrackers', data=data)
+
+    @staticmethod
+    def _process_infohash_list(infohash_list):
+        """
+        Method to convert the infohash_list to qBittorrent API friendly values.
+
+        :param infohash_list: List of infohash.
+        """
+        if isinstance(infohash_list, list):
+            data = {'hashes': '|'.join([h.lower() for h in infohash_list])}
+        else:
+            data = {'hashes': infohash_list.lower()}
+        return data
+
+    def pause(self, infohash):
+        """
+        Pause a torrent.
+
+        :param infohash: INFO HASH of torrent.
+        """
+        return self._post('command/pause', data={'hash': infohash.lower()})
+
+    def pause_all(self):
+        """
+        Pause all torrents.
+        """
+        return self._get('command/pauseAll')
+
+    def pause_multiple(self, infohash_list):
+        """
+        Pause multiple torrents.
+
+        :param infohash_list: Single or list() of infohashes.
+        """
+        data = self._process_infohash_list(infohash_list)
+        return self._post('command/pauseAll', data=data)
+
+    def set_label(self, infohash_list, label):
+        """
+        Set the label on multiple torrents.
+        IMPORTANT: OLD API method, kept as it is to avoid breaking stuff.
+
+        :param infohash_list: Single or list() of infohashes.
+        :param label: Label to set.
+        """
+        data = self._process_infohash_list(infohash_list)
+        data['label'] = label
+        return self._post('command/setLabel', data=data)
+
+    def set_category(self, infohash_list, category):
+        """
+        Set the category on multiple torrents.
+
+        :param infohash_list: Single or list() of infohashes.
+        :param category: Category to set.
+        """
+        data = self._process_infohash_list(infohash_list)
+        data['category'] = category
+        return self._post('command/setCategory', data=data)
+
+    def resume(self, infohash):
+        """
+        Resume a paused torrent.
+
+        :param infohash: INFO HASH of torrent.
+        """
+        return self._post('command/resume', data={'hash': infohash.lower()})
+
+    def resume_all(self):
+        """
+        Resume all torrents.
+        """
+        return self._get('command/resumeAll')
+
+    def resume_multiple(self, infohash_list):
+        """
+        Resume multiple paused torrents.
+
+        :param infohash_list: Single or list() of infohashes.
+ """ + data = self._process_infohash_list(infohash_list) + return self._post('command/resumeAll', data=data) + + def delete(self, infohash_list): + """ + Delete torrents. + + :param infohash_list: Single or list() of infohashes. + """ + data = self._process_infohash_list(infohash_list) + return self._post('command/delete', data=data) + + def delete_permanently(self, infohash_list): + """ + Permanently delete torrents. + + :param infohash_list: Single or list() of infohashes. + """ + data = self._process_infohash_list(infohash_list) + return self._post('command/deletePerm', data=data) + + def recheck(self, infohash_list): + """ + Recheck torrents. + + :param infohash_list: Single or list() of infohashes. + """ + data = self._process_infohash_list(infohash_list) + return self._post('command/recheck', data=data) + + def increase_priority(self, infohash_list): + """ + Increase priority of torrents. + + :param infohash_list: Single or list() of infohashes. + """ + data = self._process_infohash_list(infohash_list) + return self._post('command/increasePrio', data=data) + + def decrease_priority(self, infohash_list): + """ + Decrease priority of torrents. + + :param infohash_list: Single or list() of infohashes. + """ + data = self._process_infohash_list(infohash_list) + return self._post('command/decreasePrio', data=data) + + def set_max_priority(self, infohash_list): + """ + Set torrents to maximum priority level. + + :param infohash_list: Single or list() of infohashes. + """ + data = self._process_infohash_list(infohash_list) + return self._post('command/topPrio', data=data) + + def set_min_priority(self, infohash_list): + """ + Set torrents to minimum priority level. + + :param infohash_list: Single or list() of infohashes. + """ + data = self._process_infohash_list(infohash_list) + return self._post('command/bottomPrio', data=data) + + def set_file_priority(self, infohash, file_id, priority): + """ + Set file of a torrent to a supplied priority level. + + :param infohash: INFO HASH of torrent. + :param file_id: ID of the file to set priority. + :param priority: Priority level of the file. + """ + if priority not in [0, 1, 2, 7]: + raise ValueError("Invalid priority, refer WEB-UI docs for info.") + elif not isinstance(file_id, int): + raise TypeError("File ID must be an int") + + data = {'hash': infohash.lower(), + 'id': file_id, + 'priority': priority} + + return self._post('command/setFilePrio', data=data) + + # Get-set global download and upload speed limits. + + def get_global_download_limit(self): + """ + Get global download speed limit. + """ + return self._get('command/getGlobalDlLimit') + + def set_global_download_limit(self, limit): + """ + Set global download speed limit. + + :param limit: Speed limit in bytes. + """ + return self._post('command/setGlobalDlLimit', data={'limit': limit}) + + global_download_limit = property(get_global_download_limit, + set_global_download_limit) + + def get_global_upload_limit(self): + """ + Get global upload speed limit. + """ + return self._get('command/getGlobalUpLimit') + + def set_global_upload_limit(self, limit): + """ + Set global upload speed limit. + + :param limit: Speed limit in bytes. + """ + return self._post('command/setGlobalUpLimit', data={'limit': limit}) + + global_upload_limit = property(get_global_upload_limit, + set_global_upload_limit) + + # Get-set download and upload speed limits of the torrents. + def get_torrent_download_limit(self, infohash_list): + """ + Get download speed limit of the supplied torrents. 
+
+        :param infohash_list: Single or list() of infohashes.
+        """
+        data = self._process_infohash_list(infohash_list)
+        return self._post('command/getTorrentsDlLimit', data=data)
+
+    def set_torrent_download_limit(self, infohash_list, limit):
+        """
+        Set download speed limit of the supplied torrents.
+
+        :param infohash_list: Single or list() of infohashes.
+        :param limit: Speed limit in bytes.
+        """
+        data = self._process_infohash_list(infohash_list)
+        data.update({'limit': limit})
+        return self._post('command/setTorrentsDlLimit', data=data)
+
+    def get_torrent_upload_limit(self, infohash_list):
+        """
+        Get upload speed limit of the supplied torrents.
+
+        :param infohash_list: Single or list() of infohashes.
+        """
+        data = self._process_infohash_list(infohash_list)
+        return self._post('command/getTorrentsUpLimit', data=data)
+
+    def set_torrent_upload_limit(self, infohash_list, limit):
+        """
+        Set upload speed limit of the supplied torrents.
+
+        :param infohash_list: Single or list() of infohashes.
+        :param limit: Speed limit in bytes.
+        """
+        data = self._process_infohash_list(infohash_list)
+        data.update({'limit': limit})
+        return self._post('command/setTorrentsUpLimit', data=data)
+
+    # setting preferences
+    def set_preferences(self, **kwargs):
+        """
+        Set preferences of qBittorrent.
+        Read all possible preferences @ http://git.io/vEgDQ
+
+        :param kwargs: set preferences in kwargs form.
+        """
+        json_data = "json={}".format(json.dumps(kwargs))
+        headers = {'content-type': 'application/x-www-form-urlencoded'}
+        return self._post('command/setPreferences', data=json_data,
+                          headers=headers)
+
+    def get_alternative_speed_status(self):
+        """
+        Get Alternative speed limits. (1/0)
+        """
+        return self._get('command/alternativeSpeedLimitsEnabled')
+
+    alternative_speed_status = property(get_alternative_speed_status)
+
+    def toggle_alternative_speed(self):
+        """
+        Toggle alternative speed limits.
+        """
+        return self._get('command/toggleAlternativeSpeedLimits')
+
+    def toggle_sequential_download(self, infohash_list):
+        """
+        Toggle sequential download in supplied torrents.
+
+        :param infohash_list: Single or list() of infohashes.
+        """
+        data = self._process_infohash_list(infohash_list)
+        return self._post('command/toggleSequentialDownload', data=data)
+
+    def toggle_first_last_piece_priority(self, infohash_list):
+        """
+        Toggle first/last piece priority of supplied torrents.
+
+        :param infohash_list: Single or list() of infohashes.
+        """
+        data = self._process_infohash_list(infohash_list)
+        return self._post('command/toggleFirstLastPiecePrio', data=data)
+
+    def force_start(self, infohash_list, value=True):
+        """
+        Force start selected torrents.
+
+        :param infohash_list: Single or list() of infohashes.
+ :param value: Force start value (bool) + """ + data = self._process_infohash_list(infohash_list) + data.update({'value': json.dumps(value)}) + return self._post('command/setForceStart', data=data) diff --git a/mylar/PostProcessor.py b/mylar/PostProcessor.py index 69f2127a..950d2f6d 100755 --- a/mylar/PostProcessor.py +++ b/mylar/PostProcessor.py @@ -932,7 +932,7 @@ class PostProcessor(object): comicid = tmpiss['ComicID'] comicname = tmpiss['ComicName'] issuenumber = tmpiss['Issue_Number'] - + #use issueid to get publisher, series, year, issue number annchk = "no" @@ -1031,7 +1031,7 @@ class PostProcessor(object): else: arcpub = arcdata['Publisher'] - grdst = helpers.arcformat(arc['StoryArc'], helpers.spantheyears(arcdata['StoryArcID']), arcpub) + grdst = helpers.arcformat(arcdata['StoryArc'], helpers.spantheyears(arcdata['StoryArcID']), arcpub) if comicid is None: comicid = arcdata['ComicID'] diff --git a/mylar/__init__.py b/mylar/__init__.py index 077c5354..bfca9d79 100755 --- a/mylar/__init__.py +++ b/mylar/__init__.py @@ -83,7 +83,7 @@ WeeklyScheduler = None VersionScheduler = None FolderMonitorScheduler = None -QUEUE = Queue.Queue() +SNATCHED_QUEUE = Queue.Queue() DATA_DIR = None DBLOCK = False @@ -298,10 +298,13 @@ QUAL_SCANNER = None QUAL_TYPE = None QUAL_QUALITY = None -ENABLE_EXTRA_SCRIPTS = 1 +ENABLE_EXTRA_SCRIPTS = False EXTRA_SCRIPTS = None -ENABLE_PRE_SCRIPTS = 1 +ENABLE_SNATCH_SCRIPT = False +SNATCH_SCRIPT = None + +ENABLE_PRE_SCRIPTS = False PRE_SCRIPTS = None COUNT_COMICS = 0 @@ -328,7 +331,7 @@ ARC_FOLDERFORMAT = None ARC_FILEOPS = 'copy' CVURL = None -CV_VERIFY = 0 +CV_VERIFY = True CURRENT_WEEKNUMBER = None CURRENT_YEAR = None PULL_REFRESH = None @@ -369,9 +372,10 @@ FAILED_DOWNLOAD_HANDLING = 0 FAILED_AUTO = 0 ENABLE_TORRENTS = 0 -TORRENT_DOWNLOADER = None #0 = watchfolder, #1 = uTorrent, #2 = rTorrent, #3 = transmission +TORRENT_DOWNLOADER = None #0 = watchfolder, #1 = uTorrent, #2 = rTorrent, #3 = transmission, #4 = deluge, #5 = qbittorrent MINSEEDS = 0 +AUTO_SNATCH = False ALLOW_PACKS = False USE_WATCHDIR = False @@ -433,6 +437,13 @@ DELUGE_USERNAME = None DELUGE_PASSWORD = None DELUGE_LABEL = None +USE_QBITTORRENT = False +QBITTORRENT_HOST = None +QBITTORRENT_USERNAME = None +QBITTORRENT_PASSWORD = None +QBITTORRENT_LABEL = None +QBITTORRENT_FOLDER = None +QBITTORRENT_STARTONLOAD = 0 def CheckSection(sec): """ Check if INI section exists, if not create it """ @@ -484,7 +495,7 @@ def initialize(): with INIT_LOCK: global __INITIALIZED__, DBCHOICE, DBUSER, DBPASS, DBNAME, DYNAMIC_UPDATE, COMICVINE_API, DEFAULT_CVAPI, CVAPI_RATE, CV_HEADERS, BLACKLISTED_PUBLISHERS, FULL_PATH, PROG_DIR, VERBOSE, DAEMON, UPCOMING_SNATCHED, COMICSORT, DATA_DIR, CONFIG_FILE, CFG, CONFIG_VERSION, LOG_DIR, CACHE_DIR, MAX_LOGSIZE, OLDCONFIG_VERSION, OS_DETECT, \ - queue, WANTED_TAB_OFF, LOCAL_IP, EXT_IP, HTTP_PORT, HTTP_HOST, HTTP_USERNAME, HTTP_PASSWORD, HTTP_ROOT, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, HTTPS_CHAIN, HTTPS_FORCE_ON, HOST_RETURN, API_ENABLED, API_KEY, DOWNLOAD_APIKEY, LAUNCH_BROWSER, GIT_PATH, SAFESTART, NOWEEKLY, AUTO_UPDATE, \ + SNATCHED_QUEUE, SNPOOL, AUTO_SNATCH, WANTED_TAB_OFF, LOCAL_IP, EXT_IP, HTTP_PORT, HTTP_HOST, HTTP_USERNAME, HTTP_PASSWORD, HTTP_ROOT, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, HTTPS_CHAIN, HTTPS_FORCE_ON, HOST_RETURN, API_ENABLED, API_KEY, DOWNLOAD_APIKEY, LAUNCH_BROWSER, GIT_PATH, SAFESTART, NOWEEKLY, AUTO_UPDATE, \ IMPORT_STATUS, IMPORT_FILES, IMPORT_TOTALFILES, IMPORT_CID_COUNT, IMPORT_PARSED_COUNT, IMPORT_FAILURE_COUNT, CHECKENABLED, \ CURRENT_VERSION, 
LATEST_VERSION, CHECK_GITHUB, CHECK_GITHUB_ON_STARTUP, CHECK_GITHUB_INTERVAL, GIT_USER, GIT_BRANCH, USER_AGENT, DESTINATION_DIR, MULTIPLE_DEST_DIRS, CREATE_FOLDERS, DELETE_REMOVE_DIR, \ DOWNLOAD_DIR, USENET_RETENTION, SEARCH_INTERVAL, NZB_STARTUP_SEARCH, INTERFACE, DUPECONSTRAINT, DDUMP, DUPLICATE_DUMP, AUTOWANT_ALL, AUTOWANT_UPCOMING, ZERO_LEVEL, ZERO_LEVEL_N, COMIC_COVER_LOCAL, HIGHCOUNT, \ @@ -494,6 +505,7 @@ def initialize(): ENABLE_TORZNAB, TORZNAB_NAME, TORZNAB_HOST, TORZNAB_APIKEY, TORZNAB_CATEGORY, TORZNAB_VERIFY, EXPERIMENTAL, ALTEXPERIMENTAL, \ USE_RTORRENT, RTORRENT_HOST, RTORRENT_AUTHENTICATION, RTORRENT_RPC_URL, RTORRENT_SSL, RTORRENT_VERIFY, RTORRENT_CA_BUNDLE, RTORRENT_USERNAME, RTORRENT_PASSWORD, RTORRENT_STARTONLOAD, RTORRENT_LABEL, RTORRENT_DIRECTORY, \ USE_UTORRENT, UTORRENT_HOST, UTORRENT_USERNAME, UTORRENT_PASSWORD, UTORRENT_LABEL, USE_TRANSMISSION, TRANSMISSION_HOST, TRANSMISSION_USERNAME, TRANSMISSION_PASSWORD, TRANSMISSION_DIRECTORY, USE_DELUGE, DELUGE_HOST, DELUGE_USERNAME, DELUGE_PASSWORD, DELUGE_LABEL, \ + USE_QBITTORRENT, QBITTORRENT_HOST, QBITTORRENT_USERNAME, QBITTORRENT_PASSWORD, QBITTORRENT_LABEL, QBITTORRENT_FOLDER, QBITTORRENT_STARTONLOAD, \ ENABLE_META, CMTAGGER_PATH, CBR2CBZ_ONLY, CT_TAG_CR, CT_TAG_CBL, CT_CBZ_OVERWRITE, UNRAR_CMD, CT_SETTINGSPATH, CMTAG_VOLUME, CMTAG_START_YEAR_AS_VOLUME, UPDATE_ENDED, INDIE_PUB, BIGGIE_PUB, IGNORE_HAVETOTAL, SNATCHED_HAVETOTAL, PROVIDER_ORDER, TMP_PROV, \ dbUpdateScheduler, searchScheduler, RSSScheduler, WeeklyScheduler, VersionScheduler, FolderMonitorScheduler, \ ALLOW_PACKS, ENABLE_TORRENTS, TORRENT_DOWNLOADER, MINSEEDS, USE_WATCHDIR, TORRENT_LOCAL, LOCAL_WATCHDIR, TORRENT_SEEDBOX, SEEDBOX_HOST, SEEDBOX_PORT, SEEDBOX_USER, SEEDBOX_PASS, SEEDBOX_WATCHDIR, \ @@ -504,7 +516,7 @@ def initialize(): FOLDER_FORMAT, SETDEFAULTVOLUME, FILE_FORMAT, REPLACE_CHAR, REPLACE_SPACES, ADD_TO_CSV, CVINFO, LOG_LEVEL, POST_PROCESSING, POST_PROCESSING_SCRIPT, \ FILE_OPTS, SEARCH_DELAY, GRABBAG_DIR, READ2FILENAME, SEND2READ, MAINTAINSERIESFOLDER, TAB_ENABLE, TAB_HOST, TAB_USER, TAB_PASS, TAB_DIRECTORY, \ STORYARCDIR, COPY2ARCDIR, ARC_FOLDERFORMAT, ARC_FILEOPS, CVURL, CV_VERIFY, CHECK_FOLDER, ENABLE_CHECK_FOLDER, \ - COMIC_LOCATION, QUAL_ALTVERS, QUAL_SCANNER, QUAL_TYPE, QUAL_QUALITY, ENABLE_EXTRA_SCRIPTS, EXTRA_SCRIPTS, ENABLE_PRE_SCRIPTS, PRE_SCRIPTS, PULLNEW, ALT_PULL, PULLBYFILE, COUNT_ISSUES, COUNT_HAVES, COUNT_COMICS, \ + COMIC_LOCATION, QUAL_ALTVERS, QUAL_SCANNER, QUAL_TYPE, QUAL_QUALITY, ENABLE_EXTRA_SCRIPTS, EXTRA_SCRIPTS, ENABLE_SNATCH_SCRIPT, SNATCH_SCRIPT, ENABLE_PRE_SCRIPTS, PRE_SCRIPTS, PULLNEW, ALT_PULL, PULLBYFILE, COUNT_ISSUES, COUNT_HAVES, COUNT_COMICS, \ SYNO_FIX, ENFORCE_PERMS, CHMOD_FILE, CHMOD_DIR, CHOWNER, CHGROUP, ANNUALS_ON, CV_ONLY, CV_ONETIMER, CURRENT_WEEKNUMBER, CURRENT_YEAR, PULL_REFRESH, WEEKFOLDER, WEEKFOLDER_LOC, WEEKFOLDER_FORMAT, UMASK, \ TELEGRAM_ENABLED, TELEGRAM_TOKEN, TELEGRAM_USERID @@ -550,7 +562,7 @@ def initialize(): if not COMICVINE_API: COMICVINE_API = None CVAPI_RATE = check_setting_int(CFG, 'General', 'cvapi_rate', 2) - CV_VERIFY = bool(check_setting_int(CFG, 'General', 'cv_verify', 0)) + CV_VERIFY = bool(check_setting_int(CFG, 'General', 'cv_verify', 1)) HTTP_HOST = check_setting_str(CFG, 'General', 'http_host', '0.0.0.0') HTTP_USERNAME = check_setting_str(CFG, 'General', 'http_username', '') HTTP_PASSWORD = check_setting_str(CFG, 'General', 'http_password', '') @@ -707,7 +719,8 @@ def initialize(): LOG_LEVEL = check_setting_str(CFG, 'General', 'log_level', '') ENABLE_EXTRA_SCRIPTS = 
bool(check_setting_int(CFG, 'General', 'enable_extra_scripts', 0))
        EXTRA_SCRIPTS = check_setting_str(CFG, 'General', 'extra_scripts', '')
-
+        ENABLE_SNATCH_SCRIPT = bool(check_setting_int(CFG, 'General', 'enable_snatch_script', 0))
+        SNATCH_SCRIPT = check_setting_str(CFG, 'General', 'snatch_script', '')
         ENABLE_PRE_SCRIPTS = bool(check_setting_int(CFG, 'General', 'enable_pre_scripts', 0))
         PRE_SCRIPTS = check_setting_str(CFG, 'General', 'pre_scripts', '')
         POST_PROCESSING = bool(check_setting_int(CFG, 'General', 'post_processing', 1))
@@ -753,6 +766,7 @@
         SEEDBOX_WATCHDIR = check_setting_str(CFG, 'Torrents', 'seedbox_watchdir', '')
 
         ENABLE_TORRENT_SEARCH = bool(check_setting_int(CFG, 'Torrents', 'enable_torrent_search', 0))
+        AUTO_SNATCH = bool(check_setting_int(CFG, 'Torrents', 'auto_snatch', 0))
         ENABLE_TPSE = bool(check_setting_int(CFG, 'Torrents', 'enable_tpse', 0))
         TPSE_PROXY = check_setting_str(CFG, 'Torrents', 'tpse_proxy', '')
         TPSE_VERIFY = bool(check_setting_int(CFG, 'Torrents', 'tpse_verify', 1))
@@ -854,6 +868,9 @@
         elif TORRENT_DOWNLOADER == 4:
             TORRENT_LOCAL = False
             USE_DELUGE = True
+        elif TORRENT_DOWNLOADER == 5:
+            TORRENT_LOCAL = False
+            USE_QBITTORRENT = True
         else:
             TORRENT_DOWNLOADER = 0
             USE_WATCHDIR = True
@@ -873,6 +890,13 @@
         DELUGE_USERNAME = check_setting_str(CFG, 'Deluge', 'deluge_username', '')
         DELUGE_PASSWORD = check_setting_str(CFG, 'Deluge', 'deluge_password', '')
         DELUGE_LABEL = check_setting_str(CFG, 'Deluge', 'deluge_label', '')
+
+        QBITTORRENT_HOST = check_setting_str(CFG, 'qBittorrent', 'qbittorrent_host', '')
+        QBITTORRENT_USERNAME = check_setting_str(CFG, 'qBittorrent', 'qbittorrent_username', '')
+        QBITTORRENT_PASSWORD = check_setting_str(CFG, 'qBittorrent', 'qbittorrent_password', '')
+        QBITTORRENT_LABEL = check_setting_str(CFG, 'qBittorrent', 'qbittorrent_label', '')
+        QBITTORRENT_FOLDER = check_setting_str(CFG, 'qBittorrent', 'qbittorrent_folder', '')
+        QBITTORRENT_STARTONLOAD = bool(check_setting_int(CFG, 'qBittorrent', 'qbittorrent_startonload', 0))
 
     #add torrents to provider counter.
    if ENABLE_TORRENT_SEARCH:
@@ -1237,6 +1261,8 @@
     if LOCMOVE:
         helpers.updateComicLocation()
 
+    SNPOOL = None
+
     #logger.fdebug('platform detected as : ' + OS_DETECT)
 
     #Ordering comics here
@@ -1288,7 +1314,6 @@
                                              runImmediately=True,
                                              delay=60)
 
-
     # Store the original umask
     UMASK = os.umask(0)
     os.umask(UMASK)
@@ -1485,6 +1510,8 @@ def config_write():
     new_config['General']['log_level'] = LOG_LEVEL
     new_config['General']['enable_extra_scripts'] = int(ENABLE_EXTRA_SCRIPTS)
     new_config['General']['extra_scripts'] = EXTRA_SCRIPTS
+    new_config['General']['enable_snatch_script'] = int(ENABLE_SNATCH_SCRIPT)
+    new_config['General']['snatch_script'] = SNATCH_SCRIPT
     new_config['General']['enable_pre_scripts'] = int(ENABLE_PRE_SCRIPTS)
     new_config['General']['pre_scripts'] = PRE_SCRIPTS
     new_config['General']['post_processing'] = int(POST_PROCESSING)
@@ -1531,6 +1558,7 @@
 
     new_config['Torrents'] = {}
     new_config['Torrents']['enable_torrents'] = int(ENABLE_TORRENTS)
+    new_config['Torrents']['auto_snatch'] = int(AUTO_SNATCH)
     new_config['Torrents']['minseeds'] = int(MINSEEDS)
     new_config['Torrents']['torrent_local'] = int(TORRENT_LOCAL)
     new_config['Torrents']['local_watchdir'] = LOCAL_WATCHDIR
@@ -1591,12 +1619,12 @@
     new_config['NZBsu']['nzbsu'] = int(NZBSU)
     new_config['NZBsu']['nzbsu_uid'] = NZBSU_UID
     new_config['NZBsu']['nzbsu_apikey'] = NZBSU_APIKEY
-    new_config['NZBsu']['nzbsu_verify'] = NZBSU_VERIFY
+    new_config['NZBsu']['nzbsu_verify'] = int(NZBSU_VERIFY)
 
     new_config['DOGnzb'] = {}
     new_config['DOGnzb']['dognzb'] = int(DOGNZB)
     new_config['DOGnzb']['dognzb_apikey'] = DOGNZB_APIKEY
-    new_config['DOGnzb']['dognzb_verify'] = DOGNZB_VERIFY
+    new_config['DOGnzb']['dognzb_verify'] = int(DOGNZB_VERIFY)
 
     new_config['Experimental'] = {}
     new_config['Experimental']['experimental'] = int(EXPERIMENTAL)
@@ -1608,7 +1636,7 @@
     new_config['Torznab']['torznab_host'] = TORZNAB_HOST
     new_config['Torznab']['torznab_apikey'] = TORZNAB_APIKEY
     new_config['Torznab']['torznab_category'] = TORZNAB_CATEGORY
-    new_config['Torznab']['torznab_verify'] = TORZNAB_VERIFY
+    new_config['Torznab']['torznab_verify'] = int(TORZNAB_VERIFY)
 
     new_config['Newznab'] = {}
     new_config['Newznab']['newznab'] = int(NEWZNAB)
@@ -1630,7 +1658,15 @@
     new_config['Deluge']['deluge_username'] = DELUGE_USERNAME
     new_config['Deluge']['deluge_password'] = DELUGE_PASSWORD
     new_config['Deluge']['deluge_label'] = DELUGE_LABEL
-
+
+    new_config['qBittorrent'] = {}
+    new_config['qBittorrent']['qbittorrent_host'] = QBITTORRENT_HOST
+    new_config['qBittorrent']['qbittorrent_username'] = QBITTORRENT_USERNAME
+    new_config['qBittorrent']['qbittorrent_password'] = QBITTORRENT_PASSWORD
+    new_config['qBittorrent']['qbittorrent_label'] = QBITTORRENT_LABEL
+    new_config['qBittorrent']['qbittorrent_folder'] = QBITTORRENT_FOLDER
+    new_config['qBittorrent']['qbittorrent_startonload'] = int(QBITTORRENT_STARTONLOAD)
+
     # Need to unpack the extra newznabs for saving in config.ini
     flattened_newznabs = []
     for newznab in EXTRA_NEWZNABS:
@@ -1694,6 +1730,12 @@ def start():
 
         #SCHED.add_interval_job(updater.dbUpdate, hours=48)
         #SCHED.add_interval_job(search.searchforissue, minutes=SEARCH_INTERVAL)
+        if all([ENABLE_TORRENTS, AUTO_SNATCH, OS_DETECT != 'Windows']) and any([TORRENT_DOWNLOADER == 2, TORRENT_DOWNLOADER == 4]):
+            logger.info('Auto-Snatch of completed torrents enabled & attempting to background load....')
+            SNPOOL = threading.Thread(target=helpers.worker_main, args=(SNATCHED_QUEUE,), name="AUTO-SNATCHER")
+            SNPOOL.start()
+            logger.info('Successfully started Auto-Snatch add-on - will now monitor for completed torrents on client....')
+
         #start the db updater scheduler
         logger.info('Initializing the DB Updater.')
         dbUpdateScheduler.thread.start()
@@ -2396,6 +2438,17 @@ def halt():
         except:
             pass
 
+        if SNPOOL is not None:
+            logger.info('Terminating the auto-snatch thread.')
+            try:
+                SNPOOL.join(10)
+                logger.info('Joined pool for termination - successful')
+            except KeyboardInterrupt:
+                SNATCHED_QUEUE.put('exit')
+                SNPOOL.join(5)
+            except AssertionError:
+                os._exit(0)
+
         __INITIALIZED__ = False
 
 def shutdown(restart=False, update=False):
diff --git a/mylar/helpers.py b/mylar/helpers.py
index 1b690fff..02583f08 100755
--- a/mylar/helpers.py
+++ b/mylar/helpers.py
@@ -1,4 +1,5 @@
 # This file is part of Mylar.
+# -*- coding: utf-8 -*-
 #
 # Mylar is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -17,7 +18,11 @@ import time
 from operator import itemgetter
 import datetime
 from datetime import timedelta, date
+import subprocess
+import shlex
+import json
 import re
+import sys
 import platform
 import itertools
 import shutil
@@ -247,7 +252,12 @@ def decimal_issue(iss):
 def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=None, annualize=None, arc=False):
     import db, logger
     myDB = db.DBConnection()
-    logger.fdebug('comicid: ' + str(comicid))
+    comicid = str(comicid)   # it's coming in unicoded...
+
+    logger.fdebug(type(comicid))
+    logger.fdebug(type(issueid))
+    logger.fdebug(type(issue))
+    logger.fdebug('comicid: ' + comicid)
     logger.fdebug('issue#: ' + issue)
     # the issue here is a non-decimalized version, we need to see if it's got a decimal and if not, add '.00'
#    iss_find = issue.find('.')
@@ -275,7 +285,7 @@ def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=N
             #this has to be adjusted to be able to include story arc issues that span multiple arcs
             chkissue = myDB.selectone("SELECT * from readinglist WHERE ComicID=? AND Issue_Number=?", [comicid, issue]).fetchone()
         else:
-            if annualize is None:
+            if all([annualize is None, not mylar.ANNUALS_ON]):
                 chkissue = myDB.selectone("SELECT * from issues WHERE ComicID=? AND Issue_Number=?", [comicid, issue]).fetchone()
             else:
                 chkissue = myDB.selectone("SELECT * from annuals WHERE ComicID=? AND Issue_Number=?", [comicid, issue]).fetchone()
@@ -286,13 +296,12 @@
                 chkissue = myDB.selectone("SELECT * from readinglist WHERE ComicID=? AND Int_IssueNumber=?", [comicid, issuedigits(issue)]).fetchone()
             else:
                 chkissue = myDB.selectone("SELECT * from issues WHERE ComicID=? AND Int_IssueNumber=?", [comicid, issuedigits(issue)]).fetchone()
-                if annualize:
+                if all([annualize == 'yes', mylar.ANNUALS_ON]):
                     chkissue = myDB.selectone("SELECT * from annuals WHERE ComicID=? AND Int_IssueNumber=?", [comicid, issuedigits(issue)]).fetchone()
 
             if chkissue is None:
-                if chkissue is None:
-                    logger.error('Invalid Issue_Number - please validate.')
-                    return
+                logger.error('Invalid Issue_Number - please validate.')
+                return
             else:
                 logger.info('Int Issue_number compare found. continuing...')
                 issueid = chkissue['IssueID']
@@ -371,7 +380,7 @@
         for issexcept in issue_exceptions:
             if issexcept.lower() in issuenum.lower():
                 logger.fdebug('ALPHANUMERIC EXCEPTION : [' + issexcept + ']')
-                v_chk = [v in issuenum for v in valid_spaces]
+                v_chk = [v for v in valid_spaces if v in issuenum]
                 if v_chk:
                     iss_space = v_chk[0]
                     logger.fdebug('character space denoted as : ' + iss_space)
@@ -960,9 +969,9 @@
     x = [vals[key] for key in vals if key in issnum]
 
     if x:
-        logger.fdebug('Unicode Issue present - adjusting.')
+        #logger.fdebug('Unicode Issue present - adjusting.')
         int_issnum = x[0] * 1000
-        logger.fdebug('int_issnum: ' + str(int_issnum))
+        #logger.fdebug('int_issnum: ' + str(int_issnum))
     else:
         if any(['.' in issnum, ',' in issnum]):
             #logger.fdebug('decimal detected.')
@@ -2544,6 +2553,110 @@
 
     return dstloc
 
+def torrentinfo(issueid=None, torrent_hash=None, download=False):
+    import db
+    from base64 import b16encode, b32decode
+
+    #check the status of the issueid to make sure it's in Snatched status and was grabbed via torrent.
+    if issueid:
+        myDB = db.DBConnection()
+        cinfo = myDB.selectone('SELECT a.Issue_Number, a.ComicName, a.Status, b.Hash from issues as a inner join snatched as b ON a.IssueID=b.IssueID WHERE a.IssueID=?', [issueid]).fetchone()
+        if cinfo is None:
+            logger.warn('Unable to locate IssueID of : ' + issueid)
+            #bail out now, otherwise cinfo gets dereferenced below and throws.
+            return {'snatch_status': 'ERROR'}
+
+        if cinfo['Status'] != 'Snatched' or cinfo['Hash'] is None:
+            logger.warn(cinfo['ComicName'] + ' #' + cinfo['Issue_Number'] + ' is currently in a ' + cinfo['Status'] + ' Status.')
+            return {'snatch_status': 'ERROR'}
+
+        torrent_hash = cinfo['Hash']
+
+    logger.fdebug("Working on torrent: " + torrent_hash)
+    if len(torrent_hash) == 32:
+        torrent_hash = b16encode(b32decode(torrent_hash))
+
+    if not len(torrent_hash) == 40:
+        logger.error("Torrent hash is missing, or an invalid hash value has been passed")
+        return {'snatch_status': 'ERROR'}
+    else:
+        if mylar.USE_RTORRENT:
+            import test
+            rp = test.RTorrent()
+            torrent_info = rp.main(torrent_hash, check=True)
+        elif mylar.USE_DELUGE:
+            #need to set the connect here as well....
+            import torrent.clients.deluge as delu
+            dp = delu.TorrentClient()
+            if not dp.connect(mylar.DELUGE_HOST, mylar.DELUGE_USERNAME, mylar.DELUGE_PASSWORD):
+                logger.warn('Not connected to Deluge!')
+
+            torrent_info = dp.get_torrent(torrent_hash)
+        else:
+            #no supported client enabled for completion checks.
+            return {'snatch_status': 'ERROR'}
+
+        if torrent_info is False or len(torrent_info) == 0:
+            logger.warn('torrent returned no information. Check logs - aborting auto-snatch at this time.')
+            return {'snatch_status': 'ERROR'}
+        else:
+            if mylar.USE_DELUGE:
+                torrent_status = torrent_info['is_finished']
+                torrent_files = torrent_info['num_files']
+                torrent_folder = torrent_info['save_path']
+            elif mylar.USE_RTORRENT:
+                torrent_status = torrent_info['completed']
+                torrent_files = len(torrent_info['files'])
+                torrent_folder = torrent_info['folder']
+
+            if all([torrent_status is True, download is True]):
+                if not issueid:
+                    torrent_info['snatch_status'] = 'STARTING...'
+                    #yield torrent_info
+
+                import shlex, subprocess
+                logger.info('Torrent is completed and status is currently Snatched. Attempting to auto-retrieve.')
+                with open(mylar.SNATCH_SCRIPT, 'r') as f:
+                    first_line = f.readline()
+
+                if mylar.SNATCH_SCRIPT.endswith('.sh'):
+                    shell_cmd = re.sub('#!', '', first_line)
+                    if shell_cmd == '' or shell_cmd is None:
+                        shell_cmd = '/bin/bash'
+                else:
+                    shell_cmd = sys.executable
+
+                curScriptName = shell_cmd + ' ' + str(mylar.SNATCH_SCRIPT).decode("string_escape")
+                if torrent_files > 1:
+                    downlocation = torrent_folder
+                else:
+                    downlocation = os.path.join(torrent_folder, torrent_info['name'])
+
+                downlocation = re.sub("'", "\\'", downlocation)
+
+                script_cmd = shlex.split(curScriptName, posix=False) + [downlocation]
+                logger.fdebug(u"Executing command " + str(script_cmd))
+                try:
+                    p = subprocess.Popen(script_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=mylar.PROG_DIR)
+                    out, err = p.communicate()
+                    logger.fdebug(u"Script result: " + out)
+                except OSError, e:
+                    logger.warn(u"Unable to run extra_script: " + str(e))
+                    snatch_status = 'ERROR'
+                else:
+                    if 'Access failed: No such file' in out:
+                        logger.fdebug('Not located in location it is supposed to be in - probably has been moved by some script and I got the wrong location due to timing. Trying again...')
+                        snatch_status = 'IN PROGRESS'
+                    else:
+                        snatch_status = 'COMPLETED'
+            else:
+                if download is True:
+                    snatch_status = 'IN PROGRESS'
+                else:
+                    snatch_status = 'NOT SNATCHED'
+
+    torrent_info['snatch_status'] = snatch_status
+    return torrent_info
+
 def weekly_info(week=None, year=None):
     #find the current week and save it as a reference point.
     todaydate = datetime.datetime.today()
@@ -2636,6 +2749,99 @@
         ctrlVal = {'ComicID': a['ComicID']}
         logger.info('updating latest date for : ' + a['ComicID'] + ' to ' + a['LatestDate'] + ' #' + a['LatestIssue'])
         myDB.upsert("comics", newVal, ctrlVal)
+
+def worker_main(queue):
+    while True:
+        item = queue.get(True)
+        logger.info('Now loading from queue: ' + item)
+        if item == 'exit':
+            logger.info('Cleaning up workers for shutdown')
+            break
+        snstat = torrentinfo(torrent_hash=item, download=True)
+        if snstat['snatch_status'] == 'IN PROGRESS':
+            logger.info('Still downloading in client....let us try again momentarily.')
+            time.sleep(15)
+            mylar.SNATCHED_QUEUE.put(item)
+
+def script_env(mode, vars):
+    #mode = on-snatch, pre-postprocess, post-postprocess
+    #var = dictionary containing variables to pass
+    if mode == 'on-snatch':
+        runscript = mylar.SNATCH_SCRIPT
+        if 'torrentinfo' in vars:
+            os.environ['mylar_release_hash'] = vars['torrentinfo']['hash']
+            os.environ['mylar_release_name'] = vars['torrentinfo']['name']
+            os.environ['mylar_release_folder'] = vars['torrentinfo']['folder']
+            os.environ['mylar_release_label'] = vars['torrentinfo']['label']
+            os.environ['mylar_release_filesize'] = str(vars['torrentinfo']['total_filesize'])
+            os.environ['mylar_release_start'] = str(vars['torrentinfo']['time_started'])
+            try:
+                os.environ['mylar_release_files'] = "|".join(vars['torrentinfo']['files'])
+            except TypeError:
+                os.environ['mylar_release_files'] = "|".join(json.dumps(vars['torrentinfo']['files']))
+        elif 'nzbinfo' in vars:
+            os.environ['mylar_release_id'] = vars['nzbinfo']['id']
+            os.environ['mylar_release_nzbname'] = vars['nzbinfo']['nzbname']
+            os.environ['mylar_release_link'] = vars['nzbinfo']['link']
+            os.environ['mylar_release_nzbpath'] = vars['nzbinfo']['nzbpath']
+            if 'blackhole' in vars['nzbinfo']:
+                os.environ['mylar_release_blackhole'] = vars['nzbinfo']['blackhole']
+        os.environ['mylar_release_provider'] = vars['provider']
+        if 'comicinfo' in vars:
+            os.environ['mylar_comicid'] = vars['comicinfo']['comicid']
+            os.environ['mylar_issueid'] = vars['comicinfo']['issueid']
+            os.environ['mylar_comicname'] = vars['comicinfo']['comicname']
+            os.environ['mylar_issuenumber'] = vars['comicinfo']['issuenumber']
+            try:
+                os.environ['mylar_comicvolume'] = str(vars['comicinfo']['volume'])
+            except:
+                pass
+            try:
+                os.environ['mylar_seriesyear'] = vars['comicinfo']['seriesyear']
+            except:
+                pass
+            try:
+                os.environ['mylar_issuedate'] = vars['comicinfo']['issuedate']
+            except:
+                pass
+
+        os.environ['mylar_release_pack'] = str(vars['pack'])
+        if vars['pack'] is True:
+            os.environ['mylar_release_pack_numbers'] = vars['pack_numbers']
+            os.environ['mylar_release_pack_issuelist'] = vars['pack_issuelist']
+        os.environ['mylar_method'] = vars['method']
+        os.environ['mylar_client'] = vars['clientmode']
+
+    elif mode == 'post-process':
+        #to-do
+        runscript = mylar.EXTRA_SCRIPTS
+    elif mode == 'pre-process':
+        #to-do
+        runscript = mylar.PRE_SCRIPTS
+
+    logger.fdebug('Initiating ' + mode + ' script detection.')
+    with open(runscript, 'r') as f:
+        first_line = f.readline()
+
+    if runscript.endswith('.sh'):
+        shell_cmd = re.sub('#!', '', first_line)
+        if shell_cmd == '' or shell_cmd is None:
+            shell_cmd = '/bin/bash'
+    else:
+        shell_cmd = sys.executable
+
+    curScriptName = shell_cmd + ' ' + runscript.decode("string_escape")
+    logger.fdebug("snatch script detected...enabling: " + str(curScriptName))
+
+    script_cmd = shlex.split(curScriptName)
+    logger.fdebug(u"Executing command " + str(script_cmd))
+    try:
+        subprocess.call(script_cmd, env=dict(os.environ))
+    except OSError, e:
+        logger.warn(u"Unable to run extra_script: " + str(script_cmd))
+        return False
+    else:
+        return True
 
 def file_ops(path,dst,arc=False,one_off=False):
 #  # path = source path + filename
diff --git a/mylar/importer.py b/mylar/importer.py
index 675c271d..0f9347b6 100644
--- a/mylar/importer.py
+++ b/mylar/importer.py
@@ -193,8 +193,6 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
         comicVol = oldcomversion
     if all([mylar.SETDEFAULTVOLUME is True, comicVol is None]):
         comicVol = 'v1'
-    else:
-        comicVol = None
@@ -206,6 +204,9 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
     if comlocation is None:
         comicdir = comicname_filesafe
         series = comicdir
+        if series[-1:] == '.':
+            series = series[:-1]
+
         publisher = re.sub('!', '', comic['ComicPublisher']) # thanks Boom!
         publisher = helpers.filesafe(publisher)
         year = SeriesYear
@@ -395,6 +396,7 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
                 "ComicPublisher": comic['ComicPublisher'],
#                "Description": Cdesc, #.dencode('utf-8', 'replace'),
                 "DetailURL": comic['ComicURL'],
+#                "AlternateSearch": comic['Aliases'],
#                "ComicPublished": gcdinfo['resultPublished'],
                 "ComicPublished": "Unknown",
                 "Type": comic['Type'],
diff --git a/mylar/locg.py b/mylar/locg.py
index eaeac058..c7e41947 100755
--- a/mylar/locg.py
+++ b/mylar/locg.py
@@ -84,11 +84,13 @@
             shipdate = x['shipdate']
 
             myDB = db.DBConnection()
-            #myDB.action("drop table if exists weekly")
+
             myDB.action("CREATE TABLE IF NOT EXISTS weekly (SHIPDATE, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text, ComicID text, IssueID text, CV_Last_Update text, DynamicName text, weeknumber text, year text, rowid INTEGER PRIMARY KEY)")
 
             #clear out the upcoming table here so they show the new values properly.
- #myDB.action('DELETE FROM UPCOMING WHERE IssueDate=?',[shipdate]) + if pulldate == '00000000': + logger.info('Re-creating pullist to ensure everything\'s fresh.') + myDB.action('DELETE FROM weekly WHERE weeknumber=? AND year=?',[str(weeknumber), str(year)]) for x in pull: comicid = None diff --git a/mylar/rsscheck.py b/mylar/rsscheck.py index 7a2453a2..5225c8b3 100755 --- a/mylar/rsscheck.py +++ b/mylar/rsscheck.py @@ -17,6 +17,7 @@ import mylar from mylar import db, logger, ftpsshup, helpers, auth32p, utorrent import torrent.clients.transmission as transmission import torrent.clients.deluge as deluge +import torrent.clients.qbittorrent as qbittorrent def _start_newznab_attr(self, attrsD): context = self._getContext() @@ -226,7 +227,11 @@ def torrents(pickfeed=None, seriesname=None, issue=None, feedinfo=None): for entry in feedme['entries']: #TP.SE RSS SEARCH RESULT if pickfeed == "2": - tmpenc = feedme.entries[i].enclosures[0] + try: + tmpenc = feedme.entries[i].enclosures[0] + except AttributeError: + logger.warn('Unable to retrieve results - probably just hitting it too fast...') + continue torthetpse.append({ 'site': picksite, 'title': feedme.entries[i].title, @@ -777,7 +782,7 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site): if linkit[-7:] != "torrent": filename += ".torrent" - if any([mylar.USE_UTORRENT, mylar.USE_RTORRENT, mylar.USE_TRANSMISSION,mylar.USE_DELUGE]): + if any([mylar.USE_UTORRENT, mylar.USE_RTORRENT, mylar.USE_TRANSMISSION, mylar.USE_DELUGE, mylar.USE_QBITTORRENT]): filepath = os.path.join(mylar.CACHE_DIR, filename) logger.fdebug('filename for torrent set to : ' + filepath) @@ -921,7 +926,6 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site): except ImportError: logger.warn('[EPIC FAILURE] Cannot load the requests module') return "fail" - try: scraper = cfscrape.create_scraper() if cf_cookievalue: @@ -1003,27 +1007,36 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site): logger.fdebug('[' + site + '] Saved torrent file to : ' + filepath) if mylar.USE_UTORRENT: uTC = utorrent.utorrentclient() - resp = uTC.addfile(filepath, filename) - return resp #resp = pass / fail + torrent_info = uTC.addfile(filepath, filename) + if torrent_info: + torrent_info['clientmode'] = 'utorrent' + torrent_info['link'] = linkit + return torrent_info + else: + return "fail" elif mylar.USE_RTORRENT: import test rp = test.RTorrent() + torrent_info = rp.main(filepath=filepath) - logger.info(torrent_info) if torrent_info: - return "pass" + torrent_info['clientmode'] = 'rtorrent' + torrent_info['link'] = linkit + return torrent_info else: - return "fail" - + return 'fail' elif mylar.USE_TRANSMISSION: try: rpc = transmission.TorrentClient() if not rpc.connect(mylar.TRANSMISSION_HOST, mylar.TRANSMISSION_USERNAME, mylar.TRANSMISSION_PASSWORD): return "fail" - if rpc.load_torrent(filepath): - return "pass" + torrent_info = rpc.load_torrent(filepath) + if torrent_info: + torrent_info['clientmode'] = 'transmission' + torrent_info['link'] = linkit + return torrent_info else: return "fail" except Exception as e: @@ -1035,18 +1048,42 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site): dc = deluge.TorrentClient() if not dc.connect(mylar.DELUGE_HOST, mylar.DELUGE_USERNAME, mylar.DELUGE_PASSWORD): return "fail" - logger.info('Not connected to Deluge! (rsscheck)') + logger.info('Not connected to Deluge!') else: - logger.info('Connected to Deluge! Will try to add torrent now! 
(rsscheck)') - if dc.load_torrent(filepath): - return "pass" + logger.info('Connected to Deluge! Will try to add torrent now!') + torrent_info = dc.load_torrent(filepath) + + if torrent_info: + torrent_info['clientmode'] = 'deluge' + torrent_info['link'] = linkit + return torrent_info else: return "fail" - logger.info('Unable to connect to Deluge (rsscheck)') + logger.info('Unable to connect to Deluge!') except Exception as e: logger.error(e) return "fail" + elif mylar.USE_QBITTORRENT: + try: + qc = qbittorrent.TorrentClient() + if not qc.connect(mylar.QBITTORRENT_HOST, mylar.QBITTORRENT_USERNAME, mylar.QBITTORRENT_PASSWORD): + logger.info('Not connected to qBittorrent - Make sure the Web UI is enabled and the port is correct!') + return "fail" + else: + logger.info('Connected to qBittorrent! Will try to add torrent now!') + torrent_info = qc.load_torrent(filepath) + + if torrent_info: + torrent_info['clientmode'] = 'qbittorrent' + torrent_info['link'] = linkit + return torrent_info + else: + logger.info('Unable to add torrent to qBittorrent') + return "fail" + except Exception as e: + logger.error(e) + return "fail" elif mylar.USE_WATCHDIR: if mylar.TORRENT_LOCAL: diff --git a/mylar/search.py b/mylar/search.py index 08ffe88e..b65ddb79 100755 --- a/mylar/search.py +++ b/mylar/search.py @@ -147,13 +147,14 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD #nzbpr = providercount - 1 #if nzbpr < 0: # nzbpr == 0 - findit = 'no' + findit = {} + findit['status'] = False totalproviders = providercount + torp if totalproviders == 0: logger.error('[WARNING] You have ' + str(totalproviders) + ' search providers enabled. I need at least ONE provider to work. Aborting search.') - findit = "no" + findit['status'] = False nzbprov = None return findit, nzbprov @@ -193,7 +194,7 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD if i == 1: searchmode = 'rss' #order of ops - this will be used first. 
elif i == 2: searchmode = 'api' - if findit == 'yes': + if findit['status'] is True: logger.fdebug('Found result on first run, exiting search module now.') break @@ -236,10 +237,7 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD continue if searchmode == 'rss': findit = NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, searchprov, send_prov_count, IssDateFix, IssueID, UseFuzzy, newznab_host, ComicVersion=ComicVersion, SARC=SARC, IssueArcID=IssueArcID, RSS="yes", ComicID=ComicID, issuetitle=issuetitle, unaltered_ComicName=unaltered_ComicName) - if findit == 'yes': - logger.fdebug("findit = found!") - break - else: + if findit['status'] is False: if AlternateSearch is not None and AlternateSearch != "None": chkthealt = AlternateSearch.split('##') if chkthealt == 0: @@ -249,16 +247,17 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD AS_Alternate = re.sub('##', '', calt) logger.info(u"Alternate Search pattern detected...re-adjusting to : " + str(AS_Alternate)) findit = NZB_SEARCH(AS_Alternate, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, searchprov, send_prov_count, IssDateFix, IssueID, UseFuzzy, newznab_host, ComicVersion=ComicVersion, SARC=SARC, IssueArcID=IssueArcID, RSS="yes", ComicID=ComicID, issuetitle=issuetitle, unaltered_ComicName=AS_Alternate, allow_packs=allow_packs) - if findit == 'yes': + if findit['status'] is True: break - if findit == 'yes': break + if findit['status'] is True: + break + else: + logger.fdebug("findit = found!") + break else: findit = NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, searchprov, send_prov_count, IssDateFix, IssueID, UseFuzzy, newznab_host, ComicVersion=ComicVersion, SARC=SARC, IssueArcID=IssueArcID, RSS="no", ComicID=ComicID, issuetitle=issuetitle, unaltered_ComicName=unaltered_ComicName, allow_packs=allow_packs) - if findit == 'yes': - logger.fdebug("findit = found!") - break - else: + if findit['status'] is False: if AlternateSearch is not None and AlternateSearch != "None": chkthealt = AlternateSearch.split('##') if chkthealt == 0: @@ -268,9 +267,13 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD AS_Alternate = re.sub('##', '', calt) logger.info(u"Alternate Search pattern detected...re-adjusting to : " + str(AS_Alternate)) findit = NZB_SEARCH(AS_Alternate, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, searchprov, send_prov_count, IssDateFix, IssueID, UseFuzzy, newznab_host, ComicVersion=ComicVersion, SARC=SARC, IssueArcID=IssueArcID, RSS="no", ComicID=ComicID, issuetitle=issuetitle, unaltered_ComicName=unaltered_ComicName, allow_packs=allow_packs) - if findit == 'yes': + if findit['status'] is True: break - if findit == 'yes': break + if findit['status'] is True: + break + else: + logger.fdebug("findit = found!") + break if searchprov == 'newznab': searchprov = newznab_host[0].rstrip() @@ -278,7 +281,7 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD prov_count+=1 #torprtmp+=1 #torprtmp-=1 - if findit == 'yes': + if findit['status'] is True: #check for snatched_havetotal being enabled here and adjust counts now. #IssueID being the catch/check for one-offs as they won't exist on the watchlist and error out otherwise. 
if mylar.SNATCHED_HAVETOTAL and IssueID is not None: @@ -305,8 +308,10 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, nzbprov, prov_count, IssDateFix, IssueID, UseFuzzy, newznab_host=None, ComicVersion=None, SARC=None, IssueArcID=None, RSS=None, ComicID=None, issuetitle=None, unaltered_ComicName=None, allow_packs=None): - if any([allow_packs is None, allow_packs == 'None', allow_packs == 0]) and all([mylar.ENABLE_TORRENT_SEARCH, mylar.ENABLE_32P]): + if any([allow_packs is None, allow_packs == 'None', allow_packs == 0, allow_packs == '0']) and all([mylar.ENABLE_TORRENT_SEARCH, mylar.ENABLE_32P]): allow_packs = False + elif any([allow_packs == 1, allow_packs == '1']) and all([mylar.ENABLE_TORRENT_SEARCH, mylar.ENABLE_32P]): + allow_packs = True newznab_local = False @@ -471,6 +476,8 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa #print ("------RESULTS OF SEARCH-------------------") findloop = 0 foundcomic = [] + foundc = {} + foundc['status'] = False done = False seperatealpha = "no" #---issue problem @@ -481,7 +488,7 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa #logger.fdebug('findloop: ' + str(findloop) + ' / findcount: ' + str(findcount)) comsrc = comsearch if nzbprov == 'dognzb' and not mylar.DOGNZB: - foundc = "no" + foundc['status'] = False done = True break while (cmloopit >= 1): @@ -643,16 +650,18 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa r = requests.get(findurl, params=payload, verify=verify, headers=headers) except requests.exceptions.Timeout as e: logger.warn('Timeout occured fetching data from %s: %s' % (nzbprov, e)) + foundc['status'] = False break except requests.exceptions.ConnectionError as e: logger.warn('Connection error trying to retrieve data from %s: %s' % (nzbprov, e)) + foundc['status'] = False break except requests.exceptions.RequestException as e: logger.warn('General Error fetching data from %s: %s' % (nzbprov, e)) if e.r.status_code == 503: #HTTP Error 503 logger.warn('Aborting search due to Provider unavailability') - foundc = "no" + foundc['status'] = False break try: @@ -675,11 +684,11 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa if bb['feed']['error']['code'] == '910': logger.warn('DAILY API limit reached. Disabling provider usage until 12:01am') mylar.DOGNZB = 0 - foundc = False + foundc['status'] = False done = True else: logger.warn('API Error. 
Check the error message and take action if required.') - foundc = False + foundc['status'] = False done = True break except: @@ -692,12 +701,8 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa #cmloopit == 1 done = False - foundc = "no" log2file = "" - if bb == "no results": - pass - foundc = "no" - else: + if not bb == "no results": for entry in bb['entries']: #logger.info(entry) #<--- uncomment this to see what the search result(s) are #brief match here against 32p since it returns the direct issue number @@ -1231,9 +1236,9 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa else: nzbprov = 'DEM' - if nzbprov == '32P' and allow_packs and RSS == 'no': + if all([nzbprov == '32P', allow_packs == True, RSS == 'no']): logger.fdebug('pack:' + entry['pack']) - if all([nzbprov == '32P', RSS == 'no', allow_packs]) and any([entry['pack'] == '1', entry['pack'] == '2']): + if all([nzbprov == '32P', RSS == 'no', allow_packs == True]) and any([entry['pack'] == '1', entry['pack'] == '2']): if nzbprov == '32P': if entry['pack'] == '2': logger.fdebug('[PACK-QUEUE] Diamond FreeLeech Pack detected.') @@ -1255,7 +1260,9 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa #pack support. comicinfo = [] comicinfo.append({"ComicName": ComicName, + "ComicVolume": ComicVersion, "IssueNumber": IssueNumber, + "IssueDate": IssueDate, "comyear": comyear, "pack": True, "pack_numbers": pack_issuelist, @@ -1272,6 +1279,7 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa hyphensplit = None hyphenfail = False issue_firstword = None + #issue_pack = None if unaltered_ComicName is not None: ComicName = unaltered_ComicName for m in re.finditer('[-/:]', comic_andiss): @@ -1285,6 +1293,29 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa logger.fdebug('Assuming position start is : ' + str(m.start())) hyphensplit = comic_andiss[m.start():].split() try: + #test for packs here as they will be like #1-20, #1-7.3, 1-20+Annuals + #thisentry = Fear Itself vol0 #1-7.3 + #if '#' in thisentry: + # issue_pack_startpos = thisentry.find('#') + 1 #(take first digit after dash) + # issue_pack_dash = hyphensplit[1] #this should be the end of the pack sequence + # issue_pack_endpos = thisentry.find('-', issue_pack_startpos) + # issue_pack_start = thisentry[issue_pack_startpos:issue_pack_endpos] + # if thisentry.find(' ', issue_pack_endpos) == -1: + # endit = len(thisentry) + # else: + # endit = thisentry.find(' ', issue_pack_endpos) + # issue_pack_end = thisentry[issue_pack_endpos+1:endit].strip() + # if issue_pack_start.isdigit(): + # logger.fdebug('first position prior to hyphen is digit [' + str(issue_pack_start) + ']') + # try: + # float(issue_pack_end) + # except: + # pass + # else: + # issue_pack = str(issue_pack_start) + '-' + str(issue_pack_end) + # logger.info('This should be a pack: ' + issue_pack) + + #if issue_pack is None: issue_firstword = hyphensplit[1] logger.fdebug('First word of issue stored as : ' + str(issue_firstword)) except: @@ -1380,6 +1411,7 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa else: comic_iss = tmpiss splitst = len(splitit) - 1 + logger.fdebug("adjusting from: " + str(comic_iss_b4) + " to: " + str(comic_iss)) # make sure that things like - in watchcomic are accounted for when comparing to nzb. 
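The commented-out block above sketches pack detection for result titles such as '#1-20', '#1-7.3' or '1-20+Annuals', but was left unfinished in this changeset. For reference, a minimal sketch of the same idea; the helper name and regex here are illustrative assumptions, not code from this patch:

import re

def parse_pack_range(title):
    # Illustrative only: extract a pack range such as '#1-20' or '#1-7.3'
    # from a result title like 'Fear Itself vol0 #1-7.3'.
    m = re.search(r'#?(\d+(?:\.\d+)?)\s*-\s*(\d+(?:\.\d+)?)', title)
    if m is None:
        return None
    start, end = m.group(1), m.group(2)
    if float(start) >= float(end):
        # not an ascending range - probably a hyphenated title instead
        return None
    return start + '-' + end

# parse_pack_range('Fear Itself vol0 #1-7.3') would yield '1-7.3'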
@@ -1545,17 +1577,19 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa else: cyear = comyear - comicinfo.append({"ComicName": ComicName, - "IssueNumber": IssueNumber, - "IssueDate": IssueDate, - "comyear": cyear, - "pack": False, - "pack_numbers": None, - "modcomicname": modcomicname}) + comicinfo.append({"ComicName": ComicName, + "ComicVolume": ComicVersion, + "IssueNumber": IssueNumber, + "IssueDate": IssueDate, + "comyear": cyear, + "pack": False, + "pack_numbers": None, + "pack_issuelist": None, + "modcomicname": modcomicname}) else: log2file = log2file + "issues don't match.." + "\n" downloadit = False - foundc = "no" + foundc['status'] = False if downloadit: try: @@ -1576,14 +1610,16 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa if searchresult == 'downloadchk-fail': continue elif searchresult == 'torrent-fail' or searchresult == 'nzbget-fail' or searchresult == 'sab-fail' or searchresult == 'blackhole-fail': - return - else: - #nzbid, nzbname, sent_to - nzbid = searchresult[0]['nzbid'] - nzbname = searchresult[0]['nzbname'] - sent_to = searchresult[0]['sent_to'] - alt_nzbname = searchresult[0]['alt_nzbname'] - foundc = "yes" + return foundc + + #nzbid, nzbname, sent_to + nzbid = searchresult['nzbid'] + nzbname = searchresult['nzbname'] + sent_to = searchresult['sent_to'] + alt_nzbname = searchresult['alt_nzbname'] + t_hash = searchresult['t_hash'] + foundc['info'] = searchresult + foundc['status'] = True done = True break @@ -1591,13 +1627,13 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa cmloopit == 1 #let's make sure it STOPS searching after a sucessful match. break cmloopit-=1 - if cmloopit < 1 and c_alpha is not None and seperatealpha == "no" and foundc == "no": + if cmloopit < 1 and c_alpha is not None and seperatealpha == "no" and foundc['status'] is False: logger.info("Alphanumerics detected within IssueNumber. 
Seperating from Issue # and re-trying.") cmloopit = origcmloopit seperatealpha = "yes" findloop+=1 - if foundc == "yes": + if foundc['status'] is True: if 'TPSE' in tmpprov and any([nzbprov == 'WWT', nzbprov == 'DEM']): tmpprov = re.sub('TPSE', nzbprov, tmpprov) foundcomic.append("yes") @@ -1629,7 +1665,7 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa mylar.TMP_PROV = nzbprov return foundc - if foundc == "no":# and prov_count == 0: + else: #logger.fdebug('prov_count: ' + str(prov_count)) foundcomic.append("no") if IssDateFix == "no": @@ -1724,14 +1760,11 @@ def searchforissue(issueid=None, new=False, rsscheck=None): else: AllowPacks = False mode = result['mode'] - if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.ENABLE_TPSE or mylar.ENABLE_32P) and (mylar.USE_SABNZBD or mylar.USE_NZBGET or mylar.ENABLE_TORRENTS or mylar.USE_BLACKHOLE): + if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.ENABLE_TPSE or mylar.ENABLE_32P or mylar.ENABLE_TORZNAB) and (mylar.USE_SABNZBD or mylar.USE_NZBGET or mylar.ENABLE_TORRENTS or mylar.USE_BLACKHOLE): foundNZB, prov = search_init(comic['ComicName'], result['Issue_Number'], str(ComicYear), comic['ComicYear'], Publisher, IssueDate, StoreDate, result['IssueID'], AlternateSearch, UseFuzzy, ComicVersion, SARC=None, IssueArcID=None, mode=mode, rsscheck=rsscheck, ComicID=result['ComicID'], filesafe=comic['ComicName_Filesafe'], allow_packs=AllowPacks) - if foundNZB == "yes": - #print ("found!") - updater.foundsearch(result['ComicID'], result['IssueID'], mode=mode, provider=prov) - else: - pass - #print ("not found!") + if foundNZB['status'] is True: + logger.info(foundNZB) + updater.foundsearch(result['ComicID'], result['IssueID'], mode=mode, provider=prov, hash=foundNZB['info']['t_hash']) if rsscheck: logger.info('Completed RSS Search scan') @@ -1768,14 +1801,11 @@ def searchforissue(issueid=None, new=False, rsscheck=None): AllowPacks = False foundNZB = "none" - if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.ENABLE_TPSE or mylar.ENABLE_32P) and (mylar.USE_SABNZBD or mylar.USE_NZBGET or mylar.ENABLE_TORRENTS or mylar.USE_BLACKHOLE): + if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.ENABLE_TPSE or mylar.ENABLE_32P or mylar.ENABLE_TORZNAB) and (mylar.USE_SABNZBD or mylar.USE_NZBGET or mylar.ENABLE_TORRENTS or mylar.USE_BLACKHOLE): foundNZB, prov = search_init(comic['ComicName'], result['Issue_Number'], str(IssueYear), comic['ComicYear'], Publisher, IssueDate, StoreDate, result['IssueID'], AlternateSearch, UseFuzzy, ComicVersion, SARC=None, IssueArcID=None, mode=mode, rsscheck=rsscheck, ComicID=result['ComicID'], filesafe=comic['ComicName_Filesafe'], allow_packs=AllowPacks) - if foundNZB == "yes": + if foundNZB['status'] is True: logger.fdebug("I found " + comic['ComicName'] + ' #:' + str(result['Issue_Number'])) - updater.foundsearch(ComicID=result['ComicID'], IssueID=result['IssueID'], mode=mode, provider=prov) - else: - pass - #print ("not found!") + updater.foundsearch(ComicID=result['ComicID'], IssueID=result['IssueID'], mode=mode, provider=prov, hash=foundNZB['info']['t_hash']) return def searchIssueIDList(issuelist): @@ -1810,14 +1840,10 @@ def searchIssueIDList(issuelist): else: AllowPacks = False - if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.ENABLE_32P or mylar.ENABLE_TPSE) and (mylar.USE_SABNZBD or mylar.USE_NZBGET or mylar.ENABLE_TORRENTS or 
mylar.USE_BLACKHOLE):
+        if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.ENABLE_32P or mylar.ENABLE_TPSE or mylar.ENABLE_TORZNAB) and (mylar.USE_SABNZBD or mylar.USE_NZBGET or mylar.ENABLE_TORRENTS or mylar.USE_BLACKHOLE):
             foundNZB, prov = search_init(comic['ComicName'], issue['Issue_Number'], str(IssueYear), comic['ComicYear'], Publisher, issue['IssueDate'], issue['ReleaseDate'], issue['IssueID'], AlternateSearch, UseFuzzy, ComicVersion, SARC=None, IssueArcID=None, mode=mode, ComicID=issue['ComicID'], filesafe=comic['ComicName_Filesafe'], allow_packs=AllowPacks)
-            if foundNZB == "yes":
-                #print ("found!")
-                updater.foundsearch(ComicID=issue['ComicID'], IssueID=issue['IssueID'], mode=mode, provider=prov)
-            else:
-                pass
-                #print ("not found!")
+            if foundNZB['status'] is True:
+                updater.foundsearch(ComicID=issue['ComicID'], IssueID=issue['IssueID'], mode=mode, provider=prov, hash=foundNZB['info']['t_hash'])

@@ -2163,6 +2189,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc

     #blackhole
     sent_to = None
+    t_hash = None
     if mylar.USE_BLACKHOLE and all([nzbprov != '32P', nzbprov != 'TPSE', nzbprov != 'WWT', nzbprov != 'DEM', nzbprov != 'Torznab']):
         logger.fdebug("using blackhole directory at : " + str(mylar.BLACKHOLE_DIR))
         if os.path.exists(mylar.BLACKHOLE_DIR):
@@ -2175,6 +2202,37 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
             logger.fdebug("filename saved to your blackhole as : " + nzbname)
             logger.info(u"Successfully sent .nzb to your Blackhole directory : " + os.path.join(mylar.BLACKHOLE_DIR, nzbname))
             sent_to = "your Blackhole Directory"
+
+            if mylar.ENABLE_SNATCH_SCRIPT:
+                if comicinfo[0]['pack'] is False:
+                    pnumbers = None
+                    plist = None
+                else:
+                    pnumbers = '|'.join(comicinfo[0]['pack_numbers'])
+                    plist = '|'.join(comicinfo[0]['pack_issuelist'])
+                snatch_vars = {'nzbinfo': {'link': link,
+                                           'id': nzbid,
+                                           'nzbname': nzbname,
+                                           'nzbpath': nzbpath,
+                                           'blackhole': mylar.BLACKHOLE_DIR},
+                               'comicinfo': {'comicname': ComicName,
+                                             'volume': comicinfo[0]['ComicVolume'],
+                                             'comicid': ComicID,
+                                             'issueid': IssueID,
+                                             'issuenumber': IssueNumber,
+                                             'issuedate': comicinfo[0]['IssueDate'],
+                                             'seriesyear': comyear},
+                               'pack': comicinfo[0]['pack'],
+                               'pack_numbers': pnumbers,
+                               'pack_issuelist': plist,
+                               'provider': nzbprov,
+                               'method': 'nzb',
+                               'clientmode': 'blackhole'}
+                snatchitup = helpers.script_env('on-snatch', snatch_vars)
+                if snatchitup is True:
+                    logger.info('Successfully submitted on-snatch script as requested.')
+                else:
+                    logger.info('Could not successfully submit on-snatch script as requested. Please check logs...')
     #end blackhole

     #torrents (32P & TPSE & DEM)
@@ -2182,7 +2240,6 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
         logger.fdebug("ComicName:" + ComicName)
         logger.fdebug("link:" + link)
         logger.fdebug("Torrent Provider:" + nzbprov)
-        foundc = "yes"

         rcheck = rsscheck.torsend2client(ComicName, IssueNumber, comyear, link, nzbprov)
         if rcheck == "fail":
@@ -2202,6 +2259,43 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
             else:
                 logger.error('Unable to send torrent - check logs and settings (this would be marked as a BAD torrent if Failed Handling was enabled)')
                 return "torrent-fail"
+        else:
+            #start the auto-snatch segway here (if rcheck isn't False, it contains the info of the torrent)
+            #since this is a torrent-specific snatch, the vars will be different than nzb snatches.
+            #torrent_info = {'folder', 'name', 'total_filesize', 'label', 'hash', 'files', 'time_started'}
+            t_hash = rcheck['hash']
+
+            if any([mylar.USE_RTORRENT, mylar.USE_DELUGE]) and mylar.AUTO_SNATCH:
+                mylar.SNATCHED_QUEUE.put(rcheck['hash'])
+            else:
+                if mylar.ENABLE_SNATCH_SCRIPT:
+                    if comicinfo[0]['pack'] is False:
+                        pnumbers = None
+                        plist = None
+                    else:
+                        pnumbers = '|'.join(comicinfo[0]['pack_numbers'])
+                        plist = '|'.join(comicinfo[0]['pack_issuelist'])
+                    snatch_vars = {'comicinfo': {'comicname': ComicName,
+                                                 'volume': comicinfo[0]['ComicVolume'],
+                                                 'issuenumber': IssueNumber,
+                                                 'issuedate': comicinfo[0]['IssueDate'],
+                                                 'seriesyear': comyear,
+                                                 'comicid': ComicID,
+                                                 'issueid': IssueID},
+                                   'pack': comicinfo[0]['pack'],
+                                   'pack_numbers': pnumbers,
+                                   'pack_issuelist': plist,
+                                   'provider': nzbprov,
+                                   'method': 'torrent',
+                                   'clientmode': rcheck['clientmode'],
+                                   'torrentinfo': rcheck}
+
+                    snatchitup = helpers.script_env('on-snatch', snatch_vars)
+                    if snatchitup is True:
+                        logger.info('Successfully submitted on-snatch script as requested.')
+                    else:
+                        logger.info('Could not successfully submit on-snatch script as requested. Please check logs...')
+
         if mylar.USE_WATCHDIR:
             if mylar.TORRENT_LOCAL:
                 sent_to = "your local Watch folder"
@@ -2214,7 +2308,9 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
         elif mylar.USE_TRANSMISSION:
             sent_to = "your Transmission client"
         elif mylar.USE_DELUGE:
-            sent_to = "your Deluge client"
+            sent_to = "your Deluge client"
+        elif mylar.USE_QBITTORRENT:
+            sent_to = "your qBittorrent client"
     #end torrents

     else:
@@ -2382,15 +2478,51 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
             sent_to = "SABnzbd+"
             logger.info(u"Successfully sent nzb file to SABnzbd")

+        if mylar.ENABLE_SNATCH_SCRIPT:
+            if mylar.USE_NZBGET:
+                clientmode = 'nzbget'
+            elif mylar.USE_SABNZBD:
+                clientmode = 'sabnzbd'
+
+            if comicinfo[0]['pack'] is False:
+                pnumbers = None
+                plist = None
+            else:
+                pnumbers = '|'.join(comicinfo[0]['pack_numbers'])
+                plist = '|'.join(comicinfo[0]['pack_issuelist'])
+            snatch_vars = {'nzbinfo': {'link': link,
+                                       'id': nzbid,
+                                       'nzbname': nzbname,
+                                       'nzbpath': nzbpath},
+                           'comicinfo': {'comicname': comicinfo[0]['ComicName'],
+                                         'volume': comicinfo[0]['ComicVolume'],
+                                         'comicid': ComicID,
+                                         'issueid': IssueID,
+                                         'issuenumber': IssueNumber,
+                                         'issuedate': comicinfo[0]['IssueDate'],
+                                         'seriesyear': comyear},
+                           'pack': comicinfo[0]['pack'],
+                           'pack_numbers': pnumbers,
+                           'pack_issuelist': plist,
+                           'provider': nzbprov,
+                           'method': 'nzb',
+                           'clientmode': clientmode}
+
+            snatchitup = helpers.script_env('on-snatch', snatch_vars)
+            if snatchitup is True:
+                logger.info('Successfully submitted on-snatch script as requested.')
+            else:
+                logger.info('Could not successfully submit on-snatch script as requested. Please check logs...')

     #nzbid, nzbname, sent_to
     nzbname = re.sub('.nzb', '', nzbname).strip()
-    return_val = []
-    return_val.append({"nzbid": nzbid,
-                       "nzbname": nzbname,
-                       "sent_to": sent_to,
-                       "alt_nzbname": alt_nzbname})
+    return_val = {"nzbid": nzbid,
+                  "nzbname": nzbname,
+                  "sent_to": sent_to,
+                  "alt_nzbname": alt_nzbname,
+                  "t_hash": t_hash}

     #if it's a directsend link (ie. via a retry).
     if directsend is None:
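Throughout these hunks, helpers.script_env('on-snatch', snatch_vars) hands the nested snatch_vars dict to a user-supplied script. The helper's implementation is not part of this diff; a plausible sketch, assuming it flattens the dict into environment variables before invoking mylar.SNATCH_SCRIPT (the variable-naming scheme below is an assumption, not the confirmed one):

import os
import subprocess
import mylar

def script_env(mode, snatch_vars):
    # Hypothetical sketch of helpers.script_env - not the actual implementation.
    env = os.environ.copy()

    def flatten(prefix, value):
        # nested dicts become underscore-joined names, e.g.
        # snatch_vars['comicinfo']['comicname'] -> mylar_comicinfo_comicname
        if isinstance(value, dict):
            for key, val in value.items():
                flatten(prefix + '_' + key, val)
        elif value is not None:
            env[prefix] = str(value)

    flatten('mylar', snatch_vars)
    env['mylar_event'] = mode    # 'on-snatch'
    return subprocess.call([mylar.SNATCH_SCRIPT], env=env) == 0

Under that assumption, an on-snatch script could read values such as $mylar_provider or $mylar_torrentinfo_hash, with Mylar only caring about the script's exit status.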
diff --git a/mylar/torrent/clients/deluge.py b/mylar/torrent/clients/deluge.py
index b3cd4f99..24ace0ee 100644
--- a/mylar/torrent/clients/deluge.py
+++ b/mylar/torrent/clients/deluge.py
@@ -44,14 +44,15 @@ class TorrentClient(object):
             return False

     def get_torrent(self, hash):
-        logger.debug('Getting Torrent info hash: ' + hash)
+        logger.debug('Getting Torrent info from hash: ' + hash)
         try:
             torrent_info = self.client.call('core.get_torrent_status', hash, '')
         except Exception as e:
             logger.error('Could not get torrent info for ' + hash)
             return False
         else:
-            logger.info('Getting Torrent Info!')
+            if torrent_info is None:
+                torrent_info = False
             return torrent_info

@@ -80,7 +81,7 @@ class TorrentClient(object):
             self.client.call('core.pause_torrent', hash)
         except Exception as e:
             logger.error('Torrent failed to be stopped: ' + e)
-            return false
+            return False
         else:
             logger.info('Torrent ' + hash + ' was stopped')
             return True
@@ -104,6 +105,8 @@ class TorrentClient(object):
         #Check if torrent already added
         if self.find_torrent(str.lower(hash)):
             logger.info('load_torrent: Torrent already exists!')
+            #should flag this as already loaded here so the failed-download checker
+            #doesn't re-grab copies of issues that have already been downloaded
         else:
             logger.info('Torrent not added yet, trying to add it now!')
             try:
@@ -111,8 +114,6 @@ class TorrentClient(object):
             except Exception as e:
                 logger.debug('Torrent not added')
                 return False
-            else:
-                logger.debug('TorrentID: ' + torrent_id)

         # If label enabled put label on torrent in Deluge
         if torrent_id and mylar.DELUGE_LABEL:
@@ -126,17 +127,25 @@ class TorrentClient(object):
                 self.client.call('label.set_torrent', torrent_id, mylar.DELUGE_LABEL)
             except:
                 logger.warn('Unable to set label - Either try to create it manually within Deluge, and/or ensure there are no spaces, capitalization or special characters in label')
-                return False
-            logger.info('Succesfully set label to ' + mylar.DELUGE_LABEL)
+            else:
+                logger.info('Successfully set label to ' + mylar.DELUGE_LABEL)
+
         try:
-            self.find_torrent(torrent_id)
-            logger.info('Double checking torrent was added.')
+            torrent_info = self.get_torrent(torrent_id)
+            logger.info('Double checking that the torrent was added.')
         except Exception as e:
             logger.warn('Torrent was not added! Please check logs')
             return False
         else:
             logger.info('Torrent successfully added!')
-            return True
+            return {'hash': torrent_info['hash'],
+                    'label': torrent_info['label'],
+                    'folder': torrent_info['save_path'],
+                    'total_filesize': torrent_info['total_size'],
+                    'name': torrent_info['name'],
+                    'files': torrent_info['files'],
+                    'time_started': torrent_info['active_time'],
+                    'completed': torrent_info['is_finished']}

     def delete_torrent(self, hash, removeData=False):
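With the change above, Deluge's load_torrent() now returns a metadata dict rather than True, and the new qBittorrent client below assembles the same shape, so callers in rsscheck.py and search.py can treat every client alike. A sketch of the common shape as these hunks build it; the field values below are made up for illustration:

# Normalized dict handed back by the torrent clients' load_torrent():
torrent_info = {
    'hash': 'a0b1c2d3e4f5a0b1c2d3e4f5a0b1c2d3e4f5a0b1',
    'name': 'Comic.Name.001.cbz',
    'folder': '/data/torrents/comics',
    'label': 'mylar',
    'total_filesize': 34567890,
    'files': ['Comic.Name.001.cbz'],
    'time_started': 0,
    'completed': False,     # Deluge maps this from 'is_finished'
}

# rsscheck.torsend2client() then tags on two more keys before returning:
#   torrent_info['clientmode'] = 'deluge'   (or 'qbittorrent')
#   torrent_info['link'] = linkit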
diff --git a/mylar/torrent/clients/qbittorrent.py b/mylar/torrent/clients/qbittorrent.py
new file mode 100644
index 00000000..89fc9b2e
--- /dev/null
+++ b/mylar/torrent/clients/qbittorrent.py
@@ -0,0 +1,130 @@
+import os
+import mylar
+import base64
+import time
+from mylar import logger, helpers
+
+from lib.qbittorrent import client
+
+class TorrentClient(object):
+    def __init__(self):
+        self.conn = None
+
+    def connect(self, host, username, password):
+        if self.conn is not None:
+            return self.conn
+
+        if not host:
+            return False
+
+        try:
+            logger.info(host)
+            self.client = client.Client(host)
+        except Exception as e:
+            logger.error('Could not create qBittorrent object: ' + str(e))
+            return False
+        else:
+            try:
+                self.client.login(username, password)
+            except Exception as e:
+                logger.error('Could not connect to qBittorrent: ' + host)
+            else:
+                return self.client
+
+    def find_torrent(self, hash):
+        logger.debug('Finding Torrent hash: ' + hash)
+        torrent_info = self.get_torrent(hash)
+        if torrent_info:
+            return True
+        else:
+            return False
+
+    def get_torrent(self, hash):
+        logger.debug('Getting Torrent info from hash: ' + hash)
+        try:
+            torrent_info = self.client.get_torrent(hash)
+        except Exception as e:
+            logger.error('Could not get torrent info for ' + hash)
+            return False
+        else:
+            logger.info('Successfully located information for torrent')
+            return torrent_info
+
+
+    def load_torrent(self, filepath):
+
+        logger.info('filepath to torrent file set to : ' + filepath)
+
+        if self.client._is_authenticated is True:
+            logger.info('Checking if Torrent Exists!')
+
+            hash = self.get_the_hash(filepath)
+
+            logger.debug('Torrent Hash (load_torrent): "' + hash + '"')
+            logger.debug('FileName (load_torrent): ' + str(os.path.basename(filepath)))
+
+
+            #Check if torrent already added
+            if self.find_torrent(hash):
+                logger.info('load_torrent: Torrent already exists!')
+                return False
+                #should flag this as already loaded here so the failed-download checker
+                #doesn't re-grab copies of issues that have already been downloaded
+            else:
+                logger.info('Torrent not added yet, trying to add it now!')
+                try:
+                    torrent_content = open(filepath, 'rb')
+                    tid = self.client.download_from_file(torrent_content, label=mylar.QBITTORRENT_LABEL)
+                except Exception as e:
+                    logger.debug('Torrent not added')
+                    return False
+                else:
+                    logger.debug('Successfully submitted for add. Verifying item is now on client.')
+
+                if mylar.QBITTORRENT_STARTONLOAD:
+                    logger.info('attempting to start')
+                    startit = self.client.force_start(hash)
+                    logger.info('startit returned: ' + str(startit))
+                else:
+                    logger.info('attempting to pause torrent in case it starts')
+                    try:
+                        startit = self.client.pause(hash)
+                        logger.info('startit paused: ' + str(startit))
+                    except:
+                        logger.warn('Unable to pause torrent - possibly already paused?')
+
+            try:
+                time.sleep(5)    # wait 5 seconds in case the client hasn't registered the torrent yet.
+                tinfo = self.get_torrent(hash)
+            except Exception as e:
+                logger.warn('Torrent was not added! Please check logs')
+                return False
+            else:
+                torrent_info = {}
+                logger.info('Torrent successfully added!')
+                torrent_info['hash'] = hash
+                filelist = self.client.get_torrent_files(hash)
+                if len(filelist) == 1:
+                    #single-file torrent - use the file's own name
+                    torrent_info['name'] = filelist[0]['name']
+                else:
+                    torrent_info['name'] = tinfo['save_path']
+                torrent_info['total_filesize'] = tinfo['total_size']
+                torrent_info['folder'] = tinfo['save_path']
+                torrent_info['files'] = filelist
+                torrent_info['time_started'] = tinfo['addition_date']
+                torrent_info['label'] = mylar.QBITTORRENT_LABEL
+                return torrent_info
+
+
+    def get_the_hash(self, filepath):
+        import hashlib
+        import bencode
+
+        # Open torrent file and compute the info-hash from the bencoded info dict
+        torrent_file = open(filepath, "rb")
+        metainfo = bencode.decode(torrent_file.read())
+        info = metainfo['info']
+        thehash = hashlib.sha1(bencode.encode(info)).hexdigest().upper()
+        logger.debug('Hash: ' + thehash)
+        return thehash
+
diff --git a/mylar/torrent/clients/transmission.py b/mylar/torrent/clients/transmission.py
index ee3871ce..b975f38e 100755
--- a/mylar/torrent/clients/transmission.py
+++ b/mylar/torrent/clients/transmission.py
@@ -55,6 +55,7 @@ class TorrentClient(object):
             'name': torrent.name,
             'folder': torrent.downloadDir,
             'completed': torrent.progress == 100,
+            'label': 'None',    ## labels aren't supported in Transmission - placeholder for when/if they are
             'files': torrent_files,
             'upload_total': torrent.uploadedEver,
             'download_total': torrent.downloadedEver,
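The webserve.py hunks below consume the new search-result contract: search.search_init() and NZB_SEARCH() now hand back a status dict instead of the old 'yes'/'no' strings. A minimal sketch of that contract as the surrounding hunks imply it:

# Result dict passed back up the search chain (replaces 'yes'/'no'):
foundcom = {'status': False}          # default - nothing snatched

# on a successful snatch, NZB_SEARCH() fills in:
#   foundcom['status'] = True
#   foundcom['info'] = searchresult   # searcher()'s return_val dict,
#                                     # including 't_hash' (None for nzbs)

# so callers branch like this:
if foundcom['status'] is True:
    pass    # e.g. updater.foundsearch(..., hash=foundcom['info']['t_hash'])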
diff --git a/mylar/webserve.py b/mylar/webserve.py
index fa1d2829..be086908 100644
--- a/mylar/webserve.py
+++ b/mylar/webserve.py
@@ -1218,19 +1218,44 @@ class WebInterface(object):
                     logger.error("Unable to send torrent - check logs and settings.")
                     continue
                 else:
-                    logger.info('Successfully retried issue.')
-                    break
+                    if mylar.ENABLE_SNATCH_SCRIPT:
+                        #packs not supported on retry atm - Volume and IssueDate also not included due to limitations...
+
+                        snatch_vars = {'comicinfo': {'comicname': ComicName,
+                                                     'issuenumber': IssueNumber,
+                                                     'seriesyear': ComicYear,
+                                                     'comicid': ComicID,
+                                                     'issueid': IssueID},
+                                       'pack': False,
+                                       'pack_numbers': None,
+                                       'pack_issuelist': None,
+                                       'provider': fullprov,
+                                       'method': 'torrent',
+                                       'clientmode': rcheck['clientmode'],
+                                       'torrentinfo': rcheck}
+
+                        snatchitup = helpers.script_env('on-snatch', snatch_vars)
+                        if snatchitup is True:
+                            logger.info('Successfully submitted on-snatch script as requested.')
+                        else:
+                            logger.info('Could not successfully submit on-snatch script as requested. Please check logs...')
+
+                    logger.info('Successfully retried issue.')
+                    break
             else:
-                annualize = myDB.selectone('SELECT * FROM annuals WHERE IssueID=?', [IssueID]).fetchone()
-                if annualize is None:
-                    modcomicname = ComicName
+                chkthis = myDB.selectone('SELECT a.ComicID, a.ComicName, a.ComicVersion, a.ComicYear, b.IssueID, b.IssueNumber, b.IssueDate FROM comics as a INNER JOIN annuals as b ON a.ComicID = b.ComicID WHERE IssueID=?', [IssueID]).fetchone()
+                if chkthis is None:
+                    chkthis = myDB.selectone('SELECT a.ComicID, a.ComicName, a.Volume, a.ComicYear, b.IssueID, b.IssueNumber, b.IssueDate FROM comics as a INNER JOIN issues as b ON a.ComicID = b.ComicID WHERE IssueID=?', [IssueID]).fetchone()
+                    modcomicname = chkthis['ComicName']
                 else:
-                    modcomicname = ComicName + ' Annual'
+                    modcomicname = chkthis['ComicName'] + ' Annual'

                 comicinfo = []
-                comicinfo.append({"ComicName": ComicName,
-                                  "IssueNumber": IssueNumber,
-                                  "comyear": ComicYear,
+                comicinfo.append({"ComicName": chkthis['ComicName'],
+                                  "ComicVolume": chkthis['ComicVersion'],
+                                  "IssueNumber": chkthis['IssueNumber'],
+                                  "comyear": chkthis['ComicYear'],
+                                  "IssueDate": chkthis['IssueDate'],
                                   "modcomicname": modcomicname})

                 newznabinfo = None
@@ -1343,7 +1368,7 @@ class WebInterface(object):
             newStatus = {"Status": "Wanted"}
             myDB.upsert("readinglist", newStatus, controlValueDict)
             foundcom, prov = search.search_init(ComicName=ComicName, IssueNumber=ComicIssue, ComicYear=ComicYear, SeriesYear=None, Publisher=Publisher, IssueDate=IssueDate, StoreDate=StoreDate, IssueID=None, AlternateSearch=None, UseFuzzy=None, ComicVersion=dateload['Volume'], SARC=SARC, IssueArcID=IssueArcID)
-            if foundcom == "yes":
+            if foundcom['status'] is True:
                 logger.info(u"Downloaded " + ComicName + " #" + ComicIssue + " (" + str(ComicYear) + ")")
                 controlValueDict = {"IssueArcID": IssueArcID}
                 newStatus = {"Status": "Snatched"}
@@ -1363,7 +1388,7 @@ class WebInterface(object):
         if Publisher == 'COMICS': Publisher = None
         logger.info(u"Marking " + ComicName + " " + ComicIssue + " as wanted...")
         foundcom, prov = search.search_init(ComicName=ComicName, IssueNumber=ComicIssue, ComicYear=ComicYear, SeriesYear=None, Publisher=Publisher, IssueDate=IssueDate, StoreDate=IssueDate, IssueID=None, AlternateSearch=None, UseFuzzy=None, ComicVersion=None, allow_packs=False)
-        if foundcom == "yes":
+        if foundcom['status'] is True:
             logger.info(u"Downloaded " + ComicName + " " + ComicIssue)
         raise cherrypy.HTTPRedirect("pullist")
         #return
@@ -1423,7 +1448,7 @@ class WebInterface(object):
             #UseAFuzzy = miy['UseFuzzy']
             #ComicVersion = miy['ComicVersion']
             foundcom, prov = search.search_init(ComicName, ComicIssue, ComicYear, SeriesYear, Publisher, issues['IssueDate'], storedate, IssueID, AlternateSearch, UseAFuzzy, ComicVersion, mode=mode, ComicID=ComicID, manualsearch=manualsearch, filesafe=ComicName_Filesafe, allow_packs=AllowPacks)
-            if foundcom == "yes":
+            if foundcom['status'] is True:
                 # file check to see if issue exists and update 'have' count
                 if IssueID is not None:
                     logger.info("passing to updater.")
@@ -2079,7 +2104,7 @@ class WebInterface(object):
                     annualize = 'yes'
                 else:
                     annualize = None
-                renameiss = helpers.rename_param(comicid, comicname, issue['Issue_Number'], filename, comicyear=None, issueid=None, annualize=annualize)
+                renameiss = helpers.rename_param(comicid, comicname, issue['Issue_Number'], filename, comicyear=None, issueid=issue['IssueID'], annualize=annualize)
                 nfilename = renameiss['nfilename']
                 srciss = os.path.join(comicdir, filename)
                 if filename != nfilename:
@@ -3147,7 +3172,7 
@@ class WebInterface(object): logger.fdebug(issuechk['ComicName'] + " -- #" + str(issuechk['Issue_Number'])) foundcom, prov = search.search_init(ComicName=issuechk['ComicName'], IssueNumber=issuechk['Issue_Number'], ComicYear=issuechk['IssueYear'], SeriesYear=issuechk['SeriesYear'], Publisher=None, IssueDate=None, StoreDate=issuechk['ReleaseDate'], IssueID=issuechk['IssueID'], AlternateSearch=None, UseFuzzy=None, ComicVersion=None, SARC=SARC, IssueArcID=IssueArcID) - if foundcom == "yes": + if foundcom['status'] is True: logger.fdebug('sucessfully found.') #update the status - this is necessary for torrents as they are in 'snatched' status. updater.foundsearch(s_comicid, s_issueid, mode=mode, provider=prov, SARC=SARC, IssueArcID=IssueArcID) @@ -3206,7 +3231,7 @@ class WebInterface(object): logger.fdebug("-- watched series queue.") logger.fdebug(issuechk['ComicName'] + " -- #" + str(issuechk['Issue_Number'])) foundcom, prov = search.search_init(ComicName=issuechk['ComicName'], IssueNumber=issuechk['Issue_Number'], ComicYear=issuechk['IssueYear'], SeriesYear=issuechk['SeriesYear'], Publisher=None, IssueDate=None, StoreDate=issuechk['ReleaseDate'], IssueID=issuechk['IssueID'], AlternateSearch=None, UseFuzzy=None, ComicVersion=None, SARC=SARC, IssueArcID=IssueArcID, mode=None, rsscheck=None, ComicID=None) - if foundcom == "yes": + if foundcom['status'] is True: updater.foundsearch(s_comicid, s_issueid, mode=mode, provider=prov, SARC=SARC, IssueArcID=IssueArcID) else: logger.fdebug('Watchlist issue not sucessfully found') @@ -4103,6 +4128,7 @@ class WebInterface(object): "torrent_downloader_rtorrent": helpers.radio(mylar.TORRENT_DOWNLOADER, 2), "torrent_downloader_transmission": helpers.radio(mylar.TORRENT_DOWNLOADER, 3), "torrent_downloader_deluge": helpers.radio(mylar.TORRENT_DOWNLOADER, 4), + "torrent_downloader_qbittorrent": helpers.radio(mylar.TORRENT_DOWNLOADER, 5), "utorrent_host": mylar.UTORRENT_HOST, "utorrent_username": mylar.UTORRENT_USERNAME, "utorrent_password": mylar.UTORRENT_PASSWORD, @@ -4121,10 +4147,16 @@ class WebInterface(object): "transmission_username": mylar.TRANSMISSION_USERNAME, "transmission_password": mylar.TRANSMISSION_PASSWORD, "transmission_directory": mylar.TRANSMISSION_DIRECTORY, - "deluge_host": mylar.DELUGE_HOST, + "deluge_host": mylar.DELUGE_HOST, "deluge_username": mylar.DELUGE_USERNAME, "deluge_password": mylar.DELUGE_PASSWORD, "deluge_label": mylar.DELUGE_LABEL, + "qbittorrent_host": mylar.QBITTORRENT_HOST, + "qbittorrent_username": mylar.QBITTORRENT_USERNAME, + "qbittorrent_password": mylar.QBITTORRENT_PASSWORD, + "qbittorrent_label": mylar.QBITTORRENT_LABEL, + "qbittorrent_folder": mylar.QBITTORRENT_FOLDER, + "qbittorrent_startonload": mylar.QBITTORRENT_STARTONLOAD, "blackhole_dir": mylar.BLACKHOLE_DIR, "usenet_retention": mylar.USENET_RETENTION, "use_nzbsu": helpers.checked(mylar.NZBSU), @@ -4230,6 +4262,10 @@ class WebInterface(object): "telegram_userid": mylar.TELEGRAM_USERID, "enable_extra_scripts": helpers.checked(mylar.ENABLE_EXTRA_SCRIPTS), "extra_scripts": mylar.EXTRA_SCRIPTS, + "enable_snatch_script": helpers.checked(mylar.ENABLE_SNATCH_SCRIPT), + "snatch_script": mylar.SNATCH_SCRIPT, + "enable_pre_scripts": helpers.checked(mylar.ENABLE_PRE_SCRIPTS), + "pre_scripts": mylar.PRE_SCRIPTS, "post_processing": helpers.checked(mylar.POST_PROCESSING), "file_opts": mylar.FILE_OPTS, "enable_meta": helpers.checked(mylar.ENABLE_META), @@ -4251,8 +4287,6 @@ class WebInterface(object): "config_file": mylar.CONFIG_FILE, "branch_history": 'None', # 
"branch_history" : br_hist, - "enable_pre_scripts": helpers.checked(mylar.ENABLE_PRE_SCRIPTS), - "pre_scripts": mylar.PRE_SCRIPTS, "log_dir": mylar.LOG_DIR } return serve_template(templatename="config.html", title="Settings", config=config, comicinfo=comicinfo) @@ -4469,7 +4503,8 @@ class WebInterface(object): prowl_enabled=0, prowl_onsnatch=0, prowl_keys=None, prowl_priority=None, nma_enabled=0, nma_apikey=None, nma_priority=0, nma_onsnatch=0, pushover_enabled=0, pushover_onsnatch=0, pushover_apikey=None, pushover_userkey=None, pushover_priority=None, boxcar_enabled=0, boxcar_onsnatch=0, boxcar_token=None, pushbullet_enabled=0, pushbullet_apikey=None, pushbullet_deviceid=None, pushbullet_onsnatch=0, telegram_enabled=0, telegram_token=None, telegram_userid=None, telegram_onsnatch=0, torrent_downloader=0, torrent_local=0, torrent_seedbox=0, utorrent_host=None, utorrent_username=None, utorrent_password=None, utorrent_label=None, rtorrent_host=None, rtorrent_ssl=0, rtorrent_verify=0, rtorrent_authentication='basic', rtorrent_rpc_url=None, rtorrent_username=None, rtorrent_password=None, rtorrent_directory=None, rtorrent_label=None, rtorrent_startonload=0, transmission_host=None, transmission_username=None, transmission_password=None, transmission_directory=None,deluge_host=None, deluge_username=None, deluge_password=None, deluge_label=None, - preferred_quality=0, move_files=0, rename_files=0, add_to_csv=1, cvinfo=0, lowercase_filenames=0, folder_format=None, file_format=None, enable_extra_scripts=0, extra_scripts=None, enable_pre_scripts=0, pre_scripts=None, post_processing=0, file_opts=None, syno_fix=0, search_delay=None, enforce_perms=0, chmod_dir=0777, chmod_file=0660, chowner=None, chgroup=None, + qbittorrent_host=None, qbittorrent_username=None, qbittorrent_password=None, qbittorrent_label=None, qbittorrent_folder=None, qbittorrent_startonload=0, + preferred_quality=0, move_files=0, rename_files=0, add_to_csv=1, cvinfo=0, lowercase_filenames=0, folder_format=None, file_format=None, enable_extra_scripts=0, extra_scripts=None, enable_snatch_script=0, snatch_script=None, enable_pre_scripts=0, pre_scripts=None, post_processing=0, file_opts=None, syno_fix=0, search_delay=None, enforce_perms=0, chmod_dir=0777, chmod_file=0660, chowner=None, chgroup=None, tsab=None, destination_dir=None, create_folders=1, replace_spaces=0, replace_char=None, use_minsize=0, minsize=None, use_maxsize=0, maxsize=None, autowant_all=0, autowant_upcoming=0, comic_cover_local=0, zero_level=0, zero_level_n=None, interface=None, dupeconstraint=None, ddump=0, duplicate_dump=None, **kwargs): mylar.COMICVINE_API = comicvine_api mylar.HTTP_HOST = http_host @@ -4564,6 +4599,12 @@ class WebInterface(object): mylar.DELUGE_USERNAME = deluge_username mylar.DELUGE_PASSWORD = deluge_password mylar.DELUGE_LABEL = deluge_label + mylar.QBITTORRENT_HOST = qbittorrent_host + mylar.QBITTORRENT_USERNAME = qbittorrent_username + mylar.QBITTORRENT_PASSWORD = qbittorrent_password + mylar.QBITTORRENT_LABEL = qbittorrent_label + mylar.QBITTORRENT_FOLDER = qbittorrent_folder + mylar.QBITTORRENT_STARTONLOAD = int(qbittorrent_startonload) mylar.ENABLE_TORRENT_SEARCH = int(enable_torrent_search) mylar.ENABLE_TPSE = int(enable_tpse) mylar.ENABLE_32P = int(enable_32p) @@ -4627,10 +4668,12 @@ class WebInterface(object): mylar.DUPLICATE_DUMP = duplicate_dump mylar.ENABLE_EXTRA_SCRIPTS = enable_extra_scripts mylar.EXTRA_SCRIPTS = extra_scripts + mylar.ENABLE_SNATCH_SCRIPT = enable_snatch_script + mylar.SNATCH_SCRIPT = snatch_script 
mylar.ENABLE_PRE_SCRIPTS = enable_pre_scripts + mylar.PRE_SCRIPTS = pre_scripts mylar.POST_PROCESSING = post_processing mylar.FILE_OPTS = file_opts - mylar.PRE_SCRIPTS = pre_scripts mylar.ENABLE_META = enable_meta mylar.CBR2CBZ_ONLY = cbr2cbz_only mylar.CMTAGGER_PATH = cmtagger_path diff --git a/post-processing/get.conf.sample b/post-processing/get.conf.sample new file mode 100644 index 00000000..5e152096 --- /dev/null +++ b/post-processing/get.conf.sample @@ -0,0 +1,7 @@ +[rtorrent] +HOST=hostname/ip +PORT=port# +USER=username +PASSWD=password +LOCALCD=local directory where torrents are to be saved to +KEYFILE=if you use a keyfile for ssh access, enter full path here to public key file (password above can be left blank then) diff --git a/post-processing/getlftp.sh b/post-processing/getlftp.sh new file mode 100755 index 00000000..e7cfead7 --- /dev/null +++ b/post-processing/getlftp.sh @@ -0,0 +1,49 @@ +#!/bin/bash + +##-- start configuration + +#this needs to be edited to the full path to the get.conf file containing the torrent client information +configfile='' + +#this is the temporary location where it will make sure the conf is safe for use (by default this should be fine if left alone) +configfile_secured='/tmp/get.conf' + +##-- end configuration + + +## --- don't change stuff below here ---- + +# check if the file contains something we don't want +if egrep -q -v '^#|^[^ ]*=[^;]*' "$configfile"; then + # echo "Config file is unclean, cleaning it..." >&2 + # filter the original to a new file + egrep '^#|^[^ ]*=[^;&]*' "$configfile" > "$configfile_secured" + configfile="$configfile_secured" +fi + +# now source it, either the original or the filtered variant +source "$configfile" + +cd $LOCALCD +filename="$1" + +if [[ "${filename##*.}" == "cbr" || "${filename##*.}" == "cbz" ]]; then + LCMD="pget -n 6 '$1'" +else + LCMD="mirror -P 2 --use-pget-n=6 '$1'" +fi + +if [[ -z $KEYFILE ]]; then + PARAM="$USER $PASSWD" +else + PARAM="$USER $KEYFILE" +fi + +lftp<