mirror of https://github.com/evilhero/mylar
Fixed some incorrect lib references
This commit is contained in:
parent 8e77667cf4
commit a036e554d2
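Every hunk below follows one pattern: imports that hard-coded the bundled `lib.` package prefix (e.g. `import lib.requests as requests`) become top-level imports (`import requests`), plus one fix in the opposite direction for the bundled rtorrent client. As a minimal sketch of why the top-level names still resolve, assuming the application inserts its lib/ directory into sys.path at startup (the shim and paths here are illustrative, not necessarily mylar's exact startup code):

import os
import sys

# Assumed startup shim: make the vendored lib/ tree importable so that a
# plain "import requests" finds the bundled copy of each dependency.
base_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(1, os.path.join(base_dir, 'lib'))

import requests  # resolves to lib/requests because lib/ precedes site-packages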
@@ -3,9 +3,9 @@ import logging
 import random
 import re
 import os
-from lib.requests.sessions import Session
-import lib.js2py as js2py
-from lib.js2py import eval_js as eval_js
+from requests.sessions import Session
+import js2py
+from js2py import eval_js
 
 try:
     from urlparse import urlparse
@@ -1,4 +1,4 @@
-from lib.js2py.base import *
+from js2py.base import *
 
 @Js
 def console():
@@ -1,7 +1,7 @@
-from lib.js2py.base import *
+from js2py.base import *
 import inspect
 try:
-    from lib.js2py.translators.translator import translate_js
+    from js2py.translators.translator import translate_js
 except:
     pass
 
@@ -1,4 +1,4 @@
-from lib.js2py.base import *
+from js2py.base import *
 
 RADIX_CHARS = {'1': 1, '0': 0, '3': 3, '2': 2, '5': 5, '4': 4, '7': 7, '6': 6, '9': 9, '8': 8, 'a': 10, 'c': 12,
                'b': 11, 'e': 14, 'd': 13, 'g': 16, 'f': 15, 'i': 18, 'h': 17, 'k': 20, 'j': 19, 'm': 22, 'l': 21,
@@ -18,7 +18,7 @@ from __future__ import division
 import mylar
 from mylar import logger, db, updater, helpers, parseit, findcomicfeed, notifiers, rsscheck
 
-import lib.feedparser as feedparser
+import feedparser as feedparser
 import urllib
 import os, errno
 import string
@@ -18,7 +18,7 @@
 
 import mylar
 from mylar import db, mb, importer, search, PostProcessor, versioncheck, logger
-import lib.simplejson as simplejson
+import simplejson as simplejson
 import cherrypy
 import os
 import urllib2
@@ -3,7 +3,7 @@ import re
 import time
 import datetime
 import os
-import requests as requests
+import requests
 from bs4 import BeautifulSoup
 from cookielib import LWPCookieJar
 
@@ -25,7 +25,7 @@ import mylar
 import platform
 from bs4 import BeautifulSoup as Soup
 import httplib
-import lib.requests as requests
+import requests
 
 def patch_http_response_read(func):
     def inner(*args):
@@ -2,8 +2,7 @@
 
 import os
 import sys
-import lib.feedparser as feedparser
-#import feedparser
+import feedparser
 import re
 import logger
 import mylar
@@ -28,7 +28,7 @@ import shutil
 import imghdr
 import sqlite3
 import cherrypy
-import lib.requests as requests
+import requests
 import gzip
 from StringIO import StringIO
 
@@ -13,7 +13,7 @@
 # You should have received a copy of the GNU General Public License
 # along with Mylar. If not, see <http://www.gnu.org/licenses/>.
 
-import lib.requests as requests
+import requests
 from bs4 import BeautifulSoup, UnicodeDammit
 import datetime
 import re
@@ -21,7 +21,7 @@ import threading
 import platform
 import urllib, urllib2
 from xml.dom.minidom import parseString, Element
-import lib.requests as requests
+import requests
 
 import mylar
 from mylar import logger, db, cv
@@ -12,7 +12,7 @@ import unicodedata
 from decimal import Decimal
 from HTMLParser import HTMLParseError
 from time import strptime
-import lib.requests as requests
+import requests
 
 import mylar
 from mylar import logger
@@ -24,9 +24,9 @@ from urllib import urlencode
 import os.path
 import subprocess
 import time
-import lib.simplejson as simplejson
+import simplejson
 import json
-import lib.requests as requests
+import requests
 
 # This was obviously all taken from headphones with great appreciation :)
 
@@ -2,9 +2,9 @@
 
 import os, sys
 import re
-import lib.feedparser as feedparser
-import lib.requests as requests
-import lib.cfscrape as cfscrape
+import feedparser
+import requests
+import cfscrape
 import urlparse
 import ftpsshup
 import datetime
@@ -14,7 +14,7 @@ from StringIO import StringIO
 
 import mylar
 from mylar import db, logger, ftpsshup, helpers, auth32p, utorrent
-import mylar.torrent.clients.transmission as transmission
+import torrent.clients.transmission as transmission
 
 
 def _start_newznab_attr(self, attrsD):
@@ -146,8 +146,18 @@ def torrents(pickfeed=None, seriesname=None, issue=None, feedinfo=None):
         payload = None
 
         try:
-            scraper = cfscrape.create_scraper()
-            r = scraper.get(feed, verify=verify)#requests.get(feed, params=payload, verify=verify)
+            cf_cookievalue = None
+            if pickfeed == '2':
+                scraper = cfscrape.create_scraper()
+                cf_cookievalue, cf_user_agent = scraper.get_tokens(feed)
+                headers = {'Accept-encoding': 'gzip',
+                           'User-Agent': cf_user_agent}
+                logger.info(cf_cookievalue)
+
+            if cf_cookievalue:
+                r = scraper.get(feed, verify=verify, cookies=cf_cookievalue, headers=headers)
+            else:
+                r = scraper.get(feed, verify=verify)#requests.get(feed, params=payload, verify=verify)
         except Exception, e:
             logger.warn('Error fetching RSS Feed Data from %s: %s' % (picksite, e))
             return
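The new block above follows cfscrape's documented token workflow: solve the Cloudflare challenge once, then replay the clearance cookies together with the exact User-Agent that earned them. A standalone sketch of that pattern (the feed URL is illustrative):

import cfscrape
import requests

feed = 'https://example-tracker.invalid/rss'

# get_tokens() solves the Cloudflare JS challenge and returns the clearance
# cookies plus the User-Agent string that was used to obtain them.
cookies, user_agent = cfscrape.get_tokens(feed)

# Cloudflare only honours the cookies when they arrive with the same
# User-Agent, so it must accompany every follow-up request.
r = requests.get(feed, cookies=cookies, headers={'User-Agent': user_agent})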
@@ -828,7 +838,7 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site):
 
     try:
         scraper = cfscrape.create_scraper()
-        cf_cookievalue, cf_user_agent = cfscrape.get_tokens(url)
+        cf_cookievalue, cf_user_agent = scraper.get_tokens(url)
         headers = {'Accept-encoding': 'gzip',
                    'User-Agent': cf_user_agent}
 
@@ -1,7 +1,7 @@
 import mylar
 from mylar import logger
 
-import lib.requests as requests
+import requests
 from bs4 import BeautifulSoup, UnicodeDammit
 import re
 import datetime
@@ -18,8 +18,8 @@ from __future__ import division
 import mylar
 from mylar import logger, db, updater, helpers, parseit, findcomicfeed, notifiers, rsscheck, Failed, filechecker, auth32p
 
-import lib.feedparser as feedparser
-import lib.requests as requests
+import feedparser
+import requests
 import urllib
 import os, errno
 import string
@@ -8,7 +8,7 @@ from base64 import b16encode, b32decode
 
 from torrent.helpers.variable import link, symlink, is_rarfile
 
-import lib.requests as requests
+import requests
 #from lib.unrar2 import RarFile
 
 import torrent.clients.rtorrent as TorClient
@@ -64,7 +64,7 @@ class RTorrent(object):
 
     def get_the_hash(self, filepath):
         import hashlib, StringIO
-        import lib.rtorrent.lib.bencode as bencode
+        import rtorrent.lib.bencode as bencode
 
         # Open torrent file
         torrent_file = open(filepath, "rb")
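The get_the_hash() routine touched here computes a torrent's info-hash, which per the BitTorrent spec is the SHA-1 digest of the bencoded 'info' dictionary, not of the .torrent file as a whole. A sketch of the idea, assuming the bundled bencode module exposes decode()/encode() (that API is an assumption):

import hashlib

import rtorrent.lib.bencode as bencode  # assumed decode()/encode() API

def info_hash(filepath):
    with open(filepath, 'rb') as torrent_file:
        metainfo = bencode.decode(torrent_file.read())
    # Hash only the re-bencoded 'info' dict; this digest is the ID that
    # trackers and clients use to identify the torrent.
    return hashlib.sha1(bencode.encode(metainfo['info'])).hexdigest().upper()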
@@ -1,6 +1,6 @@
 import os
 
-from rtorrent import RTorrent
+from lib.rtorrent import RTorrent
 
 import mylar
 from mylar import logger, helpers
@@ -18,7 +18,7 @@ import platform, subprocess, re, os, urllib2, tarfile
 import mylar
 from mylar import logger, version
 
-import lib.requests as requests
+import requests
 import re
 
 #user = "evilhero"
@@ -39,7 +39,7 @@ import mylar
 
 from mylar import logger, db, importer, mb, search, filechecker, helpers, updater, parseit, weeklypull, PostProcessor, librarysync, moveit, Failed, readinglist, notifiers #,rsscheck
 
-import lib.simplejson as simplejson
+import simplejson as simplejson
 
 from operator import itemgetter
 
@@ -4419,7 +4419,7 @@ class WebInterface(object):
         logger.fdebug('sab_password: ' + str(sab_password))
         logger.fdebug('sab_apikey: ' + str(sab_apikey))
         if mylar.USE_SABNZBD:
-            import lib.requests as requests
+            import requests
             from xml.dom.minidom import parseString, Element
 
             #if user/pass given, we can auto-fill the API ;)
@@ -4450,7 +4450,7 @@ class WebInterface(object):
             if requests.exceptions.SSLError:
                 logger.warn('Cannot verify ssl certificate. Attempting to authenticate with no ssl-certificate verification.')
                 try:
-                    from lib.requests.packages.urllib3 import disable_warnings
+                    from requests.packages.urllib3 import disable_warnings
                     disable_warnings()
                 except:
                     logger.warn('Unable to disable https warnings. Expect some spam if using https nzb providers.')
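This fallback retries the SABnzbd call with certificate verification disabled; importing disable_warnings from requests' vendored urllib3 (present in requests releases of this era) keeps the log free of the InsecureRequestWarning every unverified request would otherwise emit. A minimal sketch with an illustrative host:

import requests
from requests.packages.urllib3 import disable_warnings

disable_warnings()  # silence InsecureRequestWarning globally

# verify=False skips certificate validation entirely, so this should stay
# a last resort for hosts whose certificate cannot be verified.
r = requests.get('https://sabnzbd.local:8080/sabnzbd/api', verify=False)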
@@ -4842,7 +4842,7 @@ class WebInterface(object):
 
     def get_the_hash(self, filepath):
         import hashlib, StringIO
-        import lib.rtorrent.lib.bencode as bencode
+        import rtorrent.lib.bencode as bencode
 
         # Open torrent file
         torrent_file = open(os.path.join('/home/hero/mylar/cache', filepath), "rb")