Mirror of https://github.com/evilhero/mylar (synced 2025-03-15 08:18:44 +00:00)

Commit a036e554d2: Fixed some incorrect lib references
Parent: 8e77667cf4
21 changed files with 46 additions and 37 deletions
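Most of the hunks below drop the vendored lib. prefix so the top-level packages (requests, feedparser, simplejson, js2py, cfscrape) are imported directly; a couple go the other way and point back at the copy bundled under lib/. As a hedged aside (not something this commit does), a module that has to work with either layout could fall back between the two:

# Illustrative sketch only, not part of this commit: tolerate both the
# system-installed package and the copy bundled with Mylar under lib/.
try:
    import requests                      # prefer a system-wide install
except ImportError:
    import lib.requests as requests      # fall back to the vendored copy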
@@ -3,9 +3,9 @@ import logging
 import random
 import re
 import os
-from lib.requests.sessions import Session
-import lib.js2py as js2py
-from lib.js2py import eval_js as eval_js
+from requests.sessions import Session
+import js2py
+from js2py import eval_js

 try:
     from urlparse import urlparse
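The hunk above swaps the bundled lib.js2py for the top-level js2py package, whose eval_js helper evaluates a JavaScript snippet and returns the value of its last expression. A minimal, self-contained illustration (the expression is made up, not taken from this code):

from js2py import eval_js

# eval_js runs the JavaScript and returns the result of the final expression.
result = eval_js('function add(a, b) { return a + b; } add(20, 22)')
print(result)  # 42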
@@ -1,4 +1,4 @@
-from lib.js2py.base import *
+from js2py.base import *

 @Js
 def console():
@@ -1,7 +1,7 @@
-from lib.js2py.base import *
+from js2py.base import *
 import inspect
 try:
-    from lib.js2py.translators.translator import translate_js
+    from js2py.translators.translator import translate_js
 except:
     pass
@@ -1,4 +1,4 @@
-from lib.js2py.base import *
+from js2py.base import *

 RADIX_CHARS = {'1': 1, '0': 0, '3': 3, '2': 2, '5': 5, '4': 4, '7': 7, '6': 6, '9': 9, '8': 8, 'a': 10, 'c': 12,
                'b': 11, 'e': 14, 'd': 13, 'g': 16, 'f': 15, 'i': 18, 'h': 17, 'k': 20, 'j': 19, 'm': 22, 'l': 21,
@@ -18,7 +18,7 @@ from __future__ import division
 import mylar
 from mylar import logger, db, updater, helpers, parseit, findcomicfeed, notifiers, rsscheck

-import lib.feedparser as feedparser
+import feedparser as feedparser
 import urllib
 import os, errno
 import string
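Several files switch from lib.feedparser to the top-level feedparser package. As a hedged illustration of how that module is typically consumed (the URL is hypothetical, not one of Mylar's feeds):

import feedparser

# parse() fetches and parses the feed; the result's .entries list holds the items.
feed = feedparser.parse('https://example.com/rss')
for entry in feed.entries:
    print('%s - %s' % (entry.title, entry.link))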
@@ -18,7 +18,7 @@

 import mylar
 from mylar import db, mb, importer, search, PostProcessor, versioncheck, logger
-import lib.simplejson as simplejson
+import simplejson as simplejson
 import cherrypy
 import os
 import urllib2
@@ -3,7 +3,7 @@ import re
 import time
 import datetime
 import os
-import requests as requests
+import requests
 from bs4 import BeautifulSoup
 from cookielib import LWPCookieJar
@@ -25,7 +25,7 @@ import mylar
 import platform
 from bs4 import BeautifulSoup as Soup
 import httplib
-import lib.requests as requests
+import requests

 def patch_http_response_read(func):
     def inner(*args):
@@ -2,8 +2,7 @@

 import os
 import sys
-import lib.feedparser as feedparser
-#import feedparser
+import feedparser
 import re
 import logger
 import mylar
@@ -28,7 +28,7 @@ import shutil
 import imghdr
 import sqlite3
 import cherrypy
-import lib.requests as requests
+import requests
 import gzip
 from StringIO import StringIO
@@ -13,7 +13,7 @@
 # You should have received a copy of the GNU General Public License
 # along with Mylar.  If not, see <http://www.gnu.org/licenses/>.

-import lib.requests as requests
+import requests
 from bs4 import BeautifulSoup, UnicodeDammit
 import datetime
 import re
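This hunk, like several others, keeps the bs4 import while fixing the requests import. A small, hedged illustration of the BeautifulSoup usage pattern (the markup is invented):

from bs4 import BeautifulSoup

# Parse a fragment with the stdlib parser and walk the matching tags.
soup = BeautifulSoup('<table><tr><td>Issue #1</td><td>Issue #2</td></tr></table>', 'html.parser')
for cell in soup.find_all('td'):
    print(cell.get_text())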
@@ -21,7 +21,7 @@ import threading
 import platform
 import urllib, urllib2
 from xml.dom.minidom import parseString, Element
-import lib.requests as requests
+import requests

 import mylar
 from mylar import logger, db, cv
@@ -12,7 +12,7 @@ import unicodedata
 from decimal import Decimal
 from HTMLParser import HTMLParseError
 from time import strptime
-import lib.requests as requests
+import requests

 import mylar
 from mylar import logger
@@ -24,9 +24,9 @@ from urllib import urlencode
 import os.path
 import subprocess
 import time
-import lib.simplejson as simplejson
+import simplejson
 import json
-import lib.requests as requests
+import requests

 # This was obviously all taken from headphones with great appreciation :)
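The hunk above ends up importing both simplejson and the stdlib json. Since simplejson mirrors the stdlib API, a common pattern is to prefer it and fall back; this is a hedged illustration only, not necessarily what Mylar does here:

# Sketch only: simplejson is API-compatible with the stdlib json module.
try:
    import simplejson as json
except ImportError:
    import json

payload = json.dumps({'mode': 'queue'})   # illustrative data
print(json.loads(payload))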
@@ -2,9 +2,9 @@

 import os, sys
 import re
-import lib.feedparser as feedparser
-import lib.requests as requests
-import lib.cfscrape as cfscrape
+import feedparser
+import requests
+import cfscrape
 import urlparse
 import ftpsshup
 import datetime
@@ -14,7 +14,7 @@ from StringIO import StringIO

 import mylar
 from mylar import db, logger, ftpsshup, helpers, auth32p, utorrent
-import mylar.torrent.clients.transmission as transmission
+import torrent.clients.transmission as transmission


 def _start_newznab_attr(self, attrsD):
@@ -146,8 +146,18 @@ def torrents(pickfeed=None, seriesname=None, issue=None, feedinfo=None):
     payload = None

     try:
-        scraper = cfscrape.create_scraper()
-        r = scraper.get(feed, verify=verify)#requests.get(feed, params=payload, verify=verify)
+        cf_cookievalue = None
+        if pickfeed == '2':
+            scraper = cfscrape.create_scraper()
+            cf_cookievalue, cf_user_agent = scraper.get_tokens(feed)
+            headers = {'Accept-encoding': 'gzip',
+                       'User-Agent': cf_user_agent}
+            logger.info(cf_cookievalue)
+
+        if cf_cookievalue:
+            r = scraper.get(feed, verify=verify, cookies=cf_cookievalue, headers=headers)
+        else:
+            r = scraper.get(feed, verify=verify)#requests.get(feed, params=payload, verify=verify)
     except Exception, e:
         logger.warn('Error fetching RSS Feed Data from %s: %s' % (picksite, e))
         return
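The new block above asks cfscrape for Cloudflare clearance tokens before fetching the feed. A hedged, standalone sketch of that token flow (the URL is illustrative):

import cfscrape

scraper = cfscrape.create_scraper()   # a requests.Session that can solve the Cloudflare challenge
cookies, user_agent = scraper.get_tokens('https://example.com/feed')
# The clearance cookie is only honoured when sent with the same User-Agent that earned it.
response = scraper.get('https://example.com/feed',
                       cookies=cookies,
                       headers={'User-Agent': user_agent})
print(response.status_code)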
@@ -828,7 +838,7 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site):

     try:
         scraper = cfscrape.create_scraper()
-        cf_cookievalue, cf_user_agent = cfscrape.get_tokens(url)
+        cf_cookievalue, cf_user_agent = scraper.get_tokens(url)
         headers = {'Accept-encoding': 'gzip',
                    'User-Agent': cf_user_agent}
@@ -1,7 +1,7 @@
 import mylar
 from mylar import logger

-import lib.requests as requests
+import requests
 from bs4 import BeautifulSoup, UnicodeDammit
 import re
 import datetime
@@ -18,8 +18,8 @@ from __future__ import division
 import mylar
 from mylar import logger, db, updater, helpers, parseit, findcomicfeed, notifiers, rsscheck, Failed, filechecker, auth32p

-import lib.feedparser as feedparser
-import lib.requests as requests
+import feedparser
+import requests
 import urllib
 import os, errno
 import string
@@ -8,7 +8,7 @@ from base64 import b16encode, b32decode

 from torrent.helpers.variable import link, symlink, is_rarfile

-import lib.requests as requests
+import requests
 #from lib.unrar2 import RarFile

 import torrent.clients.rtorrent as TorClient
@@ -64,7 +64,7 @@ class RTorrent(object):

     def get_the_hash(self, filepath):
         import hashlib, StringIO
-        import lib.rtorrent.lib.bencode as bencode
+        import rtorrent.lib.bencode as bencode

         # Open torrent file
         torrent_file = open(filepath, "rb")
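get_the_hash() above derives a torrent's info hash from the .torrent file via the rtorrent bencode helper. A hedged sketch of the usual computation, assuming any bencode module that exposes bdecode()/bencode() (the function name here is illustrative, not Mylar's):

import hashlib
import bencode   # assumption: a bencode implementation with bdecode()/bencode()

def info_hash(filepath):
    with open(filepath, 'rb') as torrent_file:
        metainfo = bencode.bdecode(torrent_file.read())
    # A torrent is identified by the SHA-1 of its bencoded 'info' dictionary.
    return hashlib.sha1(bencode.bencode(metainfo['info'])).hexdigest()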
@@ -1,6 +1,6 @@
 import os

-from rtorrent import RTorrent
+from lib.rtorrent import RTorrent

 import mylar
 from mylar import logger, helpers
@@ -18,7 +18,7 @@ import platform, subprocess, re, os, urllib2, tarfile
 import mylar
 from mylar import logger, version

-import lib.requests as requests
+import requests
 import re

 #user = "evilhero"
@@ -39,7 +39,7 @@ import mylar

 from mylar import logger, db, importer, mb, search, filechecker, helpers, updater, parseit, weeklypull, PostProcessor, librarysync, moveit, Failed, readinglist, notifiers #,rsscheck

-import lib.simplejson as simplejson
+import simplejson as simplejson

 from operator import itemgetter
@@ -4419,7 +4419,7 @@ class WebInterface(object):
         logger.fdebug('sab_password: ' + str(sab_password))
         logger.fdebug('sab_apikey: ' + str(sab_apikey))
         if mylar.USE_SABNZBD:
-            import lib.requests as requests
+            import requests
             from xml.dom.minidom import parseString, Element

             #if user/pass given, we can auto-fill the API ;)
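The comment above notes that the API key can be auto-filled when a username/password is supplied; the imports this hunk fixes (requests plus xml.dom.minidom) are what that lookup uses. A heavily hedged sketch of the general fetch-and-parse shape, with a hypothetical URL, parameters, and tag name rather than SABnzbd's real endpoint:

import requests
from xml.dom.minidom import parseString

# Hypothetical endpoint and parameters, for illustration only.
r = requests.get('http://localhost:8080/api', params={'mode': 'config', 'output': 'xml'})
dom = parseString(r.content)
keys = dom.getElementsByTagName('apikey')   # hypothetical tag name
if keys and keys[0].firstChild:
    print(keys[0].firstChild.data)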
@@ -4450,7 +4450,7 @@ class WebInterface(object):
             if requests.exceptions.SSLError:
                 logger.warn('Cannot verify ssl certificate. Attempting to authenticate with no ssl-certificate verification.')
                 try:
-                    from lib.requests.packages.urllib3 import disable_warnings
+                    from requests.packages.urllib3 import disable_warnings
                     disable_warnings()
                 except:
                     logger.warn('Unable to disable https warnings. Expect some spam if using https nzb providers.')
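This hunk re-points disable_warnings at the urllib3 copy vendored inside the system requests package, used when falling back to an unverified HTTPS request. A hedged sketch of that retry-without-verification pattern (the endpoint is made up):

import requests

url = 'https://sab.example.local:9090/api'   # hypothetical endpoint
try:
    r = requests.get(url, verify=True)
except requests.exceptions.SSLError:
    from requests.packages.urllib3 import disable_warnings
    disable_warnings()                    # silence InsecureRequestWarning spam
    r = requests.get(url, verify=False)   # last resort: skip certificate checks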
@@ -4842,7 +4842,7 @@ class WebInterface(object):

     def get_the_hash(self, filepath):
         import hashlib, StringIO
-        import lib.rtorrent.lib.bencode as bencode
+        import rtorrent.lib.bencode as bencode

         # Open torrent file
         torrent_file = open(os.path.join('/home/hero/mylar/cache', filepath), "rb")