Mirror of https://github.com/evilhero/mylar

Replace normal requests with cfscrape requests

Undeadhunter 2016-12-28 10:38:29 +01:00 committed by evilhero
parent 3e7339f066
commit cbcb12516e
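
For context: cfscrape (the cloudflare-scrape library) subclasses requests.Session and transparently solves Cloudflare's JavaScript anti-bot challenge before returning the response, which is why the plain requests/session calls in the diff below are swapped for scraper calls. A minimal usage sketch, assuming cfscrape is installed; the URL is a placeholder, not one used by this commit:

# Minimal cfscrape usage sketch (URL below is a placeholder).
import cfscrape

# create_scraper() returns a requests.Session subclass that solves the
# Cloudflare challenge (when present) before handing back the response.
scraper = cfscrape.create_scraper()
r = scraper.get('https://example.com/protected-page')
print(r.status_code)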


@@ -6,6 +6,7 @@ import os
 import requests
 from bs4 import BeautifulSoup
 from cookielib import LWPCookieJar
+import cfscrape
 from operator import itemgetter
@@ -71,7 +72,9 @@ class info32p(object):
         requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
         # post to the login form
-        r = s.post(self.url, verify=verify)
+        scraper = cfscrape.create_scraper()
+        r = scraper.post(self.url, verify=verify)
         #need a way to find response code (200=OK), but returns 200 for everything even failed signons (returns a blank page)
         #logger.info('[32P] response: ' + str(r.content))
@@ -229,7 +232,8 @@ class info32p(object):
         url = 'https://32pag.es/torrents.php' #?action=serieslist&filter=' + series_search #&filter=F
         params = {'action': 'serieslist', 'filter': series_search}
         time.sleep(1) #just to make sure we don't hammer, 1s pause.
-        t = s.get(url, params=params, verify=True)
+        scraper = cfscrape.create_scraper()
+        t = scraper.get(url, params=params, verify=True)
         soup = BeautifulSoup(t.content, "html.parser")
         results = soup.find_all("a", {"class":"object-qtip"},{"data-type":"torrentgroup"})
@@ -302,9 +306,9 @@ class info32p(object):
             logger.info('payload: ' + str(payload))
             url = 'https://32pag.es/ajax.php'
+            scraper = cfscrape.create_scraper()
             time.sleep(1) #just to make sure we don't hammer, 1s pause.
-            d = s.get(url, params=payload, verify=True)
+            d = scraper.get(url, params=payload, verify=True)
             try:
                 searchResults = d.json()
@@ -447,28 +451,34 @@ class info32p(object):
         u = 'https://32pag.es/login.php?ajax=1'
         try:
-            r = self.ses.post(u, data=postdata, timeout=60, allow_redirects=False)
+            scraper = cfscrape.create_scraper(self.ses)
+            r = scraper.post(u, data=postdata, timeout=60, allow_redirects=True)
+            logger.debug(self.module + ' Status Code: ' + str(r.status_code))
         except Exception as e:
-            logger.error("Got an exception when trying to login to %s POST", u)
+            logger.error(self.module + " Got an exception when trying to login to %s POST", u)
             self.error = {'status':'exception', 'message':'Exception when trying to login'}
             return False
         if r.status_code != 200:
-            logger.warn("Got bad status code from login POST: %d\n%s\n%s", r.status_code, r.text, r.headers)
+            logger.warn(self.module + " Got bad status code from login POST: %d\n%s\n%s", r.status_code, r.text, r.headers)
+            logger.debug(self.module + " Request URL: %s \n Content: %s \n History: %s \n Json: %s", r.url ,r.text, r.history, d)
             self.error = {'status':'Bad Status code', 'message':(r.status_code, r.text, r.headers)}
             return False
         try:
+            logger.debug(self.module + ' Trying to analyze login JSON reply from 32P: %s', r.text)
             d = r.json()
         except:
-            logger.error("The data returned by the login page was not JSON: %s", r.text)
+            logger.debug(self.module + " Request URL: %s \n Content: %s \n History: %s \n Json: %s", r.url ,r.text, r.history, d)
+            logger.error(self.module + " The data returned by the login page was not JSON: %s", r.text)
             self.error = {'status':'JSON not returned', 'message':r.text}
             return False
         if d['status'] == 'success':
             return True
-        logger.error("Got unexpected status result: %s", d)
+        logger.error(self.module + " Got unexpected status result: %s", d)
+        logger.debug(self.module + " Request URL: %s \n Content: %s \n History: %s \n Json: %s", r.url ,r.text, r.history, d)
         self.error = d
         return False
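
Note on the last hunk: cfscrape.create_scraper() accepts an existing requests session as its first argument and copies its cookies, headers and auth onto the new scraper, so passing self.ses keeps the 32P login state while adding the Cloudflare handling. A rough sketch of that pattern, with the session setup and URL being illustrative only:

import requests
import cfscrape

ses = requests.Session()
ses.headers.update({'User-Agent': 'Mozilla/5.0'})  # placeholder header

# Wrapping the existing session: cookies/headers already set on `ses`
# (e.g. from an earlier login) carry over to the Cloudflare-aware scraper.
scraper = cfscrape.create_scraper(ses)
r = scraper.post('https://example.com/login.php?ajax=1', data={'user': 'name'}, timeout=60)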