mirror of
https://github.com/evilhero/mylar
synced 2024-12-25 09:11:46 +00:00
Replace normal requests with cfscrape requests
This commit is contained in:
parent
3e7339f066
commit
cbcb12516e
1 changed file with 19 additions and 9 deletions
@@ -6,6 +6,7 @@ import os
 import requests
 from bs4 import BeautifulSoup
 from cookielib import LWPCookieJar
+import cfscrape
 from operator import itemgetter
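A quick aside on what the new import provides: cfscrape.create_scraper() returns a CloudflareScraper, a subclass of requests.Session, so it can be dropped in anywhere a plain Session or module-level requests call was used before, and it transparently solves Cloudflare's JavaScript challenge (it typically needs a JavaScript runtime such as Node.js available to do so). A minimal stand-alone sketch, with a placeholder URL:

import cfscrape

# create_scraper() returns a requests.Session subclass, so .get()/.post()
# and their keyword arguments behave exactly like plain requests.
scraper = cfscrape.create_scraper()

# Any Cloudflare challenge is solved before this request goes out, and the
# clearance cookie is kept on the scraper for later calls.
resp = scraper.get('https://example-cloudflare-protected.example/')  # placeholder URL
print(resp.status_code)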
@@ -71,7 +72,9 @@ class info32p(object)
         requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
 
         # post to the login form
-        r = s.post(self.url, verify=verify)
+        scraper = cfscrape.create_scraper()
+
+        r = scraper.post(self.url, verify=verify)
 
         #need a way to find response code (200=OK), but returns 200 for everything even failed signons (returns a blank page)
         #logger.info('[32P] response: ' + str(r.content))
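Because the scraper is a requests.Session subclass, the login POST keeps its existing keyword arguments; only the object making the call changes. A minimal sketch of the same pattern outside the class (url and verify stand in for self.url and the method's verify flag):

import cfscrape

def post_login_form(url, verify=True):
    # Same shape as the old s.post(...) call, just routed through cfscrape so
    # any Cloudflare challenge in front of the login page is handled first.
    scraper = cfscrape.create_scraper()
    r = scraper.post(url, verify=verify)
    # As the comment in the diff notes, 32P returns 200 even for a failed
    # signon, so the status code alone does not prove the login worked.
    return r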
@@ -229,7 +232,8 @@ class info32p(object)
         url = 'https://32pag.es/torrents.php' #?action=serieslist&filter=' + series_search #&filter=F
         params = {'action': 'serieslist', 'filter': series_search}
         time.sleep(1) #just to make sure we don't hammer, 1s pause.
-        t = s.get(url, params=params, verify=True)
+        scraper = cfscrape.create_scraper()
+        t = scraper.get(url, params=params, verify=True)
         soup = BeautifulSoup(t.content, "html.parser")
         results = soup.find_all("a", {"class":"object-qtip"},{"data-type":"torrentgroup"})
 
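The series search is the same idea: the GET now goes through a scraper, while the params dict, verify flag and BeautifulSoup parsing are untouched. A stand-alone sketch of that lookup (the two attribute filters are combined into a single attrs dict here, which is the usual find_all idiom):

import time
import cfscrape
from bs4 import BeautifulSoup

def search_series(series_search):
    url = 'https://32pag.es/torrents.php'
    params = {'action': 'serieslist', 'filter': series_search}
    time.sleep(1)  # keep the 1s pause so the site isn't hammered
    scraper = cfscrape.create_scraper()
    t = scraper.get(url, params=params, verify=True)
    soup = BeautifulSoup(t.content, "html.parser")
    # anchor tags that 32P renders for each matching torrent group
    return soup.find_all("a", attrs={"class": "object-qtip", "data-type": "torrentgroup"})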
@@ -302,9 +306,9 @@ class info32p(object)
 
         logger.info('payload: ' + str(payload))
         url = 'https://32pag.es/ajax.php'
+        scraper = cfscrape.create_scraper()
         time.sleep(1) #just to make sure we don't hammer, 1s pause.
-        d = s.get(url, params=payload, verify=True)
+        d = scraper.get(url, params=payload, verify=True)
 
         try:
             searchResults = d.json()
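The ajax.php lookup follows the same pattern: the payload dict is sent as query parameters and the reply is decoded with .json(), exactly as a plain requests response would be. A compact sketch (payload contents are illustrative only):

import time
import cfscrape

def query_32p_ajax(payload):
    url = 'https://32pag.es/ajax.php'
    scraper = cfscrape.create_scraper()
    time.sleep(1)  # same 1s pause as above, to avoid hammering the site
    d = scraper.get(url, params=payload, verify=True)
    try:
        # raises ValueError if the body is not JSON (e.g. an HTML error page)
        return d.json()
    except ValueError:
        return None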
@@ -447,28 +451,34 @@ class info32p(object)
         u = 'https://32pag.es/login.php?ajax=1'
 
         try:
-            r = self.ses.post(u, data=postdata, timeout=60, allow_redirects=False)
+            scraper = cfscrape.create_scraper(self.ses)
+            r = scraper.post(u, data=postdata, timeout=60, allow_redirects=True)
+            logger.debug(self.module + ' Status Code: ' + str(r.status_code))
         except Exception as e:
-            logger.error("Got an exception when trying to login to %s POST", u)
+            logger.error(self.module + " Got an exception when trying to login to %s POST", u)
             self.error = {'status':'exception', 'message':'Exception when trying to login'}
             return False
 
         if r.status_code != 200:
-            logger.warn("Got bad status code from login POST: %d\n%s\n%s", r.status_code, r.text, r.headers)
+            logger.warn(self.module + " Got bad status code from login POST: %d\n%s\n%s", r.status_code, r.text, r.headers)
+            logger.debug(self.module + " Request URL: %s \n Content: %s \n History: %s \n Json: %s", r.url ,r.text, r.history, d)
             self.error = {'status':'Bad Status code', 'message':(r.status_code, r.text, r.headers)}
             return False
 
         try:
+            logger.debug(self.module + ' Trying to analyze login JSON reply from 32P: %s', r.text)
             d = r.json()
         except:
-            logger.error("The data returned by the login page was not JSON: %s", r.text)
+            logger.debug(self.module + " Request URL: %s \n Content: %s \n History: %s \n Json: %s", r.url ,r.text, r.history, d)
+            logger.error(self.module + " The data returned by the login page was not JSON: %s", r.text)
             self.error = {'status':'JSON not returned', 'message':r.text}
             return False
 
         if d['status'] == 'success':
             return True
 
-        logger.error("Got unexpected status result: %s", d)
+        logger.error(self.module + " Got unexpected status result: %s", d)
+        logger.debug(self.module + " Request URL: %s \n Content: %s \n History: %s \n Json: %s", r.url ,r.text, r.history, d)
         self.error = d
         return False
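The biggest change is in the ajax login: the scraper is built from the existing session, cfscrape.create_scraper(self.ses), so cookies and headers already collected (including the LWPCookieJar-backed cookies) carry over, and allow_redirects is switched to True so the Cloudflare challenge round-trip can complete before the login reply is inspected. A self-contained sketch of that flow under those assumptions; the function name, postdata keys and module prefix are stand-ins, not mylar's actual API:

import cfscrape

def ajax_login(ses, postdata, module='[32P]'):
    # Build the Cloudflare-aware scraper on top of an existing requests.Session
    # so its cookies and headers are reused for the login POST.
    u = 'https://32pag.es/login.php?ajax=1'
    scraper = cfscrape.create_scraper(ses)
    try:
        r = scraper.post(u, data=postdata, timeout=60, allow_redirects=True)
    except Exception as e:
        print(module + ' exception during login POST: %s' % e)
        return False

    if r.status_code != 200:
        print(module + ' bad status code from login POST: %d' % r.status_code)
        return False

    try:
        d = r.json()
    except ValueError:
        print(module + ' login reply was not JSON: %s' % r.text)
        return False

    # per the diff, 32P answers {'status': 'success', ...} on a good login
    return d.get('status') == 'success'

Usage would be something like ajax_login(existing_session, {'username': '...', 'password': '...'}), where existing_session is the requests.Session the class already keeps.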