mirror of https://github.com/evilhero/mylar

- Unrolling RSSDB loops

Cybr Wizrd 2015-03-21 10:09:34 -04:00 committed by evilhero
parent ff0421d1ee
commit 88c3a3b158
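
The diff below unrolls the provider handling in nzbs(): instead of building a nzbprovider list with a counter and walking it with `while (nzbpr >= 0)`, each provider (experimental, nzb.su, dognzb, and every enabled newznab host) now gets its own straight-line block. Every block appends the same {"site": ..., "feed": ...} shape to feedthis, so one consumer loop can normalize all entries afterwards. A minimal sketch of that pattern, assuming an illustrative feed_urls dict and function name (Mylar itself reads module-level config flags such as mylar.NZBSU and mylar.DOGNZB rather than a dict):

    import feedparser

    def collect_feeds(feed_urls, user_agent):
        # One explicit block per provider instead of indexing into a
        # provider list with a counter; each block ends with the same
        # append shape, so the consumer loop stays provider-agnostic.
        feedthis = []
        if 'experimental' in feed_urls:
            feedme = feedparser.parse(feed_urls['experimental'], agent=user_agent)
            feedthis.append({'site': 'experimental', 'feed': feedme})
        if 'nzb.su' in feed_urls:
            feedme = feedparser.parse(feed_urls['nzb.su'], agent=user_agent)
            feedthis.append({'site': 'nzb.su', 'feed': feedme})
        return feedthis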

View file

@@ -260,187 +260,119 @@ def torrents(pickfeed=None,seriesname=None,issue=None):
         return katinfo
     return
 
 def nzbs(provider=None, forcerss=False):
-    nzbprovider = []
-    nzbp = 0
-    if mylar.NZBSU == 1:
-        nzbprovider.append('nzb.su')
-        nzbp+=1
-    if mylar.DOGNZB == 1:
-        nzbprovider.append('dognzb')
-        nzbp+=1
-    # --------
-    #  Xperimental
-    if mylar.EXPERIMENTAL == 1:
-        nzbprovider.append('experimental')
-        nzbp+=1
-    newznabs = 0
     newznab_hosts = []
     if mylar.NEWZNAB == 1:
         for newznab_host in mylar.EXTRA_NEWZNABS:
             if newznab_host[4] == '1' or newznab_host[4] == 1:
                 newznab_hosts.append(newznab_host)
-                nzbprovider.append('newznab')
-                newznabs+=1
-                logger.fdebug('newznab name:' + str(newznab_host[0]) + ' - enabled: ' + str(newznab_host[4]))
+                logger.fdebug('newznab name: ' + str(newznab_host[0]) + ' - enabled: ' + str(newznab_host[4]))
 
-    # --------
-    providercount = int(nzbp + newznabs)
-    logger.fdebug('there are : ' + str(providercount) + ' nzb RSS search providers you have enabled.')
-    nzbpr = providercount - 1
-    if nzbpr < 0:
-        nzbpr == 0
+    providercount = len(newznab_hosts) + int(mylar.EXPERIMENTAL == 1) + int(mylar.NZBSU == 1) + int(mylar.DOGNZB == 1)
+    logger.fdebug('[RSS] You have enabled ' + str(providercount) + ' NZB RSS search providers.')
 
     feeddata = []
     feedthis = []
-    ft = 0
-    totNum = 0
-    nonexp = "no"
     user_agent = str(mylar.USER_AGENT)
 
-    while (nzbpr >= 0 ):
-        if nzbprovider[nzbpr] == 'experimental':
-            max_entries = "50"
-            if forcerss:
-                max_entries = "250"
-            feed = feedparser.parse("http://nzbindex.nl/rss/alt.binaries.comics.dcp/?sort=agedesc&max=" + max_entries + "&more=1", agent=user_agent)
-            totNum = len(feed.entries)
-            site = 'experimental'
-            keyPair = {}
-            regList = []
-            entries = []
-            mres = {}
-            countUp = 0
-            i = 0
-            for entry in feed['entries']:
-                tmpsz = feed.entries[i].enclosures[0]
-                feeddata.append({
-                    'Site': site,
-                    'Title': feed.entries[i].title,
-                    'Link': tmpsz['url'], #feed.entries[i].link,
-                    'Pubdate': feed.entries[i].updated,
-                    'Size': tmpsz['length']
-                })
-                # print ("Site:" + str(site))
-                # print ("Title:" + str(feed.entries[i].title))
-                # print ("Link:" + str(feed.entries[i].link))
-                # print ("Pubdate:" + str(feed.entries[i].updated))
-                # print ("Size:" + str(tmpsz['length']))
-                i+=1
-            logger.info(str(i) + ' results from Experimental feed indexed.')
-            nzbpr-=1
-        else:
-            if nzbprovider[nzbpr] == 'newznab':
-                for newznab_host in newznab_hosts:
-                    if newznab_host[3] is None:
-                        newznabuid = '1'
-                        newznabcat = '7030'
-                    else:
-                        if '#' not in newznab_host[3]:
-                            newznabuid = newznab_host[3]
-                            newznabcat = '7030'
-                        else:
-                            newzst = newznab_host[3].find('#')
-                            newznabuid = newznab_host[3][:newzst]
-                            newznabcat = newznab_host[3][newzst+1:]
-                    # 11-21-2014: added &num=100 to return 100 results (or maximum) - unsure of cross-reliablity
-                    feed = newznab_host[1].rstrip() + '/rss?t=' + str(newznabcat) + '&dl=1&i=' + str(newznabuid) + '&num=100&&r=' + newznab_host[2].rstrip()
-                    feedme = feedparser.parse(feed, agent=user_agent)
-                    site = newznab_host[0].rstrip()
-                    feedthis.append({"feed": feedme,
-                                     "site": site})
-                    totNum+=len(feedme.entries)
-                    ft+=1
-                nonexp = "yes"
-                nzbpr-=1
-            elif nzbprovider[nzbpr] == 'nzb.su':
-                if mylar.NZBSU_UID is None:
-                    mylar.NZBSU_UID = '1'
-                if forcerss:
-                    num_items = "&num=100"
-                else:
-                    num_items = "" # default is 25
-                feed = 'http://api.nzb.su/rss?t=7030&dl=1&i=' + mylar.NZBSU_UID + '&r=' + mylar.NZBSU_APIKEY + num_items
-                feedme = feedparser.parse(feed, agent=user_agent)
-                site = nzbprovider[nzbpr]
-                feedthis.append({"feed": feedme,
-                                 "site": site })
-                totNum+=len(feedme.entries)
-                ft+=1
-                nonexp = "yes"
-                nzbpr-=1
-            elif nzbprovider[nzbpr] == 'dognzb':
-                if mylar.DOGNZB_UID is None:
-                    mylar.DOGNZB_UID = '1'
-                if forcerss:
-                    num_items = "&num=100"
-                else:
-                    num_items = "" # default is 25
-                feed = 'https://dognzb.cr/rss.cfm?r=' + mylar.DOGNZB_APIKEY + '&t=7030' + num_items
-                feedme = feedparser.parse(feed, agent=user_agent)
-                site = nzbprovider[nzbpr]
-                ft+=1
-                nonexp = "yes"
-                feedthis.append({"feed": feedme,
-                                 "site": site })
-                totNum+=len(feedme.entries)
-                nzbpr-=1
-
-    i = 0
-    if nonexp == "yes":
-        #print str(ft) + " sites checked. There are " + str(totNum) + " entries to be updated."
-        #print feedme
-        for ft in feedthis:
-            sitei = 0
-            site = ft['site']
-            logger.fdebug(str(site) + " now being updated...")
-            #logger.fdebug('feedthis:' + str(ft))
-            for entry in ft['feed'].entries:
-                if site == 'dognzb':
-                    #because the rss of dog doesn't carry the enclosure item, we'll use the newznab size value
-                    tmpsz = 0
-                    if 'newznab' in entry and 'size' in entry['newznab']:
-                        tmpsz = entry['newznab']['size']
-                    feeddata.append({
-                        'Site': site,
-                        'Title': entry.title, #ft['feed'].entries[i].title,
-                        'Link': entry.link, #ft['feed'].entries[i].link,
-                        'Pubdate': entry.updated, #ft['feed'].entries[i].updated,
-                        'Size': tmpsz
-                    })
-                else:
-                    #this should work for all newznabs (nzb.su included)
-                    #only difference is the size of the file between this and above (which is probably the same)
-                    tmpsz = entry.enclosures[0] #ft['feed'].entries[i].enclosures[0]
-                    feeddata.append({
-                        'Site': site,
-                        'Title': entry.title, #ft['feed'].entries[i].title,
-                        'Link': entry.link, #ft['feed'].entries[i].link,
-                        'Pubdate': entry.updated, #ft['feed'].entries[i].updated,
-                        'Size': tmpsz['length']
-                    })
-                #logger.fdebug("Site: " + str(feeddata[i]['Site']))
-                #logger.fdebug("Title: " + str(feeddata[i]['Title']))
-                #logger.fdebug("Link: " + str(feeddata[i]['Link']))
-                #logger.fdebug("pubdate: " + str(feeddata[i]['Pubdate']))
-                #logger.fdebug("size: " + str(feeddata[i]['Size']))
-                sitei+=1
-            logger.info('[' + str(site) + '] ' + str(sitei) + ' entries indexed.')
-            i+=sitei
+    if mylar.EXPERIMENTAL == 1:
+        site = 'experimental'
+        logger.fdebug('[RSS] Fetching items from ' + site)
+        if forcerss:
+            max_entries = "250"
+        else:
+            max_entries = "50"
+        feed = 'http://nzbindex.nl/rss/alt.binaries.comics.dcp/?sort=agedesc&max=' + max_entries + '&more=1'
+        feedme = feedparser.parse(feed, agent=user_agent)
+        feedthis.append({"site": site,
+                         "feed": feedme})
+
+    if mylar.NZBSU == 1:
+        if mylar.NZBSU_UID is None:
+            mylar.NZBSU_UID = '1'
+        if forcerss:
+            num_items = "&num=100"
+        else:
+            num_items = "" # default is 25
+        feed = 'http://api.nzb.su/rss?t=7030&dl=1&i=' + mylar.NZBSU_UID + '&r=' + mylar.NZBSU_APIKEY + num_items
+        feedme = feedparser.parse(feed, agent=user_agent)
+        feedthis.append({"site": 'nzb.su',
+                         "feed": feedme})
+
+    if mylar.DOGNZB == 1:
+        if forcerss:
+            num_items = "&num=100"
+        else:
+            num_items = "" # default is 25
+        feed = 'https://dognzb.cr/rss.cfm?r=' + mylar.DOGNZB_APIKEY + '&t=7030' + num_items
+        feedme = feedparser.parse(feed, agent=user_agent)
+        feedthis.append({"site": 'dognzb',
+                         "feed": feedme})
+
+    for newznab_host in newznab_hosts:
+        site = newznab_host[0].rstrip()
+        if newznab_host[3] is None:
+            newznabuid = '1'
+            newznabcat = '7030'
+        else:
+            if '#' not in newznab_host[3]:
+                newznabuid = newznab_host[3]
+                newznabcat = '7030'
+            else:
+                newzst = newznab_host[3].find('#')
+                newznabuid = newznab_host[3][:newzst]
+                newznabcat = newznab_host[3][newzst + 1:]
+        # 11-21-2014: added &num=100 to return 100 results (or maximum) - unsure of cross-reliablity
+        feed = newznab_host[1].rstrip() + '/rss?t=' + str(newznabcat) + '&dl=1&i=' + str(newznabuid) + '&num=100&&r=' + newznab_host[2].rstrip()
+        feedme = feedparser.parse(feed, agent=user_agent)
+        feedthis.append({"site": site,
+                         "feed": feedme})
+
+    for ft in feedthis:
+        site = ft['site']
+        logger.fdebug('[RSS] (' + site + ') now being updated...')
+
+        for entry in ft['feed'].entries:
+
+            # Size
+            if site == 'dognzb':
+                #because the rss of dog doesn't carry the enclosure item, we'll use the newznab size value
+                size = 0
+                if 'newznab' in entry and 'size' in entry['newznab']:
+                    size = entry['newznab']['size']
+            else:
+                # experimental, nzb.su, newznab
+                size = entry.enclosures[0]['length']
+
+            # Link
+            if site == 'experimental':
+                link = entry.enclosures[0]['url']
+            else:
+                # dognzb, nzb.su, newznab
+                link = entry.link
+
+            feeddata.append({'Site': site,
+                             'Title': entry.title,
+                             'Link': link,
+                             'Pubdate': entry.updated,
+                             'Size': size})
+
+            # logger.fdebug(" Site: " + site)
+            # logger.fdebug(" Title: " + entry.title)
+            # logger.fdebug(" Link: " + link)
+            # logger.fdebug(" pubdate: " + entry.updated)
+            # logger.fdebug(" size: " + size)
+
+        logger.info('[RSS] (' + site + ') ' + str(len(ft['feed'].entries)) + ' entries indexed.')
+
+    i = len(feeddata)
+
     if i > 0:
         logger.info('[RSS] ' + str(i) + ' entries have been indexed and are now going to be stored for caching.')
-        rssdbupdate(feeddata,i,'usenet')
+        rssdbupdate(feeddata, i, 'usenet')
     return
 
 def rssdbupdate(feeddata,i,type):
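
For reference, the per-entry normalization that the new consumer loop performs can be read in isolation. This sketch mirrors the logic added above; entry.enclosures, entry.link, entry.title, and entry.updated are standard feedparser entry attributes, and the fallback to entry['newznab']['size'] covers dognzb's enclosure-less RSS:

    def normalize_entries(feedthis):
        feeddata = []
        for ft in feedthis:
            site = ft['site']
            for entry in ft['feed'].entries:
                if site == 'dognzb':
                    # dognzb's RSS carries no enclosure item, so fall back
                    # to the newznab:size attribute when it is present.
                    size = 0
                    if 'newznab' in entry and 'size' in entry['newznab']:
                        size = entry['newznab']['size']
                else:
                    # experimental, nzb.su, newznab all expose an enclosure
                    size = entry.enclosures[0]['length']
                if site == 'experimental':
                    # experimental's usable link lives in the enclosure URL
                    link = entry.enclosures[0]['url']
                else:
                    link = entry.link
                feeddata.append({'Site': site,
                                 'Title': entry.title,
                                 'Link': link,
                                 'Pubdate': entry.updated,
                                 'Size': size})
        return feeddata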