Mirror of https://github.com/evilhero/mylar (synced 2025-03-09 21:33:42 +00:00)
FIX for ComicVine (inadvertently?) switching to the beta API site. A Configuration/Web Interface option is now available to turn the fix on/off (on = beta API site, off = normal API site).
This commit is contained in:
parent 4613e215e9
commit 188463470b
4 changed files with 26 additions and 6 deletions
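The whole change boils down to one idea: pick the ComicVine base URL once from a config toggle, then build every request URL from that shared base. The snippet below is a minimal, standalone sketch of that pattern, not the actual mylar code in the diff; the function names (select_cv_url, build_volume_url), the sample comic id, and the API-key placeholder are illustrative assumptions.

# Minimal sketch of the toggle this commit introduces (names are illustrative).
# mylar itself stores the toggle as 'cvapifix' in config.ini and sets CVURL once in initialize().

def select_cv_url(cvapifix):
    """Return the ComicVine API base URL for the given toggle value."""
    if cvapifix:
        # on = beta site, used while CV's normal API endpoint is misbehaving
        return 'http://beta.comicvine.com/api/'
    # off = normal API site
    return 'http://api.comicvine.com/'

def build_volume_url(cv_base, comicid, api_key):
    """Build a volume-lookup URL the same way pulldetails() does: base + path + query."""
    return (cv_base + 'volume/' + str(comicid) +
            '/?api_key=' + str(api_key) +
            '&format=xml&field_list=name,count_of_issues,start_year')

if __name__ == '__main__':
    for toggle in (1, 0):
        base = select_cv_url(toggle)
        print(str(toggle) + ' -> ' + build_volume_url(base, 2127, 'YOUR_CV_API_KEY'))

Keeping the base URL in a single module-level setting (mylar.CVURL) is what lets the later hunks in pulldetails() and pullsearch() swap endpoints without touching each query's field_list or filters.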
@@ -125,6 +125,11 @@
                     <input type="checkbox" name="syno_fix" value="1" ${config['syno_fix']} /> <label>Synology Fix</label>
                     <br/><small>*Use this if experiencing parsing problems*</small>
                 </div>
+                <div class="row checkbox">
+                    <input type="checkbox" name="cvapifix" value="1" ${config['cvapifix']} /> <label>Comicvine URL Fix</label>
+                    <br/><small>*Use this if CV's URL has changed*</small>
+                </div>
+

             </fieldset>
         </td>
@@ -206,6 +206,8 @@ CV_ONETIMER = 1
 GRABBAG_DIR = None
 HIGHCOUNT = 0
 READ2FILENAME = 0
+CVAPIFIX = 0
+CVURL = None

 def CheckSection(sec):
     """ Check if INI section exists, if not create it """
@@ -266,7 +268,7 @@ def initialize():
         NEWZNAB, NEWZNAB_HOST, NEWZNAB_APIKEY, NEWZNAB_ENABLED, EXTRA_NEWZNABS,\
         RAW, RAW_PROVIDER, RAW_USERNAME, RAW_PASSWORD, RAW_GROUPS, EXPERIMENTAL, \
         PROWL_ENABLED, PROWL_PRIORITY, PROWL_KEYS, PROWL_ONSNATCH, NMA_ENABLED, NMA_APIKEY, NMA_PRIORITY, NMA_ONSNATCH, PUSHOVER_ENABLED, PUSHOVER_PRIORITY, PUSHOVER_APIKEY, PUSHOVER_USERKEY, PUSHOVER_ONSNATCH, \
-        PREFERRED_QUALITY, MOVE_FILES, RENAME_FILES, LOWERCASE_FILENAMES, USE_MINSIZE, MINSIZE, USE_MAXSIZE, MAXSIZE, CORRECT_METADATA, FOLDER_FORMAT, FILE_FORMAT, REPLACE_CHAR, REPLACE_SPACES, ADD_TO_CSV, CVINFO, LOG_LEVEL, POST_PROCESSING, SEARCH_DELAY, GRABBAG_DIR, READ2FILENAME, \
+        PREFERRED_QUALITY, MOVE_FILES, RENAME_FILES, LOWERCASE_FILENAMES, USE_MINSIZE, MINSIZE, USE_MAXSIZE, MAXSIZE, CORRECT_METADATA, FOLDER_FORMAT, FILE_FORMAT, REPLACE_CHAR, REPLACE_SPACES, ADD_TO_CSV, CVINFO, LOG_LEVEL, POST_PROCESSING, SEARCH_DELAY, GRABBAG_DIR, READ2FILENAME, CVURL, \
         COMIC_LOCATION, QUAL_ALTVERS, QUAL_SCANNER, QUAL_TYPE, QUAL_QUALITY, ENABLE_EXTRA_SCRIPTS, EXTRA_SCRIPTS, ENABLE_PRE_SCRIPTS, PRE_SCRIPTS, PULLNEW, COUNT_ISSUES, COUNT_HAVES, COUNT_COMICS, SYNO_FIX, CHMOD_FILE, CHMOD_DIR, ANNUALS_ON, CV_ONLY, CV_ONETIMER

     if __INITIALIZED__:
@@ -344,6 +346,9 @@ def initialize():
         if not GRABBAG_DIR:
             #default to ComicLocation
             GRABBAG_DIR = DESTINATION_DIR
+        CVAPIFIX = bool(check_setting_int(CFG, 'General', 'cvapifix', 0))
+        if CVAPIFIX is None:
+            CVAPIFIX = 0
         HIGHCOUNT = check_setting_str(CFG, 'General', 'highcount', '')
         if not HIGHCOUNT: HIGHCOUNT = 0
         READ2FILENAME = bool(check_setting_int(CFG, 'General', 'read2filename', 0))
@@ -559,6 +564,13 @@ def initialize():
         else:
             logger.info("Synology Parsing Fix already implemented. No changes required at this time.")

+        #CV sometimes points to the incorrect DNS - here's the fix.
+        if CVAPIFIX == 1:
+            CVURL = 'http://beta.comicvine.com/api/'
+            logger.info("CVAPIFIX enabled: ComicVine set to beta API site")
+        else:
+            CVURL = 'http://api.comicvine.com/'
+            logger.info("CVAPIFIX disabled: Comicvine set to normal API site")
         #Ordering comics here
         logger.info("Remapping the sorting to allow for new additions.")
         COMICSORT = helpers.ComicSort(sequence='startup')
@@ -644,7 +656,7 @@ def config_write():
     new_config['General']['annuals_on'] = int(ANNUALS_ON)
     new_config['General']['cv_only'] = int(CV_ONLY)
     new_config['General']['cv_onetimer'] = int(CV_ONETIMER)
-
+    new_config['General']['cvapifix'] = int(CVAPIFIX)
     new_config['General']['check_github'] = int(CHECK_GITHUB)
     new_config['General']['check_github_on_startup'] = int(CHECK_GITHUB_ON_STARTUP)
     new_config['General']['check_github_interval'] = CHECK_GITHUB_INTERVAL
@@ -29,9 +29,10 @@ def pulldetails(comicid,type,issueid=None,offset=1):
     #import easy to use xml parser called minidom:
     from xml.dom.minidom import parseString

+    print ("mylar.CVURL: " + str(mylar.CVURL))
     comicapi='583939a3df0a25fc4e8b7a29934a13078002dc27'
     if type == 'comic':
-        PULLURL='http://api.comicvine.com/volume/' + str(comicid) + '/?api_key=' + str(comicapi) + '&format=xml&field_list=name,count_of_issues,issues,start_year,site_detail_url,image,publisher,description,first_issue'
+        PULLURL= mylar.CVURL + 'volume/' + str(comicid) + '/?api_key=' + str(comicapi) + '&format=xml&field_list=name,count_of_issues,issues,start_year,site_detail_url,image,publisher,description,first_issue'
     elif type == 'issue':
         if mylar.CV_ONLY:
             cv_type = 'issues'
|
@ -39,10 +40,12 @@ def pulldetails(comicid,type,issueid=None,offset=1):
|
|||
else:
|
||||
cv_type = 'volume/' + str(comicid)
|
||||
searchset = 'name,count_of_issues,issues,start_year,site_detail_url,image,publisher,description'
|
||||
PULLURL = 'http://api.comicvine.com/' + str(cv_type) + '/?api_key=' + str(comicapi) + '&format=xml&' + str(searchset) + '&offset=' + str(offset)
|
||||
PULLURL = mylar.CVURL + str(cv_type) + '/?api_key=' + str(comicapi) + '&format=xml&' + str(searchset) + '&offset=' + str(offset)
|
||||
elif type == 'firstissue':
|
||||
#this is used ONLY for CV_ONLY
|
||||
PULLURL = 'http://api.comicvine.com/issues/?api_key=' + str(comicapi) + '&format=xml&filter=id:' + str(issueid) + '&field_list=cover_date'
|
||||
PULLURL = mylar.CVURL + 'issues/?api_key=' + str(comicapi) + '&format=xml&filter=id:' + str(issueid) + '&field_list=cover_date'
|
||||
elif type == 'storyarc':
|
||||
PULLURL = mylar.CVURL + 'story_arc/?api_key=' + str(comicapi) + '&format=xml&filter=id:' + str(issueid) + '&field_list=cover_date'
|
||||
|
||||
#download the file:
|
||||
file = urllib2.urlopen(PULLURL)
|
||||
|
|
|
@@ -28,7 +28,7 @@ mb_lock = threading.Lock()


 def pullsearch(comicapi,comicquery,offset):
-    PULLURL='http://api.comicvine.com/search?api_key=' + str(comicapi) + '&resources=volume&query=' + str(comicquery) + '&field_list=id,name,start_year,site_detail_url,count_of_issues,image,publisher&format=xml&page=' + str(offset)
+    PULLURL = mylar.CVURL + 'search?api_key=' + str(comicapi) + '&resources=volume&query=' + str(comicquery) + '&field_list=id,name,start_year,site_detail_url,count_of_issues,image,publisher&format=xml&page=' + str(offset)

     #all these imports are standard on most modern python implementations
     #download the file:
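For a quick sanity check of the rerouted search path, the standalone snippet below mirrors how pullsearch() now assembles its URL from the shared base under each toggle state. The build_search_url helper, the quote_plus call, and the sample query are illustrative assumptions, not code from this commit (pullsearch() itself passes the query through str() unescaped).

try:
    from urllib.parse import quote_plus   # Python 3
except ImportError:
    from urllib import quote_plus         # Python 2, which this code base targets

def build_search_url(cv_base, api_key, query, page=1):
    # Mirrors pullsearch(): base + 'search?...' with the volume resource filter.
    return (cv_base + 'search?api_key=' + str(api_key) +
            '&resources=volume&query=' + quote_plus(query) +
            '&field_list=id,name,start_year,site_detail_url,count_of_issues,image,publisher' +
            '&format=xml&page=' + str(page))

# With the fix on, only the host/prefix changes; the query string stays identical.
print(build_search_url('http://beta.comicvine.com/api/', 'YOUR_CV_API_KEY', 'Batman Incorporated'))
print(build_search_url('http://api.comicvine.com/', 'YOUR_CV_API_KEY', 'Batman Incorporated'))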