mirror of https://github.com/evilhero/mylar
FIX: Able to specify own CV API key now (almost a requirement) - warning implemented throughout as well (API key usage is limited to 200 hits per 15 minutes per key), IMP: If the Store in Weekly Directory option is enabled in the Weekly Pull-list, post-processing of pull-list files will automatically copy the finished files into the weekly directory as well as the normal series directory, FIX:(#737) When using manual Rename with Annuals, extra words would mistakenly be appended to filenames, FIX: Default set for Safestart mode.
This commit is contained in:
parent
cc7a2621c7
commit
1c35ba48fb
|
@ -140,12 +140,18 @@
|
|||
<td>
|
||||
<fieldset>
|
||||
<legend>API</legend>
|
||||
<div class="row">
|
||||
<label>ComicVine API Key</label>
|
||||
<input type="text" name="comicvine_api" value="${config['comicvine_api']}" title="get one for free @ http://api.comicvine.com" size="40">
|
||||
<small>specify your own CV API key here </small>
|
||||
</div>
|
||||
|
||||
<div class="row checkbox">
|
||||
<input id="api_enabled" type="checkbox" onclick="initConfigCheckbox($(this));" name="api_enabled" value="1" ${config['api_enabled']} /><label>Enable API</label>
|
||||
</div>
|
||||
<div class="apioptions">
|
||||
<div Class="row">
|
||||
<label>API key</label>
|
||||
<label>Mylar API key</label>
|
||||
<input type="text" name="api_key" id="api_key" value="${config['api_key']}" size="20">
|
||||
<input type="button" value="Generate" id="generate_api">
|
||||
<small>Current API key: <strong>${config['api_key']}</strong></small>
|
||||
|
|
|
@ -28,7 +28,7 @@ import sqlite3
|
|||
from xml.dom.minidom import parseString
|
||||
|
||||
|
||||
from mylar import logger, db, helpers, updater, notifiers, filechecker
|
||||
from mylar import logger, db, helpers, updater, notifiers, filechecker, weeklypull
|
||||
|
||||
class PostProcessor(object):
|
||||
"""
|
||||
|
@ -865,6 +865,10 @@ class PostProcessor(object):
|
|||
logger.info(u"Post-Processing completed for: " + series + " " + dispiss )
|
||||
self._log(u"Post Processing SUCCESSFUL! ")
|
||||
|
||||
if mylar.WEEKFOLDER:
|
||||
#if enabled, will *copy* the post-processed file to the weeklypull list folder for the given week.
|
||||
weeklypull.weekly_singlecopy(comicid,issuenum,str(nfilename+ext),dst)
|
||||
|
||||
# retrieve/create the corresponding comic objects
|
||||
if mylar.ENABLE_EXTRA_SCRIPTS:
|
||||
folderp = str(dst) #folder location after move/rename
|
||||
|
|
|
@ -49,21 +49,23 @@ VERBOSE = 1
|
|||
DAEMON = False
|
||||
PIDFILE= None
|
||||
CREATEPID = False
|
||||
SAFESTART = False
|
||||
|
||||
SCHED = Scheduler()
|
||||
|
||||
INIT_LOCK = threading.Lock()
|
||||
#INIT_LOCK = Lock()
|
||||
__INITIALIZED__ = False
|
||||
started = False
|
||||
WRITELOCK = False
|
||||
|
||||
dbUpdateScheduler = None
|
||||
searchScheduler = None
|
||||
RSSScheduler = None
|
||||
WeeklyScheduler = None
|
||||
VersionScheduler = None
|
||||
FolderMonitorScheduler = None
|
||||
## for use with updated scheduler (not working atm)
|
||||
#INIT_LOCK = Lock()
|
||||
#dbUpdateScheduler = None
|
||||
#searchScheduler = None
|
||||
#RSSScheduler = None
|
||||
#WeeklyScheduler = None
|
||||
#VersionScheduler = None
|
||||
#FolderMonitorScheduler = None
|
||||
|
||||
DATA_DIR = None
|
||||
DBLOCK = False
|
||||
|
@ -102,6 +104,9 @@ COMMITS_BEHIND = None
|
|||
USER_AGENT = None
|
||||
SEARCH_DELAY = 1
|
||||
|
||||
COMICVINE_API = None
|
||||
DEFAULT_CVAPI = '583939a3df0a25fc4e8b7a29934a13078002dc27'
|
||||
|
||||
CHECK_GITHUB = False
|
||||
CHECK_GITHUB_ON_STARTUP = False
|
||||
CHECK_GITHUB_INTERVAL = None
|
||||
|
@ -333,8 +338,8 @@ def initialize():
|
|||
|
||||
with INIT_LOCK:
|
||||
|
||||
global __INITIALIZED__, FULL_PATH, PROG_DIR, VERBOSE, DAEMON, COMICSORT, DATA_DIR, CONFIG_FILE, CFG, CONFIG_VERSION, LOG_DIR, CACHE_DIR, MAX_LOGSIZE, LOGVERBOSE, OLDCONFIG_VERSION, OS_DETECT, OS_LANG, OS_ENCODING, \
|
||||
HTTP_PORT, HTTP_HOST, HTTP_USERNAME, HTTP_PASSWORD, HTTP_ROOT, API_ENABLED, API_KEY, LAUNCH_BROWSER, GIT_PATH, \
|
||||
global __INITIALIZED__, COMICVINE_API, DEFAULT_CVAPI, FULL_PATH, PROG_DIR, VERBOSE, DAEMON, COMICSORT, DATA_DIR, CONFIG_FILE, CFG, CONFIG_VERSION, LOG_DIR, CACHE_DIR, MAX_LOGSIZE, LOGVERBOSE, OLDCONFIG_VERSION, OS_DETECT, OS_LANG, OS_ENCODING, \
|
||||
HTTP_PORT, HTTP_HOST, HTTP_USERNAME, HTTP_PASSWORD, HTTP_ROOT, API_ENABLED, API_KEY, LAUNCH_BROWSER, GIT_PATH, SAFESTART, \
|
||||
CURRENT_VERSION, LATEST_VERSION, CHECK_GITHUB, CHECK_GITHUB_ON_STARTUP, CHECK_GITHUB_INTERVAL, USER_AGENT, DESTINATION_DIR, \
|
||||
DOWNLOAD_DIR, USENET_RETENTION, SEARCH_INTERVAL, NZB_STARTUP_SEARCH, INTERFACE, AUTOWANT_ALL, AUTOWANT_UPCOMING, ZERO_LEVEL, ZERO_LEVEL_N, COMIC_COVER_LOCAL, HIGHCOUNT, \
|
||||
LIBRARYSCAN, LIBRARYSCAN_INTERVAL, DOWNLOAD_SCAN_INTERVAL, NZB_DOWNLOADER, USE_SABNZBD, SAB_HOST, SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_PRIORITY, SAB_DIRECTORY, USE_BLACKHOLE, BLACKHOLE_DIR, ADD_COMICS, COMIC_DIR, IMP_MOVE, IMP_RENAME, IMP_METADATA, \
|
||||
|
@ -373,6 +378,9 @@ def initialize():
|
|||
HTTP_PORT = 8090
|
||||
|
||||
CONFIG_VERSION = check_setting_str(CFG, 'General', 'config_version', '')
|
||||
COMICVINE_API = check_setting_str(CFG, 'General', 'comicvine_api', '')
|
||||
if not COMICVINE_API:
|
||||
COMICVINE_API = None
|
||||
HTTP_HOST = check_setting_str(CFG, 'General', 'http_host', '0.0.0.0')
|
||||
HTTP_USERNAME = check_setting_str(CFG, 'General', 'http_username', '')
|
||||
HTTP_PASSWORD = check_setting_str(CFG, 'General', 'http_password', '')
|
||||
|
@ -791,6 +799,10 @@ def initialize():
|
|||
except OSError:
|
||||
logger.error('Could not create cache dir. Check permissions of datadir: ' + DATA_DIR)
|
||||
|
||||
#ComicVine API Check
|
||||
if COMICVINE_API is None or COMICVINE_API == '':
|
||||
logger.error('No User Comicvine API key specified. I will not work very well due to api limits - http://api.comicvine.com/ and get your own free key.')
|
||||
|
||||
# Sanity check for search interval. Set it to at least 6 hours
|
||||
if SEARCH_INTERVAL < 360:
|
||||
logger.info('Search interval too low. Resetting to 6 hour minimum')
|
||||
|
@ -997,6 +1009,7 @@ def config_write():
|
|||
new_config.encoding = 'UTF8'
|
||||
new_config['General'] = {}
|
||||
new_config['General']['config_version'] = CONFIG_VERSION
|
||||
new_config['General']['comicvine_api'] = COMICVINE_API
|
||||
new_config['General']['http_port'] = HTTP_PORT
|
||||
new_config['General']['http_host'] = HTTP_HOST
|
||||
new_config['General']['http_username'] = HTTP_USERNAME
|
||||
|
|
|
@ -29,7 +29,12 @@ def pulldetails(comicid,type,issueid=None,offset=1):
|
|||
#import easy to use xml parser called minidom:
|
||||
from xml.dom.minidom import parseString
|
||||
|
||||
comicapi='583939a3df0a25fc4e8b7a29934a13078002dc27'
|
||||
if mylar.COMICVINE_API == 'None' or mylar.COMICVINE_API == mylar.DEFAULT_CVAPI:
|
||||
logger.warn('You have not specified your own ComicVine API key - alot of things will be limited. Get your own @ http://api.comicvine.com.')
|
||||
comicapi = mylar.DEFAULT_CVAPI
|
||||
else:
|
||||
comicapi = mylar.COMICVINE_API
|
||||
|
||||
if type == 'comic':
|
||||
if not comicid.startswith('4050-'): comicid = '4050-' + comicid
|
||||
PULLURL= mylar.CVURL + 'volume/' + str(comicid) + '/?api_key=' + str(comicapi) + '&format=xml&field_list=name,count_of_issues,issues,start_year,site_detail_url,image,publisher,description,first_issue,deck,aliases'
|
||||
|
@ -47,6 +52,7 @@ def pulldetails(comicid,type,issueid=None,offset=1):
|
|||
elif type == 'storyarc':
|
||||
PULLURL = mylar.CVURL + 'story_arc/?api_key=' + str(comicapi) + '&format=xml&filter=id:' + str(issueid) + '&field_list=cover_date'
|
||||
|
||||
|
||||
#download the file:
|
||||
file = urllib2.urlopen(PULLURL)
|
||||
#convert to string:
|
||||
|
|
|
@ -802,7 +802,7 @@ def validateAndCreateDirectory(dir, create=False):
|
|||
os.umask(0) # this is probably redudant, but it doesn't hurt to clear the umask here.
|
||||
os.makedirs(dir.rstrip(), permission )
|
||||
except OSError:
|
||||
raise SystemExit('Could not create data directory: ' + mylar.DATA_DIR + '. Exiting....')
|
||||
raise SystemExit('Could not create directory: ' + dir + '. Exiting....')
|
||||
return True
|
||||
else:
|
||||
logger.warn('Provided directory is blank, aborting')
|
||||
|
|
|
@ -295,7 +295,6 @@ def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=N
|
|||
return
|
||||
else:
|
||||
annualize = True
|
||||
logger.fdebug('blah')
|
||||
#comicid = issuenzb['ComicID']
|
||||
issuenum = issuenzb['Issue_Number']
|
||||
#issueno = str(issuenum).split('.')[0]
|
||||
|
@ -419,11 +418,54 @@ def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=N
|
|||
|
||||
else:
|
||||
logger.fdebug('chunk_file_format is: ' + str(chunk_file_format))
|
||||
if '$Annual' not in chunk_file_format:
|
||||
#if it's an annual, but $annual isn't specified in file_format, we need to
|
||||
#force it in there, by default in the format of $Annual $Issue
|
||||
prettycomiss = "Annual " + str(prettycomiss)
|
||||
logger.fdebug('prettycomiss: ' + str(prettycomiss))
|
||||
if mylar.ANNUALS_ON:
|
||||
if 'annual' in series.lower():
|
||||
if '$Annual' not in chunk_file_format: # and 'annual' not in ofilename.lower():
|
||||
#if it's an annual, but $annual isn't specified in file_format, we need to
|
||||
#force it in there, by default in the format of $Annual $Issue
|
||||
#prettycomiss = "Annual " + str(prettycomiss)
|
||||
logger.fdebug('[' + series + '][ANNUALS-ON][ANNUAL IN SERIES][NOT $ANNUAL] prettycomiss: ' + str(prettycomiss))
|
||||
else:
|
||||
#because it exists within title, strip it then use formatting tag for placement of wording.
|
||||
chunk_f_f = re.sub('\$Annual','',chunk_file_format)
|
||||
chunk_f = re.compile(r'\s+')
|
||||
chunk_file_format = chunk_f.sub(' ', chunk_f_f)
|
||||
logger.fdebug('[' + series + '][ANNUALS-ON][ANNUAL IN SERIES][$ANNUAL] prettycomiss: ' + str(prettycomiss))
|
||||
else:
|
||||
if '$Annual' not in chunk_file_format: # and 'annual' not in ofilename.lower():
|
||||
#if it's an annual, but $annual isn't specified in file_format, we need to
|
||||
#force it in there, by default in the format of $Annual $Issue
|
||||
prettycomiss = "Annual " + str(prettycomiss)
|
||||
logger.fdebug('[' + series + '][ANNUALS-ON][ANNUAL NOT IN SERIES][NOT $ANNUAL] prettycomiss: ' + str(prettycomiss))
|
||||
else:
|
||||
logger.fdebug('[' + series + '][ANNUALS-ON][ANNUAL NOT IN SERIES][$ANNUAL] prettycomiss: ' + str(prettycomiss))
|
||||
|
||||
else:
|
||||
#if annuals aren't enabled, then annuals are being tracked as independent series.
|
||||
#annualize will be true since it's an annual in the seriesname.
|
||||
if 'annual' in series.lower():
|
||||
if '$Annual' not in chunk_file_format: # and 'annual' not in ofilename.lower():
|
||||
#if it's an annual, but $annual isn't specified in file_format, we need to
|
||||
#force it in there, by default in the format of $Annual $Issue
|
||||
#prettycomiss = "Annual " + str(prettycomiss)
|
||||
logger.fdebug('[' + series + '][ANNUALS-OFF][ANNUAL IN SERIES][NOT $ANNUAL] prettycomiss: ' + str(prettycomiss))
|
||||
else:
|
||||
#because it exists within title, strip it then use formatting tag for placement of wording.
|
||||
chunk_f_f = re.sub('\$Annual','',chunk_file_format)
|
||||
chunk_f = re.compile(r'\s+')
|
||||
chunk_file_format = chunk_f.sub(' ', chunk_f_f)
|
||||
logger.fdebug('[' + series + '][ANNUALS-OFF][ANNUAL IN SERIES][$ANNUAL] prettycomiss: ' + str(prettycomiss))
|
||||
else:
|
||||
if '$Annual' not in chunk_file_format: # and 'annual' not in ofilename.lower():
|
||||
#if it's an annual, but $annual isn't specified in file_format, we need to
|
||||
#force it in there, by default in the format of $Annual $Issue
|
||||
prettycomiss = "Annual " + str(prettycomiss)
|
||||
logger.fdebug('[' + series + '][ANNUALS-OFF][ANNUAL NOT IN SERIES][NOT $ANNUAL] prettycomiss: ' + str(prettycomiss))
|
||||
else:
|
||||
logger.fdebug('[' + series + '][ANNUALS-OFF][ANNUAL NOT IN SERIES][$ANNUAL] prettycomiss: ' + str(prettycomiss))
|
||||
|
||||
|
||||
logger.fdebug('Annual detected within series title of ' + series + '. Not auto-correcting issue #')
|
||||
|
||||
file_values = {'$Series': series,
|
||||
'$Issue': prettycomiss,
|
||||
|
|
|
@ -123,7 +123,6 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None,c
|
|||
#if vari_loop == '99':
|
||||
gcdinfo = parseit.GCDdetails(comseries=None, resultURL=resultURL, vari_loop=0, ComicID=comicid, TotalIssues=0, issvariation="no", resultPublished=None)
|
||||
|
||||
logger.info('Sucessfully retrieved details for ' + comic['ComicName'] )
|
||||
# print ("Series Published" + parseit.resultPublished)
|
||||
|
||||
CV_NoYearGiven = "no"
|
||||
|
@ -143,6 +142,8 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None,c
|
|||
else:
|
||||
SeriesYear = comic['ComicYear']
|
||||
|
||||
logger.info('Sucessfully retrieved details for ' + comic['ComicName'] )
|
||||
|
||||
#since the weekly issue check could return either annuals or issues, let's initialize it here so it carries through properly.
|
||||
weeklyissue_check = []
|
||||
|
||||
|
|
|
@ -87,7 +87,12 @@ def findComic(name, mode, issue, limityear=None, explicit=None):
|
|||
comicquery = name.replace(" ", " AND ")
|
||||
explicit = 'all'
|
||||
|
||||
comicapi='583939a3df0a25fc4e8b7a29934a13078002dc27'
|
||||
|
||||
if mylar.COMICVINE_API == 'None' or mylar.COMICVINE_API == mylar.DEFAULT_CVAPI:
|
||||
logger.warn('You have not specified your own ComicVine API key - alot of things will be limited. Get your own @ http://api.comicvine.com.')
|
||||
comicapi = mylar.DEFAULT_CVAPI
|
||||
else:
|
||||
comicapi = mylar.COMICVINE_API
|
||||
|
||||
#let's find out how many results we get from the query...
|
||||
searched = pullsearch(comicapi,comicquery,0,explicit)
|
||||
|
|
|
@ -1902,9 +1902,10 @@ class WebInterface(object):
|
|||
|
||||
downloadLocal.exposed = True
|
||||
|
||||
def MassWeeklyDownload(self, pulldate, weekfolder=0):
|
||||
mylar.WEEKFOLDER = int(weekfolder)
|
||||
mylar.config_write()
|
||||
def MassWeeklyDownload(self, pulldate, weekfolder=0, filename=None):
|
||||
if filename is None:
|
||||
mylar.WEEKFOLDER = int(weekfolder)
|
||||
mylar.config_write()
|
||||
|
||||
# this will download all downloaded comics from the weekly pull list and throw them
|
||||
# into a 'weekly' pull folder for those wanting to transfer directly to a 3rd party device.
|
||||
|
@ -1946,7 +1947,6 @@ class WebInterface(object):
|
|||
raise cherrypy.HTTPRedirect("pullist")
|
||||
MassWeeklyDownload.exposed = True
|
||||
|
||||
#for testing.
|
||||
def idirectory(self):
|
||||
return serve_template(templatename="idirectory.html", title="Import a Directory")
|
||||
idirectory.exposed = True
|
||||
|
@ -2279,6 +2279,7 @@ class WebInterface(object):
|
|||
"COUNT_SIZE" : COUNT_SIZE }
|
||||
|
||||
config = {
|
||||
"comicvine_api" : mylar.COMICVINE_API,
|
||||
"http_host" : mylar.HTTP_HOST,
|
||||
"http_user" : mylar.HTTP_USERNAME,
|
||||
"http_port" : mylar.HTTP_PORT,
|
||||
|
@ -2572,7 +2573,7 @@ class WebInterface(object):
|
|||
readOptions.exposed = True
|
||||
|
||||
|
||||
def configUpdate(self, http_host='0.0.0.0', http_username=None, http_port=8090, http_password=None, api_enabled=0, api_key=None, launch_browser=0, logverbose=0, max_logsize=None, download_scan_interval=None, nzb_search_interval=None, nzb_startup_search=0, libraryscan_interval=None,
|
||||
def configUpdate(self, comicvine_api=None, http_host='0.0.0.0', http_username=None, http_port=8090, http_password=None, api_enabled=0, api_key=None, launch_browser=0, logverbose=0, max_logsize=None, download_scan_interval=None, nzb_search_interval=None, nzb_startup_search=0, libraryscan_interval=None,
|
||||
nzb_downloader=0, sab_host=None, sab_username=None, sab_apikey=None, sab_password=None, sab_category=None, sab_priority=None, sab_directory=None, log_dir=None, log_level=0, blackhole_dir=None,
|
||||
nzbget_host=None, nzbget_port=None, nzbget_username=None, nzbget_password=None, nzbget_category=None, nzbget_priority=None, nzbget_directory=None,
|
||||
usenet_retention=None, nzbsu=0, nzbsu_uid=None, nzbsu_apikey=None, dognzb=0, dognzb_uid=None, dognzb_apikey=None, newznab=0, newznab_host=None, newznab_name=None, newznab_apikey=None, newznab_uid=None, newznab_enabled=0,
|
||||
|
@ -2583,6 +2584,7 @@ class WebInterface(object):
|
|||
pushbullet_enabled=0, pushbullet_apikey=None, pushbullet_deviceid=None, pushbullet_onsnatch=0,
|
||||
preferred_quality=0, move_files=0, rename_files=0, add_to_csv=1, cvinfo=0, lowercase_filenames=0, folder_format=None, file_format=None, enable_extra_scripts=0, extra_scripts=None, enable_pre_scripts=0, pre_scripts=None, post_processing=0, syno_fix=0, search_delay=None, chmod_dir=0777, chmod_file=0660, cvapifix=0,
|
||||
tsab=None, destination_dir=None, replace_spaces=0, replace_char=None, use_minsize=0, minsize=None, use_maxsize=0, maxsize=None, autowant_all=0, autowant_upcoming=0, comic_cover_local=0, zero_level=0, zero_level_n=None, interface=None, **kwargs):
|
||||
mylar.COMICVINE_API = comicvine_api
|
||||
mylar.HTTP_HOST = http_host
|
||||
mylar.HTTP_PORT = http_port
|
||||
mylar.HTTP_USERNAME = http_username
|
||||
|
@ -2729,6 +2731,10 @@ class WebInterface(object):
|
|||
mylar.EXTRA_NEWZNABS.append((newznab_name, newznab_host, newznab_api, newznab_uid, newznab_enabled))
|
||||
|
||||
# Sanity checking
|
||||
if mylar.COMICVINE_API == 'None' or mylar.COMICVINE_API == '' or mylar.COMICVINE_API == mylar.DEFAULT_CVAPI:
|
||||
logger.info('Personal Comicvine API key not provided. This will severely impact the usage of Mylar - you have been warned.')
|
||||
mylar.COMICVINE_API = None
|
||||
|
||||
if mylar.SEARCH_INTERVAL < 360:
|
||||
logger.info("Search interval too low. Resetting to 6 hour minimum")
|
||||
mylar.SEARCH_INTERVAL = 360
|
||||
|
|
|
@ -26,6 +26,7 @@ import os
|
|||
import time
|
||||
import re
|
||||
import datetime
|
||||
import shutil
|
||||
|
||||
import mylar
|
||||
from mylar import db, updater, helpers, logger
|
||||
|
@ -840,3 +841,49 @@ def checkthis(datecheck,datestatus,usedate):
|
|||
valid_check = False
|
||||
|
||||
return valid_check
|
||||
|
||||
def weekly_singlecopy(comicid, issuenum, file, path):
    """Copy a post-processed issue file into the weekly pull-list folder.

    Looks up the current pull-list ship date, verifies the issue is actually
    on the weekly pull-list, then copies the finished file (metadata preserved
    via shutil.copy2) into either a per-week subfolder of DESTINATION_DIR
    (when WEEKFOLDER is enabled) or GRABBAG_DIR.

    Parameters:
        comicid  -- ComicID of the series the issue belongs to.
        issuenum -- issue number, matched against the weekly table's ISSUE column.
        file     -- destination filename (already renamed, including extension).
        path     -- full source path of the post-processed file to copy.

    Returns None in all cases; failures are logged, never raised.
    """
    myDB = db.DBConnection()
    try:
        pull_date = myDB.selectone("SELECT SHIPDATE from weekly").fetchone()
        # No weekly table row yet -> fall back to a sentinel date so the
        # per-week folder name is still deterministic.
        if pull_date is None:
            pulldate = '00000000'
        else:
            pulldate = pull_date['SHIPDATE']

        logger.fdebug(u"Weekly pull list detected as : " + str(pulldate))

    # NOTE(review): sqlite3 must be imported at module level for this except
    # clause to resolve - it is not visible in the shown import block; confirm.
    # ('as' form replaces the Python-2-only 'except X, msg' comma syntax and
    # is accepted by both Python 2.6+ and Python 3.)
    except (sqlite3.OperationalError, TypeError) as msg:
        logger.info(u"Error determining current weekly pull-list date - you should refresh the pull-list manually probably.")
        logger.fdebug(u"Underlying error: " + str(msg))
        return

    chkit = myDB.selectone('SELECT * FROM weekly WHERE ComicID=? AND ISSUE=?', [comicid, issuenum]).fetchone()
    if chkit is None:
        # One-off downloads and non-pull-list issues are intentionally skipped.
        logger.fdebug(file + ' is not on the weekly pull-list or it is a one-off download that is not supported as of yet.')
        return

    logger.info('issue on weekly pull-list.')

    if mylar.WEEKFOLDER:
        # Per-week subdirectory named after the ship date; fall back to the
        # plain destination dir if it cannot be created.
        desdir = os.path.join(mylar.DESTINATION_DIR, pulldate)
        dircheck = mylar.filechecker.validateAndCreateDirectory(desdir, True)
        if not dircheck:
            desdir = mylar.DESTINATION_DIR
    else:
        desdir = mylar.GRABBAG_DIR

    desfile = os.path.join(desdir, file)
    # os.path.join(path) with a single argument is a no-op; use path directly.
    srcfile = path

    try:
        # copy2 preserves file metadata (timestamps) along with contents.
        shutil.copy2(srcfile, desfile)
    except IOError as e:
        logger.error('Could not copy ' + str(srcfile) + ' to ' + str(desfile))
        return

    logger.debug('sucessfully copied to ' + desfile.encode('utf-8').strip())

    return
|
||||
|
||||
|
|
Loading…
Reference in New Issue