mylar/mylar/db.py
# This file is part of Mylar.
#
# Mylar is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mylar is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mylar. If not, see <http://www.gnu.org/licenses/>.
#####################################
## Stolen from Sick-Beard's db.py ##
#####################################
from __future__ import with_statement

import os
import sqlite3
import threading
import time
import Queue

import mylar
from mylar import logger

db_lock = threading.Lock()
mylarQueue = Queue.Queue()


def dbFilename(filename="mylar.db"):
    return os.path.join(mylar.DATA_DIR, filename)

class WriteOnly:

    def __init__(self):
        t = threading.Thread(target=self.worker, name="DB-WRITER")
        t.daemon = True
        t.start()
        logger.fdebug('Thread WriteOnly initialized.')

    def worker(self):
        myDB = DBConnection()
        # this should be in its own thread somewhere, constantly polling the queue and sending writes to the writer.
        logger.fdebug('worker started.')
        while True:
            thisthread = threading.currentThread().name
            if not mylarQueue.empty():
                logger.fdebug('[' + str(thisthread) + '] queue is not empty yet...')
                (QtableName, QvalueDict, QkeyDict) = mylarQueue.get(block=True, timeout=None)
                logger.fdebug('[REQUEUE] Table: ' + str(QtableName) + ' values: ' + str(QvalueDict) + ' keys: ' + str(QkeyDict))
                sqlResult = myDB.upsert(QtableName, QvalueDict, QkeyDict)
                if sqlResult:
                    mylarQueue.task_done()
                    return sqlResult
            else:
                time.sleep(1)
                #logger.fdebug('[' + str(thisthread) + '] sleeping until active.')
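
# Illustrative sketch (an assumption, not part of the original file): other threads
# could hand writes to the DB-WRITER worker above by queueing
# (tableName, valueDict, keyDict) tuples, which the worker pops and passes to
# DBConnection.upsert(). The table and column names here are hypothetical, e.g.:
#
#   mylarQueue.put(('comics', {'Status': 'Active'}, {'ComicID': '12345'}))
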
class DBConnection:

    def __init__(self, filename="mylar.db"):
        self.filename = filename
        self.connection = sqlite3.connect(dbFilename(filename), timeout=20)
        self.connection.row_factory = sqlite3.Row
        self.queue = mylarQueue
    def fetch(self, query, args=None):

        with db_lock:

            if query == None:
                return

            sqlResult = None
            attempt = 0

            while attempt < 5:
                try:
                    if args == None:
                        #logger.fdebug("[FETCH] : " + query)
                        cursor = self.connection.cursor()
                        sqlResult = cursor.execute(query)
                    else:
                        #logger.fdebug("[FETCH] : " + query + " with args " + str(args))
                        cursor = self.connection.cursor()
                        sqlResult = cursor.execute(query, args)
                    # get out of the connection attempt loop since we were successful
                    break
                except sqlite3.OperationalError, e:
                    if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
                        logger.warn('Database Error: %s' % e)
                        attempt += 1
                        time.sleep(1)
                    else:
                        logger.warn('DB error: %s' % e)
                        raise
                except sqlite3.DatabaseError, e:
                    logger.error('Fatal error executing query: %s' % e)
                    raise

            return sqlResult
    def action(self, query, args=None):

        with db_lock:

            if query == None:
                return

            sqlResult = None
            attempt = 0

            while attempt < 5:
                try:
                    if args == None:
                        #logger.fdebug("[ACTION] : " + query)
                        sqlResult = self.connection.execute(query)
                    else:
                        #logger.fdebug("[ACTION] : " + query + " with args " + str(args))
                        sqlResult = self.connection.execute(query, args)
                    self.connection.commit()
                    break
                except sqlite3.OperationalError, e:
                    if "unable to open database file" in e.message or "database is locked" in e.message:
                        logger.warn('Database Error: %s' % e)
                        logger.warn('sqlresult: %s' % query)
                        attempt += 1
                        time.sleep(1)
                    else:
                        logger.error('Database error executing %s :: %s' % (query, e))
                        raise

            return sqlResult
    def select(self, query, args=None):

        sqlResults = self.fetch(query, args).fetchall()

        if sqlResults == None:
            return []

        return sqlResults

    def selectone(self, query, args=None):

        sqlResults = self.fetch(query, args)

        if sqlResults == None:
            return []

        return sqlResults
    def upsert(self, tableName, valueDict, keyDict):
        thisthread = threading.currentThread().name

        changesBefore = self.connection.total_changes

        genParams = lambda myDict: [x + " = ?" for x in myDict.keys()]

        query = "UPDATE " + tableName + " SET " + ", ".join(genParams(valueDict)) + " WHERE " + " AND ".join(genParams(keyDict))

        self.action(query, valueDict.values() + keyDict.values())

        if self.connection.total_changes == changesBefore:
            query = "INSERT INTO " + tableName + " (" + ", ".join(valueDict.keys() + keyDict.keys()) + ")" + \
                    " VALUES (" + ", ".join(["?"] * len(valueDict.keys() + keyDict.keys())) + ")"
            self.action(query, valueDict.values() + keyDict.values())
        #else:
        #    logger.info('[' + str(thisthread) + '] db is currently locked for writing. Queuing this action until it is free')
        #    logger.info('Table: ' + str(tableName) + ' Values: ' + str(valueDict) + ' Keys: ' + str(keyDict))
        #    self.queue.put((tableName, valueDict, keyDict))
        #    # assuming this is coming in from a separate thread, so loop it until it's free to write.
        #    #self.queuesend()
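

# Illustrative usage sketch (not part of the original module). It assumes
# mylar.DATA_DIR has already been initialised and uses a hypothetical
# 'example_comics' table; adjust names to the real schema before relying on it.
if __name__ == '__main__':
    myDB = DBConnection()
    # create a throw-away table so the demo can run against an empty database
    myDB.action("CREATE TABLE IF NOT EXISTS example_comics (ComicID TEXT, ComicName TEXT, Status TEXT)")
    # upsert() tries an UPDATE first and falls back to an INSERT when no row matched the key columns
    myDB.upsert("example_comics", {'ComicName': 'Example Series', 'Status': 'Active'}, {'ComicID': '12345'})
    rows = myDB.select("SELECT ComicID, ComicName, Status FROM example_comics WHERE ComicID=?", ['12345'])
    for row in rows:
        print('row: %s' % dict(row))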