"""
SQL-related functions defined here really just pass the queries (or other
SQL commands) to :class:`.threads.sqlThread` through the `sqlSubmitQueue`
queue and check or return the result fetched from `sqlReturnQueue`.

This is done that way because :mod:`sqlite3` is so thread-unsafe that it
won't even let you call it from different threads using your own locks.
SQLite objects can only be used from one thread.

.. note:: This actually only applies to certain deployments and/or
   really old versions of sqlite. I haven't actually seen it anywhere.
   Current versions do have support for threading and multiprocessing.
   I don't see an urgent reason to refactor this, but it should be noted
   in the comment that the problem is mostly not valid. Sadly, last time
   I checked, there is no reliable way to check whether the library is
   or isn't thread-safe.
"""

import threading

from six.moves import queue

sqlSubmitQueue = queue.Queue()
"""the queue for SQL"""
sqlReturnQueue = queue.Queue()
"""the queue for results"""
sql_lock = threading.Lock()
"""lock to prevent queueing a new request until the previous response
is available"""
sql_available = False
"""set to True by `.threads.sqlThread` immediately upon start"""
sql_ready = threading.Event()
"""set by `.threads.sqlThread` when ready for processing (after
initialization is done)"""
sql_timeout = 60
"""timeout for waiting for sql_ready in seconds"""


def sqlQuery(sql_statement, *args):
    """
    Query sqlite and return results

    :param str sql_statement: SQL statement string
    :param list args: SQL query parameters
    :rtype: list
    """
    assert sql_available
    sql_lock.acquire()
    sqlSubmitQueue.put(sql_statement)

    if args == ():
        sqlSubmitQueue.put('')
    elif isinstance(args[0], (list, tuple)):
        sqlSubmitQueue.put(args[0])
    else:
        sqlSubmitQueue.put(args)
    queryreturn, _ = sqlReturnQueue.get()
    sql_lock.release()

    return queryreturn
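
# Example (illustrative; the table and column names are made up):
#
#     rows = sqlQuery('SELECT msgid, received FROM inbox WHERE read = ?', 0)
#     for msgid, received in rows:
#         ...
#
# A single list or tuple argument is passed through as the parameter set:
#
#     sqlQuery('SELECT * FROM inbox WHERE msgid = ?', [msgid])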


def sqlExecuteChunked(sql_statement, idCount, *args):
    """Execute chunked SQL statement to avoid argument limit"""
    # SQLITE_MAX_VARIABLE_NUMBER,
    # unfortunately getting/setting isn't exposed to python
    assert sql_available
    sqlExecuteChunked.chunkSize = 999

    if idCount == 0 or idCount > len(args):
        return 0

    total_row_count = 0
    with sql_lock:
        for i in range(
                len(args) - idCount, len(args),
                sqlExecuteChunked.chunkSize - (len(args) - idCount)
        ):
            chunk_slice = args[
                i:i + sqlExecuteChunked.chunkSize - (len(args) - idCount)
            ]
            sqlSubmitQueue.put(
                sql_statement.format(','.join('?' * len(chunk_slice)))
            )
            # first static args, and then iterative chunk
            sqlSubmitQueue.put(
                args[0:len(args) - idCount] + chunk_slice
            )
            ret_val = sqlReturnQueue.get()
            total_row_count += ret_val[1]
        sqlSubmitQueue.put('commit')
    return total_row_count
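
# Illustrative call (the table name is made up): `sql_statement` carries a
# `{}` placeholder that receives the right number of `?` marks per chunk,
# and `idCount` says how many trailing args are the IDs being chunked:
#
#     sqlExecuteChunked(
#         'DELETE FROM hashes WHERE hash IN ({})', len(hashes), *hashes)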


def sqlExecute(sql_statement, *args):
    """Execute SQL statement (optionally with arguments)"""
    assert sql_available
    sql_lock.acquire()
    sqlSubmitQueue.put(sql_statement)

    if args == ():
        sqlSubmitQueue.put('')
    else:
        sqlSubmitQueue.put(args)
    _, rowcount = sqlReturnQueue.get()
    sqlSubmitQueue.put('commit')
    sql_lock.release()
    return rowcount
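
# Example (illustrative; the table and column names are made up):
#
#     rowcount = sqlExecute(
#         'UPDATE inbox SET read = ? WHERE msgid = ?', 1, msgid)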


def sqlExecuteScript(sql_statement):
    """Execute SQL script statement"""

    statements = sql_statement.split(";")
    with SqlBulkExecute() as sql:
        for q in statements:
            sql.execute("{}".format(q))


def sqlStoredProcedure(procName):
    """Schedule procName to be run"""
    assert sql_available
    sql_lock.acquire()
    sqlSubmitQueue.put(procName)
    if procName == "exit":
        sqlSubmitQueue.task_done()
        sqlSubmitQueue.put("terminate")
    sql_lock.release()
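
# Illustrative: shutdown is requested by scheduling the special "exit"
# procedure, which also tells the submit queue to terminate:
#
#     sqlStoredProcedure('exit')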


class SqlBulkExecute(object):
    """This is used when you have to execute the same statement in a cycle."""

    def __enter__(self):
        sql_lock.acquire()
        return self

    def __exit__(self, exc_type, value, traceback):
        sqlSubmitQueue.put('commit')
        sql_lock.release()

    @staticmethod
    def execute(sql_statement, *args):
        """Used for statements that do not return results."""
        assert sql_available
        sqlSubmitQueue.put(sql_statement)

        if args == ():
            sqlSubmitQueue.put('')
        else:
            sqlSubmitQueue.put(args)
        sqlReturnQueue.get()
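
# Example (illustrative; the table and values are made up): repeated inserts
# share one lock acquisition and a single commit on exit:
#
#     with SqlBulkExecute() as sql:
#         for label, address in entries:
#             sql.execute(
#                 'INSERT INTO addressbook VALUES (?, ?)', label, address)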