cocluto v1.04
- added to the class `ISqlDatabaseBackend` the ability to create a sql table
- also added `SqliteDb`: an implementation of `ISqlDatabaseBackend` that implements the sql database as a sqlite3 database

work related to [https://bugzilla.ipr.univ-rennes.fr/show_bug.cgi?id=3958]
parent e86197c083
commit 101fb6d8b2
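For orientation, here is a minimal usage sketch of the new table-creation API (not part of the diff below), assuming `SqlTableField` and `SqliteDb` are imported from the module modified here; the database path, the table name `benchmarks` and the field definitions are hypothetical and chosen only for illustration:

from pathlib import Path

# hypothetical field definitions; 'id' acts as the autoincrement index of the table
fields = [
    SqlTableField('id', SqlTableField.Type.FIELD_TYPE_INT, 'unique row id', is_autoinc_index=True),
    SqlTableField('matrix_size', SqlTableField.Type.FIELD_TYPE_INT, 'the size n of the n*n matrix'),
    SqlTableField('duration', SqlTableField.Type.FIELD_TYPE_FLOAT, 'duration of the run, in seconds'),
]

db = SqliteDb(Path('/tmp/bench.sqlite'))  # hypothetical path; the sqlite file is created if it does not exist yet
if not db.table_exists('benchmarks'):
    db.create_table('benchmarks', fields)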
@@ -1,3 +1,7 @@
from typing import Union, List
from pathlib import Path
from enum import Enum
import logging
import MySQLdb  # sudo port install py-mysql; sudo apt install python-mysqldb or pip install mysqlclient
import time
import sys
@@ -37,16 +41,61 @@ def is_machine_responding(machineName):
    return False


SqlQuery = str


class SqlTableField():
    '''description of a field of a sql table
    '''
    class Type(Enum):
        FIELD_TYPE_STRING = 0
        FIELD_TYPE_INT = 1
        FIELD_TYPE_FLOAT = 2
        FIELD_TYPE_TIME = 3

    name: str  # the name of the field, eg 'matrix_size'
    field_type: Type  # the type of the field, eg 'FIELD_TYPE_INT'
    description: str  # the description of the field, eg 'the size n of the n*n matrix'
    is_autoinc_index: bool  # indicates if this field is used as an autoincrement index in the table

    def __init__(self, name: str, field_type: Type, description: str, is_autoinc_index=False):
        if is_autoinc_index:
            assert field_type == SqlTableField.Type.FIELD_TYPE_INT, 'only an integer field can be used as an autoincrement table index'
        self.name = name
        self.field_type = field_type
        self.description = description
        self.is_autoinc_index = is_autoinc_index


class ISqlDatabaseBackend(object):
    def __init__(self):
        pass

    @abc.abstractmethod
    def query(self, sql_query):
    def query(self, sql_query: SqlQuery):
        """
        :param str sql_query: the sql query to perform
        """

    @abc.abstractmethod
    def table_exists(self, table_name: str) -> bool:
        """returns true if the given table exists in the database
        """

    @abc.abstractmethod
    def create_table(self, table_name: str, fields: List[SqlTableField]):
        """creates the table in this sql database
        """

    @abc.abstractmethod
    def get_field_directive(self, field_name: str, field_sql_type: str, field_description: str) -> str:
        """returns the sql directive for the declaration of the given table field (eg "`matrix_size` real NOT NULL COMMENT 'the size of the matrix'")
        """

    @abc.abstractmethod
    def dump(self, sql_file_path: Path):
        """dumps this database into the given sql file"""


class RemoteMysqlDb(ISqlDatabaseBackend):
    def __init__(self, db_server_fqdn, db_user, db_name):
@@ -72,9 +121,101 @@ class RemoteMysqlDb(ISqlDatabaseBackend):
        rows = self._conn.store_result()
        return rows

    def table_exists(self, table_name: str) -> bool:
        rows = self.query(f"SHOW TABLES LIKE '{table_name}';")
        assert len(rows) <= 1, f'Unexpected case: more than one table ({len(rows)}) matches the table name {table_name}.'
        return len(rows) == 1

class SqlFile(ISqlDatabaseBackend):
    def __init__(self, sql_file_path, truncate_hex_strings=False):
    def create_table(self, table_name: str, fields: List[SqlTableField]):
        raise NotImplementedError()

    def get_field_directive(self, field_name: str, field_sql_type: str, field_description: str) -> str:
        return f'`{field_name}` {field_sql_type} COMMENT \'{field_description}\''

    def dump(self, sql_file_path: Path):
        raise NotImplementedError()


class SqliteDb(ISqlDatabaseBackend):
    sqlite_db_path: Union[Path, str]  # ':memory:' # sqlite-specific special name for a file stored in memory. We could use something like '/tmp/simpadb.sqlite' here but this would make parsing really slow (1 minute instead of 1s), unless either :
    _con: sqlite3.Connection
    _cur: sqlite3.Cursor

    def __init__(self, sqlite_db_path: Path):
        """
        :param str sqlite_db_path: the path of the sqlite database
        :param str database_name: the name of the database within the sqlite database (eg "iprbench")
        """
        self.sqlite_db_path = sqlite_db_path
        self._cur = None

        check_same_thread = False
        # this is to prevent the following error when run from apache/django: SQLite objects created in a thread can only be used in that same thread. The object was created in thread id 139672342353664 and this is thread id 139672333960960
        # according to https://stackoverflow.com/questions/48218065/programmingerror-sqlite-objects-created-in-a-thread-can-only-be-used-in-that-sa this is ok, as long as there are no concurrent writes
        # If set to False, the returned connection may be shared across multiple threads. When using multiple threads with the same connection, writing operations should be serialized by the user to avoid data corruption
        # I hope it's safe here but I'm not 100% sure though. Anyway, if the database gets corrupt, it's not a big deal since this memory-resident database gets reconstructed from the sql file...

        if sqlite_db_path != ':memory:' and not sqlite_db_path.exists():
            logging.debug('creating sqlite database in %s', sqlite_db_path)
            self._con = sqlite3.connect(sqlite_db_path, check_same_thread=check_same_thread)
        else:
            logging.debug('reusing existing sqlite database in %s', sqlite_db_path)
            self._con = sqlite3.connect(sqlite_db_path, check_same_thread=check_same_thread)
        self._cur = self._con.cursor()
        logging.debug('self._con = %s', self._con)
        logging.debug('self._cur = %s', self._cur)

        _ = self.query('PRAGMA encoding="UTF-8";')

    def query(self, sql_query):
        """
        :param str sql_query: the sql query to perform
        """
        self._cur.execute(sql_query)
        rows = self._cur.fetchall()
        self._con.commit()
        return rows

    def table_exists(self, table_name: str) -> bool:
        rows = self.query(f"SELECT name FROM sqlite_master WHERE type='table' AND name='{table_name}';")
        assert len(rows) <= 1, f'Unexpected case: more than one table ({len(rows)}) matches the table name {table_name}.'
        return len(rows) == 1

    def create_table(self, table_name: str, fields: List[SqlTableField]):
        # https://www.sqlite.org/autoinc.html
        # > The AUTOINCREMENT keyword imposes extra CPU, memory, disk space, and disk I/O overhead and should be avoided if not strictly needed. It is usually not needed.
        fields_sql_descriptions = []
        for field in fields:
            sql_field_type = {
                SqlTableField.Type.FIELD_TYPE_FLOAT: 'real NOT NULL',
                SqlTableField.Type.FIELD_TYPE_INT: 'int(11) NOT NULL',
                SqlTableField.Type.FIELD_TYPE_STRING: 'varchar(256) NOT NULL',
                SqlTableField.Type.FIELD_TYPE_TIME: 'datetime NOT NULL',
            }[field.field_type]
            if field.is_autoinc_index:
                assert field.field_type == SqlTableField.Type.FIELD_TYPE_INT
                sql_field_type = 'INTEGER PRIMARY KEY'
            fields_sql_description = self.get_field_directive(field.name, sql_field_type, field.description)
            fields_sql_descriptions.append(fields_sql_description)

        sql_create_table_command = f'CREATE TABLE `{table_name}` ({",".join(fields_sql_descriptions)});'
        logging.debug('sql_create_table_command = %s', sql_create_table_command)
        self.query(sql_create_table_command)

    def get_field_directive(self, field_name: str, field_sql_type: str, field_description: str) -> str:
        # sqlite doesn't understand the COMMENT keyword, so we use sql comments ("--"), as explained in [https://stackoverflow.com/questions/7426205/sqlite-adding-comment-on-descriptions-to-tables-and-columns]
        return f'`{field_name}` {field_sql_type} -- {field_description}\n'

    def dump(self, sql_file_path: Path):
        with open(sql_file_path, 'wt', encoding='utf8') as f:
            for line in self._con.iterdump():
                f.write(f'{line}\n')  # one sql statement per line, so that the dump stays readable and easy to re-parse


class SqlFile(SqliteDb):
    _sql_file_path: Path

    def __init__(self, sql_file_path: Path, truncate_hex_strings=False):
        """
        :param str sql_file_path: the path of the sql file containing the inventory database
        """
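As an aside (not part of the commit): given the type mapping and the sql-comment workaround above, `create_table` would, for the hypothetical fields of the earlier sketch, issue a statement shaped like the one below; each field directive ends with a newline so that the `--` comment does not swallow the comma that introduces the next field:

CREATE TABLE `benchmarks` (`id` INTEGER PRIMARY KEY -- unique row id
,`matrix_size` int(11) NOT NULL -- the size n of the n*n matrix
,`duration` real NOT NULL -- duration of the run, in seconds
);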
@@ -88,12 +229,7 @@ class SqlFile(ISqlDatabaseBackend):
            os.remove(sqlite_db_path)
        except BaseException:
            pass
        check_same_thread = False
        # this is to prevent the following error when run from apache/django: SQLite objects created in a thread can only be used in that same thread. The object was created in thread id 139672342353664 and this is thread id 139672333960960
        # according to https://stackoverflow.com/questions/48218065/programmingerror-sqlite-objects-created-in-a-thread-can-only-be-used-in-that-sa this is ok, as long as there are no concurrent writes
        # If set to False, the returned connection may be shared across multiple threads. When using multiple threads with the same connection, writing operations should be serialized by the user to avoid data corruption
        # I hope it's safe here but I'm not 100% sure though. Anyway, if the database gets corrupt, it's not a big deal since this memory-resident database gets reconstructed from the sql file...
        self._con = sqlite3.connect(sqlite_db_path, check_same_thread=check_same_thread)
        super().__init__(sqlite_db_path)
        with open(str(self._sql_file_path), 'r', encoding='utf8') as f:  # str conversion has been added to support older versions of python in which open doesn't accept arguments of type Path
            sql = f.read()  # watch out for built-in `str`
            # print(sql)
@@ -106,14 +242,6 @@ class SqlFile(ISqlDatabaseBackend):
        # sqlite_sql = f.read()
        self._cur.executescript(sqlite_sql)

    def query(self, sql_query):
        """
        :param str sql_query: the sql query to perform
        """
        self._cur.execute(sql_query)
        rows = self._cur.fetchall()
        return rows


class TableAttrNotFound(Exception):
    def __init__(self, table, key_name, key_value, attr_name):
@@ -127,17 +255,17 @@ class TableAttrNotFound(Exception):

class SqlDatabaseReader(object):

    def __init__(self, inv_provider):
    def __init__(self, inv_provider: ISqlDatabaseBackend):
        """
        :param ISqlDatabaseBackend inv_provider: the input that provides the inventory data
        """
        self._inv_provider = inv_provider

    def query(self, sql_query):
    def query(self, sql_query: SqlQuery):
        """
        performs a query on the sql database

        :param str sql_query: the sql query to perform
        :param SqlQuery sql_query: the sql query to perform
        """
        return self._inv_provider.query(sql_query)
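Closing the loop on the earlier sketch (again hypothetical, not part of the commit): once a backend such as the `SqliteDb` instance `db` above exists, `SqlDatabaseReader` simply forwards `SqlQuery` strings to it:

reader = SqlDatabaseReader(db)
# list the tables known to the sqlite backend
table_names = reader.query("SELECT name FROM sqlite_master WHERE type='table';")
print(table_names)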