Initial import from Docker volume
This commit is contained in:
39
backend/__init__.py
Executable file
39
backend/__init__.py
Executable file
@@ -0,0 +1,39 @@
|
||||
# This file is part of Tryton. The COPYRIGHT file at the top level of
# this repository contains the full copyright notices and license terms.
import importlib
import urllib.parse

try:
    from backports.entry_points_selectable import entry_points
except ImportError:
    from importlib.metadata import entry_points

from trytond.config import config

__all__ = [
    'name', 'Database', 'TableHandler',
    'DatabaseIntegrityError', 'DatabaseDataError', 'DatabaseOperationalError',
    'DatabaseTimeoutError']

# The backend name is the scheme of the configured database URI,
# e.g. 'postgresql' for postgresql://user@host/db.
name = urllib.parse.urlparse(config.get('database', 'uri', default='')).scheme

_modname = 'trytond.backend.%s' % name
try:
    _module = importlib.import_module(_modname)
except ImportError:
    # Built-in backend not found: look for a third-party backend
    # registered under the 'trytond.backend' entry point group.
    for ep in entry_points().select(group='trytond.backend', name=name):
        try:
            _module = ep.load()
            break
        except ImportError:
            continue
    else:
        # No entry point could provide the backend: re-raise the
        # original ImportError from the built-in lookup.
        raise

# Re-export the selected backend's implementation under generic names.
Database = _module.Database
DatabaseIntegrityError = _module.DatabaseIntegrityError
DatabaseDataError = _module.DatabaseDataError
DatabaseOperationalError = _module.DatabaseOperationalError
DatabaseTimeoutError = _module.DatabaseTimeoutError
TableHandler = _module.TableHandler
|
||||
BIN
backend/__pycache__/__init__.cpython-311.opt-1.pyc
Executable file
BIN
backend/__pycache__/__init__.cpython-311.opt-1.pyc
Executable file
Binary file not shown.
BIN
backend/__pycache__/__init__.cpython-311.pyc
Executable file
BIN
backend/__pycache__/__init__.cpython-311.pyc
Executable file
Binary file not shown.
BIN
backend/__pycache__/database.cpython-311.opt-1.pyc
Executable file
BIN
backend/__pycache__/database.cpython-311.opt-1.pyc
Executable file
Binary file not shown.
BIN
backend/__pycache__/database.cpython-311.pyc
Executable file
BIN
backend/__pycache__/database.cpython-311.pyc
Executable file
Binary file not shown.
BIN
backend/__pycache__/table.cpython-311.opt-1.pyc
Executable file
BIN
backend/__pycache__/table.cpython-311.opt-1.pyc
Executable file
Binary file not shown.
BIN
backend/__pycache__/table.cpython-311.pyc
Executable file
BIN
backend/__pycache__/table.cpython-311.pyc
Executable file
Binary file not shown.
183
backend/database.py
Executable file
183
backend/database.py
Executable file
@@ -0,0 +1,183 @@
|
||||
# This file is part of Tryton. The COPYRIGHT file at the top level of
|
||||
# this repository contains the full copyright notices and license terms.
|
||||
from collections import namedtuple
|
||||
|
||||
from sql import For
|
||||
|
||||
# Placeholder exception classes; each concrete backend module rebinds
# these to the matching driver exceptions (e.g. psycopg2 errors).
DatabaseIntegrityError = None
DatabaseOperationalError = None
DatabaseTimeoutError = None

# (base, type) pair: 'base' is the generic SQL type name, 'type' the
# exact declaration used when creating/altering columns.
SQLType = namedtuple('SQLType', 'base type')
|
||||
|
||||
|
||||
class DatabaseInterface(object):
    '''
    Define generic interface for database connection
    '''
    # SQL flavor generated for this backend (set by subclasses).
    flavor = None
    # Maximum number of items to place in a single SQL IN clause.
    IN_MAX = 1000

    def __new__(cls, name=''):
        return object.__new__(cls)

    def __init__(self, name=''):
        # name: the database name this interface is bound to.
        self.name = name

    def connect(self):
        "Connect to the database backend."
        raise NotImplementedError

    def get_connection(
            self, autocommit=False, readonly=False, statement_timeout=None):
        "Retrieve a connection, optionally autocommit/readonly/time-limited."
        raise NotImplementedError

    def put_connection(self, connection, close=False):
        "Release a connection back to the backend, optionally closing it."
        raise NotImplementedError

    def close(self):
        "Close all connections held for this database."
        raise NotImplementedError

    @classmethod
    def create(cls, connection, database_name):
        "Create the database named database_name."
        raise NotImplementedError

    @classmethod
    def drop(cls, connection, database_name):
        "Drop the database named database_name."
        raise NotImplementedError

    def list(self, hostname=None):
        "Return the list of Tryton database names."
        raise NotImplementedError

    def init(self):
        "Initialize the database with the base schema."
        raise NotImplementedError

    def test(self, hostname=None):
        '''
        Test if it is a Tryton database.
        '''
        raise NotImplementedError

    def nextid(self, connection, table, count=1):
        # Return the next count id(s) for table; optional for backends.
        pass

    def setnextid(self, connection, table, value):
        # Set the id sequence of table to value; optional for backends.
        pass

    def currid(self, connection, table):
        # Return the current id value of table; optional for backends.
        pass

    @classmethod
    def lock(cls, connection, table):
        "Take an exclusive lock on table for the transaction."
        raise NotImplementedError

    def lock_id(self, id, timeout=None):
        "Return an SQL expression taking an advisory lock on id."
        raise NotImplementedError

    def has_constraint(self, constraint):
        "Return True if the backend enforces the given constraint."
        raise NotImplementedError

    def has_returning(self):
        "Return True if INSERT/UPDATE ... RETURNING is supported."
        return False

    def has_multirow_insert(self):
        "Return True if several rows can be inserted in one statement."
        return False

    def has_select_for(self):
        "Return True if SELECT ... FOR UPDATE/SHARE is supported."
        return False

    def get_select_for_skip_locked(self):
        "Return the For class to use for SKIP LOCKED semantics."
        return For

    def has_window_functions(self):
        "Return True if window functions are supported."
        return False

    def has_unaccent(self):
        "Return True if accent-insensitive matching is supported."
        return False

    def has_unaccent_indexable(self):
        "Return True if the unaccent function may be used in an index."
        return False

    def unaccent(self, value):
        # Default: no accent stripping available, return value as-is.
        return value

    def has_similarity(self):
        "Return True if string similarity measurement is supported."
        return False

    def similarity(self, column, value):
        "Return an expression measuring similarity of column to value."
        raise NotImplementedError

    def has_search_full_text(self):
        "Return True if full-text search is supported."
        return False

    def format_full_text(self, *documents, language=None):
        # Default implementation: plain newline concatenation.
        return '\n'.join(documents)

    def format_full_text_query(self, query, language=None):
        "Return query formatted for full-text search."
        raise NotImplementedError

    def search_full_text(self, document, query):
        "Return the expression matching query against document."
        raise NotImplementedError

    def rank_full_text(self, document, query, normalize=None):
        "Return the expression that ranks query on document"
        raise NotImplementedError

    @classmethod
    def has_sequence(cls):
        "Return True if the backend supports sequences."
        return False

    def sequence_exist(self, connection, name):
        "Return True if sequence name exists."
        if not self.has_sequence():
            return
        raise NotImplementedError

    def sequence_create(
            self, connection, name, number_increment=1, start_value=1):
        "Create sequence name."
        if not self.has_sequence():
            return
        raise NotImplementedError

    def sequence_update(
            self, connection, name, number_increment=1, start_value=1):
        "Modify increment and start value of sequence name."
        if not self.has_sequence():
            return
        raise NotImplementedError

    def sequence_rename(self, connection, old_name, new_name):
        "Rename sequence old_name into new_name."
        if not self.has_sequence():
            return
        raise NotImplementedError

    def sequence_delete(self, connection, name):
        "Delete sequence name."
        if not self.has_sequence():
            return
        raise NotImplementedError

    def sequence_next_number(self, connection, name):
        "Return the next number of sequence name."
        if not self.has_sequence():
            return
        raise NotImplementedError

    def has_channel(self):
        "Return True if the backend supports notification channels."
        return False

    def sql_type(self, type_):
        # Map a generic SQL type to the backend SQLType; optional.
        pass

    def sql_format(self, type_, value):
        # Adapt value for storage in a column of type_; optional.
        pass

    def json_get(self, column, key=None):
        "Return an expression getting key from the JSON column."
        raise NotImplementedError

    def json_key_exists(self, column, key):
        "Return an expression testing if key exists in the JSON column."
        raise NotImplementedError

    def json_any_keys_exist(self, column, keys):
        "Return an expression testing if any of keys exist in the column."
        raise NotImplementedError

    def json_all_keys_exist(self, column, keys):
        "Return an expression testing if all of keys exist in the column."
        raise NotImplementedError

    def json_contains(self, column, json):
        "Return an expression testing if column contains json."
        raise NotImplementedError
|
||||
12
backend/postgresql/__init__.py
Executable file
12
backend/postgresql/__init__.py
Executable file
@@ -0,0 +1,12 @@
|
||||
# This file is part of Tryton. The COPYRIGHT file at the top level of
# this repository contains the full copyright notices and license terms.

from .database import (
    Database, DatabaseDataError, DatabaseIntegrityError,
    DatabaseOperationalError, DatabaseTimeoutError)
from .table import TableHandler

# __all__ must list *names* (strings), not the objects themselves:
# non-string entries make `from trytond.backend.postgresql import *`
# raise TypeError under Python 3.
__all__ = [
    'Database', 'TableHandler',
    'DatabaseIntegrityError', 'DatabaseDataError', 'DatabaseOperationalError',
    'DatabaseTimeoutError']
|
||||
BIN
backend/postgresql/__pycache__/__init__.cpython-311.opt-1.pyc
Executable file
BIN
backend/postgresql/__pycache__/__init__.cpython-311.opt-1.pyc
Executable file
Binary file not shown.
BIN
backend/postgresql/__pycache__/__init__.cpython-311.pyc
Executable file
BIN
backend/postgresql/__pycache__/__init__.cpython-311.pyc
Executable file
Binary file not shown.
BIN
backend/postgresql/__pycache__/database.cpython-311.opt-1.pyc
Executable file
BIN
backend/postgresql/__pycache__/database.cpython-311.opt-1.pyc
Executable file
Binary file not shown.
BIN
backend/postgresql/__pycache__/database.cpython-311.pyc
Executable file
BIN
backend/postgresql/__pycache__/database.cpython-311.pyc
Executable file
Binary file not shown.
BIN
backend/postgresql/__pycache__/table.cpython-311.opt-1.pyc
Executable file
BIN
backend/postgresql/__pycache__/table.cpython-311.opt-1.pyc
Executable file
Binary file not shown.
BIN
backend/postgresql/__pycache__/table.cpython-311.pyc
Executable file
BIN
backend/postgresql/__pycache__/table.cpython-311.pyc
Executable file
Binary file not shown.
814
backend/postgresql/database.py
Executable file
814
backend/postgresql/database.py
Executable file
@@ -0,0 +1,814 @@
|
||||
# This file is part of Tryton. The COPYRIGHT file at the top level of
|
||||
# this repository contains the full copyright notices and license terms.
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
import warnings
|
||||
from collections import defaultdict
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
from itertools import chain, repeat
|
||||
from threading import RLock
|
||||
|
||||
from psycopg2 import Binary, connect
|
||||
from psycopg2.extensions import (
|
||||
ISOLATION_LEVEL_REPEATABLE_READ, UNICODE, AsIs, cursor, register_adapter,
|
||||
register_type)
|
||||
from psycopg2.pool import PoolError, ThreadedConnectionPool
|
||||
from psycopg2.sql import SQL, Identifier
|
||||
|
||||
try:
|
||||
from psycopg2.extensions import PYDATE, PYDATETIME, PYINTERVAL, PYTIME
|
||||
except ImportError:
|
||||
PYDATE, PYDATETIME, PYTIME, PYINTERVAL = None, None, None, None
|
||||
from psycopg2 import DataError as DatabaseDataError
|
||||
from psycopg2 import IntegrityError as DatabaseIntegrityError
|
||||
from psycopg2 import InterfaceError
|
||||
from psycopg2 import OperationalError as DatabaseOperationalError
|
||||
from psycopg2 import ProgrammingError
|
||||
from psycopg2.errors import QueryCanceled as DatabaseTimeoutError
|
||||
from psycopg2.extras import register_default_json, register_default_jsonb
|
||||
from sql import Cast, Flavor, For, Table
|
||||
from sql.conditionals import Coalesce
|
||||
from sql.functions import Function
|
||||
from sql.operators import BinaryOperator, Concat
|
||||
|
||||
from trytond.backend.database import DatabaseInterface, SQLType
|
||||
from trytond.config import config, parse_uri
|
||||
from trytond.tools.gevent import is_gevent_monkey_patched
|
||||
|
||||
__all__ = [
|
||||
'Database',
|
||||
'DatabaseIntegrityError', 'DatabaseDataError', 'DatabaseOperationalError',
|
||||
'DatabaseTimeoutError']
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
os.environ['PGTZ'] = os.environ.get('TZ', '')
|
||||
_timeout = config.getint('database', 'timeout')
|
||||
_minconn = config.getint('database', 'minconn', default=1)
|
||||
_maxconn = config.getint('database', 'maxconn', default=64)
|
||||
_default_name = config.get('database', 'default_name', default='template1')
|
||||
|
||||
|
||||
def unescape_quote(s):
    """Undo SQL identifier quoting on *s*.

    A quoted identifier wraps the name in double quotes and doubles any
    embedded quote character; remove exactly one surrounding quote per
    side and collapse doubled quotes.  Unquoted values are returned
    unchanged.
    """
    if s.startswith('"') and s.endswith('"'):
        # Slice off exactly one quote at each end.  str.strip('"') would
        # also eat doubled quotes that belong to the escaped content,
        # corrupting names such as '"""a"' (inner value: '"a').
        return s[1:-1].replace('""', '"')
    return s
|
||||
|
||||
|
||||
def replace_special_values(s, **mapping):
    "Substitute every '$name' placeholder in *s* by its mapped value."
    result = s
    for placeholder, replacement in mapping.items():
        result = result.replace('$' + placeholder, replacement)
    return result
|
||||
|
||||
|
||||
class LoggingCursor(cursor):
    "psycopg2 cursor that logs each executed query at DEBUG level."

    def execute(self, sql, args=None):
        # mogrify renders the final SQL with parameters bound; only pay
        # that cost when DEBUG logging is actually enabled.
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug(self.mogrify(sql, args))
        cursor.execute(self, sql, args)


class ForSkipLocked(For):
    "FOR UPDATE/SHARE clause rendered with SKIP LOCKED appended."

    def __str__(self):
        # NOWAIT and SKIP LOCKED are mutually exclusive in PostgreSQL.
        assert not self.nowait, "Can not use both NO WAIT and SKIP LOCKED"
        # The conditional repeats the assert's guard so the rendered SQL
        # stays valid even when assertions are stripped (python -O).
        return super().__str__() + (' SKIP LOCKED' if not self.nowait else '')
|
||||
|
||||
|
||||
class Unaccent(Function):
    # Accent-stripping SQL function; its name is configurable so an
    # IMMUTABLE wrapper can be substituted for indexing.
    __slots__ = ()
    _function = config.get('database', 'unaccent_function', default='unaccent')


class Similarity(Function):
    # pg_trgm similarity function; name configurable like Unaccent.
    __slots__ = ()
    _function = config.get(
        'database', 'similarity_function', default='similarity')


class Match(BinaryOperator):
    # tsvector @@ tsquery full-text match operator.
    __slots__ = ()
    _operator = '@@'


class ToTsvector(Function):
    __slots__ = ()
    _function = 'to_tsvector'


class Setweight(Function):
    __slots__ = ()
    _function = 'setweight'


class TsQuery(Function):
    # Marker base class: expressions that are already tsqueries.
    __slots__ = ()


class ToTsQuery(TsQuery):
    __slots__ = ()
    _function = 'to_tsquery'


class PlainToTsQuery(TsQuery):
    __slots__ = ()
    _function = 'plainto_tsquery'


class PhraseToTsQuery(TsQuery):
    __slots__ = ()
    _function = 'phraseto_tsquery'


class WebsearchToTsQuery(TsQuery):
    # Web-search style parser; available since PostgreSQL 11.
    __slots__ = ()
    _function = 'websearch_to_tsquery'


class TsRank(Function):
    __slots__ = ()
    _function = 'ts_rank'
|
||||
|
||||
|
||||
class AdvisoryLock(Function):
    "pg_advisory_xact_lock(key): block until the transaction lock is held."
    # __slots__ added for consistency with every other Function subclass
    # in this module; instances carry no extra attributes.
    __slots__ = ()
    _function = 'pg_advisory_xact_lock'


class TryAdvisoryLock(Function):
    "pg_try_advisory_xact_lock(key): non-blocking variant returning bool."
    __slots__ = ()
    _function = 'pg_try_advisory_xact_lock'
|
||||
|
||||
|
||||
class JSONBExtractPath(Function):
    # jsonb_extract_path(jsonb, VARIADIC keys).
    __slots__ = ()
    _function = 'jsonb_extract_path'


class JSONKeyExists(BinaryOperator):
    # jsonb ? key: top-level key existence test.
    __slots__ = ()
    _operator = '?'
|
||||
|
||||
|
||||
class _BinaryOperatorArray(BinaryOperator):
    "Binary Operator that convert list into Array"

    @property
    def _operands(self):
        # A list right operand is passed as a bound parameter (array),
        # so it contributes no inline SQL of its own.
        if isinstance(self.right, list):
            return (self.left, None)
        return super()._operands

    @property
    def params(self):
        params = super().params
        if isinstance(self.right, list):
            # Replace the last parameter with the raw list; psycopg2
            # adapts Python lists to PostgreSQL arrays.
            params = params[:-1] + (self.right,)
        return params


class JSONAnyKeyExist(_BinaryOperatorArray):
    # jsonb ?| keys: any of the keys present.
    __slots__ = ()
    _operator = '?|'


class JSONAllKeyExist(_BinaryOperatorArray):
    # jsonb ?& keys: all of the keys present.
    __slots__ = ()
    _operator = '?&'


class JSONContains(BinaryOperator):
    # jsonb @> jsonb: containment test.
    __slots__ = ()
    _operator = '@>'
|
||||
|
||||
|
||||
class Database(DatabaseInterface):
    "PostgreSQL implementation of the Tryton database interface."

    index_translators = []
    # Guards the per-process instance cache below.
    _lock = RLock()
    # pid -> {database name -> Database instance}.
    _databases = defaultdict(dict)
    _connpool = None
    # hostname -> cached list of Tryton database names (+ timestamps).
    _list_cache = {}
    _list_cache_timestamp = {}
    _search_path = None
    _current_user = None
    _has_returning = None
    _has_select_for_skip_locked = None
    # database name -> proc name -> property -> cached pg_proc value.
    _has_proc = defaultdict(lambda: defaultdict(dict))
    # database name -> extension name -> installed?
    _extensions = defaultdict(dict)
    # database name -> language code -> text search configuration.
    _search_full_text_languages = defaultdict(dict)
    flavor = Flavor(ilike=True)

    # Generic SQL type -> PostgreSQL (base, concrete) SQLType.
    TYPES_MAPPING = {
        'SMALLINT': SQLType('INT2', 'INT2'),
        'BIGINT': SQLType('INT8', 'INT8'),
        'BLOB': SQLType('BYTEA', 'BYTEA'),
        'DATETIME': SQLType('TIMESTAMP', 'TIMESTAMP(0)'),
        'REAL': SQLType('FLOAT4', 'FLOAT4'),
        'FLOAT': SQLType('FLOAT8', 'FLOAT8'),
        'FULLTEXT': SQLType('TSVECTOR', 'TSVECTOR'),
        'INTEGER': SQLType('INT4', 'INT4'),
        'JSON': SQLType('JSONB', 'JSONB'),
        'TIMESTAMP': SQLType('TIMESTAMP', 'TIMESTAMP(6)'),
        }
|
||||
|
||||
    def __new__(cls, name=_default_name):
        """Return the per-process singleton Database for *name*.

        Instances are cached per PID; idle databases whose pool has no
        checked-out connection are closed after the configured timeout.
        """
        with cls._lock:
            now = datetime.now()
            databases = cls._databases[os.getpid()]
            # Evict instances unused for longer than the timeout, except
            # the requested one and those with connections in use.
            for database in list(databases.values()):
                if ((now - database._last_use).total_seconds() > _timeout
                        and database.name != name
                        and not database._connpool._used):
                    database.close()
            if name in databases:
                inst = databases[name]
            else:
                inst = DatabaseInterface.__new__(cls, name=name)
                try:
                    inst._connpool = ThreadedConnectionPool(
                        _minconn, _maxconn, **cls._connection_params(name),
                        cursor_factory=LoggingCursor)
                except Exception:
                    logger.error(
                        'connection to "%s" failed', name, exc_info=True)
                    raise
                else:
                    logger.info('connection to "%s" succeeded', name)
                databases[name] = inst
            inst._last_use = datetime.now()
            return inst

    def __init__(self, name=_default_name):
        super(Database, self).__init__(name)
|
||||
|
||||
    @classmethod
    def _connection_params(cls, name):
        "Build psycopg2 connection keyword arguments for database *name*."
        uri = parse_uri(config.get('database', 'uri'))
        if uri.path and uri.path != '/':
            warnings.warn("The path specified in the URI will be overridden")
        params = {
            # Target the requested database by replacing the URI path.
            'dsn': uri._replace(path=name).geturl(),
            'fallback_application_name': os.environ.get(
                'TRYTOND_APPNAME', 'trytond'),
            }
        return params

    def connect(self):
        # Connections are pooled per instance; nothing to open here.
        return self
|
||||
|
||||
    def get_connection(
            self, autocommit=False, readonly=False, statement_timeout=None):
        """Fetch a pooled connection configured for this request.

        Retries while the pool is exhausted, validates each connection
        and discards broken ones before returning.
        """
        retry = max(config.getint('database', 'retry'), _maxconn)
        for count in range(retry, -1, -1):
            try:
                conn = self._connpool.getconn()
            except (PoolError, DatabaseOperationalError):
                # Pool exhausted or server unavailable: wait and retry
                # until the retry budget runs out.
                if count and not self._connpool.closed:
                    logger.info('waiting a connection')
                    time.sleep(1)
                    continue
                raise
            except Exception:
                logger.error(
                    'connection to "%s" failed', self.name, exc_info=True)
                raise
            try:
                conn.set_session(
                    isolation_level=ISOLATION_LEVEL_REPEATABLE_READ,
                    readonly=readonly,
                    autocommit=autocommit)
                with conn.cursor() as cur:
                    if statement_timeout:
                        # statement_timeout is given in seconds;
                        # PostgreSQL expects milliseconds.
                        cur.execute('SET statement_timeout=%s' %
                            (statement_timeout * 1000))
                    else:
                        # Detect disconnection
                        cur.execute('SELECT 1')
            except DatabaseOperationalError:
                # Stale/broken connection: drop it and try another.
                self._connpool.putconn(conn, close=True)
                continue
            break
        return conn

    def put_connection(self, connection, close=False):
        "Reset *connection* and hand it back to the pool."
        try:
            connection.reset()
        except InterfaceError:
            # Connection already dead; the pool will discard it.
            pass
        self._connpool.putconn(connection, close=close)

    def close(self):
        "Close every pooled connection and forget this instance."
        with self._lock:
            logger.info('disconnection from "%s"', self.name)
            self._connpool.closeall()
            self._databases[os.getpid()].pop(self.name)
|
||||
|
||||
    @classmethod
    def create(cls, connection, database_name, template='template0'):
        "Create *database_name* from *template*; invalidate the list cache."
        cursor = connection.cursor()
        cursor.execute(
            SQL(
                "CREATE DATABASE {} TEMPLATE {} ENCODING 'unicode'")
            .format(
                Identifier(database_name),
                Identifier(template)))
        connection.commit()
        cls._list_cache.clear()

    @classmethod
    def drop(cls, connection, database_name):
        "Drop *database_name* and invalidate the related caches."
        cursor = connection.cursor()
        cursor.execute(SQL("DROP DATABASE {}")
            .format(Identifier(database_name)))
        cls._list_cache.clear()
        cls._has_proc.pop(database_name, None)
        cls._search_full_text_languages.pop(database_name, None)

    def get_version(self, connection):
        "Return the PostgreSQL server version as (major, minor, patch)."
        version = connection.server_version
        # server_version packs the version as MMmmpp, e.g. 90605 for
        # 9.6.5 (for PostgreSQL >= 10 the middle component is 0).
        major, rest = divmod(int(version), 10000)
        minor, patch = divmod(rest, 100)
        return (major, minor, patch)
|
||||
|
||||
    def list(self, hostname=None):
        """Return the names of Tryton databases on the server.

        Results are cached per hostname for the session timeout.
        """
        now = time.time()
        timeout = config.getint('session', 'timeout')
        res = self.__class__._list_cache.get(hostname)
        timestamp = self.__class__._list_cache_timestamp.get(hostname, now)
        if res and abs(timestamp - now) < timeout:
            return res

        connection = self.get_connection()
        try:
            cursor = connection.cursor()
            cursor.execute('SELECT datname FROM pg_database '
                'WHERE datistemplate = false ORDER BY datname')
            res = []
            for db_name, in cursor:
                # Probe each database; skip any we can not connect to
                # or that is not a Tryton database.
                try:
                    conn = connect(**self._connection_params(db_name))
                    try:
                        with conn:
                            if self._test(conn, hostname=hostname):
                                res.append(db_name)
                    finally:
                        conn.close()
                except Exception:
                    logger.debug(
                        'Test failed for "%s"', db_name, exc_info=True)
                    continue
        finally:
            self.put_connection(connection, close=True)

        self.__class__._list_cache[hostname] = res
        self.__class__._list_cache_timestamp[hostname] = now
        return res
|
||||
|
||||
    def init(self):
        "Create the base schema and register the 'ir' and 'res' modules."
        # Imported here to avoid a circular import at module load time.
        from trytond.modules import get_module_info

        connection = self.get_connection()
        try:
            cursor = connection.cursor()
            sql_file = os.path.join(os.path.dirname(__file__), 'init.sql')
            with open(sql_file) as fp:
                # Execute the bootstrap script one statement at a time.
                for line in fp.read().split(';'):
                    if (len(line) > 0) and (not line.isspace()):
                        cursor.execute(line)

            for module in ['ir', 'res']:
                info = get_module_info(module)
                cursor.execute('INSERT INTO ir_module '
                    '(create_uid, create_date, name, state) '
                    'VALUES (%s, now(), %s, %s) '
                    'RETURNING id',
                    (0, module, 'to activate'))
                module_id = cursor.fetchone()[0]
                for dependency in info.get('depends', []):
                    cursor.execute('INSERT INTO ir_module_dependency '
                        '(create_uid, create_date, module, name) '
                        'VALUES (%s, now(), %s, %s)',
                        (0, module_id, dependency))

            connection.commit()
        finally:
            self.put_connection(connection)
|
||||
|
||||
    def test(self, hostname=None):
        "Return True if this database is a usable Tryton database."
        try:
            connection = self.get_connection()
        except Exception:
            logger.debug('Test failed for "%s"', self.name, exc_info=True)
            return False
        try:
            return self._test(connection, hostname=hostname)
        finally:
            self.put_connection(connection, close=True)

    @classmethod
    def _test(cls, connection, hostname=None):
        "Check the core tables exist and *hostname* (if set) is allowed."
        cursor = connection.cursor()
        tables = ('ir_model', 'ir_model_field', 'ir_ui_view', 'ir_ui_menu',
            'res_user', 'res_group', 'ir_module', 'ir_module_dependency',
            'ir_translation', 'ir_lang', 'ir_configuration')
        cursor.execute('SELECT table_name FROM information_schema.tables '
            'WHERE table_name IN %s', (tables,))
        if len(cursor.fetchall()) != len(tables):
            return False
        if hostname:
            try:
                cursor.execute(
                    'SELECT hostname FROM ir_configuration')
                hostnames = {h for h, in cursor if h}
                if hostnames and hostname not in hostnames:
                    return False
            except ProgrammingError:
                # Old schema without the hostname column: accept.
                pass
        return True
|
||||
|
||||
    def nextid(self, connection, table, count=1):
        "Return the next *count* id(s) from *table*'s id sequence."
        cursor = connection.cursor()
        # format(%I) quotes the table name server-side to locate its
        # serial sequence safely.
        cursor.execute(
            "SELECT nextval(pg_get_serial_sequence(format(%s, %s), %s)) "
            "FROM generate_series(1, %s)",
            ('%I', table, 'id', count))
        if count == 1:
            return cursor.fetchone()[0]
        else:
            return [id for id, in cursor]

    def setnextid(self, connection, table, value):
        "Set *table*'s id sequence to *value*."
        cursor = connection.cursor()
        cursor.execute(
            "SELECT setval(pg_get_serial_sequence(format(%s, %s), %s), %s)",
            ('%I', table, 'id', value))

    def currid(self, connection, table):
        "Return the last value of *table*'s id sequence."
        cursor = connection.cursor()
        cursor.execute(
            "SELECT pg_get_serial_sequence(format(%s, %s), %s)",
            ('%I', table, 'id'))
        sequence_name, = cursor.fetchone()
        # sequence_name comes quoted from pg_get_serial_sequence, so it
        # is safe to interpolate directly.
        cursor.execute(f"SELECT last_value FROM {sequence_name}")
        return cursor.fetchone()[0]

    def lock(self, connection, table):
        "Take an exclusive lock on *table* without waiting."
        cursor = connection.cursor()
        cursor.execute(SQL('LOCK {} IN EXCLUSIVE MODE NOWAIT').format(
                Identifier(table)))

    def lock_id(self, id, timeout=None):
        "Return an advisory-lock expression for *id*."
        if not timeout:
            # Without a timeout, fail fast rather than block.
            return TryAdvisoryLock(id)
        else:
            return AdvisoryLock(id)
|
||||
|
||||
    def has_constraint(self, constraint):
        # PostgreSQL enforces every constraint Tryton generates.
        return True

    def has_multirow_insert(self):
        return True

    def get_table_schema(self, connection, table_name):
        "Return the first schema on the search path containing *table_name*."
        cursor = connection.cursor()
        for schema in self.search_path:
            cursor.execute('SELECT 1 '
                'FROM information_schema.tables '
                'WHERE table_name = %s AND table_schema = %s',
                (table_name, schema))
            if cursor.rowcount:
                return schema
|
||||
|
||||
    @property
    def current_user(self):
        "PostgreSQL role in use (cached after the first query)."
        if self._current_user is None:
            connection = self.get_connection()
            try:
                cursor = connection.cursor()
                cursor.execute('SELECT current_user')
                self._current_user = cursor.fetchone()[0]
            finally:
                self.put_connection(connection)
        return self._current_user

    @property
    def search_path(self):
        "Schema search path as a list of names (cached after first query)."
        if self._search_path is None:
            connection = self.get_connection()
            try:
                cursor = connection.cursor()
                cursor.execute('SHOW search_path')
                path, = cursor.fetchone()
                special_values = {
                    'user': self.current_user,
                    }
                # Expand "$user" and unquote each schema name.
                self._search_path = [
                    unescape_quote(replace_special_values(
                            p.strip(), **special_values))
                    for p in path.split(',')]
            finally:
                self.put_connection(connection)
        return self._search_path
|
||||
|
||||
    def has_returning(self):
        "Return True when the server supports RETURNING (cached)."
        if self._has_returning is None:
            connection = self.get_connection()
            try:
                # RETURNING clause is available since PostgreSQL 8.2
                self._has_returning = self.get_version(connection) >= (8, 2)
            finally:
                self.put_connection(connection)
        return self._has_returning

    def has_select_for(self):
        return True

    def get_select_for_skip_locked(self):
        "Return the For class to use, preferring SKIP LOCKED if available."
        if self._has_select_for_skip_locked is None:
            connection = self.get_connection()
            try:
                # SKIP LOCKED clause is available since PostgreSQL 9.5
                self._has_select_for_skip_locked = (
                    self.get_version(connection) >= (9, 5))
            finally:
                self.put_connection(connection)
        if self._has_select_for_skip_locked:
            return ForSkipLocked
        else:
            return For

    def has_window_functions(self):
        return True
|
||||
|
||||
    @classmethod
    def has_sequence(cls):
        return True

    def has_proc(self, name, property='oid'):
        """Return pg_proc.*property* for function *name*.

        Falsy when the function does not exist; results are cached per
        database/function/property.
        """
        if (name in self._has_proc[self.name]
                and property in self._has_proc[self.name][name]):
            return self._has_proc[self.name][name][property]
        connection = self.get_connection()
        result = False
        try:
            cursor = connection.cursor()
            cursor.execute(
                SQL('SELECT {} FROM pg_proc WHERE proname=%s').format(
                    Identifier(property)), (name,))
            result = cursor.fetchone()
            if result:
                # Unpack the single selected column.
                result, = result
        finally:
            self.put_connection(connection)
        self._has_proc[self.name][name][property] = result
        return result
|
||||
|
||||
    def has_unaccent(self):
        # Available when the unaccent() function is installed.
        return self.has_proc(Unaccent._function)

    def has_unaccent_indexable(self):
        # unaccent() is usable in an index only when it is marked
        # IMMUTABLE (provolatile == 'i').
        return self.has_proc(Unaccent._function, 'provolatile') == 'i'

    def has_similarity(self):
        # Available when pg_trgm's similarity() function is installed.
        return self.has_proc(Similarity._function)

    def similarity(self, column, value):
        return Similarity(column, value)

    def has_search_full_text(self):
        return True
|
||||
|
||||
    def _search_full_text_language(self, language):
        "Return the text search configuration for *language* (cached)."
        languages = self._search_full_text_languages[self.name]
        if language not in languages:
            lang = Table('ir_lang')
            connection = self.get_connection()
            try:
                cursor = connection.cursor()
                # Fall back to the 'simple' configuration when the
                # language does not define one.
                cursor.execute(*lang.select(
                        Coalesce(lang.pg_text_search, 'simple'),
                        where=lang.code == language,
                        limit=1))
                config_name, = cursor.fetchone()
            finally:
                self.put_connection(connection)
            languages[language] = config_name
        else:
            config_name = languages[language]
        return config_name
|
||||
|
||||
    def format_full_text(self, *documents, language=None):
        """Build a weighted tsvector expression from *documents*.

        With several documents the first quarter gets weight A, the
        next B, then C, and the remainder D; a single document is
        left unweighted.
        """
        size = max(len(documents) // 4, 1)
        if len(documents) > 1:
            weights = chain(
                ['A'] * size, ['B'] * size, ['C'] * size, repeat('D'))
        else:
            weights = [None]
        expression = None
        if language:
            config_name = self._search_full_text_language(language)
        else:
            config_name = None
        for document, weight in zip(documents, weights):
            if not document:
                continue
            if config_name:
                ts_vector = ToTsvector(config_name, document)
            else:
                ts_vector = ToTsvector('simple', document)
            if weight:
                ts_vector = Setweight(ts_vector, weight)
            # Concatenate successive tsvectors into a single expression.
            if expression is None:
                expression = ts_vector
            else:
                expression = Concat(expression, ts_vector)
        return expression
|
||||
|
||||
    def format_full_text_query(self, query, language=None):
        "Convert a plain-text *query* into a tsquery expression."
        connection = self.get_connection()
        try:
            version = self.get_version(connection)
        finally:
            self.put_connection(connection)
        if not isinstance(query, TsQuery):
            # websearch_to_tsquery exists since PostgreSQL 11; fall
            # back to the plain parser on older servers.
            if version >= (11, 0):
                ToTsQuery = WebsearchToTsQuery
            else:
                ToTsQuery = PlainToTsQuery
            if language:
                config_name = self._search_full_text_language(language)
            else:
                config_name = 'simple'
            query = ToTsQuery(config_name, query)
        return query

    def search_full_text(self, document, query):
        "Return the @@ match expression of *query* against *document*."
        return Match(document, query)

    def rank_full_text(self, document, query, normalize=None):
        "Return the expression that ranks query on document"
        # TODO: weights and cover density
        norm_int = 0
        if normalize:
            # Map symbolic names to ts_rank normalization bit flags.
            values = {
                'document log': 1,
                'document': 2,
                'mean': 4,
                'word': 8,
                'word log': 16,
                'rank': 32,
                }
            for norm in normalize:
                norm_int |= values.get(norm, 0)
        return TsRank(document, query, norm_int)
|
||||
|
||||
    def sql_type(self, type_):
        "Map a generic SQL type to its PostgreSQL SQLType."
        if type_ in self.TYPES_MAPPING:
            return self.TYPES_MAPPING[type_]
        if type_.startswith('VARCHAR'):
            # Preserve any length suffix, e.g. VARCHAR(64).
            return SQLType('VARCHAR', type_)
        return SQLType(type_, type_)

    def sql_format(self, type_, value):
        "Adapt *value* for storage in a column of *type_*."
        if type_ == 'BLOB':
            if value is not None:
                # psycopg2 needs Binary wrapping for BYTEA parameters.
                return Binary(value)
        return value

    def unaccent(self, value):
        "Wrap *value* with unaccent() when the function is installed."
        if self.has_unaccent():
            return Unaccent(value)
        return value
|
||||
|
||||
def sequence_exist(self, connection, name):
|
||||
cursor = connection.cursor()
|
||||
for schema in self.search_path:
|
||||
cursor.execute('SELECT 1 '
|
||||
'FROM information_schema.sequences '
|
||||
'WHERE sequence_name = %s AND sequence_schema = %s',
|
||||
(name, schema))
|
||||
if cursor.rowcount:
|
||||
return True
|
||||
return False
|
||||
|
||||
def sequence_create(
|
||||
self, connection, name, number_increment=1, start_value=1):
|
||||
cursor = connection.cursor()
|
||||
|
||||
cursor.execute(
|
||||
SQL("CREATE SEQUENCE {} INCREMENT BY %s START WITH %s").format(
|
||||
Identifier(name)),
|
||||
(number_increment, start_value))
|
||||
|
||||
def sequence_update(
|
||||
self, connection, name, number_increment=1, start_value=1):
|
||||
cursor = connection.cursor()
|
||||
cursor.execute(
|
||||
SQL("ALTER SEQUENCE {} INCREMENT BY %s RESTART WITH %s").format(
|
||||
Identifier(name)),
|
||||
(number_increment, start_value))
|
||||
|
||||
def sequence_rename(self, connection, old_name, new_name):
    """Rename sequence *old_name* to *new_name*.

    The rename is skipped when the source sequence does not exist or a
    sequence with the target name already exists.
    """
    cursor = connection.cursor()
    if (self.sequence_exist(connection, old_name)
            and not self.sequence_exist(connection, new_name)):
        # Use ALTER SEQUENCE, the statement PostgreSQL documents for
        # renaming sequences (ALTER TABLE only works on them as a legacy
        # behavior).
        cursor.execute(
            SQL("ALTER SEQUENCE {} RENAME TO {}").format(
                Identifier(old_name),
                Identifier(new_name)))
|
||||
|
||||
def sequence_delete(self, connection, name):
    """Drop the sequence *name*."""
    statement = SQL("DROP SEQUENCE {}").format(Identifier(name))
    connection.cursor().execute(statement)
|
||||
|
||||
def sequence_next_number(self, connection, name):
    """Return the next value sequence *name* will deliver.

    The sequence is only inspected; no value is consumed.
    """
    cursor = connection.cursor()
    version = self.get_version(connection)
    if version >= (10, 0):
        # PostgreSQL >= 10 exposes the increment via the pg_sequences view
        cursor.execute(
            'SELECT increment_by '
            'FROM pg_sequences '
            'WHERE sequencename=%s',
            (name,))
        increment, = cursor.fetchone()
        # If nextval was never called, last_value is the next value;
        # otherwise the next call adds the increment
        cursor.execute(
            SQL(
                'SELECT CASE WHEN NOT is_called THEN last_value '
                'ELSE last_value + %s '
                'END '
                'FROM {}').format(Identifier(name)),
            (increment,))
    else:
        # Older versions store increment_by directly on the sequence relation
        cursor.execute(
            SQL(
                'SELECT CASE WHEN NOT is_called THEN last_value '
                'ELSE last_value + increment_by '
                'END '
                'FROM {}').format(Identifier(name)))
    return cursor.fetchone()[0]
|
||||
|
||||
def has_channel(self):
    """PostgreSQL supports notification channels (LISTEN/NOTIFY)."""
    return True
|
||||
|
||||
def has_extension(self, extension_name):
    """Return whether *extension_name* is installed, cached per database."""
    cache = self._extensions[self.name]
    if extension_name in cache:
        return cache[extension_name]

    connection = self.get_connection()
    installed = False
    try:
        cursor = connection.cursor()
        cursor.execute(
            "SELECT 1 FROM pg_extension WHERE extname=%s",
            (extension_name,))
        installed = bool(cursor.rowcount)
    finally:
        # Always hand the connection back to the pool
        self.put_connection(connection)
    cache[extension_name] = installed
    return installed
|
||||
|
||||
def json_get(self, column, key=None):
    """Return *column* cast to jsonb, optionally extracting *key*."""
    jsonb_column = Cast(column, 'jsonb')
    return JSONBExtractPath(jsonb_column, key) if key else jsonb_column
|
||||
|
||||
def json_key_exists(self, column, key):
    """Return an expression testing that *key* exists in the JSON column."""
    jsonb_column = Cast(column, 'jsonb')
    return JSONKeyExists(jsonb_column, key)
|
||||
|
||||
def json_any_keys_exist(self, column, keys):
    """Return an expression testing that any of *keys* exist in the column."""
    jsonb_column = Cast(column, 'jsonb')
    return JSONAnyKeyExist(jsonb_column, keys)
|
||||
|
||||
def json_all_keys_exist(self, column, keys):
    """Return an expression testing that all of *keys* exist in the column."""
    jsonb_column = Cast(column, 'jsonb')
    return JSONAllKeyExist(jsonb_column, keys)
|
||||
|
||||
def json_contains(self, column, json):
    """Return an expression testing that the column contains *json*."""
    left = Cast(column, 'jsonb')
    right = Cast(json, 'jsonb')
    return JSONContains(left, right)
|
||||
|
||||
|
||||
# Make psycopg2 decode text columns to str
register_type(UNICODE)
# Register Python converters for temporal types when psycopg2 provides them
if PYDATE:
    register_type(PYDATE)
if PYDATETIME:
    register_type(PYDATETIME)
if PYTIME:
    register_type(PYTIME)
if PYINTERVAL:
    register_type(PYINTERVAL)
# Send float and Decimal values as literal SQL numbers
register_adapter(float, lambda value: AsIs(repr(value)))
register_adapter(Decimal, lambda value: AsIs(str(value)))
|
||||
|
||||
|
||||
def convert_json(value):
    """Deserialize a JSON column value with Tryton's JSON decoder."""
    # Imported lazily to avoid a circular import at module load time
    from trytond.protocols.jsonrpc import JSONDecoder
    decoder = JSONDecoder()
    return json.loads(value, object_hook=decoder)
|
||||
|
||||
|
||||
# Decode json/jsonb columns with Tryton's decoder by default
register_default_json(loads=convert_json)
register_default_jsonb(loads=convert_json)

# Cooperate with gevent: poll connections instead of blocking the hub
if is_gevent_monkey_patched():
    from psycopg2.extensions import set_wait_callback
    from psycopg2.extras import wait_select
    set_wait_callback(wait_select)
|
||||
156
backend/postgresql/init.sql
Executable file
156
backend/postgresql/init.sql
Executable file
@@ -0,0 +1,156 @@
|
||||
-- Bootstrap schema created before any Tryton module is activated.
-- Every table uses an identity column for its id, constrained to be >= 0.

-- Per-database configuration (default language and hostname)
CREATE TABLE ir_configuration (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_configuration_id_positive CHECK(id >= 0),
    language VARCHAR,
    hostname VARCHAR,
    PRIMARY KEY(id)
);

-- Registry of Tryton models
CREATE TABLE ir_model (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_model_id_positive CHECK(id >= 0),
    model VARCHAR NOT NULL,
    name VARCHAR,
    info TEXT,
    module VARCHAR,
    PRIMARY KEY(id)
);

ALTER TABLE ir_model ADD CONSTRAINT ir_model_model_uniq UNIQUE (model);

-- Fields of each model; the model reference cascades on delete
CREATE TABLE ir_model_field (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_model_field_id_positive CHECK(id >= 0),
    model VARCHAR NOT NULL,
    name VARCHAR NOT NULL,
    relation VARCHAR,
    field_description VARCHAR,
    ttype VARCHAR,
    help TEXT,
    module VARCHAR,
    "access" BOOL,
    PRIMARY KEY(id),
    FOREIGN KEY (model) REFERENCES ir_model(model) ON DELETE CASCADE
);

ALTER TABLE ir_model_field ADD CONSTRAINT ir_model_field_name_model_uniq UNIQUE (name, model);

-- Client view definitions
CREATE TABLE ir_ui_view (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_ui_view_id_positive CHECK(id >= 0),
    model VARCHAR NOT NULL,
    "type" VARCHAR,
    data TEXT NOT NULL,
    field_childs VARCHAR,
    priority INTEGER NOT NULL,
    PRIMARY KEY(id)
);

-- Menu tree; deleting a parent detaches its children
CREATE TABLE ir_ui_menu (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_ui_menu_id_positive CHECK(id >= 0),
    parent INTEGER,
    name VARCHAR NOT NULL,
    icon VARCHAR,
    PRIMARY KEY (id),
    FOREIGN KEY (parent) REFERENCES ir_ui_menu (id) ON DELETE SET NULL
);

-- Translated strings, keyed by name/type/language
CREATE TABLE ir_translation (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_translation_id_positive CHECK(id >= 0),
    lang VARCHAR,
    src TEXT,
    name VARCHAR NOT NULL,
    res_id INTEGER,
    value TEXT,
    "type" VARCHAR,
    module VARCHAR,
    fuzzy BOOLEAN NOT NULL,
    PRIMARY KEY(id)
);

-- Available languages
CREATE TABLE ir_lang (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_lang_id_positive CHECK(id >= 0),
    name VARCHAR NOT NULL,
    code VARCHAR NOT NULL,
    translatable BOOLEAN NOT NULL,
    parent VARCHAR,
    active BOOLEAN NOT NULL,
    direction VARCHAR NOT NULL,
    PRIMARY KEY(id)
);

-- User accounts; login must be unique
CREATE TABLE res_user (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT res_user_id_positive CHECK(id >= 0),
    name VARCHAR NOT NULL,
    active BOOLEAN NOT NULL,
    login VARCHAR NOT NULL,
    password VARCHAR,
    PRIMARY KEY(id)
);

ALTER TABLE res_user ADD CONSTRAINT res_user_login_key UNIQUE (login);

-- Seed the inactive root user with the reserved id 0
INSERT INTO res_user (id, login, password, name, active) VALUES (0, 'root', NULL, 'Root', False);

-- User groups
CREATE TABLE res_group (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT res_group_id_positive CHECK(id >= 0),
    name VARCHAR NOT NULL,
    PRIMARY KEY(id)
);

-- Many-to-many relation between users and groups
CREATE TABLE "res_user-res_group" (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT "res_user-res_group_id_positive" CHECK(id >= 0),
    "user" INTEGER NOT NULL,
    "group" INTEGER NOT NULL,
    FOREIGN KEY ("user") REFERENCES res_user (id) ON DELETE CASCADE,
    FOREIGN KEY ("group") REFERENCES res_group (id) ON DELETE CASCADE,
    PRIMARY KEY(id)
);

-- Installed modules and their activation state
CREATE TABLE ir_module (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_module_id_positive CHECK(id >= 0),
    create_uid INTEGER NOT NULL,
    create_date TIMESTAMP WITHOUT TIME ZONE NOT NULL,
    write_date TIMESTAMP WITHOUT TIME ZONE,
    write_uid INTEGER,
    name VARCHAR NOT NULL,
    state VARCHAR,
    PRIMARY KEY(id),
    FOREIGN KEY (create_uid) REFERENCES res_user ON DELETE SET NULL,
    FOREIGN KEY (write_uid) REFERENCES res_user ON DELETE SET NULL
);

ALTER TABLE ir_module ADD CONSTRAINT ir_module_name_uniq UNIQUE (name);

-- Dependencies between modules; removed with their module
CREATE TABLE ir_module_dependency (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_module_dependency_id_positive CHECK(id >= 0),
    create_uid INTEGER NOT NULL,
    create_date TIMESTAMP WITHOUT TIME ZONE NOT NULL,
    write_date TIMESTAMP WITHOUT TIME ZONE,
    write_uid INTEGER,
    name VARCHAR,
    module INTEGER,
    PRIMARY KEY(id),
    FOREIGN KEY (create_uid) REFERENCES res_user ON DELETE SET NULL,
    FOREIGN KEY (write_uid) REFERENCES res_user ON DELETE SET NULL,
    FOREIGN KEY (module) REFERENCES ir_module ON DELETE CASCADE
);

-- Cache invalidation timestamps per cache name
CREATE TABLE ir_cache (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_cache_id_positive CHECK(id >= 0),
    name VARCHAR NOT NULL,
    "timestamp" TIMESTAMP WITHOUT TIME ZONE,
    create_date TIMESTAMP WITHOUT TIME ZONE,
    create_uid INTEGER,
    write_date TIMESTAMP WITHOUT TIME ZONE,
    write_uid INTEGER
);
|
||||
731
backend/postgresql/table.py
Executable file
731
backend/postgresql/table.py
Executable file
@@ -0,0 +1,731 @@
|
||||
# This file is part of Tryton. The COPYRIGHT file at the top level of
|
||||
# this repository contains the full copyright notices and license terms.
|
||||
import logging
|
||||
import re
|
||||
|
||||
from psycopg2.sql import SQL, Identifier
|
||||
|
||||
from trytond.backend.table import (
|
||||
IndexTranslatorInterface, TableHandlerInterface)
|
||||
from trytond.transaction import Transaction
|
||||
|
||||
__all__ = ['TableHandler']

logger = logging.getLogger(__name__)
# Extracts the size from a 'VARCHAR(n)' SQL type definition
VARCHAR_SIZE_RE = re.compile(r'VARCHAR\(([0-9]+)\)')
|
||||
|
||||
|
||||
class TableHandler(TableHandlerInterface):
    """Create and migrate PostgreSQL tables for Tryton models.

    Column, constraint and index definitions are read lazily from the
    catalogs and cached on the instance; mutating methods invalidate the
    cache through _update_definitions().
    """
    # PostgreSQL identifier length limit (NAMEDATALEN - 1)
    namedatalen = 64
    # Concrete index translators register themselves here (see IndexMixin)
    index_translators = []

    def _init(self, model, history=False):
        """Ensure the model's table exists with identity id columns.

        Creates the table when missing, stores ownership information and
        migrates pre-identity serial columns to identity columns.
        """
        super()._init(model, history=history)
        # Lazy caches for catalog lookups
        self.__columns = None
        self.__constraints = None
        self.__fk_deltypes = None
        self.__indexes = None

        transaction = Transaction()
        cursor = transaction.connection.cursor()

        # Create new table if necessary
        if not self.table_exist(self.table_name):
            cursor.execute(SQL('CREATE TABLE {} ()').format(
                    Identifier(self.table_name)))
        self.table_schema = transaction.database.get_table_schema(
            transaction.connection, self.table_name)

        # Only the owner may COMMENT ON the table
        cursor.execute('SELECT tableowner = current_user FROM pg_tables '
            'WHERE tablename = %s AND schemaname = %s',
            (self.table_name, self.table_schema))
        self.is_owner, = cursor.fetchone()

        if model.__doc__ and self.is_owner:
            cursor.execute(SQL('COMMENT ON TABLE {} IS %s').format(
                    Identifier(self.table_name)),
                (model.__doc__,))

        def migrate_to_identity(table, column):
            # Replace a legacy serial default + sequence with an identity
            # column, preserving the sequence's position and parameters.
            previous_seq_name = f"{table}_{column}_seq"
            # Consume one value so the new sequence restarts after it
            cursor.execute(
                "SELECT nextval(format(%s, %s))", ('%I', previous_seq_name,))
            next_val, = cursor.fetchone()
            cursor.execute(
                "SELECT seqincrement, seqmax, seqmin, seqcache "
                "FROM pg_sequence WHERE seqrelid = %s::regclass",
                (previous_seq_name,))
            increment, s_max, s_min, cache = cursor.fetchone()
            # Previously created sequences were setting bigint values for those
            # identity column mimic the type of the underlying column
            if (s_max > 2 ** 31 - 1
                    and self._columns[column]['typname'] != 'int8'):
                s_max = 2 ** 31 - 1
            if (s_min < -(2 ** 31)
                    and self._columns[column]['typname'] != 'int8'):
                s_min = -(2 ** 31)
            cursor.execute(
                SQL("ALTER TABLE {} ALTER COLUMN {} DROP DEFAULT").format(
                    Identifier(table), Identifier(column)))
            cursor.execute(
                SQL("DROP SEQUENCE {}").format(
                    Identifier(previous_seq_name)))
            cursor.execute(
                SQL("ALTER TABLE {} ALTER COLUMN {} "
                    "ADD GENERATED BY DEFAULT AS IDENTITY").format(
                    Identifier(table), Identifier(column)))
            # Re-apply the old sequence parameters to the identity sequence
            cursor.execute(
                "SELECT pg_get_serial_sequence(format(%s, %s), %s)",
                ('%I', table, column))
            serial_seq_name, = cursor.fetchone()
            cursor.execute(
                (f"ALTER SEQUENCE {serial_seq_name} INCREMENT BY %s "
                    "MINVALUE %s MAXVALUE %s RESTART WITH %s CACHE %s"),
                (increment, s_min, s_max, next_val, cache))

        update_definitions = False
        if 'id' not in self._columns:
            update_definitions = True
            if not self.history:
                cursor.execute(
                    SQL(
                        "ALTER TABLE {} ADD COLUMN id INTEGER "
                        "GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY").format(
                        Identifier(self.table_name)))
            else:
                # History tables use __id as primary key instead
                cursor.execute(
                    SQL('ALTER TABLE {} ADD COLUMN id INTEGER')
                    .format(Identifier(self.table_name)))
        else:
            if not self.history and not self.__columns['id']['identity']:
                update_definitions = True
                migrate_to_identity(self.table_name, 'id')
        if self.history and '__id' not in self._columns:
            update_definitions = True
            cursor.execute(
                SQL(
                    "ALTER TABLE {} ADD COLUMN __id INTEGER "
                    "GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY").format(
                    Identifier(self.table_name)))
        elif self.history:
            if not self.__columns['__id']['identity']:
                update_definitions = True
                cursor.execute(
                    SQL("ALTER TABLE {} ALTER COLUMN id DROP DEFAULT").format(
                        Identifier(self.table_name)))
                migrate_to_identity(self.table_name, '__id')
        if update_definitions:
            self._update_definitions(columns=True)

    @classmethod
    def table_exist(cls, table_name):
        """Return True if *table_name* exists in the database."""
        transaction = Transaction()
        return bool(transaction.database.get_table_schema(
                transaction.connection, table_name))

    @classmethod
    def table_rename(cls, old_name, new_name):
        """Rename a table with its sequence and history table."""
        transaction = Transaction()
        cursor = transaction.connection.cursor()
        # Rename table
        if (cls.table_exist(old_name)
                and not cls.table_exist(new_name)):
            cursor.execute(SQL('ALTER TABLE {} RENAME TO {}').format(
                    Identifier(old_name), Identifier(new_name)))
        # Migrate from 6.6: rename old sequence
        old_sequence = old_name + '_id_seq'
        new_sequence = new_name + '_id_seq'
        transaction.database.sequence_rename(
            transaction.connection, old_sequence, new_sequence)
        # Rename history table
        old_history = old_name + "__history"
        new_history = new_name + "__history"
        if (cls.table_exist(old_history)
                and not cls.table_exist(new_history)):
            cursor.execute('ALTER TABLE "%s" RENAME TO "%s"'
                % (old_history, new_history))

    def column_exist(self, column_name):
        """Return True if the table has a column *column_name*."""
        return column_name in self._columns

    def column_rename(self, old_name, new_name):
        """Rename a column; warn instead of failing if the target exists."""
        cursor = Transaction().connection.cursor()
        if self.column_exist(old_name):
            if not self.column_exist(new_name):
                cursor.execute(SQL(
                        'ALTER TABLE {} RENAME COLUMN {} TO {}').format(
                        Identifier(self.table_name),
                        Identifier(old_name),
                        Identifier(new_name)))
                self._update_definitions(columns=True)
            else:
                logger.warning(
                    'Unable to rename column %s on table %s to %s.',
                    old_name, self.table_name, new_name)

    @property
    def _columns(self):
        # Lazily cached mapping: column name -> definition dict
        if self.__columns is None:
            cursor = Transaction().connection.cursor()
            self.__columns = {}
            # Fetch columns definitions from the table
            cursor.execute('SELECT '
                'column_name, udt_name, is_nullable, '
                'character_maximum_length, '
                'column_default, is_identity '
                'FROM information_schema.columns '
                'WHERE table_name = %s AND table_schema = %s',
                (self.table_name, self.table_schema))
            for column, typname, nullable, size, default, identity in cursor:
                self.__columns[column] = {
                    'typname': typname,
                    'notnull': True if nullable == 'NO' else False,
                    'size': size,
                    'default': default,
                    'identity': False if identity == 'NO' else True,
                    }
        return self.__columns

    @property
    def _constraints(self):
        # Lazily cached list of constraint names on the table
        if self.__constraints is None:
            cursor = Transaction().connection.cursor()
            # fetch constraints for the table
            cursor.execute('SELECT constraint_name '
                'FROM information_schema.table_constraints '
                'WHERE table_name = %s AND table_schema = %s',
                (self.table_name, self.table_schema))
            self.__constraints = [c for c, in cursor]

            # add nonstandard exclude constraint
            # (not exposed by information_schema.table_constraints)
            cursor.execute('SELECT c.conname '
                'FROM pg_namespace nc, '
                'pg_namespace nr, '
                'pg_constraint c, '
                'pg_class r '
                'WHERE nc.oid = c.connamespace AND nr.oid = r.relnamespace '
                'AND c.conrelid = r.oid '
                "AND c.contype = 'x' "  # exclude type
                "AND r.relkind IN ('r', 'p') "
                'AND r.relname = %s AND nr.nspname = %s',
                (self.table_name, self.table_schema))
            self.__constraints.extend((c for c, in cursor))
        return self.__constraints

    @property
    def _fk_deltypes(self):
        # Lazily cached mapping: column name -> ON DELETE rule
        if self.__fk_deltypes is None:
            cursor = Transaction().connection.cursor()
            cursor.execute('SELECT k.column_name, r.delete_rule '
                'FROM information_schema.key_column_usage AS k '
                'JOIN information_schema.referential_constraints AS r '
                'ON r.constraint_schema = k.constraint_schema '
                'AND r.constraint_name = k.constraint_name '
                'WHERE k.table_name = %s AND k.table_schema = %s',
                (self.table_name, self.table_schema))
            self.__fk_deltypes = dict(cursor)
        return self.__fk_deltypes

    @property
    def _indexes(self):
        # Lazily cached list of non-primary, non-unique index names
        if self.__indexes is None:
            cursor = Transaction().connection.cursor()
            # Fetch indexes defined for the table
            cursor.execute("SELECT cl2.relname "
                "FROM pg_index ind "
                "JOIN pg_class cl on (cl.oid = ind.indrelid) "
                "JOIN pg_namespace n ON (cl.relnamespace = n.oid) "
                "JOIN pg_class cl2 on (cl2.oid = ind.indexrelid) "
                "WHERE cl.relname = %s AND n.nspname = %s "
                "AND NOT ind.indisprimary AND NOT ind.indisunique",
                (self.table_name, self.table_schema))
            self.__indexes = [l[0] for l in cursor]
        return self.__indexes

    def _update_definitions(self, columns=None, constraints=None):
        """Invalidate cached catalog data; no arguments resets everything."""
        if columns is None and constraints is None:
            columns = constraints = True
        if columns:
            self.__columns = None
        if constraints:
            self.__constraints = None
            self.__fk_deltypes = None

    def alter_size(self, column_name, column_type):
        """Change the column's type to *column_type* (e.g. a larger VARCHAR)."""
        cursor = Transaction().connection.cursor()
        cursor.execute(
            SQL("ALTER TABLE {} ALTER COLUMN {} TYPE {}").format(
                Identifier(self.table_name),
                Identifier(column_name),
                SQL(column_type)))
        self._update_definitions(columns=True)

    def alter_type(self, column_name, column_type):
        """Change the column's SQL type to *column_type*."""
        cursor = Transaction().connection.cursor()
        cursor.execute(SQL('ALTER TABLE {} ALTER {} TYPE {}').format(
                Identifier(self.table_name),
                Identifier(column_name),
                SQL(column_type)))
        self._update_definitions(columns=True)

    def column_is_type(self, column_name, type_, *, size=-1):
        """Return True if the column matches *type_* (and VARCHAR *size*)."""
        db_type = self._columns[column_name]['typname'].upper()

        database = Transaction().database
        base_type = database.sql_type(type_).base.upper()
        # Size only matters for VARCHAR and when a size is requested
        if base_type == 'VARCHAR' and (size is None or size >= 0):
            same_size = self._columns[column_name]['size'] == size
        else:
            same_size = True

        return base_type == db_type and same_size

    def db_default(self, column_name, value):
        """Set the column's database default to *value* if it differs."""
        if value in [True, False]:
            # Catalog stores boolean defaults as 'true'/'false'
            test = str(value).lower()
        else:
            test = value
        if self._columns[column_name]['default'] != test:
            cursor = Transaction().connection.cursor()
            cursor.execute(
                SQL(
                    'ALTER TABLE {} ALTER COLUMN {} SET DEFAULT %s').format(
                    Identifier(self.table_name),
                    Identifier(column_name)),
                (value,))

    def add_column(self, column_name, sql_type, default=None, comment=''):
        """Add (or migrate) *column_name* with *sql_type*.

        When the column exists, only safe type/size migrations are applied;
        otherwise a warning is logged. *default* is a callable used to fill
        the new column on non-empty tables.
        """
        cursor = Transaction().connection.cursor()
        database = Transaction().database

        column_type = database.sql_type(sql_type)
        match = VARCHAR_SIZE_RE.match(sql_type)
        field_size = int(match.group(1)) if match else None

        def add_comment():
            # COMMENT ON requires ownership
            if comment and self.is_owner:
                cursor.execute(
                    SQL('COMMENT ON COLUMN {}.{} IS %s').format(
                        Identifier(self.table_name),
                        Identifier(column_name)),
                    (comment,))
        if self.column_exist(column_name):
            if (column_name in ('create_date', 'write_date')
                    and column_type[1].lower() != 'timestamp(6)'):
                # Migrate dates from timestamp(0) to timestamp
                cursor.execute(
                    SQL(
                        'ALTER TABLE {} ALTER COLUMN {} TYPE timestamp')
                    .format(
                        Identifier(self.table_name),
                        Identifier(column_name)))

            add_comment()
            base_type = column_type[0].lower()
            typname = self._columns[column_name]['typname']
            if base_type != typname:
                # Only widening conversions are applied automatically
                if (typname, base_type) in [
                        ('varchar', 'text'),
                        ('text', 'varchar'),
                        ('date', 'timestamp'),
                        ('int2', 'int4'),
                        ('int2', 'float4'),
                        ('int2', 'int8'),
                        ('int2', 'float8'),
                        ('int2', 'numeric'),
                        ('int4', 'int8'),
                        ('int4', 'float8'),
                        ('int4', 'numeric'),
                        ('int8', 'float8'),
                        ('int8', 'numeric'),
                        ('float4', 'numeric'),
                        ('float4', 'float8'),
                        ('float8', 'numeric'),
                        ]:
                    self.alter_type(column_name, base_type)
                elif (typname, base_type) in [
                        ('int8', 'int4'),
                        ('int8', 'int2'),
                        ('int4', 'int2'),
                        ('float8', 'float4'),
                        ]:
                    # Narrowing conversions are skipped to avoid data loss
                    pass
                else:
                    logger.warning(
                        'Unable to migrate column %s on table %s '
                        'from %s to %s.',
                        column_name, self.table_name, typname, base_type)

            if base_type == typname == 'varchar':
                # Migrate size
                from_size = self._columns[column_name]['size']
                if field_size is None:
                    if from_size:
                        # Drop the size limit entirely
                        self.alter_size(column_name, base_type)
                elif from_size == field_size:
                    pass
                elif from_size and from_size < field_size:
                    # Growing the limit is always safe
                    self.alter_size(column_name, column_type[1])
                else:
                    logger.warning(
                        'Unable to migrate column %s on table %s '
                        'from varchar(%s) to varchar(%s).',
                        column_name, self.table_name,
                        from_size if from_size and from_size > 0 else "",
                        field_size)
            return

        column_type = column_type[1]
        cursor.execute(
            SQL('ALTER TABLE {} ADD COLUMN {} {}').format(
                Identifier(self.table_name),
                Identifier(column_name),
                SQL(column_type)))
        add_comment()

        if default:
            # check if table is non-empty:
            cursor.execute('SELECT 1 FROM "%s" limit 1' % self.table_name)
            if cursor.rowcount:
                # Populate column with default values:
                cursor.execute(
                    SQL('UPDATE {} SET {} = %s').format(
                        Identifier(self.table_name),
                        Identifier(column_name)),
                    (default(),))

        self._update_definitions(columns=True)

    def add_fk(self, columns, reference, ref_columns=None, on_delete=None):
        """Add a foreign key on *columns* to *reference* (*ref_columns*).

        An existing constraint with a different ON DELETE rule is dropped
        and recreated. *on_delete* defaults to SET NULL.
        """
        if on_delete is not None:
            on_delete = on_delete.upper()
        else:
            on_delete = 'SET NULL'
        if isinstance(columns, str):
            columns = [columns]

        cursor = Transaction().connection.cursor()
        if ref_columns:
            ref_columns_name = '_' + '_'.join(ref_columns)
        else:
            ref_columns_name = ''
        name = self.convert_name(
            self.table_name + '_' + '_'.join(columns)
            + ref_columns_name + '_fkey')
        if name in self._constraints:
            # Recreate only when the ON DELETE rule changed
            for column_name in columns:
                if self._fk_deltypes.get(column_name) != on_delete:
                    self.drop_fk(columns, ref_columns)
                    add = True
                    break
            else:
                add = False
        else:
            add = True
        if add:
            columns = SQL(', ').join(map(Identifier, columns))
            if not ref_columns:
                ref_columns = ['id']
            ref_columns = SQL(', ').join(map(Identifier, ref_columns))
            cursor.execute(
                SQL(
                    "ALTER TABLE {table} "
                    "ADD CONSTRAINT {constraint} "
                    "FOREIGN KEY ({columns}) "
                    "REFERENCES {reference} ({ref_columns}) "
                    "ON DELETE {action}"
                    )
                .format(
                    table=Identifier(self.table_name),
                    constraint=Identifier(name),
                    columns=columns,
                    reference=Identifier(reference),
                    ref_columns=ref_columns,
                    action=SQL(on_delete)))
        self._update_definitions(constraints=True)

    def drop_fk(self, columns, ref_columns=None, table=None):
        """Drop the foreign key constraint named after *columns*."""
        if isinstance(columns, str):
            columns = [columns]
        if ref_columns:
            ref_columns_name = '_' + '_'.join(ref_columns)
        else:
            ref_columns_name = ''
        self.drop_constraint(
            '_'.join(columns) + ref_columns_name + '_fkey', table=table)

    def not_null_action(self, column_name, action='add'):
        """Add or remove a NOT NULL constraint on *column_name*.

        Adding is skipped with a warning when NULL values are present.
        """
        if not self.column_exist(column_name):
            return

        with Transaction().connection.cursor() as cursor:
            if action == 'add':
                if self._columns[column_name]['notnull']:
                    return
                # Refuse to add NOT NULL while NULL rows exist
                cursor.execute(SQL(
                        'SELECT id FROM {} WHERE {} IS NULL LIMIT 1').format(
                        Identifier(self.table_name),
                        Identifier(column_name)))
                if not cursor.rowcount:
                    cursor.execute(
                        SQL(
                            'ALTER TABLE {} ALTER COLUMN {} SET NOT NULL')
                        .format(
                            Identifier(self.table_name),
                            Identifier(column_name)))
                    self._update_definitions(columns=True)
                else:
                    logger.warning(
                        "Unable to set not null on column %s of table %s.\n"
                        "Try restarting one more time.\n"
                        "If that doesn't work update the records and restart "
                        "again.",
                        column_name, self.table_name)
            elif action == 'remove':
                if not self._columns[column_name]['notnull']:
                    return
                cursor.execute(
                    SQL('ALTER TABLE {} ALTER COLUMN {} DROP NOT NULL')
                    .format(
                        Identifier(self.table_name),
                        Identifier(column_name)))
                self._update_definitions(columns=True)
            else:
                raise Exception('Not null action not supported!')

    def add_constraint(self, ident, constraint):
        """Add *constraint* under a name derived from the table and *ident*."""
        ident = self.convert_name(self.table_name + "_" + ident)
        if ident in self._constraints:
            # This constrain already exist
            return
        cursor = Transaction().connection.cursor()
        cursor.execute(
            SQL('ALTER TABLE {} ADD CONSTRAINT {} {}').format(
                Identifier(self.table_name),
                Identifier(ident),
                SQL(str(constraint))),
            constraint.params)
        self._update_definitions(constraints=True)

    def drop_constraint(self, ident, table=None):
        """Drop the constraint named after *table* (or this table) and *ident*."""
        ident = self.convert_name((table or self.table_name) + "_" + ident)
        if ident not in self._constraints:
            return
        cursor = Transaction().connection.cursor()
        cursor.execute(
            SQL('ALTER TABLE {} DROP CONSTRAINT {}').format(
                Identifier(self.table_name), Identifier(ident)))
        self._update_definitions(constraints=True)

    def set_indexes(self, indexes, concurrently=False):
        """Synchronize the table's indexes with the *indexes* definitions.

        Missing indexes are created (optionally CONCURRENTLY); managed
        indexes no longer wanted are dropped; invalid indexes are rebuilt.
        """
        cursor = Transaction().connection.cursor()
        old = set(self._indexes)
        for index in indexes:
            translator = self.index_translator_for(index)
            if translator:
                name, query, params = translator.definition(index)
                name = '_'.join([self.table_name, name])
                name = 'idx_' + self.convert_name(name, reserved=len('idx_'))
                # A failed CREATE INDEX CONCURRENTLY leaves an invalid
                # index behind; drop it so it can be recreated
                cursor.execute(
                    'SELECT idx.indisvalid '
                    'FROM pg_index idx '
                    'JOIN pg_class cls ON cls.oid = idx.indexrelid '
                    'WHERE cls.relname = %s',
                    (name,))
                if (idx_valid := cursor.fetchone()) and not idx_valid[0]:
                    cursor.execute(
                        SQL("DROP INDEX {}").format(Identifier(name)))
                cursor.execute(
                    SQL('CREATE INDEX {} IF NOT EXISTS {} ON {} USING {}')
                    .format(
                        SQL('CONCURRENTLY' if concurrently else ''),
                        Identifier(name),
                        Identifier(self.table_name),
                        query),
                    params)
                old.discard(name)
        for name in old:
            # Only drop indexes this handler manages
            if name.startswith('idx_') or name.endswith('_index'):
                cursor.execute(SQL('DROP INDEX {}').format(Identifier(name)))
        self.__indexes = None

    def drop_column(self, column_name):
        """Drop *column_name* if it exists."""
        if not self.column_exist(column_name):
            return
        cursor = Transaction().connection.cursor()
        cursor.execute(SQL('ALTER TABLE {} DROP COLUMN {}').format(
                Identifier(self.table_name),
                Identifier(column_name)))
        self._update_definitions(columns=True)

    @classmethod
    def drop_table(cls, model, table, cascade=False):
        """Drop *table* and remove *model*'s ir_model_data entries."""
        cursor = Transaction().connection.cursor()
        cursor.execute('DELETE FROM ir_model_data WHERE model = %s', (model,))

        query = 'DROP TABLE {}'
        if cascade:
            query = query + ' CASCADE'
        cursor.execute(SQL(query).format(Identifier(table)))
|
||||
|
||||
|
||||
class IndexMixin:
    """Translate an abstract index definition into a PostgreSQL index.

    Subclasses set _type to a PostgreSQL access method and register
    themselves on TableHandler.index_translators automatically.
    """

    # PostgreSQL index access method (e.g. 'BTREE', 'HASH', 'GIN')
    _type = None

    def __init_subclass__(cls):
        # Every concrete translator becomes available to TableHandler
        TableHandler.index_translators.append(cls)

    @classmethod
    def definition(cls, index):
        """Return (name, query, params) for the index's USING clause."""
        expr_template = SQL('{expression} {collate} {opclass} {order}')
        indexed_expressions = cls._get_indexed_expressions(index)
        expressions = []
        params = []
        for expression, usage in indexed_expressions:
            expressions.append(expr_template.format(
                    **cls._get_expression_variables(expression, usage)))
            params.extend(expression.params)

        # Optional INCLUDE (covering) columns
        include = SQL('')
        if index.options.get('include'):
            include = SQL('INCLUDE ({columns})').format(
                columns=SQL(',').join(map(
                        lambda c: SQL(str(c)),
                        index.options.get('include'))))

        # Optional partial-index WHERE clause
        where = SQL('')
        if index.options.get('where'):
            where = SQL('WHERE {where}').format(
                where=SQL(str(index.options['where'])))
            params.extend(index.options['where'].params)

        query = SQL('{type} ({expressions}) {include} {where}').format(
            type=SQL(cls._type),
            expressions=SQL(',').join(expressions),
            include=include,
            where=where)
        name = cls._get_name(query, params)
        return name, query, params

    @classmethod
    def _get_indexed_expressions(cls, index):
        # Default: index every expression; subclasses filter by usage
        return index.expressions

    @classmethod
    def _get_expression_variables(cls, expression, usage):
        # Build the per-expression template variables; collation and
        # ordering come from the usage options when present
        variables = {
            'expression': SQL(str(expression)),
            'collate': SQL(''),
            'opclass': SQL(''),
            'order': SQL(''),
            }
        if usage.options.get('collation'):
            variables['collate'] = SQL('COLLATE {}').format(
                usage.options['collation'])
        if usage.options.get('order'):
            order = usage.options['order'].upper()
            variables['order'] = SQL(order)
        return variables
|
||||
|
||||
|
||||
class HashTranslator(IndexMixin, IndexTranslatorInterface):
    """Translator for hash indexes: a single equality expression, no INCLUDE."""
    _type = 'HASH'

    @classmethod
    def score(cls, index):
        """Return 100 for a lone equality lookup, 0 otherwise."""
        single = len(index.expressions) <= 1
        first_usage = index.expressions[0][1]
        is_equality = first_usage.__class__.__name__ == 'Equality'
        if single and is_equality and not index.options.get('include'):
            return 100
        return 0

    @classmethod
    def _get_indexed_expressions(cls, index):
        """Keep only the first equality expression."""
        equalities = [
            pair for pair in index.expressions
            if pair[1].__class__.__name__ == 'Equality']
        return equalities[:1]
|
||||
|
||||
|
||||
class BTreeTranslator(IndexMixin, IndexTranslatorInterface):
    """Translate index definitions into BTREE indexes."""
    _type = 'BTREE'

    @classmethod
    def score(cls, index):
        """Score the index: Range 100, Equality 50 and Similarity 20
        points per expression, plus 100 when a similarity is anchored
        at the beginning of the value."""
        total = 0
        for _, usage in index.expressions:
            kind = usage.__class__.__name__
            if kind == 'Range':
                total += 100
            elif kind == 'Equality':
                total += 50
            elif kind == 'Similarity':
                total += 20
                if usage.options.get('begin'):
                    total += 100
        return total

    @classmethod
    def _get_indexed_expressions(cls, index):
        """Drop expressions whose usage a BTREE cannot serve."""
        supported = {'Equality', 'Range', 'Similarity'}
        return [
            (expression, usage)
            for expression, usage in index.expressions
            if usage.__class__.__name__ in supported]

    @classmethod
    def _get_expression_variables(cls, expression, usage):
        """Add the pattern operator class for un-collated similarity."""
        variables = super()._get_expression_variables(expression, usage)
        if (usage.__class__.__name__ == 'Similarity'
                and not usage.options.get('collation')):
            # text_pattern_ops and varchar_pattern_ops are the same
            variables['opclass'] = SQL('varchar_pattern_ops')
        return variables
|
||||
|
||||
|
||||
class TrigramTranslator(IndexMixin, IndexTranslatorInterface):
    # GIN index: serves trigram Similarity through pg_trgm and, when
    # btree_gin is installed, Equality/Range usages as well.
    _type = 'GIN'

    @classmethod
    def score(cls, index):
        """Score the index depending on the installed extensions.

        Returns 0 when neither btree_gin nor pg_trgm is available, or
        when a non-similarity usage cannot be served at all.
        """
        database = Transaction().database
        has_btree_gin = database.has_extension('btree_gin')
        has_trigram = database.has_extension('pg_trgm')
        if not has_btree_gin and not has_trigram:
            return 0

        score = 0
        for _, usage in index.expressions:
            if usage.__class__.__name__ == 'Similarity':
                if has_trigram:
                    score += 100
                else:
                    # btree_gin only: usable but weaker for similarity
                    score += 50
            elif has_btree_gin:
                if usage.__class__.__name__ == 'Range':
                    score += 90
                elif usage.__class__.__name__ == 'Equality':
                    score += 40
            else:
                # Non-similarity usage and no btree_gin: unusable
                return 0
        return score

    @classmethod
    def _get_indexed_expressions(cls, index):
        """Keep only the expressions an installed extension can serve."""
        database = Transaction().database
        has_btree_gin = database.has_extension('btree_gin')
        has_trigram = database.has_extension('pg_trgm')

        def filter(usage):
            # True when this usage is supported by an installed extension
            if usage.__class__.__name__ == 'Similarity':
                return has_trigram
            elif usage.__class__.__name__ in {'Range', 'Equality'}:
                return has_btree_gin
            else:
                return False
        return [(e, u) for e, u in index.expressions if filter(u)]

    @classmethod
    def _get_expression_variables(cls, expression, usage):
        """Add the pg_trgm operator class for similarity expressions."""
        params = super()._get_expression_variables(expression, usage)
        if usage.__class__.__name__ == 'Similarity':
            params['opclass'] = SQL('gin_trgm_ops')
        return params
|
||||
12
backend/sqlite/__init__.py
Executable file
12
backend/sqlite/__init__.py
Executable file
@@ -0,0 +1,12 @@
|
||||
# This file is part of Tryton. The COPYRIGHT file at the top level of
|
||||
# this repository contains the full copyright notices and license terms.
|
||||
|
||||
from .database import (
|
||||
Database, DatabaseDataError, DatabaseIntegrityError,
|
||||
DatabaseOperationalError, DatabaseTimeoutError)
|
||||
from .table import TableHandler
|
||||
|
||||
__all__ = [
|
||||
Database, TableHandler,
|
||||
DatabaseIntegrityError, DatabaseDataError, DatabaseOperationalError,
|
||||
DatabaseTimeoutError]
|
||||
BIN
backend/sqlite/__pycache__/__init__.cpython-311.opt-1.pyc
Executable file
BIN
backend/sqlite/__pycache__/__init__.cpython-311.opt-1.pyc
Executable file
Binary file not shown.
BIN
backend/sqlite/__pycache__/__init__.cpython-311.pyc
Executable file
BIN
backend/sqlite/__pycache__/__init__.cpython-311.pyc
Executable file
Binary file not shown.
BIN
backend/sqlite/__pycache__/database.cpython-311.opt-1.pyc
Executable file
BIN
backend/sqlite/__pycache__/database.cpython-311.opt-1.pyc
Executable file
Binary file not shown.
BIN
backend/sqlite/__pycache__/database.cpython-311.pyc
Executable file
BIN
backend/sqlite/__pycache__/database.cpython-311.pyc
Executable file
Binary file not shown.
BIN
backend/sqlite/__pycache__/table.cpython-311.opt-1.pyc
Executable file
BIN
backend/sqlite/__pycache__/table.cpython-311.opt-1.pyc
Executable file
Binary file not shown.
BIN
backend/sqlite/__pycache__/table.cpython-311.pyc
Executable file
BIN
backend/sqlite/__pycache__/table.cpython-311.pyc
Executable file
Binary file not shown.
648
backend/sqlite/database.py
Executable file
648
backend/sqlite/database.py
Executable file
@@ -0,0 +1,648 @@
|
||||
# This file is part of Tryton. The COPYRIGHT file at the top level of
|
||||
# this repository contains the full copyright notices and license terms.
|
||||
import datetime
|
||||
import logging
|
||||
import math
|
||||
import os
|
||||
import random
|
||||
import sqlite3 as sqlite
|
||||
import threading
|
||||
import time
|
||||
import urllib.parse
|
||||
import warnings
|
||||
from decimal import Decimal
|
||||
from sqlite3 import DatabaseError
|
||||
from sqlite3 import IntegrityError as DatabaseIntegrityError
|
||||
from sqlite3 import OperationalError as DatabaseOperationalError
|
||||
from weakref import WeakKeyDictionary
|
||||
|
||||
from sql import Expression, Flavor, Literal, Null, Query, Table
|
||||
from sql.conditionals import NullIf
|
||||
from sql.functions import (
|
||||
CharLength, CurrentTimestamp, Extract, Function, Overlay, Position,
|
||||
Substring, Trim)
|
||||
|
||||
from trytond.backend.database import DatabaseInterface, SQLType
|
||||
from trytond.config import config, parse_uri
|
||||
from trytond.tools import safe_join
|
||||
from trytond.transaction import Transaction
|
||||
|
||||
__all__ = [
|
||||
'Database',
|
||||
'DatabaseIntegrityError', 'DatabaseDataError', 'DatabaseOperationalError',
|
||||
'DatabaseTimeoutError']
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_default_name = config.get('database', 'default_name', default=':memory:')
|
||||
|
||||
|
||||
class DatabaseDataError(DatabaseError):
    """Data error of the backend API, derived from sqlite3's
    DatabaseError so callers can catch either."""
    pass
|
||||
|
||||
|
||||
class DatabaseTimeoutError(Exception):
    """Timeout error of the backend API (not raised anywhere in the
    visible SQLite backend code)."""
    pass
|
||||
|
||||
|
||||
class SQLiteExtract(Function):
    """SQL EXTRACT implemented in Python and registered on the
    connection, since SQLite has no native EXTRACT."""
    __slots__ = ()
    _function = 'EXTRACT'

    @staticmethod
    def extract(lookup_type, date):
        """Return the *lookup_type* field of *date*.

        date is None or an ISO string: 'YYYY-MM-DD' or
        'YYYY-MM-DD HH:MM:SS[.ffffff]'.
        """
        if date is None:
            return None
        if len(date) == 10:
            year, month, day = map(int, date.split('-'))
            date = datetime.date(year, month, day)
        else:
            datepart, timepart = date.split(" ")
            year, month, day = map(int, datepart.split("-"))
            timepart_full = timepart.split(".")
            hours, minutes, seconds = map(int, timepart_full[0].split(":"))
            if len(timepart_full) == 2:
                microseconds = int(timepart_full[1])
            else:
                microseconds = 0
            date = datetime.datetime(year, month, day, hours, minutes, seconds,
                microseconds)
        lookup = lookup_type.lower()
        # Use integer division ('//'): these fields are integral and
        # under Python 3 the '/' operator would return floats (and, for
        # century/millennium, values off by almost one).
        if lookup == 'century':
            return date.year // 100 + (1 if date.year % 100 else 0)
        elif lookup == 'decade':
            return date.year // 10
        elif lookup == 'dow':
            # PostgreSQL convention: Sunday is 0
            return (date.weekday() + 1) % 7
        elif lookup == 'doy':
            return date.timetuple().tm_yday
        elif lookup == 'epoch':
            return int(time.mktime(date.timetuple()))
        elif lookup == 'microseconds':
            return date.microsecond
        elif lookup == 'millennium':
            return date.year // 1000 + (1 if date.year % 1000 else 0)
        elif lookup == 'milliseconds':
            return date.microsecond // 1000
        elif lookup == 'quarter':
            # 'month // 4 + 1' was wrong for July, October and November;
            # quarters are 3 months long.
            return (date.month - 1) // 3 + 1
        elif lookup == 'week':
            return date.isocalendar()[1]
        return getattr(date, lookup)
|
||||
|
||||
|
||||
def date_trunc(_type, date):
    """Python implementation of SQL DATE_TRUNC for SQLite.

    Truncate the ISO date/datetime string *date* to the precision named
    by *_type* and return it as 'YYYY-MM-DD HH:MM:SS'.  Returns *date*
    unchanged for a falsy type and None for None or unparsable input.
    """
    if not _type:
        return date
    if date is None:
        return None
    parsed = None
    for format_ in (
            '%Y-%m-%d %H:%M:%S.%f',
            '%Y-%m-%d %H:%M:%S',
            '%Y-%m-%d',
            '%H:%M:%S',
            ):
        try:
            parsed = datetime.datetime.strptime(date, format_)
        except ValueError:
            continue
        break
    if parsed is None:
        return None
    # Zero out fields from the finest precision upward, stopping at the
    # requested one; an unrecognised type truncates to the year.
    for attribute, lowest in (
            ('microsecond', 0),
            ('second', 0),
            ('minute', 0),
            ('hour', 0),
            ('day', 1),
            ('month', 1)):
        if _type.lower().startswith(attribute):
            break
        parsed = parsed.replace(**{attribute: lowest})
    return str(parsed)
|
||||
|
||||
|
||||
def split_part(text, delimiter, count):
    """Python implementation of SQL SPLIT_PART for SQLite.

    Return the 1-indexed *count*-th field of *text* split on
    *delimiter*, '' when out of range, None for NULL input.
    """
    if text is None:
        return None
    parts = text.split(delimiter)
    # Pad so an out-of-range index yields '' instead of IndexError
    parts.extend([''] * (count - 1))
    return parts[count - 1]
|
||||
|
||||
|
||||
class SQLitePosition(Function):
    """SQL POSITION implemented in Python and registered on the
    connection."""
    __slots__ = ()
    _function = 'POSITION'

    @staticmethod
    def position(substring, string):
        # 1-based index of substring, 0 when absent, NULL for NULL input
        if string is None:
            return
        try:
            return string.index(substring) + 1
        except ValueError:
            return 0
|
||||
|
||||
|
||||
def replace(text, pattern, replacement):
    """Fallback REPLACE for SQLite versions without the built-in:
    substitute every occurrence of *pattern* in str(*text*)."""
    value = text if isinstance(text, str) else str(text)
    return value.replace(pattern, replacement)
|
||||
|
||||
|
||||
def now():
    """Return the current timestamp as an ISO string, stable per
    transaction: repeated calls within the same transaction start
    return the same value."""
    transaction = Transaction()
    return _nows.setdefault(transaction, {}).setdefault(
        transaction.started_at, datetime.datetime.now().isoformat(' '))


# Timestamp cache keyed weakly by transaction so entries vanish when
# their transaction is garbage collected.
_nows = WeakKeyDictionary()
|
||||
|
||||
|
||||
def to_char(value, format):
    """Python implementation of SQL TO_CHAR for date/datetime values.

    *value* is an ISO datetime string ('%Y-%m-%d %H:%M:%S.%f'), an ISO
    date string ('%Y-%m-%d'), or an already-converted date/datetime.
    Raises NotImplementedError for timedelta and anything unparsable.
    """
    try:
        value = datetime.datetime.strptime(value, '%Y-%m-%d %H:%M:%S.%f')
    except ValueError:
        try:
            value = datetime.datetime.strptime(value, '%Y-%m-%d').date()
        except ValueError:
            pass
    if isinstance(value, datetime.date):
        # Translate the SQL pattern into a strftime pattern.  Order
        # matters: '%' is escaped first and longer tokens are handled
        # before their prefixes (e.g. 'YYYY' before 'YY', 'DDD' before
        # 'DD' before 'D').
        conversions = (
            ('%', '%%'),
            ('HH12', '%I'),
            ('HH24', '%H'),
            ('HH', '%I'),
            ('MI', '%M'),
            ('SS', '%S'),
            ('US', '%f'),
            ('AM', '%p'),
            ('A.M.', '%p'),
            ('PM', '%p'),
            ('P.M.', '%p'),
            ('am', '%p'),
            ('a.m.', '%p'),
            ('pm', '%p'),
            ('p.m.', '%p'),
            ('YYYY', '%Y'),
            ('YY', '%y'),
            ('Month', '%B'),
            ('Mon', '%b'),
            ('MM', '%m'),
            ('Day', '%A'),
            ('Dy', '%a'),
            ('DDD', '%j'),
            ('DD', '%d'),
            ('D', '%w'),
            ('TZ', '%Z'),
            )
        for token, directive in conversions:
            format = format.replace(token, directive)
        return value.strftime(format)
    elif isinstance(value, datetime.timedelta):
        raise NotImplementedError
    else:
        raise NotImplementedError
|
||||
|
||||
|
||||
class SQLiteSubstring(Function):
    """Map SQL SUBSTRING onto SQLite's SUBSTR."""
    __slots__ = ()
    _function = 'SUBSTR'
|
||||
|
||||
|
||||
class SQLiteOverlay(Function):
    """SQL OVERLAY implemented in Python and registered on the
    connection."""
    __slots__ = ()
    _function = 'OVERLAY'

    @staticmethod
    def overlay(string, placing_string, from_, for_=None):
        """Replace *for_* characters of *string* at 1-based position
        *from_* with *placing_string* (*for_* defaults to the length of
        the replacement)."""
        if for_ is None:
            for_ = len(placing_string)
        return string[:from_ - 1] + placing_string + string[from_ - 1 + for_:]
|
||||
|
||||
|
||||
class SQLiteCharLength(Function):
    """Map SQL CHAR_LENGTH onto SQLite's LENGTH."""
    __slots__ = ()
    _function = 'LENGTH'
|
||||
|
||||
|
||||
class SQLiteCurrentTimestamp(Function):
    """Map CURRENT_TIMESTAMP onto the registered now() function."""
    __slots__ = ()
    _function = 'NOW'  # More precise (now() keeps microseconds and is
                       # stable per transaction)
|
||||
|
||||
|
||||
class SQLiteTrim(Trim):
    """Render SQL TRIM with SQLite's TRIM/LTRIM/RTRIM functions."""

    def __str__(self):
        flavor = Flavor.get()
        param = flavor.param

        # Pick the SQLite function matching the trim position
        function = {
            'BOTH': 'TRIM',
            'LEADING': 'LTRIM',
            'TRAILING': 'RTRIM',
            }[self.position]

        def format(arg):
            # Literal strings become placeholders; expressions render inline
            if isinstance(arg, str):
                return param
            else:
                return str(arg)
        return function + '(%s, %s)' % (
            format(self.string), format(self.characters))

    @property
    def params(self):
        # NOTE(review): characters is always appended as a parameter,
        # matching its placeholder rendering only when it is a plain
        # string — confirm against python-sql's Trim contract.
        if isinstance(self.string, str):
            params = [self.string]
        else:
            params = list(self.string.params)
        params.append(self.characters)
        return params
|
||||
|
||||
|
||||
def sign(value):
    """SQL SIGN: 1 for positive, -1 for negative, the value itself
    (zero) otherwise."""
    if value == 0:
        return value
    return 1 if value > 0 else -1
|
||||
|
||||
|
||||
def greatest(*args):
    """SQL GREATEST: maximum of the non-NULL arguments, None when all
    arguments are NULL (or none are given)."""
    values = [value for value in args if value is not None]
    return max(values) if values else None
|
||||
|
||||
|
||||
def least(*args):
    """SQL LEAST: minimum of the non-NULL arguments, None when all
    arguments are NULL (or none are given)."""
    values = [value for value in args if value is not None]
    return min(values) if values else None
|
||||
|
||||
|
||||
def bool_and(*args):
    """SQL BOOL_AND replacement: True when no argument is falsy."""
    return all(bool(arg) for arg in args)
|
||||
|
||||
|
||||
def bool_or(*args):
    """SQL BOOL_OR replacement: True when any argument is truthy."""
    return any(bool(arg) for arg in args)
|
||||
|
||||
|
||||
def cbrt(value):
    """SQL CBRT: cube root of *value*.

    math.pow raises ValueError for a negative base with a fractional
    exponent, but SQL's CBRT is defined for negative input, so compute
    on the magnitude and restore the sign.
    """
    return math.copysign(math.pow(abs(value), 1 / 3), value)
|
||||
|
||||
|
||||
def div(a, b):
    """SQL DIV(y, x): integer quotient of a / b truncated toward zero.

    Plain floor division rounds toward negative infinity, which
    disagrees with SQL's DIV for mixed-sign operands (e.g. DIV(-7, 2)
    is -3, not -4).
    """
    quotient = a // b
    # Bump back toward zero when signs differ and there is a remainder
    if quotient < 0 and quotient * b != a:
        quotient += 1
    return quotient
|
||||
|
||||
|
||||
def trunc(value, digits):
    """SQL TRUNC: truncate *value* toward zero to *digits* decimal
    places."""
    scale = 10 ** digits
    return math.trunc(value * scale) / scale
|
||||
|
||||
|
||||
# Replacements for python-sql functions that SQLite cannot evaluate
# natively; injected through Flavor(function_mapping=MAPPING).
MAPPING = {
    Extract: SQLiteExtract,
    Position: SQLitePosition,
    Substring: SQLiteSubstring,
    Overlay: SQLiteOverlay,
    CharLength: SQLiteCharLength,
    CurrentTimestamp: SQLiteCurrentTimestamp,
    Trim: SQLiteTrim,
    }
|
||||
|
||||
|
||||
class JSONExtract(Function):
    """Render SQLite's JSON_EXTRACT function."""
    __slots__ = ()
    _function = 'JSON_EXTRACT'
|
||||
|
||||
|
||||
class JSONQuote(Function):
    """Render SQLite's JSON_QUOTE function."""
    __slots__ = ()
    _function = 'JSON_QUOTE'
|
||||
|
||||
|
||||
class SQLiteCursor(sqlite.Cursor):
    # Cursor usable as a context manager; exiting the 'with' block does
    # NOT close the cursor (the shared connection keeps it reusable).

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        pass
|
||||
|
||||
|
||||
class SQLiteConnection(sqlite.Connection):
    """Connection whose cursors are always SQLiteCursor instances."""

    def cursor(self):
        # Force the context-manager-aware cursor factory
        return super().cursor(SQLiteCursor)
|
||||
|
||||
|
||||
class Database(DatabaseInterface):
    """SQLite implementation of the Tryton database interface.

    A single sqlite3 connection is shared by every caller of an
    instance; the ':memory:' database is additionally a per-thread
    singleton so all users in a thread see the same data.
    """

    # Thread-local storage holding the ':memory:' singleton instance
    _local = threading.local()
    # Shared sqlite3 connection, created lazily by connect()
    _conn = None
    flavor = Flavor(
        paramstyle='qmark', function_mapping=MAPPING, null_ordering=False,
        max_limit=-1)
    # Maximal number of expressions in an IN clause
    IN_MAX = 200

    # Generic SQL types that SQLite stores under a different name
    TYPES_MAPPING = {
        'BIGINT': SQLType('INTEGER', 'INTEGER'),
        'BOOL': SQLType('BOOLEAN', 'BOOLEAN'),
        'DATETIME': SQLType('TIMESTAMP', 'TIMESTAMP'),
        'FULLTEXT': SQLType('TEXT', 'TEXT'),
        'JSON': SQLType('TEXT', 'TEXT'),
        }

    def __new__(cls, name=_default_name):
        # Reuse the per-thread singleton for the in-memory database
        if (name == ':memory:'
                and getattr(cls._local, 'memory_database', None)):
            return cls._local.memory_database
        return DatabaseInterface.__new__(cls, name=name)

    def __init__(self, name=_default_name):
        super(Database, self).__init__(name=name)
        if name == ':memory:':
            # Remember the instance so __new__ can return it next time
            Database._local.memory_database = self

    def connect(self):
        """Open the sqlite3 connection (idempotent) and register the
        Python implementations of the SQL functions SQLite lacks.

        Returns self so calls can be chained.
        """
        if self._conn is not None:
            return self
        self._conn = sqlite.connect(
            self._make_uri(), uri=True,
            detect_types=sqlite.PARSE_DECLTYPES | sqlite.PARSE_COLNAMES,
            factory=SQLiteConnection)
        self._conn.create_function('extract', 2, SQLiteExtract.extract)
        self._conn.create_function('date_trunc', 2, date_trunc)
        self._conn.create_function('split_part', 3, split_part)
        self._conn.create_function('to_char', 2, to_char)
        if sqlite.sqlite_version_info < (3, 3, 14):
            # replace() is built into SQLite since 3.3.14
            self._conn.create_function('replace', 3, replace)
        self._conn.create_function('now', 0, now)
        self._conn.create_function('greatest', -1, greatest)
        self._conn.create_function('least', -1, least)
        self._conn.create_function('bool_and', -1, bool_and)
        self._conn.create_function('bool_or', -1, bool_or)

        # Mathematical functions
        self._conn.create_function('cbrt', 1, cbrt)
        self._conn.create_function('ceil', 1, math.ceil)
        self._conn.create_function('degrees', 1, math.degrees)
        self._conn.create_function('div', 2, div)
        self._conn.create_function('exp', 1, math.exp)
        self._conn.create_function('floor', 1, math.floor)
        self._conn.create_function('ln', 1, math.log)
        self._conn.create_function('log', 1, math.log10)
        self._conn.create_function('mod', 2, math.fmod)
        self._conn.create_function('pi', 0, lambda: math.pi)
        self._conn.create_function('power', 2, math.pow)
        self._conn.create_function('radians', 1, math.radians)
        self._conn.create_function('sign', 1, sign)
        self._conn.create_function('sqrt', 1, math.sqrt)
        self._conn.create_function('trunc', 1, math.trunc)
        self._conn.create_function('trunc', 2, trunc)

        # Trigonomentric functions
        self._conn.create_function('acos', 1, math.acos)
        self._conn.create_function('asin', 1, math.asin)
        self._conn.create_function('atan', 1, math.atan)
        self._conn.create_function('atan2', 2, math.atan2)
        self._conn.create_function('cos', 1, math.cos)
        self._conn.create_function(
            'cot', 1, lambda x: 1 / math.tan(x) if x else math.inf)
        self._conn.create_function('sin', 1, math.sin)
        self._conn.create_function('tan', 1, math.tan)

        # Random functions
        self._conn.create_function('random', 0, random.random)
        self._conn.create_function('setseed', 1, random.seed)

        # String functions
        self._conn.create_function('overlay', 3, SQLiteOverlay.overlay)
        self._conn.create_function('overlay', 4, SQLiteOverlay.overlay)
        self._conn.create_function('position', 2, SQLitePosition.position)

        # Echo every statement to the logger when debugging
        if (hasattr(self._conn, 'set_trace_callback')
                and logger.isEnabledFor(logging.DEBUG)):
            self._conn.set_trace_callback(logger.debug)
        self._conn.execute('PRAGMA foreign_keys = ON')
        return self

    def _make_uri(self):
        """Build the SQLite 'file:' URI for this database from the
        configured 'database.uri', overriding its path with either the
        in-memory mode or the file under 'database.path'."""
        uri = config.get('database', 'uri')
        base_uri = parse_uri(uri)
        if base_uri.path and base_uri.path != '/':
            warnings.warn("The path specified in the URI will be overridden")

        if self.name == ':memory:':
            query_string = urllib.parse.parse_qs(base_uri.query)
            query_string['mode'] = 'memory'
            query = urllib.parse.urlencode(query_string, doseq=True)
            db_uri = base_uri._replace(netloc='', path='/', query=query)
        else:
            # safe_join prevents the database name escaping the directory
            db_path = safe_join(
                config.get('database', 'path'), self.name + '.sqlite')
            if not os.path.isfile(db_path):
                raise IOError("Database '%s' doesn't exist!" % db_path)
            db_uri = base_uri._replace(path=db_path)

        # Use unparse before replacing sqlite with file because SQLite accepts
        # a relative path URI like file:db/test.sqlite which doesn't conform to
        # RFC8089 which urllib follows and enforces when the scheme is 'file'
        db_uri = urllib.parse.urlunparse(db_uri)
        return db_uri.replace('sqlite', 'file', 1)

    def get_connection(
            self, autocommit=False, readonly=False, statement_timeout=None):
        """Return the shared connection, connecting lazily.

        readonly and statement_timeout are accepted for API parity but
        ignored by this backend.
        """
        if self._conn is None:
            self.connect()
        if autocommit:
            # isolation_level None puts sqlite3 in autocommit mode
            self._conn.isolation_level = None
        else:
            self._conn.isolation_level = 'IMMEDIATE'
        return self._conn

    def put_connection(self, connection=None, close=False):
        # Nothing to do: there is no pool, the one connection is shared
        pass

    def close(self):
        """Forget the shared connection (kept for ':memory:' so the
        data survives).

        NOTE(review): _conn.close() is never called; the underlying
        connection is only released when garbage collected.
        """
        if self.name == ':memory:':
            return
        if self._conn is None:
            return
        self._conn = None

    @classmethod
    def create(cls, connection, database_name):
        """Create an empty database file (no-op payload: connecting
        creates the file).  Names containing a path separator are
        silently rejected."""
        if database_name == ':memory:':
            path = ':memory:'
        else:
            if os.sep in database_name:
                return
            path = os.path.join(config.get('database', 'path'),
                database_name + '.sqlite')
        with sqlite.connect(path) as conn:
            cursor = conn.cursor()
            cursor.close()

    @classmethod
    def drop(cls, connection, database_name):
        """Delete the database: reset the in-memory singleton's
        connection or remove the file on disk."""
        if database_name == ':memory:':
            cls._local.memory_database._conn = None
            return
        if os.sep in database_name:
            return
        os.remove(os.path.join(config.get('database', 'path'),
                database_name + '.sqlite'))

    def list(self, hostname=None):
        """Return the names of the available databases: ':memory:' plus
        every *.sqlite file under 'database.path' that passes test()."""
        res = []
        listdir = [':memory:']
        try:
            listdir += os.listdir(config.get('database', 'path'))
        except OSError:
            pass
        for db_file in listdir:
            if db_file.endswith('.sqlite') or db_file == ':memory:':
                if db_file == ':memory:':
                    db_name = ':memory:'
                else:
                    # Strip the '.sqlite' suffix
                    db_name = db_file[:-7]
                try:
                    database = Database(db_name).connect()
                except Exception:
                    logger.debug(
                        'Test failed for "%s"', db_name, exc_info=True)
                    continue
                if database.test(hostname=hostname):
                    res.append(db_name)
                database.close()
        return res

    def init(self):
        """Bootstrap the database: run init.sql, then register the 'ir'
        and 'res' modules and their dependencies."""
        from trytond.modules import get_module_info
        Flavor.set(self.flavor)
        with self.get_connection() as conn:
            cursor = conn.cursor()
            sql_file = os.path.join(os.path.dirname(__file__), 'init.sql')
            with open(sql_file) as fp:
                # Naive statement split on ';' — init.sql contains no
                # literal semicolons
                for line in fp.read().split(';'):
                    if (len(line) > 0) and (not line.isspace()):
                        cursor.execute(line)

            ir_module = Table('ir_module')
            ir_module_dependency = Table('ir_module_dependency')
            for module in ['ir', 'res']:
                info = get_module_info(module)
                insert = ir_module.insert(
                    [ir_module.create_uid, ir_module.create_date,
                        ir_module.name, ir_module.state],
                    [[0, CurrentTimestamp(), module, 'to activate']])
                cursor.execute(*insert)
                cursor.execute('SELECT last_insert_rowid()')
                module_id, = cursor.fetchone()
                for dependency in info.get('depends', []):
                    insert = ir_module_dependency.insert(
                        [ir_module_dependency.create_uid,
                            ir_module_dependency.create_date,
                            ir_module_dependency.module,
                            ir_module_dependency.name,
                            ],
                        [[0, CurrentTimestamp(), module_id, dependency]])
                    cursor.execute(*insert)
            conn.commit()

    def test(self, hostname=None):
        """Return True when the database looks like a Tryton database:
        all core tables exist and, when *hostname* is given, it matches
        ir_configuration (an empty configuration accepts any host).

        NOTE(review): relies on self._conn being already connected.
        """
        Flavor.set(self.flavor)
        tables = ['ir_model', 'ir_model_field', 'ir_ui_view', 'ir_ui_menu',
            'res_user', 'res_group', 'ir_module', 'ir_module_dependency',
            'ir_translation', 'ir_lang', 'ir_configuration']
        sqlite_master = Table('sqlite_master')
        select = sqlite_master.select(sqlite_master.name)
        select.where = sqlite_master.type == 'table'
        select.where &= sqlite_master.name.in_(tables)
        with self._conn as conn:
            cursor = conn.cursor()
            try:
                cursor.execute(*select)
            except Exception:
                return False
            if len(cursor.fetchall()) != len(tables):
                return False
            if hostname:
                configuration = Table('ir_configuration')
                try:
                    cursor.execute(*configuration.select(
                            configuration.hostname))
                except Exception:
                    return False
                hostnames = {h for h, in cursor if h}
                if hostnames and hostname not in hostnames:
                    return False
        return True

    def lastid(self, cursor):
        # This call is not thread safe
        return cursor.lastrowid

    def lock(self, connection, table):
        # SQLite locks the whole database, not single tables
        pass

    def lock_id(self, id, timeout=None):
        # Advisory locks are not supported: always report success
        return Literal(True)

    def has_constraint(self, constraint):
        return False

    def has_multirow_insert(self):
        return True

    def has_window_functions(self):
        # Window functions appeared in SQLite 3.25.0
        return sqlite.sqlite_version_info >= (3, 25, 0)

    def sql_type(self, type_):
        """Map a generic SQL type name to its SQLite SQLType."""
        if type_ in self.TYPES_MAPPING:
            return self.TYPES_MAPPING[type_]
        if type_.startswith('VARCHAR'):
            return SQLType('VARCHAR', type_)
        return SQLType(type_, type_)

    def sql_format(self, type_, value):
        """Coerce *value* for storage: integers are forced to int
        unless the value is a sub-query/expression."""
        if type_ in ('INTEGER', 'BIGINT'):
            if (value is not None
                    and not isinstance(value, (Query, Expression))):
                value = int(value)
        return value

    def json_get(self, column, key=None):
        """Return the SQL expression extracting *key* from the JSON
        *column*, quoted, with JSON null mapped to SQL NULL."""
        if key:
            column = JSONExtract(column, '$.%s' % key)
        return NullIf(JSONQuote(column), JSONQuote(Null))
|
||||
|
||||
|
||||
# Store Decimal values as their UTF-8 string representation and read
# NUMERIC columns back as Decimal (lossless round trip).
sqlite.register_converter('NUMERIC', lambda val: Decimal(val.decode('utf-8')))
sqlite.register_adapter(Decimal, lambda val: str(val).encode('utf-8'))
|
||||
|
||||
|
||||
def adapt_datetime(val):
    """Adapt a datetime for storage: drop any tzinfo and format as
    'YYYY-MM-DD HH:MM:SS[.ffffff]'."""
    naive = val.replace(tzinfo=None)
    return naive.isoformat(sep=' ')
|
||||
|
||||
|
||||
sqlite.register_adapter(datetime.datetime, adapt_datetime)
# Times are stored as ISO strings; the converter below only parses
# whole HH:MM:SS (fractional seconds would break the int() parsing).
sqlite.register_adapter(datetime.time, lambda val: val.isoformat())
sqlite.register_converter('TIME',
    lambda val: datetime.time(*map(int, val.decode('utf-8').split(':'))))
# Intervals are stored as a plain number of seconds
sqlite.register_adapter(datetime.timedelta, lambda val: val.total_seconds())
|
||||
|
||||
|
||||
def convert_interval(value):
    """Convert an INTERVAL column (seconds stored as a number) back to
    a timedelta, clamped to the representable range."""
    seconds = float(value)
    # timedelta cannot be instantiated from its own min/max total
    # seconds, so clamp explicitly.
    if seconds >= _interval_max:
        return datetime.timedelta.max
    if seconds <= _interval_min:
        return datetime.timedelta.min
    return datetime.timedelta(seconds=seconds)


# Clamp bounds for convert_interval
_interval_max = datetime.timedelta.max.total_seconds()
_interval_min = datetime.timedelta.min.total_seconds()
sqlite.register_converter('INTERVAL', convert_interval)
|
||||
185
backend/sqlite/init.sql
Executable file
185
backend/sqlite/init.sql
Executable file
@@ -0,0 +1,185 @@
|
||||
CREATE TABLE ir_configuration (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
language VARCHAR,
|
||||
hostname VARCHAR,
|
||||
create_date TIMESTAMP,
|
||||
create_uid INTEGER,
|
||||
write_date TIMESTAMP,
|
||||
write_uid INTEGER
|
||||
);
|
||||
|
||||
CREATE TABLE ir_model (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
model VARCHAR,
|
||||
name VARCHAR,
|
||||
info TEXT,
|
||||
module VARCHAR,
|
||||
create_date TIMESTAMP,
|
||||
create_uid INTEGER,
|
||||
write_date TIMESTAMP,
|
||||
write_uid INTEGER
|
||||
);
|
||||
|
||||
CREATE TABLE ir_model_field (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
model VARCHAR,
|
||||
name VARCHAR,
|
||||
relation VARCHAR,
|
||||
field_description VARCHAR,
|
||||
ttype VARCHAR,
|
||||
help TEXT,
|
||||
module VARCHAR,
|
||||
"access" BOOLEAN,
|
||||
create_date TIMESTAMP,
|
||||
create_uid INTEGER,
|
||||
write_date TIMESTAMP,
|
||||
write_uid INTEGER
|
||||
);
|
||||
|
||||
|
||||
CREATE TABLE ir_ui_view (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
model VARCHAR,
|
||||
"type" VARCHAR,
|
||||
data TEXT,
|
||||
field_childs VARCHAR,
|
||||
priority INTEGER,
|
||||
domain VARCHAR,
|
||||
inherit INTEGER,
|
||||
module VARCHAR,
|
||||
name VARCHAR,
|
||||
create_date TIMESTAMP,
|
||||
create_uid INTEGER,
|
||||
write_date TIMESTAMP,
|
||||
write_uid INTEGER
|
||||
);
|
||||
|
||||
CREATE TABLE ir_ui_menu (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
parent INTEGER,
|
||||
name VARCHAR,
|
||||
icon VARCHAR,
|
||||
active BOOLEAN,
|
||||
sequence INTEGER,
|
||||
create_date TIMESTAMP,
|
||||
create_uid INTEGER,
|
||||
write_date TIMESTAMP,
|
||||
write_uid INTEGER
|
||||
);
|
||||
|
||||
CREATE TABLE ir_translation (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
lang VARCHAR,
|
||||
src TEXT,
|
||||
name VARCHAR,
|
||||
res_id INTEGER,
|
||||
value TEXT,
|
||||
"type" VARCHAR,
|
||||
module VARCHAR,
|
||||
fuzzy BOOLEAN,
|
||||
overriding_module VARCHAR,
|
||||
create_date TIMESTAMP,
|
||||
create_uid INTEGER,
|
||||
write_date TIMESTAMP,
|
||||
write_uid INTEGER
|
||||
);
|
||||
|
||||
CREATE TABLE ir_lang (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name VARCHAR,
|
||||
code VARCHAR,
|
||||
translatable BOOLEAN,
|
||||
parent VARCHAR,
|
||||
active BOOLEAN,
|
||||
direction VARCHAR,
|
||||
am VARCHAR,
|
||||
pm VARCHAR,
|
||||
"date" VARCHAR,
|
||||
grouping VARCHAR,
|
||||
decimal_point VARCHAR,
|
||||
thousands_sep VARCHAR,
|
||||
mon_grouping VARCHAR,
|
||||
mon_decimal_point VARCHAR,
|
||||
mon_thousands_sep VARCHAR,
|
||||
p_sign_posn INTEGER,
|
||||
n_sign_posn INTEGER,
|
||||
positive_sign VARCHAR,
|
||||
negative_sign VARCHAR,
|
||||
p_cs_precedes BOOLEAN,
|
||||
n_cs_precedes BOOLEAN,
|
||||
p_sep_by_space BOOLEAN,
|
||||
n_sep_by_space BOOLEAN,
|
||||
pg_text_search VARCHAR,
|
||||
create_date TIMESTAMP,
|
||||
create_uid INTEGER,
|
||||
write_date TIMESTAMP,
|
||||
write_uid INTEGER
|
||||
);
|
||||
|
||||
CREATE TABLE res_user (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name VARCHAR,
|
||||
active BOOLEAN,
|
||||
login VARCHAR,
|
||||
password VARCHAR,
|
||||
email VARCHAR,
|
||||
language INTEGER,
|
||||
menu INTEGER,
|
||||
password_hash VARCHAR,
|
||||
password_reset VARCHAR,
|
||||
password_reset_expire TIMESTAMP,
|
||||
signature TEXT,
|
||||
create_date TIMESTAMP,
|
||||
create_uid INTEGER,
|
||||
write_date TIMESTAMP,
|
||||
write_uid INTEGER
|
||||
);
|
||||
|
||||
INSERT INTO res_user (id, login, password, name, active) VALUES (0, 'root', NULL, 'Root', 0);
|
||||
|
||||
CREATE TABLE res_group (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name VARCHAR
|
||||
);
|
||||
|
||||
CREATE TABLE "res_user-res_group" (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
"user" INTEGER,
|
||||
"group" INTEGER,
|
||||
active BOOLEAN,
|
||||
parent INTEGER,
|
||||
create_date TIMESTAMP,
|
||||
create_uid INTEGER,
|
||||
write_date TIMESTAMP,
|
||||
write_uid INTEGER
|
||||
);
|
||||
|
||||
CREATE TABLE ir_module (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
create_uid INTEGER,
|
||||
create_date TIMESTAMP,
|
||||
write_date TIMESTAMP,
|
||||
write_uid INTEGER,
|
||||
name VARCHAR,
|
||||
state VARCHAR
|
||||
);
|
||||
|
||||
CREATE TABLE ir_module_dependency (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
create_uid INTEGER,
|
||||
create_date TIMESTAMP,
|
||||
write_date TIMESTAMP,
|
||||
write_uid INTEGER,
|
||||
name VARCHAR,
|
||||
module INTEGER
|
||||
);
|
||||
|
||||
CREATE TABLE ir_cache (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name VARCHAR,
|
||||
"timestamp" TIMESTAMP,
|
||||
create_date TIMESTAMP,
|
||||
create_uid INTEGER,
|
||||
write_date TIMESTAMP,
|
||||
write_uid INTEGER
|
||||
);
|
||||
383
backend/sqlite/table.py
Executable file
383
backend/sqlite/table.py
Executable file
@@ -0,0 +1,383 @@
|
||||
# This file is part of Tryton. The COPYRIGHT file at the top level of
|
||||
# this repository contains the full copyright notices and license terms.
|
||||
|
||||
import logging
|
||||
import re
|
||||
import warnings
|
||||
from weakref import WeakKeyDictionary
|
||||
|
||||
from trytond.backend.table import (
|
||||
IndexTranslatorInterface, TableHandlerInterface)
|
||||
from trytond.transaction import Transaction
|
||||
|
||||
from .database import sqlite
|
||||
|
||||
__all__ = ['TableHandler']
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
VARCHAR_SIZE_RE = re.compile(r'VARCHAR\(([0-9]+)\)')
|
||||
|
||||
|
||||
def _escape_identifier(name):
    """Quote *name* as a SQLite identifier, doubling embedded double
    quotes."""
    escaped = name.replace('"', '""')
    return '"{}"'.format(escaped)
|
||||
|
||||
|
||||
class TableHandler(TableHandlerInterface):
|
||||
__handlers = WeakKeyDictionary()
|
||||
index_translators = []
|
||||
|
||||
def _init(self, model, history=False):
    """Ensure the backing table exists and refresh cached definitions.

    Creates the table when missing; a history table gets its own __id
    surrogate key because the record id repeats across revisions.
    """
    super()._init(model, history=history)
    # Lazily-filled caches of column and index definitions
    self.__columns = None
    self.__indexes = None
    self._model = model

    cursor = Transaction().connection.cursor()
    # Create new table if necessary
    if not self.table_exist(self.table_name):
        if not self.history:
            cursor.execute('CREATE TABLE %s '
                '(id INTEGER PRIMARY KEY AUTOINCREMENT)'
                % _escape_identifier(self.table_name))
        else:
            cursor.execute('CREATE TABLE %s '
                '(__id INTEGER PRIMARY KEY AUTOINCREMENT, '
                'id INTEGER)' % _escape_identifier(self.table_name))

    self._update_definitions()
|
||||
|
||||
@classmethod
def table_exist(cls, table_name):
    """Return True when *table_name* exists in sqlite_master."""
    cursor = Transaction().connection.cursor()
    cursor.execute(
        "SELECT sql FROM sqlite_master "
        "WHERE type = 'table' AND name = ?",
        (table_name,))
    return bool(cursor.fetchone())
|
||||
|
||||
@classmethod
|
||||
def table_rename(cls, old_name, new_name):
|
||||
cursor = Transaction().connection.cursor()
|
||||
if (cls.table_exist(old_name)
|
||||
and not cls.table_exist(new_name)):
|
||||
cursor.execute('ALTER TABLE %s RENAME TO %s'
|
||||
% (_escape_identifier(old_name), _escape_identifier(new_name)))
|
||||
# Rename history table
|
||||
old_history = old_name + "__history"
|
||||
new_history = new_name + "__history"
|
||||
if (cls.table_exist(old_history)
|
||||
and not cls.table_exist(new_history)):
|
||||
cursor.execute('ALTER TABLE %s RENAME TO %s'
|
||||
% (_escape_identifier(old_history),
|
||||
_escape_identifier(new_history)))
|
||||
|
||||
def column_exist(self, column_name):
|
||||
return column_name in self._columns
|
||||
|
||||
def _recreate_table(self, update_columns=None, drop_columns=None):
|
||||
if update_columns is None:
|
||||
update_columns = {}
|
||||
if drop_columns is None:
|
||||
drop_columns = []
|
||||
transaction = Transaction()
|
||||
database = transaction.database
|
||||
cursor = transaction.connection.cursor()
|
||||
temp_table = '__temp_%s' % self.table_name
|
||||
temp_columns = dict(self._columns)
|
||||
self.table_rename(self.table_name, temp_table)
|
||||
self._init(self._model, history=self.history)
|
||||
columns, old_columns = [], []
|
||||
for name, values in temp_columns.items():
|
||||
if name in drop_columns:
|
||||
continue
|
||||
typname = update_columns.get(name, {}).get(
|
||||
'typname', values['typname'])
|
||||
size = update_columns.get(name, {}).get('size', values['size'])
|
||||
name = update_columns.get(name, {}).get('name', name)
|
||||
self._add_raw_column(
|
||||
name, database.sql_type(typname), field_size=size)
|
||||
columns.append(name)
|
||||
old_columns.append(name)
|
||||
cursor.execute(('INSERT INTO %s ('
|
||||
+ ','.join(_escape_identifier(x) for x in columns)
|
||||
+ ') SELECT '
|
||||
+ ','.join(_escape_identifier(x) for x in old_columns)
|
||||
+ ' FROM %s') % (
|
||||
_escape_identifier(self.table_name),
|
||||
_escape_identifier(temp_table)))
|
||||
cursor.execute('DROP TABLE %s' % _escape_identifier(temp_table))
|
||||
self._update_definitions()
|
||||
|
||||
def column_rename(self, old_name, new_name):
|
||||
cursor = Transaction().connection.cursor()
|
||||
if self.column_exist(old_name):
|
||||
if not self.column_exist(new_name):
|
||||
if sqlite.sqlite_version_info >= (3, 25, 0):
|
||||
cursor.execute('ALTER TABLE %s RENAME COLUMN %s TO %s' % (
|
||||
_escape_identifier(self.table_name),
|
||||
_escape_identifier(old_name),
|
||||
_escape_identifier(new_name)))
|
||||
self._update_definitions(columns=True)
|
||||
else:
|
||||
self._recreate_table({old_name: {'name': new_name}})
|
||||
else:
|
||||
logger.warning(
|
||||
'Unable to rename column %s on table %s to %s.',
|
||||
old_name, self.table_name, new_name)
|
||||
|
||||
@property
|
||||
def _columns(self):
|
||||
if self.__columns is None:
|
||||
cursor = Transaction().connection.cursor()
|
||||
cursor.execute('PRAGMA table_info("' + self.table_name + '")')
|
||||
self.__columns = {}
|
||||
for _, column, type_, notnull, hasdef, _ in cursor:
|
||||
column = re.sub(r'^\"|\"$', '', column)
|
||||
match = re.match(r'(\w+)(\((.*?)\))?', type_)
|
||||
if match:
|
||||
typname = match.group(1).upper()
|
||||
size = match.group(3) and int(match.group(3)) or 0
|
||||
else:
|
||||
typname = type_.upper()
|
||||
size = None
|
||||
self.__columns[column] = {
|
||||
'notnull': notnull,
|
||||
'hasdef': hasdef,
|
||||
'size': size,
|
||||
'typname': typname,
|
||||
}
|
||||
return self.__columns
|
||||
|
||||
@property
|
||||
def _indexes(self):
|
||||
if self.__indexes is None:
|
||||
cursor = Transaction().connection.cursor()
|
||||
try:
|
||||
cursor.execute('PRAGMA index_list("' + self.table_name + '")')
|
||||
except IndexError: # There is sometimes IndexError
|
||||
cursor.execute('PRAGMA index_list("' + self.table_name + '")')
|
||||
self.__indexes = [l[1] for l in cursor]
|
||||
return self.__indexes
|
||||
|
||||
def _update_definitions(self, columns=True):
|
||||
if columns:
|
||||
self.__columns = None
|
||||
|
||||
def alter_size(self, column_name, column_type):
|
||||
self._recreate_table({column_name: {'size': column_type}})
|
||||
|
||||
def alter_type(self, column_name, column_type):
|
||||
self._recreate_table({column_name: {'typname': column_type}})
|
||||
|
||||
def column_is_type(self, column_name, type_, *, size=-1):
|
||||
db_type = self._columns[column_name]['typname'].upper()
|
||||
|
||||
database = Transaction().database
|
||||
base_type = database.sql_type(type_).base.upper()
|
||||
if base_type == 'VARCHAR' and (size is None or size >= 0):
|
||||
same_size = self._columns[column_name]['size'] == size
|
||||
else:
|
||||
same_size = True
|
||||
|
||||
return base_type == db_type and same_size
|
||||
|
||||
def db_default(self, column_name, value):
|
||||
warnings.warn('Unable to set default on column with SQLite backend')
|
||||
|
||||
def add_column(self, column_name, sql_type, default=None, comment=''):
|
||||
database = Transaction().database
|
||||
column_type = database.sql_type(sql_type)
|
||||
match = VARCHAR_SIZE_RE.match(sql_type)
|
||||
field_size = int(match.group(1)) if match else None
|
||||
|
||||
self._add_raw_column(column_name, column_type, default, field_size,
|
||||
comment)
|
||||
|
||||
def _add_raw_column(self, column_name, column_type, default=None,
|
||||
field_size=None, string=''):
|
||||
if self.column_exist(column_name):
|
||||
base_type = column_type[0].upper()
|
||||
if base_type != self._columns[column_name]['typname']:
|
||||
if (self._columns[column_name]['typname'], base_type) in [
|
||||
('VARCHAR', 'TEXT'),
|
||||
('TEXT', 'VARCHAR'),
|
||||
('DATE', 'TIMESTAMP'),
|
||||
('INTEGER', 'FLOAT'),
|
||||
('INTEGER', 'NUMERIC'),
|
||||
('FLOAT', 'NUMERIC'),
|
||||
]:
|
||||
self.alter_type(column_name, base_type)
|
||||
else:
|
||||
logger.warning(
|
||||
'Unable to migrate column %s on table %s '
|
||||
'from %s to %s.',
|
||||
column_name, self.table_name,
|
||||
self._columns[column_name]['typname'], base_type)
|
||||
|
||||
if (base_type == 'VARCHAR'
|
||||
and self._columns[column_name]['typname'] == 'VARCHAR'):
|
||||
# Migrate size
|
||||
from_size = self._columns[column_name]['size']
|
||||
if field_size is None:
|
||||
if from_size > 0:
|
||||
self.alter_size(column_name, base_type)
|
||||
elif from_size == field_size:
|
||||
pass
|
||||
elif from_size and from_size < field_size:
|
||||
self.alter_size(column_name, column_type[1])
|
||||
else:
|
||||
logger.warning(
|
||||
'Unable to migrate column %s on table %s '
|
||||
'from varchar(%s) to varchar(%s).',
|
||||
column_name, self.table_name,
|
||||
from_size if from_size and from_size > 0 else "",
|
||||
field_size)
|
||||
return
|
||||
|
||||
cursor = Transaction().connection.cursor()
|
||||
column_type = column_type[1]
|
||||
cursor.execute(('ALTER TABLE %s ADD COLUMN %s %s') % (
|
||||
_escape_identifier(self.table_name),
|
||||
_escape_identifier(column_name),
|
||||
column_type))
|
||||
|
||||
if default:
|
||||
# check if table is non-empty:
|
||||
cursor.execute('SELECT 1 FROM %s limit 1'
|
||||
% _escape_identifier(self.table_name))
|
||||
if cursor.fetchone():
|
||||
# Populate column with default values:
|
||||
cursor.execute('UPDATE ' + _escape_identifier(self.table_name)
|
||||
+ ' SET ' + _escape_identifier(column_name) + ' = ?',
|
||||
(default(),))
|
||||
|
||||
self._update_definitions(columns=True)
|
||||
|
||||
def add_fk(self, columns, reference, ref_columns=None, on_delete=None):
|
||||
warnings.warn('Unable to add foreign key with SQLite backend')
|
||||
|
||||
def drop_fk(self, columns=None, ref_columns=None, table=None):
|
||||
warnings.warn('Unable to drop foreign key with SQLite backend')
|
||||
|
||||
def not_null_action(self, column_name, action='add'):
|
||||
if not self.column_exist(column_name):
|
||||
return
|
||||
|
||||
if action == 'add':
|
||||
warnings.warn('Unable to set not null with SQLite backend')
|
||||
elif action == 'remove':
|
||||
warnings.warn('Unable to remove not null with SQLite backend')
|
||||
else:
|
||||
raise Exception('Not null action not supported!')
|
||||
|
||||
def add_constraint(self, ident, constraint):
|
||||
warnings.warn('Unable to add constraint with SQLite backend')
|
||||
|
||||
def drop_constraint(self, ident, table=None):
|
||||
warnings.warn('Unable to drop constraint with SQLite backend')
|
||||
|
||||
def set_indexes(self, indexes, concurrently=False):
|
||||
cursor = Transaction().connection.cursor()
|
||||
old = set(self._indexes)
|
||||
for index in indexes:
|
||||
translator = self.index_translator_for(index)
|
||||
if translator:
|
||||
name, query, params = translator.definition(index)
|
||||
name = '_'.join([self.table_name, name])
|
||||
name = 'idx_' + self.convert_name(name, reserved=len('idx_'))
|
||||
# SQLite does not support parameters for index creation
|
||||
if not params:
|
||||
cursor.execute(
|
||||
'CREATE INDEX IF NOT EXISTS %s ON %s %s' % (
|
||||
_escape_identifier(name),
|
||||
_escape_identifier(self.table_name),
|
||||
query),
|
||||
params)
|
||||
else:
|
||||
warnings.warn("Can not create index with parameters")
|
||||
old.discard(name)
|
||||
for name in old:
|
||||
if name.startswith('idx_') or name.endswith('_index'):
|
||||
cursor.execute('DROP INDEX %s' % _escape_identifier(name))
|
||||
self.__indexes = None
|
||||
|
||||
def drop_column(self, column_name):
|
||||
if not self.column_exist(column_name):
|
||||
return
|
||||
transaction = Transaction()
|
||||
cursor = transaction.connection.cursor()
|
||||
if sqlite.sqlite_version_info >= (3, 35, 0):
|
||||
cursor.execute('ALTER TABLE %s DROP COLUMN %s' % (
|
||||
_escape_identifier(self.table_name),
|
||||
_escape_identifier(column_name)))
|
||||
self._update_definitions(columns=True)
|
||||
else:
|
||||
self._recreate_table(drop_columns=[column_name])
|
||||
|
||||
@classmethod
|
||||
def drop_table(cls, model, table, cascade=False):
|
||||
cursor = Transaction().connection.cursor()
|
||||
cursor.execute('DELETE from ir_model_data where model = ?',
|
||||
(model,))
|
||||
|
||||
query = 'DROP TABLE %s' % _escape_identifier(table)
|
||||
if cascade:
|
||||
query = query + ' CASCADE'
|
||||
cursor.execute(query)
|
||||
|
||||
|
||||
class IndexMixin:
    """Shared SQLite index DDL generation for index translators."""

    def __init_subclass__(cls):
        # Every concrete translator registers itself with the handler.
        TableHandler.index_translators.append(cls)

    @classmethod
    def definition(cls, index):
        """Return a (name, query, params) triple for *index*."""
        params = []
        expressions = []
        for expression, usage in index.expressions:
            variables = cls._get_expression_variables(expression, usage)
            expressions.append(
                '%(expression)s %(collate)s %(order)s' % variables)
            params.extend(expression.params)

        if index.options.get('where'):
            where = 'WHERE %s' % index.options['where']
            params.extend(index.options['where'].params)
        else:
            where = ''

        query = '(%s) %s' % (','.join(expressions), where)
        return cls._get_name(query, params), query, params

    @classmethod
    def _get_expression_variables(cls, expression, usage):
        """Return the substitution dict for one indexed expression."""
        collate = ''
        order = ''
        if usage.options.get('collation'):
            collate = 'COLLATE %s' % usage.options['collation']
        if usage.options.get('order'):
            order = usage.options['order'].upper()
            # SQLite has no NULLS FIRST/LAST in index definitions; strip
            # the predicate and keep only the direction.
            for predicate in ['NULLS FIRST', 'NULLS LAST']:
                if order.endswith(predicate):
                    order = order[:-len(predicate)]
        return {
            'expression': str(expression),
            'collate': collate,
            'order': order,
            }
|
||||
|
||||
|
||||
class IndexTranslator(IndexMixin, IndexTranslatorInterface):
    """Generic translator scoring indexes by supported usage count."""

    @classmethod
    def score(cls, index):
        # Only equality and range usages map onto plain SQLite indexes.
        supported = 0
        for _, usage in index.expressions:
            if usage.__class__.__name__ in {'Equality', 'Range'}:
                supported += 1
        return 100 * supported
|
||||
140
backend/table.py
Executable file
140
backend/table.py
Executable file
@@ -0,0 +1,140 @@
|
||||
# This file is part of Tryton. The COPYRIGHT file at the top level of
|
||||
# this repository contains the full copyright notices and license terms.
|
||||
import hashlib
|
||||
from weakref import WeakKeyDictionary
|
||||
|
||||
from trytond.transaction import Transaction
|
||||
|
||||
|
||||
class TableHandlerInterface(object):
    '''
    Define generic interface to handle database table
    '''
    # Maximum identifier length of the backend; None means unlimited
    namedatalen = None
    # List of registered index translator classes (set by backends)
    index_translators = None
    # Per-transaction cache of handler instances
    __handlers = WeakKeyDictionary()

    def __new__(cls, model, history=False):
        # Reuse a single handler per (model, history) pair for the
        # lifetime of the current transaction.
        transaction = Transaction()
        handlers = cls.__handlers.setdefault(transaction, {})
        key = (model.__name__, history)
        if key not in handlers:
            instance = handlers[key] = super().__new__(cls)
            instance._init(model, history=history)
        return handlers[key]

    def _init(self, model, history=False):
        '''
        Initialize the handler for the table of model.

        :param model: the Model linked to the table
        :param history: a boolean to define if it is a history table
        '''
        super().__init__()
        if history:
            self.table_name = model._table + '__history'
        else:
            self.table_name = model._table
        self.object_name = model.__name__
        # History tables use the __id primary key, hence the extra
        # underscore in the sequence name.
        if history:
            self.sequence_name = self.table_name + '___id_seq'
        else:
            self.sequence_name = self.table_name + '_id_seq'
        self.history = history

    @classmethod
    def table_exist(cls, table_name):
        '''Return True if table_name exists.'''
        raise NotImplementedError

    @classmethod
    def table_rename(cls, old_name, new_name):
        '''Rename table old_name into new_name.'''
        raise NotImplementedError

    def column_exist(self, column_name):
        '''Return True if column_name exists.'''
        raise NotImplementedError

    def column_rename(self, old_name, new_name):
        '''Rename column old_name into new_name.'''
        raise NotImplementedError

    def alter_size(self, column_name, column_type):
        '''Modify the size of the column.'''
        raise NotImplementedError

    def alter_type(self, column_name, column_type):
        '''Modify the type of the column.'''
        raise NotImplementedError

    def column_is_type(self, column_name, type_, *, size=-1):
        '''Return True if the column is of type type_.'''
        raise NotImplementedError

    def db_default(self, column_name, value):
        '''Set a database default on the column.'''
        raise NotImplementedError

    def add_column(self, column_name, abstract_type, default=None, comment=''):
        '''Add the column of abstract_type to the table.'''
        raise NotImplementedError

    def add_fk(self, columns, reference, ref_columns=None, on_delete=None):
        '''Add a foreign key constraint.'''
        raise NotImplementedError

    def drop_fk(self, columns, ref_columns=None, table=None):
        '''Drop a foreign key constraint.'''
        raise NotImplementedError

    def not_null_action(self, column_name, action='add'):
        '''Add or remove a NOT NULL constraint on the column.'''
        raise NotImplementedError

    def add_constraint(self, ident, constraint):
        '''Add the constraint under name ident.'''
        raise NotImplementedError

    def drop_constraint(self, ident, table=None):
        '''Drop the constraint named ident.'''
        raise NotImplementedError

    def create_index(self, index):
        '''Create the index.'''
        raise NotImplementedError

    def drop_column(self, column_name):
        '''Remove the column from the table.'''
        raise NotImplementedError

    @classmethod
    def drop_table(cls, model, table, cascade=False):
        '''Drop the table and related entries of model.'''
        raise NotImplementedError

    @classmethod
    def convert_name(cls, name, reserved=0):
        '''Convert name into a valid identifier for the backend.

        Names that would exceed namedatalen (minus reserved characters)
        are replaced by a truncated SHA-256 digest so they stay unique
        and within the backend's limit.
        '''
        if cls.namedatalen:
            length = cls.namedatalen - reserved
            if length <= 0:
                raise ValueError
            if len(name) >= length:
                if isinstance(name, str):
                    name = name.encode('utf-8')
                name = hashlib.sha256(name).hexdigest()[:length - 1]
        return name

    def set_indexes(self, indexes, concurrently=False):
        '''Create the indexes and drop the stale managed ones.'''
        raise NotImplementedError

    def index_translator_for(self, index):
        '''Return the best-scoring translator for index, or None.'''
        return next(
            filter(
                lambda t: t.score(index) > 0,
                sorted(
                    self.index_translators, key=lambda t: t.score(index),
                    reverse=True)),
            None)
|
||||
|
||||
|
||||
class IndexTranslatorInterface:
    """Interface for translating generic index definitions to SQL."""

    @classmethod
    def _get_name(cls, query, params):
        """Derive a stable index name from the query and its parameters."""
        parts = [str(query)]
        if params:
            parts.append(str(params))
        return '_'.join(
            hashlib.shake_128(part.encode('utf-8')).hexdigest(16)
            for part in parts)

    @classmethod
    def definition(cls, index):
        """Return (name, query, params) for the index."""
        raise NotImplementedError

    @classmethod
    def score(cls, index):
        """Return a positive score when the index is supported, else 0."""
        raise NotImplementedError
|
||||
Reference in New Issue
Block a user