Initial import from Docker volume

This commit is contained in:
root
2025-12-26 13:11:43 +00:00
commit 4998dc066a
13336 changed files with 1767801 additions and 0 deletions

12
backend/postgresql/__init__.py Executable file
View File

@@ -0,0 +1,12 @@
# This file is part of Tryton. The COPYRIGHT file at the top level of
# this repository contains the full copyright notices and license terms.
from .database import (
    Database, DatabaseDataError, DatabaseIntegrityError,
    DatabaseOperationalError, DatabaseTimeoutError)
from .table import TableHandler

# __all__ must contain the public *names* as strings; listing the objects
# themselves breaks `from ... import *` (TypeError: item must be str) and
# confuses static analysis tools.
__all__ = [
    'Database', 'TableHandler',
    'DatabaseIntegrityError', 'DatabaseDataError', 'DatabaseOperationalError',
    'DatabaseTimeoutError']

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

814
backend/postgresql/database.py Executable file
View File

@@ -0,0 +1,814 @@
# This file is part of Tryton. The COPYRIGHT file at the top level of
# this repository contains the full copyright notices and license terms.
import json
import logging
import os
import time
import warnings
from collections import defaultdict
from datetime import datetime
from decimal import Decimal
from itertools import chain, repeat
from threading import RLock
from psycopg2 import Binary, connect
from psycopg2.extensions import (
ISOLATION_LEVEL_REPEATABLE_READ, UNICODE, AsIs, cursor, register_adapter,
register_type)
from psycopg2.pool import PoolError, ThreadedConnectionPool
from psycopg2.sql import SQL, Identifier
try:
from psycopg2.extensions import PYDATE, PYDATETIME, PYINTERVAL, PYTIME
except ImportError:
PYDATE, PYDATETIME, PYTIME, PYINTERVAL = None, None, None, None
from psycopg2 import DataError as DatabaseDataError
from psycopg2 import IntegrityError as DatabaseIntegrityError
from psycopg2 import InterfaceError
from psycopg2 import OperationalError as DatabaseOperationalError
from psycopg2 import ProgrammingError
from psycopg2.errors import QueryCanceled as DatabaseTimeoutError
from psycopg2.extras import register_default_json, register_default_jsonb
from sql import Cast, Flavor, For, Table
from sql.conditionals import Coalesce
from sql.functions import Function
from sql.operators import BinaryOperator, Concat
from trytond.backend.database import DatabaseInterface, SQLType
from trytond.config import config, parse_uri
from trytond.tools.gevent import is_gevent_monkey_patched
__all__ = [
    'Database',
    'DatabaseIntegrityError', 'DatabaseDataError', 'DatabaseOperationalError',
    'DatabaseTimeoutError']

logger = logging.getLogger(__name__)

# Make libpq use the same timezone as the server process.
os.environ['PGTZ'] = os.environ.get('TZ', '')

# Pool and cache tuning, read once at import time from trytond config.
_timeout = config.getint('database', 'timeout')  # idle database expiry (s)
_minconn = config.getint('database', 'minconn', default=1)
_maxconn = config.getint('database', 'maxconn', default=64)
_default_name = config.get('database', 'default_name', default='template1')
def unescape_quote(s):
    """Return *s* with surrounding double quotes removed.

    A quoted identifier loses its outer quotes and every doubled quote
    ('""') is collapsed back to a single one; any other string is
    returned unchanged.
    """
    if not (s.startswith('"') and s.endswith('"')):
        return s
    return s.strip('"').replace('""', '"')
def replace_special_values(s, **mapping):
    """Substitute every '$name' marker in *s* with mapping[name]."""
    result = s
    for name in mapping:
        result = result.replace('$' + name, mapping[name])
    return result
class LoggingCursor(cursor):
    """psycopg2 cursor that logs every executed query at DEBUG level."""

    def execute(self, sql, args=None):
        # mogrify renders the query with parameters interpolated, which
        # is only worth the cost when DEBUG logging is actually enabled.
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug(self.mogrify(sql, args))
        cursor.execute(self, sql, args)
class ForSkipLocked(For):
    """FOR UPDATE/SHARE clause rendered with SKIP LOCKED appended."""

    def __str__(self):
        # NOWAIT and SKIP LOCKED are mutually exclusive in PostgreSQL.
        assert not self.nowait, "Can not use both NO WAIT and SKIP LOCKED"
        return super().__str__() + (' SKIP LOCKED' if not self.nowait else '')
class Unaccent(Function):
    """unaccent(text); the function name is configurable."""
    __slots__ = ()
    _function = config.get('database', 'unaccent_function', default='unaccent')


class Similarity(Function):
    """Trigram similarity(); the function name is configurable."""
    __slots__ = ()
    _function = config.get(
        'database', 'similarity_function', default='similarity')


class Match(BinaryOperator):
    """Full-text match operator: tsvector @@ tsquery."""
    __slots__ = ()
    _operator = '@@'


class ToTsvector(Function):
    """to_tsvector(config, document)."""
    __slots__ = ()
    _function = 'to_tsvector'


class Setweight(Function):
    """setweight(tsvector, weight)."""
    __slots__ = ()
    _function = 'setweight'


class TsQuery(Function):
    """Base class of the tsquery-producing functions."""
    __slots__ = ()


class ToTsQuery(TsQuery):
    __slots__ = ()
    _function = 'to_tsquery'


class PlainToTsQuery(TsQuery):
    __slots__ = ()
    _function = 'plainto_tsquery'


class PhraseToTsQuery(TsQuery):
    __slots__ = ()
    _function = 'phraseto_tsquery'


class WebsearchToTsQuery(TsQuery):
    """websearch_to_tsquery (PostgreSQL >= 11)."""
    __slots__ = ()
    _function = 'websearch_to_tsquery'


class TsRank(Function):
    """ts_rank(tsvector, tsquery[, normalization])."""
    __slots__ = ()
    _function = 'ts_rank'
class AdvisoryLock(Function):
    """Blocking transaction-scoped advisory lock."""
    _function = 'pg_advisory_xact_lock'


class TryAdvisoryLock(Function):
    """Non-blocking advisory lock; returns a boolean instead of waiting."""
    _function = 'pg_try_advisory_xact_lock'


class JSONBExtractPath(Function):
    """jsonb_extract_path(jsonb, key...)."""
    __slots__ = ()
    _function = 'jsonb_extract_path'


class JSONKeyExists(BinaryOperator):
    """jsonb ? text — does the key exist."""
    __slots__ = ()
    _operator = '?'
class _BinaryOperatorArray(BinaryOperator):
    "Binary Operator that convert list into Array"

    @property
    def _operands(self):
        # Hide a list right operand from the default rendering; it is
        # passed whole through `params` below so psycopg2 adapts it to
        # a PostgreSQL array.
        if isinstance(self.right, list):
            return (self.left, None)
        return super()._operands

    @property
    def params(self):
        params = super().params
        if isinstance(self.right, list):
            # Replace the placeholder produced by _operands with the
            # complete list.
            params = params[:-1] + (self.right,)
        return params
class JSONAnyKeyExist(_BinaryOperatorArray):
    """jsonb ?| text[] — does any of the keys exist."""
    __slots__ = ()
    _operator = '?|'


class JSONAllKeyExist(_BinaryOperatorArray):
    """jsonb ?& text[] — do all of the keys exist."""
    __slots__ = ()
    _operator = '?&'


class JSONContains(BinaryOperator):
    """jsonb @> jsonb containment operator."""
    __slots__ = ()
    _operator = '@>'
class Database(DatabaseInterface):
    """PostgreSQL implementation of the Tryton database backend.

    Instances are cached per (process, database name) and share a
    psycopg2 threaded connection pool.
    """
    index_translators = []
    _lock = RLock()  # guards the per-process instance cache
    # pid -> {database name -> Database instance}
    _databases = defaultdict(dict)
    _connpool = None
    _list_cache = {}  # hostname -> cached list of database names
    _list_cache_timestamp = {}  # hostname -> time of last listing
    _search_path = None
    _current_user = None
    _has_returning = None
    _has_select_for_skip_locked = None
    # database name -> proc name -> property -> cached pg_proc value
    _has_proc = defaultdict(lambda: defaultdict(dict))
    _extensions = defaultdict(dict)  # database name -> extension -> bool
    _search_full_text_languages = defaultdict(dict)
    flavor = Flavor(ilike=True)

    # Generic SQL type -> (PostgreSQL base type, column definition type).
    TYPES_MAPPING = {
        'SMALLINT': SQLType('INT2', 'INT2'),
        'BIGINT': SQLType('INT8', 'INT8'),
        'BLOB': SQLType('BYTEA', 'BYTEA'),
        'DATETIME': SQLType('TIMESTAMP', 'TIMESTAMP(0)'),
        'REAL': SQLType('FLOAT4', 'FLOAT4'),
        'FLOAT': SQLType('FLOAT8', 'FLOAT8'),
        'FULLTEXT': SQLType('TSVECTOR', 'TSVECTOR'),
        'INTEGER': SQLType('INT4', 'INT4'),
        'JSON': SQLType('JSONB', 'JSONB'),
        'TIMESTAMP': SQLType('TIMESTAMP', 'TIMESTAMP(6)'),
        }
    def __new__(cls, name=_default_name):
        """Return the cached instance for *name*, creating it on demand.

        Also closes the pools of databases that have been idle longer
        than the configured timeout (never the requested one, and never
        a pool that still has connections checked out).
        """
        with cls._lock:
            now = datetime.now()
            databases = cls._databases[os.getpid()]
            for database in list(databases.values()):
                if ((now - database._last_use).total_seconds() > _timeout
                        and database.name != name
                        and not database._connpool._used):
                    database.close()
            if name in databases:
                inst = databases[name]
            else:
                inst = DatabaseInterface.__new__(cls, name=name)
                try:
                    inst._connpool = ThreadedConnectionPool(
                        _minconn, _maxconn, **cls._connection_params(name),
                        cursor_factory=LoggingCursor)
                except Exception:
                    logger.error(
                        'connection to "%s" failed', name, exc_info=True)
                    raise
                else:
                    logger.info('connection to "%s" succeeded', name)
                databases[name] = inst
            inst._last_use = datetime.now()
            return inst
def __init__(self, name=_default_name):
super(Database, self).__init__(name)
    @classmethod
    def _connection_params(cls, name):
        """Return psycopg2 connection parameters for database *name*."""
        uri = parse_uri(config.get('database', 'uri'))
        if uri.path and uri.path != '/':
            warnings.warn("The path specified in the URI will be overridden")
        params = {
            # The URI path is replaced by the requested database name.
            'dsn': uri._replace(path=name).geturl(),
            'fallback_application_name': os.environ.get(
                'TRYTOND_APPNAME', 'trytond'),
            }
        return params
    def connect(self):
        """No-op: connections are acquired through get_connection()."""
        return self
    def get_connection(
            self, autocommit=False, readonly=False, statement_timeout=None):
        """Fetch a connection from the pool.

        :param autocommit: enable psycopg2 autocommit mode
        :param readonly: open the session read-only
        :param statement_timeout: per-statement timeout in seconds
        """
        # Retry at least once per pool slot so a temporarily-full pool
        # has a chance to free a connection.
        retry = max(config.getint('database', 'retry'), _maxconn)
        for count in range(retry, -1, -1):
            try:
                conn = self._connpool.getconn()
            except (PoolError, DatabaseOperationalError):
                if count and not self._connpool.closed:
                    logger.info('waiting a connection')
                    time.sleep(1)
                    continue
                raise
            except Exception:
                logger.error(
                    'connection to "%s" failed', self.name, exc_info=True)
                raise
            try:
                conn.set_session(
                    isolation_level=ISOLATION_LEVEL_REPEATABLE_READ,
                    readonly=readonly,
                    autocommit=autocommit)
                with conn.cursor() as cur:
                    if statement_timeout:
                        # statement_timeout is expressed in milliseconds
                        cur.execute('SET statement_timeout=%s' %
                            (statement_timeout * 1000))
                    else:
                        # Detect disconnection
                        cur.execute('SELECT 1')
            except DatabaseOperationalError:
                # Stale connection: discard it and pick another one.
                self._connpool.putconn(conn, close=True)
                continue
            break
        return conn
    def put_connection(self, connection, close=False):
        """Return *connection* to the pool, optionally closing it."""
        try:
            # Reset session state before reuse; a broken connection
            # raises InterfaceError and is handed back as-is.
            connection.reset()
        except InterfaceError:
            pass
        self._connpool.putconn(connection, close=close)
    def close(self):
        """Close every pooled connection and drop the cached instance."""
        with self._lock:
            logger.info('disconnection from "%s"', self.name)
            self._connpool.closeall()
            self._databases[os.getpid()].pop(self.name)
    @classmethod
    def create(cls, connection, database_name, template='template0'):
        """Create *database_name* from *template* with unicode encoding."""
        cursor = connection.cursor()
        cursor.execute(
            SQL(
                "CREATE DATABASE {} TEMPLATE {} ENCODING 'unicode'")
            .format(
                Identifier(database_name),
                Identifier(template)))
        connection.commit()
        # The cached listing of databases is no longer accurate.
        cls._list_cache.clear()

    @classmethod
    def drop(cls, connection, database_name):
        """Drop *database_name* and purge all related caches."""
        cursor = connection.cursor()
        cursor.execute(SQL("DROP DATABASE {}")
            .format(Identifier(database_name)))
        cls._list_cache.clear()
        cls._has_proc.pop(database_name, None)
        cls._search_full_text_languages.pop(database_name, None)
def get_version(self, connection):
version = connection.server_version
major, rest = divmod(int(version), 10000)
minor, patch = divmod(rest, 100)
return (major, minor, patch)
    def list(self, hostname=None):
        """Return the names of the Tryton databases on the server.

        The result is cached for the duration of a session timeout.
        """
        now = time.time()
        timeout = config.getint('session', 'timeout')
        res = self.__class__._list_cache.get(hostname)
        timestamp = self.__class__._list_cache_timestamp.get(hostname, now)
        if res and abs(timestamp - now) < timeout:
            return res
        connection = self.get_connection()
        try:
            cursor = connection.cursor()
            cursor.execute('SELECT datname FROM pg_database '
                'WHERE datistemplate = false ORDER BY datname')
            res = []
            for db_name, in cursor:
                # Keep only databases that pass the Tryton schema test
                # (and match *hostname* when one is given).
                try:
                    conn = connect(**self._connection_params(db_name))
                    try:
                        with conn:
                            if self._test(conn, hostname=hostname):
                                res.append(db_name)
                    finally:
                        conn.close()
                except Exception:
                    logger.debug(
                        'Test failed for "%s"', db_name, exc_info=True)
                    continue
        finally:
            self.put_connection(connection, close=True)
        self.__class__._list_cache[hostname] = res
        self.__class__._list_cache_timestamp[hostname] = now
        return res
    def init(self):
        """Bootstrap a fresh database.

        Runs init.sql statement by statement, then registers the 'ir'
        and 'res' modules together with their dependencies.
        """
        from trytond.modules import get_module_info
        connection = self.get_connection()
        try:
            cursor = connection.cursor()
            sql_file = os.path.join(os.path.dirname(__file__), 'init.sql')
            with open(sql_file) as fp:
                # init.sql holds one statement per ';' separator.
                for line in fp.read().split(';'):
                    if (len(line) > 0) and (not line.isspace()):
                        cursor.execute(line)
            for module in ['ir', 'res']:
                info = get_module_info(module)
                cursor.execute('INSERT INTO ir_module '
                    '(create_uid, create_date, name, state) '
                    'VALUES (%s, now(), %s, %s) '
                    'RETURNING id',
                    (0, module, 'to activate'))
                module_id = cursor.fetchone()[0]
                for dependency in info.get('depends', []):
                    cursor.execute('INSERT INTO ir_module_dependency '
                        '(create_uid, create_date, module, name) '
                        'VALUES (%s, now(), %s, %s)',
                        (0, module_id, dependency))
            connection.commit()
        finally:
            self.put_connection(connection)
    def test(self, hostname=None):
        """Return True if this is a Tryton database (for *hostname*)."""
        try:
            connection = self.get_connection()
        except Exception:
            logger.debug('Test failed for "%s"', self.name, exc_info=True)
            return False
        try:
            return self._test(connection, hostname=hostname)
        finally:
            self.put_connection(connection, close=True)

    @classmethod
    def _test(cls, connection, hostname=None):
        """Check the core Tryton tables exist and, when *hostname* is
        given, that it is listed in ir_configuration."""
        cursor = connection.cursor()
        tables = ('ir_model', 'ir_model_field', 'ir_ui_view', 'ir_ui_menu',
            'res_user', 'res_group', 'ir_module', 'ir_module_dependency',
            'ir_translation', 'ir_lang', 'ir_configuration')
        cursor.execute('SELECT table_name FROM information_schema.tables '
            'WHERE table_name IN %s', (tables,))
        if len(cursor.fetchall()) != len(tables):
            return False
        if hostname:
            try:
                cursor.execute(
                    'SELECT hostname FROM ir_configuration')
                hostnames = {h for h, in cursor if h}
                if hostnames and hostname not in hostnames:
                    return False
            except ProgrammingError:
                # Presumably older databases without the hostname column
                # — treated as matching; confirm against migrations.
                pass
        return True
    def nextid(self, connection, table, count=1):
        """Fetch *count* fresh ids from *table*'s id sequence.

        Returns a single id when count == 1, else a list of ids.
        """
        cursor = connection.cursor()
        # format(%I) quotes the table name as an identifier server-side.
        cursor.execute(
            "SELECT nextval(pg_get_serial_sequence(format(%s, %s), %s)) "
            "FROM generate_series(1, %s)",
            ('%I', table, 'id', count))
        if count == 1:
            return cursor.fetchone()[0]
        else:
            return [id for id, in cursor]

    def setnextid(self, connection, table, value):
        """Set *table*'s id sequence so its next value is *value*."""
        cursor = connection.cursor()
        cursor.execute(
            "SELECT setval(pg_get_serial_sequence(format(%s, %s), %s), %s)",
            ('%I', table, 'id', value))

    def currid(self, connection, table):
        """Return the last value delivered by *table*'s id sequence."""
        cursor = connection.cursor()
        cursor.execute(
            "SELECT pg_get_serial_sequence(format(%s, %s), %s)",
            ('%I', table, 'id'))
        sequence_name, = cursor.fetchone()
        # sequence_name is produced (and quoted) by PostgreSQL itself,
        # so interpolating it directly is safe here.
        cursor.execute(f"SELECT last_value FROM {sequence_name}")
        return cursor.fetchone()[0]
    def lock(self, connection, table):
        """Take an exclusive lock on *table* without waiting."""
        cursor = connection.cursor()
        cursor.execute(SQL('LOCK {} IN EXCLUSIVE MODE NOWAIT').format(
                Identifier(table)))

    def lock_id(self, id, timeout=None):
        """Return the SQL expression taking advisory lock *id*."""
        if not timeout:
            # Non-blocking variant: yields a boolean instead of waiting.
            return TryAdvisoryLock(id)
        else:
            return AdvisoryLock(id)

    def has_constraint(self, constraint):
        # PostgreSQL supports every constraint type Tryton generates.
        return True

    def has_multirow_insert(self):
        return True
def get_table_schema(self, connection, table_name):
cursor = connection.cursor()
for schema in self.search_path:
cursor.execute('SELECT 1 '
'FROM information_schema.tables '
'WHERE table_name = %s AND table_schema = %s',
(table_name, schema))
if cursor.rowcount:
return schema
    @property
    def current_user(self):
        """Name of the PostgreSQL role in use (cached after first read)."""
        if self._current_user is None:
            connection = self.get_connection()
            try:
                cursor = connection.cursor()
                cursor.execute('SELECT current_user')
                self._current_user = cursor.fetchone()[0]
            finally:
                self.put_connection(connection)
        return self._current_user
    @property
    def search_path(self):
        """The schema search path as a list of unquoted names (cached)."""
        if self._search_path is None:
            connection = self.get_connection()
            try:
                cursor = connection.cursor()
                cursor.execute('SHOW search_path')
                path, = cursor.fetchone()
                # Resolve the "$user" placeholder to the actual role.
                special_values = {
                    'user': self.current_user,
                    }
                self._search_path = [
                    unescape_quote(replace_special_values(
                            p.strip(), **special_values))
                    for p in path.split(',')]
            finally:
                self.put_connection(connection)
        return self._search_path
    def has_returning(self):
        """True when INSERT/UPDATE ... RETURNING is usable (cached)."""
        if self._has_returning is None:
            connection = self.get_connection()
            try:
                # RETURNING clause is available since PostgreSQL 8.2
                self._has_returning = self.get_version(connection) >= (8, 2)
            finally:
                self.put_connection(connection)
        return self._has_returning

    def has_select_for(self):
        return True

    def get_select_for_skip_locked(self):
        """Return the For clause class to use (SKIP LOCKED if supported)."""
        if self._has_select_for_skip_locked is None:
            connection = self.get_connection()
            try:
                # SKIP LOCKED clause is available since PostgreSQL 9.5
                self._has_select_for_skip_locked = (
                    self.get_version(connection) >= (9, 5))
            finally:
                self.put_connection(connection)
        if self._has_select_for_skip_locked:
            return ForSkipLocked
        else:
            return For

    def has_window_functions(self):
        return True

    @classmethod
    def has_sequence(cls):
        return True
    def has_proc(self, name, property='oid'):
        """Return *property* of the pg_proc row for function *name*.

        Returns False when the function is not installed; results are
        cached per database.
        """
        if (name in self._has_proc[self.name]
                and property in self._has_proc[self.name][name]):
            return self._has_proc[self.name][name][property]
        connection = self.get_connection()
        result = False
        try:
            cursor = connection.cursor()
            cursor.execute(
                SQL('SELECT {} FROM pg_proc WHERE proname=%s').format(
                    Identifier(property)), (name,))
            result = cursor.fetchone()
            if result:
                # Unpack the single-column row.
                result, = result
        finally:
            self.put_connection(connection)
        self._has_proc[self.name][name][property] = result
        return result
    def has_unaccent(self):
        """True when the unaccent() function is installed."""
        return self.has_proc(Unaccent._function)

    def has_unaccent_indexable(self):
        # Only an immutable ('i' volatility) unaccent may be indexed.
        return self.has_proc(Unaccent._function, 'provolatile') == 'i'

    def has_similarity(self):
        """True when the trigram similarity() function is installed."""
        return self.has_proc(Similarity._function)

    def similarity(self, column, value):
        """Return the similarity expression of *column* against *value*."""
        return Similarity(column, value)

    def has_search_full_text(self):
        return True
    def _search_full_text_language(self, language):
        """Return the text-search configuration name for *language*.

        Read from ir_lang.pg_text_search with 'simple' as fallback;
        cached per database.
        """
        languages = self._search_full_text_languages[self.name]
        if language not in languages:
            lang = Table('ir_lang')
            connection = self.get_connection()
            try:
                cursor = connection.cursor()
                cursor.execute(*lang.select(
                        Coalesce(lang.pg_text_search, 'simple'),
                        where=lang.code == language,
                        limit=1))
                config_name, = cursor.fetchone()
            finally:
                self.put_connection(connection)
            languages[language] = config_name
        else:
            config_name = languages[language]
        return config_name
    def format_full_text(self, *documents, language=None):
        """Build a weighted tsvector expression from *documents*.

        With several documents, the first quarter gets weight A, the
        next quarters B and C, and the remainder D; a single document
        is left unweighted.  Empty documents are skipped.
        """
        size = max(len(documents) // 4, 1)
        if len(documents) > 1:
            weights = chain(
                ['A'] * size, ['B'] * size, ['C'] * size, repeat('D'))
        else:
            weights = [None]
        expression = None
        if language:
            config_name = self._search_full_text_language(language)
        else:
            config_name = None
        for document, weight in zip(documents, weights):
            if not document:
                continue
            if config_name:
                ts_vector = ToTsvector(config_name, document)
            else:
                ts_vector = ToTsvector('simple', document)
            if weight:
                ts_vector = Setweight(ts_vector, weight)
            # Concatenate the per-document vectors into one expression.
            if expression is None:
                expression = ts_vector
            else:
                expression = Concat(expression, ts_vector)
        return expression
    def format_full_text_query(self, query, language=None):
        """Convert *query* into a tsquery expression.

        Plain values are parsed with websearch_to_tsquery on servers
        that support it (>= 11), otherwise with plainto_tsquery.
        """
        connection = self.get_connection()
        try:
            version = self.get_version(connection)
        finally:
            self.put_connection(connection)
        if not isinstance(query, TsQuery):
            if version >= (11, 0):
                ToTsQuery = WebsearchToTsQuery
            else:
                ToTsQuery = PlainToTsQuery
            if language:
                config_name = self._search_full_text_language(language)
            else:
                config_name = 'simple'
            query = ToTsQuery(config_name, query)
        return query
    def search_full_text(self, document, query):
        """Return the match expression of *document* against *query*."""
        return Match(document, query)

    def rank_full_text(self, document, query, normalize=None):
        """Return a ts_rank expression for ordering search results.

        *normalize* is an iterable of normalization names folded into
        PostgreSQL's ts_rank integer bit mask; unknown names are
        ignored.
        """
        # TODO: weights and cover density
        norm_int = 0
        if normalize:
            values = {
                'document log': 1,
                'document': 2,
                'mean': 4,
                'word': 8,
                'word log': 16,
                'rank': 32,
                }
            for norm in normalize:
                norm_int |= values.get(norm, 0)
        return TsRank(document, query, norm_int)
def sql_type(self, type_):
if type_ in self.TYPES_MAPPING:
return self.TYPES_MAPPING[type_]
if type_.startswith('VARCHAR'):
return SQLType('VARCHAR', type_)
return SQLType(type_, type_)
    def sql_format(self, type_, value):
        """Adapt a Python *value* for storage as SQL type *type_*."""
        if type_ == 'BLOB':
            if value is not None:
                # Wrap raw bytes for the BYTEA adapter.
                return Binary(value)
        return value

    def unaccent(self, value):
        """Wrap *value* with unaccent() when the function is available."""
        if self.has_unaccent():
            return Unaccent(value)
        return value
    def sequence_exist(self, connection, name):
        """Return True if sequence *name* exists in the search path."""
        cursor = connection.cursor()
        for schema in self.search_path:
            cursor.execute('SELECT 1 '
                'FROM information_schema.sequences '
                'WHERE sequence_name = %s AND sequence_schema = %s',
                (name, schema))
            if cursor.rowcount:
                return True
        return False

    def sequence_create(
            self, connection, name, number_increment=1, start_value=1):
        """Create sequence *name* with the given increment and start."""
        cursor = connection.cursor()
        cursor.execute(
            SQL("CREATE SEQUENCE {} INCREMENT BY %s START WITH %s").format(
                Identifier(name)),
            (number_increment, start_value))

    def sequence_update(
            self, connection, name, number_increment=1, start_value=1):
        """Alter increment and restart value of sequence *name*."""
        cursor = connection.cursor()
        cursor.execute(
            SQL("ALTER SEQUENCE {} INCREMENT BY %s RESTART WITH %s").format(
                Identifier(name)),
            (number_increment, start_value))

    def sequence_rename(self, connection, old_name, new_name):
        """Rename sequence *old_name* to *new_name* when possible."""
        cursor = connection.cursor()
        if (self.sequence_exist(connection, old_name)
                and not self.sequence_exist(connection, new_name)):
            # ALTER TABLE ... RENAME also works for sequences.
            cursor.execute(
                SQL("ALTER TABLE {} RENAME TO {}").format(
                    Identifier(old_name),
                    Identifier(new_name)))

    def sequence_delete(self, connection, name):
        """Drop sequence *name*."""
        cursor = connection.cursor()
        cursor.execute(SQL("DROP SEQUENCE {}").format(
                Identifier(name)))
    def sequence_next_number(self, connection, name):
        """Return the next number sequence *name* will deliver,
        without consuming it."""
        cursor = connection.cursor()
        version = self.get_version(connection)
        if version >= (10, 0):
            # pg_sequences exposes the increment since PostgreSQL 10.
            cursor.execute(
                'SELECT increment_by '
                'FROM pg_sequences '
                'WHERE sequencename=%s',
                (name,))
            increment, = cursor.fetchone()
            cursor.execute(
                SQL(
                    'SELECT CASE WHEN NOT is_called THEN last_value '
                    'ELSE last_value + %s '
                    'END '
                    'FROM {}').format(Identifier(name)),
                (increment,))
        else:
            # Older servers expose increment_by on the sequence relation.
            cursor.execute(
                SQL(
                    'SELECT CASE WHEN NOT is_called THEN last_value '
                    'ELSE last_value + increment_by '
                    'END '
                    'FROM {}').format(Identifier(name)))
        return cursor.fetchone()[0]
    def has_channel(self):
        return True

    def has_extension(self, extension_name):
        """Return True if *extension_name* is installed (cached)."""
        if extension_name in self._extensions[self.name]:
            return self._extensions[self.name][extension_name]
        connection = self.get_connection()
        result = False
        try:
            cursor = connection.cursor()
            cursor.execute(
                "SELECT 1 FROM pg_extension WHERE extname=%s",
                (extension_name,))
            result = bool(cursor.rowcount)
        finally:
            self.put_connection(connection)
        self._extensions[self.name][extension_name] = result
        return result
    def json_get(self, column, key=None):
        """Return *column* cast to jsonb, optionally extracting *key*."""
        column = Cast(column, 'jsonb')
        if key:
            column = JSONBExtractPath(column, key)
        return column

    def json_key_exists(self, column, key):
        """Expression testing that *key* exists in *column*."""
        return JSONKeyExists(Cast(column, 'jsonb'), key)

    def json_any_keys_exist(self, column, keys):
        """Expression testing that any of *keys* exists in *column*."""
        return JSONAnyKeyExist(Cast(column, 'jsonb'), keys)

    def json_all_keys_exist(self, column, keys):
        """Expression testing that all of *keys* exist in *column*."""
        return JSONAllKeyExist(Cast(column, 'jsonb'), keys)

    def json_contains(self, column, json):
        """Expression testing that *column* contains the *json* value."""
        return JSONContains(Cast(column, 'jsonb'), Cast(json, 'jsonb'))
# Register process-wide psycopg2 typecasters shared by every connection.
register_type(UNICODE)
if PYDATE:
    register_type(PYDATE)
if PYDATETIME:
    register_type(PYDATETIME)
if PYTIME:
    register_type(PYTIME)
if PYINTERVAL:
    register_type(PYINTERVAL)
# Send floats and Decimals as literal numbers instead of quoted strings.
register_adapter(float, lambda value: AsIs(repr(value)))
register_adapter(Decimal, lambda value: AsIs(str(value)))
def convert_json(value):
    """Decode a JSON payload using Tryton's extended JSON decoder."""
    # Imported lazily to avoid a circular import at module load time.
    from trytond.protocols.jsonrpc import JSONDecoder
    return json.loads(value, object_hook=JSONDecoder())


# Decode json/jsonb columns with Tryton's decoder for all connections.
register_default_json(loads=convert_json)
register_default_jsonb(loads=convert_json)
# Under gevent, install a cooperative wait callback so blocking libpq
# calls yield to other greenlets instead of stalling the event loop.
if is_gevent_monkey_patched():
    from psycopg2.extensions import set_wait_callback
    from psycopg2.extras import wait_select
    set_wait_callback(wait_select)

156
backend/postgresql/init.sql Executable file
View File

@@ -0,0 +1,156 @@
-- Bootstrap schema for a new Tryton database.
-- Database.init() executes this file one statement at a time (split on ';').

-- Server configuration: UI language and allowed hostnames.
CREATE TABLE ir_configuration (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_configuration_id_positive CHECK(id >= 0),
    language VARCHAR,
    hostname VARCHAR,
    PRIMARY KEY(id)
);

-- Registry of ORM models.
CREATE TABLE ir_model (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_model_id_positive CHECK(id >= 0),
    model VARCHAR NOT NULL,
    name VARCHAR,
    info TEXT,
    module VARCHAR,
    PRIMARY KEY(id)
);
ALTER TABLE ir_model ADD CONSTRAINT ir_model_model_uniq UNIQUE (model);

-- Fields of each registered model.
CREATE TABLE ir_model_field (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_model_field_id_positive CHECK(id >= 0),
    model VARCHAR NOT NULL,
    name VARCHAR NOT NULL,
    relation VARCHAR,
    field_description VARCHAR,
    ttype VARCHAR,
    help TEXT,
    module VARCHAR,
    "access" BOOL,
    PRIMARY KEY(id),
    FOREIGN KEY (model) REFERENCES ir_model(model) ON DELETE CASCADE
);
ALTER TABLE ir_model_field ADD CONSTRAINT ir_model_field_name_model_uniq UNIQUE (name, model);

-- Client view definitions.
CREATE TABLE ir_ui_view (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_ui_view_id_positive CHECK(id >= 0),
    model VARCHAR NOT NULL,
    "type" VARCHAR,
    data TEXT NOT NULL,
    field_childs VARCHAR,
    priority INTEGER NOT NULL,
    PRIMARY KEY(id)
);

-- Hierarchical client menu.
CREATE TABLE ir_ui_menu (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_ui_menu_id_positive CHECK(id >= 0),
    parent INTEGER,
    name VARCHAR NOT NULL,
    icon VARCHAR,
    PRIMARY KEY (id),
    FOREIGN KEY (parent) REFERENCES ir_ui_menu (id) ON DELETE SET NULL
);

-- Translations of source strings per language/module.
CREATE TABLE ir_translation (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_translation_id_positive CHECK(id >= 0),
    lang VARCHAR,
    src TEXT,
    name VARCHAR NOT NULL,
    res_id INTEGER,
    value TEXT,
    "type" VARCHAR,
    module VARCHAR,
    fuzzy BOOLEAN NOT NULL,
    PRIMARY KEY(id)
);

-- Available languages.
CREATE TABLE ir_lang (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_lang_id_positive CHECK(id >= 0),
    name VARCHAR NOT NULL,
    code VARCHAR NOT NULL,
    translatable BOOLEAN NOT NULL,
    parent VARCHAR,
    active BOOLEAN NOT NULL,
    direction VARCHAR NOT NULL,
    PRIMARY KEY(id)
);

-- User accounts; login must be unique.
CREATE TABLE res_user (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT res_user_id_positive CHECK(id >= 0),
    name VARCHAR NOT NULL,
    active BOOLEAN NOT NULL,
    login VARCHAR NOT NULL,
    password VARCHAR,
    PRIMARY KEY(id)
);
ALTER TABLE res_user ADD CONSTRAINT res_user_login_key UNIQUE (login);
-- The reserved root user (id 0) is created inactive and without password.
INSERT INTO res_user (id, login, password, name, active) VALUES (0, 'root', NULL, 'Root', False);

-- Access groups.
CREATE TABLE res_group (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT res_group_id_positive CHECK(id >= 0),
    name VARCHAR NOT NULL,
    PRIMARY KEY(id)
);

-- Many-to-many relation between users and groups.
CREATE TABLE "res_user-res_group" (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT "res_user-res_group_id_positive" CHECK(id >= 0),
    "user" INTEGER NOT NULL,
    "group" INTEGER NOT NULL,
    FOREIGN KEY ("user") REFERENCES res_user (id) ON DELETE CASCADE,
    FOREIGN KEY ("group") REFERENCES res_group (id) ON DELETE CASCADE,
    PRIMARY KEY(id)
);

-- Installed modules and their activation state.
CREATE TABLE ir_module (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_module_id_positive CHECK(id >= 0),
    create_uid INTEGER NOT NULL,
    create_date TIMESTAMP WITHOUT TIME ZONE NOT NULL,
    write_date TIMESTAMP WITHOUT TIME ZONE,
    write_uid INTEGER,
    name VARCHAR NOT NULL,
    state VARCHAR,
    PRIMARY KEY(id),
    FOREIGN KEY (create_uid) REFERENCES res_user ON DELETE SET NULL,
    FOREIGN KEY (write_uid) REFERENCES res_user ON DELETE SET NULL
);
ALTER TABLE ir_module ADD CONSTRAINT ir_module_name_uniq UNIQUE (name);

-- Dependencies between modules (name refers to the required module).
CREATE TABLE ir_module_dependency (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_module_dependency_id_positive CHECK(id >= 0),
    create_uid INTEGER NOT NULL,
    create_date TIMESTAMP WITHOUT TIME ZONE NOT NULL,
    write_date TIMESTAMP WITHOUT TIME ZONE,
    write_uid INTEGER,
    name VARCHAR,
    module INTEGER,
    PRIMARY KEY(id),
    FOREIGN KEY (create_uid) REFERENCES res_user ON DELETE SET NULL,
    FOREIGN KEY (write_uid) REFERENCES res_user ON DELETE SET NULL,
    FOREIGN KEY (module) REFERENCES ir_module ON DELETE CASCADE
);

-- Cache invalidation timestamps per cache name.
-- NOTE(review): no primary key is declared here — presumably intentional;
-- confirm against the ir.cache model definition.
CREATE TABLE ir_cache (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL
        CONSTRAINT ir_cache_id_positive CHECK(id >= 0),
    name VARCHAR NOT NULL,
    "timestamp" TIMESTAMP WITHOUT TIME ZONE,
    create_date TIMESTAMP WITHOUT TIME ZONE,
    create_uid INTEGER,
    write_date TIMESTAMP WITHOUT TIME ZONE,
    write_uid INTEGER
);

731
backend/postgresql/table.py Executable file
View File

@@ -0,0 +1,731 @@
# This file is part of Tryton. The COPYRIGHT file at the top level of
# this repository contains the full copyright notices and license terms.
import logging
import re
from psycopg2.sql import SQL, Identifier
from trytond.backend.table import (
IndexTranslatorInterface, TableHandlerInterface)
from trytond.transaction import Transaction
__all__ = ['TableHandler']

logger = logging.getLogger(__name__)

# Extracts the size from a 'VARCHAR(n)' column type definition.
VARCHAR_SIZE_RE = re.compile(r'VARCHAR\(([0-9]+)\)')
class TableHandler(TableHandlerInterface):
    """PostgreSQL implementation of the table DDL handler."""

    # PostgreSQL's compiled NAMEDATALEN default; identifiers longer than
    # this are truncated by the server.
    namedatalen = 64
    index_translators = []
    def _init(self, model, history=False):
        """Create or migrate the table backing *model*.

        Ensures the table exists, comments it with the model docstring
        when owned, and guarantees that the id column (and __id for
        history tables) is an identity column, migrating away from the
        old sequence-backed defaults when necessary.
        """
        super()._init(model, history=history)
        # Lazily-populated introspection caches (see the properties below).
        self.__columns = None
        self.__constraints = None
        self.__fk_deltypes = None
        self.__indexes = None
        transaction = Transaction()
        cursor = transaction.connection.cursor()
        # Create new table if necessary
        if not self.table_exist(self.table_name):
            cursor.execute(SQL('CREATE TABLE {} ()').format(
                    Identifier(self.table_name)))
        self.table_schema = transaction.database.get_table_schema(
            transaction.connection, self.table_name)
        cursor.execute('SELECT tableowner = current_user FROM pg_tables '
            'WHERE tablename = %s AND schemaname = %s',
            (self.table_name, self.table_schema))
        self.is_owner, = cursor.fetchone()
        if model.__doc__ and self.is_owner:
            cursor.execute(SQL('COMMENT ON TABLE {} IS %s').format(
                    Identifier(self.table_name)),
                (model.__doc__,))

        def migrate_to_identity(table, column):
            # Convert a sequence-backed (serial-style) column into an
            # identity column, carrying over the old sequence's position
            # and parameters.
            previous_seq_name = f"{table}_{column}_seq"
            cursor.execute(
                "SELECT nextval(format(%s, %s))", ('%I', previous_seq_name,))
            next_val, = cursor.fetchone()
            cursor.execute(
                "SELECT seqincrement, seqmax, seqmin, seqcache "
                "FROM pg_sequence WHERE seqrelid = %s::regclass",
                (previous_seq_name,))
            increment, s_max, s_min, cache = cursor.fetchone()
            # Previously created sequences were setting bigint values for those
            # identity column mimic the type of the underlying column
            if (s_max > 2 ** 31 - 1
                    and self._columns[column]['typname'] != 'int8'):
                s_max = 2 ** 31 - 1
            if (s_min < -(2 ** 31)
                    and self._columns[column]['typname'] != 'int8'):
                s_min = -(2 ** 31)
            cursor.execute(
                SQL("ALTER TABLE {} ALTER COLUMN {} DROP DEFAULT").format(
                    Identifier(table), Identifier(column)))
            cursor.execute(
                SQL("DROP SEQUENCE {}").format(
                    Identifier(previous_seq_name)))
            cursor.execute(
                SQL("ALTER TABLE {} ALTER COLUMN {} "
                    "ADD GENERATED BY DEFAULT AS IDENTITY").format(
                    Identifier(table), Identifier(column)))
            # Apply the old sequence's parameters to the new implicit
            # identity sequence (its name comes from PostgreSQL itself).
            cursor.execute(
                "SELECT pg_get_serial_sequence(format(%s, %s), %s)",
                ('%I', table, column))
            serial_seq_name, = cursor.fetchone()
            cursor.execute(
                (f"ALTER SEQUENCE {serial_seq_name} INCREMENT BY %s "
                    "MINVALUE %s MAXVALUE %s RESTART WITH %s CACHE %s"),
                (increment, s_min, s_max, next_val, cache))

        update_definitions = False
        if 'id' not in self._columns:
            update_definitions = True
            if not self.history:
                cursor.execute(
                    SQL(
                        "ALTER TABLE {} ADD COLUMN id INTEGER "
                        "GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY").format(
                        Identifier(self.table_name)))
            else:
                # History tables get a plain id; __id is the primary key.
                cursor.execute(
                    SQL('ALTER TABLE {} ADD COLUMN id INTEGER')
                    .format(Identifier(self.table_name)))
        else:
            if not self.history and not self.__columns['id']['identity']:
                update_definitions = True
                migrate_to_identity(self.table_name, 'id')
        if self.history and '__id' not in self._columns:
            update_definitions = True
            cursor.execute(
                SQL(
                    "ALTER TABLE {} ADD COLUMN __id INTEGER "
                    "GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY").format(
                    Identifier(self.table_name)))
        elif self.history:
            if not self.__columns['__id']['identity']:
                update_definitions = True
                cursor.execute(
                    SQL("ALTER TABLE {} ALTER COLUMN id DROP DEFAULT").format(
                        Identifier(self.table_name)))
                migrate_to_identity(self.table_name, '__id')
        if update_definitions:
            self._update_definitions(columns=True)
@classmethod
def table_exist(cls, table_name):
transaction = Transaction()
return bool(transaction.database.get_table_schema(
transaction.connection, table_name))
@classmethod
def table_rename(cls, old_name, new_name):
transaction = Transaction()
cursor = transaction.connection.cursor()
# Rename table
if (cls.table_exist(old_name)
and not cls.table_exist(new_name)):
cursor.execute(SQL('ALTER TABLE {} RENAME TO {}').format(
Identifier(old_name), Identifier(new_name)))
# Migrate from 6.6: rename old sequence
old_sequence = old_name + '_id_seq'
new_sequence = new_name + '_id_seq'
transaction.database.sequence_rename(
transaction.connection, old_sequence, new_sequence)
# Rename history table
old_history = old_name + "__history"
new_history = new_name + "__history"
if (cls.table_exist(old_history)
and not cls.table_exist(new_history)):
cursor.execute('ALTER TABLE "%s" RENAME TO "%s"'
% (old_history, new_history))
    def column_exist(self, column_name):
        """Return True if *column_name* exists on the table."""
        return column_name in self._columns
def column_rename(self, old_name, new_name):
cursor = Transaction().connection.cursor()
if self.column_exist(old_name):
if not self.column_exist(new_name):
cursor.execute(SQL(
'ALTER TABLE {} RENAME COLUMN {} TO {}').format(
Identifier(self.table_name),
Identifier(old_name),
Identifier(new_name)))
self._update_definitions(columns=True)
else:
logger.warning(
'Unable to rename column %s on table %s to %s.',
old_name, self.table_name, new_name)
    @property
    def _columns(self):
        """Column definitions of the table, keyed by column name.

        Each value holds typname, notnull, size, default and identity,
        read lazily from information_schema.
        """
        if self.__columns is None:
            cursor = Transaction().connection.cursor()
            self.__columns = {}
            # Fetch columns definitions from the table
            cursor.execute('SELECT '
                'column_name, udt_name, is_nullable, '
                'character_maximum_length, '
                'column_default, is_identity '
                'FROM information_schema.columns '
                'WHERE table_name = %s AND table_schema = %s',
                (self.table_name, self.table_schema))
            for column, typname, nullable, size, default, identity in cursor:
                self.__columns[column] = {
                    'typname': typname,
                    'notnull': True if nullable == 'NO' else False,
                    'size': size,
                    'default': default,
                    'identity': False if identity == 'NO' else True,
                    }
        return self.__columns
    @property
    def _constraints(self):
        """Names of the table's constraints (lazily cached)."""
        if self.__constraints is None:
            cursor = Transaction().connection.cursor()
            # fetch constraints for the table
            cursor.execute('SELECT constraint_name '
                'FROM information_schema.table_constraints '
                'WHERE table_name = %s AND table_schema = %s',
                (self.table_name, self.table_schema))
            self.__constraints = [c for c, in cursor]
            # add nonstandard exclude constraint
            # (EXCLUDE constraints are not exposed by information_schema)
            cursor.execute('SELECT c.conname '
                'FROM pg_namespace nc, '
                'pg_namespace nr, '
                'pg_constraint c, '
                'pg_class r '
                'WHERE nc.oid = c.connamespace AND nr.oid = r.relnamespace '
                'AND c.conrelid = r.oid '
                "AND c.contype = 'x' "  # exclude type
                "AND r.relkind IN ('r', 'p') "
                'AND r.relname = %s AND nr.nspname = %s',
                (self.table_name, self.table_schema))
            self.__constraints.extend((c for c, in cursor))
        return self.__constraints
@property
def _fk_deltypes(self):
if self.__fk_deltypes is None:
cursor = Transaction().connection.cursor()
cursor.execute('SELECT k.column_name, r.delete_rule '
'FROM information_schema.key_column_usage AS k '
'JOIN information_schema.referential_constraints AS r '
'ON r.constraint_schema = k.constraint_schema '
'AND r.constraint_name = k.constraint_name '
'WHERE k.table_name = %s AND k.table_schema = %s',
(self.table_name, self.table_schema))
self.__fk_deltypes = dict(cursor)
return self.__fk_deltypes
@property
def _indexes(self):
if self.__indexes is None:
cursor = Transaction().connection.cursor()
# Fetch indexes defined for the table
cursor.execute("SELECT cl2.relname "
"FROM pg_index ind "
"JOIN pg_class cl on (cl.oid = ind.indrelid) "
"JOIN pg_namespace n ON (cl.relnamespace = n.oid) "
"JOIN pg_class cl2 on (cl2.oid = ind.indexrelid) "
"WHERE cl.relname = %s AND n.nspname = %s "
"AND NOT ind.indisprimary AND NOT ind.indisunique",
(self.table_name, self.table_schema))
self.__indexes = [l[0] for l in cursor]
return self.__indexes
def _update_definitions(self, columns=None, constraints=None):
if columns is None and constraints is None:
columns = constraints = True
if columns:
self.__columns = None
if constraints:
self.__constraints = None
self.__fk_deltypes = None
def alter_size(self, column_name, column_type):
cursor = Transaction().connection.cursor()
cursor.execute(
SQL("ALTER TABLE {} ALTER COLUMN {} TYPE {}").format(
Identifier(self.table_name),
Identifier(column_name),
SQL(column_type)))
self._update_definitions(columns=True)
def alter_type(self, column_name, column_type):
cursor = Transaction().connection.cursor()
cursor.execute(SQL('ALTER TABLE {} ALTER {} TYPE {}').format(
Identifier(self.table_name),
Identifier(column_name),
SQL(column_type)))
self._update_definitions(columns=True)
def column_is_type(self, column_name, type_, *, size=-1):
db_type = self._columns[column_name]['typname'].upper()
database = Transaction().database
base_type = database.sql_type(type_).base.upper()
if base_type == 'VARCHAR' and (size is None or size >= 0):
same_size = self._columns[column_name]['size'] == size
else:
same_size = True
return base_type == db_type and same_size
def db_default(self, column_name, value):
if value in [True, False]:
test = str(value).lower()
else:
test = value
if self._columns[column_name]['default'] != test:
cursor = Transaction().connection.cursor()
cursor.execute(
SQL(
'ALTER TABLE {} ALTER COLUMN {} SET DEFAULT %s').format(
Identifier(self.table_name),
Identifier(column_name)),
(value,))
def add_column(self, column_name, sql_type, default=None, comment=''):
cursor = Transaction().connection.cursor()
database = Transaction().database
column_type = database.sql_type(sql_type)
match = VARCHAR_SIZE_RE.match(sql_type)
field_size = int(match.group(1)) if match else None
def add_comment():
if comment and self.is_owner:
cursor.execute(
SQL('COMMENT ON COLUMN {}.{} IS %s').format(
Identifier(self.table_name),
Identifier(column_name)),
(comment,))
if self.column_exist(column_name):
if (column_name in ('create_date', 'write_date')
and column_type[1].lower() != 'timestamp(6)'):
# Migrate dates from timestamp(0) to timestamp
cursor.execute(
SQL(
'ALTER TABLE {} ALTER COLUMN {} TYPE timestamp')
.format(
Identifier(self.table_name),
Identifier(column_name)))
add_comment()
base_type = column_type[0].lower()
typname = self._columns[column_name]['typname']
if base_type != typname:
if (typname, base_type) in [
('varchar', 'text'),
('text', 'varchar'),
('date', 'timestamp'),
('int2', 'int4'),
('int2', 'float4'),
('int2', 'int8'),
('int2', 'float8'),
('int2', 'numeric'),
('int4', 'int8'),
('int4', 'float8'),
('int4', 'numeric'),
('int8', 'float8'),
('int8', 'numeric'),
('float4', 'numeric'),
('float4', 'float8'),
('float8', 'numeric'),
]:
self.alter_type(column_name, base_type)
elif (typname, base_type) in [
('int8', 'int4'),
('int8', 'int2'),
('int4', 'int2'),
('float8', 'float4'),
]:
pass
else:
logger.warning(
'Unable to migrate column %s on table %s '
'from %s to %s.',
column_name, self.table_name, typname, base_type)
if base_type == typname == 'varchar':
# Migrate size
from_size = self._columns[column_name]['size']
if field_size is None:
if from_size:
self.alter_size(column_name, base_type)
elif from_size == field_size:
pass
elif from_size and from_size < field_size:
self.alter_size(column_name, column_type[1])
else:
logger.warning(
'Unable to migrate column %s on table %s '
'from varchar(%s) to varchar(%s).',
column_name, self.table_name,
from_size if from_size and from_size > 0 else "",
field_size)
return
column_type = column_type[1]
cursor.execute(
SQL('ALTER TABLE {} ADD COLUMN {} {}').format(
Identifier(self.table_name),
Identifier(column_name),
SQL(column_type)))
add_comment()
if default:
# check if table is non-empty:
cursor.execute('SELECT 1 FROM "%s" limit 1' % self.table_name)
if cursor.rowcount:
# Populate column with default values:
cursor.execute(
SQL('UPDATE {} SET {} = %s').format(
Identifier(self.table_name),
Identifier(column_name)),
(default(),))
self._update_definitions(columns=True)
def add_fk(self, columns, reference, ref_columns=None, on_delete=None):
if on_delete is not None:
on_delete = on_delete.upper()
else:
on_delete = 'SET NULL'
if isinstance(columns, str):
columns = [columns]
cursor = Transaction().connection.cursor()
if ref_columns:
ref_columns_name = '_' + '_'.join(ref_columns)
else:
ref_columns_name = ''
name = self.convert_name(
self.table_name + '_' + '_'.join(columns)
+ ref_columns_name + '_fkey')
if name in self._constraints:
for column_name in columns:
if self._fk_deltypes.get(column_name) != on_delete:
self.drop_fk(columns, ref_columns)
add = True
break
else:
add = False
else:
add = True
if add:
columns = SQL(', ').join(map(Identifier, columns))
if not ref_columns:
ref_columns = ['id']
ref_columns = SQL(', ').join(map(Identifier, ref_columns))
cursor.execute(
SQL(
"ALTER TABLE {table} "
"ADD CONSTRAINT {constraint} "
"FOREIGN KEY ({columns}) "
"REFERENCES {reference} ({ref_columns}) "
"ON DELETE {action}"
)
.format(
table=Identifier(self.table_name),
constraint=Identifier(name),
columns=columns,
reference=Identifier(reference),
ref_columns=ref_columns,
action=SQL(on_delete)))
self._update_definitions(constraints=True)
def drop_fk(self, columns, ref_columns=None, table=None):
if isinstance(columns, str):
columns = [columns]
if ref_columns:
ref_columns_name = '_' + '_'.join(ref_columns)
else:
ref_columns_name = ''
self.drop_constraint(
'_'.join(columns) + ref_columns_name + '_fkey', table=table)
def not_null_action(self, column_name, action='add'):
if not self.column_exist(column_name):
return
with Transaction().connection.cursor() as cursor:
if action == 'add':
if self._columns[column_name]['notnull']:
return
cursor.execute(SQL(
'SELECT id FROM {} WHERE {} IS NULL LIMIT 1').format(
Identifier(self.table_name),
Identifier(column_name)))
if not cursor.rowcount:
cursor.execute(
SQL(
'ALTER TABLE {} ALTER COLUMN {} SET NOT NULL')
.format(
Identifier(self.table_name),
Identifier(column_name)))
self._update_definitions(columns=True)
else:
logger.warning(
"Unable to set not null on column %s of table %s.\n"
"Try restarting one more time.\n"
"If that doesn't work update the records and restart "
"again.",
column_name, self.table_name)
elif action == 'remove':
if not self._columns[column_name]['notnull']:
return
cursor.execute(
SQL('ALTER TABLE {} ALTER COLUMN {} DROP NOT NULL')
.format(
Identifier(self.table_name),
Identifier(column_name)))
self._update_definitions(columns=True)
else:
raise Exception('Not null action not supported!')
def add_constraint(self, ident, constraint):
ident = self.convert_name(self.table_name + "_" + ident)
if ident in self._constraints:
# This constrain already exist
return
cursor = Transaction().connection.cursor()
cursor.execute(
SQL('ALTER TABLE {} ADD CONSTRAINT {} {}').format(
Identifier(self.table_name),
Identifier(ident),
SQL(str(constraint))),
constraint.params)
self._update_definitions(constraints=True)
def drop_constraint(self, ident, table=None):
ident = self.convert_name((table or self.table_name) + "_" + ident)
if ident not in self._constraints:
return
cursor = Transaction().connection.cursor()
cursor.execute(
SQL('ALTER TABLE {} DROP CONSTRAINT {}').format(
Identifier(self.table_name), Identifier(ident)))
self._update_definitions(constraints=True)
    def set_indexes(self, indexes, concurrently=False):
        """Synchronize the table's indexes with *indexes*.

        Each index is rendered by the best-scoring translator and
        created if missing; indexes managed here but no longer wanted
        (named 'idx_*' or '*_index') are dropped.  With
        concurrently=True the index is built without locking the table
        (CONCURRENTLY cannot run inside a transaction block — assumes
        callers arrange for that; TODO confirm).
        """
        cursor = Transaction().connection.cursor()
        old = set(self._indexes)
        for index in indexes:
            translator = self.index_translator_for(index)
            if translator:
                name, query, params = translator.definition(index)
                name = '_'.join([self.table_name, name])
                # Reserve room for the 'idx_' prefix when shortening
                name = 'idx_' + self.convert_name(name, reserved=len('idx_'))
                # An aborted CONCURRENTLY build can leave an invalid
                # index behind; detect and drop it before recreating
                cursor.execute(
                    'SELECT idx.indisvalid '
                    'FROM pg_index idx '
                    'JOIN pg_class cls ON cls.oid = idx.indexrelid '
                    'WHERE cls.relname = %s',
                    (name,))
                if (idx_valid := cursor.fetchone()) and not idx_valid[0]:
                    cursor.execute(
                        SQL("DROP INDEX {}").format(Identifier(name)))
                cursor.execute(
                    SQL('CREATE INDEX {} IF NOT EXISTS {} ON {} USING {}')
                    .format(
                        SQL('CONCURRENTLY' if concurrently else ''),
                        Identifier(name),
                        Identifier(self.table_name),
                        query),
                    params)
                # Still wanted: keep it out of the removal set
                old.discard(name)
        # Drop leftovers that follow the managed naming patterns only,
        # leaving foreign/manual indexes untouched
        for name in old:
            if name.startswith('idx_') or name.endswith('_index'):
                cursor.execute(SQL('DROP INDEX {}').format(Identifier(name)))
        # Invalidate the cached index list
        self.__indexes = None
def drop_column(self, column_name):
if not self.column_exist(column_name):
return
cursor = Transaction().connection.cursor()
cursor.execute(SQL('ALTER TABLE {} DROP COLUMN {}').format(
Identifier(self.table_name),
Identifier(column_name)))
self._update_definitions(columns=True)
@classmethod
def drop_table(cls, model, table, cascade=False):
cursor = Transaction().connection.cursor()
cursor.execute('DELETE FROM ir_model_data WHERE model = %s', (model,))
query = 'DROP TABLE {}'
if cascade:
query = query + ' CASCADE'
cursor.execute(SQL(query).format(Identifier(table)))
class IndexMixin:
    "Base class rendering an index definition into a SQL fragment."
    _type = None

    def __init_subclass__(cls):
        # Every concrete translator registers itself on TableHandler
        TableHandler.index_translators.append(cls)

    @classmethod
    def definition(cls, index):
        "Return the (name, query, params) triple for index."
        expr_template = SQL('{expression} {collate} {opclass} {order}')
        expressions = []
        params = []
        for expression, usage in cls._get_indexed_expressions(index):
            variables = cls._get_expression_variables(expression, usage)
            expressions.append(expr_template.format(**variables))
            params.extend(expression.params)
        if index.options.get('include'):
            include = SQL('INCLUDE ({columns})').format(
                columns=SQL(',').join(map(
                    lambda c: SQL(str(c)),
                    index.options.get('include'))))
        else:
            include = SQL('')
        if index.options.get('where'):
            where = SQL('WHERE {where}').format(
                where=SQL(str(index.options['where'])))
            params.extend(index.options['where'].params)
        else:
            where = SQL('')
        query = SQL('{type} ({expressions}) {include} {where}').format(
            type=SQL(cls._type),
            expressions=SQL(',').join(expressions),
            include=include,
            where=where)
        return cls._get_name(query, params), query, params

    @classmethod
    def _get_indexed_expressions(cls, index):
        # By default every expression of the index is rendered
        return index.expressions

    @classmethod
    def _get_expression_variables(cls, expression, usage):
        variables = {
            'expression': SQL(str(expression)),
            'collate': SQL(''),
            'opclass': SQL(''),
            'order': SQL(''),
            }
        if usage.options.get('collation'):
            variables['collate'] = SQL('COLLATE {}').format(
                usage.options['collation'])
        if usage.options.get('order'):
            variables['order'] = SQL(usage.options['order'].upper())
        return variables
class HashTranslator(IndexMixin, IndexTranslatorInterface):
    "Translator for HASH indexes: a single equality expression only."
    _type = 'HASH'

    @classmethod
    def score(cls, index):
        expressions = index.expressions
        # Hash indexes support exactly one equality expression
        if (len(expressions) > 1
                or expressions[0][1].__class__.__name__ != 'Equality'):
            return 0
        # and they cannot carry INCLUDE columns
        if index.options.get('include'):
            return 0
        return 100

    @classmethod
    def _get_indexed_expressions(cls, index):
        equalities = [
            (e, u) for e, u in index.expressions
            if u.__class__.__name__ == 'Equality']
        return equalities[:1]
class BTreeTranslator(IndexMixin, IndexTranslatorInterface):
    "Translator for BTREE indexes."
    _type = 'BTREE'

    @classmethod
    def score(cls, index):
        score = 0
        for _, usage in index.expressions:
            kind = usage.__class__.__name__
            if kind == 'Range':
                score += 100
            elif kind == 'Equality':
                score += 50
            elif kind == 'Similarity':
                score += 20
            # Prefix matching is a strong fit regardless of usage kind
            if usage.options.get('begin'):
                score += 100
        return score

    @classmethod
    def _get_indexed_expressions(cls, index):
        supported = {'Equality', 'Range', 'Similarity'}
        return [
            (e, u) for e, u in index.expressions
            if u.__class__.__name__ in supported]

    @classmethod
    def _get_expression_variables(cls, expression, usage):
        variables = super()._get_expression_variables(expression, usage)
        if (usage.__class__.__name__ == 'Similarity'
                and not usage.options.get('collation')):
            # text_pattern_ops and varchar_pattern_ops are the same
            variables['opclass'] = SQL('varchar_pattern_ops')
        return variables
class TrigramTranslator(IndexMixin, IndexTranslatorInterface):
    "Translator for GIN indexes backed by pg_trgm and/or btree_gin."
    _type = 'GIN'

    @classmethod
    def score(cls, index):
        database = Transaction().database
        has_btree_gin = database.has_extension('btree_gin')
        has_trigram = database.has_extension('pg_trgm')
        if not has_btree_gin and not has_trigram:
            # Neither extension is available
            return 0
        score = 0
        for _, usage in index.expressions:
            kind = usage.__class__.__name__
            if kind == 'Similarity':
                score += 100 if has_trigram else 50
            elif has_btree_gin:
                if kind == 'Range':
                    score += 90
                elif kind == 'Equality':
                    score += 40
            else:
                return 0
        return score

    @classmethod
    def _get_indexed_expressions(cls, index):
        database = Transaction().database
        has_btree_gin = database.has_extension('btree_gin')
        has_trigram = database.has_extension('pg_trgm')

        def usable(usage):
            # Avoid shadowing the builtin filter()
            kind = usage.__class__.__name__
            if kind == 'Similarity':
                return has_trigram
            return kind in {'Range', 'Equality'} and has_btree_gin

        return [(e, u) for e, u in index.expressions if usable(u)]

    @classmethod
    def _get_expression_variables(cls, expression, usage):
        variables = super()._get_expression_variables(expression, usage)
        if usage.__class__.__name__ == 'Similarity':
            # Similarity lookups need the trigram operator class
            variables['opclass'] = SQL('gin_trgm_ops')
        return variables