Initial import from Docker volume

This commit is contained in:
root
2025-12-26 13:11:43 +00:00
commit 4998dc066a
13336 changed files with 1767801 additions and 0 deletions

12
backend/sqlite/__init__.py Executable file
View File

@@ -0,0 +1,12 @@
# This file is part of Tryton. The COPYRIGHT file at the top level of
# this repository contains the full copyright notices and license terms.
from .database import (
Database, DatabaseDataError, DatabaseIntegrityError,
DatabaseOperationalError, DatabaseTimeoutError)
from .table import TableHandler
# Public names re-exported by the SQLite backend package. They must be
# strings (not the objects themselves) for `from ... import *` to work;
# this matches the quoted __all__ used in database.py.
__all__ = [
    'Database', 'TableHandler',
    'DatabaseIntegrityError', 'DatabaseDataError', 'DatabaseOperationalError',
    'DatabaseTimeoutError']

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

648
backend/sqlite/database.py Executable file
View File

@@ -0,0 +1,648 @@
# This file is part of Tryton. The COPYRIGHT file at the top level of
# this repository contains the full copyright notices and license terms.
import datetime
import logging
import math
import os
import random
import sqlite3 as sqlite
import threading
import time
import urllib.parse
import warnings
from decimal import Decimal
from sqlite3 import DatabaseError
from sqlite3 import IntegrityError as DatabaseIntegrityError
from sqlite3 import OperationalError as DatabaseOperationalError
from weakref import WeakKeyDictionary
from sql import Expression, Flavor, Literal, Null, Query, Table
from sql.conditionals import NullIf
from sql.functions import (
CharLength, CurrentTimestamp, Extract, Function, Overlay, Position,
Substring, Trim)
from trytond.backend.database import DatabaseInterface, SQLType
from trytond.config import config, parse_uri
from trytond.tools import safe_join
from trytond.transaction import Transaction
# Names exported by `from ... import *`.
__all__ = [
    'Database',
    'DatabaseIntegrityError', 'DatabaseDataError', 'DatabaseOperationalError',
    'DatabaseTimeoutError']

# Module-level logger for the SQLite backend.
logger = logging.getLogger(__name__)

# Database name used when none is given; ':memory:' selects SQLite's
# transient in-memory database.
_default_name = config.get('database', 'default_name', default=':memory:')
class DatabaseDataError(DatabaseError):
    """Data error for the SQLite backend.

    sqlite3 does not expose a dedicated DataError subclass matching the
    other backends, so one is derived from DatabaseError here.
    """
    pass
class DatabaseTimeoutError(Exception):
    """Statement-timeout error exported for backend-interface parity.

    Defined and exported here; nothing in this module raises it.
    """
    pass
class SQLiteExtract(Function):
    """Render python-sql's Extract as the registered EXTRACT function."""
    __slots__ = ()
    _function = 'EXTRACT'

    @staticmethod
    def extract(lookup_type, date):
        """Return the requested part of an ISO date/timestamp string.

        :param lookup_type: PostgreSQL EXTRACT field name (case-insensitive)
        :param date: 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS[.ffffff]' or None
        :return: the extracted part, or None for a NULL input
        """
        if date is None:
            return None
        # Parse the stored string into a date/datetime.
        if len(date) == 10:
            year, month, day = map(int, date.split('-'))
            date = datetime.date(year, month, day)
        else:
            datepart, timepart = date.split(" ")
            year, month, day = map(int, datepart.split("-"))
            timepart_full = timepart.split(".")
            hours, minutes, seconds = map(int, timepart_full[0].split(":"))
            if len(timepart_full) == 2:
                microseconds = int(timepart_full[1])
            else:
                microseconds = 0
            date = datetime.datetime(
                year, month, day, hours, minutes, seconds, microseconds)
        lookup = lookup_type.lower()
        # Floor division is required: Python 3's true division made e.g.
        # century of 2021 come out as 21.21 instead of 21.
        if lookup == 'century':
            return date.year // 100 + (1 if date.year % 100 else 0)
        elif lookup == 'decade':
            return date.year // 10
        elif lookup == 'dow':
            # PostgreSQL: Sunday is 0.
            return (date.weekday() + 1) % 7
        elif lookup == 'doy':
            return date.timetuple().tm_yday
        elif lookup == 'epoch':
            return int(time.mktime(date.timetuple()))
        elif lookup == 'microseconds':
            # NOTE(review): PostgreSQL includes the seconds field times 1e6
            # here; this returns only the sub-second part - confirm.
            return date.microsecond
        elif lookup == 'millennium':
            return date.year // 1000 + (1 if date.year % 1000 else 0)
        elif lookup == 'milliseconds':
            # NOTE(review): PostgreSQL includes seconds * 1000 - confirm.
            return date.microsecond / 1000
        elif lookup == 'quarter':
            # Months 1-3 -> 1, ..., 10-12 -> 4 (the former month / 4 + 1
            # misplaced July and November).
            return (date.month + 2) // 3
        elif lookup == 'week':
            return date.isocalendar()[1]
        # 'year', 'month', 'day', 'hour', 'minute', 'second', ...
        return getattr(date, lookup)
def date_trunc(_type, date):
    """Emulate PostgreSQL's date_trunc() on an ISO formatted string.

    Returns the truncated value rendered with str(), the input unchanged
    when no precision is given, and None for NULL or unparsable input.
    """
    if not _type:
        return date
    if date is None:
        return None
    parsed = None
    for format_ in (
            '%Y-%m-%d %H:%M:%S.%f',
            '%Y-%m-%d %H:%M:%S',
            '%Y-%m-%d',
            '%H:%M:%S',
            ):
        try:
            parsed = datetime.datetime.strptime(date, format_)
        except ValueError:
            continue
        break
    if parsed is None:
        return None
    precision = _type.lower()
    # Zero out every component finer than the requested precision,
    # stopping once the precision itself is reached.
    for attribute, reset in (
            ('microsecond', 0),
            ('second', 0),
            ('minute', 0),
            ('hour', 0),
            ('day', 1),
            ('month', 1),
            ):
        if precision.startswith(attribute):
            break
        parsed = parsed.replace(**{attribute: reset})
    return str(parsed)
def split_part(text, delimiter, count):
    """Emulate PostgreSQL's split_part(): return the count-th field.

    Fields are numbered from 1; an out-of-range count yields ''.
    NULL input yields None.
    """
    if text is None:
        return None
    # Pad with empty strings so an index past the last field gives ''.
    fields = text.split(delimiter)
    padded = fields + [''] * (count - 1)
    return padded[count - 1]
class SQLitePosition(Function):
    """Render python-sql's Position as the registered POSITION function."""
    __slots__ = ()
    _function = 'POSITION'

    @staticmethod
    def position(substring, string):
        """Return the 1-based index of substring in string, 0 if absent,
        None for a NULL string."""
        if string is None:
            return None
        try:
            index = string.index(substring)
        except ValueError:
            # Not found: POSITION() returns 0.
            return 0
        return index + 1
def replace(text, pattern, replacement):
    """Pure-Python replace() for SQLite versions predating the built-in.

    The input is coerced to str first, so non-string values behave the
    same as in the original implementation.
    """
    source = str(text)
    return source.replace(pattern, replacement)
def now():
    # Return an ISO timestamp string that is stable per transaction:
    # the first call caches datetime.now() under the transaction's
    # started_at key, so repeated NOW() calls in one transaction agree.
    transaction = Transaction()
    return _nows.setdefault(transaction, {}).setdefault(
        transaction.started_at, datetime.datetime.now().isoformat(' '))


# Cache of {transaction: {started_at: iso timestamp}}; the weak keys let
# entries disappear when a transaction is garbage collected.
_nows = WeakKeyDictionary()
def to_char(value, format):
    """Emulate PostgreSQL's to_char() for date/timestamp values.

    :param value: ISO date or timestamp string as stored by this backend
    :param format: PostgreSQL to_char() template
    :raises NotImplementedError: for interval or unparsable values
    """
    # adapt_datetime() omits the fractional part when microsecond == 0,
    # so both timestamp formats must be tried (the original only tried
    # the '.%f' form and rejected plain timestamps).
    for format_ in ('%Y-%m-%d %H:%M:%S.%f', '%Y-%m-%d %H:%M:%S'):
        try:
            value = datetime.datetime.strptime(value, format_)
            break
        except ValueError:
            pass
    else:
        try:
            value = datetime.datetime.strptime(value, '%Y-%m-%d').date()
        except ValueError:
            pass
    if isinstance(value, datetime.date):
        # Convert the SQL template patterns into strftime() directives;
        # the replacement order matters (longest patterns first).
        return value.strftime(format
            .replace('%', '%%')
            .replace('HH12', '%I')
            .replace('HH24', '%H')
            .replace('HH', '%I')
            .replace('MI', '%M')
            .replace('SS', '%S')
            .replace('US', '%f')
            .replace('AM', '%p')
            .replace('A.M.', '%p')
            .replace('PM', '%p')
            .replace('P.M.', '%p')
            .replace('am', '%p')
            .replace('a.m.', '%p')
            .replace('pm', '%p')
            .replace('p.m.', '%p')
            .replace('YYYY', '%Y')
            .replace('YY', '%y')
            .replace('Month', '%B')
            .replace('Mon', '%b')
            .replace('MM', '%m')
            .replace('Day', '%A')
            .replace('Dy', '%a')
            .replace('DDD', '%j')
            .replace('DD', '%d')
            .replace('D', '%w')
            .replace('TZ', '%Z')
            )
    elif isinstance(value, datetime.timedelta):
        raise NotImplementedError
    else:
        raise NotImplementedError
class SQLiteSubstring(Function):
    """Render python-sql's Substring with SQLite's SUBSTR function."""
    __slots__ = ()
    _function = 'SUBSTR'
class SQLiteOverlay(Function):
    """Render python-sql's Overlay as the registered OVERLAY function."""
    __slots__ = ()
    _function = 'OVERLAY'

    @staticmethod
    def overlay(string, placing_string, from_, for_=None):
        """Replace for_ characters of string, starting at 1-based from_,
        with placing_string; for_ defaults to len(placing_string)."""
        if for_ is None:
            for_ = len(placing_string)
        start = from_ - 1
        head = string[:start]
        tail = string[start + for_:]
        return head + placing_string + tail
class SQLiteCharLength(Function):
    """Render python-sql's CharLength with SQLite's LENGTH function."""
    __slots__ = ()
    _function = 'LENGTH'
class SQLiteCurrentTimestamp(Function):
    """Render CurrentTimestamp as NOW(), the Python function registered
    in Database.connect(), which is per-transaction consistent."""
    __slots__ = ()
    _function = 'NOW'  # More precise
class SQLiteTrim(Trim):
    """Render TRIM(characters FROM string) with SQLite's two-argument
    TRIM/LTRIM/RTRIM(string, characters) forms."""

    def __str__(self):
        flavor = Flavor.get()
        param = flavor.param
        # Pick the SQLite function matching the trim position.
        function = {
            'BOTH': 'TRIM',
            'LEADING': 'LTRIM',
            'TRAILING': 'RTRIM',
            }[self.position]

        def format(arg):
            # Plain strings become placeholders; expressions are rendered
            # inline.
            if isinstance(arg, str):
                return param
            else:
                return str(arg)
        return function + '(%s, %s)' % (
            format(self.string), format(self.characters))

    @property
    def params(self):
        if isinstance(self.string, str):
            params = [self.string]
        else:
            params = list(self.string.params)
        # NOTE(review): characters is always appended even though __str__
        # only emits a placeholder for it when it is a str - confirm the
        # placeholder/parameter counts stay in sync for expression
        # characters.
        params.append(self.characters)
        return params
def sign(value):
    """Emulate PostgreSQL's sign(): 1, -1, or the value itself for zero.

    Returning the zero value unchanged preserves its type (0 vs 0.0).
    """
    if value > 0:
        return 1
    if value < 0:
        return -1
    return value
def greatest(*args):
    """Emulate PostgreSQL's GREATEST: the largest non-NULL argument,
    or None when every argument is NULL (or there are none)."""
    candidates = (a for a in args if a is not None)
    return max(candidates, default=None)
def least(*args):
    """Emulate PostgreSQL's LEAST: the smallest non-NULL argument,
    or None when every argument is NULL (or there are none)."""
    candidates = (a for a in args if a is not None)
    return min(candidates, default=None)
def bool_and(*args):
    # Variadic SQL function: True only if every argument is truthy.
    return all(args)


def bool_or(*args):
    # Variadic SQL function: True if at least one argument is truthy.
    return any(args)
def cbrt(value):
    """Emulate PostgreSQL's cbrt() (cube root).

    math.pow raises ValueError for a negative base with a fractional
    exponent, so the sign is handled separately: cbrt(-8) == -2.
    """
    return math.copysign(abs(value) ** (1.0 / 3.0), value)
def div(a, b):
    """Emulate PostgreSQL's div(): integer quotient truncated toward zero.

    Python's // floors instead (div(-7, 2) must be -3, not -4), so the
    quotient is computed on absolute values and the sign restored.
    """
    quotient = abs(a) // abs(b)
    if (a < 0) != (b < 0):
        quotient = -quotient
    return quotient
def trunc(value, digits):
    """Truncate value toward zero, keeping the given number of decimal
    digits (PostgreSQL's two-argument trunc())."""
    scale = 10 ** digits
    return math.trunc(value * scale) / scale
# Function substitutions applied by the python-sql Flavor: the generic
# SQL function classes on the left are rendered with the SQLite-specific
# classes on the right.
MAPPING = {
    Extract: SQLiteExtract,
    Position: SQLitePosition,
    Substring: SQLiteSubstring,
    Overlay: SQLiteOverlay,
    CharLength: SQLiteCharLength,
    CurrentTimestamp: SQLiteCurrentTimestamp,
    Trim: SQLiteTrim,
    }
class JSONExtract(Function):
    """SQLite JSON1 JSON_EXTRACT(document, path) function."""
    __slots__ = ()
    _function = 'JSON_EXTRACT'


class JSONQuote(Function):
    """SQLite JSON1 JSON_QUOTE(value) function."""
    __slots__ = ()
    _function = 'JSON_QUOTE'
class SQLiteCursor(sqlite.Cursor):
    """Cursor usable as a context manager; note that __exit__ does not
    close the cursor, it is deliberately a no-op."""

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        pass
class SQLiteConnection(sqlite.Connection):
    """Connection whose cursor() produces SQLiteCursor instances."""

    def cursor(self):
        return super(SQLiteConnection, self).cursor(SQLiteCursor)
class Database(DatabaseInterface):
    """SQLite implementation of the Tryton database interface.

    Each Database holds one shared sqlite3 connection; the ':memory:'
    database is a per-thread singleton so all of its users in the thread
    share the same data.
    """
    _local = threading.local()
    # Shared connection, created lazily by connect().
    _conn = None
    # python-sql flavor: '?' placeholders, no native NULLS ordering, and
    # the SQLite-specific function mapping above.
    flavor = Flavor(
        paramstyle='qmark', function_mapping=MAPPING, null_ordering=False,
        max_limit=-1)
    # Maximum number of values per IN clause; presumably consumed by
    # callers that chunk their queries - TODO confirm.
    IN_MAX = 200
    # Generic SQL types mapped to the type actually declared in SQLite.
    TYPES_MAPPING = {
        'BIGINT': SQLType('INTEGER', 'INTEGER'),
        'BOOL': SQLType('BOOLEAN', 'BOOLEAN'),
        'DATETIME': SQLType('TIMESTAMP', 'TIMESTAMP'),
        'FULLTEXT': SQLType('TEXT', 'TEXT'),
        'JSON': SQLType('TEXT', 'TEXT'),
        }

    def __new__(cls, name=_default_name):
        # Reuse the thread-local singleton for ':memory:' so every caller
        # in this thread sees the same in-memory database.
        if (name == ':memory:'
                and getattr(cls._local, 'memory_database', None)):
            return cls._local.memory_database
        return DatabaseInterface.__new__(cls, name=name)

    def __init__(self, name=_default_name):
        super(Database, self).__init__(name=name)
        if name == ':memory:':
            Database._local.memory_database = self

    def connect(self):
        # Open the connection (idempotent) and register the Python
        # implementations of the SQL functions this backend relies on.
        if self._conn is not None:
            return self
        self._conn = sqlite.connect(
            self._make_uri(), uri=True,
            detect_types=sqlite.PARSE_DECLTYPES | sqlite.PARSE_COLNAMES,
            factory=SQLiteConnection)
        self._conn.create_function('extract', 2, SQLiteExtract.extract)
        self._conn.create_function('date_trunc', 2, date_trunc)
        self._conn.create_function('split_part', 3, split_part)
        self._conn.create_function('to_char', 2, to_char)
        if sqlite.sqlite_version_info < (3, 3, 14):
            # replace() is built-in from SQLite 3.3.14 onward.
            self._conn.create_function('replace', 3, replace)
        self._conn.create_function('now', 0, now)
        self._conn.create_function('greatest', -1, greatest)
        self._conn.create_function('least', -1, least)
        self._conn.create_function('bool_and', -1, bool_and)
        self._conn.create_function('bool_or', -1, bool_or)
        # Mathematical functions
        self._conn.create_function('cbrt', 1, cbrt)
        self._conn.create_function('ceil', 1, math.ceil)
        self._conn.create_function('degrees', 1, math.degrees)
        self._conn.create_function('div', 2, div)
        self._conn.create_function('exp', 1, math.exp)
        self._conn.create_function('floor', 1, math.floor)
        self._conn.create_function('ln', 1, math.log)
        self._conn.create_function('log', 1, math.log10)
        self._conn.create_function('mod', 2, math.fmod)
        self._conn.create_function('pi', 0, lambda: math.pi)
        self._conn.create_function('power', 2, math.pow)
        self._conn.create_function('radians', 1, math.radians)
        self._conn.create_function('sign', 1, sign)
        self._conn.create_function('sqrt', 1, math.sqrt)
        self._conn.create_function('trunc', 1, math.trunc)
        self._conn.create_function('trunc', 2, trunc)
        # Trigonometric functions
        self._conn.create_function('acos', 1, math.acos)
        self._conn.create_function('asin', 1, math.asin)
        self._conn.create_function('atan', 1, math.atan)
        self._conn.create_function('atan2', 2, math.atan2)
        self._conn.create_function('cos', 1, math.cos)
        self._conn.create_function(
            'cot', 1, lambda x: 1 / math.tan(x) if x else math.inf)
        self._conn.create_function('sin', 1, math.sin)
        self._conn.create_function('tan', 1, math.tan)
        # Random functions
        self._conn.create_function('random', 0, random.random)
        self._conn.create_function('setseed', 1, random.seed)
        # String functions
        self._conn.create_function('overlay', 3, SQLiteOverlay.overlay)
        self._conn.create_function('overlay', 4, SQLiteOverlay.overlay)
        self._conn.create_function('position', 2, SQLitePosition.position)
        # Echo every statement to the logger when debugging is on.
        if (hasattr(self._conn, 'set_trace_callback')
                and logger.isEnabledFor(logging.DEBUG)):
            self._conn.set_trace_callback(logger.debug)
        self._conn.execute('PRAGMA foreign_keys = ON')
        return self

    def _make_uri(self):
        # Build a 'file:' URI from the configured 'sqlite:' URI, keeping
        # its query string but replacing the path with this database's.
        uri = config.get('database', 'uri')
        base_uri = parse_uri(uri)
        if base_uri.path and base_uri.path != '/':
            warnings.warn("The path specified in the URI will be overridden")
        if self.name == ':memory:':
            query_string = urllib.parse.parse_qs(base_uri.query)
            query_string['mode'] = 'memory'
            query = urllib.parse.urlencode(query_string, doseq=True)
            db_uri = base_uri._replace(netloc='', path='/', query=query)
        else:
            # safe_join refuses paths escaping the configured directory.
            db_path = safe_join(
                config.get('database', 'path'), self.name + '.sqlite')
            if not os.path.isfile(db_path):
                raise IOError("Database '%s' doesn't exist!" % db_path)
            db_uri = base_uri._replace(path=db_path)
        # Use unparse before replacing sqlite with file because SQLite accepts
        # a relative path URI like file:db/test.sqlite which doesn't conform to
        # RFC8089 which urllib follows and enforces when the scheme is 'file'
        db_uri = urllib.parse.urlunparse(db_uri)
        return db_uri.replace('sqlite', 'file', 1)

    def get_connection(
            self, autocommit=False, readonly=False, statement_timeout=None):
        # readonly and statement_timeout are accepted for interface
        # compatibility but are not used by this backend.
        if self._conn is None:
            self.connect()
        if autocommit:
            self._conn.isolation_level = None
        else:
            self._conn.isolation_level = 'IMMEDIATE'
        return self._conn

    def put_connection(self, connection=None, close=False):
        # Single shared connection: nothing to return to a pool.
        pass

    def close(self):
        if self.name == ':memory:':
            return
        if self._conn is None:
            return
        # The connection object is only dereferenced here, not explicitly
        # closed.
        self._conn = None

    @classmethod
    def create(cls, connection, database_name):
        if database_name == ':memory:':
            path = ':memory:'
        else:
            # Refuse names containing a path separator (path traversal).
            if os.sep in database_name:
                return
            path = os.path.join(config.get('database', 'path'),
                database_name + '.sqlite')
        # Connecting is enough to create the database file.
        with sqlite.connect(path) as conn:
            cursor = conn.cursor()
            cursor.close()

    @classmethod
    def drop(cls, connection, database_name):
        if database_name == ':memory:':
            # Dropping the in-memory database just discards its connection.
            cls._local.memory_database._conn = None
            return
        # Refuse names containing a path separator (path traversal).
        if os.sep in database_name:
            return
        os.remove(os.path.join(config.get('database', 'path'),
            database_name + '.sqlite'))

    def list(self, hostname=None):
        # Return the names of databases that pass test(); ':memory:' is
        # always considered a candidate.
        res = []
        listdir = [':memory:']
        try:
            listdir += os.listdir(config.get('database', 'path'))
        except OSError:
            pass
        for db_file in listdir:
            if db_file.endswith('.sqlite') or db_file == ':memory:':
                if db_file == ':memory:':
                    db_name = ':memory:'
                else:
                    # Strip the '.sqlite' suffix.
                    db_name = db_file[:-7]
                try:
                    database = Database(db_name).connect()
                except Exception:
                    logger.debug(
                        'Test failed for "%s"', db_name, exc_info=True)
                    continue
                if database.test(hostname=hostname):
                    res.append(db_name)
                database.close()
        return res

    def init(self):
        # Bootstrap the schema from init.sql and register the 'ir' and
        # 'res' modules with their dependencies.
        from trytond.modules import get_module_info
        Flavor.set(self.flavor)
        with self.get_connection() as conn:
            cursor = conn.cursor()
            sql_file = os.path.join(os.path.dirname(__file__), 'init.sql')
            with open(sql_file) as fp:
                # Naive statement split on ';' - init.sql must not contain
                # that character inside statements or comments.
                for line in fp.read().split(';'):
                    if (len(line) > 0) and (not line.isspace()):
                        cursor.execute(line)
            ir_module = Table('ir_module')
            ir_module_dependency = Table('ir_module_dependency')
            for module in ['ir', 'res']:
                info = get_module_info(module)
                insert = ir_module.insert(
                    [ir_module.create_uid, ir_module.create_date,
                        ir_module.name, ir_module.state],
                    [[0, CurrentTimestamp(), module, 'to activate']])
                cursor.execute(*insert)
                cursor.execute('SELECT last_insert_rowid()')
                module_id, = cursor.fetchone()
                for dependency in info.get('depends', []):
                    insert = ir_module_dependency.insert(
                        [ir_module_dependency.create_uid,
                            ir_module_dependency.create_date,
                            ir_module_dependency.module,
                            ir_module_dependency.name,
                            ],
                        [[0, CurrentTimestamp(), module_id, dependency]])
                    cursor.execute(*insert)
            conn.commit()

    def test(self, hostname=None):
        # A database is valid when all core tables exist and, if a
        # hostname is given, it matches ir_configuration (when set).
        Flavor.set(self.flavor)
        tables = ['ir_model', 'ir_model_field', 'ir_ui_view', 'ir_ui_menu',
            'res_user', 'res_group', 'ir_module', 'ir_module_dependency',
            'ir_translation', 'ir_lang', 'ir_configuration']
        sqlite_master = Table('sqlite_master')
        select = sqlite_master.select(sqlite_master.name)
        select.where = sqlite_master.type == 'table'
        select.where &= sqlite_master.name.in_(tables)
        with self._conn as conn:
            cursor = conn.cursor()
            try:
                cursor.execute(*select)
            except Exception:
                return False
            if len(cursor.fetchall()) != len(tables):
                return False
            if hostname:
                configuration = Table('ir_configuration')
                try:
                    cursor.execute(*configuration.select(
                            configuration.hostname))
                except Exception:
                    return False
                hostnames = {h for h, in cursor if h}
                if hostnames and hostname not in hostnames:
                    return False
        return True

    def lastid(self, cursor):
        # This call is not thread safe
        return cursor.lastrowid

    def lock(self, connection, table):
        # Table locks are not supported by this backend.
        pass

    def lock_id(self, id, timeout=None):
        # Advisory locks are not supported: always report success.
        return Literal(True)

    def has_constraint(self, constraint):
        return False

    def has_multirow_insert(self):
        return True

    def has_window_functions(self):
        # Window functions arrived in SQLite 3.25.0.
        return sqlite.sqlite_version_info >= (3, 25, 0)

    def sql_type(self, type_):
        # Translate a generic type name into the declared SQLite type.
        if type_ in self.TYPES_MAPPING:
            return self.TYPES_MAPPING[type_]
        if type_.startswith('VARCHAR'):
            return SQLType('VARCHAR', type_)
        return SQLType(type_, type_)

    def sql_format(self, type_, value):
        # Coerce plain values to int for integer columns; SQL expressions
        # pass through untouched.
        if type_ in ('INTEGER', 'BIGINT'):
            if (value is not None
                    and not isinstance(value, (Query, Expression))):
                value = int(value)
        return value

    def json_get(self, column, key=None):
        # JSON_QUOTE + NULLIF normalises a JSON null to SQL NULL.
        if key:
            column = JSONExtract(column, '$.%s' % key)
        return NullIf(JSONQuote(column), JSONQuote(Null))
# NUMERIC columns are stored as strings and read back as Decimal so that
# exact precision is preserved.
sqlite.register_converter('NUMERIC', lambda val: Decimal(val.decode('utf-8')))
sqlite.register_adapter(Decimal, lambda val: str(val).encode('utf-8'))
def adapt_datetime(val):
    """Serialize a datetime for storage: timezone info is stripped and the
    value rendered as an ISO string with a space separator (the fractional
    part is omitted by isoformat() when microsecond is zero)."""
    naive = val.replace(tzinfo=None)
    return naive.isoformat(sep=' ')
sqlite.register_adapter(datetime.datetime, adapt_datetime)
# Times are stored in ISO form and read back by splitting on ':'.
# NOTE(review): a time with microseconds serializes with a fractional
# seconds part that this converter cannot parse back - confirm times are
# always stored without microseconds.
sqlite.register_adapter(datetime.time, lambda val: val.isoformat())
sqlite.register_converter('TIME',
    lambda val: datetime.time(*map(int, val.decode('utf-8').split(':'))))
# Intervals are stored as a float number of total seconds.
sqlite.register_adapter(datetime.timedelta, lambda val: val.total_seconds())
# Bounds of the representable timedelta range, in seconds.
_interval_max = datetime.timedelta.max.total_seconds()
_interval_min = datetime.timedelta.min.total_seconds()


def convert_interval(value):
    """Convert a stored total-seconds value back into a timedelta.

    It is not allowed to instantiate timedelta with the min/max total
    seconds, so values at or beyond the bounds are clamped to
    timedelta.max / timedelta.min.
    """
    seconds = float(value)
    if seconds >= _interval_max:
        return datetime.timedelta.max
    if seconds <= _interval_min:
        return datetime.timedelta.min
    return datetime.timedelta(seconds=seconds)


sqlite.register_converter('INTERVAL', convert_interval)

185
backend/sqlite/init.sql Executable file
View File

@@ -0,0 +1,185 @@
-- Bootstrap schema for the SQLite backend, executed by Database.init().
-- That loader splits this file on the statement terminator character,
-- so comments here must never contain one.
-- Most tables carry the standard audit columns create_date, create_uid,
-- write_date and write_uid.

-- Server-wide configuration (default language, allowed hostname).
CREATE TABLE ir_configuration (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    language VARCHAR,
    hostname VARCHAR,
    create_date TIMESTAMP,
    create_uid INTEGER,
    write_date TIMESTAMP,
    write_uid INTEGER
);

-- Registry of ORM models.
CREATE TABLE ir_model (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    model VARCHAR,
    name VARCHAR,
    info TEXT,
    module VARCHAR,
    create_date TIMESTAMP,
    create_uid INTEGER,
    write_date TIMESTAMP,
    write_uid INTEGER
);

-- Registry of fields per model.
CREATE TABLE ir_model_field (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    model VARCHAR,
    name VARCHAR,
    relation VARCHAR,
    field_description VARCHAR,
    ttype VARCHAR,
    help TEXT,
    module VARCHAR,
    "access" BOOLEAN,
    create_date TIMESTAMP,
    create_uid INTEGER,
    write_date TIMESTAMP,
    write_uid INTEGER
);

-- View definitions (XML data plus inheritance links).
CREATE TABLE ir_ui_view (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    model VARCHAR,
    "type" VARCHAR,
    data TEXT,
    field_childs VARCHAR,
    priority INTEGER,
    domain VARCHAR,
    inherit INTEGER,
    module VARCHAR,
    name VARCHAR,
    create_date TIMESTAMP,
    create_uid INTEGER,
    write_date TIMESTAMP,
    write_uid INTEGER
);

-- Menu entries (self-referencing tree via parent).
CREATE TABLE ir_ui_menu (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    parent INTEGER,
    name VARCHAR,
    icon VARCHAR,
    active BOOLEAN,
    sequence INTEGER,
    create_date TIMESTAMP,
    create_uid INTEGER,
    write_date TIMESTAMP,
    write_uid INTEGER
);

-- Translated strings keyed by language, resource name and type.
CREATE TABLE ir_translation (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    lang VARCHAR,
    src TEXT,
    name VARCHAR,
    res_id INTEGER,
    value TEXT,
    "type" VARCHAR,
    module VARCHAR,
    fuzzy BOOLEAN,
    overriding_module VARCHAR,
    create_date TIMESTAMP,
    create_uid INTEGER,
    write_date TIMESTAMP,
    write_uid INTEGER
);

-- Languages with their number/date formatting settings.
CREATE TABLE ir_lang (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name VARCHAR,
    code VARCHAR,
    translatable BOOLEAN,
    parent VARCHAR,
    active BOOLEAN,
    direction VARCHAR,
    am VARCHAR,
    pm VARCHAR,
    "date" VARCHAR,
    grouping VARCHAR,
    decimal_point VARCHAR,
    thousands_sep VARCHAR,
    mon_grouping VARCHAR,
    mon_decimal_point VARCHAR,
    mon_thousands_sep VARCHAR,
    p_sign_posn INTEGER,
    n_sign_posn INTEGER,
    positive_sign VARCHAR,
    negative_sign VARCHAR,
    p_cs_precedes BOOLEAN,
    n_cs_precedes BOOLEAN,
    p_sep_by_space BOOLEAN,
    n_sep_by_space BOOLEAN,
    pg_text_search VARCHAR,
    create_date TIMESTAMP,
    create_uid INTEGER,
    write_date TIMESTAMP,
    write_uid INTEGER
);

-- User accounts.
CREATE TABLE res_user (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name VARCHAR,
    active BOOLEAN,
    login VARCHAR,
    password VARCHAR,
    email VARCHAR,
    language INTEGER,
    menu INTEGER,
    password_hash VARCHAR,
    password_reset VARCHAR,
    password_reset_expire TIMESTAMP,
    signature TEXT,
    create_date TIMESTAMP,
    create_uid INTEGER,
    write_date TIMESTAMP,
    write_uid INTEGER
);

-- Seed the inactive root user with id 0 (referenced by create_uid 0).
INSERT INTO res_user (id, login, password, name, active) VALUES (0, 'root', NULL, 'Root', 0);

-- User groups.
CREATE TABLE res_group (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name VARCHAR
);

-- Many-to-many relation between users and groups.
CREATE TABLE "res_user-res_group" (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    "user" INTEGER,
    "group" INTEGER,
    active BOOLEAN,
    parent INTEGER,
    create_date TIMESTAMP,
    create_uid INTEGER,
    write_date TIMESTAMP,
    write_uid INTEGER
);

-- Installed modules with their activation state.
CREATE TABLE ir_module (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    create_uid INTEGER,
    create_date TIMESTAMP,
    write_date TIMESTAMP,
    write_uid INTEGER,
    name VARCHAR,
    state VARCHAR
);

-- Dependencies between modules ("module" references ir_module.id).
CREATE TABLE ir_module_dependency (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    create_uid INTEGER,
    create_date TIMESTAMP,
    write_date TIMESTAMP,
    write_uid INTEGER,
    name VARCHAR,
    module INTEGER
);

-- Cache invalidation timestamps per cache name.
CREATE TABLE ir_cache (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name VARCHAR,
    "timestamp" TIMESTAMP,
    create_date TIMESTAMP,
    create_uid INTEGER,
    write_date TIMESTAMP,
    write_uid INTEGER
);

383
backend/sqlite/table.py Executable file
View File

@@ -0,0 +1,383 @@
# This file is part of Tryton. The COPYRIGHT file at the top level of
# this repository contains the full copyright notices and license terms.
import logging
import re
import warnings
from weakref import WeakKeyDictionary
from trytond.backend.table import (
IndexTranslatorInterface, TableHandlerInterface)
from trytond.transaction import Transaction
from .database import sqlite
__all__ = ['TableHandler']

# Module-level logger for the SQLite table handler.
logger = logging.getLogger(__name__)

# Extracts the declared size from a type like 'VARCHAR(64)'.
VARCHAR_SIZE_RE = re.compile(r'VARCHAR\(([0-9]+)\)')
def _escape_identifier(name):
return '"%s"' % name.replace('"', '""')
class TableHandler(TableHandlerInterface):
    """SQLite implementation of the table handler interface.

    SQLite's limited ALTER TABLE support means several operations
    (resizing/retyping columns, renames or drops on older versions) are
    performed by recreating the table and copying the rows across.
    """
    __handlers = WeakKeyDictionary()
    # Translator classes registered by IndexMixin.__init_subclass__.
    index_translators = []

    def _init(self, model, history=False):
        super()._init(model, history=history)
        # Lazily-populated caches for _columns / _indexes.
        self.__columns = None
        self.__indexes = None
        self._model = model
        cursor = Transaction().connection.cursor()
        # Create new table if necessary
        if not self.table_exist(self.table_name):
            if not self.history:
                cursor.execute('CREATE TABLE %s '
                    '(id INTEGER PRIMARY KEY AUTOINCREMENT)'
                    % _escape_identifier(self.table_name))
            else:
                # History tables get their own __id plus the record id.
                cursor.execute('CREATE TABLE %s '
                    '(__id INTEGER PRIMARY KEY AUTOINCREMENT, '
                    'id INTEGER)' % _escape_identifier(self.table_name))
        self._update_definitions()

    @classmethod
    def table_exist(cls, table_name):
        # Look the table up in sqlite_master.
        cursor = Transaction().connection.cursor()
        cursor.execute("SELECT sql FROM sqlite_master "
            "WHERE type = 'table' AND name = ?",
            (table_name,))
        res = cursor.fetchone()
        if not res:
            return False
        return True

    @classmethod
    def table_rename(cls, old_name, new_name):
        cursor = Transaction().connection.cursor()
        if (cls.table_exist(old_name)
                and not cls.table_exist(new_name)):
            cursor.execute('ALTER TABLE %s RENAME TO %s'
                % (_escape_identifier(old_name), _escape_identifier(new_name)))
        # Rename history table
        old_history = old_name + "__history"
        new_history = new_name + "__history"
        if (cls.table_exist(old_history)
                and not cls.table_exist(new_history)):
            cursor.execute('ALTER TABLE %s RENAME TO %s'
                % (_escape_identifier(old_history),
                    _escape_identifier(new_history)))

    def column_exist(self, column_name):
        return column_name in self._columns

    def _recreate_table(self, update_columns=None, drop_columns=None):
        # Rebuild the table to apply changes ALTER TABLE cannot express:
        # rename the current table aside, re-create it, re-add columns
        # (with updates from update_columns, skipping drop_columns) and
        # copy the rows over.
        if update_columns is None:
            update_columns = {}
        if drop_columns is None:
            drop_columns = []
        transaction = Transaction()
        database = transaction.database
        cursor = transaction.connection.cursor()
        temp_table = '__temp_%s' % self.table_name
        temp_columns = dict(self._columns)
        self.table_rename(self.table_name, temp_table)
        self._init(self._model, history=self.history)
        columns, old_columns = [], []
        for name, values in temp_columns.items():
            if name in drop_columns:
                continue
            typname = update_columns.get(name, {}).get(
                'typname', values['typname'])
            size = update_columns.get(name, {}).get('size', values['size'])
            # NOTE(review): name is rebound to the new name before being
            # appended to old_columns too, so the INSERT..SELECT below
            # selects the new name from the temporary table - confirm
            # renames through this path behave as intended.
            name = update_columns.get(name, {}).get('name', name)
            self._add_raw_column(
                name, database.sql_type(typname), field_size=size)
            columns.append(name)
            old_columns.append(name)
        cursor.execute(('INSERT INTO %s ('
                + ','.join(_escape_identifier(x) for x in columns)
                + ') SELECT '
                + ','.join(_escape_identifier(x) for x in old_columns)
                + ' FROM %s') % (
                _escape_identifier(self.table_name),
                _escape_identifier(temp_table)))
        cursor.execute('DROP TABLE %s' % _escape_identifier(temp_table))
        self._update_definitions()

    def column_rename(self, old_name, new_name):
        cursor = Transaction().connection.cursor()
        if self.column_exist(old_name):
            if not self.column_exist(new_name):
                # RENAME COLUMN exists from SQLite 3.25.0; older versions
                # fall back to rebuilding the table.
                if sqlite.sqlite_version_info >= (3, 25, 0):
                    cursor.execute('ALTER TABLE %s RENAME COLUMN %s TO %s' % (
                            _escape_identifier(self.table_name),
                            _escape_identifier(old_name),
                            _escape_identifier(new_name)))
                    self._update_definitions(columns=True)
                else:
                    self._recreate_table({old_name: {'name': new_name}})
            else:
                logger.warning(
                    'Unable to rename column %s on table %s to %s.',
                    old_name, self.table_name, new_name)

    @property
    def _columns(self):
        # Cached {column name: {notnull, hasdef, size, typname}} read
        # from PRAGMA table_info.
        if self.__columns is None:
            cursor = Transaction().connection.cursor()
            cursor.execute('PRAGMA table_info("' + self.table_name + '")')
            self.__columns = {}
            for _, column, type_, notnull, hasdef, _ in cursor:
                column = re.sub(r'^\"|\"$', '', column)
                # Split a declared type like 'VARCHAR(64)' into name/size.
                match = re.match(r'(\w+)(\((.*?)\))?', type_)
                if match:
                    typname = match.group(1).upper()
                    size = match.group(3) and int(match.group(3)) or 0
                else:
                    typname = type_.upper()
                    size = None
                self.__columns[column] = {
                    'notnull': notnull,
                    'hasdef': hasdef,
                    'size': size,
                    'typname': typname,
                    }
        return self.__columns

    @property
    def _indexes(self):
        # Cached list of index names from PRAGMA index_list.
        if self.__indexes is None:
            cursor = Transaction().connection.cursor()
            try:
                cursor.execute('PRAGMA index_list("' + self.table_name + '")')
            except IndexError:  # There is sometimes IndexError
                cursor.execute('PRAGMA index_list("' + self.table_name + '")')
            self.__indexes = [l[1] for l in cursor]
        return self.__indexes

    def _update_definitions(self, columns=True):
        # Invalidate the column cache so it is re-read on next access.
        if columns:
            self.__columns = None

    def alter_size(self, column_name, column_type):
        self._recreate_table({column_name: {'size': column_type}})

    def alter_type(self, column_name, column_type):
        self._recreate_table({column_name: {'typname': column_type}})

    def column_is_type(self, column_name, type_, *, size=-1):
        db_type = self._columns[column_name]['typname'].upper()
        database = Transaction().database
        base_type = database.sql_type(type_).base.upper()
        # Size is only compared for VARCHAR with a non-negative size.
        if base_type == 'VARCHAR' and (size is None or size >= 0):
            same_size = self._columns[column_name]['size'] == size
        else:
            same_size = True
        return base_type == db_type and same_size

    def db_default(self, column_name, value):
        warnings.warn('Unable to set default on column with SQLite backend')

    def add_column(self, column_name, sql_type, default=None, comment=''):
        database = Transaction().database
        column_type = database.sql_type(sql_type)
        match = VARCHAR_SIZE_RE.match(sql_type)
        field_size = int(match.group(1)) if match else None
        self._add_raw_column(column_name, column_type, default, field_size,
            comment)

    def _add_raw_column(self, column_name, column_type, default=None,
            field_size=None, string=''):
        # Add the column, or migrate its type/size when it already exists.
        if self.column_exist(column_name):
            base_type = column_type[0].upper()
            if base_type != self._columns[column_name]['typname']:
                # Only these widening conversions are attempted.
                if (self._columns[column_name]['typname'], base_type) in [
                        ('VARCHAR', 'TEXT'),
                        ('TEXT', 'VARCHAR'),
                        ('DATE', 'TIMESTAMP'),
                        ('INTEGER', 'FLOAT'),
                        ('INTEGER', 'NUMERIC'),
                        ('FLOAT', 'NUMERIC'),
                        ]:
                    self.alter_type(column_name, base_type)
                else:
                    logger.warning(
                        'Unable to migrate column %s on table %s '
                        'from %s to %s.',
                        column_name, self.table_name,
                        self._columns[column_name]['typname'], base_type)
            if (base_type == 'VARCHAR'
                    and self._columns[column_name]['typname'] == 'VARCHAR'):
                # Migrate size
                from_size = self._columns[column_name]['size']
                if field_size is None:
                    if from_size > 0:
                        self.alter_size(column_name, base_type)
                elif from_size == field_size:
                    pass
                elif from_size and from_size < field_size:
                    self.alter_size(column_name, column_type[1])
                else:
                    logger.warning(
                        'Unable to migrate column %s on table %s '
                        'from varchar(%s) to varchar(%s).',
                        column_name, self.table_name,
                        from_size if from_size and from_size > 0 else "",
                        field_size)
            return
        cursor = Transaction().connection.cursor()
        column_type = column_type[1]
        cursor.execute(('ALTER TABLE %s ADD COLUMN %s %s') % (
                _escape_identifier(self.table_name),
                _escape_identifier(column_name),
                column_type))
        if default:
            # check if table is non-empty:
            cursor.execute('SELECT 1 FROM %s limit 1'
                % _escape_identifier(self.table_name))
            if cursor.fetchone():
                # Populate column with default values:
                cursor.execute('UPDATE ' + _escape_identifier(self.table_name)
                    + ' SET ' + _escape_identifier(column_name) + ' = ?',
                    (default(),))
        self._update_definitions(columns=True)

    def add_fk(self, columns, reference, ref_columns=None, on_delete=None):
        warnings.warn('Unable to add foreign key with SQLite backend')

    def drop_fk(self, columns=None, ref_columns=None, table=None):
        warnings.warn('Unable to drop foreign key with SQLite backend')

    def not_null_action(self, column_name, action='add'):
        if not self.column_exist(column_name):
            return
        if action == 'add':
            warnings.warn('Unable to set not null with SQLite backend')
        elif action == 'remove':
            warnings.warn('Unable to remove not null with SQLite backend')
        else:
            raise Exception('Not null action not supported!')

    def add_constraint(self, ident, constraint):
        warnings.warn('Unable to add constraint with SQLite backend')

    def drop_constraint(self, ident, table=None):
        warnings.warn('Unable to drop constraint with SQLite backend')

    def set_indexes(self, indexes, concurrently=False):
        # Create the requested indexes and drop the managed ones that are
        # no longer wanted; 'concurrently' is ignored by SQLite.
        cursor = Transaction().connection.cursor()
        old = set(self._indexes)
        for index in indexes:
            translator = self.index_translator_for(index)
            if translator:
                name, query, params = translator.definition(index)
                name = '_'.join([self.table_name, name])
                name = 'idx_' + self.convert_name(name, reserved=len('idx_'))
                # SQLite does not support parameters for index creation
                if not params:
                    cursor.execute(
                        'CREATE INDEX IF NOT EXISTS %s ON %s %s' % (
                            _escape_identifier(name),
                            _escape_identifier(self.table_name),
                            query),
                        params)
                else:
                    warnings.warn("Can not create index with parameters")
                old.discard(name)
        for name in old:
            # Only drop indexes that look like ones this handler manages.
            if name.startswith('idx_') or name.endswith('_index'):
                cursor.execute('DROP INDEX %s' % _escape_identifier(name))
        self.__indexes = None

    def drop_column(self, column_name):
        if not self.column_exist(column_name):
            return
        transaction = Transaction()
        cursor = transaction.connection.cursor()
        # DROP COLUMN exists from SQLite 3.35.0; older versions fall back
        # to rebuilding the table without the column.
        if sqlite.sqlite_version_info >= (3, 35, 0):
            cursor.execute('ALTER TABLE %s DROP COLUMN %s' % (
                    _escape_identifier(self.table_name),
                    _escape_identifier(column_name)))
            self._update_definitions(columns=True)
        else:
            self._recreate_table(drop_columns=[column_name])

    @classmethod
    def drop_table(cls, model, table, cascade=False):
        # Remove the model's ir_model_data rows before dropping the table.
        cursor = Transaction().connection.cursor()
        cursor.execute('DELETE from ir_model_data where model = ?',
            (model,))
        query = 'DROP TABLE %s' % _escape_identifier(table)
        if cascade:
            query = query + ' CASCADE'
        cursor.execute(query)
class IndexMixin:
    """Shared definition logic for SQLite index translators."""

    def __init_subclass__(cls):
        # Auto-register every concrete translator with TableHandler.
        TableHandler.index_translators.append(cls)

    @classmethod
    def definition(cls, index):
        # Build the CREATE INDEX tail '(<expressions>) [WHERE ...]'
        # together with a derived name and the collected parameters.
        expr_template = '%(expression)s %(collate)s %(order)s'
        params = []
        expressions = []
        for expression, usage in index.expressions:
            expressions.append(expr_template %
                cls._get_expression_variables(expression, usage))
            params.extend(expression.params)
        where = ''
        if index.options.get('where'):
            where = 'WHERE %s' % index.options['where']
            params.extend(index.options['where'].params)
        query = '(%(expressions)s) %(where)s' % {
            'expressions': ','.join(expressions),
            'where': where,
            }
        name = cls._get_name(query, params)
        return name, query, params

    @classmethod
    def _get_expression_variables(cls, expression, usage):
        # Render one indexed expression with optional COLLATE and order.
        variables = {
            'expression': str(expression),
            'collate': '',
            'order': '',
            }
        if usage.options.get('collation'):
            variables['collate'] = 'COLLATE %s' % usage.options['collation']
        if usage.options.get('order'):
            order = usage.options['order'].upper()
            # Strip NULLS FIRST/LAST, which SQLite index order does not take.
            for predicate in ['NULLS FIRST', 'NULLS LAST']:
                if order.endswith(predicate):
                    order = order[:-len(predicate)]
            variables['order'] = order
        return variables
class IndexTranslator(IndexMixin, IndexTranslatorInterface):
    """Generic translator scoring indexes by their supported usages."""

    @classmethod
    def score(cls, index):
        # 100 points for each expression whose usage type is supported
        # (Equality or Range); used to pick the best translator.
        supported_indexes_count = sum(
            int(u.__class__.__name__ in {'Equality', 'Range'})
            for _, u in index.expressions)
        return supported_indexes_count * 100