Coverage for src/pydal2sql_core/core.py: 100%
84 statements
« prev ^ index » next coverage.py v7.3.2, created at 2023-12-04 18:34 +0100
« prev ^ index » next coverage.py v7.3.2, created at 2023-12-04 18:34 +0100
1"""
2Main functionality.
3"""
4import pickle # nosec: B403
5import typing
6from pathlib import Path
7from typing import Any
9from pydal.adapters import MySQL, Postgre, SQLite
10from pydal.dialects import Dialect, MySQLDialect, PostgreDialect, SQLiteDialect
11from pydal.migrator import Migrator
12from pydal.objects import Table
14from .helpers import TempdirOrExistingDir, get_typing_args
15from .types import (
16 SUPPORTED_DATABASE_TYPES,
17 SUPPORTED_DATABASE_TYPES_WITH_ALIASES,
18 CustomAdapter,
19 DummyDAL,
20 SQLAdapter,
21)
def _build_dummy_migrator(_driver_name: SUPPORTED_DATABASE_TYPES_WITH_ALIASES, /, db_folder: str) -> Migrator:
    """
    Create a Migrator specific to the sql dialect of _driver_name.
    """
    # A throwaway DAL: never connects, only exists to host the dummy adapter.
    db = DummyDAL(None, migrate=False, folder=db_folder)

    # User-facing aliases mapped onto canonical driver module names.
    aliases = {
        "postgresql": "psycopg2",
        "postgres": "psycopg2",
        "psql": "psycopg2",
        "sqlite": "sqlite3",
        "sqlite:memory": "sqlite3",
        "mysql": "pymysql",
    }

    lowered = _driver_name.lower()
    driver_name = aliases.get(lowered, lowered)

    if driver_name not in get_typing_args(SUPPORTED_DATABASE_TYPES):
        raise ValueError(
            f"Unsupported database type {driver_name}. "
            f"Choose one of {get_typing_args(SUPPORTED_DATABASE_TYPES_WITH_ALIASES)}"
        )

    # One entry per canonical driver: (adapter class, dialect class).
    backends: dict[str, tuple[typing.Type[SQLAdapter], typing.Type[Dialect]]] = {
        "psycopg2": (Postgre, PostgreDialect),
        "sqlite3": (SQLite, SQLiteDialect),
        "pymysql": (MySQL, MySQLDialect),
    }
    adapter_cls, sql_dialect = backends[driver_name]

    installed_driver = db._drivers_available.get(driver_name)

    if not installed_driver:  # pragma: no cover
        raise ValueError(f"Please install the correct driver for database type {driver_name}")

    class DummyAdapter(CustomAdapter):
        # Borrow the column-type mapping and engine name from the real adapter,
        # but run against the installed driver without a live connection.
        types = adapter_cls.types
        driver = installed_driver
        dbengine = adapter_cls.dbengine

        commit_on_alter_table = True

    adapter = DummyAdapter(db, "", adapter_args={"driver": installed_driver})
    adapter.dialect = sql_dialect(adapter)
    db._adapter = adapter

    return Migrator(adapter)
def generate_create_statement(
    define_table: Table,
    db_type: typing.Optional[SUPPORTED_DATABASE_TYPES_WITH_ALIASES] = None,
    *,
    db_folder: typing.Optional[str] = None,
) -> str:
    """
    Given a Table object (result of `db.define_table('mytable')` or simply db.mytable) \
    and a db type (e.g. postgres, sqlite, mysql), generate the `CREATE TABLE` SQL for that dialect.

    If no db_type is supplied, the type is guessed from the specified table.
    However, your db_type can differ from the current database used.
    You can even use a dummy database to generate SQL code with:
    `db = pydal.DAL(None, migrate=False)`

    db_folder is the database folder where migration (`.table`) files are stored.
    By default, a random temporary dir is created.

    Raises:
        ValueError: If `db_type` is not provided and cannot be guessed from `define_table._db`.
    """
    if not db_type:
        # Fall back to the dialect of the table's own database, if it has one.
        db_type = getattr(define_table._db, "_dbname", None)

        if db_type is None:
            raise ValueError("Database dialect could not be guessed from code; Please manually define a database type!")

    with TempdirOrExistingDir(db_folder) as db_folder:
        migrator = _build_dummy_migrator(db_type, db_folder=db_folder)

        # fake_migrate=True makes pydal emit the SQL without touching a real database.
        sql: str = migrator.create_table(
            define_table,
            migrate=False,
            fake_migrate=True,
        )
    return sql
def sql_fields_through_tablefile(
    define_table: Table,
    db_folder: typing.Optional[str | Path] = None,
    db_type: typing.Optional[SUPPORTED_DATABASE_TYPES_WITH_ALIASES] = None,
) -> dict[str, Any]:
    """
    Generate SQL fields for the given `Table` object by simulating migration via a table file.

    Args:
        define_table (Table): The `Table` object representing the table for which SQL fields are generated.
        db_folder (str or Path, optional): The path to the database folder or directory to use. If not specified,
            a temporary directory is used for the operation. Defaults to None.
        db_type (str or SUPPORTED_DATABASE_TYPES_WITH_ALIASES, optional): The type of the database (e.g., "postgres",
            "mysql", etc.). If not provided, the database type will be guessed based on the `define_table` object.
            If the guess fails, a ValueError is raised. Defaults to None.

    Returns:
        dict[str, Any]: A dictionary containing the generated SQL fields for the `Table` object. The keys
            of the dictionary are field names, and the values are additional field information.

    Raises:
        ValueError: If the `db_type` is not provided, and it cannot be guessed from the `define_table` object.
    """
    if not db_type:
        # Fall back to the dialect of the table's own database, if it has one.
        db_type = getattr(define_table._db, "_dbname", None)

        if db_type is None:
            raise ValueError("Database dialect could not be guessed from code; Please manually define a database type!")

    with TempdirOrExistingDir(db_folder) as db_folder:
        migrator = _build_dummy_migrator(db_type, db_folder=db_folder)

        # migrate=True + fake_migrate=True: write the `.table` pickle file
        # without issuing any SQL against a real database.
        migrator.create_table(
            define_table,
            migrate=True,
            fake_migrate=True,
        )

        # The migrator stored the field metadata in the table file; read it back.
        # Safe to unpickle: the file was just written by our own migrator above.
        with (Path(db_folder) / define_table._dbt).open("rb") as tfile:
            loaded_tables = pickle.load(tfile)  # nosec B301

    return typing.cast(dict[str, Any], loaded_tables)
def generate_alter_statement(
    define_table_old: Table,
    define_table_new: Table,
    /,
    db_type: typing.Optional[SUPPORTED_DATABASE_TYPES_WITH_ALIASES] = None,
    *,
    db_folder: typing.Optional[str] = None,
) -> str:
    """
    Generate SQL ALTER statements to update the `define_table_old` to `define_table_new`.

    Args:
        define_table_old (Table): The `Table` object representing the old version of the table.
        define_table_new (Table): The `Table` object representing the new version of the table.
        db_type (str or SUPPORTED_DATABASE_TYPES_WITH_ALIASES, optional): The type of the database (e.g., "postgres",
            "mysql", etc.). If not provided, the database type will be guessed based on the `_db` attribute of the
            `define_table_old` and `define_table_new` objects.
            If the guess fails, a ValueError is raised. Defaults to None.
        db_folder (str, optional): The path to the database folder or directory to use. If not specified,
            a temporary directory is used for the operation. Defaults to None.

    Returns:
        str: A string containing SQL ALTER statements that update the `define_table_old` to `define_table_new`.
            Empty string when the two definitions produce no changes.

    Raises:
        ValueError: If the `db_type` is not provided, and it cannot be guessed from the `define_table_old` and
            `define_table_new` objects.
    """
    if not db_type:
        db_type = getattr(define_table_old._db, "_dbname", None) or getattr(define_table_new._db, "_dbname", None)

        if db_type is None:
            raise ValueError("Database dialect could not be guessed from code; Please manually define a database type!")

    statements: list[str] = []

    # Deliberately uses a different (temporary) db_folder than the new table,
    # so the old `.table` file cannot collide with the new one.
    old_fields = sql_fields_through_tablefile(define_table_old, db_type=db_type, db_folder=None)

    with TempdirOrExistingDir(db_folder) as db_folder:
        db_folder_path = Path(db_folder)
        new_fields = sql_fields_through_tablefile(define_table_new, db_type=db_type, db_folder=db_folder)

        migrator = _build_dummy_migrator(db_type, db_folder=db_folder)

        # pydal appends the generated statements to sql.log; start from a clean file.
        sql_log = db_folder_path / "sql.log"
        sql_log.unlink(missing_ok=True)  # remove old crap

        # Temporarily point both tables at the dummy migrator's db so
        # migrate_table resolves them against the chosen dialect.
        original_db_old = define_table_old._db
        original_db_new = define_table_new._db
        try:
            define_table_old._db = migrator.db
            define_table_new._db = migrator.db

            migrator.migrate_table(
                define_table_new,
                new_fields,
                old_fields,
                new_fields,
                str(db_folder_path / "<deprecated>"),
                fake_migrate=True,
            )

            if not sql_log.exists():
                # no changes!
                return ""

            with sql_log.open() as f:
                # The log also contains bookkeeping lines; keep only the SQL.
                statements.extend(line for line in f if line.startswith(("ALTER", "UPDATE")))
        finally:
            # Always restore the tables' original databases, even on error/early return.
            define_table_new._db = original_db_new
            define_table_old._db = original_db_old

    return "".join(statements)
def generate_sql(
    define_table: Table,
    define_table_new: typing.Optional[Table] = None,
    /,
    db_type: typing.Optional[SUPPORTED_DATABASE_TYPES_WITH_ALIASES] = None,
    *,
    db_folder: typing.Optional[str] = None,
) -> str:
    """
    Generate SQL statements based on the provided `Table` object or a comparison of two `Table` objects.

    If `define_table_new` is provided, the function generates ALTER statements to update `define_table` to
    `define_table_new`. If `define_table_new` is not provided, the function generates CREATE statements for
    `define_table`.

    Args:
        define_table (Table): The `Table` object representing the table to generate SQL for.
        define_table_new (Table, optional): The `Table` object representing the new version of the table
            (used to generate ALTER statements). Defaults to None.
        db_type (str or SUPPORTED_DATABASE_TYPES_WITH_ALIASES, optional): The type of the database (e.g., "postgres",
            "mysql", etc.). If not provided, the database type will be guessed based on the `_db` attribute of the
            `define_table` object. If the guess fails, a ValueError is raised. Defaults to None.
        db_folder (str, optional): The path to the database folder or directory to use. If not specified,
            a temporary directory is used for the operation. Defaults to None.

    Returns:
        str: A string containing the generated SQL statements.

    Raises:
        ValueError: If the `db_type` is not provided, and it cannot be guessed from the `define_table` object.
    """
    # Two tables -> diff them (ALTER); one table -> create it from scratch (CREATE).
    if define_table_new:
        return generate_alter_statement(define_table, define_table_new, db_type=db_type, db_folder=db_folder)
    else:
        return generate_create_statement(define_table, db_type=db_type, db_folder=db_folder)