Coverage for src/pydal2sql_core/core.py: 100%
84 statements
« prev ^ index » next coverage.py v7.2.7, created at 2023-11-20 11:58 +0100
1"""
2Main functionality.
3"""
4import pickle # nosec: B403
5import typing
6from pathlib import Path
7from typing import Any
9from pydal.adapters import MySQL, Postgre, SQLite
10from pydal.dialects import Dialect, MySQLDialect, PostgreDialect, SQLiteDialect
11from pydal.migrator import Migrator
12from pydal.objects import Table
14from .helpers import TempdirOrExistingDir, get_typing_args
15from .types import (
16 SUPPORTED_DATABASE_TYPES,
17 SUPPORTED_DATABASE_TYPES_WITH_ALIASES,
18 CustomAdapter,
19 DummyDAL,
20 SQLAdapter,
21)
def _build_dummy_migrator(_driver_name: SUPPORTED_DATABASE_TYPES_WITH_ALIASES, /, db_folder: str) -> Migrator:
    """
    Build a Migrator wired to the SQL dialect matching `_driver_name`.

    A throwaway DAL (no real connection, `migrate=False`) is created so pydal's
    adapter machinery can be instantiated without touching an actual database.
    """
    dummy_db = DummyDAL(None, migrate=False, folder=db_folder)

    # Map user-facing names onto pydal's canonical driver names.
    canonical_names = {
        "postgresql": "psycopg2",
        "postgres": "psycopg2",
        "psql": "psycopg2",
        "sqlite": "sqlite3",
        "mysql": "pymysql",
    }

    driver_name = canonical_names.get(_driver_name.lower(), _driver_name.lower())

    if driver_name not in get_typing_args(SUPPORTED_DATABASE_TYPES):
        raise ValueError(
            f"Unsupported database type {driver_name}. "
            f"Choose one of {get_typing_args(SUPPORTED_DATABASE_TYPES_WITH_ALIASES)}"
        )

    # One (adapter class, dialect class) pair per supported driver.
    engines: dict[str, tuple[typing.Type[SQLAdapter], typing.Type[Dialect]]] = {
        "psycopg2": (Postgre, PostgreDialect),
        "sqlite3": (SQLite, SQLiteDialect),
        "pymysql": (MySQL, MySQLDialect),
    }
    adapter_cls, sql_dialect = engines[driver_name]

    installed_driver = dummy_db._drivers_available.get(driver_name)
    if not installed_driver:  # pragma: no cover
        raise ValueError(f"Please install the correct driver for database type {driver_name}")

    class DummyAdapter(CustomAdapter):
        # Borrow type/engine metadata from the real adapter class while
        # keeping the dummy (connectionless) behavior of CustomAdapter.
        types = adapter_cls.types
        driver = installed_driver
        dbengine = adapter_cls.dbengine

        commit_on_alter_table = True

    adapter = DummyAdapter(dummy_db, "", adapter_args={"driver": installed_driver})
    adapter.dialect = sql_dialect(adapter)
    dummy_db._adapter = adapter

    return Migrator(adapter)
def generate_create_statement(
    define_table: Table,
    db_type: typing.Optional[SUPPORTED_DATABASE_TYPES_WITH_ALIASES] = None,
    *,
    db_folder: typing.Optional[str] = None,
) -> str:
    """
    Given a Table object (result of `db.define_table('mytable')` or simply db.mytable) \
    and a db type (e.g. postgres, sqlite, mysql), generate the `CREATE TABLE` SQL for that dialect.

    If no db_type is supplied, the type is guessed from the specified table.
    However, your db_type can differ from the current database used.
    You can even use a dummy database to generate SQL code with:
    `db = pydal.DAL(None, migrate=False)`

    db_folder is the database folder where migration (`.table`) files are stored.
    By default, a random temporary dir is created.

    Args:
        define_table (Table): the table to generate a CREATE statement for.
        db_type: target SQL dialect; guessed from `define_table._db` when omitted.
        db_folder: existing folder for `.table` files; a temp dir when omitted.

    Returns:
        str: the `CREATE TABLE` SQL for the requested dialect.

    Raises:
        ValueError: if `db_type` is omitted and cannot be guessed from the table.
    """
    # Annotations fixed to explicit Optional (PEP 484): both parameters default to None.
    if not db_type:
        db_type = getattr(define_table._db, "_dbname", None)

        if db_type is None:
            raise ValueError("Database dialect could not be guessed from code; Please manually define a database type!")

    with TempdirOrExistingDir(db_folder) as db_folder:
        migrator = _build_dummy_migrator(db_type, db_folder=db_folder)

        sql: str = migrator.create_table(
            define_table,
            migrate=False,  # do not actually modify anything
            fake_migrate=True,  # produce SQL without running it
        )
        return sql
def sql_fields_through_tablefile(
    define_table: Table,
    db_folder: typing.Optional[str | Path] = None,
    db_type: SUPPORTED_DATABASE_TYPES_WITH_ALIASES = None,
) -> dict[str, Any]:
    """
    Generate SQL fields for the given `Table` object by simulating migration via a table file.

    Args:
        define_table (Table): The `Table` object representing the table for which SQL fields are generated.
        db_folder (str or Path, optional): The path to the database folder or directory to use. If not specified,
            a temporary directory is used for the operation. Defaults to None.
        db_type (str or SUPPORTED_DATABASE_TYPES_WITH_ALIASES, optional): The type of the database (e.g., "postgres",
            "mysql", etc.). If not provided, the database type will be guessed based on the `define_table` object.
            If the guess fails, a ValueError is raised. Defaults to None.

    Returns:
        dict[str, Any]: A dictionary containing the generated SQL fields for the `Table` object. The keys
            of the dictionary are field names, and the values are additional field information.

    Raises:
        ValueError: If the `db_type` is not provided, and it cannot be guessed from the `define_table` object.
    """
    # Fall back to the dialect recorded on the table's DAL instance.
    db_type = db_type or getattr(define_table._db, "_dbname", None)
    if db_type is None:
        raise ValueError("Database dialect could not be guessed from code; Please manually define a database type!")

    with TempdirOrExistingDir(db_folder) as folder:
        migrator = _build_dummy_migrator(db_type, db_folder=folder)

        # Fake-migrate so pydal writes the `.table` pickle without running SQL.
        migrator.create_table(
            define_table,
            migrate=True,
            fake_migrate=True,
        )

        table_file = Path(folder) / define_table._dbt
        with table_file.open("rb") as handle:
            loaded_fields = pickle.load(handle)  # nosec B301

    return typing.cast(dict[str, Any], loaded_fields)
def generate_alter_statement(
    define_table_old: Table,
    define_table_new: Table,
    /,
    db_type: typing.Optional[SUPPORTED_DATABASE_TYPES_WITH_ALIASES] = None,
    *,
    db_folder: typing.Optional[str] = None,
) -> str:
    """
    Generate SQL ALTER statements to update the `define_table_old` to `define_table_new`.

    Args:
        define_table_old (Table): The `Table` object representing the old version of the table.
        define_table_new (Table): The `Table` object representing the new version of the table.
        db_type (str or SUPPORTED_DATABASE_TYPES_WITH_ALIASES, optional): The type of the database (e.g., "postgres",
            "mysql", etc.). If not provided, the database type will be guessed based on the `_db` attribute of the
            `define_table_old` and `define_table_new` objects.
            If the guess fails, a ValueError is raised. Defaults to None.
        db_folder (str, optional): The path to the database folder or directory to use. If not specified,
            a temporary directory is used for the operation. Defaults to None.

    Returns:
        str: A string containing SQL ALTER statements that update the `define_table_old` to `define_table_new`.

    Raises:
        ValueError: If the `db_type` is not provided, and it cannot be guessed from the `define_table_old` and
            `define_table_new` objects.
    """
    # Annotations fixed to explicit Optional (PEP 484): both optional parameters default to None.
    if not db_type:
        db_type = getattr(define_table_old._db, "_dbname", None) or getattr(define_table_new._db, "_dbname", None)

        if db_type is None:
            raise ValueError("Database dialect could not be guessed from code; Please manually define a database type!")

    # collect matching lines instead of O(n^2) string concatenation
    result_lines: list[str] = []

    # other db_folder than new!
    old_fields = sql_fields_through_tablefile(define_table_old, db_type=db_type, db_folder=None)

    with TempdirOrExistingDir(db_folder) as db_folder:
        db_folder_path = Path(db_folder)
        new_fields = sql_fields_through_tablefile(define_table_new, db_type=db_type, db_folder=db_folder)

        migrator = _build_dummy_migrator(db_type, db_folder=db_folder)

        sql_log = db_folder_path / "sql.log"
        sql_log.unlink(missing_ok=True)  # remove old crap

        # temporarily point both tables at the migrator's dummy DAL so
        # migrate_table resolves the right adapter/dialect; restored in finally.
        original_db_old = define_table_old._db
        original_db_new = define_table_new._db
        try:
            define_table_old._db = migrator.db
            define_table_new._db = migrator.db

            migrator.migrate_table(
                define_table_new,
                new_fields,
                old_fields,
                new_fields,
                str(db_folder_path / "<deprecated>"),
                fake_migrate=True,
            )

            if not sql_log.exists():
                # no changes!
                return ""

            with sql_log.open() as f:
                for line in f:
                    # only the actual schema-changing statements are relevant
                    if line.startswith(("ALTER", "UPDATE")):
                        result_lines.append(line)
        finally:
            define_table_new._db = original_db_new
            define_table_old._db = original_db_old

    return "".join(result_lines)
def generate_sql(
    define_table: Table,
    define_table_new: typing.Optional[Table] = None,
    /,
    db_type: typing.Optional[SUPPORTED_DATABASE_TYPES_WITH_ALIASES] = None,
    *,
    db_folder: typing.Optional[str] = None,
) -> str:
    """
    Generate SQL statements based on the provided `Table` object or a comparison of two `Table` objects.

    If `define_table_new` is provided, the function generates ALTER statements to update `define_table` to
    `define_table_new`. If `define_table_new` is not provided, the function generates CREATE statements for
    `define_table`.

    Args:
        define_table (Table): The `Table` object representing the table to generate SQL for.
        define_table_new (Table, optional): The `Table` object representing the new version of the table
            (used to generate ALTER statements). Defaults to None.
        db_type (str or SUPPORTED_DATABASE_TYPES_WITH_ALIASES, optional): The type of the database (e.g., "postgres",
            "mysql", etc.). If not provided, the database type will be guessed based on the `_db` attribute of the
            `define_table` object. If the guess fails, a ValueError is raised. Defaults to None.
        db_folder (str, optional): The path to the database folder or directory to use. If not specified,
            a temporary directory is used for the operation. Defaults to None.

    Returns:
        str: A string containing the generated SQL statements.

    Raises:
        ValueError: If the `db_type` is not provided, and it cannot be guessed from the `define_table` object.
    """
    # Annotations fixed to explicit Optional (PEP 484): all defaulted parameters accept None.
    if define_table_new:
        return generate_alter_statement(define_table, define_table_new, db_type=db_type, db_folder=db_folder)
    else:
        return generate_create_statement(define_table, db_type=db_type, db_folder=db_folder)