Coverage for src/pydal2sql_core/core.py: 100%
82 statements
« prev ^ index » next coverage.py v7.2.7, created at 2023-07-31 21:39 +0200
1"""
2Main functionality.
3"""
4import pickle # nosec: B403
5import typing
6from pathlib import Path
8import pydal
9from pydal.adapters import MySQL, Postgre, SQLAdapter, SQLite
10from pydal.migrator import Migrator
11from pydal.objects import Table
13from .helpers import TempdirOrExistingDir, get_typing_args
14from .types import SUPPORTED_DATABASE_TYPES, SUPPORTED_DATABASE_TYPES_WITH_ALIASES
class DummyDAL(pydal.DAL):  # type: ignore
    """
    DAL subclass whose commit is a no-op, so nothing is ever persisted.
    """

    def commit(self) -> None:
        """
        Intentionally do nothing instead of committing the transaction.
        """
        return None
def _build_dummy_migrator(_driver_name: SUPPORTED_DATABASE_TYPES_WITH_ALIASES, /, db_folder: str) -> Migrator:
    """
    Create a Migrator specific to the sql dialect of _driver_name.
    """
    db = DummyDAL(None, migrate=False, folder=db_folder)

    # Map user-facing names/aliases onto the canonical pydal driver names.
    alias_to_driver = {
        "postgresql": "psycopg2",
        "postgres": "psycopg2",
        "psql": "psycopg2",
        "sqlite": "sqlite3",
        "mysql": "pymysql",
    }

    lowered = _driver_name.lower()
    driver_name = alias_to_driver.get(lowered, lowered)

    if driver_name not in get_typing_args(SUPPORTED_DATABASE_TYPES):
        raise ValueError(
            f"Unsupported database type {driver_name}. "
            f"Choose one of {get_typing_args(SUPPORTED_DATABASE_TYPES_WITH_ALIASES)}"
        )

    # Pick the pydal adapter class whose SQL dialect matches the chosen driver.
    dialect_class: typing.Type[SQLAdapter] = {
        "psycopg2": Postgre,
        "sqlite3": SQLite,
        "pymysql": MySQL,
    }[driver_name]

    installed_driver = db._drivers_available.get(driver_name)

    if not installed_driver:  # pragma: no cover
        raise ValueError(f"Please install the correct driver for database type {driver_name}")

    class DummyAdaptor(SQLAdapter):  # type: ignore
        # Borrow the dialect-specific attributes from the real adapter class,
        # but bind the concrete installed driver module.
        types = dialect_class.types
        driver = installed_driver
        dbengine = dialect_class.dbengine

        commit_on_alter_table = True

    adapter = DummyAdaptor(db, "", adapter_args={"driver": installed_driver})
    db._adapter = adapter
    return Migrator(adapter)
def generate_create_statement(
    define_table: Table,
    db_type: typing.Optional[SUPPORTED_DATABASE_TYPES_WITH_ALIASES] = None,
    *,
    db_folder: typing.Optional[str] = None,
) -> str:
    """
    Generate the `CREATE TABLE` SQL for a `Table` object in the requested dialect.

    Given a Table object (result of `db.define_table('mytable')` or simply db.mytable) \
    and a db type (e.g. postgres, sqlite, mysql), generate the `CREATE TABLE` SQL for that dialect.

    If no db_type is supplied, the type is guessed from the specified table.
    However, your db_type can differ from the current database used.
    You can even use a dummy database to generate SQL code with:
    `db = pydal.DAL(None, migrate=False)`

    Args:
        define_table (Table): The `Table` object to generate the CREATE statement for.
        db_type (str or SUPPORTED_DATABASE_TYPES_WITH_ALIASES, optional): The type of the database (e.g., "postgres",
            "mysql", etc.). If not provided, the database type will be guessed based on the `define_table` object.
            If the guess fails, a ValueError is raised. Defaults to None.
        db_folder (str, optional): The database folder where migration (`.table`) files are stored.
            By default, a random temporary dir is created.

    Returns:
        str: The `CREATE TABLE` statement for the requested dialect.

    Raises:
        ValueError: If the `db_type` is not provided, and it cannot be guessed from the `define_table` object.
    """
    if not db_type:
        # fall back to the dialect of the database the table was defined on
        db_type = getattr(define_table._db, "_dbname", None)

        if db_type is None:
            raise ValueError("Database dialect could not be guessed from code; Please manually define a database type!")

    with TempdirOrExistingDir(db_folder) as db_folder:
        migrator = _build_dummy_migrator(db_type, db_folder=db_folder)

        # fake_migrate ensures no real DDL is executed; only the SQL string is produced
        sql: str = migrator.create_table(
            define_table,
            migrate=True,
            fake_migrate=True,
        )
    return sql
def sql_fields_through_tablefile(
    define_table: Table,
    db_folder: typing.Optional[str | Path] = None,
    db_type: SUPPORTED_DATABASE_TYPES_WITH_ALIASES = None,
) -> dict[str, typing.Any]:
    """
    Produce the SQL field definitions for `define_table` by running a fake migration and reading back the `.table` file.

    Args:
        define_table (Table): The `Table` object representing the table for which SQL fields are generated.
        db_folder (str or Path, optional): The path to the database folder or directory to use. If not specified,
            a temporary directory is used for the operation. Defaults to None.
        db_type (str or SUPPORTED_DATABASE_TYPES_WITH_ALIASES, optional): The type of the database (e.g., "postgres",
            "mysql", etc.). If not provided, the database type will be guessed based on the `define_table` object.
            If the guess fails, a ValueError is raised. Defaults to None.

    Returns:
        dict[str, typing.Any]: Mapping of field names to the field information pydal stored in the table file.

    Raises:
        ValueError: If the `db_type` is not provided, and it cannot be guessed from the `define_table` object.
    """
    if not db_type:
        db_type = getattr(define_table._db, "_dbname", None)

        if db_type is None:
            raise ValueError("Database dialect could not be guessed from code; Please manually define a database type!")

    with TempdirOrExistingDir(db_folder) as folder:
        migrator = _build_dummy_migrator(db_type, db_folder=folder)

        # fake migration: writes the .table metadata file without touching a real database
        migrator.create_table(define_table, migrate=True, fake_migrate=True)

        table_file = Path(folder) / define_table._dbt
        with table_file.open("rb") as handle:
            loaded_tables = pickle.load(handle)  # nosec B301

        return typing.cast(dict[str, typing.Any], loaded_tables)
def generate_alter_statement(
    define_table_old: Table,
    define_table_new: Table,
    /,
    db_type: SUPPORTED_DATABASE_TYPES_WITH_ALIASES = None,
    *,
    db_folder: str = None,
) -> str:
    """
    Generate SQL ALTER statements to update the `define_table_old` to `define_table_new`.

    Args:
        define_table_old (Table): The `Table` object representing the old version of the table.
        define_table_new (Table): The `Table` object representing the new version of the table.
        db_type (str or SUPPORTED_DATABASE_TYPES_WITH_ALIASES, optional): The type of the database (e.g., "postgres",
            "mysql", etc.). If not provided, the database type will be guessed based on the `_db` attribute of the
            `define_table_old` and `define_table_new` objects.
            If the guess fails, a ValueError is raised. Defaults to None.
        db_folder (str, optional): The path to the database folder or directory to use. If not specified,
            a temporary directory is used for the operation. Defaults to None.

    Returns:
        str: A string containing SQL ALTER statements that update the `define_table_old` to `define_table_new`.

    Raises:
        ValueError: If the `db_type` is not provided, and it cannot be guessed from the `define_table_old` and
            `define_table_new` objects.
    """
    if not db_type:
        db_type = getattr(define_table_old._db, "_dbname", None) or getattr(define_table_new._db, "_dbname", None)

        if db_type is None:
            raise ValueError("Database dialect could not be guessed from code; Please manually define a database type!")

    statements: list[str] = []

    # old fields must be computed in a separate db_folder from the new ones!
    old_fields = sql_fields_through_tablefile(define_table_old, db_type=db_type, db_folder=None)

    with TempdirOrExistingDir(db_folder) as folder:
        folder_path = Path(folder)
        new_fields = sql_fields_through_tablefile(define_table_new, db_type=db_type, db_folder=folder)

        migrator = _build_dummy_migrator(db_type, db_folder=folder)

        # pydal appends executed DDL to sql.log; start from a clean slate
        sql_log = folder_path / "sql.log"
        sql_log.unlink(missing_ok=True)  # remove old crap

        saved_db_old = define_table_old._db
        saved_db_new = define_table_new._db
        try:
            # temporarily point both tables at the dummy adapter's db
            define_table_old._db = migrator.db
            define_table_new._db = migrator.db

            migrator.migrate_table(
                define_table_new,
                new_fields,
                old_fields,
                new_fields,
                str(folder_path / "<deprecated>"),
                fake_migrate=True,
            )

            if not sql_log.exists():
                # no changes!
                return ""

            with sql_log.open() as log_file:
                # keep only the actual DDL/DML lines pydal logged
                statements.extend(line for line in log_file if line.startswith(("ALTER", "UPDATE")))
        finally:
            # always restore the original db bindings, even on error
            define_table_new._db = saved_db_new
            define_table_old._db = saved_db_old

    return "".join(statements)
def generate_sql(
    define_table: Table,
    define_table_new: typing.Optional[Table] = None,
    /,
    db_type: SUPPORTED_DATABASE_TYPES_WITH_ALIASES = None,
    *,
    db_folder: str = None,
) -> str:
    """
    Generate SQL statements based on the provided `Table` object or a comparison of two `Table` objects.

    If `define_table_new` is provided, the function generates ALTER statements to update `define_table` to
    `define_table_new`. If `define_table_new` is not provided, the function generates CREATE statements for
    `define_table`.

    Args:
        define_table (Table): The `Table` object representing the table to generate SQL for.
        define_table_new (Table, optional): The `Table` object representing the new version of the table
            (used to generate ALTER statements). Defaults to None.
        db_type (str or SUPPORTED_DATABASE_TYPES_WITH_ALIASES, optional): The type of the database (e.g., "postgres",
            "mysql", etc.). If not provided, the database type will be guessed based on the `_db` attribute of the
            `define_table` object. If the guess fails, a ValueError is raised. Defaults to None.
        db_folder (str, optional): The path to the database folder or directory to use. If not specified,
            a temporary directory is used for the operation. Defaults to None.

    Returns:
        str: A string containing the generated SQL statements.

    Raises:
        ValueError: If the `db_type` is not provided, and it cannot be guessed from the `define_table` object.
    """
    # two tables -> diff them (ALTER); one table -> fresh schema (CREATE)
    return (
        generate_alter_statement(define_table, define_table_new, db_type=db_type, db_folder=db_folder)
        if define_table_new
        else generate_create_statement(define_table, db_type=db_type, db_folder=db_folder)
    )