Coverage for src/typedal/cli.py: 93%
92 statements
coverage.py v7.4.4, created at 2024-04-02 16:19 +0200
1"""
2Typer CLI for TypeDAL.
3"""
5import fnmatch
6import sys
7import typing
8import warnings
9from pathlib import Path
10from typing import Optional
12import tomli
13from configuraptor import asdict
14from configuraptor.alias import is_alias
15from configuraptor.helpers import is_optional
17from .types import AnyDict
19try:
20 import edwh_migrate
21 import pydal2sql # noqa: F401
22 import questionary
23 import rich
24 import tomlkit
25 import typer
26 from tabulate import tabulate
27except ImportError as e: # pragma: no cover
28 # ImportWarning is hidden by default
29 warnings.warn(
30 "`migrations` extra not installed. Please run `pip install typedal[migrations]` to fix this.",
31 source=e,
32 category=RuntimeWarning,
33 )
34 exit(127) # command not found
36from pydal2sql.typer_support import IS_DEBUG, with_exit_code
37from pydal2sql.types import (
38 DBType_Option,
39 OptionalArgument,
40 OutputFormat_Option,
41 Tables_Option,
42)
43from pydal2sql_core import core_alter, core_create
44from typing_extensions import Never
46from . import caching
47from .__about__ import __version__
48from .config import TypeDALConfig, _fill_defaults, load_config, transform
49from .core import TypeDAL
51app = typer.Typer(
52 no_args_is_help=True,
53)
55questionary_types: dict[typing.Hashable, Optional[AnyDict]] = {
56 str: {
57 "type": "text",
58 "validate": lambda text: True if len(text) > 0 else "Please enter a value",
59 },
60 Optional[str]: {
61 "type": "text",
62 # no validate because it's optional
63 },
64 bool: {
65 "type": "confirm",
66 },
67 int: {"type": "text", "validate": lambda text: True if text.isdigit() else "Please enter a number"},
68 # specific props:
69 "dialect": {
70 "type": "select",
71 "choices": ["sqlite", "postgres", "mysql"],
72 },
73 "folder": {
74 "type": "path",
75 "message": "Database directory:",
76 "only_directories": True,
77 # "default": "",
78 },
79 "input": {
80 "type": "path",
81 "message": "Python file containing table definitions.",
82 "file_filter": lambda file: "." not in file or file.endswith(".py"),
83 },
84 "output": {
85 "type": "path",
86 "message": "Python file where migrations will be written to.",
87 "file_filter": lambda file: "." not in file or file.endswith(".py"),
88 },
89 # disabled props:
90 "pyproject": None, # internal
91 "noop": None, # only for debugging
92 "connection": None, # internal
93 "migrate": None, # will probably conflict
94 "fake_migrate": None, # only enable via config if required
95}
T = typing.TypeVar("T")

notfound = object()


def _get_question(prop: str, annotation: typing.Type[T]) -> Optional[AnyDict]:  # pragma: no cover
    question = questionary_types.get(prop, notfound)
    if question is notfound:
        # None means skip the question, notfound means use the type default!
        question = questionary_types.get(annotation)  # type: ignore

    if not question:
        return None

    # make a copy so the original is not overwritten:
    return question.copy()  # type: ignore


def get_question(prop: str, annotation: typing.Type[T], default: T | None) -> Optional[T]:  # pragma: no cover
    """
    Generate a question based on a config property and prompt the user for it.
    """
    if not (question := _get_question(prop, annotation)):
        return default

    question["name"] = prop
    question["message"] = question.get("message", f"{prop}? ")
    default = typing.cast(T, default or question.get("default") or "")

    if annotation == int:
        default = typing.cast(T, str(default))

    response = questionary.unsafe_prompt([question], default=default)[prop]
    return typing.cast(T, response)
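# Illustrative example (not part of the CLI commands): for the "dialect" property, the
# "select" question defined in `questionary_types` above is used, so a call such as
#   get_question("dialect", str, "sqlite")
# would prompt the user to pick one of "sqlite", "postgres" or "mysql"
# (assuming an interactive terminal; questionary prompts cannot run unattended).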
@app.command()
@with_exit_code(hide_tb=IS_DEBUG)
def setup(
    config_file: typing.Annotated[Optional[str], typer.Option("--config", "-c")] = None,
    minimal: bool = False,
) -> None:  # pragma: no cover
    """
    Setup a [tool.typedal] entry in the local pyproject.toml.
    """
    # 1. check if [tool.typedal] in pyproject.toml and ask missing questions (excl .env vars)
    # 2. else if [tool.migrate] and/or [tool.pydal2sql] exist in the config, ask the user with copied defaults
    # 3. else: ask the user every question or minimal questions based on cli arg

    config = load_config(config_file)

    toml_path = Path(config.pyproject)

    if not (config.pyproject and toml_path.exists()):
        # no pyproject.toml found!
        toml_path = toml_path if config.pyproject else Path("pyproject.toml")
        rich.print(f"[blue]Config toml doesn't exist yet, creating {toml_path}[/blue]", file=sys.stderr)
        toml_path.touch()

    toml_contents = toml_path.read_text()
    # tomli has native Python types, tomlkit doesn't but preserves comments
    toml_obj: AnyDict = tomli.loads(toml_contents)

    if "[tool.pydal2sql]" in toml_contents:
        mapping = {"": ""}  # <- placeholder

        extra_config = toml_obj["tool"]["pydal2sql"]
        extra_config = {mapping.get(k, k): v for k, v in extra_config.items()}
        extra_config.pop("format", None)  # always edwh-migrate
        config.update(**extra_config)

    if "[tool.migrate]" in toml_contents:
        mapping = {"migrate_uri": "database"}

        extra_config = toml_obj["tool"]["migrate"]
        extra_config = {mapping.get(k, k): v for k, v in extra_config.items()}

        config.update(**extra_config)

    if "[tool.typedal]" in toml_contents:
        section = toml_obj["tool"]["typedal"]
        config.update(**section, _overwrite=True)

    data = asdict(config, with_top_level_key=False)
    data["migrate"] = None  # determined based on existence of input/output file.

    for prop, annotation in TypeDALConfig.__annotations__.items():
        if is_alias(config.__class__, prop):
            # don't store aliases!
            data.pop(prop, None)
            continue

        if minimal and getattr(config, prop, None) not in (None, "") or is_optional(annotation):
            # property already present or not required, SKIP!
            data[prop] = getattr(config, prop, None)
            continue

        _fill_defaults(data, prop, data.get(prop))
        default_value = data.get(prop, None)
        answer: typing.Any = get_question(prop, annotation, default_value)

        if isinstance(answer, str):
            answer = answer.strip()

        if annotation == bool:
            answer = bool(answer)
        elif annotation == int:
            answer = int(answer)

        config.update(**{prop: answer})
        data[prop] = answer

    for prop in TypeDALConfig.__annotations__:
        transform(data, prop)

    with toml_path.open("r") as f:
        old_contents: AnyDict = tomlkit.load(f)

    if "tool" not in old_contents:
        old_contents["tool"] = {}

    data.pop("pyproject", None)
    data.pop("connection", None)

    # ignore any None:
    old_contents["tool"]["typedal"] = {k: v for k, v in data.items() if v is not None}

    with toml_path.open("w") as f:
        tomlkit.dump(old_contents, f)

    rich.print(f"[green]Wrote updated config to {toml_path}![/green]")
229@app.command(name="migrations.generate")
230@with_exit_code(hide_tb=IS_DEBUG)
231def generate_migrations(
232 connection: typing.Annotated[str, typer.Option("--connection", "-c")] = None,
233 filename_before: OptionalArgument[str] = None,
234 filename_after: OptionalArgument[str] = None,
235 dialect: DBType_Option = None,
236 tables: Tables_Option = None,
237 magic: Optional[bool] = None,
238 noop: Optional[bool] = None,
239 function: Optional[str] = None,
240 output_format: OutputFormat_Option = None,
241 output_file: Optional[str] = None,
242 dry_run: bool = False,
243) -> bool: # pragma: no cover
244 """
245 Run pydal2sql based on the typedal config.
246 """
247 # 1. choose CREATE or ALTER based on whether 'output' exists?
248 # 2. pass right args based on 'config' to function chosen in 1.
249 generic_config = load_config(connection)
250 pydal2sql_config = generic_config.to_pydal2sql()
251 pydal2sql_config.update(
252 magic=magic,
253 noop=noop,
254 tables=tables,
255 db_type=dialect.value if dialect else None,
256 function=function,
257 format=output_format,
258 input=filename_before,
259 output=output_file,
260 _skip_none=True,
261 )
263 if pydal2sql_config.output and Path(pydal2sql_config.output).exists():
264 if dry_run:
265 print("Would run `pyda2sql alter` with config", asdict(pydal2sql_config), file=sys.stderr)
266 sys.stderr.flush()
268 return True
269 else: # pragma: no cover
270 return core_alter(
271 pydal2sql_config.input,
272 filename_after or pydal2sql_config.input,
273 db_type=pydal2sql_config.db_type,
274 tables=pydal2sql_config.tables,
275 noop=pydal2sql_config.noop,
276 magic=pydal2sql_config.magic,
277 function=pydal2sql_config.function,
278 output_format=pydal2sql_config.format,
279 output_file=pydal2sql_config.output,
280 )
281 else:
282 if dry_run:
283 print("Would run `pyda2sql create` with config", asdict(pydal2sql_config), file=sys.stderr)
284 sys.stderr.flush()
286 return True
287 else: # pragma: no cover
288 return core_create(
289 filename=pydal2sql_config.input,
290 db_type=pydal2sql_config.db_type,
291 tables=pydal2sql_config.tables,
292 noop=pydal2sql_config.noop,
293 magic=pydal2sql_config.magic,
294 function=pydal2sql_config.function,
295 output_format=pydal2sql_config.format,
296 output_file=pydal2sql_config.output,
297 )
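# Illustrative usage:
#   typedal migrations.generate --dry-run   # print the pydal2sql config instead of generating SQL
#   typedal migrations.generate             # CREATE or ALTER, depending on whether the output file exists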
300@app.command(name="migrations.run")
301@with_exit_code(hide_tb=IS_DEBUG)
302def run_migrations(
303 connection: typing.Annotated[str, typer.Option("--connection", "-c")] = None,
304 migrations_file: OptionalArgument[str] = None,
305 db_uri: Optional[str] = None,
306 db_folder: Optional[str] = None,
307 schema_version: Optional[str] = None,
308 redis_host: Optional[str] = None,
309 migrate_cat_command: Optional[str] = None,
310 database_to_restore: Optional[str] = None,
311 migrate_table: Optional[str] = None,
312 flag_location: Optional[str] = None,
313 schema: Optional[str] = None,
314 create_flag_location: Optional[bool] = None,
315 dry_run: bool = False,
316) -> bool: # pragma: no cover
317 """
318 Run edwh-migrate based on the typedal config.
319 """
320 # 1. build migrate Config from TypeDAL config
321 # 2. import right file
322 # 3. `activate_migrations`
323 generic_config = load_config(connection)
324 migrate_config = generic_config.to_migrate()
326 migrate_config.update(
327 migrate_uri=db_uri,
328 schema_version=schema_version,
329 redis_host=redis_host,
330 migrate_cat_command=migrate_cat_command,
331 database_to_restore=database_to_restore,
332 migrate_table=migrate_table,
333 flag_location=flag_location,
334 schema=schema,
335 create_flag_location=create_flag_location,
336 db_folder=db_folder,
337 migrations_file=migrations_file,
338 _skip_none=True,
339 )
341 if dry_run:
342 print("Would run `migrate` with config", asdict(migrate_config), file=sys.stderr)
343 else: # pragma: no cover
344 edwh_migrate.console_hook([], config=migrate_config)
345 return True
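# Illustrative usage:
#   typedal migrations.run --dry-run   # show the edwh-migrate config without applying anything
#   typedal migrations.run             # apply pending migrations via edwh_migrate.console_hook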
def match_strings(patterns: list[str] | str, string_list: list[str]) -> list[str]:
    """
    Glob but on a list of strings.
    """
    if isinstance(patterns, str):
        patterns = [patterns]

    matches = []
    for pattern in patterns:
        matches.extend([s for s in string_list if fnmatch.fnmatch(s, pattern)])

    return matches
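# Illustrative example:
#   match_strings("2024*", ["2024_feature_x", "2023_feature_y"])  # -> ["2024_feature_x"]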
362@app.command(name="migrations.fake")
363@with_exit_code(hide_tb=IS_DEBUG)
364def fake_migrations(
365 names: typing.Annotated[list[str], typer.Argument()] = None,
366 all: bool = False, # noqa: A002
367 connection: typing.Annotated[str, typer.Option("--connection", "-c")] = None,
368 migrations_file: Optional[str] = None,
369 db_uri: Optional[str] = None,
370 db_folder: Optional[str] = None,
371 migrate_table: Optional[str] = None,
372 dry_run: bool = False,
373) -> int: # pragma: no cover
374 """
375 Mark one or more migrations as completed in the database, without executing the SQL code.
377 glob is supported in 'names'
378 """
379 if not (names or all):
380 rich.print("Please provide one or more migration names, or pass --all to fake all.")
381 return 1
383 generic_config = load_config(connection)
384 migrate_config = generic_config.to_migrate()
386 migrate_config.update(
387 migrate_uri=db_uri,
388 migrate_table=migrate_table,
389 db_folder=db_folder,
390 migrations_file=migrations_file,
391 _skip_none=True,
392 )
394 migrations = edwh_migrate.list_migrations(migrate_config)
396 migration_names = list(migrations.keys())
398 to_fake = migration_names if all else match_strings(names or [], migration_names)
400 try:
401 db = edwh_migrate.setup_db(config=migrate_config)
402 except edwh_migrate.migrate.DatabaseNotYetInitialized:
403 db = edwh_migrate.setup_db(
404 config=migrate_config, migrate=True, migrate_enabled=True, remove_migrate_tablefile=True
405 )
407 previously_migrated = (
408 db(
409 db.ewh_implemented_features.name.belongs(to_fake)
410 & (db.ewh_implemented_features.installed == True) # noqa E712
411 )
412 .select(db.ewh_implemented_features.name)
413 .column("name")
414 )
416 if dry_run:
417 rich.print("Would migrate these:", [_ for _ in to_fake if _ not in previously_migrated])
418 return 0
420 n = len(to_fake)
421 print(f"{len(previously_migrated)} / {n} were already installed.")
423 for name in to_fake:
424 if name in previously_migrated:
425 continue
427 edwh_migrate.mark_migration(db, name=name, installed=True)
429 db.commit()
430 rich.print(f"Faked {n} new migrations.")
431 return 0
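# Illustrative usage (names are matched as glob patterns via match_strings above):
#   typedal migrations.fake "feature_*"       # mark matching migrations as installed without running them
#   typedal migrations.fake --all --dry-run   # show which migrations would be faked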
AnyNestedDict: typing.TypeAlias = dict[str, AnyDict]


def tabulate_data(data: AnyNestedDict) -> None:
    """
    Print a nested dict of data in a nice, human-readable table.
    """
    flattened_data = []
    for key, inner_dict in data.items():
        temp_dict = {"": key}
        temp_dict.update(inner_dict)
        flattened_data.append(temp_dict)

    # Display the tabulated data from the transposed dictionary
    print(tabulate(flattened_data, headers="keys"))
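# Illustrative example: tabulate_data({"user": {"rows": 10, "cached": 3}}) prints roughly
#
#           rows    cached
#     ----  ------  --------
#     user      10         3
#
# (exact alignment depends on tabulate's default table format).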
FormatOptions: typing.TypeAlias = typing.Literal["plaintext", "json", "yaml", "toml"]


def get_output_format(fmt: FormatOptions) -> typing.Callable[[AnyNestedDict], None]:
    """
    This function takes a format option as input and \
    returns a function that can be used to output data in the specified format.
    """
    match fmt:
        case "plaintext":
            output = tabulate_data
        case "json":

            def output(_data: AnyDict | AnyNestedDict) -> None:
                import json

                print(json.dumps(_data, indent=2))

        case "yaml":

            def output(_data: AnyDict | AnyNestedDict) -> None:
                import yaml

                print(yaml.dump(_data))

        case "toml":

            def output(_data: AnyDict | AnyNestedDict) -> None:
                import tomli_w

                print(tomli_w.dumps(_data))

        case _:
            options = typing.get_args(FormatOptions)
            raise ValueError(f"Invalid format '{fmt}'. Please choose one of {options}.")

    return output
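# Illustrative example:
#   get_output_format("json")({"user": {"entries": 3}})   # prints the nested dict as indented JSON
# The "yaml" and "toml" branches import yaml / tomli_w lazily, so those packages are only
# needed when that format is actually requested.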
490@app.command(name="cache.stats")
491@with_exit_code(hide_tb=IS_DEBUG)
492def cache_stats(
493 identifier: typing.Annotated[str, typer.Argument()] = "",
494 connection: typing.Annotated[str, typer.Option("--connection", "-c")] = None,
495 fmt: typing.Annotated[
496 str, typer.Option("--format", "--fmt", "-f", help="plaintext (default) or json")
497 ] = "plaintext",
498) -> None: # pragma: no cover
499 """
500 Collect caching stats.
502 Examples:
503 typedal cache.stats
504 typedal cache.stats user
505 typedal cache.stats user.3
506 """
507 config = load_config(connection)
508 db = TypeDAL(config=config, migrate=False, fake_migrate=False)
510 output = get_output_format(typing.cast(FormatOptions, fmt))
512 data: AnyDict
513 parts = identifier.split(".")
514 match parts:
515 case [] | [""]:
516 # generic stats
517 data = caching.calculate_stats(db) # type: ignore
518 case [table]:
519 # table stats
520 data = caching.table_stats(db, table) # type: ignore
521 case [table, row_id]:
522 # row stats
523 data = caching.row_stats(db, table, row_id) # type: ignore
524 case _:
525 raise ValueError("Please use the format `table` or `table.id` for this command.")
527 output(data)
529 # todo:
530 # - sort by most dependencies
531 # - sort by biggest data
532 # - include size for table_stats, row_stats
533 # - group by table
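# Illustrative usage:
#   typedal cache.stats                   # overall cache statistics
#   typedal cache.stats user --fmt json   # stats for the `user` table, rendered as JSON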
536@app.command(name="cache.clear")
537@with_exit_code(hide_tb=IS_DEBUG)
538def cache_clear(
539 connection: typing.Annotated[str, typer.Option("--connection", "-c")] = None,
540 purge: typing.Annotated[bool, typer.Option("--all", "--purge", "-p")] = False,
541) -> None: # pragma: no cover
542 """
543 Clear (expired) items from the cache.
545 Args:
546 connection (optional): [tool.typedal.<connection>]
547 purge (default: no): remove all items, not only expired
548 """
549 config = load_config(connection)
550 db = TypeDAL(config=config, migrate=False, fake_migrate=False)
552 if purge:
553 caching.clear_cache()
554 print("Emptied cache")
555 else:
556 n = caching.clear_expired()
557 print(f"Removed {n} expired from cache")
559 db.commit()
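# Illustrative usage:
#   typedal cache.clear            # remove only expired cache entries
#   typedal cache.clear --purge    # empty the cache completely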
def version_callback() -> Never:
    """
    --version requested!
    """
    print(f"TypeDAL Version: {__version__}")

    raise typer.Exit(0)
def config_callback() -> Never:
    """
    --show-config requested.
    """
    config = load_config()

    print(repr(config))

    raise typer.Exit(0)


@app.callback(invoke_without_command=True)
def main(
    _: typer.Context,
    # stops the program:
    show_config: bool = False,
    version: bool = False,
) -> None:
589 """
590 This script can be used to generate the create or alter sql from pydal or typedal.
591 """
    if show_config:
        config_callback()
    elif version:
        version_callback()
    # else: just continue


if __name__ == "__main__":  # pragma: no cover
    app()