fancy_dataclass
1# top-level class exports 2from .cli import ArgparseDataclass, CLIDataclass 3from .config import Config, ConfigDataclass, DictConfig 4from .dict import DictDataclass 5from .func import func_dataclass 6from .json import JSONBaseDataclass, JSONDataclass, JSONSerializable 7from .mixin import DataclassMixin 8from .sql import SQLDataclass 9from .subprocess import SubprocessDataclass 10from .toml import TOMLDataclass 11 12 13__version__ = '0.7.3' 14 15__all__ = [ 16 'ArgparseDataclass', 17 'CLIDataclass', 18 'Config', 19 'ConfigDataclass', 20 'DataclassMixin', 21 'DictConfig', 22 'DictDataclass', 23 'JSONBaseDataclass', 24 'JSONDataclass', 25 'JSONSerializable', 26 'SQLDataclass', 27 'SubprocessDataclass', 28 'TOMLDataclass', 29]
class ArgparseDataclass(DataclassMixin):
    """Mixin class providing a means of setting up an [`argparse`](https://docs.python.org/3/library/argparse.html) parser with the dataclass fields, and then converting the namespace of parsed arguments into an instance of the class.

    The parser's argument names and types will be derived from the dataclass's fields.

    Per-field settings can be passed into the `metadata` argument of each `dataclasses.field`. See [`ArgparseDataclassFieldSettings`][fancy_dataclass.cli.ArgparseDataclassFieldSettings] for the full list of settings."""

    __settings_type__ = ArgparseDataclassSettings
    __settings__ = ArgparseDataclassSettings()
    __field_settings_type__ = ArgparseDataclassFieldSettings

    # name of subcommand field, if present
    subcommand_field_name: ClassVar[Optional[str]] = None
    # name of the `argparse.Namespace` attribute associated with the subcommand
    # The convention is for this name to contain both the subcommand name and the class name.
    # This is because nested `ArgparseDataclass` fields may have the same subcommand name, causing conflicts.
    subcommand_dest_name: ClassVar[str]

    @classmethod
    def __init_subclass__(cls, **kwargs: Any) -> None:
        super().__init_subclass__(**kwargs)
        # unique per-class namespace destination, to avoid collisions between nested subcommands
        cls.subcommand_dest_name = f'_subcommand_{cls.__name__}'
        # if command_name was not specified in the settings, use a default name
        if cls.__settings__.command_name is None:
            cls.__settings__.command_name = camel_case_to_kebab_case(cls.__name__)

    @classmethod
    def __post_dataclass_wrap__(cls, wrapped_cls: Type[Self]) -> None:
        # validate subcommand fields once, at class-creation time:
        # at most one field may be marked as a subcommand, and its type must be
        # an ArgparseDataclass or a Union of them with distinct command names
        subcommand = None
        names = set()
        for fld in fields(wrapped_cls):  # type: ignore[arg-type]
            if not fld.metadata.get('subcommand', False):
                continue
            if subcommand is None:
                # check field type is ArgparseDataclass or Union thereof
                subcommand = fld.name
                tp = cast(type, fld.type)
                if issubclass_safe(tp, ArgparseDataclass):
                    continue
                err = TypeError(f'invalid subcommand field {fld.name!r}, type must be an ArgparseDataclass or Union thereof')
                if get_origin(tp) == Union:
                    tp_args = [arg for arg in get_args(tp) if (arg is not type(None))]
                    for arg in tp_args:
                        if not issubclass_safe(arg, ArgparseDataclass):
                            raise err
                        name = arg.__settings__.command_name
                        if name in names:
                            raise TypeError(f'duplicate command name {name!r} in subcommand field {subcommand!r}')
                        names.add(name)
                    continue
                raise err
            raise TypeError(f'multiple fields ({subcommand} and {fld.name}) are registered as subcommands, at most one is allowed')
        # store the name of the subcommand field as a class attribute
        cls.subcommand_field_name = subcommand

    @property
    def subcommand_name(self) -> Optional[str]:
        """Gets the name of the chosen subcommand associated with the type of the object's subcommand field.

        Returns:
            Name of the subcommand, if a subcommand field exists, and `None` otherwise"""
        if self.subcommand_field_name is not None:
            tp: Type[ArgparseDataclass] = type(getattr(self, self.subcommand_field_name))
            return tp.__settings__.command_name
        return None

    @classmethod
    def _parser_description(cls) -> Optional[str]:
        # explicit help description from settings wins; otherwise fall back to the class docstring
        if (descr := cls.__settings__.help_descr) is None:
            return cls.__doc__
        return descr

    @classmethod
    def _parser_description_brief(cls) -> Optional[str]:
        # brief description used for subcommand help lines; derived from the full
        # description by lowercasing the first letter and stripping a trailing period
        if (brief := cls.__settings__.help_descr_brief) is None:
            brief = cls._parser_description()
            if brief:
                brief = brief[0].lower() + brief[1:]
                if brief.endswith('.'):
                    brief = brief[:-1]
        return brief

    @classmethod
    def parser_kwargs(cls) -> Dict[str, Any]:
        """Gets keyword arguments that will be passed to the top-level argument parser.

        Returns:
            Keyword arguments passed upon construction of the `ArgumentParser`"""
        kwargs: Dict[str, Any] = {'description': cls._parser_description()}
        if (fmt_cls := cls.__settings__.formatter_class) is not None:
            kwargs['formatter_class'] = fmt_cls
        return kwargs

    @classmethod
    def _parser_argument_kwarg_names(cls) -> List[str]:
        """Gets keyword argument names that will be passed when adding arguments to the argument parser.

        Returns:
            Keyword argument names passed when adding arguments to the parser"""
        return ['action', 'nargs', 'const', 'choices', 'help', 'metavar']

    @classmethod
    def new_parser(cls) -> ArgumentParser:
        """Constructs a new top-level argument parser.

        Returns:
            New top-level parser derived from the class's fields"""
        return cls.__settings__.parser_class(**cls.parser_kwargs())

    @classmethod
    def configure_argument(cls, parser: ArgParser, name: str) -> None:
        """Given an argument parser and a field name, configures the parser with an argument of that name.

        Attempts to provide reasonable default behavior based on the dataclass field name, type, default, and metadata.

        Subclasses may override this method to implement custom behavior.

        Args:
            parser: parser object to update with a new argument
            name: Name of the argument to configure"""
        def is_nested(tp: type) -> TypeGuard[ArgparseDataclass]:
            return issubclass_safe(tp, ArgparseDataclass)
        kwargs: Dict[str, Any] = {}
        fld = cls.__dataclass_fields__[name]  # type: ignore[attr-defined]
        settings = cls._field_settings(fld).adapt_to(ArgparseDataclassFieldSettings)
        if settings.parse_exclude:  # exclude the argument from the parser
            return
        # determine the type of the parser argument for the field
        tp: type = settings.type or fld.type  # type: ignore[assignment]
        if isinstance(tp, str):  # resolve type
            tp = get_type_hints(cls)[name]
        action = settings.action or 'store'
        origin_type = get_origin(tp)
        if origin_type is not None:  # compound type
            if type_is_optional(tp):
                kwargs['default'] = None
            if origin_type == ClassVar:  # by default, exclude ClassVars from the parser
                return
            tp_args = get_args(tp)
            if tp_args:  # Union/List/Optional
                if origin_type == Union:
                    tp_args = tuple(arg for arg in tp_args if (arg is not type(None)))
                    if (len(tp_args) > 1) and (not settings.subcommand):
                        raise ValueError(f'union type {tp} not allowed as ArgparseDataclass field except as subcommand')
                elif issubclass_safe(origin_type, list) or issubclass_safe(origin_type, tuple):
                    for arg in tp_args:
                        if is_nested(arg):
                            name = f'list of {arg.__name__}' if issubclass_safe(origin_type, list) else f'tuple with {arg}'  # type: ignore[attr-defined]
                            raise ValueError(f'{name} not allowed in ArgparseDataclass parser')
                tp = tp_args[0]
                if origin_type == Literal:  # literal options will become choices
                    tp = type(tp)
                    kwargs['choices'] = tp_args
            else:  # type cannot be inferred
                raise ValueError(f'cannot infer type of items in field {name!r}')
            if issubclass_safe(origin_type, list) and (action == 'store'):
                kwargs['nargs'] = '*'  # allow multiple arguments by default
        if issubclass_safe(tp, IntEnum):
            # use a bare int type
            tp = int
        kwargs['type'] = tp
        # determine the default value
        # (identity comparison with the MISSING sentinel: equality could invoke a
        # default value's custom __eq__ and misclassify it)
        if fld.default is MISSING:
            if fld.default_factory is not MISSING:
                kwargs['default'] = fld.default_factory()
        else:
            kwargs['default'] = fld.default
        # get the names of the arguments associated with the field
        args = settings.args
        if args is not None:
            if isinstance(args, str):
                args = [args]
            # argument is positional if it is explicitly given without a leading dash
            positional = not args[0].startswith('-')
            if (not positional) and ('default' not in kwargs):
                # no default available, so make the field a required option
                kwargs['required'] = True
        else:
            positional = (tp is not bool) and ('default' not in kwargs)
            if positional:
                args = [fld.name]
            else:
                # use a single dash for 1-letter names
                prefix = '-' if (len(fld.name) == 1) else '--'
                argname = fld.name.replace('_', '-')
                args = [prefix + argname]
        if args and (not positional):
            # store the argument based on the name of the field, and not whatever flag name was provided
            kwargs['dest'] = fld.name
        if settings.required is not None:
            kwargs['required'] = settings.required
        has_default = 'default' in kwargs
        default = kwargs.get('default')
        if fld.type is bool:  # use boolean flag instead of an argument
            action = settings.action or 'store_true'
            kwargs['action'] = action
            if isinstance(action, str) and (action not in ['store_true', 'store_false']):
                raise ValueError(f'invalid action {action!r} for boolean flag field {name!r}')
            if default is not None:
                if (action != 'store_false') == default:
                    raise ValueError(f'cannot use default value of {default} for action {action!r} with boolean flag field {name!r}')
            for key in ('type', 'required'):
                with suppress(KeyError):
                    kwargs.pop(key)
        # extract additional items from metadata
        for key in cls._parser_argument_kwarg_names():
            if key in fld.metadata:
                kwargs[key] = fld.metadata[key]
        if kwargs.get('action') == 'store_const':
            del kwargs['type']
        # determine if the field should show its default in the help string
        if cls.__settings__.default_help:
            # include default if there is one, and the flag is not overridden to False at the field level
            default_help = has_default and (settings.default_help is not False)
        else:
            # include default if the field-level flag is set to True
            default_help = bool(settings.default_help)
        if default_help:
            if not has_default:
                raise ValueError(f'cannot use default_help=True for field {name!r} since it has no default')
            help_str = kwargs.get('help', None)
            # append the default value to the help string
            help_str = ((help_str + ' ') if help_str else '') + f'(default: {default})'
            kwargs['help'] = help_str
        if (result := _get_parser_group_name(settings, fld.name)) is not None:
            # add argument to the group instead of the main parser
            (group_name, is_exclusive) = result
            if is_exclusive:
                group: Optional[Union[_ArgumentGroup, _MutuallyExclusiveGroup]] = _get_parser_exclusive_group(parser, group_name)
            else:
                group = _get_parser_group(parser, group_name)
            if not group:  # group not found, so create it
                if is_exclusive:
                    group = _add_exclusive_group(parser, group_name, kwargs.get('required', False))
                else:
                    # get kwargs from nested ArgparseDataclass
                    group_kwargs = tp.parser_kwargs() if is_nested(tp) else {}
                    group = _add_group(parser, group_name, **group_kwargs)
            parser = group
        if settings.subcommand:
            # create subparsers for each variant
            assert isinstance(parser, ArgumentParser)
            dest = cls.subcommand_dest_name
            required = kwargs.get('required', not has_default)
            if (not required) and (not has_default):
                raise ValueError(f'{name!r} field cannot set required=False with no default value')
            subparsers = parser.add_subparsers(dest=dest, required=required, help=settings.help, metavar='subcommand')
            tp_args = (tp,) if (origin_type is None) else tp_args
            for arg in tp_args:
                assert issubclass_safe(arg, ArgparseDataclass)
                descr_brief = arg._parser_description_brief()
                subparser_kwargs = arg.parser_kwargs()
                if 'formatter_class' not in subparser_kwargs:
                    # inherit formatter_class from the parent
                    subparser_kwargs['formatter_class'] = parser.formatter_class
                subparser = subparsers.add_parser(arg.__settings__.command_name, help=descr_brief, **subparser_kwargs)
                arg.configure_parser(subparser)
            return
        if is_nested(tp):  # recursively configure a nested ArgparseDataclass field
            tp.configure_parser(parser)
        else:
            # prevent duplicate positional args
            if not hasattr(parser, '_pos_args'):
                parser._pos_args = set()  # type: ignore[union-attr]
            if positional:
                pos_args = parser._pos_args
                if args[0] in pos_args:
                    raise ValueError(f'duplicate positional argument {args[0]!r}')
                pos_args.add(args[0])
            parser.add_argument(*args, **kwargs)

    @classmethod
    def configure_parser(cls, parser: Union[ArgumentParser, _ArgumentGroup]) -> None:
        """Configures an argument parser by adding the appropriate arguments.

        By default, this will simply call [`configure_argument`][fancy_dataclass.cli.ArgparseDataclass.configure_argument] for each dataclass field.

        Args:
            parser: `ArgumentParser` to configure"""
        check_dataclass(cls)
        # optionally expose a --version flag when the class settings provide one
        if (version := cls.__settings__.version):
            parser.add_argument('--version', action='version', version=version)
        subcommand = None
        for fld in fields(cls):  # type: ignore[arg-type]
            if fld.metadata.get('subcommand', False):
                # NOTE: field-type validation happens in __post_dataclass_wrap__;
                # this loop re-enforces the at-most-one-subcommand rule at parse time
                if subcommand is None:
                    subcommand = fld.name
                else:
                    raise ValueError(f'multiple fields ({subcommand!r} and {fld.name!r}) registered as subcommands, at most one is allowed')
            cls.configure_argument(parser, fld.name)

    @classmethod
    def make_parser(cls) -> ArgumentParser:
        """Constructs an argument parser and configures it with arguments corresponding to the dataclass's fields.

        Returns:
            The configured `ArgumentParser`"""
        parser = cls.new_parser()
        cls.configure_parser(parser)
        return parser

    @classmethod
    def args_to_dict(cls, args: Namespace) -> Dict[str, Any]:
        """Converts a [`Namespace`](https://docs.python.org/3/library/argparse.html#argparse.Namespace) object to a dict that can be converted to the dataclass type.

        Override this to enable custom behavior.

        Args:
            args: `Namespace` object storing parsed arguments

        Returns:
            A dict mapping from field names to values"""
        check_dataclass(cls)
        d = {}
        for field in fields(cls):  # type: ignore[arg-type]
            nested_field = False
            tp = cast(type, field.type)
            if issubclass_safe(tp, ArgparseDataclass):
                # recursively gather arguments for nested ArgparseDataclass
                val = tp.args_to_dict(args)  # type: ignore[attr-defined]
                nested_field = True
            elif hasattr(args, field.name):  # extract arg from the namespace
                val = getattr(args, field.name)
            else:  # argument not present
                continue
            if nested_field:  # merge in nested ArgparseDataclass
                d.update(val)
            else:
                d[field.name] = val
        return d

    @classmethod
    def from_args(cls, args: Namespace) -> Self:
        """Constructs an [`ArgparseDataclass`][fancy_dataclass.cli.ArgparseDataclass] from a `Namespace` object.

        Args:
            args: `Namespace` object storing parsed arguments

        Returns:
            An instance of this class derived from the parsed arguments"""
        d = cls.args_to_dict(args)
        kwargs = {}
        for fld in fields(cls):  # type: ignore[arg-type]
            name = fld.name
            tp: Optional[type] = cast(type, fld.type)
            is_subcommand = fld.metadata.get('subcommand', False)
            origin_type = get_origin(tp)
            if origin_type == Union:
                tp_args = [arg for arg in get_args(tp) if (arg is not type(None))]
                subcommand = getattr(args, cls.subcommand_dest_name, None)
                if is_subcommand and subcommand:
                    # narrow the Union to the variant whose command name was chosen
                    tp_args = [arg for arg in tp_args if (arg.__settings__.command_name == subcommand)]
                    assert len(tp_args) == 1, f'exactly one type within {tp} should have command name {subcommand}'
                    assert issubclass_safe(tp_args[0], ArgparseDataclass)
                tp = tp_args[0] if (subcommand or (not is_subcommand)) else None
            if tp and issubclass_safe(tp, ArgparseDataclass):
                # handle nested ArgparseDataclass
                kwargs[name] = tp.from_args(args)  # type: ignore[attr-defined]
            elif name in d:
                if (origin_type is tuple) and isinstance(d.get(name), list):
                    kwargs[name] = tuple(d[name])
                else:
                    kwargs[name] = d[name]
            # identity comparison with the MISSING sentinel (see configure_argument)
            elif type_is_optional(cast(type, fld.type)) and (fld.default is MISSING) and (fld.default_factory is MISSING):
                # positional optional argument with no default: fill in None
                kwargs[name] = None
        return cls(**kwargs)

    @classmethod
    def process_args(cls, parser: ArgumentParser, args: Namespace) -> None:
        """Processes arguments from an ArgumentParser, after they are parsed.

        Override this to enable custom behavior.

        Args:
            parser: `ArgumentParser` used to parse arguments
            args: `Namespace` containing parsed arguments"""
        pass

    @classmethod
    def from_cli_args(cls, arg_list: Optional[List[str]] = None) -> Self:
        """Constructs and configures an argument parser, then parses the given command-line arguments and uses them to construct an instance of the class.

        Args:
            arg_list: List of arguments as strings (if `None`, uses `sys.argv`)

        Returns:
            An instance of this class derived from the parsed arguments"""
        parser = cls.make_parser()  # create and configure parser
        args = parser.parse_args(args=arg_list)  # parse arguments (uses sys.argv if None)
        cls.process_args(parser, args)  # process arguments
        return cls.from_args(args)
Mixin class providing a means of setting up an `argparse` parser with the dataclass fields, and then converting the namespace of parsed arguments into an instance of the class.
The parser's argument names and types will be derived from the dataclass's fields.
Per-field settings can be passed into the `metadata` argument of each `dataclasses.field`. See [`ArgparseDataclassFieldSettings`][fancy_dataclass.cli.ArgparseDataclassFieldSettings] for the full list of settings.
@property
def subcommand_name(self) -> Optional[str]:
    """Gets the name of the chosen subcommand associated with the type of the object's subcommand field.

    Returns:
        Name of the subcommand, if a subcommand field exists, and `None` otherwise"""
    fld_name = self.subcommand_field_name
    if fld_name is None:
        return None
    # the command name lives on the settings of the subcommand value's concrete type
    subcommand_type: Type[ArgparseDataclass] = type(getattr(self, fld_name))
    return subcommand_type.__settings__.command_name
Gets the name of the chosen subcommand associated with the type of the object's subcommand field.
Returns:
Name of the subcommand, if a subcommand field exists, and `None` otherwise.
@classmethod
def parser_kwargs(cls) -> Dict[str, Any]:
    """Gets keyword arguments that will be passed to the top-level argument parser.

    Returns:
        Keyword arguments passed upon construction of the `ArgumentParser`"""
    result: Dict[str, Any] = {'description': cls._parser_description()}
    fmt_cls = cls.__settings__.formatter_class
    # only forward a formatter_class when one was explicitly configured
    if fmt_cls is not None:
        result['formatter_class'] = fmt_cls
    return result
Gets keyword arguments that will be passed to the top-level argument parser.
Returns:
Keyword arguments passed upon construction of the ArgumentParser
@classmethod
def new_parser(cls) -> ArgumentParser:
    """Constructs a new top-level argument parser.

    Returns:
        New top-level parser derived from the class's fields"""
    # the parser class itself is configurable via the class-level settings
    return cls.__settings__.parser_class(**cls.parser_kwargs())
Constructs a new top-level argument parser.
Returns: New top-level parser derived from the class's fields
@classmethod
def configure_argument(cls, parser: ArgParser, name: str) -> None:
    """Given an argument parser and a field name, configures the parser with an argument of that name.

    Attempts to provide reasonable default behavior based on the dataclass field name, type, default, and metadata.

    Subclasses may override this method to implement custom behavior.

    Args:
        parser: parser object to update with a new argument
        name: Name of the argument to configure"""
    def is_nested(tp: type) -> TypeGuard[ArgparseDataclass]:
        return issubclass_safe(tp, ArgparseDataclass)
    kwargs: Dict[str, Any] = {}
    fld = cls.__dataclass_fields__[name]  # type: ignore[attr-defined]
    settings = cls._field_settings(fld).adapt_to(ArgparseDataclassFieldSettings)
    if settings.parse_exclude:  # exclude the argument from the parser
        return
    # determine the type of the parser argument for the field
    tp: type = settings.type or fld.type  # type: ignore[assignment]
    if isinstance(tp, str):  # resolve type
        tp = get_type_hints(cls)[name]
    action = settings.action or 'store'
    origin_type = get_origin(tp)
    if origin_type is not None:  # compound type
        if type_is_optional(tp):
            kwargs['default'] = None
        if origin_type == ClassVar:  # by default, exclude ClassVars from the parser
            return
        tp_args = get_args(tp)
        if tp_args:  # Union/List/Optional
            if origin_type == Union:
                tp_args = tuple(arg for arg in tp_args if (arg is not type(None)))
                if (len(tp_args) > 1) and (not settings.subcommand):
                    raise ValueError(f'union type {tp} not allowed as ArgparseDataclass field except as subcommand')
            elif issubclass_safe(origin_type, list) or issubclass_safe(origin_type, tuple):
                for arg in tp_args:
                    if is_nested(arg):
                        name = f'list of {arg.__name__}' if issubclass_safe(origin_type, list) else f'tuple with {arg}'  # type: ignore[attr-defined]
                        raise ValueError(f'{name} not allowed in ArgparseDataclass parser')
            tp = tp_args[0]
            if origin_type == Literal:  # literal options will become choices
                tp = type(tp)
                kwargs['choices'] = tp_args
        else:  # type cannot be inferred
            raise ValueError(f'cannot infer type of items in field {name!r}')
        if issubclass_safe(origin_type, list) and (action == 'store'):
            kwargs['nargs'] = '*'  # allow multiple arguments by default
    if issubclass_safe(tp, IntEnum):
        # use a bare int type
        tp = int
    kwargs['type'] = tp
    # determine the default value
    # (identity comparison with the MISSING sentinel: equality could invoke a
    # default value's custom __eq__ and misclassify it)
    if fld.default is MISSING:
        if fld.default_factory is not MISSING:
            kwargs['default'] = fld.default_factory()
    else:
        kwargs['default'] = fld.default
    # get the names of the arguments associated with the field
    args = settings.args
    if args is not None:
        if isinstance(args, str):
            args = [args]
        # argument is positional if it is explicitly given without a leading dash
        positional = not args[0].startswith('-')
        if (not positional) and ('default' not in kwargs):
            # no default available, so make the field a required option
            kwargs['required'] = True
    else:
        positional = (tp is not bool) and ('default' not in kwargs)
        if positional:
            args = [fld.name]
        else:
            # use a single dash for 1-letter names
            prefix = '-' if (len(fld.name) == 1) else '--'
            argname = fld.name.replace('_', '-')
            args = [prefix + argname]
    if args and (not positional):
        # store the argument based on the name of the field, and not whatever flag name was provided
        kwargs['dest'] = fld.name
    if settings.required is not None:
        kwargs['required'] = settings.required
    has_default = 'default' in kwargs
    default = kwargs.get('default')
    if fld.type is bool:  # use boolean flag instead of an argument
        action = settings.action or 'store_true'
        kwargs['action'] = action
        if isinstance(action, str) and (action not in ['store_true', 'store_false']):
            raise ValueError(f'invalid action {action!r} for boolean flag field {name!r}')
        if default is not None:
            if (action != 'store_false') == default:
                raise ValueError(f'cannot use default value of {default} for action {action!r} with boolean flag field {name!r}')
        for key in ('type', 'required'):
            with suppress(KeyError):
                kwargs.pop(key)
    # extract additional items from metadata
    for key in cls._parser_argument_kwarg_names():
        if key in fld.metadata:
            kwargs[key] = fld.metadata[key]
    if kwargs.get('action') == 'store_const':
        del kwargs['type']
    # determine if the field should show its default in the help string
    if cls.__settings__.default_help:
        # include default if there is one, and the flag is not overridden to False at the field level
        default_help = has_default and (settings.default_help is not False)
    else:
        # include default if the field-level flag is set to True
        default_help = bool(settings.default_help)
    if default_help:
        if not has_default:
            raise ValueError(f'cannot use default_help=True for field {name!r} since it has no default')
        help_str = kwargs.get('help', None)
        # append the default value to the help string
        help_str = ((help_str + ' ') if help_str else '') + f'(default: {default})'
        kwargs['help'] = help_str
    if (result := _get_parser_group_name(settings, fld.name)) is not None:
        # add argument to the group instead of the main parser
        (group_name, is_exclusive) = result
        if is_exclusive:
            group: Optional[Union[_ArgumentGroup, _MutuallyExclusiveGroup]] = _get_parser_exclusive_group(parser, group_name)
        else:
            group = _get_parser_group(parser, group_name)
        if not group:  # group not found, so create it
            if is_exclusive:
                group = _add_exclusive_group(parser, group_name, kwargs.get('required', False))
            else:
                # get kwargs from nested ArgparseDataclass
                group_kwargs = tp.parser_kwargs() if is_nested(tp) else {}
                group = _add_group(parser, group_name, **group_kwargs)
        parser = group
    if settings.subcommand:
        # create subparsers for each variant
        assert isinstance(parser, ArgumentParser)
        dest = cls.subcommand_dest_name
        required = kwargs.get('required', not has_default)
        if (not required) and (not has_default):
            raise ValueError(f'{name!r} field cannot set required=False with no default value')
        subparsers = parser.add_subparsers(dest=dest, required=required, help=settings.help, metavar='subcommand')
        tp_args = (tp,) if (origin_type is None) else tp_args
        for arg in tp_args:
            assert issubclass_safe(arg, ArgparseDataclass)
            descr_brief = arg._parser_description_brief()
            subparser_kwargs = arg.parser_kwargs()
            if 'formatter_class' not in subparser_kwargs:
                # inherit formatter_class from the parent
                subparser_kwargs['formatter_class'] = parser.formatter_class
            subparser = subparsers.add_parser(arg.__settings__.command_name, help=descr_brief, **subparser_kwargs)
            arg.configure_parser(subparser)
        return
    if is_nested(tp):  # recursively configure a nested ArgparseDataclass field
        tp.configure_parser(parser)
    else:
        # prevent duplicate positional args
        if not hasattr(parser, '_pos_args'):
            parser._pos_args = set()  # type: ignore[union-attr]
        if positional:
            pos_args = parser._pos_args
            if args[0] in pos_args:
                raise ValueError(f'duplicate positional argument {args[0]!r}')
            pos_args.add(args[0])
        parser.add_argument(*args, **kwargs)
Given an argument parser and a field name, configures the parser with an argument of that name.
Attempts to provide reasonable default behavior based on the dataclass field name, type, default, and metadata.
Subclasses may override this method to implement custom behavior.
Args: parser: parser object to update with a new argument name: Name of the argument to configure
@classmethod
def configure_parser(cls, parser: Union[ArgumentParser, _ArgumentGroup]) -> None:
    """Configures an argument parser by adding the appropriate arguments.

    By default, this will simply call [`configure_argument`][fancy_dataclass.cli.ArgparseDataclass.configure_argument] for each dataclass field.

    Args:
        parser: `ArgumentParser` to configure"""
    check_dataclass(cls)
    # optionally expose a --version flag when the class settings provide a version string
    if (version := cls.__settings__.version):
        parser.add_argument('--version', action='version', version=version)
    subcommand = None
    for fld in fields(cls):  # type: ignore[arg-type]
        if fld.metadata.get('subcommand', False):
            # NOTE: the subcommand field-type check is performed in `__post_dataclass_wrap__`;
            # this loop only re-enforces the at-most-one-subcommand rule at configure time
            if subcommand is None:
                subcommand = fld.name
            else:
                raise ValueError(f'multiple fields ({subcommand!r} and {fld.name!r}) registered as subcommands, at most one is allowed')
        cls.configure_argument(parser, fld.name)
Configures an argument parser by adding the appropriate arguments.
By default, this will simply call [`configure_argument`][fancy_dataclass.cli.ArgparseDataclass.configure_argument] for each dataclass field.
Args:
parser: `ArgumentParser` to configure
@classmethod
def make_parser(cls) -> ArgumentParser:
    """Constructs an argument parser and configures it with arguments corresponding to the dataclass's fields.

    Returns:
        The configured `ArgumentParser`"""
    configured = cls.new_parser()  # fresh top-level parser
    cls.configure_parser(configured)  # attach an argument per dataclass field
    return configured
Constructs an argument parser and configures it with arguments corresponding to the dataclass's fields.
Returns:
The configured ArgumentParser
@classmethod
def args_to_dict(cls, args: Namespace) -> Dict[str, Any]:
    """Converts a [`Namespace`](https://docs.python.org/3/library/argparse.html#argparse.Namespace) object to a dict that can be converted to the dataclass type.

    Override this to enable custom behavior.

    Args:
        args: `Namespace` object storing parsed arguments

    Returns:
        A dict mapping from field names to values"""
    check_dataclass(cls)
    result: Dict[str, Any] = {}
    for fld in fields(cls):  # type: ignore[arg-type]
        fld_type = cast(type, fld.type)
        if issubclass_safe(fld_type, ArgparseDataclass):
            # nested ArgparseDataclass: recursively gather its arguments and merge them in
            result.update(fld_type.args_to_dict(args))  # type: ignore[attr-defined]
        elif hasattr(args, fld.name):
            # plain field: pull the value straight off the namespace
            result[fld.name] = getattr(args, fld.name)
        # fields absent from the namespace are skipped entirely
    return result
Converts a `Namespace` object to a dict that can be converted to the dataclass type.
Override this to enable custom behavior.
Args:
args: `Namespace` object storing parsed arguments
Returns: A dict mapping from field names to values
@classmethod
def from_args(cls, args: Namespace) -> Self:
    """Constructs an [`ArgparseDataclass`][fancy_dataclass.cli.ArgparseDataclass] from a `Namespace` object.

    Args:
        args: `Namespace` object storing parsed arguments

    Returns:
        An instance of this class derived from the parsed arguments"""
    d = cls.args_to_dict(args)
    kwargs = {}
    for fld in fields(cls):  # type: ignore[arg-type]
        name = fld.name
        tp: Optional[type] = cast(type, fld.type)
        is_subcommand = fld.metadata.get('subcommand', False)
        origin_type = get_origin(tp)
        if origin_type == Union:
            tp_args = [arg for arg in get_args(tp) if (arg is not type(None))]
            subcommand = getattr(args, cls.subcommand_dest_name, None)
            if is_subcommand and subcommand:
                # narrow the Union to the variant whose command name was chosen
                tp_args = [arg for arg in tp_args if (arg.__settings__.command_name == subcommand)]
                assert len(tp_args) == 1, f'exactly one type within {tp} should have command name {subcommand}'
                assert issubclass_safe(tp_args[0], ArgparseDataclass)
            tp = tp_args[0] if (subcommand or (not is_subcommand)) else None
        if tp and issubclass_safe(tp, ArgparseDataclass):
            # handle nested ArgparseDataclass
            kwargs[name] = tp.from_args(args)  # type: ignore[attr-defined]
        elif name in d:
            if (origin_type is tuple) and isinstance(d.get(name), list):
                # argparse collects tuple fields as lists; convert back to a tuple
                kwargs[name] = tuple(d[name])
            else:
                kwargs[name] = d[name]
        # identity comparison with the MISSING sentinel: equality could invoke a
        # default value's custom __eq__ and misclassify it
        elif type_is_optional(cast(type, fld.type)) and (fld.default is MISSING) and (fld.default_factory is MISSING):
            # positional optional argument with no default: fill in None
            kwargs[name] = None
    return cls(**kwargs)
Constructs an `ArgparseDataclass` from a `Namespace` object.
Args: args — `Namespace` object storing parsed arguments.
Returns: An instance of this class derived from the parsed arguments.
513 @classmethod 514 def process_args(cls, parser: ArgumentParser, args: Namespace) -> None: 515 """Processes arguments from an ArgumentParser, after they are parsed. 516 517 Override this to enable custom behavior. 518 519 Args: 520 parser: `ArgumentParser` used to parse arguments 521 args: `Namespace` containing parsed arguments""" 522 pass
Processes arguments from an `ArgumentParser`, after they are parsed.
Override this to enable custom behavior.
Args: parser — `ArgumentParser` used to parse arguments; args — `Namespace` containing parsed arguments.
524 @classmethod 525 def from_cli_args(cls, arg_list: Optional[List[str]] = None) -> Self: 526 """Constructs and configures an argument parser, then parses the given command-line arguments and uses them to construct an instance of the class. 527 528 Args: 529 arg_list: List of arguments as strings (if `None`, uses `sys.argv`) 530 531 Returns: 532 An instance of this class derived from the parsed arguments""" 533 parser = cls.make_parser() # create and configure parser 534 args = parser.parse_args(args=arg_list) # parse arguments (uses sys.argv if None) 535 cls.process_args(parser, args) # process arguments 536 return cls.from_args(args)
Constructs and configures an argument parser, then parses the given command-line arguments and uses them to construct an instance of the class.
Args: arg_list — list of arguments as strings (if `None`, uses `sys.argv`).
Returns: An instance of this class derived from the parsed arguments.
class CLIDataclass(ArgparseDataclass):
    """This subclass of [`ArgparseDataclass`][fancy_dataclass.cli.ArgparseDataclass] allows the user to execute arbitrary program logic using the parsed arguments as input.

    Subclasses should override the `run` method to implement custom behavior."""

    def run(self) -> None:
        """Runs the main body of the program.

        Subclasses should implement this to provide custom behavior.

        If the class has a subcommand defined, and it is an instance of `CLIDataclass`, the default implementation of `run` will be to call the subcommand's own implementation."""
        # delegate to the subcommand's `run` method, if one is available
        fld_name = self.subcommand_field_name
        if fld_name:
            sub = getattr(self, fld_name)
            if isinstance(sub, CLIDataclass):
                return sub.run()
        # no runnable subcommand: subclasses must override this method
        raise NotImplementedError

    @classmethod
    def main(cls, arg_list: Optional[List[str]] = None) -> None:
        """Executes the following procedures in sequence:

        1. Constructs a new argument parser.
        2. Configures the parser with appropriate arguments.
        3. Parses command-line arguments.
        4. Post-processes the arguments.
        5. Constructs a dataclass instance from the parsed arguments.
        6. Runs the main body of the program, using the parsed arguments.

        Args:
            arg_list: List of arguments as strings (if `None`, uses `sys.argv`)"""
        # from_cli_args covers steps 1-5; run is step 6
        cls.from_cli_args(arg_list).run()
This subclass of `ArgparseDataclass` allows the user to execute arbitrary program logic using the parsed arguments as input.
Subclasses should override the `run` method to implement custom behavior.
    def run(self) -> None:
        """Runs the main body of the program.

        Subclasses should implement this to provide custom behavior.

        If the class has a subcommand defined, and it is an instance of `CLIDataclass`, the default implementation of `run` will be to call the subcommand's own implementation."""
        # delegate to the subcommand's `run` method, if it exists
        if self.subcommand_field_name:
            val = getattr(self, self.subcommand_field_name)
            if isinstance(val, CLIDataclass):
                return val.run()
        # no subcommand to delegate to: subclasses must override
        raise NotImplementedError
Runs the main body of the program.
Subclasses should implement this to provide custom behavior.
If the class has a subcommand defined, and it is an instance of `CLIDataclass`, the default implementation of `run` will be to call the subcommand's own implementation.
    @classmethod
    def main(cls, arg_list: Optional[List[str]] = None) -> None:
        """Executes the following procedures in sequence:

        1. Constructs a new argument parser.
        2. Configures the parser with appropriate arguments.
        3. Parses command-line arguments.
        4. Post-processes the arguments.
        5. Constructs a dataclass instance from the parsed arguments.
        6. Runs the main body of the program, using the parsed arguments.

        Args:
            arg_list: List of arguments as strings (if `None`, uses `sys.argv`)"""
        obj = cls.from_cli_args(arg_list)  # steps 1-5
        obj.run()  # step 6
Executes the following procedures in sequence:
1. Constructs a new argument parser.
2. Configures the parser with appropriate arguments.
3. Parses command-line arguments.
4. Post-processes the arguments.
5. Constructs a dataclass instance from the parsed arguments.
6. Runs the main body of the program, using the parsed arguments.
Args: arg_list — list of arguments as strings (if `None`, uses `sys.argv`).
16class Config: 17 """Base class for storing a collection of configurations. 18 19 Subclasses may store a class attribute, `_config`, with the current global configurations, which can be retrieved or updated by the user.""" 20 21 _config: ClassVar[Optional[Self]] = None 22 23 @classmethod 24 def get_config(cls) -> Optional[Self]: 25 """Gets the current global configuration. 26 27 Returns: 28 Global configuration object (`None` if not set)""" 29 return cls._config 30 31 @classmethod 32 def _set_config(cls, config: Optional[Self]) -> None: 33 """Sets the global configuration to a given value.""" 34 # NOTE: this is private to avoid confusion with update_config 35 cls._config = config 36 37 @classmethod 38 def clear_config(cls) -> None: 39 """Clears the global configuration by setting it to `None`.""" 40 cls._set_config(None) 41 42 def update_config(self) -> None: 43 """Updates the global configuration, setting it equal to this object.""" 44 type(self)._set_config(self) 45 46 @contextmanager 47 def as_config(self) -> Iterator[None]: 48 """Context manager which temporarily updates the global configuration with this object.""" 49 try: 50 orig_config = type(self).get_config() 51 self.update_config() 52 yield 53 finally: 54 type(self)._set_config(orig_config)
Base class for storing a collection of configurations.
Subclasses may store a class attribute, `_config`, with the current global configurations, which can be retrieved or updated by the user.
    @classmethod
    def get_config(cls) -> Optional[Self]:
        """Gets the current global configuration.

        Returns:
            Global configuration object (`None` if not set)"""
        # the global config is stored directly on the class
        return cls._config
Gets the current global configuration.
Returns: Global configuration object (`None` if not set).
    @classmethod
    def clear_config(cls) -> None:
        """Clears the global configuration by setting it to `None`."""
        # clear via the private setter
        cls._set_config(None)
Clears the global configuration by setting it to `None`.
    def update_config(self) -> None:
        """Updates the global configuration, setting it equal to this object."""
        # use type(self) so the config is stored on this object's own class
        type(self)._set_config(self)
Updates the global configuration, setting it equal to this object.
    @contextmanager
    def as_config(self) -> Iterator[None]:
        """Context manager which temporarily updates the global configuration with this object."""
        try:
            # remember the current config so it can be restored on exit
            orig_config = type(self).get_config()
            self.update_config()
            yield
        finally:
            # restore the previous global configuration
            type(self)._set_config(orig_config)
Context manager which temporarily updates the global configuration with this object.
class ConfigDataclass(DictDataclass, FileConfig, suppress_defaults=False):
    """A dataclass representing a collection of configurations.

    The configurations can be loaded from a file, the type of which will be inferred from its extension.
    Supported file types are:

    - JSON
    - TOML
    """

    @staticmethod
    def _wrap_config_dataclass(mixin_cls: Type[DataclassMixin], cls: Type['ConfigDataclass']) -> Type[DataclassMixin]:
        """Recursively wraps a DataclassMixin class around a ConfigDataclass so that nested dataclass fields inherit from the same mixin."""
        def _wrap(tp: type) -> type:
            # only dataclass types get wrapped; all other types pass through unchanged
            if is_dataclass(tp):
                wrapped_cls = mixin_cls.wrap_dataclass(tp)
                field_data = [(fld.name, fld.type, fld) for fld in get_dataclass_fields(tp, include_classvars=True)]
                # rebuild the dataclass so it inherits from the wrapped class's bases
                return make_dataclass(tp.__name__, field_data, bases=wrapped_cls.__bases__)
            return tp
        return _wrap(dataclass_type_map(cls, _wrap))  # type: ignore[arg-type]

    @classmethod
    def load_config(cls, path: AnyPath) -> Self:
        """Loads configurations from a file and sets them to be the global configurations for this class.

        Args:
            path: File from which to load configurations

        Returns:
            The newly loaded global configurations"""
        # choose the file-format mixin (e.g. JSON/TOML) based on the file extension
        tp = _get_dataclass_type_for_path(path)
        new_cls: Type[FileSerializable] = ConfigDataclass._wrap_config_dataclass(tp, cls)  # type: ignore
        with open(path) as fp:
            cfg: Self = coerce_to_dataclass(cls, new_cls._from_file(fp))
        cfg.update_config()
        return cfg
A dataclass representing a collection of configurations.
The configurations can be loaded from a file, the type of which will be inferred from its extension. Supported file types are: JSON and TOML.
    @classmethod
    def load_config(cls, path: AnyPath) -> Self:
        """Loads configurations from a file and sets them to be the global configurations for this class.

        Args:
            path: File from which to load configurations

        Returns:
            The newly loaded global configurations"""
        # choose the file-format mixin (e.g. JSON/TOML) based on the file extension
        tp = _get_dataclass_type_for_path(path)
        new_cls: Type[FileSerializable] = ConfigDataclass._wrap_config_dataclass(tp, cls)  # type: ignore
        with open(path) as fp:
            cfg: Self = coerce_to_dataclass(cls, new_cls._from_file(fp))
        cfg.update_config()
        return cfg
Loads configurations from a file and sets them to be the global configurations for this class.
Args: path — file from which to load configurations.
Returns: The newly loaded global configurations.
class DataclassMixin:
    """Mixin class for adding some kind of functionality to a dataclass.

    For example, this could provide features for conversion to/from JSON ([`JSONDataclass`][fancy_dataclass.json.JSONDataclass]), the ability to construct CLI argument parsers ([`ArgparseDataclass`][fancy_dataclass.cli.ArgparseDataclass]), etc.

    This mixin also provides a [`wrap_dataclass`][fancy_dataclass.mixin.DataclassMixin.wrap_dataclass] decorator which can be used to wrap an existing dataclass type into one that provides the mixin's functionality."""

    # type of the class-level settings object (None if the mixin defines no settings)
    __settings_type__: ClassVar[Optional[Type[MixinSettings]]] = None
    # instantiated class-level settings
    __settings__: ClassVar[Optional[MixinSettings]] = None
    # type used to parse per-field settings from each field's metadata dict
    __field_settings_type__: ClassVar[Optional[Type[FieldSettings]]] = None

    @classmethod
    def __init_subclass__(cls, **kwargs: Any) -> None:
        """When inheriting from this class, you may pass various keyword arguments after the list of base classes.

        If the base class has a `__settings_type__` class attribute (subclass of [`MixinSettings`][fancy_dataclass.settings.MixinSettings]), that class will be instantiated with the provided arguments and stored as a `__settings__` attribute on the subclass. These settings can be used to customize the behavior of the subclass.

        Additionally, the mixin may set the `__field_settings_type__` class attribute to indicate the type (subclass of [`FieldSettings`][fancy_dataclass.settings.FieldSettings]) that should be used for field settings, which are extracted from each field's `metadata` dict."""
        super().__init_subclass__()
        _configure_mixin_settings(cls, **kwargs)
        _configure_field_settings_type(cls)

    @classmethod
    def __post_dataclass_wrap__(cls, wrapped_cls: Type[Self]) -> None:
        """A hook that is called after the [`dataclasses.dataclass`](https://docs.python.org/3/library/dataclasses.html#dataclasses.dataclass) decorator is applied to the mixin subclass.

        This can be used, for instance, to validate the dataclass fields at definition time.

        NOTE: this function should be _idempotent_, meaning it can be called multiple times with the same effect. This is because it will be called for every base class of the `dataclass`-wrapped class, which may result in duplicate calls.

        Args:
            wrapped_cls: Class wrapped by the `dataclass` decorator"""
        _check_field_settings(wrapped_cls)

    @classmethod
    def _field_settings(cls, field: dataclasses.Field) -> FieldSettings:  # type: ignore[type-arg]
        """Gets the class-specific FieldSettings extracted from the metadata stored on a Field object."""
        # fall back on the generic FieldSettings if the mixin declared no specific type
        stype = cls.__field_settings_type__ or FieldSettings
        return stype.from_field(field)

    @classmethod
    def wrap_dataclass(cls: Type[Self], tp: Type[T], **kwargs: Any) -> Type[Self]:
        """Wraps a dataclass type into a new one which inherits from this mixin class and is otherwise the same.

        Args:
            tp: A dataclass type
            kwargs: Keyword arguments to type constructor

        Returns:
            New dataclass type inheriting from the mixin

        Raises:
            TypeError: If the given type is not a dataclass"""
        check_dataclass(tp)
        if issubclass(tp, cls):  # the type is already a subclass of this one, so just return it
            return tp
        # otherwise, create a new type that inherits from this class
        try:
            return type(tp.__name__, (tp, cls), {}, **kwargs)
        except TypeError as e:
            if 'Cannot create a consistent' in str(e):
                # MRO conflict: try the opposite order of inheritance
                return type(tp.__name__, (cls, tp), {}, **kwargs)
            raise

    def _replace(self, **kwargs: Any) -> Self:
        """Constructs a new object with the provided fields modified.

        Args:
            **kwargs: Dataclass fields to modify

        Returns:
            New object with selected fields modified

        Raises:
            TypeError: If an invalid dataclass field is provided"""
        assert hasattr(self, '__dataclass_fields__'), f'{obj_class_name(self)} is not a dataclass type'
        d = {fld.name: getattr(self, fld.name) for fld in dataclasses.fields(self)}  # type: ignore[arg-type]
        for (key, val) in kwargs.items():
            if key in d:
                d[key] = val
            else:
                raise TypeError(f'{key!r} is not a valid field for {obj_class_name(self)}')
        return self.__class__(**d)

    @classmethod
    def get_subclass_with_name(cls, typename: str) -> Type[Self]:
        """Gets the subclass of this class with the given name.

        Args:
            typename: Name of subclass

        Returns:
            Subclass with the given name

        Raises:
            TypeError: If no subclass with the given name exists"""
        return get_subclass_with_name(cls, typename)
Mixin class for adding some kind of functionality to a dataclass.
For example, this could provide features for conversion to/from JSON (`JSONDataclass`), the ability to construct CLI argument parsers (`ArgparseDataclass`), etc.
This mixin also provides a `wrap_dataclass` decorator which can be used to wrap an existing dataclass type into one that provides the mixin's functionality.
139 @classmethod 140 def wrap_dataclass(cls: Type[Self], tp: Type[T], **kwargs: Any) -> Type[Self]: 141 """Wraps a dataclass type into a new one which inherits from this mixin class and is otherwise the same. 142 143 Args: 144 tp: A dataclass type 145 kwargs: Keyword arguments to type constructor 146 147 Returns: 148 New dataclass type inheriting from the mixin 149 150 Raises: 151 TypeError: If the given type is not a dataclass""" 152 check_dataclass(tp) 153 if issubclass(tp, cls): # the type is already a subclass of this one, so just return it 154 return tp 155 # otherwise, create a new type that inherits from this class 156 try: 157 return type(tp.__name__, (tp, cls), {}, **kwargs) 158 except TypeError as e: 159 if 'Cannot create a consistent' in str(e): 160 # try the opposite order of inheritance 161 return type(tp.__name__, (cls, tp), {}, **kwargs) 162 raise
Wraps a dataclass type into a new one which inherits from this mixin class and is otherwise the same.
Args: tp — a dataclass type; kwargs — keyword arguments to type constructor.
Returns: New dataclass type inheriting from the mixin.
Raises: TypeError — if the given type is not a dataclass.
184 @classmethod 185 def get_subclass_with_name(cls, typename: str) -> Type[Self]: 186 """Gets the subclass of this class with the given name. 187 188 Args: 189 typename: Name of subclass 190 191 Returns: 192 Subclass with the given name 193 194 Raises: 195 TypeError: If no subclass with the given name exists""" 196 return get_subclass_with_name(cls, typename)
Gets the subclass of this class with the given name.
Args: typename — name of subclass.
Returns: Subclass with the given name.
Raises: TypeError — if no subclass with the given name exists.
class DictConfig(FileConfig, Dict[Any, Any]):
    """A collection of configurations, stored as a Python dict.

    To impose a type schema on the configurations, use [`ConfigDataclass`][fancy_dataclass.config.ConfigDataclass] instead.

    The configurations can be loaded from a file, the type of which will be inferred from its extension.
    Supported file types are:

    - JSON
    - TOML
    """

    @classmethod
    def load_config(cls, path: AnyPath) -> Self:
        """Loads configurations from a file and sets them to be the global configurations for this class."""
        # resolve the file-format handler (e.g. JSON/TOML) from the file extension
        handler = _get_dataclass_type_for_path(path)
        with open(path) as fp:
            raw = handler._text_file_to_dict(fp)  # type: ignore[attr-defined]
        cfg = cls(raw)
        cfg.update_config()
        return cfg
A collection of configurations, stored as a Python dict.
To impose a type schema on the configurations, use `ConfigDataclass` instead.
The configurations can be loaded from a file, the type of which will be inferred from its extension. Supported file types are: JSON and TOML.
class DictDataclass(DataclassMixin):
    """Mixin class for dataclasses that can be converted to and from a Python dict.

    A subclass may configure settings by using [`DictDataclassSettings`][fancy_dataclass.dict.DictDataclassSettings] fields as keyword arguments when inheriting from `DictDataclass`.

    Per-field settings can be passed into the `metadata` argument of each `dataclasses.field`. See [`DictDataclassFieldSettings`][fancy_dataclass.dict.DictDataclassFieldSettings] for the full list of settings."""

    __settings_type__ = DictDataclassSettings
    __settings__ = DictDataclassSettings()
    __field_settings_type__ = DictDataclassFieldSettings

    def __init_subclass__(cls, **kwargs: Any) -> None:
        """Resolves the `store_type` setting for the new subclass."""
        super().__init_subclass__(**kwargs)
        # if store_type mode is 'auto', use base class to resolve it
        if getattr(cls.__settings__, 'store_type', None) == 'auto':
            for base in cls.mro():
                if issubclass(base, DictDataclass) and (base.__settings__.store_type != 'auto'):
                    cls.__settings__._store_type = base.__settings__.store_type
                    break
            else:  # by default, do not store
                cls.__settings__._store_type = 'off'

    @classmethod
    def __post_dataclass_wrap__(cls, wrapped_cls: Type[Self]) -> None:
        """Validates the wrapped dataclass's fields at definition time."""
        # disallow 'type' field when the type needs to be stored in the output dict
        if wrapped_cls.__settings__.should_store_type():
            for fld in dataclasses.fields(wrapped_cls):  # type: ignore[arg-type]
                if fld.name == 'type':
                    raise TypeError(f"'type' is a reserved dict field for {cls.__name__}, cannot be used as dataclass field")

    def _dict_init(self) -> AnyDict:
        """Gets the basic skeleton for a dict generated by this type.

        If `store_type` is `'name'` or `'qualname'`, will include a `type` field to store the type."""
        if self.__settings__._store_type == 'name':
            return {'type': obj_class_name(self)}
        if self.__settings__._store_type == 'qualname':
            return {'type': fully_qualified_class_name(self.__class__)}
        return {}

    @classmethod
    def _to_dict_value_basic(cls, val: Any) -> Any:
        """Converts a value with a basic type to a form appropriate for dict values.

        By default this will return the original value. Subclasses may override the behavior, e.g. to perform custom type coercion."""
        return val

    @classmethod
    def _to_dict_value(cls, val: Any, full: bool) -> Any:
        """Converts an arbitrary value to a form appropriate for dict values.

        This will recursively process values within containers (lists, dicts, etc.)."""
        if isinstance(val, DictDataclass):
            return val.to_dict(full=full)
        if isinstance(val, list):
            return [cls._to_dict_value(elt, full) for elt in val]
        if isinstance(val, tuple):
            return tuple(cls._to_dict_value(elt, full) for elt in val)
        if isinstance(val, dict):
            return {k: cls._to_dict_value(v, full) for (k, v) in val.items()}
        return cls._to_dict_value_basic(val)

    def _to_dict(self, full: bool) -> AnyDict:
        """Core dict conversion; `full=True` disables suppression of None/default values."""
        if self.__settings__.flattened:
            # flattened mode: convert via the equivalent subfield-flattened type
            cls = type(self)
            flat_obj = _flatten_dataclass(cls)[1].forward(self)
            return flat_obj._to_dict(full)  # type: ignore
        d = self._dict_init()
        class_suppress_none = self.__settings__.suppress_none
        class_suppress_defaults = self.__settings__.suppress_defaults
        for (name, fld) in self.__dataclass_fields__.items():  # type: ignore[attr-defined]
            is_class_var = get_origin(fld.type) is ClassVar
            settings = self._field_settings(fld).adapt_to(DictDataclassFieldSettings)
            # suppress field by default if it is a ClassVar or init=False
            if (is_class_var or (not fld.init)) if (settings.suppress is None) else settings.suppress:
                continue
            val = getattr(self, name)
            if (not full) and (settings.suppress is None):
                # suppress None if field specifies it (falling back on class setting)
                if (val is None) and (class_suppress_none if (settings.suppress_none is None) else settings.suppress_none):
                    continue
                # suppress default if field specifies it (falling back on class setting)
                if (class_suppress_defaults if (settings.suppress_default is None) else settings.suppress_default):
                    # suppress values that match the default
                    try:
                        if val == fld.default:
                            continue
                        if (fld.default_factory != dataclasses.MISSING) and (val == fld.default_factory()):
                            continue
                    except ValueError:  # some types may fail to compare
                        pass
            safe_dict_insert(d, name, self._to_dict_value(val, full))
        return d

    def to_dict(self, **kwargs: Any) -> AnyDict:
        """Converts the object to a Python dict which, by default, suppresses values matching their dataclass defaults.

        Args:
            kwargs: Keyword arguments <ul><li>`full`: if `True`, does not suppress `None` or default values</li></ul>

        Returns:
            A dict whose keys match the dataclass's fields"""
        full = kwargs.get('full', False)
        return self._to_dict(full)

    @staticmethod
    def _from_dict_value_convertible(tp: Type['DictDataclass'], val: Any, strict: bool) -> Any:
        """Converts a value to a type that is itself convertible from a dict."""
        if isinstance(val, tp):  # already converted from a dict
            return val
        # otherwise, convert from a dict
        return tp.from_dict(val, strict=strict)

    @classmethod
    def _from_dict_value_basic(cls, tp: type, val: Any) -> Any:
        """Given a basic type and a value, attempts to convert the value to the given type.

        By default this will return the original value. Subclasses may override the behavior, e.g. to perform custom validation or type coercion."""
        if cls.__settings__.validate and (not isinstance(val, tp)):  # validate type
            raise TypeConversionError(tp, val)
        # NOTE: alternatively, we could coerce to the type
        # if val is None:  # do not coerce None
        #     raise TypeConversionError(tp, val)
        # try:
        #     return tp(val)  # type: ignore[call-arg]
        # except TypeError as e:
        #     raise TypeConversionError(tp, val) from e
        return val

    @classmethod
    def _from_dict_value(cls, tp: type, val: Any, strict: bool = False) -> Any:
        """Given a type and a value, attempts to convert the value to the given type."""
        def err() -> TypeConversionError:
            return TypeConversionError(tp, val)
        convert_val = partial(cls._from_dict_value, strict=strict)
        if tp is type(None):
            if val is None:
                return None
            raise err()
        if tp in [Any, 'typing.Any']:  # assume basic data type
            return val
        ttp = type(tp)
        if ttp is _AnnotatedAlias:  # Annotated: just ignore the annotation
            return convert_val(get_args(tp)[0], val)
        if issubclass_safe(tp, list):
            # class may inherit from List[T], so get the parent class
            assert hasattr(tp, '__orig_bases__')
            for base in tp.__orig_bases__:
                origin_type = get_origin(base)
                if origin_type and issubclass_safe(origin_type, list):
                    tp = base
                    break
        origin_type = get_origin(tp)
        if origin_type is None:  # basic class or type
            if ttp == TypeVar:  # type: ignore[comparison-overlap]
                # can't refer to instantiated type, so we assume a basic data type
                # this limitation means we can only use TypeVar for basic types
                return val
            if hasattr(tp, 'from_dict'):  # handle nested fields which are themselves convertible from a dict
                return cls._from_dict_value_convertible(tp, val, strict)
            if issubclass(tp, tuple):
                return tp(*val)
            if issubclass(tp, dict):
                if ttp is _TypedDictMeta:  # validate TypedDict fields
                    anns = tp.__annotations__
                    if cls.__settings__.validate and ((not isinstance(val, dict)) or (set(anns) != set(val))):
                        raise err()
                    return {key: convert_val(valtype, val[key]) for (key, valtype) in anns.items()}
                return tp(val)
            # basic data type
            return cls._from_dict_value_basic(tp, val)
        # compound data type
        args = get_args(tp)
        if origin_type is list:
            subtype = args[0]
            return [convert_val(subtype, elt) for elt in val]
        if origin_type is dict:
            (keytype, valtype) = args
            return {convert_val(keytype, k): convert_val(valtype, v) for (k, v) in val.items()}
        if origin_type is tuple:
            subtypes = args
            if subtypes[-1] == Ellipsis:  # treat it like a list
                subtype = subtypes[0]
                return tuple(convert_val(subtype, elt) for elt in val)
            return tuple(convert_val(subtype, elt) for (subtype, elt) in zip(args, val))
        if origin_type in _UNION_TYPES:
            for subtype in args:
                try:
                    # NB: will resolve to the first valid type in the Union
                    return convert_val(subtype, val)
                except Exception:
                    continue
        elif origin_type == Literal:
            if any((val == arg) for arg in args):
                # one of the Literal options is matched
                return val
        elif hasattr(origin_type, 'from_dict'):
            return cls._from_dict_value_convertible(origin_type, val, strict)
        elif issubclass_safe(origin_type, Iterable):  # arbitrary iterable
            subtype = args[0]
            return type(val)(convert_val(subtype, elt) for elt in val)
        raise err()

    @classmethod
    def _get_missing_value(cls, fld: Field) -> Any:  # type: ignore[type-arg]
        """Hook for supplying a value for a required field absent from the input dict; raises by default."""
        raise ValueError(f'{fld.name!r} field is required')

    @classmethod
    def dataclass_args_from_dict(cls, d: AnyDict, strict: bool = False) -> AnyDict:
        """Given a dict of arguments, performs type conversion and/or validity checking, then returns a new dict that can be passed to the class's constructor."""
        check_dataclass(cls)
        kwargs = {}
        bases = cls.mro()
        fields = dataclasses.fields(cls)  # type: ignore[arg-type]
        if strict:  # check there are no extraneous fields
            field_names = {field.name for field in fields}
            for key in d:
                if key not in field_names:
                    raise ValueError(f'{key!r} is not a valid field for {cls.__name__}')
        for fld in fields:
            if not fld.init:  # suppress fields where init=False
                continue
            if fld.name in d:
                # field may be defined in the dataclass itself or one of its ancestor dataclasses
                for base in bases:
                    try:
                        field_type = dataclass_field_type(base, fld.name)
                        kwargs[fld.name] = cls._from_dict_value(field_type, d[fld.name], strict=strict)
                        break
                    except (AttributeError, KeyError):
                        pass
                else:
                    raise ValueError(f'could not locate field {fld.name!r}')
            elif fld.default == dataclasses.MISSING:
                if fld.default_factory == dataclasses.MISSING:
                    val = cls._get_missing_value(fld)
                else:
                    val = fld.default_factory()
                # raise ValueError(f'{fld.name!r} field is required')
                kwargs[fld.name] = val
        return kwargs

    @classmethod
    def from_dict(cls, d: AnyDict, **kwargs: Any) -> Self:
        """Constructs an object from a dictionary of fields.

        This may also perform some basic type/validity checking.

        Args:
            d: Dict to convert into an object
            kwargs: Keyword arguments <ul><li>`strict`: if `True`, raise an error if extraneous dict fields are present</li></ul>

        Returns:
            Converted object of this class"""
        # first establish the type, which may be present in the 'type' field of the dict
        typename = d.get('type')
        if typename is None:  # type field unspecified, so use the calling class
            tp = cls
        else:
            cls_name = fully_qualified_class_name(cls) if ('.' in typename) else cls.__name__
            if cls_name == typename:  # type name already matches this class
                tp = cls
            else:
                # tp must be a subclass of cls
                # the name must be in scope to be found, allowing two alternatives for retrieval:
                #   option 1: all subclasses of this DictDataclass are defined in the same module as the base class
                #   option 2: the name is fully qualified, so the name can be loaded into scope
                # call from_dict on the subclass in case it has its own custom implementation
                # (remove the type name before passing to the constructor)
                d2 = {key: val for (key, val) in d.items() if (key != 'type')}
                return cls.get_subclass_with_name(typename).from_dict(d2, **kwargs)
        conv = None
        if cls.__settings__.flattened:
            # produce equivalent subfield-flattened type
            settings = copy(tp.__settings__)
            settings.flattened = True
            conv = _flatten_dataclass(tp, cls.__bases__)[1]
            tp = conv.to_type  # type: ignore[assignment]
            tp.__settings__ = settings
        strict = kwargs.get('strict', False)
        result: Self = tp(**tp.dataclass_args_from_dict(d, strict=strict))
        return conv.backward(result) if cls.__settings__.flattened else result  # type: ignore
Mixin class for dataclasses that can be converted to and from a Python dict.
A subclass may configure settings by using `DictDataclassSettings` fields as keyword arguments when inheriting from `DictDataclass`.
Per-field settings can be passed into the `metadata` argument of each `dataclasses.field`. See `DictDataclassFieldSettings` for the full list of settings.
    def to_dict(self, **kwargs: Any) -> AnyDict:
        """Converts the object to a Python dict which, by default, suppresses values matching their dataclass defaults.

        Args:
            kwargs: Keyword arguments <ul><li>`full`: if `True`, does not suppress `None` or default values</li></ul>

        Returns:
            A dict whose keys match the dataclass's fields"""
        # `full` defaults to False, i.e. None/default suppression is on
        full = kwargs.get('full', False)
        return self._to_dict(full)
Converts the object to a Python dict which, by default, suppresses values matching their dataclass defaults.
Args: kwargs — keyword arguments; `full`: if `True`, does not suppress `None` or default values.
Returns: A dict whose keys match the dataclass's fields.
    @classmethod
    def dataclass_args_from_dict(cls, d: AnyDict, strict: bool = False) -> AnyDict:
        """Given a dict of arguments, performs type conversion and/or validity checking, then returns a new dict that can be passed to the class's constructor.

        Args:
            d: Dict mapping field names to raw values
            strict: If `True`, raise an error if extraneous dict fields are present

        Returns:
            Dict of converted constructor keyword arguments

        Raises:
            ValueError: If an extraneous field is present (strict mode), a required field is missing, or a field's type cannot be located"""
        check_dataclass(cls)
        kwargs = {}
        bases = cls.mro()
        fields = dataclasses.fields(cls)  # type: ignore[arg-type]
        if strict:  # check there are no extraneous fields
            field_names = {field.name for field in fields}
            for key in d:
                if key not in field_names:
                    raise ValueError(f'{key!r} is not a valid field for {cls.__name__}')
        for fld in fields:
            if not fld.init:  # suppress fields where init=False
                continue
            if fld.name in d:
                # field may be defined in the dataclass itself or one of its ancestor dataclasses
                for base in bases:
                    try:
                        field_type = dataclass_field_type(base, fld.name)
                        kwargs[fld.name] = cls._from_dict_value(field_type, d[fld.name], strict=strict)
                        break
                    except (AttributeError, KeyError):
                        pass
                else:
                    raise ValueError(f'could not locate field {fld.name!r}')
            elif fld.default == dataclasses.MISSING:
                # field absent from the dict and has no default: use factory or missing-value hook
                if fld.default_factory == dataclasses.MISSING:
                    val = cls._get_missing_value(fld)
                else:
                    val = fld.default_factory()
                # raise ValueError(f'{fld.name!r} field is required')
                kwargs[fld.name] = val
        return kwargs
Given a dict of arguments, performs type conversion and/or validity checking, then returns a new dict that can be passed to the class's constructor.
@classmethod
def from_dict(cls, d: AnyDict, **kwargs: Any) -> Self:
    """Constructs an object from a dictionary of fields.

    This may also perform some basic type/validity checking.

    Args:
        d: Dict to convert into an object
        kwargs: Keyword arguments <ul><li>`strict`: if `True`, raise an error if extraneous dict fields are present</li></ul>

    Returns:
        Converted object of this class"""
    # first establish the type, which may be present in the 'type' field of the dict
    typename = d.get('type')
    if typename is None:  # type field unspecified, so use the calling class
        tp = cls
    else:
        # compare against the fully qualified name only when the stored name is dotted
        cls_name = fully_qualified_class_name(cls) if ('.' in typename) else cls.__name__
        if cls_name == typename:  # type name already matches this class
            tp = cls
        else:
            # tp must be a subclass of cls
            # the name must be in scope to be found, allowing two alternatives for retrieval:
            #   option 1: all subclasses of this DictDataclass are defined in the same module as the base class
            #   option 2: the name is fully qualified, so the name can be loaded into scope
            # call from_dict on the subclass in case it has its own custom implementation
            # (remove the type name before passing to the constructor)
            d2 = {key: val for (key, val) in d.items() if (key != 'type')}
            return cls.get_subclass_with_name(typename).from_dict(d2, **kwargs)
    conv = None
    if cls.__settings__.flattened:
        # produce equivalent subfield-flattened type
        settings = copy(tp.__settings__)
        settings.flattened = True
        conv = _flatten_dataclass(tp, cls.__bases__)[1]
        tp = conv.to_type  # type: ignore[assignment]
        tp.__settings__ = settings
    strict = kwargs.get('strict', False)
    result: Self = tp(**tp.dataclass_args_from_dict(d, strict=strict))
    # if flattened, map the flattened instance back onto the original nested type
    return conv.backward(result) if cls.__settings__.flattened else result  # type: ignore
Constructs an object from a dictionary of fields.
This may also perform some basic type/validity checking.
Args:
    d: Dict to convert into an object
    kwargs: Keyword arguments
        - `strict`: if `True`, raise an error if extraneous dict fields are present

Returns:
    Converted object of this class
class JSONBaseDataclass(JSONDataclass, store_type='qualname'):
    """This class should be used in place of [`JSONDataclass`][fancy_dataclass.json.JSONDataclass] when you intend to inherit from the class.

    When converting a subclass to a dict with [`to_dict`][fancy_dataclass.dict.DictDataclass.to_dict], it will store the subclass's fully qualified type in the `type` field. It will also resolve this type when calling [`from_dict`][fancy_dataclass.dict.DictDataclass.from_dict]."""
This class should be used in place of [`JSONDataclass`][fancy_dataclass.json.JSONDataclass] when you intend to inherit from the class.

When converting a subclass to a dict with [`to_dict`][fancy_dataclass.dict.DictDataclass.to_dict], it will store the subclass's fully qualified type in the `type` field. It will also resolve this type when calling [`from_dict`][fancy_dataclass.dict.DictDataclass.from_dict].
class JSONDataclass(DictFileSerializableDataclass, JSONSerializable):  # type: ignore[misc]
    """Dataclass mixin enabling default serialization of dataclass objects to and from JSON."""

    @classmethod
    def __init_subclass__(cls, **kwargs: Any) -> None:
        super().__init_subclass__(**kwargs)
        # if the class already inherits from JSONDataclass, raise an error if store_type="auto"
        # this is because resolving the type from a dict may be ambiguous
        if getattr(cls.__settings__, 'store_type', None) == 'auto':
            for base in cls.mro():
                if (base not in [cls, JSONDataclass]) and issubclass(base, JSONDataclass):
                    raise TypeError("when subclassing a JSONDataclass, you must set store_type to a value other than 'auto', or subclass JSONBaseDataclass instead")

    @classmethod
    def _to_json_value(cls, obj: Self) -> Any:
        # a JSONDataclass serializes as its dict representation
        return cls.to_dict(obj)

    @classmethod
    def _dict_to_text_file(cls, d: AnyDict, fp: IO[str], **kwargs: Any) -> None:
        # serialize the dict using the class's (possibly overridden) JSON encoder
        return _dump_value_to_json(d, fp, cls.json_encoder(), **kwargs)

    @classmethod
    def _text_file_to_dict(cls, fp: IO[str], **kwargs: Any) -> AnyDict:
        d = json.load(fp, **kwargs)
        if not isinstance(d, dict):
            raise ValueError('loaded JSON is not a dict')
        return d

    @classmethod
    def _to_dict_value_basic(cls, val: Any) -> Any:
        return to_dict_value_basic(val)

    @classmethod
    def _to_dict_value(cls, val: Any, full: bool) -> Any:
        if isinstance(val, tuple) and hasattr(val, '_fields'):
            # if a namedtuple, render as a dict with named fields rather than a tuple
            return {k: cls._to_dict_value(v, full) for (k, v) in zip(val._fields, val)}
        return super()._to_dict_value(val, full)

    @classmethod
    def _from_dict_value_basic(cls, tp: type, val: Any) -> Any:
        # datetimes are stored as ISO-format strings in JSON, so parse them back
        if issubclass(tp, datetime):
            return tp.fromisoformat(val) if isinstance(val, str) else val
        return super()._from_dict_value_basic(tp, from_dict_value_basic(tp, val))

    @classmethod
    def _from_dict_value(cls, tp: type, val: Any, strict: bool = False) -> Any:
        # customize behavior for JSONSerializable
        origin_type = get_origin(tp)
        if (origin_type is None) and issubclass_safe(tp, tuple) and isinstance(val, dict) and hasattr(tp, '_fields'):  # namedtuple
            try:
                vals = []
                for key in tp._fields:
                    # if NamedTuple's types are annotated, check them
                    valtype = getattr(tp, '__annotations__', {}).get(key)
                    vals.append(val[key] if (valtype is None) else cls._from_dict_value(valtype, val[key], strict=strict))
                return tp(*vals)
            except KeyError as e:
                raise TypeConversionError(tp, val) from e
        if origin_type is dict:  # decode keys to be valid JSON
            # NOTE(review): key conversion does not forward strict — confirm intentional
            (keytype, valtype) = get_args(tp)
            return {cls.json_key_decoder(cls._from_dict_value(keytype, k)): cls._from_dict_value(valtype, v, strict=strict) for (k, v) in val.items()}
        # NOTE(review): strict is not forwarded to the superclass call here — confirm intentional
        return super()._from_dict_value(tp, val)
Dataclass mixin enabling default serialization of dataclass objects to and from JSON.
class JSONSerializable(TextFileSerializable):
    """Mixin class enabling conversion of an object to/from JSON."""

    @classmethod
    def json_encoder(cls) -> Type[JSONEncoder]:
        """Override this method to create a custom `JSONEncoder` to handle specific data types.
        A skeleton for this looks like:

        ```
        class Encoder(JSONEncoder):
            def default(self, obj):
                return json.JSONEncoder.default(self, obj)
        ```
        """
        class Encoder(JSONEncoder):
            def default(self, obj: Any) -> Any:
                # by default, serialize datetimes as ISO-format strings
                if isinstance(obj, datetime):
                    return obj.isoformat()
                return JSONEncoder.default(self, obj)
        return Encoder

    @classmethod
    def json_key_decoder(cls, key: Any) -> Any:
        """Override this method to decode a JSON key, for use with `from_dict`."""
        return key

    @classmethod
    @abstractmethod
    def _to_json_value(cls, obj: Self) -> Any:
        """Converts the object to a value that can be JSON serialized."""

    @classmethod
    def _to_text_file(cls, obj: Self, fp: IO[str], **kwargs: Any) -> None:
        # use the runtime type's conversion hook so subclass overrides are honored
        json_val = type(obj)._to_json_value(obj)
        _dump_value_to_json(json_val, fp, obj.json_encoder(), **kwargs)

    def to_json(self, fp: IOBase, **kwargs: Any) -> None:
        """Writes the object as JSON to a file-like object (text or binary).
        If binary, applies UTF-8 encoding.

        Args:
            fp: A writable file-like object
            kwargs: Keyword arguments"""
        return type(self)._to_file(self, fp, **kwargs)  # type: ignore[arg-type]

    def to_json_string(self, **kwargs: Any) -> str:
        """Converts the object into a JSON string.

        Args:
            kwargs: Keyword arguments

        Returns:
            Object rendered as a JSON string"""
        with StringIO() as stream:
            JSONSerializable._to_text_file(self, stream, **kwargs)
            return stream.getvalue()

    @classmethod
    def _from_binary_file(cls, fp: IO[bytes], **kwargs: Any) -> Self:
        # json.load accepts binary file, so we avoid the string conversion
        return cls._from_text_file(cast(IO[str], fp), **kwargs)

    @classmethod
    def from_json(cls, fp: AnyIO, **kwargs: Any) -> Self:
        """Constructs an object from a JSON file-like object (text or binary).

        Args:
            fp: A readable file-like object
            kwargs: Keyword arguments

        Returns:
            Converted object of this class"""
        return cls._from_file(fp, **kwargs)

    @classmethod
    def from_json_string(cls, s: str, **kwargs: Any) -> Self:
        """Constructs an object from a JSON string.

        Args:
            s: JSON string
            kwargs: Keyword arguments

        Returns:
            Converted object of this class"""
        return cls._from_string(s, **kwargs)
Mixin class enabling conversion of an object to/from JSON.
@classmethod
def json_encoder(cls) -> Type[JSONEncoder]:
    """Override this method to create a custom `JSONEncoder` to handle specific data types.
    A skeleton for this looks like:

    ```
    class Encoder(JSONEncoder):
        def default(self, obj):
            return json.JSONEncoder.default(self, obj)
    ```
    """
    class Encoder(JSONEncoder):
        def default(self, obj: Any) -> Any:
            # serialize datetimes as ISO-format strings; defer everything else to the base encoder
            if isinstance(obj, datetime):
                return obj.isoformat()
            return JSONEncoder.default(self, obj)
    return Encoder
Override this method to create a custom `JSONEncoder` to handle specific data types. A skeleton for this looks like:

    class Encoder(JSONEncoder):
        def default(self, obj):
            return json.JSONEncoder.default(self, obj)
@classmethod
def json_key_decoder(cls, key: Any) -> Any:
    """Override this method to decode a JSON key, for use with `from_dict`."""
    # identity by default; subclasses may convert string keys back into richer types
    return key
Override this method to decode a JSON key, for use with `from_dict`.
def to_json(self, fp: IOBase, **kwargs: Any) -> None:
    """Writes the object as JSON to a file-like object (text or binary).
    If binary, applies UTF-8 encoding.

    Args:
        fp: A writable file-like object
        kwargs: Keyword arguments"""
    # delegate to the generic file-writing machinery of the serialization mixin
    return type(self)._to_file(self, fp, **kwargs)  # type: ignore[arg-type]
Writes the object as JSON to a file-like object (text or binary). If binary, applies UTF-8 encoding.

Args:
    fp: A writable file-like object
    kwargs: Keyword arguments
def to_json_string(self, **kwargs: Any) -> str:
    """Converts the object into a JSON string.

    Args:
        kwargs: Keyword arguments

    Returns:
        Object rendered as a JSON string"""
    # write into an in-memory buffer; getvalue must be read before the buffer closes
    with StringIO() as stream:
        JSONSerializable._to_text_file(self, stream, **kwargs)
        return stream.getvalue()
Converts the object into a JSON string.

Args:
    kwargs: Keyword arguments

Returns:
    Object rendered as a JSON string
@classmethod
def from_json(cls, fp: AnyIO, **kwargs: Any) -> Self:
    """Constructs an object from a JSON file-like object (text or binary).

    Args:
        fp: A readable file-like object
        kwargs: Keyword arguments

    Returns:
        Converted object of this class"""
    # delegate to the generic file-reading machinery of the serialization mixin
    return cls._from_file(fp, **kwargs)
Constructs an object from a JSON file-like object (text or binary).

Args:
    fp: A readable file-like object
    kwargs: Keyword arguments

Returns:
    Converted object of this class
@classmethod
def from_json_string(cls, s: str, **kwargs: Any) -> Self:
    """Constructs an object from a JSON string.

    Args:
        s: JSON string
        kwargs: Keyword arguments

    Returns:
        Converted object of this class"""
    return cls._from_string(s, **kwargs)
Constructs an object from a JSON string.

Args:
    s: JSON string
    kwargs: Keyword arguments

Returns:
    Converted object of this class
class SQLDataclass(DataclassMixin):
    """A dataclass backed by a SQL table using the [sqlalchemy](https://www.sqlalchemy.org) ORM.

    Per-field settings can be passed into the `metadata` argument of each `dataclasses.field`. See [`SQLDataclassFieldSettings`][fancy_dataclass.sql.SQLDataclassFieldSettings] for the full list of settings.

    All dataclass fields will correspond to SQL columns unless their metadata is marked with `sql=False`.

    Each field may also contain a `"column"` entry in its `metadata` dict. This will provide optional keyword arguments to be passed to sqlalchemy's [`Column`](https://docs.sqlalchemy.org/en/20/core/metadata.html#sqlalchemy.schema.Column) constructor.

    Some types are invalid for SQL columns; if such a type occurs, a `TypeError` will be raised."""

    __field_settings_type__ = SQLDataclassFieldSettings
    # sqlalchemy Table associated with this class (populated elsewhere at registration time)
    __table__: ClassVar[Table]

    @classmethod
    def get_columns(cls) -> Dict[str, Column[Any]]:
        """Gets a mapping from the class's field names to sqlalchemy `Column` objects.

        Returns:
            Dict from column names to `Column` objects"""
        cols = {}
        for fld in fields(cls):  # type: ignore[arg-type]
            settings = cls._field_settings(fld).adapt_to(SQLDataclassFieldSettings)
            nullable = False
            if not settings.sql:  # skip fields whose 'sql' setting is False
                continue
            tp: type = cast(type, fld.type)
            origin = get_origin(tp)
            if origin:  # compound type
                if origin is Union:  # use the first type of a Union (also handles Optional)
                    # column should be nullable by default if the type is optional
                    tp_args = get_args(tp)
                    nullable |= (type(None) in tp_args)
                    tp = tp_args[0]
                else:  # some other compound type
                    tp = origin
            if issubclass(tp, SQLDataclass):  # nested SQLDataclass: inline its columns
                cols.update(tp.get_columns())
            else:
                # TODO: making columns non-nullable seems to break things for nested SQLDataclasses
                # column_kwargs = {'nullable' : nullable}
                column_kwargs = {}
                if fld.default is not MISSING:
                    column_kwargs['default'] = fld.default
                elif fld.default_factory is not MISSING:
                    column_kwargs['default'] = fld.default_factory
                # get additional keyword arguments from 'column' section of metadata, if present
                column_kwargs.update(settings.column or {})
                cols[fld.name] = Column(fld.name, get_column_type(tp), **column_kwargs)
        return cols
A dataclass backed by a SQL table using the [sqlalchemy](https://www.sqlalchemy.org) ORM.

Per-field settings can be passed into the `metadata` argument of each `dataclasses.field`. See [`SQLDataclassFieldSettings`][fancy_dataclass.sql.SQLDataclassFieldSettings] for the full list of settings.

All dataclass fields will correspond to SQL columns unless their metadata is marked with `sql=False`.

Each field may also contain a `"column"` entry in its `metadata` dict. This will provide optional keyword arguments to be passed to sqlalchemy's `Column` constructor.

Some types are invalid for SQL columns; if such a type occurs, a `TypeError` will be raised.
@classmethod
def get_columns(cls) -> Dict[str, Column[Any]]:
    """Gets a mapping from the class's field names to sqlalchemy `Column` objects.

    Returns:
        Dict from column names to `Column` objects"""
    cols = {}
    for fld in fields(cls):  # type: ignore[arg-type]
        settings = cls._field_settings(fld).adapt_to(SQLDataclassFieldSettings)
        nullable = False
        if not settings.sql:  # skip fields whose 'sql' setting is False
            continue
        tp: type = cast(type, fld.type)
        origin = get_origin(tp)
        if origin:  # compound type
            if origin is Union:  # use the first type of a Union (also handles Optional)
                # column should be nullable by default if the type is optional
                tp_args = get_args(tp)
                nullable |= (type(None) in tp_args)
                tp = tp_args[0]
            else:  # some other compound type
                tp = origin
        if issubclass(tp, SQLDataclass):  # nested SQLDataclass: inline its columns
            cols.update(tp.get_columns())
        else:
            # TODO: making columns non-nullable seems to break things for nested SQLDataclasses
            # column_kwargs = {'nullable' : nullable}
            column_kwargs = {}
            if fld.default is not MISSING:
                column_kwargs['default'] = fld.default
            elif fld.default_factory is not MISSING:
                column_kwargs['default'] = fld.default_factory
            # get additional keyword arguments from 'column' section of metadata, if present
            column_kwargs.update(settings.column or {})
            cols[fld.name] = Column(fld.name, get_column_type(tp), **column_kwargs)
    return cols
Gets a mapping from the class's field names to sqlalchemy `Column` objects.

Returns:
    Dict from column names to `Column` objects
class SubprocessDataclass(DataclassMixin):
    """Mixin class providing a means of converting dataclass fields to command-line arguments that can be used to make a [subprocess](https://docs.python.org/3/library/subprocess.html) call.

    Per-field settings can be passed into the `metadata` argument of each `dataclasses.field`. See [`SubprocessDataclassFieldSettings`][fancy_dataclass.subprocess.SubprocessDataclassFieldSettings] for the full list of settings."""

    __settings_type__ = SubprocessDataclassSettings
    __settings__ = SubprocessDataclassSettings()
    __field_settings_type__ = SubprocessDataclassFieldSettings

    @classmethod
    def __post_dataclass_wrap__(cls, wrapped_cls: Type[Self]) -> None:
        cls_exec_field = wrapped_cls.__settings__.exec
        # make sure there is at most one exec field
        exec_field = None
        for fld in get_dataclass_fields(wrapped_cls, include_classvars=True):
            fld_settings = cls._field_settings(fld).adapt_to(SubprocessDataclassFieldSettings)
            if fld_settings.exec:
                if cls_exec_field is not None:
                    raise TypeError(f"cannot set field's 'exec' flag to True (class already set executable to {cls_exec_field})")
                if exec_field is not None:
                    raise TypeError(f"cannot have more than one field with 'exec' flag set to True (already set executable to {exec_field})")
                exec_field = fld.name

    def get_arg(self, name: str, suppress_defaults: bool = False) -> List[str]:
        """Gets the command-line arguments for the given dataclass field.

        Args:
            name: Name of dataclass field
            suppress_defaults: If `True`, suppresses arguments that are equal to the default values

        Returns:
            List of command-line args corresponding to the field"""
        fld = self.__dataclass_fields__[name]  # type: ignore[attr-defined]
        settings = self._field_settings(fld).adapt_to(SubprocessDataclassFieldSettings)
        args = settings.args
        args = [args] if isinstance(args, str) else args
        if args == []:  # exclude the argument
            return []
        if settings.exec:  # this field is the executable, so return no arguments
            return []
        if get_origin(fld.type) is ClassVar:
            # ignore fields associated with the class, rather than the instance
            return []
        val = getattr(self, name, None)
        if val is None:  # optional value is None
            return []
        if isinstance(val, SubprocessDataclass):  # get args via nested SubprocessDataclass
            return val.get_args(suppress_defaults=suppress_defaults)
        if suppress_defaults:  # if value matches the default, suppress the argument
            default = None
            has_default = True
            if fld.default == MISSING:
                if fld.default_factory == MISSING:
                    has_default = False
                else:
                    default = fld.default_factory()
            else:
                default = fld.default
            if has_default and (val == default):
                return []
        if args:  # use arg name provided by the metadata
            arg: Optional[str] = args[0]
            if not arg.startswith('-'):  # type: ignore[union-attr]
                # positional argument: no option name, just the value
                arg = None
        else:  # use the field name (assume a single dash if it is a single letter)
            prefix = '-' if (len(name) == 1) else '--'
            arg = prefix + name.replace('_', '-')
        if isinstance(val, bool):
            # make it a boolean flag if True, otherwise omit it
            if not val:
                arg = None
            val = []
        elif isinstance(val, (list, tuple)):
            if val:
                val = [str(x) for x in val]
            else:
                arg = None
        elif val is not None:  # convert the field value to a string
            val = str(val)
        args = [arg] if arg else []
        args += val if isinstance(val, list) else [val]
        return args

    def get_executable(self) -> Optional[str]:
        """Gets the name of an executable to run with the appropriate arguments.

        By default, this obtains the name of the executable as follows:

        1. If the class settings specify an `exec` member, uses that.
        2. Otherwise, returns the value of the first dataclass field whose `exec` metadata flag is set to `True`, and `None` otherwise.

        Returns:
            Name of the executable to run

        Raises:
            ValueError: If the executable is not a string"""
        def _check_type(val: Any) -> str:
            # reject non-string executables with a clear error
            if isinstance(val, str):
                return val
            raise ValueError(f'executable is {val} (must be a string)')
        if self.__settings__.exec:
            return _check_type(self.__settings__.exec)
        for fld in get_dataclass_fields(self, include_classvars=True):
            if fld.metadata.get('exec', False):
                return _check_type(getattr(self, fld.name, None))
        return None

    def get_args(self, suppress_defaults: bool = False) -> List[str]:
        """Converts dataclass fields to a list of command-line arguments for a subprocess call.

        This includes the executable name itself as the first argument.

        Args:
            suppress_defaults: If `True`, suppresses arguments that are equal to the default values

        Returns:
            List of command-line args corresponding to the dataclass fields"""
        executable = self.get_executable()
        if not executable:
            raise ValueError(f'no executable identified for use with {obj_class_name(self)} instance')
        args = [executable]
        for fld in fields(self):  # type: ignore[arg-type]
            args += [arg for arg in self.get_arg(fld.name, suppress_defaults=suppress_defaults) if arg]
        return args

    def run_subprocess(self, **kwargs: Any) -> subprocess.CompletedProcess:  # type: ignore[type-arg]
        """Executes the full subprocess command corresponding to the dataclass parameters.

        Args:
            kwargs: Keyword arguments passed to `subprocess.run`

        Returns:
            `CompletedProcess` object produced by `subprocess.run`

        Raises:
            ValueError: If no executable was found from the `get_executable` method"""
        return subprocess.run(self.get_args(), **kwargs)
Mixin class providing a means of converting dataclass fields to command-line arguments that can be used to make a subprocess call.
Per-field settings can be passed into the `metadata` argument of each `dataclasses.field`. See [`SubprocessDataclassFieldSettings`][fancy_dataclass.subprocess.SubprocessDataclassFieldSettings] for the full list of settings.
def get_arg(self, name: str, suppress_defaults: bool = False) -> List[str]:
    """Gets the command-line arguments for the given dataclass field.

    Args:
        name: Name of dataclass field
        suppress_defaults: If `True`, suppresses arguments that are equal to the default values

    Returns:
        List of command-line args corresponding to the field"""
    fld = self.__dataclass_fields__[name]  # type: ignore[attr-defined]
    settings = self._field_settings(fld).adapt_to(SubprocessDataclassFieldSettings)
    args = settings.args
    args = [args] if isinstance(args, str) else args
    if args == []:  # exclude the argument
        return []
    if settings.exec:  # this field is the executable, so return no arguments
        return []
    if get_origin(fld.type) is ClassVar:
        # ignore fields associated with the class, rather than the instance
        return []
    val = getattr(self, name, None)
    if val is None:  # optional value is None
        return []
    if isinstance(val, SubprocessDataclass):  # get args via nested SubprocessDataclass
        return val.get_args(suppress_defaults=suppress_defaults)
    if suppress_defaults:  # if value matches the default, suppress the argument
        default = None
        has_default = True
        if fld.default == MISSING:
            if fld.default_factory == MISSING:
                has_default = False
            else:
                default = fld.default_factory()
        else:
            default = fld.default
        if has_default and (val == default):
            return []
    if args:  # use arg name provided by the metadata
        arg: Optional[str] = args[0]
        if not arg.startswith('-'):  # type: ignore[union-attr]
            # positional argument: no option name, just the value
            arg = None
    else:  # use the field name (assume a single dash if it is a single letter)
        prefix = '-' if (len(name) == 1) else '--'
        arg = prefix + name.replace('_', '-')
    if isinstance(val, bool):
        # make it a boolean flag if True, otherwise omit it
        if not val:
            arg = None
        val = []
    elif isinstance(val, (list, tuple)):
        if val:
            val = [str(x) for x in val]
        else:
            arg = None
    elif val is not None:  # convert the field value to a string
        val = str(val)
    args = [arg] if arg else []
    args += val if isinstance(val, list) else [val]
    return args
Gets the command-line arguments for the given dataclass field.
Args:
    name: Name of dataclass field
    suppress_defaults: If `True`, suppresses arguments that are equal to the default values

Returns:
    List of command-line args corresponding to the field
def get_executable(self) -> Optional[str]:
    """Gets the name of an executable to run with the appropriate arguments.

    By default, this obtains the name of the executable as follows:

    1. If the class settings specify an `exec` member, uses that.
    2. Otherwise, returns the value of the first dataclass field whose `exec` metadata flag is set to `True`, and `None` otherwise.

    Returns:
        Name of the executable to run

    Raises:
        ValueError: If the executable is not a string"""
    def _check_type(val: Any) -> str:
        # reject non-string executables with a clear error
        if isinstance(val, str):
            return val
        raise ValueError(f'executable is {val} (must be a string)')
    if self.__settings__.exec:
        return _check_type(self.__settings__.exec)
    for fld in get_dataclass_fields(self, include_classvars=True):
        if fld.metadata.get('exec', False):
            return _check_type(getattr(self, fld.name, None))
    return None
Gets the name of an executable to run with the appropriate arguments.
By default, this obtains the name of the executable as follows:
1. If the class settings specify an `exec` member, uses that.
2. Otherwise, returns the value of the first dataclass field whose `exec` metadata flag is set to `True`, and `None` otherwise.
Returns: Name of the executable to run
Raises: ValueError: If the executable is not a string
def get_args(self, suppress_defaults: bool = False) -> List[str]:
    """Converts dataclass fields to a list of command-line arguments for a subprocess call.

    This includes the executable name itself as the first argument.

    Args:
        suppress_defaults: If `True`, suppresses arguments that are equal to the default values

    Returns:
        List of command-line args corresponding to the dataclass fields

    Raises:
        ValueError: If no executable could be identified for this instance"""
    executable = self.get_executable()
    if not executable:
        raise ValueError(f'no executable identified for use with {obj_class_name(self)} instance')
    args = [executable]
    for fld in fields(self):  # type: ignore[arg-type]
        args += [arg for arg in self.get_arg(fld.name, suppress_defaults=suppress_defaults) if arg]
    return args
Converts dataclass fields to a list of command-line arguments for a subprocess call.
This includes the executable name itself as the first argument.
Args:
    suppress_defaults: If `True`, suppresses arguments that are equal to the default values

Returns:
    List of command-line args corresponding to the dataclass fields
def run_subprocess(self, **kwargs: Any) -> subprocess.CompletedProcess:  # type: ignore[type-arg]
    """Executes the full subprocess command corresponding to the dataclass parameters.

    Args:
        kwargs: Keyword arguments passed to `subprocess.run`

    Returns:
        `CompletedProcess` object produced by `subprocess.run`

    Raises:
        ValueError: If no executable was found from the `get_executable` method"""
    # get_args() raises ValueError when no executable is identified
    return subprocess.run(self.get_args(), **kwargs)
Executes the full subprocess command corresponding to the dataclass parameters.
Args:
    kwargs: Keyword arguments passed to `subprocess.run`

Returns:
    `CompletedProcess` object produced by `subprocess.run`

Raises:
    ValueError: If no executable was found from the `get_executable` method
class TOMLDataclass(DictFileSerializableDataclass, TOMLSerializable, suppress_defaults=False, store_type='off'):  # type: ignore[misc]
    """Dataclass mixin enabling default serialization of dataclass objects to and from TOML."""

    __settings_type__ = TOMLDataclassSettings

    @classmethod
    def _dict_to_text_file(cls, d: AnyDict, fp: IO[str], **kwargs: Any) -> None:
        def _get_body(obj: Any) -> Any:
            # tomlkit containers expose their entries via .body (documents) or .value.body (tables)
            return obj.body if hasattr(obj, 'body') else obj.value.body
        def _fix_element(obj: Any) -> Any:
            # recursively convert plain dicts/lists into tomlkit documents/tables,
            # rendering NoneProxy entries as commented-out keys
            if isinstance(obj, dict):
                tbl: Union[tk.toml_document.TOMLDocument, tk.items.Table] = tk.document() if isinstance(obj, tk.toml_document.TOMLDocument) else tk.table()
                container = _get_body(obj)
                for (i, (key, val)) in enumerate(container):
                    if isinstance(val, NoneProxy):  # show key with empty value, commented
                        tbl.add(tk.comment(f'{key} = '))
                    else:
                        tbl.add(key, _fix_element(val))
                    if (i > 0) and isinstance(val, dict) and isinstance(container[i - 1][1], tk.items.Comment):
                        # move newline above comment preceding a table
                        body = _get_body(tbl)
                        comment = body[-2][1]
                        val = body[-1][1]
                        comment.trivia.indent = val.trivia.indent
                        val.trivia.indent = ''
                return tbl
            if isinstance(obj, (tuple, list)):
                return [_fix_element(elt) for elt in obj]
            return obj
        d = _fix_element(d)
        tk.dump(d, fp, **kwargs)

    @classmethod
    def _text_file_to_dict(cls, fp: IO[str], **kwargs: Any) -> AnyDict:
        return tk.load(fp)

    @classmethod
    def _to_dict_value_basic(cls, val: Any) -> Any:
        return to_dict_value_basic(val)

    @classmethod
    def _top_level_comments(cls) -> Optional[List[str]]:
        """Returns a list of top-level comments to place before all of the fields in the TOML output."""
        settings = cls.__settings__.adapt_to(TOMLDataclassSettings)
        comment = None
        if settings.comment is not None:
            comment = settings.comment
        elif settings.doc_as_comment:
            comment = cls.__doc__
        if comment is None:
            return None
        # each line must be its own comment
        return comment.splitlines()

    def _to_dict(self, full: bool) -> AnyDict:
        d = super()._to_dict(full)
        def _is_nested(val: Any) -> bool:
            return isinstance(val, (dict, list, tuple))
        # sort so that scalar entries come before nested tables (stable sort on a bool key)
        d = dict(sorted(d.items(), key=lambda pair: _is_nested(pair[1])))
        doc = tk.document()
        if (comments := self._top_level_comments()) is not None:
            for comment in comments:
                doc.add(tk.comment(comment) if comment.strip() else tk.nl())
            doc.add(tk.nl())
        for (key, val) in d.items():
            if (fld := self.__dataclass_fields__.get(key)):  # type: ignore[attr-defined]
                # TODO: handle None values (comment with empty RHS)
                settings = self._field_settings(fld).adapt_to(DictDataclassFieldSettings)
                if settings.doc is not None:
                    doc.add(tk.comment(str(settings.doc)))
            val = NoneProxy() if (val is None) else val
            if isinstance(val, dict):
                # to preserve comments, must convert value from TOMLDocument to Table
                tbl = tk.table()
                if (len(val) > 1) and any(isinstance(subval, dict) for subval in val.values()):
                    tbl.add(tk.nl())
                pair_iter = val.body if isinstance(val, tk.TOMLDocument) else val.items()
                for pair in pair_iter:
                    tbl.add(*pair)
                val = tbl
            doc.add(key, val)
        return doc

    @classmethod
    def _from_dict_value(cls, tp: type, val: Any, strict: bool = False) -> Any:
        # NoneProxy stands in for TOML's missing null value; map it back to None
        if isinstance(val, NoneProxy):
            return None
        return super()._from_dict_value(tp, val, strict=strict)

    @classmethod
    def _from_dict_value_basic(cls, tp: type, val: Any) -> Any:
        return super()._from_dict_value_basic(tp, from_dict_value_basic(tp, val))

    @classmethod
    def _get_missing_value(cls, fld: Field) -> Any:  # type: ignore[type-arg]
        # replace any missing required fields with a default of None
        return None
Dataclass mixin enabling default serialization of dataclass objects to and from TOML.