tomfoolery.engine
from typing import Any

import ast_comments as ast
import black
import isort
from pathier import Pathier, Pathish

from tomfoolery import utilities

root = Pathier(__file__).parent


class TomFoolery:
    def __init__(self, module: ast.Module | None = None, recursive: bool = True):
        """If no `module` is given, an empty new one will be created.

        When generating a `dataclass` from a dictionary,
        if `recursive` is `True` then values that are also dictionaries will have a `dataclass` generated.

        The annotation for that field in the original `dataclass` will be typed as an instance of the second `dataclass`.

        If `recursive` is `False`, values that are dictionaries will be typed as such.

        i.e.
        from a file named "chonker.toml"
        >>> {
        >>>     "name": "yeehaw",
        >>>     "stats": {
        >>>         "average": 77.54,
        >>>         "max": 94.22,
        >>>         "min": 22.76
        >>>     }
        >>> }

        With recursive == True
        >>> @dataclass
        >>> class Stats:
        >>>     average: float
        >>>     max: float
        >>>     min: float
        >>>
        >>> @dataclass
        >>> class Chonker:
        >>>     name: str
        >>>     stats: Stats

        With recursive == False
        >>> @dataclass
        >>> class Chonker:
        >>>     name: str
        >>>     stats: dict
        """
        self.module: ast.Module = module or ast.Module([], [])
        self.recursive = recursive

    @property
    def class_names(self) -> list[str]:
        """List of class names in `self.module.body`."""
        return [node.name for node in self.module.body if type(node) == ast.ClassDef]

    @property
    def source(self) -> str:
        """Returns the source code this object represents."""
        try:
            return self.format_str(ast.unparse(self.module))
        except Exception as e:
            return ast.unparse(self.module)

    def format_str(self, code: str) -> str:
        """Sort imports and format with `black`."""
        return black.format_str(isort.api.sort_code_string(code), mode=black.Mode())  # type: ignore

    # Seat |===================================== Import Nodes =====================================|

    @property
    def dacite_import_node(self) -> ast.Import:
        return ast.Import([ast.alias("dacite")])

    @property
    def dataclass_import_node(self) -> ast.ImportFrom:
        return ast.ImportFrom(
            "dataclasses", [ast.alias("dataclass"), ast.alias("asdict")], 0
        )

    @property
    def import_nodes(self) -> list[ast.Import | ast.ImportFrom]:
        return [
            self.dacite_import_node,
            self.dataclass_import_node,
            self.pathier_import_node,
            self.typing_extensions_import_node,
        ]

    @property
    def pathier_import_node(self) -> ast.ImportFrom:
        return ast.ImportFrom(
            "pathier", [ast.alias("Pathier"), ast.alias("Pathish")], 0
        )

    @property
    def typing_extensions_import_node(self) -> ast.ImportFrom:
        return ast.ImportFrom("typing_extensions", [ast.alias("Self")])

    # Seat |======================================== Nodes ========================================|

    @property
    def dataclass_node(self) -> ast.Name:
        """A node representing `@dataclass`."""
        return ast.Name("dataclass", ast.Load())

    @property
    def dump_node(self) -> ast.FunctionDef:
        """The dumping function for the generated `dataclass`."""
        dump = self.nodes_from_file(root / "_dump.py")[0]
        return dump if isinstance(dump, ast.FunctionDef) else ast.FunctionDef()

    @property
    def load_node(self) -> ast.FunctionDef:
        """The loading function for the generated `dataclass`."""
        load = self.nodes_from_file(root / "_load.py")[0]
        return load if isinstance(load, ast.FunctionDef) else ast.FunctionDef()

    def add_dataclass(self, dataclass: ast.ClassDef):
        """Add or merge `dataclass` into `self.module.body`."""
        if dataclass.name not in self.class_names:
            self.module.body.append(dataclass)
        else:
            classdex = self.class_index(dataclass.name)
            self.module.body[classdex] = self.merge_dataclasses(self.module.body[classdex], dataclass)  # type: ignore

    def class_index(self, class_name: str) -> int:
        """Return the `self.module.body` index for a class with `class_name`."""
        for i, node in enumerate(self.module.body):
            if isinstance(node, ast.ClassDef) and node.name == class_name:
                return i
        return len(self.module.body)

    def fix_order(self):
        """Reorder `self.module.body` so that definitions preceede instances.

        i.e. A newly added class is defined before another class creates an instance."""
        new_body = []
        for node in self.module.body:
            if isinstance(node, ast.ClassDef):
                placed = False
                for i, new_node in enumerate(new_body):
                    if node.name in ast.unparse(new_node):
                        new_body.insert(i, node)
                        placed = True
                        break
                if not placed:
                    new_body.append(node)
            else:
                new_body.append(node)
        self.module.body = new_body

    def last_annassign_index(self, node: ast.ClassDef) -> int:
        """Return the `node.body` index of the last annotated assignment node.
        Assumes all annotated assignments are sequential and the first elements of `node`.
        """
        for i, child in enumerate(node.body):
            if not isinstance(child, ast.AnnAssign):
                return i - 1
        return len(node.body)

    def merge_dataclasses(
        self, class1: ast.ClassDef, class2: ast.ClassDef
    ) -> ast.ClassDef:
        """Add annotated assignments and functions from `class2` to `class1` and return the result."""
        funcs = [node.name for node in class1.body if isinstance(node, ast.FunctionDef)]
        assigns = [
            node.target.id
            for node in class1.body
            if isinstance(node, ast.AnnAssign) and isinstance(node.target, ast.Name)
        ]
        for node in class2.body:
            if isinstance(node, ast.FunctionDef) and node.name not in funcs:
                class1.body.append(node)
            elif (
                isinstance(node, ast.AnnAssign)
                and isinstance(node.target, ast.Name)
                and (node.target.id not in assigns)
            ):
                class1.body.insert(self.last_annassign_index(class1) + 1, node)
        return class1

    def nodes_from_file(self, file: Pathish) -> list[ast.stmt]:
        """Return ast-parsed module body from `file`."""
        node = ast.parse(Pathier(file).read_text())
        return node.body if isinstance(node, ast.Module) else []

    # Seat |======================================= Builders =======================================|

    def annotated_assignments_from_dict(
        self, data: dict[str, Any]
    ) -> list[ast.AnnAssign]:
        """Return a list of annotated assignment nodes built from `data`.

        If `recursive` is `True` (the default),
        any values in `data` that are themselves a dictionary,
        will have a `dataclass` built and inserted in `self.classes`.

        The field for that value will be annotated as an instance of that secondary `dataclass`.
        """
        assigns = []
        for key, val in data.items():
            if self.recursive and isinstance(val, dict):
                dataclass = self.build_dataclass(key, val)
                self.add_dataclass(dataclass)
                assigns.append(
                    self.build_annotated_assignment(
                        key, utilities.key_to_classname(key), False
                    )
                )
            else:
                assigns.append(self.build_annotated_assignment(key, val))
        return assigns

    def build_annotated_assignment(
        self, name: str, val: Any, evaluate_type: bool = True
    ) -> ast.AnnAssign:
        """Return an annotated assignment node with `name` and an annotation based on the type of `val`.

        If `evaluate_type` is `False`, then `val` will be used directly as the type annotation instead of `type(val).__name__`.
        """
        return ast.AnnAssign(
            ast.Name(name, ast.Store()),
            ast.Name(utilities.build_type(val) if evaluate_type else val, ast.Load()),
            None,
            1,
        )

    def build_dataclass(
        self, name: str, data: dict[str, Any], add_methods: bool = False
    ) -> ast.ClassDef:
        """Build a `dataclass` with `name` from `data` and insert it into `self.classes`.

        If `add_methods` is `True`, `load()` and `dump()` functions will be added to the class.
        """
        class_ = ast.ClassDef(
            utilities.key_to_classname(name),
            [],
            [],
            self.annotated_assignments_from_dict(data),
            [self.dataclass_node],
        )
        if add_methods:
            class_.body.extend([self.load_node, self.dump_node])
        return class_

    # Seat |======================================== Main ========================================|

    def generate(self, name: str, data: dict[str, Any]) -> str:
        """Generate a `dataclass` with `name` from `data` and return the source code.

        Currently, all keys in `data` and any of its nested dictionaries must be valid Python variable names.
        """
        for node in self.import_nodes:
            if node not in self.module.body:
                self.module.body.insert(0, node)
        dataclass = self.build_dataclass(name, data, True)
        self.add_dataclass(dataclass)
        self.fix_order()
        return self.source


def generate_from_file(
    datapath: Pathish, outpath: Pathish | None = None, recursive: bool = True
):
    """Generate a `dataclass` named after the file `datapath` points at.

    If `outpath` is not given, the output file will be the same as `datapath`, but with a `.py` extension.

    Can be any `.toml` or `.json` file where all keys are valid Python variable names.

    If `recursive` is `True`, dictionary values will be converted to dataclasses.
    """

    datapath = Pathier(datapath)
    if outpath:
        outpath = Pathier(outpath)
    else:
        outpath = datapath.with_suffix(".py")
    module = ast.parse(outpath.read_text()) if outpath.exists() else None
    data = datapath.loads()
    fool = TomFoolery(module, recursive)  # type: ignore
    source = fool.generate(datapath.stem, data)
    source = source.replace("filepath", datapath.name)
    try:
        source = fool.format_str(source)
    except Exception as e:
        print("Unable to format output.")
    outpath.write_text(source)
class TomFoolery

def __init__(self, module: ast.Module | None = None, recursive: bool = True)
If no `module` is given, an empty new one will be created.

When generating a `dataclass` from a dictionary, if `recursive` is `True` then values that are also dictionaries will have a `dataclass` generated. The annotation for that field in the original `dataclass` will be typed as an instance of the second `dataclass`.

If `recursive` is `False`, values that are dictionaries will be typed as such.

i.e. from a file named "chonker.toml":

>>> {
>>>     "name": "yeehaw",
>>>     "stats": {
>>>         "average": 77.54,
>>>         "max": 94.22,
>>>         "min": 22.76
>>>     }
>>> }

With recursive == True:

>>> @dataclass
>>> class Stats:
>>>     average: float
>>>     max: float
>>>     min: float
>>>
>>> @dataclass
>>> class Chonker:
>>>     name: str
>>>     stats: Stats

With recursive == False:

>>> @dataclass
>>> class Chonker:
>>>     name: str
>>>     stats: dict
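A minimal usage sketch, assuming the package is importable as `tomfoolery.engine` and the data above has been loaded into a `dict`; the exact formatting of the returned source is delegated to `isort` and `black`:

>>> from tomfoolery.engine import TomFoolery
>>> data = {"name": "yeehaw", "stats": {"average": 77.54, "max": 94.22, "min": 22.76}}
>>> fool = TomFoolery(recursive=True)
>>> source = fool.generate("chonker", data)
>>> print(source)  # imports, then `Stats`, then `Chonker` with `load()`/`dump()` methods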
def format_str(self, code: str) -> str

Sort imports and format with `black`.
def add_dataclass(self, dataclass: ast.ClassDef)

Add or merge `dataclass` into `self.module.body`.
def class_index(self, class_name: str) -> int

Return the `self.module.body` index for a class with `class_name`.
def fix_order(self)

Reorder `self.module.body` so that definitions precede instances, i.e. a newly added class is defined before another class creates an instance of it.
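Roughly, given a hypothetical module where `Chonker` refers to `Stats` but is defined first, the reordering looks like this:

>>> import ast_comments as ast
>>> fool = TomFoolery(ast.parse("class Chonker:\n    stats: Stats\n\nclass Stats:\n    max: float"))
>>> fool.fix_order()
>>> [node.name for node in fool.module.body]
['Stats', 'Chonker']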
def last_annassign_index(self, node: ast.ClassDef) -> int

Return the `node.body` index of the last annotated assignment node.
Assumes all annotated assignments are sequential and are the first elements of `node`.
def merge_dataclasses(self, class1: ast.ClassDef, class2: ast.ClassDef) -> ast.ClassDef

Add annotated assignments and functions from `class2` to `class1` and return the result.
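A sketch of the merge using two hand-parsed versions of the same class (the `weight` field is hypothetical); new annotated assignments land after the existing ones, and functions already on `class1` are kept:

>>> import ast_comments as ast
>>> fool = TomFoolery()
>>> old = ast.parse("@dataclass\nclass Chonker:\n    name: str").body[0]
>>> new = ast.parse("@dataclass\nclass Chonker:\n    name: str\n    weight: float").body[0]
>>> print(ast.unparse(fool.merge_dataclasses(old, new)))
@dataclass
class Chonker:
    name: str
    weight: float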
def nodes_from_file(self, file: Pathish) -> list[ast.stmt]

Return the ast-parsed module body from `file`.
def annotated_assignments_from_dict(self, data: dict[str, Any]) -> list[ast.AnnAssign]

Return a list of annotated assignment nodes built from `data`.

If `recursive` is `True` (the default), any values in `data` that are themselves a dictionary will have a `dataclass` built and added to `self.module.body`. The field for that value will be annotated as an instance of that secondary `dataclass`.
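For example (assuming `utilities.key_to_classname` turns `"stats"` into `Stats` and `utilities.build_type` maps values to their type names):

>>> import ast_comments as ast
>>> fool = TomFoolery()
>>> nodes = fool.annotated_assignments_from_dict({"name": "yeehaw", "stats": {"max": 94.22}})
>>> [ast.unparse(node) for node in nodes]
['name: str', 'stats: Stats']
>>> fool.class_names  # the nested dict was built into its own dataclass
['Stats']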
def build_annotated_assignment(self, name: str, val: Any, evaluate_type: bool = True) -> ast.AnnAssign

Return an annotated assignment node with `name` and an annotation based on the type of `val`.

If `evaluate_type` is `False`, then `val` will be used directly as the type annotation instead of `type(val).__name__`.
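For example, again assuming `utilities.build_type` maps a `float` value to the name `float`:

>>> import ast_comments as ast
>>> fool = TomFoolery()
>>> ast.unparse(fool.build_annotated_assignment("average", 77.54))
'average: float'
>>> ast.unparse(fool.build_annotated_assignment("stats", "Stats", evaluate_type=False))
'stats: Stats'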
def build_dataclass(self, name: str, data: dict[str, Any], add_methods: bool = False) -> ast.ClassDef

Build and return a `dataclass` named after `name` from `data`; any nested dictionaries become their own dataclasses in `self.module.body`.

If `add_methods` is `True`, `load()` and `dump()` functions will be added to the class.
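A sketch of the output, assuming `utilities.key_to_classname("stats")` yields `Stats`:

>>> import ast_comments as ast
>>> fool = TomFoolery()
>>> print(ast.unparse(fool.build_dataclass("stats", {"average": 77.54, "max": 94.22})))
@dataclass
class Stats:
    average: float
    max: float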
def generate(self, name: str, data: dict[str, Any]) -> str

Generate a `dataclass` with `name` from `data` and return the source code.

Currently, all keys in `data` and any of its nested dictionaries must be valid Python variable names.
def generate_from_file(datapath: Pathish, outpath: Pathish | None = None, recursive: bool = True)

Generate a `dataclass` named after the file `datapath` points at.

If `outpath` is not given, the output file will be the same as `datapath`, but with a `.py` extension.

`datapath` can be any `.toml` or `.json` file where all keys are valid Python variable names.

If `recursive` is `True`, dictionary values will be converted to dataclasses.
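A typical invocation might look like the sketch below (`chonker.toml` is the hypothetical file from the class docstring above). Because an existing output file is parsed and handed back to `TomFoolery`, re-running after adding keys to the data file merges the new fields into the classes already in the `.py` file rather than clobbering methods added by hand:

>>> from tomfoolery.engine import generate_from_file
>>> generate_from_file("chonker.toml")  # writes `Stats` and `Chonker` dataclasses to chonker.py
>>> generate_from_file("chonker.toml", "models.py", recursive=False)  # hypothetical explicit output path; `stats` stays a dict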