Coverage for src/hdmf/spec/spec.py: 93%

810 statements  

coverage.py v7.2.5, created at 2023-08-18 20:49 +0000

1import re 

2from abc import ABCMeta 

3from collections import OrderedDict 

4from copy import deepcopy 

5from warnings import warn 

6 

7from ..utils import docval, getargs, popargs, get_docval 

8 

9NAME_WILDCARD = None # this is no longer used, but kept for backward compatibility 

10ZERO_OR_ONE = '?' 

11ZERO_OR_MANY = '*' 

12ONE_OR_MANY = '+' 

13DEF_QUANTITY = 1 

14FLAGS = { 

15 'zero_or_one': ZERO_OR_ONE, 

16 'zero_or_many': ZERO_OR_MANY, 

17 'one_or_many': ONE_OR_MANY 

18} 

19 

20 

21class DtypeHelper: 

22 # Dict where the keys are the primary data types and the values are lists of strings with synonyms for the dtype 

23 # make sure keys are consistent between hdmf.spec.spec.DtypeHelper.primary_dtype_synonyms, 

24 # hdmf.build.objectmapper.ObjectMapper.__dtypes, hdmf.build.manager.TypeMap._spec_dtype_map, 

25 # hdmf.validate.validator.__allowable, and backend dtype maps 

26 # see https://hdmf-schema-language.readthedocs.io/en/latest/description.html#dtype 

27 primary_dtype_synonyms = { 

28 'float': ["float", "float32"], 

29 'double': ["double", "float64"], 

30 'short': ["int16", "short"], 

31 'int': ["int32", "int"], 

32 'long': ["int64", "long"], 

33 'utf': ["text", "utf", "utf8", "utf-8"], 

34 'ascii': ["ascii", "bytes"], 

35 'bool': ["bool"], 

36 'int8': ["int8"], 

37 'uint8': ["uint8"], 

38 'uint16': ["uint16"], 

39 'uint32': ["uint32", "uint"], 

40 'uint64': ["uint64"], 

41 'object': ['object'], 

42 'region': ['region'], 

43 'numeric': ['numeric'], 

44 'isodatetime': ["isodatetime", "datetime", "date"] 

45 } 

46 

47 # List of recommended primary dtype strings. These are the keys of primary_dtype_synonyms 

48 recommended_primary_dtypes = list(primary_dtype_synonyms.keys()) 

49 

50 # List of valid primary data type strings 

51 valid_primary_dtypes = set(list(primary_dtype_synonyms.keys()) + 

52 [vi for v in primary_dtype_synonyms.values() for vi in v]) 

53 

54 @staticmethod 

55 def simplify_cpd_type(cpd_type): 

56 ''' 

57 Transform a list of DtypeSpecs into a list of strings. 

58 Use for simple representation of compound type and 

59 validation. 

60 

61 :param cpd_type: The list of DtypeSpecs to simplify 

62 :type cpd_type: list 

63 

64 ''' 

65 ret = list() 

66 for exp in cpd_type: 

67 exp_key = exp.dtype 

68 if isinstance(exp_key, RefSpec): 

69 exp_key = exp_key.reftype 

70 ret.append(exp_key) 

71 return ret 

72 

73 @staticmethod 

74 def check_dtype(dtype): 

75 """Check that the dtype string is a reference or a valid primary dtype.""" 

76 if not isinstance(dtype, RefSpec) and dtype not in DtypeHelper.valid_primary_dtypes: 

77 raise ValueError("dtype '%s' is not a valid primary data type. Allowed dtypes: %s" 

78 % (dtype, str(DtypeHelper.valid_primary_dtypes))) 

79 return dtype 

80 

81 
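A minimal usage sketch of DtypeHelper, based on the definitions above (the dtype strings are just examples):

    from hdmf.spec.spec import DtypeHelper

    DtypeHelper.check_dtype('float64')             # returns 'float64' (a synonym of 'double')
    assert 'int32' in DtypeHelper.valid_primary_dtypes
    # DtypeHelper.check_dtype('float128')          # would raise ValueError: not a valid primary dtype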

82class ConstructableDict(dict, metaclass=ABCMeta): 

83 @classmethod 

84 def build_const_args(cls, spec_dict): 

85 ''' Build constructor arguments for this ConstructableDict class from a dictionary ''' 

86 # main use cases are when spec_dict is a ConstructableDict or a spec dict read from a file 

87 return deepcopy(spec_dict) 

88 

89 @classmethod 

90 def build_spec(cls, spec_dict): 

91 ''' Build a Spec object from the given Spec dict ''' 

92 # main use cases are when spec_dict is a ConstructableDict or a spec dict read from a file 

93 vargs = cls.build_const_args(spec_dict) 

94 kwargs = dict() 

95 # iterate through the Spec docval and construct kwargs based on matching values in spec_dict 

96 for x in get_docval(cls.__init__): 

97 if x['name'] in vargs: 

98 kwargs[x['name']] = vargs.get(x['name']) 

99 return cls(**kwargs) 

100 

101 
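A small sketch of how build_spec is typically used, here with the RefSpec subclass defined later in this module ('MyTable' is a made-up type name): the spec dict is deep-copied and only the keys that match the class's docval arguments are passed to the constructor.

    from hdmf.spec.spec import RefSpec

    ref = RefSpec.build_spec({'target_type': 'MyTable', 'reftype': 'object'})
    assert ref.target_type == 'MyTable' and ref.reftype == 'object'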

102class Spec(ConstructableDict): 

103 ''' A base specification class 

104 ''' 

105 

106 @docval({'name': 'doc', 'type': str, 'doc': 'a description about what this specification represents'}, 

107 {'name': 'name', 'type': str, 'doc': 'The name of this attribute', 'default': None}, 

108 {'name': 'required', 'type': bool, 'doc': 'whether or not this attribute is required', 'default': True}, 

109 {'name': 'parent', 'type': 'Spec', 'doc': 'the parent of this spec', 'default': None}) 

110 def __init__(self, **kwargs): 

111 name, doc, required, parent = getargs('name', 'doc', 'required', 'parent', kwargs) 

112 super().__init__() 

113 self['doc'] = doc 

114 if name is not None: 

115 self['name'] = name 

116 if not required: 

117 self['required'] = required 

118 self._parent = parent 

119 

120 @property 

121 def doc(self): 

122 ''' Documentation on what this Spec is specifying ''' 

123 return self.get('doc', None) 

124 

125 @property 

126 def name(self): 

127 ''' The name of the object being specified ''' 

128 return self.get('name', None) 

129 

130 @property 

131 def parent(self): 

132 ''' The parent specification of this specification ''' 

133 return self._parent 

134 

135 @parent.setter 

136 def parent(self, spec): 

137 ''' Set the parent of this specification ''' 

138 if self._parent is not None: 

139 raise AttributeError('Cannot re-assign parent.') 

140 self._parent = spec 

141 

142 @classmethod 

143 def build_const_args(cls, spec_dict): 

144 ''' Build constructor arguments for this Spec class from a dictionary ''' 

145 ret = super().build_const_args(spec_dict) 

146 return ret 

147 

148 def __hash__(self): 

149 return id(self) 

150 

151 @property 

152 def path(self): 

153 stack = list() 

154 tmp = self 

155 while tmp is not None: 

156 name = tmp.name 

157 if name is None: 

158 name = tmp.data_type_def 

159 if name is None: 

160 name = tmp.data_type_inc 

161 stack.append(name) 

162 tmp = tmp.parent 

163 return "/".join(reversed(stack)) 

164 

165 

166# def __eq__(self, other): 

167# return id(self) == id(other) 

168 

169 
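A sketch of the path property, using the GroupSpec and DatasetSpec subclasses defined later in this module ('MyContainer' and 'data' are illustrative names): path walks up the parent chain, using name when present and falling back to data_type_def or data_type_inc for unnamed specs.

    from hdmf.spec.spec import DatasetSpec, GroupSpec

    inner = DatasetSpec(doc='An example dataset', name='data', dtype='int')
    outer = GroupSpec(doc='An example container type', data_type_def='MyContainer', datasets=[inner])
    assert inner.path == 'MyContainer/data'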

170_target_type_key = 'target_type' 

171 

172_ref_args = [ 

173 {'name': _target_type_key, 'type': str, 'doc': 'the target type GroupSpec or DatasetSpec'}, 

174 {'name': 'reftype', 'type': str, 'doc': 'the type of reference this is, i.e., region or object'}, 

175] 

176 

177 

178class RefSpec(ConstructableDict): 

179 __allowable_types = ('object', 'region') 

180 

181 @docval(*_ref_args) 

182 def __init__(self, **kwargs): 

183 target_type, reftype = getargs(_target_type_key, 'reftype', kwargs) 

184 self[_target_type_key] = target_type 

185 if reftype not in self.__allowable_types: 

186 msg = "reftype must be one of the following: %s" % ", ".join(self.__allowable_types) 

187 raise ValueError(msg) 

188 self['reftype'] = reftype 

189 

190 @property 

191 def target_type(self): 

192 '''The data_type of the target of the reference''' 

193 return self[_target_type_key] 

194 

195 @property 

196 def reftype(self): 

197 '''The type of reference''' 

198 return self['reftype'] 

199 

200 @docval(rtype=bool, returns='True if this RefSpec specifies a region reference, False otherwise') 

201 def is_region(self): 

202 return self['reftype'] == 'region' 

203 

204 
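A short sketch of RefSpec ('MyTable' is an illustrative target type name):

    from hdmf.spec.spec import RefSpec

    ref = RefSpec(target_type='MyTable', reftype='object')
    assert ref.is_region() is False
    # RefSpec(target_type='MyTable', reftype='link')  # would raise ValueError: only 'object' and 'region' are allowed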

205_attr_args = [ 

206 {'name': 'name', 'type': str, 'doc': 'The name of this attribute'}, 

207 {'name': 'doc', 'type': str, 'doc': 'a description about what this specification represents'}, 

208 {'name': 'dtype', 'type': (str, RefSpec), 'doc': 'The data type of this attribute'}, 

209 {'name': 'shape', 'type': (list, tuple), 'doc': 'the shape of this attribute', 'default': None}, 

210 {'name': 'dims', 'type': (list, tuple), 'doc': 'the dimensions of this attribute', 'default': None}, 

211 {'name': 'required', 'type': bool, 

212 'doc': 'whether or not this attribute is required. ignored when "value" is specified', 'default': True}, 

213 {'name': 'parent', 'type': 'BaseStorageSpec', 'doc': 'the parent of this spec', 'default': None}, 

214 {'name': 'value', 'type': None, 'doc': 'a constant value for this attribute', 'default': None}, 

215 {'name': 'default_value', 'type': None, 'doc': 'a default value for this attribute', 'default': None} 

216] 

217 

218 

219class AttributeSpec(Spec): 

220 ''' Specification for attributes 

221 ''' 

222 

223 @docval(*_attr_args) 

224 def __init__(self, **kwargs): 

225 name, dtype, doc, dims, shape, required, parent, value, default_value = getargs( 

226 'name', 'dtype', 'doc', 'dims', 'shape', 'required', 'parent', 'value', 'default_value', kwargs) 

227 super().__init__(doc, name=name, required=required, parent=parent) 

228 self['dtype'] = DtypeHelper.check_dtype(dtype) 

229 if value is not None: 

230 self.pop('required', None) 

231 self['value'] = value 

232 if default_value is not None: 

233 if value is not None: 

234 raise ValueError("cannot specify 'value' and 'default_value'") 

235 self['default_value'] = default_value 

236 self['required'] = False 

237 if shape is not None: 

238 self['shape'] = shape 

239 if dims is not None: 

240 self['dims'] = dims 

241 if 'shape' not in self: 

242 self['shape'] = tuple([None] * len(dims)) 

243 if self.shape is not None and self.dims is not None: 

244 if len(self['dims']) != len(self['shape']): 

245 raise ValueError("'dims' and 'shape' must be the same length") 

246 

247 @property 

248 def dtype(self): 

249 ''' The data type of the attribute ''' 

250 return self.get('dtype', None) 

251 

252 @property 

253 def value(self): 

254 ''' The constant value of the attribute. "None" if this attribute is not constant ''' 

255 return self.get('value', None) 

256 

257 @property 

258 def default_value(self): 

259 ''' The default value of the attribute. "None" if this attribute has no default value ''' 

260 return self.get('default_value', None) 

261 

262 @property 

263 def required(self): 

264 ''' True if this attribute is required, False otherwise. ''' 

265 return self.get('required', True) 

266 

267 @property 

268 def dims(self): 

269 ''' The dimensions of this attribute's value ''' 

270 return self.get('dims', None) 

271 

272 @property 

273 def shape(self): 

274 ''' The shape of this attribute's value ''' 

275 return self.get('shape', None) 

276 

277 @classmethod 

278 def build_const_args(cls, spec_dict): 

279 ''' Build constructor arguments for this Spec class from a dictionary ''' 

280 ret = super().build_const_args(spec_dict) 

281 if isinstance(ret['dtype'], dict): 

282 ret['dtype'] = RefSpec.build_spec(ret['dtype']) 

283 return ret 

284 

285 
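A sketch of AttributeSpec construction (attribute names and docs are illustrative); note that passing default_value also marks the attribute as not required:

    from hdmf.spec.spec import AttributeSpec

    unit = AttributeSpec(name='unit', doc='The unit of measurement', dtype='text')
    conv = AttributeSpec(name='conversion', doc='A scale factor', dtype='float32', default_value=1.0)
    assert unit.required and not conv.required
    assert conv.default_value == 1.0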

286_attrbl_args = [ 

287 {'name': 'doc', 'type': str, 'doc': 'a description about what this specification represents'}, 

288 {'name': 'name', 'type': str, 

289 'doc': 'the name of this base storage container, allowed only if quantity is not \'%s\' or \'%s\'' 

290 % (ONE_OR_MANY, ZERO_OR_MANY), 'default': None}, 

291 {'name': 'default_name', 'type': str, 

292 'doc': 'The default name of this base storage container, used only if name is None', 'default': None}, 

293 {'name': 'attributes', 'type': list, 'doc': 'the attributes on this group', 'default': list()}, 

294 {'name': 'linkable', 'type': bool, 'doc': 'whether or not this group can be linked', 'default': True}, 

295 {'name': 'quantity', 'type': (str, int), 'doc': 'the required number of allowed instances', 'default': 1}, 

296 {'name': 'data_type_def', 'type': str, 'doc': 'the data type this specification represents', 'default': None}, 

297 {'name': 'data_type_inc', 'type': (str, 'BaseStorageSpec'), 

298 'doc': 'the data type this specification extends', 'default': None}, 

299] 

300 

301 

302class BaseStorageSpec(Spec): 

303 ''' A specification for any object that can hold attributes. ''' 

304 

305 __inc_key = 'data_type_inc' 

306 __def_key = 'data_type_def' 

307 __type_key = 'data_type' 

308 __id_key = 'object_id' 

309 

310 @docval(*_attrbl_args) 

311 def __init__(self, **kwargs): 

312 name, doc, quantity, attributes, linkable, data_type_def, data_type_inc = \ 

313 getargs('name', 'doc', 'quantity', 'attributes', 'linkable', 'data_type_def', 'data_type_inc', kwargs) 

314 if name is None and data_type_def is None and data_type_inc is None: 

315 raise ValueError("Cannot create Group or Dataset spec with no name " 

316 "without specifying '%s' and/or '%s'." % (self.def_key(), self.inc_key())) 

317 super().__init__(doc, name=name) 

318 default_name = getargs('default_name', kwargs) 

319 if default_name: 

320 if name is not None: 

321 warn("found 'default_name' with 'name' - ignoring 'default_name'") 

322 else: 

323 self['default_name'] = default_name 

324 self.__attributes = dict() 

325 if quantity in (ONE_OR_MANY, ZERO_OR_MANY): 

326 if name is not None: 

327 raise ValueError("Cannot give specific name to something that can " 

328 "exist multiple times: name='%s', quantity='%s'" % (name, quantity)) 

329 if quantity != DEF_QUANTITY: 

330 self['quantity'] = quantity 

331 if not linkable: 

332 self['linkable'] = False 

333 resolve = False 

334 if data_type_inc is not None: 

335 if isinstance(data_type_inc, BaseStorageSpec): 

336 self[self.inc_key()] = data_type_inc.data_type_def 

337 else: 

338 self[self.inc_key()] = data_type_inc 

339 if data_type_def is not None: 

340 self.pop('required', None) 

341 self[self.def_key()] = data_type_def 

342 # resolve inherited and overridden fields only if data_type_inc is a spec 

343 # NOTE: this does not happen when loading specs from a file 

344 if data_type_inc is not None and isinstance(data_type_inc, BaseStorageSpec): 

345 resolve = True 

346 

347 # self.attributes / self['attributes']: tuple/list of attributes 

348 # self.__attributes: dict of all attributes, including attributes from parent (data_type_inc) types 

349 # self.__new_attributes: set of attribute names that do not exist in the parent type 

350 # self.__overridden_attributes: set of attribute names that exist in this spec and the parent type 

351 # self.__new_attributes and self.__overridden_attributes are only set properly if resolve = True 

352 # add all attributes described in this spec 

353 for attribute in attributes: 

354 self.set_attribute(attribute) 

355 self.__new_attributes = set(self.__attributes.keys()) 

356 self.__overridden_attributes = set() 

357 self.__resolved = False 

358 if resolve: 

359 self.resolve_spec(data_type_inc) 

360 

361 @property 

362 def default_name(self): 

363 '''The default name for this spec''' 

364 return self.get('default_name', None) 

365 

366 @property 

367 def resolved(self): 

368 return self.__resolved 

369 

370 @property 

371 def required(self): 

372 ''' Whether or not this spec represents a required field ''' 

373 return self.quantity not in (ZERO_OR_ONE, ZERO_OR_MANY) 

374 

375 @docval({'name': 'inc_spec', 'type': 'BaseStorageSpec', 'doc': 'the spec of the data type this specification extends'}) 

376 def resolve_spec(self, **kwargs): 

377 """Add attributes from the inc_spec to this spec and track which attributes are new and overridden.""" 

378 inc_spec = getargs('inc_spec', kwargs) 

379 for attribute in inc_spec.attributes: 

380 self.__new_attributes.discard(attribute.name) 

381 if attribute.name in self.__attributes: 

382 self.__overridden_attributes.add(attribute.name) 

383 else: 

384 self.set_attribute(attribute) 

385 self.__resolved = True 

386 

387 @docval({'name': 'spec', 'type': (Spec, str), 'doc': 'the specification to check'}) 

388 def is_inherited_spec(self, **kwargs): 

389 ''' 

390 Return True if this spec was inherited from the parent type, False otherwise. 

391 

392 Returns False if the spec is not found. 

393 ''' 

394 spec = getargs('spec', kwargs) 

395 if isinstance(spec, Spec): 

396 spec = spec.name 

397 if spec in self.__attributes: 

398 return self.is_inherited_attribute(spec) 

399 return False 

400 

401 @docval({'name': 'spec', 'type': (Spec, str), 'doc': 'the specification to check'}) 

402 def is_overridden_spec(self, **kwargs): 

403 ''' 

404 Return True if this spec overrides a specification from the parent type, False otherwise. 

405 

406 Returns False if the spec is not found. 

407 ''' 

408 spec = getargs('spec', kwargs) 

409 if isinstance(spec, Spec): 

410 spec = spec.name 

411 if spec in self.__attributes: 

412 return self.is_overridden_attribute(spec) 

413 return False 

414 

415 @docval({'name': 'name', 'type': str, 'doc': 'the name of the attribute to check'}) 

416 def is_inherited_attribute(self, **kwargs): 

417 ''' 

418 Return True if the attribute was inherited from the parent type, False otherwise. 

419 

420 Raises a ValueError if the spec is not found. 

421 ''' 

422 name = getargs('name', kwargs) 

423 if name not in self.__attributes: 

424 raise ValueError("Attribute '%s' not found" % name) 

425 return name not in self.__new_attributes 

426 

427 @docval({'name': 'name', 'type': str, 'doc': 'the name of the attribute to check'}) 

428 def is_overridden_attribute(self, **kwargs): 

429 ''' 

430 Return True if the given attribute overrides the specification from the parent, False otherwise. 

431 

432 Raises a ValueError if the spec is not found. 

433 ''' 

434 name = getargs('name', kwargs) 

435 if name not in self.__attributes: 

436 raise ValueError("Attribute '%s' not found" % name) 

437 return name in self.__overridden_attributes 

438 

439 def is_many(self): 

440 return self.quantity not in (1, ZERO_OR_ONE) 

441 

442 @classmethod 

443 def get_data_type_spec(cls, data_type_def): # unused 

444 return AttributeSpec(cls.type_key(), 'the data type of this object', 'text', value=data_type_def) 

445 

446 @classmethod 

447 def get_namespace_spec(cls): # unused 

448 return AttributeSpec('namespace', 'the namespace for the data type of this object', 'text', required=False) 

449 

450 @property 

451 def attributes(self): 

452 ''' Tuple of attribute specifications for this specification ''' 

453 return tuple(self.get('attributes', tuple())) 

454 

455 @property 

456 def linkable(self): 

457 ''' True if object can be a link, False otherwise ''' 

458 return self.get('linkable', True) 

459 

460 @classmethod 

461 def id_key(cls): 

462 ''' Get the key used to store data ID on an instance 

463 

464 Override this method to use a different name for 'object_id' 

465 ''' 

466 return cls.__id_key 

467 

468 @classmethod 

469 def type_key(cls): 

470 ''' Get the key used to store data type on an instance 

471 

472 Override this method to use a different name for 'data_type'. HDMF supports combining schema 

473 that uses 'data_type' and at most one different name for 'data_type'. 

474 ''' 

475 return cls.__type_key 

476 

477 @classmethod 

478 def inc_key(cls): 

479 ''' Get the key used to define a data_type include. 

480 

481 Override this method to use a different keyword for 'data_type_inc'. HDMF supports combining schema 

482 that uses 'data_type_inc' and at most one different name for 'data_type_inc'. 

483 ''' 

484 return cls.__inc_key 

485 

486 @classmethod 

487 def def_key(cls): 

488 ''' Get the key used to define a data_type definition. 

489 

490 Override this method to use a different keyword for 'data_type_def' HDMF supports combining schema 

491 that uses 'data_type_def' and at most one different name for 'data_type_def'. 

492 ''' 

493 return cls.__def_key 

494 

495 @property 

496 def data_type_inc(self): 

497 ''' The data type this specification inherits ''' 

498 return self.get(self.inc_key()) 

499 

500 @property 

501 def data_type_def(self): 

502 ''' The data type this specification defines ''' 

503 return self.get(self.def_key(), None) 

504 

505 @property 

506 def data_type(self): 

507 ''' The data type of this specification ''' 

508 return self.data_type_def or self.data_type_inc 

509 

510 @property 

511 def quantity(self): 

512 ''' The number of times the object being specified should be present ''' 

513 return self.get('quantity', DEF_QUANTITY) 

514 

515 @docval(*_attr_args) 

516 def add_attribute(self, **kwargs): 

517 ''' Add an attribute to this specification ''' 

518 spec = AttributeSpec(**kwargs) 

519 self.set_attribute(spec) 

520 return spec 

521 

522 @docval({'name': 'spec', 'type': AttributeSpec, 'doc': 'the specification for the attribute to add'}) 

523 def set_attribute(self, **kwargs): 

524 ''' Set an attribute on this specification ''' 

525 spec = kwargs.get('spec') 

526 attributes = self.setdefault('attributes', list()) 

527 if spec.parent is not None: 

528 spec = AttributeSpec.build_spec(spec) 

529 # if attribute name already exists in self.__attributes, 

530 # 1. find the attribute in self['attributes'] list and replace it with the given spec 

531 # 2. replace the value for the name key in the self.__attributes dict 

532 # otherwise, add the attribute spec to the self['attributes'] list and self.__attributes dict 

533 # the values of self['attributes'] and self.__attributes should always be the same 

534 # the former enables the spec to act like a dict with the 'attributes' key and 

535 # the latter is useful for name-based access of attributes 

536 if spec.name in self.__attributes: 

537 idx = -1 

538 for i, attribute in enumerate(attributes): # pragma: no cover (execution should break) 

539 if attribute.name == spec.name: 

540 idx = i 

541 break 

542 if idx >= 0: 

543 attributes[idx] = spec 

544 else: # pragma: no cover 

545 raise ValueError('%s in __attributes but not in spec record' % spec.name) 

546 else: 

547 attributes.append(spec) 

548 self.__attributes[spec.name] = spec 

549 spec.parent = self 

550 

551 @docval({'name': 'name', 'type': str, 'doc': 'the name of the attribute to get the Spec for'}) 

552 def get_attribute(self, **kwargs): 

553 ''' Get an attribute on this specification ''' 

554 name = getargs('name', kwargs) 

555 return self.__attributes.get(name) 

556 

557 @classmethod 

558 def build_const_args(cls, spec_dict): 

559 ''' Build constructor arguments for this Spec class from a dictionary ''' 

560 ret = super().build_const_args(spec_dict) 

561 if 'attributes' in ret: 

562 ret['attributes'] = [AttributeSpec.build_spec(sub_spec) for sub_spec in ret['attributes']] 

563 return ret 

564 

565 
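A sketch of the attribute bookkeeping in BaseStorageSpec. In practice the GroupSpec and DatasetSpec subclasses defined below are used, but the inheritance tracking shown here lives in this class; 'MyType' and 'ExtType' are illustrative type names. Because data_type_inc is passed as a spec object rather than a string, resolve_spec runs and the inherited attribute is tracked:

    from hdmf.spec.spec import AttributeSpec, BaseStorageSpec

    base = BaseStorageSpec(doc='A base type', data_type_def='MyType',
                           attributes=[AttributeSpec(name='help', doc='A help string', dtype='text')])
    ext = BaseStorageSpec(doc='An extension of MyType', data_type_def='ExtType', data_type_inc=base)
    assert ext.resolved
    assert ext.is_inherited_attribute('help')
    assert not ext.is_overridden_attribute('help')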

566_dt_args = [ 

567 {'name': 'name', 'type': str, 'doc': 'the name of this column'}, 

568 {'name': 'doc', 'type': str, 'doc': 'a description about what this data type is'}, 

569 {'name': 'dtype', 'type': (str, list, RefSpec), 'doc': 'the data type of this column'}, 

570] 

571 

572 

573class DtypeSpec(ConstructableDict): 

574 '''A class for specifying a component of a compound type''' 

575 

576 @docval(*_dt_args) 

577 def __init__(self, **kwargs): 

578 doc, name, dtype = getargs('doc', 'name', 'dtype', kwargs) 

579 self['doc'] = doc 

580 self['name'] = name 

581 self.check_valid_dtype(dtype) 

582 self['dtype'] = dtype 

583 

584 @property 

585 def doc(self): 

586 '''Documentation about this component''' 

587 return self['doc'] 

588 

589 @property 

590 def name(self): 

591 '''The name of this component''' 

592 return self['name'] 

593 

594 @property 

595 def dtype(self): 

596 ''' The data type of this component''' 

597 return self['dtype'] 

598 

599 @staticmethod 

600 def assertValidDtype(dtype): 

601 # Calls check_valid_dtype. This method is maintained for backwards compatibility 

602 return DtypeSpec.check_valid_dtype(dtype) 

603 

604 @staticmethod 

605 def check_valid_dtype(dtype): 

606 if isinstance(dtype, dict): 

607 if _target_type_key not in dtype: 

608 msg = "'dtype' must have the key '%s'" % _target_type_key 

609 raise ValueError(msg) 

610 else: 

611 DtypeHelper.check_dtype(dtype) 

612 return True 

613 

614 @staticmethod 

615 @docval({'name': 'spec', 'type': (str, dict), 'doc': 'the spec object to check'}, is_method=False) 

616 def is_ref(**kwargs): 

617 spec = getargs('spec', kwargs) 

618 spec_is_ref = False 

619 if isinstance(spec, dict):    [619 ↛ 624: the condition on line 619 was never false]

620 if _target_type_key in spec:    [620 ↛ 621: the condition on line 620 was never true]

621 spec_is_ref = True 

622 elif 'dtype' in spec and isinstance(spec['dtype'], dict) and _target_type_key in spec['dtype']: 

623 spec_is_ref = True 

624 return spec_is_ref 

625 

626 @classmethod 

627 def build_const_args(cls, spec_dict): 

628 ''' Build constructor arguments for this Spec class from a dictionary ''' 

629 ret = super().build_const_args(spec_dict) 

630 if isinstance(ret['dtype'], list):    [630 ↛ 631: the condition on line 630 was never true]

631 ret['dtype'] = list(map(cls.build_const_args, ret['dtype'])) 

632 elif isinstance(ret['dtype'], dict): 

633 ret['dtype'] = RefSpec.build_spec(ret['dtype']) 

634 return ret 

635 

636 
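A sketch of compound-type components and the is_ref helper ('MyTable' is an illustrative target type):

    from hdmf.spec.spec import DtypeSpec, RefSpec

    col1 = DtypeSpec(name='x', doc='x position', dtype='float')
    col2 = DtypeSpec(name='ref', doc='a reference column', dtype=RefSpec(target_type='MyTable', reftype='object'))
    assert DtypeSpec.is_ref(col2)
    assert not DtypeSpec.is_ref(col1)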

637_dataset_args = [ 

638 {'name': 'doc', 'type': str, 'doc': 'a description about what this specification represents'}, 

639 {'name': 'dtype', 'type': (str, list, RefSpec), 

640 'doc': 'The data type of this attribute. Use a list of DtypeSpecs to specify a compound data type.', 

641 'default': None}, 

642 {'name': 'name', 'type': str, 'doc': 'The name of this dataset', 'default': None}, 

643 {'name': 'default_name', 'type': str, 'doc': 'The default name of this dataset', 'default': None}, 

644 {'name': 'shape', 'type': (list, tuple), 'doc': 'the shape of this dataset', 'default': None}, 

645 {'name': 'dims', 'type': (list, tuple), 'doc': 'the dimensions of this dataset', 'default': None}, 

646 {'name': 'attributes', 'type': list, 'doc': 'the attributes on this dataset', 'default': list()}, 

647 {'name': 'linkable', 'type': bool, 'doc': 'whether or not this dataset can be linked', 'default': True}, 

648 {'name': 'quantity', 'type': (str, int), 'doc': 'the required number of allowed instances', 'default': 1}, 

649 {'name': 'default_value', 'type': None, 'doc': 'a default value for this dataset', 'default': None}, 

650 {'name': 'data_type_def', 'type': str, 'doc': 'the data type this specification represents', 'default': None}, 

651 {'name': 'data_type_inc', 'type': (str, 'DatasetSpec'), 

652 'doc': 'the data type this specification extends', 'default': None}, 

653] 

654 

655 

656class DatasetSpec(BaseStorageSpec): 

657 ''' Specification for datasets 

658 

659 To specify a table-like dataset, i.e., a compound data type, use a list of DtypeSpecs for the dtype. 

660 ''' 

661 

662 @docval(*_dataset_args) 

663 def __init__(self, **kwargs): 

664 doc, shape, dims, dtype, default_value = popargs('doc', 'shape', 'dims', 'dtype', 'default_value', kwargs) 

665 if shape is not None: 

666 self['shape'] = shape 

667 if dims is not None: 

668 self['dims'] = dims 

669 if 'shape' not in self: 

670 self['shape'] = tuple([None] * len(dims)) 

671 if self.shape is not None and self.dims is not None: 

672 if len(self['dims']) != len(self['shape']):    [672 ↛ 673: the condition on line 672 was never true]

673 raise ValueError("'dims' and 'shape' must be the same length") 

674 if dtype is not None: 

675 if isinstance(dtype, list): # Dtype is a compound data type 

676 for _i, col in enumerate(dtype): 

677 if not isinstance(col, DtypeSpec): 

678 msg = ('must use DtypeSpec if defining compound dtype - found %s at element %d' 

679 % (type(col), _i)) 

680 raise ValueError(msg) 

681 else: 

682 DtypeHelper.check_dtype(dtype) 

683 self['dtype'] = dtype 

684 super().__init__(doc, **kwargs) 

685 if default_value is not None: 

686 self['default_value'] = default_value 

687 if self.name is not None: 

688 valid_quant_vals = [1, 'zero_or_one', ZERO_OR_ONE] 

689 if self.quantity not in valid_quant_vals: 

690 raise ValueError("quantity %s invalid for spec with fixed name. Valid values are: %s" % 

691 (self.quantity, str(valid_quant_vals))) 

692 

693 @classmethod 

694 def __get_prec_level(cls, dtype): 

695 m = re.search('[0-9]+', dtype) 

696 if m is not None:    [696 ↛ 699: the condition on line 696 was never false]

697 prec = int(m.group()) 

698 else: 

699 prec = 32 

700 return (dtype[0], prec) 

701 

702 @classmethod 

703 def __is_sub_dtype(cls, orig, new): 

704 if isinstance(orig, RefSpec):    [704 ↛ 705: the condition on line 704 was never true]

705 if not isinstance(new, RefSpec): 

706 return False 

707 return orig == new 

708 else: 

709 orig_prec = cls.__get_prec_level(orig) 

710 new_prec = cls.__get_prec_level(new) 

711 if orig_prec[0] != new_prec[0]: 

712 # cannot extend int to float and vice-versa 

713 return False 

714 return new_prec >= orig_prec 

715 

716 @docval({'name': 'inc_spec', 'type': 'DatasetSpec', 'doc': 'the spec of the data type this specification extends'}) 

717 def resolve_spec(self, **kwargs): 

718 inc_spec = getargs('inc_spec', kwargs) 

719 if isinstance(self.dtype, list): 

720 # merge the new types 

721 inc_dtype = inc_spec.dtype 

722 if isinstance(inc_dtype, str):    [722 ↛ 723: the condition on line 722 was never true]

723 msg = 'Cannot extend simple data type to compound data type' 

724 raise ValueError(msg) 

725 order = OrderedDict() 

726 if inc_dtype is not None:    [726 ↛ 729: the condition on line 726 was never false]

727 for dt in inc_dtype: 

728 order[dt['name']] = dt 

729 for dt in self.dtype: 

730 name = dt['name'] 

731 if name in order: 

732 # verify that the extension has supplied 

733 # a valid subtyping of existing type 

734 orig = order[name].dtype 

735 new = dt.dtype 

736 if not self.__is_sub_dtype(orig, new): 

737 msg = 'Cannot extend %s to %s' % (str(orig), str(new)) 

738 raise ValueError(msg) 

739 order[name] = dt 

740 self['dtype'] = list(order.values()) 

741 super().resolve_spec(inc_spec) 

742 

743 @property 

744 def dims(self): 

745 ''' The dimensions of this Dataset ''' 

746 return self.get('dims', None) 

747 

748 @property 

749 def dtype(self): 

750 ''' The data type of the Dataset ''' 

751 return self.get('dtype', None) 

752 

753 @property 

754 def shape(self): 

755 ''' The shape of the dataset ''' 

756 return self.get('shape', None) 

757 

758 @property 

759 def default_value(self): 

760 '''The default value of the dataset or None if not specified''' 

761 return self.get('default_value', None) 

762 

763 @classmethod 

764 def dtype_spec_cls(cls): 

765 ''' The class to use when constructing DtypeSpec objects 

766 

767 Override this if extending to use a class other than DtypeSpec to build 

768 dataset specifications 

769 ''' 

770 return DtypeSpec 

771 

772 @classmethod 

773 def build_const_args(cls, spec_dict): 

774 ''' Build constructor arguments for this Spec class from a dictionary ''' 

775 ret = super().build_const_args(spec_dict) 

776 if 'dtype' in ret: 

777 if isinstance(ret['dtype'], list): 

778 ret['dtype'] = list(map(cls.dtype_spec_cls().build_spec, ret['dtype'])) 

779 elif isinstance(ret['dtype'], dict): 

780 ret['dtype'] = RefSpec.build_spec(ret['dtype']) 

781 return ret 

782 

783 
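A sketch of DatasetSpec: one compound (table-like) dataset defined with a list of DtypeSpecs, and one simple named dataset with shape and dims ('MyTable', 'names', and the column names are illustrative):

    from hdmf.spec.spec import DatasetSpec, DtypeSpec

    table = DatasetSpec(doc='An example table type', data_type_def='MyTable',
                        dtype=[DtypeSpec(name='id', doc='the row id', dtype='int'),
                               DtypeSpec(name='value', doc='the row value', dtype='float')])
    names = DatasetSpec(doc='A list of names', name='names', dtype='text',
                        shape=(None,), dims=('num_names',))
    assert names.shape == (None,)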

784_link_args = [ 

785 {'name': 'doc', 'type': str, 'doc': 'a description about what this link represents'}, 

786 {'name': _target_type_key, 'type': (str, BaseStorageSpec), 'doc': 'the target type GroupSpec or DatasetSpec'}, 

787 {'name': 'quantity', 'type': (str, int), 'doc': 'the required number of allowed instances', 'default': 1}, 

788 {'name': 'name', 'type': str, 'doc': 'the name of this link', 'default': None} 

789] 

790 

791 

792class LinkSpec(Spec): 

793 

794 @docval(*_link_args) 

795 def __init__(self, **kwargs): 

796 doc, target_type, name, quantity = popargs('doc', _target_type_key, 'name', 'quantity', kwargs) 

797 super().__init__(doc, name, **kwargs) 

798 if isinstance(target_type, BaseStorageSpec): 

799 if target_type.data_type_def is None: 

800 msg = ("'%s' must be a string or a GroupSpec or DatasetSpec with a '%s' key." 

801 % (_target_type_key, target_type.def_key())) 

802 raise ValueError(msg) 

803 self[_target_type_key] = target_type.data_type_def 

804 else: 

805 self[_target_type_key] = target_type 

806 if quantity != 1: 

807 self['quantity'] = quantity 

808 

809 @property 

810 def target_type(self): 

811 ''' The data type of target specification ''' 

812 return self.get(_target_type_key) 

813 

814 @property 

815 def data_type_inc(self): 

816 ''' The data type of target specification ''' 

817 return self.get(_target_type_key) 

818 

819 def is_many(self): 

820 return self.quantity not in (1, ZERO_OR_ONE) 

821 

822 @property 

823 def quantity(self): 

824 ''' The number of times the object being specified should be present ''' 

825 return self.get('quantity', DEF_QUANTITY) 

826 

827 @property 

828 def required(self): 

829 ''' Whether or not this spec represents a required field ''' 

830 return self.quantity not in (ZERO_OR_ONE, ZERO_OR_MANY) 

831 

832 
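A sketch of LinkSpec ('MyTable' is an illustrative target type); a quantity of '?' makes the link optional:

    from hdmf.spec.spec import LinkSpec

    link = LinkSpec(doc='An optional link to a table', target_type='MyTable', quantity='?')
    assert link.target_type == 'MyTable'
    assert not link.required
    assert not link.is_many()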

833_group_args = [ 

834 {'name': 'doc', 'type': str, 'doc': 'a description about what this specification represents'}, 

835 { 

836 'name': 'name', 

837 'type': str, 

838 'doc': 'the name of the Group that is written to the file. If this argument is omitted, users will be ' 

839 'required to enter a ``name`` field when creating instances of this data type in the API. Another ' 

840 'option is to specify ``default_name``, in which case this name will be used as the name of the Group ' 

841 'if no other name is provided.', 

842 'default': None, 

843 }, 

844 {'name': 'default_name', 'type': str, 'doc': 'The default name of this group', 'default': None}, 

845 {'name': 'groups', 'type': list, 'doc': 'the subgroups in this group', 'default': list()}, 

846 {'name': 'datasets', 'type': list, 'doc': 'the datasets in this group', 'default': list()}, 

847 {'name': 'attributes', 'type': list, 'doc': 'the attributes on this group', 'default': list()}, 

848 {'name': 'links', 'type': list, 'doc': 'the links in this group', 'default': list()}, 

849 {'name': 'linkable', 'type': bool, 'doc': 'whether or not this group can be linked', 'default': True}, 

850 { 

851 'name': 'quantity', 

852 'type': (str, int), 

853 'doc': "the allowable number of instance of this group in a certain location. See table of options " 

854 "`here <https://schema-language.readthedocs.io/en/latest/description.html#quantity>`_. Note that if you" 

855 "specify ``name``, ``quantity`` cannot be ``'*'``, ``'+'``, or an integer greater that 1, because you " 

856 "cannot have more than one group of the same name in the same parent group.", 

857 'default': 1, 

858 }, 

859 {'name': 'data_type_def', 'type': str, 'doc': 'the data type this specification represents', 'default': None}, 

860 {'name': 'data_type_inc', 'type': (str, 'GroupSpec'), 

861 'doc': 'the data type this specification extends', 'default': None}, 

862] 

863 

864 

865class GroupSpec(BaseStorageSpec): 

866 ''' Specification for groups 

867 ''' 

868 

869 @docval(*_group_args) 

870 def __init__(self, **kwargs): 

871 doc, groups, datasets, links = popargs('doc', 'groups', 'datasets', 'links', kwargs) 

872 self.__data_types = dict() # for GroupSpec/DatasetSpec data_type_def/inc 

873 self.__target_types = dict() # for LinkSpec target_types 

874 self.__groups = dict() 

875 for group in groups: 

876 self.set_group(group) 

877 self.__datasets = dict() 

878 for dataset in datasets: 

879 self.set_dataset(dataset) 

880 self.__links = dict() 

881 for link in links: 

882 self.set_link(link) 

883 self.__new_data_types = set(self.__data_types.keys()) 

884 self.__new_target_types = set(self.__target_types.keys()) 

885 self.__new_datasets = set(self.__datasets.keys()) 

886 self.__overridden_datasets = set() 

887 self.__new_links = set(self.__links.keys()) 

888 self.__overridden_links = set() 

889 self.__new_groups = set(self.__groups.keys()) 

890 self.__overridden_groups = set() 

891 super().__init__(doc, **kwargs) 

892 

893 @docval({'name': 'inc_spec', 'type': 'GroupSpec', 'doc': 'the spec of the data type this specification extends'}) 

894 def resolve_spec(self, **kwargs): 

895 inc_spec = getargs('inc_spec', kwargs) 

896 data_types = list() 

897 target_types = list() 

898 # resolve inherited datasets 

899 for dataset in inc_spec.datasets: 

900 if dataset.name is None: 

901 data_types.append(dataset) 

902 continue 

903 self.__new_datasets.discard(dataset.name) 

904 if dataset.name in self.__datasets: 

905 # if the included dataset spec was added earlier during resolution, don't add it again 

906 # but resolve the spec using the included dataset spec - the included spec may contain 

907 # properties not specified in the version of this spec added earlier during resolution 

908 self.__datasets[dataset.name].resolve_spec(dataset) 

909 self.__overridden_datasets.add(dataset.name) 

910 else: 

911 self.set_dataset(dataset) 

912 # resolve inherited groups 

913 for group in inc_spec.groups:    [913 ↛ 914: the loop on line 913 never started]

914 if group.name is None: 

915 data_types.append(group) 

916 continue 

917 self.__new_groups.discard(group.name) 

918 if group.name in self.__groups: 

919 self.__groups[group.name].resolve_spec(group) 

920 self.__overridden_groups.add(group.name) 

921 else: 

922 self.set_group(group) 

923 # resolve inherited links 

924 for link in inc_spec.links: 

925 if link.name is None: 

926 target_types.append(link) 

927 continue 

928 self.__new_links.discard(link.name) 

929 if link.name in self.__links: 

930 self.__overridden_links.add(link.name) 

931 else: 

932 self.set_link(link) 

933 # resolve inherited data_types 

934 for dt_spec in data_types: 

935 dt = dt_spec.data_type_def 

936 if dt is None:    [936 ↛ 938: the condition on line 936 was never false]

937 dt = dt_spec.data_type_inc 

938 self.__new_data_types.discard(dt) 

939 existing_dt_spec = self.get_data_type(dt) 

940 if (existing_dt_spec is None or    [940 ↛ 934: the condition on line 940 was never false]

941 ((isinstance(existing_dt_spec, list) or existing_dt_spec.name is not None) and 

942 dt_spec.name is None)): 

943 if isinstance(dt_spec, DatasetSpec): 943 ↛ 946line 943 didn't jump to line 946, because the condition on line 943 was never false

944 self.set_dataset(dt_spec) 

945 else: 

946 self.set_group(dt_spec) 

947 # resolve inherited target_types 

948 for link_spec in target_types: 

949 dt = link_spec.target_type 

950 self.__new_target_types.discard(dt) 

951 existing_dt_spec = self.get_target_type(dt) 

952 if (existing_dt_spec is None or    [952 ↛ 948: the condition on line 952 was never false]

953 (isinstance(existing_dt_spec, list) or existing_dt_spec.name is not None) and 

954 link_spec.name is None): 

955 self.set_link(link_spec) 

956 super().resolve_spec(inc_spec) 

957 

958 @docval({'name': 'name', 'type': str, 'doc': 'the name of the dataset'}, 

959 raises="ValueError, if 'name' is not part of this spec") 

960 def is_inherited_dataset(self, **kwargs): 

961 '''Return true if a dataset with the given name was inherited''' 

962 name = getargs('name', kwargs) 

963 if name not in self.__datasets:    [963 ↛ 964: the condition on line 963 was never true]

964 raise ValueError("Dataset '%s' not found in spec" % name) 

965 return name not in self.__new_datasets 

966 

967 @docval({'name': 'name', 'type': str, 'doc': 'the name of the dataset'}, 

968 raises="ValueError, if 'name' is not part of this spec") 

969 def is_overridden_dataset(self, **kwargs): 

970 '''Return true if a dataset with the given name overrides a specification from the parent type''' 

971 name = getargs('name', kwargs) 

972 if name not in self.__datasets:    [972 ↛ 973: the condition on line 972 was never true]

973 raise ValueError("Dataset '%s' not found in spec" % name) 

974 return name in self.__overridden_datasets 

975 

976 @docval({'name': 'name', 'type': str, 'doc': 'the name of the group'}, 

977 raises="ValueError, if 'name' is not part of this spec") 

978 def is_inherited_group(self, **kwargs): 

979 '''Return true if a group with the given name was inherited''' 

980 name = getargs('name', kwargs) 

981 if name not in self.__groups:    [981 ↛ 982: the condition on line 981 was never true]

982 raise ValueError("Group '%s' not found in spec" % name) 

983 return name not in self.__new_groups 

984 

985 @docval({'name': 'name', 'type': str, 'doc': 'the name of the group'}, 

986 raises="ValueError, if 'name' is not part of this spec") 

987 def is_overridden_group(self, **kwargs): 

988 '''Return true if a group with the given name overrides a specification from the parent type''' 

989 name = getargs('name', kwargs) 

990 if name not in self.__groups: 

991 raise ValueError("Group '%s' not found in spec" % name) 

992 return name in self.__overridden_groups 

993 

994 @docval({'name': 'name', 'type': str, 'doc': 'the name of the link'}, 

995 raises="ValueError, if 'name' is not part of this spec") 

996 def is_inherited_link(self, **kwargs): 

997 '''Return true if a link with the given name was inherited''' 

998 name = getargs('name', kwargs) 

999 if name not in self.__links:    [999 ↛ 1000: the condition on line 999 was never true]

1000 raise ValueError("Link '%s' not found in spec" % name) 

1001 return name not in self.__new_links 

1002 

1003 @docval({'name': 'name', 'type': str, 'doc': 'the name of the link'}, 

1004 raises="ValueError, if 'name' is not part of this spec") 

1005 def is_overridden_link(self, **kwargs): 

1006 '''Return true if a link with the given name overrides a specification from the parent type''' 

1007 name = getargs('name', kwargs) 

1008 if name not in self.__links:    [1008 ↛ 1009: the condition on line 1008 was never true]

1009 raise ValueError("Link '%s' not found in spec" % name) 

1010 return name in self.__overridden_links 

1011 

1012 @docval({'name': 'spec', 'type': (Spec, str), 'doc': 'the specification to check'}) 

1013 def is_inherited_spec(self, **kwargs): 

1014 ''' Returns 'True' if specification was inherited from a parent type ''' 

1015 spec = getargs('spec', kwargs) 

1016 if isinstance(spec, Spec): 

1017 name = spec.name 

1018 if name is None and hasattr(spec, 'data_type_def'): 

1019 name = spec.data_type_def 

1020 if name is None: # NOTE: this will return the target type for LinkSpecs 

1021 name = spec.data_type_inc 

1022 if name is None: # pragma: no cover 

1023 # this should not be possible 

1024 raise ValueError('received Spec with wildcard name but no data_type_inc or data_type_def') 

1025 spec = name 

1026 # if the spec has a name, it will be found in __links/__groups/__datasets before __data_types/__target_types 

1027 if spec in self.__links: 

1028 return self.is_inherited_link(spec) 

1029 elif spec in self.__groups: 

1030 return self.is_inherited_group(spec) 

1031 elif spec in self.__datasets: 

1032 return self.is_inherited_dataset(spec) 

1033 elif spec in self.__data_types: 

1034 # NOTE: the same data type can be both an unnamed data type and an unnamed target type 

1035 return self.is_inherited_type(spec) 

1036 elif spec in self.__target_types: 

1037 return self.is_inherited_target_type(spec) 

1038 else: 

1039 if super().is_inherited_spec(spec): 

1040 return True 

1041 else: 

1042 for s in self.__datasets: 

1043 if self.is_inherited_dataset(s): 

1044 if self.__datasets[s].get_attribute(spec) is not None: 

1045 return True 

1046 for s in self.__groups: 

1047 if self.is_inherited_group(s):    [1047 ↛ 1048: the condition on line 1047 was never true]

1048 if self.__groups[s].get_attribute(spec) is not None: 

1049 return True 

1050 return False 

1051 

1052 @docval({'name': 'spec', 'type': (Spec, str), 'doc': 'the specification to check'}) 

1053 def is_overridden_spec(self, **kwargs): # noqa: C901 

1054 ''' Returns 'True' if specification overrides a specification from the parent type ''' 

1055 spec = getargs('spec', kwargs) 

1056 if isinstance(spec, Spec): 

1057 name = spec.name 

1058 if name is None: 

1059 if isinstance(spec, LinkSpec): # unnamed LinkSpec cannot be overridden 

1060 return False 

1061 if spec.is_many(): # this is a wildcard spec, so it cannot be overridden    [1061 ↛ 1063: the condition on line 1061 was never false]

1062 return False 

1063 name = spec.data_type_def 

1064 if name is None: # NOTE: this will return the target type for LinkSpecs 

1065 name = spec.data_type_inc 

1066 if name is None: # pragma: no cover 

1067 # this should not happen 

1068 raise ValueError('received Spec with wildcard name but no data_type_inc or data_type_def') 

1069 spec = name 

1070 # if the spec has a name, it will be found in __links/__groups/__datasets before __data_types/__target_types 

1071 if spec in self.__links: 

1072 return self.is_overridden_link(spec) 

1073 elif spec in self.__groups:    [1073 ↛ 1074: the condition on line 1073 was never true]

1074 return self.is_overridden_group(spec) 

1075 elif spec in self.__datasets: 

1076 return self.is_overridden_dataset(spec) 

1077 elif spec in self.__data_types:    [1077 ↛ 1078: the condition on line 1077 was never true]

1078 return self.is_overridden_type(spec) 

1079 else: 

1080 if super().is_overridden_spec(spec): # check if overridden attribute 

1081 return True 

1082 else: 

1083 for s in self.__datasets: 

1084 if self.is_overridden_dataset(s):    [1084 ↛ 1085: the condition on line 1084 was never true]

1085 if self.__datasets[s].is_overridden_spec(spec): 

1086 return True 

1087 for s in self.__groups:    [1087 ↛ 1088: the loop on line 1087 never started]

1088 if self.is_overridden_group(s): 

1089 if self.__groups[s].is_overridden_spec(spec): 

1090 return True 

1091 return False 

1092 

1093 @docval({'name': 'spec', 'type': (BaseStorageSpec, str), 'doc': 'the specification to check'}) 

1094 def is_inherited_type(self, **kwargs): 

1095 ''' Returns True if `spec` represents a data type that was inherited ''' 

1096 spec = getargs('spec', kwargs) 

1097 if isinstance(spec, BaseStorageSpec): 

1098 if spec.data_type_def is None: # why not also check data_type_inc?    [1098 ↛ 1099: the condition on line 1098 was never true]

1099 raise ValueError('cannot check if something was inherited if it does not have a %s' % self.def_key()) 

1100 spec = spec.data_type_def 

1101 return spec not in self.__new_data_types 

1102 

1103 @docval({'name': 'spec', 'type': (BaseStorageSpec, str), 'doc': 'the specification to check'}, 

1104 raises="ValueError, if 'name' is not part of this spec") 

1105 def is_overridden_type(self, **kwargs): 

1106 ''' Returns True if `spec` represents a data type that overrides a specification from a parent type ''' 

1107 return self.is_inherited_type(**kwargs) 

1108 

1109 @docval({'name': 'spec', 'type': (LinkSpec, str), 'doc': 'the specification to check'}) 

1110 def is_inherited_target_type(self, **kwargs): 

1111 ''' Returns True if `spec` represents a target type that was inherited ''' 

1112 spec = getargs('spec', kwargs) 

1113 if isinstance(spec, LinkSpec):    [1113 ↛ 1114: the condition on line 1113 was never true]

1114 spec = spec.target_type 

1115 return spec not in self.__new_target_types 

1116 

1117 @docval({'name': 'spec', 'type': (LinkSpec, str), 'doc': 'the specification to check'}, 

1118 raises="ValueError, if 'name' is not part of this spec") 

1119 def is_overridden_target_type(self, **kwargs): 

1120 ''' Returns True if `spec` represents a target type that overrides a specification from a parent type ''' 

1121 return self.is_inherited_target_type(**kwargs) 

1122 

1123 def __add_data_type_inc(self, spec): 

1124 # update the __data_types dict with the given groupspec/datasetspec so that: 

1125 # - if there is only one spec for a given data type, then it is stored in __data_types regardless of 

1126 # whether it is named 

1127 # - if there are multiple specs for a given data type and they are all named, then they are all stored in 

1128 # __data_types 

1129 # - if there are multiple specs for a given data type and only one is unnamed, then the unnamed spec is 

1130 # stored in __data_types 

1131 # it is not allowed to have multiple specs for a given data type and multiple are unnamed 

1132 dt = None 

1133 if hasattr(spec, 'data_type_def') and spec.data_type_def is not None: 

1134 dt = spec.data_type_def 

1135 elif hasattr(spec, 'data_type_inc') and spec.data_type_inc is not None: 

1136 dt = spec.data_type_inc 

1137 if not dt: # pragma: no cover 

1138 # this should not be possible 

1139 raise TypeError("spec does not have '%s' or '%s' defined" % (self.def_key(), self.inc_key())) 

1140 if dt in self.__data_types: 

1141 curr = self.__data_types[dt] 

1142 if curr is spec: # happens only if the same spec is added twice    [1142 ↛ 1143: the condition on line 1142 was never true]

1143 return 

1144 if spec.name is None: 

1145 if isinstance(curr, list): 

1146 # replace the list of named specs associated with the data_type with this unnamed spec 

1147 # the named specs can be retrieved by name 

1148 self.__data_types[dt] = spec 

1149 else: 

1150 if curr.name is None: 

1151 # neither the spec already associated with the data_type nor the given spec have a name 

1152 msg = "Cannot have multiple groups/datasets with the same data type without specifying name" 

1153 raise ValueError(msg) 

1154 else: 

1155 # replace the named spec associated with the data_type with this unnamed spec 

1156 # the named spec can be retrieved by name 

1157 self.__data_types[dt] = spec 

1158 else: 

1159 if isinstance(curr, list): 

1160 # add this named spec to the list of named current specs associated with the data_type 

1161 self.__data_types[dt].append(spec) 

1162 else: 

1163 if curr.name is None: 

1164 # the spec associated with the data_type has no name and the given spec has a name 

1165 # leave the existing data type as is, since the new one can be retrieved by name 

1166 return 

1167 else: 

1168 # both the spec associated with the data_type and the given spec have a name 

1169 # store both specific instances of a data type 

1170 self.__data_types[dt] = [curr, spec] 

1171 else: 

1172 self.__data_types[dt] = spec 

1173 

1174 def __add_target_type(self, spec): 

1175 # update the __target_types dict with the given linkspec so that: 

1176 # - if there is only one linkspec for a given target type, then it is stored in __target_types regardless of 

1177 # whether it is named 

1178 # - if there are multiple linkspecs for a given target type and they are all named, then they are all stored in 

1179 # __target_types 

1180 # - if there are multiple linkspecs for a given target type and only one is unnamed, then the unnamed spec is 

1181 # stored in __target_types 

1182 # it is not allowed to have multiple linkspecs for a given target type and multiple are unnamed 

1183 dt = spec.target_type 

1184 if dt in self.__target_types: 

1185 curr = self.__target_types[dt] 

1186 if curr is spec: # happens only if the same spec is added twice    [1186 ↛ 1187: the condition on line 1186 was never true]

1187 return 

1188 if spec.name is None: 

1189 if isinstance(curr, list): 

1190 # replace the list of named specs associated with the target_type with this unnamed spec 

1191 # the named specs can be retrieved by name 

1192 self.__target_types[dt] = spec 

1193 else: 

1194 if curr.name is None: 

1195 # neither the spec already associated with the target_type nor the given spec have a name 

1196 msg = "Cannot have multiple links with the same target type without specifying name" 

1197 raise ValueError(msg) 

1198 else: 

1199 # replace the named spec associated with the target_type with this unnamed spec 

1200 # the named spec can be retrieved by name 

1201 self.__target_types[dt] = spec 

1202 else: 

1203 if isinstance(curr, list): 

1204 # add this named spec to the list of named current specs associated with the target_type 

1205 self.__target_types[dt].append(spec) 

1206 else: 

1207 if curr.name is None: 

1208 # the spec associated with the target_type has no name and the given spec has a name 

1209 # leave the existing data type as is, since the new one can be retrieved by name 

1210 return 

1211 else: 

1212 # both the spec associated with the target_type and the given spec have a name 

1213 # store both specific instances of a data type 

1214 self.__target_types[dt] = [curr, spec] 

1215 else: 

1216 self.__target_types[dt] = spec 

1217 

1218 @docval({'name': 'data_type', 'type': str, 'doc': 'the data_type to retrieve'}) 

1219 def get_data_type(self, **kwargs): 

1220 ''' Get a specification by "data_type" 

1221 

1222 NOTE: If there is only one spec for a given data type, then it is returned. 

1223 If there are multiple specs for a given data type and they are all named, then they are returned in a list. 

1224 If there are multiple specs for a given data type and only one is unnamed, then the unnamed spec is returned. 

1225 The other named specs can be returned using get_group or get_dataset. 

1226 

1227 NOTE: this method looks for an exact match of the data type and does not consider the type hierarchy. 

1228 ''' 

1229 ndt = getargs('data_type', kwargs) 

1230 return self.__data_types.get(ndt, None) 

1231 

1232 @docval({'name': 'target_type', 'type': str, 'doc': 'the target_type to retrieve'}) 

1233 def get_target_type(self, **kwargs): 

1234 ''' Get a specification by "target_type" 

1235 

1236 NOTE: If there is only one spec for a given target type, then it is returned. 

1237 If there are multiple specs for a given target type and they are all named, then they are returned in a list. 

1238 If there are multiple specs for a given target type and only one is unnamed, then the unnamed spec is returned. 

1239 The other named specs can be returned using get_link. 

1240 

1241 NOTE: this method looks for an exact match of the target type and does not consider the type hierarchy. 

1242 ''' 

1243 ndt = getargs('target_type', kwargs) 

1244 return self.__target_types.get(ndt, None) 

1245 

1246 @property 

1247 def groups(self): 

1248 ''' The groups specified in this GroupSpec ''' 

1249 return tuple(self.get('groups', tuple())) 

1250 

1251 @property 

1252 def datasets(self): 

1253 ''' The datasets specified in this GroupSpec ''' 

1254 return tuple(self.get('datasets', tuple())) 

1255 

1256 @property 

1257 def links(self): 

1258 ''' The links specified in this GroupSpec ''' 

1259 return tuple(self.get('links', tuple())) 

1260 

1261 @docval(*_group_args) 

1262 def add_group(self, **kwargs): 

1263 ''' Add a new specification for a subgroup to this group specification ''' 

1264 spec = self.__class__(**kwargs) 

1265 self.set_group(spec) 

1266 return spec 

1267 

1268 @docval({'name': 'spec', 'type': ('GroupSpec'), 'doc': 'the specification for the subgroup'}) 

1269 def set_group(self, **kwargs): 

1270 ''' Add the given specification for a subgroup to this group specification ''' 

1271 spec = getargs('spec', kwargs) 

1272 if spec.parent is not None:    [1272 ↛ 1273: the condition on line 1272 was never true]

1273 spec = self.build_spec(spec) 

1274 if spec.name is None: 

1275 if spec.data_type_inc is not None or spec.data_type_def is not None: 

1276 self.__add_data_type_inc(spec) 

1277 else: # pragma: no cover 

1278 # this should not be possible 

1279 raise TypeError("must specify 'name' or 'data_type_inc' in Group spec") 

1280 else: 

1281 # NOTE named specs can be present in both __datasets and __data_types 

1282 if spec.data_type_inc is not None or spec.data_type_def is not None: 

1283 self.__add_data_type_inc(spec) 

1284 self.__groups[spec.name] = spec 

1285 self.setdefault('groups', list()).append(spec) 

1286 spec.parent = self 

1287 

1288 @docval({'name': 'name', 'type': str, 'doc': 'the name of the group to get the Spec for'}) 

1289 def get_group(self, **kwargs): 

1290 ''' Get a specification for a subgroup to this group specification ''' 

1291 name = getargs('name', kwargs) 

1292 return self.__groups.get(name, self.__links.get(name)) 

1293 

1294 @docval(*_dataset_args) 

1295 def add_dataset(self, **kwargs): 

1296 ''' Add a new specification for a dataset to this group specification ''' 

1297 spec = self.dataset_spec_cls()(**kwargs) 

1298 self.set_dataset(spec) 

1299 return spec 

1300 

1301 @docval({'name': 'spec', 'type': 'DatasetSpec', 'doc': 'the specification for the dataset'}) 

1302 def set_dataset(self, **kwargs): 

1303 ''' Add the given specification for a dataset to this group specification ''' 

1304 spec = getargs('spec', kwargs) 

1305 if spec.parent is not None: 

1306 spec = self.dataset_spec_cls().build_spec(spec) 

1307 if spec.name is None: 

1308 if spec.data_type_inc is not None or spec.data_type_def is not None: 

1309 self.__add_data_type_inc(spec) 

1310 else: # pragma: no cover 

1311 # this should not be possible 

1312 raise TypeError("must specify 'name' or 'data_type_inc' in Dataset spec") 

1313 else: 

1314 # NOTE named specs can be present in both __datasets and __data_types 

1315 if spec.data_type_inc is not None or spec.data_type_def is not None: 

1316 self.__add_data_type_inc(spec) 

1317 self.__datasets[spec.name] = spec 

1318 self.setdefault('datasets', list()).append(spec) 

1319 spec.parent = self 

1320 

1321 @docval({'name': 'name', 'type': str, 'doc': 'the name of the dataset to get the Spec for'}) 

1322 def get_dataset(self, **kwargs): 

1323 ''' Get a specification for a dataset to this group specification ''' 

1324 name = getargs('name', kwargs) 

1325 return self.__datasets.get(name, self.__links.get(name)) 

1326 

1327 @docval(*_link_args) 

1328 def add_link(self, **kwargs): 

1329 ''' Add a new specification for a link to this group specification ''' 

1330 spec = self.link_spec_cls()(**kwargs) 

1331 self.set_link(spec) 

1332 return spec 

1333 

1334 @docval({'name': 'spec', 'type': 'LinkSpec', 'doc': 'the specification for the object to link to'}) 

1335 def set_link(self, **kwargs): 

1336 ''' Add a given specification for a link to this group specification ''' 

1337 spec = getargs('spec', kwargs) 

1338 if spec.parent is not None: 

1339 spec = self.link_spec_cls().build_spec(spec) 

1340 # NOTE named specs can be present in both __links and __target_types 

1341 self.__add_target_type(spec) 

1342 if spec.name is not None: 

1343 self.__links[spec.name] = spec 

1344 self.setdefault('links', list()).append(spec) 

1345 spec.parent = self 

1346 

1347 @docval({'name': 'name', 'type': str, 'doc': 'the name of the link to get the Spec for'}) 

1348 def get_link(self, **kwargs): 

1349 ''' Get a specification for a link to this group specification ''' 

1350 name = getargs('name', kwargs) 

1351 return self.__links.get(name) 

1352 

1353 @classmethod 

1354 def dataset_spec_cls(cls): 

1355 ''' The class to use when constructing DatasetSpec objects 

1356 

1357 Override this if extending to use a class other than DatasetSpec to build 

1358 dataset specifications 

1359 ''' 

1360 return DatasetSpec 

1361 

1362 @classmethod 

1363 def link_spec_cls(cls): 

1364 ''' The class to use when constructing LinkSpec objects 

1365 

1366 Override this if extending to use a class other than LinkSpec to build 

1367 link specifications 

1368 ''' 

1369 return LinkSpec 

1370 

1371 @classmethod 

1372 def build_const_args(cls, spec_dict): 

1373 ''' Build constructor arguments for this Spec class from a dictionary ''' 

1374 ret = super().build_const_args(spec_dict) 

1375 if 'datasets' in ret: 

1376 ret['datasets'] = list(map(cls.dataset_spec_cls().build_spec, ret['datasets'])) 

1377 if 'groups' in ret: 

1378 ret['groups'] = list(map(cls.build_spec, ret['groups'])) 

1379 if 'links' in ret: 

1380 ret['links'] = list(map(cls.link_spec_cls().build_spec, ret['links'])) 

1381 return ret
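
A closing sketch that puts the pieces together with GroupSpec (all type and field names are illustrative): a defined group type with an attribute, a named dataset, an optional link, and an unnamed subgroup included by type.

    from hdmf.spec.spec import AttributeSpec, DatasetSpec, GroupSpec, LinkSpec

    container = GroupSpec(
        doc='An example container type',
        data_type_def='MyContainer',
        attributes=[AttributeSpec(name='description', doc='A description', dtype='text')],
        datasets=[DatasetSpec(doc='The stored values', name='data', dtype='float')],
        links=[LinkSpec(doc='A link to another container', target_type='OtherContainer', quantity='?')],
    )
    assert container.get_dataset('data').dtype == 'float'
    assert not container.get_target_type('OtherContainer').required
    # an unnamed subgroup included by type: any number of 'MySubgroup' groups may appear
    container.add_group(doc='Zero or more subgroups', data_type_inc='MySubgroup', quantity='*')
    assert container.get_data_type('MySubgroup').is_many()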