wbia.dtool package
Submodules
wbia.dtool.__SQLITE__ module
Custom sqlite3 module that supports numpy types.
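The general mechanism such a module can rely on is the standard library's sqlite3 adapter/converter registry. The sketch below is illustrative only (it is not the actual __SQLITE__ implementation); it assumes arrays are stored in columns declared with an NDARRAY type, as seen in the schema examples later on this page.

```python
# Illustrative sketch: storing numpy arrays in sqlite3 via adapters/converters.
import io
import sqlite3
import numpy as np

def _encode_ndarray(arr):
    # Serialize the array (dtype and shape included) to bytes.
    buf = io.BytesIO()
    np.save(buf, arr)
    return sqlite3.Binary(buf.getvalue())

def _decode_ndarray(blob):
    # Restore the array from the stored bytes.
    return np.load(io.BytesIO(blob))

# Columns declared as NDARRAY round-trip numpy arrays transparently.
sqlite3.register_adapter(np.ndarray, _encode_ndarray)
sqlite3.register_converter('NDARRAY', _decode_ndarray)

conn = sqlite3.connect(':memory:', detect_types=sqlite3.PARSE_DECLTYPES)
conn.execute('CREATE TABLE demo (data NDARRAY)')
conn.execute('INSERT INTO demo VALUES (?)', (np.arange(6).reshape(2, 3),))
(restored,) = conn.execute('SELECT data FROM demo').fetchone()
```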
wbia.dtool.__main__ module
Initially Generated By:
python -m utool --tf setup_repo --repo=dtool --codedir=~/code --modname=dtool
wbia.dtool._grave_depcache module
wbia.dtool.base module
class wbia.dtool.base.BaseRequest
Bases: wbia.dtool.base.IBEISRequestHacks, utool.util_dev.NiceRepr
Class that maintains an algorithm, its inputs, and a config.
ensure_dependencies()
CommandLine:
python -m dtool.base --exec-BaseRequest.ensure_dependencies
Example
>>> # ENABLE_DOCTEST
>>> from wbia.dtool.base import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> request = depc.new_request('vsmany', [1, 2], [2, 3, 4])
>>> request.ensure_dependencies()
get_cfgstr(with_input=False, with_pipe=True, **kwargs)
Main cfgstring used to identify the 'querytype'.
rrr(verbose=True, reload_module=True)
Special class reloading function. This function is often injected as rrr of classes.
class wbia.dtool.base.ClassVsClassSimilarityRequest
Bases: wbia.dtool.base.BaseRequest
rrr(verbose=True, reload_module=True)
Special class reloading function. This function is often injected as rrr of classes.
class wbia.dtool.base.Config(**kwargs)
Bases: utool.util_dev.NiceRepr, utool.util_dict.DictLike
Base class for hierarchical configs. Subclasses need to overwrite get_param_info_list.
CommandLine:
python -m dtool.base Config
Example
>>> # ENABLE_DOCTEST
>>> from wbia.dtool.base import *  # NOQA
>>> cfg1 = Config.from_dict({'a': 1, 'b': 2})
>>> cfg2 = Config.from_dict({'a': 2, 'b': 2})
>>> # Must be hashable and orderable
>>> hash(cfg1)
>>> cfg1 > cfg2
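Since subclasses are expected to overwrite get_param_info_list, here is a minimal sketch of a custom config. It assumes utool.ParamInfo accepts a (name, default) pair, in the style of the Dummy*Config classes in wbia.dtool.example_depcache documented below; it is illustrative rather than authoritative.

```python
# Minimal sketch of a Config subclass (assumes ut.ParamInfo(name, default)).
import utool as ut
from wbia import dtool as dt

class ToyChipConfig(dt.Config):
    def get_param_info_list(self):
        # Each ParamInfo declares one parameter of this hierarchical config.
        return [
            ut.ParamInfo('dim_size', 500),
            ut.ParamInfo('histeq', False),
        ]

cfg = ToyChipConfig(dim_size=700)
print(cfg.get_cfgstr())  # the cfgstr identifies this parameterization in the cache
```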
classmethod from_dict(dict_, tablename=None)
Parameters:
dict_ (dict) – a dictionary
tablename (None) – (default = None)
Returns: param_info_list
Return type: list
CommandLine:
python -m dtool.base Config.from_dict --show
Example
>>> # DISABLE_DOCTEST >>> from wbia.dtool.base import * # NOQA >>> cls = Config >>> dict_ = {'K': 1, 'Knorm': 5, 'min_pername': 1, 'max_pername': 1,} >>> tablename = None >>> config = cls.from_dict(dict_, tablename) >>> print(config) >>> # xdoctest: +REQUIRES(--show) >>> ut.quit_if_noshow() >>> dlg = config.make_qt_dialog( >>> title='Confirm Merge Query', >>> msg='Confirm') >>> dlg.resize(700, 500) >>> dlg.show() >>> import wbia.plottool as pt >>> self = dlg.widget >>> guitool.qtapp_loop(qwin=dlg) >>> updated_config = self.config # NOQA >>> print('updated_config = %r' % (updated_config,))
get_cfgstr_list(ignore_keys=None, with_name=True, **kwargs)
Default get_cfgstr_list; can be overridden by a config object.
parse_items()
Returns: param_list
Return type: list
CommandLine:
python -m dtool.base --exec-parse_items
Example
>>> # ENABLE_DOCTEST
>>> from wbia.dtool.base import *  # NOQA
>>> from wbia.dtool.example_depcache import DummyVsManyConfig
>>> cfg = DummyVsManyConfig()
>>> param_list = cfg.parse_items()
>>> result = ('param_list = %s' % (ut.repr2(param_list, nl=1),))
>>> print(result)
parse_namespace_config_items()
Recursively extracts (key, val) pairs from Config objects into a flat list. (There must not be name conflicts.)
pop_update(other)
Updates based on other, while popping off used arguments. (Useful for testing if a parameter was unused or misspelled.)
Doctest:
>>> from wbia.dtool.base import *  # NOQA
>>> from wbia import dtool as dt
>>> cfg = dt.Config.from_dict({'a': 1, 'b': 2, 'c': 3})
>>> other = {'a': 5, 'e': 2}
>>> cfg.pop_update(other)
>>> assert cfg['a'] == 5
>>> assert len(other) == 1 and 'a' not in other
update(**kwargs)
Overwrites the default DictLike update for only keys that exist. Non-existing keys are ignored.
Note
Prefixed keys of the form <classname>_<key> will just be interpreted as <key>.
CommandLine:
python -m dtool.base update --show
Example
>>> # ENABLE_DOCTEST
>>> from wbia.dtool.base import *  # NOQA
>>> from wbia.dtool.example_depcache import DummyVsManyConfig
>>> cfg = DummyVsManyConfig()
>>> cfg.update(DummyAlgo_version=4)
>>> print(cfg)
update2(*args, **kwargs)
Overwrites the default DictLike update for only keys that exist. Non-existing keys are ignored. Also updates nested configs.
Note
Prefixed keys of the form <classname>_<key> will just be interpreted as <key>.
CommandLine:
python -m dtool.base update --show
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.base import * # NOQA >>> from wbia import dtool as dt >>> cfg = dt.Config.from_dict({ >>> 'a': 1, >>> 'b': 2, >>> 'c': 3, >>> 'sub1': dt.Config.from_dict({ >>> 'x': 'x', >>> 'y': {'z', 'x'}, >>> 'c': 33, >>> }), >>> 'sub2': dt.Config.from_dict({ >>> 's': [1, 2, 3], >>> 't': (1, 2, 3), >>> 'c': 42, >>> 'sub3': dt.Config.from_dict({ >>> 'b': 99, >>> 'c': 88, >>> }), >>> }), >>> }) >>> kwargs = {'c': 10} >>> cfg.update2(c=10, y={1,2}) >>> assert cfg.c == 10 >>> assert cfg.sub1.c == 10 >>> assert cfg.sub2.c == 10 >>> assert cfg.sub2.sub3.c == 10 >>> assert cfg.sub1.y == {1, 2}
class wbia.dtool.base.IBEISRequestHacks
Bases: object
dannots
extern_data_config2
extern_query_config2
ibs – HACK specific to wbia
qannots
class wbia.dtool.base.MatchResult(qaid=None, daids=None, qnid=None, dnid_list=None, annot_score_list=None, unique_nids=None, name_score_list=None)
Bases: wbia.dtool.base.AlgoResult, utool.util_dev.NiceRepr
daids
num_daids
qaids
class wbia.dtool.base.StackedConfig(config_list)
Bases: utool.util_dict.DictLike, utool.util_class.HashComparable
Manages a list of configurations.
class wbia.dtool.base.VsManySimilarityRequest
Bases: wbia.dtool.base.BaseRequest, wbia.dtool.base.AnnotSimiliarity
Request for one-vs-many similarity.
CommandLine:
python -m dtool.base --exec-VsManySimilarityRequest
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.base import * # NOQA >>> from wbia.dtool.example_depcache import testdata_depc >>> qaid_list = [1, 2] >>> daid_list = [2, 3, 4] >>> depc = testdata_depc() >>> request = depc.new_request('vsmany', qaid_list, daid_list) >>> request.ensure_dependencies() >>> results = request.execute() >>> # Test dependence on data >>> request2 = depc.new_request('vsmany', qaid_list + [3], daid_list + [5]) >>> results2 = request2.execute() >>> print('results = %r' % (results,)) >>> print('results2 = %r' % (results2,)) >>> assert len(results) == 2, 'incorrect num output' >>> assert len(results2) == 3, 'incorrect num output'
get_cfgstr(with_input=False, with_data=True, with_pipe=True, hash_pipe=False)
Override default get_cfgstr to show reliance on data.
rrr(verbose=True, reload_module=True)
Special class reloading function. This function is often injected as rrr of classes.
class wbia.dtool.base.VsOneSimilarityRequest
Bases: wbia.dtool.base.BaseRequest, wbia.dtool.base.AnnotSimiliarity
Similarity request for pairwise scores.
References
https://thingspython.wordpress.com/2010/09/27/another-super-wrinkle-raising-typeerror/
CommandLine:
python -m dtool.base --exec-VsOneSimilarityRequest
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.base import * # NOQA >>> from wbia.dtool.example_depcache import testdata_depc >>> qaid_list = [1, 2, 3, 5] >>> daid_list = [2, 3, 4] >>> depc = testdata_depc() >>> request = depc.new_request('vsone', qaid_list, daid_list) >>> results = request.execute() >>> # Test that adding a query / data id only recomputes necessary items >>> request2 = depc.new_request('vsone', qaid_list + [4], daid_list + [5]) >>> results2 = request2.execute() >>> print('results = %r' % (results,)) >>> print('results2 = %r' % (results2,)) >>> ut.assert_eq(len(results), 10, 'incorrect num output') >>> ut.assert_eq(len(results2), 16, 'incorrect num output')
parent_rowids_T
rrr(verbose=True, reload_module=True)
Special class reloading function. This function is often injected as rrr of classes.
wbia.dtool.depcache_control module
Implicit version of the dependency cache from wbia/templates/template_generator.
class wbia.dtool.depcache_control.DependencyCache(root_tablename=None, cache_dpath='./DEPCACHE', controller=None, default_fname=None, get_root_uuid=None, root_getters=None, use_globals=True)
Bases: wbia.dtool.depcache_control._CoreDependencyCache, utool.util_dev.NiceRepr
Currently, to use this class a user must:
- on root modification, call depc.on_root_modified
- use decorators to register relevant functions
delete_root(root_rowids, delete_extern=None, _debug=False, table_config_filter=None, prop=None)
Deletes all properties of a root object regardless of config.
Parameters: root_rowids (list) –
CommandLine:
python -m dtool.depcache_control delete_root --show
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.depcache_control import * # NOQA >>> from wbia.dtool.example_depcache import testdata_depc >>> depc = testdata_depc() >>> exec(ut.execstr_funckw(depc.delete_root), globals()) >>> root_rowids = [1] >>> depc.delete_root(root_rowids, _debug=0) >>> depc.get('fgweight', [1]) >>> depc.delete_root(root_rowids, _debug=0)
explicit_graph
get_native_property(tablename, tbl_rowids, colnames=None, _debug=None, read_extern=True)
Gets data using internal ids, which is faster if you have them.
CommandLine:
python -m dtool.depcache_control get_native:0
python -m dtool.depcache_control get_native:1
Example
>>> # ENABLE_DOCTEST >>> # Simple test of get native >>> from wbia.dtool.example_depcache import * # NOQA >>> config = {} >>> depc = testdata_depc() >>> tablename = 'keypoint' >>> aids = [1,] >>> tbl_rowids = depc.get_rowids(tablename, aids, config=config) >>> data = depc.get_native(tablename, tbl_rowids)
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.example_depcache import * # NOQA >>> depc = testdata_depc() >>> config = {} >>> tablename = 'chip' >>> colnames = extern_colname = 'chip' >>> aids = [1, 2] >>> depc.delete_property(tablename, aids, config=config) >>> # Ensure chip rowids exist then delete external data without >>> # notifying the depcache. This forces the depcache to recover >>> tbl_rowids = chip_rowids = depc.get_rowids(tablename, aids, config=config) >>> data_fpaths = depc.get(tablename, aids, extern_colname, config=config, read_extern=False) >>> ut.remove_file_list(data_fpaths) >>> chips = depc.get_native(tablename, tbl_rowids, extern_colname) >>> print('chips = %r' % (chips,))
get_property(tablename, root_rowids, colnames=None, config=None, ensure=True, _debug=None, recompute=False, recompute_all=False, eager=True, nInput=None, read_extern=True, onthefly=False, num_retries=1, hack_paths=False)
Access dependent properties of the primary objects using primary ids.
Gets the data in colnames of tablename that correspond to root_rowids using config. If colnames is None, all columns are returned.
Parameters:
tablename (str) – table name containing desired property
root_rowids (List[int]) – ids of the root object
colnames (None) – desired property (default = None)
config (None) – (default = None)
read_extern – if False then only returns the extern URI
hack_paths – if False then does not compute extern info, just returns the path where it will be located
Returns: prop_list
Return type: list
CommandLine:
python -m dtool.depcache_control --exec-get
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.depcache_control import * # NOQA >>> from wbia.dtool.example_depcache2 import * # NOQA >>> from wbia.dtool.example_depcache import * # NOQA >>> depc = testdata_depc3(True) >>> exec(ut.execstr_funckw(depc.get), globals()) >>> aids = [1, 2, 3] >>> _debug = True >>> tablename = 'labeler' >>> root_rowids = aids >>> prop_list = depc.get( >>> tablename, root_rowids, colnames) >>> result = ('prop_list = %s' % (ut.repr2(prop_list),)) >>> print(result) prop_list = [('labeler([root(1)]:42)',), ('labeler([root(2)]:42)',), ('labeler([root(3)]:42)',)]
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.depcache_control import * # NOQA >>> from wbia.dtool.example_depcache2 import * # NOQA >>> from wbia.dtool.example_depcache import * # NOQA >>> depc = testdata_depc3(True) >>> exec(ut.execstr_funckw(depc.get), globals()) >>> aids = [1, 2, 3] >>> _debug = True >>> tablename = 'smk_match' >>> tablename = 'vocab' >>> table = depc[tablename] >>> root_rowids = [aids] >>> prop_list = depc.get( >>> tablename, root_rowids, colnames, config) >>> result = ('prop_list = %s' % (ut.repr2(prop_list),)) >>> print(result) prop_list = [('vocab([root(1;2;3)]:42)',)]
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.depcache_control import * # NOQA >>> from wbia.dtool.example_depcache2 import * # NOQA >>> from wbia.dtool.example_depcache import * # NOQA >>> depc = testdata_depc3(True) >>> exec(ut.execstr_funckw(depc.get), globals()) >>> aids = [1, 2, 3] >>> _debug = True >>> depc = testdata_depc() >>> tablename = 'chip' >>> table = depc[tablename] >>> root_rowids = aids >>> # Ensure chips are computed >>> prop_list1 = depc.get(tablename, root_rowids) >>> # Get file paths and delete them >>> prop_list2 = depc.get(tablename, root_rowids, read_extern=False) >>> n = ut.remove_file_list(ut.take_column(prop_list2, 1)) >>> assert n == len(prop_list2), 'files were not computed' >>> prop_list3 = depc.get(tablename, root_rowids) >>> assert np.all(prop_list1[0][1] == prop_list3[0][1]), 'computed same info'
get_uuids(tablename, root_rowids, config=None)
TODO: Make uuids for dependent objects based on the root uuid and path of construction.
graph
make_graph(**kwargs)
Constructs a networkx representation of the dependency graph.
CommandLine:
python -m dtool --tf DependencyCache.make_graph --show --reduced
python -m wbia.control.IBEISControl show_depc_annot_graph --show --reduced
python -m wbia.control.IBEISControl show_depc_annot_graph --show --reduced --testmode
python -m wbia.control.IBEISControl show_depc_annot_graph --show --testmode
python -m wbia.control.IBEISControl --test-show_depc_image_graph --show --reduced
python -m wbia.control.IBEISControl --test-show_depc_image_graph --show
python -m wbia.scripts.specialdraw double_depcache_graph --show --testmode
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.depcache_control import * # NOQA >>> from wbia.dtool.example_depcache import testdata_depc >>> import utool as ut >>> depc = testdata_depc() >>> graph = depc.make_graph(reduced=ut.get_argflag('--reduced')) >>> ut.quit_if_noshow() >>> import wbia.plottool as pt >>> pt.ensureqt() >>> import networkx as nx >>> #pt.show_nx(nx.dag.transitive_closure(graph)) >>> #pt.show_nx(ut.nx_transitive_reduction(graph)) >>> pt.show_nx(graph) >>> pt.show_nx(graph, layout='agraph') >>> ut.show_if_requested()
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.depcache_control import * # NOQA >>> from wbia.dtool.example_depcache import testdata_depc >>> import utool as ut >>> depc = testdata_depc() >>> graph = depc.make_graph(reduced=True) >>> # xdoctest: +REQUIRES(--show) >>> ut.quit_if_noshow() >>> import wbia.plottool as pt >>> pt.ensureqt() >>> import networkx as nx >>> #pt.show_nx(nx.dag.transitive_closure(graph)) >>> #pt.show_nx(ut.nx_transitive_reduction(graph)) >>> pt.show_nx(graph) >>> pt.show_nx(graph, layout='agraph') >>> ut.show_if_requested()
make_root_info_uuid(root_rowids, info_props)
Creates a uuid that depends on certain properties of the root object. This is used for implicit cache invalidation, because if those properties change then this uuid also changes.
The depcache needs to know about stateful properties of dynamic root objects in order to correctly compute their hashes.
>>> #ibs = wbia.opendb(defaultdb='testdb1')
>>> root_rowids = ibs._get_all_aids()
>>> depc = ibs.depc_annot
>>> info_props = ['image_uuid', 'verts', 'theta']
>>> info_props = ['image_uuid', 'verts', 'theta', 'name', 'species', 'yaw']
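A rough sketch of the idea follows. It is purely illustrative: the helper function and namespace constant below are hypothetical and not part of this module.

```python
import uuid

# Hypothetical namespace constant, used only for this illustration.
_ROOT_INFO_NAMESPACE = uuid.UUID('00000000-0000-0000-0000-000000000000')

def sketch_root_info_uuid(root_props):
    """Fold stateful root properties into one deterministic uuid.

    root_props is a mapping like {'image_uuid': ..., 'verts': ..., 'theta': ...}.
    If any property value changes, the resulting uuid changes, which is what
    drives the implicit cache invalidation described above.
    """
    # A stable textual form of the property values, keyed in sorted order.
    text = repr(sorted(root_props.items()))
    return uuid.uuid5(_ROOT_INFO_NAMESPACE, text)

# Example: identical properties always map to the same uuid.
props = {'image_uuid': 'abc', 'verts': ((0, 0), (1, 1)), 'theta': 0.0}
assert sketch_root_info_uuid(props) == sketch_root_info_uuid(dict(props))
```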
notify_root_changed(root_rowids, prop, force_delete=False)
This is where we are notified that a "registered" root property has changed.
reduced_graph
root
rrr(verbose=True, reload_module=True)
Special class reloading function. This function is often injected as rrr of classes.
stacked_config(source, dest, config)
CommandLine:
python -m dtool.depcache_control stacked_config --show
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.depcache_control import * # NOQA >>> from wbia.dtool.example_depcache import testdata_depc >>> depc = testdata_depc() >>> source = depc.root >>> dest = 'fgweight' >>> config = {} >>> stacked_config = depc.stacked_config(source, dest, config) >>> cfgstr = stacked_config.get_cfgstr() >>> result = ('cfgstr = %s' % (ut.repr2(cfgstr),)) >>> print(result)
tablenames
tables
wbia.dtool.depcache_control.make_depcache_decors(root_tablename)
Makes global decorators to register functions for a tablename.
A preproc function is meant to belong only to a single parent. An algo function belongs to the root node, and may depend on a set of root nodes rather than just a single one; a sketch of the registration pattern follows.
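The decorator-based registration pattern can be sketched as follows. The register_preproc name and its keyword arguments are assumptions modeled on the example_depcache modules documented below, not a verbatim API reference; the table and function names are hypothetical.

```python
# Rough sketch of registering a preproc function on a DependencyCache.
# Assumptions (not verified against the real API): a register_preproc
# decorator with these keyword arguments, and preproc functions that
# yield one output tuple per parent rowid.
from wbia.dtool.example_depcache import testdata_depc

depc = testdata_depc()  # example cache rooted at a dummy annotation table

@depc.register_preproc(
    tablename='toy_measure',   # hypothetical table added to the graph
    parents=[depc.root],       # a preproc belongs to a single parent
    colnames=['area'],         # columns the table produces
    coltypes=[float],
)
def compute_toy_measure(depc, parent_rowids, config=None):
    # Vectorized convention: one yielded tuple per parent rowid.
    for rowid in parent_rowids:
        yield (float(rowid) * 2.0,)
```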
wbia.dtool.depcache_object module
wbia.dtool.depcache_table module
Module containing DependencyCacheTable.
python -m dtool.depcache_control --exec-make_graph --show
python -m dtool.depcache_control --exec-make_graph --show --reduce
FIXME:
- RECTIFY: ismulti / ismodel need to be rectified. This indicates that this table receives multiple inputs from at least one parent table.
- RECTIFY: Need to standardize parent rowids -vs- parent args. In one-to-one cases they are the same. In multi cases the rowids indicate a uuid and the args are the saved set of rowids that exist in the manifest.
- RECTIFY: is rowid_list row-major or column-major? I think currently rowid_list is row-major and rowid_listT is column-major, but this may not be consistent.
class wbia.dtool.depcache_table.DependencyCacheTable(depc=None, parent_tablenames=None, tablename=None, data_colnames=None, data_coltypes=None, preproc_func=None, docstr='no docstr', fname=None, asobject=False, chunksize=None, isinteractive=False, default_to_unpack=False, default_onthefly=False, rm_extern_on_delete=False, vectorized=True, taggable=False)
Bases: wbia.dtool.depcache_table._TableGeneralHelper, wbia.dtool.depcache_table._TableInternalSetup, wbia.dtool.depcache_table._TableDebugHelper, wbia.dtool.depcache_table._TableComputeHelper, wbia.dtool.depcache_table._TableConfigHelper
An individual node in the dependency graph.
All SQL column information is stored in:
- internal_col_attrs - keeps track of internal info
Additional metadata about specific columns is stored in:
- parent_col_attrs - keeps track of parent info
- data_col_attrs - keeps track of computed data
db – pointer to underlying database (Type: dtool.SQLDatabaseController)
depc – pointer to parent cache (Type: dtool.DependencyCache)
tablename – name of the table (Type: str)
docstr – documentation for table (Type: str)
parent_tablenames – parent tables in depcache (Type: str)
data_colnames – columns produced by preproc_func (Type: List[str])
data_coltypes – column SQL types produced by preproc_func (Type: List[str])
preproc_func – worker function (Type: func)
vectorized – by default it is assumed registered functions can process multiple inputs at once (Type: bool)
taggable – specifies if a computed object can be disconnected from its ancestors and accessed via a tag (Type: bool)
CommandLine:
python -m dtool.depcache_table --exec-DependencyCacheTable
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.depcache_table import * # NOQA >>> from wbia.dtool.example_depcache import testdata_depc >>> depc = testdata_depc() >>> print(depc['vsmany']) >>> print(depc['spam']) >>> print(depc['vsone']) >>> print(depc['nnindexer'])
delete_rows(rowid_list, delete_extern=None, dry=False, verbose=None)
CommandLine:
python -m dtool.depcache_table --exec-delete_rows
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.depcache_table import * # NOQA >>> from wbia.dtool.example_depcache import testdata_depc >>> depc = testdata_depc() >>> #table = depc['keypoint'] >>> table = depc['chip'] >>> exec(ut.execstr_funckw(table.delete_rows), globals()) >>> tablename = table.tablename >>> graph = depc.explicit_graph >>> config1 = None >>> config2 = table.configclass(version=-1) >>> config3 = table.configclass(version=-1, ext='.jpg') >>> config4 = table.configclass(ext='.jpg') >>> # Create several configs of rowid >>> aids = [1, 2, 3] >>> depc.get_rowids('spam', aids, config=config1) >>> depc.get_rowids('spam', aids, config=config2) >>> depc.get_rowids('spam', aids, config=config3) >>> depc.get_rowids('spam', aids, config=config4) >>> # Delete the png configs >>> rowid_list1 = depc.get_rowids(table.tablename, aids, >>> config=config2) >>> rowid_list2 = depc.get_rowids(table.tablename, aids, >>> config=config1) >>> rowid_list = rowid_list1 + rowid_list2 >>> assert len(ut.setintersect_ordered(rowid_list1, rowid_list2)) == 0 >>> table.delete_rows(rowid_list)
ensure_rows(parent_ids_, preproc_args, config=None, verbose=True, _debug=None)
Lazy addition.
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.depcache_table import * # NOQA >>> from wbia.dtool.example_depcache2 import testdata_depc3 >>> depc = testdata_depc3() >>> table = depc['vsone'] >>> exec(ut.execstr_funckw(table.get_rowid), globals()) >>> config = table.configclass() >>> _debug = 5 >>> verbose = True >>> # test duplicate inputs are detected and accounted for >>> parent_rowids = [(i, i) for i in list(range(100))] * 100 >>> rectify_tup = table._rectify_ids(parent_rowids) >>> (parent_ids_, preproc_args, idxs1, idxs2) = rectify_tup >>> rowids = table.ensure_rows(parent_ids_, preproc_args, config=config, _debug=_debug) >>> result = ('rowids = %r' % (rowids,)) >>> print(result)
export_rows(rowid, target)
The goal of this is to export taggable data that can be used independently of its dependent features.
TODO List:
- Gather information about columns
- Native and (localized) external data
- <table>_rowid - non-transferable
- Parent UUIDS - non-transferable
- config rowid - non-transferable
- model_uuid -
- augment_bit - transferable - trivial
- words_extern_uri - copy to destination
- feat_setsize - transferable - trivial
- model_tag
- Should also gather info from manifest:
- feat_setuuid_primary_ids - non-transferable
- feat_setuuid_model_input - non-transferable
- Should gather exhaustive config history
- Save to disk
- Add function to reload data in exported format
- Getters should be able to specify a tag in place of the root input for the tagged. Additionally, native root-ids should also be allowed.
rowid = 1
get_internal_columns(tbl_rowids, colnames=None, eager=True, nInput=None, unpack_scalars=True, keepwrap=False, showprog=False)
Access data in this table using the table PRIMARY KEY rowids (not depc PRIMARY ids).
get_row_data(tbl_rowids, colnames=None, _debug=None, read_extern=True, num_retries=1, eager=True, nInput=None, ensure=True, delete_on_fail=True, showprog=False, unpack_columns=None)
FIXME: unpacking is confusing with the sql controller. TODO: Clean up and allow for eager=False.
colnames = ('mask', 'size')
CommandLine:
python -m dtool.depcache_table --test-get_row_data:0
python -m dtool.depcache_table --test-get_row_data:1
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.depcache_table import * # NOQA >>> from wbia.dtool.example_depcache import testdata_depc >>> depc = testdata_depc() >>> table = depc['chip'] >>> exec(ut.execstr_funckw(table.get_row_data), globals()) >>> tbl_rowids = depc.get_rowids('chip', [1, 2, 3], _debug=True, recompute=True) >>> colnames = ('size_1', 'size', 'chip' + EXTERN_SUFFIX, 'chip') >>> kwargs = dict(read_extern=True, num_retries=1, _debug=True) >>> prop_list = table.get_row_data(tbl_rowids, colnames, **kwargs) >>> prop_list0 = ut.take_column(prop_list, [0, 1, 2]) # data subset >>> result = (ut.repr2(prop_list0, nl=1)) >>> print(result) >>> #_debug, num_retries, read_extern = True, 1, True >>> prop_gen = table.get_row_data(tbl_rowids, colnames, eager=False) >>> prop_list2 = list(prop_gen) >>> assert len(prop_list2) == len(prop_list), 'inconsistent lens' >>> assert all([ut.lists_eq(prop_list2[1], prop_list[1]) for x in range(len(prop_list))]), 'inconsistent vals' >>> chips = table.get_row_data(tbl_rowids, 'chip', eager=False)
[
    [2453, (1707, 2453), 'chip_chip_id=1_pyrappzicqoskdjq.png'],
    [250, (300, 250), 'chip_chip_id=2_pyrappzicqoskdjq.png'],
    [372, (545, 372), 'chip_chip_id=3_pyrappzicqoskdjq.png'],
]
Example
>>> # ENABLE_DOCTEST >>> # Test external / ensure getters >>> from wbia.dtool.example_depcache import * # NOQA >>> depc = testdata_depc() >>> table = depc['chip'] >>> exec(ut.execstr_funckw(table.get_row_data), globals()) >>> depc.clear_all() >>> config = {} >>> aids = [1,] >>> read_extern = False >>> tbl_rowids = depc.get_rowids('chip', aids, config=config) >>> data_fpaths = depc.get('chip', aids, 'chip', config=config, read_extern=False) >>> # Ensure data is recomputed if an external file is missing >>> ut.remove_fpaths(data_fpaths) >>> data = table.get_row_data(tbl_rowids, 'chip', read_extern=False, ensure=False) >>> data = table.get_row_data(tbl_rowids, 'chip', read_extern=False, ensure=True)
get_rowid(parent_rowids, config=None, ensure=True, eager=True, nInput=None, recompute=False, _debug=None, num_retries=1)
Returns the rowids of derived properties. If they do not exist it computes them.
Parameters:
parent_rowids (list) – list of tuples with the parent rowids as the value of each tuple
config (None) – (default = None)
ensure (bool) – eager evaluation if True (default = True)
eager (bool) – (default = True)
nInput (int) – (default = None)
recompute (bool) – (default = False)
_debug (None) – (default = None)
Returns: rowid_list
Return type: list
CommandLine:
python -m dtool.depcache_table --exec-get_rowid
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.depcache_table import * # NOQA >>> from wbia.dtool.example_depcache2 import testdata_depc3 >>> depc = testdata_depc3() >>> table = depc['labeler'] >>> exec(ut.execstr_funckw(table.get_rowid), globals()) >>> config = table.configclass() >>> _debug = True >>> parent_rowids = list(zip([1, None, None, 2])) >>> rowids = table.get_rowid(parent_rowids, config=config, _debug=_debug) >>> result = ('rowids = %r' % (rowids,)) >>> print(result) rowids = [1, None, None, 2]
number_of_rows
rrr(verbose=True, reload_module=True)
Special class reloading function. This function is often injected as rrr of classes.
class wbia.dtool.depcache_table.ExternType(read_func, write_func, extern_ext=None, extkey=None)
Bases: ubelt.util_mixins.NiceRepr
Type to denote an external resource not saved in an SQL table.
exception wbia.dtool.depcache_table.ExternalStorageException(*args, **kwargs)
Bases: Exception
Indicates a missing external file.
wbia.dtool.example_depcache module
CommandLine:
python -m dtool.example_depcache --exec-dummy_example_depcacahe --show
python -m dtool.depcache_control --exec-make_graph --show
class wbia.dtool.example_depcache.DummyAnnotMatch(qaid=None, daids=None, qnid=None, dnid_list=None, annot_score_list=None, unique_nids=None, name_score_list=None)
Bases: wbia.dtool.base.MatchResult
class wbia.dtool.example_depcache.DummyChipConfig(**kwargs)
Bases: wbia.dtool.base.Config
Example
>>> # ENABLE_DOCTEST
>>> from wbia.dtool.example_depcache import *  # NOQA
>>> cfg = DummyChipConfig()
>>> cfg.dim_size = 700
>>> cfg.histeq = True
>>> print(cfg)
>>> cfg.histeq = False
>>> print(cfg)
class wbia.dtool.example_depcache.DummyIndexerConfig(**kwargs)
Bases: wbia.dtool.base.Config
class wbia.dtool.example_depcache.DummyKptsConfig(**kwargs)
Bases: wbia.dtool.base.Config
class wbia.dtool.example_depcache.DummyNNConfig(**kwargs)
Bases: wbia.dtool.base.Config
class wbia.dtool.example_depcache.DummySVERConfig(**kwargs)
Bases: wbia.dtool.base.Config
class wbia.dtool.example_depcache.DummyVsManyConfig(**kwargs)
Bases: wbia.dtool.base.Config
class wbia.dtool.example_depcache.DummyVsManyRequest
Bases: wbia.dtool.base.VsManySimilarityRequest
Example
>>> # ENABLE_DOCTEST
>>> from wbia.dtool.example_depcache import *  # NOQA
>>> algo_config = DummyVsManyConfig()
>>> print(algo_config)
rrr(verbose=True, reload_module=True)
Special class reloading function. This function is often injected as rrr of classes.
class wbia.dtool.example_depcache.DummyVsOneConfig(**kwargs)
Bases: wbia.dtool.base.Config
class wbia.dtool.example_depcache.DummyVsOneMatch
Bases: wbia.dtool.base.AlgoResult, utool.util_dev.NiceRepr
class wbia.dtool.example_depcache.DummyVsOneRequest
Bases: wbia.dtool.base.VsOneSimilarityRequest
rrr(verbose=True, reload_module=True)
Special class reloading function. This function is often injected as rrr of classes.
class wbia.dtool.example_depcache.ProbchipConfig(**kwargs)
Bases: wbia.dtool.base.Config
CommandLine:
python -m dtool.example_depcache --exec-ProbchipConfig --show
Example
>>> # DISABLE_DOCTEST >>> from wbia.dtool.depcache_control import * # NOQA >>> from wbia.dtool.example_depcache import testdata_depc >>> depc = testdata_depc() >>> table = depc['probchip'] >>> exec(ut.execstr_funckw(table.get_rowid), globals()) >>> config = table.configclass(testerror=True) >>> root_rowids = [1, 2, 3] >>> parent_rowids = list(zip(root_rowids)) >>> proptup_gen = list(table.preproc_func(depc, root_rowids, config)) >>> pc_rowids = depc.get_rowids('probchip', root_rowids, config) >>> prop_list2 = depc.get('probchip', root_rowids, config=config, read_extern=False) >>> print(prop_list2) >>> #depc.new_request('probchip', [1, 2, 3]) >>> fg_rowids = depc.get_rowids('fgweight', root_rowids, config) >>> fg = depc.get('fgweight', root_rowids, config=config) >>> ############# >>> config = table.configclass(testerror=False) >>> root_rowids = [1, 2, 3] >>> parent_rowids = list(zip(root_rowids)) >>> proptup_gen = list(table.preproc_func(depc, root_rowids, config)) >>> pc_rowids2 = depc.get_rowids('probchip', root_rowids, config) >>> prop_list2 = depc.get('probchip', root_rowids, config=config, read_extern=False) >>> print(prop_list2) >>> #depc.new_request('probchip', [1, 2, 3]) >>> fg_rowids2 = depc.get_rowids('fgweight', root_rowids, config)
wbia.dtool.example_depcache.dummy_example_depcacahe()
CommandLine:
python -m dtool.example_depcache --exec-dummy_example_depcacahe
Example
>>> # ENABLE_DOCTEST
>>> from wbia.dtool.example_depcache import *  # NOQA
>>> depc = dummy_example_depcacahe()
wbia.dtool.example_depcache2 module
wbia.dtool.example_depcache2.testdata_custom_annot_depc(dummy_dependencies, in_memory=True)
wbia.dtool.example_depcache2.testdata_depc3(in_memory=True)
Example of local registration.
CommandLine:
python -m dtool.example_depcache2 testdata_depc3 --show
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.example_depcache2 import * # NOQA >>> depc = testdata_depc3() >>> data = depc.get('labeler', [1, 2, 3], 'data', _debug=True) >>> data = depc.get('indexer', [[1, 2, 3]], 'data', _debug=True) >>> depc.print_all_tables() >>> # xdoctest: +REQUIRES(--show) >>> ut.quit_if_noshow() >>> import wbia.plottool as pt >>> depc.show_graph() >>> from wbia.plottool.interactions import ExpandableInteraction >>> inter = ExpandableInteraction(nCols=2) >>> depc['smk_match'].show_input_graph(inter) >>> depc['vsone'].show_input_graph(inter) >>> #depc['vocab'].show_input_graph(inter) >>> depc['neighbs'].show_input_graph(inter) >>> inter.start() >>> #depc['viewpoint_classification'].show_input_graph() >>> ut.show_if_requested()
wbia.dtool.example_depcache2.testdata_depc4(in_memory=True)
Example of local registration.
CommandLine:
python -m dtool.example_depcache2 testdata_depc4 --show
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.example_depcache2 import * # NOQA >>> depc = testdata_depc4() >>> #data = depc.get('labeler', [1, 2, 3], 'data', _debug=True) >>> #data = depc.get('indexer', [[1, 2, 3]], 'data', _debug=True) >>> depc.print_all_tables() >>> # xdoctest: +REQUIRES(--show) >>> ut.quit_if_noshow() >>> import wbia.plottool as pt >>> depc.show_graph() >>> from wbia.plottool.interactions import ExpandableInteraction >>> inter = ExpandableInteraction(nCols=2) >>> depc['smk_match'].show_input_graph(inter) >>> depc['vsone'].show_input_graph(inter) >>> depc['vocab'].show_input_graph(inter) >>> depc['neighbs'].show_input_graph(inter) >>> inter.start() >>> #depc['viewpoint_classification'].show_input_graph() >>> ut.show_if_requested()
wbia.dtool.experimental_features module
wbia.dtool.input_helpers module
class wbia.dtool.input_helpers.BranchId(accum_ids, k, parent_colx)
Bases: utool.util_class.HashComparable
class wbia.dtool.input_helpers.ExiNode(node_id, branch_id)
Bases: utool.util_class.HashComparable
Expanded Input Node. Helps distinguish nodes and branch_ids.
branch_id
node_id
class wbia.dtool.input_helpers.RootMostInput(node, sink, exi_graph)
Bases: utool.util_class.HashComparable
ismulti
class wbia.dtool.input_helpers.TableInput(rmi_list, exi_graph, table, reorder=False)
Bases: utool.util_dev.NiceRepr
Specifies a set of inputs that can validly compute the output of a table in the dependency graph.
expand_input(index, inplace=False)
Pushes the rootmost inputs all the way up to the sources of the graph.
CommandLine:
python -m dtool.input_helpers expand_input
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.input_helpers import * # NOQA >>> from wbia.dtool.example_depcache2 import * # NOQA >>> depc = testdata_depc4() >>> inputs = depc['smk_match'].rootmost_inputs >>> inputs = depc['neighbs'].rootmost_inputs >>> print('(pre-expand) inputs = %r' % (inputs,)) >>> index = 'indexer' >>> inputs2 = inputs.expand_input(index) >>> print('(post-expand) inputs2 = %r' % (inputs2,)) >>> assert 'indexer' in str(inputs), 'missing indexer1' >>> assert 'indexer' not in str(inputs2), ( >>> '(2) unexpected indexer in %s' % (inputs2,))
expected_input_depth()
Example
>>> # DISABLE_DOCTEST >>> from wbia.dtool.input_helpers import * # NOQA >>> from wbia.dtool.example_depcache2 import * # NOQA >>> depc = testdata_depc4() >>> inputs = depc['neighbs'].rootmost_inputs >>> index = 'indexer' >>> inputs = inputs.expand_input(index) >>> size = inputs.expected_input_depth() >>> print('size = %r' % (size,)) >>> inputs = depc['feat'].rootmost_inputs >>> size = inputs.expected_input_depth() >>> print('size = %r' % (size,))
flat_compute_order()
This is basically the scheduler.
CommandLine:
python -m dtool.input_helpers flat_compute_order
Example
>>> # xdoctest: +REQUIRES(--fixme) >>> from wbia.dtool.input_helpers import * # NOQA >>> from wbia.dtool.example_depcache2 import * # NOQA >>> depc = testdata_depc4() >>> inputs = depc['feat'].rootmost_inputs.total_expand() >>> flat_compute_order = inputs.flat_compute_order() >>> result = ut.repr2(flat_compute_order) ... >>> print(result) [chip[t, t:1, 1:1], probchip[t, t:1, 1:1], feat[t, t:1]]
flat_compute_rmi_edges()
Defines the order of computation that maps input_ids to target_ids.
CommandLine:
python -m dtool.input_helpers flat_compute_rmi_edges
Returns: compute_edges
Each item is a tuple of input/output RootMostInputs: ([parent_1, …, parent_n], node_i)
All parents should be known before you reach the i-th item in the list. Results of the i-th item may be used in subsequent item computations.
Return type: list
Example
>>> from wbia.dtool.input_helpers import * # NOQA >>> from wbia.dtool.example_depcache2 import * # NOQA >>> depc =testdata_custom_annot_depc([ ... dict(tablename='chips', parents=['annot']), ... dict(tablename='Notch_Tips', parents=['annot']), ... dict(tablename='Cropped_Chips', parents=['chips', 'Notch_Tips']), ... ]) >>> table = depc['Cropped_Chips'] >>> inputs = exi_inputs = table.rootmost_inputs.total_expand() >>> compute_rmi_edges = exi_inputs.flat_compute_rmi_edges() >>> input_rmis = compute_rmi_edges[-1][0] >>> result = ut.repr2(input_rmis) >>> print(result) [chips[t, t:1, 1:1], Notch_Tips[t, t:1, 1:1]]
rrr(verbose=True, reload_module=True)
Special class reloading function. This function is often injected as rrr of classes.
show_exi_graph(inter=None)
CommandLine:
python -m dtool.input_helpers TableInput.show_exi_graph --show
Example
>>> # DISABLE_DOCTEST >>> from wbia.dtool.input_helpers import * # NOQA >>> from wbia.dtool.example_depcache2 import * # NOQA >>> depc = testdata_depc3() >>> # table = depc['smk_match'] >>> table = depc['neighbs'] >>> inputs = table.rootmost_inputs >>> print('inputs = %r' % (inputs,)) >>> import wbia.plottool as pt >>> from wbia.plottool.interactions import ExpandableInteraction >>> inter = ExpandableInteraction(nCols=1) >>> inputs.show_exi_graph(inter=inter) >>> # FIXME; Expanding inputs can overspecify inputs >>> #inputs = inputs.expand_input(2) >>> #print('inputs = %r' % (inputs,)) >>> #inputs.show_exi_graph(inter=inter) >>> #inputs = inputs.expand_input(1) >>> #inputs = inputs.expand_input(3) >>> #inputs = inputs.expand_input(2) >>> #inputs = inputs.expand_input(2) >>> #inputs = inputs.expand_input(1) >>> #print('inputs = %r' % (inputs,)) >>> #inputs.show_exi_graph(inter=inter) >>> inter.start() >>> ut.show_if_requested()
wbia.dtool.input_helpers.get_rootmost_inputs(exi_graph, table)
Parameters:
exi_graph (nx.Graph) – made from make_expanded_input_graph(graph, target)
table (dtool.Table) –
CommandLine:
python -m dtool.input_helpers get_rootmost_inputs --show
python -m dtool.input_helpers get_rootmost_inputs
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.input_helpers import * # NOQA >>> from wbia.dtool.example_depcache2 import * # NOQA >>> depc = testdata_depc3() >>> tablename = 'smk_match' >>> table = depc[tablename] >>> exi_graph = table.expanded_input_graph >>> inputs_ = get_rootmost_inputs(exi_graph, table) >>> print('inputs_ = %r' % (inputs_,)) >>> inputs = inputs_.expand_input(1) >>> rmi = inputs.rmi_list[0] >>> result = ('inputs = %s' % (inputs,)) + '\n' >>> result += ('compute_edges = %s' % (ut.repr2(inputs.flat_compute_rmi_edges(), nl=1))) >>> print(result)
wbia.dtool.input_helpers.make_expanded_input_graph(graph, target)
Starting from the target property we trace all possible paths in the graph back to all sources.
Parameters:
graph (nx.DiMultiGraph) – the dependency graph with a single source.
target (str) – a single target node in graph
Notes
Each edge in the graph must have a local_input_id that defines the type of edge it is (e.g. one-to-many, one-to-one, nwise/multi).
Step 1: Extract the relevant subgraph. We start by searching for all sources of the graph (we assume there is only one). Then we extract the subgraph defined by all edges between the sources and the target. We augment this graph with a dummy super source s and super sink t. This allows us to associate an edge with the real source and sink.
Step 2: Trace all paths from s to t. Create a set of all paths from the source to the sink and accumulate the local_input_id of each edge along the path. This will uniquely identify each path. We use a hack to condense the accumulated ids in order to display them nicely.
Step 3: Create the new exi_graph. Using the traced paths with ids we construct a new graph representing expanded inputs. The nodes in the original graph will be copied for each unique path that passes through the node. We identify these nodes using the accumulated ids built along the edges in our path set. For each path starting from the target we add each node augmented with the accumulated ids on its output(?) edge. We also add the edges along these paths, which results in the final exi_graph.
Step 4: Identify valid input candidates. The purpose of this graph is to identify which inputs are needed to compute dependent properties. One valid set of inputs is all sources of the graph. However, sometimes it is preferable to specify a model that may have been trained from many inputs. Therefore any node with a one-to-many input edge may also be specified as an input.
Step 5: Identify root-most inputs. The user will only specify one possible set of the inputs. We refer to this set as the "root-most" inputs. This is a set of candidate nodes such that all paths from the sink to the super source are blocked. We default to the set of inputs which results in the fewest dependency computations. However, this is arbitrary.
The last step that is not represented here is to compute the order that the branches must be specified in when given to the depcache for a computation.
Returns: exi_graph – the expanded input graph
Return type: nx.DiGraph
Notes
All * nodes are defined to be distinct. TODO: To make a * node non-distinct it must be suffixed with an identifier.
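To make Steps 1 and 2 above concrete, here is a minimal, self-contained sketch (not the actual implementation) of tracing source-to-target paths with networkx and accumulating the local_input_id attribute along each path; the toy graph and edge ids are invented for illustration.

```python
# Minimal sketch of Steps 1-2: find sources, trace all simple paths to the
# target, and accumulate the 'local_input_id' edge attribute along each path.
import networkx as nx

graph = nx.DiGraph()
graph.add_edge('annot', 'chip', local_input_id='1')
graph.add_edge('chip', 'feat', local_input_id='1')
graph.add_edge('feat', 'vocab', local_input_id='*')       # one-to-many edge
graph.add_edge('vocab', 'smk_match', local_input_id='1')
graph.add_edge('feat', 'smk_match', local_input_id='1')

target = 'smk_match'
sources = [n for n in graph.nodes if graph.in_degree(n) == 0]

accumulated = []
for source in sources:
    for path in nx.all_simple_paths(graph, source, target):
        # Collect the local_input_id of every edge along this path; the
        # accumulated ids uniquely identify the path.
        ids = [graph.edges[u, v]['local_input_id']
               for u, v in zip(path, path[1:])]
        accumulated.append((path, ids))

for path, ids in accumulated:
    print(' -> '.join(path), 'ids =', ids)
```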
CommandLine:
python -m dtool.input_helpers make_expanded_input_graph --show
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.input_helpers import * # NOQA >>> from wbia.dtool.example_depcache2 import * # NOQA >>> depc = testdata_depc3() >>> table = depc['smk_match'] >>> table = depc['vsone'] >>> graph = table.depc.explicit_graph.copy() >>> target = table.tablename >>> exi_graph = make_expanded_input_graph(graph, target) >>> x = list(exi_graph.nodes())[0] >>> print('x = %r' % (x,)) >>> # xdoctest: +REQUIRES(--show) >>> ut.quit_if_noshow() >>> import wbia.plottool as pt >>> pt.show_nx(graph, fnum=1, pnum=(1, 2, 1)) >>> pt.show_nx(exi_graph, fnum=1, pnum=(1, 2, 2)) >>> ut.show_if_requested()
wbia.dtool.input_helpers.sort_rmi_list(rmi_list)
CommandLine:
python -m dtool.input_helpers sort_rmi_list
Example
>>> from wbia.dtool.input_helpers import * # NOQA >>> from wbia.dtool.example_depcache2 import * # NOQA >>> depc =testdata_custom_annot_depc([ ... dict(tablename='Notch_Tips', parents=['annot']), ... dict(tablename='chips', parents=['annot']), ... dict(tablename='Cropped_Chips', parents=['chips', 'Notch_Tips']), ... ]) >>> table = depc['Cropped_Chips'] >>> inputs = exi_inputs = table.rootmost_inputs >>> compute_rmi_edges = exi_inputs.flat_compute_rmi_edges() >>> input_rmis = compute_rmi_edges[-1][0] >>> rmi_list = input_rmis[::-1] >>> rmi_list = sort_rmi_list(rmi_list) >>> assert rmi_list[0].node[0] == 'chips'
wbia.dtool.old_stuff module
wbia.dtool.sql_control module
Interface into SQL for the IBEIS Controller.
TODO: need to use some sort of sticky bit so sql files are created with reasonable permissions.
class wbia.dtool.sql_control.SQLColumnRichInfo(column_id, name, type_, notnull, dflt_value, pk)
Bases: tuple
column_id – Alias for field number 0
dflt_value – Alias for field number 4
name – Alias for field number 1
notnull – Alias for field number 3
pk – Alias for field number 5
type_ – Alias for field number 2
class wbia.dtool.sql_control.SQLDatabaseController(sqldb_dpath='.', sqldb_fname='database.sqlite3', text_factory=<class 'str'>, inmemory=None, fpath=None, readonly=None, always_check_metadata=True, timeout=600)
Bases: object
Interface to an SQL database.
add_cleanly(tblname, colnames, params_iter, get_rowid_from_superkey, superkey_paramx=(0, ), **kwargs)
ADDER. Extra input: the first item of params_iter must be a superkey (like a uuid).
Does not add None values. Does not add duplicate values. For each None input returns None output. For each duplicate input returns the existing rowid.
Parameters:
tblname (str) – table name to add into
colnames (tuple of strs) – columns whose values are specified in params_iter
params_iter (iterable) – an iterable of tuples where each tuple corresponds to a row
get_rowid_from_superkey (func) – function that tests if a row needs to be added. It should return None for any new rows to be inserted. It should return the existing rowid if one exists
superkey_paramx (tuple of ints) – indices of tuples in params_iter which correspond to superkeys. defaults to (0,)
Returns: rowid_list_ – list of newly added or previously added rowids
Return type: iterable
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.sql_control import * # NOQA >>> db = SQLDatabaseController(sqldb_fname=':memory:') >>> db.add_table('dummy_table', ( >>> ('rowid', 'INTEGER PRIMARY KEY'), >>> ('key', 'TEXT'), >>> ('superkey1', 'TEXT'), >>> ('superkey2', 'TEXT'), >>> ('val', 'TEXT'), >>> ), >>> superkeys=[('key',), ('superkey1', 'superkey2')], >>> docstr='') >>> db.print_schema() >>> tblname = 'dummy_table' >>> colnames = ('key', 'val') >>> params_iter = [('spam', 'eggs'), ('foo', 'bar')] >>> # Find a useable superkey >>> superkey_colnames = db.get_table_superkey_colnames(tblname) >>> superkey_paramx = None >>> for superkey in superkey_colnames: >>> if all(k in colnames for k in superkey): >>> superkey_paramx = [colnames.index(k) for k in superkey] >>> superkey_colnames = ut.take(colnames, superkey_paramx) >>> break >>> def get_rowid_from_superkey(superkeys_list): >>> return db.get_where_eq(tblname, ('rowid',), zip(superkeys_list), superkey_colnames) >>> rowid_list_ = db.add_cleanly( >>> tblname, colnames, params_iter, get_rowid_from_superkey, superkey_paramx) >>> print(rowid_list_)
add_table(tablename=None, coldef_list=None, **metadata_keyval)
Parameters:
tablename (str) –
coldef_list (list) –
constraint (list or None) –
docstr (str) –
superkeys (list or None) – list of tuples of column names which uniquely identify a rowid
dump_schema()
Convenience: Dumps all csv database files to disk. NOTE: This function is semi-obsolete because of the auto-generated current schema file. Use dump_schema_current_autogeneration instead for all purposes except for parsing out the database schema or for a concise visual representation.
executemany(operation, params_iter, verbose=False, unpack_scalars=True, nInput=None, eager=True, keepwrap=False, showprog=False)
If unpack_scalars is True, only a single result must be returned for each query.
exists_where_eq(tblname, params_iter, where_colnames, op='AND', unpack_scalars=True, eager=True, **kwargs)
Hacked-in function for nicer templates.
get(tblname, colnames, id_iter=None, id_colname='rowid', eager=True, assume_unique=False, **kwargs)
Getter.
Parameters:
tblname (str) – table name to get from
colnames (tuple of str) – column names to grab from
id_iter (iterable) – iterable of search keys
id_colname (str) – column to be used as the search key (default: rowid)
eager (bool) – use eager evaluation
unpack_scalars (bool) – default True
assume_unique (bool) – default False. Experimental feature that could result in a 10x speedup
CommandLine:
python -m dtool.sql_control get
Ignore:
tblname = 'annotations'
colnames = ('name_rowid',)
id_iter = aid_list
#id_iter = id_iter[0:20]
id_colname = 'rowid'
eager = True
db = ibs.db
x1 = db.get(tblname, colnames, id_iter, assume_unique=True)
x2 = db.get(tblname, colnames, id_iter, assume_unique=False)
x1 == x2
%timeit db.get(tblname, colnames, id_iter, assume_unique=True)
%timeit db.get(tblname, colnames, id_iter, assume_unique=False)
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.example_depcache import testdata_depc >>> depc = testdata_depc() >>> depc.clear_all() >>> rowids = depc.get_rowids('notch', [1, 2, 3]) >>> table = depc['notch'] >>> db = table.db >>> table.print_csv() >>> # Break things to test set >>> colnames = ('dummy_annot_rowid',) >>> got_data = db.get('notch', colnames, id_iter=rowids) >>> assert got_data == [1, 2, 3]
get_all_col_rows(tblname, colname)
Returns a list of all rowids from a table in ascending order.
get_all_rowids(tblname, **kwargs)
Returns a list of all rowids from a table in ascending order.
get_all_rowids_where(tblname, where_clause, params, **kwargs)
Returns a list of rowids from a table in ascending order satisfying a condition.
get_coldef_list(tablename)
Returns: each tuple is (col_name, col_type)
Return type: list of (str, str)
get_columns(tablename)
Parameters: tablename (str) – table name
Returns: list of tuples with format:
(
    column_id : id of the column
    name : the name of the column
    type_ : the type of the column
    notnull : 0 or 1 if the column can contain null values
    dflt_value : the default value
    pk : 0 or 1 if the column participates in the primary key
)
Return type: column_list
References
http://stackoverflow.com/questions/17717829/how-to-get-column-names-from-a-table-in-sqlite-via-pragma-net-c
http://stackoverflow.com/questions/1601151/how-do-i-check-in-sqlite-whether-a-table-exists
CommandLine:
python -m dtool.sql_control --exec-get_columns
python -m dtool.sql_control --exec-get_columns --tablename=contributors
python -m dtool.sql_control --exec-get_columns --tablename=nonexist
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.sql_control import * # NOQA >>> from wbia.dtool.example_depcache import testdata_depc >>> depc = testdata_depc() >>> tablename = 'keypoint' >>> db = depc[tablename].db >>> colrichinfo_list = db.get_columns(tablename) >>> result = ('colrichinfo_list = %s' % (ut.repr2(colrichinfo_list, nl=1),)) >>> print(result) colrichinfo_list = [ (0, 'keypoint_rowid', 'INTEGER', 0, None, 1), (1, 'chip_rowid', 'INTEGER', 1, None, 0), (2, 'config_rowid', 'INTEGER', 0, '0', 0), (3, 'kpts', 'NDARRAY', 0, None, 0), (4, 'num', 'INTEGER', 0, None, 0), ]
get_db_init_uuid(ensure=True)
Get the database initialization (creation) UUID.
CommandLine:
python -m dtool.sql_control get_db_init_uuid
Example
>>> # ENABLE_DOCTEST >>> import uuid >>> from wbia.dtool.sql_control import * # NOQA >>> # Check random database gets new UUID on init >>> db = SQLDatabaseController(sqldb_fname=':memory:') >>> uuid_ = db.get_db_init_uuid() >>> print('New Database: %r is valid' % (uuid_, )) >>> assert isinstance(uuid_, uuid.UUID) >>> # Check existing database keeps UUID >>> sqldb_dpath = ut.ensure_app_resource_dir('dtool') >>> sqldb_fname = u'test_database.sqlite3' >>> readonly = False >>> db1 = SQLDatabaseController(sqldb_dpath, sqldb_fname) >>> uuid_1 = db1.get_db_init_uuid() >>> db2 = SQLDatabaseController(sqldb_dpath, sqldb_fname) >>> uuid_2 = db2.get_db_init_uuid() >>> print('Existing Database: %r == %r' % (uuid_1, uuid_2, )) >>> assert uuid_1 == uuid_2
get_metadata_items()
Returns: metadata_items
Return type: list
CommandLine:
python -m dtool.sql_control --exec-get_metadata_items
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.example_depcache import testdata_depc >>> from wbia.dtool.sql_control import * # NOQA >>> db = testdata_depc()['notch'].db >>> metadata_items = db.get_metadata_items() >>> result = ('metadata_items = %s' % (ut.repr2(sorted(metadata_items)),)) >>> print(result)
get_metadata_val(key, eval_=False, default=None)
val is the repr string unless eval_ is true.
get_rowid_from_superkey(tblname, params_iter=None, superkey_colnames=None, **kwargs)
Getter which uses the constrained superkeys instead of rowids.
get_schema_current_autogeneration_str(autogen_cmd='')
Convenience: Autogenerates the most up-to-date database schema.
CommandLine:
python -m dtool.sql_control --exec-get_schema_current_autogeneration_str
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.sql_control import * # NOQA >>> from wbia.dtool.example_depcache import testdata_depc >>> depc = testdata_depc() >>> tablename = 'keypoint' >>> db = depc[tablename].db >>> result = db.get_schema_current_autogeneration_str('') >>> print(result)
get_table_as_pandas(tablename, rowids=None, columns=None, exclude_columns=[])
aid = 30
db = ibs.staging
rowids = ut.flatten(ibs.get_review_rowids_from_single([aid]))
tablename = 'reviews'
exclude_columns = 'review_user_confidence review_user_identity'.split(' ')
print(db.get_table_as_pandas(tablename, rowids, exclude_columns=exclude_columns))
db = ibs.db
rowids = ut.flatten(ibs.get_annotmatch_rowids_from_aid([aid]))
tablename = 'annotmatch'
exclude_columns = 'annotmatch_confidence annotmatch_posixtime_modified annotmatch_reviewer'.split(' ')
print(db.get_table_as_pandas(tablename, rowids, exclude_columns=exclude_columns))
get_table_autogen_dict(tablename)
Parameters: tablename (str) –
Returns: autogen_dict
Return type: dict
CommandLine:
python -m dtool.sql_control get_table_autogen_dict
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.sql_control import * # NOQA >>> db = SQLDatabaseController(sqldb_fname=':memory:') >>> tablename = 'dummy_table' >>> db.add_table(tablename, ( >>> ('rowid', 'INTEGER PRIMARY KEY'), >>> ('value1', 'TEXT'), >>> ('value2', 'TEXT NOT NULL'), >>> ('value3', 'TEXT DEFAULT 1'), >>> ('time_added', "INTEGER DEFAULT (CAST(STRFTIME('%s', 'NOW', 'UTC') AS INTEGER))") >>> )) >>> autogen_dict = db.get_table_autogen_dict(tablename) >>> result = ut.repr2(autogen_dict, nl=2) >>> print(result)
get_table_autogen_str(tablename)
Parameters: tablename (str) –
Returns: quoted_docstr
Return type: str
CommandLine:
python -m dtool.sql_control get_table_autogen_str
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.sql_control import * # NOQA >>> db = SQLDatabaseController(sqldb_fname=':memory:') >>> tablename = 'dummy_table' >>> db.add_table(tablename, ( >>> ('rowid', 'INTEGER PRIMARY KEY'), >>> ('value', 'TEXT'), >>> ('time_added', "INTEGER DEFAULT (CAST(STRFTIME('%s', 'NOW', 'UTC') AS INTEGER))") >>> )) >>> result = '\n'.join(db.get_table_autogen_str(tablename)) >>> print(result)
get_table_column_data(tablename, columns=None, exclude_columns=[], rowids=None)
Grabs a table of information.
CommandLine:
python -m dtool.sql_control --test-get_table_column_data
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.sql_control import * # NOQA >>> from wbia.dtool.example_depcache import testdata_depc >>> depc = testdata_depc() >>> tablename = 'keypoint' >>> db = depc[tablename].db >>> column_list, column_names = db.get_table_column_data(tablename)
get_table_csv(tablename, exclude_columns=[], rowids=None, truncate=False)
Converts a tablename to csv format.
Parameters:
tablename (str) –
exclude_columns (list) –
Returns: csv_table
Return type: str
CommandLine:
python -m dtool.sql_control --test-get_table_csv
python -m dtool.sql_control --exec-get_table_csv --tablename=contributors
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.sql_control import * # NOQA >>> from wbia.dtool.example_depcache import testdata_depc >>> depc = testdata_depc() >>> depc.clear_all() >>> rowids = depc.get_rowids('notch', [1, 2, 3]) >>> table = depc['notch'] >>> db = table.db >>> ut.exec_funckw(db.get_table_csv, globals()) >>> tablename = 'notch' >>> csv_table = db.get_table_csv(tablename, exclude_columns, truncate=True) >>> print(csv_table)
get_table_docstr(tablename)
CommandLine:
python -m dtool.sql_control --exec-get_table_docstr
Example0:
>>> # ENABLE_DOCTEST >>> from wbia.dtool.sql_control import * # NOQA >>> from wbia.dtool.example_depcache import testdata_depc >>> depc = testdata_depc() >>> tablename = 'keypoint' >>> db = depc[tablename].db >>> result = db.get_table_docstr(tablename) >>> print(result) Used to store individual chip features (ellipses)
get_table_new_transferdata(tablename, exclude_columns=[])
CommandLine:
python -m dtool.sql_control --test-get_table_column_data
python -m dtool.sql_control --test-get_table_new_transferdata
python -m dtool.sql_control --test-get_table_new_transferdata:1
Example
>>> # ENABLE_DOCTEST >>> from wbia.dtool.sql_control import * # NOQA >>> from wbia.dtool.example_depcache import testdata_depc >>> depc = testdata_depc() >>> tablename = 'keypoint' >>> db = depc[tablename].db >>> tablename_list = db.get_table_names() >>> colrichinfo_list = db.get_columns(tablename) >>> for tablename in tablename_list: ... new_transferdata = db.get_table_new_transferdata(tablename) ... column_list, column_names, extern_colx_list, extern_superkey_colname_list, extern_superkey_colval_list, extern_tablename_list, extern_primarycolnames_list = new_transferdata ... print('tablename = %r' % (tablename,)) ... print('colnames = ' + ut.repr2(column_names)) ... print('extern_colx_list = ' + ut.repr2(extern_colx_list)) ... print('extern_superkey_colname_list = ' + ut.repr2(extern_superkey_colname_list)) ... print('L___')
Example
>>> # SLOW_DOCTEST >>> # xdoctest: +REQUIRES(module:wbia) >>> from wbia.dtool.sql_control import * # NOQA >>> import wbia >>> ibs = wbia.opendb('testdb1') >>> db = ibs.db >>> exclude_columns = [] >>> tablename_list = ibs.db.get_table_names() >>> for tablename in tablename_list: ... new_transferdata = db.get_table_new_transferdata(tablename) ... column_list, column_names, extern_colx_list, extern_superkey_colname_list, extern_superkey_colval_list, extern_tablename_list, extern_primarycolnames_list = new_transferdata ... print('tablename = %r' % (tablename,)) ... print('colnames = ' + ut.repr2(column_names)) ... print('extern_colx_list = ' + ut.repr2(extern_colx_list)) ... print('extern_superkey_colname_list = ' + ut.repr2(extern_superkey_colname_list)) ... print('L___')
Example
>>> # SLOW_DOCTEST
>>> # xdoctest: +REQUIRES(module:wbia)
>>> from wbia.dtool.sql_control import * # NOQA
>>> import wbia
>>> ibs = wbia.opendb('testdb1')
>>> db = ibs.db
>>> exclude_columns = []
>>> tablename = ibs.const.IMAGE_TABLE
>>> new_transferdata = db.get_table_new_transferdata(tablename)
>>> column_list, column_names, extern_colx_list, extern_superkey_colname_list, extern_superkey_colval_list, extern_tablename_list, extern_primarycolnames_list = new_transferdata
>>> dependsmap = db.get_metadata_val(tablename + '_dependsmap', eval_=True, default=None)
>>> print('tablename = %r' % (tablename,))
>>> print('colnames = ' + ut.repr2(column_names))
>>> print('extern_colx_list = ' + ut.repr2(extern_colx_list))
>>> print('extern_superkey_colname_list = ' + ut.repr2(extern_superkey_colname_list))
>>> print('dependsmap = %s' % (ut.repr2(dependsmap, nl=True),))
>>> print('L___')
>>> tablename = ibs.const.ANNOTATION_TABLE
>>> new_transferdata = db.get_table_new_transferdata(tablename)
>>> column_list, column_names, extern_colx_list, extern_superkey_colname_list, extern_superkey_colval_list, extern_tablename_list, extern_primarycolnames_list = new_transferdata
>>> dependsmap = db.get_metadata_val(tablename + '_dependsmap', eval_=True, default=None)
>>> print('tablename = %r' % (tablename,))
>>> print('colnames = ' + ut.repr2(column_names))
>>> print('extern_colx_list = ' + ut.repr2(extern_colx_list))
>>> print('extern_superkey_colname_list = ' + ut.repr2(extern_superkey_colname_list))
>>> print('dependsmap = %s' % (ut.repr2(dependsmap, nl=True),))
>>> print('L___')
-
get_table_superkey_colnames
(tablename)[source]¶ Actually returns a list of tuples. The name should be changed to get_table_superkey_colnames_list.
Parameters: tablename (str) –
Returns: superkeys
Return type: list
- CommandLine:
- python -m dtool.sql_control --test-get_table_superkey_colnames
- python -m wbia --tf get_table_superkey_colnames --tablename=contributors
- python -m wbia --tf get_table_superkey_colnames --db PZ_Master0 --tablename=annotations
- python -m wbia --tf get_table_superkey_colnames --db PZ_Master0 --tablename=contributors # NOQA
- Example0:
>>> # ENABLE_DOCTEST
>>> from wbia.dtool.sql_control import * # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> db = depc['chip'].db
>>> superkeys = db.get_table_superkey_colnames('chip')
>>> result = ut.repr2(superkeys, nl=False)
>>> print(result)
[('dummy_annot_rowid', 'config_rowid')]
-
get_where
(tblname, colnames, params_iter, where_clause, unpack_scalars=True, eager=True, **kwargs)[source]¶
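The docstring gives no example for get_where. The following is an illustrative sketch only (not part of the original documentation): it reuses the 'keypoint' table and the column names shown in the make_json_table_definition example below, passing an explicit WHERE clause whose ? placeholders are filled from each element of params_iter. The rowid values are made up for illustration.
>>> # ILLUSTRATIVE SKETCH (not from the original docstring)
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> depc.get_rowids('keypoint', [1, 2, 3])  # populate the keypoint table
>>> db = depc['keypoint'].db
>>> # one (chip_rowid, config_rowid) pair per requested lookup; values are illustrative
>>> params_iter = [(1, 1), (2, 1)]
>>> nums = db.get_where('keypoint', ('num',), params_iter,
>>>                     where_clause='chip_rowid=? AND config_rowid=?')
>>> print(nums)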
-
get_where_eq
(tblname, colnames, params_iter, where_colnames, unpack_scalars=True, eager=True, op='AND', **kwargs)[source]¶ Hacked-in function for nicer templates: performs an equality lookup, joining the columns in where_colnames with op to build the WHERE clause (see the sketch below).
Defaults: unpack_scalars = True, kwargs = {}
- Kwargs:
- verbose:
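Illustrative sketch only (not from the original docstring): the same lookup as the get_where sketch above, but letting get_where_eq build the equality WHERE clause from where_colnames.
>>> # ILLUSTRATIVE SKETCH (not from the original docstring)
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> depc.get_rowids('keypoint', [1, 2, 3])
>>> db = depc['keypoint'].db
>>> # equivalent to where_clause='chip_rowid=? AND config_rowid=?' in get_where
>>> nums = db.get_where_eq('keypoint', ('num',), [(1, 1), (2, 1)],
>>>                        where_colnames=('chip_rowid', 'config_rowid'), op='AND')
>>> print(nums)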
-
get_where_eq_set
(tblname, colnames, params_iter, where_colnames, unpack_scalars=True, eager=True, op='AND', **kwargs)[source]¶
-
make_json_table_definition
(tablename)[source]¶ Very hacky function right now; needs to be fixed later.
Parameters: tablename –
Returns: new_transferdata
Return type: ?
- CommandLine:
- python -m wbia --tf sql_control.make_json_table_definition
- CommandLine:
- python -m utool --tf iter_module_doctestable --modname=dtool.sql_control --include_inherited=True
- python -m dtool.sql_control --exec-make_json_table_definition
Example
>>> # ENABLE_DOCTEST
>>> from wbia.dtool.sql_control import * # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> tablename = 'keypoint'
>>> db = depc[tablename].db
>>> table_def = db.make_json_table_definition(tablename)
>>> result = ('table_def = %s' % (ut.repr2(table_def, nl=True),))
>>> print(result)
table_def = {
    'keypoint_rowid': 'INTEGER',
    'chip_rowid': 'INTEGER',
    'config_rowid': 'INTEGER',
    'kpts': 'NDARRAY',
    'num': 'INTEGER',
}
-
merge_databases_new
(db_src, ignore_tables=None, rowid_subsets=None)[source]¶ Copies all non-rowid properties into another SQL table and handles annotated dependencies. Does not handle external files. Could handle dependency-tree order, but that is not yet implemented.
FINISHME
Parameters: db_src (SQLController) – merge data from db_src into db
- CommandLine:
- python -m dtool.sql_control --test-merge_databases_new:0
- python -m dtool.sql_control --test-merge_databases_new:2
- Example0:
>>> # DISABLE_DOCTEST
>>> # xdoctest: +REQUIRES(module:wbia)
>>> from wbia.dtool.sql_control import * # NOQA
>>> import wbia
>>> #ibs_dst = wbia.opendb(dbdir='testdb_dst')
>>> ibs_src = wbia.opendb(db='testdb1')
>>> # OPEN A CLEAN DATABASE
>>> ibs_dst = wbia.opendb(dbdir='test_sql_merge_dst1', allow_newdir=True, delete_ibsdir=True)
>>> ibs_src.ensure_contributor_rowids()
>>> # build test data
>>> db = ibs_dst.db
>>> db_src = ibs_src.db
>>> rowid_subsets = None
>>> # execute function
>>> db.merge_databases_new(db_src)
- Example1:
>>> # DISABLE_DOCTEST
>>> # xdoctest: +REQUIRES(module:wbia)
>>> from wbia.dtool.sql_control import * # NOQA
>>> import wbia
>>> ibs_src = wbia.opendb(db='testdb2')
>>> # OPEN A CLEAN DATABASE
>>> ibs_dst = wbia.opendb(dbdir='test_sql_merge_dst2', allow_newdir=True, delete_ibsdir=True)
>>> ibs_src.ensure_contributor_rowids()
>>> # build test data
>>> db = ibs_dst.db
>>> db_src = ibs_src.db
>>> ignore_tables = ['lblannot', 'lblimage', 'image_lblimage_relationship', 'annotation_lblannot_relationship', 'keys']
>>> rowid_subsets = None
>>> # execute function
>>> db.merge_databases_new(db_src, ignore_tables=ignore_tables)
- Example2:
>>> # DISABLE_DOCTEST
>>> # xdoctest: +REQUIRES(module:wbia)
>>> from wbia.dtool.sql_control import * # NOQA
>>> import wbia
>>> ibs_src = wbia.opendb(db='testdb2')
>>> # OPEN A CLEAN DATABASE
>>> ibs_src.fix_invalid_annotmatches()
>>> ibs_dst = wbia.opendb(dbdir='test_sql_subexport_dst2', allow_newdir=True, delete_ibsdir=True)
>>> ibs_src.ensure_contributor_rowids()
>>> # build test data
>>> db = ibs_dst.db
>>> db_src = ibs_src.db
>>> ignore_tables = ['lblannot', 'lblimage', 'image_lblimage_relationship', 'annotation_lblannot_relationship', 'keys']
>>> # execute function
>>> aid_subset = [1, 2, 3]
>>> rowid_subsets = {ANNOTATION_TABLE: aid_subset,
...                  NAME_TABLE: ibs_src.get_annot_nids(aid_subset),
...                  IMAGE_TABLE: ibs_src.get_annot_gids(aid_subset),
...                  ANNOTMATCH_TABLE: [],
...                  GSG_RELATION_TABLE: [],
...                  }
>>> db.merge_databases_new(db_src, ignore_tables=ignore_tables, rowid_subsets=rowid_subsets)
-
modify_table
(tablename=None, colmap_list=None, tablename_new=None, drop_columns=[], add_columns=[], rename_columns=[], **metadata_keyval)[source]¶ Function to modify a table's schema; only the columns being added, removed, or changed need to be enumerated.
Parameters: - tablename (str) – tablename
- colmap_list (list) – list of tuples (orig_colname, new_colname, new_coltype, convert_func):
  orig_colname – the original name of the column; None to append, int for index
  new_colname – the new column name ('' for same, None to delete)
  new_coltype – the new column type; None to use data unmodified
  convert_func – function to convert data from old to new
- constraint (str) –
- superkeys (list) –
- docstr (str) –
- tablename_new –
Example
>>> # DISABLE_DOCTEST
>>> def loc_zip_map(x):
...     return x
>>> db.modify_table(const.CONTRIBUTOR_TABLE, (
>>>     # orig_colname, new_colname, new_coltype, convert_func
>>>     # a non-needed, but correct mapping (identity function)
>>>     ('contrib_rowid', '', '', None),
>>>     # for new columns, function is ignored (TYPE CANNOT BE EMPTY IF ADDING)
>>>     (None, 'contrib_loc_address', 'TEXT', None),
>>>     # adding a new column at index 4 (if index is invalid, None is used)
>>>     (4, 'contrib_loc_address', 'TEXT', None),
>>>     # for deleted columns, type and function are ignored
>>>     ('contrib_loc_city', None, '', None),
>>>     # for renamed columns, type and function are ignored
>>>     ('contrib_loc_city', 'contrib_loc_town', '', None),
>>>     ('contrib_loc_zip', 'contrib_loc_zip', 'TEXT', loc_zip_map),
>>>     # type not changing, only NOT NULL provision
>>>     ('contrib_loc_country', '', 'TEXT NOT NULL', None),
>>>     ),
>>>     superkeys=[('contributor_rowid',)],
>>>     constraint=[],
>>>     docstr='Used to store the contributors to the project'
>>> )
-
rrr
(verbose=True, reload_module=True)¶ special class reloading function This function is often injected as rrr of classes
-
set
(tblname, colnames, val_iter, id_iter, id_colname='rowid', duplicate_behavior='error', duplcate_auto_resolve=True, **kwargs)[source]¶ Setter: writes the values in val_iter into colnames for the rows identified by id_iter.
- CommandLine:
- python -m dtool.sql_control set
Example
>>> # ENABLE_DOCTEST
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> depc.clear_all()
>>> rowids = depc.get_rowids('notch', [1, 2, 3])
>>> table = depc['notch']
>>> db = table.db
>>> table.print_csv()
>>> # Break things to test set
>>> colnames = ('dummy_annot_rowid',)
>>> val_iter = [9003, 9001, 9002]
>>> orig_data = db.get('notch', colnames, id_iter=rowids)
>>> db.set('notch', colnames, val_iter, id_iter=rowids)
>>> new_data = db.get('notch', colnames, id_iter=rowids)
>>> assert new_data == val_iter
>>> assert new_data != orig_data
>>> table.print_csv()
>>> depc.clear_all()
-
tablenames
¶
-
-
class
wbia.dtool.sql_control.
SQLExecutionContext
(db, operation, nInput=None, auto_commit=True, start_transaction=False, keepwrap=False, verbose=False, tablename=None)[source]¶ Bases:
object
Context manager for transactional database calls
FIXME: hash out details. I don’t think anybody who programmed this knows what is going on here. So much for fine grained control.
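No usage example is given for SQLExecutionContext. The following is a hedged sketch of how the context manager might be driven; the execute_and_generate_results call is an assumption about the context object's API and should be checked against the source before use.
>>> # ILLUSTRATIVE SKETCH (not from the original docstring);
>>> # execute_and_generate_results is assumed, not documented here
>>> from wbia.dtool.sql_control import SQLExecutionContext
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> db = depc['notch'].db
>>> operation = 'SELECT rowid FROM notch'
>>> with SQLExecutionContext(db, operation, nInput=1) as context:
...     results = list(context.execute_and_generate_results(tuple()))
>>> print(results)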
-
class
wbia.dtool.sql_control.
SQLTable
(db, name)[source]¶ Bases:
utool.util_dev.NiceRepr
convenience object for dealing with a specific table
table = db
table = SQLTable(db, 'annotmatch')
-
rrr
(verbose=True, reload_module=True)¶ special class reloading function This function is often injected as rrr of classes
-
-
wbia.dtool.sql_control.
dev_test_new_schema_version
(dbname, sqldb_dpath, sqldb_fname, version_current, version_next=None)[source]¶ HACK
Hacky function to ensure that only the developer sees the development schema, and only on test databases.
-
wbia.dtool.sql_control.
flattenize
(list_)[source]¶ maps flatten to a tuplized list
Weird function. DEPRECATE
Example
>>> # DISABLE_DOCTEST
>>> import utool
>>> list_ = [[1, 2, 3], [2, 3, [4, 2, 1]], [3, 2], [[1, 2], [3, 4]]]
>>> val_list1 = [(1, 2), (2, 4), (5, 3)]
>>> id_list1 = [(1,), (2,), (3,)]
>>> out_list1 = utool.flattenize(zip(val_list1, id_list1))
>>> val_list2 = [1, 4, 5]
>>> id_list2 = [(1,), (2,), (3,)]
>>> out_list2 = utool.flattenize(zip(val_list2, id_list2))
>>> val_list3 = [1, 4, 5]
>>> id_list3 = [1, 2, 3]
>>> out_list3 = utool.flattenize(zip(val_list3, id_list3))
Timing (IPython):
out_list4 = list(zip(val_list3, id_list3))
%timeit utool.flattenize(zip(val_list1, id_list1))
%timeit utool.flattenize(zip(val_list2, id_list2))
%timeit utool.flattenize(zip(val_list3, id_list3))
%timeit list(zip(val_list3, id_list3))
100000 loops, best of 3: 14 us per loop
100000 loops, best of 3: 16.5 us per loop
100000 loops, best of 3: 18 us per loop
1000000 loops, best of 3: 1.18 us per loop
-
wbia.dtool.sql_control.
get_operation_type
(operation)[source]¶ Parses the operation_type from an SQL operation
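No example is given for get_operation_type; the following hedged sketch only shows the call shape. The exact return format is not documented here, so the printed value is not asserted.
>>> # ILLUSTRATIVE SKETCH (not from the original docstring)
>>> from wbia.dtool.sql_control import get_operation_type
>>> operation = 'SELECT keypoint_rowid FROM keypoint WHERE chip_rowid=?'
>>> optype = get_operation_type(operation)
>>> print(optype)  # expected to reflect the leading SQL verb (a SELECT here)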