Package dbf :: Module tables

Source Code for Module dbf.tables

   1  "table definitions" 
   2  import os 
   3  import sys 
   4  import csv 
   5  import codecs 
   6  import locale 
   7  import unicodedata 
   8  import weakref 
   9  from array import array 
  10  from decimal import Decimal 
  11  from shutil import copyfileobj 
  12  from dbf import _io as io 
  13  from dbf.dates import Date, DateTime, Time 
  14  from dbf.exceptions import Bof, Eof, DbfError, DataOverflow, FieldMissing, NonUnicode 
  15   
  16  input_decoding = locale.getdefaultlocale()[1]    # treat non-unicode data as ... 
  17  default_codepage = 'cp1252'  # if no codepage specified on dbf creation, use this 
  18  return_ascii = True         # convert back to icky ascii, losing chars if no mapping 
  19   
  20  version_map = { 
  21          '\x02' : 'FoxBASE', 
  22          '\x03' : 'dBase III Plus', 
  23          '\x04' : 'dBase IV', 
  24          '\x05' : 'dBase V', 
  25          '\x30' : 'Visual FoxPro', 
  26          '\x31' : 'Visual FoxPro (auto increment field)', 
  27          '\x43' : 'dBase IV SQL', 
  28          '\x7b' : 'dBase IV w/memos', 
  29          '\x83' : 'dBase III Plus w/memos', 
  30          '\x8b' : 'dBase IV w/memos', 
  31          '\x8e' : 'dBase IV w/SQL table', 
  32          '\xf5' : 'FoxPro w/memos'} 
  33   
code_pages = {
        '\x00' : ('ascii', "plain ol' ascii"),
        '\x01' : ('cp437', 'U.S. MS-DOS'),
        '\x02' : ('cp850', 'International MS-DOS'),
        '\x03' : ('cp1252', 'Windows ANSI'),
        '\x04' : ('mac_roman', 'Standard Macintosh'),
        '\x08' : ('cp865', 'Danish OEM'),
        '\x09' : ('cp437', 'Dutch OEM'),
        '\x0A' : ('cp850', 'Dutch OEM (secondary)'),
        '\x0B' : ('cp437', 'Finnish OEM'),
        '\x0D' : ('cp437', 'French OEM'),
        '\x0E' : ('cp850', 'French OEM (secondary)'),
        '\x0F' : ('cp437', 'German OEM'),
        '\x10' : ('cp850', 'German OEM (secondary)'),
        '\x11' : ('cp437', 'Italian OEM'),
        '\x12' : ('cp850', 'Italian OEM (secondary)'),
        '\x13' : ('cp932', 'Japanese Shift-JIS'),
        '\x14' : ('cp850', 'Spanish OEM (secondary)'),
        '\x15' : ('cp437', 'Swedish OEM'),
        '\x16' : ('cp850', 'Swedish OEM (secondary)'),
        '\x17' : ('cp865', 'Norwegian OEM'),
        '\x18' : ('cp437', 'Spanish OEM'),
        '\x19' : ('cp437', 'English OEM (Britain)'),
        '\x1A' : ('cp850', 'English OEM (Britain) (secondary)'),
        '\x1B' : ('cp437', 'English OEM (U.S.)'),
        '\x1C' : ('cp863', 'French OEM (Canada)'),
        '\x1D' : ('cp850', 'French OEM (secondary)'),
        '\x1F' : ('cp852', 'Czech OEM'),
        '\x22' : ('cp852', 'Hungarian OEM'),
        '\x23' : ('cp852', 'Polish OEM'),
        '\x24' : ('cp860', 'Portuguese OEM'),
        '\x25' : ('cp850', 'Portuguese OEM (secondary)'),
        '\x26' : ('cp866', 'Russian OEM'),
        '\x37' : ('cp850', 'English OEM (U.S.) (secondary)'),
        '\x40' : ('cp852', 'Romanian OEM'),
        '\x4D' : ('cp936', 'Chinese GBK (PRC)'),
        '\x4E' : ('cp949', 'Korean (ANSI/OEM)'),
        '\x4F' : ('cp950', 'Chinese Big 5 (Taiwan)'),
        '\x50' : ('cp874', 'Thai (ANSI/OEM)'),
        '\x57' : ('cp1252', 'ANSI'),
        '\x58' : ('cp1252', 'Western European ANSI'),
        '\x59' : ('cp1252', 'Spanish ANSI'),
        '\x64' : ('cp852', 'Eastern European MS-DOS'),
        '\x65' : ('cp866', 'Russian MS-DOS'),
        '\x66' : ('cp865', 'Nordic MS-DOS'),
        '\x67' : ('cp861', 'Icelandic MS-DOS'),
        '\x68' : (None, 'Kamenicky (Czech) MS-DOS'),
        '\x69' : (None, 'Mazovia (Polish) MS-DOS'),
        '\x6a' : ('cp737', 'Greek MS-DOS (437G)'),
        '\x6b' : ('cp857', 'Turkish MS-DOS'),
        '\x78' : ('cp950', 'Traditional Chinese (Hong Kong SAR, Taiwan) Windows'),
        '\x79' : ('cp949', 'Korean Windows'),
        '\x7a' : ('cp936', 'Chinese Simplified (PRC, Singapore) Windows'),
        '\x7b' : ('cp932', 'Japanese Windows'),
        '\x7c' : ('cp874', 'Thai Windows'),
        '\x7d' : ('cp1255', 'Hebrew Windows'),
        '\x7e' : ('cp1256', 'Arabic Windows'),
        '\xc8' : ('cp1250', 'Eastern European Windows'),
        '\xc9' : ('cp1251', 'Russian Windows'),
        '\xca' : ('cp1254', 'Turkish Windows'),
        '\xcb' : ('cp1253', 'Greek Windows'),
        '\x96' : ('mac_cyrillic', 'Russian Macintosh'),
        '\x97' : ('mac_latin2', 'Macintosh EE'),
        '\x98' : ('mac_greek', 'Greek Macintosh') }

if sys.version_info[:2] < (2, 6):
    # define our own property type
    class property(object):
        "Emulate PyProperty_Type() in Objects/descrobject.c"

        def __init__(self, fget=None, fset=None, fdel=None, doc=None):
            self.fget = fget
            self.fset = fset
            self.fdel = fdel
            self.__doc__ = doc or fget.__doc__
        def __call__(self, func):
            self.fget = func
            if not self.__doc__:
                self.__doc__ = func.__doc__    # was fget.__doc__ (a NameError)
            return self                        # needed for decorator use
        def __get__(self, obj, objtype=None):
            if obj is None:
                return self
            if self.fget is None:
                raise AttributeError, "unreadable attribute"
            return self.fget(obj)
        def __set__(self, obj, value):
            if self.fset is None:
                raise AttributeError, "can't set attribute"
            self.fset(obj, value)
        def __delete__(self, obj):
            if self.fdel is None:
                raise AttributeError, "can't delete attribute"
            self.fdel(obj)
        def setter(self, func):
            self.fset = func
            return self
        def deleter(self, func):
            self.fdel = func
            return self

class _DbfRecord(object):
    """Provides routines to extract and save data within the fields of a dbf record."""
    __slots__ = ['_recnum', '_layout', '_data', '__weakref__']
    def _retrieveFieldValue(yo, record_data, fielddef):
        """calls appropriate routine to fetch value stored in field from array
        @param record_data: the data portion of the record
        @type record_data: array of characters
        @param fielddef: description of the field definition
        @type fielddef: dictionary with keys 'type', 'start', 'length', 'end', 'decimals', and 'flags'
        @returns: python data stored in field"""
        field_type = fielddef['type']
        retrieve = yo._layout.fieldtypes[field_type]['Retrieve']
        datum = retrieve(record_data, fielddef, yo._layout.memo)
        if field_type in yo._layout.character_fields:
            datum = yo._layout.decoder(datum)[0]
            if yo._layout.return_ascii:
                try:
                    datum = yo._layout.output_encoder(datum)[0]
                except UnicodeEncodeError:
                    datum = unicodedata.normalize('NFD', datum).encode('ascii', 'ignore')
        return datum
    def _updateFieldValue(yo, fielddef, value):
        "calls appropriate routine to convert value to ascii bytes, and save it in record"
        field_type = fielddef['type']
        update = yo._layout.fieldtypes[field_type]['Update']
        if field_type in yo._layout.character_fields:
            if not isinstance(value, unicode):
                if yo._layout.input_decoder is None:
                    raise NonUnicode("String not in unicode format, no default encoding specified")
                value = yo._layout.input_decoder(value)[0]      # input ascii => unicode
            value = yo._layout.encoder(value)[0]                # unicode => table ascii
        bytes = array('c', update(value, fielddef, yo._layout.memo))
        size = fielddef['length']
        if len(bytes) > size:
            raise DataOverflow("tried to store %d bytes in %d byte field" % (len(bytes), size))
        blank = array('c', ' ' * size)
        start = fielddef['start']
        end = start + size
        blank[:len(bytes)] = bytes[:]
        yo._data[start:end] = blank[:]
        yo._update_disk(yo._recnum * yo._layout.header.record_length + yo._layout.header.start, yo._data.tostring())
    def _update_disk(yo, location='', data=None):
        if not yo._layout.inmemory:
            if yo._recnum < 0:
                raise DbfError("Attempted to update record that has been packed")
            if location == '':
                location = yo._recnum * yo._layout.header.record_length + yo._layout.header.start
            if data is None:
                data = yo._data
            yo._layout.dfd.seek(location)
            yo._layout.dfd.write(data)
    def __call__(yo, *specs):
        results = []
        if not specs:
            specs = yo._layout.index
        specs = _normalize_tuples(tuples=specs, length=2, filler=[_nop])
        for field, func in specs:
            results.append(func(yo[field]))
        return tuple(results)

    def __contains__(yo, key):
        return key in yo._layout.fields
    def __iter__(yo):
        return (yo[field] for field in yo._layout.fields)
    def __getattr__(yo, name):
        if name[0:2] == '__' and name[-2:] == '__':
            raise AttributeError, 'Method %s is not implemented.' % name
        elif not name in yo._layout.fields:
            raise FieldMissing(name)
        try:
            fielddef = yo._layout[name]
            value = yo._retrieveFieldValue(yo._data[fielddef['start']:fielddef['end']], fielddef)
            return value
        except DbfError, error:
            error.message = "field --%s-- is %s -> %s" % (name, yo._layout.fieldtypes[fielddef['type']]['Type'], error.message)
            raise
    def __getitem__(yo, item):
        if type(item) == int:
            if not -yo._layout.header.field_count <= item < yo._layout.header.field_count:
                raise IndexError("Field offset %d is not in record" % item)
            return yo[yo._layout.fields[item]]
        elif type(item) == slice:
            sequence = []
            for index in yo._layout.fields[item]:
                sequence.append(yo[index])
            return sequence
        elif type(item) == str:
            return yo.__getattr__(item)
        else:
            raise TypeError("%s is not a field name" % item)
    def __len__(yo):
        return yo._layout.header.field_count
    def __new__(cls, recnum, layout, kamikaze='', _fromdisk=False):
        """record = ascii array of entire record; layout=record specification; memo = memo object for table"""
        record = object.__new__(cls)
        record._recnum = recnum
        record._layout = layout
        if layout.blankrecord is None and not _fromdisk:
            record._createBlankRecord()
        record._data = layout.blankrecord
        if recnum == -1:                    # not a disk-backed record
            return record
        elif type(kamikaze) == array:
            record._data = kamikaze[:]
        elif type(kamikaze) == str:
            record._data = array('c', kamikaze)
        else:
            record._data = kamikaze._data[:]
        datalen = len(record._data)
        if datalen < layout.header.record_length:
            record._data.extend(layout.blankrecord[datalen:])
        elif datalen > layout.header.record_length:
            record._data = record._data[:layout.header.record_length]
        if not _fromdisk and not layout.inmemory:
            record._update_disk()
        return record
    def __setattr__(yo, name, value):
        if name in yo.__slots__:
            object.__setattr__(yo, name, value)
            return
        elif not name in yo._layout.fields:
            raise FieldMissing(name)
        fielddef = yo._layout[name]
        try:
            yo._updateFieldValue(fielddef, value)
        except DbfError, error:
            error.message = "field --%s-- is %s -> %s" % (name, yo._layout.fieldtypes[fielddef['type']]['Type'], error.message)
            error.data = name
            raise
            raise DbfError(message)    # unreachable leftover ('message' is undefined here)
    def __setitem__(yo, name, value):
        if type(name) == str:
            yo.__setattr__(name, value)
        elif type(name) in (int, long):
            yo.__setattr__(yo._layout.fields[name], value)
        else:
            raise TypeError("%s is not a field name" % name)
    def __str__(yo):
        result = []
        for field in yo.field_names:
            result.append("%-10s: %s" % (field, yo[field]))
        return '\n'.join(result)
    def __repr__(yo):
        return yo._data.tostring()
    def _createBlankRecord(yo):
        "creates a blank record data chunk"
        layout = yo._layout
        ondisk = layout.ondisk
        layout.ondisk = False
        yo._data = array('c', ' ' * layout.header.record_length)
        layout.memofields = []
        for field in layout.fields:
            yo._updateFieldValue(layout[field], layout.fieldtypes[layout[field]['type']]['Blank']())
            if layout[field]['type'] in layout.memotypes:
                layout.memofields.append(field)
        layout.blankrecord = yo._data[:]
        layout.ondisk = ondisk
    def delete_record(yo):
        "marks record as deleted"
        yo._data[0] = '*'
        yo._update_disk(data='*')
    @property
    def field_names(yo):
        "fields in table/record"
        return yo._layout.fields[:]
    def gather_fields(yo, dict, drop=False):
        "saves a dictionary into a record's fields\nkeys with no matching field will raise a FieldMissing exception unless drop = True"
        for key in dict:
            if not key in yo.field_names:
                if drop:
                    continue
                raise FieldMissing(key)
            yo.__setattr__(key, dict[key])
    @property
    def has_been_deleted(yo):
        "marked for deletion?"
        return yo._data[0] == '*'
    @property
    def record_number(yo):
        "physical record number"
        return yo._recnum
    @property
    def record_table(yo):
        table = yo._layout.table()
        if table is None:
            raise DbfError("table is no longer available")
        return table
    def reset_record(yo, keep_fields=None):
        "blanks record"
        if keep_fields is None:
            keep_fields = []
        keep = {}
        for field in keep_fields:
            keep[field] = yo[field]
        if yo._layout.blankrecord is None:
            yo._createBlankRecord()
        yo._data[:] = yo._layout.blankrecord[:]
        for field in keep_fields:
            yo[field] = keep[field]
        yo._update_disk()
    def scatter_fields(yo, blank=False):
        "returns a dictionary of fieldnames and values which can be used with gather_fields().  if blank is True, values are empty."
        keys = yo._layout.fields
        if blank:
            values = [yo._layout.fieldtypes[yo._layout[key]['type']]['Blank']() for key in keys]
        else:
            values = [yo[field] for field in keys]
        return dict(zip(keys, values))
    def undelete_record(yo):
        "marks record as active"
        yo._data[0] = ' '
        yo._update_disk(data=' ')
class _DbfMemo(object):
    """Provides access to memo fields as dictionaries
    must override _init, _get_memo, and _put_memo to
    store memo contents to disk"""
    def _init(yo):
        "initialize disk file usage"
    def _get_memo(yo, block):
        "retrieve memo contents from disk"
    def _put_memo(yo, data):
        "store memo contents to disk"
    def __init__(yo, meta):
        ""
        yo.meta = meta
        yo.memory = {}
        yo.nextmemo = 1
        yo._init()
        yo.meta.newmemofile = False
    def get_memo(yo, block, field):
        "gets the memo in block"
        if yo.meta.ignorememos or not block:
            return ''
        if yo.meta.ondisk:
            return yo._get_memo(block)
        else:
            return yo.memory[block]
    def put_memo(yo, data):
        "stores data in memo file, returns block number"
        if yo.meta.ignorememos or data == '':
            return 0
        if yo.meta.inmemory:
            thismemo = yo.nextmemo
            yo.nextmemo += 1
            yo.memory[thismemo] = data
        else:
            thismemo = yo._put_memo(data)
        return thismemo
class _Db3Memo(_DbfMemo):
    def _init(yo):
        "dBase III specific"
        yo.meta.memo_size = 512
        yo.record_header_length = 2
        if yo.meta.ondisk and not yo.meta.ignorememos:
            if yo.meta.newmemofile:
                yo.meta.mfd = open(yo.meta.memoname, 'w+b')
                yo.meta.mfd.write(io.packLongInt(1) + '\x00' * 508)
            else:
                try:
                    yo.meta.mfd = open(yo.meta.memoname, 'r+b')
                    yo.meta.mfd.seek(0)
                    yo.nextmemo = io.unpackLongInt(yo.meta.mfd.read(4))
                except:
                    raise DbfError("memo file appears to be corrupt")
    def _get_memo(yo, block):
        block = int(block)
        yo.meta.mfd.seek(block * yo.meta.memo_size)
        eom = -1
        data = ''
        while eom == -1:
            newdata = yo.meta.mfd.read(yo.meta.memo_size)
            if not newdata:
                return data
            data += newdata
            eom = data.find('\x1a\x1a')
        return data[:eom].rstrip()
    def _put_memo(yo, data):
        data = data.rstrip()
        length = len(data) + yo.record_header_length    # room for two ^Z at end of memo
        blocks = length // yo.meta.memo_size
        if length % yo.meta.memo_size:
            blocks += 1
        thismemo = yo.nextmemo
        yo.nextmemo = thismemo + blocks
        yo.meta.mfd.seek(0)
        yo.meta.mfd.write(io.packLongInt(yo.nextmemo))
        yo.meta.mfd.seek(thismemo * yo.meta.memo_size)
        yo.meta.mfd.write(data)
        yo.meta.mfd.write('\x1a\x1a')
        double_check = yo._get_memo(thismemo)
        if len(double_check) != len(data):
            uhoh = open('dbf_memo_dump.err', 'wb')
            uhoh.write('thismemo: %d' % thismemo)
            uhoh.write('nextmemo: %d' % yo.nextmemo)
            uhoh.write('saved: %d bytes' % len(data))
            uhoh.write(data)
            uhoh.write('retrieved: %d bytes' % len(double_check))
            uhoh.write(double_check)
            uhoh.close()
            raise DbfError("unknown error: memo not saved")
        return thismemo
class _VfpMemo(_DbfMemo):
    def _init(yo):
        "Visual Foxpro 6 specific"
        if yo.meta.ondisk and not yo.meta.ignorememos:
            yo.record_header_length = 8
            if yo.meta.newmemofile:
                if yo.meta.memo_size == 0:
                    yo.meta.memo_size = 1
                elif 1 < yo.meta.memo_size < 33:
                    yo.meta.memo_size *= 512
                yo.meta.mfd = open(yo.meta.memoname, 'w+b')
                nextmemo = 512 // yo.meta.memo_size
                if nextmemo * yo.meta.memo_size < 512:
                    nextmemo += 1
                yo.nextmemo = nextmemo
                yo.meta.mfd.write(io.packLongInt(nextmemo, bigendian=True) + '\x00\x00' + \
                        io.packShortInt(yo.meta.memo_size, bigendian=True) + '\x00' * 504)
            else:
                try:
                    yo.meta.mfd = open(yo.meta.memoname, 'r+b')
                    yo.meta.mfd.seek(0)
                    header = yo.meta.mfd.read(512)
                    yo.nextmemo = io.unpackLongInt(header[:4], bigendian=True)
                    yo.meta.memo_size = io.unpackShortInt(header[6:8], bigendian=True)
                except:
                    raise DbfError("memo file appears to be corrupt")
    def _get_memo(yo, block):
        yo.meta.mfd.seek(block * yo.meta.memo_size)
        header = yo.meta.mfd.read(8)
        length = io.unpackLongInt(header[4:], bigendian=True)
        return yo.meta.mfd.read(length)
    def _put_memo(yo, data):
        data = data.rstrip()    # no trailing whitespace
        yo.meta.mfd.seek(0)
        thismemo = io.unpackLongInt(yo.meta.mfd.read(4), bigendian=True)
        yo.meta.mfd.seek(0)
        length = len(data) + yo.record_header_length    # room for two ^Z at end of memo
        blocks = length // yo.meta.memo_size
        if length % yo.meta.memo_size:
            blocks += 1
        yo.meta.mfd.write(io.packLongInt(thismemo + blocks, bigendian=True))
        yo.meta.mfd.seek(thismemo * yo.meta.memo_size)
        yo.meta.mfd.write('\x00\x00\x00\x01' + io.packLongInt(len(data), bigendian=True) + data)
        return thismemo
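
# Memo usage sketch (illustrative): memo text lives in a companion file (.dbt for
# dBase III, .fpt for Visual FoxPro) addressed by block number; the 'M' field
# Retrieve/Update routines in dbf._io call these objects indirectly.
#
#   >>> block = table._meta.memo.put_memo('a long note')   # returns the block number
#   >>> table._meta.memo.get_memo(block, 'comments')       # 'comments' is a hypothetical memo field
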
class DbfTable(object):
    """Provides a framework for dbf style tables."""
    _version = 'basic memory table'
    _versionabbv = 'dbf'
    _fieldtypes = {
            'D' : { 'Type':'Date',    'Init':io.addDate,    'Blank':Date.today, 'Retrieve':io.retrieveDate,    'Update':io.updateDate, },
            'L' : { 'Type':'Logical', 'Init':io.addLogical, 'Blank':bool,       'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, },
            'M' : { 'Type':'Memo',    'Init':io.addMemo,    'Blank':str,        'Retrieve':io.retrieveMemo,    'Update':io.updateMemo, } }
    _memoext = ''
    _memotypes = tuple('M', )
    _memoClass = _DbfMemo
    _yesMemoMask = ''
    _noMemoMask = ''
    _fixed_fields = ('M','D','L')       # always same length in table
    _variable_fields = tuple()          # variable length in table
    _character_fields = tuple('M', )    # field representing character data
    _decimal_fields = tuple()           # text-based numeric fields
    _numeric_fields = tuple()           # fields representing a number
    _dbfTableHeader = array('c', '\x00' * 32)
    _dbfTableHeader[0] = '\x00'         # table type - none
    _dbfTableHeader[8:10] = array('c', io.packShortInt(33))
    _dbfTableHeader[10] = '\x01'        # record length -- one for delete flag
    _dbfTableHeader[29] = '\x00'        # code page -- none, using plain ascii
    _dbfTableHeader = _dbfTableHeader.tostring()
    _dbfTableHeaderExtra = ''
    _supported_tables = []
    _read_only = False
    _meta_only = False
    _use_deleted = True
    _backed_up = False
    class _MetaData(dict):
        blankrecord = None
        fields = None
        filename = None
        dfd = None
        memoname = None
        newmemofile = False
        memo = None
        mfd = None
        ignorememos = False
        memofields = None
        index = []                      # never mutated
        index_reversed = False
        orderresults = None
        current = -1
    class _TableHeader(object):
        def __init__(yo, data):
            if len(data) != 32:
                raise DbfError('table header should be 32 bytes, but is %d bytes' % len(data))
            yo._data = array('c', data + '\x0d')
        def codepage(yo, cp=None):
            "get/set code page of table"
            if cp is None:
                return yo._data[29]
            else:
                cp, sd, ld = _codepage_lookup(cp)
                yo._data[29] = cp
                return cp
        @property
        def data(yo):
            "main data structure"
            date = io.packDate(Date.today())
            yo._data[1:4] = array('c', date)
            return yo._data.tostring()
        @data.setter
        def data(yo, bytes):
            if len(bytes) < 32:
                raise DbfError("length for data of %d is less than 32" % len(bytes))
            yo._data[:] = array('c', bytes)
        @property
        def extra(yo):
            "extra dbf info (located after headers, before data records)"
            fieldblock = yo._data[32:]
            for i in range(len(fieldblock)//32+1):
                cr = i * 32
                if fieldblock[cr] == '\x0d':
                    break
            else:
                raise DbfError("corrupt field structure")
            cr += 33    # skip past CR
            return yo._data[cr:].tostring()
        @extra.setter
        def extra(yo, data):
            fieldblock = yo._data[32:]
            for i in range(len(fieldblock)//32+1):
                cr = i * 32
                if fieldblock[cr] == '\x0d':
                    break
            else:
                raise DbfError("corrupt field structure")
            cr += 33    # skip past CR
            yo._data[cr:] = array('c', data)                              # extra
            yo._data[8:10] = array('c', io.packShortInt(len(yo._data)))   # start
        @property
        def field_count(yo):
            "number of fields (read-only)"
            fieldblock = yo._data[32:]
            for i in range(len(fieldblock)//32+1):
                cr = i * 32
                if fieldblock[cr] == '\x0d':
                    break
            else:
                raise DbfError("corrupt field structure")
            return len(fieldblock[:cr]) // 32
        @property
        def fields(yo):
            "field block structure"
            fieldblock = yo._data[32:]
            for i in range(len(fieldblock)//32+1):
                cr = i * 32
                if fieldblock[cr] == '\x0d':
                    break
            else:
                raise DbfError("corrupt field structure")
            return fieldblock[:cr].tostring()
        @fields.setter
        def fields(yo, block):
            fieldblock = yo._data[32:]
            for i in range(len(fieldblock)//32+1):
                cr = i * 32
                if fieldblock[cr] == '\x0d':
                    break
            else:
                raise DbfError("corrupt field structure")
            cr += 32    # convert to indexing main structure
            fieldlen = len(block)
            if fieldlen % 32 != 0:
                raise DbfError("fields structure corrupt: %d is not a multiple of 32" % fieldlen)
            yo._data[32:cr] = array('c', block)                           # fields
            yo._data[8:10] = array('c', io.packShortInt(len(yo._data)))   # start
            fieldlen = fieldlen // 32
            recordlen = 1                                                 # deleted flag
            for i in range(fieldlen):
                recordlen += ord(block[i*32+16])
            yo._data[10:12] = array('c', io.packShortInt(recordlen))
        @property
        def record_count(yo):
            "number of records (maximum 16,777,215)"
            return io.unpackLongInt(yo._data[4:8].tostring())
        @record_count.setter
        def record_count(yo, count):
            yo._data[4:8] = array('c', io.packLongInt(count))
        @property
        def record_length(yo):
            "length of a record (read_only) (max of 65,535)"
            return io.unpackShortInt(yo._data[10:12].tostring())
        @property
        def start(yo):
            "starting position of first record in file (must be within first 64K)"
            return io.unpackShortInt(yo._data[8:10].tostring())
        @start.setter
        def start(yo, pos):
            yo._data[8:10] = array('c', io.packShortInt(pos))
        @property
        def update(yo):
            "date of last table modification (read-only)"
            return io.unpackDate(yo._data[1:4].tostring())
        @property
        def version(yo):
            "dbf version"
            return yo._data[0]
        @version.setter
        def version(yo, ver):
            yo._data[0] = ver
    class _Table(object):
        "implements the weakref table for records"
        def __init__(yo, count, meta):
            yo._meta = meta
            yo._weakref_list = [weakref.ref(lambda x: None)] * count
        def __getitem__(yo, index):
            maybe = yo._weakref_list[index]()
            if maybe is None:
                if index < 0:
                    index += yo._meta.header.record_count
                size = yo._meta.header.record_length
                location = index * size + yo._meta.header.start
                yo._meta.dfd.seek(location)
                bytes = yo._meta.dfd.read(size)
                maybe = _DbfRecord(recnum=index, layout=yo._meta, kamikaze=bytes, _fromdisk=True)
                yo._weakref_list[index] = weakref.ref(maybe)
            return maybe
        def append(yo, record):
            yo._weakref_list.append(weakref.ref(record))
    class DbfIterator(object):
        "returns records using current index"
        def __init__(yo, table):
            yo._table = table
            yo._index = -1
            yo._more_records = True
        def __iter__(yo):
            return yo
        def next(yo):
            while yo._more_records:
                yo._index += 1
                if yo._index >= len(yo._table):
                    yo._more_records = False
                    continue
                record = yo._table[yo._index]
                if not yo._table.use_deleted and record.has_been_deleted:
                    continue
                return record
            else:
                raise StopIteration
    def _buildHeaderFields(yo):
        "constructs fieldblock for disk table"
        fieldblock = array('c', '')
        memo = False
        yo._meta.header.version = chr(ord(yo._meta.header.version) & ord(yo._noMemoMask))
        for field in yo._meta.fields:
            if yo._meta.fields.count(field) > 1:
                raise DbfError("corrupted field structure (noticed in _buildHeaderFields)")
            fielddef = array('c', '\x00' * 32)
            fielddef[:11] = array('c', io.packStr(field))
            fielddef[11] = yo._meta[field]['type']
            fielddef[12:16] = array('c', io.packLongInt(yo._meta[field]['start']))
            fielddef[16] = chr(yo._meta[field]['length'])
            fielddef[17] = chr(yo._meta[field]['decimals'])
            fielddef[18] = chr(yo._meta[field]['flags'])
            fieldblock.extend(fielddef)
            if yo._meta[field]['type'] in yo._meta.memotypes:
                memo = True
        yo._meta.header.fields = fieldblock.tostring()
        if memo:
            yo._meta.header.version = chr(ord(yo._meta.header.version) | ord(yo._yesMemoMask))
            if yo._meta.memo is None:
                yo._meta.memo = yo._memoClass(yo._meta)
    def _checkMemoIntegrity(yo):
        "dBase III specific"
        if yo._meta.header.version == '\x83':
            try:
                yo._meta.memo = yo._memoClass(yo._meta)
            except:
                yo._meta.dfd.close()
                yo._meta.dfd = None
                raise
        if not yo._meta.ignorememos:
            for field in yo._meta.fields:
                if yo._meta[field]['type'] in yo._memotypes:
                    if yo._meta.header.version != '\x83':
                        yo._meta.dfd.close()
                        yo._meta.dfd = None
                        raise DbfError("Table structure corrupt: memo fields exist, header declares no memos")
                    elif not os.path.exists(yo._meta.memoname):
                        yo._meta.dfd.close()
                        yo._meta.dfd = None
                        raise DbfError("Table structure corrupt: memo fields exist without memo file")
                    break
    def _initializeFields(yo):
        "builds the FieldList of names, types, and descriptions from the disk file"
        offset = 1
        fieldsdef = yo._meta.header.fields
        if len(fieldsdef) % 32 != 0:
            raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
        if len(fieldsdef) // 32 != yo.field_count:
            raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32))
        for i in range(yo.field_count):
            fieldblock = fieldsdef[i*32:(i+1)*32]
            name = io.unpackStr(fieldblock[:11])
            type = fieldblock[11]
            if not type in yo._meta.fieldtypes:
                raise DbfError("Unknown field type: %s" % type)
            start = offset
            length = ord(fieldblock[16])
            offset += length
            end = start + length
            decimals = ord(fieldblock[17])
            flags = ord(fieldblock[18])
            yo._meta.fields.append(name)
            yo._meta[name] = {'type':type, 'start':start, 'length':length, 'end':end, 'decimals':decimals, 'flags':flags}
    def _fieldLayout(yo, i):
        "Returns field information Name Type(Length[,Decimals])"
        name = yo._meta.fields[i]
        type = yo._meta[name]['type']
        length = yo._meta[name]['length']
        decimals = yo._meta[name]['decimals']
        if type in yo._decimal_fields:
            description = "%s %s(%d,%d)" % (name, type, length, decimals)
        elif type in yo._fixed_fields:
            description = "%s %s" % (name, type)
        else:
            description = "%s %s(%d)" % (name, type, length)
        return description
    def _loadtable(yo):
        "loads the records from disk to memory"
        if yo._meta_only:
            raise DbfError("%s has been closed, records are unavailable" % yo.filename)
        dfd = yo._meta.dfd
        header = yo._meta.header
        dfd.seek(header.start)
        allrecords = dfd.read()         # kludge to get around mysterious errno 0 problems
        dfd.seek(0)
        length = header.record_length
        for i in range(header.record_count):
            record_data = allrecords[length*i:length*i+length]
            yo._table.append(_DbfRecord(i, yo._meta, record_data, _fromdisk=True))
            yo._index.append(i)
        dfd.seek(0)
    def _list_fields(yo, specs, sep=','):
        if specs is None:
            specs = yo.field_names
        elif isinstance(specs, str):
            specs = specs.split(sep)
        else:
            specs = list(specs)
        specs = [s.strip() for s in specs]
        return specs
    def _update_disk(yo, headeronly=False):
        "synchronizes the disk file with current data"
        if yo._meta.inmemory:
            return
        fd = yo._meta.dfd
        fd.seek(0)
        fd.write(yo._meta.header.data)
        if not headeronly:
            for record in yo._table:
                record._update_disk()
            fd.flush()
            fd.truncate(yo._meta.header.start + yo._meta.header.record_count * yo._meta.header.record_length)
    def __contains__(yo, key):
        return key in yo.field_names
    def __enter__(yo):
        return yo
    def __exit__(yo, *exc_info):
        yo.close()
    def __getattr__(yo, name):
        if name in ('_index', '_table'):
            if yo._meta.ondisk:
                yo._table = yo._Table(len(yo), yo._meta)
                yo._index = range(len(yo))
            else:
                yo._table = []
                yo._index = []
                yo._loadtable()
        return object.__getattribute__(yo, name)
    def __getitem__(yo, value):
        if type(value) == int:
            if not -yo._meta.header.record_count <= value < yo._meta.header.record_count:
                raise IndexError("Record %d is not in table." % value)
            return yo._table[yo._index[value]]
        elif type(value) == slice:
            sequence = DbfList(desc='%s --> %s' % (yo.filename, value))
            for index in yo._index[value]:
                record = yo._table[index]
                if yo.use_deleted is True or not record.has_been_deleted:
                    sequence.append(record)
            return sequence
        else:
            raise TypeError('type <%s> not valid for indexing' % type(value))
    def __init__(yo, filename=':memory:', field_specs=None, memo_size=128, ignore_memos=False,
                 read_only=False, keep_memos=False, meta_only=False, codepage=None):
        """open/create dbf file
        filename should include path if needed
        field_specs can be either a ;-delimited string or a list of strings
        memo_size is always 512 for db3 memos
        ignore_memos is useful if the memo file is missing or corrupt
        read_only will load records into memory, then close the disk file
        keep_memos will also load any memo fields into memory
        meta_only will ignore all records, keeping only basic table information
        codepage will override whatever is set in the table itself"""
        if filename == ':memory:':
            if field_specs is None:
                raise DbfError("field list must be specified for in-memory tables")
        elif type(yo) is DbfTable:
            raise DbfError("only memory tables supported")
        yo._meta = meta = yo._MetaData()
        meta.table = weakref.ref(yo)
        meta.filename = filename
        meta.fields = []
        meta.fieldtypes = yo._fieldtypes
        meta.fixed_fields = yo._fixed_fields
        meta.variable_fields = yo._variable_fields
        meta.character_fields = yo._character_fields
        meta.decimal_fields = yo._decimal_fields
        meta.numeric_fields = yo._numeric_fields
        meta.memotypes = yo._memotypes
        meta.ignorememos = ignore_memos
        meta.memo_size = memo_size
        meta.input_decoder = codecs.getdecoder(input_decoding)      # from ascii to unicode
        meta.output_encoder = codecs.getencoder(input_decoding)     # and back to ascii
        meta.return_ascii = return_ascii
        meta.header = header = yo._TableHeader(yo._dbfTableHeader)
        header.extra = yo._dbfTableHeaderExtra
        header.data                                                 # force update of date
        if filename == ':memory:':
            yo._index = []
            yo._table = []
            meta.ondisk = False
            meta.inmemory = True
            meta.memoname = ':memory:'
        else:
            base, ext = os.path.splitext(filename)
            if ext == '':
                meta.filename = base + '.dbf'
            meta.memoname = base + yo._memoext
            meta.ondisk = True
            meta.inmemory = False
        if field_specs:
            if meta.ondisk:
                meta.dfd = open(meta.filename, 'w+b')
                meta.newmemofile = True
            yo.add_fields(field_specs)
            header.codepage = codepage or default_codepage
            meta.decoder = codecs.getdecoder(header.codepage)
            meta.encoder = codecs.getencoder(header.codepage)
            return
        dfd = meta.dfd = open(meta.filename, 'r+b')
        dfd.seek(0)
        meta.header = header = yo._TableHeader(dfd.read(32))
        if not header.version in yo._supported_tables:
            dfd.close()
            dfd = None
            raise TypeError("Unsupported dbf type: %s [%x]" % (version_map.get(meta.header.version, 'Unknown: %s' % meta.header.version), ord(meta.header.version)))
        cp, sd, ld = _codepage_lookup(meta.header.codepage())
        yo._meta.decoder = codecs.getdecoder(sd)
        yo._meta.encoder = codecs.getencoder(sd)
        fieldblock = dfd.read(header.start - 32)
        for i in range(len(fieldblock)//32+1):
            fieldend = i * 32
            if fieldblock[fieldend] == '\x0d':
                break
        else:
            raise DbfError("corrupt field structure in header")
        if len(fieldblock[:fieldend]) % 32 != 0:
            raise DbfError("corrupt field structure in header")
        header.fields = fieldblock[:fieldend]
        header.extra = fieldblock[fieldend+1:]      # skip trailing \r
        yo._initializeFields()
        yo._checkMemoIntegrity()
        meta.current = -1
        if len(yo) > 0:
            meta.current = 0
        dfd.seek(0)
        if meta_only:
            yo.close(keep_table=False, keep_memos=False)
        elif read_only:
            yo.close(keep_table=True, keep_memos=keep_memos)
        if codepage is not None:
            cp, sd, ld = _codepage_lookup(codepage)
            yo._meta.decoder = codecs.getdecoder(sd)
            yo._meta.encoder = codecs.getencoder(sd)

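    # Constructor sketch for the options documented above (illustrative; 'test' is a
    # hypothetical file name, and a concrete subclass such as Db3Table is normally
    # used since DbfTable itself only supports ':memory:' tables):
    #
    #   >>> t = Db3Table('test', 'name C(25); age N(3,0)')    # create, '.dbf' is added
    #   >>> t = Db3Table('test.dbf')                          # open existing, read/write
    #   >>> t = Db3Table('test.dbf', read_only=True)          # load records, close the file
    #   >>> t = Db3Table('test.dbf', meta_only=True)          # header info only, no records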
    def __iter__(yo):
        return yo.DbfIterator(yo)
    def __len__(yo):
        return yo._meta.header.record_count
    def __nonzero__(yo):
        return yo._meta.header.record_count != 0
    def __repr__(yo):
        if yo._read_only:
            return __name__ + ".Table('%s', read_only=True)" % yo._meta.filename
        elif yo._meta_only:
            return __name__ + ".Table('%s', meta_only=True)" % yo._meta.filename
        else:
            return __name__ + ".Table('%s')" % yo._meta.filename
    def __str__(yo):
        if yo._read_only:
            status = "read-only"
        elif yo._meta_only:
            status = "meta-only"
        else:
            status = "read/write"
        str = """
        Table: %s
        Type: %s
        Codepage: %s
        Status: %s
        Last updated: %s
        Record count: %d
        Field count: %d
        Record length: %d
        """ % (yo.filename, version_map.get(yo._meta.header.version, 'unknown - ' + hex(ord(yo._meta.header.version))),
               yo.codepage, status, yo.last_update, len(yo), yo.field_count, yo.record_length)
        str += "\n --Fields--\n"
        for i in range(len(yo._meta.fields)):
            str += " " + yo._fieldLayout(i) + "\n"
        return str
    @property
    def codepage(yo):
        return "%s (%s)" % code_pages[yo._meta.header.codepage()]
    @codepage.setter
    def codepage(yo, cp):
        cp = code_pages[yo._meta.header.codepage(cp)][0]
        yo._meta.decoder = codecs.getdecoder(cp)
        yo._meta.encoder = codecs.getencoder(cp)
        yo._update_disk(headeronly=True)
    @property
    def field_count(yo):
        "the number of fields in the table"
        return yo._meta.header.field_count
    @property
    def field_names(yo):
        "a list of the fields in the table"
        return yo._meta.fields[:]
    @property
    def filename(yo):
        "table's file name, including path (if specified on open)"
        return yo._meta.filename
    @property
    def last_update(yo):
        "date of last update"
        return yo._meta.header.update
    @property
    def memoname(yo):
        "table's memo name (if path included in filename on open)"
        return yo._meta.memoname
    @property
    def record_length(yo):
        "number of bytes in a record"
        return yo._meta.header.record_length
    @property
    def record_number(yo):
        "index number of the current record"
        return yo._meta.current
    @property
    def supported_tables(yo):
        "allowable table types"
        return yo._supported_tables
    @property
    def use_deleted(yo):
        "process or ignore deleted records"
        return yo._use_deleted
    @use_deleted.setter
    def use_deleted(yo, new_setting):
        yo._use_deleted = new_setting
    @property
    def version(yo):
        "returns the dbf type of the table"
        return yo._version
    def add_fields(yo, field_specs):
        """adds field(s) to the table layout; format is Name Type(Length,Decimals)[; Name Type(Length,Decimals)[...]]
        backup table is created with _backup appended to name
        then modifies current structure"""
        all_records = [record for record in yo]
        if yo:
            yo.create_backup()
        yo._meta.blankrecord = None
        meta = yo._meta
        offset = meta.header.record_length
        fields = yo._list_fields(field_specs, sep=';')
        for field in fields:
            try:
                name, format = field.split()
                if name[0] == '_' or name[0].isdigit() or not name.replace('_', '').isalnum():
                    raise DbfError("Field names cannot start with _ or digits, and can only contain the _, letters, and digits")
                name = name.lower()
                if name in meta.fields:
                    raise DbfError("Field '%s' already exists" % name)
                field_type = format[0].upper()
                if len(name) > 10:
                    raise DbfError("Maximum field name length is 10.  '%s' is %d characters long." % (name, len(name)))
                if not field_type in meta.fieldtypes.keys():
                    raise DbfError("Unknown field type: %s" % field_type)
                length, decimals = yo._meta.fieldtypes[field_type]['Init'](format)
            except ValueError:
                raise DbfError("invalid field specifier: %s" % field)
            start = offset
            end = offset + length
            offset = end
            meta.fields.append(name)
            meta[name] = {'type':field_type, 'start':start, 'length':length, 'end':end, 'decimals':decimals, 'flags':0}
            if meta[name]['type'] in yo._memotypes and meta.memo is None:
                meta.memo = yo._memoClass(meta)
            for record in yo:
                record[name] = meta.fieldtypes[field_type]['Blank']()
        yo._buildHeaderFields()
        yo._update_disk()
    def append(yo, kamikaze='', drop=False, multiple=1):
        "adds <multiple> blank records, and fills fields with dict/tuple values if present"
        if not yo.field_count:
            raise DbfError("No fields defined, cannot append")
        empty_table = len(yo) == 0
        dictdata = False
        tupledata = False
        if not isinstance(kamikaze, _DbfRecord):
            if isinstance(kamikaze, dict):
                dictdata = kamikaze
                kamikaze = ''
            elif isinstance(kamikaze, tuple):
                tupledata = kamikaze
                kamikaze = ''
        newrecord = _DbfRecord(recnum=yo._meta.header.record_count, layout=yo._meta, kamikaze=kamikaze)
        yo._table.append(newrecord)
        yo._index.append(yo._meta.header.record_count)
        yo._meta.header.record_count += 1
        if dictdata:
            newrecord.gather_fields(dictdata, drop)
        elif tupledata:
            for index, item in enumerate(tupledata):
                newrecord[index] = item
        elif kamikaze == str:
            for field in yo._meta.memofields:
                newrecord[field] = ''
        elif kamikaze:
            for field in yo._meta.memofields:
                newrecord[field] = kamikaze[field]
        multiple -= 1
        if multiple:
            data = newrecord._data
            single = yo._meta.header.record_count
            total = single + multiple
            while single < total:
                multi_record = _DbfRecord(single, yo._meta, kamikaze=data)
                yo._table.append(multi_record)
                yo._index.append(single)
                for field in yo._meta.memofields:
                    multi_record[field] = newrecord[field]
                single += 1
            yo._meta.header.record_count = total    # += multiple
            yo._meta.current = yo._meta.header.record_count - 1
            newrecord = multi_record
        yo._update_disk(headeronly=True)
        if empty_table:
            yo._meta.current = 0
        return newrecord
    def bof(yo):
        "moves record pointer to previous usable record; returns True if no more usable records"
        while yo._meta.current > 0:
            yo._meta.current -= 1
            if yo.use_deleted or not yo.current().has_been_deleted:
                break
        else:
            yo._meta.current = -1
            return True
        return False
    def bottom(yo, get_record=False):
        """sets record pointer to bottom of table
        if get_record, seeks to and returns last (non-deleted) record
        DbfError if table is empty
        Bof if all records deleted and use_deleted is False"""
        yo._meta.current = yo._meta.header.record_count
        if get_record:
            try:
                return yo.prev()
            except Bof:
                yo._meta.current = yo._meta.header.record_count
                raise Eof()
    def close(yo, keep_table=False, keep_memos=False):
        """closes disk files
        ensures table data is available if keep_table
        ensures memo data is available if keep_memos"""
        if keep_table:
            yo._table           # force read of table if not already in memory
        else:
            if '_index' in dir(yo):
                del yo._table
                del yo._index
        yo._meta.inmemory = True
        if yo._meta.ondisk:
            yo._meta.dfd.close()
            yo._meta.dfd = None
            if '_index' in dir(yo):
                yo._read_only = True
            else:
                yo._meta_only = True
        if yo._meta.mfd is not None:
            if not keep_memos:
                yo._meta.ignorememos = True
            else:
                memo_fields = []
                for field in yo.field_names:
                    if yo.is_memotype(field):
                        memo_fields.append(field)
                for record in yo:
                    for field in memo_fields:
                        record[field] = record[field]
            yo._meta.mfd.close()
            yo._meta.mfd = None
        yo._meta.ondisk = False
    def create_backup(yo, new_name=None, overwrite=False):
        "creates a backup table -- ignored if memory table"
        if yo.filename.startswith(':memory:'):
            return
        if new_name is None:
            new_name = os.path.splitext(yo.filename)[0] + '_backup.dbf'
        else:
            overwrite = True
        if overwrite or not yo._backed_up:
            bkup = open(new_name, 'wb')
            try:
                yo._meta.dfd.seek(0)
                copyfileobj(yo._meta.dfd, bkup)
                yo._backed_up = True
            finally:
                bkup.close()
    def current(yo, index=False):
        "returns current logical record, or its index"
        if yo._meta.current < 0:
            raise Bof()
        elif yo._meta.current >= yo._meta.header.record_count:
            raise Eof()
        if index:
            return yo._meta.current
        return yo._table[yo._index[yo._meta.current]]
    def delete_fields(yo, doomed):
        """removes field(s) from the table
        creates backup files with _backup appended to the file name,
        then modifies current structure"""
        doomed = yo._list_fields(doomed)
        for victim in doomed:
            if victim not in yo._meta.fields:
                raise DbfError("field %s not in table -- delete aborted" % victim)
        all_records = [record for record in yo]
        yo.create_backup()
        for victim in doomed:
            yo._meta.fields.pop(yo._meta.fields.index(victim))
            start = yo._meta[victim]['start']
            end = yo._meta[victim]['end']
            for record in yo:
                record._data = record._data[:start] + record._data[end:]
            for field in yo._meta.fields:
                if yo._meta[field]['start'] == end:
                    end = yo._meta[field]['end']
                    yo._meta[field]['start'] = start
                    yo._meta[field]['end'] = start + yo._meta[field]['length']
                    start = yo._meta[field]['end']
        yo._buildHeaderFields()
        yo._update_disk()
    def eof(yo):
        "moves record pointer to next usable record; returns True if no more usable records"
        while yo._meta.current < yo._meta.header.record_count - 1:
            yo._meta.current += 1
            if yo.use_deleted or not yo.current().has_been_deleted:
                break
        else:
            yo._meta.current = yo._meta.header.record_count
            return True
        return False
    def export(yo, records=None, filename=None, field_specs=None, format='csv', header=True):
        """writes the table using CSV or tab-delimited format, using the filename
        given if specified, otherwise the table name"""
        if filename is None:
            filename = yo.filename
        field_specs = yo._list_fields(field_specs)
        if records is None:
            records = yo
        format = format.lower()
        if format not in ('csv', 'tab'):
            raise DbfError("export format: csv or tab, not %s" % format)
        base, ext = os.path.splitext(filename)
        if ext.lower() in ('', '.dbf'):
            filename = base + "." + format
        fd = open(filename, 'wb')
        try:
            if format == 'csv':
                csvfile = csv.writer(fd, dialect='dbf')
                if header:
                    csvfile.writerow(field_specs)
                for record in records:
                    fields = []
                    for fieldname in field_specs:
                        fields.append(record[fieldname])
                    csvfile.writerow(fields)
            else:
                if header:
                    fd.write('\t'.join(field_specs) + '\n')
                for record in records:
                    fields = []
                    for fieldname in field_specs:
                        fields.append(str(record[fieldname]))
                    fd.write('\t'.join(fields) + '\n')
        finally:
            fd.close()
            fd = None
        return len(records)
    def get_record(yo, recno):
        "returns record at physical_index[recno]"
        return yo._table[recno]
    def goto(yo, criteria):
        """changes the record pointer to the first matching (non-deleted) record
        criteria should be either a tuple of tuple(value, field, func) triples,
        or an integer to go to"""
        if isinstance(criteria, int):
            if not -yo._meta.header.record_count <= criteria < yo._meta.header.record_count:
                raise IndexError("Record %d does not exist" % criteria)
            if criteria < 0:
                criteria += yo._meta.header.record_count
            yo._meta.current = criteria
            return yo.current()
        criteria = _normalize_tuples(tuples=criteria, length=3, filler=[_nop])
        specs = tuple([(field, func) for value, field, func in criteria])
        match = tuple([value for value, field, func in criteria])
        current = yo.current(index=True)
        matchlen = len(match)
        while not yo.eof():     # was yo.Eof(), which does not exist on the table
            record = yo.current()
            results = record(*specs)
            if results == match:
                return record
        return yo.goto(current)
    def index(yo, sort=None, reverse=False):
        "orders the table using the tuple provided; removes index if no sort provided"
        if sort is None:
            results = []
            for field, func in yo._meta.index:
                results.append("%s(%s)" % (func.__name__, field))
            return ', '.join(results + ['reverse=%s' % yo._meta.index_reversed])
        yo._meta.index_reversed = reverse
        if sort == 'ORIGINAL':
            yo._index = range(yo._meta.header.record_count)
            yo._meta.index = []
            if reverse:
                yo._index.reverse()
            return
        new_sort = _normalize_tuples(tuples=sort, length=2, filler=[_nop])
        yo._meta.index = tuple(new_sort)
        yo._meta.orderresults = [''] * len(yo)
        for record in yo:
            yo._meta.orderresults[record.record_number] = record()
        yo._index.sort(key=lambda i: yo._meta.orderresults[i], reverse=reverse)
    def is_memotype(yo, name):
        "returns True if name is a memo type field"
        return yo._meta[name]['type'] in yo._memotypes
    def new(yo, filename, _field_specs=None):
        "returns a new table of the same type"
        if _field_specs is None:
            _field_specs = yo.structure()
        if filename != ':memory:':
            path, name = os.path.split(filename)
            if path == "":
                filename = os.path.join(os.path.split(yo.filename)[0], filename)
            elif name == "":
                filename = os.path.join(path, os.path.split(yo.filename)[1])
        return yo.__class__(filename, _field_specs)
    def next(yo):
        "set record pointer to next (non-deleted) record, and return it"
        if yo.eof():
            raise Eof()
        return yo.current()
    def pack(yo, _pack=True):
        "physically removes all deleted records"
        newtable = []
        newindex = []
        i = 0
        for record in yo._table:
            if record.has_been_deleted and _pack:
                record._recnum = -1
            else:
                record._recnum = i
                newtable.append(record)
                newindex.append(i)
                i += 1
        yo._table = newtable
        yo._index = newindex
        yo._meta.header.record_count = i
        yo._current = -1
        yo._meta.index = ''
        yo._update_disk()
    def prev(yo):
        "set record pointer to previous (non-deleted) record, and return it"
        if yo.bof():
            raise Bof
        return yo.current()
    def query(yo, sql=None, python=None):
        "uses exec to perform python queries on the table"
        if python is None:
            raise DbfError("query: python parameter must be specified")
        possible = DbfList(desc="%s --> %s" % (yo.filename, python))
        query_result = {}
        select = 'query_result["keep"] = %s' % python
        g = {}
        for record in yo:
            query_result['keep'] = False
            g['query_result'] = query_result
            exec select in g, record
            if query_result['keep']:
                possible.append(record)
        return possible
    def rename_field(yo, oldname, newname):
        "renames an existing field"
        if yo:
            yo.create_backup()
        if not oldname in yo._meta.fields:
            raise DbfError("field --%s-- does not exist -- cannot rename it." % oldname)
        if newname[0] == '_' or newname[0].isdigit() or not newname.replace('_', '').isalnum():
            raise DbfError("field names cannot start with _ or digits, and can only contain the _, letters, and digits")
        newname = newname.lower()
        if newname in yo._meta.fields:
            raise DbfError("field --%s-- already exists" % newname)
        if len(newname) > 10:
            raise DbfError("maximum field name length is 10.  '%s' is %d characters long." % (newname, len(newname)))
        yo._meta[newname] = yo._meta[oldname]
        yo._meta.fields[yo._meta.fields.index(oldname)] = newname
        yo._buildHeaderFields()
        yo._update_disk(headeronly=True)
    def search(yo, match, fuzzy=None, indices=False):
        """searches using a binary algorithm
        looking for records that match the criteria in match, which is a tuple
        with a data item per ordered field.  table must be sorted.  if index,
        returns a list of records' indices from the current sort order.
        """
        if yo._meta.index is None:
            raise DbfError('table must be indexed to use Search')
        matchlen = len(match)
        if fuzzy:
            matchlen -= 1
            fuzzy_match = match[-1]
            fuzzy_field = yo._meta.index[matchlen][0]
            match = match[:-1]
            records = DbfList(desc="%s --> search: index=%s, match=%s, fuzzy=%s(%s))" % (yo.filename, yo.index(), match, fuzzy.__name__, fuzzy_match))
        else:
            records = DbfList(desc="%s --> search: index=%s, match=%s)" % (yo.filename, yo.index(), match))
        if indices:
            records = []
        if not isinstance(match, tuple):
            match = tuple(match)
        segment = len(yo)
        current = 0
        toosoon = True
        notFound = True
        while notFound:
            segment = segment // 2
            if toosoon:
                current += segment
            else:
                current -= segment
            if current % 2:
                segment += 1
            if current == len(yo) or segment == 0:
                break
            value = yo._meta.orderresults[yo[current].record_number][:matchlen]
            if value < match:
                toosoon = True
            elif value > match:
                toosoon = False
            else:
                notFound = False
                break
            if current == 0:
                break
        if notFound:
            return records
        while current > 0:
            current -= 1
            value = yo._meta.orderresults[yo[current].record_number][:matchlen]
            if value != match:
                current += 1
                break
        while True:
            value = yo._meta.orderresults[yo[current].record_number][:matchlen]
            if value != match:
                break
            if yo.use_deleted or not yo[current].has_been_deleted:
                if indices:
                    records.append(current)
                else:
                    records.append(yo[current])
            current += 1
            if current == len(yo):
                break
        if fuzzy:
            if indices:
                records = [rec for rec in records if fuzzy(yo[rec][fuzzy_field]) == fuzzy_match]
            else:
                final_records = [rec for rec in records if fuzzy(rec[fuzzy_field]) == fuzzy_match]
                records.clear()
                records.extend(final_records)
        return records
    def size(yo, field):
        "returns size of field as a tuple of (length, decimals)"
        if field in yo:
            return (yo._meta[field]['length'], yo._meta[field]['decimals'])
        raise DbfError("%s is not a field in %s" % (field, yo.filename))
    def structure(yo, fields=None):
        """return list of fields suitable for creating same table layout
        @param fields: list of fields or None for all fields"""
        field_specs = []
        fields = yo._list_fields(fields)
        try:
            for name in fields:
                field_specs.append(yo._fieldLayout(yo.field_names.index(name)))
        except ValueError:
            raise DbfError("field --%s-- does not exist" % name)
        return field_specs
    def top(yo, get_record=False):
        """sets record pointer to top of table; if get_record, seeks to and returns first (non-deleted) record
        DbfError if table is empty
        Eof if all records are deleted and use_deleted is False"""
        yo._meta.current = -1
        if get_record:
            try:
                return yo.next()
            except Eof:
                yo._meta.current = -1
                raise Bof()
    def type(yo, field):
        "returns type of field"
        if field in yo:
            return yo._meta[field]['type']
        raise DbfError("%s is not a field in %s" % (field, yo.filename))
    def zap(yo, areyousure=False):
        """removes all records from table -- this cannot be undone!
        areyousure must be True, else error is raised"""
        if areyousure:
            yo._table = []
            yo._index = []
            yo._meta.header.record_count = 0
            yo._current = -1
            yo._meta.index = ''
            yo._update_disk()
        else:
            raise DbfError("You must say you are sure to wipe the table")
# these assignments are for backward compatibility, and will go away
1481 -class Db3Table(DbfTable):
1482 """Provides an interface for working with dBase III tables.""" 1483 _version = 'dBase III Plus' 1484 _versionabbv = 'db3' 1485 _fieldtypes = { 1486 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter}, 1487 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate}, 1488 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical}, 1489 'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo}, 1490 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addNumeric} } 1491 _memoext = '.dbt' 1492 _memotypes = ('M',) 1493 _memoClass = _Db3Memo 1494 _yesMemoMask = '\x80' 1495 _noMemoMask = '\x7f' 1496 _fixed_fields = ('D','L','M') 1497 _variable_fields = ('C','N') 1498 _character_fields = ('C','M') 1499 _decimal_fields = ('N',) 1500 _numeric_fields = ('N',) 1501 _dbfTableHeader = array('c', '\x00' * 32) 1502 _dbfTableHeader[0] = '\x03' # version - dBase III w/o memo's 1503 _dbfTableHeader[8:10] = array('c', io.packShortInt(33)) 1504 _dbfTableHeader[10] = '\x01' # record length -- one for delete flag 1505 _dbfTableHeader[29] = '\x03' # code page -- 437 US-MS DOS 1506 _dbfTableHeader = _dbfTableHeader.tostring() 1507 _dbfTableHeaderExtra = '' 1508 _supported_tables = ['\x03', '\x83'] 1509 _read_only = False 1510 _meta_only = False 1511 _use_deleted = True
1512 - def _checkMemoIntegrity(yo):
1513 "dBase III specific" 1514 if yo._meta.header.version == '\x83': 1515 try: 1516 yo._meta.memo = yo._memoClass(yo._meta) 1517 except: 1518 yo._meta.dfd.close() 1519 yo._meta.dfd = None 1520 raise 1521 if not yo._meta.ignorememos: 1522 for field in yo._meta.fields: 1523 if yo._meta[field]['type'] in yo._memotypes: 1524 if yo._meta.header.version != '\x83': 1525 yo._meta.dfd.close() 1526 yo._meta.dfd = None 1527 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos") 1528 elif not os.path.exists(yo._meta.memoname): 1529 yo._meta.dfd.close() 1530 yo._meta.dfd = None 1531 raise DbfError("Table structure corrupt: memo fields exist without memo file") 1532 break
    def _initializeFields(yo):
        "builds the FieldList of names, types, and descriptions"
        offset = 1
        fieldsdef = yo._meta.header.fields
        if len(fieldsdef) % 32 != 0:
            raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
        if len(fieldsdef) // 32 != yo.field_count:
            raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32))
        for i in range(yo.field_count):
            fieldblock = fieldsdef[i*32:(i+1)*32]
            name = io.unpackStr(fieldblock[:11])
            type = fieldblock[11]
            if not type in yo._meta.fieldtypes:
                raise DbfError("Unknown field type: %s" % type)
            start = offset
            length = ord(fieldblock[16])
            offset += length
            end = start + length
            decimals = ord(fieldblock[17])
            flags = ord(fieldblock[18])
            yo._meta.fields.append(name)
            yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
class FpTable(DbfTable):
    'Provides an interface for working with FoxPro 2 tables'
    _version = 'Foxpro'
    _versionabbv = 'fp'
    _fieldtypes = {
            'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter},
            'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric},
            'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric},
            'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical},
            'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate},
            'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addVfpMemo},
            'G' : {'Type':'General', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
            'P' : {'Type':'Picture', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
            '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} }
    _memoext = '.fpt'
    _memotypes = ('G','M','P')
    _memoClass = _VfpMemo
    _yesMemoMask = '\xf5'               # 1111 0101
    _noMemoMask = '\x03'                # 0000 0011
    _fixed_fields = ('B','D','G','I','L','M','P','T','Y')
    _variable_fields = ('C','F','N')
    _character_fields = ('C','M')       # field representing character data
    _decimal_fields = ('F','N')
    _numeric_fields = ('B','F','I','N','Y')
    _supported_tables = ('\x03', '\xf5')
    _dbfTableHeader = array('c', '\x00' * 32)
    _dbfTableHeader[0] = '\x30'         # version - Foxpro 6  0011 0000
    _dbfTableHeader[8:10] = array('c', io.packShortInt(33+263))
    _dbfTableHeader[10] = '\x01'        # record length -- one for delete flag
    _dbfTableHeader[29] = '\x03'        # code page -- cp1252, Windows ANSI
    _dbfTableHeader = _dbfTableHeader.tostring()
    _dbfTableHeaderExtra = '\x00' * 263
    _use_deleted = True
    def _checkMemoIntegrity(yo):
        if os.path.exists(yo._meta.memoname):
            try:
                yo._meta.memo = yo._memoClass(yo._meta)
            except:
                yo._meta.dfd.close()
                yo._meta.dfd = None
                raise
        if not yo._meta.ignorememos:
            for field in yo._meta.fields:
                if yo._meta[field]['type'] in yo._memotypes:
                    if not os.path.exists(yo._meta.memoname):
                        yo._meta.dfd.close()
                        yo._meta.dfd = None
                        raise DbfError("Table structure corrupt: memo fields exist without memo file")
                    break
    def _initializeFields(yo):
        "builds the FieldList of names, types, and descriptions"
        offset = 1
        fieldsdef = yo._meta.header.fields
        if len(fieldsdef) % 32 != 0:
            raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
        if len(fieldsdef) // 32 != yo.field_count:
            raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32))
        for i in range(yo.field_count):
            fieldblock = fieldsdef[i*32:(i+1)*32]
            name = io.unpackStr(fieldblock[:11])
            type = fieldblock[11]
            if not type in yo._meta.fieldtypes:
                raise DbfError("Unknown field type: %s" % type)
            elif type == '0':
                return          # ignore nullflags
            start = offset
            length = ord(fieldblock[16])
            offset += length
            end = start + length
            decimals = ord(fieldblock[17])
            flags = ord(fieldblock[18])
            yo._meta.fields.append(name)
            yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}

class VfpTable(DbfTable):
    'Provides an interface for working with Visual FoxPro 6 tables'
    _version = 'Visual Foxpro v6'
    _versionabbv = 'vfp'
    _fieldtypes = {
            'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter},
            'Y' : {'Type':'Currency', 'Retrieve':io.retrieveCurrency, 'Update':io.updateCurrency, 'Blank':Decimal(), 'Init':io.addVfpCurrency},
            'B' : {'Type':'Double', 'Retrieve':io.retrieveDouble, 'Update':io.updateDouble, 'Blank':float, 'Init':io.addVfpDouble},
            'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric},
            'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric},
            'I' : {'Type':'Integer', 'Retrieve':io.retrieveInteger, 'Update':io.updateInteger, 'Blank':int, 'Init':io.addVfpInteger},
            'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical},
            'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate},
            'T' : {'Type':'DateTime', 'Retrieve':io.retrieveVfpDateTime, 'Update':io.updateVfpDateTime, 'Blank':DateTime.now, 'Init':io.addVfpDateTime},
            'M' : {'Type':'Memo', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo},
            'G' : {'Type':'General', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo},
            'P' : {'Type':'Picture', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo},
            '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} }
    _memoext = '.fpt'
    _memotypes = ('G','M','P')
    _memoClass = _VfpMemo
    _yesMemoMask = '\x30'               # 0011 0000
    _noMemoMask = '\x30'                # 0011 0000
    _fixed_fields = ('B','D','G','I','L','M','P','T','Y')
    _variable_fields = ('C','F','N')
    _character_fields = ('C','M')       # field representing character data
    _decimal_fields = ('F','N')
    _numeric_fields = ('B','F','I','N','Y')
    _supported_tables = ('\x30',)
    _dbfTableHeader = array('c', '\x00' * 32)
    _dbfTableHeader[0] = '\x30'         # version - Foxpro 6  0011 0000
    _dbfTableHeader[8:10] = array('c', io.packShortInt(33+263))
    _dbfTableHeader[10] = '\x01'        # record length -- one for delete flag
    _dbfTableHeader[29] = '\x03'        # code page -- cp1252, Windows ANSI
    _dbfTableHeader = _dbfTableHeader.tostring()
    _dbfTableHeaderExtra = '\x00' * 263
    _use_deleted = True
    def _checkMemoIntegrity(yo):
        if os.path.exists(yo._meta.memoname):
            try:
                yo._meta.memo = yo._memoClass(yo._meta)
            except:
                yo._meta.dfd.close()
                yo._meta.dfd = None
                raise
        if not yo._meta.ignorememos:
            for field in yo._meta.fields:
                if yo._meta[field]['type'] in yo._memotypes:
                    if not os.path.exists(yo._meta.memoname):
                        yo._meta.dfd.close()
                        yo._meta.dfd = None
                        raise DbfError("Table structure corrupt: memo fields exist without memo file")
                    break
    def _initializeFields(yo):
        "builds the FieldList of names, types, and descriptions"
        offset = 1
        fieldsdef = yo._meta.header.fields
        for i in range(yo.field_count):
            fieldblock = fieldsdef[i*32:(i+1)*32]
            name = io.unpackStr(fieldblock[:11])
            type = fieldblock[11]
            if not type in yo._meta.fieldtypes:
                raise DbfError("Unknown field type: %s" % type)
            elif type == '0':
                return          # ignore nullflags
            start = io.unpackLongInt(fieldblock[12:16])
            length = ord(fieldblock[16])
            offset += length
            end = start + length
            decimals = ord(fieldblock[17])
            flags = ord(fieldblock[18])
            yo._meta.fields.append(name)
            yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
class DbfList(object):
    "list of Dbf records, with set-like behavior"
    _desc = ''
    def __init__(yo, new_records=None, desc=None):
        yo._list = []
        yo._set = set()
        yo._current = -1
        if isinstance(new_records, DbfList):
            yo._list = new_records._list[:]
            yo._set = new_records._set.copy()
            yo._current = 0
        elif new_records is not None:
            for record in new_records:
                item = (record.record_table, record.record_number)
                if item not in yo._set:
                    yo._set.add(item)
                    yo._list.append(item)
            yo._current = 0
        if desc is not None:
            yo._desc = desc
    def __add__(yo, other):
        if isinstance(other, DbfList):
            result = DbfList()
            result._set = yo._set.copy()
            result._list[:] = yo._list[:]
            for item in other._list:
                if item not in result._set:
                    result._set.add(item)
                    result._list.append(item)
            result._current = 0 if result else -1
            return result
        return NotImplemented
    def __delitem__(yo, key):
        if isinstance(key, int):
            item = yo._list.pop(key)
            yo._set.remove(item)
        elif isinstance(key, slice):
            yo._set.difference_update(yo._list[key])
            yo._list.__delitem__(key)
        else:
            raise TypeError
    def __getitem__(yo, key):
        if isinstance(key, int):
            count = len(yo._list)
            if not -count <= key < count:
                raise IndexError("Record %d is not in list." % key)
            return yo._get_record(*yo._list[key])
        elif isinstance(key, slice):
            result = DbfList()
            result._list[:] = yo._list[key]
            result._set = set(result._list)
            result._current = 0 if result else -1
            return result
        else:
            raise TypeError
    def __iter__(yo):
        return (table.get_record(recno) for table, recno in yo._list)
    def __len__(yo):
        return len(yo._list)
    def __nonzero__(yo):
        return len(yo) > 0
    def __radd__(yo, other):
        return yo.__add__(other)
    def __repr__(yo):
        if yo._desc:
            return "DbfList(%s - %d records)" % (yo._desc, len(yo._list))
        else:
            return "DbfList(%d records)" % len(yo._list)
    def __rsub__(yo, other):
        if isinstance(other, DbfList):
            result = DbfList()
            result._list[:] = other._list[:]
            result._set = other._set.copy()
            lost = set()
            for item in yo._list:
                if item in result._list:
                    result._set.remove(item)
                    lost.add(item)
            result._list = [item for item in result._list if item not in lost]
            result._current = 0 if result else -1
            return result
        return NotImplemented
    def __sub__(yo, other):
        if isinstance(other, DbfList):
            result = DbfList()
            result._list[:] = yo._list[:]
            result._set = yo._set.copy()
            lost = set()
            for item in other._list:
                if item in result._set:
                    result._set.remove(item)
                    lost.add(item)
            result._list = [item for item in result._list if item not in lost]
            result._current = 0 if result else -1
            return result
        return NotImplemented
    def _maybe_add(yo, item):
        if item not in yo._set:
            yo._set.add(item)
            yo._list.append(item)
    def _get_record(yo, table=None, rec_no=None):
        if table is rec_no is None:
            table, rec_no = yo._list[yo._current]
        return table.get_record(rec_no)
    def append(yo, new_record):
        yo._maybe_add((new_record.record_table, new_record.record_number))
        if yo._current == -1 and yo._list:
            yo._current = 0
    def bottom(yo):
        if yo._list:
            yo._current = len(yo._list) - 1
            return yo._get_record()
        raise DbfError("DbfList is empty")
    def clear(yo):
        yo._list = []
        yo._set = set()
        yo._current = -1
    def current(yo):
        if yo._current < 0:
            raise Bof()
        elif yo._current == len(yo._list):
            raise Eof()
        return yo._get_record()
    def extend(yo, new_records):
        if isinstance(new_records, DbfList):
            for item in new_records._list:
                yo._maybe_add(item)
        else:
            for record in new_records:
                yo.append(record)
        if yo._current == -1 and yo._list:
            yo._current = 0
    def goto(yo, index_number):
        if yo._list:
            if 0 <= index_number < len(yo._list):
                yo._current = index_number
                return yo._get_record()
            raise DbfError("index %d not in DbfList of %d records" % (index_number, len(yo._list)))
        raise DbfError("DbfList is empty")
    def index(yo, record, start=None, stop=None):
        item = record.record_table, record.record_number
        if start is None:
            start = 0
        if stop is None:
            stop = len(yo._list)
        return yo._list.index(item, start, stop)
    def insert(yo, i, table, record):
        item = table, record.record_number
        if item not in yo._set:
            yo._set.add(item)
            yo._list.insert(i, item)
    def next(yo):
        if yo._current < len(yo._list):
            yo._current += 1
            if yo._current < len(yo._list):
                return yo._get_record()
        raise Eof()
    def pop(yo, index=None):
        if index is None:
            table, recno = yo._list.pop()
        else:
            table, recno = yo._list.pop(index)
        yo._set.remove((table, recno))
        return yo._get_record(table, recno)
    def prev(yo):
        if yo._current >= 0:
            yo._current -= 1
            if yo._current > -1:
                return yo._get_record()
        raise Bof()
    def remove(yo, record):
        item = record.record_table, record.record_number
        yo._list.remove(item)
        yo._set.remove(item)
    def reverse(yo):
        return yo._list.reverse()
    def top(yo):
        if yo._list:
            yo._current = 0
            return yo._get_record()
        raise DbfError("DbfList is empty")
    def sort(yo, key=None, reverse=False):
        if key is None:
            return yo._list.sort(reverse=reverse)
        return yo._list.sort(key=lambda item: key(item[1]), reverse=reverse)

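A usage sketch for DbfList.  The record and table objects here are stand-ins, not part of this module; they only provide the attributes DbfList relies on (record.record_table, record.record_number, and table.get_record(recno)).

# minimal sketch with hypothetical stand-in record/table objects
class _FakeTable(object):
    def get_record(self, recno):
        return "record #%d" % recno

class _FakeRecord(object):
    def __init__(self, table, number):
        self.record_table = table
        self.record_number = number

t = _FakeTable()
recs = [_FakeRecord(t, n) for n in range(5)]

everything = DbfList(recs, desc="all")
evens = DbfList(recs[::2], desc="evens")
odds = everything - evens           # set-like difference on (table, recno) pairs
print odds                          # DbfList(2 records) -- the difference gets no description
print odds.top(), odds.bottom()     # record #1 record #3

Because membership is tracked by (table, record number) pairs, append() and extend() silently skip duplicates, and the arithmetic operators behave like set union and difference while preserving list order.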
class DbfCsv(csv.Dialect):
    "csv format for exporting tables"
    delimiter = ','
    doublequote = True
    escapechar = None
    lineterminator = '\r\n'
    quotechar = '"'
    skipinitialspace = True
    quoting = csv.QUOTE_NONNUMERIC
csv.register_dialect('dbf', DbfCsv)
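Since the dialect is registered under the name 'dbf', it can be used with the standard library csv module anywhere in the process.  A minimal sketch; the file name and row data are invented for illustration.

# minimal sketch of the registered 'dbf' csv dialect
import csv

rows = [['Ethan', 42, 'python'], ['Allen', 7, 'dbf']]
out = open('export.csv', 'wb')           # binary mode for the csv module on Python 2
writer = csv.writer(out, dialect='dbf')  # comma-delimited, CRLF line ends, non-numeric fields quoted
writer.writerows(rows)
out.close()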

def _nop(value):
    "returns parameter unchanged"
    return value
def _normalize_tuples(tuples, length, filler):
    "ensures each tuple is the same length, using filler[-missing] for the gaps"
    final = []
    for t in tuples:
        if len(t) < length:
            final.append( tuple([item for item in t] + filler[len(t)-length:]) )
        else:
            final.append(t)
    return tuple(final)
def _codepage_lookup(cp):
    if cp not in code_pages:
        for code_page in sorted(code_pages.keys()):
            sd, ld = code_pages[code_page]
            if cp == sd or cp == ld:
                if sd is None:
                    raise DbfError("Unsupported codepage: %s" % ld)
                cp = code_page
                break
        else:
            raise DbfError("Unsupported codepage: %s" % cp)
    sd, ld = code_pages[cp]
    return cp, sd, ld
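A sketch of the lookup contract: the argument may be either the one-byte language driver ID or a codec name/description, and the result is the (LDID, short name, long description) triple; unknown values raise DbfError.  Exact return values depend on the module's code_pages table.

ldid, codec, description = _codepage_lookup('cp437')   # look up by codec name
same_triple = _codepage_lookup(ldid)                    # or by the one-byte LDID
try:
    _codepage_lookup('klingon-8')                       # unsupported names raise
except DbfError:
    pass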
def ascii(new_setting=None):
    "get/set return_ascii setting"
    global return_ascii
    if new_setting is None:
        return return_ascii
    else:
        return_ascii = new_setting
def codepage(cp=None):
    "get/set default codepage for any new tables"
    global default_codepage
    cp, sd, ld = _codepage_lookup(cp or default_codepage)
    default_codepage = sd
    return "%s (LDID: 0x%02x - %s)" % (sd, ord(cp), ld)
def encoding(cp=None):
    "get/set default encoding for non-unicode strings passed into a table"
    global input_decoding
    cp, sd, ld = _codepage_lookup(cp or input_decoding)
    input_decoding = sd
    return "%s (LDID: 0x%02x - %s)" % (sd, ord(cp), ld)
class _Db4Table(DbfTable):
    version = 'dBase IV w/memos (non-functional)'
    _versionabbv = 'db4'
    _fieldtypes = {
            'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter},
            'Y' : {'Type':'Currency', 'Retrieve':io.retrieveCurrency, 'Update':io.updateCurrency, 'Blank':Decimal(), 'Init':io.addVfpCurrency},
            'B' : {'Type':'Double', 'Retrieve':io.retrieveDouble, 'Update':io.updateDouble, 'Blank':float, 'Init':io.addVfpDouble},
            'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric},
            'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric},
            'I' : {'Type':'Integer', 'Retrieve':io.retrieveInteger, 'Update':io.updateInteger, 'Blank':int, 'Init':io.addVfpInteger},
            'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical},
            'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate},
            'T' : {'Type':'DateTime', 'Retrieve':io.retrieveVfpDateTime, 'Update':io.updateVfpDateTime, 'Blank':DateTime.now, 'Init':io.addVfpDateTime},
            'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
            'G' : {'Type':'General', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
            'P' : {'Type':'Picture', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
            '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} }
    _memoext = '.dbt'
    _memotypes = ('G','M','P')
    _memoClass = _VfpMemo
    _yesMemoMask = '\x8b'               # 1000 1011
    _noMemoMask = '\x04'                # 0000 0100
    _fixed_fields = ('B','D','G','I','L','M','P','T','Y')
    _variable_fields = ('C','F','N')
    _character_fields = ('C','M')       # field representing character data
    _decimal_fields = ('F','N')
    _numeric_fields = ('B','F','I','N','Y')
    _supported_tables = ('\x04', '\x8b')
    _dbfTableHeader = ['\x00'] * 32
    _dbfTableHeader[0] = '\x8b'         # version - dBase IV w/memos
    _dbfTableHeader[10] = '\x01'        # record length -- one for delete flag
    _dbfTableHeader[29] = '\x03'        # code page -- cp1252, Windows ANSI
    _dbfTableHeader = ''.join(_dbfTableHeader)
    _dbfTableHeaderExtra = ''
    _use_deleted = True
    def _checkMemoIntegrity(yo):
        "dBase IV specific"
        if yo._meta.header.version == '\x8b':
            try:
                yo._meta.memo = yo._memoClass(yo._meta)
            except:
                yo._meta.dfd.close()
                yo._meta.dfd = None
                raise
        if not yo._meta.ignorememos:
            for field in yo._meta.fields:
                if yo._meta[field]['type'] in yo._memotypes:
                    if yo._meta.header.version != '\x8b':
                        yo._meta.dfd.close()
                        yo._meta.dfd = None
                        raise DbfError("Table structure corrupt: memo fields exist, header declares no memos")
                    elif not os.path.exists(yo._meta.memoname):
                        yo._meta.dfd.close()
                        yo._meta.dfd = None
                        raise DbfError("Table structure corrupt: memo fields exist without memo file")
                    break