1 "table definitions"
2 import os
3 import sys
4 import csv
5 import codecs
6 import unicodedata
7 import weakref
8 import locale
9 from array import array
10 from decimal import Decimal
11 from shutil import copyfileobj
12 from dbf import _io as io
13 from dbf.dates import Date, DateTime, Time
14 from dbf.exceptions import Bof, Eof, DbfError, DataOverflow, FieldMissing, NonUnicode
15
16 input_decoding = locale.getdefaultlocale()[1]
17 default_codepage = 'cp1252'
18 return_ascii = True
19
20 version_map = {
21 '\x02' : 'FoxBASE',
22 '\x03' : 'dBase III Plus',
23 '\x04' : 'dBase IV',
24 '\x05' : 'dBase V',
25 '\x30' : 'Visual FoxPro',
26 '\x31' : 'Visual FoxPro (auto increment field)',
27 '\x43' : 'dBase IV SQL',
28 '\x7b' : 'dBase IV w/memos',
29 '\x83' : 'dBase III Plus w/memos',
30 '\x8b' : 'dBase IV w/memos',
31 '\x8e' : 'dBase IV w/SQL table',
32 '\xf5' : 'FoxPro w/memos'}
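# Illustrative sketch (not part of the module API): the version byte is the
# first byte of a .dbf file, so a table's flavor can be checked before opening
# it (the file name below is hypothetical):
#
#     fd = open('example.dbf', 'rb')
#     version = fd.read(1)
#     fd.close()
#     print version_map.get(version, 'unknown (%r)' % version)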
33
34 code_pages = {
35 '\x00' : ('ascii', "plain ol' ascii"),
36 '\x01' : ('cp437', 'U.S. MS-DOS'),
37 '\x02' : ('cp850', 'International MS-DOS'),
38 '\x03' : ('cp1252', 'Windows ANSI'),
39 '\x04' : ('mac_roman', 'Standard Macintosh'),
40 '\x08' : ('cp865', 'Danish OEM'),
41 '\x09' : ('cp437', 'Dutch OEM'),
42 '\x0A' : ('cp850', 'Dutch OEM (secondary)'),
43 '\x0B' : ('cp437', 'Finnish OEM'),
44 '\x0D' : ('cp437', 'French OEM'),
45 '\x0E' : ('cp850', 'French OEM (secondary)'),
46 '\x0F' : ('cp437', 'German OEM'),
47 '\x10' : ('cp850', 'German OEM (secondary)'),
48 '\x11' : ('cp437', 'Italian OEM'),
49 '\x12' : ('cp850', 'Italian OEM (secondary)'),
50 '\x13' : ('cp932', 'Japanese Shift-JIS'),
51 '\x14' : ('cp850', 'Spanish OEM (secondary)'),
52 '\x15' : ('cp437', 'Swedish OEM'),
53 '\x16' : ('cp850', 'Swedish OEM (secondary)'),
54 '\x17' : ('cp865', 'Norwegian OEM'),
55 '\x18' : ('cp437', 'Spanish OEM'),
56 '\x19' : ('cp437', 'English OEM (Britain)'),
57 '\x1A' : ('cp850', 'English OEM (Britain) (secondary)'),
58 '\x1B' : ('cp437', 'English OEM (U.S.)'),
59 '\x1C' : ('cp863', 'French OEM (Canada)'),
60 '\x1D' : ('cp850', 'French OEM (secondary)'),
61 '\x1F' : ('cp852', 'Czech OEM'),
62 '\x22' : ('cp852', 'Hungarian OEM'),
63 '\x23' : ('cp852', 'Polish OEM'),
'\x24' : ('cp860', 'Portuguese OEM'),
'\x25' : ('cp850', 'Portuguese OEM (secondary)'),
66 '\x26' : ('cp866', 'Russian OEM'),
67 '\x37' : ('cp850', 'English OEM (U.S.) (secondary)'),
68 '\x40' : ('cp852', 'Romanian OEM'),
69 '\x4D' : ('cp936', 'Chinese GBK (PRC)'),
70 '\x4E' : ('cp949', 'Korean (ANSI/OEM)'),
71 '\x4F' : ('cp950', 'Chinese Big 5 (Taiwan)'),
72 '\x50' : ('cp874', 'Thai (ANSI/OEM)'),
73 '\x57' : ('cp1252', 'ANSI'),
74 '\x58' : ('cp1252', 'Western European ANSI'),
75 '\x59' : ('cp1252', 'Spanish ANSI'),
76 '\x64' : ('cp852', 'Eastern European MS-DOS'),
77 '\x65' : ('cp866', 'Russian MS-DOS'),
78 '\x66' : ('cp865', 'Nordic MS-DOS'),
79 '\x67' : ('cp861', 'Icelandic MS-DOS'),
80 '\x68' : (None, 'Kamenicky (Czech) MS-DOS'),
81 '\x69' : (None, 'Mazovia (Polish) MS-DOS'),
82 '\x6a' : ('cp737', 'Greek MS-DOS (437G)'),
83 '\x6b' : ('cp857', 'Turkish MS-DOS'),
84 '\x78' : ('cp950', 'Traditional Chinese (Hong Kong SAR, Taiwan) Windows'),
85 '\x79' : ('cp949', 'Korean Windows'),
86 '\x7a' : ('cp936', 'Chinese Simplified (PRC, Singapore) Windows'),
87 '\x7b' : ('cp932', 'Japanese Windows'),
88 '\x7c' : ('cp874', 'Thai Windows'),
89 '\x7d' : ('cp1255', 'Hebrew Windows'),
90 '\x7e' : ('cp1256', 'Arabic Windows'),
91 '\xc8' : ('cp1250', 'Eastern European Windows'),
92 '\xc9' : ('cp1251', 'Russian Windows'),
93 '\xca' : ('cp1254', 'Turkish Windows'),
94 '\xcb' : ('cp1253', 'Greek Windows'),
95 '\x96' : ('mac_cyrillic', 'Russian Macintosh'),
96 '\x97' : ('mac_latin2', 'Macintosh EE'),
97 '\x98' : ('mac_greek', 'Greek Macintosh') }
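# Illustrative sketch (an assumption, mirroring _TableHeader.codepage below):
# the language driver id lives at offset 29 of the 32-byte table header and
# maps to a python codec via this table (file name is hypothetical):
#
#     fd = open('example.dbf', 'rb')
#     header = fd.read(32)
#     fd.close()
#     codec, description = code_pages.get(header[29], (None, 'unknown'))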
98
if sys.version_info[:2] < (2, 6):
    # define our own property type for python < 2.6
    class property(object):
        "Emulate PyProperty_Type() in Objects/descrobject.c"
        def __init__(self, fget=None, fset=None, fdel=None, doc=None):
            self.fget = fget
            self.fset = fset
            self.fdel = fdel
            self.__doc__ = doc or fget.__doc__
        def getter(self, func):
            self.fget = func
            if not self.__doc__:
                self.__doc__ = func.__doc__
            return self
        def __get__(self, obj, objtype=None):
            if obj is None:
                return self
            if self.fget is None:
                raise AttributeError, "unreadable attribute"
            return self.fget(obj)
        def __set__(self, obj, value):
            if self.fset is None:
                raise AttributeError, "can't set attribute"
            self.fset(obj, value)
        def __delete__(self, obj):
            if self.fdel is None:
                raise AttributeError, "can't delete attribute"
            self.fdel(obj)
        def setter(self, func):
            self.fset = func
            return self
        def deleter(self, func):
            self.fdel = func
            return self
class _DbfRecord(object):
134 """Provides routines to extract and save data within the fields of a dbf record."""
135 __slots__ = ['_recnum', '_layout', '_data', '__weakref__']
137 """calls appropriate routine to fetch value stored in field from array
138 @param record_data: the data portion of the record
139 @type record_data: array of characters
140 @param fielddef: description of the field definition
141 @type fielddef: dictionary with keys 'type', 'start', 'length', 'end', 'decimals', and 'flags'
142 @returns: python data stored in field"""
143
144 field_type = fielddef['type']
145 retrieve = yo._layout.fieldtypes[field_type]['Retrieve']
146 datum = retrieve(record_data, fielddef, yo._layout.memo)
147 if field_type in yo._layout.character_fields:
148 datum = yo._layout.decoder(datum)[0]
149 if yo._layout.return_ascii:
150 try:
151 datum = yo._layout.output_encoder(datum)[0]
152 except UnicodeEncodeError:
153 datum = unicodedata.normalize('NFD', datum).encode('ascii','ignore')
154 return datum
156 "calls appropriate routine to convert value to ascii bytes, and save it in record"
157 field_type = fielddef['type']
158 update = yo._layout.fieldtypes[field_type]['Update']
159 if field_type in yo._layout.character_fields:
160 if not isinstance(value, unicode):
161 if yo._layout.input_decoder is None:
162 raise NonUnicode("String not in unicode format, no default encoding specified")
163 value = yo._layout.input_decoder(value)[0]
164 value = yo._layout.encoder(value)[0]
165 bytes = array('c', update(value, fielddef, yo._layout.memo))
166 size = fielddef['length']
167 if len(bytes) > size:
168 raise DataOverflow("tried to store %d bytes in %d byte field" % (len(bytes), size))
169 blank = array('c', ' ' * size)
170 start = fielddef['start']
171 end = start + size
172 blank[:len(bytes)] = bytes[:]
173 yo._data[start:end] = blank[:]
174 yo._updateDisk(yo._recnum * yo._layout.header.record_length + yo._layout.header.start, yo._data.tostring())
186 results = []
187 if not specs:
188 specs = yo._layout.index
189 specs = _normalize_tuples(tuples=specs, length=2, filler=[_nop])
190 for field, func in specs:
191 results.append(func(yo[field]))
192 return tuple(results)
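# Illustrative call of a record as a function (record and field names are
# hypothetical); each spec is a (fieldname[, function]) tuple, and missing
# functions are filled in with _nop:
#
#     record(('name',), ('age', str))      # -> ('John Doe', '35')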
193
199 if name[0:2] == '__' and name[-2:] == '__':
200 raise AttributeError, 'Method %s is not implemented.' % name
201 elif not name in yo._layout.fields:
202 raise FieldMissing(name)
203 try:
204 fielddef = yo._layout[name]
205 value = yo._retrieveFieldValue(yo._data[fielddef['start']:fielddef['end']], fielddef)
206 return value
207 except DbfError, error:
208 error.message = "field --%s-- is %s -> %s" % (name, yo._layout.fieldtypes[fielddef['type']]['Type'], error.message)
209 raise
def __new__(cls, recnum, layout, kamikaze='', _fromdisk=False):
272 result = []
273 for field in yo.field_names:
274 result.append("%-10s: %s" % (field, yo[field]))
275 return '\n'.join(result)
277 return yo._data.tostring()
279 "creates a blank record data chunk"
280 layout = yo._layout
281 ondisk = layout.ondisk
282 layout.ondisk = False
283 yo._data = array('c', ' ' * layout.header.record_length)
284 layout.memofields = []
285 for field in layout.fields:
286 yo._updateFieldValue(layout[field], layout.fieldtypes[layout[field]['type']]['Blank']())
287 if layout[field]['type'] in layout.memotypes:
288 layout.memofields.append(field)
289 layout.blankrecord = yo._data[:]
290 layout.ondisk = ondisk
291 @property
293 "physical record number"
294 return yo._recnum
295 @property
297 "marked for deletion?"
298 return yo._data[0] == '*'
299 @property
308 "saves a dictionary into a records fields\nkeys with no matching field will raise a FieldMissing exception unless drop = True"
309 for key in dict:
310 if not key in yo.field_names:
311 if drop:
312 continue
313 raise FieldMissing(key)
314 yo.__setattr__(key, dict[key])
329 "returns a dictionary of fieldnames and values which can be used with gather_fields(). if blank is True, values are empty."
330 keys = yo._layout.fields
331 if blank:
332 values = [yo._layout.fieldtypes[yo._layout[key]['type']]['Blank']() for key in keys]
333 else:
334 values = [yo[field] for field in keys]
335 return dict(zip(keys, values))
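# Illustrative round trip (assuming the method above is named scatter_fields;
# record and field names are hypothetical):
#
#     data = record.scatter_fields()       # {'name': 'John Doe', 'age': 35, ...}
#     data['age'] = 36
#     record.gather_fields(data)           # write the changed values back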
341 """Provides access to memo fields as dictionaries
342 must override _init, _get_memo, and _put_memo to
343 store memo contents to disk"""
345 "initialize disk file usage"
347 "retrieve memo contents from disk"
349 "store memo contents to disk"
351 ""
352 yo.meta = meta
353 yo.memory = {}
354 yo.nextmemo = 1
355 yo._init()
356 yo.meta.newmemofile = False
358 "gets the memo in block"
359 if yo.meta.ignorememos or not block:
360 return ''
361 if yo.meta.ondisk:
362 return yo._get_memo(block)
363 else:
364 return yo.memory[block]
366 "stores data in memo file, returns block number"
367 if yo.meta.ignorememos or data == '':
368 return 0
369 if yo.meta.inmemory:
370 thismemo = yo.nextmemo
371 yo.nextmemo += 1
372 yo.memory[thismemo] = data
373 else:
374 thismemo = yo._put_memo(data)
375 return thismemo
378 "dBase III specific"
379 yo.meta.memo_size= 512
380 yo.record_header_length = 2
381 if yo.meta.ondisk and not yo.meta.ignorememos:
382 if yo.meta.newmemofile:
383 yo.meta.mfd = open(yo.meta.memoname, 'w+b')
384 yo.meta.mfd.write(io.packLongInt(1) + '\x00' * 508)
385 else:
386 try:
387 yo.meta.mfd = open(yo.meta.memoname, 'r+b')
388 yo.meta.mfd.seek(0)
389 yo.nextmemo = io.unpackLongInt(yo.meta.mfd.read(4))
390 except:
391 raise DbfError("memo file appears to be corrupt")
393 block = int(block)
394 yo.meta.mfd.seek(block * yo.meta.memo_size)
395 eom = -1
396 data = ''
397 while eom == -1:
398 newdata = yo.meta.mfd.read(yo.meta.memo_size)
399 if not newdata:
400 return data
401 data += newdata
402 eom = data.find('\x1a\x1a')
403 return data[:eom].rstrip()
405 length = len(data) + yo.record_header_length
406 blocks = length // yo.meta.memo_size
407 if length % yo.meta.memo_size:
408 blocks += 1
409 thismemo = yo.nextmemo
410 yo.nextmemo = thismemo + blocks
411 yo.meta.mfd.seek(0)
412 yo.meta.mfd.write(io.packLongInt(yo.nextmemo))
413 yo.meta.mfd.seek(thismemo * yo.meta.memo_size)
414 yo.meta.mfd.write(data)
415 yo.meta.mfd.write('\x1a\x1a')
416 if len(yo._get_memo(thismemo)) != len(data):
417 raise DbfError("unknown error: memo not saved")
418 return thismemo
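# Orientation note (derived from the code above, not additional API): dBase III
# memos live in fixed 512-byte blocks of the .dbt file; a memo of len(data)
# bytes plus its two-byte '\x1a\x1a' terminator occupies
#
#     blocks = (len(data) + 2) // 512 + (1 if (len(data) + 2) % 512 else 0)
#
# blocks, and the first four bytes of the file always hold the next free
# block number.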
421 "Visual Foxpro 6 specific"
422 if yo.meta.ondisk and not yo.meta.ignorememos:
423 yo.record_header_length = 8
424 if yo.meta.newmemofile:
425 if yo.meta.memo_size == 0:
426 yo.meta.memo_size = 1
427 elif 1 < yo.meta.memo_size < 33:
428 yo.meta.memo_size *= 512
429 yo.meta.mfd = open(yo.meta.memoname, 'w+b')
430 nextmemo = 512 // yo.meta.memo_size
431 if nextmemo * yo.meta.memo_size < 512:
432 nextmemo += 1
433 yo.nextmemo = nextmemo
434 yo.meta.mfd.write(io.packLongInt(nextmemo, bigendian=True) + '\x00\x00' + \
435 io.packShortInt(yo.meta.memo_size, bigendian=True) + '\x00' * 504)
436 else:
437 try:
438 yo.meta.mfd = open(yo.meta.memoname, 'r+b')
439 yo.meta.mfd.seek(0)
440 header = yo.meta.mfd.read(512)
441 yo.nextmemo = io.unpackLongInt(header[:4], bigendian=True)
442 yo.meta.memo_size = io.unpackShortInt(header[6:8], bigendian=True)
443 except:
444 raise DbfError("memo file appears to be corrupt")
446 yo.meta.mfd.seek(block * yo.meta.memo_size)
447 header = yo.meta.mfd.read(8)
448 length = io.unpackLongInt(header[4:], bigendian=True)
449 return yo.meta.mfd.read(length)
451 yo.meta.mfd.seek(0)
452 thismemo = io.unpackLongInt(yo.meta.mfd.read(4), bigendian=True)
453 yo.meta.mfd.seek(0)
454 length = len(data) + yo.record_header_length
455 blocks = length // yo.meta.memo_size
456 if length % yo.meta.memo_size:
457 blocks += 1
458 yo.meta.mfd.write(io.packLongInt(thismemo+blocks, bigendian=True))
459 yo.meta.mfd.seek(thismemo*yo.meta.memo_size)
460 yo.meta.mfd.write('\x00\x00\x00\x01' + io.packLongInt(len(data), bigendian=True) + data)
461 return thismemo
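# Orientation note (derived from the code above): a Visual FoxPro .fpt file
# starts with a 512-byte header -- bytes 0-3 hold the next free block
# (big-endian) and bytes 6-7 the block size -- and each stored memo is
# prefixed with an 8-byte record header: '\x00\x00\x00\x01' (type 1 = memo)
# followed by the data length, also big-endian.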
463 """Provides a framework for dbf style tables."""
464 _version = 'basic memory table'
465 _versionabbv = 'dbf'
466 _fieldtypes = {
467 'D' : { 'Type':'Date', 'Init':io.addDate, 'Blank':Date.today, 'Retrieve':io.retrieveDate, 'Update':io.updateDate, },
468 'L' : { 'Type':'Logical', 'Init':io.addLogical, 'Blank':bool, 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, },
469 'M' : { 'Type':'Memo', 'Init':io.addMemo, 'Blank':str, 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, } }
470 _memoext = ''
_memotypes = ('M',)
472 _memoClass = _DbfMemo
473 _yesMemoMask = ''
474 _noMemoMask = ''
475 _fixed_fields = ('M','D','L')
476 _variable_fields = tuple()
_character_fields = ('M',)
478 _decimal_fields = tuple()
479 _numeric_fields = tuple()
480 _dbfTableHeader = array('c', '\x00' * 32)
481 _dbfTableHeader[0] = '\x00'
482 _dbfTableHeader[8:10] = array('c', io.packShortInt(33))
483 _dbfTableHeader[10] = '\x01'
484 _dbfTableHeader[29] = '\x00'
485 _dbfTableHeader = _dbfTableHeader.tostring()
486 _dbfTableHeaderExtra = ''
487 _supported_tables = []
488 _read_only = False
489 _meta_only = False
490 _use_deleted = True
491 _backed_up = False
509 if len(data) != 32:
510 raise DbfError('table header should be 32 bytes, but is %d bytes' % len(data))
511 yo._data = array('c', data + '\x0d')
513 "get/set code page of table"
514 if cp is None:
515 return yo._data[29]
516 else:
517 if cp not in code_pages:
518 for code_page in sorted(code_pages.keys()):
519 sd, ld = code_pages[code_page]
520 if cp == sd or cp == ld:
521 if sd is None:
522 raise DbfError("Unsupported codepage: %s" % ld)
523 cp = code_page
524 break
525 else:
526 raise DbfError("Unsupported codepage: %s" % cp)
527 yo._data[29] = cp
528 return cp
529 @property
535 @data.setter
537 if len(bytes) < 32:
538 raise DbfError("length for data of %d is less than 32" % len(bytes))
539 yo._data[:] = array('c', bytes)
540 @property
542 "extra dbf info (located after headers, before data records)"
543 fieldblock = yo._data[32:]
544 for i in range(len(fieldblock)//32+1):
545 cr = i * 32
546 if fieldblock[cr] == '\x0d':
547 break
548 else:
549 raise DbfError("corrupt field structure")
550 cr += 33
551 return yo._data[cr:].tostring()
552 @extra.setter
554 fieldblock = yo._data[32:]
555 for i in range(len(fieldblock)//32+1):
556 cr = i * 32
557 if fieldblock[cr] == '\x0d':
558 break
559 else:
560 raise DbfError("corrupt field structure")
561 cr += 33
562 yo._data[cr:] = array('c', data)
563 yo._data[8:10] = array('c', io.packShortInt(len(yo._data)))
564 @property
566 "number of fields (read-only)"
567 fieldblock = yo._data[32:]
568 for i in range(len(fieldblock)//32+1):
569 cr = i * 32
570 if fieldblock[cr] == '\x0d':
571 break
572 else:
573 raise DbfError("corrupt field structure")
574 return len(fieldblock[:cr]) // 32
575 @property
577 "field block structure"
578 fieldblock = yo._data[32:]
579 for i in range(len(fieldblock)//32+1):
580 cr = i * 32
581 if fieldblock[cr] == '\x0d':
582 break
583 else:
584 raise DbfError("corrupt field structure")
585 return fieldblock[:cr].tostring()
586 @fields.setter
588 fieldblock = yo._data[32:]
589 for i in range(len(fieldblock)//32+1):
590 cr = i * 32
591 if fieldblock[cr] == '\x0d':
592 break
593 else:
594 raise DbfError("corrupt field structure")
595 cr += 32
596 fieldlen = len(block)
597 if fieldlen % 32 != 0:
598 raise DbfError("fields structure corrupt: %d is not a multiple of 32" % fieldlen)
599 yo._data[32:cr] = array('c', block)
600 yo._data[8:10] = array('c', io.packShortInt(len(yo._data)))
601 fieldlen = fieldlen // 32
602 recordlen = 1
603 for i in range(fieldlen):
604 recordlen += ord(block[i*32+16])
605 yo._data[10:12] = array('c', io.packShortInt(recordlen))
606 @property
608 "number of records (maximum 16,777,215)"
609 return io.unpackLongInt(yo._data[4:8].tostring())
610 @record_count.setter
613 @property
615 "length of a record (read_only) (max of 65,535)"
616 return io.unpackShortInt(yo._data[10:12].tostring())
617 @property
619 "starting position of first record in file (must be within first 64K)"
620 return io.unpackShortInt(yo._data[8:10].tostring())
621 @start.setter
624 @property
626 "date of last table modification (read-only)"
627 return io.unpackDate(yo._data[1:4].tostring())
628 @property
630 "dbf version"
631 return yo._data[0]
632 @version.setter
636 "implements the weakref table for records"
638 yo._meta = meta
639 yo._weakref_list = [weakref.ref(lambda x: None)] * count
653 yo._weakref_list.append(weakref.ref(record))
655 "returns records using current index"
657 yo._table = table
658 yo._index = -1
659 yo._more_records = True
663 while yo._more_records:
664 yo._index += 1
665 if yo._index >= len(yo._table):
666 yo._more_records = False
667 continue
668 record = yo._table[yo._index]
669 if not yo._table.use_deleted and record.has_been_deleted:
670 continue
671 return record
672 else:
673 raise StopIteration
675 "constructs fieldblock for disk table"
676 fieldblock = array('c', '')
677 memo = False
678 yo._meta.header.version = chr(ord(yo._meta.header.version) & ord(yo._noMemoMask))
679 for field in yo._meta.fields:
680 if yo._meta.fields.count(field) > 1:
681 raise DbfError("corrupted field structure (noticed in _buildHeaderFields)")
682 fielddef = array('c', '\x00' * 32)
683 fielddef[:11] = array('c', io.packStr(field))
684 fielddef[11] = yo._meta[field]['type']
685 fielddef[12:16] = array('c', io.packLongInt(yo._meta[field]['start']))
686 fielddef[16] = chr(yo._meta[field]['length'])
687 fielddef[17] = chr(yo._meta[field]['decimals'])
688 fielddef[18] = chr(yo._meta[field]['flags'])
689 fieldblock.extend(fielddef)
690 if yo._meta[field]['type'] in yo._meta.memotypes:
691 memo = True
692 yo._meta.header.fields = fieldblock.tostring()
693 if memo:
694 yo._meta.header.version = chr(ord(yo._meta.header.version) | ord(yo._yesMemoMask))
695 if yo._meta.memo is None:
696 yo._meta.memo = yo._memoClass(yo._meta)
698 "dBase III specific"
699 if yo._meta.header.version == '\x83':
700 try:
701 yo._meta.memo = yo._memoClass(yo._meta)
702 except:
703 yo._meta.dfd.close()
704 yo._meta.dfd = None
705 raise
706 if not yo._meta.ignorememos:
707 for field in yo._meta.fields:
708 if yo._meta[field]['type'] in yo._memotypes:
709 if yo._meta.header.version != '\x83':
710 yo._meta.dfd.close()
711 yo._meta.dfd = None
712 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos")
713 elif not os.path.exists(yo._meta.memoname):
714 yo._meta.dfd.close()
715 yo._meta.dfd = None
716 raise DbfError("Table structure corrupt: memo fields exist without memo file")
717 break
719 "builds the FieldList of names, types, and descriptions from the disk file"
720 offset = 1
721 fieldsdef = yo._meta.header.fields
722 if len(fieldsdef) % 32 != 0:
723 raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
724 if len(fieldsdef) // 32 != yo.field_count:
725 raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32))
726 for i in range(yo.field_count):
727 fieldblock = fieldsdef[i*32:(i+1)*32]
728 name = io.unpackStr(fieldblock[:11])
729 type = fieldblock[11]
730 if not type in yo._meta.fieldtypes:
731 raise DbfError("Unknown field type: %s" % type)
732 start = offset
733 length = ord(fieldblock[16])
734 offset += length
735 end = start + length
736 decimals = ord(fieldblock[17])
737 flags = ord(fieldblock[18])
738 yo._meta.fields.append(name)
739 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
741 "Returns field information Name Type(Length[,Decimals])"
742 name = yo._meta.fields[i]
743 type = yo._meta[name]['type']
744 length = yo._meta[name]['length']
745 decimals = yo._meta[name]['decimals']
746 if type in yo._decimal_fields:
747 description = "%s %s(%d,%d)" % (name, type, length, decimals)
748 elif type in yo._fixed_fields:
749 description = "%s %s" % (name, type)
750 else:
751 description = "%s %s(%d)" % (name, type, length)
752 return description
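# Examples of the strings produced (and accepted by add_fields), with
# hypothetical field names:
#
#     "name C(25)"          variable-length field: length shown
#     "amount N(11,2)"      decimal field: length and decimals shown
#     "paid L"              fixed-size field: no length shown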
754 "loads the records from disk to memory"
755 if yo._meta_only:
756 raise DbfError("%s has been closed, records are unavailable" % yo.filename)
757 dfd = yo._meta.dfd
758 header = yo._meta.header
759 dfd.seek(header.start)
760 allrecords = dfd.read()
761 dfd.seek(0)
762 length = header.record_length
763 for i in range(header.record_count):
764 record_data = allrecords[length*i:length*i+length]
765 yo._table.append(_DbfRecord(i, yo._meta, allrecords[length*i:length*i+length], _fromdisk=True))
766 yo._index.append(i)
767 dfd.seek(0)
769 if specs is None:
770 specs = yo.field_names
771 elif isinstance(specs, str):
772 specs = specs.split(sep)
773 else:
774 specs = list(specs)
775 specs = [s.strip() for s in specs]
776 return specs
778 "synchronizes the disk file with current data"
779 if yo._meta.inmemory:
780 return
781 fd = yo._meta.dfd
782 fd.seek(0)
783 fd.write(yo._meta.header.data)
784 if not headeronly:
785 for record in yo._table:
786 record._updateDisk()
787 fd.flush()
788 fd.truncate(yo._meta.header.start + yo._meta.header.record_count * yo._meta.header.record_length)
796 if name in ('_index','_table'):
797 if yo._meta.ondisk:
798 yo._table = yo._Table(len(yo), yo._meta)
799 yo._index = range(len(yo))
800 else:
801 yo._table = []
802 yo._index = []
803 yo._loadtable()
804 return object.__getattribute__(yo, name)
806 if type(value) == int:
807 if not -yo._meta.header.record_count <= value < yo._meta.header.record_count:
808 raise IndexError("Record %d is not in table." % value)
809 return yo._table[yo._index[value]]
810 elif type(value) == slice:
811 sequence = []
812 for index in yo._index[value]:
813 record = yo._table[index]
814 if yo.use_deleted is True or not record.has_been_deleted:
815 sequence.append(record)
816 return DbfList(yo, sequence, desc='%s --> %s' % (yo.filename, value))
817 else:
818 raise TypeError('type <%s> not valid for indexing' % type(value))
def __init__(yo, filename=':memory:', field_specs=None, memo_size=128, ignore_memos=False,
820 read_only=False, keep_memos=False, meta_only=False, codepage=None):
821 """open/create dbf file
822 filename should include path if needed
823 field_specs can be either a ;-delimited string or a list of strings
824 memo_size is always 512 for db3 memos
825 ignore_memos is useful if the memo file is missing or corrupt
826 read_only will load records into memory, then close the disk file
827 keep_memos will also load any memo fields into memory
828 meta_only will ignore all records, keeping only basic table information
829 codepage will override whatever is set in the table itself"""
830 if filename == ':memory:':
831 if field_specs is None:
832 raise DbfError("field list must be specified for in-memory tables")
833 elif type(yo) is DbfTable:
834 raise DbfError("only memory tables supported")
835 yo._meta = meta = yo._MetaData()
836 meta.filename = filename
837 meta.fields = []
838 meta.fieldtypes = yo._fieldtypes
839 meta.fixed_fields = yo._fixed_fields
840 meta.variable_fields = yo._variable_fields
841 meta.character_fields = yo._character_fields
842 meta.decimal_fields = yo._decimal_fields
843 meta.numeric_fields = yo._numeric_fields
844 meta.memotypes = yo._memotypes
845 meta.ignorememos = ignore_memos
846 meta.memo_size = memo_size
847 meta.input_decoder = codecs.getdecoder(input_decoding)
848 meta.output_encoder = codecs.getencoder(input_decoding)
849 meta.return_ascii = return_ascii
850 meta.header = header = yo._TableHeader(yo._dbfTableHeader)
851 header.extra = yo._dbfTableHeaderExtra
852 header.data
853 yo.codepage = codepage or default_codepage
854 if filename == ':memory:':
855 yo._index = []
856 yo._table = []
857 meta.ondisk = False
858 meta.inmemory = True
859 meta.memoname = ':memory:'
860 else:
861 base, ext = os.path.splitext(filename)
862 if ext == '':
863 meta.filename = base + '.dbf'
864 meta.memoname = base + yo._memoext
865 meta.ondisk = True
866 meta.inmemory = False
867 if field_specs:
868 if meta.ondisk:
869 meta.dfd = open(meta.filename, 'w+b')
870 meta.newmemofile = True
871 yo.add_fields(field_specs)
872 return
873 dfd = meta.dfd = open(meta.filename, 'r+b')
874 dfd.seek(0)
875 meta.header = header = yo._TableHeader(dfd.read(32))
876 if not header.version in yo._supported_tables:
877 dfd.close()
878 dfd = None
879 raise TypeError("Unsupported dbf type: %s [%x]" % (version_map.get(meta.header.version, 'Unknown: %s' % meta.header.version), ord(meta.header.version)))
880 yo.codepage = meta.header.codepage()
881 fieldblock = dfd.read(header.start - 32)
882 for i in range(len(fieldblock)//32+1):
883 fieldend = i * 32
884 if fieldblock[fieldend] == '\x0d':
885 break
886 else:
887 raise DbfError("corrupt field structure in header")
888 if len(fieldblock[:fieldend]) % 32 != 0:
889 raise DbfError("corrupt field structure in header")
890 header.fields = fieldblock[:fieldend]
891 header.extra = fieldblock[fieldend+1:]
892 yo._initializeFields()
893 yo._checkMemoIntegrity()
894 meta.current = -1
895 dfd.seek(0)
896 if meta_only:
897 yo.close(keep_table=False, keep_memos=False)
898 elif read_only:
899 yo.close(keep_table=True, keep_memos=keep_memos)
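# Usage sketch for the constructor above (file names are hypothetical, and the
# concrete dBase III subclass is called Db3Table here for illustration):
#
#     table = Db3Table('customers.dbf')                            # open existing table
#     fresh = Db3Table('new_table', 'name C(25); amount N(11,2)')  # create on disk
#     temp  = Db3Table(':memory:', 'name C(25)')                   # in-memory only
#     quick = Db3Table('customers.dbf', read_only=True)            # load, then close file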
907 if yo._read_only:
908 return __name__ + ".Table('%s', read_only=True)" % yo._meta.filename
909 elif yo._meta_only:
910 return __name__ + ".Table('%s', meta_only=True)" % yo._meta.filename
911 else:
912 return __name__ + ".Table('%s')" % yo._meta.filename
914 if yo._read_only:
915 status = "read-only"
916 elif yo._meta_only:
917 status = "meta-only"
918 else:
919 status = "read/write"
920 str = """
921 Table: %s
922 Type: %s
923 Codepage: %s
924 Status: %s
925 Last updated: %s
926 Record count: %d
927 Field count: %d
928 Record length: %d
929 """ % (yo.filename, version_map.get(yo._meta.header.version, 'unknown - ' + hex(ord(yo._meta.header.version))),
930 yo.codepage, status, yo.last_update, len(yo), yo.field_count, yo.record_length)
931 str += "\n --Fields--\n"
932 for i in range(len(yo._meta.fields)):
933 str += " " + yo._fieldLayout(i) + "\n"
934 return str
935 @property
937 return "%s (%s)" % code_pages[yo._meta.header.codepage()]
938 @codepage.setter
def codepage(yo, cp):
940 result = yo._meta.header.codepage(cp)
941 yo._meta.decoder = codecs.getdecoder(code_pages[result][0])
942 yo._meta.encoder = codecs.getencoder(code_pages[result][0])
943 @property
945 "the number of fields in the table"
946 return yo._meta.header.field_count
947 @property
949 "a list of the fields in the table"
950 return yo._meta.fields[:]
951 @property
953 "table's file name, including path (if specified on open)"
954 return yo._meta.filename
955 @property
957 "date of last update"
958 return yo._meta.header.update
959 @property
961 "table's memo name (if path included in filename on open)"
962 return yo._meta.memoname
963 @property
965 "number of bytes in a record"
966 return yo._meta.header.record_length
967 @property
969 "index number of the current record"
970 return yo._meta.current
971 @property
975 @property
977 "process or ignore deleted records"
978 return yo._use_deleted
979 @use_deleted.setter
982 @property
984 "returns the dbf type of the table"
985 return yo._version
987 """adds field(s) to the table layout; format is Name Type(Length,Decimals)[; Name Type(Length,Decimals)[...]]
988 backup table is created with _backup appended to name
989 then modifies current structure"""
990 all_records = [record for record in yo]
991 if yo:
992 yo.create_backup()
993 yo._meta.blankrecord = None
994 meta = yo._meta
995 offset = meta.header.record_length
996 fields = yo._list_fields(field_specs, sep=';')
997 for field in fields:
998 try:
999 name, format = field.split()
1000 if name[0] == '_' or name[0].isdigit() or not name.replace('_','').isalnum():
1001 raise DbfError("Field names cannot start with _ or digits, and can only contain the _, letters, and digits")
1002 name = name.lower()
1003 if name in meta.fields:
1004 raise DbfError("Field '%s' already exists" % name)
1005 field_type = format[0].upper()
1006 if len(name) > 10:
1007 raise DbfError("Maximum field name length is 10. '%s' is %d characters long." % (name, len(name)))
1008 if not field_type in meta.fieldtypes.keys():
1009 raise DbfError("Unknown field type: %s" % field_type)
1010 length, decimals = yo._meta.fieldtypes[field_type]['Init'](format)
1011 except ValueError:
1012 raise DbfError("invalid field specifier: %s" % field)
1013 start = offset
1014 end = offset + length
1015 offset = end
1016 meta.fields.append(name)
1017 meta[name] = {'type':field_type, 'start':start, 'length':length, 'end':end, 'decimals':decimals, 'flags':0}
1018 if meta[name]['type'] in yo._memotypes and meta.memo is None:
1019 meta.memo = yo._memoClass(meta)
1020 for record in yo:
1021 record[name] = meta.fieldtypes[field_type]['Blank']()
1022 yo._buildHeaderFields()
1023 yo._updateDisk()
def append(yo, kamikaze='', drop=False, multiple=1):
1025 "adds <multiple> blank records, and fills fields with dict/tuple values if present"
1026 if not yo.field_count:
1027 raise DbfError("No fields defined, cannot append")
1028 dictdata = False
1029 tupledata = False
1030 if not isinstance(kamikaze, _DbfRecord):
1031 if isinstance(kamikaze, dict):
1032 dictdata = kamikaze
1033 kamikaze = ''
1034 elif isinstance(kamikaze, tuple):
1035 tupledata = kamikaze
1036 kamikaze = ''
1037 newrecord = _DbfRecord(recnum=yo._meta.header.record_count, layout=yo._meta, kamikaze=kamikaze)
1038 yo._table.append(newrecord)
1039 yo._index.append(yo._meta.header.record_count)
1040 yo._meta.header.record_count += 1
1041 if dictdata:
1042 newrecord.gather_fields(dictdata, drop)
1043 elif tupledata:
1044 for index, item in enumerate(tupledata):
1045 newrecord[index] = item
elif isinstance(kamikaze, str):
1047 for field in yo._meta.memofields:
1048 newrecord[field] = ''
1049 elif kamikaze:
1050 for field in yo._meta.memofields:
1051 newrecord[field] = kamikaze[field]
1052 multiple -= 1
1053 if multiple:
1054 data = newrecord._data
1055 single = yo._meta.header.record_count
1056 total = single + multiple
1057 while single < total:
1058 multi_record = _DbfRecord(single, yo._meta, kamikaze=data)
1059 yo._table.append(multi_record)
1060 yo._index.append(single)
1061 for field in yo._meta.memofields:
1062 multi_record[field] = newrecord[field]
1063 single += 1
1064 yo._meta.header.record_count = total
1065 yo._meta.current = yo._meta.header.record_count - 1
1066 newrecord = multi_record
1067 yo._updateDisk(headeronly=True)
1068 return newrecord
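# append() usage sketch (hypothetical table with fields 'name' and 'amount'):
#
#     table.append()                                   # one blank record
#     table.append({'name': 'John Doe', 'amount': 9})  # fill from a dict
#     table.append(('Jane Doe', 10))                   # fill from a tuple, in field order
#     table.append(multiple=100)                       # add 100 blank records at once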
1070 "moves record pointer to previous usable record; returns True if no more usable records"
1071 while yo._meta.current > 0:
1072 yo._meta.current -= 1
1073 if yo.use_deleted or not yo.current().has_been_deleted:
1074 break
1075 else:
1076 yo._meta.current = -1
1077 return True
1078 return False
def bottom(yo, get_record=False):
"""sets record pointer to bottom of table
if get_record, seeks to and returns last (non-deleted) record
DbfError if table is empty
Eof if all records are deleted and use_deleted is False"""
1084 yo._meta.current = yo._meta.header.record_count
1085 if get_record:
1086 try:
1087 return yo.prev()
1088 except Bof:
1089 yo._meta.current = yo._meta.header.record_count
1090 raise Eof()
def close(yo, keep_table=False, keep_memos=False):
1092 """closes disk files
1093 ensures table data is available if keep_table
1094 ensures memo data is available if keep_memos"""
1095 if keep_table:
1096 yo._table
1097 else:
1098 if '_index' in dir(yo):
1099 del yo._table
1100 del yo._index
1101 yo._meta.inmemory = True
1102 if yo._meta.ondisk:
1103 yo._meta.dfd.close()
1104 yo._meta.dfd = None
1105 if '_index' in dir(yo):
1106 yo._read_only = True
1107 else:
1108 yo._meta_only = True
1109 if yo._meta.mfd is not None:
1110 if not keep_memos:
1111 yo._meta.ignorememos = True
1112 else:
1113 memo_fields = []
1114 for field in yo.field_names:
1115 if yo.is_memotype(field):
1116 memo_fields.append(field)
1117 for record in yo:
1118 for field in memo_fields:
1119 record[field] = record[field]
1120 yo._meta.mfd.close()
1121 yo._meta.mfd = None
1122 yo._meta.ondisk = False
1124 "creates a backup table -- ignored if memory table"
1125 if yo.filename.startswith(':memory:'):
1126 return
1127 if new_name is None:
1128 new_name = os.path.splitext(yo.filename)[0] + '_backup'
1129 else:
1130 overwrite = True
1131 if overwrite or not yo._backed_up:
1132 bkup = open(new_name, 'wb')
1133 try:
1134 yo._meta.dfd.seek(0)
1135 copyfileobj(yo._meta.dfd, bkup)
1136 yo._backed_up = True
1137 finally:
1138 bkup.close()
1140 "returns current logical record, or its index"
1141 if yo._meta.current < 0:
1142 raise Bof()
1143 elif yo._meta.current >= yo._meta.header.record_count:
1144 raise Eof()
1145 if index:
1146 return yo._meta.current
1147 return yo._table[yo._index[yo._meta.current]]
1149 """removes field(s) from the table
1150 creates backup files with _backup appended to the file name,
1151 then modifies current structure"""
1152 doomed = yo._list_fields(doomed)
1153 for victim in doomed:
1154 if victim not in yo._meta.fields:
1155 raise DbfError("field %s not in table -- delete aborted" % victim)
1156 all_records = [record for record in yo]
1157 yo.create_backup()
1158 for victim in doomed:
1159 yo._meta.fields.pop(yo._meta.fields.index(victim))
1160 start = yo._meta[victim]['start']
1161 end = yo._meta[victim]['end']
1162 for record in yo:
1163 record._data = record._data[:start] + record._data[end:]
1164 for field in yo._meta.fields:
1165 if yo._meta[field]['start'] == end:
1166 end = yo._meta[field]['end']
1167 yo._meta[field]['start'] = start
1168 yo._meta[field]['end'] = start + yo._meta[field]['length']
1169 start = yo._meta[field]['end']
1170 yo._buildHeaderFields()
1171 yo._updateDisk()
def export(yo, records=None, filename=None, field_specs=None, format='csv', header=True):
1183 """writes the table using CSV or tab-delimited format, using the filename
1184 given if specified, otherwise the table name"""
1185 if filename is None:
1186 filename = yo.filename
1187 field_specs = yo._list_fields(field_specs)
1188 if records is None:
1189 records = yo
1190 format = format.lower()
1191 if format not in ('csv', 'tab'):
1192 raise DbfError("export format: csv or tab, not %s" % format)
1193 base, ext = os.path.splitext(filename)
1194 if ext.lower() in ('', '.dbf'):
1195 filename = base + "." + format
1196 fd = open(filename, 'wb')
1197 try:
1198 if format == 'csv':
1199 csvfile = csv.writer(fd, dialect='dbf')
1200 if header:
1201 csvfile.writerow(field_specs)
1202 for record in records:
1203 fields = []
1204 for fieldname in field_specs:
1205 fields.append(record[fieldname])
1206 csvfile.writerow(fields)
1207 else:
1208 if header:
1209 fd.write('\t'.join(field_specs) + '\n')
1210 for record in records:
1211 fields = []
1212 for fieldname in field_specs:
1213 fields.append(str(record[fieldname]))
1214 fd.write('\t'.join(fields) + '\n')
1215 finally:
1216 fd.close()
1217 fd = None
1218 return len(records)
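# export() usage sketch (file and field names are hypothetical):
#
#     table.export()                                   # <tablename>.csv, all fields, header row
#     table.export(filename='subset', field_specs=['name', 'amount'], format='tab')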
1220 "returns record at physical_index[recno]"
1221 return yo._table[recno]
def goto(yo, criteria):
1223 """changes the record pointer to the first matching (non-deleted) record
1224 criteria should be either a tuple of tuple(value, field, func) triples,
1225 or an integer to go to"""
1226 if isinstance(criteria, int):
1227 if not -yo._meta.header.record_count <= criteria < yo._meta.header.record_count:
1228 raise IndexError("Record %d does not exist" % criteria)
1229 if criteria < 0:
1230 criteria += yo._meta.header.record_count
1231 yo._meta.current = criteria
1232 return yo.current()
1233 criteria = _normalize_tuples(tuples=criteria, length=3, filler=[_nop])
1234 specs = tuple([(field, func) for value, field, func in criteria])
1235 match = tuple([value for value, field, func in criteria])
1236 current = yo.current(index=True)
1237 matchlen = len(match)
1238 while not yo.Eof():
1239 record = yo.current()
1240 results = record(*specs)
1241 if results == match:
1242 return record
1243 return yo.goto(current)
def index(yo, sort=None, reverse=False):
1265 "returns True if name is a memo type field"
1266 return yo._meta[name]['type'] in yo._memotypes
def new(yo, filename, _field_specs=None):
1268 "returns a new table of the same type"
1269 if _field_specs is None:
1270 _field_specs = yo.structure()
1271 if filename != ':memory:':
1272 path, name = os.path.split(filename)
1273 if path == "":
1274 filename = os.path.join(os.path.split(yo.filename)[0], filename)
1275 elif name == "":
1276 filename = os.path.join(path, os.path.split(yo.filename)[1])
1277 return yo.__class__(filename, _field_specs)
1279 "set record pointer to next (non-deleted) record, and return it"
1280 if yo.eof():
1281 raise Eof()
1282 return yo.current()
def pack(yo, _pack=True):
1284 "physically removes all deleted records"
1285 newtable = []
1286 newindex = []
1287 i = 0
1288 for record in yo._table:
1289 if record.has_been_deleted and _pack:
1290 record._recnum = -1
1291 else:
1292 record._recnum = i
1293 newtable.append(record)
1294 newindex.append(i)
1295 i += 1
1296 yo._table = newtable
1297 yo._index = newindex
1298 yo._meta.header.record_count = i
1299 yo._current = -1
1300 yo._meta.index = ''
1301 yo._updateDisk()
1303 "set record pointer to previous (non-deleted) record, and return it"
1304 if yo.bof():
1305 raise Bof
1306 return yo.current()
def query(yo, sql=None, python=None):
1308 "uses exec to perform python queries on the table"
1309 if python is None:
1310 raise DbfError("query: python parameter must be specified")
1311 possible = DbfList(desc="%s --> %s" % (yo.filename, python))
1312 query_result = {}
1313 select = 'query_result["keep"] = %s' % python
1314 g = {}
1315 for record in yo:
1316 query_result['keep'] = False
1317 g['query_result'] = query_result
1318 exec select in g, record
1319 if query_result['keep']:
1320 possible.append(yo, record)
1321 return possible
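# query() usage sketch -- the python expression is evaluated once per record,
# with the record supplying the field names (field names are hypothetical):
#
#     overdue = table.query(python="amount > 0 and paid == False")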
1323 "renames an existing field"
1324 if not oldname in yo._meta.fields:
1325 raise DbfError("field --%s-- does not exist -- cannot rename it." % oldname)
1326 if newname[0] == '_' or newname[0].isdigit() or not newname.replace('_','').isalnum():
1327 raise DbfError("field names cannot start with _ or digits, and can only contain the _, letters, and digits")
1328 newname = newname.lower()
1329 if newname in yo._meta.fields:
1330 raise DbfError("field --%s-- already exists" % newname)
1331 if len(newname) > 10:
1332 raise DbfError("maximum field name length is 10. '%s' is %d characters long." % (newname, len(newname)))
1333 yo._meta[newname] = yo._meta[oldname]
1334 yo._meta.fields[yo._meta.fields.index(oldname)] = newname
1335 yo._buildHeaderFields()
1336 yo._updateDisk(headeronly=True)
def search(yo, match, fuzzy=None, indices=False):
"""searches using a binary algorithm
looking for records that match the criteria in match, which is a tuple
with a data item per ordered field. table must be sorted. if indices,
returns a list of records' indices from the current sort order.
"""
1343 if yo._meta.index is None:
1344 raise DbfError('table must be indexed to use Search')
1345 matchlen = len(match)
1346 if fuzzy:
1347 matchlen -= 1
1348 fuzzy_match = match[-1]
1349 fuzzy_field = yo._meta.index[matchlen][0]
1350 match = match[:-1]
1351 records = DbfList(desc="%s --> search: index=%s, match=%s, fuzzy=%s(%s))" % (yo.filename, yo.index(), match, fuzzy.__name__, fuzzy_match))
1352 else:
1353 records = DbfList(desc="%s --> search: index=%s, match=%s)" % (yo.filename, yo.index(), match))
1354 if indices:
1355 records = []
1356 if not isinstance(match, tuple):
1357 match = tuple(match)
1358 segment = len(yo)
1359 current = 0
1360 toosoon = True
1361 notFound = True
1362 while notFound:
1363 segment = segment // 2
1364 if toosoon:
1365 current += segment
1366 else:
1367 current -= segment
1368 if current % 2:
1369 segment += 1
1370 if current == len(yo) or segment == 0:
1371 break
1372 value = yo._meta.orderresults[yo[current].record_number][:matchlen]
1373 if value < match:
1374 toosoon = True
1375 elif value > match:
1376 toosoon = False
1377 else:
1378 notFound = False
1379 break
1380 if current == 0:
1381 break
1382 if notFound:
1383 return records
1384 while current > 0:
1385 current -= 1
1386 value = yo._meta.orderresults[yo[current].record_number][:matchlen]
1387 if value != match:
1388 current += 1
1389 break
1390 while True:
1391 value = yo._meta.orderresults[yo[current].record_number][:matchlen]
1392 if value != match:
1393 break
1394 if yo.use_deleted or not yo[current].has_been_deleted:
1395 if indices:
1396 records.append(current)
1397 else:
1398 records.append(yo, yo[current])
1399 current += 1
1400 if current == len(yo):
1401 break
1402 if fuzzy:
1403 if indices:
1404 records = [rec for rec in records if fuzzy(yo[rec][fuzzy_field]) == fuzzy_match]
1405 else:
1406 records[:] = [rec for rec in records if fuzzy(rec[fuzzy_field]) == fuzzy_match]
1407 return records
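# search() usage sketch: the table must first be given a sort order with
# index(); match supplies one value per ordered field (sort spec and field
# names are illustrative only):
#
#     table.index(sort=...)                                # establish the sort order
#     hits = table.search(match=('Doe',))                  # DbfList of matching records
#     spots = table.search(match=('Doe',), indices=True)   # positions instead of records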
def size(yo, field):
1409 "returns size of field as a tuple of (length, decimals)"
1410 if field in yo:
1411 return (yo._meta[field]['length'], yo._meta[field]['decimals'])
1412 raise DbfError("%s is not a field in %s" % (field, yo.filename))
1414 """return list of fields suitable for creating same table layout
1415 @param fields: list of fields or None for all fields"""
1416 field_specs = []
1417 fields = yo._list_fields(fields)
1418 try:
1419 for name in fields:
1420 field_specs.append(yo._fieldLayout(yo.field_names.index(name)))
1421 except ValueError:
1422 raise DbfError("field --%s-- does not exist" % name)
1423 return field_specs
def top(yo, get_record=False):
"""sets record pointer to top of table; if get_record, seeks to and returns first (non-deleted) record
DbfError if table is empty
Bof if all records are deleted and use_deleted is False"""
1428 yo._meta.current = -1
1429 if get_record:
1430 try:
1431 return yo.next()
1432 except Eof:
1433 yo._meta.current = -1
1434 raise Bof()
def type(yo, field):
1436 "returns type of field"
1437 if field in yo:
1438 return yo._meta[field]['type']
1439 raise DbfError("%s is not a field in %s" % (field, yo.filename))
def zap(yo, areyousure=False):
1441 """removes all records from table -- this cannot be undone!
1442 areyousure must be True, else error is raised"""
1443 if areyousure:
1444 yo._table = []
1445 yo._index = []
1446 yo._meta.header.record_count = 0
1447 yo._current = -1
1448 yo._meta.index = ''
1449 yo._updateDisk()
1450 else:
1451 raise DbfError("You must say you are sure to wipe the table")
1452
1454 """Provides an interface for working with dBase III tables."""
1455 _version = 'dBase III Plus'
1456 _versionabbv = 'db3'
1457 _fieldtypes = {
1458 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter},
1459 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate},
1460 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical},
1461 'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
1462 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addNumeric} }
1463 _memoext = '.dbt'
1464 _memotypes = ('M',)
1465 _memoClass = _Db3Memo
1466 _yesMemoMask = '\x80'
1467 _noMemoMask = '\x7f'
1468 _fixed_fields = ('D','L','M')
1469 _variable_fields = ('C','N')
1470 _character_fields = ('C','M')
1471 _decimal_fields = ('N',)
1472 _numeric_fields = ('N',)
1473 _dbfTableHeader = array('c', '\x00' * 32)
1474 _dbfTableHeader[0] = '\x03'
1475 _dbfTableHeader[8:10] = array('c', io.packShortInt(33))
1476 _dbfTableHeader[10] = '\x01'
1477 _dbfTableHeader[29] = '\x03'
1478 _dbfTableHeader = _dbfTableHeader.tostring()
1479 _dbfTableHeaderExtra = ''
1480 _supported_tables = ['\x03', '\x83']
1481 _read_only = False
1482 _meta_only = False
1483 _use_deleted = True
1485 "dBase III specific"
1486 if yo._meta.header.version == '\x83':
1487 try:
1488 yo._meta.memo = yo._memoClass(yo._meta)
1489 except:
1490 yo._meta.dfd.close()
1491 yo._meta.dfd = None
1492 raise
1493 if not yo._meta.ignorememos:
1494 for field in yo._meta.fields:
1495 if yo._meta[field]['type'] in yo._memotypes:
1496 if yo._meta.header.version != '\x83':
1497 yo._meta.dfd.close()
1498 yo._meta.dfd = None
1499 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos")
1500 elif not os.path.exists(yo._meta.memoname):
1501 yo._meta.dfd.close()
1502 yo._meta.dfd = None
1503 raise DbfError("Table structure corrupt: memo fields exist without memo file")
1504 break
1506 "builds the FieldList of names, types, and descriptions"
1507 offset = 1
1508 fieldsdef = yo._meta.header.fields
1509 if len(fieldsdef) % 32 != 0:
1510 raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
1511 if len(fieldsdef) // 32 != yo.field_count:
1512 raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32))
1513 for i in range(yo.field_count):
1514 fieldblock = fieldsdef[i*32:(i+1)*32]
1515 name = io.unpackStr(fieldblock[:11])
1516 type = fieldblock[11]
1517 if not type in yo._meta.fieldtypes:
1518 raise DbfError("Unknown field type: %s" % type)
1519 start = offset
1520 length = ord(fieldblock[16])
1521 offset += length
1522 end = start + length
1523 decimals = ord(fieldblock[17])
1524 flags = ord(fieldblock[18])
1525 yo._meta.fields.append(name)
1526 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
1528 'Provides an interface for working with FoxPro 2 tables'
1529 _version = 'Foxpro'
1530 _versionabbv = 'fp'
1531 _fieldtypes = {
1532 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter},
1533 'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric},
1534 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric},
1535 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical},
1536 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate},
1537 'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addVfpMemo},
1538 'G' : {'Type':'General', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
1539 'P' : {'Type':'Picture', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
1540 '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} }
1541 _memoext = '.fpt'
1542 _memotypes = ('G','M','P')
1543 _memoClass = _VfpMemo
1544 _yesMemoMask = '\xf5'
1545 _noMemoMask = '\x03'
1546 _fixed_fields = ('B','D','G','I','L','M','P','T','Y')
1547 _variable_fields = ('C','F','N')
1548 _character_fields = ('C','M')
1549 _decimal_fields = ('F','N')
1550 _numeric_fields = ('B','F','I','N','Y')
1551 _supported_tables = ('\x03', '\xf5')
1552 _dbfTableHeader = array('c', '\x00' * 32)
1553 _dbfTableHeader[0] = '\x30'
1554 _dbfTableHeader[8:10] = array('c', io.packShortInt(33+263))
1555 _dbfTableHeader[10] = '\x01'
1556 _dbfTableHeader[29] = '\x03'
1557 _dbfTableHeader = _dbfTableHeader.tostring()
1558 _dbfTableHeaderExtra = '\x00' * 263
1559 _use_deleted = True
1561 if os.path.exists(yo._meta.memoname):
1562 try:
1563 yo._meta.memo = yo._memoClass(yo._meta)
1564 except:
1565 yo._meta.dfd.close()
1566 yo._meta.dfd = None
1567 raise
1568 if not yo._meta.ignorememos:
1569 for field in yo._meta.fields:
1570 if yo._meta[field]['type'] in yo._memotypes:
1571 if not os.path.exists(yo._meta.memoname):
1572 yo._meta.dfd.close()
1573 yo._meta.dfd = None
1574 raise DbfError("Table structure corrupt: memo fields exist without memo file")
1575 break
1577 "builds the FieldList of names, types, and descriptions"
1578 offset = 1
1579 fieldsdef = yo._meta.header.fields
1580 for i in range(yo.field_count):
1581 fieldblock = fieldsdef[i*32:(i+1)*32]
1582 name = io.unpackStr(fieldblock[:11])
1583 type = fieldblock[11]
1584 if not type in yo._meta.fieldtypes:
1585 raise DbfError("Unknown field type: %s" % type)
1586 elif type == '0':
1587 return
1588 start = io.unpackLongInt(fieldblock[12:16])
1589 length = ord(fieldblock[16])
1590 offset += length
1591 end = start + length
1592 decimals = ord(fieldblock[17])
1593 flags = ord(fieldblock[18])
1594 yo._meta.fields.append(name)
1595 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
'Provides an interface for working with Visual FoxPro 6 tables'
_version = 'Visual FoxPro 6'
1598 _versionabbv = 'vfp'
1599 _fieldtypes = {
1600 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter},
1601 'Y' : {'Type':'Currency', 'Retrieve':io.retrieveCurrency, 'Update':io.updateCurrency, 'Blank':Decimal(), 'Init':io.addVfpCurrency},
1602 'B' : {'Type':'Double', 'Retrieve':io.retrieveDouble, 'Update':io.updateDouble, 'Blank':float, 'Init':io.addVfpDouble},
1603 'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric},
1604 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric},
1605 'I' : {'Type':'Integer', 'Retrieve':io.retrieveInteger, 'Update':io.updateInteger, 'Blank':int, 'Init':io.addVfpInteger},
1606 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical},
1607 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate},
1608 'T' : {'Type':'DateTime', 'Retrieve':io.retrieveVfpDateTime, 'Update':io.updateVfpDateTime, 'Blank':DateTime.now, 'Init':io.addVfpDateTime},
1609 'M' : {'Type':'Memo', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo},
1610 'G' : {'Type':'General', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo},
1611 'P' : {'Type':'Picture', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo},
1612 '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} }
1613 _memoext = '.fpt'
1614 _memotypes = ('G','M','P')
1615 _memoClass = _VfpMemo
1616 _yesMemoMask = '\x30'
1617 _noMemoMask = '\x30'
1618 _fixed_fields = ('B','D','G','I','L','M','P','T','Y')
1619 _variable_fields = ('C','F','N')
1620 _character_fields = ('C','M')
1621 _decimal_fields = ('F','N')
1622 _numeric_fields = ('B','F','I','N','Y')
1623 _supported_tables = ('\x30',)
1624 _dbfTableHeader = array('c', '\x00' * 32)
1625 _dbfTableHeader[0] = '\x30'
1626 _dbfTableHeader[8:10] = array('c', io.packShortInt(33+263))
1627 _dbfTableHeader[10] = '\x01'
1628 _dbfTableHeader[29] = '\x03'
1629 _dbfTableHeader = _dbfTableHeader.tostring()
1630 _dbfTableHeaderExtra = '\x00' * 263
1631 _use_deleted = True
1633 if os.path.exists(yo._meta.memoname):
1634 try:
1635 yo._meta.memo = yo._memoClass(yo._meta)
1636 except:
1637 yo._meta.dfd.close()
1638 yo._meta.dfd = None
1639 raise
1640 if not yo._meta.ignorememos:
1641 for field in yo._meta.fields:
1642 if yo._meta[field]['type'] in yo._memotypes:
1643 if not os.path.exists(yo._meta.memoname):
1644 yo._meta.dfd.close()
1645 yo._meta.dfd = None
1646 raise DbfError("Table structure corrupt: memo fields exist without memo file")
1647 break
1649 "builds the FieldList of names, types, and descriptions"
1650 offset = 1
1651 fieldsdef = yo._meta.header.fields
1652 for i in range(yo.field_count):
1653 fieldblock = fieldsdef[i*32:(i+1)*32]
1654 name = io.unpackStr(fieldblock[:11])
1655 type = fieldblock[11]
1656 if not type in yo._meta.fieldtypes:
1657 raise DbfError("Unknown field type: %s" % type)
1658 elif type == '0':
1659 return
1660 start = io.unpackLongInt(fieldblock[12:16])
1661 length = ord(fieldblock[16])
1662 offset += length
1663 end = start + length
1664 decimals = ord(fieldblock[17])
1665 flags = ord(fieldblock[18])
1666 yo._meta.fields.append(name)
1667 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
1669 "list of Dbf records, with set-like behavior"
1670 _desc = ''
def __init__(yo, table=None, new_records=None, desc=None):
1672 yo._list = []
1673 yo._set = set()
1674 yo._current = -1
1675 if isinstance(new_records, DbfList):
1676 yo._list = new_records._list
1677 for item in yo._list:
1678 yo._set.add(item)
1679 yo._current = 0
1680 elif new_records is not None:
1681 for record in new_records:
1682 item = (table, record.record_number)
1683 if item not in yo._set:
1684 yo._set.add(item)
1685 yo._list.append(item)
1686 yo._current = 0
1687 if desc is not None:
1688 yo._desc = desc
1690 if isinstance(other, DbfList):
1691 result = DbfList()
1692 result._set = set(yo._set)
1693 result._list[:] = yo._list[:]
1694 for item in other._list:
1695 if item not in result._set:
1696 result._set.add(item)
1697 result._list.append(item)
1698 result._current = 0 if result else -1
1699 return result
1700 return NotImplemented
1702 if isinstance(key, int):
1703 loc = yo._current - len(yo._list) + 1
item = yo._list.pop(key)
1705 yo._set.remove(item)
1706 if loc > 0:
1707 yo._current = len(yo._list)
1708 elif loc == 0 or yo._current >= len(yo._list):
1709 yo._current = len(yo._list) - 1
1710 elif isinstance(key, slice):
1711 loc = yo._current - len(yo._list) + 1
1712 yo._set.difference_update(yo._list[key])
1713 yo._list.__delitem__(key)
1715 if loc > 0:
1716 yo._current = len(yo._list)
1717 elif loc == 0 or yo._current >= len(yo._list):
1718 yo._current = len(yo._list) - 1
1719 else:
1720 raise TypeError
1722 if isinstance(key, int):
1723 count = len(yo._list)
1724 if not -count <= key < count:
1725 raise IndexError("Record %d is not in list." % key)
1726 return yo._get_record(*yo._list[key])
1727 elif isinstance(key, slice):
1728 result = DbfList()
1729 result._list[:] = yo._list[key]
result._set = set(result._list)
1731 result._current = 0 if result else -1
1732 return result
1733 else:
1734 raise TypeError
1736 return (table.get_record(recno) for table, recno in yo._list)
1738 return len(yo._list)
1744 if yo._desc:
1745 return "DbfList(%s - %d records)" % (yo._desc, len(yo._list))
1746 else:
1747 return "DbfList(%d records)" % len(yo._list)
1749 if isinstance(other, DbfList):
1750 result = DbfList()
1751 result._list[:] = other._list[:]
1752 result._set = set(other._set)
1753 lost = set()
1754 for item in yo._list:
1755 if item in result._list:
1756 result._set.remove(item)
1757 lost.add(item)
1758 result._list = [item for item in result._list if item not in lost]
1759 result._current = 0 if result else -1
1760 return result
1761 return NotImplemented
1763 if isinstance(other, DbfList):
1764 result = DbfList()
1765 result._list[:] = yo._list[:]
1766 result._set = set(yo._set)
1767 lost = set()
1768 for item in other._list:
1769 if item in result._set:
1770 result._set.remove(item)
1771 lost.add(item)
1772 result._list = [item for item in result._list if item not in lost]
1773 result._current = 0 if result else -1
1774 return result
1775 return NotImplemented
1777 if record is None:
1778 item = table
1779 else:
1780 item = table, record.record_number
1781 if item not in yo._set:
1782 yo._set.add(item)
1783 yo._list.append(item)
1785 if table is rec_no is None:
1786 table, rec_no = yo._list[yo._current]
1787 return table.get_record(rec_no)
def append(yo, table, new_record):
1789 yo._maybe_add(table, new_record)
1790 yo._current = len(yo._list) - 1
1792 if yo._list:
1793 yo._current = len(yo._list) - 1
return yo._get_record()
1795 raise DbfError("DbfList is empty")
1797 if yo._current < 0:
1798 raise Bof()
1799 elif yo._current == len(yo._list):
1800 raise Eof()
return yo._get_record()
def extend(yo, table=None, new_records=None):
1803 if isinstance(new_records, DbfList):
1804 for item in new_records:
1805 yo._maybe_add(item)
1806 else:
1807 for record in new_records:
1808 yo._maybe_add(table, record)
1809 yo._current = len(yo._list) - 1
def goto(yo, index_number):
1811 if yo._list:
if 0 <= index_number < len(yo._list):
yo._current = index_number
return yo._get_record()
1815 raise DbfError("index %d not in DbfList of %d records" % (index_number, len(yo._list)))
1816 raise DbfError("DbfList is empty")
def insert(yo, i, table, record):
1818 item = table, record.record_number
1819 if item not in yo._set:
1820 yo._set.add(item)
1821 yo._list.insert(i, item)
1823 if yo._current < len(yo._list):
1824 yo._current += 1
1825 if yo._current < len(yo._list):
return yo._get_record()
1827 raise Eof()
def pop(yo, index=None):
1829 loc = yo._current - len(yo._list) + 1
1830 if index is None:
1831 table, recno = yo._list.pop()
1832 yo._set.remove((table, recno))
1833 else:
1834 table, recno = yo._list.pop(index)
1835 yo._set.remove((table, recno))
1836 if loc > 0:
1837 yo._current = len(yo._list)
1838 elif loc == 0 or yo._current >= len(yo._list):
1839 yo._current = len(yo._list) - 1
return yo._get_record(table, recno)
1842 if yo._current >= 0:
1843 yo._current -= 1
1844 if yo._current > -1:
1845 return yo._get_record()
1846 raise Bof()
1850 if yo._list:
1851 yo._current = 0
1852 return yo._get_record()
1853 raise DbfError("DbfList is empty")
def sort(yo, key=None, reverse=None):
1865 csv.register_dialect('dbf', DbfCsv)
1866
def _nop(value):
1868 "returns parameter unchanged"
1869 return value
1871 "ensures each tuple is the same length, using filler[-missing] for the gaps"
1872 final = []
1873 for t in tuples:
1874 if len(t) < length:
1875 final.append( tuple([item for item in t] + filler[len(t)-length:]) )
1876 else:
1877 final.append(t)
1878 return tuple(final)
1880 if cp not in code_pages:
1881 for code_page in sorted(code_pages.keys()):
1882 sd, ld = code_pages[code_page]
1883 if cp == sd or cp == ld:
1884 if sd is None:
1885 raise DbfError("Unsupported codepage: %s" % ld)
1886 cp = code_page
1887 break
1888 else:
1889 raise DbfError("Unsupported codepage: %s" % cp)
1890 sd, ld = code_pages[cp]
1891 return cp, sd, ld
def ascii(new_setting=None):
def codepage(cp=None):
1900 "get/set default codepage for any new tables"
1901 global default_codepage
1902 cp, sd, ld = _codepage_lookup(cp or default_codepage)
1903 default_codepage = sd
1904 return "%s (LDID: 0x%02x - %s)" % (sd, ord(cp), ld)
_version = 'dBase IV w/memos (non-functional)'
1913 _versionabbv = 'db4'
1914 _fieldtypes = {
1915 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter},
1916 'Y' : {'Type':'Currency', 'Retrieve':io.retrieveCurrency, 'Update':io.updateCurrency, 'Blank':Decimal(), 'Init':io.addVfpCurrency},
1917 'B' : {'Type':'Double', 'Retrieve':io.retrieveDouble, 'Update':io.updateDouble, 'Blank':float, 'Init':io.addVfpDouble},
1918 'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric},
1919 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric},
1920 'I' : {'Type':'Integer', 'Retrieve':io.retrieveInteger, 'Update':io.updateInteger, 'Blank':int, 'Init':io.addVfpInteger},
1921 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical},
1922 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate},
1923 'T' : {'Type':'DateTime', 'Retrieve':io.retrieveVfpDateTime, 'Update':io.updateVfpDateTime, 'Blank':DateTime.now, 'Init':io.addVfpDateTime},
1924 'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
1925 'G' : {'Type':'General', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
1926 'P' : {'Type':'Picture', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
1927 '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} }
1928 _memoext = '.dbt'
1929 _memotypes = ('G','M','P')
1930 _memoClass = _VfpMemo
1931 _yesMemoMask = '\x8b'
1932 _noMemoMask = '\x04'
1933 _fixed_fields = ('B','D','G','I','L','M','P','T','Y')
1934 _variable_fields = ('C','F','N')
1935 _character_fields = ('C','M')
1936 _decimal_fields = ('F','N')
1937 _numeric_fields = ('B','F','I','N','Y')
1938 _supported_tables = ('\x04', '\x8b')
1939 _dbfTableHeader = ['\x00'] * 32
1940 _dbfTableHeader[0] = '\x8b'
1941 _dbfTableHeader[10] = '\x01'
1942 _dbfTableHeader[29] = '\x03'
1943 _dbfTableHeader = ''.join(_dbfTableHeader)
1944 _dbfTableHeaderExtra = ''
1945 _use_deleted = True
"dBase IV specific"
1948 if yo._meta.header.version == '\x8b':
1949 try:
1950 yo._meta.memo = yo._memoClass(yo._meta)
1951 except:
1952 yo._meta.dfd.close()
1953 yo._meta.dfd = None
1954 raise
1955 if not yo._meta.ignorememos:
1956 for field in yo._meta.fields:
1957 if yo._meta[field]['type'] in yo._memotypes:
1958 if yo._meta.header.version != '\x8b':
1959 yo._meta.dfd.close()
1960 yo._meta.dfd = None
1961 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos")
1962 elif not os.path.exists(yo._meta.memoname):
1963 yo._meta.dfd.close()
1964 yo._meta.dfd = None
1965 raise DbfError("Table structure corrupt: memo fields exist without memo file")
1966 break
1967