Package dbf :: Module ver_32

Source Code for Module dbf.ver_32

   1  """ 
   2  ========= 
   3  Copyright 
   4  ========= 
   5      - Portions copyright: 2008-2012 Ad-Mail, Inc -- All rights reserved. 
   6      - Portions copyright: 2012-2013 Ethan Furman -- All rights reserved. 
   7      - Author: Ethan Furman 
   8      - Contact: ethan@stoneleaf.us 
   9   
  10  Redistribution and use in source and binary forms, with or without 
  11  modification, are permitted provided that the following conditions are met: 
  12      - Redistributions of source code must retain the above copyright 
  13        notice, this list of conditions and the following disclaimer. 
  14      - Redistributions in binary form must reproduce the above copyright 
  15        notice, this list of conditions and the following disclaimer in the 
  16        documentation and/or other materials provided with the distribution. 
  17      - Neither the name of Ad-Mail, Inc nor the 
  18        names of its contributors may be used to endorse or promote products 
  19        derived from this software without specific prior written permission. 
  20   
  21  THIS SOFTWARE IS PROVIDED ''AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, 
  22  INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY 
  23  AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL 
  24  ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 
  25  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
  26  PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 
  27  OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
  28  WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 
  29  OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 
  30  ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
  31  """ 
  32   
  33  import codecs 
  34  import collections 
  35  import csv 
  36  import datetime 
  37  import os 
  38  import struct 
  39  import sys 
  40  import time 
  41  import weakref 
  42   
  43  from array import array 
  44  from bisect import bisect_left, bisect_right 
  45  from collections import defaultdict 
  46  from decimal import Decimal 
  47  from enum import Enum 
  48  from glob import glob 
  49  from math import floor 
  50  from os import SEEK_SET, SEEK_CUR, SEEK_END 
  51  import types 
  52   
  53  module = globals() 
  54   
  55  NoneType = type(None) 
  56   
  57  # Flag for behavior if bad data is encountered in a logical field 
  58  # Return None if True, else raise BadDataError 
  59  LOGICAL_BAD_IS_NONE = True 
  60   
  61  # treat non-unicode data as ... 
  62  input_decoding = 'ascii' 
  63   
  64  # if no codepage specified on dbf creation, use this 
  65  default_codepage = 'ascii' 
  66   
  67  # default format if none specified 
  68  default_type = 'db3' 
  69   
  70  temp_dir = os.environ.get("DBF_TEMP") or os.environ.get("TMP") or os.environ.get("TEMP") or "" 
  71   
  72  # signature:_meta of template records 
  73  _Template_Records = dict() 
  74   
  75  # dec jan feb mar apr may jun jul aug sep oct nov dec jan 
  76  days_per_month = [31, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31, 31] 
  77  days_per_leap_month = [31, 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31, 31] 
  78   
  79  class Enum(Enum): 
  80      "adds 'export_to()' function" 
  81      @classmethod 
  82      def export_to(cls, namespace): 
  83          namespace.update(cls.__members__) 
  84   
  85  class IntEnum(int, Enum): 
  86      pass 
  87   
  88  class HexEnum(IntEnum): 
  89      "repr is in hex" 
  90      def __repr__(self): 
  91          return '<%s.%s: %#02x>' % ( 
  92                  self.__class__.__name__, 
  93                  self._name_, 
  94                  self._value_, 
  95                  ) 
  96   
97 -class ValueAliasEnum(Enum):
98 "allows value aliases (not name aliases)"
99 - def __new__(cls, int_value, *value_aliases):
100 obj = object.__new__(cls) 101 obj._value_ = int_value 102 for alias in value_aliases: 103 cls._value2member_map_[alias] = obj 104 return obj
105
106 -class AutoEnum(IntEnum):
107 """ 108 Automatically numbers enum members starting from __number__ (defaults to 0). 109 110 Includes support for a custom docstring per member. 111 """ 112 __number__ = 0 113
 114      def __new__(cls, *args): 
 115          """Ignores arguments (will be handled in __init__).""" 
 116          value = cls.__number__ 
 117          cls.__number__ += 1 
 118          obj = int.__new__(cls, value) 
 119          obj._value_ = value 
 120          return obj 
121
122 - def __init__(self, *args):
123 """Can handle 0 or 1 argument; more requires a custom __init__. 124 125 0 = auto-number w/o docstring 126 1 = auto-number w/ docstring 127 2+ = needs custom __init__ 128 129 """ 130 if len(args) == 1 and isinstance(args[0], str): 131 self.__doc__ = args[0] 132 elif args: 133 raise TypeError('%s not dealt with -- need custom __init__' % (args,))
134
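
An illustrative sketch of how AutoEnum is meant to be used, mirroring the Field and DbfLocation definitions further down; the Color class here is hypothetical and not part of the module.

    from dbf.ver_32 import AutoEnum

    class Color(AutoEnum):
        __order__ = 'RED GREEN BLUE'
        RED = "first member, auto-numbered 0"
        GREEN = "second member, auto-numbered 1"
        BLUE = "third member, auto-numbered 2"

    print(int(Color.GREEN))      # 1
    print(Color.BLUE.__doc__)    # 'third member, auto-numbered 2'
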
135 136 -class IsoDay(IntEnum):
137 MONDAY = 1 138 TUESDAY = 2 139 WEDNESDAY = 3 140 THURSDAY = 4 141 FRIDAY = 5 142 SATURDAY = 6 143 SUNDAY = 7 144
145 - def next_delta(self, day):
146 """Return number of days needed to get from self to day.""" 147 if self == day: 148 return 7 149 delta = day - self 150 if delta < 0: 151 delta += 7 152 return delta
153
154 - def last_delta(self, day):
155 """Return number of days needed to get from self to day.""" 156 if self == day: 157 return -7 158 delta = day - self 159 if delta > 0: 160 delta -= 7 161 return delta
162
163 -class RelativeDay(Enum):
164 LAST_SUNDAY = () 165 LAST_SATURDAY = () 166 LAST_FRIDAY = () 167 LAST_THURSDAY = () 168 LAST_WEDNESDAY = () 169 LAST_TUESDAY = () 170 LAST_MONDAY = () 171 NEXT_MONDAY = () 172 NEXT_TUESDAY = () 173 NEXT_WEDNESDAY = () 174 NEXT_THURSDAY = () 175 NEXT_FRIDAY = () 176 NEXT_SATURDAY = () 177 NEXT_SUNDAY = () 178
 179      def __new__(cls): 
 180          result = object.__new__(cls) 
 181          result._value_ = len(cls.__members__) + 1 
 182          return result 
183
184 - def days_from(self, day):
185 target = IsoDay[self.name[5:]] 186 if self.name[:4] == 'LAST': 187 return day.last_delta(target) 188 return day.next_delta(target)
189 RelativeDay.export_to(module)
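
A small sketch of the day arithmetic these two enums provide, assuming dbf.ver_32 is importable; export_to(module) also makes the member names available as module-level constants.

    from dbf.ver_32 import IsoDay, RelativeDay

    today = IsoDay.FRIDAY
    print(today.next_delta(IsoDay.MONDAY))           # 3  -- days until the next Monday
    print(today.last_delta(IsoDay.MONDAY))           # -4 -- days back to the last Monday
    print(RelativeDay.NEXT_MONDAY.days_from(today))  # 3
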
190 191 -class IsoMonth(IntEnum):
192 JANUARY = 1 193 FEBRUARY = 2 194 MARCH = 3 195 APRIL = 4 196 MAY = 5 197 JUNE = 6 198 JULY = 7 199 AUGUST = 8 200 SEPTEMBER = 9 201 OCTOBER = 10 202 NOVEMBER = 11 203 DECEMBER = 12 204
205 - def next_delta(self, month):
206 """Return number of months needed to get from self to month.""" 207 if self == month: 208 return 12 209 delta = month - self 210 if delta < 0: 211 delta += 12 212 return delta
213
214 - def last_delta(self, month):
215 """Return number of months needed to get from self to month.""" 216 if self == month: 217 return -12 218 delta = month - self 219 if delta > 0: 220 delta -= 12 221 return delta
222
223 -class RelativeMonth(Enum):
224 LAST_DECEMBER = () 225 LAST_NOVEMBER = () 226 LAST_OCTOBER = () 227 LAST_SEPTEMBER = () 228 LAST_AUGUST = () 229 LAST_JULY = () 230 LAST_JUNE = () 231 LAST_MAY = () 232 LAST_APRIL = () 233 LAST_MARCH= () 234 LAST_FEBRUARY = () 235 LAST_JANUARY = () 236 NEXT_JANUARY = () 237 NEXT_FEBRUARY = () 238 NEXT_MARCH = () 239 NEXT_APRIL = () 240 NEXT_MAY = () 241 NEXT_JUNE = () 242 NEXT_JULY = () 243 NEXT_AUGUST = () 244 NEXT_SEPTEMBER = () 245 NEXT_OCTOBER = () 246 NEXT_NOVEMBER = () 247 NEXT_DECEMBER = () 248
 249      def __new__(cls): 
 250          result = object.__new__(cls) 
 251          result._value_ = len(cls.__members__) + 1 
 252          return result 
253
254 - def months_from(self, month):
255 target = IsoMonth[self.name[5:]] 256 if self.name[:4] == 'LAST': 257 return month.last_delta(target) 258 return month.next_delta(target)
259 RelativeMonth.export_to(module)
260 261 -def is_leapyear(year):
262 if year % 400 == 0: 263 return True 264 elif year % 100 == 0: 265 return False 266 elif year % 4 == 0: 267 return True 268 else: 269 return False
270
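
A brief usage sketch of the leap-year helper and the month-length tables above (illustrative only; it assumes this module is importable as dbf.ver_32). The tables are indexed by month number, with December and January repeated at positions 0 and 13 so neighbouring months can be reached without wrapping.

    from dbf.ver_32 import is_leapyear, days_per_month, days_per_leap_month

    year, month = 2000, 2
    # pick the right table for the year, then index it by month number (1-12)
    table = (days_per_month, days_per_leap_month)[is_leapyear(year)]
    print(table[month])          # 29 -- 2000 is divisible by 400, so it is a leap year
    print(is_leapyear(1900))     # False -- divisible by 100 but not by 400
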
271 272 # Constants 273 274 -class LatinByte(HexEnum):
275 NULL = 0x00 276 LF = 0x0a 277 CR = 0x0d 278 EOF = 0x1a 279 ESC = 0x1b 280 SPACE = 0x20 281 ASTERISK = 0x2a
282 LatinByte.export_to(module)
283 284 -class FieldType(IntEnum):
285 - def __new__(cls, char):
286 int_value = ord(char) 287 obj = int.__new__(cls, int_value) 288 obj._value_ = int_value 289 obj.symbol = char.upper() 290 for alias in ( 291 char.lower(), 292 char.upper(), 293 ): 294 cls._value2member_map_[alias] = obj 295 cls._value2member_map_[alias.encode('ascii')] = obj 296 return obj
297 - def __repr__(self):
298 return '<%s.%s: %r>' % ( 299 self.__class__.__name__, 300 self._name_, 301 bytes([self._value_]), 302 )
303 _NULLFLAG = '0' 304 CHAR = 'C' 305 CURRENCY = 'Y' 306 DATE = 'D' 307 DATETIME = 'T' 308 DOUBLE = 'B' 309 FLOAT = 'F' 310 GENERAL = 'G' 311 INTEGER = 'I' 312 LOGICAL = 'L' 313 MEMO = 'M' 314 NUMERIC = 'N' 315 PICTURE = 'P'
316 FieldType.export_to(module)
317 318 -class FieldFlag(IntEnum):
319 @classmethod
320 - def lookup(cls, alias):
321 alias = alias.lower() 322 if alias in ('system', ): 323 return cls.SYSTEM 324 elif alias in ('null', 'nullable'): 325 return cls.NULLABLE 326 elif alias in ('binary', 'nocptrans'): 327 return cls.BINARY 328 else: 329 raise ValueError('no FieldFlag %r' % alias)
330 @property
331 - def text(self):
332 if self is NULLABLE: 333 return 'null' 334 else: 335 return self._name_.lower()
336 SYSTEM = 0x01 337 NULLABLE = 0x02 338 BINARY = 0x04 339 NOCPTRANS = 0x04
340 #AUTOINC = 0x0c # not currently supported (not vfp 6) 341 FieldFlag.export_to(module)
342 343 -class Field(AutoEnum):
344 __order__ = 'TYPE START LENGTH END DECIMALS FLAGS CLASS EMPTY NUL' 345 TYPE = "Char, Date, Logical, etc." 346 START = "Field offset in record" 347 LENGTH = "Length of field in record" 348 END = "End of field in record (exclusive)" 349 DECIMALS = "Number of decimal places if numeric" 350 FLAGS = "System, Binary, Nullable" 351 CLASS = "python class type" 352 EMPTY = "python function for empty field" 353 NUL = "python function for null field"
354 Field.export_to(module)
355 356 -class DbfLocation(AutoEnum):
357 __order__ = 'IN_MEMORY ON_DISK' 358 IN_MEMORY = "dbf is kept in memory (disappears at program end)" 359 ON_DISK = "dbf is kept on disk"
360 DbfLocation.export_to(module)
361 362 -class DbfStatus(AutoEnum):
363 __order__ = 'CLOSED READ_ONLY READ_WRITE' 364 CLOSED = 'closed (only meta information available)' 365 READ_ONLY = 'read-only' 366 READ_WRITE = 'read-write'
367 DbfStatus.export_to(module)
368 369 -class LazyAttr:
370 """ 371 doesn't create object until actually accessed 372 """ 373
374 - def __init__(yo, func=None, doc=None):
375 yo.fget = func 376 yo.__doc__ = doc or func.__doc__
377
378 - def __call__(yo, func):
379 yo.fget = func
380
381 - def __get__(yo, instance, owner):
382 if instance is None: 383 return yo 384 return yo.fget(instance)
385
386 387 -class MutableDefault:
388 """ 389 Lives in the class, and on first access calls the supplied factory and 390 maps the result into the instance it was called on 391 """ 392
393 - def __init__(self, func):
394 self._name = func.__name__ 395 self.func = func
396
397 - def __call__(self):
398 return self
399
400 - def __get__(self, instance, owner):
401 result = self.func() 402 if instance is not None: 403 setattr(instance, self._name, result) 404 return result
405
406 - def __repr__(self):
407 result = self.func() 408 return "MutableDefault(%r)" % (result, )
409
410 411 -def none(*args, **kwargs):
412 """ 413 because we can't do `NoneType(*args, **kwargs)` 414 """ 415 return None
416
417 418 # warnings and errors 419 420 -class DbfError(Exception):
421 """ 422 Fatal errors elicit this response. 423 """
424 - def __init__(self, message, *args):
425 Exception.__init__(self, message, *args) 426 self.message = message
427
428 429 -class DataOverflowError(DbfError):
430 """ 431 Data too large for field 432 """ 433
434 - def __init__(self, message, data=None):
435 DbfError.__init__(self, message) 436 self.data = data
437
438 439 -class BadDataError(DbfError):
440 """ 441 bad data in table 442 """ 443
444 - def __init__(self, message, data=None):
445 DbfError.__init__(self, message) 446 self.data = data
447
448 449 -class FieldMissingError(KeyError, DbfError):
450 """ 451 Field does not exist in table 452 """ 453
454 - def __init__(self, fieldname):
455 KeyError.__init__(self, '%s: no such field in table' % fieldname) 456 DbfError.__init__(self, '%s: no such field in table' % fieldname) 457 self.data = fieldname
458
459 460 -class FieldSpecError(DbfError, ValueError):
461 """ 462 invalid field specification 463 """ 464
465 - def __init__(self, message):
466 ValueError.__init__(self, message) 467 DbfError.__init__(self, message)
468
469 470 -class NonUnicodeError(DbfError):
471 """ 472 Data for table not in unicode 473 """ 474
 475      def __init__(self, message=None): 
 476          DbfError.__init__(self, message) 
 477   
478 479 -class NotFoundError(DbfError, ValueError, KeyError, IndexError):
480 """ 481 record criteria not met 482 """ 483
484 - def __init__(self, message=None, data=None):
485 ValueError.__init__(self, message) 486 KeyError.__init__(self, message) 487 IndexError.__init__(self, message) 488 DbfError.__init__(self, message) 489 self.data = data
490
491 492 -class DbfWarning(Exception):
493 """ 494 Normal operations elicit this response 495 """
496
497 498 -class Eof(DbfWarning, StopIteration):
499 """ 500 End of file reached 501 """ 502 503 message = 'End of file reached' 504
505 - def __init__(self):
506 StopIteration.__init__(self, self.message) 507 DbfWarning.__init__(self, self.message)
508
509 510 -class Bof(DbfWarning, StopIteration):
511 """ 512 Beginning of file reached 513 """ 514 515 message = 'Beginning of file reached' 516
517 - def __init__(self):
518 StopIteration.__init__(self, self.message) 519 DbfWarning.__init__(self, self.message)
520
521 522 -class DoNotIndex(DbfWarning):
523 """ 524 Returned by indexing functions to suppress a record from becoming part of the index 525 """ 526 527 message = 'Not indexing record' 528
529 - def __init__(self):
530 DbfWarning.__init__(self, self.message)
 531   
 532   
 533  # wrappers around datetime and logical objects to allow null values 
 534   
 535  # gets replaced later by their final values 
 536  Unknown = Other = object() 
537 538 -class NullType:
539 """ 540 Null object -- any interaction returns Null 541 """ 542
543 - def _null(self, *args, **kwargs):
544 return self
545 546 __eq__ = __ne__ = __ge__ = __gt__ = __le__ = __lt__ = _null 547 __add__ = __iadd__ = __radd__ = _null 548 __sub__ = __isub__ = __rsub__ = _null 549 __mul__ = __imul__ = __rmul__ = _null 550 __div__ = __idiv__ = __rdiv__ = _null 551 __mod__ = __imod__ = __rmod__ = _null 552 __pow__ = __ipow__ = __rpow__ = _null 553 __and__ = __iand__ = __rand__ = _null 554 __xor__ = __ixor__ = __rxor__ = _null 555 __or__ = __ior__ = __ror__ = _null 556 __truediv__ = __itruediv__ = __rtruediv__ = _null 557 __floordiv__ = __ifloordiv__ = __rfloordiv__ = _null 558 __lshift__ = __ilshift__ = __rlshift__ = _null 559 __rshift__ = __irshift__ = __rrshift__ = _null 560 __neg__ = __pos__ = __abs__ = __invert__ = _null 561 __call__ = __getattr__ = _null 562
563 - def __divmod__(self, other):
564 return self, self
565 __rdivmod__ = __divmod__ 566
567 - def __hash__(self):
568 raise TypeError("unhashable type: 'Null'")
569
570 - def __new__(cls, *args):
571 return cls.null
572
573 - def __bool__(self):
574 return False
575
576 - def __repr__(self):
577 return '<null>'
578
579 - def __setattr__(self, name, value):
580 return None
581
 582      def __setitem__(self, index, value): 
 583          return None 
584
585 - def __str__(self):
586 return ''
587 588 NullType.null = object.__new__(NullType) 589 Null = NullType()
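
A sketch of how the Null singleton propagates through expressions, assuming dbf.ver_32 is importable.

    from dbf.ver_32 import Null

    total = Null + 5
    print(total is Null)          # True -- every operation on Null returns Null
    print(repr(Null.anything))    # <null> -- attribute access returns Null as well
    print(bool(Null), str(Null))  # False ''
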
590 591 592 -class Vapor:
593 """ 594 used in Vapor Records -- compares unequal with everything 595 """ 596
597 - def __eq__(self, other):
598 return False
599
600 - def __ne__(self, other):
601 return True
602 603 Vapor = Vapor()
604 605 606 -class Char(str):
607 """ 608 Strips trailing whitespace, and ignores trailing whitespace for comparisons 609 """ 610
611 - def __new__(cls, text=''):
612 if not isinstance(text, (basestring, cls)): 613 raise ValueError("Unable to automatically coerce %r to Char" % text) 614 result = str.__new__(cls, text.rstrip()) 615 result.field_size = len(text) 616 return result
617 618 __hash__ = str.__hash__ 619
620 - def __eq__(self, other):
621 """ 622 ignores trailing whitespace 623 """ 624 if not isinstance(other, (self.__class__, basestring)): 625 return NotImplemented 626 return str(self) == other.rstrip()
627
628 - def __ge__(self, other):
629 """ 630 ignores trailing whitespace 631 """ 632 if not isinstance(other, (self.__class__, basestring)): 633 return NotImplemented 634 return str(self) >= other.rstrip()
635
636 - def __gt__(self, other):
637 """ 638 ignores trailing whitespace 639 """ 640 if not isinstance(other, (self.__class__, basestring)): 641 return NotImplemented 642 return str(self) > other.rstrip()
643
644 - def __le__(self, other):
645 """ 646 ignores trailing whitespace 647 """ 648 if not isinstance(other, (self.__class__, basestring)): 649 return NotImplemented 650 return str(self) <= other.rstrip()
651
652 - def __lt__(self, other):
653 """ 654 ignores trailing whitespace 655 """ 656 if not isinstance(other, (self.__class__, basestring)): 657 return NotImplemented 658 return str(self) < other.rstrip()
659
660 - def __ne__(self, other):
661 """ 662 ignores trailing whitespace 663 """ 664 if not isinstance(other, (self.__class__, basestring)): 665 return NotImplemented 666 return str(self) != other.rstrip()
667
668 - def __bool__(self):
669 """ 670 ignores trailing whitespace 671 """ 672 return bool(str(self))
673
674 - def __add__(self, other):
675 result = self.__class__(str(self) + other) 676 result.field_size = self.field_size 677 return result
 678   
 679  baseinteger = int 
 680  basestring = str, Char 
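
A usage sketch for Char (illustrative; assumes dbf.ver_32 is importable): the stored text is rstripped, the original field width is kept in field_size, and comparisons ignore trailing whitespace.

    from dbf.ver_32 import Char

    name = Char('Ethan     ')            # e.g. a 10-character CHAR field value
    print(len(name), name.field_size)    # 5 10
    print(name == 'Ethan')               # True -- trailing whitespace ignored
    print(name == 'Ethan   ')            # True
    print(bool(Char('   ')))             # False -- all-blank values are falsey
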
681 682 -class Date:
683 """ 684 adds null capable datetime.date constructs 685 """ 686 687 __slots__ = ['_date'] 688
689 - def __new__(cls, year=None, month=0, day=0):
690 """ 691 date should be either a datetime.date or date/month/day should 692 all be appropriate integers 693 """ 694 if year is None or year is Null: 695 return cls._null_date 696 nd = object.__new__(cls) 697 if isinstance(year, basestring): 698 return Date.strptime(year) 699 elif isinstance(year, (datetime.date)): 700 nd._date = year 701 elif isinstance(year, (Date)): 702 nd._date = year._date 703 else: 704 nd._date = datetime.date(year, month, day) 705 return nd
706
707 - def __add__(self, other):
708 if self and isinstance(other, (datetime.timedelta)): 709 return Date(self._date + other) 710 else: 711 return NotImplemented
712
713 - def __eq__(self, other):
714 if isinstance(other, self.__class__): 715 return self._date == other._date 716 if isinstance(other, datetime.date): 717 return self._date == other 718 if isinstance(other, type(None)): 719 return self._date is None 720 return NotImplemented
721
722 - def __format__(self, spec):
723 if self: 724 return self._date.__format__(spec) 725 return ''
726
727 - def __getattr__(self, name):
728 if name == '_date': 729 raise AttributeError('_date missing!') 730 elif self: 731 return getattr(self._date, name) 732 else: 733 raise AttributeError('NullDate object has no attribute %s' % name)
734
735 - def __ge__(self, other):
736 if isinstance(other, (datetime.date)): 737 return self._date >= other 738 elif isinstance(other, (Date)): 739 if other: 740 return self._date >= other._date 741 return False 742 return NotImplemented
743
744 - def __gt__(self, other):
745 if isinstance(other, (datetime.date)): 746 return self._date > other 747 elif isinstance(other, (Date)): 748 if other: 749 return self._date > other._date 750 return True 751 return NotImplemented
752
753 - def __hash__(self):
754 return hash(self._date)
755
756 - def __le__(self, other):
757 if self: 758 if isinstance(other, (datetime.date)): 759 return self._date <= other 760 elif isinstance(other, (Date)): 761 if other: 762 return self._date <= other._date 763 return False 764 else: 765 if isinstance(other, (datetime.date)): 766 return True 767 elif isinstance(other, (Date)): 768 if other: 769 return True 770 return True 771 return NotImplemented
772
773 - def __lt__(self, other):
774 if self: 775 if isinstance(other, (datetime.date)): 776 return self._date < other 777 elif isinstance(other, (Date)): 778 if other: 779 return self._date < other._date 780 return False 781 else: 782 if isinstance(other, (datetime.date)): 783 return True 784 elif isinstance(other, (Date)): 785 if other: 786 return True 787 return False 788 return NotImplemented
789
790 - def __ne__(self, other):
791 if self: 792 if isinstance(other, (datetime.date)): 793 return self._date != other 794 elif isinstance(other, (Date)): 795 if other: 796 return self._date != other._date 797 return True 798 else: 799 if isinstance(other, (datetime.date)): 800 return True 801 elif isinstance(other, (Date)): 802 if other: 803 return True 804 return False 805 return NotImplemented
806
807 - def __bool__(self):
808 return self._date is not None
809 810 __radd__ = __add__ 811
812 - def __rsub__(self, other):
813 if self and isinstance(other, (datetime.date)): 814 return other - self._date 815 elif self and isinstance(other, (Date)): 816 return other._date - self._date 817 elif self and isinstance(other, (datetime.timedelta)): 818 return Date(other - self._date) 819 else: 820 return NotImplemented
821
822 - def __repr__(self):
823 if self: 824 return "Date(%d, %d, %d)" % self.timetuple()[:3] 825 else: 826 return "Date()"
827
828 - def __str__(self):
829 if self: 830 return str(self._date) 831 return ""
832
833 - def __sub__(self, other):
834 if self and isinstance(other, (datetime.date)): 835 return self._date - other 836 elif self and isinstance(other, (Date)): 837 return self._date - other._date 838 elif self and isinstance(other, (datetime.timedelta)): 839 return Date(self._date - other) 840 else: 841 return NotImplemented
842
843 - def date(self):
844 if self: 845 return self._date 846 return None
847 848 @classmethod
849 - def fromordinal(cls, number):
850 if number: 851 return cls(datetime.date.fromordinal(number)) 852 return cls()
853 854 @classmethod
855 - def fromtimestamp(cls, timestamp):
856 return cls(datetime.date.fromtimestamp(timestamp))
857 858 @classmethod
859 - def fromymd(cls, yyyymmdd):
860 if yyyymmdd in ('', ' ', 'no date'): 861 return cls() 862 return cls(datetime.date(int(yyyymmdd[:4]), int(yyyymmdd[4:6]), int(yyyymmdd[6:])))
863
864 - def replace(self, year=None, month=None, day=None, delta_year=0, delta_month=0, delta_day=0):
865 if not self: 866 return self.__class__._null_date 867 old_year, old_month, old_day = self.timetuple()[:3] 868 if isinstance(month, RelativeMonth): 869 this_month = IsoMonth(old_month) 870 delta_month += month.months_from(this_month) 871 month = None 872 if isinstance(day, RelativeDay): 873 this_day = IsoDay(self.isoweekday()) 874 delta_day += day.days_from(this_day) 875 day = None 876 year = (year or old_year) + delta_year 877 month = (month or old_month) + delta_month 878 day = (day or old_day) + delta_day 879 days_in_month = (days_per_month, days_per_leap_month)[is_leapyear(year)] 880 while not(0 < month < 13) or not (0 < day <= days_in_month[month]): 881 while month < 1: 882 year -= 1 883 month = 12 + month 884 while month > 12: 885 year += 1 886 month = month - 12 887 days_in_month = (days_per_month, days_per_leap_month)[is_leapyear(year)] 888 while day < 1: 889 month -= 1 890 day = days_in_month[month] + day 891 if not 0 < month < 13: 892 break 893 while day > days_in_month[month]: 894 day = day - days_in_month[month] 895 month += 1 896 if not 0 < month < 13: 897 break 898 return Date(year, month, day)
899
900 - def strftime(self, format):
901 if self: 902 return self._date.strftime(format) 903 return ''
904 905 @classmethod
906 - def strptime(cls, date_string, format=None):
907 if format is not None: 908 return cls(*(time.strptime(date_string, format)[0:3])) 909 return cls(*(time.strptime(date_string, "%Y-%m-%d")[0:3]))
910 911 @classmethod
912 - def today(cls):
913 return cls(datetime.date.today())
914
915 - def ymd(self):
916 if self: 917 return "%04d%02d%02d" % self.timetuple()[:3] 918 else: 919 return ' '
920 921 Date.max = Date(datetime.date.max) 922 Date.min = Date(datetime.date.min) 923 Date._null_date = object.__new__(Date) 924 Date._null_date._date = None 925 NullDate = Date()
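
A sketch of the null-capable Date in use (assuming dbf.ver_32 is importable); NEXT_MONDAY is one of the RelativeDay members exported into the module namespace above.

    from dbf.ver_32 import Date, NullDate, NEXT_MONDAY

    birthday = Date(1979, 9, 13)
    print(birthday.ymd())                      # 19790913
    print(birthday.replace(day=NEXT_MONDAY))   # 1979-09-17, the following Monday

    empty = Date()
    print(empty is NullDate, bool(empty), str(empty))   # True False ''
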
926 927 928 -class DateTime:
929 """ 930 adds null capable datetime.datetime constructs 931 """ 932 933 __slots__ = ['_datetime'] 934
935 - def __new__(cls, year=None, month=0, day=0, hour=0, minute=0, second=0, microsecond=0):
936 """year may be a datetime.datetime""" 937 if year is None or year is Null: 938 return cls._null_datetime 939 ndt = object.__new__(cls) 940 if isinstance(year, basestring): 941 return DateTime.strptime(year) 942 elif isinstance(year, DateTime): 943 ndt._datetime = year._datetime 944 elif isinstance(year, datetime.datetime): 945 microsecond = year.microsecond // 1000 * 1000 946 hour, minute, second = year.hour, year.minute, year.second 947 year, month, day = year.year, year.month, year.day 948 ndt._datetime = datetime.datetime(year, month, day, hour, minute, second, microsecond) 949 elif year is not None: 950 microsecond = microsecond // 1000 * 1000 951 ndt._datetime = datetime.datetime(year, month, day, hour, minute, second, microsecond) 952 return ndt
953
954 - def __add__(self, other):
955 if self and isinstance(other, (datetime.timedelta)): 956 return DateTime(self._datetime + other) 957 else: 958 return NotImplemented
959
960 - def __eq__(self, other):
961 if isinstance(other, self.__class__): 962 return self._datetime == other._datetime 963 if isinstance(other, datetime.date): 964 return self._datetime == other 965 if isinstance(other, type(None)): 966 return self._datetime is None 967 return NotImplemented
968
969 - def __format__(self, spec):
970 if self: 971 return self._datetime.__format__(spec) 972 return ''
973
974 - def __getattr__(self, name):
975 if name == '_datetime': 976 raise AttributeError('_datetime missing!') 977 elif self: 978 return getattr(self._datetime, name) 979 else: 980 raise AttributeError('NullDateTime object has no attribute %s' % name)
981
982 - def __ge__(self, other):
983 if self: 984 if isinstance(other, (datetime.datetime)): 985 return self._datetime >= other 986 elif isinstance(other, (DateTime)): 987 if other: 988 return self._datetime >= other._datetime 989 return False 990 else: 991 if isinstance(other, (datetime.datetime)): 992 return False 993 elif isinstance(other, (DateTime)): 994 if other: 995 return False 996 return True 997 return NotImplemented
998
999 - def __gt__(self, other):
1000 if self: 1001 if isinstance(other, (datetime.datetime)): 1002 return self._datetime > other 1003 elif isinstance(other, (DateTime)): 1004 if other: 1005 return self._datetime > other._datetime 1006 return True 1007 else: 1008 if isinstance(other, (datetime.datetime)): 1009 return False 1010 elif isinstance(other, (DateTime)): 1011 if other: 1012 return False 1013 return False 1014 return NotImplemented
1015
1016 - def __hash__(self):
1017 return self._datetime.__hash__()
1018
1019 - def __le__(self, other):
1020 if self: 1021 if isinstance(other, (datetime.datetime)): 1022 return self._datetime <= other 1023 elif isinstance(other, (DateTime)): 1024 if other: 1025 return self._datetime <= other._datetime 1026 return False 1027 else: 1028 if isinstance(other, (datetime.datetime)): 1029 return True 1030 elif isinstance(other, (DateTime)): 1031 if other: 1032 return True 1033 return True 1034 return NotImplemented
1035
1036 - def __lt__(self, other):
1037 if self: 1038 if isinstance(other, (datetime.datetime)): 1039 return self._datetime < other 1040 elif isinstance(other, (DateTime)): 1041 if other: 1042 return self._datetime < other._datetime 1043 return False 1044 else: 1045 if isinstance(other, (datetime.datetime)): 1046 return True 1047 elif isinstance(other, (DateTime)): 1048 if other: 1049 return True 1050 return False 1051 return NotImplemented
1052
1053 - def __ne__(self, other):
1054 if self: 1055 if isinstance(other, (datetime.datetime)): 1056 return self._datetime != other 1057 elif isinstance(other, (DateTime)): 1058 if other: 1059 return self._datetime != other._datetime 1060 return True 1061 else: 1062 if isinstance(other, (datetime.datetime)): 1063 return True 1064 elif isinstance(other, (DateTime)): 1065 if other: 1066 return True 1067 return False 1068 return NotImplemented
1069
1070 - def __bool__(self):
1071 return self._datetime is not None
1072 1073 __radd__ = __add__ 1074
1075 - def __rsub__(self, other):
1076 if self and isinstance(other, (datetime.datetime)): 1077 return other - self._datetime 1078 elif self and isinstance(other, (DateTime)): 1079 return other._datetime - self._datetime 1080 elif self and isinstance(other, (datetime.timedelta)): 1081 return DateTime(other - self._datetime) 1082 else: 1083 return NotImplemented
1084
1085 - def __repr__(self):
1086 if self: 1087 return "DateTime(%5d, %2d, %2d, %2d, %2d, %2d, %2d)" % ( 1088 self._datetime.timetuple()[:6] + (self._datetime.microsecond, ) 1089 ) 1090 else: 1091 return "DateTime()"
1092
1093 - def __str__(self):
1094 if self: 1095 return str(self._datetime) 1096 return ""
1097
1098 - def __sub__(self, other):
1099 if self and isinstance(other, (datetime.datetime)): 1100 return self._datetime - other 1101 elif self and isinstance(other, (DateTime)): 1102 return self._datetime - other._datetime 1103 elif self and isinstance(other, (datetime.timedelta)): 1104 return DateTime(self._datetime - other) 1105 else: 1106 return NotImplemented
1107 1108 @classmethod
1109 - def combine(cls, date, time):
1110 if Date(date) and Time(time): 1111 return cls(date.year, date.month, date.day, time.hour, time.minute, time.second, time.microsecond) 1112 return cls()
1113
1114 - def date(self):
1115 if self: 1116 return Date(self.year, self.month, self.day) 1117 return Date()
1118
1119 - def datetime(self):
1120 if self: 1121 return self._datetime 1122 return None
1123 1124 @classmethod
1125 - def fromordinal(cls, number):
1126 if number: 1127 return cls(datetime.datetime.fromordinal(number)) 1128 else: 1129 return cls()
1130 1131 @classmethod
1132 - def fromtimestamp(cls, timestamp):
1133 return DateTime(datetime.datetime.fromtimestamp(timestamp))
1134 1135 @classmethod
1136 - def now(cls):
1137 return cls(datetime.datetime.now())
1138
1139 - def replace(self, year=None, month=None, day=None, hour=None, minute=None, second=None, microsecond=None, 1140 delta_year=0, delta_month=0, delta_day=0, delta_hour=0, delta_minute=0, delta_second=0):
1141 if not self: 1142 return self.__class__._null_datetime 1143 old_year, old_month, old_day, old_hour, old_minute, old_second, old_micro = self.timetuple()[:7] 1144 if isinstance(month, RelativeMonth): 1145 this_month = IsoMonth(old_month) 1146 delta_month += month.months_from(this_month) 1147 month = None 1148 if isinstance(day, RelativeDay): 1149 this_day = IsoDay(self.isoweekday()) 1150 delta_day += day.days_from(this_day) 1151 day = None 1152 year = (year or old_year) + delta_year 1153 month = (month or old_month) + delta_month 1154 day = (day or old_day) + delta_day 1155 hour = (hour or old_hour) + delta_hour 1156 minute = (minute or old_minute) + delta_minute 1157 second = (second or old_second) + delta_second 1158 microsecond = microsecond or old_micro 1159 days_in_month = (days_per_month, days_per_leap_month)[is_leapyear(year)] 1160 while ( not (0 < month < 13) 1161 or not (0 < day <= days_in_month[month]) 1162 or not (0 <= hour < 24) 1163 or not (0 <= minute < 60) 1164 or not (0 <= second < 60) 1165 ): 1166 while month < 1: 1167 year -= 1 1168 month = 12 + month 1169 while month > 12: 1170 year += 1 1171 month = month - 12 1172 days_in_month = (days_per_month, days_per_leap_month)[is_leapyear(year)] 1173 while day < 1: 1174 month -= 1 1175 day = days_in_month[month] + day 1176 if not 0 < month < 13: 1177 break 1178 while day > days_in_month[month]: 1179 day = day - days_in_month[month] 1180 month += 1 1181 if not 0 < month < 13: 1182 break 1183 while hour < 1: 1184 day -= 1 1185 hour = 24 + hour 1186 while hour > 23: 1187 day += 1 1188 hour = hour - 24 1189 while minute < 0: 1190 hour -= 1 1191 minute = 60 + minute 1192 while minute > 59: 1193 hour += 1 1194 minute = minute - 60 1195 while second < 0: 1196 minute -= 1 1197 second = 60 + second 1198 while second > 59: 1199 minute += 1 1200 second = second - 60 1201 return DateTime(year, month, day, hour, minute, second, microsecond)
1202
1203 - def strftime(self, format):
1204 if self: 1205 return self._datetime.strftime(format) 1206 return ''
1207 1208 @classmethod
1209 - def strptime(cls, datetime_string, format=None):
1210 if format is not None: 1211 return cls(datetime.datetime.strptime(datetime_string, format)) 1212 try: 1213 return cls(datetime.datetime.strptime(datetime_string, "%Y-%m-%d %H:%M:%S.%f")) 1214 except ValueError: 1215 return cls(datetime.datetime.strptime(datetime_string, "%Y-%m-%d %H:%M:%S"))
1216
1217 - def time(self):
1218 if self: 1219 return Time(self.hour, self.minute, self.second, self.microsecond) 1220 return Time()
1221 1222 @classmethod
1223 - def utcnow(cls):
1224 return cls(datetime.datetime.utcnow())
1225 1226 @classmethod
1227 - def today(cls):
1228 return cls(datetime.datetime.today())
1229 1230 DateTime.max = DateTime(datetime.datetime.max) 1231 DateTime.min = DateTime(datetime.datetime.min) 1232 DateTime._null_datetime = object.__new__(DateTime) 1233 DateTime._null_datetime._datetime = None 1234 NullDateTime = DateTime()
1235 1236 1237 -class Time:
1238 """ 1239 adds null capable datetime.time constructs 1240 """ 1241 1242 __slots__ = ['_time'] 1243
1244 - def __new__(cls, hour=None, minute=0, second=0, microsecond=0):
1245 """ 1246 hour may be a datetime.time or a str(Time) 1247 """ 1248 if hour is None or hour is Null: 1249 return cls._null_time 1250 nt = object.__new__(cls) 1251 if isinstance(hour, basestring): 1252 hour = Time.strptime(hour) 1253 if isinstance(hour, Time): 1254 nt._time = hour._time 1255 elif isinstance(hour, (datetime.time)): 1256 microsecond = hour.microsecond // 1000 * 1000 1257 hour, minute, second = hour.hour, hour.minute, hour.second 1258 nt._time = datetime.time(hour, minute, second, microsecond) 1259 elif hour is not None: 1260 microsecond = microsecond // 1000 * 1000 1261 nt._time = datetime.time(hour, minute, second, microsecond) 1262 return nt
1263 1264
1265 - def __add__(self, other):
1266 if self and isinstance(other, (datetime.timedelta)): 1267 t = self._time 1268 t = datetime.datetime(2012, 6, 27, t.hour, t.minute, t.second, t.microsecond) 1269 t += other 1270 return Time(t.hour, t.minute, t.second, t.microsecond) 1271 else: 1272 return NotImplemented
1273
1274 - def __eq__(self, other):
1275 if isinstance(other, self.__class__): 1276 return self._time == other._time 1277 if isinstance(other, datetime.time): 1278 return self._time == other 1279 if isinstance(other, type(None)): 1280 return self._time is None 1281 return NotImplemented
1282
1283 - def __format__(self, spec):
1284 if self: 1285 return self._time.__format__(spec) 1286 return ''
1287
1288 - def __getattr__(self, name):
1289 if name == '_time': 1290 raise AttributeError('_time missing!') 1291 elif self: 1292 return getattr(self._time, name) 1293 else: 1294 raise AttributeError('NullTime object has no attribute %s' % name)
1295
1296 - def __ge__(self, other):
1297 if self: 1298 if isinstance(other, (datetime.time)): 1299 return self._time >= other 1300 elif isinstance(other, (Time)): 1301 if other: 1302 return self._time >= other._time 1303 return False 1304 else: 1305 if isinstance(other, (datetime.time)): 1306 return False 1307 elif isinstance(other, (Time)): 1308 if other: 1309 return False 1310 return True 1311 return NotImplemented
1312
1313      def __gt__(self, other): 
1314          if self: 
1315              if isinstance(other, (datetime.time)): 
1316                  return self._time > other 
1317              elif isinstance(other, (Time)): 
1318                  if other: 
1319                      return self._time > other._time 
1320                  return True 
1321          else: 
1322              if isinstance(other, (datetime.time)): 
1323                  return False 
1324              elif isinstance(other, (Time)): 
1325                  if other: 
1326                      return False 
1327                  return False 
1328          return NotImplemented 
1329
1330      def __hash__(self): 
1331          return self._time.__hash__() 
1332
1333 - def __le__(self, other):
1334 if self: 1335 if isinstance(other, (datetime.time)): 1336 return self._time <= other 1337 elif isinstance(other, (Time)): 1338 if other: 1339 return self._time <= other._time 1340 return False 1341 else: 1342 if isinstance(other, (datetime.time)): 1343 return True 1344 elif isinstance(other, (Time)): 1345 if other: 1346 return True 1347 return True 1348 return NotImplemented
1349
1350 - def __lt__(self, other):
1351 if self: 1352 if isinstance(other, (datetime.time)): 1353 return self._time < other 1354 elif isinstance(other, (Time)): 1355 if other: 1356 return self._time < other._time 1357 return False 1358 else: 1359 if isinstance(other, (datetime.time)): 1360 return True 1361 elif isinstance(other, (Time)): 1362 if other: 1363 return True 1364 return False 1365 return NotImplemented
1366
1367 - def __ne__(self, other):
1368 if self: 1369 if isinstance(other, (datetime.time)): 1370 return self._time != other 1371 elif isinstance(other, (Time)): 1372 if other: 1373 return self._time != other._time 1374 return True 1375 else: 1376 if isinstance(other, (datetime.time)): 1377 return True 1378 elif isinstance(other, (Time)): 1379 if other: 1380 return True 1381 return False 1382 return NotImplemented
1383
1384 - def __bool__(self):
1385 return self._time is not None
1386 1387 __radd__ = __add__ 1388
1389 - def __rsub__(self, other):
1390 if self and isinstance(other, (Time, datetime.time)): 1391 t = self._time 1392 t = datetime.datetime(2012, 6, 27, t.hour, t.minute, t.second, t.microsecond) 1393 other = datetime.datetime(2012, 6, 27, other.hour, other.minute, other.second, other.microsecond) 1394 other -= t 1395 return other 1396 else: 1397 return NotImplemented
1398
1399 - def __repr__(self):
1400 if self: 1401 return "Time(%d, %d, %d, %d)" % (self.hour, self.minute, self.second, self.microsecond) 1402 else: 1403 return "Time()"
1404
1405 - def __str__(self):
1406 if self: 1407 return str(self._time) 1408 return ""
1409
1410 - def __sub__(self, other):
1411 if self and isinstance(other, (Time, datetime.time)): 1412 t = self._time 1413 t = datetime.datetime(2012, 6, 27, t.hour, t.minute, t.second, t.microsecond) 1414 o = datetime.datetime(2012, 6, 27, other.hour, other.minute, other.second, other.microsecond) 1415 return t - o 1416 elif self and isinstance(other, (datetime.timedelta)): 1417 t = self._time 1418 t = datetime.datetime(2012, 6, 27, t.hour, t.minute, t.second, t.microsecond) 1419 t -= other 1420 return Time(t.hour, t.minute, t.second, t.microsecond) 1421 else: 1422 return NotImplemented
1423 1424 @classmethod
1425 - def fromfloat(cls, num):
1426 "2.5 == 2 hours, 30 minutes, 0 seconds, 0 microseconds" 1427 if num < 0: 1428 raise ValueError("positive value required (got %r)" % num) 1429 if num == 0: 1430 return Time(0) 1431 hours = int(num) 1432 if hours: 1433 num = num % hours 1434 minutes = int(num * 60) 1435 if minutes: 1436 num = num * 60 % minutes 1437 else: 1438 num = num * 60 1439 seconds = int(num * 60) 1440 if seconds: 1441 num = num * 60 % seconds 1442 else: 1443 num = num * 60 1444 microseconds = int(num * 1000) 1445 return Time(hours, minutes, seconds, microseconds)
1446 1447 @staticmethod
1448 - def now():
1449 return DateTime.now().time()
1450
1451 - def replace(self, hour=None, minute=None, second=None, microsecond=None, delta_hour=0, delta_minute=0, delta_second=0):
1452 if not self: 1453 return self.__class__._null_time 1454 old_hour, old_minute, old_second, old_micro = self.hour, self.minute, self.second, self.microsecond 1455 hour = (hour or old_hour) + delta_hour 1456 minute = (minute or old_minute) + delta_minute 1457 second = (second or old_second) + delta_second 1458 microsecond = microsecond or old_micro 1459 while not (0 <= hour < 24) or not (0 <= minute < 60) or not (0 <= second < 60): 1460 while second < 0: 1461 minute -= 1 1462 second = 60 + second 1463 while second > 59: 1464 minute += 1 1465 second = second - 60 1466 while minute < 0: 1467 hour -= 1 1468 minute = 60 + minute 1469 while minute > 59: 1470 hour += 1 1471 minute = minute - 60 1472 while hour < 1: 1473 hour = 24 + hour 1474 while hour > 23: 1475 hour = hour - 24 1476 return Time(hour, minute, second, microsecond)
1477
1478 - def strftime(self, format):
1479 if self: 1480 return self._time.strftime(format) 1481 return ''
1482 1483 @classmethod
1484 - def strptime(cls, time_string, format=None):
1485 if format is not None: 1486 return cls(datetime.time.strptime(time_string, format)) 1487 try: 1488 return cls(datetime.time.strptime(time_string, "%H:%M:%S.%f")) 1489 except ValueError: 1490 return cls(datetime.time.strptime(time_string, "%H:%M:%S"))
1491
1492 - def time(self):
1493 if self: 1494 return self._time 1495 return None
1496
1497 - def tofloat(self):
1498 "returns Time as a float" 1499 hour = self.hour 1500 minute = self.minute * (1.0 / 60) 1501 second = self.second * (1.0 / 3600) 1502 microsecond = self.microsecond * (1.0 / 3600000) 1503 return hour + minute + second + microsecond
1504 1505 Time.max = Time(datetime.time.max) 1506 Time.min = Time(datetime.time.min) 1507 Time._null_time = object.__new__(Time) 1508 Time._null_time._time = None 1509 NullTime = Time()
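
A sketch of the fromfloat/tofloat round trip and of the null Time, again assuming dbf.ver_32 is importable.

    from dbf.ver_32 import Time

    t = Time.fromfloat(2.5)      # 2.5 hours
    print(t)                     # 02:30:00
    print(t.tofloat())           # 2.5
    print(Time() == None)        # True -- Time() is the shared null time
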
1510 1511 1512 -class Period:
1513 "for matching various time ranges" 1514
1515 - def __init__(self, year=None, month=None, day=None, hour=None, minute=None, second=None, microsecond=None):
1516 params = vars() 1517 self._mask = {} 1518 for attr in ('year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond'): 1519 value = params[attr] 1520 if value is not None: 1521 self._mask[attr] = value
1522
1523 - def __contains__(self, other):
1524 if not self._mask: 1525 return True 1526 for attr, value in self._mask.items(): 1527 other_value = getattr(other, attr, None) 1528 try: 1529 if other_value == value or other_value in value: 1530 continue 1531 except TypeError: 1532 pass 1533 return False 1534 return True
1535
1536 - def __repr__(self):
1537 items = [] 1538 for attr in ('year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond'): 1539 if attr in self._mask: 1540 items.append('%s=%s' % (attr, self._mask[attr])) 1541 return "Period(%s)" % ', '.join(items)
1542
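
A sketch of Period matching (illustrative; assumes dbf.ver_32 is importable): each keyword is either a single value or a container of values, and a date or time matches when every masked attribute matches.

    from dbf.ver_32 import Date, Period

    summer_2012 = Period(year=2012, month=(6, 7, 8))
    print(Date(2012, 7, 4) in summer_2012)     # True
    print(Date(2012, 12, 25) in summer_2012)   # False -- month not in (6, 7, 8)
    print(Date(2013, 7, 4) in summer_2012)     # False -- wrong year
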
1543   
1544  class Logical: 
1545      """ 
1546      Logical field return type. 
1547   
1548      Accepts values of True, False, or None/Null. 
1549      The boolean value of Unknown is False (use Quantum if you want an exception instead). 
1550      """ 
1551   
1552 - def __new__(cls, value=None):
1553 if value is None or value is Null or value is Other or value is Unknown: 1554 return cls.unknown 1555 elif isinstance(value, basestring): 1556 if value.lower() in ('t', 'true', 'y', 'yes', 'on'): 1557 return cls.true 1558 elif value.lower() in ('f', 'false', 'n', 'no', 'off'): 1559 return cls.false 1560 elif value.lower() in ('?', 'unknown', 'null', 'none', ' ', ''): 1561 return cls.unknown 1562 else: 1563 raise ValueError('unknown value for Logical: %s' % value) 1564 else: 1565 return (cls.false, cls.true)[bool(value)]
1566
1567 - def __add__(x, y):
1568 if isinstance(y, type(None)) or y is Unknown or x is Unknown: 1569 return Unknown 1570 try: 1571 i = int(y) 1572 except Exception: 1573 return NotImplemented 1574 return int(x) + i
1575 1576 __radd__ = __iadd__ = __add__ 1577
1578 - def __sub__(x, y):
1579 if isinstance(y, type(None)) or y is Unknown or x is Unknown: 1580 return Unknown 1581 try: 1582 i = int(y) 1583 except Exception: 1584 return NotImplemented 1585 return int(x) - i
1586 1587 __isub__ = __sub__ 1588
1589 - def __rsub__(y, x):
1590 if isinstance(x, type(None)) or x is Unknown or y is Unknown: 1591 return Unknown 1592 try: 1593 i = int(x) 1594 except Exception: 1595 return NotImplemented 1596 return i - int(y)
1597
1598 - def __mul__(x, y):
1599 if x == 0 or y == 0: 1600 return 0 1601 elif isinstance(y, type(None)) or y is Unknown or x is Unknown: 1602 return Unknown 1603 try: 1604 i = int(y) 1605 except Exception: 1606 return NotImplemented 1607 return int(x) * i
1608 1609 __rmul__ = __imul__ = __mul__ 1610
1611 - def __div__(x, y):
1612 if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown: 1613 return Unknown 1614 try: 1615 i = int(y) 1616 except Exception: 1617 return NotImplemented 1618 return int(x).__div__(i)
1619 1620 __idiv__ = __div__ 1621
1622 - def __rdiv__(y, x):
1623 if isinstance(x, type(None)) or y == 0 or x is Unknown or y is Unknown: 1624 return Unknown 1625 try: 1626 i = int(x) 1627 except Exception: 1628 return NotImplemented 1629 return i.__div__(int(y))
1630
1631 - def __truediv__(x, y):
1632 if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown: 1633 return Unknown 1634 try: 1635 i = int(y) 1636 except Exception: 1637 return NotImplemented 1638 return int(x).__truediv__(i)
1639 1640 __itruediv__ = __truediv__ 1641
1642 - def __rtruediv__(y, x):
1643 if isinstance(x, type(None)) or y == 0 or y is Unknown or x is Unknown: 1644 return Unknown 1645 try: 1646 i = int(x) 1647 except Exception: 1648 return NotImplemented 1649 return i.__truediv__(int(y))
1650
1651 - def __floordiv__(x, y):
1652 if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown: 1653 return Unknown 1654 try: 1655 i = int(y) 1656 except Exception: 1657 return NotImplemented 1658 return int(x).__floordiv__(i)
1659 1660 __ifloordiv__ = __floordiv__ 1661
1662 - def __rfloordiv__(y, x):
1663 if isinstance(x, type(None)) or y == 0 or y is Unknown or x is Unknown: 1664 return Unknown 1665 try: 1666 i = int(x) 1667 except Exception: 1668 return NotImplemented 1669 return i.__floordiv__(int(y))
1670
1671 - def __divmod__(x, y):
1672 if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown: 1673 return (Unknown, Unknown) 1674 try: 1675 i = int(y) 1676 except Exception: 1677 return NotImplemented 1678 return divmod(int(x), i)
1679
1680 - def __rdivmod__(y, x):
1681 if isinstance(x, type(None)) or y == 0 or y is Unknown or x is Unknown: 1682 return (Unknown, Unknown) 1683 try: 1684 i = int(x) 1685 except Exception: 1686 return NotImplemented 1687 return divmod(i, int(y))
1688
1689 - def __mod__(x, y):
1690 if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown: 1691 return Unknown 1692 try: 1693 i = int(y) 1694 except Exception: 1695 return NotImplemented 1696 return int(x) % i
1697 1698 __imod__ = __mod__ 1699
1700 - def __rmod__(y, x):
1701 if isinstance(x, type(None)) or y == 0 or x is Unknown or y is Unknown: 1702 return Unknown 1703 try: 1704 i = int(x) 1705 except Exception: 1706 return NotImplemented 1707 return i % int(y)
1708
1709 - def __pow__(x, y):
1710 if not isinstance(y, (x.__class__, bool, type(None), baseinteger)): 1711 return NotImplemented 1712 if isinstance(y, type(None)) or y is Unknown: 1713 return Unknown 1714 i = int(y) 1715 if i == 0: 1716 return 1 1717 if x is Unknown: 1718 return Unknown 1719 return int(x) ** i
1720 1721 __ipow__ = __pow__ 1722
1723 - def __rpow__(y, x):
1724 if not isinstance(x, (y.__class__, bool, type(None), baseinteger)): 1725 return NotImplemented 1726 if y is Unknown: 1727 return Unknown 1728 i = int(y) 1729 if i == 0: 1730 return 1 1731 if x is Unknown or isinstance(x, type(None)): 1732 return Unknown 1733 return int(x) ** i
1734
1735 - def __lshift__(x, y):
1736 if isinstance(y, type(None)) or x is Unknown or y is Unknown: 1737 return Unknown 1738 return int(x.value) << int(y)
1739 1740 __ilshift__ = __lshift__ 1741
1742 - def __rlshift__(y, x):
1743 if isinstance(x, type(None)) or x is Unknown or y is Unknown: 1744 return Unknown 1745 return int(x) << int(y)
1746
1747 - def __rshift__(x, y):
1748 if isinstance(y, type(None)) or x is Unknown or y is Unknown: 1749 return Unknown 1750 return int(x.value) >> int(y)
1751 1752 __irshift__ = __rshift__ 1753
1754 - def __rrshift__(y, x):
1755 if isinstance(x, type(None)) or x is Unknown or y is Unknown: 1756 return Unknown 1757 return int(x) >> int(y)
1758
1759 - def __neg__(x):
1760 "NEG (negation)" 1761 if x in (Truth, Falsth): 1762 return -x.value 1763 return Unknown
1764
1765 - def __pos__(x):
1766 "POS (posation)" 1767 if x in (Truth, Falsth): 1768 return +x.value 1769 return Unknown
1770
1771 - def __abs__(x):
1772 if x in (Truth, Falsth): 1773 return abs(x.value) 1774 return Unknown
1775
1776 - def __invert__(x):
1777 if x in (Truth, Falsth): 1778 return ~x.value 1779 return Unknown
1780
1781 - def __complex__(x):
1782 if x.value is None: 1783 raise ValueError("unable to return complex() of %r" % x) 1784 return complex(x.value)
1785
1786 - def __int__(x):
1787 if x.value is None: 1788 raise ValueError("unable to return int() of %r" % x) 1789 return int(x.value)
1790
1791 - def __float__(x):
1792 if x.value is None: 1793 raise ValueError("unable to return float() of %r" % x) 1794 return float(x.value)
1795
1796 - def __and__(x, y):
1797 """ 1798 AND (conjunction) x & y: 1799 True iff both x, y are True 1800 False iff at least one of x, y is False 1801 Unknown otherwise 1802 """ 1803 if (isinstance(x, baseinteger) and not isinstance(x, bool)) or (isinstance(y, baseinteger) and not isinstance(y, bool)): 1804 if x == 0 or y == 0: 1805 return 0 1806 elif x is Unknown or y is Unknown: 1807 return Unknown 1808 return int(x) & int(y) 1809 elif x in (False, Falsth) or y in (False, Falsth): 1810 return Falsth 1811 elif x in (True, Truth) and y in (True, Truth): 1812 return Truth 1813 elif isinstance(x, type(None)) or isinstance(y, type(None)) or y is Unknown or x is Unknown: 1814 return Unknown 1815 return NotImplemented
1816 1817 __rand__ = __and__ 1818
1819 - def __or__(x, y):
1820 "OR (disjunction): x | y => True iff at least one of x, y is True" 1821 if (isinstance(x, baseinteger) and not isinstance(x, bool)) or (isinstance(y, baseinteger) and not isinstance(y, bool)): 1822 if x is Unknown or y is Unknown: 1823 return Unknown 1824 return int(x) | int(y) 1825 elif x in (True, Truth) or y in (True, Truth): 1826 return Truth 1827 elif x in (False, Falsth) and y in (False, Falsth): 1828 return Falsth 1829 elif isinstance(x, type(None)) or isinstance(y, type(None)) or y is Unknown or x is Unknown: 1830 return Unknown 1831 return NotImplemented
1832 1833 __ror__ = __or__ 1834
1835 - def __xor__(x, y):
1836 "XOR (parity) x ^ y: True iff only one of x,y is True" 1837 if (isinstance(x, baseinteger) and not isinstance(x, bool)) or (isinstance(y, baseinteger) and not isinstance(y, bool)): 1838 if x is Unknown or y is Unknown: 1839 return Unknown 1840 return int(x) ^ int(y) 1841 elif x in (True, Truth, False, Falsth) and y in (True, Truth, False, Falsth): 1842 return { 1843 (True, True) : Falsth, 1844 (True, False) : Truth, 1845 (False, True) : Truth, 1846 (False, False): Falsth, 1847 }[(x, y)] 1848 elif isinstance(x, type(None)) or isinstance(y, type(None)) or y is Unknown or x is Unknown: 1849 return Unknown 1850 return NotImplemented
1851 1852 __rxor__ = __xor__ 1853
1854 - def __bool__(x):
1855 "boolean value of Unknown is assumed False" 1856 return x.value is True
1857
1858 - def __eq__(x, y):
1859 if isinstance(y, x.__class__): 1860 return x.value == y.value 1861 elif isinstance(y, (bool, NoneType, baseinteger)): 1862 return x.value == y 1863 return NotImplemented
1864
1865 - def __ge__(x, y):
1866 if isinstance(y, type(None)) or x is Unknown or y is Unknown: 1867 return x.value == None 1868 elif isinstance(y, x.__class__): 1869 return x.value >= y.value 1870 elif isinstance(y, (bool, baseinteger)): 1871 return x.value >= y 1872 return NotImplemented
1873
1874 - def __gt__(x, y):
1875 if isinstance(y, type(None)) or x is Unknown or y is Unknown: 1876 return False 1877 elif isinstance(y, x.__class__): 1878 return x.value > y.value 1879 elif isinstance(y, (bool, baseinteger)): 1880 return x.value > y 1881 return NotImplemented
1882
1883 - def __le__(x, y):
1884 if isinstance(y, type(None)) or x is Unknown or y is Unknown: 1885 return x.value == None 1886 elif isinstance(y, x.__class__): 1887 return x.value <= y.value 1888 elif isinstance(y, (bool, baseinteger)): 1889 return x.value <= y 1890 return NotImplemented
1891
1892 - def __lt__(x, y):
1893 if isinstance(y, type(None)) or x is Unknown or y is Unknown: 1894 return False 1895 elif isinstance(y, x.__class__): 1896 return x.value < y.value 1897 elif isinstance(y, (bool, baseinteger)): 1898 return x.value < y 1899 return NotImplemented
1900
1901 - def __ne__(x, y):
1902 if isinstance(y, x.__class__): 1903 return x.value != y.value 1904 elif isinstance(y, (bool, type(None), baseinteger)): 1905 return x.value != y 1906 return NotImplemented
1907
1908 - def __hash__(x):
1909 return hash(x.value)
1910
1911 - def __index__(x):
1912 if x.value is None: 1913 raise ValueError("unable to return int() of %r" % x) 1914 return int(x.value)
1915
1916 - def __repr__(x):
1917 return "Logical(%r)" % x.string
1918
1919 - def __str__(x):
1920 return x.string
1921 1922 Logical.true = object.__new__(Logical) 1923 Logical.true.value = True 1924 Logical.true.string = 'T' 1925 Logical.false = object.__new__(Logical) 1926 Logical.false.value = False 1927 Logical.false.string = 'F' 1928 Logical.unknown = object.__new__(Logical) 1929 Logical.unknown.value = None 1930 Logical.unknown.string = '?' 1931 Truth = Logical(True) 1932 Falsth = Logical(False) 1933 Unknown = Logical()
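
A sketch of the three-valued behaviour of Logical, assuming dbf.ver_32 is importable.

    from dbf.ver_32 import Logical

    on_file = Logical('t')       # 'T', 'Y', 'yes', 'on', ... -> Truth
    missing = Logical(None)      # None/Null -> Unknown
    print(on_file & missing)     # ? -- True AND Unknown is Unknown
    print(on_file | missing)     # T -- True OR anything is True
    print(bool(missing))         # False -- Unknown is falsey (unlike Quantum)
    print(missing == None)       # True
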
1934 1935 1936 -class Quantum(object):
1937 """ 1938 Logical field return type that implements boolean algebra 1939 1940 Accepts values of True/On, False/Off, or None/Null/Unknown/Other 1941 """ 1942
1943 - def __new__(cls, value=None):
1944 if value is None or value is Null or value is Other or value is Unknown: 1945 return cls.unknown 1946 elif isinstance(value, basestring): 1947 if value.lower() in ('t', 'true', 'y', 'yes', 'on'): 1948 return cls.true 1949 elif value.lower() in ('f', 'false', 'n', 'no', 'off'): 1950 return cls.false 1951 elif value.lower() in ('?', 'unknown', 'null', 'none', ' ', ''): 1952 return cls.unknown 1953 else: 1954 raise ValueError('unknown value for Quantum: %s' % value) 1955 else: 1956 return (cls.false, cls.true)[bool(value)]
1957
1958 - def A(x, y):
1959 "OR (disjunction): x | y => True iff at least one of x, y is True" 1960 if not isinstance(y, (x.__class__, bool, NullType, type(None))): 1961 return NotImplemented 1962 if x.value is True or y is not Other and y == True: 1963 return x.true 1964 elif x.value is False and y is not Other and y == False: 1965 return x.false 1966 return Other
1967
1968 - def _C_material(x, y):
1969 "IMP (material implication) x >> y => False iff x == True and y == False" 1970 if not isinstance(y, (x.__class__, bool, NullType, type(None))): 1971 return NotImplemented 1972 if (x.value is False 1973 or (x.value is True and y is not Other and y == True)): 1974 return x.true 1975 elif x.value is True and y is not Other and y == False: 1976 return False 1977 return Other
1978
1979 - def _C_material_reversed(y, x):
1980 "IMP (material implication) x >> y => False iff x = True and y = False" 1981 if not isinstance(x, (y.__class__, bool, NullType, type(None))): 1982 return NotImplemented 1983 if (x is not Other and x == False 1984 or (x is not Other and x == True and y.value is True)): 1985 return y.true 1986 elif x is not Other and x == True and y.value is False: 1987 return y.false 1988 return Other
1989
1990 - def _C_relevant(x, y):
1991 "IMP (relevant implication) x >> y => True iff both x, y are True, False iff x == True and y == False, Other if x is False" 1992 if not isinstance(y, (x.__class__, bool, NullType, type(None))): 1993 return NotImplemented 1994 if x.value is True and y is not Other and y == True: 1995 return x.true 1996 if x.value is True and y is not Other and y == False: 1997 return x.false 1998 return Other
1999
2000 - def _C_relevant_reversed(y, x):
2001 "IMP (relevant implication) x >> y => True iff both x, y are True, False iff x == True and y == False, Other if y is False" 2002 if not isinstance(x, (y.__class__, bool, NullType, type(None))): 2003 return NotImplemented 2004 if x is not Other and x == True and y.value is True: 2005 return y.true 2006 if x is not Other and x == True and y.value is False: 2007 return y.false 2008 return Other
2009
2010 - def D(x, y):
2011 "NAND (negative AND) x.D(y): False iff x and y are both True" 2012 if not isinstance(y, (x.__class__, bool, NullType, type(None))): 2013 return NotImplemented 2014 if x.value is False or y is not Other and y == False: 2015 return x.true 2016 elif x.value is True and y is not Other and y == True: 2017 return x.false 2018 return Other
2019
2020 - def E(x, y):
2021 "EQV (equivalence) x.E(y): True iff x and y are the same" 2022 if not isinstance(y, (x.__class__, bool, NullType, type(None))): 2023 return NotImplemented 2024 elif ( 2025 (x.value is True and y is not Other and y == True) 2026 or 2027 (x.value is False and y is not Other and y == False) 2028 ): 2029 return x.true 2030 elif ( 2031 (x.value is True and y is not Other and y == False) 2032 or 2033 (x.value is False and y is not Other and y == True) 2034 ): 2035 return x.false 2036 return Other
2037
2038 - def J(x, y):
2039 "XOR (parity) x ^ y: True iff only one of x,y is True" 2040 if not isinstance(y, (x.__class__, bool, NullType, type(None))): 2041 return NotImplemented 2042 if ( 2043 (x.value is True and y is not Other and y == False) 2044 or 2045 (x.value is False and y is not Other and y == True) 2046 ): 2047 return x.true 2048 if ( 2049 (x.value is False and y is not Other and y == False) 2050 or 2051 (x.value is True and y is not Other and y == True) 2052 ): 2053 return x.false 2054 return Other
2055
2056 - def K(x, y):
2057 "AND (conjunction) x & y: True iff both x, y are True" 2058 if not isinstance(y, (x.__class__, bool, NullType, type(None))): 2059 return NotImplemented 2060 if x.value is True and y is not Other and y == True: 2061 return x.true 2062 elif x.value is False or y is not Other and y == False: 2063 return x.false 2064 return Other
2065
2066 - def N(x):
2067 "NEG (negation) -x: True iff x = False" 2068 if x is x.true: 2069 return x.false 2070 elif x is x.false: 2071 return x.true 2072 return Other
2073 2074 @classmethod
2075 - def set_implication(cls, method):
2076 "sets IMP to material or relevant" 2077 if not isinstance(method, basestring) or method.lower() not in ('material', 'relevant'): 2078 raise ValueError("method should be 'material' (for strict boolean) or 'relevant', not %r'" % method) 2079 if method.lower() == 'material': 2080 cls.C = cls._C_material 2081 cls.__rshift__ = cls._C_material 2082 cls.__rrshift__ = cls._C_material_reversed 2083 elif method.lower() == 'relevant': 2084 cls.C = cls._C_relevant 2085 cls.__rshift__ = cls._C_relevant 2086 cls.__rrshift__ = cls._C_relevant_reversed
2087
2088 - def __eq__(x, y):
2089 if not isinstance(y, (x.__class__, bool, NullType, type(None))): 2090 return NotImplemented 2091 if ( 2092 (x.value is True and y is not Other and y == True) 2093 or 2094 (x.value is False and y is not Other and y == False) 2095 ): 2096 return x.true 2097 elif ( 2098 (x.value is True and y is not Other and y == False) 2099 or 2100 (x.value is False and y is not Other and y == True) 2101 ): 2102 return x.false 2103 return Other
2104
2105 - def __hash__(x):
2106 return hash(x.value)
2107
2108 - def __ne__(x, y):
2109 if not isinstance(y, (x.__class__, bool, NullType, type(None))): 2110 return NotImplemented 2111 if ( 2112 (x.value is True and y is not Other and y == False) 2113 or 2114 (x.value is False and y is not Other and y == True) 2115 ): 2116 return x.true 2117 elif ( 2118 (x.value is True and y is not Other and y == True) 2119 or 2120 (x.value is False and y is not Other and y == False) 2121 ): 2122 return x.false 2123 return Other
2124
2125 - def __bool__(x):
2126 if x is Other: 2127 raise TypeError('True/False value of %r is unknown' % x) 2128 return x.value is True
2129
2130 - def __repr__(x):
2131 return "Quantum(%r)" % x.string
2132
2133 - def __str__(x):
2134 return x.string
2135 2136 __add__ = A 2137 __and__ = K 2138 __mul__ = K 2139 __neg__ = N 2140 __or__ = A 2141 __radd__ = A 2142 __rand__ = K 2143 __rshift__ = None 2144 __rmul__ = K 2145 __ror__ = A 2146 __rrshift__ = None 2147 __rxor__ = J 2148 __xor__ = J
2149 2150 Quantum.true = object.__new__(Quantum) 2151 Quantum.true.value = True 2152 Quantum.true.string = 'Y' 2153 Quantum.false = object.__new__(Quantum) 2154 Quantum.false.value = False 2155 Quantum.false.string = 'N' 2156 Quantum.unknown = object.__new__(Quantum) 2157 Quantum.unknown.value = None 2158 Quantum.unknown.string = '?' 2159 Quantum.set_implication('material') 2160 On = Quantum(True) 2161 Off = Quantum(False) 2162 Other = Quantum() 2163 2164 2165 # add xmlrpc support 2166 from xmlrpc.client import Marshaller 2167 Marshaller.dispatch[Char] = Marshaller.dump_unicode 2168 Marshaller.dispatch[Logical] = Marshaller.dump_bool 2169 Marshaller.dispatch[DateTime] = Marshaller.dump_datetime 2170 del Marshaller
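Usage sketch (not part of the module source): the three-valued Quantum singletons set up above combine through the operator aliases at the end of the class (A, K, N, J, E, and the IMP variants). The import path dbf.ver_32 is an assumption about how the module is installed.

    from dbf.ver_32 import Quantum, On, Off, Other

    print(On & Off)     # prints N -- conjunction with a False operand is False
    print(On & Other)   # prints ? -- an unknown operand leaves the result unknown
    print(On | Other)   # prints Y -- a True operand settles the disjunction
    print(Off | Other)  # prints ?
    print(-Other)       # prints ? -- negation of unknown stays unknown
    Quantum.set_implication('relevant')   # switch >> from material to relevant implication (affects all Quantum values)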
2171 2172 # Internal classes 2173 2174 -class _Navigation(object):
2175 """ 2176 Navigation base class that provides VPFish movement methods 2177 """ 2178 2179 _index = -1 2180
2181 - def _nav_check(self):
2182 """ 2183 implemented by subclass; must return True if underlying structure meets need 2184 """ 2185 raise NotImplementedError()
2186
2187 - def _get_index(self, direction, n=1, start=None):
2188 """ 2189 returns index of next available record towards direction 2190 """ 2191 if start is not None: 2192 index = start 2193 else: 2194 index = self._index 2195 if direction == 'reverse': 2196 move = -1 * n 2197 limit = 0 2198 index += move 2199 if index < limit: 2200 return -1 2201 else: 2202 return index 2203 elif direction == 'forward': 2204 move = +1 * n 2205 limit = len(self) - 1 2206 index += move 2207 if index > limit: 2208 return len(self) 2209 else: 2210 return index 2211 else: 2212 raise ValueError("direction should be 'forward' or 'reverse', not %r" % direction)
2213 2214 @property
2215 - def bof(self):
2216 """ 2217 returns True if no more usable records towards the beginning of the table 2218 """ 2219 self._nav_check() 2220 index = self._get_index('reverse') 2221 return index == -1
2222
2223 - def bottom(self):
2224 """ 2225 sets record index to bottom of table (end of table) 2226 """ 2227 self._nav_check() 2228 self._index = len(self) 2229 return self._index
2230 2231 @property
2232 - def current_record(self):
2233 """ 2234 returns current record (deleted or not) 2235 """ 2236 self._nav_check() 2237 index = self._index 2238 if index < 0: 2239 return RecordVaporWare('bof', self) 2240 elif index >= len(self): 2241 return RecordVaporWare('eof', self) 2242 return self[index]
2243 2244 @property
2245 - def current(self):
2246 """ 2247 returns current index 2248 """ 2249 self._nav_check() 2250 return self._index
2251 2252 @property
2253 - def eof(self):
2254 """ 2255 returns True if no more usable records towards the end of the table 2256 """ 2257 self._nav_check() 2258 index = self._get_index('forward') 2259 return index == len(self)
2260 2261 @property
2262 - def first_record(self):
2263 """ 2264 returns first available record (does not move index) 2265 """ 2266 self._nav_check() 2267 index = self._get_index('forward', start=-1) 2268 if -1 < index < len(self): 2269 return self[index] 2270 else: 2271 return RecordVaporWare('bof', self)
2272
2273 - def goto(self, where):
2274 """ 2275 changes the record pointer to the first matching (deleted) record 2276 where should be either an integer, or 'top' or 'bottom'. 2277 top -> before first record 2278 bottom -> after last record 2279 """ 2280 self._nav_check() 2281 max = len(self) 2282 if isinstance(where, baseinteger): 2283 if not -max <= where < max: 2284 raise IndexError("Record %d does not exist" % where) 2285 if where < 0: 2286 where += max 2287 self._index = where 2288 return self._index 2289 move = getattr(self, where, None) 2290 if move is None: 2291 raise DbfError("unable to go to %r" % where) 2292 return move()
2293 2294 @property
2295 - def last_record(self):
2296 """ 2297 returns last available record (does not move index) 2298 """ 2299 self._nav_check() 2300 index = self._get_index('reverse', start=len(self)) 2301 if -1 < index < len(self): 2302 return self[index] 2303 else: 2304 return RecordVaporWare('bof', self)
2305 2306 @property
2307 - def next_record(self):
2308 """ 2309 returns next available record (does not move index) 2310 """ 2311 self._nav_check() 2312 index = self._get_index('forward') 2313 if -1 < index < len(self): 2314 return self[index] 2315 else: 2316 return RecordVaporWare('eof', self)
2317 2318 @property
2319 - def prev_record(self):
2320 """ 2321 returns previous available record (does not move index) 2322 """ 2323 self._nav_check() 2324 index = self._get_index('reverse') 2325 if -1 < index < len(self): 2326 return self[index] 2327 else: 2328 return RecordVaporWare('bof', self)
2329
2330 - def skip(self, n=1):
2331 """ 2332 move index to the next nth available record 2333 """ 2334 self._nav_check() 2335 if n < 0: 2336 n *= -1 2337 direction = 'reverse' 2338 else: 2339 direction = 'forward' 2340 self._index = index = self._get_index(direction, n) 2341 if index < 0: 2342 raise Bof() 2343 elif index >= len(self): 2344 raise Eof() 2345 else: 2346 return index
2347
2348 - def top(self):
2349 """ 2350 sets record index to top of table (beginning of table) 2351 """ 2352 self._nav_check() 2353 self._index = -1 2354 return self._index
2355
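Illustrative sketch only: _Navigation expects a subclass to supply __len__, __getitem__, and _nav_check. The tiny in-memory sequence below (a hypothetical Rolodex class) exists purely to show the movement methods; the import path is an assumption.

    from dbf.ver_32 import _Navigation

    class Rolodex(_Navigation):
        "hypothetical: a plain list wrapped with the VPFish movement methods"
        def __init__(self, items):
            self._items = list(items)
        def _nav_check(self):
            return True                 # nothing to verify for an in-memory list
        def __len__(self):
            return len(self._items)
        def __getitem__(self, index):
            return self._items[index]

    r = Rolodex(['amy', 'bob', 'cyn'])
    r.top()                  # park index before the first item (-1)
    r.skip()                 # -> 0
    print(r.current_record)  # prints amy
    r.bottom()               # park index past the last item
    r.skip(-1)               # -> 2 (last item)
    print(r.eof)             # True -- no usable records beyond the current one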
2356 2357 -class Record(object):
2358 """ 2359 Provides routines to extract and save data within the fields of a 2360 dbf record. 2361 """ 2362 2363 __slots__ = ('_recnum', '_meta', '_data', '_old_data', '_dirty', 2364 '_memos', '_write_to_disk', '__weakref__') 2365
2366 - def __new__(cls, recnum, layout, kamikaze=b'', _fromdisk=False):
2367 """ 2368 record = ascii array of entire record; 2369 layout=record specification; 2370 memo = memo object for table 2371 """ 2372 record = object.__new__(cls) 2373 record._dirty = False 2374 record._recnum = recnum 2375 record._meta = layout 2376 record._memos = {} 2377 record._write_to_disk = True 2378 record._old_data = None 2379 header = layout.header 2380 record._data = layout.blankrecord[:] 2381 if kamikaze and len(record._data) != len(kamikaze): 2382 raise BadDataError("record data is not the correct length (should be %r, not %r)" % 2383 (len(record._data), len(kamikaze)), data=kamikaze[:]) 2384 if recnum == -1: # not a disk-backed record 2385 return record 2386 elif type(kamikaze) == array: 2387 record._data = kamikaze[:] 2388 elif type(kamikaze) == bytes: 2389 if kamikaze: 2390 record._data = array('B', kamikaze) 2391 else: 2392 raise BadDataError("%r recieved for record data" % kamikaze) 2393 if record._data[0] == NULL: 2394 record._data[0] = SPACE 2395 if record._data[0] not in (SPACE, ASTERISK): 2396 raise DbfError("record data not correct -- first character should be a ' ' or a '*'.") 2397 if not _fromdisk and layout.location == ON_DISK: 2398 record._update_disk() 2399 return record
2400
2401 - def __contains__(self, value):
2402 for field in self._meta.user_fields: 2403 if self[field] == value: 2404 return True 2405 return False
2406
2407 - def __enter__(self):
2408 if not self._write_to_disk: 2409 raise DbfError("`with record` is not reentrant") 2410 self._start_flux() 2411 return self
2412
2413 - def __eq__(self, other):
2414 if not isinstance(other, (Record, RecordTemplate, dict, tuple)): 2415 return NotImplemented 2416 if isinstance(other, (Record, RecordTemplate)): 2417 if field_names(self) != field_names(other): 2418 return False 2419 for field in self._meta.user_fields: 2420 s_value, o_value = self[field], other[field] 2421 if s_value is not o_value and s_value != o_value: 2422 return False 2423 elif isinstance(other, dict): 2424 if sorted(field_names(self)) != sorted(other.keys()): 2425 return False 2426 for field in self._meta.user_fields: 2427 s_value, o_value = self[field], other[field] 2428 if s_value is not o_value and s_value != o_value: 2429 return False 2430 else: # tuple 2431 if len(self) != len(other): 2432 return False 2433 for s_value, o_value in zip(self, other): 2434 if s_value is not o_value and s_value != o_value: 2435 return False 2436 return True
2437
2438 - def __exit__(self, *args):
2439 if args == (None, None, None): 2440 self._commit_flux() 2441 else: 2442 self._rollback_flux()
2443
2444 - def __iter__(self):
2445 return (self[field] for field in self._meta.user_fields)
2446
2447 - def __getattr__(self, name):
2448 if name[0:2] == '__' and name[-2:] == '__': 2449 raise AttributeError('Method %s is not implemented.' % name) 2450 if not name in self._meta.fields: 2451 raise FieldMissingError(name) 2452 if name in self._memos: 2453 return self._memos[name] 2454 try: 2455 index = self._meta.fields.index(name) 2456 value = self._retrieve_field_value(index, name) 2457 return value 2458 except DbfError: 2459 error = sys.exc_info()[1] 2460 error.message = "field --%s-- is %s -> %s" % (name, self._meta.fieldtypes[self._meta[name][TYPE]]['Type'], error.message) 2461 raise
2462
2463 - def __getitem__(self, item):
2464 if isinstance(item, baseinteger): 2465 fields = self._meta.user_fields 2466 field_count = len(fields) 2467 if not -field_count <= item < field_count: 2468 raise NotFoundError("Field offset %d is not in record" % item) 2469 field = fields[item] 2470 if field in self._memos: 2471 return self._memos[field] 2472 return self[field] 2473 elif isinstance(item, slice): 2474 sequence = [] 2475 if isinstance(item.start, basestring) or isinstance(item.stop, basestring): 2476 field_names = dbf.field_names(self) 2477 start, stop, step = item.start, item.stop, item.step 2478 if start not in field_names or stop not in field_names: 2479 raise MissingFieldError("Either %r or %r (or both) are not valid field names" % (start, stop)) 2480 if step is not None and not isinstance(step, baseinteger): 2481 raise DbfError("step value must be an int, not %r" % type(step)) 2482 start = field_names.index(start) 2483 stop = field_names.index(stop) + 1 2484 item = slice(start, stop, step) 2485 for index in self._meta.fields[item]: 2486 sequence.append(self[index]) 2487 return sequence 2488 elif isinstance(item, basestring): 2489 return self.__getattr__(item) 2490 else: 2491 raise TypeError("%r is not a field name" % item)
2492
2493 - def __len__(self):
2494 return self._meta.user_field_count
2495
2496 - def __ne__(self, other):
2497 if not isinstance(other, (Record, RecordTemplate, dict, tuple)): 2498 return NotImplemented 2499 return not self == other
2500
2501 - def __setattr__(self, name, value):
2502 if name in self.__slots__: 2503 object.__setattr__(self, name, value) 2504 return 2505 if self._meta.status != READ_WRITE: 2506 raise DbfError("%s not in read/write mode" % self._meta.filename) 2507 elif self._write_to_disk: 2508 raise DbfError("unable to modify fields individually except in `with` or `Process()`") 2509 elif not name in self._meta.fields: 2510 raise FieldMissingError(name) 2511 if name in self._meta.memofields: 2512 self._memos[name] = value 2513 self._dirty = True 2514 return 2515 index = self._meta.fields.index(name) 2516 try: 2517 self._update_field_value(index, name, value) 2518 except DbfError: 2519 error = sys.exc_info()[1] 2520 fielddef = self._meta[name] 2521 message = "%s (%s) = %r --> %s" % (name, self._meta.fieldtypes[fielddef[TYPE]]['Type'], value, error.args) 2522 data = name 2523 err_cls = error.__class__ 2524 raise err_cls(message, data)
2525
2526 - def __setitem__(self, name, value):
2527 if self._meta.status != READ_WRITE: 2528 raise DbfError("%s not in read/write mode" % self._meta.filename) 2529 if self._write_to_disk: 2530 raise DbfError("unable to modify fields individually except in `with` or `Process()`") 2531 if isinstance(name, basestring): 2532 self.__setattr__(name, value) 2533 elif isinstance(name, baseinteger): 2534 self.__setattr__(self._meta.fields[name], value) 2535 elif isinstance(name, slice): 2536 sequence = [] 2537 field_names = dbf.field_names(self) 2538 if isinstance(name.start, basestring) or isinstance(name.stop, basestring): 2539 start, stop, step = name.start, name.stop, name.step 2540 if start not in field_names or stop not in field_names: 2541 raise MissingFieldError("Either %r or %r (or both) are not valid field names" % (start, stop)) 2542 if step is not None and not isinstance(step, baseinteger): 2543 raise DbfError("step value must be an int, not %r" % type(step)) 2544 start = field_names.index(start) 2545 stop = field_names.index(stop) + 1 2546 name = slice(start, stop, step) 2547 for field in self._meta.fields[name]: 2548 sequence.append(field) 2549 if len(sequence) != len(value): 2550 raise DbfError("length of slices not equal") 2551 for field, val in zip(sequence, value): 2552 self[field] = val 2553 else: 2554 raise TypeError("%s is not a field name" % name)
2555
2556 - def __str__(self):
2557 result = [] 2558 for seq, field in enumerate(field_names(self)): 2559 result.append("%3d - %-10s: %r" % (seq, field, self[field])) 2560 return '\n'.join(result)
2561
2562 - def __repr__(self):
2563 return self._data.tobytes().decode('latin1')
2564
2565 - def _commit_flux(self):
2566 """ 2567 stores field updates to disk; if any errors restores previous contents and propogates exception 2568 """ 2569 if self._write_to_disk: 2570 raise DbfError("record not in flux") 2571 try: 2572 self._write() 2573 except Exception: 2574 exc = sys.exc_info()[1] 2575 self._data[:] = self._old_data 2576 self._update_disk(data=self._old_data) 2577 raise DbfError("unable to write updates to disk, original data restored: %r" % (exc,)) 2578 self._memos.clear() 2579 self._old_data = None 2580 self._write_to_disk = True 2581 self._reindex_record()
2582 2583 @classmethod
2584 - def _create_blank_data(cls, layout):
2585 """ 2586 creates a blank record data chunk 2587 """ 2588 record = object.__new__(cls) 2589 record._dirty = False 2590 record._recnum = -1 2591 record._meta = layout 2592 record._data = array('B', b' ' * layout.header.record_length) 2593 layout.memofields = [] 2594 signature = [layout.table().codepage.name] 2595 for index, name in enumerate(layout.fields): 2596 if name == '_nullflags': 2597 record._data[layout['_nullflags'][START]:layout['_nullflags'][END]] = array('B', [0] * layout['_nullflags'][LENGTH]) 2598 for index, name in enumerate(layout.fields): 2599 signature.append(name) 2600 if name != '_nullflags': 2601 type = FieldType(layout[name][TYPE]) 2602 start = layout[name][START] 2603 size = layout[name][LENGTH] 2604 end = layout[name][END] 2605 blank = layout.fieldtypes[type]['Blank'] 2606 record._data[start:end] = array('B', blank(size)) 2607 if layout[name][TYPE] in layout.memo_types: 2608 layout.memofields.append(name) 2609 decimals = layout[name][DECIMALS] 2610 signature[-1] = '_'.join([str(x) for x in (signature[-1], type.symbol, size, decimals)]) 2611 layout.blankrecord = record._data[:] 2612 data_types = [] 2613 for fieldtype, defs in sorted(layout.fieldtypes.items()): 2614 if fieldtype != _NULLFLAG: # ignore the nullflags field 2615 data_types.append("%s_%s_%s" % (fieldtype.symbol, defs['Empty'], defs['Class'])) 2616 layout.record_sig = ('___'.join(signature), '___'.join(data_types))
2617
2618 - def _reindex_record(self):
2619 """ 2620 rerun all indices with this record 2621 """ 2622 if self._meta.status == CLOSED: 2623 raise DbfError("%s is closed; cannot alter indices" % self._meta.filename) 2624 elif not self._write_to_disk: 2625 raise DbfError("unable to reindex record until it is written to disk") 2626 for dbfindex in self._meta.table()._indexen: 2627 dbfindex(self)
2628
2629 - def _retrieve_field_value(self, index, name):
2630 """ 2631 calls appropriate routine to convert value stored in field from array 2632 """ 2633 fielddef = self._meta[name] 2634 flags = fielddef[FLAGS] 2635 nullable = flags & NULLABLE and '_nullflags' in self._meta 2636 binary = flags & BINARY 2637 if nullable: 2638 byte, bit = divmod(index, 8) 2639 null_def = self._meta['_nullflags'] 2640 null_data = self._data[null_def[START]:null_def[END]] 2641 try: 2642 if null_data[byte] >> bit & 1: 2643 return Null 2644 except IndexError: 2645 print(null_data) 2646 print(index) 2647 print(byte, bit) 2648 print(len(self._data), self._data) 2649 print(null_def) 2650 print(null_data) 2651 raise 2652 2653 record_data = self._data[fielddef[START]:fielddef[END]] 2654 field_type = fielddef[TYPE] 2655 retrieve = self._meta.fieldtypes[field_type]['Retrieve'] 2656 datum = retrieve(record_data, fielddef, self._meta.memo, self._meta.decoder) 2657 return datum
2658
2659 - def _rollback_flux(self):
2660 """ 2661 discards all changes since ._start_flux() 2662 """ 2663 if self._write_to_disk: 2664 raise DbfError("record not in flux") 2665 self._data = self._old_data 2666 self._old_data = None 2667 self._memos.clear() 2668 self._write_to_disk = True 2669 self._write()
2670
2671 - def _start_flux(self):
2672 """ 2673 Allows record.field_name = ... and record[...] = ...; must use ._commit_flux() to commit changes 2674 """ 2675 if self._meta.status == CLOSED: 2676 raise DbfError("%s is closed; cannot modify record" % self._meta.filename) 2677 elif self._recnum < 0: 2678 raise DbfError("record has been packed; unable to update") 2679 elif not self._write_to_disk: 2680 raise DbfError("record already in a state of flux") 2681 self._old_data = self._data[:] 2682 self._write_to_disk = False
2683
2684 - def _update_field_value(self, index, name, value):
2685 """ 2686 calls appropriate routine to convert value to bytes, and save it in record 2687 """ 2688 fielddef = self._meta[name] 2689 field_type = fielddef[TYPE] 2690 flags = fielddef[FLAGS] 2691 binary = flags & BINARY 2692 nullable = flags & NULLABLE and '_nullflags' in self._meta 2693 update = self._meta.fieldtypes[field_type]['Update'] 2694 if nullable: 2695 byte, bit = divmod(index, 8) 2696 null_def = self._meta['_nullflags'] 2697 null_data = self._data[null_def[START]:null_def[END]] 2698 if value is Null: 2699 null_data[byte] |= 1 << bit 2700 value = None 2701 else: 2702 null_data[byte] &= 0xff ^ 1 << bit 2703 # null_data = array('B', [chr(n) for n in null_data]) 2704 self._data[null_def[START]:null_def[END]] = null_data 2705 if value is not Null: 2706 bytes = array('B', update(value, fielddef, self._meta.memo, self._meta.input_decoder, self._meta.encoder)) 2707 size = fielddef[LENGTH] 2708 if len(bytes) > size: 2709 raise DataOverflowError("tried to store %d bytes in %d byte field" % (len(bytes), size)) 2710 blank = array('B', b' ' * size) 2711 start = fielddef[START] 2712 end = start + size 2713 blank[:len(bytes)] = bytes[:] 2714 self._data[start:end] = blank[:] 2715 self._dirty = True
2716
2717 - def _update_disk(self, location='', data=None):
2718 layout = self._meta 2719 if self._recnum < 0: 2720 raise DbfError("cannot update a packed record") 2721 if layout.location == ON_DISK: 2722 header = layout.header 2723 if location == '': 2724 location = self._recnum * header.record_length + header.start 2725 if data is None: 2726 data = self._data 2727 layout.dfd.seek(location) 2728 layout.dfd.write(data) 2729 self._dirty = False 2730 table = layout.table() 2731 if table is not None: # is None when table is being destroyed 2732 for index in table._indexen: 2733 index(self)
2734
2735 - def _write(self):
2736 for field, value in self._memos.items(): 2737 index = self._meta.fields.index(field) 2738 self._update_field_value(index, field, value) 2739 self._update_disk()
2740
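Usage sketch (not part of the module source): the _start_flux/_commit_flux/_rollback_flux trio above is what the `with record:` protocol drives via __enter__/__exit__. Here `rec` stands for a Record obtained from an open read/write table, and `name`/`age` are hypothetical fields in it.

    # rec: an existing Record from an open, read/write table (hypothetical fields)
    with rec:                  # __enter__ -> _start_flux(): snapshots the record data
        rec.name = 'Ethan'     # attribute/item assignment is only allowed while in flux
        rec.age = 99
    # a clean exit runs _commit_flux(): memos and data are written to disk and the
    # record is reindexed; an exception inside the block runs _rollback_flux() and
    # the snapshot is restored instead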
2741 2742 -class RecordTemplate(object):
2743 """ 2744 Provides routines to mimic a dbf record. 2745 """ 2746 2747 __slots__ = ('_meta', '_data', '_old_data', '_memos', '_write_to_disk', '__weakref__') 2748
2749 - def _commit_flux(self):
2750 """ 2751 Flushes field updates to disk 2752 If any errors restores previous contents and raises `DbfError` 2753 """ 2754 if self._write_to_disk: 2755 raise DbfError("record not in flux") 2756 self._memos.clear() 2757 self._old_data = None 2758 self._write_to_disk = True
2759
2760 - def _retrieve_field_value(self, index, name):
2761 """ 2762 Calls appropriate routine to convert value stored in field from 2763 array 2764 """ 2765 fielddef = self._meta[name] 2766 flags = fielddef[FLAGS] 2767 nullable = flags & NULLABLE and '_nullflags' in self._meta 2768 binary = flags & BINARY 2769 if nullable: 2770 byte, bit = divmod(index, 8) 2771 null_def = self._meta['_nullflags'] 2772 null_data = self._data[null_def[START]:null_def[END]] 2773 if null_data[byte] >> bit & 1: 2774 return Null 2775 record_data = self._data[fielddef[START]:fielddef[END]] 2776 field_type = fielddef[TYPE] 2777 retrieve = self._meta.fieldtypes[field_type]['Retrieve'] 2778 datum = retrieve(record_data, fielddef, self._meta.memo, self._meta.decoder) 2779 return datum
2780
2781 - def _rollback_flux(self):
2782 """ 2783 discards all changes since ._start_flux() 2784 """ 2785 if self._write_to_disk: 2786 raise DbfError("template not in flux") 2787 self._data = self._old_data 2788 self._old_data = None 2789 self._memos.clear() 2790 self._write_to_disk = True
2791
2792 - def _start_flux(self):
2793 """ 2794 Allows record.field_name = ... and record[...] = ...; must use ._commit_flux() to commit changes 2795 """ 2796 if not self._write_to_disk: 2797 raise DbfError("template already in a state of flux") 2798 self._old_data = self._data[:] 2799 self._write_to_disk = False
2800
2801 - def _update_field_value(self, index, name, value):
2802 """ 2803 calls appropriate routine to convert value to ascii bytes, and save it in record 2804 """ 2805 fielddef = self._meta[name] 2806 field_type = fielddef[TYPE] 2807 flags = fielddef[FLAGS] 2808 binary = flags & BINARY 2809 nullable = flags & NULLABLE and '_nullflags' in self._meta 2810 update = self._meta.fieldtypes[field_type]['Update'] 2811 if nullable: 2812 byte, bit = divmod(index, 8) 2813 null_def = self._meta['_nullflags'] 2814 null_data = self._data[null_def[START]:null_def[END]] 2815 if value is Null: 2816 null_data[byte] |= 1 << bit 2817 value = None 2818 else: 2819 null_data[byte] &= 0xff ^ 1 << bit 2820 # null_data = array('B', [chr(n) for n in null_data]) 2821 self._data[null_def[START]:null_def[END]] = null_data 2822 if value is not Null: 2823 bytes = array('B', update(value, fielddef, self._meta.memo, self._meta.input_decoder, self._meta.encoder)) 2824 size = fielddef[LENGTH] 2825 if len(bytes) > size: 2826 raise DataOverflowError("tried to store %d bytes in %d byte field" % (len(bytes), size)) 2827 blank = array('B', b' ' * size) 2828 start = fielddef[START] 2829 end = start + size 2830 blank[:len(bytes)] = bytes[:] 2831 self._data[start:end] = blank[:]
2832
2833 - def __new__(cls, layout, original_record=None, defaults=None):
2834 """ 2835 record = ascii array of entire record; layout=record specification 2836 """ 2837 sig = layout.record_sig 2838 if sig not in _Template_Records: 2839 table = layout.table() 2840 _Template_Records[sig] = table.new( 2841 ':%s:' % layout.filename, 2842 default_data_types=table._meta._default_data_types, 2843 field_data_types=table._meta._field_data_types, on_disk=False 2844 )._meta 2845 layout = _Template_Records[sig] 2846 record = object.__new__(cls) 2847 record._write_to_disk = True 2848 record._meta = layout 2849 record._memos = {} 2850 for name in layout.memofields: 2851 field_type = layout[name][TYPE] 2852 record._memos[name] = layout.fieldtypes[field_type]['Empty']() 2853 header = layout.header 2854 if original_record is None: 2855 record._data = layout.blankrecord[:] 2856 else: 2857 record._data = original_record._data[:] 2858 for name in layout.memofields: 2859 record._memos[name] = original_record[name] 2860 for field in field_names(defaults or {}): 2861 record[field] = defaults[field] 2862 record._old_data = record._data[:] 2863 return record
2864
2865 - def __contains__(self, key):
2866 return key in self._meta.user_fields
2867
2868 - def __eq__(self, other):
2869 if not isinstance(other, (Record, RecordTemplate, dict, tuple)): 2870 return NotImplemented 2871 if isinstance(other, (Record, RecordTemplate)): 2872 if field_names(self) != field_names(other): 2873 return False 2874 for field in self._meta.user_fields: 2875 s_value, o_value = self[field], other[field] 2876 if s_value is not o_value and s_value != o_value: 2877 return False 2878 elif isinstance(other, dict): 2879 if sorted(field_names(self)) != sorted(other.keys()): 2880 return False 2881 for field in self._meta.user_fields: 2882 s_value, o_value = self[field], other[field] 2883 if s_value is not o_value and s_value != o_value: 2884 return False 2885 else: # tuple 2886 if len(self) != len(other): 2887 return False 2888 for s_value, o_value in zip(self, other): 2889 if s_value is not o_value and s_value != o_value: 2890 return False 2891 return True
2892
2893 - def __iter__(self):
2894 return (self[field] for field in self._meta.user_fields)
2895
2896 - def __getattr__(self, name):
2897 if name[0:2] == '__' and name[-2:] == '__': 2898 raise AttributeError('Method %s is not implemented.' % name) 2899 if not name in self._meta.fields: 2900 raise FieldMissingError(name) 2901 if name in self._memos: 2902 return self._memos[name] 2903 try: 2904 index = self._meta.fields.index(name) 2905 value = self._retrieve_field_value(index, name) 2906 return value 2907 except DbfError: 2908 error = sys.exc_info()[1] 2909 error.message = "field --%s-- is %s -> %s" % (name, self._meta.fieldtypes[self._meta[name][TYPE]]['Type'], error.message) 2910 raise
2911
2912 - def __getitem__(self, item):
2913 fields = self._meta.user_fields 2914 if isinstance(item, baseinteger): 2915 field_count = len(fields) 2916 if not -field_count <= item < field_count: 2917 raise NotFoundError("Field offset %d is not in record" % item) 2918 field = fields[item] 2919 if field in self._memos: 2920 return self._memos[field] 2921 return self[field] 2922 elif isinstance(item, slice): 2923 sequence = [] 2924 if isinstance(item.start, basestring) or isinstance(item.stop, basestring): 2925 start, stop, step = item.start, item.stop, item.step 2926 if start not in fields or stop not in fields: 2927 raise MissingFieldError("Either %r or %r (or both) are not valid field names" % (start, stop)) 2928 if step is not None and not isinstance(step, baseinteger): 2929 raise DbfError("step value must be an int, not %r" % type(step)) 2930 start = fields.index(start) 2931 stop = fields.index(stop) + 1 2932 item = slice(start, stop, step) 2933 for index in self._meta.fields[item]: 2934 sequence.append(self[index]) 2935 return sequence 2936 elif isinstance(item, basestring): 2937 return self.__getattr__(item) 2938 else: 2939 raise TypeError("%r is not a field name" % item)
2940
2941 - def __len__(self):
2942 return self._meta.user_field_count
2943
2944 - def __ne__(self, other):
2945 if not isinstance(other, (Record, RecordTemplate, dict, tuple)): 2946 return NotImplemented 2947 return not self == other
2948
2949 - def __setattr__(self, name, value):
2950 if name in self.__slots__: 2951 object.__setattr__(self, name, value) 2952 return 2953 if not name in self._meta.fields: 2954 raise FieldMissingError(name) 2955 if name in self._meta.memofields: 2956 self._memos[name] = value 2957 self._dirty = True 2958 return 2959 index = self._meta.fields.index(name) 2960 try: 2961 self._update_field_value(index, name, value) 2962 except DbfError: 2963 error = sys.exc_info()[1] 2964 fielddef = self._meta[name] 2965 message = "%s (%s) = %r --> %s" % (name, self._meta.fieldtypes[fielddef[TYPE]]['Type'], value, error.message) 2966 data = name 2967 err_cls = error.__class__ 2968 raise err_cls(message, data)
2969
2970 - def __setitem__(self, name, value):
2971 if isinstance(name, basestring): 2972 self.__setattr__(name, value) 2973 elif isinstance(name, baseinteger): 2974 self.__setattr__(self._meta.fields[name], value) 2975 elif isinstance(name, slice): 2976 sequence = [] 2977 field_names = dbf.field_names(self) 2978 if isinstance(name.start, basestring) or isinstance(name.stop, basestring): 2979 start, stop, step = name.start, name.stop, name.step 2980 if start not in field_names or stop not in field_names: 2981 raise MissingFieldError("Either %r or %r (or both) are not valid field names" % (start, stop)) 2982 if step is not None and not isinstance(step, baseinteger): 2983 raise DbfError("step value must be an int, not %r" % type(step)) 2984 start = field_names.index(start) 2985 stop = field_names.index(stop) + 1 2986 name = slice(start, stop, step) 2987 for field in self._meta.fields[name]: 2988 sequence.append(field) 2989 if len(sequence) != len(value): 2990 raise DbfError("length of slices not equal") 2991 for field, val in zip(sequence, value): 2992 self[field] = val 2993 else: 2994 raise TypeError("%s is not a field name" % name)
2995 2996
2997 - def __repr__(self):
2998 return self._data.tobytes()
2999
3000 - def __str__(self):
3001 result = [] 3002 for seq, field in enumerate(field_names(self)): 3003 result.append("%3d - %-10s: %r" % (seq, field, self[field])) 3004 return '\n'.join(result)
3005
3006 3007 -class RecordVaporWare(object):
3008 """ 3009 Provides routines to mimic a dbf record, but all values are non-existent. 3010 """ 3011 3012 __slots__ = ('_recno', '_sequence') 3013
3014 - def __new__(cls, position, sequence):
3015 """ 3016 record = ascii array of entire record 3017 layout=record specification 3018 memo = memo object for table 3019 """ 3020 if position not in ('bof', 'eof'): 3021 raise ValueError("position should be 'bof' or 'eof', not %r" % position) 3022 vapor = object.__new__(cls) 3023 vapor._recno = (-1, None)[position == 'eof'] 3024 vapor._sequence = sequence 3025 return vapor
3026
3027 - def __contains__(self, key):
3028 return False
3029
3030 - def __eq__(self, other):
3031 if not isinstance(other, (Record, RecordTemplate, RecordVaporWare, dict, tuple)): 3032 return NotImplemented 3033 return False
3034 3035
3036 - def __getattr__(self, name):
3037 if name[0:2] == '__' and name[-2:] == '__': 3038 raise AttributeError('Method %s is not implemented.' % name) 3039 else: 3040 return Vapor
3041
3042 - def __getitem__(self, item):
3043 if isinstance(item, baseinteger): 3044 return Vapor 3045 elif isinstance(item, slice): 3046 raise TypeError('slice notation not allowed on Vapor records') 3047 elif isinstance(item, basestring): 3048 return self.__getattr__(item) 3049 else: 3050 raise TypeError("%r is not a field name" % item)
3051
3052 - def __len__(self):
3053 raise TypeError("Vapor records have no length")
3054
3055 - def __ne__(self, other):
3056 if not isinstance(other, (Record, RecordTemplate, RecordVaporWare, dict, tuple)): 3057 return NotImplemented 3058 return True
3059
3060 - def __bool__(self):
3061 """ 3062 Vapor records are always False 3063 """ 3064 return False
3065
3066 - def __setattr__(self, name, value):
3067 if name in self.__slots__: 3068 object.__setattr__(self, name, value) 3069 return 3070 raise TypeError("cannot change Vapor record")
3071
3072 - def __setitem__(self, name, value):
3073 if isinstance(name, (basestring, baseinteger)): 3074 raise TypeError("cannot change Vapor record") 3075 elif isinstance(name, slice): 3076 raise TypeError("slice notation not allowed on Vapor records") 3077 else: 3078 raise TypeError("%s is not a field name" % name)
3079
3080 - def __repr__(self):
3081 return "RecordVaporWare(position=%r, sequence=%r)" % (('bof', 'eof')[recno(self) is None], self._sequence)
3082
3083 - def __str__(self):
3084 return 'VaporRecord(%r)' % recno(self)
3085 3086 @property
3087 - def _recnum(self):
3088 if self._recno is None: 3089 return len(self._sequence) 3090 else: 3091 return self._recno
3092
3093 3094 -class _DbfMemo(object):
3095 """ 3096 Provides access to memo fields as dictionaries 3097 Must override _init, _get_memo, and _put_memo to 3098 store memo contents to disk 3099 """ 3100
3101 - def _init(self):
3102 """ 3103 Initialize disk file usage 3104 """
3105
3106 - def _get_memo(self, block):
3107 """ 3108 Retrieve memo contents from disk 3109 """
3110
3111 - def _put_memo(self, data):
3112 """ 3113 Store memo contents to disk 3114 """
3115
3116 - def _zap(self):
3117 """ 3118 Resets memo structure back to zero memos 3119 """ 3120 self.memory.clear() 3121 self.nextmemo = 1
3122
3123 - def __init__(self, meta):
3124 self.meta = meta 3125 self.memory = {} 3126 self.nextmemo = 1 3127 self._init() 3128 self.meta.newmemofile = False
3129
3130 - def get_memo(self, block):
3131 """ 3132 Gets the memo in block 3133 """ 3134 if self.meta.ignorememos or not block: 3135 return '' 3136 if self.meta.location == ON_DISK: 3137 return self._get_memo(block) 3138 else: 3139 return self.memory[block]
3140
3141 - def put_memo(self, data):
3142 """ 3143 Stores data in memo file, returns block number 3144 """ 3145 if self.meta.ignorememos or data == '': 3146 return 0 3147 if self.meta.location == IN_MEMORY: 3148 thismemo = self.nextmemo 3149 self.nextmemo += 1 3150 self.memory[thismemo] = data 3151 else: 3152 thismemo = self._put_memo(data) 3153 return thismemo
3154
3155 3156 -class _Db3Memo(_DbfMemo):
3157 """ 3158 dBase III specific 3159 """ 3160
3161 - def _init(self):
3162 self.meta.memo_size= 512 3163 self.record_header_length = 2 3164 if self.meta.location == ON_DISK and not self.meta.ignorememos: 3165 if self.meta.newmemofile: 3166 self.meta.mfd = open(self.meta.memoname, 'w+b') 3167 self.meta.mfd.write(pack_long_int(1) + b'\x00' * 508) 3168 else: 3169 try: 3170 self.meta.mfd = open(self.meta.memoname, 'r+b') 3171 self.meta.mfd.seek(0) 3172 next = self.meta.mfd.read(4) 3173 self.nextmemo = unpack_long_int(next) 3174 except Exception: 3175 exc = sys.exc_info()[1] 3176 raise DbfError("memo file appears to be corrupt: %r" % exc.args)
3177
3178 - def _get_memo(self, block):
3179 block = int(block) 3180 self.meta.mfd.seek(block * self.meta.memo_size) 3181 eom = -1 3182 data = b'' 3183 while eom == -1: 3184 newdata = self.meta.mfd.read(self.meta.memo_size) 3185 if not newdata: 3186 return data 3187 data += newdata 3188 eom = data.find(b'\x1a\x1a') 3189 return data[:eom]
3190
3191 - def _put_memo(self, data):
3192 data = data 3193 length = len(data) + self.record_header_length # room for two ^Z at end of memo 3194 blocks = length // self.meta.memo_size 3195 if length % self.meta.memo_size: 3196 blocks += 1 3197 thismemo = self.nextmemo 3198 self.nextmemo = thismemo + blocks 3199 self.meta.mfd.seek(0) 3200 self.meta.mfd.write(pack_long_int(self.nextmemo)) 3201 self.meta.mfd.seek(thismemo * self.meta.memo_size) 3202 self.meta.mfd.write(data) 3203 self.meta.mfd.write(b'\x1a\x1a') 3204 double_check = self._get_memo(thismemo) 3205 if len(double_check) != len(data): 3206 uhoh = open('dbf_memo_dump.err', 'wb') 3207 uhoh.write('thismemo: %d' % thismemo) 3208 uhoh.write('nextmemo: %d' % self.nextmemo) 3209 uhoh.write('saved: %d bytes' % len(data)) 3210 uhoh.write(data) 3211 uhoh.write('retrieved: %d bytes' % len(double_check)) 3212 uhoh.write(double_check) 3213 uhoh.close() 3214 raise DbfError("unknown error: memo not saved") 3215 return thismemo
3216
3217 - def _zap(self):
3218 if self.meta.location == ON_DISK and not self.meta.ignorememos: 3219 mfd = self.meta.mfd 3220 mfd.seek(0) 3221 mfd.truncate(0) 3222 mfd.write(pack_long_int(1) + b'\x00' * 508) 3223 mfd.flush()
3224
3225 -class _VfpMemo(_DbfMemo):
3226 """ 3227 Visual Foxpro 6 specific 3228 """ 3229
3230 - def _init(self):
3231 if self.meta.location == ON_DISK and not self.meta.ignorememos: 3232 self.record_header_length = 8 3233 if self.meta.newmemofile: 3234 if self.meta.memo_size == 0: 3235 self.meta.memo_size = 1 3236 elif 1 < self.meta.memo_size < 33: 3237 self.meta.memo_size *= 512 3238 self.meta.mfd = open(self.meta.memoname, 'w+b') 3239 nextmemo = 512 // self.meta.memo_size 3240 if nextmemo * self.meta.memo_size < 512: 3241 nextmemo += 1 3242 self.nextmemo = nextmemo 3243 self.meta.mfd.write(pack_long_int(nextmemo, bigendian=True) + b'\x00\x00' + \ 3244 pack_short_int(self.meta.memo_size, bigendian=True) + b'\x00' * 504) 3245 else: 3246 try: 3247 self.meta.mfd = open(self.meta.memoname, 'r+b') 3248 self.meta.mfd.seek(0) 3249 header = self.meta.mfd.read(512) 3250 self.nextmemo = unpack_long_int(header[:4], bigendian=True) 3251 self.meta.memo_size = unpack_short_int(header[6:8], bigendian=True) 3252 except Exception: 3253 exc = sys.exc_info()[1] 3254 raise DbfError("memo file appears to be corrupt: %r" % exc.args)
3255
3256 - def _get_memo(self, block):
3257 self.meta.mfd.seek(block * self.meta.memo_size) 3258 header = self.meta.mfd.read(8) 3259 length = unpack_long_int(header[4:], bigendian=True) 3260 return self.meta.mfd.read(length)
3261
3262 - def _put_memo(self, data):
3263 data = data 3264 self.meta.mfd.seek(0) 3265 thismemo = unpack_long_int(self.meta.mfd.read(4), bigendian=True) 3266 self.meta.mfd.seek(0) 3267 length = len(data) + self.record_header_length 3268 blocks = length // self.meta.memo_size 3269 if length % self.meta.memo_size: 3270 blocks += 1 3271 self.meta.mfd.write(pack_long_int(thismemo + blocks, bigendian=True)) 3272 self.meta.mfd.seek(thismemo * self.meta.memo_size) 3273 self.meta.mfd.write(b'\x00\x00\x00\x01' + pack_long_int(len(data), bigendian=True) + data) 3274 return thismemo
3275
3276 - def _zap(self):
3277 if self.meta.location == ON_DISK and not self.meta.ignorememos: 3278 mfd = self.meta.mfd 3279 mfd.seek(0) 3280 mfd.truncate(0) 3281 nextmemo = 512 // self.meta.memo_size 3282 if nextmemo * self.meta.memo_size < 512: 3283 nextmemo += 1 3284 self.nextmemo = nextmemo 3285 mfd.write(pack_long_int(nextmemo, bigendian=True) + b'\x00\x00' + \ 3286 pack_short_int(self.meta.memo_size, bigendian=True) + b'\x00' * 504) 3287 mfd.flush()
3288
3289 3290 -class DbfCsv(csv.Dialect):
3291 """ 3292 csv format for exporting tables 3293 """ 3294 delimiter = ',' 3295 doublequote = True 3296 escapechar = None 3297 lineterminator = '\n' 3298 quotechar = '"' 3299 skipinitialspace = True 3300 quoting = csv.QUOTE_NONNUMERIC
3301 csv.register_dialect('dbf', DbfCsv)
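Since the dialect is registered under the name 'dbf' when the module is imported, it can be used with the standard csv module directly; the import path and file name below are assumptions.

    import csv
    import dbf.ver_32          # importing the module registers the 'dbf' dialect

    with open('export.csv', 'w', newline='') as fh:
        writer = csv.writer(fh, dialect='dbf')
        writer.writerow(['name', 'age'])     # strings are quoted (QUOTE_NONNUMERIC)
        writer.writerow(['Ethan', 42])       # numbers are written unquoted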
3302 3303 3304 -class _DeadObject(object):
3305 """ 3306 used because you cannot weakref None 3307 """ 3308
3309 - def __bool__(self):
3310 return False
3311 3312 _DeadObject = _DeadObject() 3313 3314 3315 # Routines for saving, retrieving, and creating fields 3316 3317 VFPTIME = 1721425
3318 3319 -def pack_short_int(value, bigendian=False):
3320 """ 3321 Returns a two-bye integer from the value, or raises DbfError 3322 """ 3323 # 256 / 65,536 3324 if value > 65535: 3325 raise DataOverflowError("Maximum Integer size exceeded. Possible: 65535. Attempted: %d" % value) 3326 if bigendian: 3327 return struct.pack('>H', value) 3328 else: 3329 return struct.pack('<H', value)
3330
3331 -def pack_long_int(value, bigendian=False):
3332 """ 3333 Returns a four-bye integer from the value, or raises DbfError 3334 """ 3335 # 256 / 65,536 / 16,777,216 3336 if value > 4294967295: 3337 raise DataOverflowError("Maximum Integer size exceeded. Possible: 4294967295. Attempted: %d" % value) 3338 if bigendian: 3339 return struct.pack('>L', value) 3340 else: 3341 return struct.pack('<L', value)
3342
3343 -def pack_str(string):
3344 """ 3345 Returns an 11 byte, upper-cased, null padded string suitable for field names; 3346 raises DbfError if the string is bigger than 10 bytes 3347 """ 3348 if len(string) > 10: 3349 raise DbfError("Maximum string size is ten characters -- %s has %d characters" % (string, len(string))) 3350 return struct.pack('11s', string.upper())
3351
3352 -def unpack_short_int(bytes, bigendian=False):
3353 """ 3354 Returns the value in the two-byte integer passed in 3355 """ 3356 if bigendian: 3357 return struct.unpack('>H', bytes)[0] 3358 else: 3359 return struct.unpack('<H', bytes)[0]
3360
3361 -def unpack_long_int(bytes, bigendian=False):
3362 """ 3363 Returns the value in the four-byte integer passed in 3364 """ 3365 if bigendian: 3366 return int(struct.unpack('>L', bytes)[0]) 3367 else: 3368 return int(struct.unpack('<L', bytes)[0])
3369
3370 -def unpack_str(chars):
3371 """ 3372 Returns a normal, lower-cased string from a null-padded byte string 3373 """ 3374 field = struct.unpack('%ds' % len(chars), chars)[0] 3375 name = [] 3376 for ch in field: 3377 if ch == NULL: 3378 break 3379 name.append(ch) 3380 return bytes(name).lower()
3381
3382 -def scinot(value, decimals):
3383 """ 3384 return scientific notation with not more than decimals-1 decimal places 3385 """ 3386 value = str(value) 3387 sign = '' 3388 if value[0] in ('+-'): 3389 sign = value[0] 3390 if sign == '+': 3391 sign = '' 3392 value = value[1:] 3393 if 'e' in value: #7.8e-05 3394 e = value.find('e') 3395 if e - 1 <= decimals: 3396 return sign + value 3397 integer, mantissa, power = value[0], value[1:e], value[e+1:] 3398 mantissa = mantissa[:decimals] 3399 value = sign + integer + mantissa + 'e' + power 3400 return value 3401 integer, mantissa = value[0], value[1:] 3402 if integer == '0': 3403 for e, integer in enumerate(mantissa): 3404 if integer not in ('.0'): 3405 break 3406 mantissa = '.' + mantissa[e+1:] 3407 mantissa = mantissa[:decimals] 3408 value = sign + integer + mantissa + 'e-%03d' % e 3409 return value 3410 e = mantissa.find('.') 3411 mantissa = '.' + mantissa.replace('.','') 3412 mantissa = mantissa[:decimals] 3413 value = sign + integer + mantissa + 'e+%03d' % e 3414 return value
3415
3416 -def unsupported_type(something, *ignore):
3417 """ 3418 called if a data type is not supported for that style of table 3419 """ 3420 return something
3421
3422 -def retrieve_character(bytes, fielddef, memo, decoder):
3423 """ 3424 Returns the string in bytes as fielddef[CLASS] or fielddef[EMPTY] 3425 """ 3426 data = bytes.tobytes() 3427 if fielddef[FLAGS] & BINARY: 3428 return data 3429 data = fielddef[CLASS](decoder(data)[0]) 3430 if not data.strip(): 3431 cls = fielddef[EMPTY] 3432 if cls is NoneType: 3433 return None 3434 return cls(data) 3435 return data
3436
3437 -def update_character(string, fielddef, memo, decoder, encoder):
3438 """ 3439 returns the string as bytes (not unicode) as fielddef[CLASS] or fielddef[EMPTY] 3440 """ 3441 length = fielddef[LENGTH] 3442 if string == None: 3443 return length * b' ' 3444 if fielddef[FLAGS] & BINARY: 3445 if not isinstance(string, bytes): 3446 raise ValueError('binary field: %r not in bytes format' % string) 3447 return string 3448 else: 3449 if not isinstance(string, basestring): 3450 raise ValueError("unable to coerce %r(%r) to string" % (type(string), string)) 3451 string = encoder(string.strip())[0] 3452 return string
3453
3454 -def retrieve_currency(bytes, fielddef, *ignore):
3455 """ 3456 Returns the currency value in bytes 3457 """ 3458 value = struct.unpack('<q', bytes)[0] 3459 return fielddef[CLASS](("%de-4" % value).strip())
3460
3461 -def update_currency(value, *ignore):
3462 """ 3463 Returns the value to be stored in the record's disk data 3464 """ 3465 if value == None: 3466 value = 0 3467 currency = int(value * 10000) 3468 if not -9223372036854775808 < currency < 9223372036854775808: 3469 raise DataOverflowError("value %s is out of bounds" % value) 3470 return struct.pack('<q', currency)
3471
3472 -def retrieve_date(bytes, fielddef, *ignore):
3473 """ 3474 Returns the ascii coded date as fielddef[CLASS] or fielddef[EMPTY] 3475 """ 3476 text = bytes.tobytes() 3477 if text == b' ': 3478 cls = fielddef[EMPTY] 3479 if cls is NoneType: 3480 return None 3481 return cls() 3482 year = int(text[0:4]) 3483 month = int(text[4:6]) 3484 day = int(text[6:8]) 3485 return fielddef[CLASS](year, month, day)
3486
3487 -def update_date(moment, *ignore):
3488 """ 3489 Returns the Date or datetime.date object ascii-encoded (yyyymmdd) 3490 """ 3491 if moment == None: 3492 return b' ' 3493 return ("%04d%02d%02d" % moment.timetuple()[:3]).encode('ascii')
3494
3495 -def retrieve_double(bytes, fielddef, *ignore):
3496 """ 3497 Returns the double in bytes as fielddef[CLASS] ('default' == float) 3498 """ 3499 typ = fielddef[CLASS] 3500 if typ == 'default': 3501 typ = float 3502 return typ(struct.unpack('<d', bytes)[0])
3503
3504 -def update_double(value, *ignore):
3505 """ 3506 returns the value to be stored in the record's disk data 3507 """ 3508 if value == None: 3509 value = 0 3510 return struct.pack('<d', float(value))
3511
3512 -def retrieve_integer(bytes, fielddef, *ignore):
3513 """ 3514 Returns the binary number stored in bytes in little-endian 3515 format as fielddef[CLASS] 3516 """ 3517 typ = fielddef[CLASS] 3518 if typ == 'default': 3519 typ = int 3520 return typ(struct.unpack('<i', bytes)[0])
3521
3522 -def update_integer(value, *ignore):
3523 """ 3524 Returns value in little-endian binary format 3525 """ 3526 if value == None: 3527 value = 0 3528 try: 3529 value = int(value) 3530 except Exception: 3531 raise DbfError("incompatible type: %s(%s)" % (type(value), value)) 3532 if not -2147483648 < value < 2147483647: 3533 raise DataOverflowError("Integer size exceeded. Possible: -2,147,483,648..+2,147,483,647. Attempted: %d" % value) 3534 return struct.pack('<i', int(value))
3535
3536 -def retrieve_logical(bytes, fielddef, *ignore):
3537 """ 3538 Returns True if bytes is 't', 'T', 'y', or 'Y' 3539 None if '?' 3540 False otherwise 3541 """ 3542 cls = fielddef[CLASS] 3543 empty = fielddef[EMPTY] 3544 bytes = bytes.tobytes() 3545 if bytes in b'tTyY': 3546 return cls(True) 3547 elif bytes in b'fFnN': 3548 return cls(False) 3549 elif bytes in b'? ': 3550 if empty is NoneType: 3551 return None 3552 return empty() 3553 elif LOGICAL_BAD_IS_NONE: 3554 return None 3555 else: 3556 raise BadDataError('Logical field contained %r' % bytes) 3557 return typ(bytes)
3558
3559 -def update_logical(data, *ignore):
3560 """ 3561 Returns 'T' if logical is True, 'F' if False, '?' otherwise 3562 """ 3563 if data is Unknown or data is None or data is Null or data is Other: 3564 return b'?' 3565 if data == True: 3566 return b'T' 3567 if data == False: 3568 return b'F' 3569 raise ValueError("unable to automatically coerce %r to Logical" % data)
3570
3571 -def retrieve_memo(bytes, fielddef, memo, decoder):
3572 """ 3573 Returns the block of data from a memo file 3574 """ 3575 stringval = bytes.tobytes().strip() 3576 if not stringval or memo is None: 3577 cls = fielddef[EMPTY] 3578 if cls is NoneType: 3579 return None 3580 return cls() 3581 block = int(stringval) 3582 data = memo.get_memo(block) 3583 if fielddef[FLAGS] & BINARY: 3584 return data 3585 return fielddef[CLASS](decoder(data)[0])
3586
3587 -def update_memo(string, fielddef, memo, decoder, encoder):
3588 """ 3589 Writes string as a memo, returns the block number it was saved into 3590 """ 3591 if memo is None: 3592 raise DbfError('Memos are being ignored, unable to update') 3593 if fielddef[FLAGS] & BINARY: 3594 if string == None: 3595 string = b'' 3596 if not isinstance(string, bytes): 3597 raise ValueError('binary field: %r not in bytes format' % string) 3598 else: 3599 if string == None: 3600 string = '' 3601 if not isinstance(string, basestring): 3602 raise ValueError("unable to coerce %r(%r) to string" % (type(string), string)) 3603 string = encoder(string)[0] 3604 block = memo.put_memo(string) 3605 if block == 0: 3606 block = b'' 3607 return ("%*s" % (fielddef[LENGTH], block)).encode('ascii')
3608
3609 -def retrieve_numeric(bytes, fielddef, *ignore):
3610 """ 3611 Returns the number stored in bytes as integer if field spec for 3612 decimals is 0, float otherwise 3613 """ 3614 string = bytes.tobytes().replace(b'\x00', b'').strip() 3615 cls = fielddef[CLASS] 3616 if not string or string[0:1] == b'*': # value too big to store (Visual FoxPro idiocy) 3617 cls = fielddef[EMPTY] 3618 if cls is NoneType: 3619 return None 3620 return cls() 3621 if cls == 'default': 3622 if fielddef[DECIMALS] == 0: 3623 return int(string) 3624 else: 3625 return float(string) 3626 else: 3627 return cls(string)
3628
3629 -def update_numeric(value, fielddef, *ignore):
3630 """ 3631 returns value as ascii representation, rounding decimal 3632 portion as necessary 3633 """ 3634 if value == None: 3635 return fielddef[LENGTH] * b' ' 3636 try: 3637 value = float(value) 3638 except Exception: 3639 raise DbfError("incompatible type: %s(%s)" % (type(value), value)) 3640 decimalsize = fielddef[DECIMALS] 3641 totalsize = fielddef[LENGTH] 3642 if decimalsize: 3643 decimalsize += 1 3644 maxintegersize = totalsize - decimalsize 3645 integersize = len("%.0f" % floor(value)) 3646 if integersize > maxintegersize: 3647 if integersize != 1: 3648 raise DataOverflowError('Integer portion too big') 3649 string = scinot(value, decimalsize) 3650 if len(string) > totalsize: 3651 raise DataOverflowError('Value representation too long for field') 3652 return ("%*.*f" % (fielddef[LENGTH], fielddef[DECIMALS], value)).encode('ascii')
3653
3654 -def retrieve_vfp_datetime(bytes, fielddef, *ignore):
3655 """ 3656 returns the date/time stored in bytes; dates <= 01/01/1981 00:00:00 3657 may not be accurate; BC dates are nulled. 3658 """ 3659 # two four-byte integers store the date and time. 3660 # millesecords are discarded from time 3661 if bytes == array('B', [0] * 8): 3662 cls = fielddef[EMPTY] 3663 if cls is NoneType: 3664 return None 3665 return cls() 3666 cls = fielddef[CLASS] 3667 time = unpack_long_int(bytes[4:]) 3668 microseconds = (time % 1000) * 1000 3669 time = time // 1000 # int(round(time, -3)) // 1000 discard milliseconds 3670 hours = time // 3600 3671 mins = time % 3600 // 60 3672 secs = time % 3600 % 60 3673 time = datetime.time(hours, mins, secs, microseconds) 3674 possible = unpack_long_int(bytes[:4]) 3675 possible -= VFPTIME 3676 possible = max(0, possible) 3677 date = datetime.date.fromordinal(possible) 3678 return cls(date.year, date.month, date.day, time.hour, time.minute, time.second, time.microsecond)
3679
3680 -def update_vfp_datetime(moment, *ignore):
3681 """ 3682 Sets the date/time stored in moment 3683 moment must have fields: 3684 year, month, day, hour, minute, second, microsecond 3685 """ 3686 data = [0] * 8 3687 if moment: 3688 hour = moment.hour 3689 minute = moment.minute 3690 second = moment.second 3691 millisecond = moment.microsecond // 1000 # convert from millionths to thousandths 3692 time = ((hour * 3600) + (minute * 60) + second) * 1000 + millisecond 3693 data[4:] = update_integer(time) 3694 data[:4] = update_integer(moment.toordinal() + VFPTIME) 3695 return bytes(data)
3696
3697 -def retrieve_vfp_memo(bytes, fielddef, memo, decoder):
3698 """ 3699 Returns the block of data from a memo file 3700 """ 3701 if memo is None: 3702 block = 0 3703 else: 3704 block = struct.unpack('<i', bytes)[0] 3705 if not block: 3706 cls = fielddef[EMPTY] 3707 if cls is NoneType: 3708 return None 3709 return cls() 3710 data = memo.get_memo(block) 3711 if fielddef[FLAGS] & BINARY: 3712 return data 3713 return fielddef[CLASS](decoder(data)[0])
3714
3715 -def update_vfp_memo(string, fielddef, memo, decoder, encoder):
3716 """ 3717 Writes string as a memo, returns the block number it was saved into 3718 """ 3719 if memo is None: 3720 raise DbfError('Memos are being ignored, unable to update') 3721 if string == None: 3722 return struct.pack('<i', 0) 3723 if fielddef[FLAGS] & BINARY: 3724 # if string == None: 3725 # string = b'' 3726 if not isinstance(string, bytes): 3727 raise ValueError('binary field: %r not in bytes format' % string) 3728 string = bytes(string) 3729 else: 3730 # if string == None: 3731 # string = '' 3732 if not isinstance(string, basestring): 3733 raise ValueError("unable to coerce %r(%r) to string" % (type(string), string)) 3734 string = encoder(string)[0] 3735 block = memo.put_memo(string) 3736 return struct.pack('<i', block)
3737
3738 -def add_character(format, flags):
3739 if format[0][0] != '(' or format[0][-1] != ')' or any([f not in flags for f in format[1:]]): 3740 raise FieldSpecError("Format for Character field creation is 'C(n)%s', not 'C%s'" % field_spec_error_text(format, flags)) 3741 length = int(format[0][1:-1]) 3742 if not 0 < length < 256: 3743 raise FieldSpecError("Character fields must be between 1 and 255, not %d" % length) 3744 decimals = 0 3745 flag = 0 3746 for f in format[1:]: 3747 flag |= FieldFlag.lookup(f) 3748 return length, decimals, flag
3749
3750 -def add_date(format, flags):
3751 if any([f not in flags for f in format]): 3752 raise FieldSpecError("Format for Date field creation is 'D%s', not 'D%s'" % field_spec_error_text(format, flags)) 3753 length = 8 3754 decimals = 0 3755 flag = 0 3756 for f in format: 3757 flag |= FieldFlag.lookup(f) 3758 return length, decimals, flag
3759
3760 -def add_logical(format, flags):
3761 if any([f not in flags for f in format]): 3762 raise FieldSpecError("Format for Logical field creation is 'L%s', not 'L%s'" % field_spec_error_text(format, flags)) 3763 length = 1 3764 decimals = 0 3765 flag = 0 3766 for f in format: 3767 flag |= FieldFlag.lookup(f) 3768 return length, decimals, flag
3769
3770 -def add_memo(format, flags):
3771 if any(f not in flags for f in format): 3772 raise FieldSpecError("Format for Memo field creation is 'M(n)%s', not 'M%s'" % field_spec_error_text(format, flags)) 3773 length = 10 3774 decimals = 0 3775 flag = 0 3776 for f in format: 3777 flag |= FieldFlag.lookup(f) 3778 return length, decimals, flag
3779
3780 -def add_binary_memo(format, flags):
3781 if any(f not in flags for f in format): 3782 raise FieldSpecError("Format for Memo field creation is 'M(n)%s', not 'M%s'" % field_spec_error_text(format, flags)) 3783 length = 10 3784 decimals = 0 3785 flag = 0 3786 for f in format: 3787 flag |= FieldFlag.lookup(f) 3788 flag |= FieldFlag.BINARY 3789 return length, decimals, flag
3790
3791 -def add_numeric(format, flags):
3792 if len(format) > 1 or format[0][0] != '(' or format[0][-1] != ')' or any(f not in flags for f in format[1:]): 3793 raise FieldSpecError("Format for Numeric field creation is 'N(s,d)%s', not 'N%s'" % field_spec_error_text(format, flags)) 3794 length, decimals = format[0][1:-1].split(',') 3795 length = int(length) 3796 decimals = int(decimals) 3797 flag = 0 3798 for f in format[1:]: 3799 flag |= FieldFlag.lookup(f) 3800 if not 0 < length < 20: 3801 raise FieldSpecError("Numeric fields must be between 1 and 19 digits, not %d" % length) 3802 if decimals and not 0 < decimals <= length - 2: 3803 raise FieldSpecError("Decimals must be between 0 and Length-2 (Length: %d, Decimals: %d)" % (length, decimals)) 3804 return length, decimals, flag
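Parsing sketch for add_numeric: the format argument is the already-split remainder of a field spec such as 'N(6,2)', and an empty flags sequence simply means no field flags are allowed (import path assumed).

    from dbf.ver_32 import add_numeric

    length, decimals, flag = add_numeric(['(6,2)'], [])
    assert (length, decimals, flag) == (6, 2, 0)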
3805
3806 -def add_clp_character(format, flags):
3807 if format[0][0] != '(' or format[0][-1] != ')' or any([f not in flags for f in format[1:]]): 3808 raise FieldSpecError("Format for Character field creation is 'C(n)%s', not 'C%s'" % field_spec_error_text(format, flags)) 3809 length = int(format[0][1:-1]) 3810 if not 0 < length < 65519: 3811 raise FieldSpecError("Character fields must be between 1 and 65,519") 3812 decimals = 0 3813 flag = 0 3814 for f in format[1:]: 3815 flag |= FieldFlag.lookup(f) 3816 return length, decimals, flag
3817
3818 -def add_vfp_character(format, flags):
3819 if format[0][0] != '(' or format[0][-1] != ')' or any([f not in flags for f in format[1:]]): 3820 raise FieldSpecError("Format for Character field creation is 'C(n)%s', not 'C%s'" % field_spec_error_text(format, flags)) 3821 length = int(format[0][1:-1]) 3822 if not 0 < length < 255: 3823 raise FieldSpecError("Character fields must be between 1 and 255") 3824 decimals = 0 3825 flag = 0 3826 for f in format[1:]: 3827 flag |= FieldFlag.lookup(f) 3828 return length, decimals, flag
3829
3830 -def add_vfp_currency(format, flags):
3831 if any(f not in flags for f in format[1:]): 3832 raise FieldSpecError("Format for Currency field creation is 'Y%s', not 'Y%s'" % field_spec_error_text(format, flags)) 3833 length = 8 3834 decimals = 0 3835 flag = 0 3836 for f in format: 3837 flag |= FieldFlag.lookup(f) 3838 return length, decimals, flag
3839
3840 -def add_vfp_datetime(format, flags):
3841 if any(f not in flags for f in format[1:]): 3842 raise FieldSpecError("Format for DateTime field creation is 'T%s', not 'T%s'" % field_spec_error_text(format, flags)) 3843 length = 8 3844 decimals = 0 3845 flag = 0 3846 for f in format: 3847 flag |= FieldFlag.lookup(f) 3848 return length, decimals, flag
3849
3850 -def add_vfp_double(format, flags):
3851 if any(f not in flags for f in format[1:]): 3852 raise FieldSpecError("Format for Double field creation is 'B%s', not 'B%s'" % field_spec_error_text(format, flags)) 3853 length = 8 3854 decimals = 0 3855 flag = 0 3856 for f in format: 3857 flag |= FieldFlag.lookup(f) 3858 return length, decimals, flag
3859
3860 -def add_vfp_integer(format, flags):
3861 if any(f not in flags for f in format[1:]): 3862 raise FieldSpecError("Format for Integer field creation is 'I%s', not 'I%s'" % field_spec_error_text(format, flags)) 3863 length = 4 3864 decimals = 0 3865 flag = 0 3866 for f in format: 3867 flag |= FieldFlag.lookup(f) 3868 return length, decimals, flag
3869
3870 -def add_vfp_memo(format, flags):
3871 if any(f not in flags for f in format[1:]): 3872 raise FieldSpecError("Format for Memo field creation is 'M%s', not 'M%s'" % field_spec_error_text(format, flags)) 3873 length = 4 3874 decimals = 0 3875 flag = 0 3876 for f in format: 3877 flag |= FieldFlag.lookup(f) 3878 if 'binary' not in flags: # general or picture -- binary is implied 3879 flag |= FieldFlag.BINARY 3880 return length, decimals, flag
3881
3882 -def add_vfp_binary_memo(format, flags):
3883 if any(f not in flags for f in format[1:]): 3884 raise FieldSpecError("Format for Memo field creation is 'M%s', not 'M%s'" % field_spec_error_text(format, flags)) 3885 length = 4 3886 decimals = 0 3887 flag = 0 3888 for f in format: 3889 flag |= FieldFlag.lookup(f) 3890 # general or picture -- binary is implied 3891 flag |= FieldFlag.BINARY 3892 return length, decimals, flag
3893
3894 -def add_vfp_numeric(format, flags):
3895 if format[0][0] != '(' or format[0][-1] != ')' or any(f not in flags for f in format[1:]): 3896 raise FieldSpecError("Format for Numeric field creation is 'N(s,d)%s', not 'N%s'" % field_spec_error_text(format, flags)) 3897 length, decimals = format[0][1:-1].split(',') 3898 length = int(length) 3899 decimals = int(decimals) 3900 flag = 0 3901 for f in format[1:]: 3902 flag |= FieldFlag.lookup(f) 3903 if not 0 < length < 21: 3904 raise FieldSpecError("Numeric fields must be between 1 and 20 digits, not %d" % length) 3905 if decimals and not 0 < decimals <= length - 2: 3906 raise FieldSpecError("Decimals must be between 0 and Length-2 (Length: %d, Decimals: %d)" % (length, decimals)) 3907 return length, decimals, flag
3908
3909 -def field_spec_error_text(format, flags):
3910 """ 3911 generic routine for error text for the add...() functions 3912 """ 3913 flg = '' 3914 if flags: 3915 flg = ' [ ' + ' | '.join(flags) + ' ]' 3916 frmt = '' 3917 if format: 3918 frmt = ' ' + ' '.join(format) 3919 return flg, frmt
3920
3921 -def ezip(*iters):
3922 """ 3923 extends all iters to longest one, using last value from each as necessary 3924 """ 3925 iters = [iter(x) for x in iters] 3926 last = [None] * len(iters) 3927 while "any iters have items left": 3928 alive = len(iters) 3929 for i, iterator in enumerate(iters): 3930 try: 3931 value = next(iterator) 3932 last[i] = value 3933 except StopIteration: 3934 alive -= 1 3935 if alive: 3936 yield tuple(last) 3937 alive = len(iters) 3938 continue 3939 break
3940
3941 3942 # Public classes 3943 3944 -class Tables(object):
3945 """ 3946 context manager for multiple tables and/or indices 3947 """
3948 - def __init__(yo, *tables):
3949 if len(tables) == 1 and not isinstance(tables[0], (Table, basestring)): 3950 tables = tables[0] 3951 yo._tables = [] 3952 yo._entered = [] 3953 for table in tables: 3954 if isinstance(table, basestring): 3955 table = Table(table) 3956 yo._tables.append(table)
3957 - def __enter__(yo):
3958 for table in yo._tables: 3959 table.__enter__() 3960 yo._entered.append(table) 3961 return tuple(yo._tables)
3962 - def __exit__(yo, *args):
3963 while yo._entered: 3964 table = yo._entered.pop() 3965 try: 3966 table.__exit__() 3967 except Exception: 3968 pass
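# --- usage sketch (annotation; not part of the original ver_32 source) ---
# Tables() opens every table for the duration of a with-block and closes
# whatever it managed to open on the way out; plain strings are wrapped in
# Table().  The file names below are hypothetical.
#
#     with Tables('employees.dbf', 'departments.dbf') as (emp, dept):
#         print(len(emp), len(dept))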
3969
3970 -class IndexLocation(int):
3971 """ 3972 Represents the index where the match criteria is if True, 3973 or would be if False 3974 3975 Used by Index.index_search 3976 """ 3977
3978 - def __new__(cls, value, found):
3979 "value is the number, found is True/False" 3980 result = int.__new__(cls, value) 3981 result.found = found 3982 return result
3983
3984 - def __bool__(self):
3985 return self.found
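# --- usage sketch (annotation; not part of the original ver_32 source) ---
# IndexLocation is an int that also records whether the search hit.  Truth
# testing answers "was it found?", while the integer value is usable either
# way, as the match position or the insertion point:
#
#     hit = IndexLocation(5, True)
#     bool(hit), int(hit)             # -> (True, 5)
#     miss = IndexLocation(5, False)
#     bool(miss), int(miss)           # -> (False, 5)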
3986
3987 3988 -class FieldInfo(tuple):
3989 """ 3990 tuple with named attributes for representing a field's dbf type, 3991 length, decimal portion, and python class 3992 """ 3993 3994 __slots__= () 3995
3996 - def __new__(cls, *args):
3997 if len(args) != 4: 3998 raise TypeError("%s should be called with Type, Length, Decimal size, and Class" % cls.__name__) 3999 return tuple.__new__(cls, args)
4000 4001 @property
4002 - def field_type(self):
4003 return self[0]
4004 4005 @property
4006 - def length(self):
4007 return self[1]
4008 4009 @property
4010 - def decimal(self):
4011 return self[2]
4012 4013 @property
4014 - def py_type(self):
4015 return self[3]
4016
4017 4018 -class CodePage(tuple):
4019 """ 4020 tuple with named attributes for representing a tables codepage 4021 """ 4022 4023 __slots__= () 4024
4025 - def __new__(cls, name):
4026 "call with name of codepage (e.g. 'cp1252')" 4027 code, name, desc = _codepage_lookup(name) 4028 return tuple.__new__(cls, (name, desc, code))
4029
4030 - def __repr__(self):
4031 return "CodePage(%r, %r, %r)" % (self[0], self[1], self[2])
4032
4033 - def __str__(self):
4034 return "%s (%s)" % (self[0], self[1])
4035 4036 @property
4037 - def name(self):
4038 return self[0]
4039 4040 @property
4041 - def desc(self):
4042 return self[1]
4043 4044 @property
4045 - def code(self):
4046 return self[2]
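# --- usage sketch (annotation; not part of the original ver_32 source) ---
# CodePage resolves a codec name through _codepage_lookup() and keeps the
# resulting (name, description, dbf code byte) triple together:
#
#     cp = CodePage('cp1252')
#     cp.name, cp.desc, cp.code       # codec name, human description, header byte 29 value
#     str(cp)                         # -> "<name> (<description>)"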
4047
4048 4049 -class Iter(_Navigation):
4050 """ 4051 Provides iterable behavior for a table 4052 """ 4053
4054 - def __init__(self, table, include_vapor=False):
4055 """ 4056 Return a Vapor record as the last record in the iteration 4057 if include_vapor is True 4058 """ 4059 self._table = table 4060 self._record = None 4061 self._include_vapor = include_vapor 4062 self._exhausted = False
4063
4064 - def __iter__(self):
4065 return self
4066
4067 - def __next__(self):
4068 while not self._exhausted: 4069 if self._index == len(self._table): 4070 break 4071 if self._index >= (len(self._table) - 1): 4072 self._index = max(self._index, len(self._table)) 4073 if self._include_vapor: 4074 return RecordVaporWare('eof', self._table) 4075 break 4076 self._index += 1 4077 record = self._table[self._index] 4078 return record 4079 self._exhausted = True 4080 raise StopIteration
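# --- usage sketch (annotation; not part of the original ver_32 source) ---
# Iter is what Table.__iter__() hands back, so it is rarely built directly.
# With include_vapor=True the final item is the 'eof' RecordVaporWare sentinel
# rather than iteration simply stopping.  `table` stands for any open Table.
#
#     for record in Iter(table, include_vapor=True):
#         ...                         # last item is the 'eof' vapor record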
4081
4082 4083 -class Table(_Navigation):
4084 """ 4085 Base class for dbf style tables 4086 """ 4087 4088 _version = 'basic memory table' 4089 _versionabbr = 'dbf' 4090 _max_fields = 255 4091 _max_records = 4294967296 4092 4093 @MutableDefault
4094 - def _field_types():
4095 return { 4096 CHAR: { 4097 'Type':'Character', 'Init':add_character, 'Blank':lambda x: b' ' * x, 'Retrieve':retrieve_character, 'Update':update_character, 4098 'Class':str, 'Empty':str, 'flags':tuple(), 4099 }, 4100 DATE: { 4101 'Type':'Date', 'Init':add_date, 'Blank':lambda x: b' ', 'Retrieve':retrieve_date, 'Update':update_date, 4102 'Class':datetime.date, 'Empty':none, 'flags':tuple(), 4103 }, 4104 NUMERIC: { 4105 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_numeric, 4106 'Class':'default', 'Empty':none, 'flags':tuple(), 4107 }, 4108 LOGICAL: { 4109 'Type':'Logical', 'Init':add_logical, 'Blank':lambda x: b'?', 'Retrieve':retrieve_logical, 'Update':update_logical, 4110 'Class':bool, 'Empty':none, 'flags':tuple(), 4111 }, 4112 MEMO: { 4113 'Type':'Memo', 'Init':add_memo, 'Blank':lambda x: b' ', 'Retrieve':retrieve_memo, 'Update':update_memo, 4114 'Class':str, 'Empty':str, 'flags':tuple(), 4115 }, 4116 NUMERIC: { 4117 'Type':'Numeric', 'Init':add_numeric, 'Blank':lambda x: b' ' * x, 'Retrieve':retrieve_numeric, 'Update':update_numeric, 4118 'Class':'default', 'Empty':none, 'flags':tuple(), 4119 }, 4120 }
4121 @MutableDefault
4122 - def _previous_status():
4123 return []
4124 _memoext = '' 4125 _memoClass = _DbfMemo 4126 _yesMemoMask = 0 4127 _noMemoMask = 0 4128 _binary_types = tuple() # as in non-unicode character, or non-text number 4129 _character_types = (CHAR, DATE, FLOAT, LOGICAL, MEMO, NUMERIC) # field represented by text data 4130 _currency_types = tuple() # money! 4131 _date_types = (DATE, ) # dates 4132 _datetime_types = tuple() # dates w/times 4133 _decimal_types = (NUMERIC, FLOAT) # text-based numeric fields 4134 _fixed_types = (MEMO, DATE, LOGICAL) # always same length in table 4135 _logical_types = (LOGICAL, ) # logicals 4136 _memo_types = (MEMO, ) 4137 _numeric_types = (NUMERIC, FLOAT) # fields representing a number 4138 _variable_types = (CHAR, NUMERIC, FLOAT) # variable length in table 4139 _dbfTableHeader = array('B', [0] * 32) 4140 _dbfTableHeader[0] = 0 # table type - none 4141 _dbfTableHeader[8:10] = array('B', pack_short_int(33)) 4142 _dbfTableHeader[10] = 1 # record length -- one for delete flag 4143 _dbfTableHeader[29] = 0 # code page -- none, using plain ascii 4144 _dbfTableHeader = _dbfTableHeader.tobytes() 4145 _dbfTableHeaderExtra = b'' 4146 _supported_tables = () 4147 _pack_count = 0 4148 backup = None 4149
4150 - class _Indexen(object):
4151 """ 4152 implements the weakref structure for seperate indexes 4153 """ 4154
4155 - def __init__(self):
4156 self._indexen = set()
4157
4158 - def __iter__(self):
4159 self._indexen = set([s for s in self._indexen if s() is not None]) 4160 return (s() for s in self._indexen if s() is not None)
4161
4162 - def __len__(self):
4163 self._indexen = set([s for s in self._indexen if s() is not None]) 4164 return len(self._indexen)
4165
4166 - def add(self, new_index):
4167 self._indexen.add(weakref.ref(new_index)) 4168 self._indexen = set([s for s in self._indexen if s() is not None])
4169
4170 - class _MetaData(dict):
4171 """ 4172 Container class for storing per table metadata 4173 """ 4174 blankrecord = None 4175 dfd = None # file handle 4176 fields = None # field names 4177 field_count = 0 # number of fields 4178 field_types = None # dictionary of dbf type field specs 4179 filename = None # name of .dbf file 4180 ignorememos = False # True when memos should be ignored 4181 memoname = None # name of .dbt/.fpt file 4182 mfd = None # file handle 4183 memo = None # memo object 4184 memofields = None # field names of Memo type 4185 newmemofile = False # True when memo file needs to be created 4186 nulls = None # non-None when Nullable fields present 4187 user_fields = None # not counting SYSTEM fields 4188 user_field_count = 0 # also not counting SYSTEM fields
4189
4190 - class _TableHeader(object):
4191 """ 4192 represents the data block that defines a tables type and layout 4193 """ 4194
4195 - def __init__(self, data, pack_date, unpack_date):
4196 if len(data) != 32: 4197 raise BadDataError('table header should be 32 bytes, but is %d bytes' % len(data)) 4198 self.packDate = pack_date 4199 self.unpackDate = unpack_date 4200 self._data = array('B', data + b'\x0d')
4201
4202 - def codepage(self, cp=None):
4203 """ 4204 get/set code page of table 4205 """ 4206 if cp is None: 4207 return self._data[29] 4208 else: 4209 cp, sd, ld = _codepage_lookup(cp) 4210 self._data[29] = cp 4211 return cp
4212 4213 @property
4214 - def data(self):
4215 """ 4216 main data structure 4217 """ 4218 date = self.packDate(Date.today()) 4219 self._data[1:4] = array('B', date) 4220 return self._data.tobytes()
4221 4222 @data.setter
4223 - def data(self, bytes):
4224 if len(bytes) < 32: 4225 raise BadDataError("length for data of %d is less than 32" % len(bytes)) 4226 self._data[:] = array('B', bytes)
4227 4228 @property
4229 - def extra(self):
4230 "extra dbf info (located after headers, before data records)" 4231 fieldblock = self._data[32:] 4232 for i in range(len(fieldblock) // 32 + 1): 4233 cr = i * 32 4234 if fieldblock[cr] == CR: 4235 break 4236 else: 4237 raise BadDataError("corrupt field structure") 4238 cr += 33 # skip past CR 4239 return self._data[cr:].tobytes()
4240 4241 @extra.setter
4242 - def extra(self, data):
4243 fieldblock = self._data[32:] 4244 for i in range(len(fieldblock) // 32 + 1): 4245 cr = i * 32 4246 if fieldblock[cr] == CR: 4247 break 4248 else: 4249 raise BadDataError("corrupt field structure") 4250 cr += 33 # skip past CR 4251 self._data[cr:] = array('B', data) # extra 4252 self._data[8:10] = array('B', pack_short_int(len(self._data))) # start
4253 4254 @property
4255 - def field_count(self):
4256 "number of fields (read-only)" 4257 fieldblock = self._data[32:] 4258 for i in range(len(fieldblock) // 32 + 1): 4259 cr = i * 32 4260 if fieldblock[cr] == CR: 4261 break 4262 else: 4263 raise BadDataError("corrupt field structure") 4264 return len(fieldblock[:cr]) // 32
4265 4266 @property
4267 - def fields(self):
4268 """ 4269 field block structure 4270 """ 4271 fieldblock = self._data[32:] 4272 for i in range(len(fieldblock) // 32 + 1): 4273 cr = i * 32 4274 if fieldblock[cr] == CR: 4275 break 4276 else: 4277 raise BadDataError("corrupt field structure") 4278 return fieldblock[:cr].tobytes()
4279 4280 @fields.setter
4281 - def fields(self, block):
4282 fieldblock = self._data[32:] 4283 for i in range(len(fieldblock) // 32 + 1): 4284 cr = i * 32 4285 if fieldblock[cr] == CR: 4286 break 4287 else: 4288 raise BadDataError("corrupt field structure") 4289 cr += 32 # convert to indexing main structure 4290 fieldlen = len(block) 4291 if fieldlen % 32 != 0: 4292 raise BadDataError("fields structure corrupt: %d is not a multiple of 32" % fieldlen) 4293 self._data[32:cr] = array('B', block) # fields 4294 self._data[8:10] = array('B', pack_short_int(len(self._data))) # start 4295 fieldlen = fieldlen // 32 4296 recordlen = 1 # deleted flag 4297 for i in range(fieldlen): 4298 recordlen += block[i*32+16] 4299 self._data[10:12] = array('B', pack_short_int(recordlen))
4300 4301 @property
4302 - def record_count(self):
4303 """ 4304 number of records (maximum 16,777,215) 4305 """ 4306 return unpack_long_int(self._data[4:8].tobytes())
4307 4308 @record_count.setter
4309 - def record_count(self, count):
4310 self._data[4:8] = array('B', pack_long_int(count))
4311 4312 @property
4313 - def record_length(self):
4314 """ 4315 length of a record (read_only) (max of 65,535) 4316 """ 4317 return unpack_short_int(self._data[10:12].tobytes())
4318 4319 @record_length.setter
4320 - def record_length(self, length):
4321 """ 4322 to support Clipper large Character fields 4323 """ 4324 self._data[10:12] = array('B', pack_short_int(length))
4325 4326 @property
4327 - def start(self):
4328 """ 4329 starting position of first record in file (must be within first 64K) 4330 """ 4331 return unpack_short_int(self._data[8:10].tobytes())
4332 4333 @start.setter
4334 - def start(self, pos):
4335 self._data[8:10] = array('B', pack_short_int(pos))
4336 4337 @property
4338 - def update(self):
4339 """ 4340 date of last table modification (read-only) 4341 """ 4342 return self.unpackDate(self._data[1:4].tobytes())
4343 4344 @property
4345 - def version(self):
4346 """ 4347 dbf version 4348 """ 4349 return self._data[0]
4350 4351 @version.setter
4352 - def version(self, ver):
4353 self._data[0] = ver
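# --- usage sketch (annotation; not part of the original ver_32 source) ---
# The properties above pin down the 32-byte header layout this class manages:
# byte 0 = version, bytes 1-3 = last-update date (year-1900, month, day),
# bytes 4-7 = record count, bytes 8-9 = start of data, bytes 10-11 = record
# length, byte 29 = code page.  A standalone peek with the stdlib
# ('example.dbf' is a hypothetical file; little-endian assumed, matching
# pack_short_int / pack_long_int):
#
#     import struct
#     raw = open('example.dbf', 'rb').read(32)
#     version = raw[0]
#     last_update = (1900 + raw[1], raw[2], raw[3])
#     record_count, = struct.unpack('<I', raw[4:8])
#     start, record_length = struct.unpack('<HH', raw[8:12])
#     code_page = raw[29]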
4354
4355 - class _Table(object):
4356 """ 4357 implements the weakref table for records 4358 """ 4359
4360 - def __init__(self, count, meta):
4361 self._meta = meta 4362 self._max_count = count 4363 self._weakref_list = {} 4364 self._accesses = 0 4365 self._dead_check = 1024
4366
4367 - def __getitem__(self, index):
4368 # maybe = self._weakref_list[index]() 4369 if index < 0: 4370 if self._max_count + index < 0: 4371 raise IndexError('index %d smaller than available records' % index) 4372 index = self._max_count + index 4373 if index >= self._max_count: 4374 raise IndexError('index %d greater than available records' % index) 4375 maybe = self._weakref_list.get(index) 4376 if maybe: 4377 maybe = maybe() 4378 self._accesses += 1 4379 if self._accesses >= self._dead_check: 4380 dead = [] 4381 for key, value in self._weakref_list.items(): 4382 if value() is None: 4383 dead.append(key) 4384 for key in dead: 4385 del self._weakref_list[key] 4386 if not maybe: 4387 meta = self._meta 4388 if meta.status == CLOSED: 4389 raise DbfError("%s is closed; record %d is unavailable" % (meta.filename, index)) 4390 header = meta.header 4391 if index < 0: 4392 index += header.record_count 4393 size = header.record_length 4394 location = index * size + header.start 4395 meta.dfd.seek(location) 4396 if meta.dfd.tell() != location: 4397 raise ValueError("unable to seek to offset %d in file" % location) 4398 bytes = meta.dfd.read(size) 4399 if not bytes: 4400 raise ValueError("unable to read record data from %s at location %d" % (meta.filename, location)) 4401 maybe = Record(recnum=index, layout=meta, kamikaze=bytes, _fromdisk=True) 4402 self._weakref_list[index] = weakref.ref(maybe) 4403 return maybe
4404
4405 - def append(self, record):
4406 self._weakref_list[self._max_count] = weakref.ref(record) 4407 self._max_count += 1
4408
4409 - def clear(self):
4410 for key in list(self._weakref_list.keys()): 4411 del self._weakref_list[key] 4412 self._max_count = 0
4413
4414 - def flush(self):
4415 for maybe in self._weakref_list.values(): 4416 maybe = maybe() 4417 if maybe and not maybe._write_to_disk: 4418 raise DbfError("some records have not been written to disk")
4419
4420 - def pop(self):
4421 if not self._max_count: 4422 raise IndexError('no records exist') 4423 record = self[self._max_count-1] 4424 self._max_count -= 1 4425 return record
4426
4427 - def _build_header_fields(self):
4428 """ 4429 constructs fieldblock for disk table 4430 """ 4431 fieldblock = array('B', b'') 4432 memo = False 4433 nulls = False 4434 meta = self._meta 4435 header = meta.header 4436 header.version = header.version & self._noMemoMask 4437 meta.fields = [f for f in meta.fields if f != '_nullflags'] 4438 for field in meta.fields: 4439 layout = meta[field] 4440 if meta.fields.count(field) > 1: 4441 raise BadDataError("corrupted field structure (noticed in _build_header_fields)") 4442 fielddef = array('B', [0] * 32) 4443 fielddef[:11] = array('B', pack_str(meta.encoder(field)[0])) 4444 fielddef[11] = layout[TYPE] 4445 fielddef[12:16] = array('B', pack_long_int(layout[START])) 4446 fielddef[16] = layout[LENGTH] 4447 fielddef[17] = layout[DECIMALS] 4448 fielddef[18] = layout[FLAGS] 4449 fieldblock.extend(fielddef) 4450 if layout[TYPE] in meta.memo_types: 4451 memo = True 4452 if layout[FLAGS] & NULLABLE: 4453 nulls = True 4454 if memo: 4455 header.version = header.version | self._yesMemoMask 4456 if meta.memo is None: 4457 meta.memo = self._memoClass(meta) 4458 else: 4459 if os.path.exists(meta.memoname): 4460 if meta.mfd is not None: 4461 meta.mfd.close() 4462 4463 os.remove(meta.memoname) 4464 meta.memo = None 4465 if nulls: 4466 start = layout[START] + layout[LENGTH] 4467 length, one_more = divmod(len(meta.fields), 8) 4468 if one_more: 4469 length += 1 4470 fielddef = array('B', [0] * 32) 4471 fielddef[:11] = array('B', pack_str(b'_nullflags')) 4472 fielddef[11] = 0x30 4473 fielddef[12:16] = array('B', pack_long_int(start)) 4474 fielddef[16] = length 4475 fielddef[17] = 0 4476 fielddef[18] = BINARY | SYSTEM 4477 fieldblock.extend(fielddef) 4478 meta.fields.append('_nullflags') 4479 nullflags = ( 4480 _NULLFLAG, # type 4481 start, # start 4482 length, # length 4483 start + length, # end 4484 0, # decimals 4485 BINARY | SYSTEM, # flags 4486 none, # class 4487 none, # empty 4488 ) 4489 meta['_nullflags'] = nullflags 4490 header.fields = fieldblock.tobytes() 4491 meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM] 4492 meta.user_field_count = len(meta.user_fields) 4493 Record._create_blank_data(meta)
4494
4495 - def _check_memo_integrity(self):
4496 """ 4497 checks memo file for problems 4498 """ 4499 raise NotImplementedError("_check_memo_integrity must be implemented by subclass")
4500
4501 - def _initialize_fields(self):
4502 """ 4503 builds the FieldList of names, types, and descriptions from the disk file 4504 """ 4505 raise NotImplementedError("_initialize_fields must be implemented by subclass")
4506
4507 - def _field_layout(self, i):
4508 """ 4509 Returns field information Name Type(Length[, Decimals]) 4510 """ 4511 name = self._meta.fields[i] 4512 fielddef = self._meta[name] 4513 type = FieldType(fielddef[TYPE]) 4514 length = fielddef[LENGTH] 4515 decimals = fielddef[DECIMALS] 4516 set_flags = fielddef[FLAGS] 4517 flags = [] 4518 if type in (GENERAL, PICTURE): 4519 printable_flags = NULLABLE, SYSTEM 4520 else: 4521 printable_flags = BINARY, NULLABLE, SYSTEM 4522 for flg in printable_flags: 4523 if flg & set_flags == flg: 4524 flags.append(FieldFlag(flg)) 4525 set_flags &= 255 ^ flg 4526 if flags: 4527 flags = ' ' + ' '.join(f.text for f in flags) 4528 else: 4529 flags = '' 4530 if type in self._fixed_types: 4531 description = "%s %s%s" % (name, type.symbol, flags) 4532 elif type in self._numeric_types: 4533 description = "%s %s(%d,%d)%s" % (name, type.symbol, length, decimals, flags) 4534 else: 4535 description = "%s %s(%d)%s" % (name, type.symbol, length, flags) 4536 return description
4537
4538 - def _list_fields(self, specs, sep=','):
4539 """ 4540 standardizes field specs 4541 """ 4542 if specs is None: 4543 specs = self.field_names 4544 elif isinstance(specs, basestring): 4545 specs = specs.strip(sep).split(sep) 4546 else: 4547 specs = list(specs) 4548 specs = [s.strip() for s in specs] 4549 return specs
4550
4551 - def _nav_check(self):
4552 """ 4553 Raises `DbfError` if table is closed 4554 """ 4555 if self._meta.status == CLOSED: 4556 raise DbfError('table %s is closed' % self.filename)
4557 4558 @staticmethod
4559 - def _pack_date(date):
4560 """ 4561 Returns a group of three bytes, in integer form, of the date 4562 """ 4563 # return "%c%c%c" % (date.year - 1900, date.month, date.day) 4564 return bytes([date.year - 1900, date.month, date.day])
4565 4566 @staticmethod
4567 - def _unpack_date(bytestr):
4568 """ 4569 Returns a Date() of the packed three-byte date passed in 4570 """ 4571 year, month, day = struct.unpack('<BBB', bytestr) 4572 year += 1900 4573 return Date(year, month, day)
4574
4575 - def _update_disk(self, headeronly=False):
4576 """ 4577 synchronizes the disk file with current data 4578 """ 4579 if self._meta.location == IN_MEMORY: 4580 return 4581 meta = self._meta 4582 header = meta.header 4583 fd = meta.dfd 4584 fd.seek(0) 4585 fd.write(header.data) 4586 eof = header.start + header.record_count * header.record_length 4587 if not headeronly: 4588 for record in self: 4589 record._update_disk() 4590 fd.flush() 4591 fd.truncate(eof) 4592 if self._versionabbr in ('db3', 'clp'): 4593 fd.seek(0, SEEK_END) 4594 fd.write(b'\x1a') # required for dBase III compatibility 4595 fd.flush() 4596 fd.truncate(eof + 1)
4597
4598 - def __contains__(self, data):
4599 """ 4600 data can be a record, template, dict, or tuple 4601 """ 4602 if not isinstance(data, (Record, RecordTemplate, dict, tuple)): 4603 raise TypeError("x should be a record, template, dict, or tuple, not %r" % type(data)) 4604 for record in Iter(self): 4605 if data == record: 4606 return True 4607 return False
4608
4609 - def __enter__(self):
4610 self._previous_status.append(self._meta.status) 4611 self.open() 4612 return self
4613
4614 - def __exit__(self, *exc_info):
4615 if self._previous_status.pop() == CLOSED: 4616 self.close()
4617
4618 - def __getattr__(self, name):
4619 if name in ( 4620 'binary_types', 4621 'character_types', 4622 'currency_types', 4623 'date_types', 4624 'datetime_types', 4625 'decimal_types', 4626 'fixed_types', 4627 'logical_types', 4628 'memo_types', 4629 'numeric_types', 4630 'variable_types', 4631 ): 4632 return getattr(self, '_'+name) 4633 if name in ('_table', ): 4634 if self._meta.location == ON_DISK: 4635 self._table = self._Table(len(self), self._meta) 4636 else: 4637 self._table = [] 4638 return object.__getattribute__(self, name)
4639
4640 - def __getitem__(self, value):
4641 if isinstance(value, baseinteger): 4642 if not -self._meta.header.record_count <= value < self._meta.header.record_count: 4643 raise NotFoundError("Record %d is not in table %s." % (value, self.filename)) 4644 return self._table[value] 4645 elif type(value) == slice: 4646 sequence = List(desc='%s --> %s' % (self.filename, value)) 4647 for index in range(len(self))[value]: 4648 record = self._table[index] 4649 sequence.append(record) 4650 return sequence 4651 else: 4652 raise TypeError('type <%s> not valid for indexing' % type(value))
4653
4654 - def __init__(self, filename, field_specs=None, memo_size=128, ignore_memos=False, 4655 codepage=None, default_data_types=None, field_data_types=None, # e.g. 'name':str, 'age':float 4656 dbf_type=None, on_disk=True, 4657 ):
4658 """ 4659 open/create dbf file 4660 filename should include path if needed 4661 field_specs can be either a ;-delimited string or a list of strings 4662 memo_size is always 512 for db3 memos 4663 ignore_memos is useful if the memo file is missing or corrupt 4664 read_only will load records into memory, then close the disk file 4665 keep_memos will also load any memo fields into memory 4666 meta_only will ignore all records, keeping only basic table information 4667 codepage will override whatever is set in the table itself 4668 """ 4669 4670 if not on_disk: 4671 if field_specs is None: 4672 raise DbfError("field list must be specified for memory tables") 4673 self._indexen = self._Indexen() 4674 self._meta = meta = self._MetaData() 4675 meta.max_fields = self._max_fields 4676 meta.max_records = self._max_records 4677 meta.table = weakref.ref(self) 4678 meta.filename = filename 4679 meta.fields = [] 4680 meta.user_fields = [] 4681 meta.user_field_count = 0 4682 meta.fieldtypes = fieldtypes = self._field_types 4683 meta.fixed_types = self._fixed_types 4684 meta.variable_types = self._variable_types 4685 meta.character_types = self._character_types 4686 meta.currency_types = self._currency_types 4687 meta.decimal_types = self._decimal_types 4688 meta.numeric_types = self._numeric_types 4689 meta.memo_types = self._memo_types 4690 meta.ignorememos = meta.original_ignorememos = ignore_memos 4691 meta.memo_size = memo_size 4692 meta.input_decoder = codecs.getdecoder(input_decoding) # from ascii to unicode 4693 meta.output_encoder = codecs.getencoder(input_decoding) # and back to ascii 4694 meta.header = header = self._TableHeader(self._dbfTableHeader, self._pack_date, self._unpack_date) 4695 header.extra = self._dbfTableHeaderExtra 4696 if default_data_types is None: 4697 default_data_types = dict() 4698 elif default_data_types == 'enhanced': 4699 default_data_types = { 4700 'C' : dbf.Char, 4701 'L' : dbf.Logical, 4702 'D' : dbf.Date, 4703 'T' : dbf.DateTime, 4704 } 4705 self._meta._default_data_types = default_data_types 4706 if field_data_types is None: 4707 field_data_types = dict() 4708 self._meta._field_data_types = field_data_types 4709 for field, types in default_data_types.items(): 4710 field = FieldType(field) 4711 if not isinstance(types, tuple): 4712 types = (types, ) 4713 for result_name, result_type in ezip(('Class', 'Empty', 'Null'), types): 4714 fieldtypes[field][result_name] = result_type 4715 if not on_disk: 4716 self._table = [] 4717 meta.location = IN_MEMORY 4718 meta.memoname = filename 4719 meta.header.data 4720 else: 4721 base, ext = os.path.splitext(filename) 4722 if ext.lower() != '.dbf': 4723 meta.filename = filename + '.dbf' 4724 searchname = filename + '.[Db][Bb][Ff]' 4725 else: 4726 meta.filename = filename 4727 searchname = filename 4728 matches = glob(searchname) 4729 if len(matches) == 1: 4730 meta.filename = matches[0] 4731 elif matches: 4732 raise DbfError("please specify exactly which of %r you want" % (matches, )) 4733 case = [('l','u')[c.isupper()] for c in meta.filename[-4:]] 4734 if case == ['l','l','l','l']: 4735 meta.memoname = base + self._memoext.lower() 4736 elif case == ['l','u','u','u']: 4737 meta.memoname = base + self._memoext.upper() 4738 else: 4739 meta.memoname = base + ''.join([c.lower() if case[i] == 'l' else c.upper() for i, c in enumerate(self._memoext)]) 4740 meta.location = ON_DISK 4741 if codepage is not None: 4742 header.codepage(codepage) 4743 cp, sd, ld = _codepage_lookup(codepage) 4744 self._meta.decoder = codecs.getdecoder(sd) 
4745 self._meta.encoder = codecs.getencoder(sd) 4746 if field_specs: 4747 if meta.location == ON_DISK: 4748 meta.dfd = open(meta.filename, 'w+b') 4749 meta.newmemofile = True 4750 if codepage is None: 4751 header.codepage(default_codepage) 4752 cp, sd, ld = _codepage_lookup(header.codepage()) 4753 meta.decoder = codecs.getdecoder(sd) 4754 meta.encoder = codecs.getencoder(sd) 4755 meta.status = READ_WRITE 4756 self.add_fields(field_specs) 4757 else: 4758 try: 4759 dfd = meta.dfd = open(meta.filename, 'r+b') 4760 except IOError: 4761 e= sys.exc_info()[1] 4762 raise DbfError(str(e)) 4763 dfd.seek(0) 4764 meta.header = header = self._TableHeader(dfd.read(32), self._pack_date, self._unpack_date) 4765 if not header.version in self._supported_tables: 4766 dfd.close() 4767 dfd = None 4768 raise DbfError( 4769 "%s does not support %s [%x]" % 4770 (self._version, 4771 version_map.get(header.version, 'Unknown: %s' % header.version), 4772 header.version)) 4773 if codepage is None: 4774 cp, sd, ld = _codepage_lookup(header.codepage()) 4775 self._meta.decoder = codecs.getdecoder(sd) 4776 self._meta.encoder = codecs.getencoder(sd) 4777 fieldblock = dfd.read(header.start - 32) 4778 for i in range(len(fieldblock) // 32 + 1): 4779 fieldend = i * 32 4780 if fieldblock[fieldend] == CR: 4781 break 4782 else: 4783 raise BadDataError("corrupt field structure in header") 4784 if len(fieldblock[:fieldend]) % 32 != 0: 4785 raise BadDataError("corrupt field structure in header") 4786 old_length = header.data[10:12] 4787 header.fields = fieldblock[:fieldend] 4788 header.data = header.data[:10] + old_length + header.data[12:] # restore original for testing 4789 header.extra = fieldblock[fieldend + 1:] # skip trailing \r 4790 self._initialize_fields() 4791 self._check_memo_integrity() 4792 dfd.seek(0) 4793 4794 for field in meta.fields: 4795 field_type = meta[field][TYPE] 4796 default_field_type = ( 4797 fieldtypes[field_type]['Class'], 4798 fieldtypes[field_type]['Empty'], 4799 ) 4800 specific_field_type = field_data_types.get(field) 4801 if specific_field_type is not None and not isinstance(specific_field_type, tuple): 4802 specific_field_type = (specific_field_type, ) 4803 classes = [] 4804 for result_name, result_type in ezip( 4805 ('class', 'empty'), 4806 specific_field_type or default_field_type, 4807 ): 4808 classes.append(result_type) 4809 meta[field] = meta[field][:-2] + tuple(classes) 4810 meta.status = READ_ONLY 4811 self.close()
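# --- usage sketch (annotation; not part of the original ver_32 source) ---
# A minimal create/open/close round trip through the constructor above
# ('scratch' is a hypothetical file name; 'db3' selects the Db3Table subclass
# defined later in this module):
#
#     t = Table('scratch', 'name C(25); age N(3,0)', dbf_type='db3')
#     t.open()
#     t.field_names                   # -> ['name', 'age']
#     t.close()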
4812
4813 - def __iter__(self):
4814 """ 4815 iterates over the table's records 4816 """ 4817 return Iter(self)
4818
4819 - def __len__(self):
4820 """ 4821 returns number of records in table 4822 """ 4823 return self._meta.header.record_count
4824
4825 - def __new__(cls, filename, field_specs=None, memo_size=128, ignore_memos=False, 4826 codepage=None, default_data_types=None, field_data_types=None, # e.g. 'name':str, 'age':float 4827 dbf_type=None, on_disk=True, 4828 ):
4829 if dbf_type is None and isinstance(filename, Table): 4830 return filename 4831 if field_specs and dbf_type is None: 4832 dbf_type = default_type 4833 if dbf_type is not None: 4834 dbf_type = dbf_type.lower() 4835 table = table_types.get(dbf_type) 4836 if table is None: 4837 raise DbfError("Unknown table type: %s" % dbf_type) 4838 return object.__new__(table) 4839 else: 4840 base, ext = os.path.splitext(filename) 4841 if ext.lower() != '.dbf': 4842 filename = filename + '.dbf' 4843 possibles = guess_table_type(filename) 4844 if len(possibles) == 1: 4845 return object.__new__(possibles[0][2]) 4846 else: 4847 for type, desc, cls in possibles: 4848 if type == default_type: 4849 return object.__new__(cls) 4850 else: 4851 types = ', '.join(["%s" % item[1] for item in possibles]) 4852 abbrs = '[' + ' | '.join(["%s" % item[0] for item in possibles]) + ']' 4853 raise DbfError("Table could be any of %s. Please specify %s when opening" % (types, abbrs))
4854
4855 - def __bool__(self):
4856 """ 4857 True if table has any records 4858 """ 4859 return self._meta.header.record_count != 0
4860
4861 - def __repr__(self):
4862 return __name__ + ".Table(%r, status=%r)" % (self._meta.filename, self._meta.status)
4863
4864 - def __str__(self):
4865 status = self._meta.status 4866 version = version_map.get(self._meta.header.version) 4867 if version is not None: 4868 version = self._version 4869 else: 4870 version = 'unknown - ' + hex(self._meta.header.version) 4871 str = """ 4872 Table: %s 4873 Type: %s 4874 Codepage: %s 4875 Status: %s 4876 Last updated: %s 4877 Record count: %d 4878 Field count: %d 4879 Record length: %d """ % (self.filename, version 4880 , self.codepage, status, 4881 self.last_update, len(self), self.field_count, self.record_length) 4882 str += "\n --Fields--\n" 4883 for i in range(len(self.field_names)): 4884 str += "%11d) %s\n" % (i, self._field_layout(i)) 4885 return str
4886 4887 @property
4888 - def codepage(self):
4889 """ 4890 code page used for text translation 4891 """ 4892 return CodePage(code_pages[self._meta.header.codepage()][0])
4893 4894 @codepage.setter
4895 - def codepage(self, codepage):
4896 if not isinstance(codepage, CodePage): 4897 raise TypeError("codepage should be a CodePage, not a %r" % type(codepage)) 4898 meta = self._meta 4899 if meta.status != READ_WRITE: 4900 raise DbfError('%s not in read/write mode, unable to change codepage' % meta.filename) 4901 meta.header.codepage(codepage.code) 4902 meta.decoder = codecs.getdecoder(codepage.name) 4903 meta.encoder = codecs.getencoder(codepage.name) 4904 self._update_disk(headeronly=True)
4905 4906 @property
4907 - def field_count(self):
4908 """ 4909 the number of user fields in the table 4910 """ 4911 return self._meta.user_field_count
4912 4913 @property
4914 - def field_names(self):
4915 """ 4916 a list of the user fields in the table 4917 """ 4918 return self._meta.user_fields[:]
4919 4920 @property
4921 - def filename(self):
4922 """ 4923 table's file name, including path (if specified on open) 4924 """ 4925 return self._meta.filename
4926 4927 @property
4928 - def last_update(self):
4929 """ 4930 date of last update 4931 """ 4932 return self._meta.header.update
4933 4934 @property
4935 - def memoname(self):
4936 """ 4937 table's memo name (if path included in filename on open) 4938 """ 4939 return self._meta.memoname
4940 4941 @property
4942 - def record_length(self):
4943 """ 4944 number of bytes in a record (including deleted flag and null field size 4945 """ 4946 return self._meta.header.record_length
4947 4948 @property
4949 - def supported_tables(self):
4950 """ 4951 allowable table types 4952 """ 4953 return self._supported_tables
4954 4955 @property
4956 - def status(self):
4957 """ 4958 CLOSED, READ_ONLY, or READ_WRITE 4959 """ 4960 return self._meta.status
4961 4962 @property
4963 - def version(self):
4964 """ 4965 returns the dbf type of the table 4966 """ 4967 return self._version
4968
4969 - def add_fields(self, field_specs):
4970 """ 4971 adds field(s) to the table layout; format is Name Type(Length,Decimals)[; Name Type(Length,Decimals)[...]] 4972 backup table is created with _backup appended to name 4973 then zaps table, recreates current structure, and copies records back from the backup 4974 """ 4975 meta = self._meta 4976 if meta.status != READ_WRITE: 4977 raise DbfError('%s not in read/write mode, unable to add fields (%s)' % (meta.filename, meta.status)) 4978 header = meta.header 4979 fields = self.structure() + self._list_fields(field_specs, sep=';') 4980 if (len(fields) + ('_nullflags' in meta)) > meta.max_fields: 4981 raise DbfError( 4982 "Adding %d more field%s would exceed the limit of %d" 4983 % (len(fields), ('','s')[len(fields)==1], meta.max_fields) 4984 ) 4985 old_table = None 4986 if self: 4987 old_table = self.create_backup() 4988 self.zap() 4989 if meta.mfd is not None and not meta.ignorememos: 4990 meta.mfd.close() 4991 meta.mfd = None 4992 meta.memo = None 4993 if not meta.ignorememos: 4994 meta.newmemofile = True 4995 offset = 1 4996 for name in meta.fields: 4997 del meta[name] 4998 meta.fields[:] = [] 4999 5000 meta.blankrecord = None 5001 for field in fields: 5002 field = field.lower() 5003 pieces = field.split() 5004 name = pieces.pop(0) 5005 if '(' in pieces[0]: 5006 loc = pieces[0].index('(') 5007 pieces.insert(0, pieces[0][:loc]) 5008 pieces[1] = pieces[1][loc:] 5009 format = FieldType(pieces.pop(0)) 5010 if pieces and '(' in pieces[0]: 5011 for i, p in enumerate(pieces): 5012 if ')' in p: 5013 pieces[0:i+1] = [''.join(pieces[0:i+1])] 5014 break 5015 if name[0] == '_' or name[0].isdigit() or not name.replace('_', '').isalnum(): 5016 raise FieldSpecError("%s invalid: field names must start with a letter, and can only contain letters, digits, and _" % name) 5017 # name = unicode(name) 5018 if name in meta.fields: 5019 raise DbfError("Field '%s' already exists" % name) 5020 field_type = format 5021 if len(name) > 10: 5022 raise FieldSpecError("Maximum field name length is 10. '%s' is %d characters long." % (name, len(name))) 5023 if not field_type in meta.fieldtypes.keys(): 5024 raise FieldSpecError("Unknown field type: %s" % field_type) 5025 init = self._meta.fieldtypes[field_type]['Init'] 5026 flags = self._meta.fieldtypes[field_type]['flags'] 5027 try: 5028 length, decimals, flags = init(pieces, flags) 5029 except FieldSpecError: 5030 exc = sys.exc_info()[1] 5031 raise FieldSpecError(exc.message + ' (%s:%s)' % (meta.filename, name)) 5032 start = offset 5033 end = offset + length 5034 offset = end 5035 meta.fields.append(name) 5036 cls = meta.fieldtypes[field_type]['Class'] 5037 empty = meta.fieldtypes[field_type]['Empty'] 5038 meta[name] = ( 5039 field_type, 5040 start, 5041 length, 5042 end, 5043 decimals, 5044 flags, 5045 cls, 5046 empty, 5047 ) 5048 self._build_header_fields() 5049 self._update_disk() 5050 if old_table is not None: 5051 old_table.open() 5052 for record in old_table: 5053 self.append(scatter(record)) 5054 old_table.close()
5055
5056 - def allow_nulls(self, fields):
5057 """ 5058 set fields to allow null values 5059 """ 5060 meta = self._meta 5061 if meta.status != READ_WRITE: 5062 raise DbfError('%s not in read/write mode, unable to change field types' % meta.filename) 5063 elif self._versionabbr in ('db3', ): 5064 raise DbfError("Nullable fields are not allowed in %s tables" % self._version) 5065 header = meta.header 5066 fields = self._list_fields(fields) 5067 missing = set(fields) - set(self.field_names) 5068 if missing: 5069 raise FieldMissingError(', '.join(missing)) 5070 if len(self.field_names) + 1 > meta.max_fields: 5071 raise DbfError( 5072 "Adding the hidden _nullflags field would exceed the limit of %d fields for this table" 5073 % (meta.max_fields, ) 5074 ) 5075 old_table = None 5076 if self: 5077 old_table = self.create_backup() 5078 self.zap() 5079 if meta.mfd is not None and not meta.ignorememos: 5080 meta.mfd.close() 5081 meta.mfd = None 5082 meta.memo = None 5083 if not meta.ignorememos: 5084 meta.newmemofile = True 5085 for field in fields: 5086 specs = list(meta[field]) 5087 specs[FLAGS] |= NULLABLE 5088 meta[field] = tuple(specs) 5089 meta.blankrecord = None 5090 self._build_header_fields() 5091 self._update_disk() 5092 if old_table is not None: 5093 old_table.open() 5094 for record in old_table: 5095 self.append(scatter(record)) 5096 old_table.close()
5097
5098 - def append(self, data=b'', drop=False, multiple=1):
5099 """ 5100 adds <multiple> blank records, and fills fields with dict/tuple values if present 5101 """ 5102 meta = self._meta 5103 if meta.status != READ_WRITE: 5104 raise DbfError('%s not in read/write mode, unable to append records' % meta.filename) 5105 if not self.field_count: 5106 raise DbfError("No fields defined, cannot append") 5107 empty_table = len(self) == 0 5108 dictdata = False 5109 tupledata = False 5110 header = meta.header 5111 kamikaze = b'' 5112 if header.record_count == meta.max_records: 5113 raise DbfError("table %r is full; unable to add any more records" % self) 5114 if isinstance(data, (Record, RecordTemplate)): 5115 if data._meta.record_sig[0] == self._meta.record_sig[0]: 5116 kamikaze = data._data 5117 else: 5118 if isinstance(data, dict): 5119 dictdata = data 5120 data = b'' 5121 elif isinstance(data, tuple): 5122 if len(data) > self.field_count: 5123 raise DbfError("incoming data has too many values") 5124 tupledata = data 5125 data = b'' 5126 elif data: 5127 raise TypeError("data to append must be a tuple, dict, record, or template; not a %r" % type(data)) 5128 newrecord = Record(recnum=header.record_count, layout=meta, kamikaze=kamikaze) 5129 if kamikaze and meta.memofields: 5130 newrecord._start_flux() 5131 for field in meta.memofields: 5132 newrecord[field] = data[field] 5133 newrecord._commit_flux() 5134 5135 self._table.append(newrecord) 5136 header.record_count += 1 5137 if not kamikaze: 5138 try: 5139 if dictdata: 5140 gather(newrecord, dictdata, drop=drop) 5141 elif tupledata: 5142 newrecord._start_flux() 5143 for index, item in enumerate(tupledata): 5144 newrecord[index] = item 5145 newrecord._commit_flux() 5146 elif data: 5147 newrecord._start_flux() 5148 data_fields = field_names(data) 5149 my_fields = self.field_names 5150 for field in data_fields: 5151 if field not in my_fields: 5152 if not drop: 5153 raise DbfError("field %r not in table %r" % (field, self)) 5154 else: 5155 newrecord[field] = data[field] 5156 newrecord._commit_flux() 5157 except Exception: 5158 self._table.pop() # discard failed record 5159 header.record_count = header.record_count - 1 5160 self._update_disk() 5161 raise 5162 multiple -= 1 5163 if multiple: 5164 data = newrecord._data 5165 single = header.record_count 5166 total = single + multiple 5167 while single < total: 5168 multi_record = Record(single, meta, kamikaze=data) 5169 multi_record._start_flux() 5170 self._table.append(multi_record) 5171 for field in meta.memofields: 5172 multi_record[field] = newrecord[field] 5173 single += 1 5174 multi_record._commit_flux() 5175 header.record_count = total # += multiple 5176 newrecord = multi_record 5177 self._update_disk(headeronly=True)
5178
5179 - def close(self):
5180 """ 5181 closes disk files, flushing record data to disk 5182 ensures table data is available if keep_table 5183 ensures memo data is available if keep_memos 5184 """ 5185 if self._meta.location == ON_DISK and self._meta.status != CLOSED: 5186 self._table.flush() 5187 if self._meta.mfd is not None: 5188 self._meta.mfd.close() 5189 self._meta.mfd = None 5190 self._meta.dfd.close() 5191 self._meta.dfd = None 5192 self._meta.status = CLOSED
5193
5194 - def create_backup(self, new_name=None, on_disk=None):
5195 """ 5196 creates a backup table 5197 """ 5198 meta = self._meta 5199 already_open = meta.status != CLOSED 5200 if not already_open: 5201 self.open() 5202 if on_disk is None: 5203 on_disk = meta.location 5204 if not on_disk and new_name is None: 5205 new_name = self.filename + '_backup' 5206 if new_name is None: 5207 upper = self.filename.isupper() 5208 directory, filename = os.path.split(self.filename) 5209 name, ext = os.path.splitext(filename) 5210 extra = ('_backup', '_BACKUP')[upper] 5211 new_name = os.path.join(temp_dir or directory, name + extra + ext) 5212 bkup = Table(new_name, self.structure(), codepage=self.codepage.name, dbf_type=self._versionabbr, on_disk=on_disk) 5213 bkup.open() 5214 for record in self: 5215 bkup.append(record) 5216 bkup.close() 5217 self.backup = new_name 5218 if not already_open: 5219 self.close() 5220 return bkup
5221
5222 - def create_index(self, key):
5223 """ 5224 creates an in-memory index using the function key 5225 """ 5226 meta = self._meta 5227 if meta.status == CLOSED: 5228 raise DbfError('%s is closed' % meta.filename) 5229 return Index(self, key)
5230
5231 - def create_template(self, record=None, defaults=None):
5232 """ 5233 returns a record template that can be used like a record 5234 """ 5235 return RecordTemplate(self._meta, original_record=record, defaults=defaults)
5236
5237 - def delete_fields(self, doomed):
5238 """ 5239 removes field(s) from the table 5240 creates backup files with _backup appended to the file name, 5241 then modifies current structure 5242 """ 5243 meta = self._meta 5244 if meta.status != READ_WRITE: 5245 raise DbfError('%s not in read/write mode, unable to delete fields' % meta.filename) 5246 doomed = self._list_fields(doomed) 5247 header = meta.header 5248 for victim in doomed: 5249 if victim not in meta.user_fields: 5250 raise DbfError("field %s not in table -- delete aborted" % victim) 5251 old_table = None 5252 if self: 5253 old_table = self.create_backup() 5254 self.zap() 5255 if meta.mfd is not None and not meta.ignorememos: 5256 meta.mfd.close() 5257 meta.mfd = None 5258 meta.memo = None 5259 if not meta.ignorememos: 5260 meta.newmemofile = True 5261 if '_nullflags' in meta.fields: 5262 doomed.append('_nullflags') 5263 for victim in doomed: 5264 layout = meta[victim] 5265 meta.fields.pop(meta.fields.index(victim)) 5266 start = layout[START] 5267 end = layout[END] 5268 for field in meta.fields: 5269 if meta[field][START] == end: 5270 specs = list(meta[field]) 5271 end = specs[END] #self._meta[field][END] 5272 specs[START] = start #self._meta[field][START] = start 5273 specs[END] = start + specs[LENGTH] #self._meta[field][END] = start + self._meta[field][LENGTH] 5274 start = specs[END] #self._meta[field][END] 5275 meta[field] = tuple(specs) 5276 self._build_header_fields() 5277 self._update_disk() 5278 for name in list(meta): 5279 if name not in meta.fields: 5280 del meta[name] 5281 if old_table is not None: 5282 old_table.open() 5283 for record in old_table: 5284 self.append(scatter(record), drop=True) 5285 old_table.close()
5286
5287 - def disallow_nulls(self, fields):
5288 """ 5289 set fields to not allow null values 5290 """ 5291 meta = self._meta 5292 if meta.status != READ_WRITE: 5293 raise DbfError('%s not in read/write mode, unable to change field types' % meta.filename) 5294 fields = self._list_fields(fields) 5295 missing = set(fields) - set(self.field_names) 5296 if missing: 5297 raise FieldMissingError(', '.join(missing)) 5298 old_table = None 5299 if self: 5300 old_table = self.create_backup() 5301 self.zap() 5302 if meta.mfd is not None and not meta.ignorememos: 5303 meta.mfd.close() 5304 meta.mfd = None 5305 meta.memo = None 5306 if not meta.ignorememos: 5307 meta.newmemofile = True 5308 for field in fields: 5309 specs = list(meta[field]) 5310 specs[FLAGS] &= 0xff ^ NULLABLE 5311 meta[field] = tuple(specs) 5312 meta.blankrecord = None 5313 self._build_header_fields() 5314 self._update_disk() 5315 if old_table is not None: 5316 old_table.open() 5317 for record in old_table: 5318 self.append(scatter(record)) 5319 old_table.close()
5320
5321 - def field_info(self, field):
5322 """ 5323 returns (field type, size, dec, class) of field 5324 """ 5325 if field in self.field_names: 5326 field = self._meta[field] 5327 return FieldInfo(field[TYPE], field[LENGTH], field[DECIMALS], field[CLASS]) 5328 raise FieldMissingError("%s is not a field in %s" % (field, self.filename))
5329
5330 - def index(self, record, start=None, stop=None):
5331 """ 5332 returns the index of record between start and stop 5333 start and stop default to the first and last record 5334 """ 5335 if not isinstance(record, (Record, RecordTemplate, dict, tuple)): 5336 raise TypeError("x should be a record, template, dict, or tuple, not %r" % type(record)) 5337 meta = self._meta 5338 if meta.status == CLOSED: 5339 raise DbfError('%s is closed' % meta.filename) 5340 if start is None: 5341 start = 0 5342 if stop is None: 5343 stop = len(self) 5344 for i in range(start, stop): 5345 if record == (self[i]): 5346 return i 5347 else: 5348 raise NotFoundError("dbf.Table.index(x): x not in table", data=record)
5349
5350 - def new(self, filename, field_specs=None, memo_size=None, ignore_memos=None, codepage=None, default_data_types=None, field_data_types=None, on_disk=True):
5351 """ 5352 returns a new table of the same type 5353 """ 5354 if field_specs is None: 5355 field_specs = self.structure() 5356 if on_disk: 5357 path, name = os.path.split(filename) 5358 if path == "": 5359 filename = os.path.join(os.path.split(self.filename)[0], filename) 5360 elif name == "": 5361 filename = os.path.join(path, os.path.split(self.filename)[1]) 5362 if memo_size is None: 5363 memo_size = self._meta.memo_size 5364 if ignore_memos is None: 5365 ignore_memos = self._meta.ignorememos 5366 if codepage is None: 5367 codepage = self._meta.header.codepage()#[0] 5368 if default_data_types is None: 5369 default_data_types = self._meta._default_data_types 5370 if field_data_types is None: 5371 field_data_types = self._meta._field_data_types 5372 return Table(filename, field_specs, memo_size, ignore_memos, codepage, default_data_types, field_data_types, dbf_type=self._versionabbr, on_disk=on_disk)
5373
5374 - def nullable_field(self, field):
5375 """ 5376 returns True if field allows Nulls 5377 """ 5378 if field not in self.field_names: 5379 raise MissingField(field) 5380 return bool(self._meta[field][FLAGS] & NULLABLE)
5381
5382 - def open(self, mode=READ_WRITE):
5383 """ 5384 (re)opens disk table, (re)initializes data structures 5385 """ 5386 if mode not in (READ_WRITE, READ_ONLY): 5387 raise DbfError("mode for open must be 'read-write' or 'read-only', not %r" % mode) 5388 meta = self._meta 5389 if meta.status == mode: 5390 return self # no-op 5391 meta.status = mode 5392 if meta.location == IN_MEMORY: 5393 return self 5394 if '_table' in dir(self): 5395 del self._table 5396 dfd = meta.dfd = open(meta.filename, 'r+b') 5397 dfd.seek(0) 5398 header = meta.header = self._TableHeader(dfd.read(32), self._pack_date, self._unpack_date) 5399 if not header.version in self._supported_tables: 5400 dfd.close() 5401 dfd = None 5402 raise DbfError("Unsupported dbf type: %s [%x]" % (version_map.get(header.version, 'Unknown: %s' % header.version), header.version)) 5403 fieldblock = dfd.read(header.start - 32) 5404 for i in range(len(fieldblock) // 32 + 1): 5405 fieldend = i * 32 5406 if fieldblock[fieldend] == CR: 5407 break 5408 else: 5409 raise BadDataError("corrupt field structure in header") 5410 if len(fieldblock[:fieldend]) % 32 != 0: 5411 raise BadDataError("corrupt field structure in header") 5412 header.fields = fieldblock[:fieldend] 5413 header.extra = fieldblock[fieldend + 1:] # skip trailing \r 5414 self._meta.ignorememos = self._meta.original_ignorememos 5415 self._initialize_fields() 5416 self._check_memo_integrity() 5417 self._index = -1 5418 dfd.seek(0) 5419 return self
5420
5421 - def pack(self):
5422 """ 5423 physically removes all deleted records 5424 """ 5425 meta = self._meta 5426 if meta.status != READ_WRITE: 5427 raise DbfError('%s not in read/write mode, unable to pack records' % meta.filename) 5428 for dbfindex in self._indexen: 5429 dbfindex._clear() 5430 newtable = [] 5431 index = 0 5432 for record in self._table: 5433 if is_deleted(record): 5434 record._recnum = -1 5435 else: 5436 record._recnum = index 5437 newtable.append(record) 5438 index += 1 5439 if meta.location == ON_DISK: 5440 self._table.clear() 5441 else: 5442 self._table[:] = [] 5443 for record in newtable: 5444 self._table.append(record) 5445 self._pack_count += 1 5446 self._meta.header.record_count = index 5447 self._index = -1 5448 self._update_disk() 5449 self.reindex()
5450
5451 - def query(self, criteria):
5452 """ 5453 criteria is a string that will be converted into a function that returns 5454 a List of all matching records 5455 """ 5456 meta = self._meta 5457 if meta.status == CLOSED: 5458 raise DbfError('%s is closed' % meta.filename) 5459 return pql(self, criteria)
5460
5461 - def reindex(self):
5462 """ 5463 reprocess all indices for this table 5464 """ 5465 meta = self._meta 5466 if meta.status == CLOSED: 5467 raise DbfError('%s is closed' % meta.filename) 5468 for dbfindex in self._indexen: 5469 dbfindex._reindex()
5470
5471 - def rename_field(self, oldname, newname):
5472 """ 5473 renames an existing field 5474 """ 5475 meta = self._meta 5476 if meta.status != READ_WRITE: 5477 raise DbfError('%s not in read/write mode, unable to change field names' % meta.filename) 5478 if self: 5479 self.create_backup() 5480 if not oldname in self._meta.user_fields: 5481 raise FieldMissingError("field --%s-- does not exist -- cannot rename it." % oldname) 5482 if newname[0] == '_' or newname[0].isdigit() or not newname.replace('_', '').isalnum(): 5483 raise FieldSpecError("field names cannot start with _ or digits, and can only contain the _, letters, and digits") 5484 newname = newname.lower() 5485 if newname in self._meta.fields: 5486 raise DbfError("field --%s-- already exists" % newname) 5487 if len(newname) > 10: 5488 raise FieldSpecError("maximum field name length is 10. '%s' is %d characters long." % (newname, len(newname))) 5489 self._meta[newname] = self._meta[oldname] 5490 self._meta.fields[self._meta.fields.index(oldname)] = newname 5491 self._build_header_fields() 5492 self._update_disk(headeronly=True)
5493
5494 - def resize_field(self, chosen, new_size):
5495 """ 5496 resizes field (C only at this time) 5497 creates backup file, then modifies current structure 5498 """ 5499 meta = self._meta 5500 if meta.status != READ_WRITE: 5501 raise DbfError('%s not in read/write mode, unable to change field size' % meta.filename) 5502 if not 0 < new_size < 256: 5503 raise DbfError("new_size must be between 1 and 255 (use delete_fields to remove a field)") 5504 chosen = self._list_fields(chosen) 5505 for candidate in chosen: 5506 if candidate not in self._meta.user_fields: 5507 raise DbfError("field %s not in table -- resize aborted" % candidate) 5508 elif self.field_info(candidate).field_type != FieldType.CHAR: 5509 raise DbfError("field %s is not Character -- resize aborted" % candidate) 5510 if self: 5511 old_table = self.create_backup() 5512 self.zap() 5513 if meta.mfd is not None and not meta.ignorememos: 5514 meta.mfd.close() 5515 meta.mfd = None 5516 meta.memo = None 5517 if not meta.ignorememos: 5518 meta.newmemofile = True 5519 struct = self.structure() 5520 meta.user_fields[:] = [] 5521 new_struct = [] 5522 for field_spec in struct: 5523 name, spec = field_spec.split(' ', 1) 5524 if name in chosen: 5525 spec = "C(%d)" % new_size 5526 new_struct.append(' '.join([name, spec])) 5527 self.add_fields(';'.join(new_struct)) 5528 if old_table is not None: 5529 old_table.open() 5530 for record in old_table: 5531 self.append(scatter(record), drop=True) 5532 old_table.close()
5533
5534 - def structure(self, fields=None):
5535 """ 5536 return field specification list suitable for creating same table layout 5537 fields should be a list of fields or None for all fields in table 5538 """ 5539 field_specs = [] 5540 fields = self._list_fields(fields) 5541 try: 5542 for name in fields: 5543 field_specs.append(self._field_layout(self.field_names.index(name))) 5544 except ValueError: 5545 raise DbfError("field %s does not exist" % name) 5546 return field_specs
5547
5548 - def zap(self):
5549 """ 5550 removes all records from table -- this cannot be undone! 5551 """ 5552 meta = self._meta 5553 if meta.status != READ_WRITE: 5554 raise DbfError('%s not in read/write mode, unable to zap table' % meta.filename) 5555 if meta.location == IN_MEMORY: 5556 self._table[:] = [] 5557 else: 5558 self._table.clear() 5559 if meta.memo: 5560 meta.memo._zap() 5561 meta.header.record_count = 0 5562 self._index = -1 5563 self._update_disk()
5564
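Most of the maintenance methods above require the table to be open in read/write mode. A minimal sketch of how they fit together (the table and field names are hypothetical, and error handling is omitted):

    # table is assumed to be a dbf.Table already opened for reading and writing
    print(table.structure())              # e.g. ['name C(25)', 'comment C(50)']
    table.rename_field('comment', 'notes')
    table.resize_field('notes', 100)      # Character fields only; a backup is written first
    table.zap()                           # removes every record -- this cannot be undone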
5565 5566 -class Db3Table(Table):
5567 """ 5568 Provides an interface for working with dBase III tables. 5569 """ 5570 5571 _version = 'dBase III Plus' 5572 _versionabbr = 'db3' 5573 5574 @MutableDefault
5575 - def _field_types():
5576  return { 5577 CHAR: { 5578 'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character, 'Blank':lambda x: b' ' * x, 'Init':add_character, 5579 'Class':str, 'Empty':str, 'flags':tuple(), 5580 }, 5581 DATE: { 5582 'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date, 'Blank':lambda x: b' ', 'Init':add_date, 5583 'Class':datetime.date, 'Empty':none, 'flags':tuple(), 5584 }, 5585 NUMERIC: { 5586 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_numeric, 5587 'Class':'default', 'Empty':none, 'flags':tuple(), 5588 }, 5589 LOGICAL: { 5590 'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical, 'Blank':lambda x: b'?', 'Init':add_logical, 5591 'Class':bool, 'Empty':none, 'flags':tuple(), 5592 }, 5593 MEMO: { 5594 'Type':'Memo', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b' ', 'Init':add_memo, 5595 'Class':str, 'Empty':str, 'flags':tuple(), 5596 }, 5597 FLOAT: { 5598 'Type':'Float', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_numeric, 5599 'Class':'default', 'Empty':none, 'flags':tuple(), 5600 } }
5601 5602 _memoext = '.dbt' 5603 _memoClass = _Db3Memo 5604 _yesMemoMask = 0x80 5605 _noMemoMask = 0x7f 5606 _binary_types = () 5607 _character_types = (CHAR, MEMO) 5608 _currency_types = tuple() 5609 _date_types = (DATE, ) 5610 _datetime_types = tuple() 5611 _decimal_types = (NUMERIC, FLOAT) 5612 _fixed_types = (DATE, LOGICAL, MEMO) 5613 _logical_types = (LOGICAL, ) 5614 _memo_types = (MEMO, ) 5615 _numeric_types = (NUMERIC, FLOAT) 5616 _variable_types = (CHAR, NUMERIC) 5617 _dbfTableHeader = array('B', [0] * 32) 5618 _dbfTableHeader[0] = 3 # version - dBase III w/o memo's 5619 _dbfTableHeader[8:10] = array('B', pack_short_int(33)) 5620 _dbfTableHeader[10] = 1 # record length -- one for delete flag 5621 _dbfTableHeader[29] = 3 # code page -- 437 US-MS DOS 5622 _dbfTableHeader = _dbfTableHeader.tobytes() 5623 _dbfTableHeaderExtra = b'' 5624 _supported_tables = (0x03, 0x83) 5625
5626 - def _check_memo_integrity(self):
5627 """ 5628 dBase III and Clipper 5629 """ 5630 if not self._meta.ignorememos: 5631 memo_fields = False 5632 for field in self._meta.fields: 5633 if self._meta[field][TYPE] in self._memo_types: 5634 memo_fields = True 5635 break 5636 if memo_fields and self._meta.header.version != 0x83: 5637 self._meta.dfd.close() 5638 self._meta.dfd = None 5639 raise BadDataError("Table structure corrupt: memo fields exist, header declares no memos") 5640 elif memo_fields and not os.path.exists(self._meta.memoname): 5641 self._meta.dfd.close() 5642 self._meta.dfd = None 5643 raise BadDataError("Table structure corrupt: memo fields exist without memo file") 5644 if memo_fields: 5645 try: 5646 self._meta.memo = self._memoClass(self._meta) 5647 except Exception: 5648 exc = sys.exc_info()[1] 5649 self._meta.dfd.close() 5650 self._meta.dfd = None 5651 raise BadDataError("Table structure corrupt: unable to use memo file (%s)" % exc.args[-1])
5652
5653 - def _initialize_fields(self):
5654 """ 5655 builds the FieldList of names, types, and descriptions 5656 """ 5657 old_fields = defaultdict(dict) 5658 meta = self._meta 5659 for name in meta.fields: 5660 old_fields[name]['type'] = meta[name][TYPE] 5661 old_fields[name]['empty'] = meta[name][EMPTY] 5662 old_fields[name]['class'] = meta[name][CLASS] 5663 meta.fields[:] = [] 5664 offset = 1 5665 fieldsdef = meta.header.fields 5666 if len(fieldsdef) % 32 != 0: 5667 raise BadDataError("field definition block corrupt: %d bytes in size" % len(fieldsdef)) 5668 if len(fieldsdef) // 32 != meta.header.field_count: 5669 raise BadDataError("Header shows %d fields, but field definition block has %d fields" % (meta.header.field_count, len(fieldsdef) // 32)) 5670 total_length = meta.header.record_length 5671 for i in range(meta.header.field_count): 5672 fieldblock = fieldsdef[i*32:(i+1)*32] 5673 name = self._meta.decoder(unpack_str(fieldblock[:11]))[0] 5674 type = fieldblock[11] 5675 if not type in meta.fieldtypes: 5676 raise BadDataError("Unknown field type: %s" % type) 5677 start = offset 5678 length = fieldblock[16] 5679 offset += length 5680 end = start + length 5681 decimals = fieldblock[17] 5682 flags = fieldblock[18] 5683 if name in meta.fields: 5684 raise BadDataError('Duplicate field name found: %s' % name) 5685 meta.fields.append(name) 5686 if name in old_fields and old_fields[name]['type'] == type: 5687 cls = old_fields[name]['class'] 5688 empty = old_fields[name]['empty'] 5689 else: 5690 cls = meta.fieldtypes[type]['Class'] 5691 empty = meta.fieldtypes[type]['Empty'] 5692 meta[name] = ( 5693 type, 5694 start, 5695 length, 5696 end, 5697 decimals, 5698 flags, 5699 cls, 5700 empty, 5701 ) 5702 if offset != total_length: 5703 raise BadDataError("Header shows record length of %d, but calculated record length is %d" % (total_length, offset)) 5704 meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM] 5705 meta.user_field_count = len(meta.user_fields) 5706 Record._create_blank_data(meta)
5707
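Db3Table is normally reached through the dbf.Table factory rather than instantiated directly. A rough usage sketch, assuming the usual semicolon-separated field-spec syntax, that the dbf_type keyword selects the 'db3' flavor, and that open() with no arguments allows writing in this version:

    import datetime
    import dbf                            # this module, assumed importable as dbf

    table = dbf.Table('people', 'name C(25); wage N(8,2); hired D; active L; notes M',
                      dbf_type='db3')
    table.open()
    table.append({'name': 'Ethan', 'wage': 12.50,
                  'hired': datetime.date(2013, 1, 2), 'active': True,
                  'notes': 'memo text lives in the companion .dbt file'})
    table.close()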
5708 5709 -class ClpTable(Db3Table):
5710 """ 5711 Provides an interface for working with Clipper tables. 5712 """ 5713 5714 _version = 'Clipper 5' 5715 _versionabbr = 'clp' 5716 5717 @MutableDefault
5718 - def _field_types():
5719  return { 5720 CHAR: { 5721 'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character, 'Blank':lambda x: b' ' * x, 'Init':add_clp_character, 5722 'Class':str, 'Empty':str, 'flags':tuple(), 5723 }, 5724 DATE: { 5725 'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date, 'Blank':lambda x: b' ', 'Init':add_date, 5726 'Class':datetime.date, 'Empty':none, 'flags':tuple(), 5727 }, 5728 NUMERIC: { 5729 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_numeric, 5730 'Class':'default', 'Empty':none, 'flags':tuple(), 5731 }, 5732 LOGICAL: { 5733 'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical, 'Blank':lambda x: b'?', 'Init':add_logical, 5734 'Class':bool, 'Empty':none, 'flags':tuple(), 5735 }, 5736 MEMO: { 5737 'Type':'Memo', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b' ', 'Init':add_memo, 5738 'Class':str, 'Empty':str, 'flags':tuple(), 5739 }, 5740 FLOAT: { 5741 'Type':'Float', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_numeric, 5742 'Class':'default', 'Empty':none, 'flags':tuple(), 5743 } }
5744 5745 _memoext = '.dbt' 5746 _memoClass = _Db3Memo 5747 _yesMemoMask = 0x80 5748 _noMemoMask = 0x7f 5749 _binary_types = () 5750 _character_types = (CHAR, MEMO) 5751 _currency_types = tuple() 5752 _date_types = (DATE, ) 5753 _datetime_types = tuple() 5754 _decimal_types = (NUMERIC, FLOAT) 5755 _fixed_types = (DATE, LOGICAL, MEMO) 5756 _logical_types = (LOGICAL, ) 5757 _memo_types = (MEMO, ) 5758 _numeric_types = (NUMERIC, FLOAT) 5759 _variable_types = (CHAR, NUMERIC) 5760 _dbfTableHeader = array('B', [0] * 32) 5761 _dbfTableHeader[0] = 3 # version - dBase III w/o memo's 5762 _dbfTableHeader[8:10] = array('B', pack_short_int(33)) 5763 _dbfTableHeader[10] = 1 # record length -- one for delete flag 5764 _dbfTableHeader[29] = 3 # code page -- 437 US-MS DOS 5765 _dbfTableHeader = _dbfTableHeader.tobytes() 5766 _dbfTableHeaderExtra = b'' 5767 _supported_tables = (0x03, 0x83) 5768
5769 - class _TableHeader(Table._TableHeader):
5770 """ 5771 represents the data block that defines a tables type and layout 5772 """ 5773 5774 @property
5775 - def fields(self):
5776 "field block structure" 5777 fieldblock = self._data[32:] 5778 for i in range(len(fieldblock)//32+1): 5779 cr = i * 32 5780 if fieldblock[cr] == CR: 5781 break 5782 else: 5783 raise BadDataError("corrupt field structure") 5784 return fieldblock[:cr].tobytes()
5785 5786 @fields.setter
5787 - def fields(self, block):
5788 fieldblock = self._data[32:] 5789 for i in range(len(fieldblock)//32+1): 5790 cr = i * 32 5791 if fieldblock[cr] == CR: 5792 break 5793 else: 5794 raise BadDataError("corrupt field structure") 5795 cr += 32 # convert to indexing main structure 5796 fieldlen = len(block) 5797 if fieldlen % 32 != 0: 5798 raise BadDataError("fields structure corrupt: %d is not a multiple of 32" % fieldlen) 5799 self._data[32:cr] = array('B', block) # fields 5800 self._data[8:10] = array('B', pack_short_int(len(self._data))) # start 5801 fieldlen = fieldlen // 32 5802 recordlen = 1 # deleted flag 5803 for i in range(fieldlen): 5804 recordlen += block[i*32+16] 5805 if block[i*32+11] == CHAR: 5806 recordlen += block[i*32+17] * 256 5807 self._data[10:12] = array('B', pack_short_int(recordlen))
5808 5809
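The getter/setter pair above is what lets Clipper tables exceed the usual 255-character limit: for a Character field the length byte holds length % 256 and the decimals byte holds length // 256. A quick worked check of that encoding:

    length = 1000                              # a Clipper C(1000) field
    low, high = length % 256, length // 256    # stored in bytes 16 and 17 of the field definition
    assert (low, high) == (232, 3)
    assert low + high * 256 == 1000            # how the setter recomputes the record length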
5810 - def _build_header_fields(self):
5811 """ 5812 constructs fieldblock for disk table 5813 """ 5814 fieldblock = array('B', b'') 5815 memo = False 5816 nulls = False 5817 meta = self._meta 5818 header = meta.header 5819 header.version = header.version & self._noMemoMask 5820 meta.fields = [f for f in meta.fields if f != '_nullflags'] 5821 total_length = 1 # delete flag 5822 for field in meta.fields: 5823 layout = meta[field] 5824 if meta.fields.count(field) > 1: 5825 raise BadDataError("corrupted field structure (noticed in _build_header_fields)") 5826 fielddef = array('B', [0] * 32) 5827 fielddef[:11] = array('B', pack_str(meta.encoder(field)[0])) 5828 fielddef[11] = layout[TYPE] 5829 fielddef[12:16] = array('B', pack_long_int(layout[START])) 5830 total_length += layout[LENGTH] 5831 if layout[TYPE] == CHAR: # long character field 5832 fielddef[16] = layout[LENGTH] % 256 5833 fielddef[17] = layout[LENGTH] // 256 5834 else: 5835 fielddef[16] = layout[LENGTH] 5836 fielddef[17] = layout[DECIMALS] 5837 fielddef[18] = layout[FLAGS] 5838 fieldblock.extend(fielddef) 5839 if layout[TYPE] in meta.memo_types: 5840 memo = True 5841 if layout[FLAGS] & NULLABLE: 5842 nulls = True 5843 if memo: 5844 header.version = header.version | self._yesMemoMask 5845 if meta.memo is None: 5846 meta.memo = self._memoClass(meta) 5847 else: 5848 if os.path.exists(meta.memoname): 5849 if meta.mfd is not None: 5850 meta.mfd.close() 5851 5852 os.remove(meta.memoname) 5853 meta.memo = None 5854 if nulls: 5855 start = layout[START] + layout[LENGTH] 5856 length, one_more = divmod(len(meta.fields), 8) 5857 if one_more: 5858 length += 1 5859 fielddef = array('B', [0] * 32) 5860 fielddef[:11] = array('B', pack_str(b'_nullflags')) 5861 fielddef[11] = FieldType._NULLFLAG 5862 fielddef[12:16] = array('B', pack_long_int(start)) 5863 fielddef[16] = length 5864 fielddef[17] = 0 5865 fielddef[18] = BINARY | SYSTEM 5866 fieldblock.extend(fielddef) 5867 meta.fields.append('_nullflags') 5868 nullflags = ( 5869 _NULLFLAG, # type 5870 start, # start 5871 length, # length 5872 start + length, # end 5873 0, # decimals 5874 BINARY | SYSTEM, # flags 5875 none, # class 5876 none, # empty 5877 ) 5878 meta['_nullflags'] = nullflags 5879 header.fields = fieldblock.tobytes() 5880 header.record_length = total_length 5881 meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM] 5882 meta.user_field_count = len(meta.user_fields) 5883 Record._create_blank_data(meta)
5884
5885 - def _initialize_fields(self):
5886  """ 5887 builds the FieldList of names, types, and descriptions 5888 """ 5889 meta = self._meta 5890 old_fields = defaultdict(dict) 5891 for name in meta.fields: 5892 old_fields[name]['type'] = meta[name][TYPE] 5893 old_fields[name]['empty'] = meta[name][EMPTY] 5894 old_fields[name]['class'] = meta[name][CLASS] 5895 meta.fields[:] = [] 5896 offset = 1 5897 fieldsdef = meta.header.fields 5898 if len(fieldsdef) % 32 != 0: 5899 raise BadDataError("field definition block corrupt: %d bytes in size" % len(fieldsdef)) 5900 if len(fieldsdef) // 32 != meta.header.field_count: 5901 raise BadDataError("Header shows %d fields, but field definition block has %d fields" % 5902 (meta.header.field_count, len(fieldsdef) // 32)) 5903 total_length = meta.header.record_length 5904 for i in range(meta.header.field_count): 5905 fieldblock = fieldsdef[i*32:(i+1)*32] 5906 name = self._meta.decoder(unpack_str(fieldblock[:11]))[0] 5907 type = fieldblock[11] 5908 if not type in meta.fieldtypes: 5909 raise BadDataError("Unknown field type: %s" % type) 5910 start = offset 5911 length = fieldblock[16] 5912 decimals = fieldblock[17] 5913 if type == CHAR: 5914 length += decimals * 256 5915 offset += length 5916 end = start + length 5917 flags = fieldblock[18] 5918 if name in meta.fields: 5919 raise BadDataError('Duplicate field name found: %s' % name) 5920 meta.fields.append(name) 5921 if name in old_fields and old_fields[name]['type'] == type: 5922 cls = old_fields[name]['class'] 5923 empty = old_fields[name]['empty'] 5924 else: 5925 cls = meta.fieldtypes[type]['Class'] 5926 empty = meta.fieldtypes[type]['Empty'] 5927 meta[name] = ( 5928 type, 5929 start, 5930 length, 5931 end, 5932 decimals, 5933 flags, 5934 cls, 5935 empty, 5936 ) 5937 if offset != total_length: 5938 raise BadDataError("Header shows record length of %d, but calculated record length is %d" % 5939 (total_length, offset)) 5940 meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM] 5941 meta.user_field_count = len(meta.user_fields) 5942 Record._create_blank_data(meta)
5943
5944 5945 -class FpTable(Table):
5946 """ 5947 Provides an interface for working with FoxPro 2 tables 5948 """ 5949 5950 _version = 'Foxpro' 5951 _versionabbr = 'fp' 5952 5953 @MutableDefault
5954 - def _field_types():
5955 return { 5956 CHAR: { 5957 'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character, 'Blank':lambda x: b' ' * x, 'Init':add_vfp_character, 5958 'Class':str, 'Empty':str, 'flags':('binary', 'nocptrans', 'null', ), 5959 }, 5960 FLOAT: { 5961 'Type':'Float', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_vfp_numeric, 5962 'Class':'default', 'Empty':none, 'flags':('null', ), 5963 }, 5964 NUMERIC: { 5965 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_vfp_numeric, 5966 'Class':'default', 'Empty':none, 'flags':('null', ), 5967 }, 5968 LOGICAL: { 5969 'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical, 'Blank':lambda x: b'?', 'Init':add_logical, 5970 'Class':bool, 'Empty':none, 'flags':('null', ), 5971 }, 5972 DATE: { 5973 'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date, 'Blank':lambda x: b' ', 'Init':add_date, 5974 'Class':datetime.date, 'Empty':none, 'flags':('null', ), 5975 }, 5976 MEMO: { 5977 'Type':'Memo', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b' ', 'Init':add_memo, 5978 'Class':str, 'Empty':str, 'flags':('binary', 'nocptrans', 'null', ), 5979 }, 5980 GENERAL: { 5981 'Type':'General', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b' ', 'Init':add_binary_memo, 5982 'Class':bytes, 'Empty':bytes, 'flags':('null', ), 5983 }, 5984 PICTURE: { 5985 'Type':'Picture', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b' ', 'Init':add_binary_memo, 5986 'Class':bytes, 'Empty':bytes, 'flags':('null', ), 5987 }, 5988 _NULLFLAG: { 5989 'Type':'_NullFlags', 'Retrieve':unsupported_type, 'Update':unsupported_type, 'Blank':lambda x: b'\x00' * x, 'Init':None, 5990 'Class':none, 'Empty':none, 'flags':('binary', 'system', ), 5991 } }
5992 5993 _memoext = '.fpt' 5994 _memoClass = _VfpMemo 5995 _yesMemoMask = 0xf5 # 1111 0101 5996 _noMemoMask = 0x03 # 0000 0011 5997 _binary_types = (GENERAL, MEMO, PICTURE) 5998 # _character_types = ('C', 'D', 'F', 'L', 'M', 'N') # field representing character data 5999 _character_types = (CHAR, DATE, FLOAT, LOGICAL, MEMO, NUMERIC) # field representing character data 6000 _currency_types = tuple() 6001 _date_types = (DATE, ) 6002 _datetime_types = tuple() 6003 # _fixed_types = ('D', 'G', 'L', 'M', 'P') 6004 _fixed_types = (DATE, GENERAL, LOGICAL, MEMO, PICTURE) 6005 _logical_types = (LOGICAL, ) 6006 _memo_types = (GENERAL, MEMO, PICTURE) 6007 _numeric_types = (FLOAT, NUMERIC) 6008 _text_types = (CHAR, MEMO) 6009 _variable_types = (CHAR, FLOAT, NUMERIC) 6010 _supported_tables = (0x03, 0xf5) 6011 _dbfTableHeader = array('B', [0] * 32) 6012 _dbfTableHeader[0] = 0x30 # version - Foxpro 6 0011 0000 6013 _dbfTableHeader[8:10] = array('B', pack_short_int(33 + 263)) 6014 _dbfTableHeader[10] = 1 # record length -- one for delete flag 6015 _dbfTableHeader[29] = 3 # code page -- 437 US-MS DOS 6016 _dbfTableHeader = _dbfTableHeader.tobytes() 6017 _dbfTableHeaderExtra = b'\x00' * 263 6018
6019 - def _check_memo_integrity(self):
6020 if not self._meta.ignorememos: 6021 memo_fields = False 6022 for field in self._meta.fields: 6023 if self._meta[field][TYPE] in self._memo_types: 6024 memo_fields = True 6025 break 6026 if memo_fields and not os.path.exists(self._meta.memoname): 6027 self._meta.dfd.close() 6028 self._meta.dfd = None 6029 raise BadDataError("Table structure corrupt: memo fields exist without memo file") 6030 elif not memo_fields and os.path.exists(self._meta.memoname): 6031 self._meta.dfd.close() 6032 self._meta.dfd = None 6033 raise BadDataError("Table structure corrupt: no memo fields exist but memo file does") 6034 if memo_fields: 6035 try: 6036 self._meta.memo = self._memoClass(self._meta) 6037 except Exception: 6038 exc = sys.exc_info()[1] 6039 self._meta.dfd.close() 6040 self._meta.dfd = None 6041 raise BadDataError("Table structure corrupt: unable to use memo file (%s)" % exc.args[-1])
6042
6043 - def _initialize_fields(self):
6044  """ 6045 builds the FieldList of names, types, and descriptions 6046 """ 6047 meta = self._meta 6048 old_fields = defaultdict(dict) 6049 for name in meta.fields: 6050 old_fields[name]['type'] = meta[name][TYPE] 6051 old_fields[name]['class'] = meta[name][CLASS] 6052 old_fields[name]['empty'] = meta[name][EMPTY] 6053 meta.fields[:] = [] 6054 offset = 1 6055 fieldsdef = meta.header.fields 6056 if len(fieldsdef) % 32 != 0: 6057 raise BadDataError("field definition block corrupt: %d bytes in size" % len(fieldsdef)) 6058 if len(fieldsdef) // 32 != meta.header.field_count: 6059 raise BadDataError("Header shows %d fields, but field definition block has %d fields" % 6060 (meta.header.field_count, len(fieldsdef) // 32)) 6061 total_length = meta.header.record_length 6062 for i in range(meta.header.field_count): 6063 fieldblock = fieldsdef[i*32:(i+1)*32] 6064 name = self._meta.decoder(unpack_str(fieldblock[:11]))[0] 6065 type = fieldblock[11] 6066 if not type in meta.fieldtypes: 6067 raise BadDataError("Unknown field type: %s" % type) 6068 start = offset 6069 length = fieldblock[16] 6070 offset += length 6071 end = start + length 6072 decimals = fieldblock[17] 6073 flags = fieldblock[18] 6074 if name in meta.fields: 6075 raise BadDataError('Duplicate field name found: %s' % name) 6076 meta.fields.append(name) 6077 if name in old_fields and old_fields[name]['type'] == type: 6078 cls = old_fields[name]['class'] 6079 empty = old_fields[name]['empty'] 6080 else: 6081 cls = meta.fieldtypes[type]['Class'] 6082 empty = meta.fieldtypes[type]['Empty'] 6083 meta[name] = ( 6084 type, 6085 start, 6086 length, 6087 end, 6088 decimals, 6089 flags, 6090 cls, 6091 empty, 6092 ) 6093 if offset != total_length: 6094 raise BadDataError("Header shows record length of %d, but calculated record length is %d" % (total_length, offset)) 6095 meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM] 6096 meta.user_field_count = len(meta.user_fields) 6097 Record._create_blank_data(meta)
6098 6099 @staticmethod
6100 - def _pack_date(date):
6101 """ 6102 Returns a group of three bytes, in integer form, of the date 6103 """ 6104 # return "%c%c%c" % (date.year - 2000, date.month, date.day) 6105 return bytes([date.year - 2000, date.month, date.day])
6106 6107 @staticmethod
6108 - def _unpack_date(bytestr):
6109 """ 6110 Returns a Date() of the packed three-byte date passed in 6111 """ 6112 year, month, day = struct.unpack('<BBB', bytestr) 6113 year += 2000 6114 return Date(year, month, day)
6115
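The two helpers above squeeze a date into three one-byte values (year - 2000, month, day), so only years 2000 through 2255 survive the round trip. A small sanity check using the module's own Date class:

    import datetime

    packed = FpTable._pack_date(datetime.date(2013, 6, 7))
    assert packed == bytes([13, 6, 7])
    assert FpTable._unpack_date(packed) == Date(2013, 6, 7)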
6116 -class VfpTable(FpTable):
6117 """ 6118 Provides an interface for working with Visual FoxPro 6 tables 6119 """ 6120 6121 _version = 'Visual Foxpro' 6122 _versionabbr = 'vfp' 6123 6124 @MutableDefault
6125 - def _field_types():
6126 return { 6127 CHAR: { 6128 'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character, 'Blank':lambda x: b' ' * x, 'Init':add_vfp_character, 6129 'Class':str, 'Empty':str, 'flags':('binary', 'nocptrans', 'null', ), 6130 }, 6131 CURRENCY: { 6132 'Type':'Currency', 'Retrieve':retrieve_currency, 'Update':update_currency, 'Blank':lambda x: b'\x00' * 8, 'Init':add_vfp_currency, 6133 'Class':Decimal, 'Empty':none, 'flags':('null', ), 6134 }, 6135 DOUBLE: { 6136 'Type':'Double', 'Retrieve':retrieve_double, 'Update':update_double, 'Blank':lambda x: b'\x00' * 8, 'Init':add_vfp_double, 6137 'Class':float, 'Empty':none, 'flags':('null', ), 6138 }, 6139 FLOAT: { 6140 'Type':'Float', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_vfp_numeric, 6141 'Class':'default', 'Empty':none, 'flags':('null', ), 6142 }, 6143 NUMERIC: { 6144 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_vfp_numeric, 6145 'Class':'default', 'Empty':none, 'flags':('null', ), 6146 }, 6147 INTEGER: { 6148 'Type':'Integer', 'Retrieve':retrieve_integer, 'Update':update_integer, 'Blank':lambda x: b'\x00' * 4, 'Init':add_vfp_integer, 6149 'Class':int, 'Empty':none, 'flags':('null', ), 6150 }, 6151 LOGICAL: { 6152 'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical, 'Blank':lambda x: b'?', 'Init':add_logical, 6153 'Class':bool, 'Empty':none, 'flags':('null', ), 6154 }, 6155 DATE: { 6156 'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date, 'Blank':lambda x: b' ', 'Init':add_date, 6157 'Class':datetime.date, 'Empty':none, 'flags':('null', ), 6158 }, 6159 DATETIME: { 6160 'Type':'DateTime', 'Retrieve':retrieve_vfp_datetime, 'Update':update_vfp_datetime, 'Blank':lambda x: b'\x00' * 8, 'Init':add_vfp_datetime, 6161 'Class':datetime.datetime, 'Empty':none, 'flags':('null', ), 6162 }, 6163 MEMO: { 6164 'Type':'Memo', 'Retrieve':retrieve_vfp_memo, 'Update':update_vfp_memo, 'Blank':lambda x: b'\x00\x00\x00\x00', 'Init':add_vfp_memo, 6165 'Class':str, 'Empty':str, 'flags':('binary', 'nocptrans', 'null', ), 6166 }, 6167 GENERAL: { 6168 'Type':'General', 'Retrieve':retrieve_vfp_memo, 'Update':update_vfp_memo, 'Blank':lambda x: b'\x00\x00\x00\x00', 'Init':add_vfp_binary_memo, 6169 'Class':bytes, 'Empty':bytes, 'flags':('null', ), 6170 }, 6171 PICTURE: { 6172 'Type':'Picture', 'Retrieve':retrieve_vfp_memo, 'Update':update_vfp_memo, 'Blank':lambda x: b'\x00\x00\x00\x00', 'Init':add_vfp_binary_memo, 6173 'Class':bytes, 'Empty':bytes, 'flags':('null', ), 6174 }, 6175 _NULLFLAG: { 6176 'Type':'_NullFlags', 'Retrieve':unsupported_type, 'Update':unsupported_type, 'Blank':lambda x: b'\x00' * x, 'Init':int, 6177 'Class':none, 'Empty':none, 'flags':('binary', 'system',), 6178 } }
6179 6180 _memoext = '.fpt' 6181 _memoClass = _VfpMemo 6182 _yesMemoMask = 0x30 # 0011 0000 6183 _noMemoMask = 0x30 # 0011 0000 6184 # _binary_types = ('B', 'G', 'I', 'P', 'T', 'Y') 6185 _binary_types = (DOUBLE, GENERAL, INTEGER, MEMO, PICTURE, DATETIME, CURRENCY) 6186 # _character_types = ('C', 'D', 'F', 'L', 'M', 'N') # field representing character data 6187 _character_types = (CHAR, DATE, FLOAT, LOGICAL, MEMO, NUMERIC) 6188 _currency_types = (CURRENCY, ) 6189 _date_types = (DATE, DATETIME) 6190 _datetime_types = (DATETIME, ) 6191 # _fixed_types = ('B', 'D', 'G', 'I', 'L', 'M', 'P', 'T', 'Y') 6192 _fixed_types = (DOUBLE, DATE, GENERAL, INTEGER, LOGICAL, MEMO, PICTURE, DATETIME, CURRENCY) 6193 _logical_types = (LOGICAL, ) 6194 _memo_types = (GENERAL, MEMO, PICTURE) 6195 # _numeric_types = ('B', 'F', 'I', 'N', 'Y') 6196 _numeric_types = (DOUBLE, FLOAT, INTEGER, NUMERIC, CURRENCY) 6197 _variable_types = (CHAR, FLOAT, NUMERIC) 6198 _supported_tables = (0x30, 0x31) 6199 _dbfTableHeader = array('B', [0] * 32) 6200 _dbfTableHeader[0] = 0x30 # version - Foxpro 6 0011 0000 6201 _dbfTableHeader[8:10] = array('B', pack_short_int(33 + 263)) 6202 _dbfTableHeader[10] = 1 # record length -- one for delete flag 6203 _dbfTableHeader[29] = 3 # code page -- 437 US-MS DOS 6204 _dbfTableHeader = _dbfTableHeader.tobytes() 6205 _dbfTableHeaderExtra = b'\x00' * 263 6206
6207 - def _initialize_fields(self):
6208 """ 6209 builds the FieldList of names, types, and descriptions 6210 """ 6211 meta = self._meta 6212 old_fields = defaultdict(dict) 6213 for name in meta.fields: 6214 old_fields[name]['type'] = meta[name][TYPE] 6215 old_fields[name]['class'] = meta[name][CLASS] 6216 old_fields[name]['empty'] = meta[name][EMPTY] 6217 meta.fields[:] = [] 6218 offset = 1 6219 fieldsdef = meta.header.fields 6220 meta.nullflags = None 6221 total_length = meta.header.record_length 6222 for i in range(meta.header.field_count): 6223 fieldblock = fieldsdef[i*32:(i+1)*32] 6224 name = self._meta.decoder(unpack_str(fieldblock[:11]))[0] 6225 type = fieldblock[11] 6226 if not type in meta.fieldtypes: 6227 raise BadDataError("Unknown field type: %s" % type) 6228 start = unpack_long_int(fieldblock[12:16]) 6229 length = fieldblock[16] 6230 offset += length 6231 end = start + length 6232 decimals = fieldblock[17] 6233 flags = fieldblock[18] 6234 if name in meta.fields: 6235 raise BadDataError('Duplicate field name found: %s' % name) 6236 meta.fields.append(name) 6237 if name in old_fields and old_fields[name]['type'] == type: 6238 cls = old_fields[name]['class'] 6239 empty = old_fields[name]['empty'] 6240 else: 6241 cls = meta.fieldtypes[type]['Class'] 6242 empty = meta.fieldtypes[type]['Empty'] 6243 meta[name] = ( 6244 type, 6245 start, 6246 length, 6247 end, 6248 decimals, 6249 flags, 6250 cls, 6251 empty, 6252 ) 6253 if offset != total_length: 6254 raise BadDataError("Header shows record length of %d, but calculated record length is %d" % (total_length, offset)) 6255 meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM] 6256 meta.user_field_count = len(meta.user_fields) 6257 Record._create_blank_data(meta)
6258
6259 6260 -class List(_Navigation):
6261 """ 6262 list of Dbf records, with set-like behavior 6263 """ 6264 6265 _desc = '' 6266
6267 - def __init__(self, records=None, desc=None, key=None):
6268 self._list = [] 6269 self._set = set() 6270 self._tables = dict() 6271 if key is not None: 6272 self.key = key 6273 if key.__doc__ is None: 6274 key.__doc__ = 'unknown' 6275 key = self.key 6276 self._current = -1 6277 if isinstance(records, self.__class__) and key is records.key: 6278 self._list = records._list[:] 6279 self._set = records._set.copy() 6280 self._current = 0 6281 elif records is not None: 6282 for record in records: 6283 value = key(record) 6284 item = (source_table(record), recno(record), value) 6285 if value not in self._set: 6286 self._set.add(value) 6287 self._list.append(item) 6288 self._current = 0 6289 if desc is not None: 6290 self._desc = desc
6291
6292 - def __add__(self, other):
6293 self._still_valid_check() 6294 key = self.key 6295 if isinstance(other, (Table, list)): 6296 other = self.__class__(other, key=key) 6297 if isinstance(other, self.__class__): 6298 other._still_valid_check() 6299 result = self.__class__() 6300 result._set = self._set.copy() 6301 result._list[:] = self._list[:] 6302 result._tables = {} 6303 result._tables.update(self._tables) 6304 result.key = self.key 6305 if key is other.key: # same key? just compare key values 6306 for item in other._list: 6307 result._maybe_add(item) 6308 else: # different keys, use this list's key on other's records 6309 for rec in other: 6310 result._maybe_add((source_table(rec), recno(rec), key(rec))) 6311 return result 6312 return NotImplemented
6313
6314 - def __contains__(self, data):
6315  self._still_valid_check() 6316 if not isinstance(data, (Record, RecordTemplate, tuple, dict)): 6317 raise TypeError("%r is not a record, template, tuple, nor dict" % (data, )) 6318 try: # attempt quick method 6319 item = self.key(data) 6320 if not isinstance(item, tuple): 6321 item = (item, ) 6322 return item in self._set 6323 except Exception: # argh, try brute force method 6324 for record in self: 6325 if record == data: 6326 return True 6327 return False
6328
6329 - def __delitem__(self, key):
6330  self._still_valid_check() 6331 if isinstance(key, baseinteger): 6332 item = self._list.pop(key) 6333 self._set.remove(item[2]) 6334 elif isinstance(key, slice): 6335 self._set.difference_update([item[2] for item in self._list[key]]) 6336 self._list.__delitem__(key) 6337 elif isinstance(key, (Record, RecordTemplate, dict, tuple)): 6338 index = self.index(key) 6339 item = self._list.pop(index) 6340 self._set.remove(item[2]) 6341 else: 6342 raise TypeError('%r should be an int, slice, record, template, tuple, or dict -- not a %r' % (key, type(key)))
6343
6344 - def __getitem__(self, key):
6345 self._still_valid_check() 6346 if isinstance(key, baseinteger): 6347 count = len(self._list) 6348 if not -count <= key < count: 6349 raise NotFoundError("Record %d is not in list." % key) 6350 return self._get_record(*self._list[key]) 6351 elif isinstance(key, slice): 6352 result = self.__class__() 6353 result._list[:] = self._list[key] 6354 result._set = set(result._list) 6355 result.key = self.key 6356 return result 6357 elif isinstance(key, (Record, RecordTemplate, dict, tuple)): 6358 index = self.index(key) 6359 return self._get_record(*self._list[index]) 6360 else: 6361 raise TypeError('%r should be an int, slice, record, record template, tuple, or dict -- not a %r' % (key, type(key)))
6362
6363 - def __iter__(self):
6364 self._still_valid_check() 6365 return Iter(self)
6366
6367 - def __len__(self):
6368 self._still_valid_check() 6369 return len(self._list)
6370
6371 - def __bool__(self):
6372 self._still_valid_check() 6373 return len(self) > 0
6374
6375 - def __radd__(self, other):
6376 self._still_valid_check() 6377 key = self.key 6378 if isinstance(other, (Table, list)): 6379 other = self.__class__(other, key=key) 6380 if isinstance(other, self.__class__): 6381 other._still_valid_check() 6382 result = other.__class__() 6383 result._set = other._set.copy() 6384 result._list[:] = other._list[:] 6385 result._tables = {} 6386 result._tables.update(self._tables) 6387 result.key = other.key 6388 if key is other.key: # same key? just compare key values 6389 for item in self._list: 6390 result._maybe_add(item) 6391 else: # different keys, use this list's key on other's records 6392 for rec in self: 6393 result._maybe_add((source_table(rec), recno(rec), key(rec))) 6394 return result 6395 return NotImplemented
6396
6397 - def __repr__(self):
6398 self._still_valid_check() 6399 if self._desc: 6400 return "%s(key=(%s), desc=%s)" % (self.__class__, self.key.__doc__, self._desc) 6401 else: 6402 return "%s(key=(%s))" % (self.__class__, self.key.__doc__)
6403
6404 - def __rsub__(self, other):
6405  self._still_valid_check() 6406 key = self.key 6407 if isinstance(other, (Table, list)): 6408 other = self.__class__(other, key=key) 6409 if isinstance(other, self.__class__): 6410 other._still_valid_check() 6411 result = other.__class__() 6412 result._list[:] = other._list[:] 6413 result._set = other._set.copy() 6414 result._tables = {} 6415 result._tables.update(other._tables) 6416 result.key = key 6417 lost = set() 6418 if key is other.key: 6419 for item in self._list: 6420 if item[2] in result._set: 6421 result._set.remove(item[2]) 6422 lost.add(item) 6423 else: 6424 for rec in self: 6425 value = key(rec) 6426 if value in result._set: 6427 result._set.remove(value) 6428 lost.add((source_table(rec), recno(rec), value)) 6429 result._list = [item for item in result._list if item not in lost] 6430 lost = set(result._tables.keys()) 6431 for table, _1, _2 in result._list: 6432 if table in result._tables: 6433 lost.remove(table) 6434 if not lost: 6435 break 6436 for table in lost: 6437 del result._tables[table] 6438 return result 6439 return NotImplemented
6440
6441 - def __sub__(self, other):
6442 self._still_valid_check() 6443 key = self.key 6444 if isinstance(other, (Table, list)): 6445 other = self.__class__(other, key=key) 6446 if isinstance(other, self.__class__): 6447 other._still_valid_check() 6448 result = self.__class__() 6449 result._list[:] = self._list[:] 6450 result._set = self._set.copy() 6451 result._tables = {} 6452 result._tables.update(self._tables) 6453 result.key = key 6454 lost = set() 6455 if key is other.key: 6456 for item in other._list: 6457 if item[2] in result._set: 6458 result._set.remove(item[2]) 6459 lost.add(item[2]) 6460 else: 6461 for rec in other: 6462 value = key(rec) 6463 if value in result._set: 6464 result._set.remove(value) 6465 lost.add(value) 6466 result._list = [item for item in result._list if item[2] not in lost] 6467 lost = set(result._tables.keys()) 6468 for table, _1, _2 in result._list: 6469 if table in result._tables: 6470 lost.remove(table) 6471 if not lost: 6472 break 6473 for table in lost: 6474 del result._tables[table] 6475 return result 6476 return NotImplemented
6477
6478 - def _maybe_add(self, item):
6479 self._still_valid_check() 6480 table, recno, key = item 6481 self._tables[table] = table._pack_count # TODO: check that _pack_count is the same if already in table 6482 if key not in self._set: 6483 self._set.add(key) 6484 self._list.append(item)
6485
6486 - def _get_record(self, table=None, rec_no=None, value=None):
6487 if table is rec_no is None: 6488 table, rec_no, value = self._list[self._index] 6489 return table[rec_no]
6490
6491 - def _purge(self, record, old_record_number, offset):
6492 partial = source_table(record), old_record_number 6493 records = sorted(self._list, key=lambda item: (item[0], item[1])) 6494 for item in records: 6495 if partial == item[:2]: 6496 found = True 6497 break 6498 elif partial[0] is item[0] and partial[1] < item[1]: 6499 found = False 6500 break 6501 else: 6502 found = False 6503 if found: 6504 self._list.pop(self._list.index(item)) 6505 self._set.remove(item[2]) 6506 start = records.index(item) + found 6507 for item in records[start:]: 6508 if item[0] is not partial[0]: # into other table's records 6509 break 6510 i = self._list.index(item) 6511 self._set.remove(item[2]) 6512 item = item[0], (item[1] - offset), item[2] 6513 self._list[i] = item 6514 self._set.add(item[2]) 6515 return found
6516
6517 - def _still_valid_check(self):
6518 for table, last_pack in self._tables.items(): 6519 if last_pack != getattr(table, '_pack_count'): 6520 raise DbfError("table has been packed; list is invalid")
6521 6522 _nav_check = _still_valid_check 6523
6524 - def append(self, record):
6525 self._still_valid_check() 6526 self._maybe_add((source_table(record), recno(record), self.key(record)))
6527
6528 - def clear(self):
6529 self._list = [] 6530 self._set = set() 6531 self._index = -1 6532 self._tables.clear()
6533
6534 - def extend(self, records):
6535 self._still_valid_check() 6536 key = self.key 6537 if isinstance(records, self.__class__): 6538 if key is records.key: # same key? just compare key values 6539 for item in records._list: 6540 self._maybe_add(item) 6541 else: # different keys, use this list's key on other's records 6542 for rec in records: 6543 value = key(rec) 6544 self._maybe_add((source_table(rec), recno(rec), value)) 6545 else: 6546 for rec in records: 6547 value = key(rec) 6548 self._maybe_add((source_table(rec), recno(rec), value))
6549
6550 - def index(self, record, start=None, stop=None):
6551 """ 6552 returns the index of record between start and stop 6553 start and stop default to the first and last record 6554 """ 6555 if not isinstance(record, (Record, RecordTemplate, dict, tuple)): 6556 raise TypeError("x should be a record, template, dict, or tuple, not %r" % type(record)) 6557 self._still_valid_check() 6558 if start is None: 6559 start = 0 6560 if stop is None: 6561 stop = len(self) 6562 for i in range(start, stop): 6563 if record == (self[i]): 6564 return i 6565 else: 6566 raise NotFoundError("dbf.List.index(x): x not in List", data=record)
6567
6568 - def insert(self, i, record):
6569 self._still_valid_check() 6570 item = source_table(record), recno(record), self.key(record) 6571 if item not in self._set: 6572 self._set.add(item[2]) 6573 self._list.insert(i, item)
6574
6575 - def key(self, record):
6576 """ 6577 table_name, record_number 6578 """ 6579 self._still_valid_check() 6580 return source_table(record), recno(record)
6581
6582 - def pop(self, index=None):
6583 self._still_valid_check() 6584 if index is None: 6585 table, recno, value = self._list.pop() 6586 else: 6587 table, recno, value = self._list.pop(index) 6588 self._set.remove(value) 6589 return self._get_record(table, recno, value)
6590
6591 - def query(self, criteria):
6592 """ 6593 criteria is a callback that returns a truthy value for matching record 6594 """ 6595 return pql(self, criteria)
6596
6597 - def remove(self, data):
6598 self._still_valid_check() 6599 if not isinstance(data, (Record, RecordTemplate, dict, tuple)): 6600 raise TypeError("%r(%r) is not a record, template, tuple, nor dict" % (type(data), data)) 6601 index = self.index(data) 6602 record = self[index] 6603 item = source_table(record), recno(record), self.key(record) 6604 self._list.remove(item) 6605 self._set.remove(item[2])
6606
6607 - def reverse(self):
6608 self._still_valid_check() 6609 return self._list.reverse()
6610
6611 - def sort(self, key=None, reverse=False):
6612 self._still_valid_check() 6613 if key is None: 6614 return self._list.sort(reverse=reverse) 6615 return self._list.sort(key=lambda item: key(item[0][item[1]]), reverse=reverse)
6616
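A List is an ordered, set-like collection of records keyed by a callable; the key value is what gets deduplicated and what the arithmetic operators compare. A sketch using names from this module (the tables are hypothetical, and packing any source table invalidates the List):

    staff  = List(employees,   key=lambda rec: rec.emp_id)
    extras = List(contractors, key=lambda rec: rec.emp_id)
    everyone   = staff + extras          # union on the shared key
    staff_only = staff - extras          # records whose key does not appear in extras
    everyone.sort(key=lambda rec: rec.emp_id)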
6617 6618 -class Index(_Navigation):
6619 """ 6620 non-persistent index for a table 6621 """ 6622
6623 - def __init__(self, table, key):
6624 self._table = table 6625 self._values = [] # ordered list of values 6626 self._rec_by_val = [] # matching record numbers 6627 self._records = {} # record numbers:values 6628 self.__doc__ = key.__doc__ or 'unknown' 6629 self._key = key 6630 self._previous_status = [] 6631 for record in table: 6632 value = key(record) 6633 if value is DoNotIndex: 6634 continue 6635 rec_num = recno(record) 6636 if not isinstance(value, tuple): 6637 value = (value, ) 6638 vindex = bisect_right(self._values, value) 6639 self._values.insert(vindex, value) 6640 self._rec_by_val.insert(vindex, rec_num) 6641 self._records[rec_num] = value 6642 table._indexen.add(self)
6643
6644 - def __call__(self, record):
6645 rec_num = recno(record) 6646 key = self.key(record) 6647 if rec_num in self._records: 6648 if self._records[rec_num] == key: 6649 return 6650 old_key = self._records[rec_num] 6651 vindex = bisect_left(self._values, old_key) 6652 self._values.pop(vindex) 6653 self._rec_by_val.pop(vindex) 6654 del self._records[rec_num] 6655 assert rec_num not in self._records 6656 if key == (DoNotIndex, ): 6657 return 6658 vindex = bisect_right(self._values, key) 6659 self._values.insert(vindex, key) 6660 self._rec_by_val.insert(vindex, rec_num) 6661 self._records[rec_num] = key
6662
6663 - def __contains__(self, data):
6664  if not isinstance(data, (Record, RecordTemplate, tuple, dict)): 6665 raise TypeError("%r is not a record, template, tuple, nor dict" % (data, )) 6666 try: 6667 value = self.key(data) 6668 return value in self._values 6669 except Exception: 6670 for record in self: 6671 if record == data: 6672 return True 6673 return False
6674
6675 - def __getitem__(self, key):
6676  '''if key is an integer, returns the matching record; 6677 if key is a [slice | string | tuple | record] returns a List; 6678 raises NotFoundError on failure''' 6679 if isinstance(key, baseinteger): 6680 count = len(self._values) 6681 if not -count <= key < count: 6682 raise NotFoundError("Record %d is not in list." % key) 6683 rec_num = self._rec_by_val[key] 6684 return self._table[rec_num] 6685 elif isinstance(key, slice): 6686 result = List() 6687 start, stop, step = key.start, key.stop, key.step 6688 if start is None: start = 0 6689 if stop is None: stop = len(self._rec_by_val) 6690 if step is None: step = 1 6691 if step < 0: 6692 start, stop = stop - 1, -(stop - start + 1) 6693 for loc in range(start, stop, step): 6694 record = self._table[self._rec_by_val[loc]] 6695 result._maybe_add(item=(self._table, self._rec_by_val[loc], result.key(record))) 6696 return result 6697 elif isinstance(key, (basestring, tuple, Record, RecordTemplate)): 6698 if isinstance(key, (Record, RecordTemplate)): 6699 key = self.key(key) 6700 elif isinstance(key, basestring): 6701 key = (key, ) 6702 lo = self._search(key, where='left') 6703 hi = self._search(key, where='right') 6704 if lo == hi: 6705 raise NotFoundError(key) 6706 result = List(desc='match = %r' % (key, )) 6707 for loc in range(lo, hi): 6708 record = self._table[self._rec_by_val[loc]] 6709 result._maybe_add(item=(self._table, self._rec_by_val[loc], result.key(record))) 6710 return result 6711 else: 6712 raise TypeError('indices must be integers, match objects must be strings or tuples')
6713
6714 - def __enter__(self):
6715 self._table.__enter__() 6716 return self
6717
6718 - def __exit__(self, *exc_info):
6719 self._table.__exit__() 6720 return False
6721
6722 - def __iter__(self):
6723 return Iter(self)
6724
6725 - def __len__(self):
6726 return len(self._records)
6727
6728 - def _clear(self):
6729 """ 6730 removes all entries from index 6731 """ 6732 self._values[:] = [] 6733 self._rec_by_val[:] = [] 6734 self._records.clear()
6735
6736 - def _key(self, record):
6737 """ 6738 table_name, record_number 6739 """ 6740 self._still_valid_check() 6741 return source_table(record), recno(record)
6742
6743 - def _nav_check(self):
6744  """ 6745 raises error if table is closed 6746 """ 6747 if self._table._meta.status == CLOSED: 6748 raise DbfError('indexed table %s is closed' % self._table.filename)
6749
6750 - def _partial_match(self, target, match):
6751 target = target[:len(match)] 6752 if isinstance(match[-1], basestring): 6753 target = list(target) 6754 target[-1] = target[-1][:len(match[-1])] 6755 target = tuple(target) 6756 return target == match
6757
6758 - def _purge(self, rec_num):
6759 value = self._records.get(rec_num) 6760 if value is not None: 6761 vindex = bisect_left(self._values, value) 6762 del self._records[rec_num] 6763 self._values.pop(vindex) 6764 self._rec_by_val.pop(vindex)
6765
6766 - def _reindex(self):
6767 """ 6768 reindexes all records 6769 """ 6770 for record in self._table: 6771 self(record)
6772
6773 - def _search(self, match, lo=0, hi=None, where=None):
6774 if hi is None: 6775 hi = len(self._values) 6776 if where == 'left': 6777 return bisect_left(self._values, match, lo, hi) 6778 elif where == 'right': 6779 return bisect_right(self._values, match, lo, hi)
6780
6781 - def index(self, record, start=None, stop=None):
6782 """ 6783 returns the index of record between start and stop 6784 start and stop default to the first and last record 6785 """ 6786 if not isinstance(record, (Record, RecordTemplate, dict, tuple)): 6787 raise TypeError("x should be a record, template, dict, or tuple, not %r" % type(record)) 6788 self._nav_check() 6789 if start is None: 6790 start = 0 6791 if stop is None: 6792 stop = len(self) 6793 for i in range(start, stop): 6794 if record == (self[i]): 6795 return i 6796 else: 6797 raise NotFoundError("dbf.Index.index(x): x not in Index", data=record)
6798
6799 - def index_search(self, match, start=None, stop=None, nearest=False, partial=False):
6800 """ 6801 returns the index of match between start and stop 6802 start and stop default to the first and last record. 6803 if nearest is true returns the location of where the match should be 6804 otherwise raises NotFoundError 6805 """ 6806 self._nav_check() 6807 if not isinstance(match, tuple): 6808 match = (match, ) 6809 if start is None: 6810 start = 0 6811 if stop is None: 6812 stop = len(self) 6813 loc = self._search(match, start, stop, where='left') 6814 if loc == len(self._values): 6815 if nearest: 6816 return IndexLocation(loc, False) 6817 raise NotFoundError("dbf.Index.index_search(x): x not in index", data=match) 6818 if self._values[loc] == match \ 6819 or partial and self._partial_match(self._values[loc], match): 6820 return IndexLocation(loc, True) 6821 elif nearest: 6822 return IndexLocation(loc, False) 6823 else: 6824 raise NotFoundError("dbf.Index.index_search(x): x not in Index", data=match)
6825
6826 - def key(self, record):
6827 result = self._key(record) 6828 if not isinstance(result, tuple): 6829 result = (result, ) 6830 return result
6831
6832 - def query(self, criteria):
6833 """ 6834 criteria is a callback that returns a truthy value for matching record 6835 """ 6836 self._nav_check() 6837 return pql(self, criteria)
6838
6839 - def search(self, match, partial=False):
6840 """ 6841 returns dbf.List of all (partially) matching records 6842 """ 6843 self._nav_check() 6844 result = List() 6845 if not isinstance(match, tuple): 6846 match = (match, ) 6847 loc = self._search(match, where='left') 6848 if loc == len(self._values): 6849 return result 6850 while loc < len(self._values) and self._values[loc] == match: 6851 record = self._table[self._rec_by_val[loc]] 6852 result._maybe_add(item=(self._table, self._rec_by_val[loc], result.key(record))) 6853 loc += 1 6854 if partial: 6855 while loc < len(self._values) and self._partial_match(self._values[loc], match): 6856 record = self._table[self._rec_by_val[loc]] 6857 result._maybe_add(item=(self._table, self._rec_by_val[loc], result.key(record))) 6858 loc += 1 6859 return result
6860
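An Index is held purely in memory, rebuilt from the key callable, and kept current as records are written; nothing is persisted to disk. A hypothetical sketch using the table's create_index() helper (defined elsewhere in this module) together with the search methods above:

    by_name = table.create_index(lambda rec: rec.name.lower())
    smiths  = by_name.search(match='smith', partial=True)    # dbf.List of matching records
    where   = by_name.index_search('smith', nearest=True)    # IndexLocation: position plus found flag
    for rec in by_name:                                       # records come back in key order
        pass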
6861 6862 -class Relation(object):
6863 """ 6864 establishes a relation between two dbf tables (not persistent) 6865 """ 6866 6867 relations = {} 6868
6869 - def __new__(cls, src, tgt, src_names=None, tgt_names=None):
6870 if (len(src) != 2 or len(tgt) != 2): 6871 raise DbfError("Relation should be called with ((src_table, src_field), (tgt_table, tgt_field))") 6872 if src_names and len(src_names) !=2 or tgt_names and len(tgt_names) != 2: 6873 raise DbfError('src_names and tgt_names, if specified, must be ("table","field")') 6874 src_table, src_field = src 6875 tgt_table, tgt_field = tgt 6876 try: 6877 if isinstance(src_field, baseinteger): 6878 table, field = src_table, src_field 6879 src_field = table.field_names[field] 6880 else: 6881 src_table.field_names.index(src_field) 6882 if isinstance(tgt_field, baseinteger): 6883 table, field = tgt_table, tgt_field 6884 tgt_field = table.field_names[field] 6885 else: 6886 tgt_table.field_names.index(tgt_field) 6887 except (IndexError, ValueError): 6888 raise DbfError('%r not in %r' % (field, table)) 6889 if src_names: 6890 src_table_name, src_field_name = src_names 6891 else: 6892 src_table_name, src_field_name = src_table.filename, src_field 6893 if src_table_name[-4:].lower() == '.dbf': 6894 src_table_name = src_table_name[:-4] 6895 if tgt_names: 6896 tgt_table_name, tgt_field_name = tgt_names 6897 else: 6898 tgt_table_name, tgt_field_name = tgt_table.filename, tgt_field 6899 if tgt_table_name[-4:].lower() == '.dbf': 6900 tgt_table_name = tgt_table_name[:-4] 6901 relation = cls.relations.get(((src_table, src_field), (tgt_table, tgt_field))) 6902 if relation is not None: 6903 return relation 6904 obj = object.__new__(cls) 6905 obj._src_table, obj._src_field = src_table, src_field 6906 obj._tgt_table, obj._tgt_field = tgt_table, tgt_field 6907 obj._src_table_name, obj._src_field_name = src_table_name, src_field_name 6908 obj._tgt_table_name, obj._tgt_field_name = tgt_table_name, tgt_field_name 6909 obj._tables = dict() 6910 cls.relations[((src_table, src_field), (tgt_table, tgt_field))] = obj 6911 return obj
6912
6913 - def __eq__(yo, other):
6914 if (yo.src_table == other.src_table 6915 and yo.src_field == other.src_field 6916 and yo.tgt_table == other.tgt_table 6917 and yo.tgt_field == other.tgt_field): 6918 return True 6919 return False
6920
6921 - def __getitem__(yo, record):
6922 """ 6923 record should be from the source table 6924 """ 6925 key = (record[yo.src_field], ) 6926 try: 6927 return yo.index[key] 6928 except NotFoundError: 6929 return List(desc='%s not found' % key)
6930
6931 - def __hash__(yo):
6932 return hash((yo.src_table, yo.src_field, yo.tgt_table, yo.tgt_field))
6933
6934 - def __ne__(yo, other):
6935 if (yo.src_table != other.src_table 6936 or yo.src_field != other.src_field 6937 or yo.tgt_table != other.tgt_table 6938 or yo.tgt_field != other.tgt_field): 6939 return True 6940 return False
6941
6942 - def __repr__(yo):
6943 return "Relation((%r, %r), (%r, %r))" % (yo.src_table_name, yo.src_field, yo.tgt_table_name, yo.tgt_field)
6944
6945 - def __str__(yo):
6946 return "%s:%s --> %s:%s" % (yo.src_table_name, yo.src_field_name, yo.tgt_table_name, yo.tgt_field_name)
6947 6948 @property
6949 - def src_table(yo):
6950  "source table" 6951 return yo._src_table
6952 6953 @property
6954 - def src_field(yo):
6955 "name of source field" 6956 return yo._src_field
6957 6958 @property
6959 - def src_table_name(yo):
6960 return yo._src_table_name
6961 6962 @property
6963 - def src_field_name(yo):
6964 return yo._src_field_name
6965 6966 @property
6967 - def tgt_table(yo):
6968  "target table" 6969 return yo._tgt_table
6970 6971 @property
6972 - def tgt_field(yo):
6973 "name of target field" 6974 return yo._tgt_field
6975 6976 @property
6977 - def tgt_table_name(yo):
6978 return yo._tgt_table_name
6979 6980 @property
6981 - def tgt_field_name(yo):
6982 return yo._tgt_field_name
6983 6984 @LazyAttr
6985 - def index(yo):
6986 def index(record, field=yo._tgt_field): 6987 return record[field]
6988 index.__doc__ = "%s:%s --> %s:%s" % (yo.src_table_name, yo.src_field_name, yo.tgt_table_name, yo.tgt_field_name) 6989 yo.index = yo._tgt_table.create_index(index) 6990 source = dbf.List(yo._src_table, key=lambda rec, field=yo._src_field: rec[field]) 6991 target = dbf.List(yo._tgt_table, key=lambda rec, field=yo._tgt_field: rec[field]) 6992 if len(source) != len(yo._src_table): 6993 yo._tables[yo._src_table] = 'many' 6994 else: 6995 yo._tables[yo._src_table] = 'one' 6996 if len(target) != len(yo._tgt_table): 6997 yo._tables[yo._tgt_table] = 'many' 6998 else: 6999 yo._tables[yo._tgt_table] = 'one' 7000 return yo.index
7001
7002 - def one_or_many(yo, table):
7003 yo.index # make sure yo._tables has been populated 7004 try: 7005 if isinstance(table, basestring): 7006 table = (yo._src_table, yo._tgt_table)[yo._tgt_table_name == table] 7007 return yo._tables[table] 7008 except IndexError: 7009 raise NotFoundError("table %s not in relation" % table)
7010
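A Relation ties a field in one table to the matching field in another and lazily builds an Index over the target side. A sketch with hypothetical customer and order tables sharing a custid field:

    rel = Relation((customers, 'custid'), (orders, 'custid'))
    print(rel)                            # e.g. customers:custid --> orders:custid
    for order in rel[one_customer]:       # one_customer is a record from the source table
        pass                              # rel[...] yields a dbf.List of matching target records
    rel.one_or_many(orders)               # 'one' or 'many', depending on key uniqueness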
7011 7012 -class IndexFile(_Navigation):
7013 pass
7014 7015 7016 # table meta 7017 7018 table_types = { 7019 'db3' : Db3Table, 7020 'clp' : ClpTable, 7021 'fp' : FpTable, 7022 'vfp' : VfpTable, 7023 } 7024 7025 version_map = { 7026 0x02 : 'FoxBASE', 7027 0x03 : 'dBase III Plus', 7028 0x04 : 'dBase IV', 7029 0x05 : 'dBase V', 7030 0x30 : 'Visual FoxPro', 7031 0x31 : 'Visual FoxPro (auto increment field)', 7032 0x32 : 'Visual FoxPro (VarChar, VarBinary, or BLOB enabled)', 7033 0x43 : 'dBase IV SQL table files', 7034 0x63 : 'dBase IV SQL system files', 7035 0x83 : 'dBase III Plus w/memos', 7036 0x8b : 'dBase IV w/memos', 7037 0x8e : 'dBase IV w/SQL table', 7038 0xf5 : 'FoxPro w/memos'} 7039 7040 code_pages = { 7041 0x00 : ('ascii', "plain ol' ascii"), 7042 0x01 : ('cp437', 'U.S. MS-DOS'), 7043 0x02 : ('cp850', 'International MS-DOS'), 7044 0x03 : ('cp1252', 'Windows ANSI'), 7045 0x04 : ('mac_roman', 'Standard Macintosh'), 7046 0x08 : ('cp865', 'Danish OEM'), 7047 0x09 : ('cp437', 'Dutch OEM'), 7048 0x0A : ('cp850', 'Dutch OEM (secondary)'), 7049 0x0B : ('cp437', 'Finnish OEM'), 7050 0x0D : ('cp437', 'French OEM'), 7051 0x0E : ('cp850', 'French OEM (secondary)'), 7052 0x0F : ('cp437', 'German OEM'), 7053 0x10 : ('cp850', 'German OEM (secondary)'), 7054 0x11 : ('cp437', 'Italian OEM'), 7055 0x12 : ('cp850', 'Italian OEM (secondary)'), 7056 0x13 : ('cp932', 'Japanese Shift-JIS'), 7057 0x14 : ('cp850', 'Spanish OEM (secondary)'), 7058 0x15 : ('cp437', 'Swedish OEM'), 7059 0x16 : ('cp850', 'Swedish OEM (secondary)'), 7060 0x17 : ('cp865', 'Norwegian OEM'), 7061 0x18 : ('cp437', 'Spanish OEM'), 7062 0x19 : ('cp437', 'English OEM (Britain)'), 7063 0x1A : ('cp850', 'English OEM (Britain) (secondary)'), 7064 0x1B : ('cp437', 'English OEM (U.S.)'), 7065 0x1C : ('cp863', 'French OEM (Canada)'), 7066 0x1D : ('cp850', 'French OEM (secondary)'), 7067 0x1F : ('cp852', 'Czech OEM'), 7068 0x22 : ('cp852', 'Hungarian OEM'), 7069 0x23 : ('cp852', 'Polish OEM'), 7070 0x24 : ('cp860', 'Portugese OEM'), 7071 0x25 : ('cp850', 'Potugese OEM (secondary)'), 7072 0x26 : ('cp866', 'Russian OEM'), 7073 0x37 : ('cp850', 'English OEM (U.S.) 
(secondary)'), 7074 0x40 : ('cp852', 'Romanian OEM'), 7075 0x4D : ('cp936', 'Chinese GBK (PRC)'), 7076 0x4E : ('cp949', 'Korean (ANSI/OEM)'), 7077 0x4F : ('cp950', 'Chinese Big 5 (Taiwan)'), 7078 0x50 : ('cp874', 'Thai (ANSI/OEM)'), 7079 0x57 : ('cp1252', 'ANSI'), 7080 0x58 : ('cp1252', 'Western European ANSI'), 7081 0x59 : ('cp1252', 'Spanish ANSI'), 7082 0x64 : ('cp852', 'Eastern European MS-DOS'), 7083 0x65 : ('cp866', 'Russian MS-DOS'), 7084 0x66 : ('cp865', 'Nordic MS-DOS'), 7085 0x67 : ('cp861', 'Icelandic MS-DOS'), 7086 0x68 : (None, 'Kamenicky (Czech) MS-DOS'), 7087 0x69 : (None, 'Mazovia (Polish) MS-DOS'), 7088 0x6a : ('cp737', 'Greek MS-DOS (437G)'), 7089 0x6b : ('cp857', 'Turkish MS-DOS'), 7090 0x78 : ('cp950', 'Traditional Chinese (Hong Kong SAR, Taiwan) Windows'), 7091 0x79 : ('cp949', 'Korean Windows'), 7092 0x7a : ('cp936', 'Chinese Simplified (PRC, Singapore) Windows'), 7093 0x7b : ('cp932', 'Japanese Windows'), 7094 0x7c : ('cp874', 'Thai Windows'), 7095 0x7d : ('cp1255', 'Hebrew Windows'), 7096 0x7e : ('cp1256', 'Arabic Windows'), 7097 0xc8 : ('cp1250', 'Eastern European Windows'), 7098 0xc9 : ('cp1251', 'Russian Windows'), 7099 0xca : ('cp1254', 'Turkish Windows'), 7100 0xcb : ('cp1253', 'Greek Windows'), 7101 0x96 : ('mac_cyrillic', 'Russian Macintosh'), 7102 0x97 : ('mac_latin2', 'Macintosh EE'), 7103 0x98 : ('mac_greek', 'Greek Macintosh'), 7104 0xf0 : ('utf8', '8-bit unicode'), 7105 } 7106 7107 7108 default_codepage = code_pages.get(default_codepage, code_pages.get(0x00))[0]
7109 7110 7111 -def _nop(value):
7112 """ 7113 returns parameter unchanged 7114 """ 7115 return value
7116
7117 -def _normalize_tuples(tuples, length, filler):
7118 """ 7119 ensures each tuple is the same length, using filler[-missing] for the gaps 7120 """ 7121 final = [] 7122 for t in tuples: 7123 if len(t) < length: 7124 final.append( tuple([item for item in t] + filler[len(t)-length:]) ) 7125 else: 7126 final.append(t) 7127 return tuple(final)
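For example, padding two-element specs out to length 3 with the filler's trailing elements:

    specs = _normalize_tuples(tuples=[('C', 25), ('N', 8, 2)], length=3, filler=[None, None, 0])
    # -> (('C', 25, 0), ('N', 8, 2))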
7128
7129 -def _codepage_lookup(cp):
7130 if cp not in code_pages: 7131 for code_page in sorted(code_pages.keys()): 7132 sd, ld = code_pages[code_page] 7133 if cp == sd or cp == ld: 7134 if sd is None: 7135 raise DbfError("Unsupported codepage: %s" % ld) 7136 cp = code_page 7137 break 7138 else: 7139 raise DbfError("Unsupported codepage: %s" % cp) 7140 sd, ld = code_pages[cp] 7141 return cp, sd, ld
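The helper accepts either the numeric header byte or a codec/description string and always returns the full triple; entries mapped to None in the table above raise DbfError:

    _codepage_lookup(0x01)        # -> (1, 'cp437', 'U.S. MS-DOS')
    _codepage_lookup('cp1252')    # -> (3, 'cp1252', 'Windows ANSI')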
7142
7143 7144 # miscellany 7145 7146 -class _Db4Table(Table):
7147 """ 7148 under development 7149 """ 7150 7151 version = 'dBase IV w/memos (non-functional)' 7152 _versionabbr = 'db4' 7153 7154 @MutableDefault
7155 - def _field_types():
7156 return { 7157 CHAR: {'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character, 'Blank':lambda x: b' ' * x, 'Init':add_vfp_character}, 7158 CURRENCY: {'Type':'Currency', 'Retrieve':retrieve_currency, 'Update':update_currency, 'Blank':Decimal, 'Init':add_vfp_currency}, 7159 DOUBLE: {'Type':'Double', 'Retrieve':retrieve_double, 'Update':update_double, 'Blank':float, 'Init':add_vfp_double}, 7160 FLOAT: {'Type':'Float', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':float, 'Init':add_vfp_numeric}, 7161 NUMERIC: {'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':int, 'Init':add_vfp_numeric}, 7162 INTEGER: {'Type':'Integer', 'Retrieve':retrieve_integer, 'Update':update_integer, 'Blank':int, 'Init':add_vfp_integer}, 7163 LOGICAL: {'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical, 'Blank':Logical, 'Init':add_logical}, 7164 DATE: {'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date, 'Blank':Date, 'Init':add_date}, 7165 DATETIME: {'Type':'DateTime', 'Retrieve':retrieve_vfp_datetime, 'Update':update_vfp_datetime, 'Blank':DateTime, 'Init':add_vfp_datetime}, 7166 MEMO: {'Type':'Memo', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b' ' * x, 'Init':add_memo}, 7167 GENERAL: {'Type':'General', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b' ' * x, 'Init':add_memo}, 7168 PICTURE: {'Type':'Picture', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b' ' * x, 'Init':add_memo}, 7169 _NULLFLAG: {'Type':'_NullFlags', 'Retrieve':unsupported_type, 'Update':unsupported_type, 'Blank':int, 'Init':None} }
7170 7171 _memoext = '.dbt' 7172 _memotypes = ('G', 'M', 'P') 7173 _memoClass = _VfpMemo 7174 _yesMemoMask = 0x8b # 1000 1011 7175 _noMemoMask = 0x04 # 0000 0100 7176 _fixed_fields = ('B', 'D', 'G', 'I', 'L', 'M', 'P', 'T', 'Y') 7177 _variable_fields = ('C', 'F', 'N') 7178 _binary_fields = ('G', 'P') 7179 _character_fields = ('C', 'M') # fields representing character data 7180 _decimal_fields = ('F', 'N') 7181 _numeric_fields = ('B', 'F', 'I', 'N', 'Y') 7182 _currency_fields = ('Y',) 7183 _supported_tables = (0x04, 0x8b) 7184 _dbfTableHeader = [0] * 32 7185 _dbfTableHeader[0] = 0x8b # version - dBase IV w/memos 1000 1011 7186 _dbfTableHeader[10] = 0x01 # record length -- one for delete flag 7187 _dbfTableHeader[29] = 0x03 # code page -- 437 US-MS DOS 7188 _dbfTableHeader = bytes(_dbfTableHeader) 7189 _dbfTableHeaderExtra = b'' 7190
7191 - def _check_memo_integrity(self):
7192 """ 7193 dBase IV specific 7194 """ 7195 if self._meta.header.version == 0x8b: 7196 try: 7197 self._meta.memo = self._memoClass(self._meta) 7198 except: 7199 self._meta.dfd.close() 7200 self._meta.dfd = None 7201 raise 7202 if not self._meta.ignorememos: 7203 for field in self._meta.fields: 7204 if self._meta[field][TYPE] in self._memotypes: 7205 if self._meta.header.version != 0x8b: 7206 self._meta.dfd.close() 7207 self._meta.dfd = None 7208 raise BadDataError("Table structure corrupt: memo fields exist, header declares no memos") 7209 elif not os.path.exists(self._meta.memoname): 7210 self._meta.dfd.close() 7211 self._meta.dfd = None 7212 raise BadDataError("Table structure corrupt: memo fields exist without memo file") 7213 break
7214
7215 7216 # utility functions 7217 7218 -def create_template(table_or_record, defaults=None):
7219 if isinstance(table_or_record, Table): 7220 return RecordTemplate(table_or_record._meta, defaults) 7221 else: 7222 return RecordTemplate(table_or_record._meta, table_or_record, defaults)
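# Editor's sketch (hypothetical field specs): build a template from a table,
# fill it in, and append it -- templates accept assignment at any time.
import dbf
table = dbf.Table(':memory:', 'name C(25); age N(3,0)', dbf_type='db3', on_disk=False)
table.open()
template = dbf.create_template(table)
template.name = 'Unknown'
template.age = 0
table.append(template)
table.close()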
7223
7224 -def delete(record):
7225 """ 7226 marks record as deleted 7227 """ 7228 template = isinstance(record, RecordTemplate) 7229 if not template and record._meta.status == CLOSED: 7230 raise DbfError("%s is closed; cannot delete record" % record._meta.filename) 7231 record_in_flux = not record._write_to_disk 7232 if not template and not record_in_flux: 7233 record._start_flux() 7234 try: 7235 record._data[0] = ASTERISK 7236 if not template: 7237 record._dirty = True 7238 except: 7239 if not template and not record_in_flux: 7240 record._rollback_flux() 7241 raise 7242 if not template and not record_in_flux: 7243 record._commit_flux()
7244
7245 -def export(table_or_records, filename=None, field_names=None, format='csv', header=True, dialect='dbf', encoding=None):
7246 """ 7247 writes the records using CSV or tab-delimited format, using the filename 7248 given if specified, otherwise the table name 7249 if table_or_records is a collection of records (not an actual table) they 7250 should all be of the same format 7251 """ 7252 table = source_table(table_or_records[0]) 7253 if filename is None: 7254 filename = table.filename 7255 if field_names is None: 7256 field_names = table.field_names 7257 if isinstance(field_names, basestring): 7258 field_names = [f.strip() for f in field_names.split(',')] 7259 format = format.lower() 7260 if format not in ('csv', 'tab', 'fixed'): 7261 raise DbfError("export format: csv, tab, or fixed -- not %s" % format) 7262 if format == 'fixed': 7263 format = 'txt' 7264 if encoding is None: 7265 encoding = table.codepage.name 7266 encoder = codecs.getencoder(encoding) 7267 header_names = field_names 7268 base, ext = os.path.splitext(filename) 7269 if ext.lower() in ('', '.dbf'): 7270 filename = base + "." + format 7271 try: 7272 if format == 'csv': 7273 fd = open(filename, 'w', encoding=encoding) 7274 csvfile = csv.writer(fd, dialect=dialect) 7275 if header: 7276 csvfile.writerow(header_names) 7277 for record in table_or_records: 7278 fields = [] 7279 for fieldname in field_names: 7280 data = record[fieldname] 7281 # if isinstance(data, str): 7282 # fields.append(encoder(data)[0]) 7283 # else: 7284 fields.append(data) 7285 csvfile.writerow(fields) 7286 elif format == 'tab': 7287 fd = open(filename, 'w', encoding=encoding) 7288 if header: 7289 fd.write('\t'.join(header_names) + '\n') 7290 for record in table_or_records: 7291 fields = [] 7292 for fieldname in field_names: 7293 data = record[fieldname] 7294 # if isinstance(data, str): 7295 # fields.append(encoder(data)[0]) 7296 # else: 7297 fields.append(str(data)) 7298 fd.write('\t'.join(fields) + '\n') 7299 else: # format == 'fixed' 7300 fd = open(filename, 'w', encoding=encoding) 7301 header = open("%s_layout.txt" % os.path.splitext(filename)[0], 'w', encoding=encoding) 7302 header.write("%-15s Size\n" % "Field Name") 7303 header.write("%-15s ----\n" % ("-" * 15)) 7304 sizes = [] 7305 for field in field_names: 7306 size = table.field_info(field).length 7307 sizes.append(size) 7308 # field = encoder(field)[0] 7309 header.write("%-15s %3d\n" % (field, size)) 7310 header.write('\nTotal Records in file: %d\n' % len(table_or_records)) 7311 header.close() 7312 for record in table_or_records: 7313 fields = [] 7314 for i, fieldname in enumerate(field_names): 7315 data = record[fieldname] 7316 # if isinstance(data, str): 7317 # fields.append("%-*s" % (sizes[i], encoder(data)[0])) 7318 # else: 7319 fields.append("%-*s" % (sizes[i], data)) 7320 fd.write(''.join(fields) + '\n') 7321 finally: 7322 fd.close() 7323 fd = None 7324 return len(table_or_records)
7325
7326 -def field_names(thing):
7327 """ 7328 fields in table/record, keys in dict 7329 """ 7330 if isinstance(thing, dict): 7331 return list(thing.keys()) 7332 elif isinstance(thing, (Table, Record, RecordTemplate)): 7333 return thing._meta.user_fields[:] 7334 elif isinstance(thing, Index): 7335 return thing._table._meta.user_fields[:] 7336 else: 7337 for record in thing: # grab any record 7338 return record._meta.user_fields[:]
7339
7340 -def is_deleted(record):
7341 """ 7342 marked for deletion? 7343 """ 7344 return record._data[0] == ASTERISK
7345
7346 -def recno(record):
7347 """ 7348 physical record number 7349 """ 7350 return record._recnum
7351
7352 -def reset(record, keep_fields=None):
7353 """ 7354 sets record's fields back to original, except for fields in keep_fields 7355 """ 7356 template = record_in_flux = False 7357 if isinstance(record, RecordTemplate): 7358 template = True 7359 else: 7360 record_in_flux = not record._write_to_disk 7361 if record._meta.status == CLOSED: 7362 raise DbfError("%s is closed; cannot modify record" % record._meta.filename) 7363 if keep_fields is None: 7364 keep_fields = [] 7365 keep = {} 7366 for field in keep_fields: 7367 keep[field] = record[field] 7368 record._data[:] = record._meta.blankrecord[:] 7369 for field in keep_fields: 7370 record[field] = keep[field] 7371 if not template: 7372 if record._write_to_disk: 7373 record._write() 7374 else: 7375 record._dirty = True
7376
7377 -def source_table(thingie):
7378 """ 7379 table associated with table | record | index 7380 """ 7381 table = thingie._meta.table() 7382 if table is None: 7383 raise DbfError("table is no longer available") 7384 return table
7385
7386 -def undelete(record):
7387 """ 7388 marks record as active 7389 """ 7390 template = isinstance(record, RecordTemplate) 7391 if not template and record._meta.status == CLOSED: 7392 raise DbfError("%s is closed; cannot undelete record" % record._meta.filename) 7393 record_in_flux = not record._write_to_disk 7394 if not template and not record_in_flux: 7395 record._start_flux() 7396 try: 7397 record._data[0] = SPACE 7398 if not template: 7399 record._dirty = True 7400 except: 7401 if not template and not record_in_flux: 7402 record._rollback_flux() 7403 raise 7404 if not template and not record_in_flux: 7405 record._commit_flux()
7406 -def write(record, **kwargs):
7407 """ 7408 write record data to disk (updates indices) 7409 """ 7410 if record._meta.status == CLOSED: 7411 raise DbfError("%s is closed; cannot update record" % record._meta.filename) 7412 elif not record._write_to_disk: 7413 raise DbfError("unable to use .write_record() while record is in flux") 7414 if kwargs: 7415 gather(record, kwargs) 7416 if record._dirty: 7417 record._write()
7418
7419 -def Process(records, start=0, stop=None, filter=None):
7420 """commits each record to disk before returning the next one; undoes all changes to that record if exception raised 7421 if records is a table, it will be opened and closed if necessary 7422 filter function should return True to skip record, False to keep""" 7423 already_open = True 7424 if isinstance(records, Table): 7425 already_open = records.status != CLOSED 7426 if not already_open: 7427 records.open() 7428 try: 7429 if stop is None: 7430 stop = len(records) 7431 for record in records[start:stop]: 7432 if filter is not None and filter(record): 7433 continue 7434 try: 7435 record._start_flux() 7436 yield record 7437 except: 7438 record._rollback_flux() 7439 raise 7440 else: 7441 record._commit_flux() 7442 finally: 7443 if not already_open: 7444 records.close()
7445
7446 -def Templates(records, start=0, stop=None, filter=None):
7447 """ 7448 returns a template of each record instead of the record itself 7449 if records is a table, it will be opened and closed if necessary 7450 """ 7451 already_open = True 7452 if isinstance(records, Table): 7453 already_open = records.status != CLOSED 7454 if not already_open: 7455 records.open() 7456 try: 7457 if stop is None: 7458 stop = len(records) 7459 for record in records[start:stop]: 7460 if filter is not None and filter(record): 7461 continue 7462 yield(create_template(record)) 7463 finally: 7464 if not already_open: 7465 records.close()
7466
7467 -def index(sequence):
7468 """ 7469 returns integers 0 - len(sequence) 7470 """ 7471 for i in range(len(sequence)): 7472 yield i
7473
7474 -def guess_table_type(filename):
7475 reported = table_type(filename) 7476 possibles = [] 7477 version = reported[0] 7478 for tabletype in (Db3Table, ClpTable, FpTable, VfpTable): 7479 if version in tabletype._supported_tables: 7480 possibles.append((tabletype._versionabbr, tabletype._version, tabletype)) 7481 if not possibles: 7482 raise DbfError("Tables of type %s not supported" % str(reported)) 7483 return possibles
7484
7485 -def table_type(filename):
7486 """ 7487 returns text representation of a table's dbf version 7488 """ 7489 base, ext = os.path.splitext(filename) 7490 if ext == '': 7491 filename = base + '.[Dd][Bb][Ff]' 7492 matches = glob(filename) 7493 if matches: 7494 filename = matches[0] 7495 else: 7496 filename = base + '.dbf' 7497 if not os.path.exists(filename): 7498 raise DbfError('File %s not found' % filename) 7499 fd = open(filename, 'rb') 7500 version = fd.read(1) 7501 if version: 7502 [version] = version 7503 fd.close() 7504 fd = None 7505 if not version in version_map: 7506 raise DbfError("Unknown dbf type: %s (%x)" % (version, version)) 7507 return version, version_map[version]
7508
7509 -def add_fields(table_name, field_specs):
7510 """ 7511 adds fields to an existing table 7512 """ 7513 table = Table(table_name) 7514 table.open() 7515 try: 7516 table.add_fields(field_specs) 7517 finally: 7518 table.close()
7519
7520 -def delete_fields(table_name, field_names):
7521 """ 7522 deletes fields from an existing table 7523 """ 7524 table = Table(table_name) 7525 table.open() 7526 try: 7527 table.delete_fields(field_names) 7528 finally: 7529 table.close()
7530
7531 -def first_record(table_name):
7532 """ 7533 prints the first record of a table 7534 """ 7535 table = Table(table_name) 7536 table.open() 7537 try: 7538 print(str(table[0])) 7539 finally: 7540 table.close()
7541
7542 -def from_csv(csvfile, to_disk=False, filename=None, field_names=None, extra_fields=None, 7543 dbf_type='db3', memo_size=64, min_field_size=1, 7544 encoding=None, errors=None):
7545 """ 7546 creates a Character table from a csv file 7547 to_disk will create a table with the same name 7548 filename will be used if provided 7549 field_names default to f0, f1, f2, etc, unless specified (list) 7550 extra_fields can be used to add additional fields -- should be normal field specifiers (list) 7551 """ 7552 with codecs.open(csvfile, 'r', encoding='latin-1', errors=errors) as fd: 7553 reader = csv.reader(fd) 7554 if field_names: 7555 if isinstance(field_names, basestring): 7556 field_names = field_names.split() 7557 if ' ' not in field_names[0]: 7558 field_names = ['%s M' % fn for fn in field_names] 7559 else: 7560 field_names = ['f0 M'] 7561 mtable = Table(':memory:', [field_names[0]], dbf_type=dbf_type, memo_size=memo_size, codepage=encoding, on_disk=False) 7562 mtable.open() 7563 fields_so_far = 1 7564 #for row in reader: 7565 while reader: 7566 try: 7567 row = next(reader) 7568 except UnicodeEncodeError: 7569 row = [''] 7570 except StopIteration: 7571 break 7572 while fields_so_far < len(row): 7573 if fields_so_far == len(field_names): 7574 field_names.append('f%d M' % fields_so_far) 7575 mtable.add_fields(field_names[fields_so_far]) 7576 fields_so_far += 1 7577 mtable.append(tuple(row)) 7578 if filename: 7579 to_disk = True 7580 if not to_disk: 7581 if extra_fields: 7582 mtable.add_fields(extra_fields) 7583 else: 7584 if not filename: 7585 filename = os.path.splitext(csvfile)[0] 7586 length = [min_field_size] * len(field_names) 7587 for record in mtable: 7588 for i in index(mtable.field_names): 7589 length[i] = max(length[i], len(record[i])) 7590 fields = mtable.field_names 7591 fielddef = [] 7592 for i in index(length): 7593 if length[i] < 255: 7594 fielddef.append('%s C(%d)' % (fields[i], length[i])) 7595 else: 7596 fielddef.append('%s M' % (fields[i])) 7597 if extra_fields: 7598 fielddef.extend(extra_fields) 7599 csvtable = Table(filename, fielddef, dbf_type=dbf_type, codepage=encoding) 7600 csvtable.open() 7601 for record in mtable: 7602 csvtable.append(scatter(record)) 7603 csvtable.close() 7604 return csvtable 7605 mtable.close() 7606 return mtable
7607
7608 -def get_fields(table_name):
7609 """ 7610 returns the list of field names of a table 7611 """ 7612 table = Table(table_name) 7613 return table.field_names
7614
7615 -def info(table_name):
7616 """ 7617 prints table info 7618 """ 7619 table = Table(table_name) 7620 print(str(table))
7621
7622 -def rename_field(table_name, oldfield, newfield):
7623 """ 7624 renames a field in a table 7625 """ 7626 table = Table(table_name) 7627 try: 7628 table.rename_field(oldfield, newfield) 7629 finally: 7630 table.close()
7631
7632 -def structure(table_name, field=None):
7633 """ 7634 returns the definition of a field (or all fields) 7635 """ 7636 table = Table(table_name) 7637 return table.structure(field)
7638
7639 -def hex_dump(records):
7640 """ 7641 just what it says ;) 7642 """ 7643 for index, dummy in enumerate(records): 7644 chars = dummy._data 7645 print("%2d: " % (index,)) 7646 for char in chars[1:]: 7647 print(" %2x " % (char,)) 7648 print()
7649
7650 7651 # Foxpro functions 7652 7653 -def gather(record, data, drop=False):
7654 """ 7655 saves data into a record's fields; writes to disk if not in flux 7656 keys with no matching field will raise a FieldMissingError 7657 exception unless drop_missing == True; 7658 if an Exception occurs the record is restored before reraising 7659 """ 7660 if isinstance(record, Record) and record._meta.status == CLOSED: 7661 raise DbfError("%s is closed; cannot modify record" % record._meta.filename) 7662 record_in_flux = not record._write_to_disk 7663 if not record_in_flux: 7664 record._start_flux() 7665 try: 7666 record_fields = field_names(record) 7667 for key in field_names(data): 7668 value = data[key] 7669 if not key in record_fields: 7670 if drop: 7671 continue 7672 raise FieldMissingError(key) 7673 record[key] = value 7674 except: 7675 if not record_in_flux: 7676 record._rollback_flux() 7677 raise 7678 if not record_in_flux: 7679 record._commit_flux()
7680
7681 -def scan(table, direction='forward', filter=lambda rec: True):
7682 """ 7683 moves record pointer forward 1; returns False if Eof/Bof reached 7684 table must be derived from _Navigation or have skip() method 7685 """ 7686 if direction not in ('forward', 'reverse'): 7687 raise TypeError("direction should be 'forward' or 'reverse', not %r" % direction) 7688 if direction == 'forward': 7689 n = +1 7690 no_more_records = Eof 7691 else: 7692 n = -1 7693 no_more_records = Bof 7694 try: 7695 while True: 7696 table.skip(n) 7697 if filter(table.current_record): 7698 return True 7699 except no_more_records: 7700 return False
7701
7702 -def scatter(record, as_type=create_template, _mappings=getattr(collections, 'Mapping', dict)):
7703 """ 7704 returns as_type() of [fieldnames and] values. 7705 """ 7706 if isinstance(as_type, types.FunctionType): 7707 return as_type(record) 7708 elif issubclass(as_type, _mappings): 7709 return as_type(zip(field_names(record), record)) 7710 else: 7711 return as_type(record)
7712