dbf-0.88.16/0000755000175100017510000000000011506103361011367 5ustar margamargadbf-0.88.16/PKG-INFO0000666000175100017510000000156711477220026012506 0ustar margamargaMetadata-Version: 1.1 Name: dbf Version: 0.88.16 Summary: Pure python package for reading/writing dBase, FoxPro, and Visual FoxPro .dbf files (including memos) Home-page: http://groups.google.com/group/python-dbase Author: Ethan Furman Author-email: ethan@stoneleaf.us License: BSD License Description: Currently supports dBase III, and FoxPro - Visual FoxPro 6 tables. Text is returned as unicode, and codepage settings in tables are honored. Documentation needs work, but author is very responsive to e-mails. Not supported: index files, null fields, auto-incrementing fields. Platform: UNKNOWN Classifier: Development Status :: 4 - Beta Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: BSD License Classifier: Programming Language :: Python Classifier: Topic :: Database Provides: dbf dbf-0.88.16/dbf/0000755000175100017510000000000011506103361012122 5ustar margamargadbf-0.88.16/dbf/dates.py0000666000175100017510000004437611401173462013621 0ustar margamarga"""wrappers around datetime objects to allow null values""" import datetime import time class Date(object): "adds null capable datetime.date constructs" __slots__ = ['_date'] def __new__(cls, year=None, month=0, day=0): """date should be either a datetime.date, a string in yyyymmdd format, or date/month/day should all be appropriate integers""" nd = object.__new__(cls) nd._date = False if type(year) == datetime.date: nd._date = year elif type(year) == Date: nd._date = year._date elif year == 'no date': pass # date object is already False elif year is not None: nd._date = datetime.date(year, month, day) return nd def __add__(yo, other): if yo and type(other) == datetime.timedelta: return Date(yo._date + other) else: return NotImplemented def __eq__(yo, other): if yo: if type(other) == datetime.date: return yo._date == other elif type(other) == Date: if other: return yo._date == other._date return False else: if type(other) == datetime.date: return False elif type(other) == Date: if other: return False return True return NotImplemented def __getattr__(yo, name): if yo: attribute = yo._date.__getattribute__(name) return attribute else: raise AttributeError('null Date object has no attribute %s' % name) def __ge__(yo, other): if yo: if type(other) == datetime.date: return yo._date >= other elif type(other) == Date: if other: return yo._date >= other._date return False else: if type(other) == datetime.date: return False elif type(other) == Date: if other: return False return True return NotImplemented def __gt__(yo, other): if yo: if type(other) == datetime.date: return yo._date > other elif type(other) == Date: if other: return yo._date > other._date return True else: if type(other) == datetime.date: return False elif type(other) == Date: if other: return False return False return NotImplemented def __hash__(yo): return yo._date.__hash__() def __le__(yo, other): if yo: if type(other) == datetime.date: return yo._date <= other elif type(other) == Date: if other: return yo._date <= other._date return False else: if type(other) == datetime.date: return True elif type(other) == Date: if other: return True return True return NotImplemented def __lt__(yo, other): if yo: if type(other) == datetime.date: return yo._date < other elif type(other) == Date: if other: return yo._date < other._date return False else: if type(other) == datetime.date: return True elif 
type(other) == Date: if other: return True return False return NotImplemented def __ne__(yo, other): if yo: if type(other) == datetime.date: return yo._date != other elif type(other) == Date: if other: return yo._date != other._date return True else: if type(other) == datetime.date: return True elif type(other) == Date: if other: return True return False return NotImplemented def __nonzero__(yo): if yo._date: return True return False __radd__ = __add__ def __rsub__(yo, other): if yo and type(other) == datetime.date: return other - yo._date elif yo and type(other) == Date: return other._date - yo._date elif yo and type(other) == datetime.timedelta: return Date(other - yo._date) else: return NotImplemented def __repr__(yo): if yo: return "Date(%d, %d, %d)" % yo.timetuple()[:3] else: return "Date()" def __str__(yo): if yo: return yo.isoformat() return "no date" def __sub__(yo, other): if yo and type(other) == datetime.date: return yo._date - other elif yo and type(other) == Date: return yo._date - other._date elif yo and type(other) == datetime.timedelta: return Date(yo._date - other) else: return NotImplemented def date(yo): if yo: return yo._date return None @classmethod def fromordinal(cls, number): if number: return cls(datetime.date.fromordinal(number)) return cls() @classmethod def fromtimestamp(cls, timestamp): return cls(datetime.date.fromtimestamp(timestamp)) @classmethod def fromymd(cls, yyyymmdd): if yyyymmdd in ('', ' ','no date'): return cls() return cls(datetime.date(int(yyyymmdd[:4]), int(yyyymmdd[4:6]), int(yyyymmdd[6:]))) def strftime(yo, format): if yo: return yo._date.strftime(format) return '' @classmethod def today(cls): return cls(datetime.date.today()) def ymd(yo): if yo: return "%04d%02d%02d" % yo.timetuple()[:3] else: return ' ' Date.max = Date(datetime.date.max) Date.min = Date(datetime.date.min) class DateTime(object): "adds null capable datetime.datetime constructs" __slots__ = ['_datetime'] def __new__(cls, year=None, month=0, day=0, hour=0, minute=0, second=0, microsec=0): """year may be a datetime.datetime""" ndt = object.__new__(cls) ndt._datetime = False if type(year) == datetime.datetime: ndt._datetime = year elif type(year) == DateTime: ndt._datetime = year._datetime elif year is not None: ndt._datetime = datetime.datetime(year, month, day, hour, minute, second, microsec) return ndt def __add__(yo, other): if yo and type(other) == datetime.timedelta: return DateTime(yo._datetime + other) else: return NotImplemented def __eq__(yo, other): if yo: if type(other) == datetime.datetime: return yo._datetime == other elif type(other) == DateTime: if other: return yo._datetime == other._datetime return False else: if type(other) == datetime.datetime: return False elif type(other) == DateTime: if other: return False return True return NotImplemented def __getattr__(yo, name): if yo: attribute = yo._datetime.__getattribute__(name) return attribute else: raise AttributeError('null DateTime object has no attribute %s' % name) def __ge__(yo, other): if yo: if type(other) == datetime.datetime: return yo._datetime >= other elif type(other) == DateTime: if other: return yo._datetime >= other._datetime return False else: if type(other) == datetime.datetime: return False elif type(other) == DateTime: if other: return False return True return NotImplemented def __gt__(yo, other): if yo: if type(other) == datetime.datetime: return yo._datetime > other elif type(other) == DateTime: if other: return yo._datetime > other._datetime return True else: if type(other) == 
datetime.datetime: return False elif type(other) == DateTime: if other: return False return False return NotImplemented def __hash__(yo): return yo._datetime.__hash__() def __le__(yo, other): if yo: if type(other) == datetime.datetime: return yo._datetime <= other elif type(other) == DateTime: if other: return yo._datetime <= other._datetime return False else: if type(other) == datetime.datetime: return True elif type(other) == DateTime: if other: return True return True return NotImplemented def __lt__(yo, other): if yo: if type(other) == datetime.datetime: return yo._datetime < other elif type(other) == DateTime: if other: return yo._datetime < other._datetime return False else: if type(other) == datetime.datetime: return True elif type(other) == DateTime: if other: return True return False return NotImplemented def __ne__(yo, other): if yo: if type(other) == datetime.datetime: return yo._datetime != other elif type(other) == DateTime: if other: return yo._datetime != other._datetime return True else: if type(other) == datetime.datetime: return True elif type(other) == DateTime: if other: return True return False return NotImplemented def __nonzero__(yo): if yo._datetime is not False: return True return False __radd__ = __add__ def __rsub__(yo, other): if yo and type(other) == datetime.datetime: return other - yo._datetime elif yo and type(other) == DateTime: return other._datetime - yo._datetime elif yo and type(other) == datetime.timedelta: return DateTime(other - yo._datetime) else: return NotImplemented def __repr__(yo): if yo: return "DateTime(%d, %d, %d, %d, %d, %d, %d, %d, %d)" % yo._datetime.timetuple()[:] else: return "DateTime()" def __str__(yo): if yo: return yo.isoformat() return "no datetime" def __sub__(yo, other): if yo and type(other) == datetime.datetime: return yo._datetime - other elif yo and type(other) == DateTime: return yo._datetime - other._datetime elif yo and type(other) == datetime.timedelta: return DateTime(yo._datetime - other) else: return NotImplemented @classmethod def combine(cls, date, time): if Date(date) and Time(time): return cls(date.year, date.month, date.day, time.hour, time.minute, time.second, time.microsecond) return cls() def date(yo): if yo: return Date(yo.year, yo.month, yo.day) return Date() def datetime(yo): if yo: return yo._datetime return None @classmethod def fromordinal(cls, number): if number: return cls(datetime.datetime.fromordinal(number)) else: return cls() @classmethod def fromtimestamp(cls, timestamp): return DateTime(datetime.datetime.fromtimestamp(timestamp)) @classmethod def now(cls): return cls(datetime.datetime.now()) def time(yo): if yo: return Time(yo.hour, yo.minute, yo.second, yo.microsecond) return Time() @classmethod def utcnow(cls): return cls(datetime.datetime.utcnow()) @classmethod def today(cls): return cls(datetime.datetime.today()) DateTime.max = DateTime(datetime.datetime.max) DateTime.min = DateTime(datetime.datetime.min) class Time(object): "adds null capable datetime.time constructs" __slots__ = ['_time'] def __new__(cls, hour=None, minute=0, second=0, microsec=0): """hour may be a datetime.time""" nt = object.__new__(cls) nt._time = False if type(hour) == datetime.time: nt._time = hour elif type(hour) == Time: nt._time = hour._time elif hour is not None: nt._time = datetime.time(hour, minute, second, microsec) return nt def __add__(yo, other): if yo and type(other) == datetime.timedelta: return Time(yo._time + other) else: return NotImplemented def __eq__(yo, other): if yo: if type(other) == datetime.time: 
return yo._time == other elif type(other) == Time: if other: return yo._time == other._time return False else: if type(other) == datetime.time: return False elif type(other) == Time: if other: return False return True return NotImplemented def __getattr__(yo, name): if yo: attribute = yo._time.__getattribute__(name) return attribute else: raise AttributeError('null Time object has no attribute %s' % name) def __ge__(yo, other): if yo: if type(other) == datetime.time: return yo._time >= other elif type(other) == Time: if other: return yo._time >= other._time return False else: if type(other) == datetime.time: return False elif type(other) == Time: if other: return False return True return NotImplemented def __gt__(yo, other): if yo: if type(other) == datetime.time: return yo._time > other elif type(other) == DateTime: if other: return yo._time > other._time return True else: if type(other) == datetime.time: return False elif type(other) == Time: if other: return False return False return NotImplemented def __hash__(yo): return yo._datetime.__hash__() def __le__(yo, other): if yo: if type(other) == datetime.time: return yo._time <= other elif type(other) == Time: if other: return yo._time <= other._time return False else: if type(other) == datetime.time: return True elif type(other) == Time: if other: return True return True return NotImplemented def __lt__(yo, other): if yo: if type(other) == datetime.time: return yo._time < other elif type(other) == Time: if other: return yo._time < other._time return False else: if type(other) == datetime.time: return True elif type(other) == Time: if other: return True return False return NotImplemented def __ne__(yo, other): if yo: if type(other) == datetime.time: return yo._time != other elif type(other) == Time: if other: return yo._time != other._time return True else: if type(other) == datetime.time: return True elif type(other) == Time: if other: return True return False return NotImplemented def __nonzero__(yo): if yo._time is not False: return True return False __radd__ = __add__ def __rsub__(yo, other): if yo and type(other) == datetime.time: return other - yo._time elif yo and type(other) == Time: return other._time - yo._time elif yo and type(other) == datetime.timedelta: return Time(other - yo._datetime) else: return NotImplemented def __repr__(yo): if yo: return "Time(%d, %d, %d, %d)" % (yo.hour, yo.minute, yo.second, yo.microsecond) else: return "Time()" def __str__(yo): if yo: return yo.isoformat() return "no time" def __sub__(yo, other): if yo and type(other) == datetime.time: return yo._time - other elif yo and type(other) == Time: return yo._time - other._time elif yo and type(other) == datetime.timedelta: return Time(yo._time - other) else: return NotImplemented Time.max = Time(datetime.time.max) Time.min = Time(datetime.time.min) dbf-0.88.16/dbf/exceptions.py0000666000175100017510000000246011401173462014666 0ustar margamarga"warnings and errors" class DbfError(Exception): "Fatal errors elicit this response." 
    pass
class DataOverflow(DbfError):
    "Data too large for field"
    def __init__(yo, message, data=None):
        super(DataOverflow, yo).__init__(message)
        yo.data = data
class FieldMissing(KeyError, DbfError):
    "Field does not exist in table"
    def __init__(yo, fieldname):
        super(FieldMissing, yo).__init__('%s: no such field in table' % fieldname)
        yo.data = fieldname
class NonUnicode(DbfError):
    "Data for table not in unicode"
    def __init__(yo, message=None):
        super(NonUnicode, yo).__init__(message)
class DbfWarning(Exception):
    "Normal operations elicit this response"
class Eof(DbfWarning, StopIteration):
    "End of file reached"
    message = 'End of file reached'
    def __init__(yo):
        super(Eof, yo).__init__(yo.message)
class Bof(DbfWarning, StopIteration):
    "Beginning of file reached"
    message = 'Beginning of file reached'
    def __init__(yo):
        super(Bof, yo).__init__(yo.message)
class DoNotIndex(DbfWarning):
    "Returned by indexing functions to suppress a record from becoming part of the index"
    message = 'Not indexing record'
    def __init__(yo):
        super(DoNotIndex, yo).__init__(yo.message)
dbf-0.88.16/dbf/html/
dbf-0.88.16/dbf/html/dbf.old-pysrc.html
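A short usage sketch (not part of the distribution) of the null-capable wrappers defined in dbf/dates.py above; the dates shown are made up for illustration:

from dbf.dates import Date

real = Date(2010, 12, 31)          # wraps a datetime.date
null = Date()                      # the null date -- evaluates as False
print bool(real), bool(null)       # True False
print real.ymd()                   # 20101231
print repr(null.ymd())             # '        ' (eight spaces -- an empty dBase date field)
print str(null)                    # no date
print Date.fromymd('') == null     # True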
Source Code for Package dbf.old

  1  """ 
  2  Copyright 
  3  ========= 
  4      - Copyright: 2008-2009 Ad-Mail, Inc -- All rights reserved. 
  5      - Author: Ethan Furman 
  6      - Contact: ethanf@admailinc.com 
  7      - Organization: Ad-Mail, Inc. 
  8      - Version: 0.87.003 as of 03 Dec 2009 
  9   
 10  Redistribution and use in source and binary forms, with or without 
 11  modification, are permitted provided that the following conditions are met: 
 12      - Redistributions of source code must retain the above copyright 
 13        notice, this list of conditions and the following disclaimer. 
 14      - Redistributions in binary form must reproduce the above copyright 
 15        notice, this list of conditions and the following disclaimer in the 
 16        documentation and/or other materials provided with the distribution. 
 17      - Neither the name of Ad-Mail, Inc nor the 
 18        names of its contributors may be used to endorse or promote products 
 19        derived from this software without specific prior written permission. 
 20   
 21  THIS SOFTWARE IS PROVIDED BY Ad-Mail, Inc ''AS IS'' AND ANY 
 22  EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 
 23  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 
 24  DISCLAIMED. IN NO EVENT SHALL Ad-Mail, Inc BE LIABLE FOR ANY 
 25  DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 
 26  (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 
 27  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 
 28  ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 
 29  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 
 30  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 31   
 32  B{I{Summary}} 
 33   
 34  Python package for reading/writing dBase III and VFP 6 tables and memos 
 35   
 36  The entire table is read into memory, and all operations occur on the in-memory 
 37  table, with data changes being written to disk as they occur. 
 38   
 39  Goals:  programming style with databases 
 40      - C{table = dbf.table('table name' [, fielddesc[, fielddesc[, ....]]])} 
 41          - fielddesc examples:  C{name C(30); age N(3,0); wisdom M; marriage D} 
 42      - C{record = [ table.current() | table[int] | table.append() | table.[next|prev|top|bottom|goto]() ]} 
 43      - C{record.field | record['field']} accesses the field 
 44   
 45  NOTE:  Of the VFP data types, auto-increment and null settings are not implemented. 
 46  """ 
 47  import os 
 48  import csv 
 49   
 50  from dbf.dates import Date, DateTime, Time 
 51  from dbf.exceptions import DbfWarning, Bof, Eof, DbfError, DataOverflow, FieldMissing, DoNotIndex 
 52  from dbf.tables import DbfTable, Db3Table, VfpTable, FpTable, List, DbfCsv 
 53  from dbf.tables import sql, ascii, codepage, encoding, version_map 
 54   
 55  version = (0, 88, 07) 
 56   
 57  __docformat__ = 'epytext' 
 58   
 59  def Table(filename, field_specs='', memo_size=128, ignore_memos=False, \
 60            read_only=False, keep_memos=False, meta_only=False, dbf_type=None, codepage=None):
 61      "returns an open table of the correct dbf_type, or creates it if field_specs is given"
 62      if dbf_type is not None:
 63          dbf_type = dbf_type.lower()
 64          if dbf_type == 'db3':
 65              return Db3Table(filename, field_specs, memo_size, ignore_memos, read_only, keep_memos, meta_only, codepage)
 66          elif dbf_type == 'fp':
 67              return FpTable(filename, field_specs, memo_size, ignore_memos, read_only, keep_memos, meta_only, codepage)
 68          elif dbf_type == 'vfp':
 69              return VfpTable(filename, field_specs, memo_size, ignore_memos, read_only, keep_memos, meta_only, codepage)
 70          elif dbf_type == 'dbf':
 71              return DbfTable(filename, field_specs, memo_size, ignore_memos, read_only, keep_memos, meta_only, codepage)
 72          else:
 73              raise DbfError("Unknown table type: %s" % dbf_type)
 74      else:
 75          possibles = guess_table_type(filename)
 76          if len(possibles) == 1:
 77              return possibles[0][2](filename, field_specs, memo_size, ignore_memos, \
 78                  read_only, keep_memos, meta_only)
 79          elif len(possibles) > 1:
 80              types = ', '.join(["%s" % item[1] for item in possibles])
 81              abbrs = '[' + ' | '.join(["%s" % item[0] for item in possibles]) + ']'
 82              raise DbfError("Table could be any of %s. Please specify %s when opening" % (types, abbrs))
 83          else:
 84              raise DbfError("Shouldn't have gotten here -- yell at programmer!")
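# --- illustrative usage sketch, not part of the original listing ---
# opens (or creates) a table via the factory above; 'people' is a hypothetical
# filename and the field specs follow the syntax shown in the module docstring
# ("name C(30); age N(3,0); wisdom M; marriage D")
table = Table('people', 'name C(30); age N(3,0)', dbf_type='db3')
table.append({'name': 'Ethan', 'age': 30})
for record in table:
    print record.name, record.age
table.close()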
85 -def index(sequence):
86 "returns integers 0 - len(sequence)" 87 for i in xrange(len(sequence)): 88 yield i
89 -def guess_table_type(filename):
90 reported = table_type(filename) 91 possibles = [] 92 version = reported[0] 93 for tabletype in (Db3Table, FpTable, VfpTable): 94 if version in tabletype._supported_tables: 95 possibles.append((tabletype._versionabbv, tabletype._version, tabletype)) 96 if not possibles: 97 raise DbfError("Tables of type %s not supported" % str(reported)) 98 return possibles
99 -def table_type(filename):
100 "returns text representation of a table's dbf version" 101 base, ext = os.path.splitext(filename) 102 if ext == '': 103 filename = base + '.dbf' 104 if not os.path.exists(filename): 105 raise DbfError('File %s not found' % filename) 106 fd = open(filename) 107 version = fd.read(1) 108 fd.close() 109 fd = None 110 if not version in version_map: 111 raise DbfError("Unknown dbf type: %s (%x)" % (version, ord(version))) 112 return version, version_map[version]
113
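# --- illustrative sketch, not part of the original listing ---
# table_type() reads only the first byte of the file and maps it through
# version_map; guess_table_type() narrows that down to the Table classes able
# to open it ('people.dbf' is hypothetical)
print table_type('people.dbf')                 # e.g. ('\x03', 'dBase III Plus')
for abbr, name, cls in guess_table_type('people.dbf'):
    print abbr, name, cls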
114 -def add_fields(table, field_specs):
115 "adds fields to an existing table" 116 table = Table(table) 117 try: 118 table.add_fields(field_specs) 119 finally: 120 table.close()
121 -def delete_fields(table, field_names):
122 "deletes fields from an existing table" 123 table = Table(table) 124 try: 125 table.delete_fields(field_names) 126 finally: 127 table.close()
128 -def export(table, filename='', fields='', format='csv', header=True):
129 "creates a csv or tab-delimited file from an existing table" 130 if fields is None: 131 fields = [] 132 table = Table(table) 133 try: 134 table.export(filename=filename, field_specs=fields, format=format, header=header) 135 finally: 136 table.close()
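# --- illustrative sketch, not part of the original listing ---
# the helpers above open the named table, do one job, and close it again;
# 'people.dbf' is hypothetical
add_fields('people.dbf', 'phone C(15)')
export('people.dbf', filename='people.csv', format='csv', header=True)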
137 -def first_record(table):
138 "prints the first record of a table" 139 table = Table(table) 140 try: 141 print str(table[0]) 142 finally: 143 table.close()
144 -def from_csv(csvfile, to_disk=False, filename=None, field_names=None, extra_fields=None, dbf_type='db3', memo_size=64, min_field_size=1):
145 """creates a Character table from a csv file 146 to_disk will create a table with the same name 147 filename will be used if provided 148 field_names default to f0, f1, f2, etc, unless specified (list) 149 extra_fields can be used to add additional fields -- should be normal field specifiers (list)""" 150 reader = csv.reader(open(csvfile)) 151 if field_names: 152 field_names = ['%s M' % fn for fn in field_names] 153 else: 154 field_names = ['f0 M'] 155 mtable = Table(':memory:', [field_names[0]], dbf_type=dbf_type, memo_size=memo_size) 156 fields_so_far = 1 157 for row in reader: 158 while fields_so_far < len(row): 159 if fields_so_far == len(field_names): 160 field_names.append('f%d M' % fields_so_far) 161 mtable.add_fields(field_names[fields_so_far]) 162 fields_so_far += 1 163 mtable.append(tuple(row)) 164 if not to_disk: 165 if extra_fields: 166 mtable.add_fields(extra_fields) 167 else: 168 if not filename: 169 filename = os.path.splitext(csvfile)[0] 170 length = [min_field_size] * len(field_names) 171 for record in mtable: 172 for i in index(record.field_names): 173 length[i] = max(length[i], len(record[i])) 174 fields = mtable.field_names 175 fielddef = [] 176 for i in index(length): 177 if length[i] < 255: 178 fielddef.append('%s C(%d)' % (fields[i], length[i])) 179 else: 180 fielddef.append('%s M' % (fields[i])) 181 if extra_fields: 182 fielddef.extend(extra_fields) 183 csvtable = Table(filename, fielddef, dbf_type=dbf_type) 184 for record in mtable: 185 csvtable.append(record.scatter_fields()) 186 return csvtable 187 return mtable
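# --- illustrative sketch, not part of the original listing ---
# builds an in-memory table from a (hypothetical) csv file; with to_disk=True
# a sized Character/Memo table is also written to disk next to it
csvtable = from_csv('contacts.csv', to_disk=True, dbf_type='db3')
print csvtable.field_names                     # ['f0', 'f1', ...] unless field_names was given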
188 -def get_fields(table):
189 "returns the list of field names of a table" 190 table = Table(table) 191 return table.field_names
192 -def info(table):
193 "prints table info" 194 table = Table(table) 195 print str(table)
196 -def rename_field(table, oldfield, newfield):
197 "renames a field in a table" 198 table = Table(table) 199 try: 200 table.rename_field(oldfield, newfield) 201 finally: 202 table.close()
203 -def structure(table, field=None):
204 "returns the definition of a field (or all fields)" 205 table = Table(table) 206 return table.structure(field)
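# --- illustrative sketch, not part of the original listing ---
# quick inspection helpers ('people.dbf' is hypothetical; output formats are
# indicative only)
print get_fields('people.dbf')                 # e.g. ['name', 'age']
print structure('people.dbf')                  # e.g. ['name C(30)', 'age N(3,0)']
info('people.dbf')                             # prints the table summary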
207 -def hex_dump(records):
208 "just what it says ;)" 209 for index,dummy in enumerate(records): 210 chars = dummy._data 211 print "%2d: " % index, 212 for char in chars[1:]: 213 print " %2x " % ord(char), 214 print
215

dbf-0.88.16/dbf/html/dbf.tables-pysrc.html0000666000175100017510000506077211477216674017166 0ustar margamarga dbf.tables
Source Code for Module dbf.tables

   1  "table definitions" 
   2  import os 
   3  import sys 
   4  import csv 
   5  import codecs 
   6  import locale 
   7  import unicodedata 
   8  import weakref 
   9  from array import array 
  10  from bisect import bisect_left, bisect_right 
  11  from decimal import Decimal 
  12  from shutil import copyfileobj 
  13  import dbf 
  14  from dbf import _io as io 
  15  from dbf.dates import Date, DateTime, Time 
  16  from dbf.exceptions import Bof, Eof, DbfError, DataOverflow, FieldMissing, NonUnicode, DoNotIndex 
  17   
  18  input_decoding = locale.getdefaultlocale()[1]    # treat non-unicode data as ... 
  19  default_codepage = 'cp1252'  # if no codepage specified on dbf creation, use this 
  20  return_ascii = False         # if True -- convert back to icky ascii, losing chars if no mapping 
  21   
  22  version_map = { 
  23          '\x02' : 'FoxBASE', 
  24          '\x03' : 'dBase III Plus', 
  25          '\x04' : 'dBase IV', 
  26          '\x05' : 'dBase V', 
  27          '\x30' : 'Visual FoxPro', 
  28          '\x31' : 'Visual FoxPro (auto increment field)', 
  29          '\x43' : 'dBase IV SQL', 
  30          '\x7b' : 'dBase IV w/memos', 
  31          '\x83' : 'dBase III Plus w/memos', 
  32          '\x8b' : 'dBase IV w/memos', 
  33          '\x8e' : 'dBase IV w/SQL table', 
  34          '\xf5' : 'FoxPro w/memos'} 
  35   
  36  code_pages = { 
  37          '\x00' : ('ascii', "plain ol' ascii"), 
  38          '\x01' : ('cp437', 'U.S. MS-DOS'), 
  39          '\x02' : ('cp850', 'International MS-DOS'), 
  40          '\x03' : ('cp1252', 'Windows ANSI'), 
  41          '\x04' : ('mac_roman', 'Standard Macintosh'), 
  42          '\x08' : ('cp865', 'Danish OEM'), 
  43          '\x09' : ('cp437', 'Dutch OEM'), 
  44          '\x0A' : ('cp850', 'Dutch OEM (secondary)'), 
  45          '\x0B' : ('cp437', 'Finnish OEM'), 
  46          '\x0D' : ('cp437', 'French OEM'), 
  47          '\x0E' : ('cp850', 'French OEM (secondary)'), 
  48          '\x0F' : ('cp437', 'German OEM'), 
  49          '\x10' : ('cp850', 'German OEM (secondary)'), 
  50          '\x11' : ('cp437', 'Italian OEM'), 
  51          '\x12' : ('cp850', 'Italian OEM (secondary)'), 
  52          '\x13' : ('cp932', 'Japanese Shift-JIS'), 
  53          '\x14' : ('cp850', 'Spanish OEM (secondary)'), 
  54          '\x15' : ('cp437', 'Swedish OEM'), 
  55          '\x16' : ('cp850', 'Swedish OEM (secondary)'), 
  56          '\x17' : ('cp865', 'Norwegian OEM'), 
  57          '\x18' : ('cp437', 'Spanish OEM'), 
  58          '\x19' : ('cp437', 'English OEM (Britain)'), 
  59          '\x1A' : ('cp850', 'English OEM (Britain) (secondary)'), 
  60          '\x1B' : ('cp437', 'English OEM (U.S.)'), 
  61          '\x1C' : ('cp863', 'French OEM (Canada)'), 
  62          '\x1D' : ('cp850', 'French OEM (secondary)'), 
  63          '\x1F' : ('cp852', 'Czech OEM'), 
  64          '\x22' : ('cp852', 'Hungarian OEM'), 
  65          '\x23' : ('cp852', 'Polish OEM'), 
  66          '\x24' : ('cp860', 'Portuguese OEM'), 
  67          '\x25' : ('cp850', 'Portuguese OEM (secondary)'), 
  68          '\x26' : ('cp866', 'Russian OEM'), 
  69          '\x37' : ('cp850', 'English OEM (U.S.) (secondary)'), 
  70          '\x40' : ('cp852', 'Romanian OEM'), 
  71          '\x4D' : ('cp936', 'Chinese GBK (PRC)'), 
  72          '\x4E' : ('cp949', 'Korean (ANSI/OEM)'), 
  73          '\x4F' : ('cp950', 'Chinese Big 5 (Taiwan)'), 
  74          '\x50' : ('cp874', 'Thai (ANSI/OEM)'), 
  75          '\x57' : ('cp1252', 'ANSI'), 
  76          '\x58' : ('cp1252', 'Western European ANSI'), 
  77          '\x59' : ('cp1252', 'Spanish ANSI'), 
  78          '\x64' : ('cp852', 'Eastern European MS-DOS'), 
  79          '\x65' : ('cp866', 'Russian MS-DOS'), 
  80          '\x66' : ('cp865', 'Nordic MS-DOS'), 
  81          '\x67' : ('cp861', 'Icelandic MS-DOS'), 
  82          '\x68' : (None, 'Kamenicky (Czech) MS-DOS'), 
  83          '\x69' : (None, 'Mazovia (Polish) MS-DOS'), 
  84          '\x6a' : ('cp737', 'Greek MS-DOS (437G)'), 
  85          '\x6b' : ('cp857', 'Turkish MS-DOS'), 
  86          '\x78' : ('cp950', 'Traditional Chinese (Hong Kong SAR, Taiwan) Windows'), 
  87          '\x79' : ('cp949', 'Korean Windows'), 
  88          '\x7a' : ('cp936', 'Chinese Simplified (PRC, Singapore) Windows'), 
  89          '\x7b' : ('cp932', 'Japanese Windows'), 
  90          '\x7c' : ('cp874', 'Thai Windows'), 
  91          '\x7d' : ('cp1255', 'Hebrew Windows'), 
  92          '\x7e' : ('cp1256', 'Arabic Windows'), 
  93          '\xc8' : ('cp1250', 'Eastern European Windows'), 
  94          '\xc9' : ('cp1251', 'Russian Windows'), 
  95          '\xca' : ('cp1254', 'Turkish Windows'), 
  96          '\xcb' : ('cp1253', 'Greek Windows'), 
  97          '\x96' : ('mac_cyrillic', 'Russian Macintosh'), 
  98          '\x97' : ('mac_latin2', 'Macintosh EE'), 
  99          '\x98' : ('mac_greek', 'Greek Macintosh') } 
 100   
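# --- illustrative sketch, not part of the original listing ---
# the code-page byte stored at offset 29 of the table header (see
# _dbfTableHeader below) selects the codec used for character fields
codec, description = code_pages['\x03']
print codec, '--', description                 # cp1252 -- Windows ANSI
print repr('caf\xe9'.decode(codec))            # u'caf\xe9'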
 101  if sys.version_info[:2] < (2, 6): 
102 # define our own property type 103 - class property(object):
104 "Emulate PyProperty_Type() in Objects/descrobject.c" 105
106 - def __init__(self, fget=None, fset=None, fdel=None, doc=None):
107 self.fget = fget 108 self.fset = fset 109 self.fdel = fdel 110 self.__doc__ = doc or fget.__doc__
111 - def __call__(self, func):
112 self.fget = func 113 if not self.__doc__: 114 self.__doc__ = fget.__doc__
115 - def __get__(self, obj, objtype=None):
116 if obj is None: 117 return self 118 if self.fget is None: 119 raise AttributeError, "unreadable attribute" 120 return self.fget(obj)
121 - def __set__(self, obj, value):
122 if self.fset is None: 123 raise AttributeError, "can't set attribute" 124 self.fset(obj, value)
125 - def __delete__(self, obj):
126 if self.fdel is None: 127 raise AttributeError, "can't delete attribute" 128 self.fdel(obj)
129 - def setter(self, func):
130 self.fset = func 131 return self
132 - def deleter(self, func):
133 self.fdel = func 134 return self
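# --- illustrative sketch, not part of the original listing ---
# the shim above is spelled exactly like the built-in, so the "@property"
# decorators used later in this module work unchanged on Python < 2.6
# (note: __call__ above reads fget.__doc__ where func.__doc__ appears intended)
class _PropertyDemo(object):
    @property
    def answer(self):
        "a read-only attribute"
        return 42
print _PropertyDemo().answer                   # 42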
135 # Internal classes
136 -class _DbfRecord(object):
137 """Provides routines to extract and save data within the fields of a dbf record.""" 138 __slots__ = ['_recnum', '_layout', '_data', '_dirty', '__weakref__']
139 - def _retrieveFieldValue(yo, record_data, fielddef):
140 """calls appropriate routine to fetch value stored in field from array 141 @param record_data: the data portion of the record 142 @type record_data: array of characters 143 @param fielddef: description of the field definition 144 @type fielddef: dictionary with keys 'type', 'start', 'length', 'end', 'decimals', and 'flags' 145 @returns: python data stored in field""" 146 147 field_type = fielddef['type'] 148 retrieve = yo._layout.fieldtypes[field_type]['Retrieve'] 149 datum = retrieve(record_data, fielddef, yo._layout.memo) 150 if field_type in yo._layout.character_fields: 151 datum = yo._layout.decoder(datum)[0] 152 if yo._layout.return_ascii: 153 try: 154 datum = yo._layout.output_encoder(datum)[0] 155 except UnicodeEncodeError: 156 datum = unicodedata.normalize('NFD', datum).encode('ascii','ignore') 157 return datum
158 - def _updateFieldValue(yo, fielddef, value):
159 "calls appropriate routine to convert value to ascii bytes, and save it in record" 160 field_type = fielddef['type'] 161 update = yo._layout.fieldtypes[field_type]['Update'] 162 if field_type in yo._layout.character_fields: 163 if not isinstance(value, unicode): 164 if yo._layout.input_decoder is None: 165 raise NonUnicode("String not in unicode format, no default encoding specified") 166 value = yo._layout.input_decoder(value)[0] # input ascii => unicode 167 value = yo._layout.encoder(value)[0] # unicode => table ascii 168 bytes = array('c', update(value, fielddef, yo._layout.memo)) 169 size = fielddef['length'] 170 if len(bytes) > size: 171 raise DataOverflow("tried to store %d bytes in %d byte field" % (len(bytes), size)) 172 blank = array('c', ' ' * size) 173 start = fielddef['start'] 174 end = start + size 175 blank[:len(bytes)] = bytes[:] 176 yo._data[start:end] = blank[:] 177 yo._dirty = True
178 - def _update_disk(yo, location='', data=None):
179 if not yo._layout.inmemory: 180 if yo._recnum < 0: 181 raise DbfError("Attempted to update record that has been packed") 182 if location == '': 183 location = yo._recnum * yo._layout.header.record_length + yo._layout.header.start 184 if data is None: 185 data = yo._data 186 yo._layout.dfd.seek(location) 187 yo._layout.dfd.write(data) 188 yo._dirty = False 189 for index in yo.record_table._indexen: 190 index(yo)
191 - def __call__(yo, *specs):
192 results = [] 193 if not specs: 194 specs = yo._layout.index 195 specs = _normalize_tuples(tuples=specs, length=2, filler=[_nop]) 196 for field, func in specs: 197 results.append(func(yo[field])) 198 return tuple(results)
199
200 - def __contains__(yo, key):
201 return key in yo._layout.fields or key in ['record_number','delete_flag']
202 - def __iter__(yo):
203 return (yo[field] for field in yo._layout.fields)
204 - def __getattr__(yo, name):
205 if name[0:2] == '__' and name[-2:] == '__': 206 raise AttributeError, 'Method %s is not implemented.' % name 207 elif name == 'record_number': 208 return yo._recnum 209 elif name == 'delete_flag': 210 return yo._data[0] != ' ' 211 elif not name in yo._layout.fields: 212 raise FieldMissing(name) 213 try: 214 fielddef = yo._layout[name] 215 value = yo._retrieveFieldValue(yo._data[fielddef['start']:fielddef['end']], fielddef) 216 return value 217 except DbfError, error: 218 error.message = "field --%s-- is %s -> %s" % (name, yo._layout.fieldtypes[fielddef['type']]['Type'], error.message) 219 raise
220 - def __getitem__(yo, item):
221 if type(item) in (int, long): 222 if not -yo._layout.header.field_count <= item < yo._layout.header.field_count: 223 raise IndexError("Field offset %d is not in record" % item) 224 return yo[yo._layout.fields[item]] 225 elif type(item) == slice: 226 sequence = [] 227 for index in yo._layout.fields[item]: 228 sequence.append(yo[index]) 229 return sequence 230 elif type(item) == str: 231 return yo.__getattr__(item) 232 else: 233 raise TypeError("%s is not a field name" % item)
234 - def __len__(yo):
235 return yo._layout.header.field_count
236 - def __new__(cls, recnum, layout, kamikaze='', _fromdisk=False):
237 """record = ascii array of entire record; layout=record specification; memo = memo object for table""" 238 record = object.__new__(cls) 239 record._dirty = False 240 record._recnum = recnum 241 record._layout = layout 242 if layout.blankrecord is None and not _fromdisk: 243 record._createBlankRecord() 244 record._data = layout.blankrecord 245 if recnum == -1: # not a disk-backed record 246 return record 247 elif type(kamikaze) == array: 248 record._data = kamikaze[:] 249 elif type(kamikaze) == str: 250 record._data = array('c', kamikaze) 251 else: 252 record._data = kamikaze._data[:] 253 datalen = len(record._data) 254 if datalen < layout.header.record_length: 255 record._data.extend(layout.blankrecord[datalen:]) 256 elif datalen > layout.header.record_length: 257 record._data = record._data[:layout.header.record_length] 258 if not _fromdisk and not layout.inmemory: 259 record._update_disk() 260 return record
261 - def __setattr__(yo, name, value):
262 if name in yo.__slots__: 263 object.__setattr__(yo, name, value) 264 return 265 elif not name in yo._layout.fields: 266 raise FieldMissing(name) 267 fielddef = yo._layout[name] 268 try: 269 yo._updateFieldValue(fielddef, value) 270 except DbfError, error: 271 error.message = "field --%s-- is %s -> %s" % (name, yo._layout.fieldtypes[fielddef['type']]['Type'], error.message) 272 error.data = name 273 raise
274 - def __setitem__(yo, name, value):
275 if type(name) == str: 276 yo.__setattr__(name, value) 277 elif type(name) in (int, long): 278 yo.__setattr__(yo._layout.fields[name], value) 279 elif type(name) == slice: 280 sequence = [] 281 for field in yo._layout.fields[name]: 282 sequence.append(field) 283 if len(sequence) != len(value): 284 raise DbfError("length of slices not equal") 285 for field, val in zip(sequence, value): 286 yo[field] = val 287 else: 288 raise TypeError("%s is not a field name" % name)
289 - def __str__(yo):
290 result = [] 291 for seq, field in enumerate(yo.field_names): 292 result.append("%3d - %-10s: %s" % (seq, field, yo[field])) 293 return '\n'.join(result)
294 - def __repr__(yo):
295 return yo._data.tostring()
296 - def _createBlankRecord(yo):
297 "creates a blank record data chunk" 298 layout = yo._layout 299 ondisk = layout.ondisk 300 layout.ondisk = False 301 yo._data = array('c', ' ' * layout.header.record_length) 302 layout.memofields = [] 303 for field in layout.fields: 304 yo._updateFieldValue(layout[field], layout.fieldtypes[layout[field]['type']]['Blank']()) 305 if layout[field]['type'] in layout.memotypes: 306 layout.memofields.append(field) 307 layout.blankrecord = yo._data[:] 308 layout.ondisk = ondisk
309 - def delete_record(yo):
310 "marks record as deleted" 311 yo._data[0] = '*' 312 yo._dirty = True 313 return yo
314 @property
315 - def field_names(yo):
316 "fields in table/record" 317 return yo._layout.fields[:]
318 - def gather_fields(yo, dictionary, drop=False): # dict, drop_missing=False):
319 "saves a dictionary into a record's fields\nkeys with no matching field will raise a FieldMissing exception unless drop_missing = True" 320 old_data = yo._data[:] 321 try: 322 for key in dictionary: 323 if not key in yo.field_names: 324 if drop: 325 continue 326 raise FieldMissing(key) 327 yo.__setattr__(key, dictionary[key]) 328 except: 329 yo._data[:] = old_data 330 raise 331 return yo
332 @property
333 - def has_been_deleted(yo):
334 "marked for deletion?" 335 return yo._data[0] == '*'
336 - def read_record(yo):
337 "refresh record data from disk" 338 size = yo._layout.header.record_length 339 location = yo._recnum * size + yo._layout.header.start 340 yo._layout.dfd.seek(location) 341 yo._data[:] = yo._meta.dfd.read(size) 342 yo._dirty = False 343 return yo
344 @property
345 - def record_number(yo):
346 "physical record number" 347 return yo._recnum
348 @property
349 - def record_table(yo):
350 table = yo._layout.table() 351 if table is None: 352 raise DbfError("table is no longer available") 353 return table
354 - def check_index(yo):
355 for dbfindex in yo._layout.table()._indexen: 356 dbfindex(yo)
357 - def reset_record(yo, keep_fields=None):
358 "blanks record" 359 if keep_fields is None: 360 keep_fields = [] 361 keep = {} 362 for field in keep_fields: 363 keep[field] = yo[field] 364 if yo._layout.blankrecord == None: 365 yo._createBlankRecord() 366 yo._data[:] = yo._layout.blankrecord[:] 367 for field in keep_fields: 368 yo[field] = keep[field] 369 yo._dirty = True 370 return yo
371 - def scatter_fields(yo, blank=False):
372 "returns a dictionary of fieldnames and values which can be used with gather_fields(). if blank is True, values are empty." 373 keys = yo._layout.fields 374 if blank: 375 values = [yo._layout.fieldtypes[yo._layout[key]['type']]['Blank']() for key in keys] 376 else: 377 values = [yo[field] for field in keys] 378 return dict(zip(keys, values))
379 - def undelete_record(yo):
380 "marks record as active" 381 yo._data[0] = ' ' 382 yo._dirty = True 383 return yo
384 - def write_record(yo, **kwargs):
385 "write record data to disk" 386 if kwargs: 387 yo.gather_fields(kwargs) 388 if yo._dirty: 389 yo._update_disk() 390 return 1 391 return 0
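# --- illustrative sketch, not part of the original listing ---
# typical round-trip with the record methods above ('table' is assumed to be
# an already-open table whose records have a 'name' field)
record = table[0]
data = record.scatter_fields()                 # {'name': u'Ethan', ...}
data['name'] = 'Allen'
record.gather_fields(data)
record.write_record()                          # flushes the dirty record to disk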
392 -class _DbfMemo(object):
393 """Provides access to memo fields as dictionaries 394 must override _init, _get_memo, and _put_memo to 395 store memo contents to disk"""
396 - def _init(yo):
397 "initialize disk file usage"
398 - def _get_memo(yo, block):
399 "retrieve memo contents from disk"
400 - def _put_memo(yo, data):
401 "store memo contents to disk"
402 - def __init__(yo, meta):
403 "" 404 yo.meta = meta 405 yo.memory = {} 406 yo.nextmemo = 1 407 yo._init() 408 yo.meta.newmemofile = False
409 - def get_memo(yo, block, field):
410 "gets the memo in block" 411 if yo.meta.ignorememos or not block: 412 return '' 413 if yo.meta.ondisk: 414 return yo._get_memo(block) 415 else: 416 return yo.memory[block]
417 - def put_memo(yo, data):
418 "stores data in memo file, returns block number" 419 if yo.meta.ignorememos or data == '': 420 return 0 421 if yo.meta.inmemory: 422 thismemo = yo.nextmemo 423 yo.nextmemo += 1 424 yo.memory[thismemo] = data 425 else: 426 thismemo = yo._put_memo(data) 427 return thismemo
428 -class _Db3Memo(_DbfMemo):
429 - def _init(yo):
430 "dBase III specific" 431 yo.meta.memo_size= 512 432 yo.record_header_length = 2 433 if yo.meta.ondisk and not yo.meta.ignorememos: 434 if yo.meta.newmemofile: 435 yo.meta.mfd = open(yo.meta.memoname, 'w+b') 436 yo.meta.mfd.write(io.packLongInt(1) + '\x00' * 508) 437 else: 438 try: 439 yo.meta.mfd = open(yo.meta.memoname, 'r+b') 440 yo.meta.mfd.seek(0) 441 yo.nextmemo = io.unpackLongInt(yo.meta.mfd.read(4)) 442 except: 443 raise DbfError("memo file appears to be corrupt")
444 - def _get_memo(yo, block):
445 block = int(block) 446 yo.meta.mfd.seek(block * yo.meta.memo_size) 447 eom = -1 448 data = '' 449 while eom == -1: 450 newdata = yo.meta.mfd.read(yo.meta.memo_size) 451 if not newdata: 452 return data 453 data += newdata 454 eom = data.find('\x1a\x1a') 455 return data[:eom].rstrip()
456 - def _put_memo(yo, data):
457 data = data.rstrip() 458 length = len(data) + yo.record_header_length # room for two ^Z at end of memo 459 blocks = length // yo.meta.memo_size 460 if length % yo.meta.memo_size: 461 blocks += 1 462 thismemo = yo.nextmemo 463 yo.nextmemo = thismemo + blocks 464 yo.meta.mfd.seek(0) 465 yo.meta.mfd.write(io.packLongInt(yo.nextmemo)) 466 yo.meta.mfd.seek(thismemo * yo.meta.memo_size) 467 yo.meta.mfd.write(data) 468 yo.meta.mfd.write('\x1a\x1a') 469 double_check = yo._get_memo(thismemo) 470 if len(double_check) != len(data): 471 uhoh = open('dbf_memo_dump.err','wb') 472 uhoh.write('thismemo: %d' % thismemo) 473 uhoh.write('nextmemo: %d' % yo.nextmemo) 474 uhoh.write('saved: %d bytes' % len(data)) 475 uhoh.write(data) 476 uhoh.write('retrieved: %d bytes' % len(double_check)) 477 uhoh.write(double_check) 478 uhoh.close() 479 raise DbfError("unknown error: memo not saved") 480 return thismemo
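# --- illustrative sketch, not part of the original listing ---
# dBase III memos live in fixed 512-byte blocks; a memo needs enough whole
# blocks for the text plus the two-byte 0x1A 0x1A terminator accounted for by
# record_header_length above
text_length = 1000
needed = text_length + 2
blocks = needed // 512
if needed % 512:
    blocks += 1
print blocks                                   # 2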
class _VfpMemo(_DbfMemo):
    def _init(yo):
        "Visual Foxpro 6 specific"
        if yo.meta.ondisk and not yo.meta.ignorememos:
            yo.record_header_length = 8
            if yo.meta.newmemofile:
                if yo.meta.memo_size == 0:
                    yo.meta.memo_size = 1
                elif 1 < yo.meta.memo_size < 33:
                    yo.meta.memo_size *= 512
                yo.meta.mfd = open(yo.meta.memoname, 'w+b')
                nextmemo = 512 // yo.meta.memo_size
                if nextmemo * yo.meta.memo_size < 512:
                    nextmemo += 1
                yo.nextmemo = nextmemo
                yo.meta.mfd.write(io.packLongInt(nextmemo, bigendian=True) + '\x00\x00' + \
                        io.packShortInt(yo.meta.memo_size, bigendian=True) + '\x00' * 504)
            else:
                try:
                    yo.meta.mfd = open(yo.meta.memoname, 'r+b')
                    yo.meta.mfd.seek(0)
                    header = yo.meta.mfd.read(512)
                    yo.nextmemo = io.unpackLongInt(header[:4], bigendian=True)
                    yo.meta.memo_size = io.unpackShortInt(header[6:8], bigendian=True)
                except:
                    raise DbfError("memo file appears to be corrupt")
    def _get_memo(yo, block):
        yo.meta.mfd.seek(block * yo.meta.memo_size)
        header = yo.meta.mfd.read(8)
        length = io.unpackLongInt(header[4:], bigendian=True)
        return yo.meta.mfd.read(length)
    def _put_memo(yo, data):
        data = data.rstrip()    # no trailing whitespace
        yo.meta.mfd.seek(0)
        thismemo = io.unpackLongInt(yo.meta.mfd.read(4), bigendian=True)
        yo.meta.mfd.seek(0)
        length = len(data) + yo.record_header_length    # room for the 8-byte block header
        blocks = length // yo.meta.memo_size
        if length % yo.meta.memo_size:
            blocks += 1
        yo.meta.mfd.write(io.packLongInt(thismemo + blocks, bigendian=True))
        yo.meta.mfd.seek(thismemo * yo.meta.memo_size)
        yo.meta.mfd.write('\x00\x00\x00\x01' + io.packLongInt(len(data), bigendian=True) + data)
        return thismemo
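# A sketch of the 8-byte record header _VfpMemo._put_memo writes, rebuilt
# with the standard library's struct module instead of the package's io
# helpers (illustration only): a 4-byte big-endian type (1 == memo) followed
# by a 4-byte big-endian length, then the data itself.
def _example_vfp_memo_record(data):
    import struct
    return struct.pack('>LL', 1, len(data)) + data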
# Public classes
class DbfTable(object):
    """Provides a framework for dbf style tables."""
    _version = 'basic memory table'
    _versionabbv = 'dbf'
    _fieldtypes = {
            'D' : { 'Type':'Date', 'Init':io.addDate, 'Blank':Date.today, 'Retrieve':io.retrieveDate, 'Update':io.updateDate, },
            'L' : { 'Type':'Logical', 'Init':io.addLogical, 'Blank':bool, 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, },
            'M' : { 'Type':'Memo', 'Init':io.addMemo, 'Blank':str, 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, } }
    _memoext = ''
    _memotypes = tuple('M', )
    _memoClass = _DbfMemo
    _yesMemoMask = ''
    _noMemoMask = ''
    _fixed_fields = ('M','D','L')       # always same length in table
    _variable_fields = tuple()          # variable length in table
    _character_fields = tuple('M', )    # field representing character data
    _decimal_fields = tuple()           # text-based numeric fields
    _numeric_fields = tuple()           # fields representing a number
    _dbfTableHeader = array('c', '\x00' * 32)
    _dbfTableHeader[0] = '\x00'         # table type - none
    _dbfTableHeader[8:10] = array('c', io.packShortInt(33))
    _dbfTableHeader[10] = '\x01'        # record length -- one for delete flag
    _dbfTableHeader[29] = '\x00'        # code page -- none, using plain ascii
    _dbfTableHeader = _dbfTableHeader.tostring()
    _dbfTableHeaderExtra = ''
    _supported_tables = []
    _read_only = False
    _meta_only = False
    _use_deleted = True
    backup = False
    class _DbfLists(object):
        "implements the weakref structure for DbfLists"
        def __init__(yo):
            yo._lists = set()
        def __iter__(yo):
            yo._lists = set([s for s in yo._lists if s() is not None])
            return (s() for s in yo._lists if s() is not None)
        def __len__(yo):
            yo._lists = set([s for s in yo._lists if s() is not None])
            return len(yo._lists)
        def add(yo, new_list):
            yo._lists.add(weakref.ref(new_list))
            yo._lists = set([s for s in yo._lists if s() is not None])
    class _Indexen(object):
        "implements the weakref structure for separate indexes"
        def __init__(yo):
            yo._indexen = set()
        def __iter__(yo):
            yo._indexen = set([s for s in yo._indexen if s() is not None])
            return (s() for s in yo._indexen if s() is not None)
        def __len__(yo):
            yo._indexen = set([s for s in yo._indexen if s() is not None])
            return len(yo._indexen)
        def add(yo, new_list):
            yo._indexen.add(weakref.ref(new_list))
            yo._indexen = set([s for s in yo._indexen if s() is not None])
    class _MetaData(dict):
        blankrecord = None
        fields = None
        filename = None
        dfd = None
        memoname = None
        newmemofile = False
        memo = None
        mfd = None
        ignorememos = False
        memofields = None
        current = -1
594 - class _TableHeader(object):
595 - def __init__(yo, data):
596 if len(data) != 32: 597 raise DbfError('table header should be 32 bytes, but is %d bytes' % len(data)) 598 yo._data = array('c', data + '\x0d')
599 - def codepage(yo, cp=None):
600 "get/set code page of table" 601 if cp is None: 602 return yo._data[29] 603 else: 604 cp, sd, ld = _codepage_lookup(cp) 605 yo._data[29] = cp 606 return cp
607 @property
608 - def data(yo):
609 "main data structure" 610 date = io.packDate(Date.today()) 611 yo._data[1:4] = array('c', date) 612 return yo._data.tostring()
613 @data.setter
614 - def data(yo, bytes):
615 if len(bytes) < 32: 616 raise DbfError("length for data of %d is less than 32" % len(bytes)) 617 yo._data[:] = array('c', bytes)
618 @property
619 - def extra(yo):
620 "extra dbf info (located after headers, before data records)" 621 fieldblock = yo._data[32:] 622 for i in range(len(fieldblock)//32+1): 623 cr = i * 32 624 if fieldblock[cr] == '\x0d': 625 break 626 else: 627 raise DbfError("corrupt field structure") 628 cr += 33 # skip past CR 629 return yo._data[cr:].tostring()
630 @extra.setter
631 - def extra(yo, data):
632 fieldblock = yo._data[32:] 633 for i in range(len(fieldblock)//32+1): 634 cr = i * 32 635 if fieldblock[cr] == '\x0d': 636 break 637 else: 638 raise DbfError("corrupt field structure") 639 cr += 33 # skip past CR 640 yo._data[cr:] = array('c', data) # extra 641 yo._data[8:10] = array('c', io.packShortInt(len(yo._data))) # start
642 @property
643 - def field_count(yo):
644 "number of fields (read-only)" 645 fieldblock = yo._data[32:] 646 for i in range(len(fieldblock)//32+1): 647 cr = i * 32 648 if fieldblock[cr] == '\x0d': 649 break 650 else: 651 raise DbfError("corrupt field structure") 652 return len(fieldblock[:cr]) // 32
653 @property
654 - def fields(yo):
655 "field block structure" 656 fieldblock = yo._data[32:] 657 for i in range(len(fieldblock)//32+1): 658 cr = i * 32 659 if fieldblock[cr] == '\x0d': 660 break 661 else: 662 raise DbfError("corrupt field structure") 663 return fieldblock[:cr].tostring()
664 @fields.setter
665 - def fields(yo, block):
666 fieldblock = yo._data[32:] 667 for i in range(len(fieldblock)//32+1): 668 cr = i * 32 669 if fieldblock[cr] == '\x0d': 670 break 671 else: 672 raise DbfError("corrupt field structure") 673 cr += 32 # convert to indexing main structure 674 fieldlen = len(block) 675 if fieldlen % 32 != 0: 676 raise DbfError("fields structure corrupt: %d is not a multiple of 32" % fieldlen) 677 yo._data[32:cr] = array('c', block) # fields 678 yo._data[8:10] = array('c', io.packShortInt(len(yo._data))) # start 679 fieldlen = fieldlen // 32 680 recordlen = 1 # deleted flag 681 for i in range(fieldlen): 682 recordlen += ord(block[i*32+16]) 683 yo._data[10:12] = array('c', io.packShortInt(recordlen))
684 @property
685 - def record_count(yo):
686 "number of records (maximum 16,777,215)" 687 return io.unpackLongInt(yo._data[4:8].tostring())
688 @record_count.setter
689 - def record_count(yo, count):
690 yo._data[4:8] = array('c', io.packLongInt(count))
691 @property
692 - def record_length(yo):
693 "length of a record (read_only) (max of 65,535)" 694 return io.unpackShortInt(yo._data[10:12].tostring())
695 @property
696 - def start(yo):
697 "starting position of first record in file (must be within first 64K)" 698 return io.unpackShortInt(yo._data[8:10].tostring())
699 @start.setter
700 - def start(yo, pos):
701 yo._data[8:10] = array('c', io.packShortInt(pos))
702 @property
703 - def update(yo):
704 "date of last table modification (read-only)" 705 return io.unpackDate(yo._data[1:4].tostring())
706 @property
707 - def version(yo):
708 "dbf version" 709 return yo._data[0]
710 @version.setter
711 - def version(yo, ver):
712 yo._data[0] = ver
713 - class _Table(object):
714 "implements the weakref table for records"
715 - def __init__(yo, count, meta):
716 yo._meta = meta 717 yo._weakref_list = [weakref.ref(lambda x: None)] * count
718 - def __getitem__(yo, index):
719 maybe = yo._weakref_list[index]() 720 if maybe is None: 721 if index < 0: 722 index += yo._meta.header.record_count 723 size = yo._meta.header.record_length 724 location = index * size + yo._meta.header.start 725 yo._meta.dfd.seek(location) 726 if yo._meta.dfd.tell() != location: 727 raise ValueError("unable to seek to offset %d in file" % location) 728 bytes = yo._meta.dfd.read(size) 729 if not bytes: 730 raise ValueError("unable to read record data from %s at location %d" % (yo._meta.filename, location)) 731 maybe = _DbfRecord(recnum=index, layout=yo._meta, kamikaze=bytes, _fromdisk=True) 732 yo._weakref_list[index] = weakref.ref(maybe) 733 return maybe
734 - def append(yo, record):
735 yo._weakref_list.append(weakref.ref(record))
736 - def clear(yo):
737 yo._weakref_list[:] = []
    class DbfIterator(object):
        "returns records using current index"
        def __init__(yo, table):
            yo._table = table
            yo._index = -1
            yo._more_records = True
        def __iter__(yo):
            return yo
        def next(yo):
            while yo._more_records:
                yo._index += 1
                if yo._index >= len(yo._table):
                    yo._more_records = False
                    continue
                record = yo._table[yo._index]
                if not yo._table.use_deleted and record.has_been_deleted:
                    continue
                return record
            else:
                raise StopIteration
758 - def _buildHeaderFields(yo):
759 "constructs fieldblock for disk table" 760 fieldblock = array('c', '') 761 memo = False 762 yo._meta.header.version = chr(ord(yo._meta.header.version) & ord(yo._noMemoMask)) 763 for field in yo._meta.fields: 764 if yo._meta.fields.count(field) > 1: 765 raise DbfError("corrupted field structure (noticed in _buildHeaderFields)") 766 fielddef = array('c', '\x00' * 32) 767 fielddef[:11] = array('c', io.packStr(field)) 768 fielddef[11] = yo._meta[field]['type'] 769 fielddef[12:16] = array('c', io.packLongInt(yo._meta[field]['start'])) 770 fielddef[16] = chr(yo._meta[field]['length']) 771 fielddef[17] = chr(yo._meta[field]['decimals']) 772 fielddef[18] = chr(yo._meta[field]['flags']) 773 fieldblock.extend(fielddef) 774 if yo._meta[field]['type'] in yo._meta.memotypes: 775 memo = True 776 yo._meta.header.fields = fieldblock.tostring() 777 if memo: 778 yo._meta.header.version = chr(ord(yo._meta.header.version) | ord(yo._yesMemoMask)) 779 if yo._meta.memo is None: 780 yo._meta.memo = yo._memoClass(yo._meta)
781 - def _checkMemoIntegrity(yo):
782 "dBase III specific" 783 if yo._meta.header.version == '\x83': 784 try: 785 yo._meta.memo = yo._memoClass(yo._meta) 786 except: 787 yo._meta.dfd.close() 788 yo._meta.dfd = None 789 raise 790 if not yo._meta.ignorememos: 791 for field in yo._meta.fields: 792 if yo._meta[field]['type'] in yo._memotypes: 793 if yo._meta.header.version != '\x83': 794 yo._meta.dfd.close() 795 yo._meta.dfd = None 796 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos") 797 elif not os.path.exists(yo._meta.memoname): 798 yo._meta.dfd.close() 799 yo._meta.dfd = None 800 raise DbfError("Table structure corrupt: memo fields exist without memo file") 801 break
802 - def _initializeFields(yo):
803 "builds the FieldList of names, types, and descriptions from the disk file" 804 yo._meta.fields[:] = [] 805 offset = 1 806 fieldsdef = yo._meta.header.fields 807 if len(fieldsdef) % 32 != 0: 808 raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef)) 809 if len(fieldsdef) // 32 != yo.field_count: 810 raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32)) 811 for i in range(yo.field_count): 812 fieldblock = fieldsdef[i*32:(i+1)*32] 813 name = io.unpackStr(fieldblock[:11]) 814 type = fieldblock[11] 815 if not type in yo._meta.fieldtypes: 816 raise DbfError("Unknown field type: %s" % type) 817 start = offset 818 length = ord(fieldblock[16]) 819 offset += length 820 end = start + length 821 decimals = ord(fieldblock[17]) 822 flags = ord(fieldblock[18]) 823 if name in yo._meta.fields: 824 raise DbfError('Duplicate field name found: %s' % name) 825 yo._meta.fields.append(name) 826 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
827 - def _fieldLayout(yo, i):
828 "Returns field information Name Type(Length[,Decimals])" 829 name = yo._meta.fields[i] 830 type = yo._meta[name]['type'] 831 length = yo._meta[name]['length'] 832 decimals = yo._meta[name]['decimals'] 833 if type in yo._decimal_fields: 834 description = "%s %s(%d,%d)" % (name, type, length, decimals) 835 elif type in yo._fixed_fields: 836 description = "%s %s" % (name, type) 837 else: 838 description = "%s %s(%d)" % (name, type, length) 839 return description
840 - def _loadtable(yo):
841 "loads the records from disk to memory" 842 if yo._meta_only: 843 raise DbfError("%s has been closed, records are unavailable" % yo.filename) 844 dfd = yo._meta.dfd 845 header = yo._meta.header 846 dfd.seek(header.start) 847 allrecords = dfd.read() # kludge to get around mysterious errno 0 problems 848 dfd.seek(0) 849 length = header.record_length 850 for i in range(header.record_count): 851 record_data = allrecords[length*i:length*i+length] 852 yo._table.append(_DbfRecord(i, yo._meta, allrecords[length*i:length*i+length], _fromdisk=True)) 853 dfd.seek(0)
854 - def _list_fields(yo, specs, sep=','):
855 if specs is None: 856 specs = yo.field_names 857 elif isinstance(specs, str): 858 specs = specs.split(sep) 859 else: 860 specs = list(specs) 861 specs = [s.strip() for s in specs] 862 return specs
863 - def _update_disk(yo, headeronly=False):
864 "synchronizes the disk file with current data" 865 if yo._meta.inmemory: 866 return 867 fd = yo._meta.dfd 868 fd.seek(0) 869 fd.write(yo._meta.header.data) 870 if not headeronly: 871 for record in yo._table: 872 record._update_disk() 873 fd.flush() 874 fd.truncate(yo._meta.header.start + yo._meta.header.record_count * yo._meta.header.record_length) 875 if 'db3' in yo._versionabbv: 876 fd.seek(0, os.SEEK_END) 877 fd.write('\x1a') # required for dBase III 878 fd.flush() 879 fd.truncate(yo._meta.header.start + yo._meta.header.record_count * yo._meta.header.record_length + 1)
880
881 - def __contains__(yo, key):
882 return key in yo.field_names
883 - def __enter__(yo):
884 return yo
885 - def __exit__(yo, *exc_info):
886 yo.close()
887 - def __getattr__(yo, name):
888 if name in ('_table'): 889 if yo._meta.ondisk: 890 yo._table = yo._Table(len(yo), yo._meta) 891 else: 892 yo._table = [] 893 yo._loadtable() 894 return object.__getattribute__(yo, name)
895 - def __getitem__(yo, value):
896 if type(value) == int: 897 if not -yo._meta.header.record_count <= value < yo._meta.header.record_count: 898 raise IndexError("Record %d is not in table." % value) 899 return yo._table[value] 900 elif type(value) == slice: 901 sequence = List(desc='%s --> %s' % (yo.filename, value), field_names=yo.field_names) 902 yo._dbflists.add(sequence) 903 for index in range(len(yo))[value]: 904 record = yo._table[index] 905 if yo.use_deleted is True or not record.has_been_deleted: 906 sequence.append(record) 907 return sequence 908 else: 909 raise TypeError('type <%s> not valid for indexing' % type(value))
910 - def __init__(yo, filename=':memory:', field_specs=None, memo_size=128, ignore_memos=False, 911 read_only=False, keep_memos=False, meta_only=False, codepage=None):
912 """open/create dbf file 913 filename should include path if needed 914 field_specs can be either a ;-delimited string or a list of strings 915 memo_size is always 512 for db3 memos 916 ignore_memos is useful if the memo file is missing or corrupt 917 read_only will load records into memory, then close the disk file 918 keep_memos will also load any memo fields into memory 919 meta_only will ignore all records, keeping only basic table information 920 codepage will override whatever is set in the table itself""" 921 if filename[0] == filename[-1] == ':': 922 if field_specs is None: 923 raise DbfError("field list must be specified for memory tables") 924 elif type(yo) is DbfTable: 925 raise DbfError("only memory tables supported") 926 yo._dbflists = yo._DbfLists() 927 yo._indexen = yo._Indexen() 928 yo._meta = meta = yo._MetaData() 929 meta.table = weakref.ref(yo) 930 meta.filename = filename 931 meta.fields = [] 932 meta.fieldtypes = yo._fieldtypes 933 meta.fixed_fields = yo._fixed_fields 934 meta.variable_fields = yo._variable_fields 935 meta.character_fields = yo._character_fields 936 meta.decimal_fields = yo._decimal_fields 937 meta.numeric_fields = yo._numeric_fields 938 meta.memotypes = yo._memotypes 939 meta.ignorememos = ignore_memos 940 meta.memo_size = memo_size 941 meta.input_decoder = codecs.getdecoder(input_decoding) # from ascii to unicode 942 meta.output_encoder = codecs.getencoder(input_decoding) # and back to ascii 943 meta.return_ascii = return_ascii 944 meta.header = header = yo._TableHeader(yo._dbfTableHeader) 945 header.extra = yo._dbfTableHeaderExtra 946 header.data #force update of date 947 if filename[0] == filename[-1] == ':': 948 yo._table = [] 949 meta.ondisk = False 950 meta.inmemory = True 951 meta.memoname = filename 952 else: 953 base, ext = os.path.splitext(filename) 954 if ext == '': 955 meta.filename = base + '.dbf' 956 meta.memoname = base + yo._memoext 957 meta.ondisk = True 958 meta.inmemory = False 959 if field_specs: 960 if meta.ondisk: 961 meta.dfd = open(meta.filename, 'w+b') 962 meta.newmemofile = True 963 yo.add_fields(field_specs) 964 header.codepage(codepage or default_codepage) 965 cp, sd, ld = _codepage_lookup(meta.header.codepage()) 966 meta.decoder = codecs.getdecoder(sd) 967 meta.encoder = codecs.getencoder(sd) 968 return 969 try: 970 dfd = meta.dfd = open(meta.filename, 'r+b') 971 except IOError, e: 972 raise DbfError(str(e)) 973 dfd.seek(0) 974 meta.header = header = yo._TableHeader(dfd.read(32)) 975 if not header.version in yo._supported_tables: 976 dfd.close() 977 dfd = None 978 raise DbfError("Unsupported dbf type: %s [%x]" % (version_map.get(meta.header.version, 'Unknown: %s' % meta.header.version), ord(meta.header.version))) 979 cp, sd, ld = _codepage_lookup(meta.header.codepage()) 980 yo._meta.decoder = codecs.getdecoder(sd) 981 yo._meta.encoder = codecs.getencoder(sd) 982 fieldblock = dfd.read(header.start - 32) 983 for i in range(len(fieldblock)//32+1): 984 fieldend = i * 32 985 if fieldblock[fieldend] == '\x0d': 986 break 987 else: 988 raise DbfError("corrupt field structure in header") 989 if len(fieldblock[:fieldend]) % 32 != 0: 990 raise DbfError("corrupt field structure in header") 991 header.fields = fieldblock[:fieldend] 992 header.extra = fieldblock[fieldend+1:] # skip trailing \r 993 yo._initializeFields() 994 yo._checkMemoIntegrity() 995 meta.current = -1 996 if len(yo) > 0: 997 meta.current = 0 998 dfd.seek(0) 999 if meta_only: 1000 yo.close(keep_table=False, keep_memos=False) 1001 elif read_only: 1002 
yo.close(keep_table=True, keep_memos=keep_memos) 1003 if codepage is not None: 1004 cp, sd, ld = _codepage_lookup(codepage) 1005 yo._meta.decoder = codecs.getdecoder(sd) 1006 yo._meta.encoder = codecs.getencoder(sd)
1007
    def __iter__(yo):
        return yo.DbfIterator(yo)
    def __len__(yo):
        return yo._meta.header.record_count
    def __nonzero__(yo):
        return yo._meta.header.record_count != 0
1014 - def __repr__(yo):
1015 if yo._read_only: 1016 return __name__ + ".Table('%s', read_only=True)" % yo._meta.filename 1017 elif yo._meta_only: 1018 return __name__ + ".Table('%s', meta_only=True)" % yo._meta.filename 1019 else: 1020 return __name__ + ".Table('%s')" % yo._meta.filename
1021 - def __str__(yo):
1022 if yo._read_only: 1023 status = "read-only" 1024 elif yo._meta_only: 1025 status = "meta-only" 1026 else: 1027 status = "read/write" 1028 str = """ 1029 Table: %s 1030 Type: %s 1031 Codepage: %s 1032 Status: %s 1033 Last updated: %s 1034 Record count: %d 1035 Field count: %d 1036 Record length: %d """ % (yo.filename, version_map.get(yo._meta.header.version, 1037 'unknown - ' + hex(ord(yo._meta.header.version))), yo.codepage, status, 1038 yo.last_update, len(yo), yo.field_count, yo.record_length) 1039 str += "\n --Fields--\n" 1040 for i in range(len(yo._meta.fields)): 1041 str += "%11d) %s\n" % (i, yo._fieldLayout(i)) 1042 return str
1043 @property
1044 - def codepage(yo):
1045 return "%s (%s)" % code_pages[yo._meta.header.codepage()]
1046 @codepage.setter
1047 - def codepage(yo, cp):
1048 cp = code_pages[yo._meta.header.codepage(cp)][0] 1049 yo._meta.decoder = codecs.getdecoder(cp) 1050 yo._meta.encoder = codecs.getencoder(cp) 1051 yo._update_disk(headeronly=True)
1052 @property
1053 - def field_count(yo):
1054 "the number of fields in the table" 1055 return yo._meta.header.field_count
1056 @property
1057 - def field_names(yo):
1058 "a list of the fields in the table" 1059 return yo._meta.fields[:]
1060 @property
1061 - def filename(yo):
1062 "table's file name, including path (if specified on open)" 1063 return yo._meta.filename
1064 @property
1065 - def last_update(yo):
1066 "date of last update" 1067 return yo._meta.header.update
1068 @property
1069 - def memoname(yo):
1070 "table's memo name (if path included in filename on open)" 1071 return yo._meta.memoname
1072 @property
1073 - def record_length(yo):
1074 "number of bytes in a record" 1075 return yo._meta.header.record_length
1076 @property
1077 - def record_number(yo):
1078 "index number of the current record" 1079 return yo._meta.current
1080 @property
1081 - def supported_tables(yo):
1082 "allowable table types" 1083 return yo._supported_tables
1084 @property
1085 - def use_deleted(yo):
1086 "process or ignore deleted records" 1087 return yo._use_deleted
1088 @use_deleted.setter
1089 - def use_deleted(yo, new_setting):
1090 yo._use_deleted = new_setting
1091 @property
1092 - def version(yo):
1093 "returns the dbf type of the table" 1094 return yo._version
1095 - def add_fields(yo, field_specs):
1096 """adds field(s) to the table layout; format is Name Type(Length,Decimals)[; Name Type(Length,Decimals)[...]] 1097 backup table is created with _backup appended to name 1098 then modifies current structure""" 1099 all_records = [record for record in yo] 1100 if yo: 1101 yo.create_backup() 1102 yo._meta.blankrecord = None 1103 meta = yo._meta 1104 offset = meta.header.record_length 1105 fields = yo._list_fields(field_specs, sep=';') 1106 for field in fields: 1107 try: 1108 name, format = field.split() 1109 if name[0] == '_' or name[0].isdigit() or not name.replace('_','').isalnum(): 1110 raise DbfError("%s invalid: field names must start with a letter, and can only contain letters, digits, and _" % name) 1111 name = name.lower() 1112 if name in meta.fields: 1113 raise DbfError("Field '%s' already exists" % name) 1114 field_type = format[0].upper() 1115 if len(name) > 10: 1116 raise DbfError("Maximum field name length is 10. '%s' is %d characters long." % (name, len(name))) 1117 if not field_type in meta.fieldtypes.keys(): 1118 raise DbfError("Unknown field type: %s" % field_type) 1119 length, decimals = yo._meta.fieldtypes[field_type]['Init'](format) 1120 except ValueError: 1121 raise DbfError("invalid field specifier: %s" % field) 1122 start = offset 1123 end = offset + length 1124 offset = end 1125 meta.fields.append(name) 1126 meta[name] = {'type':field_type, 'start':start, 'length':length, 'end':end, 'decimals':decimals, 'flags':0} 1127 if meta[name]['type'] in yo._memotypes and meta.memo is None: 1128 meta.memo = yo._memoClass(meta) 1129 for record in yo: 1130 record[name] = meta.fieldtypes[field_type]['Blank']() 1131 yo._buildHeaderFields() 1132 yo._update_disk()
1133 - def append(yo, kamikaze='', drop=False, multiple=1):
1134 "adds <multiple> blank records, and fills fields with dict/tuple values if present" 1135 if not yo.field_count: 1136 raise DbfError("No fields defined, cannot append") 1137 empty_table = len(yo) == 0 1138 dictdata = False 1139 tupledata = False 1140 if not isinstance(kamikaze, _DbfRecord): 1141 if isinstance(kamikaze, dict): 1142 dictdata = kamikaze 1143 kamikaze = '' 1144 elif isinstance(kamikaze, tuple): 1145 tupledata = kamikaze 1146 kamikaze = '' 1147 newrecord = _DbfRecord(recnum=yo._meta.header.record_count, layout=yo._meta, kamikaze=kamikaze) 1148 yo._table.append(newrecord) 1149 yo._meta.header.record_count += 1 1150 if dictdata: 1151 newrecord.gather_fields(dictdata, drop=drop) 1152 elif tupledata: 1153 for index, item in enumerate(tupledata): 1154 newrecord[index] = item 1155 elif kamikaze == str: 1156 for field in yo._meta.memofields: 1157 newrecord[field] = '' 1158 elif kamikaze: 1159 for field in yo._meta.memofields: 1160 newrecord[field] = kamikaze[field] 1161 newrecord.write_record() 1162 multiple -= 1 1163 if multiple: 1164 data = newrecord._data 1165 single = yo._meta.header.record_count 1166 total = single + multiple 1167 while single < total: 1168 multi_record = _DbfRecord(single, yo._meta, kamikaze=data) 1169 yo._table.append(multi_record) 1170 for field in yo._meta.memofields: 1171 multi_record[field] = newrecord[field] 1172 single += 1 1173 multi_record.write_record() 1174 yo._meta.header.record_count = total # += multiple 1175 yo._meta.current = yo._meta.header.record_count - 1 1176 newrecord = multi_record 1177 yo._update_disk(headeronly=True) 1178 if empty_table: 1179 yo._meta.current = 0 1180 return newrecord
1181 - def bof(yo, _move=False):
1182 "moves record pointer to previous usable record; returns True if no more usable records" 1183 current = yo._meta.current 1184 try: 1185 while yo._meta.current > 0: 1186 yo._meta.current -= 1 1187 if yo.use_deleted or not yo.current().has_been_deleted: 1188 break 1189 else: 1190 yo._meta.current = -1 1191 return True 1192 return False 1193 finally: 1194 if not _move: 1195 yo._meta.current = current
1196 - def bottom(yo, get_record=False):
1197 """sets record pointer to bottom of table 1198 if get_record, seeks to and returns last (non-deleted) record 1199 DbfError if table is empty 1200 Bof if all records deleted and use_deleted is False""" 1201 yo._meta.current = yo._meta.header.record_count 1202 if get_record: 1203 try: 1204 return yo.prev() 1205 except Bof: 1206 yo._meta.current = yo._meta.header.record_count 1207 raise Eof()
1208 - def close(yo, keep_table=False, keep_memos=False):
1209 """closes disk files 1210 ensures table data is available if keep_table 1211 ensures memo data is available if keep_memos""" 1212 yo._meta.inmemory = True 1213 if keep_table: 1214 replacement_table = [] 1215 for record in yo._table: 1216 replacement_table.append(record) 1217 yo._table = replacement_table 1218 else: 1219 if yo._meta.ondisk: 1220 yo._meta_only = True 1221 if yo._meta.mfd is not None: 1222 if not keep_memos: 1223 yo._meta.ignorememos = True 1224 else: 1225 memo_fields = [] 1226 for field in yo.field_names: 1227 if yo.is_memotype(field): 1228 memo_fields.append(field) 1229 for record in yo: 1230 for field in memo_fields: 1231 record[field] = record[field] 1232 yo._meta.mfd.close() 1233 yo._meta.mfd = None 1234 if yo._meta.ondisk: 1235 yo._meta.dfd.close() 1236 yo._meta.dfd = None 1237 if keep_table: 1238 yo._read_only = True 1239 yo._meta.ondisk = False
1240 - def create_backup(yo, new_name=None, overwrite=False):
1241 "creates a backup table -- ignored if memory table" 1242 if yo.filename[0] == yo.filename[-1] == ':': 1243 return 1244 if new_name is None: 1245 new_name = os.path.splitext(yo.filename)[0] + '_backup.dbf' 1246 else: 1247 overwrite = True 1248 if overwrite or not yo.backup: 1249 bkup = open(new_name, 'wb') 1250 try: 1251 yo._meta.dfd.seek(0) 1252 copyfileobj(yo._meta.dfd, bkup) 1253 yo.backup = new_name 1254 finally: 1255 bkup.close()
1256 - def create_index(yo, key):
1257 return Index(yo, key)
1258 - def current(yo, index=False):
1259 "returns current logical record, or its index" 1260 if yo._meta.current < 0: 1261 raise Bof() 1262 elif yo._meta.current >= yo._meta.header.record_count: 1263 raise Eof() 1264 if index: 1265 return yo._meta.current 1266 return yo._table[yo._meta.current]
1267 - def delete_fields(yo, doomed):
1268 """removes field(s) from the table 1269 creates backup files with _backup appended to the file name, 1270 then modifies current structure""" 1271 doomed = yo._list_fields(doomed) 1272 for victim in doomed: 1273 if victim not in yo._meta.fields: 1274 raise DbfError("field %s not in table -- delete aborted" % victim) 1275 all_records = [record for record in yo] 1276 yo.create_backup() 1277 for victim in doomed: 1278 yo._meta.fields.pop(yo._meta.fields.index(victim)) 1279 start = yo._meta[victim]['start'] 1280 end = yo._meta[victim]['end'] 1281 for record in yo: 1282 record._data = record._data[:start] + record._data[end:] 1283 for field in yo._meta.fields: 1284 if yo._meta[field]['start'] == end: 1285 end = yo._meta[field]['end'] 1286 yo._meta[field]['start'] = start 1287 yo._meta[field]['end'] = start + yo._meta[field]['length'] 1288 start = yo._meta[field]['end'] 1289 yo._buildHeaderFields() 1290 yo._update_disk()
1291 - def eof(yo, _move=False):
1292 "moves record pointer to next usable record; returns True if no more usable records" 1293 current = yo._meta.current 1294 try: 1295 while yo._meta.current < yo._meta.header.record_count - 1: 1296 yo._meta.current += 1 1297 if yo.use_deleted or not yo.current().has_been_deleted: 1298 break 1299 else: 1300 yo._meta.current = yo._meta.header.record_count 1301 return True 1302 return False 1303 finally: 1304 if not _move: 1305 yo._meta.current = current
1306 - def export(yo, records=None, filename=None, field_specs=None, format='csv', header=True):
1307 """writes the table using CSV or tab-delimited format, using the filename 1308 given if specified, otherwise the table name""" 1309 if filename is not None: 1310 path, filename = os.path.split(filename) 1311 else: 1312 path, filename = os.path.split(yo.filename) 1313 filename = os.path.join(path, filename) 1314 field_specs = yo._list_fields(field_specs) 1315 if records is None: 1316 records = yo 1317 format = format.lower() 1318 if format not in ('csv', 'tab', 'fixed'): 1319 raise DbfError("export format: csv, tab, or fixed -- not %s" % format) 1320 if format == 'fixed': 1321 format = 'txt' 1322 base, ext = os.path.splitext(filename) 1323 if ext.lower() in ('', '.dbf'): 1324 filename = base + "." + format[:3] 1325 fd = open(filename, 'w') 1326 try: 1327 if format == 'csv': 1328 csvfile = csv.writer(fd, dialect='dbf') 1329 if header: 1330 csvfile.writerow(field_specs) 1331 for record in records: 1332 fields = [] 1333 for fieldname in field_specs: 1334 fields.append(record[fieldname]) 1335 csvfile.writerow(fields) 1336 elif format == 'tab': 1337 if header: 1338 fd.write('\t'.join(field_specs) + '\n') 1339 for record in records: 1340 fields = [] 1341 for fieldname in field_specs: 1342 fields.append(str(record[fieldname])) 1343 fd.write('\t'.join(fields) + '\n') 1344 else: # format == 'fixed' 1345 header = open("%s_layout.txt" % os.path.splitext(filename)[0], 'w') 1346 header.write("%-15s Size\n" % "Field Name") 1347 header.write("%-15s ----\n" % ("-" * 15)) 1348 sizes = [] 1349 for field in field_specs: 1350 size = yo.size(field)[0] 1351 sizes.append(size) 1352 header.write("%-15s %3d\n" % (field, size)) 1353 header.write('\nTotal Records in file: %d\n' % len(records)) 1354 header.close() 1355 for record in records: 1356 fields = [] 1357 for i, field_name in enumerate(field_specs): 1358 fields.append("%-*s" % (sizes[i], record[field_name])) 1359 fd.write(''.join(fields) + '\n') 1360 finally: 1361 fd.close() 1362 fd = None 1363 return len(records)
1364 - def get_record(yo, recno):
1365 "returns record at physical_index[recno]" 1366 return yo._table[recno]
    def goto(yo, criteria):
        """changes the record pointer to the first matching (non-deleted) record
        criteria should be either a tuple of tuple(value, field, func) triples,
        or an integer to go to"""
        if isinstance(criteria, int):
            if not -yo._meta.header.record_count <= criteria < yo._meta.header.record_count:
                raise IndexError("Record %d does not exist" % criteria)
            if criteria < 0:
                criteria += yo._meta.header.record_count
            yo._meta.current = criteria
            return yo.current()
        criteria = _normalize_tuples(tuples=criteria, length=3, filler=[_nop])
        specs = tuple([(field, func) for value, field, func in criteria])
        match = tuple([value for value, field, func in criteria])
        current = yo.current(index=True)
        matchlen = len(match)
        while not yo.eof(_move=True):       # advance through the remaining usable records
            record = yo.current()
            results = record(*specs)
            if results == match:
                return record
        return yo.goto(current)
1389 - def is_decimal(yo, name):
1390 "returns True if name is a variable-length field type" 1391 return yo._meta[name]['type'] in yo._decimal_fields
1392 - def is_memotype(yo, name):
1393 "returns True if name is a memo type field" 1394 return yo._meta[name]['type'] in yo._memotypes
1395 - def new(yo, filename, field_specs=None, codepage=None):
1396 "returns a new table of the same type" 1397 if field_specs is None: 1398 field_specs = yo.structure() 1399 if not (filename[0] == filename[-1] == ':'): 1400 path, name = os.path.split(filename) 1401 if path == "": 1402 filename = os.path.join(os.path.split(yo.filename)[0], filename) 1403 elif name == "": 1404 filename = os.path.join(path, os.path.split(yo.filename)[1]) 1405 if codepage is None: 1406 codepage = yo._meta.header.codepage()[0] 1407 return yo.__class__(filename, field_specs, codepage=codepage)
1408 - def next(yo):
1409 "set record pointer to next (non-deleted) record, and return it" 1410 if yo.eof(_move=True): 1411 raise Eof() 1412 return yo.current()
1413 - def open(yo):
1414 meta = yo._meta 1415 meta.inmemory = False 1416 meta.ondisk = True 1417 yo._read_only = False 1418 yo._meta_only = False 1419 if '_table' in dir(yo): 1420 del yo._table 1421 dfd = meta.dfd = open(meta.filename, 'r+b') 1422 dfd.seek(0) 1423 meta.header = header = yo._TableHeader(dfd.read(32)) 1424 if not header.version in yo._supported_tables: 1425 dfd.close() 1426 dfd = None 1427 raise DbfError("Unsupported dbf type: %s [%x]" % (version_map.get(meta.header.version, 'Unknown: %s' % meta.header.version), ord(meta.header.version))) 1428 cp, sd, ld = _codepage_lookup(meta.header.codepage()) 1429 meta.decoder = codecs.getdecoder(sd) 1430 meta.encoder = codecs.getencoder(sd) 1431 fieldblock = dfd.read(header.start - 32) 1432 for i in range(len(fieldblock)//32+1): 1433 fieldend = i * 32 1434 if fieldblock[fieldend] == '\x0d': 1435 break 1436 else: 1437 raise DbfError("corrupt field structure in header") 1438 if len(fieldblock[:fieldend]) % 32 != 0: 1439 raise DbfError("corrupt field structure in header") 1440 header.fields = fieldblock[:fieldend] 1441 header.extra = fieldblock[fieldend+1:] # skip trailing \r 1442 yo._initializeFields() 1443 yo._checkMemoIntegrity() 1444 meta.current = -1 1445 if len(yo) > 0: 1446 meta.current = 0 1447 dfd.seek(0)
1448
1449 - def pack(yo, _pack=True):
1450 "physically removes all deleted records" 1451 for dbfindex in yo._indexen: 1452 dbfindex.clear() 1453 newtable = [] 1454 index = 0 1455 offset = 0 # +1 for each purged record 1456 for record in yo._table: 1457 found = False 1458 if record.has_been_deleted and _pack: 1459 for dbflist in yo._dbflists: 1460 if dbflist._purge(record, record.record_number - offset, 1): 1461 found = True 1462 record._recnum = -1 1463 else: 1464 record._recnum = index 1465 newtable.append(record) 1466 index += 1 1467 if found: 1468 offset += 1 1469 found = False 1470 yo._table.clear() 1471 for record in newtable: 1472 yo._table.append(record) 1473 yo._meta.header.record_count = index 1474 yo._current = -1 1475 yo._update_disk() 1476 yo.reindex()
1477 - def prev(yo):
1478 "set record pointer to previous (non-deleted) record, and return it" 1479 if yo.bof(_move=True): 1480 raise Bof 1481 return yo.current()
1482 - def query(yo, sql_command=None, python=None):
1483 "uses exec to perform queries on the table" 1484 if sql_command: 1485 return sql(yo, sql_command) 1486 elif python is None: 1487 raise DbfError("query: python parameter must be specified") 1488 possible = List(desc="%s --> %s" % (yo.filename, python), field_names=yo.field_names) 1489 yo._dbflists.add(possible) 1490 query_result = {} 1491 select = 'query_result["keep"] = %s' % python 1492 g = {} 1493 use_deleted = yo.use_deleted 1494 for record in yo: 1495 query_result['keep'] = False 1496 g['query_result'] = query_result 1497 exec select in g, record 1498 if query_result['keep']: 1499 possible.append(record) 1500 record.write_record() 1501 return possible
1502 - def reindex(yo):
1503 for dbfindex in yo._indexen: 1504 dbfindex.reindex()
1505 - def rename_field(yo, oldname, newname):
1506 "renames an existing field" 1507 if yo: 1508 yo.create_backup() 1509 if not oldname in yo._meta.fields: 1510 raise DbfError("field --%s-- does not exist -- cannot rename it." % oldname) 1511 if newname[0] == '_' or newname[0].isdigit() or not newname.replace('_','').isalnum(): 1512 raise DbfError("field names cannot start with _ or digits, and can only contain the _, letters, and digits") 1513 newname = newname.lower() 1514 if newname in yo._meta.fields: 1515 raise DbfError("field --%s-- already exists" % newname) 1516 if len(newname) > 10: 1517 raise DbfError("maximum field name length is 10. '%s' is %d characters long." % (newname, len(newname))) 1518 yo._meta[newname] = yo._meta[oldname] 1519 yo._meta.fields[yo._meta.fields.index(oldname)] = newname 1520 yo._buildHeaderFields() 1521 yo._update_disk(headeronly=True)
1522 - def size(yo, field):
1523 "returns size of field as a tuple of (length, decimals)" 1524 if field in yo: 1525 return (yo._meta[field]['length'], yo._meta[field]['decimals']) 1526 raise DbfError("%s is not a field in %s" % (field, yo.filename))
1527 - def structure(yo, fields=None):
1528 """return list of fields suitable for creating same table layout 1529 @param fields: list of fields or None for all fields""" 1530 field_specs = [] 1531 fields = yo._list_fields(fields) 1532 try: 1533 for name in fields: 1534 field_specs.append(yo._fieldLayout(yo.field_names.index(name))) 1535 except ValueError: 1536 raise DbfError("field --%s-- does not exist" % name) 1537 return field_specs
1538 - def top(yo, get_record=False):
1539 """sets record pointer to top of table; if get_record, seeks to and returns first (non-deleted) record 1540 DbfError if table is empty 1541 Eof if all records are deleted and use_deleted is False""" 1542 yo._meta.current = -1 1543 if get_record: 1544 try: 1545 return yo.next() 1546 except Eof: 1547 yo._meta.current = -1 1548 raise Bof()
1549 - def type(yo, field):
1550 "returns type of field" 1551 if field in yo: 1552 return yo._meta[field]['type'] 1553 raise DbfError("%s is not a field in %s" % (field, yo.filename))
1554 - def zap(yo, areyousure=False):
1555 """removes all records from table -- this cannot be undone! 1556 areyousure must be True, else error is raised""" 1557 if areyousure: 1558 if yo._meta.inmemory: 1559 yo._table = [] 1560 else: 1561 yo._table.clear() 1562 yo._meta.header.record_count = 0 1563 yo._current = -1 1564 yo._update_disk() 1565 else: 1566 raise DbfError("You must say you are sure to wipe the table")
1567 -class Db3Table(DbfTable):
1568 """Provides an interface for working with dBase III tables.""" 1569 _version = 'dBase III Plus' 1570 _versionabbv = 'db3' 1571 _fieldtypes = { 1572 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter}, 1573 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate}, 1574 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical}, 1575 'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo}, 1576 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addNumeric} } 1577 _memoext = '.dbt' 1578 _memotypes = ('M',) 1579 _memoClass = _Db3Memo 1580 _yesMemoMask = '\x80' 1581 _noMemoMask = '\x7f' 1582 _fixed_fields = ('D','L','M') 1583 _variable_fields = ('C','N') 1584 _character_fields = ('C','M') 1585 _decimal_fields = ('N',) 1586 _numeric_fields = ('N',) 1587 _dbfTableHeader = array('c', '\x00' * 32) 1588 _dbfTableHeader[0] = '\x03' # version - dBase III w/o memo's 1589 _dbfTableHeader[8:10] = array('c', io.packShortInt(33)) 1590 _dbfTableHeader[10] = '\x01' # record length -- one for delete flag 1591 _dbfTableHeader[29] = '\x03' # code page -- 437 US-MS DOS 1592 _dbfTableHeader = _dbfTableHeader.tostring() 1593 _dbfTableHeaderExtra = '' 1594 _supported_tables = ['\x03', '\x83'] 1595 _read_only = False 1596 _meta_only = False 1597 _use_deleted = True
1598 - def _checkMemoIntegrity(yo):
1599 "dBase III specific" 1600 if yo._meta.header.version == '\x83': 1601 try: 1602 yo._meta.memo = yo._memoClass(yo._meta) 1603 except: 1604 yo._meta.dfd.close() 1605 yo._meta.dfd = None 1606 raise 1607 if not yo._meta.ignorememos: 1608 for field in yo._meta.fields: 1609 if yo._meta[field]['type'] in yo._memotypes: 1610 if yo._meta.header.version != '\x83': 1611 yo._meta.dfd.close() 1612 yo._meta.dfd = None 1613 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos") 1614 elif not os.path.exists(yo._meta.memoname): 1615 yo._meta.dfd.close() 1616 yo._meta.dfd = None 1617 raise DbfError("Table structure corrupt: memo fields exist without memo file") 1618 break
1619 - def _initializeFields(yo):
1620 "builds the FieldList of names, types, and descriptions" 1621 yo._meta.fields[:] = [] 1622 offset = 1 1623 fieldsdef = yo._meta.header.fields 1624 if len(fieldsdef) % 32 != 0: 1625 raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef)) 1626 if len(fieldsdef) // 32 != yo.field_count: 1627 raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32)) 1628 for i in range(yo.field_count): 1629 fieldblock = fieldsdef[i*32:(i+1)*32] 1630 name = io.unpackStr(fieldblock[:11]) 1631 type = fieldblock[11] 1632 if not type in yo._meta.fieldtypes: 1633 raise DbfError("Unknown field type: %s" % type) 1634 start = offset 1635 length = ord(fieldblock[16]) 1636 offset += length 1637 end = start + length 1638 decimals = ord(fieldblock[17]) 1639 flags = ord(fieldblock[18]) 1640 yo._meta.fields.append(name) 1641 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
1642 -class FpTable(DbfTable):
1643 'Provides an interface for working with FoxPro 2 tables' 1644 _version = 'Foxpro' 1645 _versionabbv = 'fp' 1646 _fieldtypes = { 1647 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter}, 1648 'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric}, 1649 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric}, 1650 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical}, 1651 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate}, 1652 'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addVfpMemo}, 1653 'G' : {'Type':'General', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo}, 1654 'P' : {'Type':'Picture', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo}, 1655 '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} } 1656 _memoext = '.fpt' 1657 _memotypes = ('G','M','P') 1658 _memoClass = _VfpMemo 1659 _yesMemoMask = '\xf5' # 1111 0101 1660 _noMemoMask = '\x03' # 0000 0011 1661 _fixed_fields = ('B','D','G','I','L','M','P','T','Y') 1662 _variable_fields = ('C','F','N') 1663 _character_fields = ('C','M') # field representing character data 1664 _decimal_fields = ('F','N') 1665 _numeric_fields = ('B','F','I','N','Y') 1666 _supported_tables = ('\x03', '\xf5') 1667 _dbfTableHeader = array('c', '\x00' * 32) 1668 _dbfTableHeader[0] = '\x30' # version - Foxpro 6 0011 0000 1669 _dbfTableHeader[8:10] = array('c', io.packShortInt(33+263)) 1670 _dbfTableHeader[10] = '\x01' # record length -- one for delete flag 1671 _dbfTableHeader[29] = '\x03' # code page -- 437 US-MS DOS 1672 _dbfTableHeader = _dbfTableHeader.tostring() 1673 _dbfTableHeaderExtra = '\x00' * 263 1674 _use_deleted = True
1675 - def _checkMemoIntegrity(yo):
1676 if os.path.exists(yo._meta.memoname): 1677 try: 1678 yo._meta.memo = yo._memoClass(yo._meta) 1679 except: 1680 yo._meta.dfd.close() 1681 yo._meta.dfd = None 1682 raise 1683 if not yo._meta.ignorememos: 1684 for field in yo._meta.fields: 1685 if yo._meta[field]['type'] in yo._memotypes: 1686 if not os.path.exists(yo._meta.memoname): 1687 yo._meta.dfd.close() 1688 yo._meta.dfd = None 1689 raise DbfError("Table structure corrupt: memo fields exist without memo file") 1690 break
1691 - def _initializeFields(yo):
1692 "builds the FieldList of names, types, and descriptions" 1693 yo._meta.fields[:] = [] 1694 offset = 1 1695 fieldsdef = yo._meta.header.fields 1696 if len(fieldsdef) % 32 != 0: 1697 raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef)) 1698 if len(fieldsdef) // 32 != yo.field_count: 1699 raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32)) 1700 for i in range(yo.field_count): 1701 fieldblock = fieldsdef[i*32:(i+1)*32] 1702 name = io.unpackStr(fieldblock[:11]) 1703 type = fieldblock[11] 1704 if not type in yo._meta.fieldtypes: 1705 raise DbfError("Unknown field type: %s" % type) 1706 elif type == '0': 1707 return # ignore nullflags 1708 start = offset 1709 length = ord(fieldblock[16]) 1710 offset += length 1711 end = start + length 1712 decimals = ord(fieldblock[17]) 1713 flags = ord(fieldblock[18]) 1714 yo._meta.fields.append(name) 1715 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
1716
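# A sketch of the 32-byte field descriptor layout that the various
# _initializeFields methods above and below parse (standard dBase/FoxPro
# layout; this helper is illustrative and not part of the table classes).
def _example_parse_field_descriptor(block32):
    name = block32[:11].rstrip('\x00')        # field name, null padded
    field_type = block32[11]                  # single type character (C, N, D, ...)
    length = ord(block32[16])                 # field length in bytes
    decimals = ord(block32[17])               # decimal count for numeric types
    flags = ord(block32[18])                  # field flags
    return name, field_type, length, decimals, flags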
1717 -class VfpTable(DbfTable):
1718 'Provides an interface for working with Visual FoxPro 6 tables' 1719 _version = 'Visual Foxpro v6' 1720 _versionabbv = 'vfp' 1721 _fieldtypes = { 1722 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter}, 1723 'Y' : {'Type':'Currency', 'Retrieve':io.retrieveCurrency, 'Update':io.updateCurrency, 'Blank':Decimal(), 'Init':io.addVfpCurrency}, 1724 'B' : {'Type':'Double', 'Retrieve':io.retrieveDouble, 'Update':io.updateDouble, 'Blank':float, 'Init':io.addVfpDouble}, 1725 'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric}, 1726 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric}, 1727 'I' : {'Type':'Integer', 'Retrieve':io.retrieveInteger, 'Update':io.updateInteger, 'Blank':int, 'Init':io.addVfpInteger}, 1728 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical}, 1729 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate}, 1730 'T' : {'Type':'DateTime', 'Retrieve':io.retrieveVfpDateTime, 'Update':io.updateVfpDateTime, 'Blank':DateTime.now, 'Init':io.addVfpDateTime}, 1731 'M' : {'Type':'Memo', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo}, 1732 'G' : {'Type':'General', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo}, 1733 'P' : {'Type':'Picture', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo}, 1734 '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} } 1735 _memoext = '.fpt' 1736 _memotypes = ('G','M','P') 1737 _memoClass = _VfpMemo 1738 _yesMemoMask = '\x30' # 0011 0000 1739 _noMemoMask = '\x30' # 0011 0000 1740 _fixed_fields = ('B','D','G','I','L','M','P','T','Y') 1741 _variable_fields = ('C','F','N') 1742 _character_fields = ('C','M') # field representing character data 1743 _decimal_fields = ('F','N') 1744 _numeric_fields = ('B','F','I','N','Y') 1745 _supported_tables = ('\x30',) 1746 _dbfTableHeader = array('c', '\x00' * 32) 1747 _dbfTableHeader[0] = '\x30' # version - Foxpro 6 0011 0000 1748 _dbfTableHeader[8:10] = array('c', io.packShortInt(33+263)) 1749 _dbfTableHeader[10] = '\x01' # record length -- one for delete flag 1750 _dbfTableHeader[29] = '\x03' # code page -- 437 US-MS DOS 1751 _dbfTableHeader = _dbfTableHeader.tostring() 1752 _dbfTableHeaderExtra = '\x00' * 263 1753 _use_deleted = True
1754 - def _checkMemoIntegrity(yo):
1755 if os.path.exists(yo._meta.memoname): 1756 try: 1757 yo._meta.memo = yo._memoClass(yo._meta) 1758 except: 1759 yo._meta.dfd.close() 1760 yo._meta.dfd = None 1761 raise 1762 if not yo._meta.ignorememos: 1763 for field in yo._meta.fields: 1764 if yo._meta[field]['type'] in yo._memotypes: 1765 if not os.path.exists(yo._meta.memoname): 1766 yo._meta.dfd.close() 1767 yo._meta.dfd = None 1768 raise DbfError("Table structure corrupt: memo fields exist without memo file") 1769 break
1770 - def _initializeFields(yo):
1771 "builds the FieldList of names, types, and descriptions" 1772 yo._meta.fields[:] = [] 1773 offset = 1 1774 fieldsdef = yo._meta.header.fields 1775 for i in range(yo.field_count): 1776 fieldblock = fieldsdef[i*32:(i+1)*32] 1777 name = io.unpackStr(fieldblock[:11]) 1778 type = fieldblock[11] 1779 if not type in yo._meta.fieldtypes: 1780 raise DbfError("Unknown field type: %s" % type) 1781 elif type == '0': 1782 return # ignore nullflags 1783 start = io.unpackLongInt(fieldblock[12:16]) 1784 length = ord(fieldblock[16]) 1785 offset += length 1786 end = start + length 1787 decimals = ord(fieldblock[17]) 1788 flags = ord(fieldblock[18]) 1789 yo._meta.fields.append(name) 1790 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
1791 -class List(object):
1792 "list of Dbf records, with set-like behavior" 1793 _desc = ''
1794 - def __init__(yo, new_records=None, desc=None, key=None, field_names=None):
1795 yo.field_names = field_names 1796 yo._list = [] 1797 yo._set = set() 1798 if key is not None: 1799 yo.key = key 1800 if key.__doc__ is None: 1801 key.__doc__ = 'unknown' 1802 key = yo.key 1803 yo._current = -1 1804 if isinstance(new_records, yo.__class__) and key is new_records.key: 1805 yo._list = new_records._list[:] 1806 yo._set = new_records._set.copy() 1807 yo._current = 0 1808 elif new_records is not None: 1809 for record in new_records: 1810 value = key(record) 1811 item = (record.record_table, record.record_number, value) 1812 if value not in yo._set: 1813 yo._set.add(value) 1814 yo._list.append(item) 1815 yo._current = 0 1816 if desc is not None: 1817 yo._desc = desc
1818 - def __add__(yo, other):
1819 key = yo.key 1820 if isinstance(other, (DbfTable, list)): 1821 other = yo.__class__(other, key=key) 1822 if isinstance(other, yo.__class__): 1823 result = yo.__class__() 1824 result._set = yo._set.copy() 1825 result._list[:] = yo._list[:] 1826 result.key = yo.key 1827 if key is other.key: # same key? just compare key values 1828 for item in other._list: 1829 if item[2] not in result._set: 1830 result._set.add(item[2]) 1831 result._list.append(item) 1832 else: # different keys, use this list's key on other's records 1833 for rec in other: 1834 value = key(rec) 1835 if value not in result._set: 1836 result._set.add(value) 1837 result._list.append((rec.record_table, rec.record_number, value)) 1838 result._current = 0 if result else -1 1839 return result 1840 return NotImplemented
1841 - def __contains__(yo, record):
1842 if isinstance(record, tuple): 1843 item = record 1844 else: 1845 item = yo.key(record) 1846 return item in yo._set
    def __delitem__(yo, key):
        if isinstance(key, int):
            item = yo._list.pop(key)
            yo._set.remove(item[2])
        elif isinstance(key, slice):
            yo._set.difference_update([item[2] for item in yo._list[key]])
            yo._list.__delitem__(key)
        else:
            raise TypeError
1856 - def __getitem__(yo, key):
1857 if isinstance(key, int): 1858 count = len(yo._list) 1859 if not -count <= key < count: 1860 raise IndexError("Record %d is not in list." % key) 1861 return yo._get_record(*yo._list[key]) 1862 elif isinstance(key, slice): 1863 result = yo.__class__() 1864 result._list[:] = yo._list[key] 1865 result._set = set(result._list) 1866 result.key = yo.key 1867 result._current = 0 if result else -1 1868 return result 1869 else: 1870 raise TypeError('indices must be integers')
1871 - def __iter__(yo):
1872 return (table.get_record(recno) for table, recno, value in yo._list)
1873 - def __len__(yo):
1874 return len(yo._list)
1875 - def __nonzero__(yo):
1876 return len(yo) > 0
1877 - def __radd__(yo, other):
1878 return yo.__add__(other)
1879 - def __repr__(yo):
1880 if yo._desc: 1881 return "%s(key=%s - %s - %d records)" % (yo.__class__, yo.key.__doc__, yo._desc, len(yo._list)) 1882 else: 1883 return "%s(key=%s - %d records)" % (yo.__class__, yo.key.__doc__, len(yo._list))
1884 - def __rsub__(yo, other):
1885 key = yo.key 1886 if isinstance(other, (DbfTable, list)): 1887 other = yo.__class__(other, key=key) 1888 if isinstance(other, yo.__class__): 1889 result = yo.__class__() 1890 result._list[:] = other._list[:] 1891 result._set = other._set.copy() 1892 result.key = key 1893 lost = set() 1894 if key is other.key: 1895 for item in yo._list: 1896 if item[2] in result._list: 1897 result._set.remove(item[2]) 1898 lost.add(item) 1899 else: 1900 for rec in other: 1901 value = key(rec) 1902 if value in result._set: 1903 result._set.remove(value) 1904 lost.add((rec.record_table, rec.record_number, value)) 1905 result._list = [item for item in result._list if item not in lost] 1906 result._current = 0 if result else -1 1907 return result 1908 return NotImplemented
1909 - def __sub__(yo, other):
1910 key = yo.key 1911 if isinstance(other, (DbfTable, list)): 1912 other = yo.__class__(other, key=key) 1913 if isinstance(other, yo.__class__): 1914 result = yo.__class__() 1915 result._list[:] = yo._list[:] 1916 result._set = yo._set.copy() 1917 result.key = key 1918 lost = set() 1919 if key is other.key: 1920 for item in other._list: 1921 if item[2] in result._set: 1922 result._set.remove(item[2]) 1923 lost.add(item[2]) 1924 else: 1925 for rec in other: 1926 value = key(rec) 1927 if value in result._set: 1928 result._set.remove(value) 1929 lost.add(value) 1930 result._list = [item for item in result._list if item[2] not in lost] 1931 result._current = 0 if result else -1 1932 return result 1933 return NotImplemented
1934 - def _maybe_add(yo, item):
1935 if item[2] not in yo._set: 1936 yo._set.add(item[2]) 1937 yo._list.append(item)
1938 - def _get_record(yo, table=None, rec_no=None, value=None):
1939 if table is rec_no is None: 1940 table, rec_no, value = yo._list[yo._current] 1941 return table.get_record(rec_no)
1942 - def _purge(yo, record, old_record_number, offset):
1943 partial = record.record_table, old_record_number 1944 records = sorted(yo._list, key=lambda item: (item[0], item[1])) 1945 for item in records: 1946 if partial == item[:2]: 1947 found = True 1948 break 1949 elif partial[0] is item[0] and partial[1] < item[1]: 1950 found = False 1951 break 1952 else: 1953 found = False 1954 if found: 1955 yo._list.pop(yo._list.index(item)) 1956 yo._set.remove(item[2]) 1957 start = records.index(item) + found 1958 for item in records[start:]: 1959 if item[0] is not partial[0]: # into other table's records 1960 break 1961 i = yo._list.index(item) 1962 yo._set.remove(item[2]) 1963 item = item[0], (item[1] - offset), item[2] 1964 yo._list[i] = item 1965 yo._set.add(item[2]) 1966 return found
1967 - def append(yo, new_record):
1968 yo._maybe_add((new_record.record_table, new_record.record_number, yo.key(new_record))) 1969 if yo._current == -1 and yo._list: 1970 yo._current = 0 1971 return new_record
1972 - def bottom(yo):
1973 if yo._list: 1974 yo._current = len(yo._list) - 1 1975 return yo._get_record() 1976 raise DbfError("dbf.List is empty")
1977 - def clear(yo):
1978 yo._list = [] 1979 yo._set = set() 1980 yo._current = -1
1981 - def current(yo):
1982 if yo._current < 0: 1983 raise Bof() 1984 elif yo._current == len(yo._list): 1985 raise Eof() 1986 return yo._get_record()
    def extend(yo, new_records):
        key = yo.key
        if isinstance(new_records, yo.__class__):
            if key is new_records.key:      # same key? just compare key values
                for item in new_records._list:
                    yo._maybe_add(item)
            else:                           # different keys, use this list's key on other's records
                for rec in new_records:
                    value = key(rec)
                    yo._maybe_add((rec.record_table, rec.record_number, value))
        else:
            for record in new_records:
                value = key(record)
                yo._maybe_add((record.record_table, record.record_number, value))
        if yo._current == -1 and yo._list:
            yo._current = 0
2003 - def goto(yo, index_number):
2004 if yo._list: 2005 if 0 <= index_number <= len(yo._list): 2006 yo._current = index_number 2007 return yo._get_record() 2008 raise DbfError("index %d not in dbf.List of %d records" % (index_number, len(yo._list))) 2009 raise DbfError("dbf.List is empty")
2010 - def index(yo, sort=None, reverse=False):
2011 "sort= ((field_name, func), (field_name, func),) | 'ORIGINAL'" 2012 if sort is None: 2013 results = [] 2014 for field, func in yo._meta.index: 2015 results.append("%s(%s)" % (func.__name__, field)) 2016 return ', '.join(results + ['reverse=%s' % yo._meta.index_reversed]) 2017 yo._meta.index_reversed = reverse 2018 if sort == 'ORIGINAL': 2019 yo._index = range(yo._meta.header.record_count) 2020 yo._meta.index = 'ORIGINAL' 2021 if reverse: 2022 yo._index.reverse() 2023 return 2024 new_sort = _normalize_tuples(tuples=sort, length=2, filler=[_nop]) 2025 yo._meta.index = tuple(new_sort) 2026 yo._meta.orderresults = [''] * len(yo) 2027 for record in yo: 2028 yo._meta.orderresults[record.record_number] = record() 2029 yo._index.sort(key=lambda i: yo._meta.orderresults[i], reverse=reverse)
2030 - def index(yo, record, start=None, stop=None):
2031 item = record.record_table, record.record_number, yo.key(record) 2032 if start is None: 2033 start = 0 2034 if stop is None: 2035 stop = len(yo._list) 2036 return yo._list.index(item, start, stop)
2037 - def insert(yo, i, record):
2038 item = record.record_table, record.record_number, yo.key(record) 2039 if item not in yo._set: 2040 yo._set.add(item[2]) 2041 yo._list.insert(i, item)
2042 - def key(yo, record):
2043 "table_name, record_number" 2044 return record.record_table, record.record_number
2045 - def next(yo):
2046 if yo._current < len(yo._list): 2047 yo._current += 1 2048 if yo._current < len(yo._list): 2049 return yo._get_record() 2050 raise Eof()
2051 - def pop(yo, index=None):
2052 if index is None: 2053 table, recno, value = yo._list.pop() 2054 else: 2055 table, recno, value = yo._list.pop(index) 2056 yo._set.remove(value) 2057 return yo._get_record(table, recno, value)
2058 - def prev(yo):
2059 if yo._current >= 0: 2060 yo._current -= 1 2061 if yo._current > -1: 2062 return yo._get_record() 2063 raise Bof()
2064 - def remove(yo, record):
2065 item = record.record_table, record.record_number, yo.key(record) 2066 yo._list.remove(item) 2067 yo._set.remove(item[2])
2068 - def reverse(yo):
2069 return yo._list.reverse()
2070 - def top(yo):
2071 if yo._list: 2072 yo._current = 0 2073 return yo._get_record() 2074 raise DbfError("dbf.List is empty")
2075 - def sort(yo, key=None, reverse=False):
2076 if key is None: 2077 return yo._list.sort(reverse=reverse) 2078 return yo._list.sort(key=lambda item: key(item[0].get_record(item[1])), reverse=reverse)
2079
2080 -class DbfCsv(csv.Dialect):
2081 "csv format for exporting tables" 2082 delimiter = ',' 2083 doublequote = True 2084 escapechar = None 2085 lineterminator = '\n' 2086 quotechar = '"' 2087 skipinitialspace = True 2088 quoting = csv.QUOTE_NONNUMERIC
2089 -class Index(object):
2090 - class IndexIterator(object):
2091 "returns records using this index"
2092 - def __init__(yo, table, records):
2093 yo.table = table 2094 yo.records = records 2095 yo.index = 0
2096 - def __iter__(yo):
2097 return yo
2098 - def next(yo):
2099 while yo.index < len(yo.records): 2100 record = yo.table.get_record(yo.records[yo.index]) 2101 yo.index += 1 2102 if not yo.table.use_deleted and record.has_been_deleted: 2103 continue 2104 return record 2105 else: 2106 raise StopIteration
2107 - def __init__(yo, table, key, field_names=None):
2108 yo._table = table 2109 yo._values = [] # ordered list of values 2110 yo._rec_by_val = [] # matching record numbers 2111 yo._records = {} # record numbers:values 2112 yo.__doc__ = key.__doc__ or 'unknown' 2113 yo.key = key 2114 yo.field_names = field_names or table.field_names 2115 for record in table: 2116 value = key(record) 2117 if value is DoNotIndex: 2118 continue 2119 rec_num = record.record_number 2120 if not isinstance(value, tuple): 2121 value = (value, ) 2122 vindex = bisect_right(yo._values, value) 2123 yo._values.insert(vindex, value) 2124 yo._rec_by_val.insert(vindex, rec_num) 2125 yo._records[rec_num] = value 2126 table._indexen.add(yo)
2127 - def __call__(yo, record):
2128 rec_num = record.record_number 2129 if rec_num in yo._records: 2130 value = yo._records[rec_num] 2131 vindex = bisect_left(yo._values, value) 2132 yo._values.pop(vindex) 2133 yo._rec_by_val.pop(vindex) 2134 value = yo.key(record) 2135 if value is DoNotIndex: 2136 return 2137 if not isinstance(value, tuple): 2138 value = (value, ) 2139 vindex = bisect_right(yo._values, value) 2140 yo._values.insert(vindex, value) 2141 yo._rec_by_val.insert(vindex, rec_num) 2142 yo._records[rec_num] = value
2143 - def __contains__(yo, match):
2144 if isinstance(match, _DbfRecord): 2145 if match.record_table is yo._table: 2146 return match.record_number in yo._records 2147 match = yo.key(match) 2148 elif not isinstance(match, tuple): 2149 match = (match, ) 2150 return yo.find(match) != -1
2151 - def __getitem__(yo, key):
2152 if isinstance(key, int): 2153 count = len(yo._values) 2154 if not -count <= key < count: 2155 raise IndexError("Record %d is not in list." % key) 2156 rec_num = yo._rec_by_val[key] 2157 return yo._table.get_record(rec_num) 2158 elif isinstance(key, slice): 2159 result = List(field_names=yo._table.field_names) 2160 yo._table._dbflists.add(result) 2161 start, stop, step = key.start, key.stop, key.step 2162 if start is None: start = 0 2163 if stop is None: stop = len(yo._rec_by_val) 2164 if step is None: step = 1 2165 for loc in range(start, stop, step): 2166 record = yo._table.get_record(yo._rec_by_val[loc]) 2167 result._maybe_add(item=(yo._table, yo._rec_by_val[loc], result.key(record))) 2168 result._current = 0 if result else -1 2169 return result 2170 elif isinstance (key, (str, unicode, tuple, _DbfRecord)): 2171 if isinstance(key, _DbfRecord): 2172 key = yo.key(key) 2173 elif not isinstance(key, tuple): 2174 key = (key, ) 2175 loc = yo.find(key) 2176 if loc == -1: 2177 raise KeyError(key) 2178 return yo._table.get_record(yo._rec_by_val[loc]) 2179 else: 2180 raise TypeError('indices must be integers, match objects must by strings or tuples')
2181 - def __enter__(yo):
2182 return yo
2183 - def __exit__(yo, *exc_info):
2184 yo._table.close() 2185 yo._values[:] = [] 2186 yo._rec_by_val[:] = [] 2187 yo._records.clear() 2188 return False
2189 - def __iter__(yo):
2190 return yo.IndexIterator(yo._table, yo._rec_by_val)
2191 - def __len__(yo):
2192 return len(yo._records)
2193 - def _partial_match(yo, target, match):
2194 target = target[:len(match)] 2195 if isinstance(match[-1], (str, unicode)): 2196 target = list(target) 2197 target[-1] = target[-1][:len(match[-1])] 2198 target = tuple(target) 2199 return target == match
2200 - def _purge(yo, rec_num):
2201 value = yo._records.get(rec_num) 2202 if value is not None: 2203 vindex = bisect_left(yo._values, value) 2204 del yo._records[rec_num] 2205 yo._values.pop(vindex) 2206 yo._rec_by_val.pop(vindex)
2207 - def _search(yo, match, lo=0, hi=None):
2208 if hi is None: 2209 hi = len(yo._values) 2210 return bisect_left(yo._values, match, lo, hi)
2211 - def clear(yo):
2212 "removes all entries from index" 2213 yo._values[:] = [] 2214 yo._rec_by_val[:] = [] 2215 yo._records.clear()
2216 - def close(yo):
2217 yo._table.close()
2218 - def find(yo, match, partial=False):
2219 "returns numeric index of (partial) match, or -1" 2220 if isinstance(match, _DbfRecord): 2221 if match.record_number in yo._records: 2222 return yo._values.index(yo._records[match.record_number]) 2223 else: 2224 return -1 2225 if not isinstance(match, tuple): 2226 match = (match, ) 2227 loc = yo._search(match) 2228 while loc < len(yo._values) and yo._values[loc] == match: 2229 if not yo._table.use_deleted and yo._table.get_record(yo._rec_by_val[loc]).has_been_deleted: 2230 loc += 1 2231 continue 2232 return loc 2233 if partial: 2234 while loc < len(yo._values) and yo._partial_match(yo._values[loc], match): 2235 if not yo._table.use_deleted and yo._table.get_record(yo._rec_by_val[loc]).has_been_deleted: 2236 loc += 1 2237 continue 2238 return loc 2239 return -1
2240 - def find_index(yo, match):
2241 "returns numeric index of either (partial) match, or position of where match would be" 2242 if isinstance(match, _DbfRecord): 2243 if match.record_number in yo._records: 2244 return yo._values.index(yo._records[match.record_number]) 2245 else: 2246 match = yo.key(match) 2247 if not isinstance(match, tuple): 2248 match = (match, ) 2249 loc = yo._search(match) 2250 return loc
2251 @classmethod
2252 - def from_file(cls, table, index_file):
2253 2254 def get_idx_records(data, length, howmany): 2255 ptr = 0 2256 current = 0 2257 while current < howmany: 2258 key = data[ptr:ptr+length].replace('\x00','') 2259 recnum = io.unpackLongInt(data[ptr+length:ptr+length+4], bigendian=True) 2260 yield key, recnum 2261 ptr += length + 4 2262 current += 1
2263 2264 def next_item(idx_file, node_loc, keylen): 2265 idx_file.seek(node_loc) 2266 data_chunk = idx_file.read(512) 2267 attributes = io.unpackShortInt(data_chunk[:2]) 2268 howmany = io.unpackShortInt(data_chunk[2:4]) 2269 if attributes in (2, 3): 2270 for key, recnum in get_idx_records(data_chunk[12:512], keylen, howmany): 2271 yield key, recnum 2272 else: 2273 for ignore, next_node in get_idx_records(data_chunk[12:512], keylen, howmany): 2274 print ignore, next_node 2275 for key, recnum in next_item(idx_file, next_node, keylen): 2276 yield key, recnum
2277 2278 2279 idx = object.__new__(cls) 2280 #- idx.key = lambda rec: DoNotIndex 2281 data = open(index_file, 'rb') 2282 header = data.read(512) 2283 rootnode = io.unpackLongInt(header[:4]) 2284 keylen = io.unpackShortInt(header[12:14]) 2285 idx.__doc__ = header[16:236].replace('\x00','') 2286 for_expr = header[236:456].replace('\x00','') 2287 if for_expr: 2288 idx.__doc__ += ' for ' + for_expr.replace('=','==') 2289 for rec in next_item(data, rootnode, keylen): 2290 print rec 2291
2292 - def index(yo, match, partial=False):
2293 "returns numeric index of (partial) match, or raises ValueError" 2294 loc = yo.find(match, partial) 2295 if loc == -1: 2296 if isinstance(match, _DbfRecord): 2297 raise ValueError("table <%s> record [%d] not in index <%s>" % (yo._table.filename, match.record_number, yo.__doc__)) 2298 else: 2299 raise ValueError("match criteria <%s> not in index" % (match, )) 2300 return loc
2301 - def reindex(yo):
2302 "reindexes all records" 2303 for record in yo._table: 2304 yo(record)
2305 - def query(yo, sql_command=None, python=None):
2306 """recognized sql commands are SELECT, UPDATE, REPLACE, INSERT, DELETE, and RECALL""" 2307 if sql_command: 2308 return sql(yo, sql_command) 2309 elif python is None: 2310 raise DbfError("query: python parameter must be specified") 2311 possible = List(desc="%s --> %s" % (yo._table.filename, python), field_names=yo._table.field_names) 2312 yo._table._dbflists.add(possible) 2313 query_result = {} 2314 select = 'query_result["keep"] = %s' % python 2315 g = {} 2316 for record in yo: 2317 query_result['keep'] = False 2318 g['query_result'] = query_result 2319 exec select in g, record 2320 if query_result['keep']: 2321 possible.append(record) 2322 record.write_record() 2323 return possible
2324 - def search(yo, match, partial=False):
2325 "returns dbf.List of all (partially) matching records" 2326 result = List(field_names=yo._table.field_names) 2327 yo._table._dbflists.add(result) 2328 if not isinstance(match, tuple): 2329 match = (match, ) 2330 loc = yo._search(match) 2331 if loc == len(yo._values): 2332 return result 2333 while loc < len(yo._values) and yo._values[loc] == match: 2334 record = yo._table.get_record(yo._rec_by_val[loc]) 2335 if not yo._table.use_deleted and record.has_been_deleted: 2336 loc += 1 2337 continue 2338 result._maybe_add(item=(yo._table, yo._rec_by_val[loc], result.key(record))) 2339 loc += 1 2340 if partial: 2341 while loc < len(yo._values) and yo._partial_match(yo._values[loc], match): 2342 record = yo._table.get_record(yo._rec_by_val[loc]) 2343 if not yo._table.use_deleted and record.has_been_deleted: 2344 loc += 1 2345 continue 2346 result._maybe_add(item=(yo._table, yo._rec_by_val[loc], result.key(record))) 2347 loc += 1 2348 return result
2349 2350 csv.register_dialect('dbf', DbfCsv) 2351 2352 sql_functions = { 2353 'select':None, 2354 'update':None, 2355 'insert':None, 2356 'delete':None, 2357 'count': None}
2358 -def sql_criteria(records, criteria):
2359 "creates a function matching the sql criteria" 2360 function = """def func(records): 2361 \"\"\"%s\"\"\" 2362 matched = List(field_names=records[0].field_names) 2363 for rec in records: 2364 %s 2365 2366 if %s: 2367 matched.append(rec) 2368 return matched""" 2369 fields = [] 2370 for field in records[0].field_names: 2371 if field in criteria: 2372 fields.append(field) 2373 fields = '\n '.join(['%s = rec.%s' % (field, field) for field in fields]) 2374 g = {'List':List} 2375 function %= (criteria, fields, criteria) 2376 #- print function 2377 exec function in g 2378 return g['func']
2379
2380 -def sql_cmd(records, command):
2381 "creates a function matching to apply command to each record in records" 2382 function = """def func(records): 2383 \"\"\"%s\"\"\" 2384 changed = 0 2385 for rec in records: 2386 %s 2387 2388 %s 2389 2390 %s 2391 changed += rec.write_record() 2392 return changed""" 2393 fields = [] 2394 for field in records[0].field_names: 2395 if field in command: 2396 fields.append(field) 2397 pre_fields = '\n '.join(['%s = rec.%s' % (field, field) for field in fields]) 2398 post_fields = '\n '.join(['rec.%s = %s' % (field, field) for field in fields]) 2399 g = dbf.sql_user_functions.copy() 2400 if '=' not in command and ' with ' in command.lower(): 2401 offset = command.lower().index(' with ') 2402 command = command[:offset] + ' = ' + command[offset+6:] 2403 function %= (command, pre_fields, command, post_fields) 2404 #- print function 2405 exec function in g 2406 return g['func']
2407
2408 -def sql(records, command):
2409 """recognized sql commands are SELECT, UPDATE, INSERT, DELETE, and RECALL""" 2410 table = records[0].record_table 2411 sql_command = command 2412 no_condition = False 2413 if ' for ' in command: 2414 command, condition = command.split(' for ') 2415 condition = sql_criteria(records, condition) 2416 else: 2417 def condition(records): 2418 return records[:]
2419 no_condition = True 2420 name, command = command.split(' ', 1) 2421 name = name.lower() 2422 field_names = table.field_names 2423 if name == 'select': 2424 if command.strip() != '*': 2425 field_names = command.replace(' ','').split(',') 2426 def command(records): 2427 return 2428 else: 2429 command = sql_cmd(records, command) 2430 if name not in ('delete','insert','recall','select','update','replace'): 2431 raise DbfError("unrecognized sql command: %s" % name.upper()) 2432 if name == 'insert' and not no_condition: 2433 raise DbfError("FOR clause not allowed with INSERT") 2434 possible = List(desc=sql_command, field_names=field_names) 2435 tables = set() 2436 if name == 'insert': 2437 raise DbfError("INSERT not currently implemented") 2438 record = table.append() 2439 command(record) 2440 record.write_record() 2441 record.check_index() 2442 possible.append(record) 2443 changed = 0 2444 else: 2445 possible = condition(records) 2446 possible.field_names = field_names 2447 changed = command(possible) 2448 for record in possible: 2449 tables.add(record.record_table) 2450 if name == 'delete': 2451 record.delete_record() 2452 elif name == 'recall': 2453 record.undelete_record() 2454 elif name == 'select': 2455 pass 2456 elif name == 'update' or name == 'replace': 2457 pass 2458 #command(record) 2459 else: 2460 raise DbfError("unrecognized sql command: %s" % name.upper()) 2461 record.write_record() 2462 for list_table in tables: 2463 list_table._dbflists.add(possible) 2464 possible.modified = changed 2465 return possible
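# Illustrative example of the dispatch above (the 'balance' field is hypothetical):
#     sql(records, "update balance with balance * 1.05 for balance > 0")
# splits the text at ' for ' into a command and a criteria clause, rewrites 'with' to '='
# when building the update function, and returns a dbf.List of the records that matched.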
2466 -def _nop(value):
2467 "returns parameter unchanged" 2468 return value
2469 -def _normalize_tuples(tuples, length, filler):
2470 "ensures each tuple is the same length, using filler[-missing] for the gaps" 2471 final = [] 2472 for t in tuples: 2473 if len(t) < length: 2474 final.append( tuple([item for item in t] + filler[len(t)-length:]) ) 2475 else: 2476 final.append(t) 2477 return tuple(final)
2478 -def _codepage_lookup(cp):
2479 if cp not in code_pages: 2480 for code_page in sorted(code_pages.keys()): 2481 sd, ld = code_pages[code_page] 2482 if cp == sd or cp == ld: 2483 if sd is None: 2484 raise DbfError("Unsupported codepage: %s" % ld) 2485 cp = code_page 2486 break 2487 else: 2488 raise DbfError("Unsupported codepage: %s" % cp) 2489 sd, ld = code_pages[cp] 2490 return cp, sd, ld
2491 -def ascii(new_setting=None):
2492 "get/set return_ascii setting" 2493 global return_ascii 2494 if new_setting is None: 2495 return return_ascii 2496 else: 2497 return_ascii = new_setting
2498 -def codepage(cp=None):
2499 "get/set default codepage for any new tables" 2500 global default_codepage 2501 cp, sd, ld = _codepage_lookup(cp or default_codepage) 2502 default_codepage = sd 2503 return "%s (LDID: 0x%02x - %s)" % (sd, ord(cp), ld)
2504 -def encoding(cp=None):
2505 "get/set default encoding for non-unicode strings passed into a table" 2506 global input_decoding 2507 cp, sd, ld = _codepage_lookup(cp or input_decoding) 2508 default_codepage = sd 2509 return "%s (LDID: 0x%02x - %s)" % (sd, ord(cp), ld)
2510 -class _Db4Table(DbfTable):
2511 version = 'dBase IV w/memos (non-functional)' 2512 _versionabbv = 'db4' 2513 _fieldtypes = { 2514 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter}, 2515 'Y' : {'Type':'Currency', 'Retrieve':io.retrieveCurrency, 'Update':io.updateCurrency, 'Blank':Decimal(), 'Init':io.addVfpCurrency}, 2516 'B' : {'Type':'Double', 'Retrieve':io.retrieveDouble, 'Update':io.updateDouble, 'Blank':float, 'Init':io.addVfpDouble}, 2517 'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric}, 2518 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric}, 2519 'I' : {'Type':'Integer', 'Retrieve':io.retrieveInteger, 'Update':io.updateInteger, 'Blank':int, 'Init':io.addVfpInteger}, 2520 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical}, 2521 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate}, 2522 'T' : {'Type':'DateTime', 'Retrieve':io.retrieveVfpDateTime, 'Update':io.updateVfpDateTime, 'Blank':DateTime.now, 'Init':io.addVfpDateTime}, 2523 'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo}, 2524 'G' : {'Type':'General', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo}, 2525 'P' : {'Type':'Picture', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo}, 2526 '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} } 2527 _memoext = '.dbt' 2528 _memotypes = ('G','M','P') 2529 _memoClass = _VfpMemo 2530 _yesMemoMask = '\x8b' # 0011 0000 2531 _noMemoMask = '\x04' # 0011 0000 2532 _fixed_fields = ('B','D','G','I','L','M','P','T','Y') 2533 _variable_fields = ('C','F','N') 2534 _character_fields = ('C','M') # field representing character data 2535 _decimal_fields = ('F','N') 2536 _numeric_fields = ('B','F','I','N','Y') 2537 _supported_tables = ('\x04', '\x8b') 2538 _dbfTableHeader = ['\x00'] * 32 2539 _dbfTableHeader[0] = '\x8b' # version - Foxpro 6 0011 0000 2540 _dbfTableHeader[10] = '\x01' # record length -- one for delete flag 2541 _dbfTableHeader[29] = '\x03' # code page -- 437 US-MS DOS 2542 _dbfTableHeader = ''.join(_dbfTableHeader) 2543 _dbfTableHeaderExtra = '' 2544 _use_deleted = True
2545 - def _checkMemoIntegrity(yo):
2546 "dBase III specific" 2547 if yo._meta.header.version == '\x8b': 2548 try: 2549 yo._meta.memo = yo._memoClass(yo._meta) 2550 except: 2551 yo._meta.dfd.close() 2552 yo._meta.dfd = None 2553 raise 2554 if not yo._meta.ignorememos: 2555 for field in yo._meta.fields: 2556 if yo._meta[field]['type'] in yo._memotypes: 2557 if yo._meta.header.version != '\x8b': 2558 yo._meta.dfd.close() 2559 yo._meta.dfd = None 2560 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos") 2561 elif not os.path.exists(yo._meta.memoname): 2562 yo._meta.dfd.close() 2563 yo._meta.dfd = None 2564 raise DbfError("Table structure corrupt: memo fields exist without memo file") 2565 break
2566

dbf-0.88.16/dbf/html/dbf.exceptions.DbfWarning-class.html0000666000175100017510000001472611477216672022051 0ustar margamarga dbf.exceptions.DbfWarning
Package dbf :: Module exceptions :: Class DbfWarning

Class DbfWarning

source code

              object --+        
                       |        
exceptions.BaseException --+    
                           |    
        exceptions.Exception --+
                               |
                              DbfWarning
Known Subclasses:

Normal operations elicit this response

Instance Methods [hide private]

Inherited from exceptions.Exception: __init__, __new__

Inherited from exceptions.BaseException: __delattr__, __getattribute__, __getitem__, __getslice__, __reduce__, __repr__, __setattr__, __setstate__, __str__

Inherited from object: __hash__, __reduce_ex__

Properties [hide private]

Inherited from exceptions.BaseException: args, message

Inherited from object: __class__

dbf-0.88.16/dbf/html/dbf.tables._DbfMemo-class.html0000666000175100017510000002732711477216672020572 0ustar margamarga dbf.tables._DbfMemo
Package dbf :: Module tables :: Class _DbfMemo

Class _DbfMemo

source code

object --+
         |
        _DbfMemo
Known Subclasses:

Provides access to memo fields as dictionaries; subclasses must override _init, _get_memo, and _put_memo to store memo contents to disk
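A minimal subclass sketch (illustrative only; the in-memory storage is invented for the example, and it assumes _put_memo should return the block number, as put_memo's description indicates):

    class DictMemo(_DbfMemo):
        "keeps memo contents in a plain dict instead of a memo file"
        def _init(yo):
            yo._blocks = {}
            yo._next = 1
        def _get_memo(yo, block):
            return yo._blocks.get(block, '')
        def _put_memo(yo, data):
            block = yo._next
            yo._next += 1
            yo._blocks[block] = data
            return block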

Instance Methods [hide private]
 
_init(yo)
initialize disk file usage
source code
 
_get_memo(yo, block)
retrieve memo contents from disk
source code
 
_put_memo(yo, data)
store memo contents to disk
source code
 
__init__(yo, meta)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code
 
get_memo(yo, block, field)
gets the memo in block
source code
 
put_memo(yo, data)
stores data in memo file, returns block number
source code

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __repr__, __setattr__, __str__

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

__init__(yo, meta)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__

dbf-0.88.16/dbf/html/dbf.old.dates.DateTime-class.html0000666000175100017510000007352511477216672021222 0ustar margamarga dbf.old.dates.DateTime
Package dbf :: Package old :: Module dates :: Class DateTime

Class DateTime

source code

object --+
         |
        DateTime

adds null capable datetime.datetime constructs
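For example (behavior shown in the dbf.dates source included later in this package; the old module documents the same wrapper):

    empty = DateTime()                  # no arguments -> null value, false in boolean context
    str(empty)                          # 'no datetime'
    when = DateTime(2010, 12, 1, 8, 30)
    when.year                           # 2010, delegated to the wrapped datetime.datetime
    str(when)                           # '2010-12-01T08:30:00'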

Instance Methods [hide private]
 
__add__(yo, other) source code
 
__eq__(yo, other) source code
 
__getattr__(yo, name) source code
 
__ge__(yo, other) source code
 
__gt__(yo, other) source code
 
__hash__(yo)
hash(x)
source code
 
__le__(yo, other) source code
 
__lt__(yo, other) source code
 
__ne__(yo, other) source code
 
__nonzero__(yo) source code
 
__radd__(yo, other) source code
 
__rsub__(yo, other) source code
 
__repr__(yo)
repr(x)
source code
 
__str__(yo)
str(x)
source code
 
__sub__(yo, other) source code
 
date(yo) source code
 
datetime(yo) source code
 
time(yo) source code

Inherited from object: __delattr__, __getattribute__, __init__, __reduce__, __reduce_ex__, __setattr__

Class Methods [hide private]
 
combine(cls, date, time) source code
 
fromordinal(cls, number) source code
 
fromtimestamp(cls, timestamp) source code
 
now(cls) source code
 
utcnow(cls) source code
 
today(cls) source code
Static Methods [hide private]
a new object with type S, a subtype of T
__new__(cls, year=None, month=0, day=0, hour=0, minute=0, second=0, microsec=0)
year may be a datetime.datetime
source code
Class Variables [hide private]
  max = DateTime(9999, 12, 31, 23, 59, 59, 4, 365, -1)
  min = DateTime(1, 1, 1, 0, 0, 0, 0, 1, -1)
Properties [hide private]
  _datetime

Inherited from object: __class__

Method Details [hide private]

__new__(cls, year=None, month=0, day=0, hour=0, minute=0, second=0, microsec=0)
Static Method

source code 

year may be a datetime.datetime

Returns: a new object with type S, a subtype of T
Overrides: object.__new__

__hash__(yo)
(Hashing function)

source code 

hash(x)

Overrides: object.__hash__
(inherited documentation)

__repr__(yo)
(Representation operator)

source code 

repr(x)

Overrides: object.__repr__
(inherited documentation)

__str__(yo)
(Informal representation operator)

source code 

str(x)

Overrides: object.__str__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old.tables._Db4Table-class.html0000666000175100017510000007716211477216672021421 0ustar margamarga dbf.old.tables._Db4Table
Package dbf :: Package old :: Module tables :: Class _Db4Table
[hide private]

Class _Db4Table

source code

object --+    
         |    
  DbfTable --+
             |
            _Db4Table

Nested Classes [hide private]
  _memoClass
Provides access to memo fields as dictionaries; subclasses must override _init, _get_memo, and _put_memo to store memo contents to disk

Inherited from DbfTable: DbfIterator

Inherited from DbfTable (private): _DbfLists, _Indexen, _MetaData, _Table, _TableHeader

Instance Methods [hide private]
 
_checkMemoIntegrity(yo)
dBase III specific
source code

Inherited from DbfTable: __contains__, __enter__, __exit__, __getattr__, __getitem__, __init__, __iter__, __len__, __nonzero__, __repr__, __str__, add_fields, append, bof, bottom, close, create_backup, create_index, current, delete_fields, eof, export, get_record, goto, is_decimal, is_memotype, new, next, open, pack, prev, query, reindex, rename_field, size, structure, top, type, zap

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __setattr__

Class Variables [hide private]
  version = 'dBase IV w/memos (non-functional)'
  _versionabbv = 'db4'
  _fieldtypes = {'0': {'Blank': <type 'int'>, 'Init': None, 'Ret...
  _memoext = '.dbt'
  _memotypes = ('G', 'M', 'P')
  _yesMemoMask = '\x8b'
  _noMemoMask = '\x04'
  _fixed_fields = ('B', 'D', 'G', 'I', 'L', 'M', 'P', 'T', 'Y')
  _variable_fields = ('C', 'F', 'N')
  _character_fields = ('C', 'M')
  _decimal_fields = ('F', 'N')
  _numeric_fields = ('B', 'F', 'I', 'N', 'Y')
  _supported_tables = ('\x04', '\x8b')
  _dbfTableHeader = '\x8b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0...
  _dbfTableHeaderExtra = ''
  _use_deleted = True

Inherited from DbfTable: codepage, field_count, field_names, filename, last_update, memoname, record_length, record_number, supported_tables, use_deleted

Inherited from DbfTable (private): _backed_up, _meta_only, _read_only, _version

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

_checkMemoIntegrity(yo)

source code 

dBase III specific

Overrides: DbfTable._checkMemoIntegrity

Class Variable Details [hide private]

_fieldtypes

Value:
{'0': {'Blank': <type 'int'>,
       'Init': None,
       'Retrieve': <function unsupportedType at 0x00ECA130>,
       'Type': '_NullFlags',
       'Update': <function unsupportedType at 0x00ECA130>},
 'B': {'Blank': <type 'float'>,
       'Init': <function addVfpDouble at 0x00ECA830>,
       'Retrieve': <function retrieveDouble at 0x00ECA2F0>,
...

_dbfTableHeader

Value:
'\x8b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00'

dbf-0.88.16/dbf/html/dbf._io-pysrc.html0000666000175100017510000032352311477216672016447 0ustar margamarga dbf._io
Package dbf :: Module _io

Source Code for Module dbf._io

  1  """Routines for saving, retrieving, and creating fields""" 
  2   
  3  import struct 
  4  from decimal import Decimal 
  5  from dbf.exceptions import DbfError, DataOverflow 
  6  from dbf.dates import Date, DateTime, Time 
  7  from math import floor 
  8   
  9   
 10  # Constants 
 11  VFPTIME = 1721425 
 12   
13 -def packShortInt(value, bigendian=False):
14 "Returns a two-bye integer from the value, or raises DbfError" 15 # 256 / 65,536 16 if value > 65535: 17 raise DateOverflow("Maximum Integer size exceeded. Possible: 65535. Attempted: %d" % value) 18 if bigendian: 19 return struct.pack('>H', value) 20 else: 21 return struct.pack('<H', value)
22 -def packLongInt(value, bigendian=False):
23 "Returns a four-bye integer from the value, or raises DbfError" 24 # 256 / 65,536 / 16,777,216 25 if value > 4294967295: 26 raise DateOverflow("Maximum Integer size exceeded. Possible: 4294967295. Attempted: %d" % value) 27 if bigendian: 28 return struct.pack('>L', value) 29 else: 30 return struct.pack('<L', value)
31 -def packDate(date):
32 "Returns a group of three bytes, in integer form, of the date" 33 return "%c%c%c" % (date.year-1900, date.month, date.day)
34 -def packStr(string):
35 "Returns an 11 byte, upper-cased, null padded string suitable for field names; raises DbfError if the string is bigger than 10 bytes" 36 if len(string) > 10: 37 raise DbfError("Maximum string size is ten characters -- %s has %d characters" % (string, len(string))) 38 return struct.pack('11s', string.upper())
39 -def unpackShortInt(bytes, bigendian=False):
40 "Returns the value in the two-byte integer passed in" 41 if bigendian: 42 return struct.unpack('>H', bytes)[0] 43 else: 44 return struct.unpack('<H', bytes)[0]
45 -def unpackLongInt(bytes, bigendian=False):
46 "Returns the value in the four-byte integer passed in" 47 if bigendian: 48 return int(struct.unpack('>L', bytes)[0]) 49 else: 50 return int(struct.unpack('<L', bytes)[0])
51 -def unpackDate(bytestr):
52 "Returns a Date() of the packed three-byte date passed in" 53 year, month, day = struct.unpack('<BBB', bytestr) 54 year += 1900 55 return Date(year, month, day)
56 -def unpackStr(chars):
57 "Returns a normal, lower-cased string from a null-padded byte string" 58 return struct.unpack('%ds' % len(chars), chars)[0].replace('\x00','').lower()
59 -def convertToBool(value):
60 """Returns boolean true or false; normal rules apply to non-string values; string values 61 must be 'y','t', 'yes', or 'true' (case insensitive) to be True""" 62 if type(value) == str: 63 return bool(value.lower() in ['t', 'y', 'true', 'yes']) 64 else: 65 return bool(value)
66 -def unsupportedType(something, field, memo=None):
67 "called if a data type is not supported for that style of table" 68 raise DbfError('field type is not supported.')
69 -def retrieveCharacter(bytes, fielddef={}, memo=None):
70 "Returns the string in bytes with trailing white space removed" 71 return bytes.tostring().rstrip()
72 -def updateCharacter(string, fielddef, memo=None):
73 "returns the string, truncating if string is longer than it's field" 74 if type(string) != str: 75 raise DbfError("incompatible type: %s" % type(string)) 76 return string.rstrip()
77 -def retrieveCurrency(bytes, fielddef={}, memo=None):
78 value = struct.unpack('<q', bytes)[0] 79 return Decimal("%de-4" % value)
80 -def updateCurrency(value, fielddef={}, memo=None):
81 currency = int(value * 10000) 82 if not -9223372036854775808 < currency < 9223372036854775808: 83 raise DataOverflow("value %s is out of bounds" % value) 84 return struct.pack('<q', currency)
85 -def retrieveDate(bytes, fielddef={}, memo=None):
86 "Returns the ascii coded date as a Date object" 87 return Date.fromymd(bytes.tostring())
88 -def updateDate(moment, fielddef={}, memo=None):
89 "returns the Date or datetime.date object ascii-encoded (yyyymmdd)" 90 if moment: 91 return "%04d%02d%02d" % moment.timetuple()[:3] 92 return ' '
93 -def retrieveDouble(bytes, fielddef={}, memo=None):
94 return struct.unpack('<d', bytes)[0]
95 -def updateDouble(value, fielddef={}, memo=None):
96 if not (type(value) in (int, long, float)): 97 raise DbfError("incompatible type: %s" % type(value)) 98 return struct.pack('<d', value)
99 -def retrieveInteger(bytes, fielddef={}, memo=None):
100 "Returns the binary number stored in bytes in little-endian format" 101 return struct.unpack('<i', bytes)[0]
102 -def updateInteger(value, fielddef={}, memo=None):
103 "returns value in little-endian binary format" 104 if not (type(value) in (int, long)): 105 raise DbfError("incompatible type: %s" % type(value)) 106 if not -2147483648 < value < 2147483647: 107 raise DataOverflow("Integer size exceeded. Possible: -2,147,483,648..+2,147,483,647. Attempted: %d" % value) 108 return struct.pack('<i', value)
109 -def retrieveLogical(bytes, fielddef={}, memo=None):
110 "Returns True if bytes is 't', 'T', 'y', or 'Y', None if '?', and False otherwise" 111 bytes = bytes.tostring() 112 if bytes == '?': 113 return None 114 return bytes in ['t','T','y','Y']
115 -def updateLogical(logical, fielddef={}, memo=None):
116 "Returs 'T' if logical is True, 'F' otherwise" 117 if type(logical) != bool: 118 logical = convertToBool(logical) 119 if type(logical) <> bool: 120 raise DbfError('Value %s is not logical.' % logical) 121 return logical and 'T' or 'F'
122 -def retrieveMemo(bytes, fielddef, memo):
123 "Returns the block of data from a memo file" 124 stringval = bytes.tostring() 125 if stringval.strip(): 126 block = int(stringval.strip()) 127 else: 128 block = 0 129 return memo.get_memo(block, fielddef)
130 -def updateMemo(string, fielddef, memo):
131 "Writes string as a memo, returns the block number it was saved into" 132 block = memo.put_memo(string) 133 if block == 0: 134 block = '' 135 return "%*s" % (fielddef['length'], block)
136 -def retrieveNumeric(bytes, fielddef, memo=None):
137 "Returns the number stored in bytes as integer if field spec for decimals is 0, float otherwise" 138 string = bytes.tostring() 139 if string[0:1] == '*': # value too big to store (Visual FoxPro idiocy) 140 return None 141 if not string.strip(): 142 string = '0' 143 if fielddef['decimals'] == 0: 144 return int(string) 145 else: 146 return float(string)
147 -def updateNumeric(value, fielddef, memo=None):
148 "returns value as ascii representation, rounding decimal portion as necessary" 149 if not (type(value) in (int, long, float)): 150 raise DbfError("incompatible type: %s" % type(value)) 151 decimalsize = fielddef['decimals'] 152 if decimalsize: 153 decimalsize += 1 154 maxintegersize = fielddef['length']-decimalsize 155 integersize = len("%.0f" % floor(value)) 156 if integersize > maxintegersize: 157 raise DataOverflow('Integer portion too big') 158 return "%*.*f" % (fielddef['length'], fielddef['decimals'], value)
159 -def retrieveVfpDateTime(bytes, fielddef={}, memo=None):
160 """returns the date/time stored in bytes; dates <= 01/01/1981 00:00:00 161 may not be accurate; BC dates are nulled.""" 162 # two four-byte integers store the date and time. 163 # millesecords are discarded from time 164 time = retrieveInteger(bytes[4:]) 165 microseconds = (time % 1000) * 1000 166 time = time // 1000 # int(round(time, -3)) // 1000 discard milliseconds 167 hours = time // 3600 168 mins = time % 3600 // 60 169 secs = time % 3600 % 60 170 time = Time(hours, mins, secs, microseconds) 171 possible = retrieveInteger(bytes[:4]) 172 possible -= VFPTIME 173 possible = max(0, possible) 174 date = Date.fromordinal(possible) 175 return DateTime.combine(date, time)
176 -def updateVfpDateTime(moment, fielddef={}, memo=None):
177 """sets the date/time stored in moment 178 moment must have fields year, month, day, hour, minute, second, microsecond""" 179 bytes = [0] * 8 180 hour = moment.hour 181 minute = moment.minute 182 second = moment.second 183 millisecond = moment.microsecond // 1000 # convert from millionths to thousandths 184 time = ((hour * 3600) + (minute * 60) + second) * 1000 + millisecond 185 bytes[4:] = updateInteger(time) 186 bytes[:4] = updateInteger(moment.toordinal() + VFPTIME) 187 return ''.join(bytes)
188 -def retrieveVfpMemo(bytes, fielddef, memo):
189 "Returns the block of data from a memo file" 190 block = struct.unpack('<i', bytes)[0] 191 return memo.get_memo(block, fielddef)
192 -def updateVfpMemo(string, fielddef, memo):
193 "Writes string as a memo, returns the block number it was saved into" 194 block = memo.put_memo(string) 195 return struct.pack('<i', block)
196 -def addCharacter(format):
197 if format[1] != '(' or format[-1] != ')': 198 raise DbfError("Format for Character field creation is C(n), not %s" % format) 199 length = int(format[2:-1]) 200 if not 0 < length < 255: 201 raise ValueError 202 decimals = 0 203 return length, decimals
204 -def addDate(format):
205 length = 8 206 decimals = 0 207 return length, decimals
208 -def addLogical(format):
209 length = 1 210 decimals = 0 211 return length, decimals
212 -def addMemo(format):
213 length = 10 214 decimals = 0 215 return length, decimals
216 -def addNumeric(format):
217 if format[1] != '(' or format[-1] != ')': 218 raise DbfError("Format for Numeric field creation is N(n,n), not %s" % format) 219 length, decimals = format[2:-1].split(',') 220 length = int(length) 221 decimals = int(decimals) 222 if not 0 < length < 18: 223 raise ValueError 224 if decimals and not 0 < decimals <= length - 2: 225 raise ValueError 226 return length, decimals
227 -def addVfpCurrency(format):
228 length = 8 229 decimals = 0 230 return length, decimals
231 -def addVfpDateTime(format):
232 length = 8 233 decimals = 8 234 return length, decimals
235 -def addVfpDouble(format):
236 length = 8 237 decimals = 0 238 return length, decimals
239 -def addVfpInteger(format):
240 length = 4 241 decimals = 0 242 return length, decimals
243 -def addVfpMemo(format):
244 length = 4 245 decimals = 0 246 return length, decimals
247 -def addVfpNumeric(format):
248 if format[1] != '(' or format[-1] != ')': 249 raise DbfError("Format for Numeric field creation is N(n,n), not %s" % format) 250 length, decimals = format[2:-1].split(',') 251 length = int(length) 252 decimals = int(decimals) 253 if not 0 < length < 21: 254 raise ValueError 255 if decimals and not 0 < decimals <= length - 2: 256 raise ValueError 257 return length, decimals
258

dbf-0.88.16/dbf/html/dbf.old.tables.List-class.html0000666000175100017510000007455511477216672020617 0ustar margamarga dbf.old.tables.List
Package dbf :: Package old :: Module tables :: Class List

Class List

source code

object --+
         |
        List

list of Dbf records, with set-like behavior
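An illustrative sketch (the tables and the invoice_no field are hypothetical):

    paid = List(receipts, key=lambda rec: rec.invoice_no)
    open_items = List(invoices, key=lambda rec: rec.invoice_no)
    still_open = open_items - paid      # set-like difference on the key values
    record in still_open                # set-like membership test

A List keeps one entry per key value; the default key is the (record_table, record_number) pair.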

Instance Methods [hide private]
 
__init__(yo, new_records=None, desc=None, key=None)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code
 
__add__(yo, other) source code
 
__contains__(yo, record) source code
 
__delitem__(yo, key) source code
 
__getitem__(yo, key) source code
 
__iter__(yo) source code
 
__len__(yo) source code
 
__nonzero__(yo) source code
 
__radd__(yo, other) source code
 
__repr__(yo)
repr(x)
source code
 
__rsub__(yo, other) source code
 
__sub__(yo, other) source code
 
_maybe_add(yo, item) source code
 
_get_record(yo, table=None, rec_no=None, value=None) source code
 
_purge(yo, record, old_record_number, offset) source code
 
append(yo, new_record) source code
 
bottom(yo) source code
 
clear(yo) source code
 
current(yo) source code
 
extend(yo, new_records) source code
 
goto(yo, index_number) source code
 
index(yo, record, start=None, stop=None) source code
 
insert(yo, i, record) source code
 
key(yo, record)
table_name, record_number
source code
 
next(yo) source code
 
pop(yo, index=None) source code
 
prev(yo) source code
 
remove(yo, record) source code
 
reverse(yo) source code
 
top(yo) source code
 
sort(yo, key=None, reverse=False) source code

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __setattr__, __str__

Class Variables [hide private]
  _desc = ''
Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

__init__(yo, new_records=None, desc=None, key=None)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

__repr__(yo)
(Representation operator)

source code 

repr(x)

Overrides: object.__repr__
(inherited documentation)

dbf-0.88.16/dbf/html/help.html0000666000175100017510000002436311477216670014737 0ustar margamarga Help
 

API Documentation

This document contains the API (Application Programming Interface) documentation for this project. Documentation for the Python objects defined by the project is divided into separate pages for each package, module, and class. The API documentation also includes two pages containing information about the project as a whole: a trees page, and an index page.

Object Documentation

Each Package Documentation page contains:

Each Module Documentation page contains:

Each Class Documentation page contains:

Project Documentation

The Trees page contains the module and class hierarchies:

The Index page contains indices of terms and identifiers:

The Table of Contents

The table of contents occupies the two frames on the left side of the window. The upper-left frame displays the project contents, and the lower-left frame displays the module contents:

[frames diagram: the project contents frame (upper left) and the module contents frame (lower left) sit beside the API documentation frame]
The project contents frame contains a list of all packages and modules that are defined by the project. Clicking on an entry will display its contents in the module contents frame. Clicking on a special entry, labeled "Everything," will display the contents of the entire project.

The module contents frame contains a list of every submodule, class, type, exception, function, and variable defined by a module or package. Clicking on an entry will display its documentation in the API documentation frame. Clicking on the name of the module, at the top of the frame, will display the documentation for the module itself.

The "frames" and "no frames" buttons below the top navigation bar can be used to control whether the table of contents is displayed or not.

The Navigation Bar

A navigation bar is located at the top and bottom of every page. It indicates what type of page you are currently viewing, and allows you to go to related pages. The following table describes the labels on the navigation bar. Note that some labels (such as [Parent]) are not displayed on all pages.

Label Highlighted when... Links to...
[Parent] (never highlighted) the parent of the current package
[Package] viewing a package the package containing the current object
[Module] viewing a module the module containing the current object
[Class] viewing a class the class containing the current object
[Trees] viewing the trees page the trees page
[Index] viewing the index page the index page
[Help] viewing the help page the help page

The "show private" and "hide private" buttons below the top navigation bar can be used to control whether documentation for private objects is displayed. Private objects are usually defined as objects whose (short) names begin with a single underscore, but do not end with an underscore. For example, "_x", "__pprint", and "epydoc.epytext._tokenize" are private objects; but "re.sub", "__init__", and "type_" are not. However, if a module defines the "__all__" variable, then its contents are used to decide which objects are private.

A timestamp below the bottom navigation bar indicates when each page was last updated.

dbf-0.88.16/dbf/html/dbf.tables.Index.IndexIterator-class.html0000666000175100017510000002244611477216672022746 0ustar margamarga dbf.tables.Index.IndexIterator
Package dbf :: Module tables :: Class Index :: Class IndexIterator

Class IndexIterator

source code

object --+
         |
        Index.IndexIterator

returns records using this index

Instance Methods [hide private]
 
__init__(yo, table, records)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code
 
__iter__(yo) source code
 
next(yo) source code

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __repr__, __setattr__, __str__

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

__init__(yo, table, records)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old.tables.Db3Table-class.html0000666000175100017510000007416011477216672021254 0ustar margamarga dbf.old.tables.Db3Table
Package dbf :: Package old :: Module tables :: Class Db3Table

Class Db3Table

source code

object --+    
         |    
  DbfTable --+
             |
            Db3Table

Provides an interface for working with dBase III tables.
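An illustrative sketch (the file name and field are hypothetical, and the constructor is assumed to accept the path of an existing .dbf file):

    table = Db3Table('contacts.dbf')
    for record in table:
        print record.name
    table.close()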

Nested Classes [hide private]
  _memoClass
Provides access to memo fields as dictionaries; subclasses must override _init, _get_memo, and _put_memo to store memo contents to disk

Inherited from DbfTable: DbfIterator

Inherited from DbfTable (private): _DbfLists, _Indexen, _MetaData, _Table, _TableHeader

Instance Methods [hide private]
 
_checkMemoIntegrity(yo)
dBase III specific
source code
 
_initializeFields(yo)
builds the FieldList of names, types, and descriptions
source code

Inherited from DbfTable: __contains__, __enter__, __exit__, __getattr__, __getitem__, __init__, __iter__, __len__, __nonzero__, __repr__, __str__, add_fields, append, bof, bottom, close, create_backup, create_index, current, delete_fields, eof, export, get_record, goto, is_decimal, is_memotype, new, next, open, pack, prev, query, reindex, rename_field, size, structure, top, type, zap

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __setattr__

Class Variables [hide private]
  _version = 'dBase III Plus'
  _versionabbv = 'db3'
  _fieldtypes = {'C': {'Blank': <type 'str'>, 'Init': <function ...
  _memoext = '.dbt'
  _memotypes = 'M',
  _yesMemoMask = '\x80'
  _noMemoMask = '\x7f'
  _fixed_fields = ('D', 'L', 'M')
  _variable_fields = ('C', 'N')
  _character_fields = ('C', 'M')
  _decimal_fields = ('N')
  _numeric_fields = ('N')
  _dbfTableHeader = '\x03\x00\x00\x00\x00\x00\x00\x00!\x00\x01\x...
  _dbfTableHeaderExtra = ''
  _supported_tables = ['\x03', '\x83']
  _read_only = False
  _meta_only = False
  _use_deleted = True

Inherited from DbfTable: codepage, field_count, field_names, filename, last_update, memoname, record_length, record_number, supported_tables, use_deleted, version

Inherited from DbfTable (private): _backed_up

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

_checkMemoIntegrity(yo)

source code 

dBase III specific

Overrides: DbfTable._checkMemoIntegrity

_initializeFields(yo)

source code 

builds the FieldList of names, types, and descriptions

Overrides: DbfTable._initializeFields

Class Variable Details [hide private]

_fieldtypes

Value:
{'C': {'Type': 'Character', 'Retrieve': io.retrieveCharacter, 'Update'\
: io.updateCharacter, 'Blank': str, 'Init': io.addCharacter}, 'D': {'T\
ype': 'Date', 'Retrieve': io.retrieveDate, 'Update': io.updateDate, 'B\
lank': Date.today, 'Init': io.addDate}, 'L': {'Type': 'Logical', 'Retr\
ieve': io.retrieveLogical, 'Update': io.updateLogical, 'Blank': bool, \
'Init': io.addLogical}, 'M': {'Type': 'Memo', 'Retrieve': io.retrieveM\
emo, 'Update': io.updateMemo, 'Blank': str, 'Init': io.addMemo}, 'N': \
{'Type': 'Numeric', 'Retrieve': io.retrieveNumeric, 'Update': io.updat\
...

_dbfTableHeader

Value:
'\x03\x00\x00\x00\x00\x00\x00\x00!\x00\x01\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00'

dbf-0.88.16/dbf/html/dbf.tables.DbfTable._DbfLists-class.html0000666000175100017510000002342611477216672022431 0ustar margamarga dbf.tables.DbfTable._DbfLists
Package dbf :: Module tables :: Class DbfTable :: Class _DbfLists

Class _DbfLists

source code

object --+
         |
        DbfTable._DbfLists

implements the weakref structure for DbfLists
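A plausible shape for such a container (an illustrative sketch, not necessarily the actual implementation): it stores weak references so a List disappears once user code drops it, and prunes dead references whenever it is iterated or counted.

    import weakref

    class _DbfLists(object):
        "implements the weakref structure for DbfLists"
        def __init__(yo):
            yo._lists = set()
        def __iter__(yo):
            yo._lists = set([ref for ref in yo._lists if ref() is not None])
            return iter([ref() for ref in yo._lists])
        def __len__(yo):
            yo._lists = set([ref for ref in yo._lists if ref() is not None])
            return len(yo._lists)
        def add(yo, new_list):
            yo._lists.add(weakref.ref(new_list))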

Instance Methods [hide private]
 
__init__(yo)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code
 
__iter__(yo) source code
 
__len__(yo) source code
 
add(yo, new_list) source code

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __repr__, __setattr__, __str__

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

__init__(yo)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old.tables.DbfTable._MetaData-class.html0000666000175100017510000002422111477216672023146 0ustar margamarga dbf.old.tables.DbfTable._MetaData
Package dbf :: Package old :: Module tables :: Class DbfTable :: Class _MetaData

Class _MetaData

source code

object --+    
         |    
      dict --+
             |
            DbfTable._MetaData

Instance Methods [hide private]

Inherited from dict: __cmp__, __contains__, __delitem__, __eq__, __ge__, __getattribute__, __getitem__, __gt__, __hash__, __init__, __iter__, __le__, __len__, __lt__, __ne__, __new__, __repr__, __setitem__, clear, copy, fromkeys, get, has_key, items, iteritems, iterkeys, itervalues, keys, pop, popitem, setdefault, update, values

Inherited from object: __delattr__, __reduce__, __reduce_ex__, __setattr__, __str__

Class Variables [hide private]
  blankrecord = None
  fields = None
  filename = None
  dfd = None
  memoname = None
  newmemofile = False
  memo = None
  mfd = None
  ignorememos = False
  memofields = None
  current = -1
Properties [hide private]

Inherited from object: __class__

dbf-0.88.16/dbf/html/dbf.dates-pysrc.html0000666000175100017510000074240211477216672017002 0ustar margamarga dbf.dates
Package dbf :: Module dates

Source Code for Module dbf.dates

  1  """wrappers around datetime objects to allow null values""" 
  2   
  3  import datetime 
  4  import time 
5 6 7 -class Date(object):
8 "adds null capable datetime.date constructs" 9 __slots__ = ['_date']
10 - def __new__(cls, year=None, month=0, day=0):
11 """date should be either a datetime.date, a string in yyyymmdd format, 12 or date/month/day should all be appropriate integers""" 13 nd = object.__new__(cls) 14 nd._date = False 15 if type(year) == datetime.date: 16 nd._date = year 17 elif type(year) == Date: 18 nd._date = year._date 19 elif year == 'no date': 20 pass # date object is already False 21 elif year is not None: 22 nd._date = datetime.date(year, month, day) 23 return nd
24 - def __add__(yo, other):
25 if yo and type(other) == datetime.timedelta: 26 return Date(yo._date + other) 27 else: 28 return NotImplemented
29 - def __eq__(yo, other):
30 if yo: 31 if type(other) == datetime.date: 32 return yo._date == other 33 elif type(other) == Date: 34 if other: 35 return yo._date == other._date 36 return False 37 else: 38 if type(other) == datetime.date: 39 return False 40 elif type(other) == Date: 41 if other: 42 return False 43 return True 44 return NotImplemented
45 - def __getattr__(yo, name):
46 if yo: 47 attribute = yo._date.__getattribute__(name) 48 return attribute 49 else: 50 raise AttributeError('null Date object has no attribute %s' % name)
51 - def __ge__(yo, other):
52 if yo: 53 if type(other) == datetime.date: 54 return yo._date >= other 55 elif type(other) == Date: 56 if other: 57 return yo._date >= other._date 58 return False 59 else: 60 if type(other) == datetime.date: 61 return False 62 elif type(other) == Date: 63 if other: 64 return False 65 return True 66 return NotImplemented
67 - def __gt__(yo, other):
68 if yo: 69 if type(other) == datetime.date: 70 return yo._date > other 71 elif type(other) == Date: 72 if other: 73 return yo._date > other._date 74 return True 75 else: 76 if type(other) == datetime.date: 77 return False 78 elif type(other) == Date: 79 if other: 80 return False 81 return False 82 return NotImplemented
83 - def __hash__(yo):
84 return yo._date.__hash__()
85 - def __le__(yo, other):
86 if yo: 87 if type(other) == datetime.date: 88 return yo._date <= other 89 elif type(other) == Date: 90 if other: 91 return yo._date <= other._date 92 return False 93 else: 94 if type(other) == datetime.date: 95 return True 96 elif type(other) == Date: 97 if other: 98 return True 99 return True 100 return NotImplemented
101 - def __lt__(yo, other):
102 if yo: 103 if type(other) == datetime.date: 104 return yo._date < other 105 elif type(other) == Date: 106 if other: 107 return yo._date < other._date 108 return False 109 else: 110 if type(other) == datetime.date: 111 return True 112 elif type(other) == Date: 113 if other: 114 return True 115 return False 116 return NotImplemented
117 - def __ne__(yo, other):
118 if yo: 119 if type(other) == datetime.date: 120 return yo._date != other 121 elif type(other) == Date: 122 if other: 123 return yo._date != other._date 124 return True 125 else: 126 if type(other) == datetime.date: 127 return True 128 elif type(other) == Date: 129 if other: 130 return True 131 return False 132 return NotImplemented
133 - def __nonzero__(yo):
134 if yo._date: 135 return True 136 return False
137 __radd__ = __add__
138 - def __rsub__(yo, other):
139 if yo and type(other) == datetime.date: 140 return other - yo._date 141 elif yo and type(other) == Date: 142 return other._date - yo._date 143 elif yo and type(other) == datetime.timedelta: 144 return Date(other - yo._date) 145 else: 146 return NotImplemented
147 - def __repr__(yo):
148 if yo: 149 return "Date(%d, %d, %d)" % yo.timetuple()[:3] 150 else: 151 return "Date()"
152 - def __str__(yo):
153 if yo: 154 return yo.isoformat() 155 return "no date"
156 - def __sub__(yo, other):
157 if yo and type(other) == datetime.date: 158 return yo._date - other 159 elif yo and type(other) == Date: 160 return yo._date - other._date 161 elif yo and type(other) == datetime.timedelta: 162 return Date(yo._date - other) 163 else: 164 return NotImplemented
165 - def date(yo):
166 if yo: 167 return yo._date 168 return None
169 @classmethod
170 - def fromordinal(cls, number):
171 if number: 172 return cls(datetime.date.fromordinal(number)) 173 return cls()
174 @classmethod
175 - def fromtimestamp(cls, timestamp):
176 return cls(datetime.date.fromtimestamp(timestamp))
177 @classmethod
178 - def fromymd(cls, yyyymmdd):
179 if yyyymmdd in ('', ' ','no date'): 180 return cls() 181 return cls(datetime.date(int(yyyymmdd[:4]), int(yyyymmdd[4:6]), int(yyyymmdd[6:])))
182 - def strftime(yo, format):
183 if yo: 184 return yo._date.strftime(format) 185 return '<no date>'
186 @classmethod
187 - def today(cls):
188 return cls(datetime.date.today())
189 - def ymd(yo):
190 if yo: 191 return "%04d%02d%02d" % yo.timetuple()[:3] 192 else: 193 return ' '
194 Date.max = Date(datetime.date.max) 195 Date.min = Date(datetime.date.min)
196 -class DateTime(object):
197 "adds null capable datetime.datetime constructs" 198 __slots__ = ['_datetime']
199 - def __new__(cls, year=None, month=0, day=0, hour=0, minute=0, second=0, microsec=0):
200 """year may be a datetime.datetime""" 201 ndt = object.__new__(cls) 202 ndt._datetime = False 203 if type(year) == datetime.datetime: 204 ndt._datetime = year 205 elif type(year) == DateTime: 206 ndt._datetime = year._datetime 207 elif year is not None: 208 ndt._datetime = datetime.datetime(year, month, day, hour, minute, second, microsec) 209 return ndt
210 - def __add__(yo, other):
211 if yo and type(other) == datetime.timedelta: 212 return DateTime(yo._datetime + other) 213 else: 214 return NotImplemented
215 - def __eq__(yo, other):
216 if yo: 217 if type(other) == datetime.datetime: 218 return yo._datetime == other 219 elif type(other) == DateTime: 220 if other: 221 return yo._datetime == other._datetime 222 return False 223 else: 224 if type(other) == datetime.datetime: 225 return False 226 elif type(other) == DateTime: 227 if other: 228 return False 229 return True 230 return NotImplemented
231 - def __getattr__(yo, name):
232 if yo: 233 attribute = yo._datetime.__getattribute__(name) 234 return attribute 235 else: 236 raise AttributeError('null DateTime object has no attribute %s' % name)
237 - def __ge__(yo, other):
238 if yo: 239 if type(other) == datetime.datetime: 240 return yo._datetime >= other 241 elif type(other) == DateTime: 242 if other: 243 return yo._datetime >= other._datetime 244 return False 245 else: 246 if type(other) == datetime.datetime: 247 return False 248 elif type(other) == DateTime: 249 if other: 250 return False 251 return True 252 return NotImplemented
253 - def __gt__(yo, other):
254 if yo: 255 if type(other) == datetime.datetime: 256 return yo._datetime > other 257 elif type(other) == DateTime: 258 if other: 259 return yo._datetime > other._datetime 260 return True 261 else: 262 if type(other) == datetime.datetime: 263 return False 264 elif type(other) == DateTime: 265 if other: 266 return False 267 return False 268 return NotImplemented
269 - def __hash__(yo):
270 return yo._datetime.__hash__()
271 - def __le__(yo, other):
272 if yo: 273 if type(other) == datetime.datetime: 274 return yo._datetime <= other 275 elif type(other) == DateTime: 276 if other: 277 return yo._datetime <= other._datetime 278 return False 279 else: 280 if type(other) == datetime.datetime: 281 return True 282 elif type(other) == DateTime: 283 if other: 284 return True 285 return True 286 return NotImplemented
287 - def __lt__(yo, other):
288 if yo: 289 if type(other) == datetime.datetime: 290 return yo._datetime < other 291 elif type(other) == DateTime: 292 if other: 293 return yo._datetime < other._datetime 294 return False 295 else: 296 if type(other) == datetime.datetime: 297 return True 298 elif type(other) == DateTime: 299 if other: 300 return True 301 return False 302 return NotImplemented
303 - def __ne__(yo, other):
304 if yo: 305 if type(other) == datetime.datetime: 306 return yo._datetime != other 307 elif type(other) == DateTime: 308 if other: 309 return yo._datetime != other._datetime 310 return True 311 else: 312 if type(other) == datetime.datetime: 313 return True 314 elif type(other) == DateTime: 315 if other: 316 return True 317 return False 318 return NotImplemented
319 - def __nonzero__(yo):
320 if yo._datetime is not False: 321 return True 322 return False
323 __radd__ = __add__
324 - def __rsub__(yo, other):
325 if yo and type(other) == datetime.datetime: 326 return other - yo._datetime 327 elif yo and type(other) == DateTime: 328 return other._datetime - yo._datetime 329 elif yo and type(other) == datetime.timedelta: 330 return DateTime(other - yo._datetime) 331 else: 332 return NotImplemented
333 - def __repr__(yo):
334 if yo: 335 return "DateTime(%d, %d, %d, %d, %d, %d, %d, %d, %d)" % yo._datetime.timetuple()[:] 336 else: 337 return "DateTime()"
338 - def __str__(yo):
339 if yo: 340 return yo.isoformat() 341 return "no datetime"
342 - def __sub__(yo, other):
343 if yo and type(other) == datetime.datetime: 344 return yo._datetime - other 345 elif yo and type(other) == DateTime: 346 return yo._datetime - other._datetime 347 elif yo and type(other) == datetime.timedelta: 348 return DateTime(yo._datetime - other) 349 else: 350 return NotImplemented
351 @classmethod
352 - def combine(cls, date, time):
353 if Date(date) and Time(time): 354 return cls(date.year, date.month, date.day, time.hour, time.minute, time.second, time.microsecond) 355 return cls()
356 - def date(yo):
357 if yo: 358 return Date(yo.year, yo.month, yo.day) 359 return Date()
360 - def datetime(yo):
361 if yo: 362 return yo._datetime 363 return None
364 @classmethod
365 - def fromordinal(cls, number):
366 if number: 367 return cls(datetime.datetime.fromordinal(number)) 368 else: 369 return cls()
370 @classmethod
371 - def fromtimestamp(cls, timestamp):
372 return DateTime(datetime.datetime.fromtimestamp(timestamp))
373 @classmethod
374 - def now(cls):
375 return cls(datetime.datetime.now())
376 - def time(yo):
377 if yo: 378 return Time(yo.hour, yo.minute, yo.second, yo.microsecond) 379 return Time()
380 @classmethod
381 - def utcnow(cls):
382 return cls(datetime.datetime.utcnow())
383 @classmethod
384 - def today(cls):
385 return cls(datetime.datetime.today())
386 DateTime.max = DateTime(datetime.datetime.max) 387 DateTime.min = DateTime(datetime.datetime.min)
388 -class Time(object):
389 "adds null capable datetime.time constructs" 390 __slots__ = ['_time']
391 - def __new__(cls, hour=None, minute=0, second=0, microsec=0):
392 """hour may be a datetime.time""" 393 nt = object.__new__(cls) 394 nt._time = False 395 if type(hour) == datetime.time: 396 nt._time = hour 397 elif type(hour) == Time: 398 nt._time = hour._time 399 elif hour is not None: 400 nt._time = datetime.time(hour, minute, second, microsec) 401 return nt
402 - def __add__(yo, other):
403 if yo and type(other) == datetime.timedelta: 404 return Time(yo._time + other) 405 else: 406 return NotImplemented
407 - def __eq__(yo, other):
408 if yo: 409 if type(other) == datetime.time: 410 return yo._time == other 411 elif type(other) == Time: 412 if other: 413 return yo._time == other._time 414 return False 415 else: 416 if type(other) == datetime.time: 417 return False 418 elif type(other) == Time: 419 if other: 420 return False 421 return True 422 return NotImplemented
423 - def __getattr__(yo, name):
424 if yo: 425 attribute = yo._time.__getattribute__(name) 426 return attribute 427 else: 428 raise AttributeError('null Time object has no attribute %s' % name)
429 - def __ge__(yo, other):
430 if yo: 431 if type(other) == datetime.time: 432 return yo._time >= other 433 elif type(other) == Time: 434 if other: 435 return yo._time >= other._time 436 return False 437 else: 438 if type(other) == datetime.time: 439 return False 440 elif type(other) == Time: 441 if other: 442 return False 443 return True 444 return NotImplemented
445 - def __gt__(yo, other):
446         if yo: 447             if type(other) == datetime.time: 448                 return yo._time > other 449             elif type(other) == Time: 450                 if other: 451                     return yo._time > other._time 452                 return True 453         else: 454             if type(other) == datetime.time: 455                 return False 456             elif type(other) == Time: 457                 if other: 458                     return False 459                 return False 460         return NotImplemented
461 - def __hash__(yo):
462         return yo._time.__hash__()
463 - def __le__(yo, other):
464 if yo: 465 if type(other) == datetime.time: 466 return yo._time <= other 467 elif type(other) == Time: 468 if other: 469 return yo._time <= other._time 470 return False 471 else: 472 if type(other) == datetime.time: 473 return True 474 elif type(other) == Time: 475 if other: 476 return True 477 return True 478 return NotImplemented
479 - def __lt__(yo, other):
480 if yo: 481 if type(other) == datetime.time: 482 return yo._time < other 483 elif type(other) == Time: 484 if other: 485 return yo._time < other._time 486 return False 487 else: 488 if type(other) == datetime.time: 489 return True 490 elif type(other) == Time: 491 if other: 492 return True 493 return False 494 return NotImplemented
495 - def __ne__(yo, other):
496 if yo: 497 if type(other) == datetime.time: 498 return yo._time != other 499 elif type(other) == Time: 500 if other: 501 return yo._time != other._time 502 return True 503 else: 504 if type(other) == datetime.time: 505 return True 506 elif type(other) == Time: 507 if other: 508 return True 509 return False 510 return NotImplemented
511 - def __nonzero__(yo):
512 if yo._time is not False: 513 return True 514 return False
515 __radd__ = __add__
516 - def __rsub__(yo, other):
517         if yo and type(other) == datetime.time: 518             return other - yo._time 519         elif yo and type(other) == Time: 520             return other._time - yo._time 521         elif yo and type(other) == datetime.timedelta: 522             return Time(other - yo._time) 523         else: 524             return NotImplemented
525 - def __repr__(yo):
526 if yo: 527 return "Time(%d, %d, %d, %d)" % (yo.hour, yo.minute, yo.second, yo.microsecond) 528 else: 529 return "Time()"
530 - def __str__(yo):
531 if yo: 532 return yo.isoformat() 533 return "no time"
534 - def __sub__(yo, other):
535 if yo and type(other) == datetime.time: 536 return yo._time - other 537 elif yo and type(other) == Time: 538 return yo._time - other._time 539 elif yo and type(other) == datetime.timedelta: 540 return Time(yo._time - other) 541 else: 542 return NotImplemented
543 Time.max = Time(datetime.time.max) 544 Time.min = Time(datetime.time.min) 545
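A minimal usage sketch (not part of the distributed sources) of the null-capable wrappers defined above; it assumes only that the dbf package is importable, and the expected results shown as comments follow from the implementation above:

    from dbf.dates import Date, DateTime, Time
    import datetime

    null_day = Date()                      # no arguments -> the null value
    bool(null_day)                         # False
    str(null_day)                          # 'no date'

    today = Date.today()
    today == datetime.date.today()         # True -- compares directly with datetime.date
    null_day < today                       # True -- a null Date sorts before any real date

    Date.fromymd('20101225').ymd()         # '20101225' -- dbf YYYYMMDD round trip
    DateTime.now().date()                  # Date portion of a DateTime
    str(Time())                            # 'no time' -- the null Time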

dbf-0.88.16/dbf/html/dbf.tables-module.html0000666000175100017510000007640011477216670017275 0ustar margamarga dbf.tables
Package dbf :: Module tables
[hide private]

Module tables

source code

table definitions

Classes [hide private]
  property
Emulate PyProperty_Type() in Objects/descrobject.c
  _DbfRecord
Provides routines to extract and save data within the fields of a dbf record.
  _DbfMemo
Provides access to memo fields as dictionaries must override _init, _get_memo, and _put_memo to store memo contents to disk
  _Db3Memo
Provides access to memo fields as dictionaries must override _init, _get_memo, and _put_memo to store memo contents to disk
  _VfpMemo
Provides access to memo fields as dictionaries must override _init, _get_memo, and _put_memo to store memo contents to disk
  DbfTable
Provides a framework for dbf style tables.
  Db3Table
Provides an interface for working with dBase III tables.
  FpTable
Provides an interface for working with FoxPro 2 tables
  VfpTable
Provides an interface for working with Visual FoxPro 6 tables
  List
list of Dbf records, with set-like behavior
  DbfCsv
csv format for exporting tables
  Index
  _Db4Table
Functions [hide private]
 
sql_criteria(records, criteria)
creates a function matching the sql criteria
source code
 
sql_cmd(records, command)
creates a function that applies command to each record in records
source code
 
sql(records, command)
recognized sql commands are SELECT, UPDATE, INSERT, DELETE, and RECALL
source code
 
_nop(value)
returns parameter unchanged
source code
 
_normalize_tuples(tuples, length, filler)
ensures each tuple is the same length, using filler[-missing] for the gaps
source code
 
_codepage_lookup(cp) source code
 
ascii(new_setting=None)
get/set return_ascii setting
source code
 
codepage(cp=None)
get/set default codepage for any new tables
source code
 
encoding(cp=None)
get/set default encoding for non-unicode strings passed into a table
source code
Variables [hide private]
  input_decoding = 'cp1252'
  default_codepage = 'cp1252'
  return_ascii = False
  version_map = {'\x02': 'FoxBASE', '\x03': 'dBase III Plus', '\...
  code_pages = {'\x00': ('ascii', 'plain ol\' ascii'), '\x01': (...
  sql_functions = {'count': None, 'delete': None, 'insert': None...

Imports: os, sys, csv, codecs, locale, unicodedata, weakref, array, bisect_left, bisect_right, Decimal, copyfileobj, dbf, io, Date, DateTime, Time, Bof, Eof, DbfError, DataOverflow, FieldMissing, NonUnicode, DoNotIndex


Variables Details [hide private]

version_map

Value:
{'\x02': 'FoxBASE',
 '\x03': 'dBase III Plus',
 '\x04': 'dBase IV',
 '\x05': 'dBase V',
 '0': 'Visual FoxPro',
 '1': 'Visual FoxPro (auto increment field)',
 'C': 'dBase IV SQL',
 '{': 'dBase IV w/memos',
...

code_pages

Value:
{'\x00': ('ascii', 'plain ol\' ascii'),
 '\x01': ('cp437', 'U.S. MS-DOS'),
 '\x02': ('cp850', 'International MS-DOS'),
 '\x03': ('cp1252', 'Windows ANSI'),
 '\x04': ('mac_roman', 'Standard Macintosh'),
 '\x08': ('cp865', 'Danish OEM'),
 '\t': ('cp437', 'Dutch OEM'),
 '''
...

sql_functions

Value:
{'count': None,
 'delete': None,
 'insert': None,
 'select': None,
 'update': None}

dbf-0.88.16/dbf/html/dbf.exceptions.NonUnicode-class.html0000666000175100017510000002135411477216672022064 0ustar margamarga dbf.exceptions.NonUnicode
Package dbf :: Module exceptions :: Class NonUnicode
[hide private]

Class NonUnicode

source code

              object --+            
                       |            
exceptions.BaseException --+        
                           |        
        exceptions.Exception --+    
                               |    
                        DbfError --+
                                   |
                                  NonUnicode

Data for table not in unicode

Instance Methods [hide private]
 
__init__(yo, message=None)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code

Inherited from exceptions.Exception: __new__

Inherited from exceptions.BaseException: __delattr__, __getattribute__, __getitem__, __getslice__, __reduce__, __repr__, __setattr__, __setstate__, __str__

Inherited from object: __hash__, __reduce_ex__

Properties [hide private]

Inherited from exceptions.BaseException: args, message

Inherited from object: __class__

Method Details [hide private]

__init__(yo, message=None)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old.exceptions.NonUnicode-class.html0000666000175100017510000002150611477216672022640 0ustar margamarga dbf.old.exceptions.NonUnicode
Package dbf :: Package old :: Module exceptions :: Class NonUnicode
[hide private]

Class NonUnicode

source code

              object --+            
                       |            
exceptions.BaseException --+        
                           |        
        exceptions.Exception --+    
                               |    
                        DbfError --+
                                   |
                                  NonUnicode

Data for table not in unicode

Instance Methods [hide private]
 
__init__(yo, message=None)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code

Inherited from exceptions.Exception: __new__

Inherited from exceptions.BaseException: __delattr__, __getattribute__, __getitem__, __getslice__, __reduce__, __repr__, __setattr__, __setstate__, __str__

Inherited from object: __hash__, __reduce_ex__

Properties [hide private]

Inherited from exceptions.BaseException: args, message

Inherited from object: __class__

Method Details [hide private]

__init__(yo, message=None)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old.tables.DbfTable._DbfLists-class.html0000666000175100017510000002357411477216672023212 0ustar margamarga dbf.old.tables.DbfTable._DbfLists
Package dbf :: Package old :: Module tables :: Class DbfTable :: Class _DbfLists
[hide private]

Class _DbfLists

source code

object --+
         |
        DbfTable._DbfLists

implements the weakref structure for DbfLists

Instance Methods [hide private]
 
__init__(yo)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code
 
__iter__(yo) source code
 
__len__(yo) source code
 
add(yo, new_list) source code

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __repr__, __setattr__, __str__

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

__init__(yo)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.tables.FpTable-class.html0000666000175100017510000010650511477216672020433 0ustar margamarga dbf.tables.FpTable
Package dbf :: Module tables :: Class FpTable
[hide private]

Class FpTable

source code

object --+    
         |    
  DbfTable --+
             |
            FpTable

Provides an interface for working with FoxPro 2 tables

Nested Classes [hide private]
  _memoClass
Provides access to memo fields as dictionaries must override _init, _get_memo, and _put_memo to store memo contents to disk

Inherited from DbfTable: DbfIterator

Inherited from DbfTable (private): _DbfLists, _Indexen, _MetaData, _Table, _TableHeader

Instance Methods [hide private]
 
_checkMemoIntegrity(yo)
dBase III specific
source code
 
_initializeFields(yo)
builds the FieldList of names, types, and descriptions
source code

Inherited from DbfTable: __contains__, __enter__, __exit__, __getattr__, __getitem__, __init__, __iter__, __len__, __nonzero__, __repr__, __str__, add_fields, append, bof, bottom, close, create_backup, create_index, current, delete_fields, eof, export, get_record, goto, is_decimal, is_memotype, new, next, open, pack, prev, query, reindex, rename_field, size, structure, top, type, zap

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __setattr__

Class Variables [hide private]
  _version = 'Foxpro'
  _versionabbv = 'fp'
  _fieldtypes = {'0': {'Blank': <type 'int'>, 'Init': None, 'Ret...
  _memoext = '.fpt'
  _memotypes = ('G', 'M', 'P')
  _yesMemoMask = '\xf5'
  _noMemoMask = '\x03'
  _fixed_fields = ('B', 'D', 'G', 'I', 'L', 'M', 'P', 'T', 'Y')
  _variable_fields = ('C', 'F', 'N')
  _character_fields = ('C', 'M')
  _decimal_fields = ('F', 'N')
  _numeric_fields = ('B', 'F', 'I', 'N', 'Y')
  _supported_tables = ('\x03', '\xf5')
  _dbfTableHeader = '0\x00\x00\x00\x00\x00\x00\x00(\x01\x01\x00\...
  _dbfTableHeaderExtra = '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x...
  _use_deleted = True

Inherited from DbfTable: backup, codepage, field_count, field_names, filename, last_update, memoname, record_length, record_number, supported_tables, use_deleted, version

Inherited from DbfTable (private): _meta_only, _read_only

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

_checkMemoIntegrity(yo)

source code 

dBase III specific

Overrides: DbfTable._checkMemoIntegrity
(inherited documentation)

_initializeFields(yo)

source code 

builds the FieldList of names, types, and descriptions

Overrides: DbfTable._initializeFields

Class Variable Details [hide private]

_fieldtypes

Value:
{'0': {'Blank': <type 'int'>,
       'Init': None,
       'Retrieve': <function unsupportedType at 0x00ECA130>,
       'Type': '_NullFlags',
       'Update': <function unsupportedType at 0x00ECA130>},
 'C': {'Blank': <type 'str'>,
       'Init': <function addCharacter at 0x00ECA670>,
       'Retrieve': <function retrieveCharacter at 0x00ECA170>,
...

_dbfTableHeader

Value:
'0\x00\x00\x00\x00\x00\x00\x00(\x01\x01\x00\x00\x00\x00\x00\x00\x00\x0\
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00'

_dbfTableHeaderExtra

Value:
'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0\
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0\
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0\
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0\
...

dbf-0.88.16/dbf/html/dbf.old.tables-module.html0000666000175100017510000006711311477216670020053 0ustar margamarga dbf.old.tables
Package dbf :: Package old :: Module tables
[hide private]

Module tables

source code

table definitions

Classes [hide private]
  property
Emulate PyProperty_Type() in Objects/descrobject.c
  _DbfRecord
Provides routines to extract and save data within the fields of a dbf record.
  _DbfMemo
Provides access to memo fields as dictionaries must override _init, _get_memo, and _put_memo to store memo contents to disk
  _Db3Memo
Provides access to memo fields as dictionaries must override _init, _get_memo, and _put_memo to store memo contents to disk
  _VfpMemo
Provides access to memo fields as dictionaries must override _init, _get_memo, and _put_memo to store memo contents to disk
  DbfTable
Provides a framework for dbf style tables.
  Db3Table
Provides an interface for working with dBase III tables.
  FpTable
Provides an interface for working with FoxPro 2 tables
  VfpTable
Provides an interface for working with Visual FoxPro 6 tables
  List
list of Dbf records, with set-like behavior
  DbfCsv
csv format for exporting tables
  Index
  _Db4Table
Functions [hide private]
 
sql(records, command)
recognized sql commands are SELECT, UPDATE, INSERT, DELETE, and RECALL
source code
 
_nop(value)
returns parameter unchanged
source code
 
_normalize_tuples(tuples, length, filler)
ensures each tuple is the same length, using filler[-missing] for the gaps
source code
 
_codepage_lookup(cp) source code
 
ascii(new_setting=None)
get/set return_ascii setting
source code
 
codepage(cp=None)
get/set default codepage for any new tables
source code
 
encoding(cp=None)
get/set default encoding for non-unicode strings passed into a table
source code
Variables [hide private]
  input_decoding = 'cp1252'
  default_codepage = 'cp1252'
  return_ascii = True
  version_map = {'\x02': 'FoxBASE', '\x03': 'dBase III Plus', '\...
  code_pages = {'\x00': ('ascii', 'plain ol\' ascii'), '\x01': (...

Imports: os, sys, csv, codecs, locale, unicodedata, weakref, array, bisect_left, bisect_right, Decimal, copyfileobj, io, Date, DateTime, Time, Bof, Eof, DbfError, DataOverflow, FieldMissing, NonUnicode, DoNotIndex


Variables Details [hide private]

version_map

Value:
{'\x02': 'FoxBASE',
 '\x03': 'dBase III Plus',
 '\x04': 'dBase IV',
 '\x05': 'dBase V',
 '0': 'Visual FoxPro',
 '1': 'Visual FoxPro (auto increment field)',
 'C': 'dBase IV SQL',
 '{': 'dBase IV w/memos',
...

code_pages

Value:
{'\x00': ('ascii', 'plain ol\' ascii'),
 '\x01': ('cp437', 'U.S. MS-DOS'),
 '\x02': ('cp850', 'International MS-DOS'),
 '\x03': ('cp1252', 'Windows ANSI'),
 '\x04': ('mac_roman', 'Standard Macintosh'),
 '\x08': ('cp865', 'Danish OEM'),
 '\t': ('cp437', 'Dutch OEM'),
 '''
...

dbf-0.88.16/dbf/html/dbf.dates.DateTime-class.html0000666000175100017510000007320711477216672020442 0ustar margamarga dbf.dates.DateTime
Package dbf :: Module dates :: Class DateTime
[hide private]

Class DateTime

source code

object --+
         |
        DateTime

adds null capable datetime.datetime constructs

Instance Methods [hide private]
 
__add__(yo, other) source code
 
__eq__(yo, other) source code
 
__getattr__(yo, name) source code
 
__ge__(yo, other) source code
 
__gt__(yo, other) source code
 
__hash__(yo)
hash(x)
source code
 
__le__(yo, other) source code
 
__lt__(yo, other) source code
 
__ne__(yo, other) source code
 
__nonzero__(yo) source code
 
__radd__(yo, other) source code
 
__rsub__(yo, other) source code
 
__repr__(yo)
repr(x)
source code
 
__str__(yo)
str(x)
source code
 
__sub__(yo, other) source code
 
date(yo) source code
 
datetime(yo) source code
 
time(yo) source code

Inherited from object: __delattr__, __getattribute__, __init__, __reduce__, __reduce_ex__, __setattr__

Class Methods [hide private]
 
combine(cls, date, time) source code
 
fromordinal(cls, number) source code
 
fromtimestamp(cls, timestamp) source code
 
now(cls) source code
 
utcnow(cls) source code
 
today(cls) source code
Static Methods [hide private]
a new object with type S, a subtype of T
__new__(cls, year=None, month=0, day=0, hour=0, minute=0, second=0, microsec=0)
year may be a datetime.datetime
source code
Class Variables [hide private]
  max = DateTime(9999, 12, 31, 23, 59, 59, 4, 365, -1)
  min = DateTime(1, 1, 1, 0, 0, 0, 0, 1, -1)
Properties [hide private]
  _datetime

Inherited from object: __class__

Method Details [hide private]

__new__(cls, year=None, month=0, day=0, hour=0, minute=0, second=0, microsec=0)
Static Method

source code 

year may be a datetime.datetime

Returns: a new object with type S, a subtype of T
Overrides: object.__new__

__hash__(yo)
(Hashing function)

source code 

hash(x)

Overrides: object.__hash__
(inherited documentation)

__repr__(yo)
(Representation operator)

source code 

repr(x)

Overrides: object.__repr__
(inherited documentation)

__str__(yo)
(Informal representation operator)

source code 

str(x)

Overrides: object.__str__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old.tables.DbfTable._Indexen-class.html0000666000175100017510000002357111477216672023067 0ustar margamarga dbf.old.tables.DbfTable._Indexen
Package dbf :: Package old :: Module tables :: Class DbfTable :: Class _Indexen
[hide private]

Class _Indexen

source code

object --+
         |
        DbfTable._Indexen

implements the weakref structure for separate indexes

Instance Methods [hide private]
 
__init__(yo)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code
 
__iter__(yo) source code
 
__len__(yo) source code
 
add(yo, new_list) source code

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __repr__, __setattr__, __str__

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

__init__(yo)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old.tables.DbfTable._Table-class.html0000666000175100017510000002412211477216672022515 0ustar margamarga dbf.old.tables.DbfTable._Table
Package dbf :: Package old :: Module tables :: Class DbfTable :: Class _Table
[hide private]

Class _Table

source code

object --+
         |
        DbfTable._Table

implements the weakref table for records

Instance Methods [hide private]
 
__init__(yo, count, meta)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code
 
__getitem__(yo, index) source code
 
append(yo, record) source code
 
clear(yo) source code

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __repr__, __setattr__, __str__

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

__init__(yo, count, meta)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old.tables.DbfTable-class.html0000666000175100017510000020436611477216672021342 0ustar margamarga dbf.old.tables.DbfTable
Package dbf :: Package old :: Module tables :: Class DbfTable
[hide private]

Class DbfTable

source code

object --+
         |
        DbfTable
Known Subclasses:

Provides a framework for dbf style tables.

Nested Classes [hide private]
  _memoClass
Provides access to memo fields as dictionaries must override _init, _get_memo, and _put_memo to store memo contents to disk
  _DbfLists
implements the weakref structure for DbfLists
  _Indexen
implements the weakref structure for separate indexes
  _MetaData
  _TableHeader
  _Table
implements the weakref table for records
  DbfIterator
returns records using current index
Instance Methods [hide private]
 
_buildHeaderFields(yo)
constructs fieldblock for disk table
source code
 
_checkMemoIntegrity(yo)
dBase III specific
source code
 
_initializeFields(yo)
builds the FieldList of names, types, and descriptions from the disk file
source code
 
_fieldLayout(yo, i)
Returns field information Name Type(Length[,Decimals])
source code
 
_loadtable(yo)
loads the records from disk to memory
source code
 
_list_fields(yo, specs, sep=',') source code
 
_update_disk(yo, headeronly=False)
synchronizes the disk file with current data
source code
 
__contains__(yo, key) source code
 
__enter__(yo) source code
 
__exit__(yo, *exc_info) source code
 
__getattr__(yo, name) source code
 
__getitem__(yo, value) source code
 
__init__(yo, filename=':memory:', field_specs=None, memo_size=128, ignore_memos=False, read_only=False, keep_memos=False, meta_only=False, codepage=None)
open/create dbf file filename should include path if needed field_specs can be either a ;-delimited string or a list of strings memo_size is always 512 for db3 memos ignore_memos is useful if the memo file is missing or corrupt read_only will load records into memory, then close the disk file keep_memos will also load any memo fields into memory meta_only will ignore all records, keeping only basic table information codepage will override whatever is set in the table itself
source code
 
__iter__(yo) source code
 
__len__(yo) source code
 
__nonzero__(yo) source code
 
__repr__(yo)
repr(x)
source code
 
__str__(yo)
str(x)
source code
 
add_fields(yo, field_specs)
adds field(s) to the table layout; format is Name Type(Length,Decimals)[; Name Type(Length,Decimals)[...]] backup table is created with _backup appended to name then modifies current structure
source code
 
append(yo, kamikaze='', drop=False, multiple=1)
adds <multiple> blank records, and fills fields with dict/tuple values if present
source code
 
bof(yo)
moves record pointer to previous usable record; returns True if no more usable records
source code
 
bottom(yo, get_record=False)
sets record pointer to bottom of table if get_record, seeks to and returns last (non-deleted) record DbfError if table is empty Bof if all records deleted and use_deleted is False
source code
 
close(yo, keep_table=False, keep_memos=False)
closes disk files ensures table data is available if keep_table ensures memo data is available if keep_memos
source code
 
create_backup(yo, new_name=None, overwrite=False)
creates a backup table -- ignored if memory table
source code
 
create_index(yo, key) source code
 
current(yo, index=False)
returns current logical record, or its index
source code
 
delete_fields(yo, doomed)
removes field(s) from the table creates backup files with _backup appended to the file name, then modifies current structure
source code
 
eof(yo)
moves record pointer to next usable record; returns True if no more usable records
source code
 
export(yo, records=None, filename=None, field_specs=None, format='csv', header=True)
writes the table using CSV or tab-delimited format, using the filename given if specified, otherwise the table name
source code
 
get_record(yo, recno)
returns record at physical_index[recno]
source code
 
goto(yo, criteria)
changes the record pointer to the first matching (non-deleted) record criteria should be either a tuple of tuple(value, field, func) triples, or an integer to go to
source code
 
is_decimal(yo, name)
returns True if name is a variable-length field type
source code
 
is_memotype(yo, name)
returns True if name is a memo type field
source code
 
new(yo, filename, field_specs=None)
returns a new table of the same type
source code
 
next(yo)
set record pointer to next (non-deleted) record, and return it
source code
 
open(yo) source code
 
pack(yo, _pack=True)
physically removes all deleted records
source code
 
prev(yo)
set record pointer to previous (non-deleted) record, and return it
source code
 
query(yo, sql_command=None, python=None)
uses exec to perform queries on the table
source code
 
reindex(yo) source code
 
rename_field(yo, oldname, newname)
renames an existing field
source code
 
size(yo, field)
returns size of field as a tuple of (length, decimals)
source code
 
structure(yo, fields=None)
return list of fields suitable for creating same table layout
source code
 
top(yo, get_record=False)
sets record pointer to top of table; if get_record, seeks to and returns first (non-deleted) record DbfError if table is empty Eof if all records are deleted and use_deleted is False
source code
 
type(yo, field)
returns type of field
source code
 
zap(yo, areyousure=False)
removes all records from table -- this cannot be undone! areyousure must be True, else error is raised
source code

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __setattr__

Class Variables [hide private]
  _version = 'basic memory table'
  _versionabbv = 'dbf'
  _fieldtypes = {'D': {'Blank': <bound method type.today of <cla...
  _memoext = ''
  _memotypes = ('M')
  _yesMemoMask = ''
  _noMemoMask = ''
  _fixed_fields = ('M', 'D', 'L')
  _variable_fields = ()
  _character_fields = ('M')
  _decimal_fields = ()
  _numeric_fields = ()
  _dbfTableHeader = '\x00\x00\x00\x00\x00\x00\x00\x00!\x00\x01\x...
  _dbfTableHeaderExtra = ''
  _supported_tables = []
  _read_only = False
  _meta_only = False
  _use_deleted = True
  _backed_up = False
  codepage = <dbf.old.tables.property object at 0x011A43F0>
  field_count = <dbf.old.tables.property object at 0x011A4410>
  field_names = <dbf.old.tables.property object at 0x011A4430>
  filename = <dbf.old.tables.property object at 0x011A4450>
  last_update = <dbf.old.tables.property object at 0x011A4470>
  memoname = <dbf.old.tables.property object at 0x011A4490>
  record_length = <dbf.old.tables.property object at 0x011A44B0>
  record_number = <dbf.old.tables.property object at 0x011A44D0>
  supported_tables = <dbf.old.tables.property object at 0x011A44F0>
  use_deleted = <dbf.old.tables.property object at 0x011A4510>
  version = <dbf.old.tables.property object at 0x011A4530>
Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

__init__(yo, filename=':memory:', field_specs=None, memo_size=128, ignore_memos=False, read_only=False, keep_memos=False, meta_only=False, codepage=None)
(Constructor)

source code 

open/create dbf file
  • filename - should include path if needed
  • field_specs - can be either a ;-delimited string or a list of strings
  • memo_size - is always 512 for db3 memos
  • ignore_memos - is useful if the memo file is missing or corrupt
  • read_only - will load records into memory, then close the disk file
  • keep_memos - will also load any memo fields into memory
  • meta_only - will ignore all records, keeping only basic table information
  • codepage - will override whatever is set in the table itself

Overrides: object.__init__
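The keyword options described above can also be exercised through the package-level Table factory, which documents the same argument names. A hedged sketch only: 'customers.dbf' is an illustrative file name, and the behaviour is assumed from the docstrings rather than verified:

    import dbf

    # Load all records into memory, then release the disk file.
    ro = dbf.Table('customers.dbf', read_only=True)

    # Read only the header and field layout, skipping every record.
    layout = dbf.Table('customers.dbf', meta_only=True)

    # Override the codepage byte stored in the table header.
    cp850 = dbf.Table('customers.dbf', codepage='cp850')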

__repr__(yo)
(Representation operator)

source code 

repr(x)

Overrides: object.__repr__
(inherited documentation)

__str__(yo)
(Informal representation operator)

source code 

str(x)

Overrides: object.__str__
(inherited documentation)

structure(yo, fields=None)

source code 

return list of fields suitable for creating same table layout

Parameters:
  • fields - list of fields or None for all fields

Class Variable Details [hide private]

_fieldtypes

Value:
{'D': {'Type': 'Date', 'Init': io.addDate, 'Blank': Date.today, 'Retri\
eve': io.retrieveDate, 'Update': io.updateDate,}, 'L': {'Type': 'Logic\
al', 'Init': io.addLogical, 'Blank': bool, 'Retrieve': io.retrieveLogi\
cal, 'Update': io.updateLogical,}, 'M': {'Type': 'Memo', 'Init': io.ad\
dMemo, 'Blank': str, 'Retrieve': io.retrieveMemo, 'Update': io.updateM\
emo,}}

_dbfTableHeader

Value:
'\x00\x00\x00\x00\x00\x00\x00\x00!\x00\x01\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'

dbf-0.88.16/dbf/html/dbf.exceptions.Bof-class.html0000666000175100017510000002363311477216672020533 0ustar margamarga dbf.exceptions.Bof
Package dbf :: Module exceptions :: Class Bof
[hide private]

Class Bof

source code

              object --+            
                       |            
exceptions.BaseException --+        
                           |        
        exceptions.Exception --+    
                               |    
                      DbfWarning --+
                                   |
              object --+           |
                       |           |
exceptions.BaseException --+       |
                           |       |
        exceptions.Exception --+   |
                               |   |
        exceptions.StopIteration --+
                                   |
                                  Bof

Beginning of file reached

Instance Methods [hide private]
 
__init__(yo)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code

Inherited from exceptions.StopIteration: __new__

Inherited from exceptions.BaseException: __delattr__, __getattribute__, __getitem__, __getslice__, __reduce__, __repr__, __setattr__, __setstate__, __str__

Inherited from object: __hash__, __reduce_ex__

Class Variables [hide private]
  message = 'Beginning of file reached'
exception message
Properties [hide private]

Inherited from exceptions.BaseException: args

Inherited from object: __class__

Method Details [hide private]

__init__(yo)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old.tables._VfpMemo-class.html0000666000175100017510000002724311477216672021404 0ustar margamarga dbf.old.tables._VfpMemo
Package dbf :: Package old :: Module tables :: Class _VfpMemo
[hide private]

Class _VfpMemo

source code

object --+    
         |    
  _DbfMemo --+
             |
            _VfpMemo

Provides access to memo fields as dictionaries must override _init, _get_memo, and _put_memo to store memo contents to disk

Instance Methods [hide private]
 
_init(yo)
Visual Foxpro 6 specific
source code
 
_get_memo(yo, block)
retrieve memo contents from disk
source code
 
_put_memo(yo, data)
store memo contents to disk
source code

Inherited from _DbfMemo: __init__, get_memo, put_memo

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __repr__, __setattr__, __str__

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

_init(yo)

source code 

Visual Foxpro 6 specific

Overrides: _DbfMemo._init

_get_memo(yo, block)

source code 

retrieve memo contents from disk

Overrides: _DbfMemo._get_memo
(inherited documentation)

_put_memo(yo, data)

source code 

store memo contents to disk

Overrides: _DbfMemo._put_memo
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.tables.Db3Table-class.html0000666000175100017510000007302311477216672020474 0ustar margamarga dbf.tables.Db3Table
Package dbf :: Module tables :: Class Db3Table
[hide private]

Class Db3Table

source code

object --+    
         |    
  DbfTable --+
             |
            Db3Table

Provides an interface for working with dBase III tables.

Nested Classes [hide private]
  _memoClass
Provides access to memo fields as dictionaries must override _init, _get_memo, and _put_memo to store memo contents to disk

Inherited from DbfTable: DbfIterator

Inherited from DbfTable (private): _DbfLists, _Indexen, _MetaData, _Table, _TableHeader

Instance Methods [hide private]
 
_checkMemoIntegrity(yo)
dBase III specific
source code
 
_initializeFields(yo)
builds the FieldList of names, types, and descriptions
source code

Inherited from DbfTable: __contains__, __enter__, __exit__, __getattr__, __getitem__, __init__, __iter__, __len__, __nonzero__, __repr__, __str__, add_fields, append, bof, bottom, close, create_backup, create_index, current, delete_fields, eof, export, get_record, goto, is_decimal, is_memotype, new, next, open, pack, prev, query, reindex, rename_field, size, structure, top, type, zap

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __setattr__

Class Variables [hide private]
  _version = 'dBase III Plus'
  _versionabbv = 'db3'
  _fieldtypes = {'C': {'Blank': <type 'str'>, 'Init': <function ...
  _memoext = '.dbt'
  _memotypes = 'M',
  _yesMemoMask = '\x80'
  _noMemoMask = '\x7f'
  _fixed_fields = ('D', 'L', 'M')
  _variable_fields = ('C', 'N')
  _character_fields = ('C', 'M')
  _decimal_fields = ('N')
  _numeric_fields = ('N')
  _dbfTableHeader = '\x03\x00\x00\x00\x00\x00\x00\x00!\x00\x01\x...
  _dbfTableHeaderExtra = ''
  _supported_tables = ['\x03', '\x83']
  _read_only = False
  _meta_only = False
  _use_deleted = True

Inherited from DbfTable: backup, codepage, field_count, field_names, filename, last_update, memoname, record_length, record_number, supported_tables, use_deleted, version

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

_checkMemoIntegrity(yo)

source code 

dBase III specific

Overrides: DbfTable._checkMemoIntegrity

_initializeFields(yo)

source code 

builds the FieldList of names, types, and descriptions

Overrides: DbfTable._initializeFields

Class Variable Details [hide private]

_fieldtypes

Value:
{'C': {'Type': 'Character', 'Retrieve': io.retrieveCharacter, 'Update'\
: io.updateCharacter, 'Blank': str, 'Init': io.addCharacter}, 'D': {'T\
ype': 'Date', 'Retrieve': io.retrieveDate, 'Update': io.updateDate, 'B\
lank': Date.today, 'Init': io.addDate}, 'L': {'Type': 'Logical', 'Retr\
ieve': io.retrieveLogical, 'Update': io.updateLogical, 'Blank': bool, \
'Init': io.addLogical}, 'M': {'Type': 'Memo', 'Retrieve': io.retrieveM\
emo, 'Update': io.updateMemo, 'Blank': str, 'Init': io.addMemo}, 'N': \
{'Type': 'Numeric', 'Retrieve': io.retrieveNumeric, 'Update': io.updat\
...

_dbfTableHeader

Value:
'\x03\x00\x00\x00\x00\x00\x00\x00!\x00\x01\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00'

dbf-0.88.16/dbf/html/dbf.old.tables.VfpTable-class.html0000666000175100017510000010716611477216672021402 0ustar margamarga dbf.old.tables.VfpTable
Package dbf :: Package old :: Module tables :: Class VfpTable
[hide private]

Class VfpTable

source code

object --+    
         |    
  DbfTable --+
             |
            VfpTable

Provides an interface for working with Visual FoxPro 6 tables

Nested Classes [hide private]
  _memoClass
Provides access to memo fields as dictionaries must override _init, _get_memo, and _put_memo to store memo contents to disk

Inherited from DbfTable: DbfIterator

Inherited from DbfTable (private): _DbfLists, _Indexen, _MetaData, _Table, _TableHeader

Instance Methods [hide private]
 
_checkMemoIntegrity(yo)
dBase III specific
source code
 
_initializeFields(yo)
builds the FieldList of names, types, and descriptions
source code

Inherited from DbfTable: __contains__, __enter__, __exit__, __getattr__, __getitem__, __init__, __iter__, __len__, __nonzero__, __repr__, __str__, add_fields, append, bof, bottom, close, create_backup, create_index, current, delete_fields, eof, export, get_record, goto, is_decimal, is_memotype, new, next, open, pack, prev, query, reindex, rename_field, size, structure, top, type, zap

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __setattr__

Class Variables [hide private]
  _version = 'Visual Foxpro v6'
  _versionabbv = 'vfp'
  _fieldtypes = {'0': {'Blank': <type 'int'>, 'Init': None, 'Ret...
  _memoext = '.fpt'
  _memotypes = ('G', 'M', 'P')
  _yesMemoMask = '0'
  _noMemoMask = '0'
  _fixed_fields = ('B', 'D', 'G', 'I', 'L', 'M', 'P', 'T', 'Y')
  _variable_fields = ('C', 'F', 'N')
  _character_fields = ('C', 'M')
  _decimal_fields = ('F', 'N')
  _numeric_fields = ('B', 'F', 'I', 'N', 'Y')
  _supported_tables = ('0')
  _dbfTableHeader = '0\x00\x00\x00\x00\x00\x00\x00(\x01\x01\x00\...
  _dbfTableHeaderExtra = '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x...
  _use_deleted = True

Inherited from DbfTable: codepage, field_count, field_names, filename, last_update, memoname, record_length, record_number, supported_tables, use_deleted, version

Inherited from DbfTable (private): _backed_up, _meta_only, _read_only

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

_checkMemoIntegrity(yo)

source code 

dBase III specific

Overrides: DbfTable._checkMemoIntegrity
(inherited documentation)

_initializeFields(yo)

source code 

builds the FieldList of names, types, and descriptions

Overrides: DbfTable._initializeFields

Class Variable Details [hide private]

_fieldtypes

Value:
{'0': {'Blank': <type 'int'>,
       'Init': None,
       'Retrieve': <function unsupportedType at 0x00ECA130>,
       'Type': '_NullFlags',
       'Update': <function unsupportedType at 0x00ECA130>},
 'B': {'Blank': <type 'float'>,
       'Init': <function addVfpDouble at 0x00ECA830>,
       'Retrieve': <function retrieveDouble at 0x00ECA2F0>,
...

_dbfTableHeader

Value:
'0\x00\x00\x00\x00\x00\x00\x00(\x01\x01\x00\x00\x00\x00\x00\x00\x00\x0\
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00'

_dbfTableHeaderExtra

Value:
'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0\
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0\
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0\
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0\
...

dbf-0.88.16/dbf/html/dbf.old.tables._DbfMemo-class.html0000666000175100017510000002751111477216672021342 0ustar margamarga dbf.old.tables._DbfMemo
Package dbf :: Package old :: Module tables :: Class _DbfMemo
[hide private]

Class _DbfMemo

source code

object --+
         |
        _DbfMemo
Known Subclasses:

Provides access to memo fields as dictionaries must override _init, _get_memo, and _put_memo to store memo contents to disk

Instance Methods [hide private]
 
_init(yo)
initialize disk file usage
source code
 
_get_memo(yo, block)
retrieve memo contents from disk
source code
 
_put_memo(yo, data)
store memo contents to disk
source code
 
__init__(yo, meta)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code
 
get_memo(yo, block, field)
gets the memo in block
source code
 
put_memo(yo, data)
stores data in memo file, returns block number
source code

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __repr__, __setattr__, __str__

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

__init__(yo, meta)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__

dbf-0.88.16/dbf/html/dbf.tables.DbfTable._Table-class.html0000666000175100017510000002375411477216672021752 0ustar margamarga dbf.tables.DbfTable._Table
Package dbf :: Module tables :: Class DbfTable :: Class _Table
[hide private]

Class _Table

source code

object --+
         |
        DbfTable._Table

implements the weakref table for records

Instance Methods [hide private]
 
__init__(yo, count, meta)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code
 
__getitem__(yo, index) source code
 
append(yo, record) source code
 
clear(yo) source code

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __repr__, __setattr__, __str__

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

__init__(yo, count, meta)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf-module.html0000666000175100017510000006231211477216670016021 0ustar margamarga dbf
Package dbf
[hide private]

Package dbf

source code

Copyright

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

THIS SOFTWARE IS PROVIDED BY Ad-Mail, Inc ''AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Ad-Mail, Inc BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Summary

Python package for reading/writing dBase III and VFP 6 tables and memos

The entire table is read into memory, and all operations occur on the in-memory table, with data changes being written to disk as they occur.

Goals: programming style with databases

NOTE: Of the VFP data types, auto-increment and null settings are not implemented.
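A short round-trip sketch built only from calls documented in these pages (Table, append, first_record, info, export, close); the table name, field definitions, and sample values are illustrative, and the field-spec syntax follows the documented "Name Type(Length,Decimals)" form:

    import dbf

    table = dbf.Table('people', 'name C(25); age N(3,0)', dbf_type='db3')
    table.append({'name': 'John Doe', 'age': 42})    # fills fields from the dict

    dbf.first_record(table)                          # prints the appended record
    dbf.info(table)                                  # prints basic table info
    table.export(filename='people.csv', format='csv')
    table.close()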

Submodules [hide private]

Functions [hide private]
 
Table(filename, field_specs='', memo_size=128, ignore_memos=False, read_only=False, keep_memos=False, meta_only=False, dbf_type=None, codepage=None)
returns an open table of the correct dbf_type, or creates it if field_specs is given
source code
 
index(sequence)
returns integers 0 - len(sequence)
source code
 
guess_table_type(filename) source code
 
table_type(filename)
returns text representation of a table's dbf version
source code
 
add_fields(table, field_specs)
adds fields to an existing table
source code
 
delete_fields(table, field_names)
deletes fields from an existing table
source code
 
export(table, filename='', fields='', format='csv', header=True)
creates a csv or tab-delimited file from an existing table
source code
 
first_record(table)
prints the first record of a table
source code
 
from_csv(csvfile, to_disk=False, filename=None, field_names=None, extra_fields=None, dbf_type='db3', memo_size=64, min_field_size=1)
creates a Character table from a csv file to_disk will create a table with the same name filename will be used if provided field_names default to f0, f1, f2, etc, unless specified (list) extra_fields can be used to add additional fields -- should be normal field specifiers (list)
source code
 
get_fields(table)
returns the list of field names of a table
source code
 
info(table)
prints table info
source code
 
rename_field(table, oldfield, newfield)
renames a field in a table
source code
 
structure(table, field=None)
returns the definition of a field (or all fields)
source code
 
hex_dump(records)
just what it says ;)
source code
Variables [hide private]
  version = (0, 88, 16)
  default_type = 'db3'
  sql_user_functions = {}

Imports: os, csv, Date, DateTime, Time, DbfWarning, Bof, Eof, DbfError, DataOverflow, FieldMissing, DoNotIndex, DbfTable, Db3Table, VfpTable, FpTable, List, DbfCsv, sql, ascii, codepage, encoding, version_map, _io, dates, exceptions, tables


dbf-0.88.16/dbf/html/dbf.old.exceptions-module.html0000666000175100017510000001467211477216670020764 0ustar margamarga dbf.old.exceptions
Package dbf :: Package old :: Module exceptions
[hide private]

Module exceptions

source code

warnings and errors

Classes [hide private]
  DbfError
Fatal errors elicit this response.
  DataOverflow
Data too large for field
  FieldMissing
Field does not exist in table
  NonUnicode
Data for table not in unicode
  DbfWarning
Normal operations elicit this response
  Eof
End of file reached
  Bof
Beginning of file reached
  DoNotIndex
Returned by indexing functions to suppress a record from becoming part of the index
dbf-0.88.16/dbf/html/api-objects.txt0000666000175100017510000032764111477216676016075 0ustar margamargadbf dbf-module.html dbf.rename_field dbf-module.html#rename_field dbf.codepage dbf.tables-module.html#codepage dbf.sql dbf.tables-module.html#sql dbf.delete_fields dbf-module.html#delete_fields dbf.export dbf-module.html#export dbf.sql_user_functions dbf-module.html#sql_user_functions dbf.ascii dbf.tables-module.html#ascii dbf.from_csv dbf-module.html#from_csv dbf.index dbf-module.html#index dbf.get_fields dbf-module.html#get_fields dbf.encoding dbf.tables-module.html#encoding dbf.version dbf-module.html#version dbf.structure dbf-module.html#structure dbf.table_type dbf-module.html#table_type dbf.first_record dbf-module.html#first_record dbf.info dbf-module.html#info dbf.default_type dbf-module.html#default_type dbf.guess_table_type dbf-module.html#guess_table_type dbf.hex_dump dbf-module.html#hex_dump dbf.add_fields dbf-module.html#add_fields dbf.Table dbf-module.html#Table dbf._io dbf._io-module.html dbf._io.packStr dbf._io-module.html#packStr dbf._io.retrieveMemo dbf._io-module.html#retrieveMemo dbf._io.retrieveNumeric dbf._io-module.html#retrieveNumeric dbf._io.addVfpCurrency dbf._io-module.html#addVfpCurrency dbf._io.retrieveCharacter dbf._io-module.html#retrieveCharacter dbf._io.updateVfpMemo dbf._io-module.html#updateVfpMemo dbf._io.updateDouble dbf._io-module.html#updateDouble dbf._io.retrieveDouble dbf._io-module.html#retrieveDouble dbf._io.packLongInt dbf._io-module.html#packLongInt dbf._io.updateVfpDateTime dbf._io-module.html#updateVfpDateTime dbf._io.unsupportedType dbf._io-module.html#unsupportedType dbf._io.unpackShortInt dbf._io-module.html#unpackShortInt dbf._io.packShortInt dbf._io-module.html#packShortInt dbf._io.addLogical dbf._io-module.html#addLogical dbf._io.updateNumeric dbf._io-module.html#updateNumeric dbf._io.addVfpDouble dbf._io-module.html#addVfpDouble dbf._io.unpackDate dbf._io-module.html#unpackDate dbf._io.addVfpDateTime dbf._io-module.html#addVfpDateTime dbf._io.updateDate dbf._io-module.html#updateDate dbf._io.VFPTIME dbf._io-module.html#VFPTIME dbf._io.retrieveVfpMemo dbf._io-module.html#retrieveVfpMemo dbf._io.retrieveLogical dbf._io-module.html#retrieveLogical dbf._io.packDate dbf._io-module.html#packDate dbf._io.addVfpNumeric dbf._io-module.html#addVfpNumeric dbf._io.retrieveDate dbf._io-module.html#retrieveDate dbf._io.addNumeric dbf._io-module.html#addNumeric dbf._io.unpackStr dbf._io-module.html#unpackStr dbf._io.updateCharacter dbf._io-module.html#updateCharacter dbf._io.addMemo dbf._io-module.html#addMemo dbf._io.addVfpMemo dbf._io-module.html#addVfpMemo dbf._io.retrieveCurrency dbf._io-module.html#retrieveCurrency dbf._io.updateCurrency dbf._io-module.html#updateCurrency dbf._io.updateLogical dbf._io-module.html#updateLogical dbf._io.updateMemo dbf._io-module.html#updateMemo dbf._io.updateInteger dbf._io-module.html#updateInteger dbf._io.addVfpInteger dbf._io-module.html#addVfpInteger dbf._io.addDate dbf._io-module.html#addDate dbf._io.convertToBool dbf._io-module.html#convertToBool dbf._io.retrieveVfpDateTime dbf._io-module.html#retrieveVfpDateTime dbf._io.retrieveInteger dbf._io-module.html#retrieveInteger dbf._io.addCharacter dbf._io-module.html#addCharacter dbf._io.unpackLongInt dbf._io-module.html#unpackLongInt dbf.dates dbf.dates-module.html dbf.exceptions dbf.exceptions-module.html dbf.old dbf.old-module.html dbf.old.rename_field dbf.old-module.html#rename_field dbf.old.codepage dbf.tables-module.html#codepage dbf.old.sql 
dbf.tables-module.html#sql dbf.old.delete_fields dbf.old-module.html#delete_fields dbf.old.export dbf.old-module.html#export dbf.old.ascii dbf.tables-module.html#ascii dbf.old.from_csv dbf.old-module.html#from_csv dbf.old.index dbf.old-module.html#index dbf.old.get_fields dbf.old-module.html#get_fields dbf.old.encoding dbf.tables-module.html#encoding dbf.old.version dbf.old-module.html#version dbf.old.structure dbf.old-module.html#structure dbf.old.table_type dbf.old-module.html#table_type dbf.old.first_record dbf.old-module.html#first_record dbf.old.info dbf.old-module.html#info dbf.old.guess_table_type dbf.old-module.html#guess_table_type dbf.old.hex_dump dbf.old-module.html#hex_dump dbf.old.add_fields dbf.old-module.html#add_fields dbf.old.Table dbf.old-module.html#Table dbf.old._io dbf.old._io-module.html dbf.old._io.packStr dbf.old._io-module.html#packStr dbf.old._io.retrieveMemo dbf.old._io-module.html#retrieveMemo dbf.old._io.retrieveNumeric dbf.old._io-module.html#retrieveNumeric dbf.old._io.addVfpCurrency dbf.old._io-module.html#addVfpCurrency dbf.old._io.retrieveCharacter dbf.old._io-module.html#retrieveCharacter dbf.old._io.updateVfpMemo dbf.old._io-module.html#updateVfpMemo dbf.old._io.updateDouble dbf.old._io-module.html#updateDouble dbf.old._io.retrieveDouble dbf.old._io-module.html#retrieveDouble dbf.old._io.updateVfpDateTime dbf.old._io-module.html#updateVfpDateTime dbf.old._io.unsupportedType dbf.old._io-module.html#unsupportedType dbf.old._io.unpackShortInt dbf.old._io-module.html#unpackShortInt dbf.old._io.packShortInt dbf.old._io-module.html#packShortInt dbf.old._io.addLogical dbf.old._io-module.html#addLogical dbf.old._io.updateNumeric dbf.old._io-module.html#updateNumeric dbf.old._io.addVfpDouble dbf.old._io-module.html#addVfpDouble dbf.old._io.unpackDate dbf.old._io-module.html#unpackDate dbf.old._io.addVfpDateTime dbf.old._io-module.html#addVfpDateTime dbf.old._io.updateDate dbf.old._io-module.html#updateDate dbf.old._io.VFPTIME dbf.old._io-module.html#VFPTIME dbf.old._io.retrieveVfpMemo dbf.old._io-module.html#retrieveVfpMemo dbf.old._io.retrieveLogical dbf.old._io-module.html#retrieveLogical dbf.old._io.packDate dbf.old._io-module.html#packDate dbf.old._io.addVfpNumeric dbf.old._io-module.html#addVfpNumeric dbf.old._io.retrieveDate dbf.old._io-module.html#retrieveDate dbf.old._io.addNumeric dbf.old._io-module.html#addNumeric dbf.old._io.unpackStr dbf.old._io-module.html#unpackStr dbf.old._io.updateCharacter dbf.old._io-module.html#updateCharacter dbf.old._io.addVfpMemo dbf.old._io-module.html#addVfpMemo dbf.old._io.addMemo dbf.old._io-module.html#addMemo dbf.old._io.packLongInt dbf.old._io-module.html#packLongInt dbf.old._io.retrieveCurrency dbf.old._io-module.html#retrieveCurrency dbf.old._io.updateCurrency dbf.old._io-module.html#updateCurrency dbf.old._io.updateLogical dbf.old._io-module.html#updateLogical dbf.old._io.updateMemo dbf.old._io-module.html#updateMemo dbf.old._io.updateInteger dbf.old._io-module.html#updateInteger dbf.old._io.addVfpInteger dbf.old._io-module.html#addVfpInteger dbf.old._io.addDate dbf.old._io-module.html#addDate dbf.old._io.convertToBool dbf.old._io-module.html#convertToBool dbf.old._io.retrieveVfpDateTime dbf.old._io-module.html#retrieveVfpDateTime dbf.old._io.retrieveInteger dbf.old._io-module.html#retrieveInteger dbf.old._io.addCharacter dbf.old._io-module.html#addCharacter dbf.old._io.unpackLongInt dbf.old._io-module.html#unpackLongInt dbf.old.dates dbf.old.dates-module.html dbf.old.exceptions dbf.old.exceptions-module.html 
dbf.old.tables dbf.old.tables-module.html dbf.old.tables._nop dbf.old.tables-module.html#_nop dbf.old.tables.codepage dbf.old.tables-module.html#codepage dbf.old.tables.ascii dbf.old.tables-module.html#ascii dbf.old.tables._normalize_tuples dbf.old.tables-module.html#_normalize_tuples dbf.old.tables._codepage_lookup dbf.old.tables-module.html#_codepage_lookup dbf.old.tables.return_ascii dbf.old.tables-module.html#return_ascii dbf.old.tables.input_decoding dbf.old.tables-module.html#input_decoding dbf.old.tables.version_map dbf.old.tables-module.html#version_map dbf.old.tables.encoding dbf.old.tables-module.html#encoding dbf.old.tables.default_codepage dbf.old.tables-module.html#default_codepage dbf.old.tables.sql dbf.old.tables-module.html#sql dbf.old.tables.code_pages dbf.old.tables-module.html#code_pages dbf.tables dbf.tables-module.html dbf.tables._nop dbf.tables-module.html#_nop dbf.tables.codepage dbf.tables-module.html#codepage dbf.tables.ascii dbf.tables-module.html#ascii dbf.tables._normalize_tuples dbf.tables-module.html#_normalize_tuples dbf.tables.sql_cmd dbf.tables-module.html#sql_cmd dbf.tables._codepage_lookup dbf.tables-module.html#_codepage_lookup dbf.tables.sql_functions dbf.tables-module.html#sql_functions dbf.tables.return_ascii dbf.tables-module.html#return_ascii dbf.tables.input_decoding dbf.tables-module.html#input_decoding dbf.tables.sql_criteria dbf.tables-module.html#sql_criteria dbf.tables.encoding dbf.tables-module.html#encoding dbf.tables.default_codepage dbf.tables-module.html#default_codepage dbf.tables.version_map dbf.tables-module.html#version_map dbf.tables.sql dbf.tables-module.html#sql dbf.tables.code_pages dbf.tables-module.html#code_pages dbf.dates.Date dbf.dates.Date-class.html dbf.dates.Date.__str__ dbf.dates.Date-class.html#__str__ dbf.dates.Date.__radd__ dbf.dates.Date-class.html#__radd__ dbf.dates.Date.__rsub__ dbf.dates.Date-class.html#__rsub__ dbf.dates.Date.__lt__ dbf.dates.Date-class.html#__lt__ dbf.dates.Date.__new__ dbf.dates.Date-class.html#__new__ dbf.dates.Date.min dbf.dates.Date-class.html#min dbf.dates.Date.__getattr__ dbf.dates.Date-class.html#__getattr__ dbf.dates.Date.ymd dbf.dates.Date-class.html#ymd dbf.dates.Date.today dbf.dates.Date-class.html#today dbf.dates.Date.__repr__ dbf.dates.Date-class.html#__repr__ dbf.dates.Date.__ne__ dbf.dates.Date-class.html#__ne__ dbf.dates.Date.max dbf.dates.Date-class.html#max dbf.dates.Date.fromymd dbf.dates.Date-class.html#fromymd dbf.dates.Date.__add__ dbf.dates.Date-class.html#__add__ dbf.dates.Date.__gt__ dbf.dates.Date-class.html#__gt__ dbf.dates.Date.date dbf.dates.Date-class.html#date dbf.dates.Date.__eq__ dbf.dates.Date-class.html#__eq__ dbf.dates.Date._date dbf.dates.Date-class.html#_date dbf.dates.Date.__nonzero__ dbf.dates.Date-class.html#__nonzero__ dbf.dates.Date.fromordinal dbf.dates.Date-class.html#fromordinal dbf.dates.Date.fromtimestamp dbf.dates.Date-class.html#fromtimestamp dbf.dates.Date.__le__ dbf.dates.Date-class.html#__le__ dbf.dates.Date.strftime dbf.dates.Date-class.html#strftime dbf.dates.Date.__hash__ dbf.dates.Date-class.html#__hash__ dbf.dates.Date.__sub__ dbf.dates.Date-class.html#__sub__ dbf.dates.Date.__ge__ dbf.dates.Date-class.html#__ge__ dbf.dates.DateTime dbf.dates.DateTime-class.html dbf.dates.DateTime.__str__ dbf.dates.DateTime-class.html#__str__ dbf.dates.DateTime.datetime dbf.dates.DateTime-class.html#datetime dbf.dates.DateTime._datetime dbf.dates.DateTime-class.html#_datetime dbf.dates.DateTime.__rsub__ dbf.dates.DateTime-class.html#__rsub__ 
dbf.dates.DateTime.__lt__ dbf.dates.DateTime-class.html#__lt__ dbf.dates.DateTime.utcnow dbf.dates.DateTime-class.html#utcnow dbf.dates.DateTime.now dbf.dates.DateTime-class.html#now dbf.dates.DateTime.__new__ dbf.dates.DateTime-class.html#__new__ dbf.dates.DateTime.min dbf.dates.DateTime-class.html#min dbf.dates.DateTime.__getattr__ dbf.dates.DateTime-class.html#__getattr__ dbf.dates.DateTime.combine dbf.dates.DateTime-class.html#combine dbf.dates.DateTime.today dbf.dates.DateTime-class.html#today dbf.dates.DateTime.__ne__ dbf.dates.DateTime-class.html#__ne__ dbf.dates.DateTime.max dbf.dates.DateTime-class.html#max dbf.dates.DateTime.__add__ dbf.dates.DateTime-class.html#__add__ dbf.dates.DateTime.__gt__ dbf.dates.DateTime-class.html#__gt__ dbf.dates.DateTime.date dbf.dates.DateTime-class.html#date dbf.dates.DateTime.__radd__ dbf.dates.DateTime-class.html#__radd__ dbf.dates.DateTime.__eq__ dbf.dates.DateTime-class.html#__eq__ dbf.dates.DateTime.__nonzero__ dbf.dates.DateTime-class.html#__nonzero__ dbf.dates.DateTime.fromordinal dbf.dates.DateTime-class.html#fromordinal dbf.dates.DateTime.fromtimestamp dbf.dates.DateTime-class.html#fromtimestamp dbf.dates.DateTime.__le__ dbf.dates.DateTime-class.html#__le__ dbf.dates.DateTime.__repr__ dbf.dates.DateTime-class.html#__repr__ dbf.dates.DateTime.time dbf.dates.DateTime-class.html#time dbf.dates.DateTime.__hash__ dbf.dates.DateTime-class.html#__hash__ dbf.dates.DateTime.__sub__ dbf.dates.DateTime-class.html#__sub__ dbf.dates.DateTime.__ge__ dbf.dates.DateTime-class.html#__ge__ dbf.dates.Time dbf.dates.Time-class.html dbf.dates.Time.__str__ dbf.dates.Time-class.html#__str__ dbf.dates.Time.__radd__ dbf.dates.Time-class.html#__radd__ dbf.dates.Time.__rsub__ dbf.dates.Time-class.html#__rsub__ dbf.dates.Time.__lt__ dbf.dates.Time-class.html#__lt__ dbf.dates.Time.__new__ dbf.dates.Time-class.html#__new__ dbf.dates.Time.min dbf.dates.Time-class.html#min dbf.dates.Time.__getattr__ dbf.dates.Time-class.html#__getattr__ dbf.dates.Time.__ne__ dbf.dates.Time-class.html#__ne__ dbf.dates.Time.max dbf.dates.Time-class.html#max dbf.dates.Time.__add__ dbf.dates.Time-class.html#__add__ dbf.dates.Time.__gt__ dbf.dates.Time-class.html#__gt__ dbf.dates.Time.__eq__ dbf.dates.Time-class.html#__eq__ dbf.dates.Time.__nonzero__ dbf.dates.Time-class.html#__nonzero__ dbf.dates.Time._time dbf.dates.Time-class.html#_time dbf.dates.Time.__le__ dbf.dates.Time-class.html#__le__ dbf.dates.Time.__repr__ dbf.dates.Time-class.html#__repr__ dbf.dates.Time.__hash__ dbf.dates.Time-class.html#__hash__ dbf.dates.Time.__sub__ dbf.dates.Time-class.html#__sub__ dbf.dates.Time.__ge__ dbf.dates.Time-class.html#__ge__ dbf.exceptions.Bof dbf.exceptions.Bof-class.html dbf.exceptions.Bof.message dbf.exceptions.Bof-class.html#message dbf.exceptions.Bof.__init__ dbf.exceptions.Bof-class.html#__init__ dbf.exceptions.DataOverflow dbf.exceptions.DataOverflow-class.html dbf.exceptions.DataOverflow.__init__ dbf.exceptions.DataOverflow-class.html#__init__ dbf.exceptions.DbfError dbf.exceptions.DbfError-class.html dbf.exceptions.DbfWarning dbf.exceptions.DbfWarning-class.html dbf.exceptions.DoNotIndex dbf.exceptions.DoNotIndex-class.html dbf.exceptions.DoNotIndex.message dbf.exceptions.DoNotIndex-class.html#message dbf.exceptions.DoNotIndex.__init__ dbf.exceptions.DoNotIndex-class.html#__init__ dbf.exceptions.Eof dbf.exceptions.Eof-class.html dbf.exceptions.Eof.message dbf.exceptions.Eof-class.html#message dbf.exceptions.Eof.__init__ dbf.exceptions.Eof-class.html#__init__ dbf.exceptions.FieldMissing 
dbf.exceptions.FieldMissing-class.html dbf.exceptions.FieldMissing.__init__ dbf.exceptions.FieldMissing-class.html#__init__ dbf.exceptions.NonUnicode dbf.exceptions.NonUnicode-class.html dbf.exceptions.NonUnicode.__init__ dbf.exceptions.NonUnicode-class.html#__init__ dbf.old.dates.Date dbf.old.dates.Date-class.html dbf.old.dates.Date.__str__ dbf.old.dates.Date-class.html#__str__ dbf.old.dates.Date.__radd__ dbf.old.dates.Date-class.html#__radd__ dbf.old.dates.Date.__rsub__ dbf.old.dates.Date-class.html#__rsub__ dbf.old.dates.Date.__lt__ dbf.old.dates.Date-class.html#__lt__ dbf.old.dates.Date.__new__ dbf.old.dates.Date-class.html#__new__ dbf.old.dates.Date.min dbf.old.dates.Date-class.html#min dbf.old.dates.Date.__getattr__ dbf.old.dates.Date-class.html#__getattr__ dbf.old.dates.Date.ymd dbf.old.dates.Date-class.html#ymd dbf.old.dates.Date.today dbf.old.dates.Date-class.html#today dbf.old.dates.Date.__repr__ dbf.old.dates.Date-class.html#__repr__ dbf.old.dates.Date.__ne__ dbf.old.dates.Date-class.html#__ne__ dbf.old.dates.Date.max dbf.old.dates.Date-class.html#max dbf.old.dates.Date.fromymd dbf.old.dates.Date-class.html#fromymd dbf.old.dates.Date.__add__ dbf.old.dates.Date-class.html#__add__ dbf.old.dates.Date.__gt__ dbf.old.dates.Date-class.html#__gt__ dbf.old.dates.Date.date dbf.old.dates.Date-class.html#date dbf.old.dates.Date.__eq__ dbf.old.dates.Date-class.html#__eq__ dbf.old.dates.Date._date dbf.old.dates.Date-class.html#_date dbf.old.dates.Date.__nonzero__ dbf.old.dates.Date-class.html#__nonzero__ dbf.old.dates.Date.fromordinal dbf.old.dates.Date-class.html#fromordinal dbf.old.dates.Date.fromtimestamp dbf.old.dates.Date-class.html#fromtimestamp dbf.old.dates.Date.__le__ dbf.old.dates.Date-class.html#__le__ dbf.old.dates.Date.strftime dbf.old.dates.Date-class.html#strftime dbf.old.dates.Date.__hash__ dbf.old.dates.Date-class.html#__hash__ dbf.old.dates.Date.__sub__ dbf.old.dates.Date-class.html#__sub__ dbf.old.dates.Date.__ge__ dbf.old.dates.Date-class.html#__ge__ dbf.old.dates.DateTime dbf.old.dates.DateTime-class.html dbf.old.dates.DateTime.__str__ dbf.old.dates.DateTime-class.html#__str__ dbf.old.dates.DateTime.datetime dbf.old.dates.DateTime-class.html#datetime dbf.old.dates.DateTime._datetime dbf.old.dates.DateTime-class.html#_datetime dbf.old.dates.DateTime.__rsub__ dbf.old.dates.DateTime-class.html#__rsub__ dbf.old.dates.DateTime.__lt__ dbf.old.dates.DateTime-class.html#__lt__ dbf.old.dates.DateTime.utcnow dbf.old.dates.DateTime-class.html#utcnow dbf.old.dates.DateTime.now dbf.old.dates.DateTime-class.html#now dbf.old.dates.DateTime.__new__ dbf.old.dates.DateTime-class.html#__new__ dbf.old.dates.DateTime.min dbf.old.dates.DateTime-class.html#min dbf.old.dates.DateTime.__getattr__ dbf.old.dates.DateTime-class.html#__getattr__ dbf.old.dates.DateTime.combine dbf.old.dates.DateTime-class.html#combine dbf.old.dates.DateTime.today dbf.old.dates.DateTime-class.html#today dbf.old.dates.DateTime.__ne__ dbf.old.dates.DateTime-class.html#__ne__ dbf.old.dates.DateTime.max dbf.old.dates.DateTime-class.html#max dbf.old.dates.DateTime.__add__ dbf.old.dates.DateTime-class.html#__add__ dbf.old.dates.DateTime.__gt__ dbf.old.dates.DateTime-class.html#__gt__ dbf.old.dates.DateTime.date dbf.old.dates.DateTime-class.html#date dbf.old.dates.DateTime.__radd__ dbf.old.dates.DateTime-class.html#__radd__ dbf.old.dates.DateTime.__eq__ dbf.old.dates.DateTime-class.html#__eq__ dbf.old.dates.DateTime.__nonzero__ dbf.old.dates.DateTime-class.html#__nonzero__ dbf.old.dates.DateTime.fromordinal 
dbf.old.dates.DateTime-class.html#fromordinal dbf.old.dates.DateTime.fromtimestamp dbf.old.dates.DateTime-class.html#fromtimestamp dbf.old.dates.DateTime.__le__ dbf.old.dates.DateTime-class.html#__le__ dbf.old.dates.DateTime.__repr__ dbf.old.dates.DateTime-class.html#__repr__ dbf.old.dates.DateTime.time dbf.old.dates.DateTime-class.html#time dbf.old.dates.DateTime.__hash__ dbf.old.dates.DateTime-class.html#__hash__ dbf.old.dates.DateTime.__sub__ dbf.old.dates.DateTime-class.html#__sub__ dbf.old.dates.DateTime.__ge__ dbf.old.dates.DateTime-class.html#__ge__ dbf.old.dates.Time dbf.old.dates.Time-class.html dbf.old.dates.Time.__str__ dbf.old.dates.Time-class.html#__str__ dbf.old.dates.Time.__radd__ dbf.old.dates.Time-class.html#__radd__ dbf.old.dates.Time.__rsub__ dbf.old.dates.Time-class.html#__rsub__ dbf.old.dates.Time.__lt__ dbf.old.dates.Time-class.html#__lt__ dbf.old.dates.Time.__new__ dbf.old.dates.Time-class.html#__new__ dbf.old.dates.Time.min dbf.old.dates.Time-class.html#min dbf.old.dates.Time.__getattr__ dbf.old.dates.Time-class.html#__getattr__ dbf.old.dates.Time.__ne__ dbf.old.dates.Time-class.html#__ne__ dbf.old.dates.Time.max dbf.old.dates.Time-class.html#max dbf.old.dates.Time.__add__ dbf.old.dates.Time-class.html#__add__ dbf.old.dates.Time.__gt__ dbf.old.dates.Time-class.html#__gt__ dbf.old.dates.Time.__eq__ dbf.old.dates.Time-class.html#__eq__ dbf.old.dates.Time.__nonzero__ dbf.old.dates.Time-class.html#__nonzero__ dbf.old.dates.Time._time dbf.old.dates.Time-class.html#_time dbf.old.dates.Time.__le__ dbf.old.dates.Time-class.html#__le__ dbf.old.dates.Time.__repr__ dbf.old.dates.Time-class.html#__repr__ dbf.old.dates.Time.__hash__ dbf.old.dates.Time-class.html#__hash__ dbf.old.dates.Time.__sub__ dbf.old.dates.Time-class.html#__sub__ dbf.old.dates.Time.__ge__ dbf.old.dates.Time-class.html#__ge__ dbf.old.exceptions.Bof dbf.old.exceptions.Bof-class.html dbf.old.exceptions.Bof.message dbf.old.exceptions.Bof-class.html#message dbf.old.exceptions.Bof.__init__ dbf.old.exceptions.Bof-class.html#__init__ dbf.old.exceptions.DataOverflow dbf.old.exceptions.DataOverflow-class.html dbf.old.exceptions.DataOverflow.__init__ dbf.old.exceptions.DataOverflow-class.html#__init__ dbf.old.exceptions.DbfError dbf.old.exceptions.DbfError-class.html dbf.old.exceptions.DbfWarning dbf.old.exceptions.DbfWarning-class.html dbf.old.exceptions.DoNotIndex dbf.old.exceptions.DoNotIndex-class.html dbf.old.exceptions.DoNotIndex.message dbf.old.exceptions.DoNotIndex-class.html#message dbf.old.exceptions.DoNotIndex.__init__ dbf.old.exceptions.DoNotIndex-class.html#__init__ dbf.old.exceptions.Eof dbf.old.exceptions.Eof-class.html dbf.old.exceptions.Eof.message dbf.old.exceptions.Eof-class.html#message dbf.old.exceptions.Eof.__init__ dbf.old.exceptions.Eof-class.html#__init__ dbf.old.exceptions.FieldMissing dbf.old.exceptions.FieldMissing-class.html dbf.old.exceptions.FieldMissing.__init__ dbf.old.exceptions.FieldMissing-class.html#__init__ dbf.old.exceptions.NonUnicode dbf.old.exceptions.NonUnicode-class.html dbf.old.exceptions.NonUnicode.__init__ dbf.old.exceptions.NonUnicode-class.html#__init__ dbf.old.tables.Db3Table dbf.old.tables.Db3Table-class.html dbf.old.tables.DbfTable.__str__ dbf.old.tables.DbfTable-class.html#__str__ dbf.old.tables.DbfTable.supported_tables dbf.old.tables.DbfTable-class.html#supported_tables dbf.old.tables.DbfTable._backed_up dbf.old.tables.DbfTable-class.html#_backed_up dbf.old.tables.Db3Table._yesMemoMask dbf.old.tables.Db3Table-class.html#_yesMemoMask dbf.old.tables.DbfTable.query 
dbf.old.tables.DbfTable-class.html#query dbf.old.tables.DbfTable.last_update dbf.old.tables.DbfTable-class.html#last_update dbf.old.tables.Db3Table._meta_only dbf.old.tables.Db3Table-class.html#_meta_only dbf.old.tables.DbfTable.__enter__ dbf.old.tables.DbfTable-class.html#__enter__ dbf.old.tables.DbfTable.__exit__ dbf.old.tables.DbfTable-class.html#__exit__ dbf.old.tables.DbfTable.__getitem__ dbf.old.tables.DbfTable-class.html#__getitem__ dbf.old.tables.Db3Table._fieldtypes dbf.old.tables.Db3Table-class.html#_fieldtypes dbf.old.tables.DbfTable._DbfLists dbf.old.tables.DbfTable._DbfLists-class.html dbf.old.tables.Db3Table._decimal_fields dbf.old.tables.Db3Table-class.html#_decimal_fields dbf.old.tables.DbfTable.use_deleted dbf.old.tables.DbfTable-class.html#use_deleted dbf.old.tables.DbfTable.DbfIterator dbf.old.tables.DbfTable.DbfIterator-class.html dbf.old.tables.DbfTable.__nonzero__ dbf.old.tables.DbfTable-class.html#__nonzero__ dbf.old.tables.Db3Table._character_fields dbf.old.tables.Db3Table-class.html#_character_fields dbf.old.tables.DbfTable._list_fields dbf.old.tables.DbfTable-class.html#_list_fields dbf.old.tables.DbfTable.is_memotype dbf.old.tables.DbfTable-class.html#is_memotype dbf.old.tables.Db3Table._memoext dbf.old.tables.Db3Table-class.html#_memoext dbf.old.tables.Db3Table._dbfTableHeader dbf.old.tables.Db3Table-class.html#_dbfTableHeader dbf.old.tables.DbfTable.export dbf.old.tables.DbfTable-class.html#export dbf.old.tables.DbfTable.close dbf.old.tables.DbfTable-class.html#close dbf.old.tables.DbfTable.append dbf.old.tables.DbfTable-class.html#append dbf.old.tables.DbfTable.rename_field dbf.old.tables.DbfTable-class.html#rename_field dbf.old.tables.Db3Table._use_deleted dbf.old.tables.Db3Table-class.html#_use_deleted dbf.old.tables.DbfTable.__contains__ dbf.old.tables.DbfTable-class.html#__contains__ dbf.old.tables.DbfTable.__getattr__ dbf.old.tables.DbfTable-class.html#__getattr__ dbf.old.tables.DbfTable.filename dbf.old.tables.DbfTable-class.html#filename dbf.old.tables.DbfTable.current dbf.old.tables.DbfTable-class.html#current dbf.old.tables.DbfTable.version dbf.old.tables.DbfTable-class.html#version dbf.old.tables.DbfTable.new dbf.old.tables.DbfTable-class.html#new dbf.old.tables.Db3Table._read_only dbf.old.tables.Db3Table-class.html#_read_only dbf.old.tables.DbfTable.goto dbf.old.tables.DbfTable-class.html#goto dbf.old.tables.DbfTable.field_count dbf.old.tables.DbfTable-class.html#field_count dbf.old.tables.Db3Table._versionabbv dbf.old.tables.Db3Table-class.html#_versionabbv dbf.old.tables.DbfTable._loadtable dbf.old.tables.DbfTable-class.html#_loadtable dbf.old.tables.Db3Table._memotypes dbf.old.tables.Db3Table-class.html#_memotypes dbf.old.tables.Db3Table._version dbf.old.tables.Db3Table-class.html#_version dbf.old.tables.DbfTable.add_fields dbf.old.tables.DbfTable-class.html#add_fields dbf.old.tables.DbfTable.__repr__ dbf.old.tables.DbfTable-class.html#__repr__ dbf.old.tables.DbfTable._Indexen dbf.old.tables.DbfTable._Indexen-class.html dbf.old.tables.DbfTable.pack dbf.old.tables.DbfTable-class.html#pack dbf.old.tables.DbfTable.bof dbf.old.tables.DbfTable-class.html#bof dbf.old.tables.DbfTable.reindex dbf.old.tables.DbfTable-class.html#reindex dbf.old.tables.Db3Table._numeric_fields dbf.old.tables.Db3Table-class.html#_numeric_fields dbf.old.tables.DbfTable.codepage dbf.old.tables.DbfTable-class.html#codepage dbf.old.tables.DbfTable.delete_fields dbf.old.tables.DbfTable-class.html#delete_fields dbf.old.tables.Db3Table._memoClass dbf.old.tables._Db3Memo-class.html 
dbf.old.tables.DbfTable._TableHeader dbf.old.tables.DbfTable._TableHeader-class.html dbf.old.tables.DbfTable.open dbf.old.tables.DbfTable-class.html#open dbf.old.tables.DbfTable.zap dbf.old.tables.DbfTable-class.html#zap dbf.old.tables.DbfTable.size dbf.old.tables.DbfTable-class.html#size dbf.old.tables.DbfTable.top dbf.old.tables.DbfTable-class.html#top dbf.old.tables.Db3Table._supported_tables dbf.old.tables.Db3Table-class.html#_supported_tables dbf.old.tables.DbfTable.is_decimal dbf.old.tables.DbfTable-class.html#is_decimal dbf.old.tables.DbfTable.next dbf.old.tables.DbfTable-class.html#next dbf.old.tables.Db3Table._noMemoMask dbf.old.tables.Db3Table-class.html#_noMemoMask dbf.old.tables.DbfTable.prev dbf.old.tables.DbfTable-class.html#prev dbf.old.tables.DbfTable.create_backup dbf.old.tables.DbfTable-class.html#create_backup dbf.old.tables.DbfTable.__len__ dbf.old.tables.DbfTable-class.html#__len__ dbf.old.tables.Db3Table._fixed_fields dbf.old.tables.Db3Table-class.html#_fixed_fields dbf.old.tables.DbfTable.__iter__ dbf.old.tables.DbfTable-class.html#__iter__ dbf.old.tables.DbfTable.memoname dbf.old.tables.DbfTable-class.html#memoname dbf.old.tables.DbfTable.create_index dbf.old.tables.DbfTable-class.html#create_index dbf.old.tables.DbfTable.type dbf.old.tables.DbfTable-class.html#type dbf.old.tables.DbfTable._update_disk dbf.old.tables.DbfTable-class.html#_update_disk dbf.old.tables.DbfTable._buildHeaderFields dbf.old.tables.DbfTable-class.html#_buildHeaderFields dbf.old.tables.DbfTable.get_record dbf.old.tables.DbfTable-class.html#get_record dbf.old.tables.Db3Table._dbfTableHeaderExtra dbf.old.tables.Db3Table-class.html#_dbfTableHeaderExtra dbf.old.tables.DbfTable.__init__ dbf.old.tables.DbfTable-class.html#__init__ dbf.old.tables.Db3Table._initializeFields dbf.old.tables.Db3Table-class.html#_initializeFields dbf.old.tables.Db3Table._variable_fields dbf.old.tables.Db3Table-class.html#_variable_fields dbf.old.tables.Db3Table._checkMemoIntegrity dbf.old.tables.Db3Table-class.html#_checkMemoIntegrity dbf.old.tables.DbfTable.record_length dbf.old.tables.DbfTable-class.html#record_length dbf.old.tables.DbfTable._fieldLayout dbf.old.tables.DbfTable-class.html#_fieldLayout dbf.old.tables.DbfTable.record_number dbf.old.tables.DbfTable-class.html#record_number dbf.old.tables.DbfTable.structure dbf.old.tables.DbfTable-class.html#structure dbf.old.tables.DbfTable.eof dbf.old.tables.DbfTable-class.html#eof dbf.old.tables.DbfTable.bottom dbf.old.tables.DbfTable-class.html#bottom dbf.old.tables.DbfTable._MetaData dbf.old.tables.DbfTable._MetaData-class.html dbf.old.tables.DbfTable.field_names dbf.old.tables.DbfTable-class.html#field_names dbf.old.tables.DbfTable._Table dbf.old.tables.DbfTable._Table-class.html dbf.old.tables.DbfCsv dbf.old.tables.DbfCsv-class.html dbf.old.tables.DbfCsv.lineterminator dbf.old.tables.DbfCsv-class.html#lineterminator dbf.old.tables.DbfCsv.skipinitialspace dbf.old.tables.DbfCsv-class.html#skipinitialspace dbf.old.tables.DbfCsv.quoting dbf.old.tables.DbfCsv-class.html#quoting dbf.old.tables.DbfCsv.delimiter dbf.old.tables.DbfCsv-class.html#delimiter dbf.old.tables.DbfCsv.escapechar dbf.old.tables.DbfCsv-class.html#escapechar dbf.old.tables.DbfCsv.quotechar dbf.old.tables.DbfCsv-class.html#quotechar dbf.old.tables.DbfCsv.doublequote dbf.old.tables.DbfCsv-class.html#doublequote dbf.old.tables.DbfTable dbf.old.tables.DbfTable-class.html dbf.old.tables.DbfTable.__str__ dbf.old.tables.DbfTable-class.html#__str__ dbf.old.tables.DbfTable.supported_tables 
dbf.old.tables.DbfTable-class.html#supported_tables dbf.old.tables.DbfTable._backed_up dbf.old.tables.DbfTable-class.html#_backed_up dbf.old.tables.DbfTable._yesMemoMask dbf.old.tables.DbfTable-class.html#_yesMemoMask dbf.old.tables.DbfTable.query dbf.old.tables.DbfTable-class.html#query dbf.old.tables.DbfTable.last_update dbf.old.tables.DbfTable-class.html#last_update dbf.old.tables.DbfTable._meta_only dbf.old.tables.DbfTable-class.html#_meta_only dbf.old.tables.DbfTable.__enter__ dbf.old.tables.DbfTable-class.html#__enter__ dbf.old.tables.DbfTable.__exit__ dbf.old.tables.DbfTable-class.html#__exit__ dbf.old.tables.DbfTable.__getitem__ dbf.old.tables.DbfTable-class.html#__getitem__ dbf.old.tables.DbfTable._fieldtypes dbf.old.tables.DbfTable-class.html#_fieldtypes dbf.old.tables.DbfTable._DbfLists dbf.old.tables.DbfTable._DbfLists-class.html dbf.old.tables.DbfTable._decimal_fields dbf.old.tables.DbfTable-class.html#_decimal_fields dbf.old.tables.DbfTable.use_deleted dbf.old.tables.DbfTable-class.html#use_deleted dbf.old.tables.DbfTable.DbfIterator dbf.old.tables.DbfTable.DbfIterator-class.html dbf.old.tables.DbfTable.__nonzero__ dbf.old.tables.DbfTable-class.html#__nonzero__ dbf.old.tables.DbfTable._character_fields dbf.old.tables.DbfTable-class.html#_character_fields dbf.old.tables.DbfTable._noMemoMask dbf.old.tables.DbfTable-class.html#_noMemoMask dbf.old.tables.DbfTable.is_memotype dbf.old.tables.DbfTable-class.html#is_memotype dbf.old.tables.DbfTable._list_fields dbf.old.tables.DbfTable-class.html#_list_fields dbf.old.tables.DbfTable._memoext dbf.old.tables.DbfTable-class.html#_memoext dbf.old.tables.DbfTable._dbfTableHeader dbf.old.tables.DbfTable-class.html#_dbfTableHeader dbf.old.tables.DbfTable.export dbf.old.tables.DbfTable-class.html#export dbf.old.tables.DbfTable.close dbf.old.tables.DbfTable-class.html#close dbf.old.tables.DbfTable.append dbf.old.tables.DbfTable-class.html#append dbf.old.tables.DbfTable.rename_field dbf.old.tables.DbfTable-class.html#rename_field dbf.old.tables.DbfTable._use_deleted dbf.old.tables.DbfTable-class.html#_use_deleted dbf.old.tables.DbfTable.__contains__ dbf.old.tables.DbfTable-class.html#__contains__ dbf.old.tables.DbfTable.__getattr__ dbf.old.tables.DbfTable-class.html#__getattr__ dbf.old.tables.DbfTable.filename dbf.old.tables.DbfTable-class.html#filename dbf.old.tables.DbfTable.current dbf.old.tables.DbfTable-class.html#current dbf.old.tables.DbfTable.version dbf.old.tables.DbfTable-class.html#version dbf.old.tables.DbfTable.new dbf.old.tables.DbfTable-class.html#new dbf.old.tables.DbfTable._read_only dbf.old.tables.DbfTable-class.html#_read_only dbf.old.tables.DbfTable.goto dbf.old.tables.DbfTable-class.html#goto dbf.old.tables.DbfTable.field_count dbf.old.tables.DbfTable-class.html#field_count dbf.old.tables.DbfTable._Indexen dbf.old.tables.DbfTable._Indexen-class.html dbf.old.tables.DbfTable._memotypes dbf.old.tables.DbfTable-class.html#_memotypes dbf.old.tables.DbfTable._version dbf.old.tables.DbfTable-class.html#_version dbf.old.tables.DbfTable.add_fields dbf.old.tables.DbfTable-class.html#add_fields dbf.old.tables.DbfTable.__repr__ dbf.old.tables.DbfTable-class.html#__repr__ dbf.old.tables.DbfTable._versionabbv dbf.old.tables.DbfTable-class.html#_versionabbv dbf.old.tables.DbfTable._buildHeaderFields dbf.old.tables.DbfTable-class.html#_buildHeaderFields dbf.old.tables.DbfTable.bof dbf.old.tables.DbfTable-class.html#bof dbf.old.tables.DbfTable.reindex dbf.old.tables.DbfTable-class.html#reindex dbf.old.tables.DbfTable._numeric_fields 
dbf.old.tables.DbfTable-class.html#_numeric_fields dbf.old.tables.DbfTable.codepage dbf.old.tables.DbfTable-class.html#codepage dbf.old.tables.DbfTable.delete_fields dbf.old.tables.DbfTable-class.html#delete_fields dbf.old.tables.DbfTable._memoClass dbf.old.tables._DbfMemo-class.html dbf.old.tables.DbfTable._TableHeader dbf.old.tables.DbfTable._TableHeader-class.html dbf.old.tables.DbfTable.open dbf.old.tables.DbfTable-class.html#open dbf.old.tables.DbfTable.zap dbf.old.tables.DbfTable-class.html#zap dbf.old.tables.DbfTable.size dbf.old.tables.DbfTable-class.html#size dbf.old.tables.DbfTable.top dbf.old.tables.DbfTable-class.html#top dbf.old.tables.DbfTable._supported_tables dbf.old.tables.DbfTable-class.html#_supported_tables dbf.old.tables.DbfTable.is_decimal dbf.old.tables.DbfTable-class.html#is_decimal dbf.old.tables.DbfTable.next dbf.old.tables.DbfTable-class.html#next dbf.old.tables.DbfTable._loadtable dbf.old.tables.DbfTable-class.html#_loadtable dbf.old.tables.DbfTable.prev dbf.old.tables.DbfTable-class.html#prev dbf.old.tables.DbfTable.type dbf.old.tables.DbfTable-class.html#type dbf.old.tables.DbfTable.__len__ dbf.old.tables.DbfTable-class.html#__len__ dbf.old.tables.DbfTable._fixed_fields dbf.old.tables.DbfTable-class.html#_fixed_fields dbf.old.tables.DbfTable.__iter__ dbf.old.tables.DbfTable-class.html#__iter__ dbf.old.tables.DbfTable.memoname dbf.old.tables.DbfTable-class.html#memoname dbf.old.tables.DbfTable.create_index dbf.old.tables.DbfTable-class.html#create_index dbf.old.tables.DbfTable.create_backup dbf.old.tables.DbfTable-class.html#create_backup dbf.old.tables.DbfTable._update_disk dbf.old.tables.DbfTable-class.html#_update_disk dbf.old.tables.DbfTable.pack dbf.old.tables.DbfTable-class.html#pack dbf.old.tables.DbfTable.get_record dbf.old.tables.DbfTable-class.html#get_record dbf.old.tables.DbfTable._dbfTableHeaderExtra dbf.old.tables.DbfTable-class.html#_dbfTableHeaderExtra dbf.old.tables.DbfTable.__init__ dbf.old.tables.DbfTable-class.html#__init__ dbf.old.tables.DbfTable._initializeFields dbf.old.tables.DbfTable-class.html#_initializeFields dbf.old.tables.DbfTable._variable_fields dbf.old.tables.DbfTable-class.html#_variable_fields dbf.old.tables.DbfTable._checkMemoIntegrity dbf.old.tables.DbfTable-class.html#_checkMemoIntegrity dbf.old.tables.DbfTable.record_length dbf.old.tables.DbfTable-class.html#record_length dbf.old.tables.DbfTable._fieldLayout dbf.old.tables.DbfTable-class.html#_fieldLayout dbf.old.tables.DbfTable.record_number dbf.old.tables.DbfTable-class.html#record_number dbf.old.tables.DbfTable.structure dbf.old.tables.DbfTable-class.html#structure dbf.old.tables.DbfTable.eof dbf.old.tables.DbfTable-class.html#eof dbf.old.tables.DbfTable.bottom dbf.old.tables.DbfTable-class.html#bottom dbf.old.tables.DbfTable._MetaData dbf.old.tables.DbfTable._MetaData-class.html dbf.old.tables.DbfTable.field_names dbf.old.tables.DbfTable-class.html#field_names dbf.old.tables.DbfTable._Table dbf.old.tables.DbfTable._Table-class.html dbf.old.tables.DbfTable.DbfIterator dbf.old.tables.DbfTable.DbfIterator-class.html dbf.old.tables.DbfTable.DbfIterator.next dbf.old.tables.DbfTable.DbfIterator-class.html#next dbf.old.tables.DbfTable.DbfIterator.__iter__ dbf.old.tables.DbfTable.DbfIterator-class.html#__iter__ dbf.old.tables.DbfTable.DbfIterator.__init__ dbf.old.tables.DbfTable.DbfIterator-class.html#__init__ dbf.old.tables.DbfTable._DbfLists dbf.old.tables.DbfTable._DbfLists-class.html dbf.old.tables.DbfTable._DbfLists.add dbf.old.tables.DbfTable._DbfLists-class.html#add 
dbf.old.tables.DbfTable._DbfLists.__iter__ dbf.old.tables.DbfTable._DbfLists-class.html#__iter__ dbf.old.tables.DbfTable._DbfLists.__len__ dbf.old.tables.DbfTable._DbfLists-class.html#__len__ dbf.old.tables.DbfTable._DbfLists.__init__ dbf.old.tables.DbfTable._DbfLists-class.html#__init__ dbf.old.tables.DbfTable._Indexen dbf.old.tables.DbfTable._Indexen-class.html dbf.old.tables.DbfTable._Indexen.add dbf.old.tables.DbfTable._Indexen-class.html#add dbf.old.tables.DbfTable._Indexen.__iter__ dbf.old.tables.DbfTable._Indexen-class.html#__iter__ dbf.old.tables.DbfTable._Indexen.__len__ dbf.old.tables.DbfTable._Indexen-class.html#__len__ dbf.old.tables.DbfTable._Indexen.__init__ dbf.old.tables.DbfTable._Indexen-class.html#__init__ dbf.old.tables.DbfTable._MetaData dbf.old.tables.DbfTable._MetaData-class.html dbf.old.tables.DbfTable._MetaData.mfd dbf.old.tables.DbfTable._MetaData-class.html#mfd dbf.old.tables.DbfTable._MetaData.newmemofile dbf.old.tables.DbfTable._MetaData-class.html#newmemofile dbf.old.tables.DbfTable._MetaData.ignorememos dbf.old.tables.DbfTable._MetaData-class.html#ignorememos dbf.old.tables.DbfTable._MetaData.filename dbf.old.tables.DbfTable._MetaData-class.html#filename dbf.old.tables.DbfTable._MetaData.current dbf.old.tables.DbfTable._MetaData-class.html#current dbf.old.tables.DbfTable._MetaData.memofields dbf.old.tables.DbfTable._MetaData-class.html#memofields dbf.old.tables.DbfTable._MetaData.dfd dbf.old.tables.DbfTable._MetaData-class.html#dfd dbf.old.tables.DbfTable._MetaData.blankrecord dbf.old.tables.DbfTable._MetaData-class.html#blankrecord dbf.old.tables.DbfTable._MetaData.memo dbf.old.tables.DbfTable._MetaData-class.html#memo dbf.old.tables.DbfTable._MetaData.memoname dbf.old.tables.DbfTable._MetaData-class.html#memoname dbf.old.tables.DbfTable._MetaData.fields dbf.old.tables.DbfTable._MetaData-class.html#fields dbf.old.tables.DbfTable._Table dbf.old.tables.DbfTable._Table-class.html dbf.old.tables.DbfTable._Table.__getitem__ dbf.old.tables.DbfTable._Table-class.html#__getitem__ dbf.old.tables.DbfTable._Table.clear dbf.old.tables.DbfTable._Table-class.html#clear dbf.old.tables.DbfTable._Table.append dbf.old.tables.DbfTable._Table-class.html#append dbf.old.tables.DbfTable._Table.__init__ dbf.old.tables.DbfTable._Table-class.html#__init__ dbf.old.tables.DbfTable._TableHeader dbf.old.tables.DbfTable._TableHeader-class.html dbf.old.tables.DbfTable._TableHeader.record_count dbf.old.tables.DbfTable._TableHeader-class.html#record_count dbf.old.tables.DbfTable._TableHeader.record_length dbf.old.tables.DbfTable._TableHeader-class.html#record_length dbf.old.tables.DbfTable._TableHeader.extra dbf.old.tables.DbfTable._TableHeader-class.html#extra dbf.old.tables.DbfTable._TableHeader.fields dbf.old.tables.DbfTable._TableHeader-class.html#fields dbf.old.tables.DbfTable._TableHeader.codepage dbf.old.tables.DbfTable._TableHeader-class.html#codepage dbf.old.tables.DbfTable._TableHeader.field_count dbf.old.tables.DbfTable._TableHeader-class.html#field_count dbf.old.tables.DbfTable._TableHeader.update dbf.old.tables.DbfTable._TableHeader-class.html#update dbf.old.tables.DbfTable._TableHeader.start dbf.old.tables.DbfTable._TableHeader-class.html#start dbf.old.tables.DbfTable._TableHeader.version dbf.old.tables.DbfTable._TableHeader-class.html#version dbf.old.tables.DbfTable._TableHeader.data dbf.old.tables.DbfTable._TableHeader-class.html#data dbf.old.tables.DbfTable._TableHeader.__init__ dbf.old.tables.DbfTable._TableHeader-class.html#__init__ dbf.old.tables.FpTable 
dbf.old.tables.FpTable-class.html dbf.old.tables.DbfTable.__str__ dbf.old.tables.DbfTable-class.html#__str__ dbf.old.tables.DbfTable.supported_tables dbf.old.tables.DbfTable-class.html#supported_tables dbf.old.tables.DbfTable._backed_up dbf.old.tables.DbfTable-class.html#_backed_up dbf.old.tables.FpTable._yesMemoMask dbf.old.tables.FpTable-class.html#_yesMemoMask dbf.old.tables.DbfTable.query dbf.old.tables.DbfTable-class.html#query dbf.old.tables.DbfTable.last_update dbf.old.tables.DbfTable-class.html#last_update dbf.old.tables.DbfTable._meta_only dbf.old.tables.DbfTable-class.html#_meta_only dbf.old.tables.DbfTable.__enter__ dbf.old.tables.DbfTable-class.html#__enter__ dbf.old.tables.DbfTable.__exit__ dbf.old.tables.DbfTable-class.html#__exit__ dbf.old.tables.DbfTable.__getitem__ dbf.old.tables.DbfTable-class.html#__getitem__ dbf.old.tables.FpTable._fieldtypes dbf.old.tables.FpTable-class.html#_fieldtypes dbf.old.tables.DbfTable._DbfLists dbf.old.tables.DbfTable._DbfLists-class.html dbf.old.tables.FpTable._decimal_fields dbf.old.tables.FpTable-class.html#_decimal_fields dbf.old.tables.DbfTable.use_deleted dbf.old.tables.DbfTable-class.html#use_deleted dbf.old.tables.DbfTable.DbfIterator dbf.old.tables.DbfTable.DbfIterator-class.html dbf.old.tables.DbfTable.__nonzero__ dbf.old.tables.DbfTable-class.html#__nonzero__ dbf.old.tables.FpTable._character_fields dbf.old.tables.FpTable-class.html#_character_fields dbf.old.tables.FpTable._noMemoMask dbf.old.tables.FpTable-class.html#_noMemoMask dbf.old.tables.DbfTable.is_memotype dbf.old.tables.DbfTable-class.html#is_memotype dbf.old.tables.DbfTable._list_fields dbf.old.tables.DbfTable-class.html#_list_fields dbf.old.tables.FpTable._memoext dbf.old.tables.FpTable-class.html#_memoext dbf.old.tables.FpTable._dbfTableHeader dbf.old.tables.FpTable-class.html#_dbfTableHeader dbf.old.tables.DbfTable.export dbf.old.tables.DbfTable-class.html#export dbf.old.tables.DbfTable.close dbf.old.tables.DbfTable-class.html#close dbf.old.tables.DbfTable.append dbf.old.tables.DbfTable-class.html#append dbf.old.tables.DbfTable.rename_field dbf.old.tables.DbfTable-class.html#rename_field dbf.old.tables.FpTable._use_deleted dbf.old.tables.FpTable-class.html#_use_deleted dbf.old.tables.DbfTable.__contains__ dbf.old.tables.DbfTable-class.html#__contains__ dbf.old.tables.DbfTable.__getattr__ dbf.old.tables.DbfTable-class.html#__getattr__ dbf.old.tables.DbfTable.filename dbf.old.tables.DbfTable-class.html#filename dbf.old.tables.DbfTable.current dbf.old.tables.DbfTable-class.html#current dbf.old.tables.DbfTable.version dbf.old.tables.DbfTable-class.html#version dbf.old.tables.DbfTable.new dbf.old.tables.DbfTable-class.html#new dbf.old.tables.DbfTable._read_only dbf.old.tables.DbfTable-class.html#_read_only dbf.old.tables.DbfTable.goto dbf.old.tables.DbfTable-class.html#goto dbf.old.tables.DbfTable.field_count dbf.old.tables.DbfTable-class.html#field_count dbf.old.tables.FpTable._versionabbv dbf.old.tables.FpTable-class.html#_versionabbv dbf.old.tables.FpTable._memotypes dbf.old.tables.FpTable-class.html#_memotypes dbf.old.tables.FpTable._version dbf.old.tables.FpTable-class.html#_version dbf.old.tables.DbfTable.add_fields dbf.old.tables.DbfTable-class.html#add_fields dbf.old.tables.DbfTable.__repr__ dbf.old.tables.DbfTable-class.html#__repr__ dbf.old.tables.DbfTable._Indexen dbf.old.tables.DbfTable._Indexen-class.html dbf.old.tables.DbfTable._buildHeaderFields dbf.old.tables.DbfTable-class.html#_buildHeaderFields dbf.old.tables.DbfTable.bof 
dbf.old.tables.DbfTable-class.html#bof dbf.old.tables.DbfTable.reindex dbf.old.tables.DbfTable-class.html#reindex dbf.old.tables.FpTable._numeric_fields dbf.old.tables.FpTable-class.html#_numeric_fields dbf.old.tables.DbfTable.codepage dbf.old.tables.DbfTable-class.html#codepage dbf.old.tables.DbfTable.delete_fields dbf.old.tables.DbfTable-class.html#delete_fields dbf.old.tables.FpTable._memoClass dbf.old.tables._VfpMemo-class.html dbf.old.tables.DbfTable._TableHeader dbf.old.tables.DbfTable._TableHeader-class.html dbf.old.tables.DbfTable.open dbf.old.tables.DbfTable-class.html#open dbf.old.tables.DbfTable.zap dbf.old.tables.DbfTable-class.html#zap dbf.old.tables.DbfTable.size dbf.old.tables.DbfTable-class.html#size dbf.old.tables.DbfTable.top dbf.old.tables.DbfTable-class.html#top dbf.old.tables.FpTable._supported_tables dbf.old.tables.FpTable-class.html#_supported_tables dbf.old.tables.DbfTable.is_decimal dbf.old.tables.DbfTable-class.html#is_decimal dbf.old.tables.DbfTable.next dbf.old.tables.DbfTable-class.html#next dbf.old.tables.DbfTable._loadtable dbf.old.tables.DbfTable-class.html#_loadtable dbf.old.tables.DbfTable.prev dbf.old.tables.DbfTable-class.html#prev dbf.old.tables.DbfTable.create_backup dbf.old.tables.DbfTable-class.html#create_backup dbf.old.tables.DbfTable.__len__ dbf.old.tables.DbfTable-class.html#__len__ dbf.old.tables.FpTable._fixed_fields dbf.old.tables.FpTable-class.html#_fixed_fields dbf.old.tables.DbfTable.__iter__ dbf.old.tables.DbfTable-class.html#__iter__ dbf.old.tables.DbfTable.memoname dbf.old.tables.DbfTable-class.html#memoname dbf.old.tables.DbfTable.create_index dbf.old.tables.DbfTable-class.html#create_index dbf.old.tables.DbfTable.type dbf.old.tables.DbfTable-class.html#type dbf.old.tables.DbfTable._update_disk dbf.old.tables.DbfTable-class.html#_update_disk dbf.old.tables.DbfTable.pack dbf.old.tables.DbfTable-class.html#pack dbf.old.tables.DbfTable.get_record dbf.old.tables.DbfTable-class.html#get_record dbf.old.tables.FpTable._dbfTableHeaderExtra dbf.old.tables.FpTable-class.html#_dbfTableHeaderExtra dbf.old.tables.DbfTable.__init__ dbf.old.tables.DbfTable-class.html#__init__ dbf.old.tables.FpTable._initializeFields dbf.old.tables.FpTable-class.html#_initializeFields dbf.old.tables.FpTable._variable_fields dbf.old.tables.FpTable-class.html#_variable_fields dbf.old.tables.FpTable._checkMemoIntegrity dbf.old.tables.FpTable-class.html#_checkMemoIntegrity dbf.old.tables.DbfTable.record_length dbf.old.tables.DbfTable-class.html#record_length dbf.old.tables.DbfTable._fieldLayout dbf.old.tables.DbfTable-class.html#_fieldLayout dbf.old.tables.DbfTable.record_number dbf.old.tables.DbfTable-class.html#record_number dbf.old.tables.DbfTable.structure dbf.old.tables.DbfTable-class.html#structure dbf.old.tables.DbfTable.eof dbf.old.tables.DbfTable-class.html#eof dbf.old.tables.DbfTable.bottom dbf.old.tables.DbfTable-class.html#bottom dbf.old.tables.DbfTable._MetaData dbf.old.tables.DbfTable._MetaData-class.html dbf.old.tables.DbfTable.field_names dbf.old.tables.DbfTable-class.html#field_names dbf.old.tables.DbfTable._Table dbf.old.tables.DbfTable._Table-class.html dbf.old.tables.Index dbf.old.tables.Index-class.html dbf.old.tables.Index.reindex dbf.old.tables.Index-class.html#reindex dbf.old.tables.Index.IndexIterator dbf.old.tables.Index.IndexIterator-class.html dbf.old.tables.Index.close dbf.old.tables.Index-class.html#close dbf.old.tables.Index.query dbf.old.tables.Index-class.html#query dbf.old.tables.Index.find dbf.old.tables.Index-class.html#find 
dbf.old.tables.Index.__init__ dbf.old.tables.Index-class.html#__init__ dbf.old.tables.Index.index dbf.old.tables.Index-class.html#index dbf.old.tables.Index.__contains__ dbf.old.tables.Index-class.html#__contains__ dbf.old.tables.Index.__enter__ dbf.old.tables.Index-class.html#__enter__ dbf.old.tables.Index._partial_match dbf.old.tables.Index-class.html#_partial_match dbf.old.tables.Index.__call__ dbf.old.tables.Index-class.html#__call__ dbf.old.tables.Index.__len__ dbf.old.tables.Index-class.html#__len__ dbf.old.tables.Index._purge dbf.old.tables.Index-class.html#_purge dbf.old.tables.Index.__exit__ dbf.old.tables.Index-class.html#__exit__ dbf.old.tables.Index.__getitem__ dbf.old.tables.Index-class.html#__getitem__ dbf.old.tables.Index.__iter__ dbf.old.tables.Index-class.html#__iter__ dbf.old.tables.Index.search dbf.old.tables.Index-class.html#search dbf.old.tables.Index.clear dbf.old.tables.Index-class.html#clear dbf.old.tables.Index._search dbf.old.tables.Index-class.html#_search dbf.old.tables.Index.find_index dbf.old.tables.Index-class.html#find_index dbf.old.tables.Index.IndexIterator dbf.old.tables.Index.IndexIterator-class.html dbf.old.tables.Index.IndexIterator.next dbf.old.tables.Index.IndexIterator-class.html#next dbf.old.tables.Index.IndexIterator.__iter__ dbf.old.tables.Index.IndexIterator-class.html#__iter__ dbf.old.tables.Index.IndexIterator.__init__ dbf.old.tables.Index.IndexIterator-class.html#__init__ dbf.old.tables.List dbf.old.tables.List-class.html dbf.old.tables.List.key dbf.old.tables.List-class.html#key dbf.old.tables.List.pop dbf.old.tables.List-class.html#pop dbf.old.tables.List.__radd__ dbf.old.tables.List-class.html#__radd__ dbf.old.tables.List.__rsub__ dbf.old.tables.List-class.html#__rsub__ dbf.old.tables.List.next dbf.old.tables.List-class.html#next dbf.old.tables.List.__init__ dbf.old.tables.List-class.html#__init__ dbf.old.tables.List.index dbf.old.tables.List-class.html#index dbf.old.tables.List.extend dbf.old.tables.List-class.html#extend dbf.old.tables.List.__contains__ dbf.old.tables.List-class.html#__contains__ dbf.old.tables.List.top dbf.old.tables.List-class.html#top dbf.old.tables.List._desc dbf.old.tables.List-class.html#_desc dbf.old.tables.List._maybe_add dbf.old.tables.List-class.html#_maybe_add dbf.old.tables.List.current dbf.old.tables.List-class.html#current dbf.old.tables.List.append dbf.old.tables.List-class.html#append dbf.old.tables.List.prev dbf.old.tables.List-class.html#prev dbf.old.tables.List.__len__ dbf.old.tables.List-class.html#__len__ dbf.old.tables.List.sort dbf.old.tables.List-class.html#sort dbf.old.tables.List._purge dbf.old.tables.List-class.html#_purge dbf.old.tables.List.goto dbf.old.tables.List-class.html#goto dbf.old.tables.List.__getitem__ dbf.old.tables.List-class.html#__getitem__ dbf.old.tables.List.insert dbf.old.tables.List-class.html#insert dbf.old.tables.List.__iter__ dbf.old.tables.List-class.html#__iter__ dbf.old.tables.List.__add__ dbf.old.tables.List-class.html#__add__ dbf.old.tables.List._get_record dbf.old.tables.List-class.html#_get_record dbf.old.tables.List.__delitem__ dbf.old.tables.List-class.html#__delitem__ dbf.old.tables.List.__nonzero__ dbf.old.tables.List-class.html#__nonzero__ dbf.old.tables.List.reverse dbf.old.tables.List-class.html#reverse dbf.old.tables.List.clear dbf.old.tables.List-class.html#clear dbf.old.tables.List.bottom dbf.old.tables.List-class.html#bottom dbf.old.tables.List.remove dbf.old.tables.List-class.html#remove dbf.old.tables.List.__repr__ 
dbf.old.tables.List-class.html#__repr__ dbf.old.tables.List.__sub__ dbf.old.tables.List-class.html#__sub__ dbf.old.tables.VfpTable dbf.old.tables.VfpTable-class.html dbf.old.tables.DbfTable.__str__ dbf.old.tables.DbfTable-class.html#__str__ dbf.old.tables.DbfTable.supported_tables dbf.old.tables.DbfTable-class.html#supported_tables dbf.old.tables.DbfTable._backed_up dbf.old.tables.DbfTable-class.html#_backed_up dbf.old.tables.VfpTable._yesMemoMask dbf.old.tables.VfpTable-class.html#_yesMemoMask dbf.old.tables.DbfTable.query dbf.old.tables.DbfTable-class.html#query dbf.old.tables.DbfTable.last_update dbf.old.tables.DbfTable-class.html#last_update dbf.old.tables.DbfTable._meta_only dbf.old.tables.DbfTable-class.html#_meta_only dbf.old.tables.DbfTable.__enter__ dbf.old.tables.DbfTable-class.html#__enter__ dbf.old.tables.DbfTable.__exit__ dbf.old.tables.DbfTable-class.html#__exit__ dbf.old.tables.DbfTable.__getitem__ dbf.old.tables.DbfTable-class.html#__getitem__ dbf.old.tables.VfpTable._fieldtypes dbf.old.tables.VfpTable-class.html#_fieldtypes dbf.old.tables.DbfTable._DbfLists dbf.old.tables.DbfTable._DbfLists-class.html dbf.old.tables.VfpTable._decimal_fields dbf.old.tables.VfpTable-class.html#_decimal_fields dbf.old.tables.DbfTable.use_deleted dbf.old.tables.DbfTable-class.html#use_deleted dbf.old.tables.DbfTable.DbfIterator dbf.old.tables.DbfTable.DbfIterator-class.html dbf.old.tables.DbfTable.__nonzero__ dbf.old.tables.DbfTable-class.html#__nonzero__ dbf.old.tables.VfpTable._character_fields dbf.old.tables.VfpTable-class.html#_character_fields dbf.old.tables.VfpTable._noMemoMask dbf.old.tables.VfpTable-class.html#_noMemoMask dbf.old.tables.DbfTable.is_memotype dbf.old.tables.DbfTable-class.html#is_memotype dbf.old.tables.DbfTable._list_fields dbf.old.tables.DbfTable-class.html#_list_fields dbf.old.tables.VfpTable._memoext dbf.old.tables.VfpTable-class.html#_memoext dbf.old.tables.VfpTable._dbfTableHeader dbf.old.tables.VfpTable-class.html#_dbfTableHeader dbf.old.tables.DbfTable.export dbf.old.tables.DbfTable-class.html#export dbf.old.tables.DbfTable.close dbf.old.tables.DbfTable-class.html#close dbf.old.tables.DbfTable.append dbf.old.tables.DbfTable-class.html#append dbf.old.tables.DbfTable.rename_field dbf.old.tables.DbfTable-class.html#rename_field dbf.old.tables.VfpTable._use_deleted dbf.old.tables.VfpTable-class.html#_use_deleted dbf.old.tables.DbfTable.__contains__ dbf.old.tables.DbfTable-class.html#__contains__ dbf.old.tables.DbfTable.__getattr__ dbf.old.tables.DbfTable-class.html#__getattr__ dbf.old.tables.DbfTable.filename dbf.old.tables.DbfTable-class.html#filename dbf.old.tables.DbfTable.current dbf.old.tables.DbfTable-class.html#current dbf.old.tables.DbfTable.version dbf.old.tables.DbfTable-class.html#version dbf.old.tables.DbfTable.new dbf.old.tables.DbfTable-class.html#new dbf.old.tables.DbfTable._read_only dbf.old.tables.DbfTable-class.html#_read_only dbf.old.tables.DbfTable.goto dbf.old.tables.DbfTable-class.html#goto dbf.old.tables.DbfTable.field_count dbf.old.tables.DbfTable-class.html#field_count dbf.old.tables.VfpTable._versionabbv dbf.old.tables.VfpTable-class.html#_versionabbv dbf.old.tables.VfpTable._memotypes dbf.old.tables.VfpTable-class.html#_memotypes dbf.old.tables.VfpTable._version dbf.old.tables.VfpTable-class.html#_version dbf.old.tables.DbfTable.add_fields dbf.old.tables.DbfTable-class.html#add_fields dbf.old.tables.DbfTable.__repr__ dbf.old.tables.DbfTable-class.html#__repr__ dbf.old.tables.DbfTable._Indexen dbf.old.tables.DbfTable._Indexen-class.html 
dbf.old.tables.DbfTable._buildHeaderFields dbf.old.tables.DbfTable-class.html#_buildHeaderFields dbf.old.tables.DbfTable.bof dbf.old.tables.DbfTable-class.html#bof dbf.old.tables.DbfTable.reindex dbf.old.tables.DbfTable-class.html#reindex dbf.old.tables.VfpTable._numeric_fields dbf.old.tables.VfpTable-class.html#_numeric_fields dbf.old.tables.DbfTable.codepage dbf.old.tables.DbfTable-class.html#codepage dbf.old.tables.DbfTable.delete_fields dbf.old.tables.DbfTable-class.html#delete_fields dbf.old.tables.VfpTable._memoClass dbf.old.tables._VfpMemo-class.html dbf.old.tables.DbfTable._TableHeader dbf.old.tables.DbfTable._TableHeader-class.html dbf.old.tables.DbfTable.open dbf.old.tables.DbfTable-class.html#open dbf.old.tables.DbfTable.zap dbf.old.tables.DbfTable-class.html#zap dbf.old.tables.DbfTable.size dbf.old.tables.DbfTable-class.html#size dbf.old.tables.DbfTable.top dbf.old.tables.DbfTable-class.html#top dbf.old.tables.VfpTable._supported_tables dbf.old.tables.VfpTable-class.html#_supported_tables dbf.old.tables.DbfTable.is_decimal dbf.old.tables.DbfTable-class.html#is_decimal dbf.old.tables.DbfTable.next dbf.old.tables.DbfTable-class.html#next dbf.old.tables.DbfTable._loadtable dbf.old.tables.DbfTable-class.html#_loadtable dbf.old.tables.DbfTable.prev dbf.old.tables.DbfTable-class.html#prev dbf.old.tables.DbfTable.create_backup dbf.old.tables.DbfTable-class.html#create_backup dbf.old.tables.DbfTable.__len__ dbf.old.tables.DbfTable-class.html#__len__ dbf.old.tables.VfpTable._fixed_fields dbf.old.tables.VfpTable-class.html#_fixed_fields dbf.old.tables.DbfTable.__iter__ dbf.old.tables.DbfTable-class.html#__iter__ dbf.old.tables.DbfTable.memoname dbf.old.tables.DbfTable-class.html#memoname dbf.old.tables.DbfTable.create_index dbf.old.tables.DbfTable-class.html#create_index dbf.old.tables.DbfTable.type dbf.old.tables.DbfTable-class.html#type dbf.old.tables.DbfTable._update_disk dbf.old.tables.DbfTable-class.html#_update_disk dbf.old.tables.DbfTable.pack dbf.old.tables.DbfTable-class.html#pack dbf.old.tables.DbfTable.get_record dbf.old.tables.DbfTable-class.html#get_record dbf.old.tables.VfpTable._dbfTableHeaderExtra dbf.old.tables.VfpTable-class.html#_dbfTableHeaderExtra dbf.old.tables.DbfTable.__init__ dbf.old.tables.DbfTable-class.html#__init__ dbf.old.tables.VfpTable._initializeFields dbf.old.tables.VfpTable-class.html#_initializeFields dbf.old.tables.VfpTable._variable_fields dbf.old.tables.VfpTable-class.html#_variable_fields dbf.old.tables.VfpTable._checkMemoIntegrity dbf.old.tables.VfpTable-class.html#_checkMemoIntegrity dbf.old.tables.DbfTable.record_length dbf.old.tables.DbfTable-class.html#record_length dbf.old.tables.DbfTable._fieldLayout dbf.old.tables.DbfTable-class.html#_fieldLayout dbf.old.tables.DbfTable.record_number dbf.old.tables.DbfTable-class.html#record_number dbf.old.tables.DbfTable.structure dbf.old.tables.DbfTable-class.html#structure dbf.old.tables.DbfTable.eof dbf.old.tables.DbfTable-class.html#eof dbf.old.tables.DbfTable.bottom dbf.old.tables.DbfTable-class.html#bottom dbf.old.tables.DbfTable._MetaData dbf.old.tables.DbfTable._MetaData-class.html dbf.old.tables.DbfTable.field_names dbf.old.tables.DbfTable-class.html#field_names dbf.old.tables.DbfTable._Table dbf.old.tables.DbfTable._Table-class.html dbf.old.tables._Db3Memo dbf.old.tables._Db3Memo-class.html dbf.old.tables._Db3Memo._get_memo dbf.old.tables._Db3Memo-class.html#_get_memo dbf.old.tables._Db3Memo._init dbf.old.tables._Db3Memo-class.html#_init dbf.old.tables._DbfMemo.put_memo 
dbf.old.tables._DbfMemo-class.html#put_memo dbf.old.tables._Db3Memo._put_memo dbf.old.tables._Db3Memo-class.html#_put_memo dbf.old.tables._DbfMemo.get_memo dbf.old.tables._DbfMemo-class.html#get_memo dbf.old.tables._DbfMemo.__init__ dbf.old.tables._DbfMemo-class.html#__init__ dbf.old.tables._Db4Table dbf.old.tables._Db4Table-class.html dbf.old.tables.DbfTable.__str__ dbf.old.tables.DbfTable-class.html#__str__ dbf.old.tables.DbfTable.supported_tables dbf.old.tables.DbfTable-class.html#supported_tables dbf.old.tables.DbfTable._backed_up dbf.old.tables.DbfTable-class.html#_backed_up dbf.old.tables._Db4Table._yesMemoMask dbf.old.tables._Db4Table-class.html#_yesMemoMask dbf.old.tables.DbfTable.query dbf.old.tables.DbfTable-class.html#query dbf.old.tables.DbfTable.last_update dbf.old.tables.DbfTable-class.html#last_update dbf.old.tables.DbfTable._meta_only dbf.old.tables.DbfTable-class.html#_meta_only dbf.old.tables.DbfTable.__enter__ dbf.old.tables.DbfTable-class.html#__enter__ dbf.old.tables.DbfTable.__exit__ dbf.old.tables.DbfTable-class.html#__exit__ dbf.old.tables.DbfTable.__getitem__ dbf.old.tables.DbfTable-class.html#__getitem__ dbf.old.tables._Db4Table._fieldtypes dbf.old.tables._Db4Table-class.html#_fieldtypes dbf.old.tables.DbfTable._DbfLists dbf.old.tables.DbfTable._DbfLists-class.html dbf.old.tables._Db4Table._decimal_fields dbf.old.tables._Db4Table-class.html#_decimal_fields dbf.old.tables.DbfTable.use_deleted dbf.old.tables.DbfTable-class.html#use_deleted dbf.old.tables.DbfTable.DbfIterator dbf.old.tables.DbfTable.DbfIterator-class.html dbf.old.tables.DbfTable.__nonzero__ dbf.old.tables.DbfTable-class.html#__nonzero__ dbf.old.tables._Db4Table._character_fields dbf.old.tables._Db4Table-class.html#_character_fields dbf.old.tables._Db4Table._noMemoMask dbf.old.tables._Db4Table-class.html#_noMemoMask dbf.old.tables.DbfTable.is_memotype dbf.old.tables.DbfTable-class.html#is_memotype dbf.old.tables.DbfTable._list_fields dbf.old.tables.DbfTable-class.html#_list_fields dbf.old.tables._Db4Table._memoext dbf.old.tables._Db4Table-class.html#_memoext dbf.old.tables._Db4Table._dbfTableHeader dbf.old.tables._Db4Table-class.html#_dbfTableHeader dbf.old.tables.DbfTable.export dbf.old.tables.DbfTable-class.html#export dbf.old.tables.DbfTable.close dbf.old.tables.DbfTable-class.html#close dbf.old.tables.DbfTable.append dbf.old.tables.DbfTable-class.html#append dbf.old.tables.DbfTable.rename_field dbf.old.tables.DbfTable-class.html#rename_field dbf.old.tables._Db4Table._use_deleted dbf.old.tables._Db4Table-class.html#_use_deleted dbf.old.tables.DbfTable.__contains__ dbf.old.tables.DbfTable-class.html#__contains__ dbf.old.tables.DbfTable.__getattr__ dbf.old.tables.DbfTable-class.html#__getattr__ dbf.old.tables.DbfTable.filename dbf.old.tables.DbfTable-class.html#filename dbf.old.tables.DbfTable.current dbf.old.tables.DbfTable-class.html#current dbf.old.tables._Db4Table.version dbf.old.tables._Db4Table-class.html#version dbf.old.tables.DbfTable.new dbf.old.tables.DbfTable-class.html#new dbf.old.tables.DbfTable._read_only dbf.old.tables.DbfTable-class.html#_read_only dbf.old.tables.DbfTable.goto dbf.old.tables.DbfTable-class.html#goto dbf.old.tables.DbfTable.field_count dbf.old.tables.DbfTable-class.html#field_count dbf.old.tables._Db4Table._versionabbv dbf.old.tables._Db4Table-class.html#_versionabbv dbf.old.tables._Db4Table._memotypes dbf.old.tables._Db4Table-class.html#_memotypes dbf.old.tables.DbfTable._version dbf.old.tables.DbfTable-class.html#_version dbf.old.tables.DbfTable.add_fields 
dbf.old.tables.DbfTable-class.html#add_fields dbf.old.tables.DbfTable.__repr__ dbf.old.tables.DbfTable-class.html#__repr__ dbf.old.tables.DbfTable._Indexen dbf.old.tables.DbfTable._Indexen-class.html dbf.old.tables.DbfTable._buildHeaderFields dbf.old.tables.DbfTable-class.html#_buildHeaderFields dbf.old.tables.DbfTable.bof dbf.old.tables.DbfTable-class.html#bof dbf.old.tables.DbfTable.reindex dbf.old.tables.DbfTable-class.html#reindex dbf.old.tables._Db4Table._numeric_fields dbf.old.tables._Db4Table-class.html#_numeric_fields dbf.old.tables.DbfTable.codepage dbf.old.tables.DbfTable-class.html#codepage dbf.old.tables.DbfTable.delete_fields dbf.old.tables.DbfTable-class.html#delete_fields dbf.old.tables._Db4Table._memoClass dbf.old.tables._VfpMemo-class.html dbf.old.tables.DbfTable._TableHeader dbf.old.tables.DbfTable._TableHeader-class.html dbf.old.tables.DbfTable.open dbf.old.tables.DbfTable-class.html#open dbf.old.tables.DbfTable.zap dbf.old.tables.DbfTable-class.html#zap dbf.old.tables.DbfTable.size dbf.old.tables.DbfTable-class.html#size dbf.old.tables.DbfTable.top dbf.old.tables.DbfTable-class.html#top dbf.old.tables._Db4Table._supported_tables dbf.old.tables._Db4Table-class.html#_supported_tables dbf.old.tables.DbfTable.is_decimal dbf.old.tables.DbfTable-class.html#is_decimal dbf.old.tables.DbfTable.next dbf.old.tables.DbfTable-class.html#next dbf.old.tables.DbfTable._loadtable dbf.old.tables.DbfTable-class.html#_loadtable dbf.old.tables.DbfTable.prev dbf.old.tables.DbfTable-class.html#prev dbf.old.tables.DbfTable.create_backup dbf.old.tables.DbfTable-class.html#create_backup dbf.old.tables.DbfTable.__len__ dbf.old.tables.DbfTable-class.html#__len__ dbf.old.tables._Db4Table._fixed_fields dbf.old.tables._Db4Table-class.html#_fixed_fields dbf.old.tables.DbfTable.__iter__ dbf.old.tables.DbfTable-class.html#__iter__ dbf.old.tables.DbfTable.memoname dbf.old.tables.DbfTable-class.html#memoname dbf.old.tables.DbfTable.create_index dbf.old.tables.DbfTable-class.html#create_index dbf.old.tables.DbfTable.type dbf.old.tables.DbfTable-class.html#type dbf.old.tables.DbfTable._update_disk dbf.old.tables.DbfTable-class.html#_update_disk dbf.old.tables.DbfTable.pack dbf.old.tables.DbfTable-class.html#pack dbf.old.tables.DbfTable.get_record dbf.old.tables.DbfTable-class.html#get_record dbf.old.tables._Db4Table._dbfTableHeaderExtra dbf.old.tables._Db4Table-class.html#_dbfTableHeaderExtra dbf.old.tables.DbfTable.__init__ dbf.old.tables.DbfTable-class.html#__init__ dbf.old.tables.DbfTable._initializeFields dbf.old.tables.DbfTable-class.html#_initializeFields dbf.old.tables._Db4Table._variable_fields dbf.old.tables._Db4Table-class.html#_variable_fields dbf.old.tables._Db4Table._checkMemoIntegrity dbf.old.tables._Db4Table-class.html#_checkMemoIntegrity dbf.old.tables.DbfTable.record_length dbf.old.tables.DbfTable-class.html#record_length dbf.old.tables.DbfTable._fieldLayout dbf.old.tables.DbfTable-class.html#_fieldLayout dbf.old.tables.DbfTable.record_number dbf.old.tables.DbfTable-class.html#record_number dbf.old.tables.DbfTable.structure dbf.old.tables.DbfTable-class.html#structure dbf.old.tables.DbfTable.eof dbf.old.tables.DbfTable-class.html#eof dbf.old.tables.DbfTable.bottom dbf.old.tables.DbfTable-class.html#bottom dbf.old.tables.DbfTable._MetaData dbf.old.tables.DbfTable._MetaData-class.html dbf.old.tables.DbfTable.field_names dbf.old.tables.DbfTable-class.html#field_names dbf.old.tables.DbfTable._Table dbf.old.tables.DbfTable._Table-class.html dbf.old.tables._DbfMemo 
dbf.old.tables._DbfMemo-class.html dbf.old.tables._DbfMemo._get_memo dbf.old.tables._DbfMemo-class.html#_get_memo dbf.old.tables._DbfMemo._init dbf.old.tables._DbfMemo-class.html#_init dbf.old.tables._DbfMemo.put_memo dbf.old.tables._DbfMemo-class.html#put_memo dbf.old.tables._DbfMemo.get_memo dbf.old.tables._DbfMemo-class.html#get_memo dbf.old.tables._DbfMemo.__init__ dbf.old.tables._DbfMemo-class.html#__init__ dbf.old.tables._DbfMemo._put_memo dbf.old.tables._DbfMemo-class.html#_put_memo dbf.old.tables._DbfRecord dbf.old.tables._DbfRecord-class.html dbf.old.tables._DbfRecord.reindex dbf.old.tables._DbfRecord-class.html#reindex dbf.old.tables._DbfRecord.reset_record dbf.old.tables._DbfRecord-class.html#reset_record dbf.old.tables._DbfRecord._dirty dbf.old.tables._DbfRecord-class.html#_dirty dbf.old.tables._DbfRecord.__str__ dbf.old.tables._DbfRecord-class.html#__str__ dbf.old.tables._DbfRecord._retrieveFieldValue dbf.old.tables._DbfRecord-class.html#_retrieveFieldValue dbf.old.tables._DbfRecord._createBlankRecord dbf.old.tables._DbfRecord-class.html#_createBlankRecord dbf.old.tables._DbfRecord.undelete_record dbf.old.tables._DbfRecord-class.html#undelete_record dbf.old.tables._DbfRecord.__setattr__ dbf.old.tables._DbfRecord-class.html#__setattr__ dbf.old.tables._DbfRecord.__new__ dbf.old.tables._DbfRecord-class.html#__new__ dbf.old.tables._DbfRecord.__contains__ dbf.old.tables._DbfRecord-class.html#__contains__ dbf.old.tables._DbfRecord.__getattr__ dbf.old.tables._DbfRecord-class.html#__getattr__ dbf.old.tables._DbfRecord.write dbf.old.tables._DbfRecord-class.html#write dbf.old.tables._DbfRecord._recnum dbf.old.tables._DbfRecord-class.html#_recnum dbf.old.tables._DbfRecord.__call__ dbf.old.tables._DbfRecord-class.html#__call__ dbf.old.tables._DbfRecord._updateFieldValue dbf.old.tables._DbfRecord-class.html#_updateFieldValue dbf.old.tables._DbfRecord.record_table dbf.old.tables._DbfRecord-class.html#record_table dbf.old.tables._DbfRecord.delete_record dbf.old.tables._DbfRecord-class.html#delete_record dbf.old.tables._DbfRecord.__repr__ dbf.old.tables._DbfRecord-class.html#__repr__ dbf.old.tables._DbfRecord.__getitem__ dbf.old.tables._DbfRecord-class.html#__getitem__ dbf.old.tables._DbfRecord.read dbf.old.tables._DbfRecord-class.html#read dbf.old.tables._DbfRecord._layout dbf.old.tables._DbfRecord-class.html#_layout dbf.old.tables._DbfRecord.scatter_fields dbf.old.tables._DbfRecord-class.html#scatter_fields dbf.old.tables._DbfRecord.__setitem__ dbf.old.tables._DbfRecord-class.html#__setitem__ dbf.old.tables._DbfRecord.record_number dbf.old.tables._DbfRecord-class.html#record_number dbf.old.tables._DbfRecord._data dbf.old.tables._DbfRecord-class.html#_data dbf.old.tables._DbfRecord.gather_fields dbf.old.tables._DbfRecord-class.html#gather_fields dbf.old.tables._DbfRecord.has_been_deleted dbf.old.tables._DbfRecord-class.html#has_been_deleted dbf.old.tables._DbfRecord.__iter__ dbf.old.tables._DbfRecord-class.html#__iter__ dbf.old.tables._DbfRecord.__len__ dbf.old.tables._DbfRecord-class.html#__len__ dbf.old.tables._DbfRecord.field_names dbf.old.tables._DbfRecord-class.html#field_names dbf.old.tables._DbfRecord._update_disk dbf.old.tables._DbfRecord-class.html#_update_disk dbf.old.tables._VfpMemo dbf.old.tables._VfpMemo-class.html dbf.old.tables._VfpMemo._get_memo dbf.old.tables._VfpMemo-class.html#_get_memo dbf.old.tables._VfpMemo._init dbf.old.tables._VfpMemo-class.html#_init dbf.old.tables._DbfMemo.put_memo dbf.old.tables._DbfMemo-class.html#put_memo dbf.old.tables._VfpMemo._put_memo 
dbf.old.tables._VfpMemo-class.html#_put_memo dbf.old.tables._DbfMemo.get_memo dbf.old.tables._DbfMemo-class.html#get_memo dbf.old.tables._DbfMemo.__init__ dbf.old.tables._DbfMemo-class.html#__init__ dbf.old.tables.property dbf.old.tables.property-class.html dbf.old.tables.property.setter dbf.old.tables.property-class.html#setter dbf.old.tables.property.__set__ dbf.old.tables.property-class.html#__set__ dbf.old.tables.property.__init__ dbf.old.tables.property-class.html#__init__ dbf.old.tables.property.__call__ dbf.old.tables.property-class.html#__call__ dbf.old.tables.property.__get__ dbf.old.tables.property-class.html#__get__ dbf.old.tables.property.deleter dbf.old.tables.property-class.html#deleter dbf.old.tables.property.__delete__ dbf.old.tables.property-class.html#__delete__ dbf.tables.Db3Table dbf.tables.Db3Table-class.html dbf.tables.DbfTable.__str__ dbf.tables.DbfTable-class.html#__str__ dbf.tables.DbfTable.supported_tables dbf.tables.DbfTable-class.html#supported_tables dbf.tables.Db3Table._yesMemoMask dbf.tables.Db3Table-class.html#_yesMemoMask dbf.tables.DbfTable.query dbf.tables.DbfTable-class.html#query dbf.tables.DbfTable.last_update dbf.tables.DbfTable-class.html#last_update dbf.tables.Db3Table._meta_only dbf.tables.Db3Table-class.html#_meta_only dbf.tables.DbfTable.__enter__ dbf.tables.DbfTable-class.html#__enter__ dbf.tables.DbfTable.__exit__ dbf.tables.DbfTable-class.html#__exit__ dbf.tables.DbfTable.__getitem__ dbf.tables.DbfTable-class.html#__getitem__ dbf.tables.Db3Table._fieldtypes dbf.tables.Db3Table-class.html#_fieldtypes dbf.tables.DbfTable._DbfLists dbf.tables.DbfTable._DbfLists-class.html dbf.tables.Db3Table._decimal_fields dbf.tables.Db3Table-class.html#_decimal_fields dbf.tables.DbfTable.use_deleted dbf.tables.DbfTable-class.html#use_deleted dbf.tables.DbfTable.DbfIterator dbf.tables.DbfTable.DbfIterator-class.html dbf.tables.DbfTable.__nonzero__ dbf.tables.DbfTable-class.html#__nonzero__ dbf.tables.Db3Table._character_fields dbf.tables.Db3Table-class.html#_character_fields dbf.tables.DbfTable._list_fields dbf.tables.DbfTable-class.html#_list_fields dbf.tables.DbfTable.is_memotype dbf.tables.DbfTable-class.html#is_memotype dbf.tables.Db3Table._memoext dbf.tables.Db3Table-class.html#_memoext dbf.tables.Db3Table._dbfTableHeader dbf.tables.Db3Table-class.html#_dbfTableHeader dbf.tables.DbfTable.export dbf.tables.DbfTable-class.html#export dbf.tables.DbfTable.close dbf.tables.DbfTable-class.html#close dbf.tables.DbfTable.append dbf.tables.DbfTable-class.html#append dbf.tables.DbfTable.rename_field dbf.tables.DbfTable-class.html#rename_field dbf.tables.Db3Table._use_deleted dbf.tables.Db3Table-class.html#_use_deleted dbf.tables.DbfTable.__contains__ dbf.tables.DbfTable-class.html#__contains__ dbf.tables.DbfTable.__getattr__ dbf.tables.DbfTable-class.html#__getattr__ dbf.tables.DbfTable.filename dbf.tables.DbfTable-class.html#filename dbf.tables.DbfTable.current dbf.tables.DbfTable-class.html#current dbf.tables.DbfTable.version dbf.tables.DbfTable-class.html#version dbf.tables.DbfTable.new dbf.tables.DbfTable-class.html#new dbf.tables.Db3Table._read_only dbf.tables.Db3Table-class.html#_read_only dbf.tables.DbfTable.goto dbf.tables.DbfTable-class.html#goto dbf.tables.DbfTable.field_count dbf.tables.DbfTable-class.html#field_count dbf.tables.Db3Table._versionabbv dbf.tables.Db3Table-class.html#_versionabbv dbf.tables.DbfTable._loadtable dbf.tables.DbfTable-class.html#_loadtable dbf.tables.Db3Table._memotypes dbf.tables.Db3Table-class.html#_memotypes 
dbf.tables.Db3Table._version dbf.tables.Db3Table-class.html#_version dbf.tables.DbfTable.add_fields dbf.tables.DbfTable-class.html#add_fields dbf.tables.DbfTable.__repr__ dbf.tables.DbfTable-class.html#__repr__ dbf.tables.DbfTable._Indexen dbf.tables.DbfTable._Indexen-class.html dbf.tables.DbfTable.pack dbf.tables.DbfTable-class.html#pack dbf.tables.DbfTable.backup dbf.tables.DbfTable-class.html#backup dbf.tables.DbfTable.bof dbf.tables.DbfTable-class.html#bof dbf.tables.DbfTable.reindex dbf.tables.DbfTable-class.html#reindex dbf.tables.Db3Table._numeric_fields dbf.tables.Db3Table-class.html#_numeric_fields dbf.tables.DbfTable.codepage dbf.tables.DbfTable-class.html#codepage dbf.tables.DbfTable.delete_fields dbf.tables.DbfTable-class.html#delete_fields dbf.tables.Db3Table._memoClass dbf.tables._Db3Memo-class.html dbf.tables.DbfTable._TableHeader dbf.tables.DbfTable._TableHeader-class.html dbf.tables.DbfTable.open dbf.tables.DbfTable-class.html#open dbf.tables.DbfTable.zap dbf.tables.DbfTable-class.html#zap dbf.tables.DbfTable.size dbf.tables.DbfTable-class.html#size dbf.tables.DbfTable.top dbf.tables.DbfTable-class.html#top dbf.tables.Db3Table._supported_tables dbf.tables.Db3Table-class.html#_supported_tables dbf.tables.DbfTable.is_decimal dbf.tables.DbfTable-class.html#is_decimal dbf.tables.DbfTable.next dbf.tables.DbfTable-class.html#next dbf.tables.Db3Table._noMemoMask dbf.tables.Db3Table-class.html#_noMemoMask dbf.tables.DbfTable.prev dbf.tables.DbfTable-class.html#prev dbf.tables.DbfTable.create_backup dbf.tables.DbfTable-class.html#create_backup dbf.tables.DbfTable.__len__ dbf.tables.DbfTable-class.html#__len__ dbf.tables.Db3Table._fixed_fields dbf.tables.Db3Table-class.html#_fixed_fields dbf.tables.DbfTable.__iter__ dbf.tables.DbfTable-class.html#__iter__ dbf.tables.DbfTable.memoname dbf.tables.DbfTable-class.html#memoname dbf.tables.DbfTable.create_index dbf.tables.DbfTable-class.html#create_index dbf.tables.DbfTable.type dbf.tables.DbfTable-class.html#type dbf.tables.DbfTable._update_disk dbf.tables.DbfTable-class.html#_update_disk dbf.tables.DbfTable._buildHeaderFields dbf.tables.DbfTable-class.html#_buildHeaderFields dbf.tables.DbfTable.get_record dbf.tables.DbfTable-class.html#get_record dbf.tables.Db3Table._dbfTableHeaderExtra dbf.tables.Db3Table-class.html#_dbfTableHeaderExtra dbf.tables.DbfTable.__init__ dbf.tables.DbfTable-class.html#__init__ dbf.tables.Db3Table._initializeFields dbf.tables.Db3Table-class.html#_initializeFields dbf.tables.Db3Table._variable_fields dbf.tables.Db3Table-class.html#_variable_fields dbf.tables.Db3Table._checkMemoIntegrity dbf.tables.Db3Table-class.html#_checkMemoIntegrity dbf.tables.DbfTable.record_length dbf.tables.DbfTable-class.html#record_length dbf.tables.DbfTable._fieldLayout dbf.tables.DbfTable-class.html#_fieldLayout dbf.tables.DbfTable.record_number dbf.tables.DbfTable-class.html#record_number dbf.tables.DbfTable.structure dbf.tables.DbfTable-class.html#structure dbf.tables.DbfTable.eof dbf.tables.DbfTable-class.html#eof dbf.tables.DbfTable.bottom dbf.tables.DbfTable-class.html#bottom dbf.tables.DbfTable._MetaData dbf.tables.DbfTable._MetaData-class.html dbf.tables.DbfTable.field_names dbf.tables.DbfTable-class.html#field_names dbf.tables.DbfTable._Table dbf.tables.DbfTable._Table-class.html dbf.tables.DbfCsv dbf.tables.DbfCsv-class.html dbf.tables.DbfCsv.lineterminator dbf.tables.DbfCsv-class.html#lineterminator dbf.tables.DbfCsv.skipinitialspace dbf.tables.DbfCsv-class.html#skipinitialspace dbf.tables.DbfCsv.quoting 
dbf.tables.DbfCsv-class.html#quoting dbf.tables.DbfCsv.delimiter dbf.tables.DbfCsv-class.html#delimiter dbf.tables.DbfCsv.escapechar dbf.tables.DbfCsv-class.html#escapechar dbf.tables.DbfCsv.quotechar dbf.tables.DbfCsv-class.html#quotechar dbf.tables.DbfCsv.doublequote dbf.tables.DbfCsv-class.html#doublequote dbf.tables.DbfTable dbf.tables.DbfTable-class.html dbf.tables.DbfTable.__str__ dbf.tables.DbfTable-class.html#__str__ dbf.tables.DbfTable.supported_tables dbf.tables.DbfTable-class.html#supported_tables dbf.tables.DbfTable._yesMemoMask dbf.tables.DbfTable-class.html#_yesMemoMask dbf.tables.DbfTable.query dbf.tables.DbfTable-class.html#query dbf.tables.DbfTable.last_update dbf.tables.DbfTable-class.html#last_update dbf.tables.DbfTable._meta_only dbf.tables.DbfTable-class.html#_meta_only dbf.tables.DbfTable.__enter__ dbf.tables.DbfTable-class.html#__enter__ dbf.tables.DbfTable.__exit__ dbf.tables.DbfTable-class.html#__exit__ dbf.tables.DbfTable.__getitem__ dbf.tables.DbfTable-class.html#__getitem__ dbf.tables.DbfTable._fieldtypes dbf.tables.DbfTable-class.html#_fieldtypes dbf.tables.DbfTable._DbfLists dbf.tables.DbfTable._DbfLists-class.html dbf.tables.DbfTable._decimal_fields dbf.tables.DbfTable-class.html#_decimal_fields dbf.tables.DbfTable.use_deleted dbf.tables.DbfTable-class.html#use_deleted dbf.tables.DbfTable.DbfIterator dbf.tables.DbfTable.DbfIterator-class.html dbf.tables.DbfTable.__nonzero__ dbf.tables.DbfTable-class.html#__nonzero__ dbf.tables.DbfTable._character_fields dbf.tables.DbfTable-class.html#_character_fields dbf.tables.DbfTable._noMemoMask dbf.tables.DbfTable-class.html#_noMemoMask dbf.tables.DbfTable.is_memotype dbf.tables.DbfTable-class.html#is_memotype dbf.tables.DbfTable._list_fields dbf.tables.DbfTable-class.html#_list_fields dbf.tables.DbfTable._memoext dbf.tables.DbfTable-class.html#_memoext dbf.tables.DbfTable._dbfTableHeader dbf.tables.DbfTable-class.html#_dbfTableHeader dbf.tables.DbfTable.export dbf.tables.DbfTable-class.html#export dbf.tables.DbfTable.close dbf.tables.DbfTable-class.html#close dbf.tables.DbfTable.append dbf.tables.DbfTable-class.html#append dbf.tables.DbfTable.rename_field dbf.tables.DbfTable-class.html#rename_field dbf.tables.DbfTable._use_deleted dbf.tables.DbfTable-class.html#_use_deleted dbf.tables.DbfTable.__contains__ dbf.tables.DbfTable-class.html#__contains__ dbf.tables.DbfTable.__getattr__ dbf.tables.DbfTable-class.html#__getattr__ dbf.tables.DbfTable.filename dbf.tables.DbfTable-class.html#filename dbf.tables.DbfTable.current dbf.tables.DbfTable-class.html#current dbf.tables.DbfTable.version dbf.tables.DbfTable-class.html#version dbf.tables.DbfTable.new dbf.tables.DbfTable-class.html#new dbf.tables.DbfTable._read_only dbf.tables.DbfTable-class.html#_read_only dbf.tables.DbfTable.goto dbf.tables.DbfTable-class.html#goto dbf.tables.DbfTable.field_count dbf.tables.DbfTable-class.html#field_count dbf.tables.DbfTable._Indexen dbf.tables.DbfTable._Indexen-class.html dbf.tables.DbfTable._memotypes dbf.tables.DbfTable-class.html#_memotypes dbf.tables.DbfTable._version dbf.tables.DbfTable-class.html#_version dbf.tables.DbfTable.add_fields dbf.tables.DbfTable-class.html#add_fields dbf.tables.DbfTable.__repr__ dbf.tables.DbfTable-class.html#__repr__ dbf.tables.DbfTable._versionabbv dbf.tables.DbfTable-class.html#_versionabbv dbf.tables.DbfTable._buildHeaderFields dbf.tables.DbfTable-class.html#_buildHeaderFields dbf.tables.DbfTable.backup dbf.tables.DbfTable-class.html#backup dbf.tables.DbfTable.bof dbf.tables.DbfTable-class.html#bof 
dbf.tables.DbfTable.reindex dbf.tables.DbfTable-class.html#reindex dbf.tables.DbfTable._numeric_fields dbf.tables.DbfTable-class.html#_numeric_fields dbf.tables.DbfTable.codepage dbf.tables.DbfTable-class.html#codepage dbf.tables.DbfTable.delete_fields dbf.tables.DbfTable-class.html#delete_fields dbf.tables.DbfTable._memoClass dbf.tables._DbfMemo-class.html dbf.tables.DbfTable._TableHeader dbf.tables.DbfTable._TableHeader-class.html dbf.tables.DbfTable.open dbf.tables.DbfTable-class.html#open dbf.tables.DbfTable.zap dbf.tables.DbfTable-class.html#zap dbf.tables.DbfTable.size dbf.tables.DbfTable-class.html#size dbf.tables.DbfTable.top dbf.tables.DbfTable-class.html#top dbf.tables.DbfTable._supported_tables dbf.tables.DbfTable-class.html#_supported_tables dbf.tables.DbfTable.is_decimal dbf.tables.DbfTable-class.html#is_decimal dbf.tables.DbfTable.next dbf.tables.DbfTable-class.html#next dbf.tables.DbfTable._loadtable dbf.tables.DbfTable-class.html#_loadtable dbf.tables.DbfTable.prev dbf.tables.DbfTable-class.html#prev dbf.tables.DbfTable.type dbf.tables.DbfTable-class.html#type dbf.tables.DbfTable.__len__ dbf.tables.DbfTable-class.html#__len__ dbf.tables.DbfTable._fixed_fields dbf.tables.DbfTable-class.html#_fixed_fields dbf.tables.DbfTable.__iter__ dbf.tables.DbfTable-class.html#__iter__ dbf.tables.DbfTable.memoname dbf.tables.DbfTable-class.html#memoname dbf.tables.DbfTable.create_index dbf.tables.DbfTable-class.html#create_index dbf.tables.DbfTable.create_backup dbf.tables.DbfTable-class.html#create_backup dbf.tables.DbfTable._update_disk dbf.tables.DbfTable-class.html#_update_disk dbf.tables.DbfTable.pack dbf.tables.DbfTable-class.html#pack dbf.tables.DbfTable.get_record dbf.tables.DbfTable-class.html#get_record dbf.tables.DbfTable._dbfTableHeaderExtra dbf.tables.DbfTable-class.html#_dbfTableHeaderExtra dbf.tables.DbfTable.__init__ dbf.tables.DbfTable-class.html#__init__ dbf.tables.DbfTable._initializeFields dbf.tables.DbfTable-class.html#_initializeFields dbf.tables.DbfTable._variable_fields dbf.tables.DbfTable-class.html#_variable_fields dbf.tables.DbfTable._checkMemoIntegrity dbf.tables.DbfTable-class.html#_checkMemoIntegrity dbf.tables.DbfTable.record_length dbf.tables.DbfTable-class.html#record_length dbf.tables.DbfTable._fieldLayout dbf.tables.DbfTable-class.html#_fieldLayout dbf.tables.DbfTable.record_number dbf.tables.DbfTable-class.html#record_number dbf.tables.DbfTable.structure dbf.tables.DbfTable-class.html#structure dbf.tables.DbfTable.eof dbf.tables.DbfTable-class.html#eof dbf.tables.DbfTable.bottom dbf.tables.DbfTable-class.html#bottom dbf.tables.DbfTable._MetaData dbf.tables.DbfTable._MetaData-class.html dbf.tables.DbfTable.field_names dbf.tables.DbfTable-class.html#field_names dbf.tables.DbfTable._Table dbf.tables.DbfTable._Table-class.html dbf.tables.DbfTable.DbfIterator dbf.tables.DbfTable.DbfIterator-class.html dbf.tables.DbfTable.DbfIterator.next dbf.tables.DbfTable.DbfIterator-class.html#next dbf.tables.DbfTable.DbfIterator.__iter__ dbf.tables.DbfTable.DbfIterator-class.html#__iter__ dbf.tables.DbfTable.DbfIterator.__init__ dbf.tables.DbfTable.DbfIterator-class.html#__init__ dbf.tables.DbfTable._DbfLists dbf.tables.DbfTable._DbfLists-class.html dbf.tables.DbfTable._DbfLists.add dbf.tables.DbfTable._DbfLists-class.html#add dbf.tables.DbfTable._DbfLists.__iter__ dbf.tables.DbfTable._DbfLists-class.html#__iter__ dbf.tables.DbfTable._DbfLists.__len__ dbf.tables.DbfTable._DbfLists-class.html#__len__ dbf.tables.DbfTable._DbfLists.__init__ 
dbf.tables.DbfTable._DbfLists-class.html#__init__ dbf.tables.DbfTable._Indexen dbf.tables.DbfTable._Indexen-class.html dbf.tables.DbfTable._Indexen.add dbf.tables.DbfTable._Indexen-class.html#add dbf.tables.DbfTable._Indexen.__iter__ dbf.tables.DbfTable._Indexen-class.html#__iter__ dbf.tables.DbfTable._Indexen.__len__ dbf.tables.DbfTable._Indexen-class.html#__len__ dbf.tables.DbfTable._Indexen.__init__ dbf.tables.DbfTable._Indexen-class.html#__init__ dbf.tables.DbfTable._MetaData dbf.tables.DbfTable._MetaData-class.html dbf.tables.DbfTable._MetaData.mfd dbf.tables.DbfTable._MetaData-class.html#mfd dbf.tables.DbfTable._MetaData.newmemofile dbf.tables.DbfTable._MetaData-class.html#newmemofile dbf.tables.DbfTable._MetaData.ignorememos dbf.tables.DbfTable._MetaData-class.html#ignorememos dbf.tables.DbfTable._MetaData.filename dbf.tables.DbfTable._MetaData-class.html#filename dbf.tables.DbfTable._MetaData.current dbf.tables.DbfTable._MetaData-class.html#current dbf.tables.DbfTable._MetaData.memofields dbf.tables.DbfTable._MetaData-class.html#memofields dbf.tables.DbfTable._MetaData.dfd dbf.tables.DbfTable._MetaData-class.html#dfd dbf.tables.DbfTable._MetaData.blankrecord dbf.tables.DbfTable._MetaData-class.html#blankrecord dbf.tables.DbfTable._MetaData.memo dbf.tables.DbfTable._MetaData-class.html#memo dbf.tables.DbfTable._MetaData.memoname dbf.tables.DbfTable._MetaData-class.html#memoname dbf.tables.DbfTable._MetaData.fields dbf.tables.DbfTable._MetaData-class.html#fields dbf.tables.DbfTable._Table dbf.tables.DbfTable._Table-class.html dbf.tables.DbfTable._Table.__getitem__ dbf.tables.DbfTable._Table-class.html#__getitem__ dbf.tables.DbfTable._Table.clear dbf.tables.DbfTable._Table-class.html#clear dbf.tables.DbfTable._Table.append dbf.tables.DbfTable._Table-class.html#append dbf.tables.DbfTable._Table.__init__ dbf.tables.DbfTable._Table-class.html#__init__ dbf.tables.DbfTable._TableHeader dbf.tables.DbfTable._TableHeader-class.html dbf.tables.DbfTable._TableHeader.record_count dbf.tables.DbfTable._TableHeader-class.html#record_count dbf.tables.DbfTable._TableHeader.record_length dbf.tables.DbfTable._TableHeader-class.html#record_length dbf.tables.DbfTable._TableHeader.extra dbf.tables.DbfTable._TableHeader-class.html#extra dbf.tables.DbfTable._TableHeader.fields dbf.tables.DbfTable._TableHeader-class.html#fields dbf.tables.DbfTable._TableHeader.codepage dbf.tables.DbfTable._TableHeader-class.html#codepage dbf.tables.DbfTable._TableHeader.field_count dbf.tables.DbfTable._TableHeader-class.html#field_count dbf.tables.DbfTable._TableHeader.update dbf.tables.DbfTable._TableHeader-class.html#update dbf.tables.DbfTable._TableHeader.start dbf.tables.DbfTable._TableHeader-class.html#start dbf.tables.DbfTable._TableHeader.version dbf.tables.DbfTable._TableHeader-class.html#version dbf.tables.DbfTable._TableHeader.data dbf.tables.DbfTable._TableHeader-class.html#data dbf.tables.DbfTable._TableHeader.__init__ dbf.tables.DbfTable._TableHeader-class.html#__init__ dbf.tables.FpTable dbf.tables.FpTable-class.html dbf.tables.DbfTable.__str__ dbf.tables.DbfTable-class.html#__str__ dbf.tables.DbfTable.supported_tables dbf.tables.DbfTable-class.html#supported_tables dbf.tables.FpTable._yesMemoMask dbf.tables.FpTable-class.html#_yesMemoMask dbf.tables.DbfTable.query dbf.tables.DbfTable-class.html#query dbf.tables.DbfTable.last_update dbf.tables.DbfTable-class.html#last_update dbf.tables.DbfTable._meta_only dbf.tables.DbfTable-class.html#_meta_only dbf.tables.DbfTable.__enter__ 
dbf.tables.DbfTable-class.html#__enter__ dbf.tables.DbfTable.__exit__ dbf.tables.DbfTable-class.html#__exit__ dbf.tables.DbfTable.__getitem__ dbf.tables.DbfTable-class.html#__getitem__ dbf.tables.FpTable._fieldtypes dbf.tables.FpTable-class.html#_fieldtypes dbf.tables.DbfTable._DbfLists dbf.tables.DbfTable._DbfLists-class.html dbf.tables.FpTable._decimal_fields dbf.tables.FpTable-class.html#_decimal_fields dbf.tables.DbfTable.use_deleted dbf.tables.DbfTable-class.html#use_deleted dbf.tables.DbfTable.DbfIterator dbf.tables.DbfTable.DbfIterator-class.html dbf.tables.DbfTable.__nonzero__ dbf.tables.DbfTable-class.html#__nonzero__ dbf.tables.FpTable._character_fields dbf.tables.FpTable-class.html#_character_fields dbf.tables.FpTable._noMemoMask dbf.tables.FpTable-class.html#_noMemoMask dbf.tables.DbfTable.is_memotype dbf.tables.DbfTable-class.html#is_memotype dbf.tables.DbfTable._list_fields dbf.tables.DbfTable-class.html#_list_fields dbf.tables.FpTable._memoext dbf.tables.FpTable-class.html#_memoext dbf.tables.FpTable._dbfTableHeader dbf.tables.FpTable-class.html#_dbfTableHeader dbf.tables.DbfTable.export dbf.tables.DbfTable-class.html#export dbf.tables.DbfTable.close dbf.tables.DbfTable-class.html#close dbf.tables.DbfTable.append dbf.tables.DbfTable-class.html#append dbf.tables.DbfTable.rename_field dbf.tables.DbfTable-class.html#rename_field dbf.tables.FpTable._use_deleted dbf.tables.FpTable-class.html#_use_deleted dbf.tables.DbfTable.__contains__ dbf.tables.DbfTable-class.html#__contains__ dbf.tables.DbfTable.__getattr__ dbf.tables.DbfTable-class.html#__getattr__ dbf.tables.DbfTable.filename dbf.tables.DbfTable-class.html#filename dbf.tables.DbfTable.current dbf.tables.DbfTable-class.html#current dbf.tables.DbfTable.version dbf.tables.DbfTable-class.html#version dbf.tables.DbfTable.new dbf.tables.DbfTable-class.html#new dbf.tables.DbfTable._read_only dbf.tables.DbfTable-class.html#_read_only dbf.tables.DbfTable.goto dbf.tables.DbfTable-class.html#goto dbf.tables.DbfTable.field_count dbf.tables.DbfTable-class.html#field_count dbf.tables.FpTable._versionabbv dbf.tables.FpTable-class.html#_versionabbv dbf.tables.FpTable._memotypes dbf.tables.FpTable-class.html#_memotypes dbf.tables.FpTable._version dbf.tables.FpTable-class.html#_version dbf.tables.DbfTable.add_fields dbf.tables.DbfTable-class.html#add_fields dbf.tables.DbfTable.__repr__ dbf.tables.DbfTable-class.html#__repr__ dbf.tables.DbfTable._Indexen dbf.tables.DbfTable._Indexen-class.html dbf.tables.DbfTable._buildHeaderFields dbf.tables.DbfTable-class.html#_buildHeaderFields dbf.tables.DbfTable.backup dbf.tables.DbfTable-class.html#backup dbf.tables.DbfTable.bof dbf.tables.DbfTable-class.html#bof dbf.tables.DbfTable.reindex dbf.tables.DbfTable-class.html#reindex dbf.tables.FpTable._numeric_fields dbf.tables.FpTable-class.html#_numeric_fields dbf.tables.DbfTable.codepage dbf.tables.DbfTable-class.html#codepage dbf.tables.DbfTable.delete_fields dbf.tables.DbfTable-class.html#delete_fields dbf.tables.FpTable._memoClass dbf.tables._VfpMemo-class.html dbf.tables.DbfTable._TableHeader dbf.tables.DbfTable._TableHeader-class.html dbf.tables.DbfTable.open dbf.tables.DbfTable-class.html#open dbf.tables.DbfTable.zap dbf.tables.DbfTable-class.html#zap dbf.tables.DbfTable.size dbf.tables.DbfTable-class.html#size dbf.tables.DbfTable.top dbf.tables.DbfTable-class.html#top dbf.tables.FpTable._supported_tables dbf.tables.FpTable-class.html#_supported_tables dbf.tables.DbfTable.is_decimal dbf.tables.DbfTable-class.html#is_decimal dbf.tables.DbfTable.next 
dbf.tables.DbfTable-class.html#next dbf.tables.DbfTable._loadtable dbf.tables.DbfTable-class.html#_loadtable dbf.tables.DbfTable.prev dbf.tables.DbfTable-class.html#prev dbf.tables.DbfTable.create_backup dbf.tables.DbfTable-class.html#create_backup dbf.tables.DbfTable.__len__ dbf.tables.DbfTable-class.html#__len__ dbf.tables.FpTable._fixed_fields dbf.tables.FpTable-class.html#_fixed_fields dbf.tables.DbfTable.__iter__ dbf.tables.DbfTable-class.html#__iter__ dbf.tables.DbfTable.memoname dbf.tables.DbfTable-class.html#memoname dbf.tables.DbfTable.create_index dbf.tables.DbfTable-class.html#create_index dbf.tables.DbfTable.type dbf.tables.DbfTable-class.html#type dbf.tables.DbfTable._update_disk dbf.tables.DbfTable-class.html#_update_disk dbf.tables.DbfTable.pack dbf.tables.DbfTable-class.html#pack dbf.tables.DbfTable.get_record dbf.tables.DbfTable-class.html#get_record dbf.tables.FpTable._dbfTableHeaderExtra dbf.tables.FpTable-class.html#_dbfTableHeaderExtra dbf.tables.DbfTable.__init__ dbf.tables.DbfTable-class.html#__init__ dbf.tables.FpTable._initializeFields dbf.tables.FpTable-class.html#_initializeFields dbf.tables.FpTable._variable_fields dbf.tables.FpTable-class.html#_variable_fields dbf.tables.FpTable._checkMemoIntegrity dbf.tables.FpTable-class.html#_checkMemoIntegrity dbf.tables.DbfTable.record_length dbf.tables.DbfTable-class.html#record_length dbf.tables.DbfTable._fieldLayout dbf.tables.DbfTable-class.html#_fieldLayout dbf.tables.DbfTable.record_number dbf.tables.DbfTable-class.html#record_number dbf.tables.DbfTable.structure dbf.tables.DbfTable-class.html#structure dbf.tables.DbfTable.eof dbf.tables.DbfTable-class.html#eof dbf.tables.DbfTable.bottom dbf.tables.DbfTable-class.html#bottom dbf.tables.DbfTable._MetaData dbf.tables.DbfTable._MetaData-class.html dbf.tables.DbfTable.field_names dbf.tables.DbfTable-class.html#field_names dbf.tables.DbfTable._Table dbf.tables.DbfTable._Table-class.html dbf.tables.Index dbf.tables.Index-class.html dbf.tables.Index.reindex dbf.tables.Index-class.html#reindex dbf.tables.Index.IndexIterator dbf.tables.Index.IndexIterator-class.html dbf.tables.Index.query dbf.tables.Index-class.html#query dbf.tables.Index.close dbf.tables.Index-class.html#close dbf.tables.Index.find dbf.tables.Index-class.html#find dbf.tables.Index.__init__ dbf.tables.Index-class.html#__init__ dbf.tables.Index.index dbf.tables.Index-class.html#index dbf.tables.Index.__contains__ dbf.tables.Index-class.html#__contains__ dbf.tables.Index.__enter__ dbf.tables.Index-class.html#__enter__ dbf.tables.Index._partial_match dbf.tables.Index-class.html#_partial_match dbf.tables.Index.__call__ dbf.tables.Index-class.html#__call__ dbf.tables.Index.__len__ dbf.tables.Index-class.html#__len__ dbf.tables.Index._purge dbf.tables.Index-class.html#_purge dbf.tables.Index.__exit__ dbf.tables.Index-class.html#__exit__ dbf.tables.Index.__getitem__ dbf.tables.Index-class.html#__getitem__ dbf.tables.Index.__iter__ dbf.tables.Index-class.html#__iter__ dbf.tables.Index.from_file dbf.tables.Index-class.html#from_file dbf.tables.Index.search dbf.tables.Index-class.html#search dbf.tables.Index.clear dbf.tables.Index-class.html#clear dbf.tables.Index._search dbf.tables.Index-class.html#_search dbf.tables.Index.find_index dbf.tables.Index-class.html#find_index dbf.tables.Index.IndexIterator dbf.tables.Index.IndexIterator-class.html dbf.tables.Index.IndexIterator.next dbf.tables.Index.IndexIterator-class.html#next dbf.tables.Index.IndexIterator.__iter__ dbf.tables.Index.IndexIterator-class.html#__iter__ 
dbf.tables.Index.IndexIterator.__init__ dbf.tables.Index.IndexIterator-class.html#__init__ dbf.tables.List dbf.tables.List-class.html dbf.tables.List.key dbf.tables.List-class.html#key dbf.tables.List.pop dbf.tables.List-class.html#pop dbf.tables.List.__radd__ dbf.tables.List-class.html#__radd__ dbf.tables.List.__rsub__ dbf.tables.List-class.html#__rsub__ dbf.tables.List.next dbf.tables.List-class.html#next dbf.tables.List.__init__ dbf.tables.List-class.html#__init__ dbf.tables.List.index dbf.tables.List-class.html#index dbf.tables.List.extend dbf.tables.List-class.html#extend dbf.tables.List.__contains__ dbf.tables.List-class.html#__contains__ dbf.tables.List.top dbf.tables.List-class.html#top dbf.tables.List._desc dbf.tables.List-class.html#_desc dbf.tables.List._maybe_add dbf.tables.List-class.html#_maybe_add dbf.tables.List.current dbf.tables.List-class.html#current dbf.tables.List.append dbf.tables.List-class.html#append dbf.tables.List.prev dbf.tables.List-class.html#prev dbf.tables.List.__len__ dbf.tables.List-class.html#__len__ dbf.tables.List.sort dbf.tables.List-class.html#sort dbf.tables.List._purge dbf.tables.List-class.html#_purge dbf.tables.List.goto dbf.tables.List-class.html#goto dbf.tables.List.__getitem__ dbf.tables.List-class.html#__getitem__ dbf.tables.List.insert dbf.tables.List-class.html#insert dbf.tables.List.__iter__ dbf.tables.List-class.html#__iter__ dbf.tables.List.__add__ dbf.tables.List-class.html#__add__ dbf.tables.List._get_record dbf.tables.List-class.html#_get_record dbf.tables.List.__delitem__ dbf.tables.List-class.html#__delitem__ dbf.tables.List.__nonzero__ dbf.tables.List-class.html#__nonzero__ dbf.tables.List.reverse dbf.tables.List-class.html#reverse dbf.tables.List.clear dbf.tables.List-class.html#clear dbf.tables.List.bottom dbf.tables.List-class.html#bottom dbf.tables.List.remove dbf.tables.List-class.html#remove dbf.tables.List.__repr__ dbf.tables.List-class.html#__repr__ dbf.tables.List.__sub__ dbf.tables.List-class.html#__sub__ dbf.tables.VfpTable dbf.tables.VfpTable-class.html dbf.tables.DbfTable.__str__ dbf.tables.DbfTable-class.html#__str__ dbf.tables.DbfTable.supported_tables dbf.tables.DbfTable-class.html#supported_tables dbf.tables.VfpTable._yesMemoMask dbf.tables.VfpTable-class.html#_yesMemoMask dbf.tables.DbfTable.query dbf.tables.DbfTable-class.html#query dbf.tables.DbfTable.last_update dbf.tables.DbfTable-class.html#last_update dbf.tables.DbfTable._meta_only dbf.tables.DbfTable-class.html#_meta_only dbf.tables.DbfTable.__enter__ dbf.tables.DbfTable-class.html#__enter__ dbf.tables.DbfTable.__exit__ dbf.tables.DbfTable-class.html#__exit__ dbf.tables.DbfTable.__getitem__ dbf.tables.DbfTable-class.html#__getitem__ dbf.tables.VfpTable._fieldtypes dbf.tables.VfpTable-class.html#_fieldtypes dbf.tables.DbfTable._DbfLists dbf.tables.DbfTable._DbfLists-class.html dbf.tables.VfpTable._decimal_fields dbf.tables.VfpTable-class.html#_decimal_fields dbf.tables.DbfTable.use_deleted dbf.tables.DbfTable-class.html#use_deleted dbf.tables.DbfTable.DbfIterator dbf.tables.DbfTable.DbfIterator-class.html dbf.tables.DbfTable.__nonzero__ dbf.tables.DbfTable-class.html#__nonzero__ dbf.tables.VfpTable._character_fields dbf.tables.VfpTable-class.html#_character_fields dbf.tables.VfpTable._noMemoMask dbf.tables.VfpTable-class.html#_noMemoMask dbf.tables.DbfTable.is_memotype dbf.tables.DbfTable-class.html#is_memotype dbf.tables.DbfTable._list_fields dbf.tables.DbfTable-class.html#_list_fields dbf.tables.VfpTable._memoext dbf.tables.VfpTable-class.html#_memoext 
dbf.tables.VfpTable._dbfTableHeader dbf.tables.VfpTable-class.html#_dbfTableHeader dbf.tables.DbfTable.export dbf.tables.DbfTable-class.html#export dbf.tables.DbfTable.close dbf.tables.DbfTable-class.html#close dbf.tables.DbfTable.append dbf.tables.DbfTable-class.html#append dbf.tables.DbfTable.rename_field dbf.tables.DbfTable-class.html#rename_field dbf.tables.VfpTable._use_deleted dbf.tables.VfpTable-class.html#_use_deleted dbf.tables.DbfTable.__contains__ dbf.tables.DbfTable-class.html#__contains__ dbf.tables.DbfTable.__getattr__ dbf.tables.DbfTable-class.html#__getattr__ dbf.tables.DbfTable.filename dbf.tables.DbfTable-class.html#filename dbf.tables.DbfTable.current dbf.tables.DbfTable-class.html#current dbf.tables.DbfTable.version dbf.tables.DbfTable-class.html#version dbf.tables.DbfTable.new dbf.tables.DbfTable-class.html#new dbf.tables.DbfTable._read_only dbf.tables.DbfTable-class.html#_read_only dbf.tables.DbfTable.goto dbf.tables.DbfTable-class.html#goto dbf.tables.DbfTable.field_count dbf.tables.DbfTable-class.html#field_count dbf.tables.VfpTable._versionabbv dbf.tables.VfpTable-class.html#_versionabbv dbf.tables.VfpTable._memotypes dbf.tables.VfpTable-class.html#_memotypes dbf.tables.VfpTable._version dbf.tables.VfpTable-class.html#_version dbf.tables.DbfTable.add_fields dbf.tables.DbfTable-class.html#add_fields dbf.tables.DbfTable.__repr__ dbf.tables.DbfTable-class.html#__repr__ dbf.tables.DbfTable._Indexen dbf.tables.DbfTable._Indexen-class.html dbf.tables.DbfTable._buildHeaderFields dbf.tables.DbfTable-class.html#_buildHeaderFields dbf.tables.DbfTable.backup dbf.tables.DbfTable-class.html#backup dbf.tables.DbfTable.bof dbf.tables.DbfTable-class.html#bof dbf.tables.DbfTable.reindex dbf.tables.DbfTable-class.html#reindex dbf.tables.VfpTable._numeric_fields dbf.tables.VfpTable-class.html#_numeric_fields dbf.tables.DbfTable.codepage dbf.tables.DbfTable-class.html#codepage dbf.tables.DbfTable.delete_fields dbf.tables.DbfTable-class.html#delete_fields dbf.tables.VfpTable._memoClass dbf.tables._VfpMemo-class.html dbf.tables.DbfTable._TableHeader dbf.tables.DbfTable._TableHeader-class.html dbf.tables.DbfTable.open dbf.tables.DbfTable-class.html#open dbf.tables.DbfTable.zap dbf.tables.DbfTable-class.html#zap dbf.tables.DbfTable.size dbf.tables.DbfTable-class.html#size dbf.tables.DbfTable.top dbf.tables.DbfTable-class.html#top dbf.tables.VfpTable._supported_tables dbf.tables.VfpTable-class.html#_supported_tables dbf.tables.DbfTable.is_decimal dbf.tables.DbfTable-class.html#is_decimal dbf.tables.DbfTable.next dbf.tables.DbfTable-class.html#next dbf.tables.DbfTable._loadtable dbf.tables.DbfTable-class.html#_loadtable dbf.tables.DbfTable.prev dbf.tables.DbfTable-class.html#prev dbf.tables.DbfTable.create_backup dbf.tables.DbfTable-class.html#create_backup dbf.tables.DbfTable.__len__ dbf.tables.DbfTable-class.html#__len__ dbf.tables.VfpTable._fixed_fields dbf.tables.VfpTable-class.html#_fixed_fields dbf.tables.DbfTable.__iter__ dbf.tables.DbfTable-class.html#__iter__ dbf.tables.DbfTable.memoname dbf.tables.DbfTable-class.html#memoname dbf.tables.DbfTable.create_index dbf.tables.DbfTable-class.html#create_index dbf.tables.DbfTable.type dbf.tables.DbfTable-class.html#type dbf.tables.DbfTable._update_disk dbf.tables.DbfTable-class.html#_update_disk dbf.tables.DbfTable.pack dbf.tables.DbfTable-class.html#pack dbf.tables.DbfTable.get_record dbf.tables.DbfTable-class.html#get_record dbf.tables.VfpTable._dbfTableHeaderExtra dbf.tables.VfpTable-class.html#_dbfTableHeaderExtra 
dbf.tables.DbfTable.__init__ dbf.tables.DbfTable-class.html#__init__ dbf.tables.VfpTable._initializeFields dbf.tables.VfpTable-class.html#_initializeFields dbf.tables.VfpTable._variable_fields dbf.tables.VfpTable-class.html#_variable_fields dbf.tables.VfpTable._checkMemoIntegrity dbf.tables.VfpTable-class.html#_checkMemoIntegrity dbf.tables.DbfTable.record_length dbf.tables.DbfTable-class.html#record_length dbf.tables.DbfTable._fieldLayout dbf.tables.DbfTable-class.html#_fieldLayout dbf.tables.DbfTable.record_number dbf.tables.DbfTable-class.html#record_number dbf.tables.DbfTable.structure dbf.tables.DbfTable-class.html#structure dbf.tables.DbfTable.eof dbf.tables.DbfTable-class.html#eof dbf.tables.DbfTable.bottom dbf.tables.DbfTable-class.html#bottom dbf.tables.DbfTable._MetaData dbf.tables.DbfTable._MetaData-class.html dbf.tables.DbfTable.field_names dbf.tables.DbfTable-class.html#field_names dbf.tables.DbfTable._Table dbf.tables.DbfTable._Table-class.html dbf.tables._Db3Memo dbf.tables._Db3Memo-class.html dbf.tables._Db3Memo._get_memo dbf.tables._Db3Memo-class.html#_get_memo dbf.tables._Db3Memo._init dbf.tables._Db3Memo-class.html#_init dbf.tables._DbfMemo.put_memo dbf.tables._DbfMemo-class.html#put_memo dbf.tables._Db3Memo._put_memo dbf.tables._Db3Memo-class.html#_put_memo dbf.tables._DbfMemo.get_memo dbf.tables._DbfMemo-class.html#get_memo dbf.tables._DbfMemo.__init__ dbf.tables._DbfMemo-class.html#__init__ dbf.tables._Db4Table dbf.tables._Db4Table-class.html dbf.tables.DbfTable.__str__ dbf.tables.DbfTable-class.html#__str__ dbf.tables.DbfTable.supported_tables dbf.tables.DbfTable-class.html#supported_tables dbf.tables._Db4Table._yesMemoMask dbf.tables._Db4Table-class.html#_yesMemoMask dbf.tables.DbfTable.query dbf.tables.DbfTable-class.html#query dbf.tables.DbfTable.last_update dbf.tables.DbfTable-class.html#last_update dbf.tables.DbfTable._meta_only dbf.tables.DbfTable-class.html#_meta_only dbf.tables.DbfTable.__enter__ dbf.tables.DbfTable-class.html#__enter__ dbf.tables.DbfTable.__exit__ dbf.tables.DbfTable-class.html#__exit__ dbf.tables.DbfTable.__getitem__ dbf.tables.DbfTable-class.html#__getitem__ dbf.tables._Db4Table._fieldtypes dbf.tables._Db4Table-class.html#_fieldtypes dbf.tables.DbfTable._DbfLists dbf.tables.DbfTable._DbfLists-class.html dbf.tables._Db4Table._decimal_fields dbf.tables._Db4Table-class.html#_decimal_fields dbf.tables.DbfTable.use_deleted dbf.tables.DbfTable-class.html#use_deleted dbf.tables.DbfTable.DbfIterator dbf.tables.DbfTable.DbfIterator-class.html dbf.tables.DbfTable.__nonzero__ dbf.tables.DbfTable-class.html#__nonzero__ dbf.tables._Db4Table._character_fields dbf.tables._Db4Table-class.html#_character_fields dbf.tables._Db4Table._noMemoMask dbf.tables._Db4Table-class.html#_noMemoMask dbf.tables.DbfTable.is_memotype dbf.tables.DbfTable-class.html#is_memotype dbf.tables.DbfTable._list_fields dbf.tables.DbfTable-class.html#_list_fields dbf.tables._Db4Table._memoext dbf.tables._Db4Table-class.html#_memoext dbf.tables._Db4Table._dbfTableHeader dbf.tables._Db4Table-class.html#_dbfTableHeader dbf.tables.DbfTable.export dbf.tables.DbfTable-class.html#export dbf.tables.DbfTable.close dbf.tables.DbfTable-class.html#close dbf.tables.DbfTable.append dbf.tables.DbfTable-class.html#append dbf.tables.DbfTable.rename_field dbf.tables.DbfTable-class.html#rename_field dbf.tables._Db4Table._use_deleted dbf.tables._Db4Table-class.html#_use_deleted dbf.tables.DbfTable.__contains__ dbf.tables.DbfTable-class.html#__contains__ dbf.tables.DbfTable.__getattr__ 
dbf.tables.DbfTable-class.html#__getattr__ dbf.tables.DbfTable.filename dbf.tables.DbfTable-class.html#filename dbf.tables.DbfTable.current dbf.tables.DbfTable-class.html#current dbf.tables._Db4Table.version dbf.tables._Db4Table-class.html#version dbf.tables.DbfTable.new dbf.tables.DbfTable-class.html#new dbf.tables.DbfTable._read_only dbf.tables.DbfTable-class.html#_read_only dbf.tables.DbfTable.goto dbf.tables.DbfTable-class.html#goto dbf.tables.DbfTable.field_count dbf.tables.DbfTable-class.html#field_count dbf.tables._Db4Table._versionabbv dbf.tables._Db4Table-class.html#_versionabbv dbf.tables._Db4Table._memotypes dbf.tables._Db4Table-class.html#_memotypes dbf.tables.DbfTable._version dbf.tables.DbfTable-class.html#_version dbf.tables.DbfTable.add_fields dbf.tables.DbfTable-class.html#add_fields dbf.tables.DbfTable.__repr__ dbf.tables.DbfTable-class.html#__repr__ dbf.tables.DbfTable._Indexen dbf.tables.DbfTable._Indexen-class.html dbf.tables.DbfTable._buildHeaderFields dbf.tables.DbfTable-class.html#_buildHeaderFields dbf.tables.DbfTable.backup dbf.tables.DbfTable-class.html#backup dbf.tables.DbfTable.bof dbf.tables.DbfTable-class.html#bof dbf.tables.DbfTable.reindex dbf.tables.DbfTable-class.html#reindex dbf.tables._Db4Table._numeric_fields dbf.tables._Db4Table-class.html#_numeric_fields dbf.tables.DbfTable.codepage dbf.tables.DbfTable-class.html#codepage dbf.tables.DbfTable.delete_fields dbf.tables.DbfTable-class.html#delete_fields dbf.tables._Db4Table._memoClass dbf.tables._VfpMemo-class.html dbf.tables.DbfTable._TableHeader dbf.tables.DbfTable._TableHeader-class.html dbf.tables.DbfTable.open dbf.tables.DbfTable-class.html#open dbf.tables.DbfTable.zap dbf.tables.DbfTable-class.html#zap dbf.tables.DbfTable.size dbf.tables.DbfTable-class.html#size dbf.tables.DbfTable.top dbf.tables.DbfTable-class.html#top dbf.tables._Db4Table._supported_tables dbf.tables._Db4Table-class.html#_supported_tables dbf.tables.DbfTable.is_decimal dbf.tables.DbfTable-class.html#is_decimal dbf.tables.DbfTable.next dbf.tables.DbfTable-class.html#next dbf.tables.DbfTable._loadtable dbf.tables.DbfTable-class.html#_loadtable dbf.tables.DbfTable.prev dbf.tables.DbfTable-class.html#prev dbf.tables.DbfTable.create_backup dbf.tables.DbfTable-class.html#create_backup dbf.tables.DbfTable.__len__ dbf.tables.DbfTable-class.html#__len__ dbf.tables._Db4Table._fixed_fields dbf.tables._Db4Table-class.html#_fixed_fields dbf.tables.DbfTable.__iter__ dbf.tables.DbfTable-class.html#__iter__ dbf.tables.DbfTable.memoname dbf.tables.DbfTable-class.html#memoname dbf.tables.DbfTable.create_index dbf.tables.DbfTable-class.html#create_index dbf.tables.DbfTable.type dbf.tables.DbfTable-class.html#type dbf.tables.DbfTable._update_disk dbf.tables.DbfTable-class.html#_update_disk dbf.tables.DbfTable.pack dbf.tables.DbfTable-class.html#pack dbf.tables.DbfTable.get_record dbf.tables.DbfTable-class.html#get_record dbf.tables._Db4Table._dbfTableHeaderExtra dbf.tables._Db4Table-class.html#_dbfTableHeaderExtra dbf.tables.DbfTable.__init__ dbf.tables.DbfTable-class.html#__init__ dbf.tables.DbfTable._initializeFields dbf.tables.DbfTable-class.html#_initializeFields dbf.tables._Db4Table._variable_fields dbf.tables._Db4Table-class.html#_variable_fields dbf.tables._Db4Table._checkMemoIntegrity dbf.tables._Db4Table-class.html#_checkMemoIntegrity dbf.tables.DbfTable.record_length dbf.tables.DbfTable-class.html#record_length dbf.tables.DbfTable._fieldLayout dbf.tables.DbfTable-class.html#_fieldLayout dbf.tables.DbfTable.record_number 
dbf.tables.DbfTable-class.html#record_number dbf.tables.DbfTable.structure dbf.tables.DbfTable-class.html#structure dbf.tables.DbfTable.eof dbf.tables.DbfTable-class.html#eof dbf.tables.DbfTable.bottom dbf.tables.DbfTable-class.html#bottom dbf.tables.DbfTable._MetaData dbf.tables.DbfTable._MetaData-class.html dbf.tables.DbfTable.field_names dbf.tables.DbfTable-class.html#field_names dbf.tables.DbfTable._Table dbf.tables.DbfTable._Table-class.html dbf.tables._DbfMemo dbf.tables._DbfMemo-class.html dbf.tables._DbfMemo._get_memo dbf.tables._DbfMemo-class.html#_get_memo dbf.tables._DbfMemo._init dbf.tables._DbfMemo-class.html#_init dbf.tables._DbfMemo.put_memo dbf.tables._DbfMemo-class.html#put_memo dbf.tables._DbfMemo.get_memo dbf.tables._DbfMemo-class.html#get_memo dbf.tables._DbfMemo.__init__ dbf.tables._DbfMemo-class.html#__init__ dbf.tables._DbfMemo._put_memo dbf.tables._DbfMemo-class.html#_put_memo dbf.tables._DbfRecord dbf.tables._DbfRecord-class.html dbf.tables._DbfRecord.reset_record dbf.tables._DbfRecord-class.html#reset_record dbf.tables._DbfRecord._dirty dbf.tables._DbfRecord-class.html#_dirty dbf.tables._DbfRecord.__str__ dbf.tables._DbfRecord-class.html#__str__ dbf.tables._DbfRecord._retrieveFieldValue dbf.tables._DbfRecord-class.html#_retrieveFieldValue dbf.tables._DbfRecord._createBlankRecord dbf.tables._DbfRecord-class.html#_createBlankRecord dbf.tables._DbfRecord.undelete_record dbf.tables._DbfRecord-class.html#undelete_record dbf.tables._DbfRecord.__setattr__ dbf.tables._DbfRecord-class.html#__setattr__ dbf.tables._DbfRecord.__new__ dbf.tables._DbfRecord-class.html#__new__ dbf.tables._DbfRecord.__contains__ dbf.tables._DbfRecord-class.html#__contains__ dbf.tables._DbfRecord._recnum dbf.tables._DbfRecord-class.html#_recnum dbf.tables._DbfRecord.__getattr__ dbf.tables._DbfRecord-class.html#__getattr__ dbf.tables._DbfRecord.check_index dbf.tables._DbfRecord-class.html#check_index dbf.tables._DbfRecord.__call__ dbf.tables._DbfRecord-class.html#__call__ dbf.tables._DbfRecord._updateFieldValue dbf.tables._DbfRecord-class.html#_updateFieldValue dbf.tables._DbfRecord.record_table dbf.tables._DbfRecord-class.html#record_table dbf.tables._DbfRecord.delete_record dbf.tables._DbfRecord-class.html#delete_record dbf.tables._DbfRecord.__repr__ dbf.tables._DbfRecord-class.html#__repr__ dbf.tables._DbfRecord.__getitem__ dbf.tables._DbfRecord-class.html#__getitem__ dbf.tables._DbfRecord._layout dbf.tables._DbfRecord-class.html#_layout dbf.tables._DbfRecord.scatter_fields dbf.tables._DbfRecord-class.html#scatter_fields dbf.tables._DbfRecord.__setitem__ dbf.tables._DbfRecord-class.html#__setitem__ dbf.tables._DbfRecord.record_number dbf.tables._DbfRecord-class.html#record_number dbf.tables._DbfRecord._data dbf.tables._DbfRecord-class.html#_data dbf.tables._DbfRecord.read_record dbf.tables._DbfRecord-class.html#read_record dbf.tables._DbfRecord.gather_fields dbf.tables._DbfRecord-class.html#gather_fields dbf.tables._DbfRecord.has_been_deleted dbf.tables._DbfRecord-class.html#has_been_deleted dbf.tables._DbfRecord.__iter__ dbf.tables._DbfRecord-class.html#__iter__ dbf.tables._DbfRecord.__len__ dbf.tables._DbfRecord-class.html#__len__ dbf.tables._DbfRecord.field_names dbf.tables._DbfRecord-class.html#field_names dbf.tables._DbfRecord.write_record dbf.tables._DbfRecord-class.html#write_record dbf.tables._DbfRecord._update_disk dbf.tables._DbfRecord-class.html#_update_disk dbf.tables._VfpMemo dbf.tables._VfpMemo-class.html dbf.tables._VfpMemo._get_memo dbf.tables._VfpMemo-class.html#_get_memo 
dbf.tables._VfpMemo._init dbf.tables._VfpMemo-class.html#_init dbf.tables._DbfMemo.put_memo dbf.tables._DbfMemo-class.html#put_memo dbf.tables._VfpMemo._put_memo dbf.tables._VfpMemo-class.html#_put_memo dbf.tables._DbfMemo.get_memo dbf.tables._DbfMemo-class.html#get_memo dbf.tables._DbfMemo.__init__ dbf.tables._DbfMemo-class.html#__init__ dbf.tables.property dbf.tables.property-class.html dbf.tables.property.setter dbf.tables.property-class.html#setter dbf.tables.property.__set__ dbf.tables.property-class.html#__set__ dbf.tables.property.__init__ dbf.tables.property-class.html#__init__ dbf.tables.property.__call__ dbf.tables.property-class.html#__call__ dbf.tables.property.__get__ dbf.tables.property-class.html#__get__ dbf.tables.property.deleter dbf.tables.property-class.html#deleter dbf.tables.property.__delete__ dbf.tables.property-class.html#__delete__ dbf-0.88.16/dbf/html/dbf.tables.DbfTable._TableHeader-class.html0000666000175100017510000003103611477216672023053 0ustar margamarga dbf.tables.DbfTable._TableHeader

Class _TableHeader

source code

object --+
         |
        DbfTable._TableHeader

Instance Methods [hide private]
 
__init__(yo, data)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code
 
codepage(yo, cp=None)
get/set code page of table
source code

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __repr__, __setattr__, __str__

Class Variables [hide private]
  data = <dbf.tables.property object at 0x00EC7C30>
  extra = <dbf.tables.property object at 0x00EC7C50>
  field_count = <dbf.tables.property object at 0x00EC7C70>
  fields = <dbf.tables.property object at 0x00EC7C90>
  record_count = <dbf.tables.property object at 0x00EC7CB0>
  record_length = <dbf.tables.property object at 0x00EC7CD0>
  start = <dbf.tables.property object at 0x00EC7CF0>
  update = <dbf.tables.property object at 0x00EC7D10>
  version = <dbf.tables.property object at 0x00EC7D30>
Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

__init__(yo, data)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.tables.VfpTable-class.html0000666000175100017510000010627611477216672020626 0ustar margamarga dbf.tables.VfpTable

Class VfpTable

source code

object --+    
         |    
  DbfTable --+
             |
            VfpTable

Provides an interface for working with Visual FoxPro 6 tables
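
A minimal usage sketch, assuming the Table() factory documented in the package index selects this class when dbf_type='vfp' is passed or detected; the file name is hypothetical:

    import dbf

    # open an existing Visual FoxPro table; 'vfp' matches _versionabbv below
    table = dbf.Table('customers.dbf', dbf_type='vfp')   # hypothetical file
    print table.version           # e.g. 'Visual Foxpro v6'
    print table.memoname          # companion .fpt memo file, if the table has memo fields
    for record in table:          # iterate over the in-memory records
        print record
    table.close()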

Nested Classes [hide private]
  _memoClass
Provides access to memo fields as dictionaries; must override _init, _get_memo, and _put_memo to store memo contents to disk

Inherited from DbfTable: DbfIterator

Inherited from DbfTable (private): _DbfLists, _Indexen, _MetaData, _Table, _TableHeader

Instance Methods [hide private]
 
_checkMemoIntegrity(yo)
Visual FoxPro specific
source code
 
_initializeFields(yo)
builds the FieldList of names, types, and descriptions
source code

Inherited from DbfTable: __contains__, __enter__, __exit__, __getattr__, __getitem__, __init__, __iter__, __len__, __nonzero__, __repr__, __str__, add_fields, append, bof, bottom, close, create_backup, create_index, current, delete_fields, eof, export, get_record, goto, is_decimal, is_memotype, new, next, open, pack, prev, query, reindex, rename_field, size, structure, top, type, zap

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __setattr__

Class Variables [hide private]
  _version = 'Visual Foxpro v6'
  _versionabbv = 'vfp'
  _fieldtypes = {'0': {'Blank': <type 'int'>, 'Init': None, 'Ret...
  _memoext = '.fpt'
  _memotypes = ('G', 'M', 'P')
  _yesMemoMask = '0'
  _noMemoMask = '0'
  _fixed_fields = ('B', 'D', 'G', 'I', 'L', 'M', 'P', 'T', 'Y')
  _variable_fields = ('C', 'F', 'N')
  _character_fields = ('C', 'M')
  _decimal_fields = ('F', 'N')
  _numeric_fields = ('B', 'F', 'I', 'N', 'Y')
  _supported_tables = ('0')
  _dbfTableHeader = '0\x00\x00\x00\x00\x00\x00\x00(\x01\x01\x00\...
  _dbfTableHeaderExtra = '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x...
  _use_deleted = True

Inherited from DbfTable: backup, codepage, field_count, field_names, filename, last_update, memoname, record_length, record_number, supported_tables, use_deleted, version

Inherited from DbfTable (private): _meta_only, _read_only

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

_checkMemoIntegrity(yo)

source code 

Visual FoxPro specific

Overrides: DbfTable._checkMemoIntegrity
(inherited documentation)

_initializeFields(yo)

source code 

builds the FieldList of names, types, and descriptions

Overrides: DbfTable._initializeFields

Class Variable Details [hide private]

_fieldtypes

Value:
{'0': {'Blank': <type 'int'>,
       'Init': None,
       'Retrieve': <function unsupportedType at 0x00ECA130>,
       'Type': '_NullFlags',
       'Update': <function unsupportedType at 0x00ECA130>},
 'B': {'Blank': <type 'float'>,
       'Init': <function addVfpDouble at 0x00ECA830>,
       'Retrieve': <function retrieveDouble at 0x00ECA2F0>,
...

_dbfTableHeader

Value:
'0\x00\x00\x00\x00\x00\x00\x00(\x01\x01\x00\x00\x00\x00\x00\x00\x00\x0\
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00'

_dbfTableHeaderExtra

Value:
'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0\
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0\
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0\
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0\
...

dbf-0.88.16/dbf/html/index.html0000666000175100017510000006337611477216676015133 0ustar margamarga dbf

Package dbf

source code

Copyright

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

THIS SOFTWARE IS PROVIDED BY Ad-Mail, Inc ''AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Ad-Mail, Inc BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Summary

Python package for reading/writing dBase III and VFP 6 tables and memos

The entire table is read into memory, and all operations occur on the in-memory table, with data changes being written to disk as they occur.

Goals: programming style with databases

NOTE: Of the VFP data types, auto-increment and null settings are not implemented.
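
A minimal sketch (Python 2, matching this release) of the in-memory workflow described above, using the Table() factory documented under Functions below; the field specification and values are hypothetical, and append() is assumed to accept a dict of field values:

    import dbf

    # create a new dBase III table (the default dbf_type) and add one record
    table = dbf.Table('people', 'name C(25); birth D')            # hypothetical field specs
    table.append({'name': 'Ethan', 'birth': dbf.Date(1970, 1, 1)})

    # every record is already in memory; changes were written to disk as they occurred
    for record in table:
        print record.name, record.birth

    table.close()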

Submodules [hide private]

Functions [hide private]
 
Table(filename, field_specs='', memo_size=128, ignore_memos=False, read_only=False, keep_memos=False, meta_only=False, dbf_type=None, codepage=None)
returns an open table of the correct dbf_type, or creates it if field_specs is given
source code
 
index(sequence)
returns the integers 0 through len(sequence) - 1 (the indices of sequence)
source code
 
guess_table_type(filename) source code
 
table_type(filename)
returns text representation of a table's dbf version
source code
 
add_fields(table, field_specs)
adds fields to an existing table
source code
 
delete_fields(table, field_names)
deletes fields from an existing table
source code
 
export(table, filename='', fields='', format='csv', header=True)
creates a csv or tab-delimited file from an existing table
source code
 
first_record(table)
prints the first record of a table
source code
 
from_csv(csvfile, to_disk=False, filename=None, field_names=None, extra_fields=None, dbf_type='db3', memo_size=64, min_field_size=1)
creates a Character table from a csv file; to_disk will create a table with the same name (filename will be used if provided); field_names default to f0, f1, f2, etc., unless specified (list); extra_fields can be used to add additional fields -- should be normal field specifiers (list); see the usage sketch after this function list
source code
 
get_fields(table)
returns the list of field names of a table
source code
 
info(table)
prints table info
source code
 
rename_field(table, oldfield, newfield)
renames a field in a table
source code
 
structure(table, field=None)
returns the definition of a field (or all fields)
source code
 
hex_dump(records)
produces a hexadecimal dump of the given records
source code
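
A rough sketch chaining several of the helpers above; the csv file name is hypothetical, and from_csv is assumed to return the table it creates:

    import dbf

    table = dbf.from_csv('contacts.csv', to_disk=True, filename='contacts', dbf_type='db3')
    print dbf.get_fields(table)        # f0, f1, ... unless field_names was supplied
    dbf.first_record(table)            # prints the first record
    dbf.export(table, filename='contacts_export', format='csv', header=True)
    table.close()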
Variables [hide private]
  version = (0, 88, 16)
  default_type = 'db3'
  sql_user_functions = {}

Imports: os, csv, Date, DateTime, Time, DbfWarning, Bof, Eof, DbfError, DataOverflow, FieldMissing, DoNotIndex, DbfTable, Db3Table, VfpTable, FpTable, List, DbfCsv, sql, ascii, codepage, encoding, version_map, _io, dates, exceptions, tables


dbf-0.88.16/dbf/html/dbf.exceptions.DataOverflow-class.html0000666000175100017510000002152511477216672022420 0ustar margamarga dbf.exceptions.DataOverflow

Class DataOverflow

source code

              object --+            
                       |            
exceptions.BaseException --+        
                           |        
        exceptions.Exception --+    
                               |    
                        DbfError --+
                                   |
                                  DataOverflow

Data too large for field
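
A sketch of the condition that raises this exception; the field specification and value are hypothetical, and a three-digit numeric field is assumed to reject a five-digit value:

    import dbf

    table = dbf.Table('overflow_demo', 'qty N(3,0)')   # hypothetical table and field spec
    try:
        table.append({'qty': 12345})                   # assumed: does not fit in N(3,0)
    except dbf.DataOverflow:
        print 'value too large for field'
    table.close()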

Instance Methods [hide private]
 
__init__(yo, message, data=None)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code

Inherited from exceptions.Exception: __new__

Inherited from exceptions.BaseException: __delattr__, __getattribute__, __getitem__, __getslice__, __reduce__, __repr__, __setattr__, __setstate__, __str__

Inherited from object: __hash__, __reduce_ex__

Properties [hide private]

Inherited from exceptions.BaseException: args, message

Inherited from object: __class__

Method Details [hide private]

__init__(yo, message, data=None)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.exceptions.DoNotIndex-class.html0000666000175100017510000002332711477216672022040 0ustar margamarga dbf.exceptions.DoNotIndex

Class DoNotIndex

source code

              object --+            
                       |            
exceptions.BaseException --+        
                           |        
        exceptions.Exception --+    
                               |    
                      DbfWarning --+
                                   |
                                  DoNotIndex

Returned by indexing functions to suppress a record from becoming part of the index

Instance Methods [hide private]
 
__init__(yo)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code

Inherited from exceptions.Exception: __new__

Inherited from exceptions.BaseException: __delattr__, __getattribute__, __getitem__, __getslice__, __reduce__, __repr__, __setattr__, __setstate__, __str__

Inherited from object: __hash__, __reduce_ex__

Class Variables [hide private]
  message = 'Not indexing record'
exception message
Properties [hide private]

Inherited from exceptions.BaseException: args

Inherited from object: __class__

Method Details [hide private]

__init__(yo)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/module-tree.html0000666000175100017510000001160711477216670016226 0ustar margamarga Module Hierarchy
 

Module Hierarchy

dbf-0.88.16/dbf/html/dbf.old.tables._DbfRecord-class.html0000666000175100017510000010214011477216672021653 0ustar margamarga dbf.old.tables._DbfRecord

Class _DbfRecord

source code

object --+
         |
        _DbfRecord

Provides routines to extract and save data within the fields of a dbf record.
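
Records are normally obtained from a table rather than constructed directly; a rough sketch using the methods listed below, with hypothetical table and field names:

    import dbf

    table = dbf.Table('people.dbf')          # hypothetical existing table
    record = table[0]                        # tables hand out record instances

    data = record.scatter_fields()           # field names -> current values
    data['name'] = 'New Name'                # hypothetical field
    record.gather_fields(data)               # push the dictionary back into the record

    record.delete_record()                   # mark the record as deleted
    record.undelete_record()                 # ...and make it active again
    table.close()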

Instance Methods [hide private]
 
_retrieveFieldValue(yo, record_data, fielddef)
calls appropriate routine to fetch value stored in field from array
source code
 
_updateFieldValue(yo, fielddef, value)
calls appropriate routine to convert value to ascii bytes, and save it in record
source code
 
_update_disk(yo, location='', data=None) source code
 
__call__(yo, *specs) source code
 
__contains__(yo, key) source code
 
__iter__(yo) source code
 
__getattr__(yo, name) source code
 
__getitem__(yo, item) source code
 
__len__(yo) source code
 
__setattr__(yo, name, value)
x.__setattr__('name', value) <==> x.name = value
source code
 
__setitem__(yo, name, value) source code
 
__str__(yo)
str(x)
source code
 
__repr__(yo)
repr(x)
source code
 
_createBlankRecord(yo)
creates a blank record data chunk
source code
 
delete_record(yo)
marks record as deleted
source code
 
gather_fields(yo, dictionary, drop=False)
saves a dictionary into a record's fields; keys with no matching field will raise a FieldMissing exception unless drop=True
source code
 
read(yo)
refresh record data from disk
source code
 
reindex(yo) source code
 
reset_record(yo, keep_fields=None)
blanks record
source code
 
scatter_fields(yo, blank=False)
returns a dictionary of fieldnames and values which can be used with gather_fields().
source code
 
undelete_record(yo)
marks record as active
source code
 
write(yo, **kwargs)
write record data to disk
source code

Inherited from object: __delattr__, __getattribute__, __hash__, __init__, __reduce__, __reduce_ex__

Static Methods [hide private]
a new object with type S, a subtype of T
__new__(cls, recnum, layout, kamikaze='', _fromdisk=False)
record = ascii array of entire record; layout=record specification; memo = memo object for table
source code
Class Variables [hide private]
  field_names = <dbf.old.tables.property object at 0x01197FD0>
  has_been_deleted = <dbf.old.tables.property object at 0x011A4030>
  record_number = <dbf.old.tables.property object at 0x011A4050>
  record_table = <dbf.old.tables.property object at 0x011A4070>
Properties [hide private]
  _data
  _dirty
  _layout
  _recnum

Inherited from object: __class__

Method Details [hide private]

_retrieveFieldValue(yo, record_data, fielddef)

source code 

calls appropriate routine to fetch value stored in field from array

Parameters:
  • record_data (array of characters) - the data portion of the record
  • fielddef (dictionary with keys 'type', 'start', 'length', 'end', 'decimals', and 'flags') - description of the field definition
Returns:
python data stored in field

__new__(cls, recnum, layout, kamikaze='', _fromdisk=False)
Static Method

source code 

record = ascii array of entire record; layout=record specification; memo = memo object for table

Returns: a new object with type S, a subtype of T
Overrides: object.__new__

__setattr__(yo, name, value)

source code 

x.__setattr__('name', value) <==> x.name = value

Overrides: object.__setattr__
(inherited documentation)

__str__(yo)
(Informal representation operator)

source code 

str(x)

Overrides: object.__str__
(inherited documentation)

__repr__(yo)
(Representation operator)

source code 

repr(x)

Overrides: object.__repr__
(inherited documentation)

scatter_fields(yo, blank=False)

source code 

returns a dictionary of fieldnames and values which can be used with gather_fields(); if blank is True, values are empty.
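A rough sketch of the scatter/gather round trip described above; the table and field names are invented, and append()/write() are taken from the record API shown earlier:

    src = source_table[0]
    data = src.scatter_fields()          # e.g. {'name': 'Ethan', 'hired': Date(...)}
    dst = dest_table.append()            # new blank record
    dst.gather_fields(data, drop=True)   # drop=True ignores keys with no matching field
    dst.write()                          # flush the record to disk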


dbf-0.88.16/dbf/html/dbf.old.exceptions.DbfWarning-class.html0000666000175100017510000001505411477216672022621 0ustar margamarga dbf.old.exceptions.DbfWarning
Package dbf :: Package old :: Module exceptions :: Class DbfWarning

Class DbfWarning

source code

              object --+        
                       |        
exceptions.BaseException --+    
                           |    
        exceptions.Exception --+
                               |
                              DbfWarning
Known Subclasses:

Normal operations elicit this response

Instance Methods [hide private]

Inherited from exceptions.Exception: __init__, __new__

Inherited from exceptions.BaseException: __delattr__, __getattribute__, __getitem__, __getslice__, __reduce__, __repr__, __setattr__, __setstate__, __str__

Inherited from object: __hash__, __reduce_ex__

Properties [hide private]

Inherited from exceptions.BaseException: args, message

Inherited from object: __class__

dbf-0.88.16/dbf/html/dbf.old._io-module.html0000666000175100017510000011421111477216670017337 0ustar margamarga dbf.old._io
Package dbf :: Package old :: Module _io

Module _io

source code

Routines for saving, retrieving, and creating fields

Functions [hide private]
 
packShortInt(value, bigendian=False)
Returns a two-byte integer from the value, or raises DbfError
source code
 
packLongInt(value, bigendian=False)
Returns a four-byte integer from the value, or raises DbfError
source code
 
packDate(date)
Returns a group of three bytes, in integer form, of the date
source code
 
packStr(string)
Returns an 11 byte, upper-cased, null padded string suitable for field names; raises DbfError if the string is bigger than 10 bytes
source code
 
unpackShortInt(bytes, bigendian=False)
Returns the value in the two-byte integer passed in
source code
 
unpackLongInt(bytes, bigendian=False)
Returns the value in the four-byte integer passed in
source code
 
unpackDate(bytestr)
Returns a Date() of the packed three-byte date passed in
source code
 
unpackStr(chars)
Returns a normal, lower-cased string from a null-padded byte string
source code
 
convertToBool(value)
Returns boolean true or false; normal rules apply to non-string values; string values must be 'y','t', 'yes', or 'true' (case insensitive) to be True
source code
 
unsupportedType(something, field, memo=None)
called if a data type is not supported for that style of table
source code
 
retrieveCharacter(bytes, fielddef={}, memo=None)
Returns the string in bytes with trailing white space removed
source code
 
updateCharacter(string, fielddef, memo=None)
returns the string, truncating if the string is longer than its field
source code
 
retrieveCurrency(bytes, fielddef={}, memo=None) source code
 
updateCurrency(value, fielddef={}, memo=None) source code
 
retrieveDate(bytes, fielddef={}, memo=None)
Returns the ascii coded date as a Date object
source code
 
updateDate(moment, fielddef={}, memo=None)
returns the Date or datetime.date object ascii-encoded (yyyymmdd)
source code
 
retrieveDouble(bytes, fielddef={}, memo=None) source code
 
updateDouble(value, fielddef={}, memo=None) source code
 
retrieveInteger(bytes, fielddef={}, memo=None)
Returns the binary number stored in bytes in little-endian format
source code
 
updateInteger(value, fielddef={}, memo=None)
returns value in little-endian binary format
source code
 
retrieveLogical(bytes, fielddef={}, memo=None)
Returns True if bytes is 't', 'T', 'y', or 'Y', None if '?', and False otherwise
source code
 
updateLogical(logical, fielddef={}, memo=None)
Returns 'T' if logical is True, 'F' otherwise
source code
 
retrieveMemo(bytes, fielddef, memo)
Returns the block of data from a memo file
source code
 
updateMemo(string, fielddef, memo)
Writes string as a memo, returns the block number it was saved into
source code
 
retrieveNumeric(bytes, fielddef, memo=None)
Returns the number stored in bytes as integer if field spec for decimals is 0, float otherwise
source code
 
updateNumeric(value, fielddef, memo=None)
returns value as ascii representation, rounding decimal portion as necessary
source code
 
retrieveVfpDateTime(bytes, fielddef={}, memo=None)
returns the date/time stored in bytes; dates <= 01/01/1981 00:00:00 may not be accurate; BC dates are nulled.
source code
 
updateVfpDateTime(moment, fielddef={}, memo=None)
sets the date/time stored in moment; moment must have fields year, month, day, hour, minute, second, microsecond
source code
 
retrieveVfpMemo(bytes, fielddef, memo)
Returns the block of data from a memo file
source code
 
updateVfpMemo(string, fielddef, memo)
Writes string as a memo, returns the block number it was saved into
source code
 
addCharacter(format) source code
 
addDate(format) source code
 
addLogical(format) source code
 
addMemo(format) source code
 
addNumeric(format) source code
 
addVfpCurrency(format) source code
 
addVfpDateTime(format) source code
 
addVfpDouble(format) source code
 
addVfpInteger(format) source code
 
addVfpMemo(format) source code
 
addVfpNumeric(format) source code
Variables [hide private]
  VFPTIME = 1721425

Imports: struct, Decimal, DbfError, DataOverflow, Date, DateTime, Time
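As a rough sketch of the numeric-field behaviour listed above (an integer when the field declares no decimals, a float otherwise); the helper below is illustrative only, not the module's actual code:

    def retrieve_numeric_sketch(field_text, decimals):
        "mimic retrieveNumeric: int when decimals == 0, float otherwise"
        text = field_text.strip()
        if decimals == 0:
            return int(text)
        return float(text)

    print retrieve_numeric_sketch('   42', 0)      # 42
    print retrieve_numeric_sketch(' 3.14', 2)      # 3.14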


dbf-0.88.16/dbf/html/epydoc.js0000666000175100017510000002517211477216670014731 0ustar margamarga [epydoc's generated JavaScript for the show/hide, expand/collapse, and doclink widgets; mangled in this text dump and omitted]
dbf-0.88.16/dbf/html/dbf.exceptions.DbfError-class.html0000666000175100017510000001475411477216672021526 0ustar margamarga dbf.exceptions.DbfError
Package dbf :: Module exceptions :: Class DbfError

Class DbfError

source code

              object --+        
                       |        
exceptions.BaseException --+    
                           |    
        exceptions.Exception --+
                               |
                              DbfError
Known Subclasses:

Fatal errors elicit this response.

Instance Methods [hide private]

Inherited from exceptions.Exception: __init__, __new__

Inherited from exceptions.BaseException: __delattr__, __getattribute__, __getitem__, __getslice__, __reduce__, __repr__, __setattr__, __setstate__, __str__

Inherited from object: __hash__, __reduce_ex__

Properties [hide private]

Inherited from exceptions.BaseException: args, message

Inherited from object: __class__
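A typical guard against the fatal errors described here, using an invented filename (Python 2 syntax, matching the package's own code):

    import dbf
    from dbf.exceptions import DbfError

    try:
        table = dbf.Table('missing.dbf')
    except DbfError, exc:
        print "could not open table:", exc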

dbf-0.88.16/dbf/html/dbf-pysrc.html0000666000175100017510000032406211477216672015701 0ustar margamarga dbf
Package dbf

Source Code for Package dbf

  1  """ 
  2  Copyright 
  3  ========= 
  4      - Copyright: 2008-2009 Ad-Mail, Inc -- All rights reserved. 
  5      - Author: Ethan Furman 
  6      - Contact: ethan@stoneleaf.us 
  7      - Organization: Ad-Mail, Inc. 
  8      - Version: 0.88.016 as of 06 Dec 2010 
  9   
 10  Redistribution and use in source and binary forms, with or without 
 11  modification, are permitted provided that the following conditions are met: 
 12      - Redistributions of source code must retain the above copyright 
 13        notice, this list of conditions and the following disclaimer. 
 14      - Redistributions in binary form must reproduce the above copyright 
 15        notice, this list of conditions and the following disclaimer in the 
 16        documentation and/or other materials provided with the distribution. 
 17      - Neither the name of Ad-Mail, Inc nor the 
 18        names of its contributors may be used to endorse or promote products 
 19        derived from this software without specific prior written permission. 
 20   
 21  THIS SOFTWARE IS PROVIDED BY Ad-Mail, Inc ''AS IS'' AND ANY 
 22  EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 
 23  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 
 24  DISCLAIMED. IN NO EVENT SHALL Ad-Mail, Inc BE LIABLE FOR ANY 
 25  DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 
 26  (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 
 27  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 
 28  ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 
 29  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 
 30  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 31   
 32  B{I{Summary}} 
 33   
 34  Python package for reading/writing dBase III and VFP 6 tables and memos 
 35   
 36  The entire table is read into memory, and all operations occur on the in-memory 
 37  table, with data changes being written to disk as they occur. 
 38   
 39  Goals:  programming style with databases 
 40      - C{table = dbf.table('table name' [, fielddesc[, fielddesc[, ....]]])} 
 41          - fielddesc examples:  C{name C(30); age N(3,0); wisdom M; marriage D} 
 42      - C{record = [ table.current() | table[int] | table.append() | table.[next|prev|top|bottom|goto]() ]} 
 43      - C{record.field | record['field']} accesses the field 
 44   
 45  NOTE:  Of the VFP data types, auto-increment and null settings are not implemented. 
 46  """ 
 47  import os 
 48  import csv 
 49   
 50  from dbf.dates import Date, DateTime, Time 
 51  from dbf.exceptions import DbfWarning, Bof, Eof, DbfError, DataOverflow, FieldMissing, DoNotIndex 
 52  from dbf.tables import DbfTable, Db3Table, VfpTable, FpTable, List, DbfCsv 
 53  from dbf.tables import sql, ascii, codepage, encoding, version_map 
 54   
 55  version = (0, 88, 16) 
 56   
 57  default_type = 'db3'    # default format if none specified 
 58  sql_user_functions = {}      # user-defined sql functions 
 59   
 60  __docformat__ = 'epytext' 
 61   
62 -def Table(filename, field_specs='', memo_size=128, ignore_memos=False, \ 63 read_only=False, keep_memos=False, meta_only=False, dbf_type=None, codepage=None):
64 "returns an open table of the correct dbf_type, or creates it if field_specs is given" 65 #- print "dbf.Table(%s)" % ', '.join(['%r' % arg for arg in (filename, field_specs, dbf_type, codepage)]) 66 if field_specs and dbf_type is None: 67 dbf_type = default_type 68 if dbf_type is not None: 69 dbf_type = dbf_type.lower() 70 if dbf_type == 'db3': 71 return Db3Table(filename, field_specs, memo_size, ignore_memos, read_only, keep_memos, meta_only, codepage) 72 elif dbf_type == 'fp': 73 return FpTable(filename, field_specs, memo_size, ignore_memos, read_only, keep_memos, meta_only, codepage) 74 elif dbf_type == 'vfp': 75 return VfpTable(filename, field_specs, memo_size, ignore_memos, read_only, keep_memos, meta_only, codepage) 76 elif dbf_type == 'dbf': 77 return DbfTable(filename, field_specs, memo_size, ignore_memos, read_only, keep_memos, meta_only, codepage) 78 else: 79 raise DbfError("Unknown table type: %s" % dbf_type) 80 else: 81 possibles = guess_table_type(filename) 82 if len(possibles) == 1: 83 return possibles[0][2](filename, field_specs, memo_size, ignore_memos, \ 84 read_only, keep_memos, meta_only) 85 else: 86 for type, desc, cls in possibles: 87 if type == default_type: 88 return cls(filename, field_specs, memo_size, ignore_memos, \ 89 read_only, keep_memos, meta_only) 90 else: 91 types = ', '.join(["%s" % item[1] for item in possibles]) 92 abbrs = '[' + ' | '.join(["%s" % item[0] for item in possibles]) + ']' 93 raise DbfError("Table could be any of %s. Please specify %s when opening" % (types, abbrs))
94 -def index(sequence):
95 "returns integers 0 - len(sequence)" 96 for i in xrange(len(sequence)): 97 yield i
98 -def guess_table_type(filename):
99 reported = table_type(filename) 100 possibles = [] 101 version = reported[0] 102 for tabletype in (Db3Table, FpTable, VfpTable): 103 if version in tabletype._supported_tables: 104 possibles.append((tabletype._versionabbv, tabletype._version, tabletype)) 105 if not possibles: 106 raise DbfError("Tables of type %s not supported" % str(reported)) 107 return possibles
108 -def table_type(filename):
109 "returns text representation of a table's dbf version" 110 base, ext = os.path.splitext(filename) 111 if ext == '': 112 filename = base + '.dbf' 113 if not os.path.exists(filename): 114 raise DbfError('File %s not found' % filename) 115 fd = open(filename) 116 version = fd.read(1) 117 fd.close() 118 fd = None 119 if not version in version_map: 120 raise DbfError("Unknown dbf type: %s (%x)" % (version, ord(version))) 121 return version, version_map[version]
122
123 -def add_fields(table, field_specs):
124 "adds fields to an existing table" 125 table = Table(table) 126 try: 127 table.add_fields(field_specs) 128 finally: 129 table.close()
130 -def delete_fields(table, field_names):
131 "deletes fields from an existing table" 132 table = Table(table) 133 try: 134 table.delete_fields(field_names) 135 finally: 136 table.close()
137 -def export(table, filename='', fields='', format='csv', header=True):
138 "creates a csv or tab-delimited file from an existing table" 139 if fields is None: 140 fields = [] 141 table = Table(table) 142 try: 143 table.export(filename=filename, field_specs=fields, format=format, header=header) 144 finally: 145 table.close()
146 -def first_record(table):
147 "prints the first record of a table" 148 table = Table(table) 149 try: 150 print str(table[0]) 151 finally: 152 table.close()
153 -def from_csv(csvfile, to_disk=False, filename=None, field_names=None, extra_fields=None, dbf_type='db3', memo_size=64, min_field_size=1):
154 """creates a Character table from a csv file 155 to_disk will create a table with the same name 156 filename will be used if provided 157 field_names default to f0, f1, f2, etc, unless specified (list) 158 extra_fields can be used to add additional fields -- should be normal field specifiers (list)""" 159 reader = csv.reader(open(csvfile)) 160 if field_names: 161 field_names = ['%s M' % fn for fn in field_names] 162 else: 163 field_names = ['f0 M'] 164 mtable = Table(':memory:', [field_names[0]], dbf_type=dbf_type, memo_size=memo_size) 165 fields_so_far = 1 166 for row in reader: 167 while fields_so_far < len(row): 168 if fields_so_far == len(field_names): 169 field_names.append('f%d M' % fields_so_far) 170 mtable.add_fields(field_names[fields_so_far]) 171 fields_so_far += 1 172 mtable.append(tuple(row)) 173 if filename: 174 to_disk = True 175 if not to_disk: 176 if extra_fields: 177 mtable.add_fields(extra_fields) 178 else: 179 if not filename: 180 filename = os.path.splitext(csvfile)[0] 181 length = [min_field_size] * len(field_names) 182 for record in mtable: 183 for i in index(record.field_names): 184 length[i] = max(length[i], len(record[i])) 185 fields = mtable.field_names 186 fielddef = [] 187 for i in index(length): 188 if length[i] < 255: 189 fielddef.append('%s C(%d)' % (fields[i], length[i])) 190 else: 191 fielddef.append('%s M' % (fields[i])) 192 if extra_fields: 193 fielddef.extend(extra_fields) 194 csvtable = Table(filename, fielddef, dbf_type=dbf_type) 195 for record in mtable: 196 csvtable.append(record.scatter_fields()) 197 return csvtable 198 return mtable
199 -def get_fields(table):
200 "returns the list of field names of a table" 201 table = Table(table) 202 return table.field_names
203 -def info(table):
204 "prints table info" 205 table = Table(table) 206 print str(table)
207 -def rename_field(table, oldfield, newfield):
208 "renames a field in a table" 209 table = Table(table) 210 try: 211 table.rename_field(oldfield, newfield) 212 finally: 213 table.close()
214 -def structure(table, field=None):
215 "returns the definition of a field (or all fields)" 216 table = Table(table) 217 return table.structure(field)
218 -def hex_dump(records):
219 "just what it says ;)" 220 for index,dummy in enumerate(records): 221 chars = dummy._data 222 print "%2d: " % index, 223 for char in chars[1:]: 224 print " %2x " % ord(char), 225 print
226
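A short, illustrative session with the module-level helpers defined above; the filename and field values are invented, and the semicolon-separated field spec string follows the examples in the package docstring:

    import dbf

    table = dbf.Table('people.dbf', 'name C(25); born D', dbf_type='db3')
    table.append(('Ethan Furman', dbf.Date(1999, 1, 1)))   # append a tuple, as from_csv() does
    table.close()

    dbf.add_fields('people.dbf', 'email C(40)')            # helpers defined above
    dbf.export('people.dbf', filename='people.csv')
    print dbf.table_type('people.dbf')                     # (version byte, description)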

dbf-0.88.16/dbf/html/dbf.tables.Index-class.html0000666000175100017510000005636211477216672020172 0ustar margamarga dbf.tables.Index
Package dbf :: Module tables :: Class Index

Class Index

source code

object --+
         |
        Index

Nested Classes [hide private]
  IndexIterator
returns records using this index
Instance Methods [hide private]
 
__init__(yo, table, key, field_names=None)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code
 
__call__(yo, record) source code
 
__contains__(yo, match) source code
 
__getitem__(yo, key) source code
 
__enter__(yo) source code
 
__exit__(yo, *exc_info) source code
 
__iter__(yo) source code
 
__len__(yo) source code
 
_partial_match(yo, target, match) source code
 
_purge(yo, rec_num) source code
 
_search(yo, match, lo=0, hi=None) source code
 
clear(yo)
removes all entries from index
source code
 
close(yo) source code
 
find(yo, match, partial=False)
returns numeric index of (partial) match, or -1
source code
 
find_index(yo, match)
returns numeric index of either (partial) match, or position of where match would be
source code
 
index(yo, match, partial=False)
returns numeric index of (partial) match, or raises ValueError
source code
 
reindex(yo)
reindexes all records
source code
 
query(yo, sql_command=None, python=None)
recognized sql commands are SELECT, UPDATE, REPLACE, INSERT, DELETE, and RECALL
source code
 
search(yo, match, partial=False)
returns dbf.List of all (partially) matching records
source code

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __repr__, __setattr__, __str__

Class Methods [hide private]
 
from_file(cls, table, index_file) source code
Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

__init__(yo, table, key, field_names=None)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)
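Putting the constructor and iterator shown above together; the table and field name are invented:

    import dbf
    from dbf.tables import Index

    table = dbf.Table('contacts.dbf')     # hypothetical existing table

    def by_name(record):
        return record.name.lower()        # 'name' is a hypothetical field

    name_index = Index(table, by_name)
    for record in name_index:             # records come back via the index, presumably in key order
        print record.name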

dbf-0.88.16/dbf/html/epydoc.css0000666000175100017510000003773111477216670015111 0ustar margamarga [epydoc's generated stylesheet; omitted]
dbf-0.88.16/dbf/html/crarr.png0000666000175100017510000000052411477216670014731 0ustar margamarga [binary PNG icon; data omitted]
dbf-0.88.16/dbf/html/dbf.tables.DbfCsv-class.html0000666000175100017510000001732111477216672020262 0ustar margamarga dbf.tables.DbfCsv
Package dbf :: Module tables :: Class DbfCsv

Class DbfCsv

source code

csv.Dialect --+
              |
             DbfCsv

csv format for exporting tables

Instance Methods [hide private]

Inherited from csv.Dialect: __init__

Inherited from csv.Dialect (private): _validate

Class Variables [hide private]
  delimiter = ','
  doublequote = True
  escapechar = None
  lineterminator = '\n'
  quotechar = '"'
  skipinitialspace = True
  quoting = 2

Inherited from csv.Dialect (private): _name, _valid
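Because DbfCsv is a csv.Dialect, it can be handed straight to the standard csv module; a small sketch with an invented output file (opened in binary mode for the Python 2 csv module):

    import csv
    from dbf.tables import DbfCsv

    out = open('export.csv', 'wb')
    writer = csv.writer(out, dialect=DbfCsv)
    writer.writerow(['name', 'age'])
    writer.writerow(['Ethan', 42])      # quoting=2 (QUOTE_NONNUMERIC) leaves the number unquoted
    out.close()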

dbf-0.88.16/dbf/html/dbf.dates.Time-class.html0000666000175100017510000005501711477216672017643 0ustar margamarga dbf.dates.Time
Package dbf :: Module dates :: Class Time

Class Time

source code

object --+
         |
        Time

adds null capable datetime.time constructs
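A brief, illustrative session showing the null-capable behaviour described above (the time values are chosen arbitrarily):

    from dbf.dates import Time

    t = Time(14, 30)        # a real time
    null = Time()           # the null value

    print t                 # 14:30:00
    print null              # no time
    print bool(null)        # False
    print null < t          # True -- the null time sorts before any real time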

Instance Methods [hide private]
 
__add__(yo, other) source code
 
__eq__(yo, other) source code
 
__getattr__(yo, name) source code
 
__ge__(yo, other) source code
 
__gt__(yo, other) source code
 
__hash__(yo)
hash(x)
source code
 
__le__(yo, other) source code
 
__lt__(yo, other) source code
 
__ne__(yo, other) source code
 
__nonzero__(yo) source code
 
__radd__(yo, other) source code
 
__rsub__(yo, other) source code
 
__repr__(yo)
repr(x)
source code
 
__str__(yo)
str(x)
source code
 
__sub__(yo, other) source code

Inherited from object: __delattr__, __getattribute__, __init__, __reduce__, __reduce_ex__, __setattr__

Static Methods [hide private]
a new object with type S, a subtype of T
__new__(cls, hour=None, minute=0, second=0, microsec=0)
hour may be a datetime.time
source code
Class Variables [hide private]
  max = Time(23, 59, 59, 999999)
  min = Time(0, 0, 0, 0)
Properties [hide private]
  _time

Inherited from object: __class__

Method Details [hide private]

__new__(cls, hour=None, minute=0, second=0, microsec=0)
Static Method

source code 

hour may be a datetime.time

Returns: a new object with type S, a subtype of T
Overrides: object.__new__

__hash__(yo)
(Hashing function)

source code 

hash(x)

Overrides: object.__hash__
(inherited documentation)

__repr__(yo)
(Representation operator)

source code 

repr(x)

Overrides: object.__repr__
(inherited documentation)

__str__(yo)
(Informal representation operator)

source code 

str(x)

Overrides: object.__str__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old.tables.DbfTable.DbfIterator-class.html0000666000175100017510000002246111477216672023540 0ustar margamarga dbf.old.tables.DbfTable.DbfIterator
Package dbf :: Package old :: Module tables :: Class DbfTable :: Class DbfIterator

Class DbfIterator

source code

object --+
         |
        DbfTable.DbfIterator

returns records using current index

Instance Methods [hide private]
 
__init__(yo, table)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code
 
__iter__(yo) source code
 
next(yo) source code

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __repr__, __setattr__, __str__

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

__init__(yo, table)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old.exceptions.DbfError-class.html0000666000175100017510000001510211477216672022277 0ustar margamarga dbf.old.exceptions.DbfError
Package dbf :: Package old :: Module exceptions :: Class DbfError

Class DbfError

source code

              object --+        
                       |        
exceptions.BaseException --+    
                           |    
        exceptions.Exception --+
                               |
                              DbfError
Known Subclasses:

Fatal errors elicit this response.

Instance Methods [hide private]

Inherited from exceptions.Exception: __init__, __new__

Inherited from exceptions.BaseException: __delattr__, __getattribute__, __getitem__, __getslice__, __reduce__, __repr__, __setattr__, __setstate__, __str__

Inherited from object: __hash__, __reduce_ex__

Properties [hide private]

Inherited from exceptions.BaseException: args, message

Inherited from object: __class__

dbf-0.88.16/dbf/html/dbf.old.dates-pysrc.html0000666000175100017510000074311411477216672017560 0ustar margamarga dbf.old.dates
Package dbf :: Package old :: Module dates

Source Code for Module dbf.old.dates

  1  """wrappers around datetime objects to allow null values""" 
  2   
  3  import datetime 
  4  import time 
5 6 7 -class Date(object):
8 "adds null capable datetime.date constructs" 9 __slots__ = ['_date']
10 - def __new__(cls, year=None, month=0, day=0):
11 """date should be either a datetime.date, a string in yyyymmdd format, 12 or date/month/day should all be appropriate integers""" 13 nd = object.__new__(cls) 14 nd._date = False 15 if type(year) == datetime.date: 16 nd._date = year 17 elif type(year) == Date: 18 nd._date = year._date 19 elif year == 'no date': 20 pass # date object is already False 21 elif year is not None: 22 nd._date = datetime.date(year, month, day) 23 return nd
24 - def __add__(yo, other):
25 if yo and type(other) == datetime.timedelta: 26 return Date(yo._date + other) 27 else: 28 return NotImplemented
29 - def __eq__(yo, other):
30 if yo: 31 if type(other) == datetime.date: 32 return yo._date == other 33 elif type(other) == Date: 34 if other: 35 return yo._date == other._date 36 return False 37 else: 38 if type(other) == datetime.date: 39 return False 40 elif type(other) == Date: 41 if other: 42 return False 43 return True 44 return NotImplemented
45 - def __getattr__(yo, name):
46 if yo: 47 attribute = yo._date.__getattribute__(name) 48 return attribute 49 else: 50 raise AttributeError('null Date object has no attribute %s' % name)
51 - def __ge__(yo, other):
52 if yo: 53 if type(other) == datetime.date: 54 return yo._date >= other 55 elif type(other) == Date: 56 if other: 57 return yo._date >= other._date 58 return False 59 else: 60 if type(other) == datetime.date: 61 return False 62 elif type(other) == Date: 63 if other: 64 return False 65 return True 66 return NotImplemented
67 - def __gt__(yo, other):
68 if yo: 69 if type(other) == datetime.date: 70 return yo._date > other 71 elif type(other) == Date: 72 if other: 73 return yo._date > other._date 74 return True 75 else: 76 if type(other) == datetime.date: 77 return False 78 elif type(other) == Date: 79 if other: 80 return False 81 return False 82 return NotImplemented
83 - def __hash__(yo):
84 return yo._date.__hash__()
85 - def __le__(yo, other):
86 if yo: 87 if type(other) == datetime.date: 88 return yo._date <= other 89 elif type(other) == Date: 90 if other: 91 return yo._date <= other._date 92 return False 93 else: 94 if type(other) == datetime.date: 95 return True 96 elif type(other) == Date: 97 if other: 98 return True 99 return True 100 return NotImplemented
101 - def __lt__(yo, other):
102 if yo: 103 if type(other) == datetime.date: 104 return yo._date < other 105 elif type(other) == Date: 106 if other: 107 return yo._date < other._date 108 return False 109 else: 110 if type(other) == datetime.date: 111 return True 112 elif type(other) == Date: 113 if other: 114 return True 115 return False 116 return NotImplemented
117 - def __ne__(yo, other):
118 if yo: 119 if type(other) == datetime.date: 120 return yo._date != other 121 elif type(other) == Date: 122 if other: 123 return yo._date != other._date 124 return True 125 else: 126 if type(other) == datetime.date: 127 return True 128 elif type(other) == Date: 129 if other: 130 return True 131 return False 132 return NotImplemented
133 - def __nonzero__(yo):
134 if yo._date: 135 return True 136 return False
137 __radd__ = __add__
138 - def __rsub__(yo, other):
139 if yo and type(other) == datetime.date: 140 return other - yo._date 141 elif yo and type(other) == Date: 142 return other._date - yo._date 143 elif yo and type(other) == datetime.timedelta: 144 return Date(other - yo._date) 145 else: 146 return NotImplemented
147 - def __repr__(yo):
148 if yo: 149 return "Date(%d, %d, %d)" % yo.timetuple()[:3] 150 else: 151 return "Date()"
152 - def __str__(yo):
153 if yo: 154 return yo.isoformat() 155 return "no date"
156 - def __sub__(yo, other):
157 if yo and type(other) == datetime.date: 158 return yo._date - other 159 elif yo and type(other) == Date: 160 return yo._date - other._date 161 elif yo and type(other) == datetime.timedelta: 162 return Date(yo._date - other) 163 else: 164 return NotImplemented
165 - def date(yo):
166 if yo: 167 return yo._date 168 return None
169 @classmethod
170 - def fromordinal(cls, number):
171 if number: 172 return cls(datetime.date.fromordinal(number)) 173 return cls()
174 @classmethod
175 - def fromtimestamp(cls, timestamp):
176 return cls(datetime.date.fromtimestamp(timestamp))
177 @classmethod
178 - def fromymd(cls, yyyymmdd):
179 if yyyymmdd in ('', ' ','no date'): 180 return cls() 181 return cls(datetime.date(int(yyyymmdd[:4]), int(yyyymmdd[4:6]), int(yyyymmdd[6:])))
182 - def strftime(yo, format):
183 if yo: 184 return yo._date.strftime(format) 185 return '<no date>'
186 @classmethod
187 - def today(cls):
188 return cls(datetime.date.today())
189 - def ymd(yo):
190 if yo: 191 return "%04d%02d%02d" % yo.timetuple()[:3] 192 else: 193 return ' '
194 Date.max = Date(datetime.date.max) 195 Date.min = Date(datetime.date.min)
196 -class DateTime(object):
197 "adds null capable datetime.datetime constructs" 198 __slots__ = ['_datetime']
199 - def __new__(cls, year=None, month=0, day=0, hour=0, minute=0, second=0, microsec=0):
200 """year may be a datetime.datetime""" 201 ndt = object.__new__(cls) 202 ndt._datetime = False 203 if type(year) == datetime.datetime: 204 ndt._datetime = year 205 elif type(year) == DateTime: 206 ndt._datetime = year._datetime 207 elif year is not None: 208 ndt._datetime = datetime.datetime(year, month, day, hour, minute, second, microsec) 209 return ndt
210 - def __add__(yo, other):
211 if yo and type(other) == datetime.timedelta: 212 return DateTime(yo._datetime + other) 213 else: 214 return NotImplemented
215 - def __eq__(yo, other):
216 if yo: 217 if type(other) == datetime.datetime: 218 return yo._datetime == other 219 elif type(other) == DateTime: 220 if other: 221 return yo._datetime == other._datetime 222 return False 223 else: 224 if type(other) == datetime.datetime: 225 return False 226 elif type(other) == DateTime: 227 if other: 228 return False 229 return True 230 return NotImplemented
231 - def __getattr__(yo, name):
232 if yo: 233 attribute = yo._datetime.__getattribute__(name) 234 return attribute 235 else: 236 raise AttributeError('null DateTime object has no attribute %s' % name)
237 - def __ge__(yo, other):
238 if yo: 239 if type(other) == datetime.datetime: 240 return yo._datetime >= other 241 elif type(other) == DateTime: 242 if other: 243 return yo._datetime >= other._datetime 244 return False 245 else: 246 if type(other) == datetime.datetime: 247 return False 248 elif type(other) == DateTime: 249 if other: 250 return False 251 return True 252 return NotImplemented
253 - def __gt__(yo, other):
254 if yo: 255 if type(other) == datetime.datetime: 256 return yo._datetime > other 257 elif type(other) == DateTime: 258 if other: 259 return yo._datetime > other._datetime 260 return True 261 else: 262 if type(other) == datetime.datetime: 263 return False 264 elif type(other) == DateTime: 265 if other: 266 return False 267 return False 268 return NotImplemented
269 - def __hash__(yo):
270 return yo._datetime.__hash__()
271 - def __le__(yo, other):
272 if yo: 273 if type(other) == datetime.datetime: 274 return yo._datetime <= other 275 elif type(other) == DateTime: 276 if other: 277 return yo._datetime <= other._datetime 278 return False 279 else: 280 if type(other) == datetime.datetime: 281 return True 282 elif type(other) == DateTime: 283 if other: 284 return True 285 return True 286 return NotImplemented
287 - def __lt__(yo, other):
288 if yo: 289 if type(other) == datetime.datetime: 290 return yo._datetime < other 291 elif type(other) == DateTime: 292 if other: 293 return yo._datetime < other._datetime 294 return False 295 else: 296 if type(other) == datetime.datetime: 297 return True 298 elif type(other) == DateTime: 299 if other: 300 return True 301 return False 302 return NotImplemented
303 - def __ne__(yo, other):
304 if yo: 305 if type(other) == datetime.datetime: 306 return yo._datetime != other 307 elif type(other) == DateTime: 308 if other: 309 return yo._datetime != other._datetime 310 return True 311 else: 312 if type(other) == datetime.datetime: 313 return True 314 elif type(other) == DateTime: 315 if other: 316 return True 317 return False 318 return NotImplemented
319 - def __nonzero__(yo):
320 if yo._datetime is not False: 321 return True 322 return False
323 __radd__ = __add__
324 - def __rsub__(yo, other):
325 if yo and type(other) == datetime.datetime: 326 return other - yo._datetime 327 elif yo and type(other) == DateTime: 328 return other._datetime - yo._datetime 329 elif yo and type(other) == datetime.timedelta: 330 return DateTime(other - yo._datetime) 331 else: 332 return NotImplemented
333 - def __repr__(yo):
334 if yo: 335 return "DateTime(%d, %d, %d, %d, %d, %d, %d, %d, %d)" % yo._datetime.timetuple()[:] 336 else: 337 return "DateTime()"
338 - def __str__(yo):
339 if yo: 340 return yo.isoformat() 341 return "no datetime"
342 - def __sub__(yo, other):
343 if yo and type(other) == datetime.datetime: 344 return yo._datetime - other 345 elif yo and type(other) == DateTime: 346 return yo._datetime - other._datetime 347 elif yo and type(other) == datetime.timedelta: 348 return DateTime(yo._datetime - other) 349 else: 350 return NotImplemented
351 @classmethod
352 - def combine(cls, date, time):
353 if Date(date) and Time(time): 354 return cls(date.year, date.month, date.day, time.hour, time.minute, time.second, time.microsecond) 355 return cls()
356 - def date(yo):
357 if yo: 358 return Date(yo.year, yo.month, yo.day) 359 return Date()
360 - def datetime(yo):
361 if yo: 362 return yo._datetime 363 return None
364 @classmethod
365 - def fromordinal(cls, number):
366 if number: 367 return cls(datetime.datetime.fromordinal(number)) 368 else: 369 return cls()
370 @classmethod
371 - def fromtimestamp(cls, timestamp):
372 return DateTime(datetime.datetime.fromtimestamp(timestamp))
373 @classmethod
374 - def now(cls):
375 return cls(datetime.datetime.now())
376 - def time(yo):
377 if yo: 378 return Time(yo.hour, yo.minute, yo.second, yo.microsecond) 379 return Time()
380 @classmethod
381 - def utcnow(cls):
382 return cls(datetime.datetime.utcnow())
383 @classmethod
384 - def today(cls):
385 return cls(datetime.datetime.today())
386 DateTime.max = DateTime(datetime.datetime.max) 387 DateTime.min = DateTime(datetime.datetime.min)
388 -class Time(object):
389 "adds null capable datetime.time constructs" 390 __slots__ = ['_time']
391 - def __new__(cls, hour=None, minute=0, second=0, microsec=0):
392 """hour may be a datetime.time""" 393 nt = object.__new__(cls) 394 nt._time = False 395 if type(hour) == datetime.time: 396 nt._time = hour 397 elif type(hour) == Time: 398 nt._time = hour._time 399 elif hour is not None: 400 nt._time = datetime.time(hour, minute, second, microsec) 401 return nt
402 - def __add__(yo, other):
403 if yo and type(other) == datetime.timedelta: 404 return Time(yo._time + other) 405 else: 406 return NotImplemented
407 - def __eq__(yo, other):
408 if yo: 409 if type(other) == datetime.time: 410 return yo._time == other 411 elif type(other) == Time: 412 if other: 413 return yo._time == other._time 414 return False 415 else: 416 if type(other) == datetime.time: 417 return False 418 elif type(other) == Time: 419 if other: 420 return False 421 return True 422 return NotImplemented
423 - def __getattr__(yo, name):
424 if yo: 425 attribute = yo._time.__getattribute__(name) 426 return attribute 427 else: 428 raise AttributeError('null Time object has no attribute %s' % name)
429 - def __ge__(yo, other):
430 if yo: 431 if type(other) == datetime.time: 432 return yo._time >= other 433 elif type(other) == Time: 434 if other: 435 return yo._time >= other._time 436 return False 437 else: 438 if type(other) == datetime.time: 439 return False 440 elif type(other) == Time: 441 if other: 442 return False 443 return True 444 return NotImplemented
445 - def __gt__(yo, other):
446 if yo: 447 if type(other) == datetime.time: 448 return yo._time > other 449 elif type(other) == DateTime: 450 if other: 451 return yo._time > other._time 452 return True 453 else: 454 if type(other) == datetime.time: 455 return False 456 elif type(other) == Time: 457 if other: 458 return False 459 return False 460 return NotImplemented
461 - def __hash__(yo):
462 return yo._datetime.__hash__()
463 - def __le__(yo, other):
464 if yo: 465 if type(other) == datetime.time: 466 return yo._time <= other 467 elif type(other) == Time: 468 if other: 469 return yo._time <= other._time 470 return False 471 else: 472 if type(other) == datetime.time: 473 return True 474 elif type(other) == Time: 475 if other: 476 return True 477 return True 478 return NotImplemented
479 - def __lt__(yo, other):
480 if yo: 481 if type(other) == datetime.time: 482 return yo._time < other 483 elif type(other) == Time: 484 if other: 485 return yo._time < other._time 486 return False 487 else: 488 if type(other) == datetime.time: 489 return True 490 elif type(other) == Time: 491 if other: 492 return True 493 return False 494 return NotImplemented
495 - def __ne__(yo, other):
496 if yo: 497 if type(other) == datetime.time: 498 return yo._time != other 499 elif type(other) == Time: 500 if other: 501 return yo._time != other._time 502 return True 503 else: 504 if type(other) == datetime.time: 505 return True 506 elif type(other) == Time: 507 if other: 508 return True 509 return False 510 return NotImplemented
511 - def __nonzero__(yo):
512 if yo._time is not False: 513 return True 514 return False
515 __radd__ = __add__
516 - def __rsub__(yo, other):
517 if yo and type(other) == datetime.time: 518 return other - yo._time 519 elif yo and type(other) == Time: 520 return other._time - yo._time 521 elif yo and type(other) == datetime.timedelta: 522 return Time(other - yo._datetime) 523 else: 524 return NotImplemented
525 - def __repr__(yo):
526 if yo: 527 return "Time(%d, %d, %d, %d)" % (yo.hour, yo.minute, yo.second, yo.microsecond) 528 else: 529 return "Time()"
530 - def __str__(yo):
531 if yo: 532 return yo.isoformat() 533 return "no time"
534 - def __sub__(yo, other):
535 if yo and type(other) == datetime.time: 536 return yo._time - other 537 elif yo and type(other) == Time: 538 return yo._time - other._time 539 elif yo and type(other) == datetime.timedelta: 540 return Time(yo._time - other) 541 else: 542 return NotImplemented
543 Time.max = Time(datetime.time.max) 544 Time.min = Time(datetime.time.min) 545
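The current dbf.dates module follows the same pattern shown in this listing; a short, illustrative session with Date (the date itself is arbitrary):

    from dbf.dates import Date

    d = Date(2010, 12, 6)
    n = Date()                    # null date

    print d.ymd()                 # 20101206
    print repr(n)                 # Date()
    print n                       # no date
    print d > n                   # True -- a real date sorts after the null date
    print n == Date('no date')    # True -- 'no date' also constructs the null value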

dbf-0.88.16/dbf/html/dbf.old.dates-module.html0000666000175100017510000001211311477216670017667 0ustar margamarga dbf.old.dates
Package dbf :: Package old :: Module dates

Module dates

source code

wrappers around datetime objects to allow null values

Classes [hide private]
  Date
adds null capable datetime.date constructs
  DateTime
adds null capable datetime.datetime constructs
  Time
adds null capable datetime.time constructs

Imports: datetime, time


dbf-0.88.16/dbf/html/dbf._io-module.html0000666000175100017510000011371011477216670016565 0ustar margamarga dbf._io
Package dbf :: Module _io

Module _io

source code

Routines for saving, retrieving, and creating fields

Functions [hide private]
 
packShortInt(value, bigendian=False)
Returns a two-byte integer from the value, or raises DbfError
source code
 
packLongInt(value, bigendian=False)
Returns a four-byte integer from the value, or raises DbfError
source code
 
packDate(date)
Returns a group of three bytes, in integer form, of the date
source code
 
packStr(string)
Returns an 11 byte, upper-cased, null padded string suitable for field names; raises DbfError if the string is bigger than 10 bytes
source code
 
unpackShortInt(bytes, bigendian=False)
Returns the value in the two-byte integer passed in
source code
 
unpackLongInt(bytes, bigendian=False)
Returns the value in the four-byte integer passed in
source code
 
unpackDate(bytestr)
Returns a Date() of the packed three-byte date passed in
source code
 
unpackStr(chars)
Returns a normal, lower-cased string from a null-padded byte string
source code
 
convertToBool(value)
Returns boolean true or false; normal rules apply to non-string values; string values must be 'y','t', 'yes', or 'true' (case insensitive) to be True
source code
 
unsupportedType(something, field, memo=None)
called if a data type is not supported for that style of table
source code
 
retrieveCharacter(bytes, fielddef={}, memo=None)
Returns the string in bytes with trailing white space removed
source code
 
updateCharacter(string, fielddef, memo=None)
returns the string, truncating if string is longer than it's field
source code
 
retrieveCurrency(bytes, fielddef={}, memo=None) source code
 
updateCurrency(value, fielddef={}, memo=None) source code
 
retrieveDate(bytes, fielddef={}, memo=None)
Returns the ascii coded date as a Date object
source code
 
updateDate(moment, fielddef={}, memo=None)
returns the Date or datetime.date object ascii-encoded (yyyymmdd)
source code
 
retrieveDouble(bytes, fielddef={}, memo=None) source code
 
updateDouble(value, fielddef={}, memo=None) source code
 
retrieveInteger(bytes, fielddef={}, memo=None)
Returns the binary number stored in bytes in little-endian format
source code
 
updateInteger(value, fielddef={}, memo=None)
returns value in little-endian binary format
source code
 
retrieveLogical(bytes, fielddef={}, memo=None)
Returns True if bytes is 't', 'T', 'y', or 'Y', None if '?', and False otherwise
source code
 
updateLogical(logical, fielddef={}, memo=None)
Returs 'T' if logical is True, 'F' otherwise
source code
 
retrieveMemo(bytes, fielddef, memo)
Returns the block of data from a memo file
source code
 
updateMemo(string, fielddef, memo)
Writes string as a memo, returns the block number it was saved into
source code
 
retrieveNumeric(bytes, fielddef, memo=None)
Returns the number stored in bytes as integer if field spec for decimals is 0, float otherwise
source code
 
updateNumeric(value, fielddef, memo=None)
returns value as ascii representation, rounding decimal portion as necessary
source code
 
retrieveVfpDateTime(bytes, fielddef={}, memo=None)
returns the date/time stored in bytes; dates <= 01/01/1981 00:00:00 may not be accurate; BC dates are nulled.
source code
 
updateVfpDateTime(moment, fielddef={}, memo=None)
sets the date/time stored in moment moment must have fields year, month, day, hour, minute, second, microsecond
source code
 
retrieveVfpMemo(bytes, fielddef, memo)
Returns the block of data from a memo file
source code
 
updateVfpMemo(string, fielddef, memo)
Writes string as a memo, returns the block number it was saved into
source code
 
addCharacter(format) source code
 
addDate(format) source code
 
addLogical(format) source code
 
addMemo(format) source code
 
addNumeric(format) source code
 
addVfpCurrency(format) source code
 
addVfpDateTime(format) source code
 
addVfpDouble(format) source code
 
addVfpInteger(format) source code
 
addVfpMemo(format) source code
 
addVfpNumeric(format) source code
Variables [hide private]
  VFPTIME = 1721425

Imports: struct, Decimal, DbfError, DataOverflow, Date, DateTime, Time, floor
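A rough usage sketch for a few of the helpers listed above (illustrative only; the comments are assumptions based on the signatures and docstrings, not output captured from the module):

    from dbf import _io as io

    raw = io.packShortInt(513)        # 2-byte string (bigendian=False by default)
    print io.unpackShortInt(raw)      # 513 -- pack/unpack are inverses

    print io.convertToBool('Yes')     # True: 'y', 't', 'yes', 'true' (any case)
    print io.convertToBool('0')       # False: every other string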


dbf-0.88.16/dbf/html/class-tree.html0000666000175100017510000004502511477216670016047 0ustar margamarga Class Hierarchy
 

Class Hierarchy

dbf-0.88.16/dbf/html/dbf.tables.DbfTable.DbfIterator-class.html0000666000175100017510000002231711477216672022763 0ustar margamarga dbf.tables.DbfTable.DbfIterator
Class DbfIterator

object --+
         |
        DbfTable.DbfIterator

returns records using current index

Instance Methods

__init__(yo, table)
    x.__init__(...) initializes x; see x.__class__.__doc__ for signature
__iter__(yo)
next(yo)

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __repr__, __setattr__, __str__

Properties

Inherited from object: __class__

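In normal use the iterator is not built by hand: DbfTable.__iter__ creates one, so a plain for loop walks the records. A sketch (here `table` stands for any already-opened DbfTable subclass):

    table.use_deleted = False     # have the iterator skip records marked deleted
    for record in table:          # DbfTable.__iter__ -> DbfTable.DbfIterator
        print record              # _DbfRecord.__str__ lists each field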

dbf-0.88.16/dbf/html/dbf.old.exceptions.FieldMissing-class.html0000666000175100017510000002255111477216672023155 0ustar margamarga dbf.old.exceptions.FieldMissing
Class FieldMissing

              object --+                    
                       |                    
exceptions.BaseException --+                
                           |                
        exceptions.Exception --+            
                               |            
        exceptions.StandardError --+        
                                   |        
              exceptions.LookupError --+    
                                       |    
                     exceptions.KeyError --+
                                           |
                      object --+           |
                               |           |
        exceptions.BaseException --+       |
                                   |       |
                exceptions.Exception --+   |
                                       |   |
                                DbfError --+
                                           |
                                          FieldMissing

Field does not exist in table

Instance Methods

__init__(yo, fieldname)
    x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Inherited from exceptions.KeyError: __new__, __str__
Inherited from exceptions.BaseException: __delattr__, __getattribute__, __getitem__, __getslice__, __reduce__, __repr__, __setattr__, __setstate__
Inherited from object: __hash__, __reduce_ex__

Properties

Inherited from exceptions.BaseException: args, message

Inherited from object: __class__

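Because FieldMissing inherits from both KeyError and DbfError, it can be caught under either name. A sketch (`record` stands for any record object; the field name is made up):

    from dbf.exceptions import FieldMissing

    try:
        value = record.no_such_field     # attribute access on a field the table lacks
    except FieldMissing:
        value = None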

dbf-0.88.16/dbf/html/dbf.old.tables-pysrc.html0000666000175100017510000472107311477216676017742 0ustar margamarga dbf.old.tables

Source Code for Module dbf.old.tables

   1  "table definitions" 
   2  import os 
   3  import sys 
   4  import csv 
   5  import codecs 
   6  import locale 
   7  import unicodedata 
   8  import weakref 
   9  from array import array 
  10  from bisect import bisect_left, bisect_right 
  11  from decimal import Decimal 
  12  from shutil import copyfileobj 
  13  from dbf import _io as io 
  14  from dbf.dates import Date, DateTime, Time 
  15  from dbf.exceptions import Bof, Eof, DbfError, DataOverflow, FieldMissing, NonUnicode, DoNotIndex 
  16   
  17  input_decoding = locale.getdefaultlocale()[1]    # treat non-unicode data as ... 
  18  default_codepage = 'cp1252'  # if no codepage specified on dbf creation, use this 
  19  return_ascii = True         # convert back to icky ascii, losing chars if no mapping 
  20   
  21  version_map = { 
  22          '\x02' : 'FoxBASE', 
  23          '\x03' : 'dBase III Plus', 
  24          '\x04' : 'dBase IV', 
  25          '\x05' : 'dBase V', 
  26          '\x30' : 'Visual FoxPro', 
  27          '\x31' : 'Visual FoxPro (auto increment field)', 
  28          '\x43' : 'dBase IV SQL', 
  29          '\x7b' : 'dBase IV w/memos', 
  30          '\x83' : 'dBase III Plus w/memos', 
  31          '\x8b' : 'dBase IV w/memos', 
  32          '\x8e' : 'dBase IV w/SQL table', 
  33          '\xf5' : 'FoxPro w/memos'} 
  34   
  35  code_pages = { 
  36          '\x00' : ('ascii', "plain ol' ascii"), 
  37          '\x01' : ('cp437', 'U.S. MS-DOS'), 
  38          '\x02' : ('cp850', 'International MS-DOS'), 
  39          '\x03' : ('cp1252', 'Windows ANSI'), 
  40          '\x04' : ('mac_roman', 'Standard Macintosh'), 
  41          '\x08' : ('cp865', 'Danish OEM'), 
  42          '\x09' : ('cp437', 'Dutch OEM'), 
  43          '\x0A' : ('cp850', 'Dutch OEM (secondary)'), 
  44          '\x0B' : ('cp437', 'Finnish OEM'), 
  45          '\x0D' : ('cp437', 'French OEM'), 
  46          '\x0E' : ('cp850', 'French OEM (secondary)'), 
  47          '\x0F' : ('cp437', 'German OEM'), 
  48          '\x10' : ('cp850', 'German OEM (secondary)'), 
  49          '\x11' : ('cp437', 'Italian OEM'), 
  50          '\x12' : ('cp850', 'Italian OEM (secondary)'), 
  51          '\x13' : ('cp932', 'Japanese Shift-JIS'), 
  52          '\x14' : ('cp850', 'Spanish OEM (secondary)'), 
  53          '\x15' : ('cp437', 'Swedish OEM'), 
  54          '\x16' : ('cp850', 'Swedish OEM (secondary)'), 
  55          '\x17' : ('cp865', 'Norwegian OEM'), 
  56          '\x18' : ('cp437', 'Spanish OEM'), 
  57          '\x19' : ('cp437', 'English OEM (Britain)'), 
  58          '\x1A' : ('cp850', 'English OEM (Britain) (secondary)'), 
  59          '\x1B' : ('cp437', 'English OEM (U.S.)'), 
  60          '\x1C' : ('cp863', 'French OEM (Canada)'), 
  61          '\x1D' : ('cp850', 'French OEM (secondary)'), 
  62          '\x1F' : ('cp852', 'Czech OEM'), 
  63          '\x22' : ('cp852', 'Hungarian OEM'), 
  64          '\x23' : ('cp852', 'Polish OEM'), 
   65          '\x24' : ('cp860', 'Portuguese OEM'), 
   66          '\x25' : ('cp850', 'Portuguese OEM (secondary)'), 
  67          '\x26' : ('cp866', 'Russian OEM'), 
  68          '\x37' : ('cp850', 'English OEM (U.S.) (secondary)'), 
  69          '\x40' : ('cp852', 'Romanian OEM'), 
  70          '\x4D' : ('cp936', 'Chinese GBK (PRC)'), 
  71          '\x4E' : ('cp949', 'Korean (ANSI/OEM)'), 
  72          '\x4F' : ('cp950', 'Chinese Big 5 (Taiwan)'), 
  73          '\x50' : ('cp874', 'Thai (ANSI/OEM)'), 
  74          '\x57' : ('cp1252', 'ANSI'), 
  75          '\x58' : ('cp1252', 'Western European ANSI'), 
  76          '\x59' : ('cp1252', 'Spanish ANSI'), 
  77          '\x64' : ('cp852', 'Eastern European MS-DOS'), 
  78          '\x65' : ('cp866', 'Russian MS-DOS'), 
  79          '\x66' : ('cp865', 'Nordic MS-DOS'), 
  80          '\x67' : ('cp861', 'Icelandic MS-DOS'), 
  81          '\x68' : (None, 'Kamenicky (Czech) MS-DOS'), 
  82          '\x69' : (None, 'Mazovia (Polish) MS-DOS'), 
  83          '\x6a' : ('cp737', 'Greek MS-DOS (437G)'), 
  84          '\x6b' : ('cp857', 'Turkish MS-DOS'), 
  85          '\x78' : ('cp950', 'Traditional Chinese (Hong Kong SAR, Taiwan) Windows'), 
  86          '\x79' : ('cp949', 'Korean Windows'), 
  87          '\x7a' : ('cp936', 'Chinese Simplified (PRC, Singapore) Windows'), 
  88          '\x7b' : ('cp932', 'Japanese Windows'), 
  89          '\x7c' : ('cp874', 'Thai Windows'), 
  90          '\x7d' : ('cp1255', 'Hebrew Windows'), 
  91          '\x7e' : ('cp1256', 'Arabic Windows'), 
  92          '\xc8' : ('cp1250', 'Eastern European Windows'), 
  93          '\xc9' : ('cp1251', 'Russian Windows'), 
  94          '\xca' : ('cp1254', 'Turkish Windows'), 
  95          '\xcb' : ('cp1253', 'Greek Windows'), 
  96          '\x96' : ('mac_cyrillic', 'Russian Macintosh'), 
  97          '\x97' : ('mac_latin2', 'Macintosh EE'), 
  98          '\x98' : ('mac_greek', 'Greek Macintosh') } 
  99   
 100  if sys.version_info[:2] < (2, 6): 
101 # define our own property type 102 - class property(object):
103 "Emulate PyProperty_Type() in Objects/descrobject.c" 104
105 - def __init__(self, fget=None, fset=None, fdel=None, doc=None):
106 self.fget = fget 107 self.fset = fset 108 self.fdel = fdel 109 self.__doc__ = doc or fget.__doc__
110 - def __call__(self, func):
111 self.fget = func 112 if not self.__doc__: 113 self.__doc__ = func.__doc__
114 - def __get__(self, obj, objtype=None):
115 if obj is None: 116 return self 117 if self.fget is None: 118 raise AttributeError, "unreadable attribute" 119 return self.fget(obj)
120 - def __set__(self, obj, value):
121 if self.fset is None: 122 raise AttributeError, "can't set attribute" 123 self.fset(obj, value)
124 - def __delete__(self, obj):
125 if self.fdel is None: 126 raise AttributeError, "can't delete attribute" 127 self.fdel(obj)
128 - def setter(self, func):
129 self.fset = func 130 return self
131 - def deleter(self, func):
132 self.fdel = func 133 return self
134 # Internal classes
135 -class _DbfRecord(object):
136 """Provides routines to extract and save data within the fields of a dbf record.""" 137 __slots__ = ['_recnum', '_layout', '_data', '_dirty', '__weakref__']
138 - def _retrieveFieldValue(yo, record_data, fielddef):
139 """calls appropriate routine to fetch value stored in field from array 140 @param record_data: the data portion of the record 141 @type record_data: array of characters 142 @param fielddef: description of the field definition 143 @type fielddef: dictionary with keys 'type', 'start', 'length', 'end', 'decimals', and 'flags' 144 @returns: python data stored in field""" 145 146 field_type = fielddef['type'] 147 retrieve = yo._layout.fieldtypes[field_type]['Retrieve'] 148 datum = retrieve(record_data, fielddef, yo._layout.memo) 149 if field_type in yo._layout.character_fields: 150 datum = yo._layout.decoder(datum)[0] 151 if yo._layout.return_ascii: 152 try: 153 datum = yo._layout.output_encoder(datum)[0] 154 except UnicodeEncodeError: 155 datum = unicodedata.normalize('NFD', datum).encode('ascii','ignore') 156 return datum
157 - def _updateFieldValue(yo, fielddef, value):
158 "calls appropriate routine to convert value to ascii bytes, and save it in record" 159 field_type = fielddef['type'] 160 update = yo._layout.fieldtypes[field_type]['Update'] 161 if field_type in yo._layout.character_fields: 162 if not isinstance(value, unicode): 163 if yo._layout.input_decoder is None: 164 raise NonUnicode("String not in unicode format, no default encoding specified") 165 value = yo._layout.input_decoder(value)[0] # input ascii => unicode 166 value = yo._layout.encoder(value)[0] # unicode => table ascii 167 bytes = array('c', update(value, fielddef, yo._layout.memo)) 168 size = fielddef['length'] 169 if len(bytes) > size: 170 raise DataOverflow("tried to store %d bytes in %d byte field" % (len(bytes), size)) 171 blank = array('c', ' ' * size) 172 start = fielddef['start'] 173 end = start + size 174 blank[:len(bytes)] = bytes[:] 175 yo._data[start:end] = blank[:] 176 yo._dirty = True
177 - def _update_disk(yo, location='', data=None):
178 if not yo._layout.inmemory: 179 if yo._recnum < 0: 180 raise DbfError("Attempted to update record that has been packed") 181 if location == '': 182 location = yo._recnum * yo._layout.header.record_length + yo._layout.header.start 183 if data is None: 184 data = yo._data 185 yo._layout.dfd.seek(location) 186 yo._layout.dfd.write(data) 187 yo._dirty = False 188 for index in yo.record_table._indexen: 189 index(yo)
190 - def __call__(yo, *specs):
191 results = [] 192 if not specs: 193 specs = yo._layout.index 194 specs = _normalize_tuples(tuples=specs, length=2, filler=[_nop]) 195 for field, func in specs: 196 results.append(func(yo[field])) 197 return tuple(results)
198
199 - def __contains__(yo, key):
200 return key in yo._layout.fields
201 - def __iter__(yo):
202 return (yo[field] for field in yo._layout.fields)
203 - def __getattr__(yo, name):
204 if name[0:2] == '__' and name[-2:] == '__': 205 raise AttributeError, 'Method %s is not implemented.' % name 206 elif not name in yo._layout.fields: 207 raise FieldMissing(name) 208 try: 209 fielddef = yo._layout[name] 210 value = yo._retrieveFieldValue(yo._data[fielddef['start']:fielddef['end']], fielddef) 211 return value 212 except DbfError, error: 213 error.message = "field --%s-- is %s -> %s" % (name, yo._layout.fieldtypes[fielddef['type']]['Type'], error.message) 214 raise
215 - def __getitem__(yo, item):
216 if type(item) in (int, long): 217 if not -yo._layout.header.field_count <= item < yo._layout.header.field_count: 218 raise IndexError("Field offset %d is not in record" % item) 219 return yo[yo._layout.fields[item]] 220 elif type(item) == slice: 221 sequence = [] 222 for index in yo._layout.fields[item]: 223 sequence.append(yo[index]) 224 return sequence 225 elif type(item) == str: 226 return yo.__getattr__(item) 227 else: 228 raise TypeError("%s is not a field name" % item)
229 - def __len__(yo):
230 return yo._layout.header.field_count
231 - def __new__(cls, recnum, layout, kamikaze='', _fromdisk=False):
232 """record = ascii array of entire record; layout=record specification; memo = memo object for table""" 233 record = object.__new__(cls) 234 record._dirty = False 235 record._recnum = recnum 236 record._layout = layout 237 if layout.blankrecord is None and not _fromdisk: 238 record._createBlankRecord() 239 record._data = layout.blankrecord 240 if recnum == -1: # not a disk-backed record 241 return record 242 elif type(kamikaze) == array: 243 record._data = kamikaze[:] 244 elif type(kamikaze) == str: 245 record._data = array('c', kamikaze) 246 else: 247 record._data = kamikaze._data[:] 248 datalen = len(record._data) 249 if datalen < layout.header.record_length: 250 record._data.extend(layout.blankrecord[datalen:]) 251 elif datalen > layout.header.record_length: 252 record._data = record._data[:layout.header.record_length] 253 if not _fromdisk and not layout.inmemory: 254 record._update_disk() 255 return record
256 - def __setattr__(yo, name, value):
257 if name in yo.__slots__: 258 object.__setattr__(yo, name, value) 259 return 260 elif not name in yo._layout.fields: 261 raise FieldMissing(name) 262 fielddef = yo._layout[name] 263 try: 264 yo._updateFieldValue(fielddef, value) 265 except DbfError, error: 266 error.message = "field --%s-- is %s -> %s" % (name, yo._layout.fieldtypes[fielddef['type']]['Type'], error.message) 267 error.data = name 268 raise
269 - def __setitem__(yo, name, value):
270 if type(name) == str: 271 yo.__setattr__(name, value) 272 elif type(name) in (int, long): 273 yo.__setattr__(yo._layout.fields[name], value) 274 elif type(name) == slice: 275 sequence = [] 276 for field in yo._layout.fields[name]: 277 sequence.append(field) 278 if len(sequence) != len(value): 279 raise DbfError("length of slices not equal") 280 for field, val in zip(sequence, value): 281 yo[field] = val 282 else: 283 raise TypeError("%s is not a field name" % name)
284 - def __str__(yo):
285 result = [] 286 for seq, field in enumerate(yo.field_names): 287 result.append("%3d - %-10s: %s" % (seq, field, yo[field])) 288 return '\n'.join(result)
289 - def __repr__(yo):
290 return yo._data.tostring()
291 - def _createBlankRecord(yo):
292 "creates a blank record data chunk" 293 layout = yo._layout 294 ondisk = layout.ondisk 295 layout.ondisk = False 296 yo._data = array('c', ' ' * layout.header.record_length) 297 layout.memofields = [] 298 for field in layout.fields: 299 yo._updateFieldValue(layout[field], layout.fieldtypes[layout[field]['type']]['Blank']()) 300 if layout[field]['type'] in layout.memotypes: 301 layout.memofields.append(field) 302 layout.blankrecord = yo._data[:] 303 layout.ondisk = ondisk
304 - def delete_record(yo):
305 "marks record as deleted" 306 yo._data[0] = '*' 307 yo._dirty = True 308 return yo
309 @property
310 - def field_names(yo):
311 "fields in table/record" 312 return yo._layout.fields[:]
313 - def gather_fields(yo, dictionary, drop=False): # dict, drop_missing=False):
314 "saves a dictionary into a record's fields\nkeys with no matching field will raise a FieldMissing exception unless drop_missing = True" 315 old_data = yo._data[:] 316 try: 317 for key in dictionary: 318 if not key in yo.field_names: 319 if drop: 320 continue 321 raise FieldMissing(key) 322 yo.__setattr__(key, dictionary[key]) 323 except: 324 yo._data[:] = old_data 325 raise 326 return yo
327 @property
328 - def has_been_deleted(yo):
329 "marked for deletion?" 330 return yo._data[0] == '*'
331 - def read(yo):
332 "refresh record data from disk" 333 size = yo._layout.header.record_length 334 location = yo._recnum * size + yo._layout.header.start 335 yo._layout.dfd.seek(location) 336 yo._data[:] = yo._layout.dfd.read(size) 337 yo._dirty = False 338 return yo
339 @property
340 - def record_number(yo):
341 "physical record number" 342 return yo._recnum
343 @property
344 - def record_table(yo):
345 table = yo._layout.table() 346 if table is None: 347 raise DbfError("table is no longer available") 348 return table
349 - def reindex(yo):
350 for dbfindex in yo._layout.table()._indexen: 351 dbfindex(yo)
352 - def reset_record(yo, keep_fields=None):
353 "blanks record" 354 if keep_fields is None: 355 keep_fields = [] 356 keep = {} 357 for field in keep_fields: 358 keep[field] = yo[field] 359 if yo._layout.blankrecord == None: 360 yo._createBlankRecord() 361 yo._data[:] = yo._layout.blankrecord[:] 362 for field in keep_fields: 363 yo[field] = keep[field] 364 yo._dirty = True 365 return yo
366 - def scatter_fields(yo, blank=False):
367 "returns a dictionary of fieldnames and values which can be used with gather_fields(). if blank is True, values are empty." 368 keys = yo._layout.fields 369 if blank: 370 values = [yo._layout.fieldtypes[yo._layout[key]['type']]['Blank']() for key in keys] 371 else: 372 values = [yo[field] for field in keys] 373 return dict(zip(keys, values))
374 - def undelete_record(yo):
375 "marks record as active" 376 yo._data[0] = ' ' 377 yo._dirty = True 378 return yo
379 - def write(yo, **kwargs):
380 "write record data to disk" 381 if kwargs: 382 yo.gather_fields(kwargs) 383 if yo._dirty: 384 yo._update_disk()
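# Illustrative sketch of the access patterns _DbfRecord supports
# (hypothetical field names):
#
#     record.name                      # attribute access via __getattr__
#     record['name']                   # item access by field name
#     record[0]                        # item access by field position
#     data = record.scatter_fields()   # dict of {fieldname: value}
#     record.gather_fields(data)       # push a dict back into the record
#     record.write()                   # flush a dirty record to disk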
385 -class _DbfMemo(object):
386 """Provides access to memo fields as dictionaries 387 must override _init, _get_memo, and _put_memo to 388 store memo contents to disk"""
389 - def _init(yo):
390 "initialize disk file usage"
391 - def _get_memo(yo, block):
392 "retrieve memo contents from disk"
393 - def _put_memo(yo, data):
394 "store memo contents to disk"
395 - def __init__(yo, meta):
396 "" 397 yo.meta = meta 398 yo.memory = {} 399 yo.nextmemo = 1 400 yo._init() 401 yo.meta.newmemofile = False
402 - def get_memo(yo, block, field):
403 "gets the memo in block" 404 if yo.meta.ignorememos or not block: 405 return '' 406 if yo.meta.ondisk: 407 return yo._get_memo(block) 408 else: 409 return yo.memory[block]
410 - def put_memo(yo, data):
411 "stores data in memo file, returns block number" 412 if yo.meta.ignorememos or data == '': 413 return 0 414 if yo.meta.inmemory: 415 thismemo = yo.nextmemo 416 yo.nextmemo += 1 417 yo.memory[thismemo] = data 418 else: 419 thismemo = yo._put_memo(data) 420 return thismemo
421 -class _Db3Memo(_DbfMemo):
422 - def _init(yo):
423 "dBase III specific" 424 yo.meta.memo_size= 512 425 yo.record_header_length = 2 426 if yo.meta.ondisk and not yo.meta.ignorememos: 427 if yo.meta.newmemofile: 428 yo.meta.mfd = open(yo.meta.memoname, 'w+b') 429 yo.meta.mfd.write(io.packLongInt(1) + '\x00' * 508) 430 else: 431 try: 432 yo.meta.mfd = open(yo.meta.memoname, 'r+b') 433 yo.meta.mfd.seek(0) 434 yo.nextmemo = io.unpackLongInt(yo.meta.mfd.read(4)) 435 except: 436 raise DbfError("memo file appears to be corrupt")
437 - def _get_memo(yo, block):
438 block = int(block) 439 yo.meta.mfd.seek(block * yo.meta.memo_size) 440 eom = -1 441 data = '' 442 while eom == -1: 443 newdata = yo.meta.mfd.read(yo.meta.memo_size) 444 if not newdata: 445 return data 446 data += newdata 447 eom = data.find('\x1a\x1a') 448 return data[:eom].rstrip()
449 - def _put_memo(yo, data):
450 data = data.rstrip() 451 length = len(data) + yo.record_header_length # room for two ^Z at end of memo 452 blocks = length // yo.meta.memo_size 453 if length % yo.meta.memo_size: 454 blocks += 1 455 thismemo = yo.nextmemo 456 yo.nextmemo = thismemo + blocks 457 yo.meta.mfd.seek(0) 458 yo.meta.mfd.write(io.packLongInt(yo.nextmemo)) 459 yo.meta.mfd.seek(thismemo * yo.meta.memo_size) 460 yo.meta.mfd.write(data) 461 yo.meta.mfd.write('\x1a\x1a') 462 double_check = yo._get_memo(thismemo) 463 if len(double_check) != len(data): 464 uhoh = open('dbf_memo_dump.err','wb') 465 uhoh.write('thismemo: %d' % thismemo) 466 uhoh.write('nextmemo: %d' % yo.nextmemo) 467 uhoh.write('saved: %d bytes' % len(data)) 468 uhoh.write(data) 469 uhoh.write('retrieved: %d bytes' % len(double_check)) 470 uhoh.write(double_check) 471 uhoh.close() 472 raise DbfError("unknown error: memo not saved") 473 return thismemo
474 -class _VfpMemo(_DbfMemo):
475 - def _init(yo):
476 "Visual Foxpro 6 specific" 477 if yo.meta.ondisk and not yo.meta.ignorememos: 478 yo.record_header_length = 8 479 if yo.meta.newmemofile: 480 if yo.meta.memo_size == 0: 481 yo.meta.memo_size = 1 482 elif 1 < yo.meta.memo_size < 33: 483 yo.meta.memo_size *= 512 484 yo.meta.mfd = open(yo.meta.memoname, 'w+b') 485 nextmemo = 512 // yo.meta.memo_size 486 if nextmemo * yo.meta.memo_size < 512: 487 nextmemo += 1 488 yo.nextmemo = nextmemo 489 yo.meta.mfd.write(io.packLongInt(nextmemo, bigendian=True) + '\x00\x00' + \ 490 io.packShortInt(yo.meta.memo_size, bigendian=True) + '\x00' * 504) 491 else: 492 try: 493 yo.meta.mfd = open(yo.meta.memoname, 'r+b') 494 yo.meta.mfd.seek(0) 495 header = yo.meta.mfd.read(512) 496 yo.nextmemo = io.unpackLongInt(header[:4], bigendian=True) 497 yo.meta.memo_size = io.unpackShortInt(header[6:8], bigendian=True) 498 except: 499 raise DbfError("memo file appears to be corrupt")
500 - def _get_memo(yo, block):
501 yo.meta.mfd.seek(block * yo.meta.memo_size) 502 header = yo.meta.mfd.read(8) 503 length = io.unpackLongInt(header[4:], bigendian=True) 504 return yo.meta.mfd.read(length)
505 - def _put_memo(yo, data):
506 data = data.rstrip() # no trailing whitespace 507 yo.meta.mfd.seek(0) 508 thismemo = io.unpackLongInt(yo.meta.mfd.read(4), bigendian=True) 509 yo.meta.mfd.seek(0) 510 length = len(data) + yo.record_header_length # room for two ^Z at end of memo 511 blocks = length // yo.meta.memo_size 512 if length % yo.meta.memo_size: 513 blocks += 1 514 yo.meta.mfd.write(io.packLongInt(thismemo+blocks, bigendian=True)) 515 yo.meta.mfd.seek(thismemo*yo.meta.memo_size) 516 yo.meta.mfd.write('\x00\x00\x00\x01' + io.packLongInt(len(data), bigendian=True) + data) 517 return thismemo
518 # Public classes
519 -class DbfTable(object):
520 """Provides a framework for dbf style tables.""" 521 _version = 'basic memory table' 522 _versionabbv = 'dbf' 523 _fieldtypes = { 524 'D' : { 'Type':'Date', 'Init':io.addDate, 'Blank':Date.today, 'Retrieve':io.retrieveDate, 'Update':io.updateDate, }, 525 'L' : { 'Type':'Logical', 'Init':io.addLogical, 'Blank':bool, 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, }, 526 'M' : { 'Type':'Memo', 'Init':io.addMemo, 'Blank':str, 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, } } 527 _memoext = '' 528 _memotypes = tuple('M', ) 529 _memoClass = _DbfMemo 530 _yesMemoMask = '' 531 _noMemoMask = '' 532 _fixed_fields = ('M','D','L') # always same length in table 533 _variable_fields = tuple() # variable length in table 534 _character_fields = tuple('M', ) # field representing character data 535 _decimal_fields = tuple() # text-based numeric fields 536 _numeric_fields = tuple() # fields representing a number 537 _dbfTableHeader = array('c', '\x00' * 32) 538 _dbfTableHeader[0] = '\x00' # table type - none 539 _dbfTableHeader[8:10] = array('c', io.packShortInt(33)) 540 _dbfTableHeader[10] = '\x01' # record length -- one for delete flag 541 _dbfTableHeader[29] = '\x00' # code page -- none, using plain ascii 542 _dbfTableHeader = _dbfTableHeader.tostring() 543 _dbfTableHeaderExtra = '' 544 _supported_tables = [] 545 _read_only = False 546 _meta_only = False 547 _use_deleted = True 548 _backed_up = False
549 - class _DbfLists(object):
550 "implements the weakref structure for DbfLists"
551 - def __init__(yo):
552 yo._lists = set()
553 - def __iter__(yo):
554 yo._lists = set([s for s in yo._lists if s() is not None]) 555 return (s() for s in yo._lists if s() is not None)
556 - def __len__(yo):
557 yo._lists = set([s for s in yo._lists if s() is not None]) 558 return len(yo._lists)
559 - def add(yo, new_list):
560 yo._lists.add(weakref.ref(new_list)) 561 yo._lists = set([s for s in yo._lists if s() is not None])
562 - class _Indexen(object):
563 "implements the weakref structure for separate indexes"
564 - def __init__(yo):
565 yo._indexen = set()
566 - def __iter__(yo):
567 yo._indexen = set([s for s in yo._indexen if s() is not None]) 568 return (s() for s in yo._indexen if s() is not None)
569 - def __len__(yo):
570 yo._indexen = set([s for s in yo._indexen if s() is not None]) 571 return len(yo._indexen)
572 - def add(yo, new_list):
573 yo._indexen.add(weakref.ref(new_list)) 574 yo._indexen = set([s for s in yo._indexen if s() is not None])
575 - class _MetaData(dict):
576 blankrecord = None 577 fields = None 578 filename = None 579 dfd = None 580 memoname = None 581 newmemofile = False 582 memo = None 583 mfd = None 584 ignorememos = False 585 memofields = None 586 current = -1
587 - class _TableHeader(object):
588 - def __init__(yo, data):
589 if len(data) != 32: 590 raise DbfError('table header should be 32 bytes, but is %d bytes' % len(data)) 591 yo._data = array('c', data + '\x0d')
592 - def codepage(yo, cp=None):
593 "get/set code page of table" 594 if cp is None: 595 return yo._data[29] 596 else: 597 cp, sd, ld = _codepage_lookup(cp) 598 yo._data[29] = cp 599 return cp
600 @property
601 - def data(yo):
602 "main data structure" 603 date = io.packDate(Date.today()) 604 yo._data[1:4] = array('c', date) 605 return yo._data.tostring()
606 @data.setter
607 - def data(yo, bytes):
608 if len(bytes) < 32: 609 raise DbfError("length for data of %d is less than 32" % len(bytes)) 610 yo._data[:] = array('c', bytes)
611 @property
612 - def extra(yo):
613 "extra dbf info (located after headers, before data records)" 614 fieldblock = yo._data[32:] 615 for i in range(len(fieldblock)//32+1): 616 cr = i * 32 617 if fieldblock[cr] == '\x0d': 618 break 619 else: 620 raise DbfError("corrupt field structure") 621 cr += 33 # skip past CR 622 return yo._data[cr:].tostring()
623 @extra.setter
624 - def extra(yo, data):
625 fieldblock = yo._data[32:] 626 for i in range(len(fieldblock)//32+1): 627 cr = i * 32 628 if fieldblock[cr] == '\x0d': 629 break 630 else: 631 raise DbfError("corrupt field structure") 632 cr += 33 # skip past CR 633 yo._data[cr:] = array('c', data) # extra 634 yo._data[8:10] = array('c', io.packShortInt(len(yo._data))) # start
635 @property
636 - def field_count(yo):
637 "number of fields (read-only)" 638 fieldblock = yo._data[32:] 639 for i in range(len(fieldblock)//32+1): 640 cr = i * 32 641 if fieldblock[cr] == '\x0d': 642 break 643 else: 644 raise DbfError("corrupt field structure") 645 return len(fieldblock[:cr]) // 32
646 @property
647 - def fields(yo):
648 "field block structure" 649 fieldblock = yo._data[32:] 650 for i in range(len(fieldblock)//32+1): 651 cr = i * 32 652 if fieldblock[cr] == '\x0d': 653 break 654 else: 655 raise DbfError("corrupt field structure") 656 return fieldblock[:cr].tostring()
657 @fields.setter
658 - def fields(yo, block):
659 fieldblock = yo._data[32:] 660 for i in range(len(fieldblock)//32+1): 661 cr = i * 32 662 if fieldblock[cr] == '\x0d': 663 break 664 else: 665 raise DbfError("corrupt field structure") 666 cr += 32 # convert to indexing main structure 667 fieldlen = len(block) 668 if fieldlen % 32 != 0: 669 raise DbfError("fields structure corrupt: %d is not a multiple of 32" % fieldlen) 670 yo._data[32:cr] = array('c', block) # fields 671 yo._data[8:10] = array('c', io.packShortInt(len(yo._data))) # start 672 fieldlen = fieldlen // 32 673 recordlen = 1 # deleted flag 674 for i in range(fieldlen): 675 recordlen += ord(block[i*32+16]) 676 yo._data[10:12] = array('c', io.packShortInt(recordlen))
677 @property
678 - def record_count(yo):
679 "number of records (maximum 16,777,215)" 680 return io.unpackLongInt(yo._data[4:8].tostring())
681 @record_count.setter
682 - def record_count(yo, count):
683 yo._data[4:8] = array('c', io.packLongInt(count))
684 @property
685 - def record_length(yo):
686 "length of a record (read_only) (max of 65,535)" 687 return io.unpackShortInt(yo._data[10:12].tostring())
688 @property
689 - def start(yo):
690 "starting position of first record in file (must be within first 64K)" 691 return io.unpackShortInt(yo._data[8:10].tostring())
692 @start.setter
693 - def start(yo, pos):
694 yo._data[8:10] = array('c', io.packShortInt(pos))
695 @property
696 - def update(yo):
697 "date of last table modification (read-only)" 698 return io.unpackDate(yo._data[1:4].tostring())
699 @property
700 - def version(yo):
701 "dbf version" 702 return yo._data[0]
703 @version.setter
704 - def version(yo, ver):
705 yo._data[0] = ver
706 - class _Table(object):
707 "implements the weakref table for records"
708 - def __init__(yo, count, meta):
709 yo._meta = meta 710 yo._weakref_list = [weakref.ref(lambda x: None)] * count
711 - def __getitem__(yo, index):
712 maybe = yo._weakref_list[index]() 713 if maybe is None: 714 if index < 0: 715 index += yo._meta.header.record_count 716 size = yo._meta.header.record_length 717 location = index * size + yo._meta.header.start 718 yo._meta.dfd.seek(location) 719 bytes = yo._meta.dfd.read(size) 720 maybe = _DbfRecord(recnum=index, layout=yo._meta, kamikaze=bytes, _fromdisk=True) 721 yo._weakref_list[index] = weakref.ref(maybe) 722 return maybe
723 - def append(yo, record):
724 yo._weakref_list.append(weakref.ref(record))
725 - def clear(yo):
726 yo._weakref_list[:] = []
727 - class DbfIterator(object):
728 "returns records using current index"
729 - def __init__(yo, table):
730 yo._table = table 731 yo._index = -1 732 yo._more_records = True
733 - def __iter__(yo):
734 return yo
735 - def next(yo):
736 while yo._more_records: 737 yo._index += 1 738 if yo._index >= len(yo._table): 739 yo._more_records = False 740 continue 741 record = yo._table[yo._index] 742 if not yo._table.use_deleted and record.has_been_deleted: 743 continue 744 return record 745 else: 746 raise StopIteration
747 - def _buildHeaderFields(yo):
748 "constructs fieldblock for disk table" 749 fieldblock = array('c', '') 750 memo = False 751 yo._meta.header.version = chr(ord(yo._meta.header.version) & ord(yo._noMemoMask)) 752 for field in yo._meta.fields: 753 if yo._meta.fields.count(field) > 1: 754 raise DbfError("corrupted field structure (noticed in _buildHeaderFields)") 755 fielddef = array('c', '\x00' * 32) 756 fielddef[:11] = array('c', io.packStr(field)) 757 fielddef[11] = yo._meta[field]['type'] 758 fielddef[12:16] = array('c', io.packLongInt(yo._meta[field]['start'])) 759 fielddef[16] = chr(yo._meta[field]['length']) 760 fielddef[17] = chr(yo._meta[field]['decimals']) 761 fielddef[18] = chr(yo._meta[field]['flags']) 762 fieldblock.extend(fielddef) 763 if yo._meta[field]['type'] in yo._meta.memotypes: 764 memo = True 765 yo._meta.header.fields = fieldblock.tostring() 766 if memo: 767 yo._meta.header.version = chr(ord(yo._meta.header.version) | ord(yo._yesMemoMask)) 768 if yo._meta.memo is None: 769 yo._meta.memo = yo._memoClass(yo._meta)
770 - def _checkMemoIntegrity(yo):
771 "dBase III specific" 772 if yo._meta.header.version == '\x83': 773 try: 774 yo._meta.memo = yo._memoClass(yo._meta) 775 except: 776 yo._meta.dfd.close() 777 yo._meta.dfd = None 778 raise 779 if not yo._meta.ignorememos: 780 for field in yo._meta.fields: 781 if yo._meta[field]['type'] in yo._memotypes: 782 if yo._meta.header.version != '\x83': 783 yo._meta.dfd.close() 784 yo._meta.dfd = None 785 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos") 786 elif not os.path.exists(yo._meta.memoname): 787 yo._meta.dfd.close() 788 yo._meta.dfd = None 789 raise DbfError("Table structure corrupt: memo fields exist without memo file") 790 break
791 - def _initializeFields(yo):
792 "builds the FieldList of names, types, and descriptions from the disk file" 793 yo._meta.fields[:] = [] 794 offset = 1 795 fieldsdef = yo._meta.header.fields 796 if len(fieldsdef) % 32 != 0: 797 raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef)) 798 if len(fieldsdef) // 32 != yo.field_count: 799 raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32)) 800 for i in range(yo.field_count): 801 fieldblock = fieldsdef[i*32:(i+1)*32] 802 name = io.unpackStr(fieldblock[:11]) 803 type = fieldblock[11] 804 if not type in yo._meta.fieldtypes: 805 raise DbfError("Unknown field type: %s" % type) 806 start = offset 807 length = ord(fieldblock[16]) 808 offset += length 809 end = start + length 810 decimals = ord(fieldblock[17]) 811 flags = ord(fieldblock[18]) 812 if name in yo._meta.fields: 813 raise DbfError('Duplicate field name found: %s' % name) 814 yo._meta.fields.append(name) 815 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
816 - def _fieldLayout(yo, i):
817 "Returns field information Name Type(Length[,Decimals])" 818 name = yo._meta.fields[i] 819 type = yo._meta[name]['type'] 820 length = yo._meta[name]['length'] 821 decimals = yo._meta[name]['decimals'] 822 if type in yo._decimal_fields: 823 description = "%s %s(%d,%d)" % (name, type, length, decimals) 824 elif type in yo._fixed_fields: 825 description = "%s %s" % (name, type) 826 else: 827 description = "%s %s(%d)" % (name, type, length) 828 return description
829 - def _loadtable(yo):
830 "loads the records from disk to memory" 831 if yo._meta_only: 832 raise DbfError("%s has been closed, records are unavailable" % yo.filename) 833 dfd = yo._meta.dfd 834 header = yo._meta.header 835 dfd.seek(header.start) 836 allrecords = dfd.read() # kludge to get around mysterious errno 0 problems 837 dfd.seek(0) 838 length = header.record_length 839 for i in range(header.record_count): 840 record_data = allrecords[length*i:length*i+length] 841 yo._table.append(_DbfRecord(i, yo._meta, allrecords[length*i:length*i+length], _fromdisk=True)) 842 dfd.seek(0)
843 - def _list_fields(yo, specs, sep=','):
844 if specs is None: 845 specs = yo.field_names 846 elif isinstance(specs, str): 847 specs = specs.split(sep) 848 else: 849 specs = list(specs) 850 specs = [s.strip() for s in specs] 851 return specs
852 - def _update_disk(yo, headeronly=False):
853 "synchronizes the disk file with current data" 854 if yo._meta.inmemory: 855 return 856 fd = yo._meta.dfd 857 fd.seek(0) 858 fd.write(yo._meta.header.data) 859 if not headeronly: 860 for record in yo._table: 861 record._update_disk() 862 fd.flush() 863 fd.truncate(yo._meta.header.start + yo._meta.header.record_count * yo._meta.header.record_length)
864 - def __contains__(yo, key):
865 return key in yo.field_names
866 - def __enter__(yo):
867 return yo
868 - def __exit__(yo, *exc_info):
869 yo.close()
870 - def __getattr__(yo, name):
871 if name in ('_table'): 872 if yo._meta.ondisk: 873 yo._table = yo._Table(len(yo), yo._meta) 874 else: 875 yo._table = [] 876 yo._loadtable() 877 return object.__getattribute__(yo, name)
878 - def __getitem__(yo, value):
879 if type(value) == int: 880 if not -yo._meta.header.record_count <= value < yo._meta.header.record_count: 881 raise IndexError("Record %d is not in table." % value) 882 return yo._table[value] 883 elif type(value) == slice: 884 sequence = List(desc='%s --> %s' % (yo.filename, value)) 885 yo._dbflists.add(sequence) 886 for index in range(len(yo))[value]: 887 record = yo._table[index] 888 if yo.use_deleted is True or not record.has_been_deleted: 889 sequence.append(record) 890 return sequence 891 else: 892 raise TypeError('type <%s> not valid for indexing' % type(value))
893 - def __init__(yo, filename=':memory:', field_specs=None, memo_size=128, ignore_memos=False, 894 read_only=False, keep_memos=False, meta_only=False, codepage=None):
895 """open/create dbf file 896 filename should include path if needed 897 field_specs can be either a ;-delimited string or a list of strings 898 memo_size is always 512 for db3 memos 899 ignore_memos is useful if the memo file is missing or corrupt 900 read_only will load records into memory, then close the disk file 901 keep_memos will also load any memo fields into memory 902 meta_only will ignore all records, keeping only basic table information 903 codepage will override whatever is set in the table itself""" 904 if filename[0] == filename[-1] == ':': 905 if field_specs is None: 906 raise DbfError("field list must be specified for memory tables") 907 elif type(yo) is DbfTable: 908 raise DbfError("only memory tables supported") 909 yo._dbflists = yo._DbfLists() 910 yo._indexen = yo._Indexen() 911 yo._meta = meta = yo._MetaData() 912 meta.table = weakref.ref(yo) 913 meta.filename = filename 914 meta.fields = [] 915 meta.fieldtypes = yo._fieldtypes 916 meta.fixed_fields = yo._fixed_fields 917 meta.variable_fields = yo._variable_fields 918 meta.character_fields = yo._character_fields 919 meta.decimal_fields = yo._decimal_fields 920 meta.numeric_fields = yo._numeric_fields 921 meta.memotypes = yo._memotypes 922 meta.ignorememos = ignore_memos 923 meta.memo_size = memo_size 924 meta.input_decoder = codecs.getdecoder(input_decoding) # from ascii to unicode 925 meta.output_encoder = codecs.getencoder(input_decoding) # and back to ascii 926 meta.return_ascii = return_ascii 927 meta.header = header = yo._TableHeader(yo._dbfTableHeader) 928 header.extra = yo._dbfTableHeaderExtra 929 header.data #force update of date 930 if filename[0] == filename[-1] == ':': 931 yo._table = [] 932 meta.ondisk = False 933 meta.inmemory = True 934 meta.memoname = filename 935 else: 936 base, ext = os.path.splitext(filename) 937 if ext == '': 938 meta.filename = base + '.dbf' 939 meta.memoname = base + yo._memoext 940 meta.ondisk = True 941 meta.inmemory = False 942 if field_specs: 943 if meta.ondisk: 944 meta.dfd = open(meta.filename, 'w+b') 945 meta.newmemofile = True 946 yo.add_fields(field_specs) 947 header.codepage(codepage or default_codepage) 948 cp, sd, ld = _codepage_lookup(meta.header.codepage()) 949 meta.decoder = codecs.getdecoder(sd) 950 meta.encoder = codecs.getencoder(sd) 951 return 952 dfd = meta.dfd = open(meta.filename, 'r+b') 953 dfd.seek(0) 954 meta.header = header = yo._TableHeader(dfd.read(32)) 955 if not header.version in yo._supported_tables: 956 dfd.close() 957 dfd = None 958 raise DbfError("Unsupported dbf type: %s [%x]" % (version_map.get(meta.header.version, 'Unknown: %s' % meta.header.version), ord(meta.header.version))) 959 cp, sd, ld = _codepage_lookup(meta.header.codepage()) 960 yo._meta.decoder = codecs.getdecoder(sd) 961 yo._meta.encoder = codecs.getencoder(sd) 962 fieldblock = dfd.read(header.start - 32) 963 for i in range(len(fieldblock)//32+1): 964 fieldend = i * 32 965 if fieldblock[fieldend] == '\x0d': 966 break 967 else: 968 raise DbfError("corrupt field structure in header") 969 if len(fieldblock[:fieldend]) % 32 != 0: 970 raise DbfError("corrupt field structure in header") 971 header.fields = fieldblock[:fieldend] 972 header.extra = fieldblock[fieldend+1:] # skip trailing \r 973 yo._initializeFields() 974 yo._checkMemoIntegrity() 975 meta.current = -1 976 if len(yo) > 0: 977 meta.current = 0 978 dfd.seek(0) 979 if meta_only: 980 yo.close(keep_table=False, keep_memos=False) 981 elif read_only: 982 yo.close(keep_table=True, keep_memos=keep_memos) 983 if codepage is not None: 
984 cp, sd, ld = _codepage_lookup(codepage) 985 yo._meta.decoder = codecs.getdecoder(sd) 986 yo._meta.encoder = codecs.getencoder(sd)
987
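# Illustrative sketch of the constructor arguments described in the docstring
# above (hypothetical file and field names; 'Db3Table' stands for whichever
# concrete dBase III subclass of DbfTable the package defines):
#
#     table = Db3Table('people.dbf', 'name C(25); wisdom M; birthdate D')
#     archive = Db3Table('people.dbf', read_only=True, keep_memos=True)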
988 - def __iter__(yo):
989 return yo.DbfIterator(yo)
990 - def __len__(yo):
991 return yo._meta.header.record_count
992 - def __nonzero__(yo):
993 return yo._meta.header.record_count != 0
994 - def __repr__(yo):
995 if yo._read_only: 996 return __name__ + ".Table('%s', read_only=True)" % yo._meta.filename 997 elif yo._meta_only: 998 return __name__ + ".Table('%s', meta_only=True)" % yo._meta.filename 999 else: 1000 return __name__ + ".Table('%s')" % yo._meta.filename
1001 - def __str__(yo):
1002 if yo._read_only: 1003 status = "read-only" 1004 elif yo._meta_only: 1005 status = "meta-only" 1006 else: 1007 status = "read/write" 1008 str = """ 1009 Table: %s 1010 Type: %s 1011 Codepage: %s 1012 Status: %s 1013 Last updated: %s 1014 Record count: %d 1015 Field count: %d 1016 Record length: %d """ % (yo.filename, version_map.get(yo._meta.header.version, 1017 'unknown - ' + hex(ord(yo._meta.header.version))), yo.codepage, status, 1018 yo.last_update, len(yo), yo.field_count, yo.record_length) 1019 str += "\n --Fields--\n" 1020 for i in range(len(yo._meta.fields)): 1021 str += "%11d) %s\n" % (i, yo._fieldLayout(i)) 1022 return str
1023 @property
1024 - def codepage(yo):
1025 return "%s (%s)" % code_pages[yo._meta.header.codepage()]
1026 @codepage.setter
1027 - def codepage(yo, cp):
1028 cp = code_pages[yo._meta.header.codepage(cp)][0] 1029 yo._meta.decoder = codecs.getdecoder(cp) 1030 yo._meta.encoder = codecs.getencoder(cp) 1031 yo._update_disk(headeronly=True)
1032 @property
1033 - def field_count(yo):
1034 "the number of fields in the table" 1035 return yo._meta.header.field_count
1036 @property
1037 - def field_names(yo):
1038 "a list of the fields in the table" 1039 return yo._meta.fields[:]
1040 @property
1041 - def filename(yo):
1042 "table's file name, including path (if specified on open)" 1043 return yo._meta.filename
1044 @property
1045 - def last_update(yo):
1046 "date of last update" 1047 return yo._meta.header.update
1048 @property
1049 - def memoname(yo):
1050 "table's memo name (if path included in filename on open)" 1051 return yo._meta.memoname
1052 @property
1053 - def record_length(yo):
1054 "number of bytes in a record" 1055 return yo._meta.header.record_length
1056 @property
1057 - def record_number(yo):
1058 "index number of the current record" 1059 return yo._meta.current
1060 @property
1061 - def supported_tables(yo):
1062 "allowable table types" 1063 return yo._supported_tables
1064 @property
1065 - def use_deleted(yo):
1066 "process or ignore deleted records" 1067 return yo._use_deleted
1068 @use_deleted.setter
1069 - def use_deleted(yo, new_setting):
1070 yo._use_deleted = new_setting
1071 @property
1072 - def version(yo):
1073 "returns the dbf type of the table" 1074 return yo._version
1075 - def add_fields(yo, field_specs):
1076 """adds field(s) to the table layout; format is Name Type(Length,Decimals)[; Name Type(Length,Decimals)[...]] 1077 backup table is created with _backup appended to name 1078 then modifies current structure""" 1079 all_records = [record for record in yo] 1080 if yo: 1081 yo.create_backup() 1082 yo._meta.blankrecord = None 1083 meta = yo._meta 1084 offset = meta.header.record_length 1085 fields = yo._list_fields(field_specs, sep=';') 1086 for field in fields: 1087 try: 1088 name, format = field.split() 1089 if name[0] == '_' or name[0].isdigit() or not name.replace('_','').isalnum(): 1090 raise DbfError("Field names cannot start with _ or digits, and can only contain the _, letters, and digits") 1091 name = name.lower() 1092 if name in meta.fields: 1093 raise DbfError("Field '%s' already exists" % name) 1094 field_type = format[0].upper() 1095 if len(name) > 10: 1096 raise DbfError("Maximum field name length is 10. '%s' is %d characters long." % (name, len(name))) 1097 if not field_type in meta.fieldtypes.keys(): 1098 raise DbfError("Unknown field type: %s" % field_type) 1099 length, decimals = yo._meta.fieldtypes[field_type]['Init'](format) 1100 except ValueError: 1101 raise DbfError("invalid field specifier: %s" % field) 1102 start = offset 1103 end = offset + length 1104 offset = end 1105 meta.fields.append(name) 1106 meta[name] = {'type':field_type, 'start':start, 'length':length, 'end':end, 'decimals':decimals, 'flags':0} 1107 if meta[name]['type'] in yo._memotypes and meta.memo is None: 1108 meta.memo = yo._memoClass(meta) 1109 for record in yo: 1110 record[name] = meta.fieldtypes[field_type]['Blank']() 1111 yo._buildHeaderFields() 1112 yo._update_disk()
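# Illustrative sketch of the field-spec format add_fields() parses
# (hypothetical field names; the type letters must exist in the table's _fieldtypes):
#
#     table.add_fields('qty N(5,0); price N(8,2)')   # two numeric fields
#     table.add_fields('paid L')                     # one logical field
#     table.delete_fields('qty')                     # the reverse operation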
1113 - def append(yo, kamikaze='', drop=False, multiple=1):
1114 "adds <multiple> blank records, and fills fields with dict/tuple values if present" 1115 if not yo.field_count: 1116 raise DbfError("No fields defined, cannot append") 1117 empty_table = len(yo) == 0 1118 dictdata = False 1119 tupledata = False 1120 if not isinstance(kamikaze, _DbfRecord): 1121 if isinstance(kamikaze, dict): 1122 dictdata = kamikaze 1123 kamikaze = '' 1124 elif isinstance(kamikaze, tuple): 1125 tupledata = kamikaze 1126 kamikaze = '' 1127 newrecord = _DbfRecord(recnum=yo._meta.header.record_count, layout=yo._meta, kamikaze=kamikaze) 1128 yo._table.append(newrecord) 1129 yo._meta.header.record_count += 1 1130 if dictdata: 1131 newrecord.gather_fields(dictdata, drop=drop) 1132 elif tupledata: 1133 for index, item in enumerate(tupledata): 1134 newrecord[index] = item 1135 elif kamikaze == str: 1136 for field in yo._meta.memofields: 1137 newrecord[field] = '' 1138 elif kamikaze: 1139 for field in yo._meta.memofields: 1140 newrecord[field] = kamikaze[field] 1141 newrecord.write() 1142 multiple -= 1 1143 if multiple: 1144 data = newrecord._data 1145 single = yo._meta.header.record_count 1146 total = single + multiple 1147 while single < total: 1148 multi_record = _DbfRecord(single, yo._meta, kamikaze=data) 1149 yo._table.append(multi_record) 1150 for field in yo._meta.memofields: 1151 multi_record[field] = newrecord[field] 1152 single += 1 1153 multi_record.write() 1154 yo._meta.header.record_count = total # += multiple 1155 yo._meta.current = yo._meta.header.record_count - 1 1156 newrecord = multi_record 1157 yo._update_disk(headeronly=True) 1158 if empty_table: 1159 yo._meta.current = 0 1160 return newrecord
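# Illustrative sketch of the ways append() accepts data (hypothetical
# field names that would have to exist in the table's layout):
#
#     table.append()                                  # one blank record
#     table.append({'name': 'Ethan', 'paid': True})   # fill fields from a dict
#     table.append(('Ethan', True), multiple=3)       # fill from a tuple, 3 copies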
1161 - def bof(yo):
1162 "moves record pointer to previous usable record; returns True if no more usable records" 1163 while yo._meta.current > 0: 1164 yo._meta.current -= 1 1165 if yo.use_deleted or not yo.current().has_been_deleted: 1166 break 1167 else: 1168 yo._meta.current = -1 1169 return True 1170 return False
1171 - def bottom(yo, get_record=False):
1172 """sets record pointer to bottom of table 1173 if get_record, seeks to and returns last (non-deleted) record 1174 DbfError if table is empty 1175 Bof if all records deleted and use_deleted is False""" 1176 yo._meta.current = yo._meta.header.record_count 1177 if get_record: 1178 try: 1179 return yo.prev() 1180 except Bof: 1181 yo._meta.current = yo._meta.header.record_count 1182 raise Eof()
1183 - def close(yo, keep_table=False, keep_memos=False):
1184 """closes disk files 1185 ensures table data is available if keep_table 1186 ensures memo data is available if keep_memos""" 1187 yo._meta.inmemory = True 1188 if '_table' in dir(yo): 1189 del yo._table 1190 if keep_table: 1191 yo._table # force read of table into memory 1192 yo._read_only = True 1193 else: 1194 if yo._meta.ondisk: 1195 yo._meta.dfd.close() 1196 yo._meta.dfd = None 1197 yo._meta_only = True 1198 if yo._meta.mfd is not None: 1199 if not keep_memos: 1200 yo._meta.ignorememos = True 1201 else: 1202 memo_fields = [] 1203 for field in yo.field_names: 1204 if yo.is_memotype(field): 1205 memo_fields.append(field) 1206 for record in yo: 1207 for field in memo_fields: 1208 record[field] = record[field] 1209 yo._meta.mfd.close() 1210 yo._meta.mfd = None 1211 yo._meta.ondisk = False
1212 - def create_backup(yo, new_name=None, overwrite=False):
1213 "creates a backup table -- ignored if memory table" 1214 if yo.filename[0] == yo.filename[-1] == ':': 1215 return 1216 if new_name is None: 1217 new_name = os.path.splitext(yo.filename)[0] + '_backup.dbf' 1218 else: 1219 overwrite = True 1220 if overwrite or not yo._backed_up: 1221 bkup = open(new_name, 'wb') 1222 try: 1223 yo._meta.dfd.seek(0) 1224 copyfileobj(yo._meta.dfd, bkup) 1225 yo._backed_up = True 1226 finally: 1227 bkup.close()
1228 - def create_index(yo, key):
1229 return Index(yo, key)
1230 - def current(yo, index=False):
1231 "returns current logical record, or its index" 1232 if yo._meta.current < 0: 1233 raise Bof() 1234 elif yo._meta.current >= yo._meta.header.record_count: 1235 raise Eof() 1236 if index: 1237 return yo._meta.current 1238 return yo._table[yo._meta.current]
1239 - def delete_fields(yo, doomed):
1240 """removes field(s) from the table 1241 creates backup files with _backup appended to the file name, 1242 then modifies current structure""" 1243 doomed = yo._list_fields(doomed) 1244 for victim in doomed: 1245 if victim not in yo._meta.fields: 1246 raise DbfError("field %s not in table -- delete aborted" % victim) 1247 all_records = [record for record in yo] 1248 yo.create_backup() 1249 for victim in doomed: 1250 yo._meta.fields.pop(yo._meta.fields.index(victim)) 1251 start = yo._meta[victim]['start'] 1252 end = yo._meta[victim]['end'] 1253 for record in yo: 1254 record._data = record._data[:start] + record._data[end:] 1255 for field in yo._meta.fields: 1256 if yo._meta[field]['start'] == end: 1257 end = yo._meta[field]['end'] 1258 yo._meta[field]['start'] = start 1259 yo._meta[field]['end'] = start + yo._meta[field]['length'] 1260 start = yo._meta[field]['end'] 1261 yo._buildHeaderFields() 1262 yo._update_disk()
1263 - def eof(yo):
1264 "moves record pointer to next usable record; returns True if no more usable records" 1265 while yo._meta.current < yo._meta.header.record_count - 1: 1266 yo._meta.current += 1 1267 if yo.use_deleted or not yo.current().has_been_deleted: 1268 break 1269 else: 1270 yo._meta.current = yo._meta.header.record_count 1271 return True 1272 return False
1273 - def export(yo, records=None, filename=None, field_specs=None, format='csv', header=True):
1274 """writes the table using CSV or tab-delimited format, using the filename 1275 given if specified, otherwise the table name""" 1276 if filename is not None: 1277 path, filename = os.path.split(filename) 1278 else: 1279 path, filename = os.path.split(yo.filename) 1280 filename = os.path.join(path, filename) 1281 field_specs = yo._list_fields(field_specs) 1282 if records is None: 1283 records = yo 1284 format = format.lower() 1285 if format not in ('csv', 'tab', 'fixed'): 1286 raise DbfError("export format: csv, tab, or fixed -- not %s" % format) 1287 if format == 'fixed': 1288 format = 'txt' 1289 base, ext = os.path.splitext(filename) 1290 if ext.lower() in ('', '.dbf'): 1291 filename = base + "." + format[:3] 1292 fd = open(filename, 'w') 1293 try: 1294 if format == 'csv': 1295 csvfile = csv.writer(fd, dialect='dbf') 1296 if header: 1297 csvfile.writerow(field_specs) 1298 for record in records: 1299 fields = [] 1300 for fieldname in field_specs: 1301 fields.append(record[fieldname]) 1302 csvfile.writerow(fields) 1303 elif format == 'tab': 1304 if header: 1305 fd.write('\t'.join(field_specs) + '\n') 1306 for record in records: 1307 fields = [] 1308 for fieldname in field_specs: 1309 fields.append(str(record[fieldname])) 1310 fd.write('\t'.join(fields) + '\n') 1311 else: # format == 'fixed' 1312 header = open("%s_layout.txt" % os.path.splitext(filename)[0], 'w') 1313 header.write("%-15s Size\n" % "Field Name") 1314 header.write("%-15s ----\n" % ("-" * 15)) 1315 sizes = [] 1316 for field in field_specs: 1317 size = yo.size(field)[0] 1318 sizes.append(size) 1319 header.write("%-15s %3d\n" % (field, size)) 1320 header.write('\nTotal Records in file: %d\n' % len(records)) 1321 header.close() 1322 for record in records: 1323 fields = [] 1324 for i, field_name in enumerate(field_specs): 1325 fields.append("%-*s" % (sizes[i], record[field_name])) 1326 fd.write(''.join(fields) + '\n') 1327 finally: 1328 fd.close() 1329 fd = None 1330 return len(records)
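# Illustrative sketch of export() (hypothetical file and field names):
#
#     table.export(filename='people.csv', format='csv', header=True)
#     table.export(format='tab', field_specs='name, birthdate')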
1331 - def get_record(yo, recno):
1332 "returns record at physical_index[recno]" 1333 return yo._table[recno]
1334 - def goto(yo, criteria):
1335 """changes the record pointer to the first matching (non-deleted) record 1336 criteria should be either a tuple of tuple(value, field, func) triples, 1337 or an integer to go to""" 1338 if isinstance(criteria, int): 1339 if not -yo._meta.header.record_count <= criteria < yo._meta.header.record_count: 1340 raise IndexError("Record %d does not exist" % criteria) 1341 if criteria < 0: 1342 criteria += yo._meta.header.record_count 1343 yo._meta.current = criteria 1344 return yo.current() 1345 criteria = _normalize_tuples(tuples=criteria, length=3, filler=[_nop]) 1346 specs = tuple([(field, func) for value, field, func in criteria]) 1347 match = tuple([value for value, field, func in criteria]) 1348 current = yo.current(index=True) 1349 matchlen = len(match) 1350 while not yo.eof(): 1351 record = yo.current() 1352 results = record(*specs) 1353 if results == match: 1354 return record 1355 return yo.goto(current)
    def is_decimal(yo, name):
        "returns True if name is a numeric field type that can carry decimal places"
        return yo._meta[name]['type'] in yo._decimal_fields
1359 - def is_memotype(yo, name):
1360 "returns True if name is a memo type field" 1361 return yo._meta[name]['type'] in yo._memotypes
1362 - def new(yo, filename, field_specs=None):
1363 "returns a new table of the same type" 1364 if field_specs is None: 1365 field_specs = yo.structure() 1366 if not (filename[0] == filename[-1] == ':'): 1367 path, name = os.path.split(filename) 1368 if path == "": 1369 filename = os.path.join(os.path.split(yo.filename)[0], filename) 1370 elif name == "": 1371 filename = os.path.join(path, os.path.split(yo.filename)[1]) 1372 return yo.__class__(filename, field_specs)
1373 - def next(yo):
1374 "set record pointer to next (non-deleted) record, and return it" 1375 if yo.eof(): 1376 raise Eof() 1377 return yo.current()
1378 - def open(yo):
1379 meta = yo._meta 1380 meta.inmemory = False 1381 meta.ondisk = True 1382 yo._read_only = False 1383 yo._meta_only = False 1384 if '_table' in dir(yo): 1385 del yo._table 1386 dfd = meta.dfd = open(meta.filename, 'r+b') 1387 dfd.seek(0) 1388 meta.header = header = yo._TableHeader(dfd.read(32)) 1389 if not header.version in yo._supported_tables: 1390 dfd.close() 1391 dfd = None 1392 raise DbfError("Unsupported dbf type: %s [%x]" % (version_map.get(meta.header.version, 'Unknown: %s' % meta.header.version), ord(meta.header.version))) 1393 cp, sd, ld = _codepage_lookup(meta.header.codepage()) 1394 meta.decoder = codecs.getdecoder(sd) 1395 meta.encoder = codecs.getencoder(sd) 1396 fieldblock = dfd.read(header.start - 32) 1397 for i in range(len(fieldblock)//32+1): 1398 fieldend = i * 32 1399 if fieldblock[fieldend] == '\x0d': 1400 break 1401 else: 1402 raise DbfError("corrupt field structure in header") 1403 if len(fieldblock[:fieldend]) % 32 != 0: 1404 raise DbfError("corrupt field structure in header") 1405 header.fields = fieldblock[:fieldend] 1406 header.extra = fieldblock[fieldend+1:] # skip trailing \r 1407 yo._initializeFields() 1408 yo._checkMemoIntegrity() 1409 meta.current = -1 1410 if len(yo) > 0: 1411 meta.current = 0 1412 dfd.seek(0)
1413
1414 - def pack(yo, _pack=True):
1415 "physically removes all deleted records" 1416 for dbfindex in yo._indexen: 1417 dbfindex.clear() 1418 newtable = [] 1419 index = 0 1420 offset = 0 # +1 for each purged record 1421 for record in yo._table: 1422 found = False 1423 if record.has_been_deleted and _pack: 1424 for dbflist in yo._dbflists: 1425 if dbflist._purge(record, record.record_number - offset, 1): 1426 found = True 1427 record._recnum = -1 1428 else: 1429 record._recnum = index 1430 newtable.append(record) 1431 index += 1 1432 if found: 1433 offset += 1 1434 found = False 1435 yo._table.clear() 1436 for record in newtable: 1437 yo._table.append(record) 1438 yo._meta.header.record_count = index 1439 yo._current = -1 1440 yo._update_disk() 1441 yo.reindex()
1442 - def prev(yo):
1443 "set record pointer to previous (non-deleted) record, and return it" 1444 if yo.bof(): 1445 raise Bof 1446 return yo.current()
1447 - def query(yo, sql_command=None, python=None):
1448 "uses exec to perform queries on the table" 1449 if sql_command: 1450 return sql(yo, sql_command) 1451 elif python is None: 1452 raise DbfError("query: python parameter must be specified") 1453 possible = List(desc="%s --> %s" % (yo.filename, python)) 1454 yo._dbflists.add(possible) 1455 query_result = {} 1456 select = 'query_result["keep"] = %s' % python 1457 g = {} 1458 use_deleted = yo.use_deleted 1459 for record in yo: 1460 query_result['keep'] = False 1461 g['query_result'] = query_result 1462 exec select in g, record 1463 if query_result['keep']: 1464 possible.append(record) 1465 record.write() 1466 return possible
1467 - def reindex(yo):
1468 for dbfindex in yo._indexen: 1469 dbfindex.reindex()
1470 - def rename_field(yo, oldname, newname):
1471 "renames an existing field" 1472 if yo: 1473 yo.create_backup() 1474 if not oldname in yo._meta.fields: 1475 raise DbfError("field --%s-- does not exist -- cannot rename it." % oldname) 1476 if newname[0] == '_' or newname[0].isdigit() or not newname.replace('_','').isalnum(): 1477 raise DbfError("field names cannot start with _ or digits, and can only contain the _, letters, and digits") 1478 newname = newname.lower() 1479 if newname in yo._meta.fields: 1480 raise DbfError("field --%s-- already exists" % newname) 1481 if len(newname) > 10: 1482 raise DbfError("maximum field name length is 10. '%s' is %d characters long." % (newname, len(newname))) 1483 yo._meta[newname] = yo._meta[oldname] 1484 yo._meta.fields[yo._meta.fields.index(oldname)] = newname 1485 yo._buildHeaderFields() 1486 yo._update_disk(headeronly=True)
1487 - def size(yo, field):
1488 "returns size of field as a tuple of (length, decimals)" 1489 if field in yo: 1490 return (yo._meta[field]['length'], yo._meta[field]['decimals']) 1491 raise DbfError("%s is not a field in %s" % (field, yo.filename))
1492 - def structure(yo, fields=None):
1493 """return list of fields suitable for creating same table layout 1494 @param fields: list of fields or None for all fields""" 1495 field_specs = [] 1496 fields = yo._list_fields(fields) 1497 try: 1498 for name in fields: 1499 field_specs.append(yo._fieldLayout(yo.field_names.index(name))) 1500 except ValueError: 1501 raise DbfError("field --%s-- does not exist" % name) 1502 return field_specs
    def top(yo, get_record=False):
        """sets record pointer to top of table; if get_record, seeks to and returns first (non-deleted) record
        raises Bof if the table is empty, or if every record is deleted and use_deleted is False"""
        yo._meta.current = -1
        if get_record:
            try:
                return yo.next()
            except Eof:
                yo._meta.current = -1
                raise Bof()
1514 - def type(yo, field):
1515 "returns type of field" 1516 if field in yo: 1517 return yo._meta[field]['type'] 1518 raise DbfError("%s is not a field in %s" % (field, yo.filename))
1519 - def zap(yo, areyousure=False):
1520 """removes all records from table -- this cannot be undone! 1521 areyousure must be True, else error is raised""" 1522 if areyousure: 1523 if yo._meta.inmemory: 1524 yo._table = [] 1525 else: 1526 yo._table.clear() 1527 yo._meta.header.record_count = 0 1528 yo._current = -1 1529 yo._update_disk() 1530 else: 1531 raise DbfError("You must say you are sure to wipe the table")
    # these assignments are for backward compatibility, and will go away
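# Illustrative usage sketch (not part of the original module): the helper below shows how
# the navigation, query, and export methods defined above are meant to be combined.  The
# 'age' field, the output name 'adults', and the passed-in table are hypothetical, and the
# function is never called at import time.
def _example_table_walkthrough(table):
    "walks an open DbfTable with top/eof/current, filters it with query(), and exports to csv"
    table.top()                             # reset the record pointer
    while not table.eof():                  # eof() advances to the next usable record
        record = table.current()
        # ... examine record here ...
    adults = table.query(python="age >= 21")       # the expression is exec'd against each record
    table.export(records=adults, filename="adults", format='csv', header=True)
    return adults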
1533 -class Db3Table(DbfTable):
1534 """Provides an interface for working with dBase III tables.""" 1535 _version = 'dBase III Plus' 1536 _versionabbv = 'db3' 1537 _fieldtypes = { 1538 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter}, 1539 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate}, 1540 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical}, 1541 'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo}, 1542 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addNumeric} } 1543 _memoext = '.dbt' 1544 _memotypes = ('M',) 1545 _memoClass = _Db3Memo 1546 _yesMemoMask = '\x80' 1547 _noMemoMask = '\x7f' 1548 _fixed_fields = ('D','L','M') 1549 _variable_fields = ('C','N') 1550 _character_fields = ('C','M') 1551 _decimal_fields = ('N',) 1552 _numeric_fields = ('N',) 1553 _dbfTableHeader = array('c', '\x00' * 32) 1554 _dbfTableHeader[0] = '\x03' # version - dBase III w/o memo's 1555 _dbfTableHeader[8:10] = array('c', io.packShortInt(33)) 1556 _dbfTableHeader[10] = '\x01' # record length -- one for delete flag 1557 _dbfTableHeader[29] = '\x03' # code page -- 437 US-MS DOS 1558 _dbfTableHeader = _dbfTableHeader.tostring() 1559 _dbfTableHeaderExtra = '' 1560 _supported_tables = ['\x03', '\x83'] 1561 _read_only = False 1562 _meta_only = False 1563 _use_deleted = True
1564 - def _checkMemoIntegrity(yo):
1565 "dBase III specific" 1566 if yo._meta.header.version == '\x83': 1567 try: 1568 yo._meta.memo = yo._memoClass(yo._meta) 1569 except: 1570 yo._meta.dfd.close() 1571 yo._meta.dfd = None 1572 raise 1573 if not yo._meta.ignorememos: 1574 for field in yo._meta.fields: 1575 if yo._meta[field]['type'] in yo._memotypes: 1576 if yo._meta.header.version != '\x83': 1577 yo._meta.dfd.close() 1578 yo._meta.dfd = None 1579 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos") 1580 elif not os.path.exists(yo._meta.memoname): 1581 yo._meta.dfd.close() 1582 yo._meta.dfd = None 1583 raise DbfError("Table structure corrupt: memo fields exist without memo file") 1584 break
1585 - def _initializeFields(yo):
1586 "builds the FieldList of names, types, and descriptions" 1587 yo._meta.fields[:] = [] 1588 offset = 1 1589 fieldsdef = yo._meta.header.fields 1590 if len(fieldsdef) % 32 != 0: 1591 raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef)) 1592 if len(fieldsdef) // 32 != yo.field_count: 1593 raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32)) 1594 for i in range(yo.field_count): 1595 fieldblock = fieldsdef[i*32:(i+1)*32] 1596 name = io.unpackStr(fieldblock[:11]) 1597 type = fieldblock[11] 1598 if not type in yo._meta.fieldtypes: 1599 raise DbfError("Unknown field type: %s" % type) 1600 start = offset 1601 length = ord(fieldblock[16]) 1602 offset += length 1603 end = start + length 1604 decimals = ord(fieldblock[17]) 1605 flags = ord(fieldblock[18]) 1606 yo._meta.fields.append(name) 1607 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
1608 -class FpTable(DbfTable):
1609 'Provides an interface for working with FoxPro 2 tables' 1610 _version = 'Foxpro' 1611 _versionabbv = 'fp' 1612 _fieldtypes = { 1613 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter}, 1614 'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric}, 1615 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric}, 1616 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical}, 1617 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate}, 1618 'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addVfpMemo}, 1619 'G' : {'Type':'General', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo}, 1620 'P' : {'Type':'Picture', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo}, 1621 '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} } 1622 _memoext = '.fpt' 1623 _memotypes = ('G','M','P') 1624 _memoClass = _VfpMemo 1625 _yesMemoMask = '\xf5' # 1111 0101 1626 _noMemoMask = '\x03' # 0000 0011 1627 _fixed_fields = ('B','D','G','I','L','M','P','T','Y') 1628 _variable_fields = ('C','F','N') 1629 _character_fields = ('C','M') # field representing character data 1630 _decimal_fields = ('F','N') 1631 _numeric_fields = ('B','F','I','N','Y') 1632 _supported_tables = ('\x03', '\xf5') 1633 _dbfTableHeader = array('c', '\x00' * 32) 1634 _dbfTableHeader[0] = '\x30' # version - Foxpro 6 0011 0000 1635 _dbfTableHeader[8:10] = array('c', io.packShortInt(33+263)) 1636 _dbfTableHeader[10] = '\x01' # record length -- one for delete flag 1637 _dbfTableHeader[29] = '\x03' # code page -- 437 US-MS DOS 1638 _dbfTableHeader = _dbfTableHeader.tostring() 1639 _dbfTableHeaderExtra = '\x00' * 263 1640 _use_deleted = True
1641 - def _checkMemoIntegrity(yo):
1642 if os.path.exists(yo._meta.memoname): 1643 try: 1644 yo._meta.memo = yo._memoClass(yo._meta) 1645 except: 1646 yo._meta.dfd.close() 1647 yo._meta.dfd = None 1648 raise 1649 if not yo._meta.ignorememos: 1650 for field in yo._meta.fields: 1651 if yo._meta[field]['type'] in yo._memotypes: 1652 if not os.path.exists(yo._meta.memoname): 1653 yo._meta.dfd.close() 1654 yo._meta.dfd = None 1655 raise DbfError("Table structure corrupt: memo fields exist without memo file") 1656 break
1657 - def _initializeFields(yo):
1658 "builds the FieldList of names, types, and descriptions" 1659 yo._meta.fields[:] = [] 1660 offset = 1 1661 fieldsdef = yo._meta.header.fields 1662 if len(fieldsdef) % 32 != 0: 1663 raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef)) 1664 if len(fieldsdef) // 32 != yo.field_count: 1665 raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32)) 1666 for i in range(yo.field_count): 1667 fieldblock = fieldsdef[i*32:(i+1)*32] 1668 name = io.unpackStr(fieldblock[:11]) 1669 type = fieldblock[11] 1670 if not type in yo._meta.fieldtypes: 1671 raise DbfError("Unknown field type: %s" % type) 1672 elif type == '0': 1673 return # ignore nullflags 1674 start = offset 1675 length = ord(fieldblock[16]) 1676 offset += length 1677 end = start + length 1678 decimals = ord(fieldblock[17]) 1679 flags = ord(fieldblock[18]) 1680 yo._meta.fields.append(name) 1681 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
1682
1683 -class VfpTable(DbfTable):
1684 'Provides an interface for working with Visual FoxPro 6 tables' 1685 _version = 'Visual Foxpro v6' 1686 _versionabbv = 'vfp' 1687 _fieldtypes = { 1688 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter}, 1689 'Y' : {'Type':'Currency', 'Retrieve':io.retrieveCurrency, 'Update':io.updateCurrency, 'Blank':Decimal(), 'Init':io.addVfpCurrency}, 1690 'B' : {'Type':'Double', 'Retrieve':io.retrieveDouble, 'Update':io.updateDouble, 'Blank':float, 'Init':io.addVfpDouble}, 1691 'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric}, 1692 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric}, 1693 'I' : {'Type':'Integer', 'Retrieve':io.retrieveInteger, 'Update':io.updateInteger, 'Blank':int, 'Init':io.addVfpInteger}, 1694 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical}, 1695 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate}, 1696 'T' : {'Type':'DateTime', 'Retrieve':io.retrieveVfpDateTime, 'Update':io.updateVfpDateTime, 'Blank':DateTime.now, 'Init':io.addVfpDateTime}, 1697 'M' : {'Type':'Memo', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo}, 1698 'G' : {'Type':'General', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo}, 1699 'P' : {'Type':'Picture', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo}, 1700 '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} } 1701 _memoext = '.fpt' 1702 _memotypes = ('G','M','P') 1703 _memoClass = _VfpMemo 1704 _yesMemoMask = '\x30' # 0011 0000 1705 _noMemoMask = '\x30' # 0011 0000 1706 _fixed_fields = ('B','D','G','I','L','M','P','T','Y') 1707 _variable_fields = ('C','F','N') 1708 _character_fields = ('C','M') # field representing character data 1709 _decimal_fields = ('F','N') 1710 _numeric_fields = ('B','F','I','N','Y') 1711 _supported_tables = ('\x30',) 1712 _dbfTableHeader = array('c', '\x00' * 32) 1713 _dbfTableHeader[0] = '\x30' # version - Foxpro 6 0011 0000 1714 _dbfTableHeader[8:10] = array('c', io.packShortInt(33+263)) 1715 _dbfTableHeader[10] = '\x01' # record length -- one for delete flag 1716 _dbfTableHeader[29] = '\x03' # code page -- 437 US-MS DOS 1717 _dbfTableHeader = _dbfTableHeader.tostring() 1718 _dbfTableHeaderExtra = '\x00' * 263 1719 _use_deleted = True
1720 - def _checkMemoIntegrity(yo):
1721 if os.path.exists(yo._meta.memoname): 1722 try: 1723 yo._meta.memo = yo._memoClass(yo._meta) 1724 except: 1725 yo._meta.dfd.close() 1726 yo._meta.dfd = None 1727 raise 1728 if not yo._meta.ignorememos: 1729 for field in yo._meta.fields: 1730 if yo._meta[field]['type'] in yo._memotypes: 1731 if not os.path.exists(yo._meta.memoname): 1732 yo._meta.dfd.close() 1733 yo._meta.dfd = None 1734 raise DbfError("Table structure corrupt: memo fields exist without memo file") 1735 break
1736 - def _initializeFields(yo):
1737 "builds the FieldList of names, types, and descriptions" 1738 yo._meta.fields[:] = [] 1739 offset = 1 1740 fieldsdef = yo._meta.header.fields 1741 for i in range(yo.field_count): 1742 fieldblock = fieldsdef[i*32:(i+1)*32] 1743 name = io.unpackStr(fieldblock[:11]) 1744 type = fieldblock[11] 1745 if not type in yo._meta.fieldtypes: 1746 raise DbfError("Unknown field type: %s" % type) 1747 elif type == '0': 1748 return # ignore nullflags 1749 start = io.unpackLongInt(fieldblock[12:16]) 1750 length = ord(fieldblock[16]) 1751 offset += length 1752 end = start + length 1753 decimals = ord(fieldblock[17]) 1754 flags = ord(fieldblock[18]) 1755 yo._meta.fields.append(name) 1756 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
1757 -class List(object):
1758 "list of Dbf records, with set-like behavior" 1759 _desc = ''
1760 - def __init__(yo, new_records=None, desc=None, key=None):
1761 yo._list = [] 1762 yo._set = set() 1763 if key is not None: 1764 yo.key = key 1765 if key.__doc__ is None: 1766 key.__doc__ = 'unknown' 1767 key = yo.key 1768 yo._current = -1 1769 if isinstance(new_records, yo.__class__) and key is new_records.key: 1770 yo._list = new_records._list[:] 1771 yo._set = new_records._set.copy() 1772 yo._current = 0 1773 elif new_records is not None: 1774 for record in new_records: 1775 value = key(record) 1776 item = (record.record_table, record.record_number, value) 1777 if value not in yo._set: 1778 yo._set.add(value) 1779 yo._list.append(item) 1780 yo._current = 0 1781 if desc is not None: 1782 yo._desc = desc
1783 - def __add__(yo, other):
1784 key = yo.key 1785 if isinstance(other, (DbfTable, list)): 1786 other = yo.__class__(other, key=key) 1787 if isinstance(other, yo.__class__): 1788 result = yo.__class__() 1789 result._set = yo._set.copy() 1790 result._list[:] = yo._list[:] 1791 result.key = yo.key 1792 if key is other.key: # same key? just compare key values 1793 for item in other._list: 1794 if item[2] not in result._set: 1795 result._set.add(item[2]) 1796 result._list.append(item) 1797 else: # different keys, use this list's key on other's records 1798 for rec in other: 1799 value = key(rec) 1800 if value not in result._set: 1801 result._set.add(value) 1802 result._list.append((rec.record_table, rec.record_number, value)) 1803 result._current = 0 if result else -1 1804 return result 1805 return NotImplemented
1806 - def __contains__(yo, record):
1807 if isinstance(record, tuple): 1808 item = record 1809 else: 1810 item = record.record_table, record.record_number, yo.key(record) 1811 return item in yo._set
    def __delitem__(yo, key):
        if isinstance(key, int):
            item = yo._list.pop(key)        # pop is a call, not a subscript
            yo._set.remove(item[2])
        elif isinstance(key, slice):
            yo._set.difference_update([item[2] for item in yo._list[key]])
            yo._list.__delitem__(key)
        else:
            raise TypeError
    def __getitem__(yo, key):
        if isinstance(key, int):
            count = len(yo._list)
            if not -count <= key < count:
                raise IndexError("Record %d is not in list." % key)
            return yo._get_record(*yo._list[key])
        elif isinstance(key, slice):
            result = yo.__class__()
            result._list[:] = yo._list[key]
            result._set = set([item[2] for item in result._list])   # _set holds key values, not whole items
            result.key = yo.key
            result._current = 0 if result else -1
            return result
        else:
            raise TypeError('indices must be integers')
1836 - def __iter__(yo):
1837 return (table.get_record(recno) for table, recno, value in yo._list)
1838 - def __len__(yo):
1839 return len(yo._list)
1840 - def __nonzero__(yo):
1841 return len(yo) > 0
1842 - def __radd__(yo, other):
1843 return yo.__add__(other)
1844 - def __repr__(yo):
1845 if yo._desc: 1846 return "%s(key=%s - %s - %d records)" % (yo.__class__, yo.key.__doc__, yo._desc, len(yo._list)) 1847 else: 1848 return "%s(key=%s - %d records)" % (yo.__class__, yo.key.__doc__, len(yo._list))
1849 - def __rsub__(yo, other):
1850 key = yo.key 1851 if isinstance(other, (DbfTable, list)): 1852 other = yo.__class__(other, key=key) 1853 if isinstance(other, yo.__class__): 1854 result = yo.__class__() 1855 result._list[:] = other._list[:] 1856 result._set = other._set.copy() 1857 result.key = key 1858 lost = set() 1859 if key is other.key: 1860 for item in yo._list: 1861 if item[2] in result._list: 1862 result._set.remove(item[2]) 1863 lost.add(item) 1864 else: 1865 for rec in other: 1866 value = key(rec) 1867 if value in result._set: 1868 result._set.remove(value) 1869 lost.add((rec.record_table, rec.record_number, value)) 1870 result._list = [item for item in result._list if item not in lost] 1871 result._current = 0 if result else -1 1872 return result 1873 return NotImplemented
1874 - def __sub__(yo, other):
1875 key = yo.key 1876 if isinstance(other, (DbfTable, list)): 1877 other = yo.__class__(other, key=key) 1878 if isinstance(other, yo.__class__): 1879 result = yo.__class__() 1880 result._list[:] = yo._list[:] 1881 result._set = yo._set.copy() 1882 result.key = key 1883 lost = set() 1884 if key is other.key: 1885 for item in other._list: 1886 if item[2] in result._set: 1887 result._set.remove(item[2]) 1888 lost.add(item[2]) 1889 else: 1890 for rec in other: 1891 value = key(rec) 1892 if value in result._set: 1893 result._set.remove(value) 1894 lost.add(value) 1895 result._list = [item for item in result._list if item[2] not in lost] 1896 result._current = 0 if result else -1 1897 return result 1898 return NotImplemented
1899 - def _maybe_add(yo, item):
1900 if item[2] not in yo._set: 1901 yo._set.add(item[2]) 1902 yo._list.append(item)
1903 - def _get_record(yo, table=None, rec_no=None, value=None):
1904 if table is rec_no is None: 1905 table, rec_no, value = yo._list[yo._current] 1906 return table.get_record(rec_no)
1907 - def _purge(yo, record, old_record_number, offset):
1908 partial = record.record_table, old_record_number 1909 records = sorted(yo._list, key=lambda item: (item[0], item[1])) 1910 for item in records: 1911 if partial == item[:2]: 1912 found = True 1913 break 1914 elif partial[0] is item[0] and partial[1] < item[1]: 1915 found = False 1916 break 1917 else: 1918 found = False 1919 if found: 1920 yo._list.pop(yo._list.index(item)) 1921 yo._set.remove(item[2]) 1922 start = records.index(item) + found 1923 for item in records[start:]: 1924 if item[0] is not partial[0]: # into other table's records 1925 break 1926 i = yo._list.index(item) 1927 yo._set.remove(item[2]) 1928 item = item[0], (item[1] - offset), item[2] 1929 yo._list[i] = item 1930 yo._set.add(item[2]) 1931 return found
1932 - def append(yo, new_record):
1933 yo._maybe_add((new_record.record_table, new_record.record_number, yo.key(new_record))) 1934 if yo._current == -1 and yo._list: 1935 yo._current = 0
1936 - def bottom(yo):
1937 if yo._list: 1938 yo._current = len(yo._list) - 1 1939 return yo._get_record() 1940 raise DbfError("dbf.List is empty")
1941 - def clear(yo):
1942 yo._list = [] 1943 yo._set = set() 1944 yo._current = -1
1945 - def current(yo):
1946 if yo._current < 0: 1947 raise Bof() 1948 elif yo._current == len(yo._list): 1949 raise Eof() 1950 return yo._get_record()
    def extend(yo, new_records):
        key = yo.key
        if isinstance(new_records, yo.__class__):
            if key is new_records.key:      # same key? just compare key values
                for item in new_records._list:
                    yo._maybe_add(item)
            else:                           # different keys, use this list's key on other's records
                for rec in new_records:
                    value = key(rec)
                    yo._maybe_add((rec.record_table, rec.record_number, value))
        else:
            for record in new_records:
                value = key(record)         # was key(rec) -- 'rec' is undefined in this branch
                yo._maybe_add((record.record_table, record.record_number, value))
        if yo._current == -1 and yo._list:
            yo._current = 0
1967 - def goto(yo, index_number):
1968 if yo._list: 1969 if 0 <= index_number <= len(yo._list): 1970 yo._current = index_number 1971 return yo._get_record() 1972 raise DbfError("index %d not in dbf.List of %d records" % (index_number, len(yo._list))) 1973 raise DbfError("dbf.List is empty")
    # NOTE: this sort-style index() looks like a leftover from the table code -- it references
    # yo._meta and yo._index, which dbf.List never defines -- and it is shadowed by the
    # index(record, start, stop) method defined immediately below.
    def index(yo, sort=None, reverse=False):
        "sort= ((field_name, func), (field_name, func),) | 'ORIGINAL'"
        if sort is None:
            results = []
            for field, func in yo._meta.index:
                results.append("%s(%s)" % (func.__name__, field))
            return ', '.join(results + ['reverse=%s' % yo._meta.index_reversed])
        yo._meta.index_reversed = reverse
        if sort == 'ORIGINAL':
            yo._index = range(yo._meta.header.record_count)
            yo._meta.index = 'ORIGINAL'
            if reverse:
                yo._index.reverse()
            return
        new_sort = _normalize_tuples(tuples=sort, length=2, filler=[_nop])
        yo._meta.index = tuple(new_sort)
        yo._meta.orderresults = [''] * len(yo)
        for record in yo:
            yo._meta.orderresults[record.record_number] = record()
        yo._index.sort(key=lambda i: yo._meta.orderresults[i], reverse=reverse)
1994 - def index(yo, record, start=None, stop=None):
1995 item = record.record_table, record.record_number, yo.key(record) 1996 if start is None: 1997 start = 0 1998 if stop is None: 1999 stop = len(yo._list) 2000 return yo._list.index(item, start, stop)
2001 - def insert(yo, i, record):
2002 item = record.record_table, record.record_number, yo.key(record) 2003 if item not in yo._set: 2004 yo._set.add(item[2]) 2005 yo._list.insert(i, item)
2006 - def key(yo, record):
2007 "table_name, record_number" 2008 return record.record_table, record.record_number
2009 - def next(yo):
2010 if yo._current < len(yo._list): 2011 yo._current += 1 2012 if yo._current < len(yo._list): 2013 return yo._get_record() 2014 raise Eof()
2015 - def pop(yo, index=None):
2016 if index is None: 2017 table, recno, value = yo._list.pop() 2018 else: 2019 table, recno, value = yo._list.pop(index) 2020 yo._set.remove(value) 2021 return yo._get_record(table, recno, value)
2022 - def prev(yo):
2023 if yo._current >= 0: 2024 yo._current -= 1 2025 if yo._current > -1: 2026 return yo._get_record() 2027 raise Bof()
2028 - def remove(yo, record):
2029 item = record.record_table, record.record_number, yo.key(record) 2030 yo._list.remove(item) 2031 yo._set.remove(item[2])
2032 - def reverse(yo):
2033 return yo._list.reverse()
2034 - def top(yo):
2035 if yo._list: 2036 yo._current = 0 2037 return yo._get_record() 2038 raise DbfError("dbf.List is empty")
2039 - def sort(yo, key=None, reverse=False):
2040 if key is None: 2041 return yo._list.sort(reverse=reverse) 2042 return yo._list.sort(key=lambda item: key(item[0].get_record(item[1])), reverse=reverse)
2043
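# Illustrative sketch (not part of the original source): dbf.List supports set-like arithmetic
# via __add__/__sub__, with membership decided by the key() value of each record.  The table,
# its records, and the 'lastname' field used below are hypothetical.
def _example_list_arithmetic(table):
    everyone = List(table, desc="all records")       # default key: (record_table, record_number)
    smiths = List((rec for rec in table if rec['lastname'] == 'Smith'),
                  desc="Smiths", key=lambda rec: rec['lastname'])
    not_smiths = everyone - smiths      # difference applies everyone's key to smiths' records
    combined = everyone + smiths        # union; duplicates are suppressed by key value
    return not_smiths, combined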
2044 -class DbfCsv(csv.Dialect):
2045 "csv format for exporting tables" 2046 delimiter = ',' 2047 doublequote = True 2048 escapechar = None 2049 lineterminator = '\n' 2050 quotechar = '"' 2051 skipinitialspace = True 2052 quoting = csv.QUOTE_NONNUMERIC
2053 -class Index(object):
2054 - class IndexIterator(object):
2055 "returns records using this index"
2056 - def __init__(yo, table, records):
2057 yo.table = table 2058 yo.records = records 2059 yo.index = 0
2060 - def __iter__(yo):
2061 return yo
2062 - def next(yo):
2063 while yo.index < len(yo.records): 2064 record = yo.table.get_record(yo.records[yo.index]) 2065 yo.index += 1 2066 if not yo.table.use_deleted and record.has_been_deleted: 2067 continue 2068 return record 2069 else: 2070 raise StopIteration
2071 - def __init__(yo, table, key):
2072 yo._table = table 2073 yo._values = [] # ordered list of values 2074 yo._rec_by_val = [] # matching record numbers 2075 yo._records = {} # record numbers:values 2076 yo.__doc__ = key.__doc__ or 'unknown' 2077 yo.key = key 2078 for record in table: 2079 value = key(record) 2080 if value is DoNotIndex: 2081 continue 2082 rec_num = record.record_number 2083 if not isinstance(value, tuple): 2084 value = (value, ) 2085 vindex = bisect_right(yo._values, value) 2086 yo._values.insert(vindex, value) 2087 yo._rec_by_val.insert(vindex, rec_num) 2088 yo._records[rec_num] = value 2089 table._indexen.add(yo)
2090 - def __call__(yo, record):
2091 rec_num = record.record_number 2092 if rec_num in yo._records: 2093 value = yo._records[rec_num] 2094 vindex = bisect_left(yo._values, value) 2095 yo._values.pop(vindex) 2096 yo._rec_by_val.pop(vindex) 2097 value = yo.key(record) 2098 if value is DoNotIndex: 2099 return 2100 if not isinstance(value, tuple): 2101 value = (value, ) 2102 vindex = bisect_right(yo._values, value) 2103 yo._values.insert(vindex, value) 2104 yo._rec_by_val.insert(vindex, rec_num) 2105 yo._records[rec_num] = value
2106 - def __contains__(yo, match):
2107 if isinstance(match, _DbfRecord): 2108 if match.record_table is yo._table: 2109 return match.record_number in yo._records 2110 match = yo.key(match) 2111 elif not isinstance(match, tuple): 2112 match = (match, ) 2113 return yo.find(match) != -1
2114 - def __getitem__(yo, key):
2115 if isinstance(key, int): 2116 count = len(yo._values) 2117 if not -count <= key < count: 2118 raise IndexError("Record %d is not in list." % key) 2119 rec_num = yo._rec_by_val[key] 2120 return yo._table.get_record(rec_num) 2121 elif isinstance(key, slice): 2122 result = List() 2123 yo._table._dbflists.add(result) 2124 start, stop, step = key.start, key.stop, key.step 2125 if start is None: start = 0 2126 if stop is None: stop = len(yo._rec_by_val) 2127 if step is None: step = 1 2128 for loc in range(start, stop, step): 2129 record = yo._table.get_record(yo._rec_by_val[loc]) 2130 result._maybe_add(item=(yo._table, yo._rec_by_val[loc], result.key(record))) 2131 result._current = 0 if result else -1 2132 return result 2133 elif isinstance (key, (str, unicode, tuple, _DbfRecord)): 2134 if isinstance(key, _DbfRecord): 2135 key = yo.key(key) 2136 elif not isinstance(key, tuple): 2137 key = (key, ) 2138 loc = yo.find(key) 2139 if loc == -1: 2140 raise KeyError(key) 2141 return yo._table.get_record(yo._rec_by_val[loc]) 2142 else: 2143 raise TypeError('indices must be integers, match objects must by strings or tuples')
2144 - def __enter__(yo):
2145 return yo
2146 - def __exit__(yo, *exc_info):
2147 yo._table.close() 2148 yo._values[:] = [] 2149 yo._rec_by_val[:] = [] 2150 yo._records.clear() 2151 return False
2152 - def __iter__(yo):
2153 return yo.IndexIterator(yo._table, yo._rec_by_val)
2154 - def __len__(yo):
2155 return len(yo._records)
2156 - def _partial_match(yo, target, match):
2157 target = target[:len(match)] 2158 if isinstance(match[-1], (str, unicode)): 2159 target = list(target) 2160 target[-1] = target[-1][:len(match[-1])] 2161 target = tuple(target) 2162 return target == match
2163 - def _purge(yo, rec_num):
2164 value = yo._records.get(rec_num) 2165 if value is not None: 2166 vindex = bisect_left(yo._values, value) 2167 del yo._records[rec_num] 2168 yo._values.pop(vindex) 2169 yo._rec_by_val.pop(vindex)
2170 - def _search(yo, match, lo=0, hi=None):
2171 if hi is None: 2172 hi = len(yo._values) 2173 return bisect_left(yo._values, match, lo, hi)
2174 - def clear(yo):
2175 "removes all entries from index" 2176 yo._values[:] = [] 2177 yo._rec_by_val[:] = [] 2178 yo._records.clear()
2179 close = __exit__
2180 - def find(yo, match, partial=False):
2181 "returns numeric index of (partial) match, or -1" 2182 if isinstance(match, _DbfRecord): 2183 if match.record_number in yo._records: 2184 return yo._values.index(yo._records[match.record_number]) 2185 else: 2186 return -1 2187 if not isinstance(match, tuple): 2188 match = (match, ) 2189 loc = yo._search(match) 2190 while loc < len(yo._values) and yo._values[loc] == match: 2191 if not yo._table.use_deleted and yo._table.get_record(yo._rec_by_val[loc]).has_been_deleted: 2192 loc += 1 2193 continue 2194 return loc 2195 if partial: 2196 while loc < len(yo._values) and yo._partial_match(yo._values[loc], match): 2197 if not yo._table.use_deleted and yo._table.get_record(yo._rec_by_val[loc]).has_been_deleted: 2198 loc += 1 2199 continue 2200 return loc 2201 return -1
2202 - def find_index(yo, match):
2203 "returns numeric index of either (partial) match, or position of where match would be" 2204 if isinstance(match, _DbfRecord): 2205 if match.record_number in yo._records: 2206 return yo._values.index(yo._records[match.record_number]) 2207 else: 2208 match = yo.key(match) 2209 if not isinstance(match, tuple): 2210 match = (match, ) 2211 loc = yo._search(match) 2212 return loc
2213 - def index(yo, match, partial=False):
2214 "returns numeric index of (partial) match, or raises ValueError" 2215 loc = yo.find(match, partial) 2216 if loc == -1: 2217 if isinstance(match, _DbfRecord): 2218 raise ValueError("table <%s> record [%d] not in index <%s>" % (yo._table.filename, match.record_number, yo.__doc__)) 2219 else: 2220 raise ValueError("match criteria <%s> not in index" % (match, )) 2221 return loc
2222 - def reindex(yo):
2223 "reindexes all records" 2224 for record in yo._table: 2225 yo(record)
    def query(yo, sql_command=None, python=None):
        """recognized sql commands are SELECT, UPDATE, INSERT, DELETE, and RECALL"""
        if sql_command:
            return sql(yo, sql_command)     # was sql(yo, command) -- 'command' is not defined here
        elif python is None:
            raise DbfError("query: python parameter must be specified")
        possible = List(desc="%s --> %s" % (yo._table.filename, python))
        yo._table._dbflists.add(possible)
        query_result = {}
        select = 'query_result["keep"] = %s' % python
        g = {}
        for record in yo:
            query_result['keep'] = False
            g['query_result'] = query_result
            exec select in g, record
            if query_result['keep']:
                possible.append(record)
            record.write()
        return possible
2245 - def search(yo, match, partial=False):
2246 "returns dbf.List of all (partially) matching records" 2247 result = List() 2248 yo._table._dbflists.add(result) 2249 if not isinstance(match, tuple): 2250 match = (match, ) 2251 loc = yo._search(match) 2252 if loc == len(yo._values): 2253 return result 2254 while loc < len(yo._values) and yo._values[loc] == match: 2255 record = yo._table.get_record(yo._rec_by_val[loc]) 2256 if not yo._table.use_deleted and record.has_been_deleted: 2257 loc += 1 2258 continue 2259 result._maybe_add(item=(yo._table, yo._rec_by_val[loc], result.key(record))) 2260 loc += 1 2261 if partial: 2262 while loc < len(yo._values) and yo._partial_match(yo._values[loc], match): 2263 record = yo._table.get_record(yo._rec_by_val[loc]) 2264 if not yo._table.use_deleted and record.has_been_deleted: 2265 loc += 1 2266 continue 2267 result._maybe_add(item=(yo._table, yo._rec_by_val[loc], result.key(record))) 2268 loc += 1 2269 return result
2270 2271 csv.register_dialect('dbf', DbfCsv)
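# Illustrative sketch (not part of the original source): building an Index over a table with a
# key function, then using __getitem__ and search().  Returning DoNotIndex from the key leaves
# a record out of the index; the 'lastname' field is hypothetical.
def _example_index_usage(table):
    def by_name(record):
        "lastname"
        if not record['lastname']:
            return DoNotIndex               # skip blank names
        return record['lastname'].upper()
    name_index = Index(table, by_name)
    first = name_index[0]                           # records come back in key order
    hits = name_index.search(match='SMITH')         # dbf.List of exact matches
    near = name_index.search(match='SMI', partial=True)
    return first, hits, near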
2272 2273 -def sql(records, command):
2274 """recognized sql commands are SELECT, UPDATE, INSERT, DELETE, and RECALL""" 2275 table = records[0].record_table 2276 sql_command = command 2277 if ' for ' in command: 2278 command, condition = command.split(' for ') 2279 else: 2280 condition = 'True' 2281 name, command = command.split(' ', 1) 2282 name = name.lower() 2283 if name not in ('delete','insert','recall','select','update'): 2284 raise DbfError("unrecognized sql command: %s" % name.upper()) 2285 if name == 'insert' and condition != 'True': 2286 raise DbfError("FOR clause not allowed with INSERT") 2287 possible = List(desc=sql_command) 2288 tables = set() 2289 #yo._table._dbflists.add(possible) 2290 query_result = {} 2291 select = 'query_result["keep"] = %s' % condition 2292 g = {} 2293 if name == 'insert': 2294 #raise DbfError("INSERT not currently implemented") 2295 record = table.append() 2296 exec command in {}, record 2297 record.write() 2298 record.reindex() 2299 possible.append(record) 2300 else: 2301 for record in records: 2302 query_result['keep'] = False 2303 g['query_result'] = query_result 2304 exec select in g, record 2305 if query_result['keep']: 2306 possible.append(record) 2307 tables.add(record.record_table) 2308 if name == 'delete': 2309 record.delete_record() 2310 elif name == 'recall': 2311 record.undelete_record() 2312 elif name == 'select': 2313 pass 2314 elif name == 'update': 2315 exec command in g, record 2316 else: 2317 raise DbfError("unrecognized sql command: %s" % sql.upper) 2318 record.write() 2319 if name == 'select': 2320 fields = command.replace(' ','').split(',') 2321 field_sizes = dict([(field, (0, 0)) for field in fields]) 2322 for t in tables: 2323 for field in fields: 2324 field_sizes[field] = max(field_sizes[field], t.size(field)) 2325 field_specs = [] 2326 for field in fields: 2327 type = table.type(field) 2328 length, decimals = field_sizes[field] 2329 if type in table._decimal_fields: 2330 description = "%s %s(%d,%d)" % (field, type, length, decimals) 2331 elif type in table._fixed_fields: 2332 description = "%s %s" % (field, type) 2333 else: 2334 description = "%s %s(%d)" % (field, type, length) 2335 field_specs.append(description) 2336 select = table.new(filename=':%s:' % sql_command, field_specs=field_specs) 2337 for record in possible: 2338 select.append(record.scatter_fields(), drop=True) 2339 return select 2340 else: 2341 for list_table in tables: 2342 list_table._dbflists.add(possible) 2343 return possible
2344 -def _nop(value):
2345 "returns parameter unchanged" 2346 return value
2347 -def _normalize_tuples(tuples, length, filler):
2348 "ensures each tuple is the same length, using filler[-missing] for the gaps" 2349 final = [] 2350 for t in tuples: 2351 if len(t) < length: 2352 final.append( tuple([item for item in t] + filler[len(t)-length:]) ) 2353 else: 2354 final.append(t) 2355 return tuple(final)
2356 -def _codepage_lookup(cp):
2357 if cp not in code_pages: 2358 for code_page in sorted(code_pages.keys()): 2359 sd, ld = code_pages[code_page] 2360 if cp == sd or cp == ld: 2361 if sd is None: 2362 raise DbfError("Unsupported codepage: %s" % ld) 2363 cp = code_page 2364 break 2365 else: 2366 raise DbfError("Unsupported codepage: %s" % cp) 2367 sd, ld = code_pages[cp] 2368 return cp, sd, ld
2369 -def ascii(new_setting=None):
2370 "get/set return_ascii setting" 2371 global return_ascii 2372 if new_setting is None: 2373 return return_ascii 2374 else: 2375 return_ascii = new_setting
2376 -def codepage(cp=None):
2377 "get/set default codepage for any new tables" 2378 global default_codepage 2379 cp, sd, ld = _codepage_lookup(cp or default_codepage) 2380 default_codepage = sd 2381 return "%s (LDID: 0x%02x - %s)" % (sd, ord(cp), ld)
def encoding(cp=None):
    "get/set default encoding for non-unicode strings passed into a table"
    global input_decoding
    cp, sd, ld = _codepage_lookup(cp or input_decoding)
    input_decoding = sd         # was default_codepage -- this setter manages input_decoding
    return "%s (LDID: 0x%02x - %s)" % (sd, ord(cp), ld)
2388 -class _Db4Table(DbfTable):
2389 version = 'dBase IV w/memos (non-functional)' 2390 _versionabbv = 'db4' 2391 _fieldtypes = { 2392 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter}, 2393 'Y' : {'Type':'Currency', 'Retrieve':io.retrieveCurrency, 'Update':io.updateCurrency, 'Blank':Decimal(), 'Init':io.addVfpCurrency}, 2394 'B' : {'Type':'Double', 'Retrieve':io.retrieveDouble, 'Update':io.updateDouble, 'Blank':float, 'Init':io.addVfpDouble}, 2395 'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric}, 2396 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric}, 2397 'I' : {'Type':'Integer', 'Retrieve':io.retrieveInteger, 'Update':io.updateInteger, 'Blank':int, 'Init':io.addVfpInteger}, 2398 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical}, 2399 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate}, 2400 'T' : {'Type':'DateTime', 'Retrieve':io.retrieveVfpDateTime, 'Update':io.updateVfpDateTime, 'Blank':DateTime.now, 'Init':io.addVfpDateTime}, 2401 'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo}, 2402 'G' : {'Type':'General', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo}, 2403 'P' : {'Type':'Picture', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo}, 2404 '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} } 2405 _memoext = '.dbt' 2406 _memotypes = ('G','M','P') 2407 _memoClass = _VfpMemo 2408 _yesMemoMask = '\x8b' # 0011 0000 2409 _noMemoMask = '\x04' # 0011 0000 2410 _fixed_fields = ('B','D','G','I','L','M','P','T','Y') 2411 _variable_fields = ('C','F','N') 2412 _character_fields = ('C','M') # field representing character data 2413 _decimal_fields = ('F','N') 2414 _numeric_fields = ('B','F','I','N','Y') 2415 _supported_tables = ('\x04', '\x8b') 2416 _dbfTableHeader = ['\x00'] * 32 2417 _dbfTableHeader[0] = '\x8b' # version - Foxpro 6 0011 0000 2418 _dbfTableHeader[10] = '\x01' # record length -- one for delete flag 2419 _dbfTableHeader[29] = '\x03' # code page -- 437 US-MS DOS 2420 _dbfTableHeader = ''.join(_dbfTableHeader) 2421 _dbfTableHeaderExtra = '' 2422 _use_deleted = True
2423 - def _checkMemoIntegrity(yo):
2424 "dBase III specific" 2425 if yo._meta.header.version == '\x8b': 2426 try: 2427 yo._meta.memo = yo._memoClass(yo._meta) 2428 except: 2429 yo._meta.dfd.close() 2430 yo._meta.dfd = None 2431 raise 2432 if not yo._meta.ignorememos: 2433 for field in yo._meta.fields: 2434 if yo._meta[field]['type'] in yo._memotypes: 2435 if yo._meta.header.version != '\x8b': 2436 yo._meta.dfd.close() 2437 yo._meta.dfd = None 2438 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos") 2439 elif not os.path.exists(yo._meta.memoname): 2440 yo._meta.dfd.close() 2441 yo._meta.dfd = None 2442 raise DbfError("Table structure corrupt: memo fields exist without memo file") 2443 break
2444

dbf-0.88.16/dbf/html/dbf.old.tables.Index-class.html0000666000175100017510000005353711477216672020750 0ustar margamarga dbf.old.tables.Index
Package dbf :: Package old :: Module tables :: Class Index

Class Index

source code

object --+
         |
        Index

Nested Classes [hide private]
  IndexIterator
returns records using this index
Instance Methods [hide private]
 
__init__(yo, table, key)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code
 
__call__(yo, record) source code
 
__contains__(yo, match) source code
 
__getitem__(yo, key) source code
 
__enter__(yo) source code
 
__exit__(yo, *exc_info) source code
 
__iter__(yo) source code
 
__len__(yo) source code
 
_partial_match(yo, target, match) source code
 
_purge(yo, rec_num) source code
 
_search(yo, match, lo=0, hi=None) source code
 
clear(yo)
removes all entries from index
source code
 
close(yo, *exc_info) source code
 
find(yo, match, partial=False)
returns numeric index of (partial) match, or -1
source code
 
find_index(yo, match)
returns numeric index of either (partial) match, or position of where match would be
source code
 
index(yo, match, partial=False)
returns numeric index of (partial) match, or raises ValueError
source code
 
reindex(yo)
reindexes all records
source code
 
query(yo, sql_command=None, python=None)
recognized sql commands are SELECT, UPDATE, INSERT, DELETE, and RECALL
source code
 
search(yo, match, partial=False)
returns dbf.List of all (partially) matching records
source code

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __repr__, __setattr__, __str__

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

__init__(yo, table, key)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old.tables._Db3Memo-class.html0000666000175100017510000002722711477216672021263 0ustar margamarga dbf.old.tables._Db3Memo
Package dbf :: Package old :: Module tables :: Class _Db3Memo

Class _Db3Memo

source code

object --+    
         |    
  _DbfMemo --+
             |
            _Db3Memo

Provides access to memo fields as dictionaries must override _init, _get_memo, and _put_memo to store memo contents to disk

Instance Methods [hide private]
 
_init(yo)
dBase III specific
source code
 
_get_memo(yo, block)
retrieve memo contents from disk
source code
 
_put_memo(yo, data)
store memo contents to disk
source code

Inherited from _DbfMemo: __init__, get_memo, put_memo

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __repr__, __setattr__, __str__

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

_init(yo)

source code 

dBase III specific

Overrides: _DbfMemo._init

_get_memo(yo, block)

source code 

retrieve memo contents from disk

Overrides: _DbfMemo._get_memo
(inherited documentation)

_put_memo(yo, data)

source code 

store memo contents to disk

Overrides: _DbfMemo._put_memo
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old.tables.Index.IndexIterator-class.html0000666000175100017510000002261011477216672023514 0ustar margamarga dbf.old.tables.Index.IndexIterator
Package dbf :: Package old :: Module tables :: Class Index :: Class IndexIterator

Class IndexIterator

source code

object --+
         |
        Index.IndexIterator

returns records using this index

Instance Methods [hide private]
 
__init__(yo, table, records)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code
 
__iter__(yo) source code
 
next(yo) source code

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __repr__, __setattr__, __str__

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

__init__(yo, table, records)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old.tables.DbfCsv-class.html0000666000175100017510000001743311477216672021043 0ustar margamarga dbf.old.tables.DbfCsv
Package dbf :: Package old :: Module tables :: Class DbfCsv

Class DbfCsv

source code

csv.Dialect --+
              |
             DbfCsv

csv format for exporting tables

Instance Methods [hide private]

Inherited from csv.Dialect: __init__

Inherited from csv.Dialect (private): _validate

Class Variables [hide private]
  delimiter = ','
  doublequote = True
  escapechar = None
  lineterminator = '\n'
  quotechar = '"'
  skipinitialspace = True
  quoting = 2

Inherited from csv.Dialect (private): _name, _valid

dbf-0.88.16/dbf/html/dbf.exceptions.Eof-class.html0000666000175100017510000002361111477216672020532 0ustar margamarga dbf.exceptions.Eof
Package dbf :: Module exceptions :: Class Eof

Class Eof

source code

              object --+            
                       |            
exceptions.BaseException --+        
                           |        
        exceptions.Exception --+    
                               |    
                      DbfWarning --+
                                   |
              object --+           |
                       |           |
exceptions.BaseException --+       |
                           |       |
        exceptions.Exception --+   |
                               |   |
        exceptions.StopIteration --+
                                   |
                                  Eof

End of file reached

Instance Methods [hide private]
 
__init__(yo)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code

Inherited from exceptions.StopIteration: __new__

Inherited from exceptions.BaseException: __delattr__, __getattribute__, __getitem__, __getslice__, __reduce__, __repr__, __setattr__, __setstate__, __str__

Inherited from object: __hash__, __reduce_ex__

Class Variables [hide private]
  message = 'End of file reached'
exception message
Properties [hide private]

Inherited from exceptions.BaseException: args

Inherited from object: __class__

Method Details [hide private]

__init__(yo)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old.exceptions.Eof-class.html0000666000175100017510000002374311477216672021315 0ustar margamarga dbf.old.exceptions.Eof
Package dbf :: Package old :: Module exceptions :: Class Eof

Class Eof

source code

              object --+            
                       |            
exceptions.BaseException --+        
                           |        
        exceptions.Exception --+    
                               |    
                      DbfWarning --+
                                   |
              object --+           |
                       |           |
exceptions.BaseException --+       |
                           |       |
        exceptions.Exception --+   |
                               |   |
        exceptions.StopIteration --+
                                   |
                                  Eof

End of file reached

Instance Methods [hide private]
 
__init__(yo)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code

Inherited from exceptions.StopIteration: __new__

Inherited from exceptions.BaseException: __delattr__, __getattribute__, __getitem__, __getslice__, __reduce__, __repr__, __setattr__, __setstate__, __str__

Inherited from object: __hash__, __reduce_ex__

Class Variables [hide private]
  message = 'End of file reached'
exception message
Properties [hide private]

Inherited from exceptions.BaseException: args

Inherited from object: __class__

Method Details [hide private]

__init__(yo)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.tables.List-class.html0000666000175100017510000007452711477216672020041 0ustar margamarga dbf.tables.List
Package dbf :: Module tables :: Class List

Class List

source code

object --+
         |
        List

list of Dbf records, with set-like behavior

Instance Methods [hide private]
 
__init__(yo, new_records=None, desc=None, key=None, field_names=None)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code
 
__add__(yo, other) source code
 
__contains__(yo, record) source code
 
__delitem__(yo, key) source code
 
__getitem__(yo, key) source code
 
__iter__(yo) source code
 
__len__(yo) source code
 
__nonzero__(yo) source code
 
__radd__(yo, other) source code
 
__repr__(yo)
repr(x)
source code
 
__rsub__(yo, other) source code
 
__sub__(yo, other) source code
 
_maybe_add(yo, item) source code
 
_get_record(yo, table=None, rec_no=None, value=None) source code
 
_purge(yo, record, old_record_number, offset) source code
 
append(yo, new_record) source code
 
bottom(yo) source code
 
clear(yo) source code
 
current(yo) source code
 
extend(yo, new_records) source code
 
goto(yo, index_number) source code
 
index(yo, record, start=None, stop=None) source code
 
insert(yo, i, record) source code
 
key(yo, record)
table_name, record_number
source code
 
next(yo) source code
 
pop(yo, index=None) source code
 
prev(yo) source code
 
remove(yo, record) source code
 
reverse(yo) source code
 
top(yo) source code
 
sort(yo, key=None, reverse=False) source code

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __setattr__, __str__

Class Variables [hide private]
  _desc = ''
Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

__init__(yo, new_records=None, desc=None, key=None, field_names=None)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

__repr__(yo)
(Representation operator)

source code 

repr(x)

Overrides: object.__repr__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.tables.DbfTable._MetaData-class.html0000666000175100017510000002410311477216672022370 0ustar margamarga dbf.tables.DbfTable._MetaData
Package dbf :: Module tables :: Class DbfTable :: Class _MetaData

Class _MetaData

source code

object --+    
         |    
      dict --+
             |
            DbfTable._MetaData

Instance Methods [hide private]

Inherited from dict: __cmp__, __contains__, __delitem__, __eq__, __ge__, __getattribute__, __getitem__, __gt__, __hash__, __init__, __iter__, __le__, __len__, __lt__, __ne__, __new__, __repr__, __setitem__, clear, copy, fromkeys, get, has_key, items, iteritems, iterkeys, itervalues, keys, pop, popitem, setdefault, update, values

Inherited from object: __delattr__, __reduce__, __reduce_ex__, __setattr__, __str__

Class Variables [hide private]
  blankrecord = None
  fields = None
  filename = None
  dfd = None
  memoname = None
  newmemofile = False
  memo = None
  mfd = None
  ignorememos = False
  memofields = None
  current = -1
Properties [hide private]

Inherited from object: __class__

dbf-0.88.16/dbf/html/dbf.old.exceptions.DoNotIndex-class.html0000666000175100017510000002346111477216672022614 0ustar margamarga dbf.old.exceptions.DoNotIndex
Package dbf :: Package old :: Module exceptions :: Class DoNotIndex

Class DoNotIndex

source code

              object --+            
                       |            
exceptions.BaseException --+        
                           |        
        exceptions.Exception --+    
                               |    
                      DbfWarning --+
                                   |
                                  DoNotIndex

Returned by indexing functions to suppress a record from becoming part of the index

Instance Methods [hide private]
 
__init__(yo)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code

Inherited from exceptions.Exception: __new__

Inherited from exceptions.BaseException: __delattr__, __getattribute__, __getitem__, __getslice__, __reduce__, __repr__, __setattr__, __setstate__, __str__

Inherited from object: __hash__, __reduce_ex__

Class Variables [hide private]
  message = 'Not indexing record'
exception message
Properties [hide private]

Inherited from exceptions.BaseException: args

Inherited from object: __class__

Method Details [hide private]

__init__(yo)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old.exceptions-pysrc.html0000666000175100017510000010223211477216672020627 0ustar margamarga dbf.old.exceptions
Package dbf :: Package old :: Module exceptions
[hide private]

Source Code for Module dbf.old.exceptions

 1  "warnings and errors" 
 2   
3 -class DbfError(Exception):
4 "Fatal errors elicit this response." 5 pass
6 -class DataOverflow(DbfError):
7 "Data too large for field"
8 - def __init__(yo, message, data=None):
9 super(DataOverflow, yo).__init__(message) 10 yo.data = data
11 -class FieldMissing(KeyError, DbfError):
12 "Field does not exist in table"
13 - def __init__(yo, fieldname):
14 super(FieldMissing, yo).__init__('%s: no such field in table' % fieldname) 15 yo.data = fieldname
16 -class NonUnicode(DbfError):
17 "Data for table not in unicode"
18 - def __init__(yo, message=None):
19 super(NonUnicode, yo).__init__(message)
20 -class DbfWarning(Exception):
21 "Normal operations elicit this response"
22 -class Eof(DbfWarning, StopIteration):
23 "End of file reached" 24 message = 'End of file reached'
25 - def __init__(yo):
26 super(Eof, yo).__init__(yo.message)
27 -class Bof(DbfWarning, StopIteration):
28 "Beginning of file reached" 29 message = 'Beginning of file reached'
30 - def __init__(yo):
31 super(Bof, yo).__init__(yo.message)
32 -class DoNotIndex(DbfWarning):
33 "Returned by indexing functions to suppress a record from becoming part of the index" 34 message = 'Not indexing record'
35 - def __init__(yo):
36 super(DoNotIndex, yo).__init__(yo.message)
37
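A short illustrative sketch (not part of the module) of catching these classes in application code; table and record stand for an already-open table and one of its records:

    from dbf.exceptions import Eof, DataOverflow

    try:
        while True:
            record = table.next()          # advances past deleted records
    except Eof:
        pass                               # end of file reached -- normal termination

    try:
        record.age = 10 ** 20              # 'age' is a hypothetical N(3,0) field
    except DataOverflow, error:
        print "value too large for field:", error.message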

dbf-0.88.16/dbf/html/dbf.old.tables.property-class.html0000666000175100017510000003126211477216672021554 0ustar margamarga dbf.old.tables.property
Package dbf :: Package old :: Module tables :: Class property
[hide private]

Class property

source code

object --+
         |
        property

Emulate PyProperty_Type() in Objects/descrobject.c

Instance Methods [hide private]
 
__init__(self, fget=None, fset=None, fdel=None, doc=None)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code
 
__call__(self, func) source code
 
__get__(self, obj, objtype=None) source code
 
__set__(self, obj, value) source code
 
__delete__(self, obj) source code
 
setter(self, func) source code
 
deleter(self, func) source code

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __repr__, __setattr__, __str__

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

__init__(self, fget=None, fset=None, fdel=None, doc=None)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.exceptions.FieldMissing-class.html0000666000175100017510000002241711477216672022401 0ustar margamarga dbf.exceptions.FieldMissing
Package dbf :: Module exceptions :: Class FieldMissing
[hide private]

Class FieldMissing

source code

              object --+                    
                       |                    
exceptions.BaseException --+                
                           |                
        exceptions.Exception --+            
                               |            
        exceptions.StandardError --+        
                                   |        
              exceptions.LookupError --+    
                                       |    
                     exceptions.KeyError --+
                                           |
                      object --+           |
                               |           |
        exceptions.BaseException --+       |
                                   |       |
                exceptions.Exception --+   |
                                       |   |
                                DbfError --+
                                           |
                                          FieldMissing

Field does not exist in table
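A tiny hedged sketch of catching this exception; record stands for any open record object:

    from dbf.exceptions import FieldMissing

    try:
        print record.no_such_field
    except FieldMissing, error:
        print "bad field name:", error.data    # .data holds the offending name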

Instance Methods [hide private]
 
__init__(yo, fieldname)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code

Inherited from exceptions.KeyError: __new__, __str__

Inherited from exceptions.BaseException: __delattr__, __getattribute__, __getitem__, __getslice__, __reduce__, __repr__, __setattr__, __setstate__

Inherited from object: __hash__, __reduce_ex__

Properties [hide private]

Inherited from exceptions.BaseException: args, message

Inherited from object: __class__

Method Details [hide private]

__init__(yo, fieldname)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old.exceptions.Bof-class.html0000666000175100017510000002376511477216672021316 0ustar margamarga dbf.old.exceptions.Bof
Package dbf :: Package old :: Module exceptions :: Class Bof
[hide private]

Class Bof

source code

              object --+            
                       |            
exceptions.BaseException --+        
                           |        
        exceptions.Exception --+    
                               |    
                      DbfWarning --+
                                   |
              object --+           |
                       |           |
exceptions.BaseException --+       |
                           |       |
        exceptions.Exception --+   |
                               |   |
        exceptions.StopIteration --+
                                   |
                                  Bof

Beginning of file reached

Instance Methods [hide private]
 
__init__(yo)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code

Inherited from exceptions.StopIteration: __new__

Inherited from exceptions.BaseException: __delattr__, __getattribute__, __getitem__, __getslice__, __reduce__, __repr__, __setattr__, __setstate__, __str__

Inherited from object: __hash__, __reduce_ex__

Class Variables [hide private]
  message = 'Beginning of file reached'
exception message
Properties [hide private]

Inherited from exceptions.BaseException: args

Inherited from object: __class__

Method Details [hide private]

__init__(yo)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.dates.Date-class.html0000666000175100017510000006725411477216670017626 0ustar margamarga dbf.dates.Date
Package dbf :: Module dates :: Class Date
[hide private]

Class Date

source code

object --+
         |
        Date

adds null capable datetime.date constructs
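A brief illustrative sketch (not part of the original page) of the null-date behavior, using the methods listed below:

    from dbf.dates import Date

    hired = Date(2010, 12, 6)
    unknown = Date()                          # null date -- evaluates as False

    if not unknown:
        print "no value stored"
    print hired.ymd()                         # 20101206
    print Date.fromymd('20101206') == hired   # True
    print Date.min < hired < Date.max         # True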

Instance Methods [hide private]
 
__add__(yo, other) source code
 
__eq__(yo, other) source code
 
__getattr__(yo, name) source code
 
__ge__(yo, other) source code
 
__gt__(yo, other) source code
 
__hash__(yo)
hash(x)
source code
 
__le__(yo, other) source code
 
__lt__(yo, other) source code
 
__ne__(yo, other) source code
 
__nonzero__(yo) source code
 
__radd__(yo, other) source code
 
__rsub__(yo, other) source code
 
__repr__(yo)
repr(x)
source code
 
__str__(yo)
str(x)
source code
 
__sub__(yo, other) source code
 
date(yo) source code
 
strftime(yo, format) source code
 
ymd(yo) source code

Inherited from object: __delattr__, __getattribute__, __init__, __reduce__, __reduce_ex__, __setattr__

Class Methods [hide private]
 
fromordinal(cls, number) source code
 
fromtimestamp(cls, timestamp) source code
 
fromymd(cls, yyyymmdd) source code
 
today(cls) source code
Static Methods [hide private]
a new object with type S, a subtype of T
__new__(cls, year=None, month=0, day=0)
date should be either a datetime.date, a string in yyyymmdd format, or date/month/day should all be appropriate integers
source code
Class Variables [hide private]
  max = Date(9999, 12, 31)
  min = Date(1, 1, 1)
Properties [hide private]
  _date

Inherited from object: __class__

Method Details [hide private]

__new__(cls, year=None, month=0, day=0)
Static Method

source code 

date should be either a datetime.date, a string in yyyymmdd format, or date/month/day should all be appropriate integers

Returns: a new object with type S, a subtype of T
Overrides: object.__new__

__hash__(yo)
(Hashing function)

source code 

hash(x)

Overrides: object.__hash__
(inherited documentation)

__repr__(yo)
(Representation operator)

source code 

repr(x)

Overrides: object.__repr__
(inherited documentation)

__str__(yo)
(Informal representation operator)

source code 

str(x)

Overrides: object.__str__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.dates-module.html0000666000175100017510000001177111477216670017123 0ustar margamarga dbf.dates
Package dbf :: Module dates
[hide private]

Module dates

source code

wrappers around datetime objects to allow null values

Classes [hide private]
  Date
adds null capable datetime.date constructs
  DateTime
adds null capable datetime.datetime constructs
  Time
adds null capable datetime.time constructs

Imports: datetime, time


dbf-0.88.16/dbf/html/dbf.old.tables.FpTable-class.html0000666000175100017510000010737511477216672021216 0ustar margamarga dbf.old.tables.FpTable
Package dbf :: Package old :: Module tables :: Class FpTable
[hide private]

Class FpTable

source code

object --+    
         |    
  DbfTable --+
             |
            FpTable

Provides an interface for working with FoxPro 2 tables
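A hedged sketch of opening such a table through the top-level Table factory; the filename is hypothetical, and the reported version string is assumed from the class variables listed below:

    import dbf

    table = dbf.Table('customers.dbf', dbf_type='fp')   # hypothetical existing file
    print table.version                                  # expected: 'Foxpro'
    print table.field_names
    table.close()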

Nested Classes [hide private]
  _memoClass
Provides access to memo fields as dictionaries; must override _init, _get_memo, and _put_memo to store memo contents to disk

Inherited from DbfTable: DbfIterator

Inherited from DbfTable (private): _DbfLists, _Indexen, _MetaData, _Table, _TableHeader

Instance Methods [hide private]
 
_checkMemoIntegrity(yo)
dBase III specific
source code
 
_initializeFields(yo)
builds the FieldList of names, types, and descriptions
source code

Inherited from DbfTable: __contains__, __enter__, __exit__, __getattr__, __getitem__, __init__, __iter__, __len__, __nonzero__, __repr__, __str__, add_fields, append, bof, bottom, close, create_backup, create_index, current, delete_fields, eof, export, get_record, goto, is_decimal, is_memotype, new, next, open, pack, prev, query, reindex, rename_field, size, structure, top, type, zap

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __setattr__

Class Variables [hide private]
  _version = 'Foxpro'
  _versionabbv = 'fp'
  _fieldtypes = {'0': {'Blank': <type 'int'>, 'Init': None, 'Ret...
  _memoext = '.fpt'
  _memotypes = ('G', 'M', 'P')
  _yesMemoMask = '\xf5'
  _noMemoMask = '\x03'
  _fixed_fields = ('B', 'D', 'G', 'I', 'L', 'M', 'P', 'T', 'Y')
  _variable_fields = ('C', 'F', 'N')
  _character_fields = ('C', 'M')
  _decimal_fields = ('F', 'N')
  _numeric_fields = ('B', 'F', 'I', 'N', 'Y')
  _supported_tables = ('\x03', '\xf5')
  _dbfTableHeader = '0\x00\x00\x00\x00\x00\x00\x00(\x01\x01\x00\...
  _dbfTableHeaderExtra = '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x...
  _use_deleted = True

Inherited from DbfTable: codepage, field_count, field_names, filename, last_update, memoname, record_length, record_number, supported_tables, use_deleted, version

Inherited from DbfTable (private): _backed_up, _meta_only, _read_only

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

_checkMemoIntegrity(yo)

source code 

dBase III specific

Overrides: DbfTable._checkMemoIntegrity
(inherited documentation)

_initializeFields(yo)

source code 

builds the FieldList of names, types, and descriptions

Overrides: DbfTable._initializeFields

Class Variable Details [hide private]

_fieldtypes

Value:
{'0': {'Blank': <type 'int'>,
       'Init': None,
       'Retrieve': <function unsupportedType at 0x00ECA130>,
       'Type': '_NullFlags',
       'Update': <function unsupportedType at 0x00ECA130>},
 'C': {'Blank': <type 'str'>,
       'Init': <function addCharacter at 0x00ECA670>,
       'Retrieve': <function retrieveCharacter at 0x00ECA170>,
...

_dbfTableHeader

Value:
'0\x00\x00\x00\x00\x00\x00\x00(\x01\x01\x00\x00\x00\x00\x00\x00\x00\x0\
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00'

_dbfTableHeaderExtra

Value:
'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0\
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0\
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0\
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0\
...

dbf-0.88.16/dbf/html/dbf.tables.DbfTable._Indexen-class.html0000666000175100017510000002342311477216672022306 0ustar margamarga dbf.tables.DbfTable._Indexen
Package dbf :: Module tables :: Class DbfTable :: Class _Indexen
[hide private]

Class _Indexen

source code

object --+
         |
        DbfTable._Indexen

implements the weakref structure for separate indexes

Instance Methods [hide private]
 
__init__(yo)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
source code
 
__iter__(yo) source code
 
__len__(yo) source code
 
add(yo, new_list) source code

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __repr__, __setattr__, __str__

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

__init__(yo)
(Constructor)

source code 

x.__init__(...) initializes x; see x.__class__.__doc__ for signature

Overrides: object.__init__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old-module.html0000666000175100017510000005664711477216670016614 0ustar margamarga dbf.old
Package dbf :: Package old
[hide private]

Package old

source code

Copyright

  • Copyright: 2008-2009 Ad-Mail, Inc -- All rights reserved.
  • Author: Ethan Furman
  • Contact: ethanf@admailinc.com
  • Organization: Ad-Mail, Inc.
  • Version: 0.87.003 as of 03 Dec 2009

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

  • Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
  • Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
  • Neither the name of Ad-Mail, Inc nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY Ad-Mail, Inc ''AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Ad-Mail, Inc BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Summary

Python package for reading/writing dBase III and VFP 6 tables and memos

The entire table is read into memory, and all operations occur on the in-memory table, with data changes being written to disk as they occur.

Goals: programming style with databases

  • table = dbf.table('table name' [, fielddesc[, fielddesc[, ....]]])
    • fielddesc examples: name C(30); age N(3,0); wisdom M; marriage D
  • record = [ table.current() | table[int] | table.append() | table.[next|prev|top|bottom|goto]() ]
  • record.field | record['field'] accesses the field

NOTE: Of the VFP data types, auto-increment and null settings are not implemented.
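A minimal sketch of the programming style outlined above; the table name and field definitions are illustrative only:

    import dbf

    table = dbf.Table('people', 'name C(30); age N(3,0); wisdom M; marriage D',
                      dbf_type='db3')
    record = table.append()
    record.name = 'Ethan'
    record.age = 42
    print record.name, record['age']    # field access by attribute or by key
    table.close()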

Submodules [hide private]

Functions [hide private]
 
Table(filename, field_specs='', memo_size=128, ignore_memos=False, read_only=False, keep_memos=False, meta_only=False, dbf_type=None, codepage=None)
returns an open table of the correct dbf_type, or creates it if field_specs is given
source code
 
index(sequence)
returns integers 0 - len(sequence)
source code
 
guess_table_type(filename) source code
 
table_type(filename)
returns text representation of a table's dbf version
source code
 
add_fields(table, field_specs)
adds fields to an existing table
source code
 
delete_fields(table, field_names)
deletes fields from an existing table
source code
 
export(table, filename='', fields='', format='csv', header=True)
creates a csv or tab-delimited file from an existing table
source code
 
first_record(table)
prints the first record of a table
source code
 
from_csv(csvfile, to_disk=False, filename=None, field_names=None, extra_fields=None, dbf_type='db3', memo_size=64, min_field_size=1)
creates a Character table from a csv file. to_disk will create a table with the same name; filename will be used if provided; field_names default to f0, f1, f2, etc, unless specified (list); extra_fields can be used to add additional fields -- should be normal field specifiers (list)
source code
 
get_fields(table)
returns the list of field names of a table
source code
 
info(table)
prints table info
source code
 
rename_field(table, oldfield, newfield)
renames a field in a table
source code
 
structure(table, field=None)
returns the definition of a field (or all fields)
source code
 
hex_dump(records)
just what it says ;)
source code
Variables [hide private]
  version = (0, 88, 7)

Imports: os, csv, Date, DateTime, Time, DbfWarning, Bof, Eof, DbfError, DataOverflow, FieldMissing, DoNotIndex, DbfTable, Db3Table, VfpTable, FpTable, List, DbfCsv, sql, ascii, codepage, encoding, version_map


dbf-0.88.16/dbf/html/dbf.tables._VfpMemo-class.html0000666000175100017510000002702511477216672020625 0ustar margamarga dbf.tables._VfpMemo
Package dbf :: Module tables :: Class _VfpMemo
[hide private]

Class _VfpMemo

source code

object --+    
         |    
  _DbfMemo --+
             |
            _VfpMemo

Provides access to memo fields as dictionaries; must override _init, _get_memo, and _put_memo to store memo contents to disk

Instance Methods [hide private]
 
_init(yo)
Visual Foxpro 6 specific
source code
 
_get_memo(yo, block)
retrieve memo contents from disk
source code
 
_put_memo(yo, data)
store memo contents to disk
source code

Inherited from _DbfMemo: __init__, get_memo, put_memo

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __repr__, __setattr__, __str__

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

_init(yo)

source code 

Visual Foxpro 6 specific

Overrides: _DbfMemo._init

_get_memo(yo, block)

source code 

retrieve memo contents from disk

Overrides: _DbfMemo._get_memo
(inherited documentation)

_put_memo(yo, data)

source code 

store memo contents to disk

Overrides: _DbfMemo._put_memo
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old.dates.Time-class.html0000666000175100017510000005527111477216672020422 0ustar margamarga dbf.old.dates.Time
Package dbf :: Package old :: Module dates :: Class Time
[hide private]

Class Time

source code

object --+
         |
        Time

adds null capable datetime.time constructs
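A short illustrative sketch (not from the original page) of the null-time behavior:

    from dbf.dates import Time

    start = Time(9, 30)
    missing = Time()                      # null time -- evaluates as False

    print bool(start), bool(missing)      # True False
    print start < Time.max                # True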

Instance Methods [hide private]
 
__add__(yo, other) source code
 
__eq__(yo, other) source code
 
__getattr__(yo, name) source code
 
__ge__(yo, other) source code
 
__gt__(yo, other) source code
 
__hash__(yo)
hash(x)
source code
 
__le__(yo, other) source code
 
__lt__(yo, other) source code
 
__ne__(yo, other) source code
 
__nonzero__(yo) source code
 
__radd__(yo, other) source code
 
__rsub__(yo, other) source code
 
__repr__(yo)
repr(x)
source code
 
__str__(yo)
str(x)
source code
 
__sub__(yo, other) source code

Inherited from object: __delattr__, __getattribute__, __init__, __reduce__, __reduce_ex__, __setattr__

Static Methods [hide private]
a new object with type S, a subtype of T
__new__(cls, hour=None, minute=0, second=0, microsec=0)
hour may be a datetime.time
source code
Class Variables [hide private]
  max = Time(23, 59, 59, 999999)
  min = Time(0, 0, 0, 0)
Properties [hide private]
  _time

Inherited from object: __class__

Method Details [hide private]

__new__(cls, hour=None, minute=0, second=0, microsec=0)
Static Method

source code 

hour may be a datetime.time

Returns: a new object with type S, a subtype of T
Overrides: object.__new__

__hash__(yo)
(Hashing function)

source code 

hash(x)

Overrides: object.__hash__
(inherited documentation)

__repr__(yo)
(Representation operator)

source code 

repr(x)

Overrides: object.__repr__
(inherited documentation)

__str__(yo)
(Informal representation operator)

source code 

str(x)

Overrides: object.__str__
(inherited documentation)

dbf-0.88.16/dbf/html/dbf.old._io-pysrc.html0000666000175100017510000032224311477216674017224 0ustar margamarga dbf.old._io
Package dbf :: Package old :: Module _io
[hide private]

Source Code for Module dbf.old._io

  1  """Routines for saving, retrieving, and creating fields""" 
  2   
  3  import struct 
  4  from decimal import Decimal 
  5  from dbf.exceptions import DbfError, DataOverflow 
  6  from dbf.dates import Date, DateTime, Time 
  7   
  8   
  9  # Constants 
 10  VFPTIME = 1721425 
 11   
12 -def packShortInt(value, bigendian=False):
13 "Returns a two-bye integer from the value, or raises DbfError" 14 # 256 / 65,536 15 if value > 65535: 16 raise DateOverflow("Maximum Integer size exceeded. Possible: 65535. Attempted: %d" % value) 17 if bigendian: 18 return struct.pack('>H', value) 19 else: 20 return struct.pack('<H', value)
21 -def packLongInt(value, bigendian=False):
22 "Returns a four-bye integer from the value, or raises DbfError" 23 # 256 / 65,536 / 16,777,216 24 if value > 4294967295: 25 raise DateOverflow("Maximum Integer size exceeded. Possible: 4294967295. Attempted: %d" % value) 26 if bigendian: 27 return struct.pack('>L', value) 28 else: 29 return struct.pack('<L', value)
30 -def packDate(date):
31 "Returns a group of three bytes, in integer form, of the date" 32 return "%c%c%c" % (date.year-1900, date.month, date.day)
33 -def packStr(string):
34 "Returns an 11 byte, upper-cased, null padded string suitable for field names; raises DbfError if the string is bigger than 10 bytes" 35 if len(string) > 10: 36 raise DbfError("Maximum string size is ten characters -- %s has %d characters" % (string, len(string))) 37 return struct.pack('11s', string.upper())
38 -def unpackShortInt(bytes, bigendian=False):
39 "Returns the value in the two-byte integer passed in" 40 if bigendian: 41 return struct.unpack('>H', bytes)[0] 42 else: 43 return struct.unpack('<H', bytes)[0]
44 -def unpackLongInt(bytes, bigendian=False):
45 "Returns the value in the four-byte integer passed in" 46 if bigendian: 47 return int(struct.unpack('>L', bytes)[0]) 48 else: 49 return int(struct.unpack('<L', bytes)[0])
50 -def unpackDate(bytestr):
51 "Returns a Date() of the packed three-byte date passed in" 52 year, month, day = struct.unpack('<BBB', bytestr) 53 year += 1900 54 return Date(year, month, day)
55 -def unpackStr(chars):
56 "Returns a normal, lower-cased string from a null-padded byte string" 57 return struct.unpack('%ds' % len(chars), chars)[0].replace('\x00','').lower()
58 -def convertToBool(value):
59 """Returns boolean true or false; normal rules apply to non-string values; string values 60 must be 'y','t', 'yes', or 'true' (case insensitive) to be True""" 61 if type(value) == str: 62 return bool(value.lower() in ['t', 'y', 'true', 'yes']) 63 else: 64 return bool(value)
65 -def unsupportedType(something, field, memo=None):
66 "called if a data type is not supported for that style of table" 67 raise DbfError('field type is not supported.')
68 -def retrieveCharacter(bytes, fielddef={}, memo=None):
69 "Returns the string in bytes with trailing white space removed" 70 return bytes.tostring().rstrip()
71 -def updateCharacter(string, fielddef, memo=None):
72 "returns the string, truncating if string is longer than it's field" 73 if type(string) != str: 74 raise DbfError("incompatible type: %s" % type(string)) 75 return string.rstrip()
76 -def retrieveCurrency(bytes, fielddef={}, memo=None):
77 value = struct.unpack('<q', bytes)[0] 78 return Decimal("%de-4" % value)
79 -def updateCurrency(value, fielddef={}, memo=None):
80 currency = int(value * 10000) 81 if not -9223372036854775808 < currency < 9223372036854775808: 82 raise DataOverflow("value %s is out of bounds" % value) 83 return struct.pack('<q', currency)
84 -def retrieveDate(bytes, fielddef={}, memo=None):
85 "Returns the ascii coded date as a Date object" 86 return Date.fromymd(bytes.tostring())
87 -def updateDate(moment, fielddef={}, memo=None):
88 "returns the Date or datetime.date object ascii-encoded (yyyymmdd)" 89 if moment: 90 return "%04d%02d%02d" % moment.timetuple()[:3] 91 return ' '
92 -def retrieveDouble(bytes, fielddef={}, memo=None):
93 return struct.unpack('<d', bytes)[0]
94 -def updateDouble(value, fielddef={}, memo=None):
95 if not (type(value) in (int, long, float)): 96 raise DbfError("incompatible type: %s" % type(value)) 97 return struct.pack('<d', value)
98 -def retrieveInteger(bytes, fielddef={}, memo=None):
99 "Returns the binary number stored in bytes in little-endian format" 100 return struct.unpack('<i', bytes)[0]
101 -def updateInteger(value, fielddef={}, memo=None):
102 "returns value in little-endian binary format" 103 if not (type(value) in (int, long)): 104 raise DbfError("incompatible type: %s" % type(value)) 105 if not -2147483648 < value < 2147483647: 106 raise DataOverflow("Integer size exceeded. Possible: -2,147,483,648..+2,147,483,647. Attempted: %d" % value) 107 return struct.pack('<i', value)
108 -def retrieveLogical(bytes, fielddef={}, memo=None):
109 "Returns True if bytes is 't', 'T', 'y', or 'Y', None if '?', and False otherwise" 110 bytes = bytes.tostring() 111 if bytes == '?': 112 return None 113 return bytes in ['t','T','y','Y']
114 -def updateLogical(logical, fielddef={}, memo=None):
115 "Returs 'T' if logical is True, 'F' otherwise" 116 if type(logical) != bool: 117 logical = convertToBool(logical) 118 if type(logical) <> bool: 119 raise DbfError('Value %s is not logical.' % logical) 120 return logical and 'T' or 'F'
121 -def retrieveMemo(bytes, fielddef, memo):
122 "Returns the block of data from a memo file" 123 stringval = bytes.tostring() 124 if stringval.strip(): 125 block = int(stringval.strip()) 126 else: 127 block = 0 128 return memo.get_memo(block, fielddef)
129 -def updateMemo(string, fielddef, memo):
130 "Writes string as a memo, returns the block number it was saved into" 131 block = memo.put_memo(string) 132 if block == 0: 133 block = '' 134 return "%*s" % (fielddef['length'], block)
135 -def retrieveNumeric(bytes, fielddef, memo=None):
136 "Returns the number stored in bytes as integer if field spec for decimals is 0, float otherwise" 137 string = bytes.tostring() 138 if string[0:1] == '*': # value too big to store (Visual FoxPro idiocy) 139 return None 140 if not string.strip(): 141 string = '0' 142 if fielddef['decimals'] == 0: 143 return int(string) 144 else: 145 return float(string)
146 -def updateNumeric(value, fielddef, memo=None):
147 "returns value as ascii representation, rounding decimal portion as necessary" 148 if not (type(value) in (int, long, float)): 149 raise DbfError("incompatible type: %s" % type(value)) 150 decimalsize = fielddef['decimals'] 151 if decimalsize: 152 decimalsize += 1 153 maxintegersize = fielddef['length']-decimalsize 154 integersize = len("%.0f" % value) 155 if integersize > maxintegersize: 156 raise DataOverflow('Integer portion too big') 157 return "%*.*f" % (fielddef['length'], fielddef['decimals'], value)
158 -def retrieveVfpDateTime(bytes, fielddef={}, memo=None):
159 """returns the date/time stored in bytes; dates <= 01/01/1981 00:00:00 160 may not be accurate; BC dates are nulled.""" 161 # two four-byte integers store the date and time. 162 # millesecords are discarded from time 163 time = retrieveInteger(bytes[4:]) 164 microseconds = (time % 1000) * 1000 165 time = time // 1000 # int(round(time, -3)) // 1000 discard milliseconds 166 hours = time // 3600 167 mins = time % 3600 // 60 168 secs = time % 3600 % 60 169 time = Time(hours, mins, secs, microseconds) 170 possible = retrieveInteger(bytes[:4]) 171 possible -= VFPTIME 172 possible = max(0, possible) 173 date = Date.fromordinal(possible) 174 return DateTime.combine(date, time)
175 -def updateVfpDateTime(moment, fielddef={}, memo=None):
176 """sets the date/time stored in moment 177 moment must have fields year, month, day, hour, minute, second, microsecond""" 178 bytes = [0] * 8 179 hour = moment.hour 180 minute = moment.minute 181 second = moment.second 182 millisecond = moment.microsecond // 1000 # convert from millionths to thousandths 183 time = ((hour * 3600) + (minute * 60) + second) * 1000 + millisecond 184 bytes[4:] = updateInteger(time) 185 bytes[:4] = updateInteger(moment.toordinal() + VFPTIME) 186 return ''.join(bytes)
187 -def retrieveVfpMemo(bytes, fielddef, memo):
188 "Returns the block of data from a memo file" 189 block = struct.unpack('<i', bytes)[0] 190 return memo.get_memo(block, fielddef)
191 -def updateVfpMemo(string, fielddef, memo):
192 "Writes string as a memo, returns the block number it was saved into" 193 block = memo.put_memo(string) 194 return struct.pack('<i', block)
195 -def addCharacter(format):
196 if format[1] != '(' or format[-1] != ')': 197 raise DbfError("Format for Character field creation is C(n), not %s" % format) 198 length = int(format[2:-1]) 199 if not 0 < length < 255: 200 raise ValueError 201 decimals = 0 202 return length, decimals
203 -def addDate(format):
204 length = 8 205 decimals = 0 206 return length, decimals
207 -def addLogical(format):
208 length = 1 209 decimals = 0 210 return length, decimals
211 -def addMemo(format):
212 length = 10 213 decimals = 0 214 return length, decimals
215 -def addNumeric(format):
216 if format[1] != '(' or format[-1] != ')': 217 raise DbfError("Format for Numeric field creation is N(n,n), not %s" % format) 218 length, decimals = format[2:-1].split(',') 219 length = int(length) 220 decimals = int(decimals) 221 if not (0 < length < 18 and 0 <= decimals <= length - 2): 222 raise ValueError 223 return length, decimals
224 -def addVfpCurrency(format):
225 length = 8 226 decimals = 0 227 return length, decimals
228 -def addVfpDateTime(format):
229 length = 8 230 decimals = 8 231 return length, decimals
232 -def addVfpDouble(format):
233 length = 8 234 decimals = 0 235 return length, decimals
236 -def addVfpInteger(format):
237 length = 4 238 decimals = 0 239 return length, decimals
240 -def addVfpMemo(format):
241 length = 4 242 decimals = 0 243 return length, decimals
244 -def addVfpNumeric(format):
245 if format[1] != '(' or format[-1] != ')': 246 raise DbfError("Format for Numeric field creation is N(n,n), not %s" % format) 247 length, decimals = format[2:-1].split(',') 248 length = int(length) 249 decimals = int(decimals) 250 if not (0 < length < 21 and 0 <= decimals <= length - 2): 251 raise ValueError 252 return length, decimals
253
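A small round-trip sketch (illustrative only) using the packing helpers listed above; the same helpers are shipped in the top-level dbf._io module:

    from dbf import _io
    from dbf.dates import Date

    raw = _io.packShortInt(513)                 # two-byte little-endian string
    print _io.unpackShortInt(raw)               # 513

    packed = _io.packDate(Date(2010, 12, 6))    # three bytes: year-1900, month, day
    print _io.unpackDate(packed)                # 2010-12-06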

dbf-0.88.16/dbf/html/redirect.html0000666000175100017510000000567611477216676015624 0ustar margamargaEpydoc Redirect Page

Epydoc Auto-redirect page

When javascript is enabled, this page will redirect URLs of the form redirect.html#dotted.name to the documentation for the object with the given fully-qualified dotted name.

 

dbf-0.88.16/dbf/html/dbf.tables._Db4Table-class.html0000666000175100017510000007631211477216672020640 0ustar margamarga dbf.tables._Db4Table
Package dbf :: Module tables :: Class _Db4Table
[hide private]

Class _Db4Table

source code

object --+    
         |    
  DbfTable --+
             |
            _Db4Table

Nested Classes [hide private]
  _memoClass
Provides access to memo fields as dictionaries; must override _init, _get_memo, and _put_memo to store memo contents to disk

Inherited from DbfTable: DbfIterator

Inherited from DbfTable (private): _DbfLists, _Indexen, _MetaData, _Table, _TableHeader

Instance Methods [hide private]
 
_checkMemoIntegrity(yo)
dBase III specific
source code

Inherited from DbfTable: __contains__, __enter__, __exit__, __getattr__, __getitem__, __init__, __iter__, __len__, __nonzero__, __repr__, __str__, add_fields, append, bof, bottom, close, create_backup, create_index, current, delete_fields, eof, export, get_record, goto, is_decimal, is_memotype, new, next, open, pack, prev, query, reindex, rename_field, size, structure, top, type, zap

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __setattr__

Class Variables [hide private]
  version = 'dBase IV w/memos (non-functional)'
  _versionabbv = 'db4'
  _fieldtypes = {'0': {'Blank': <type 'int'>, 'Init': None, 'Ret...
  _memoext = '.dbt'
  _memotypes = ('G', 'M', 'P')
  _yesMemoMask = '\x8b'
  _noMemoMask = '\x04'
  _fixed_fields = ('B', 'D', 'G', 'I', 'L', 'M', 'P', 'T', 'Y')
  _variable_fields = ('C', 'F', 'N')
  _character_fields = ('C', 'M')
  _decimal_fields = ('F', 'N')
  _numeric_fields = ('B', 'F', 'I', 'N', 'Y')
  _supported_tables = ('\x04', '\x8b')
  _dbfTableHeader = '\x8b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0...
  _dbfTableHeaderExtra = ''
  _use_deleted = True

Inherited from DbfTable: backup, codepage, field_count, field_names, filename, last_update, memoname, record_length, record_number, supported_tables, use_deleted

Inherited from DbfTable (private): _meta_only, _read_only, _version

Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

_checkMemoIntegrity(yo)

source code 

dBase III specific

Overrides: DbfTable._checkMemoIntegrity

Class Variable Details [hide private]

_fieldtypes

Value:
{'0': {'Blank': <type 'int'>,
       'Init': None,
       'Retrieve': <function unsupportedType at 0x00ECA130>,
       'Type': '_NullFlags',
       'Update': <function unsupportedType at 0x00ECA130>},
 'B': {'Blank': <type 'float'>,
       'Init': <function addVfpDouble at 0x00ECA830>,
       'Retrieve': <function retrieveDouble at 0x00ECA2F0>,
...

_dbfTableHeader

Value:
'\x8b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00'

dbf-0.88.16/dbf/html/dbf.tables.DbfTable-class.html0000666000175100017510000020413311477216672020555 0ustar margamarga dbf.tables.DbfTable
Package dbf :: Module tables :: Class DbfTable
[hide private]

Class DbfTable

source code

object --+
         |
        DbfTable
Known Subclasses:

Provides a framework for dbf style tables.

Nested Classes [hide private]
  _memoClass
Provides access to memo fields as dictionaries; must override _init, _get_memo, and _put_memo to store memo contents to disk
  _DbfLists
implements the weakref structure for DbfLists
  _Indexen
implements the weakref structure for separate indexes
  _MetaData
  _TableHeader
  _Table
implements the weakref table for records
  DbfIterator
returns records using current index
Instance Methods [hide private]
 
_buildHeaderFields(yo)
constructs fieldblock for disk table
source code
 
_checkMemoIntegrity(yo)
dBase III specific
source code
 
_initializeFields(yo)
builds the FieldList of names, types, and descriptions from the disk file
source code
 
_fieldLayout(yo, i)
Returns field information Name Type(Length[,Decimals])
source code
 
_loadtable(yo)
loads the records from disk to memory
source code
 
_list_fields(yo, specs, sep=',') source code
 
_update_disk(yo, headeronly=False)
synchronizes the disk file with current data
source code
 
__contains__(yo, key) source code
 
__enter__(yo) source code
 
__exit__(yo, *exc_info) source code
 
__getattr__(yo, name) source code
 
__getitem__(yo, value) source code
 
__init__(yo, filename=':memory:', field_specs=None, memo_size=128, ignore_memos=False, read_only=False, keep_memos=False, meta_only=False, codepage=None)
open/create dbf file. filename should include path if needed; field_specs can be either a ;-delimited string or a list of strings; memo_size is always 512 for db3 memos; ignore_memos is useful if the memo file is missing or corrupt; read_only will load records into memory, then close the disk file; keep_memos will also load any memo fields into memory; meta_only will ignore all records, keeping only basic table information; codepage will override whatever is set in the table itself
source code
 
__iter__(yo) source code
 
__len__(yo) source code
 
__nonzero__(yo) source code
 
__repr__(yo)
repr(x)
source code
 
__str__(yo)
str(x)
source code
 
add_fields(yo, field_specs)
adds field(s) to the table layout; format is Name Type(Length,Decimals)[; Name Type(Length,Decimals)[...]]. A backup table is created with _backup appended to the name, then the current structure is modified
source code
 
append(yo, kamikaze='', drop=False, multiple=1)
adds <multiple> blank records, and fills fields with dict/tuple values if present
source code
 
bof(yo, _move=False)
moves record pointer to previous usable record; returns True if no more usable records
source code
 
bottom(yo, get_record=False)
sets record pointer to bottom of table; if get_record, seeks to and returns last (non-deleted) record. Raises DbfError if table is empty, Bof if all records are deleted and use_deleted is False
source code
 
close(yo, keep_table=False, keep_memos=False)
closes disk files; ensures table data is available if keep_table; ensures memo data is available if keep_memos
source code
 
create_backup(yo, new_name=None, overwrite=False)
creates a backup table -- ignored if memory table
source code
 
create_index(yo, key) source code
 
current(yo, index=False)
returns current logical record, or its index
source code
 
delete_fields(yo, doomed)
removes field(s) from the table; creates backup files with _backup appended to the file name, then modifies current structure
source code
 
eof(yo, _move=False)
moves record pointer to next usable record; returns True if no more usable records
source code
 
export(yo, records=None, filename=None, field_specs=None, format='csv', header=True)
writes the table using CSV or tab-delimited format, using the filename given if specified, otherwise the table name
source code
 
get_record(yo, recno)
returns record at physical_index[recno]
source code
 
goto(yo, criteria)
changes the record pointer to the first matching (non-deleted) record; criteria should be either a tuple of tuple(value, field, func) triples, or an integer record number to go to
source code
 
is_decimal(yo, name)
returns True if name is a variable-length field type
source code
 
is_memotype(yo, name)
returns True if name is a memo type field
source code
 
new(yo, filename, field_specs=None, codepage=None)
returns a new table of the same type
source code
 
next(yo)
set record pointer to next (non-deleted) record, and return it
source code
 
open(yo) source code
 
pack(yo, _pack=True)
physically removes all deleted records
source code
 
prev(yo)
set record pointer to previous (non-deleted) record, and return it
source code
 
query(yo, sql_command=None, python=None)
uses exec to perform queries on the table
source code
 
reindex(yo) source code
 
rename_field(yo, oldname, newname)
renames an existing field
source code
 
size(yo, field)
returns size of field as a tuple of (length, decimals)
source code
 
structure(yo, fields=None)
return list of fields suitable for creating same table layout
source code
 
top(yo, get_record=False)
sets record pointer to top of table; if get_record, seeks to and returns first (non-deleted) record. Raises DbfError if table is empty, Eof if all records are deleted and use_deleted is False
source code
 
type(yo, field)
returns type of field
source code
 
zap(yo, areyousure=False)
removes all records from table -- this cannot be undone! areyousure must be True, else error is raised
source code

Inherited from object: __delattr__, __getattribute__, __hash__, __new__, __reduce__, __reduce_ex__, __setattr__

Class Variables [hide private]
  _version = 'basic memory table'
  _versionabbv = 'dbf'
  _fieldtypes = {'D': {'Blank': <bound method type.today of <cla...
  _memoext = ''
  _memotypes = ('M')
  _yesMemoMask = ''
  _noMemoMask = ''
  _fixed_fields = ('M', 'D', 'L')
  _variable_fields = ()
  _character_fields = ('M')
  _decimal_fields = ()
  _numeric_fields = ()
  _dbfTableHeader = '\x00\x00\x00\x00\x00\x00\x00\x00!\x00\x01\x...
  _dbfTableHeaderExtra = ''
  _supported_tables = []
  _read_only = False
  _meta_only = False
  _use_deleted = True
  backup = False
  codepage = <dbf.tables.property object at 0x00EC7DD0>
  field_count = <dbf.tables.property object at 0x00EC7DF0>
  field_names = <dbf.tables.property object at 0x00EC7E10>
  filename = <dbf.tables.property object at 0x00EC7E30>
  last_update = <dbf.tables.property object at 0x00EC7E50>
  memoname = <dbf.tables.property object at 0x00EC7E70>
  record_length = <dbf.tables.property object at 0x00EC7E90>
  record_number = <dbf.tables.property object at 0x00EC7EB0>
  supported_tables = <dbf.tables.property object at 0x00EC7ED0>
  use_deleted = <dbf.tables.property object at 0x00EC7EF0>
  version = <dbf.tables.property object at 0x00EC7F10>
Properties [hide private]

Inherited from object: __class__

Method Details [hide private]

__init__(yo, filename=':memory:', field_specs=None, memo_size=128, ignore_memos=False, read_only=False, keep_memos=False, meta_only=False, codepage=None)
(Constructor)

source code 

open/create dbf file. filename should include path if needed; field_specs can be either a ;-delimited string or a list of strings; memo_size is always 512 for db3 memos; ignore_memos is useful if the memo file is missing or corrupt; read_only will load records into memory, then close the disk file; keep_memos will also load any memo fields into memory; meta_only will ignore all records, keeping only basic table information; codepage will override whatever is set in the table itself

Overrides: object.__init__
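A hedged sketch of the open/create call described above, using the documented ':memory:' filename so no disk files are touched; the field specs are illustrative:

    from dbf import Table

    table = Table(':memory:', 'name C(25)', dbf_type='db3')
    table.add_fields('hired D; notes M')
    print table.structure()              # field specs that recreate this layout
    table.rename_field('notes', 'comments')
    print table.field_names
    table.close()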

__repr__(yo)
(Representation operator)

source code 

repr(x)

Overrides: object.__repr__
(inherited documentation)

__str__(yo)
(Informal representation operator)

source code 

str(x)

Overrides: object.__str__
(inherited documentation)

structure(yo, fields=None)

source code 

return list of fields suitable for creating same table layout

Parameters:
  • fields - list of fields or None for all fields

Class Variable Details [hide private]

_fieldtypes

Value:
{'D': {'Type': 'Date', 'Init': io.addDate, 'Blank': Date.today, 'Retri\
eve': io.retrieveDate, 'Update': io.updateDate,}, 'L': {'Type': 'Logic\
al', 'Init': io.addLogical, 'Blank': bool, 'Retrieve': io.retrieveLogi\
cal, 'Update': io.updateLogical,}, 'M': {'Type': 'Memo', 'Init': io.ad\
dMemo, 'Blank': str, 'Retrieve': io.retrieveMemo, 'Update': io.updateM\
emo,}}

_dbfTableHeader

Value:
'\x00\x00\x00\x00\x00\x00\x00\x00!\x00\x01\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'

dbf-0.88.16/dbf/__init__.py0000666000175100017510000002263111477214720014254 0ustar margamarga""" Copyright ========= - Copyright: 2008-2009 Ad-Mail, Inc -- All rights reserved. - Author: Ethan Furman - Contact: ethan@stoneleaf.us - Organization: Ad-Mail, Inc. - Version: 0.88.016 as of 06 Dec 2010 Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of Ad-Mail, Inc nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY Ad-Mail, Inc ''AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Ad-Mail, Inc BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. B{I{Summary}} Python package for reading/writing dBase III and VFP 6 tables and memos The entire table is read into memory, and all operations occur on the in-memory table, with data changes being written to disk as they occur. Goals: programming style with databases - C{table = dbf.table('table name' [, fielddesc[, fielddesc[, ....]]])} - fielddesc examples: C{name C(30); age N(3,0); wisdom M; marriage D} - C{record = [ table.current() | table[int] | table.append() | table.[next|prev|top|bottom|goto]() ]} - C{record.field | record['field']} accesses the field NOTE: Of the VFP data types, auto-increment and null settings are not implemented. 
""" import os import csv from dbf.dates import Date, DateTime, Time from dbf.exceptions import DbfWarning, Bof, Eof, DbfError, DataOverflow, FieldMissing, DoNotIndex from dbf.tables import DbfTable, Db3Table, VfpTable, FpTable, List, DbfCsv from dbf.tables import sql, ascii, codepage, encoding, version_map version = (0, 88, 16) default_type = 'db3' # default format if none specified sql_user_functions = {} # user-defined sql functions __docformat__ = 'epytext' def Table(filename, field_specs='', memo_size=128, ignore_memos=False, \ read_only=False, keep_memos=False, meta_only=False, dbf_type=None, codepage=None): "returns an open table of the correct dbf_type, or creates it if field_specs is given" #- print "dbf.Table(%s)" % ', '.join(['%r' % arg for arg in (filename, field_specs, dbf_type, codepage)]) if field_specs and dbf_type is None: dbf_type = default_type if dbf_type is not None: dbf_type = dbf_type.lower() if dbf_type == 'db3': return Db3Table(filename, field_specs, memo_size, ignore_memos, read_only, keep_memos, meta_only, codepage) elif dbf_type == 'fp': return FpTable(filename, field_specs, memo_size, ignore_memos, read_only, keep_memos, meta_only, codepage) elif dbf_type == 'vfp': return VfpTable(filename, field_specs, memo_size, ignore_memos, read_only, keep_memos, meta_only, codepage) elif dbf_type == 'dbf': return DbfTable(filename, field_specs, memo_size, ignore_memos, read_only, keep_memos, meta_only, codepage) else: raise DbfError("Unknown table type: %s" % dbf_type) else: possibles = guess_table_type(filename) if len(possibles) == 1: return possibles[0][2](filename, field_specs, memo_size, ignore_memos, \ read_only, keep_memos, meta_only) else: for type, desc, cls in possibles: if type == default_type: return cls(filename, field_specs, memo_size, ignore_memos, \ read_only, keep_memos, meta_only) else: types = ', '.join(["%s" % item[1] for item in possibles]) abbrs = '[' + ' | '.join(["%s" % item[0] for item in possibles]) + ']' raise DbfError("Table could be any of %s. 
Please specify %s when opening" % (types, abbrs)) def index(sequence): "returns integers 0 - len(sequence)" for i in xrange(len(sequence)): yield i def guess_table_type(filename): reported = table_type(filename) possibles = [] version = reported[0] for tabletype in (Db3Table, FpTable, VfpTable): if version in tabletype._supported_tables: possibles.append((tabletype._versionabbv, tabletype._version, tabletype)) if not possibles: raise DbfError("Tables of type %s not supported" % str(reported)) return possibles def table_type(filename): "returns text representation of a table's dbf version" base, ext = os.path.splitext(filename) if ext == '': filename = base + '.dbf' if not os.path.exists(filename): raise DbfError('File %s not found' % filename) fd = open(filename) version = fd.read(1) fd.close() fd = None if not version in version_map: raise DbfError("Unknown dbf type: %s (%x)" % (version, ord(version))) return version, version_map[version] def add_fields(table, field_specs): "adds fields to an existing table" table = Table(table) try: table.add_fields(field_specs) finally: table.close() def delete_fields(table, field_names): "deletes fields from an existing table" table = Table(table) try: table.delete_fields(field_names) finally: table.close() def export(table, filename='', fields='', format='csv', header=True): "creates a csv or tab-delimited file from an existing table" if fields is None: fields = [] table = Table(table) try: table.export(filename=filename, field_specs=fields, format=format, header=header) finally: table.close() def first_record(table): "prints the first record of a table" table = Table(table) try: print str(table[0]) finally: table.close() def from_csv(csvfile, to_disk=False, filename=None, field_names=None, extra_fields=None, dbf_type='db3', memo_size=64, min_field_size=1): """creates a Character table from a csv file to_disk will create a table with the same name filename will be used if provided field_names default to f0, f1, f2, etc, unless specified (list) extra_fields can be used to add additional fields -- should be normal field specifiers (list)""" reader = csv.reader(open(csvfile)) if field_names: field_names = ['%s M' % fn for fn in field_names] else: field_names = ['f0 M'] mtable = Table(':memory:', [field_names[0]], dbf_type=dbf_type, memo_size=memo_size) fields_so_far = 1 for row in reader: while fields_so_far < len(row): if fields_so_far == len(field_names): field_names.append('f%d M' % fields_so_far) mtable.add_fields(field_names[fields_so_far]) fields_so_far += 1 mtable.append(tuple(row)) if filename: to_disk = True if not to_disk: if extra_fields: mtable.add_fields(extra_fields) else: if not filename: filename = os.path.splitext(csvfile)[0] length = [min_field_size] * len(field_names) for record in mtable: for i in index(record.field_names): length[i] = max(length[i], len(record[i])) fields = mtable.field_names fielddef = [] for i in index(length): if length[i] < 255: fielddef.append('%s C(%d)' % (fields[i], length[i])) else: fielddef.append('%s M' % (fields[i])) if extra_fields: fielddef.extend(extra_fields) csvtable = Table(filename, fielddef, dbf_type=dbf_type) for record in mtable: csvtable.append(record.scatter_fields()) return csvtable return mtable def get_fields(table): "returns the list of field names of a table" table = Table(table) return table.field_names def info(table): "prints table info" table = Table(table) print str(table) def rename_field(table, oldfield, newfield): "renames a field in a table" table = Table(table) try: 
table.rename_field(oldfield, newfield) finally: table.close() def structure(table, field=None): "returns the definition of a field (or all fields)" table = Table(table) return table.structure(field) def hex_dump(records): "just what it says ;)" for index,dummy in enumerate(records): chars = dummy._data print "%2d: " % index, for char in chars[1:]: print " %2x " % ord(char), print dbf-0.88.16/dbf/_io.py0000666000175100017510000002476611477157626013311 0ustar margamarga"""Routines for saving, retrieving, and creating fields""" import struct from decimal import Decimal from dbf.exceptions import DbfError, DataOverflow from dbf.dates import Date, DateTime, Time from math import floor # Constants VFPTIME = 1721425 def packShortInt(value, bigendian=False): "Returns a two-bye integer from the value, or raises DbfError" # 256 / 65,536 if value > 65535: raise DateOverflow("Maximum Integer size exceeded. Possible: 65535. Attempted: %d" % value) if bigendian: return struct.pack('>H', value) else: return struct.pack(' 4294967295: raise DateOverflow("Maximum Integer size exceeded. Possible: 4294967295. Attempted: %d" % value) if bigendian: return struct.pack('>L', value) else: return struct.pack(' 10: raise DbfError("Maximum string size is ten characters -- %s has %d characters" % (string, len(string))) return struct.pack('11s', string.upper()) def unpackShortInt(bytes, bigendian=False): "Returns the value in the two-byte integer passed in" if bigendian: return struct.unpack('>H', bytes)[0] else: return struct.unpack('L', bytes)[0]) else: return int(struct.unpack(' bool: raise DbfError('Value %s is not logical.' % logical) return logical and 'T' or 'F' def retrieveMemo(bytes, fielddef, memo): "Returns the block of data from a memo file" stringval = bytes.tostring() if stringval.strip(): block = int(stringval.strip()) else: block = 0 return memo.get_memo(block, fielddef) def updateMemo(string, fielddef, memo): "Writes string as a memo, returns the block number it was saved into" block = memo.put_memo(string) if block == 0: block = '' return "%*s" % (fielddef['length'], block) def retrieveNumeric(bytes, fielddef, memo=None): "Returns the number stored in bytes as integer if field spec for decimals is 0, float otherwise" string = bytes.tostring() if string[0:1] == '*': # value too big to store (Visual FoxPro idiocy) return None if not string.strip(): string = '0' if fielddef['decimals'] == 0: return int(string) else: return float(string) def updateNumeric(value, fielddef, memo=None): "returns value as ascii representation, rounding decimal portion as necessary" if not (type(value) in (int, long, float)): raise DbfError("incompatible type: %s" % type(value)) decimalsize = fielddef['decimals'] if decimalsize: decimalsize += 1 maxintegersize = fielddef['length']-decimalsize integersize = len("%.0f" % floor(value)) if integersize > maxintegersize: raise DataOverflow('Integer portion too big') return "%*.*f" % (fielddef['length'], fielddef['decimals'], value) def retrieveVfpDateTime(bytes, fielddef={}, memo=None): """returns the date/time stored in bytes; dates <= 01/01/1981 00:00:00 may not be accurate; BC dates are nulled.""" # two four-byte integers store the date and time. 
# millesecords are discarded from time time = retrieveInteger(bytes[4:]) microseconds = (time % 1000) * 1000 time = time // 1000 # int(round(time, -3)) // 1000 discard milliseconds hours = time // 3600 mins = time % 3600 // 60 secs = time % 3600 % 60 time = Time(hours, mins, secs, microseconds) possible = retrieveInteger(bytes[:4]) possible -= VFPTIME possible = max(0, possible) date = Date.fromordinal(possible) return DateTime.combine(date, time) def updateVfpDateTime(moment, fielddef={}, memo=None): """sets the date/time stored in moment moment must have fields year, month, day, hour, minute, second, microsecond""" bytes = [0] * 8 hour = moment.hour minute = moment.minute second = moment.second millisecond = moment.microsecond // 1000 # convert from millionths to thousandths time = ((hour * 3600) + (minute * 60) + second) * 1000 + millisecond bytes[4:] = updateInteger(time) bytes[:4] = updateInteger(moment.toordinal() + VFPTIME) return ''.join(bytes) def retrieveVfpMemo(bytes, fielddef, memo): "Returns the block of data from a memo file" block = struct.unpack(' unicode value = yo._layout.encoder(value)[0] # unicode => table ascii bytes = array('c', update(value, fielddef, yo._layout.memo)) size = fielddef['length'] if len(bytes) > size: raise DataOverflow("tried to store %d bytes in %d byte field" % (len(bytes), size)) blank = array('c', ' ' * size) start = fielddef['start'] end = start + size blank[:len(bytes)] = bytes[:] yo._data[start:end] = blank[:] yo._dirty = True def _update_disk(yo, location='', data=None): if not yo._layout.inmemory: if yo._recnum < 0: raise DbfError("Attempted to update record that has been packed") if location == '': location = yo._recnum * yo._layout.header.record_length + yo._layout.header.start if data is None: data = yo._data yo._layout.dfd.seek(location) yo._layout.dfd.write(data) yo._dirty = False for index in yo.record_table._indexen: index(yo) def __call__(yo, *specs): results = [] if not specs: specs = yo._layout.index specs = _normalize_tuples(tuples=specs, length=2, filler=[_nop]) for field, func in specs: results.append(func(yo[field])) return tuple(results) def __contains__(yo, key): return key in yo._layout.fields or key in ['record_number','delete_flag'] def __iter__(yo): return (yo[field] for field in yo._layout.fields) def __getattr__(yo, name): if name[0:2] == '__' and name[-2:] == '__': raise AttributeError, 'Method %s is not implemented.' 
% name elif name == 'record_number': return yo._recnum elif name == 'delete_flag': return yo._data[0] != ' ' elif not name in yo._layout.fields: raise FieldMissing(name) try: fielddef = yo._layout[name] value = yo._retrieveFieldValue(yo._data[fielddef['start']:fielddef['end']], fielddef) return value except DbfError, error: error.message = "field --%s-- is %s -> %s" % (name, yo._layout.fieldtypes[fielddef['type']]['Type'], error.message) raise def __getitem__(yo, item): if type(item) in (int, long): if not -yo._layout.header.field_count <= item < yo._layout.header.field_count: raise IndexError("Field offset %d is not in record" % item) return yo[yo._layout.fields[item]] elif type(item) == slice: sequence = [] for index in yo._layout.fields[item]: sequence.append(yo[index]) return sequence elif type(item) == str: return yo.__getattr__(item) else: raise TypeError("%s is not a field name" % item) def __len__(yo): return yo._layout.header.field_count def __new__(cls, recnum, layout, kamikaze='', _fromdisk=False): """record = ascii array of entire record; layout=record specification; memo = memo object for table""" record = object.__new__(cls) record._dirty = False record._recnum = recnum record._layout = layout if layout.blankrecord is None and not _fromdisk: record._createBlankRecord() record._data = layout.blankrecord if recnum == -1: # not a disk-backed record return record elif type(kamikaze) == array: record._data = kamikaze[:] elif type(kamikaze) == str: record._data = array('c', kamikaze) else: record._data = kamikaze._data[:] datalen = len(record._data) if datalen < layout.header.record_length: record._data.extend(layout.blankrecord[datalen:]) elif datalen > layout.header.record_length: record._data = record._data[:layout.header.record_length] if not _fromdisk and not layout.inmemory: record._update_disk() return record def __setattr__(yo, name, value): if name in yo.__slots__: object.__setattr__(yo, name, value) return elif not name in yo._layout.fields: raise FieldMissing(name) fielddef = yo._layout[name] try: yo._updateFieldValue(fielddef, value) except DbfError, error: error.message = "field --%s-- is %s -> %s" % (name, yo._layout.fieldtypes[fielddef['type']]['Type'], error.message) error.data = name raise def __setitem__(yo, name, value): if type(name) == str: yo.__setattr__(name, value) elif type(name) in (int, long): yo.__setattr__(yo._layout.fields[name], value) elif type(name) == slice: sequence = [] for field in yo._layout.fields[name]: sequence.append(field) if len(sequence) != len(value): raise DbfError("length of slices not equal") for field, val in zip(sequence, value): yo[field] = val else: raise TypeError("%s is not a field name" % name) def __str__(yo): result = [] for seq, field in enumerate(yo.field_names): result.append("%3d - %-10s: %s" % (seq, field, yo[field])) return '\n'.join(result) def __repr__(yo): return yo._data.tostring() def _createBlankRecord(yo): "creates a blank record data chunk" layout = yo._layout ondisk = layout.ondisk layout.ondisk = False yo._data = array('c', ' ' * layout.header.record_length) layout.memofields = [] for field in layout.fields: yo._updateFieldValue(layout[field], layout.fieldtypes[layout[field]['type']]['Blank']()) if layout[field]['type'] in layout.memotypes: layout.memofields.append(field) layout.blankrecord = yo._data[:] layout.ondisk = ondisk def delete_record(yo): "marks record as deleted" yo._data[0] = '*' yo._dirty = True return yo @property def field_names(yo): "fields in table/record" return yo._layout.fields[:] def 
gather_fields(yo, dictionary, drop=False): # dict, drop_missing=False): "saves a dictionary into a record's fields\nkeys with no matching field will raise a FieldMissing exception unless drop_missing = True" old_data = yo._data[:] try: for key in dictionary: if not key in yo.field_names: if drop: continue raise FieldMissing(key) yo.__setattr__(key, dictionary[key]) except: yo._data[:] = old_data raise return yo @property def has_been_deleted(yo): "marked for deletion?" return yo._data[0] == '*' def read_record(yo): "refresh record data from disk" size = yo._layout.header.record_length location = yo._recnum * size + yo._layout.header.start yo._layout.dfd.seek(location) yo._data[:] = yo._meta.dfd.read(size) yo._dirty = False return yo @property def record_number(yo): "physical record number" return yo._recnum @property def record_table(yo): table = yo._layout.table() if table is None: raise DbfError("table is no longer available") return table def check_index(yo): for dbfindex in yo._layout.table()._indexen: dbfindex(yo) def reset_record(yo, keep_fields=None): "blanks record" if keep_fields is None: keep_fields = [] keep = {} for field in keep_fields: keep[field] = yo[field] if yo._layout.blankrecord == None: yo._createBlankRecord() yo._data[:] = yo._layout.blankrecord[:] for field in keep_fields: yo[field] = keep[field] yo._dirty = True return yo def scatter_fields(yo, blank=False): "returns a dictionary of fieldnames and values which can be used with gather_fields(). if blank is True, values are empty." keys = yo._layout.fields if blank: values = [yo._layout.fieldtypes[yo._layout[key]['type']]['Blank']() for key in keys] else: values = [yo[field] for field in keys] return dict(zip(keys, values)) def undelete_record(yo): "marks record as active" yo._data[0] = ' ' yo._dirty = True return yo def write_record(yo, **kwargs): "write record data to disk" if kwargs: yo.gather_fields(kwargs) if yo._dirty: yo._update_disk() return 1 return 0 class _DbfMemo(object): """Provides access to memo fields as dictionaries must override _init, _get_memo, and _put_memo to store memo contents to disk""" def _init(yo): "initialize disk file usage" def _get_memo(yo, block): "retrieve memo contents from disk" def _put_memo(yo, data): "store memo contents to disk" def __init__(yo, meta): "" yo.meta = meta yo.memory = {} yo.nextmemo = 1 yo._init() yo.meta.newmemofile = False def get_memo(yo, block, field): "gets the memo in block" if yo.meta.ignorememos or not block: return '' if yo.meta.ondisk: return yo._get_memo(block) else: return yo.memory[block] def put_memo(yo, data): "stores data in memo file, returns block number" if yo.meta.ignorememos or data == '': return 0 if yo.meta.inmemory: thismemo = yo.nextmemo yo.nextmemo += 1 yo.memory[thismemo] = data else: thismemo = yo._put_memo(data) return thismemo class _Db3Memo(_DbfMemo): def _init(yo): "dBase III specific" yo.meta.memo_size= 512 yo.record_header_length = 2 if yo.meta.ondisk and not yo.meta.ignorememos: if yo.meta.newmemofile: yo.meta.mfd = open(yo.meta.memoname, 'w+b') yo.meta.mfd.write(io.packLongInt(1) + '\x00' * 508) else: try: yo.meta.mfd = open(yo.meta.memoname, 'r+b') yo.meta.mfd.seek(0) yo.nextmemo = io.unpackLongInt(yo.meta.mfd.read(4)) except: raise DbfError("memo file appears to be corrupt") def _get_memo(yo, block): block = int(block) yo.meta.mfd.seek(block * yo.meta.memo_size) eom = -1 data = '' while eom == -1: newdata = yo.meta.mfd.read(yo.meta.memo_size) if not newdata: return data data += newdata eom = data.find('\x1a\x1a') return 
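# Sketch of round-tripping a record through a plain dict with the
# scatter_fields()/gather_fields() methods above; 'record' and the 'paid'
# field are hypothetical carry-overs from the previous sketch.
data = record.scatter_fields()   # e.g. {'name': u'Ethan', 'paid': True, ...}
data['paid'] = False
record.gather_fields(data)       # unknown keys raise FieldMissing unless drop=True
record.write_record()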
data[:eom].rstrip() def _put_memo(yo, data): data = data.rstrip() length = len(data) + yo.record_header_length # room for two ^Z at end of memo blocks = length // yo.meta.memo_size if length % yo.meta.memo_size: blocks += 1 thismemo = yo.nextmemo yo.nextmemo = thismemo + blocks yo.meta.mfd.seek(0) yo.meta.mfd.write(io.packLongInt(yo.nextmemo)) yo.meta.mfd.seek(thismemo * yo.meta.memo_size) yo.meta.mfd.write(data) yo.meta.mfd.write('\x1a\x1a') double_check = yo._get_memo(thismemo) if len(double_check) != len(data): uhoh = open('dbf_memo_dump.err','wb') uhoh.write('thismemo: %d' % thismemo) uhoh.write('nextmemo: %d' % yo.nextmemo) uhoh.write('saved: %d bytes' % len(data)) uhoh.write(data) uhoh.write('retrieved: %d bytes' % len(double_check)) uhoh.write(double_check) uhoh.close() raise DbfError("unknown error: memo not saved") return thismemo class _VfpMemo(_DbfMemo): def _init(yo): "Visual Foxpro 6 specific" if yo.meta.ondisk and not yo.meta.ignorememos: yo.record_header_length = 8 if yo.meta.newmemofile: if yo.meta.memo_size == 0: yo.meta.memo_size = 1 elif 1 < yo.meta.memo_size < 33: yo.meta.memo_size *= 512 yo.meta.mfd = open(yo.meta.memoname, 'w+b') nextmemo = 512 // yo.meta.memo_size if nextmemo * yo.meta.memo_size < 512: nextmemo += 1 yo.nextmemo = nextmemo yo.meta.mfd.write(io.packLongInt(nextmemo, bigendian=True) + '\x00\x00' + \ io.packShortInt(yo.meta.memo_size, bigendian=True) + '\x00' * 504) else: try: yo.meta.mfd = open(yo.meta.memoname, 'r+b') yo.meta.mfd.seek(0) header = yo.meta.mfd.read(512) yo.nextmemo = io.unpackLongInt(header[:4], bigendian=True) yo.meta.memo_size = io.unpackShortInt(header[6:8], bigendian=True) except: raise DbfError("memo file appears to be corrupt") def _get_memo(yo, block): yo.meta.mfd.seek(block * yo.meta.memo_size) header = yo.meta.mfd.read(8) length = io.unpackLongInt(header[4:], bigendian=True) return yo.meta.mfd.read(length) def _put_memo(yo, data): data = data.rstrip() # no trailing whitespace yo.meta.mfd.seek(0) thismemo = io.unpackLongInt(yo.meta.mfd.read(4), bigendian=True) yo.meta.mfd.seek(0) length = len(data) + yo.record_header_length # room for two ^Z at end of memo blocks = length // yo.meta.memo_size if length % yo.meta.memo_size: blocks += 1 yo.meta.mfd.write(io.packLongInt(thismemo+blocks, bigendian=True)) yo.meta.mfd.seek(thismemo*yo.meta.memo_size) yo.meta.mfd.write('\x00\x00\x00\x01' + io.packLongInt(len(data), bigendian=True) + data) return thismemo # Public classes class DbfTable(object): """Provides a framework for dbf style tables.""" _version = 'basic memory table' _versionabbv = 'dbf' _fieldtypes = { 'D' : { 'Type':'Date', 'Init':io.addDate, 'Blank':Date.today, 'Retrieve':io.retrieveDate, 'Update':io.updateDate, }, 'L' : { 'Type':'Logical', 'Init':io.addLogical, 'Blank':bool, 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, }, 'M' : { 'Type':'Memo', 'Init':io.addMemo, 'Blank':str, 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, } } _memoext = '' _memotypes = tuple('M', ) _memoClass = _DbfMemo _yesMemoMask = '' _noMemoMask = '' _fixed_fields = ('M','D','L') # always same length in table _variable_fields = tuple() # variable length in table _character_fields = tuple('M', ) # field representing character data _decimal_fields = tuple() # text-based numeric fields _numeric_fields = tuple() # fields representing a number _dbfTableHeader = array('c', '\x00' * 32) _dbfTableHeader[0] = '\x00' # table type - none _dbfTableHeader[8:10] = array('c', io.packShortInt(33)) _dbfTableHeader[10] = '\x01' # record length -- one for 
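# Sketch of the .dbt block arithmetic used by _Db3Memo._put_memo above:
# memo text is stored in fixed 512-byte blocks and terminated by two ^Z
# bytes, so a memo occupies ceil((len(text) + 2) / 512) blocks.
def _memo_blocks_needed(text, block_size=512, terminator=2):
    length = len(text.rstrip()) + terminator
    blocks, partial = divmod(length, block_size)
    return blocks + (1 if partial else 0)

assert _memo_blocks_needed('x' * 510) == 1   # 510 + 2 fits exactly in one block
assert _memo_blocks_needed('x' * 511) == 2   # one more byte spills into a second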
delete flag _dbfTableHeader[29] = '\x00' # code page -- none, using plain ascii _dbfTableHeader = _dbfTableHeader.tostring() _dbfTableHeaderExtra = '' _supported_tables = [] _read_only = False _meta_only = False _use_deleted = True backup = False class _DbfLists(object): "implements the weakref structure for DbfLists" def __init__(yo): yo._lists = set() def __iter__(yo): yo._lists = set([s for s in yo._lists if s() is not None]) return (s() for s in yo._lists if s() is not None) def __len__(yo): yo._lists = set([s for s in yo._lists if s() is not None]) return len(yo._lists) def add(yo, new_list): yo._lists.add(weakref.ref(new_list)) yo._lists = set([s for s in yo._lists if s() is not None]) class _Indexen(object): "implements the weakref structure for seperate indexes" def __init__(yo): yo._indexen = set() def __iter__(yo): yo._indexen = set([s for s in yo._indexen if s() is not None]) return (s() for s in yo._indexen if s() is not None) def __len__(yo): yo._indexen = set([s for s in yo._indexen if s() is not None]) return len(yo._indexen) def add(yo, new_list): yo._indexen.add(weakref.ref(new_list)) yo._indexen = set([s for s in yo._indexen if s() is not None]) class _MetaData(dict): blankrecord = None fields = None filename = None dfd = None memoname = None newmemofile = False memo = None mfd = None ignorememos = False memofields = None current = -1 class _TableHeader(object): def __init__(yo, data): if len(data) != 32: raise DbfError('table header should be 32 bytes, but is %d bytes' % len(data)) yo._data = array('c', data + '\x0d') def codepage(yo, cp=None): "get/set code page of table" if cp is None: return yo._data[29] else: cp, sd, ld = _codepage_lookup(cp) yo._data[29] = cp return cp @property def data(yo): "main data structure" date = io.packDate(Date.today()) yo._data[1:4] = array('c', date) return yo._data.tostring() @data.setter def data(yo, bytes): if len(bytes) < 32: raise DbfError("length for data of %d is less than 32" % len(bytes)) yo._data[:] = array('c', bytes) @property def extra(yo): "extra dbf info (located after headers, before data records)" fieldblock = yo._data[32:] for i in range(len(fieldblock)//32+1): cr = i * 32 if fieldblock[cr] == '\x0d': break else: raise DbfError("corrupt field structure") cr += 33 # skip past CR return yo._data[cr:].tostring() @extra.setter def extra(yo, data): fieldblock = yo._data[32:] for i in range(len(fieldblock)//32+1): cr = i * 32 if fieldblock[cr] == '\x0d': break else: raise DbfError("corrupt field structure") cr += 33 # skip past CR yo._data[cr:] = array('c', data) # extra yo._data[8:10] = array('c', io.packShortInt(len(yo._data))) # start @property def field_count(yo): "number of fields (read-only)" fieldblock = yo._data[32:] for i in range(len(fieldblock)//32+1): cr = i * 32 if fieldblock[cr] == '\x0d': break else: raise DbfError("corrupt field structure") return len(fieldblock[:cr]) // 32 @property def fields(yo): "field block structure" fieldblock = yo._data[32:] for i in range(len(fieldblock)//32+1): cr = i * 32 if fieldblock[cr] == '\x0d': break else: raise DbfError("corrupt field structure") return fieldblock[:cr].tostring() @fields.setter def fields(yo, block): fieldblock = yo._data[32:] for i in range(len(fieldblock)//32+1): cr = i * 32 if fieldblock[cr] == '\x0d': break else: raise DbfError("corrupt field structure") cr += 32 # convert to indexing main structure fieldlen = len(block) if fieldlen % 32 != 0: raise DbfError("fields structure corrupt: %d is not a multiple of 32" % fieldlen) yo._data[32:cr] = array('c', 
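# Sketch of the fixed offsets the _TableHeader properties above read from
# the raw 32-byte header.  The struct formats and the years-since-1900 note
# are my reading of the code, not taken verbatim from io.pack*/unpack*.
import struct as _struct

def _describe_header(raw32):
    assert len(raw32) == 32
    version = ord(raw32[0])
    yy, mm, dd = [ord(c) for c in raw32[1:4]]              # last update (year usually stored as year - 1900)
    record_count = _struct.unpack('<L', raw32[4:8])[0]
    start = _struct.unpack('<H', raw32[8:10])[0]           # offset of the first data record
    record_length = _struct.unpack('<H', raw32[10:12])[0]  # includes the 1-byte delete flag
    codepage = ord(raw32[29])
    return version, (yy, mm, dd), record_count, start, record_length, codepage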
block) # fields yo._data[8:10] = array('c', io.packShortInt(len(yo._data))) # start fieldlen = fieldlen // 32 recordlen = 1 # deleted flag for i in range(fieldlen): recordlen += ord(block[i*32+16]) yo._data[10:12] = array('c', io.packShortInt(recordlen)) @property def record_count(yo): "number of records (maximum 16,777,215)" return io.unpackLongInt(yo._data[4:8].tostring()) @record_count.setter def record_count(yo, count): yo._data[4:8] = array('c', io.packLongInt(count)) @property def record_length(yo): "length of a record (read_only) (max of 65,535)" return io.unpackShortInt(yo._data[10:12].tostring()) @property def start(yo): "starting position of first record in file (must be within first 64K)" return io.unpackShortInt(yo._data[8:10].tostring()) @start.setter def start(yo, pos): yo._data[8:10] = array('c', io.packShortInt(pos)) @property def update(yo): "date of last table modification (read-only)" return io.unpackDate(yo._data[1:4].tostring()) @property def version(yo): "dbf version" return yo._data[0] @version.setter def version(yo, ver): yo._data[0] = ver class _Table(object): "implements the weakref table for records" def __init__(yo, count, meta): yo._meta = meta yo._weakref_list = [weakref.ref(lambda x: None)] * count def __getitem__(yo, index): maybe = yo._weakref_list[index]() if maybe is None: if index < 0: index += yo._meta.header.record_count size = yo._meta.header.record_length location = index * size + yo._meta.header.start yo._meta.dfd.seek(location) if yo._meta.dfd.tell() != location: raise ValueError("unable to seek to offset %d in file" % location) bytes = yo._meta.dfd.read(size) if not bytes: raise ValueError("unable to read record data from %s at location %d" % (yo._meta.filename, location)) maybe = _DbfRecord(recnum=index, layout=yo._meta, kamikaze=bytes, _fromdisk=True) yo._weakref_list[index] = weakref.ref(maybe) return maybe def append(yo, record): yo._weakref_list.append(weakref.ref(record)) def clear(yo): yo._weakref_list[:] = [] class DbfIterator(object): "returns records using current index" def __init__(yo, table): yo._table = table yo._index = -1 yo._more_records = True def __iter__(yo): return yo def next(yo): while yo._more_records: yo._index += 1 if yo._index >= len(yo._table): yo._more_records = False continue record = yo._table[yo._index] if not yo._table.use_deleted and record.has_been_deleted: continue return record else: raise StopIteration def _buildHeaderFields(yo): "constructs fieldblock for disk table" fieldblock = array('c', '') memo = False yo._meta.header.version = chr(ord(yo._meta.header.version) & ord(yo._noMemoMask)) for field in yo._meta.fields: if yo._meta.fields.count(field) > 1: raise DbfError("corrupted field structure (noticed in _buildHeaderFields)") fielddef = array('c', '\x00' * 32) fielddef[:11] = array('c', io.packStr(field)) fielddef[11] = yo._meta[field]['type'] fielddef[12:16] = array('c', io.packLongInt(yo._meta[field]['start'])) fielddef[16] = chr(yo._meta[field]['length']) fielddef[17] = chr(yo._meta[field]['decimals']) fielddef[18] = chr(yo._meta[field]['flags']) fieldblock.extend(fielddef) if yo._meta[field]['type'] in yo._meta.memotypes: memo = True yo._meta.header.fields = fieldblock.tostring() if memo: yo._meta.header.version = chr(ord(yo._meta.header.version) | ord(yo._yesMemoMask)) if yo._meta.memo is None: yo._meta.memo = yo._memoClass(yo._meta) def _checkMemoIntegrity(yo): "dBase III specific" if yo._meta.header.version == '\x83': try: yo._meta.memo = yo._memoClass(yo._meta) except: yo._meta.dfd.close() 
yo._meta.dfd = None raise if not yo._meta.ignorememos: for field in yo._meta.fields: if yo._meta[field]['type'] in yo._memotypes: if yo._meta.header.version != '\x83': yo._meta.dfd.close() yo._meta.dfd = None raise DbfError("Table structure corrupt: memo fields exist, header declares no memos") elif not os.path.exists(yo._meta.memoname): yo._meta.dfd.close() yo._meta.dfd = None raise DbfError("Table structure corrupt: memo fields exist without memo file") break def _initializeFields(yo): "builds the FieldList of names, types, and descriptions from the disk file" yo._meta.fields[:] = [] offset = 1 fieldsdef = yo._meta.header.fields if len(fieldsdef) % 32 != 0: raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef)) if len(fieldsdef) // 32 != yo.field_count: raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32)) for i in range(yo.field_count): fieldblock = fieldsdef[i*32:(i+1)*32] name = io.unpackStr(fieldblock[:11]) type = fieldblock[11] if not type in yo._meta.fieldtypes: raise DbfError("Unknown field type: %s" % type) start = offset length = ord(fieldblock[16]) offset += length end = start + length decimals = ord(fieldblock[17]) flags = ord(fieldblock[18]) if name in yo._meta.fields: raise DbfError('Duplicate field name found: %s' % name) yo._meta.fields.append(name) yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags} def _fieldLayout(yo, i): "Returns field information Name Type(Length[,Decimals])" name = yo._meta.fields[i] type = yo._meta[name]['type'] length = yo._meta[name]['length'] decimals = yo._meta[name]['decimals'] if type in yo._decimal_fields: description = "%s %s(%d,%d)" % (name, type, length, decimals) elif type in yo._fixed_fields: description = "%s %s" % (name, type) else: description = "%s %s(%d)" % (name, type, length) return description def _loadtable(yo): "loads the records from disk to memory" if yo._meta_only: raise DbfError("%s has been closed, records are unavailable" % yo.filename) dfd = yo._meta.dfd header = yo._meta.header dfd.seek(header.start) allrecords = dfd.read() # kludge to get around mysterious errno 0 problems dfd.seek(0) length = header.record_length for i in range(header.record_count): record_data = allrecords[length*i:length*i+length] yo._table.append(_DbfRecord(i, yo._meta, allrecords[length*i:length*i+length], _fromdisk=True)) dfd.seek(0) def _list_fields(yo, specs, sep=','): if specs is None: specs = yo.field_names elif isinstance(specs, str): specs = specs.split(sep) else: specs = list(specs) specs = [s.strip() for s in specs] return specs def _update_disk(yo, headeronly=False): "synchronizes the disk file with current data" if yo._meta.inmemory: return fd = yo._meta.dfd fd.seek(0) fd.write(yo._meta.header.data) if not headeronly: for record in yo._table: record._update_disk() fd.flush() fd.truncate(yo._meta.header.start + yo._meta.header.record_count * yo._meta.header.record_length) if 'db3' in yo._versionabbv: fd.seek(0, os.SEEK_END) fd.write('\x1a') # required for dBase III fd.flush() fd.truncate(yo._meta.header.start + yo._meta.header.record_count * yo._meta.header.record_length + 1) def __contains__(yo, key): return key in yo.field_names def __enter__(yo): return yo def __exit__(yo, *exc_info): yo.close() def __getattr__(yo, name): if name in ('_table'): if yo._meta.ondisk: yo._table = yo._Table(len(yo), yo._meta) else: yo._table = [] yo._loadtable() return object.__getattribute__(yo, name) def 
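# Sketch of decoding a single 32-byte field descriptor the way
# _initializeFields above does; splitting on the NUL byte stands in for
# io.unpackStr, which is not reproduced here.
def _describe_field(block32):
    name = block32[:11].split('\x00', 1)[0]   # field name, max 10 chars, NUL padded
    type = block32[11]                        # 'C', 'N', 'D', 'L', 'M', ...
    length = ord(block32[16])
    decimals = ord(block32[17])
    flags = ord(block32[18])
    return name, type, length, decimals, flags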
__getitem__(yo, value): if type(value) == int: if not -yo._meta.header.record_count <= value < yo._meta.header.record_count: raise IndexError("Record %d is not in table." % value) return yo._table[value] elif type(value) == slice: sequence = List(desc='%s --> %s' % (yo.filename, value), field_names=yo.field_names) yo._dbflists.add(sequence) for index in range(len(yo))[value]: record = yo._table[index] if yo.use_deleted is True or not record.has_been_deleted: sequence.append(record) return sequence else: raise TypeError('type <%s> not valid for indexing' % type(value)) def __init__(yo, filename=':memory:', field_specs=None, memo_size=128, ignore_memos=False, read_only=False, keep_memos=False, meta_only=False, codepage=None): """open/create dbf file filename should include path if needed field_specs can be either a ;-delimited string or a list of strings memo_size is always 512 for db3 memos ignore_memos is useful if the memo file is missing or corrupt read_only will load records into memory, then close the disk file keep_memos will also load any memo fields into memory meta_only will ignore all records, keeping only basic table information codepage will override whatever is set in the table itself""" if filename[0] == filename[-1] == ':': if field_specs is None: raise DbfError("field list must be specified for memory tables") elif type(yo) is DbfTable: raise DbfError("only memory tables supported") yo._dbflists = yo._DbfLists() yo._indexen = yo._Indexen() yo._meta = meta = yo._MetaData() meta.table = weakref.ref(yo) meta.filename = filename meta.fields = [] meta.fieldtypes = yo._fieldtypes meta.fixed_fields = yo._fixed_fields meta.variable_fields = yo._variable_fields meta.character_fields = yo._character_fields meta.decimal_fields = yo._decimal_fields meta.numeric_fields = yo._numeric_fields meta.memotypes = yo._memotypes meta.ignorememos = ignore_memos meta.memo_size = memo_size meta.input_decoder = codecs.getdecoder(input_decoding) # from ascii to unicode meta.output_encoder = codecs.getencoder(input_decoding) # and back to ascii meta.return_ascii = return_ascii meta.header = header = yo._TableHeader(yo._dbfTableHeader) header.extra = yo._dbfTableHeaderExtra header.data #force update of date if filename[0] == filename[-1] == ':': yo._table = [] meta.ondisk = False meta.inmemory = True meta.memoname = filename else: base, ext = os.path.splitext(filename) if ext == '': meta.filename = base + '.dbf' meta.memoname = base + yo._memoext meta.ondisk = True meta.inmemory = False if field_specs: if meta.ondisk: meta.dfd = open(meta.filename, 'w+b') meta.newmemofile = True yo.add_fields(field_specs) header.codepage(codepage or default_codepage) cp, sd, ld = _codepage_lookup(meta.header.codepage()) meta.decoder = codecs.getdecoder(sd) meta.encoder = codecs.getencoder(sd) return try: dfd = meta.dfd = open(meta.filename, 'r+b') except IOError, e: raise DbfError(str(e)) dfd.seek(0) meta.header = header = yo._TableHeader(dfd.read(32)) if not header.version in yo._supported_tables: dfd.close() dfd = None raise DbfError("Unsupported dbf type: %s [%x]" % (version_map.get(meta.header.version, 'Unknown: %s' % meta.header.version), ord(meta.header.version))) cp, sd, ld = _codepage_lookup(meta.header.codepage()) yo._meta.decoder = codecs.getdecoder(sd) yo._meta.encoder = codecs.getencoder(sd) fieldblock = dfd.read(header.start - 32) for i in range(len(fieldblock)//32+1): fieldend = i * 32 if fieldblock[fieldend] == '\x0d': break else: raise DbfError("corrupt field structure in header") if 
len(fieldblock[:fieldend]) % 32 != 0: raise DbfError("corrupt field structure in header") header.fields = fieldblock[:fieldend] header.extra = fieldblock[fieldend+1:] # skip trailing \r yo._initializeFields() yo._checkMemoIntegrity() meta.current = -1 if len(yo) > 0: meta.current = 0 dfd.seek(0) if meta_only: yo.close(keep_table=False, keep_memos=False) elif read_only: yo.close(keep_table=True, keep_memos=keep_memos) if codepage is not None: cp, sd, ld = _codepage_lookup(codepage) yo._meta.decoder = codecs.getdecoder(sd) yo._meta.encoder = codecs.getencoder(sd) def __iter__(yo): return yo.DbfIterator(yo) def __len__(yo): return yo._meta.header.record_count def __nonzero__(yo): return yo._meta.header.record_count != 0 def __repr__(yo): if yo._read_only: return __name__ + ".Table('%s', read_only=True)" % yo._meta.filename elif yo._meta_only: return __name__ + ".Table('%s', meta_only=True)" % yo._meta.filename else: return __name__ + ".Table('%s')" % yo._meta.filename def __str__(yo): if yo._read_only: status = "read-only" elif yo._meta_only: status = "meta-only" else: status = "read/write" str = """ Table: %s Type: %s Codepage: %s Status: %s Last updated: %s Record count: %d Field count: %d Record length: %d """ % (yo.filename, version_map.get(yo._meta.header.version, 'unknown - ' + hex(ord(yo._meta.header.version))), yo.codepage, status, yo.last_update, len(yo), yo.field_count, yo.record_length) str += "\n --Fields--\n" for i in range(len(yo._meta.fields)): str += "%11d) %s\n" % (i, yo._fieldLayout(i)) return str @property def codepage(yo): return "%s (%s)" % code_pages[yo._meta.header.codepage()] @codepage.setter def codepage(yo, cp): cp = code_pages[yo._meta.header.codepage(cp)][0] yo._meta.decoder = codecs.getdecoder(cp) yo._meta.encoder = codecs.getencoder(cp) yo._update_disk(headeronly=True) @property def field_count(yo): "the number of fields in the table" return yo._meta.header.field_count @property def field_names(yo): "a list of the fields in the table" return yo._meta.fields[:] @property def filename(yo): "table's file name, including path (if specified on open)" return yo._meta.filename @property def last_update(yo): "date of last update" return yo._meta.header.update @property def memoname(yo): "table's memo name (if path included in filename on open)" return yo._meta.memoname @property def record_length(yo): "number of bytes in a record" return yo._meta.header.record_length @property def record_number(yo): "index number of the current record" return yo._meta.current @property def supported_tables(yo): "allowable table types" return yo._supported_tables @property def use_deleted(yo): "process or ignore deleted records" return yo._use_deleted @use_deleted.setter def use_deleted(yo, new_setting): yo._use_deleted = new_setting @property def version(yo): "returns the dbf type of the table" return yo._version def add_fields(yo, field_specs): """adds field(s) to the table layout; format is Name Type(Length,Decimals)[; Name Type(Length,Decimals)[...]] backup table is created with _backup appended to name then modifies current structure""" all_records = [record for record in yo] if yo: yo.create_backup() yo._meta.blankrecord = None meta = yo._meta offset = meta.header.record_length fields = yo._list_fields(field_specs, sep=';') for field in fields: try: name, format = field.split() if name[0] == '_' or name[0].isdigit() or not name.replace('_','').isalnum(): raise DbfError("%s invalid: field names must start with a letter, and can only contain letters, digits, and _" % name) name = 
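# Hypothetical table creation using the "Name Type(Length,Decimals)" spec
# syntax that add_fields() documents; the filename and field names are
# invented.
table = Db3Table('contacts.dbf', 'name C(30); paid L; qty N(5,0); added D')
table.append({'name': 'Ethan', 'paid': True, 'qty': 1, 'added': Date.today()})
table.add_fields('comment M')    # makes a *_backup.dbf copy, then restructures
print table                      # __str__ above prints the field layout
table.close()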
name.lower() if name in meta.fields: raise DbfError("Field '%s' already exists" % name) field_type = format[0].upper() if len(name) > 10: raise DbfError("Maximum field name length is 10. '%s' is %d characters long." % (name, len(name))) if not field_type in meta.fieldtypes.keys(): raise DbfError("Unknown field type: %s" % field_type) length, decimals = yo._meta.fieldtypes[field_type]['Init'](format) except ValueError: raise DbfError("invalid field specifier: %s" % field) start = offset end = offset + length offset = end meta.fields.append(name) meta[name] = {'type':field_type, 'start':start, 'length':length, 'end':end, 'decimals':decimals, 'flags':0} if meta[name]['type'] in yo._memotypes and meta.memo is None: meta.memo = yo._memoClass(meta) for record in yo: record[name] = meta.fieldtypes[field_type]['Blank']() yo._buildHeaderFields() yo._update_disk() def append(yo, kamikaze='', drop=False, multiple=1): "adds blank records, and fills fields with dict/tuple values if present" if not yo.field_count: raise DbfError("No fields defined, cannot append") empty_table = len(yo) == 0 dictdata = False tupledata = False if not isinstance(kamikaze, _DbfRecord): if isinstance(kamikaze, dict): dictdata = kamikaze kamikaze = '' elif isinstance(kamikaze, tuple): tupledata = kamikaze kamikaze = '' newrecord = _DbfRecord(recnum=yo._meta.header.record_count, layout=yo._meta, kamikaze=kamikaze) yo._table.append(newrecord) yo._meta.header.record_count += 1 if dictdata: newrecord.gather_fields(dictdata, drop=drop) elif tupledata: for index, item in enumerate(tupledata): newrecord[index] = item elif kamikaze == str: for field in yo._meta.memofields: newrecord[field] = '' elif kamikaze: for field in yo._meta.memofields: newrecord[field] = kamikaze[field] newrecord.write_record() multiple -= 1 if multiple: data = newrecord._data single = yo._meta.header.record_count total = single + multiple while single < total: multi_record = _DbfRecord(single, yo._meta, kamikaze=data) yo._table.append(multi_record) for field in yo._meta.memofields: multi_record[field] = newrecord[field] single += 1 multi_record.write_record() yo._meta.header.record_count = total # += multiple yo._meta.current = yo._meta.header.record_count - 1 newrecord = multi_record yo._update_disk(headeronly=True) if empty_table: yo._meta.current = 0 return newrecord def bof(yo, _move=False): "moves record pointer to previous usable record; returns True if no more usable records" current = yo._meta.current try: while yo._meta.current > 0: yo._meta.current -= 1 if yo.use_deleted or not yo.current().has_been_deleted: break else: yo._meta.current = -1 return True return False finally: if not _move: yo._meta.current = current def bottom(yo, get_record=False): """sets record pointer to bottom of table if get_record, seeks to and returns last (non-deleted) record DbfError if table is empty Bof if all records deleted and use_deleted is False""" yo._meta.current = yo._meta.header.record_count if get_record: try: return yo.prev() except Bof: yo._meta.current = yo._meta.header.record_count raise Eof() def close(yo, keep_table=False, keep_memos=False): """closes disk files ensures table data is available if keep_table ensures memo data is available if keep_memos""" yo._meta.inmemory = True if keep_table: replacement_table = [] for record in yo._table: replacement_table.append(record) yo._table = replacement_table else: if yo._meta.ondisk: yo._meta_only = True if yo._meta.mfd is not None: if not keep_memos: yo._meta.ignorememos = True else: memo_fields = [] for field 
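# Hypothetical append() variants for the method above, reusing the
# 'contacts.dbf' layout from the earlier sketch (name, paid, qty, added):
table.append()                                            # one blank record
table.append({'name': 'Marga', 'extra': 1}, drop=True)    # unknown keys are dropped, not fatal
table.append(('Ethan', True, 1, Date.today()))            # tuples fill fields positionally
table.append(multiple=100)                                # 100 blank records in a single call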
in yo.field_names: if yo.is_memotype(field): memo_fields.append(field) for record in yo: for field in memo_fields: record[field] = record[field] yo._meta.mfd.close() yo._meta.mfd = None if yo._meta.ondisk: yo._meta.dfd.close() yo._meta.dfd = None if keep_table: yo._read_only = True yo._meta.ondisk = False def create_backup(yo, new_name=None, overwrite=False): "creates a backup table -- ignored if memory table" if yo.filename[0] == yo.filename[-1] == ':': return if new_name is None: new_name = os.path.splitext(yo.filename)[0] + '_backup.dbf' else: overwrite = True if overwrite or not yo.backup: bkup = open(new_name, 'wb') try: yo._meta.dfd.seek(0) copyfileobj(yo._meta.dfd, bkup) yo.backup = new_name finally: bkup.close() def create_index(yo, key): return Index(yo, key) def current(yo, index=False): "returns current logical record, or its index" if yo._meta.current < 0: raise Bof() elif yo._meta.current >= yo._meta.header.record_count: raise Eof() if index: return yo._meta.current return yo._table[yo._meta.current] def delete_fields(yo, doomed): """removes field(s) from the table creates backup files with _backup appended to the file name, then modifies current structure""" doomed = yo._list_fields(doomed) for victim in doomed: if victim not in yo._meta.fields: raise DbfError("field %s not in table -- delete aborted" % victim) all_records = [record for record in yo] yo.create_backup() for victim in doomed: yo._meta.fields.pop(yo._meta.fields.index(victim)) start = yo._meta[victim]['start'] end = yo._meta[victim]['end'] for record in yo: record._data = record._data[:start] + record._data[end:] for field in yo._meta.fields: if yo._meta[field]['start'] == end: end = yo._meta[field]['end'] yo._meta[field]['start'] = start yo._meta[field]['end'] = start + yo._meta[field]['length'] start = yo._meta[field]['end'] yo._buildHeaderFields() yo._update_disk() def eof(yo, _move=False): "moves record pointer to next usable record; returns True if no more usable records" current = yo._meta.current try: while yo._meta.current < yo._meta.header.record_count - 1: yo._meta.current += 1 if yo.use_deleted or not yo.current().has_been_deleted: break else: yo._meta.current = yo._meta.header.record_count return True return False finally: if not _move: yo._meta.current = current def export(yo, records=None, filename=None, field_specs=None, format='csv', header=True): """writes the table using CSV or tab-delimited format, using the filename given if specified, otherwise the table name""" if filename is not None: path, filename = os.path.split(filename) else: path, filename = os.path.split(yo.filename) filename = os.path.join(path, filename) field_specs = yo._list_fields(field_specs) if records is None: records = yo format = format.lower() if format not in ('csv', 'tab', 'fixed'): raise DbfError("export format: csv, tab, or fixed -- not %s" % format) if format == 'fixed': format = 'txt' base, ext = os.path.splitext(filename) if ext.lower() in ('', '.dbf'): filename = base + "." 
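# Sketch of walking the table with the record-pointer methods above;
# top()/next() raise Bof/Eof instead of returning a sentinel, and
# use_deleted controls whether records flagged '*' are skipped.
table.use_deleted = False
table.top()
try:
    while True:
        print table.next()
except Eof:
    pass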
+ format[:3] fd = open(filename, 'w') try: if format == 'csv': csvfile = csv.writer(fd, dialect='dbf') if header: csvfile.writerow(field_specs) for record in records: fields = [] for fieldname in field_specs: fields.append(record[fieldname]) csvfile.writerow(fields) elif format == 'tab': if header: fd.write('\t'.join(field_specs) + '\n') for record in records: fields = [] for fieldname in field_specs: fields.append(str(record[fieldname])) fd.write('\t'.join(fields) + '\n') else: # format == 'fixed' header = open("%s_layout.txt" % os.path.splitext(filename)[0], 'w') header.write("%-15s Size\n" % "Field Name") header.write("%-15s ----\n" % ("-" * 15)) sizes = [] for field in field_specs: size = yo.size(field)[0] sizes.append(size) header.write("%-15s %3d\n" % (field, size)) header.write('\nTotal Records in file: %d\n' % len(records)) header.close() for record in records: fields = [] for i, field_name in enumerate(field_specs): fields.append("%-*s" % (sizes[i], record[field_name])) fd.write(''.join(fields) + '\n') finally: fd.close() fd = None return len(records) def get_record(yo, recno): "returns record at physical_index[recno]" return yo._table[recno] def goto(yo, criteria): """changes the record pointer to the first matching (non-deleted) record criteria should be either a tuple of tuple(value, field, func) triples, or an integer to go to""" if isinstance(criteria, int): if not -yo._meta.header.record_count <= criteria < yo._meta.header.record_count: raise IndexError("Record %d does not exist" % criteria) if criteria < 0: criteria += yo._meta.header.record_count yo._meta.current = criteria return yo.current() criteria = _normalize_tuples(tuples=criteria, length=3, filler=[_nop]) specs = tuple([(field, func) for value, field, func in criteria]) match = tuple([value for value, field, func in criteria]) current = yo.current(index=True) matchlen = len(match) while not yo.Eof(): record = yo.current() results = record(*specs) if results == match: return record return yo.goto(current) def is_decimal(yo, name): "returns True if name is a variable-length field type" return yo._meta[name]['type'] in yo._decimal_fields def is_memotype(yo, name): "returns True if name is a memo type field" return yo._meta[name]['type'] in yo._memotypes def new(yo, filename, field_specs=None, codepage=None): "returns a new table of the same type" if field_specs is None: field_specs = yo.structure() if not (filename[0] == filename[-1] == ':'): path, name = os.path.split(filename) if path == "": filename = os.path.join(os.path.split(yo.filename)[0], filename) elif name == "": filename = os.path.join(path, os.path.split(yo.filename)[1]) if codepage is None: codepage = yo._meta.header.codepage()[0] return yo.__class__(filename, field_specs, codepage=codepage) def next(yo): "set record pointer to next (non-deleted) record, and return it" if yo.eof(_move=True): raise Eof() return yo.current() def open(yo): meta = yo._meta meta.inmemory = False meta.ondisk = True yo._read_only = False yo._meta_only = False if '_table' in dir(yo): del yo._table dfd = meta.dfd = open(meta.filename, 'r+b') dfd.seek(0) meta.header = header = yo._TableHeader(dfd.read(32)) if not header.version in yo._supported_tables: dfd.close() dfd = None raise DbfError("Unsupported dbf type: %s [%x]" % (version_map.get(meta.header.version, 'Unknown: %s' % meta.header.version), ord(meta.header.version))) cp, sd, ld = _codepage_lookup(meta.header.codepage()) meta.decoder = codecs.getdecoder(sd) meta.encoder = codecs.getencoder(sd) fieldblock = 
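# Hypothetical exports using the export() method above; 'name' and 'paid'
# are fields from the earlier sketch.
table.export(filename='contacts.csv', field_specs='name, paid', format='csv', header=True)
table.export(format='tab')     # writes contacts.tab, named after the table itself
table.export(format='fixed')   # fixed-width .txt plus a *_layout.txt description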
dfd.read(header.start - 32) for i in range(len(fieldblock)//32+1): fieldend = i * 32 if fieldblock[fieldend] == '\x0d': break else: raise DbfError("corrupt field structure in header") if len(fieldblock[:fieldend]) % 32 != 0: raise DbfError("corrupt field structure in header") header.fields = fieldblock[:fieldend] header.extra = fieldblock[fieldend+1:] # skip trailing \r yo._initializeFields() yo._checkMemoIntegrity() meta.current = -1 if len(yo) > 0: meta.current = 0 dfd.seek(0) def pack(yo, _pack=True): "physically removes all deleted records" for dbfindex in yo._indexen: dbfindex.clear() newtable = [] index = 0 offset = 0 # +1 for each purged record for record in yo._table: found = False if record.has_been_deleted and _pack: for dbflist in yo._dbflists: if dbflist._purge(record, record.record_number - offset, 1): found = True record._recnum = -1 else: record._recnum = index newtable.append(record) index += 1 if found: offset += 1 found = False yo._table.clear() for record in newtable: yo._table.append(record) yo._meta.header.record_count = index yo._current = -1 yo._update_disk() yo.reindex() def prev(yo): "set record pointer to previous (non-deleted) record, and return it" if yo.bof(_move=True): raise Bof return yo.current() def query(yo, sql_command=None, python=None): "uses exec to perform queries on the table" if sql_command: return sql(yo, sql_command) elif python is None: raise DbfError("query: python parameter must be specified") possible = List(desc="%s --> %s" % (yo.filename, python), field_names=yo.field_names) yo._dbflists.add(possible) query_result = {} select = 'query_result["keep"] = %s' % python g = {} use_deleted = yo.use_deleted for record in yo: query_result['keep'] = False g['query_result'] = query_result exec select in g, record if query_result['keep']: possible.append(record) record.write_record() return possible def reindex(yo): for dbfindex in yo._indexen: dbfindex.reindex() def rename_field(yo, oldname, newname): "renames an existing field" if yo: yo.create_backup() if not oldname in yo._meta.fields: raise DbfError("field --%s-- does not exist -- cannot rename it." % oldname) if newname[0] == '_' or newname[0].isdigit() or not newname.replace('_','').isalnum(): raise DbfError("field names cannot start with _ or digits, and can only contain the _, letters, and digits") newname = newname.lower() if newname in yo._meta.fields: raise DbfError("field --%s-- already exists" % newname) if len(newname) > 10: raise DbfError("maximum field name length is 10. '%s' is %d characters long." 
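# Sketch of permanently removing records: delete_record() only sets the
# '*' flag, while pack() above rewrites the file, renumbers the survivors,
# and reindexes ('paid' is a hypothetical field).
for record in table:
    if not record.paid:
        record.delete_record().write_record()
table.pack()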
% (newname, len(newname))) yo._meta[newname] = yo._meta[oldname] yo._meta.fields[yo._meta.fields.index(oldname)] = newname yo._buildHeaderFields() yo._update_disk(headeronly=True) def size(yo, field): "returns size of field as a tuple of (length, decimals)" if field in yo: return (yo._meta[field]['length'], yo._meta[field]['decimals']) raise DbfError("%s is not a field in %s" % (field, yo.filename)) def structure(yo, fields=None): """return list of fields suitable for creating same table layout @param fields: list of fields or None for all fields""" field_specs = [] fields = yo._list_fields(fields) try: for name in fields: field_specs.append(yo._fieldLayout(yo.field_names.index(name))) except ValueError: raise DbfError("field --%s-- does not exist" % name) return field_specs def top(yo, get_record=False): """sets record pointer to top of table; if get_record, seeks to and returns first (non-deleted) record DbfError if table is empty Eof if all records are deleted and use_deleted is False""" yo._meta.current = -1 if get_record: try: return yo.next() except Eof: yo._meta.current = -1 raise Bof() def type(yo, field): "returns type of field" if field in yo: return yo._meta[field]['type'] raise DbfError("%s is not a field in %s" % (field, yo.filename)) def zap(yo, areyousure=False): """removes all records from table -- this cannot be undone! areyousure must be True, else error is raised""" if areyousure: if yo._meta.inmemory: yo._table = [] else: yo._table.clear() yo._meta.header.record_count = 0 yo._current = -1 yo._update_disk() else: raise DbfError("You must say you are sure to wipe the table") class Db3Table(DbfTable): """Provides an interface for working with dBase III tables.""" _version = 'dBase III Plus' _versionabbv = 'db3' _fieldtypes = { 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter}, 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate}, 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical}, 'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo}, 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addNumeric} } _memoext = '.dbt' _memotypes = ('M',) _memoClass = _Db3Memo _yesMemoMask = '\x80' _noMemoMask = '\x7f' _fixed_fields = ('D','L','M') _variable_fields = ('C','N') _character_fields = ('C','M') _decimal_fields = ('N',) _numeric_fields = ('N',) _dbfTableHeader = array('c', '\x00' * 32) _dbfTableHeader[0] = '\x03' # version - dBase III w/o memo's _dbfTableHeader[8:10] = array('c', io.packShortInt(33)) _dbfTableHeader[10] = '\x01' # record length -- one for delete flag _dbfTableHeader[29] = '\x03' # code page -- 437 US-MS DOS _dbfTableHeader = _dbfTableHeader.tostring() _dbfTableHeaderExtra = '' _supported_tables = ['\x03', '\x83'] _read_only = False _meta_only = False _use_deleted = True def _checkMemoIntegrity(yo): "dBase III specific" if yo._meta.header.version == '\x83': try: yo._meta.memo = yo._memoClass(yo._meta) except: yo._meta.dfd.close() yo._meta.dfd = None raise if not yo._meta.ignorememos: for field in yo._meta.fields: if yo._meta[field]['type'] in yo._memotypes: if yo._meta.header.version != '\x83': yo._meta.dfd.close() yo._meta.dfd = None raise DbfError("Table structure corrupt: memo fields exist, header declares no memos") elif not 
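# Hypothetical cloning of a table layout with structure()/new() from above:
specs = table.structure()                   # e.g. ['name C(30)', 'paid L', ...]
archive = table.new('contacts_2011.dbf')    # same layout and codepage, zero records
for record in table:
    archive.append(record.scatter_fields())
archive.close()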
os.path.exists(yo._meta.memoname): yo._meta.dfd.close() yo._meta.dfd = None raise DbfError("Table structure corrupt: memo fields exist without memo file") break def _initializeFields(yo): "builds the FieldList of names, types, and descriptions" yo._meta.fields[:] = [] offset = 1 fieldsdef = yo._meta.header.fields if len(fieldsdef) % 32 != 0: raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef)) if len(fieldsdef) // 32 != yo.field_count: raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32)) for i in range(yo.field_count): fieldblock = fieldsdef[i*32:(i+1)*32] name = io.unpackStr(fieldblock[:11]) type = fieldblock[11] if not type in yo._meta.fieldtypes: raise DbfError("Unknown field type: %s" % type) start = offset length = ord(fieldblock[16]) offset += length end = start + length decimals = ord(fieldblock[17]) flags = ord(fieldblock[18]) yo._meta.fields.append(name) yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags} class FpTable(DbfTable): 'Provides an interface for working with FoxPro 2 tables' _version = 'Foxpro' _versionabbv = 'fp' _fieldtypes = { 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter}, 'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric}, 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric}, 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical}, 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate}, 'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addVfpMemo}, 'G' : {'Type':'General', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo}, 'P' : {'Type':'Picture', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo}, '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} } _memoext = '.fpt' _memotypes = ('G','M','P') _memoClass = _VfpMemo _yesMemoMask = '\xf5' # 1111 0101 _noMemoMask = '\x03' # 0000 0011 _fixed_fields = ('B','D','G','I','L','M','P','T','Y') _variable_fields = ('C','F','N') _character_fields = ('C','M') # field representing character data _decimal_fields = ('F','N') _numeric_fields = ('B','F','I','N','Y') _supported_tables = ('\x03', '\xf5') _dbfTableHeader = array('c', '\x00' * 32) _dbfTableHeader[0] = '\x30' # version - Foxpro 6 0011 0000 _dbfTableHeader[8:10] = array('c', io.packShortInt(33+263)) _dbfTableHeader[10] = '\x01' # record length -- one for delete flag _dbfTableHeader[29] = '\x03' # code page -- 437 US-MS DOS _dbfTableHeader = _dbfTableHeader.tostring() _dbfTableHeaderExtra = '\x00' * 263 _use_deleted = True def _checkMemoIntegrity(yo): if os.path.exists(yo._meta.memoname): try: yo._meta.memo = yo._memoClass(yo._meta) except: yo._meta.dfd.close() yo._meta.dfd = None raise if not yo._meta.ignorememos: for field in yo._meta.fields: if yo._meta[field]['type'] in yo._memotypes: if not os.path.exists(yo._meta.memoname): yo._meta.dfd.close() yo._meta.dfd = None raise DbfError("Table structure corrupt: memo fields exist without memo file") break def _initializeFields(yo): "builds the FieldList of names, types, 
and descriptions" yo._meta.fields[:] = [] offset = 1 fieldsdef = yo._meta.header.fields if len(fieldsdef) % 32 != 0: raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef)) if len(fieldsdef) // 32 != yo.field_count: raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32)) for i in range(yo.field_count): fieldblock = fieldsdef[i*32:(i+1)*32] name = io.unpackStr(fieldblock[:11]) type = fieldblock[11] if not type in yo._meta.fieldtypes: raise DbfError("Unknown field type: %s" % type) elif type == '0': return # ignore nullflags start = offset length = ord(fieldblock[16]) offset += length end = start + length decimals = ord(fieldblock[17]) flags = ord(fieldblock[18]) yo._meta.fields.append(name) yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags} class VfpTable(DbfTable): 'Provides an interface for working with Visual FoxPro 6 tables' _version = 'Visual Foxpro v6' _versionabbv = 'vfp' _fieldtypes = { 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter}, 'Y' : {'Type':'Currency', 'Retrieve':io.retrieveCurrency, 'Update':io.updateCurrency, 'Blank':Decimal(), 'Init':io.addVfpCurrency}, 'B' : {'Type':'Double', 'Retrieve':io.retrieveDouble, 'Update':io.updateDouble, 'Blank':float, 'Init':io.addVfpDouble}, 'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric}, 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric}, 'I' : {'Type':'Integer', 'Retrieve':io.retrieveInteger, 'Update':io.updateInteger, 'Blank':int, 'Init':io.addVfpInteger}, 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical}, 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate}, 'T' : {'Type':'DateTime', 'Retrieve':io.retrieveVfpDateTime, 'Update':io.updateVfpDateTime, 'Blank':DateTime.now, 'Init':io.addVfpDateTime}, 'M' : {'Type':'Memo', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo}, 'G' : {'Type':'General', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo}, 'P' : {'Type':'Picture', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo}, '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} } _memoext = '.fpt' _memotypes = ('G','M','P') _memoClass = _VfpMemo _yesMemoMask = '\x30' # 0011 0000 _noMemoMask = '\x30' # 0011 0000 _fixed_fields = ('B','D','G','I','L','M','P','T','Y') _variable_fields = ('C','F','N') _character_fields = ('C','M') # field representing character data _decimal_fields = ('F','N') _numeric_fields = ('B','F','I','N','Y') _supported_tables = ('\x30',) _dbfTableHeader = array('c', '\x00' * 32) _dbfTableHeader[0] = '\x30' # version - Foxpro 6 0011 0000 _dbfTableHeader[8:10] = array('c', io.packShortInt(33+263)) _dbfTableHeader[10] = '\x01' # record length -- one for delete flag _dbfTableHeader[29] = '\x03' # code page -- 437 US-MS DOS _dbfTableHeader = _dbfTableHeader.tostring() _dbfTableHeaderExtra = '\x00' * 263 _use_deleted = True def _checkMemoIntegrity(yo): if os.path.exists(yo._meta.memoname): try: yo._meta.memo = yo._memoClass(yo._meta) except: 
yo._meta.dfd.close() yo._meta.dfd = None raise if not yo._meta.ignorememos: for field in yo._meta.fields: if yo._meta[field]['type'] in yo._memotypes: if not os.path.exists(yo._meta.memoname): yo._meta.dfd.close() yo._meta.dfd = None raise DbfError("Table structure corrupt: memo fields exist without memo file") break def _initializeFields(yo): "builds the FieldList of names, types, and descriptions" yo._meta.fields[:] = [] offset = 1 fieldsdef = yo._meta.header.fields for i in range(yo.field_count): fieldblock = fieldsdef[i*32:(i+1)*32] name = io.unpackStr(fieldblock[:11]) type = fieldblock[11] if not type in yo._meta.fieldtypes: raise DbfError("Unknown field type: %s" % type) elif type == '0': return # ignore nullflags start = io.unpackLongInt(fieldblock[12:16]) length = ord(fieldblock[16]) offset += length end = start + length decimals = ord(fieldblock[17]) flags = ord(fieldblock[18]) yo._meta.fields.append(name) yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags} class List(object): "list of Dbf records, with set-like behavior" _desc = '' def __init__(yo, new_records=None, desc=None, key=None, field_names=None): yo.field_names = field_names yo._list = [] yo._set = set() if key is not None: yo.key = key if key.__doc__ is None: key.__doc__ = 'unknown' key = yo.key yo._current = -1 if isinstance(new_records, yo.__class__) and key is new_records.key: yo._list = new_records._list[:] yo._set = new_records._set.copy() yo._current = 0 elif new_records is not None: for record in new_records: value = key(record) item = (record.record_table, record.record_number, value) if value not in yo._set: yo._set.add(value) yo._list.append(item) yo._current = 0 if desc is not None: yo._desc = desc def __add__(yo, other): key = yo.key if isinstance(other, (DbfTable, list)): other = yo.__class__(other, key=key) if isinstance(other, yo.__class__): result = yo.__class__() result._set = yo._set.copy() result._list[:] = yo._list[:] result.key = yo.key if key is other.key: # same key? just compare key values for item in other._list: if item[2] not in result._set: result._set.add(item[2]) result._list.append(item) else: # different keys, use this list's key on other's records for rec in other: value = key(rec) if value not in result._set: result._set.add(value) result._list.append((rec.record_table, rec.record_number, value)) result._current = 0 if result else -1 return result return NotImplemented def __contains__(yo, record): if isinstance(record, tuple): item = record else: item = yo.key(record) return item in yo._set def __delitem__(yo, key): if isinstance(key, int): item = yo._list.pop[key] yo._set.remove(item[2]) elif isinstance(key, slice): yo._set.difference_update([item[2] for item in yo._list[key]]) yo._list.__delitem__(key) else: raise TypeError def __getitem__(yo, key): if isinstance(key, int): count = len(yo._list) if not -count <= key < count: raise IndexError("Record %d is not in list." 
% key) return yo._get_record(*yo._list[key]) elif isinstance(key, slice): result = yo.__class__() result._list[:] = yo._list[key] result._set = set(result._list) result.key = yo.key result._current = 0 if result else -1 return result else: raise TypeError('indices must be integers') def __iter__(yo): return (table.get_record(recno) for table, recno, value in yo._list) def __len__(yo): return len(yo._list) def __nonzero__(yo): return len(yo) > 0 def __radd__(yo, other): return yo.__add__(other) def __repr__(yo): if yo._desc: return "%s(key=%s - %s - %d records)" % (yo.__class__, yo.key.__doc__, yo._desc, len(yo._list)) else: return "%s(key=%s - %d records)" % (yo.__class__, yo.key.__doc__, len(yo._list)) def __rsub__(yo, other): key = yo.key if isinstance(other, (DbfTable, list)): other = yo.__class__(other, key=key) if isinstance(other, yo.__class__): result = yo.__class__() result._list[:] = other._list[:] result._set = other._set.copy() result.key = key lost = set() if key is other.key: for item in yo._list: if item[2] in result._list: result._set.remove(item[2]) lost.add(item) else: for rec in other: value = key(rec) if value in result._set: result._set.remove(value) lost.add((rec.record_table, rec.record_number, value)) result._list = [item for item in result._list if item not in lost] result._current = 0 if result else -1 return result return NotImplemented def __sub__(yo, other): key = yo.key if isinstance(other, (DbfTable, list)): other = yo.__class__(other, key=key) if isinstance(other, yo.__class__): result = yo.__class__() result._list[:] = yo._list[:] result._set = yo._set.copy() result.key = key lost = set() if key is other.key: for item in other._list: if item[2] in result._set: result._set.remove(item[2]) lost.add(item[2]) else: for rec in other: value = key(rec) if value in result._set: result._set.remove(value) lost.add(value) result._list = [item for item in result._list if item[2] not in lost] result._current = 0 if result else -1 return result return NotImplemented def _maybe_add(yo, item): if item[2] not in yo._set: yo._set.add(item[2]) yo._list.append(item) def _get_record(yo, table=None, rec_no=None, value=None): if table is rec_no is None: table, rec_no, value = yo._list[yo._current] return table.get_record(rec_no) def _purge(yo, record, old_record_number, offset): partial = record.record_table, old_record_number records = sorted(yo._list, key=lambda item: (item[0], item[1])) for item in records: if partial == item[:2]: found = True break elif partial[0] is item[0] and partial[1] < item[1]: found = False break else: found = False if found: yo._list.pop(yo._list.index(item)) yo._set.remove(item[2]) start = records.index(item) + found for item in records[start:]: if item[0] is not partial[0]: # into other table's records break i = yo._list.index(item) yo._set.remove(item[2]) item = item[0], (item[1] - offset), item[2] yo._list[i] = item yo._set.add(item[2]) return found def append(yo, new_record): yo._maybe_add((new_record.record_table, new_record.record_number, yo.key(new_record))) if yo._current == -1 and yo._list: yo._current = 0 return new_record def bottom(yo): if yo._list: yo._current = len(yo._list) - 1 return yo._get_record() raise DbfError("dbf.List is empty") def clear(yo): yo._list = [] yo._set = set() yo._current = -1 def current(yo): if yo._current < 0: raise Bof() elif yo._current == len(yo._list): raise Eof() return yo._get_record() def extend(yo, new_records): key = yo.key if isinstance(new_records, yo.__class__): if key is new_records.key: # same key? 
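# Sketch of the set-like behavior of the List class above: records are
# deduplicated by the key function (record_table, record_number by default),
# and + / - act as union and difference ('paid' is a hypothetical field).
paid = List((rec for rec in table if rec.paid), desc='paid subset')
everyone = List(table)
unpaid = everyone - paid      # difference
reunited = unpaid + paid      # union, still deduplicated by key
print len(unpaid), len(reunited)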
just compare key values for item in new_records._list: yo._maybe_add(item) else: # different keys, use this list's key on other's records for rec in new_records: value = key(rec) yo._maybe_add((rec.record_table, rec.record_number, value)) else: for record in new_records: value = key(rec) yo._maybe_add((rec.record_table, rec.record_number, value)) if yo._current == -1 and yo._list: yo._current = 0 def goto(yo, index_number): if yo._list: if 0 <= index_number <= len(yo._list): yo._current = index_number return yo._get_record() raise DbfError("index %d not in dbf.List of %d records" % (index_number, len(yo._list))) raise DbfError("dbf.List is empty") def index(yo, sort=None, reverse=False): "sort= ((field_name, func), (field_name, func),) | 'ORIGINAL'" if sort is None: results = [] for field, func in yo._meta.index: results.append("%s(%s)" % (func.__name__, field)) return ', '.join(results + ['reverse=%s' % yo._meta.index_reversed]) yo._meta.index_reversed = reverse if sort == 'ORIGINAL': yo._index = range(yo._meta.header.record_count) yo._meta.index = 'ORIGINAL' if reverse: yo._index.reverse() return new_sort = _normalize_tuples(tuples=sort, length=2, filler=[_nop]) yo._meta.index = tuple(new_sort) yo._meta.orderresults = [''] * len(yo) for record in yo: yo._meta.orderresults[record.record_number] = record() yo._index.sort(key=lambda i: yo._meta.orderresults[i], reverse=reverse) def index(yo, record, start=None, stop=None): item = record.record_table, record.record_number, yo.key(record) if start is None: start = 0 if stop is None: stop = len(yo._list) return yo._list.index(item, start, stop) def insert(yo, i, record): item = record.record_table, record.record_number, yo.key(record) if item not in yo._set: yo._set.add(item[2]) yo._list.insert(i, item) def key(yo, record): "table_name, record_number" return record.record_table, record.record_number def next(yo): if yo._current < len(yo._list): yo._current += 1 if yo._current < len(yo._list): return yo._get_record() raise Eof() def pop(yo, index=None): if index is None: table, recno, value = yo._list.pop() else: table, recno, value = yo._list.pop(index) yo._set.remove(value) return yo._get_record(table, recno, value) def prev(yo): if yo._current >= 0: yo._current -= 1 if yo._current > -1: return yo._get_record() raise Bof() def remove(yo, record): item = record.record_table, record.record_number, yo.key(record) yo._list.remove(item) yo._set.remove(item[2]) def reverse(yo): return yo._list.reverse() def top(yo): if yo._list: yo._current = 0 return yo._get_record() raise DbfError("dbf.List is empty") def sort(yo, key=None, reverse=False): if key is None: return yo._list.sort(reverse=reverse) return yo._list.sort(key=lambda item: key(item[0].get_record(item[1])), reverse=reverse) class DbfCsv(csv.Dialect): "csv format for exporting tables" delimiter = ',' doublequote = True escapechar = None lineterminator = '\n' quotechar = '"' skipinitialspace = True quoting = csv.QUOTE_NONNUMERIC class Index(object): class IndexIterator(object): "returns records using this index" def __init__(yo, table, records): yo.table = table yo.records = records yo.index = 0 def __iter__(yo): return yo def next(yo): while yo.index < len(yo.records): record = yo.table.get_record(yo.records[yo.index]) yo.index += 1 if not yo.table.use_deleted and record.has_been_deleted: continue return record else: raise StopIteration def __init__(yo, table, key, field_names=None): yo._table = table yo._values = [] # ordered list of values yo._rec_by_val = [] # matching record numbers 
        yo._records = {}            # record numbers:values
        yo.__doc__ = key.__doc__ or 'unknown'
        yo.key = key
        yo.field_names = field_names or table.field_names
        for record in table:
            value = key(record)
            if value is DoNotIndex:
                continue
            rec_num = record.record_number
            if not isinstance(value, tuple):
                value = (value, )
            vindex = bisect_right(yo._values, value)
            yo._values.insert(vindex, value)
            yo._rec_by_val.insert(vindex, rec_num)
            yo._records[rec_num] = value
        table._indexen.add(yo)
    def __call__(yo, record):
        rec_num = record.record_number
        if rec_num in yo._records:
            value = yo._records[rec_num]
            vindex = bisect_left(yo._values, value)
            yo._values.pop(vindex)
            yo._rec_by_val.pop(vindex)
        value = yo.key(record)
        if value is DoNotIndex:
            return
        if not isinstance(value, tuple):
            value = (value, )
        vindex = bisect_right(yo._values, value)
        yo._values.insert(vindex, value)
        yo._rec_by_val.insert(vindex, rec_num)
        yo._records[rec_num] = value
    def __contains__(yo, match):
        if isinstance(match, _DbfRecord):
            if match.record_table is yo._table:
                return match.record_number in yo._records
            match = yo.key(match)
        elif not isinstance(match, tuple):
            match = (match, )
        return yo.find(match) != -1
    def __getitem__(yo, key):
        if isinstance(key, int):
            count = len(yo._values)
            if not -count <= key < count:
                raise IndexError("Record %d is not in list." % key)
            rec_num = yo._rec_by_val[key]
            return yo._table.get_record(rec_num)
        elif isinstance(key, slice):
            result = List(field_names=yo._table.field_names)
            yo._table._dbflists.add(result)
            start, stop, step = key.start, key.stop, key.step
            if start is None:
                start = 0
            if stop is None:
                stop = len(yo._rec_by_val)
            if step is None:
                step = 1
            for loc in range(start, stop, step):
                record = yo._table.get_record(yo._rec_by_val[loc])
                result._maybe_add(item=(yo._table, yo._rec_by_val[loc], result.key(record)))
            result._current = 0 if result else -1
            return result
        elif isinstance(key, (str, unicode, tuple, _DbfRecord)):
            if isinstance(key, _DbfRecord):
                key = yo.key(key)
            elif not isinstance(key, tuple):
                key = (key, )
            loc = yo.find(key)
            if loc == -1:
                raise KeyError(key)
            return yo._table.get_record(yo._rec_by_val[loc])
        else:
            raise TypeError('indices must be integers, match objects must be strings or tuples')
    def __enter__(yo):
        return yo
    def __exit__(yo, *exc_info):
        yo._table.close()
        yo._values[:] = []
        yo._rec_by_val[:] = []
        yo._records.clear()
        return False
    def __iter__(yo):
        return yo.IndexIterator(yo._table, yo._rec_by_val)
    def __len__(yo):
        return len(yo._records)
    def _partial_match(yo, target, match):
        target = target[:len(match)]
        if isinstance(match[-1], (str, unicode)):
            target = list(target)
            target[-1] = target[-1][:len(match[-1])]
            target = tuple(target)
        return target == match
    def _purge(yo, rec_num):
        value = yo._records.get(rec_num)
        if value is not None:
            vindex = bisect_left(yo._values, value)
            del yo._records[rec_num]
            yo._values.pop(vindex)
            yo._rec_by_val.pop(vindex)
    def _search(yo, match, lo=0, hi=None):
        if hi is None:
            hi = len(yo._values)
        return bisect_left(yo._values, match, lo, hi)
    def clear(yo):
        "removes all entries from index"
        yo._values[:] = []
        yo._rec_by_val[:] = []
        yo._records.clear()
    def close(yo):
        yo._table.close()
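    # A commented usage sketch (not executed; `table` is an open DbfTable and
    # `lastname` an assumed field in it):
    #
    #   name_idx = Index(table, key=lambda rec: rec.lastname.upper())
    #   rec = name_idx[('FURMAN', )]                    # exact lookup by key tuple
    #   loc = name_idx.find(('FUR', ), partial=True)    # -1 when nothing matches
    #   if ('FURMAN', ) in name_idx:
    #       pass                                        # membership test uses find()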
    def find(yo, match, partial=False):
        "returns numeric index of (partial) match, or -1"
        if isinstance(match, _DbfRecord):
            if match.record_number in yo._records:
                return yo._values.index(yo._records[match.record_number])
            else:
                return -1
        if not isinstance(match, tuple):
            match = (match, )
        loc = yo._search(match)
        while loc < len(yo._values) and yo._values[loc] == match:
            if not yo._table.use_deleted and yo._table.get_record(yo._rec_by_val[loc]).has_been_deleted:
                loc += 1
                continue
            return loc
        if partial:
            while loc < len(yo._values) and yo._partial_match(yo._values[loc], match):
                if not yo._table.use_deleted and yo._table.get_record(yo._rec_by_val[loc]).has_been_deleted:
                    loc += 1
                    continue
                return loc
        return -1
    def find_index(yo, match):
        "returns numeric index of either (partial) match, or position of where match would be"
        if isinstance(match, _DbfRecord):
            if match.record_number in yo._records:
                return yo._values.index(yo._records[match.record_number])
            else:
                match = yo.key(match)
        if not isinstance(match, tuple):
            match = (match, )
        loc = yo._search(match)
        return loc
    @classmethod
    def from_file(cls, table, index_file):
        def get_idx_records(data, length, howmany):
            ptr = 0
            current = 0
            while current < howmany:
                key = data[ptr:ptr+length].replace('\x00', '')
                rec = io.unpackLongInt(data[ptr+length:ptr+length+4], bigendian=True)
                yield key, rec
                ptr += length + 4
                current += 1
        def next_item(idx_file, node_loc, keylen):
            idx_file.seek(node_loc)
            data_chunk = idx_file.read(512)
            attributes = io.unpackShortInt(data_chunk[:2])
            howmany = io.unpackShortInt(data_chunk[2:4])
            if attributes in (2, 3):        # leaf node
                for key, recnum in get_idx_records(data_chunk[12:512], keylen, howmany):
                    yield key, recnum
            else:                           # interior node -- descend into children
                for ignore, next_node in get_idx_records(data_chunk[12:512], keylen, howmany):
                    print ignore, next_node
                    for key, recnum in next_item(idx_file, next_node, keylen):
                        yield key, recnum
        idx = object.__new__(cls)
        #- idx.key = lambda rec: DoNotIndex
        data = open(index_file, 'rb')
        header = data.read(512)
        rootnode = io.unpackLongInt(header[:4])
        keylen = io.unpackShortInt(header[12:14])
        idx.__doc__ = header[16:236].replace('\x00', '')
        for_expr = header[236:456].replace('\x00', '')
        if for_expr:
            idx.__doc__ += ' for ' + for_expr.replace('=', '==')
        for rec in next_item(data, rootnode, keylen):
            print rec
    def index(yo, match, partial=False):
        "returns numeric index of (partial) match, or raises ValueError"
        loc = yo.find(match, partial)
        if loc == -1:
            if isinstance(match, _DbfRecord):
                raise ValueError("table <%s> record [%d] not in index <%s>" % (yo._table.filename, match.record_number, yo.__doc__))
            else:
                raise ValueError("match criteria <%s> not in index" % (match, ))
        return loc
    def reindex(yo):
        "reindexes all records"
        for record in yo._table:
            yo(record)
    def query(yo, sql_command=None, python=None):
        """recognized sql commands are SELECT, UPDATE, REPLACE, INSERT, DELETE, and RECALL"""
        if sql_command:
            return sql(yo, sql_command)
        elif python is None:
            raise DbfError("query: python parameter must be specified")
        possible = List(desc="%s --> %s" % (yo._table.filename, python), field_names=yo._table.field_names)
        yo._table._dbflists.add(possible)
        query_result = {}
        select = 'query_result["keep"] = %s' % python
        g = {}
        for record in yo:
            query_result['keep'] = False
            g['query_result'] = query_result
            exec select in g, record
            if query_result['keep']:
                possible.append(record)
            record.write_record()
        return possible
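    # A commented usage sketch of query() (not executed; `name_idx` and the
    # `age` field are assumptions):
    #
    #   # a python expression is evaluated against each record in index order
    #   grownups = name_idx.query(python="age >= 18")
    #
    #   # or hand query() one of the supported sql-ish command strings
    #   grownups = name_idx.query("select * for age >= 18")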
    def search(yo, match, partial=False):
        "returns dbf.List of all (partially) matching records"
        result = List(field_names=yo._table.field_names)
        yo._table._dbflists.add(result)
        if not isinstance(match, tuple):
            match = (match, )
        loc = yo._search(match)
        if loc == len(yo._values):
            return result
        while loc < len(yo._values) and yo._values[loc] == match:
            record = yo._table.get_record(yo._rec_by_val[loc])
            if not yo._table.use_deleted and record.has_been_deleted:
                loc += 1
                continue
            result._maybe_add(item=(yo._table, yo._rec_by_val[loc], result.key(record)))
            loc += 1
        if partial:
            while loc < len(yo._values) and yo._partial_match(yo._values[loc], match):
                record = yo._table.get_record(yo._rec_by_val[loc])
                if not yo._table.use_deleted and record.has_been_deleted:
                    loc += 1
                    continue
                result._maybe_add(item=(yo._table, yo._rec_by_val[loc], result.key(record)))
                loc += 1
        return result
csv.register_dialect('dbf', DbfCsv)
sql_functions = {'select': None, 'update': None, 'insert': None, 'delete': None, 'count': None}
def sql_criteria(records, criteria):
    "creates a function matching the sql criteria"
    function = """def func(records):
    \"\"\"%s\"\"\"
    matched = List(field_names=records[0].field_names)
    for rec in records:
        %s
        if %s:
            matched.append(rec)
    return matched"""
    fields = []
    for field in records[0].field_names:
        if field in criteria:
            fields.append(field)
    fields = '\n        '.join(['%s = rec.%s' % (field, field) for field in fields])
    g = {'List': List}
    function %= (criteria, fields, criteria)
    #- print function
    exec function in g
    return g['func']
def sql_cmd(records, command):
    "creates a function matching to apply command to each record in records"
    function = """def func(records):
    \"\"\"%s\"\"\"
    changed = 0
    for rec in records:
        %s
        %s
        %s
        changed += rec.write_record()
    return changed"""
    fields = []
    for field in records[0].field_names:
        if field in command:
            fields.append(field)
    pre_fields = '\n        '.join(['%s = rec.%s' % (field, field) for field in fields])
    post_fields = '\n        '.join(['rec.%s = %s' % (field, field) for field in fields])
    g = dbf.sql_user_functions.copy()
    if '=' not in command and ' with ' in command.lower():
        offset = command.lower().index(' with ')
        command = command[:offset] + ' = ' + command[offset+6:]
    function %= (command, pre_fields, command, post_fields)
    #- print function
    exec function in g
    return g['func']
def sql(records, command):
    """recognized sql commands are SELECT, UPDATE, INSERT, DELETE, and RECALL"""
    table = records[0].record_table
    sql_command = command
    no_condition = False
    if ' for ' in command:
        command, condition = command.split(' for ')
        condition = sql_criteria(records, condition)
    else:
        def condition(records):
            return records[:]
        no_condition = True
    name, command = command.split(' ', 1)
    name = name.lower()
    field_names = table.field_names
    if name == 'select':
        if command.strip() != '*':
            field_names = command.replace(' ', '').split(',')
        def command(records):
            return
    else:
        command = sql_cmd(records, command)
    if name not in ('delete', 'insert', 'recall', 'select', 'update', 'replace'):
        raise DbfError("unrecognized sql command: %s" % name.upper())
    if name == 'insert' and not no_condition:
        raise DbfError("FOR clause not allowed with INSERT")
    possible = List(desc=sql_command, field_names=field_names)
    tables = set()
    if name == 'insert':
        raise DbfError("INSERT not currently implemented")
        record = table.append()
        command(record)
        record.write_record()
        record.check_index()
        possible.append(record)
        changed = 0
    else:
        possible = condition(records)
        possible.field_names = field_names
        changed = command(possible)
        for record in possible:
            tables.add(record.record_table)
            if name == 'delete':
                record.delete_record()
            elif name == 'recall':
                record.undelete_record()
            elif name == 'select':
                pass
            elif name == 'update' or name == 'replace':
                pass    #command(record)
            else:
                raise DbfError("unrecognized sql command: %s" % name.upper())
            record.write_record()
    for list_table in tables:
        list_table._dbflists.add(possible)
    possible.modified = changed
    return possible
def _nop(value):
    "returns parameter unchanged"
    return value
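# A commented sketch of the command strings sql() understands (the record list
# and the `age` field are assumptions, not part of this module):
#
#   grownups = sql(somelist, "select * for age >= 18")             # filter only
#   touched  = sql(somelist, "update age with age + 1 for age < 0")
#
# each call returns a dbf.List of the records the command selected or changed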
def _normalize_tuples(tuples, length, filler):
    "ensures each tuple is the same length, using filler[-missing] for the gaps"
    final = []
    for t in tuples:
        if len(t) < length:
            final.append( tuple([item for item in t] + filler[len(t)-length:]) )
        else:
            final.append(t)
    return tuple(final)
def _codepage_lookup(cp):
    if cp not in code_pages:
        for code_page in sorted(code_pages.keys()):
            sd, ld = code_pages[code_page]
            if cp == sd or cp == ld:
                if sd is None:
                    raise DbfError("Unsupported codepage: %s" % ld)
                cp = code_page
                break
        else:
            raise DbfError("Unsupported codepage: %s" % cp)
    sd, ld = code_pages[cp]
    return cp, sd, ld
def ascii(new_setting=None):
    "get/set return_ascii setting"
    global return_ascii
    if new_setting is None:
        return return_ascii
    else:
        return_ascii = new_setting
def codepage(cp=None):
    "get/set default codepage for any new tables"
    global default_codepage
    cp, sd, ld = _codepage_lookup(cp or default_codepage)
    default_codepage = sd
    return "%s (LDID: 0x%02x - %s)" % (sd, ord(cp), ld)
def encoding(cp=None):
    "get/set default encoding for non-unicode strings passed into a table"
    global input_decoding
    cp, sd, ld = _codepage_lookup(cp or input_decoding)
    input_decoding = sd
    return "%s (LDID: 0x%02x - %s)" % (sd, ord(cp), ld)
class _Db4Table(DbfTable):
    version = 'dBase IV w/memos (non-functional)'
    _versionabbv = 'db4'
    _fieldtypes = {
            'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter},
            'Y' : {'Type':'Currency', 'Retrieve':io.retrieveCurrency, 'Update':io.updateCurrency, 'Blank':Decimal(), 'Init':io.addVfpCurrency},
            'B' : {'Type':'Double', 'Retrieve':io.retrieveDouble, 'Update':io.updateDouble, 'Blank':float, 'Init':io.addVfpDouble},
            'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric},
            'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric},
            'I' : {'Type':'Integer', 'Retrieve':io.retrieveInteger, 'Update':io.updateInteger, 'Blank':int, 'Init':io.addVfpInteger},
            'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical},
            'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate},
            'T' : {'Type':'DateTime', 'Retrieve':io.retrieveVfpDateTime, 'Update':io.updateVfpDateTime, 'Blank':DateTime.now, 'Init':io.addVfpDateTime},
            'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
            'G' : {'Type':'General', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
            'P' : {'Type':'Picture', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
            '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} }
    _memoext = '.dbt'
    _memotypes = ('G','M','P')
    _memoClass = _VfpMemo
    _yesMemoMask = '\x8b'               # 1000 1011
    _noMemoMask = '\x04'                # 0000 0100
    _fixed_fields = ('B','D','G','I','L','M','P','T','Y')
    _variable_fields = ('C','F','N')
    _character_fields = ('C','M')       # field representing character data
    _decimal_fields = ('F','N')
    _numeric_fields = ('B','F','I','N','Y')
    _supported_tables = ('\x04', '\x8b')
    _dbfTableHeader = ['\x00'] * 32
    _dbfTableHeader[0] = '\x8b'         # version - dBase IV w/memos (0x8b)
    _dbfTableHeader[10] = '\x01'        # record length -- one for delete flag
    _dbfTableHeader[29] = '\x03'        # code page -- 437 US-MS DOS
    _dbfTableHeader = ''.join(_dbfTableHeader)
    _dbfTableHeaderExtra = ''
    _use_deleted = True
    def _checkMemoIntegrity(yo):
        "dBase IV specific"
        if yo._meta.header.version == '\x8b':
            try:
                yo._meta.memo = yo._memoClass(yo._meta)
            except:
                yo._meta.dfd.close()
                yo._meta.dfd = None
                raise
        if not yo._meta.ignorememos:
            for field in yo._meta.fields:
                if yo._meta[field]['type'] in yo._memotypes:
                    if yo._meta.header.version != '\x8b':
                        yo._meta.dfd.close()
                        yo._meta.dfd = None
                        raise DbfError("Table structure corrupt: memo fields exist, header declares no memos")
                    elif not os.path.exists(yo._meta.memoname):
                        yo._meta.dfd.close()
                        yo._meta.dfd = None
                        raise DbfError("Table structure corrupt: memo fields exist without memo file")
                    break
dbf-0.88.16/README0000666000175100017510000000236411150464704012266 0ustar margamarga
sample table & data:

    sample = dbf.table('/temp/sample', "name C(30), age N(3.0), wisdom M")

    record = sample.append()
    record['name'] = 'Ethan'
    record['age'] = 37
    record['wisdom'] = 'Python rules!'

    record = {'name':'Allen', 'age':51, 'wisdom':'code smarter, not harder'}
    sample.append(record)

    sample.append()
    record = sample[-1]
    record.name = 'Alexis'
    record.age = 29
    record.wisdom = 'take a break! refresh the little grey cells!'

retrieving data to store it somewhere else:

    source = dbf.table('/some/path/to/file.dbf')
    for record in source:
        data = record.scatterFields()   # creates dictionary {fieldname:value, fieldname:value, ...}
        data = list(record)             # creates list of values in field order
        # do something with the data

Important notes:

    * When accessing a text field, the returned data does not include trailing blanks...

        for record in sample:
            print '"' + record.name + '"'

      prints:               NOT:

        "Ethan"             "Ethan                         "
        "Allen"             "Allen                         "
        "Alexis"            "Alexis                        "

      keep this in mind when doing comparisons.

Things to do:
    Better documentation.
dbf-0.88.16/setup.py0000666000175100017510000000222411477216464013115 0ustar margamarga
from distutils.core import setup
from glob import glob
import os

html_docs = glob('dbf/html/*')

long_desc="""
Currently supports dBase III, and FoxPro - Visual FoxPro 6 tables.
Text is returned as unicode, and codepage settings in tables are honored.

Documentation needs work, but author is very responsive to e-mails.

Not supported: index files, null fields, auto-incrementing fields.
"""

setup(
    name='dbf',
    version='0.88.16',
    license='BSD License',
    description='Pure python package for reading/writing dBase, FoxPro, and Visual FoxPro .dbf files (including memos)',
    long_description=long_desc,
    url='http://groups.google.com/group/python-dbase',
    py_modules=['test_dbf'],
    packages=['dbf'],
    provides=['dbf'],
    author='Ethan Furman',
    author_email='ethan@stoneleaf.us',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python',
        'Topic :: Database'],
    package_data={'dbf': ['html/*']},
    )
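# A minimal sketch of building/installing from this source tree with the
# standard distutils commands (not part of the original file):
#
#   python setup.py sdist      # build a source distribution
#   python setup.py install    # install the dbf package and the test_dbf module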