Commit fc049e1f authored by Tim Peters

Whitespace normalization.

parent c0914442
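The diff below touches many files but changes almost nothing except whitespace: trailing spaces and tabs are stripped, trailing blank lines are dropped (note the hunks whose line counts shrink, e.g. "-505,4 +505,3"), and the CVS $Id$/$Revision$ keyword lines are bumped as a side effect of the checkin. As a rough illustration of what such a normalization pass does, here is a minimal sketch; this is an assumption for illustration, not the actual tool used for this commit.

    #!/usr/bin/env python
    """Minimal whitespace-normalization sketch (hypothetical, not the tool used here).

    For each file named on the command line:
      * strip trailing spaces/tabs from every line,
      * drop blank lines at the end of the file,
      * make sure the file ends with exactly one newline.
    """
    import sys

    def normalize(text):
        # Strip trailing whitespace from each line.
        lines = [line.rstrip() for line in text.splitlines()]
        # Drop trailing blank lines.
        while lines and not lines[-1]:
            lines.pop()
        # End the file with a single newline.
        return "\n".join(lines) + "\n"

    if __name__ == "__main__":
        for path in sys.argv[1:]:
            with open(path) as f:
                original = f.read()
            cleaned = normalize(original)
            if cleaned != original:
                with open(path, "w") as f:
                    f.write(cleaned)
                print("normalized", path)

Running it over a source tree would produce exactly the kind of whitespace-only hunks shown below.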
@@ -13,7 +13,7 @@
##############################################################################
"""Test ExtensionClass support in Persistence.Persistent
-$Id: test_ExtensionClass.py,v 1.6 2004/02/20 17:19:43 jeremy Exp $
+$Id: test_ExtensionClass.py,v 1.7 2004/04/19 21:19:03 tim_one Exp $
"""
from doctest import DocTestSuite
@@ -505,4 +505,3 @@ def test_pickling_w_slots_w_empty_dict():
def test_suite():
return DocTestSuite()
@@ -25,7 +25,7 @@ register_loop_callback() to register interest. When the mainloop
thread calls loop(), each registered callback will be called with the
socket map as its first argument.
"""
-__version__ = '$Revision: 1.13 $'[11:-2]
+__version__ = '$Revision: 1.14 $'[11:-2]
import asyncore
import select
@@ -168,7 +168,7 @@ def loop(timeout=30.0, use_poll=0, map=None):
while map and exit_status is None:
poll_fun(timeout, map)
_stop_loop()
# This module used to do something evil -- it rebound asyncore.loop to the
# above loop() function. What was evil about this is that if you added some
......
@@ -613,7 +613,7 @@ class InvqTests(CommonSetupTearDown):
self.assertEqual(perstorage.verify_result, "quick verification")
self.assertEqual(perstorage._server._last_invals,
(revid, [(oid, '')]))
self.assertEqual(perstorage.load(oid, ''),
self._storage.load(oid, ''))
perstorage.close()
......
@@ -13,7 +13,7 @@
##############################################################################
"""Handy standard storage machinery
-$Id: BaseStorage.py,v 1.48 2004/04/17 22:19:30 gintautasm Exp $
+$Id: BaseStorage.py,v 1.49 2004/04/19 21:19:05 tim_one Exp $
"""
import cPickle
import threading
@@ -77,7 +77,7 @@ class BaseStorage(UndoLogCompatible):
The other lock appears to protect _oid and _transaction and
perhaps other things. It is always held when load() is called, so
presumably the load() implementation should also acquire the lock.
"""
_transaction=None # Transaction that is being committed
_tstatus=' ' # Transaction status, used for copying data
_is_read_only = False
@@ -400,7 +400,7 @@ class BaseStorage(UndoLogCompatible):
self.tpc_begin(transaction, tid, transaction.status)
for r in transaction:
oid=r.oid
if verbose:
print utils.oid_repr(oid), r.version, len(r.data)
if restoring:
self.restore(oid, r.tid, r.data, r.version,
......
@@ -13,7 +13,7 @@
##############################################################################
"""Database objects
-$Id: DB.py,v 1.76 2004/04/17 23:04:52 gintautasm Exp $"""
+$Id: DB.py,v 1.77 2004/04/19 21:19:05 tim_one Exp $"""
import cPickle, cStringIO, sys
from thread import allocate_lock
@@ -427,7 +427,7 @@ class DB(object):
used the default transaction manager.
- `synch`: boolean indicating whether Connection should
register for afterCompletion() calls.
"""
self._a()
try:
......
@@ -687,4 +687,3 @@ class FileStoragePacker(FileStorageFormatter):
if self._lock_counter % 20 == 0:
self._commit_lock_acquire()
return ipos
@@ -21,7 +21,7 @@ It is meant to illustrate the simplest possible storage.
The Mapping storage uses a single data structure to map object ids to data.
"""
-__version__='$Revision: 1.13 $'[11:-2]
+__version__='$Revision: 1.14 $'[11:-2]
from ZODB import utils
from ZODB import BaseStorage
@@ -76,7 +76,7 @@ class MappingStorage(BaseStorage.BaseStorage):
return s[:8]
finally:
self._lock_release()
def store(self, oid, serial, data, version, transaction):
if transaction is not self._transaction:
......
@@ -13,7 +13,7 @@
##############################################################################
"""Broken object support
-$Id: broken.py,v 1.3 2004/03/04 22:41:52 jim Exp $
+$Id: broken.py,v 1.4 2004/04/19 21:19:05 tim_one Exp $
"""
import sys
@@ -115,7 +115,7 @@ class Broken(object):
def __setstate__(self, state):
self.__dict__['__Broken_state__'] = state
def __repr__(self):
return "<broken %s.%s instance>" % (
self.__class__.__module__, self.__class__.__name__)
@@ -233,7 +233,7 @@ def rebuild(modulename, globalname, *args):
"""
class_ = find_global(modulename, globalname)
return class_.__new__(class_, *args)
class BrokenModified(TypeError):
"""Attempt to modify a broken object
"""
@@ -256,7 +256,7 @@ class PersistentBroken(Broken, persistent.Persistent):
>>> persistentBroken(Atall) is PAtall
True
)
Persistent broken classes work a lot like broken classes::
@@ -315,7 +315,7 @@ class PersistentBroken(Broken, persistent.Persistent):
persistent.Persistent.__setattr__(self, name, value)
else:
raise BrokenModified("Can't change broken objects")
def __repr__(self):
return "<persistent broken %s.%s instance %r>" % (
self.__class__.__module__, self.__class__.__name__,
......
@@ -13,7 +13,7 @@
##############################################################################
"""Interfaces for ZODB.
-$Id: interfaces.py,v 1.3 2004/02/24 13:51:03 srichter Exp $
+$Id: interfaces.py,v 1.4 2004/04/19 21:19:05 tim_one Exp $
"""
try:
@@ -88,7 +88,7 @@ class IDataManager(Interface):
flag set to false.
"""
def tpc_abort(transaction):
"""Abort a transaction.
......
@@ -269,7 +269,7 @@ class BaseObjectWriter:
# It's possible that __getnewargs__ is degenerate and
# returns (), but we don't want to have to deghostify
# the object to find out.
return oid
return oid, klass
@@ -527,7 +527,7 @@ def referencesf(p, rootl=None):
# tuples, so that we wrap oids that are lists or tuples in
# tuples.
#
# - oids may *not* be False.  I'm not sure why.
out = []
for v in rootl:
......
@@ -281,9 +281,9 @@ class PackableStorage(PackableStorageBase):
packt = time.time()
for dummy in choices:
for i in choices:
root[i].value = MinPO(i)
transaction.commit()
NUM_LOOP_TRIP = 100
timer = ElapsedTimer(time.time())
......
@@ -13,7 +13,7 @@
##############################################################################
"""Sample objects for use in tests
-$Id: sampledm.py,v 1.2 2004/02/19 02:59:10 jeremy Exp $
+$Id: sampledm.py,v 1.3 2004/04/19 21:19:07 tim_one Exp $
"""
class DataManager(object):
@@ -78,7 +78,7 @@ class DataManager(object):
>>> dm.commit(t1)
Our changes are"permanent". The state reflects the changes and the
delta has been reset to 0.
>>> dm.state
1
@@ -139,7 +139,7 @@ class DataManager(object):
TypeError: ('Transaction missmatch', '2', '1')
>>> dm.prepare(t1)
"""
if self.prepared:
raise TypeError('Already prepared')
@@ -183,7 +183,7 @@ class DataManager(object):
If savepoints are used, abort must be passed the same
transaction:
>>> dm.inc()
>>> r = dm.savepoint(t1)
>>> t2 = '2'
@@ -208,15 +208,15 @@ class DataManager(object):
Of course, the transactions passed to prepare and abort must
match:
>>> dm.prepare(t1)
>>> dm.abort(t2)
Traceback (most recent call last):
...
TypeError: ('Transaction missmatch', '2', '1')
>>> dm.abort(t1)
"""
self._checkTransaction(transaction)
@@ -262,7 +262,7 @@ class DataManager(object):
If course, the transactions given to prepare and commit must
be the same:
>>> dm.inc()
>>> t3 = '3'
>>> dm.prepare(t3)
@@ -270,7 +270,7 @@ class DataManager(object):
Traceback (most recent call last):
...
TypeError: ('Transaction missmatch', '2', '3')
"""
if not self.prepared:
raise TypeError('Not prepared to commit')
......
@@ -13,7 +13,7 @@
##############################################################################
"""Test broken-object suppport
-$Id: testBroken.py,v 1.4 2004/04/16 15:58:11 jeremy Exp $
+$Id: testBroken.py,v 1.5 2004/04/19 21:19:07 tim_one Exp $
"""
import sys
@@ -29,7 +29,7 @@ def test_integration():
>>> db = DB()
We'll create a fake module with a class:
>>> class NotThere:
...     Atall = type('Atall', (persistent.Persistent, ),
...                  {'__module__': 'ZODB.not.there'})
@@ -70,7 +70,7 @@ def test_integration():
>>> a3.__Broken_state__
{'x': 1}
Let's clean up:
>>> db.close()
......
@@ -178,7 +178,7 @@ class LRUCacheTests(CacheTestBase):
CONNS = 3
for i in range(CONNS):
self.noodle_new_connection()
self.assertEquals(self.db.cacheSize(), CACHE_SIZE * CONNS)
details = self.db.cacheDetailSize()
self.assertEquals(len(details), CONNS)
@@ -189,7 +189,7 @@ class LRUCacheTests(CacheTestBase):
# The (poorly named) cache size is a target for non-ghosts.
# The cache *usually* contains non-ghosts, so that the
# size normally exceeds the target size.
#self.assertEquals(d['size'], CACHE_SIZE)
def checkDetail(self):
@@ -211,7 +211,7 @@ class LRUCacheTests(CacheTestBase):
# deactivated before the MinPO objects.
#
# - Without the gc call, the cache will contain ghost MinPOs
#   and the check of the MinPO count below will fail.  That's
#   because the counts returned by cacheDetail include ghosts.
#
# - If the mapping object containing the MinPOs isn't
@@ -219,7 +219,7 @@ class LRUCacheTests(CacheTestBase):
#   the test will fail anyway.
#
# This test really needs to be thought through and documented
# better.
for klass, count in self.db.cacheDetail():
......
@@ -73,7 +73,7 @@ class CacheTests:
After committing a transaction and calling cacheGC(), there
should be cache-size (4) objects in the cache.  One of the
RegularObjects was deactivated.
>>> cn._cache.ringlen()
4
>>> RegularObject.deactivations
@@ -81,7 +81,7 @@ class CacheTests:
If we explicitly activate the objects again, the ringlen
should go back up to 5.
>>> for o in L:
...     o._p_activate()
>>> cn._cache.ringlen()
@@ -92,7 +92,7 @@ class CacheTests:
4
>>> RegularObject.deactivations
2
>>> cn.cacheMinimize()
>>> cn._cache.ringlen()
0
@@ -102,10 +102,10 @@ class CacheTests:
If we activate all the objects again and mark one as modified,
then the one object should not be deactivated even by a
minimize.
>>> for o in L:
...     o._p_activate()
>>> o.attr = 1
>>> cn._cache.ringlen()
5
>>> cn.cacheMinimize()
@@ -113,7 +113,7 @@ class CacheTests:
1
>>> RegularObject.deactivations
10
"""
def test_cache_gc_recalcitrant(self):
@@ -181,7 +181,7 @@ class CacheTests:
Modify three of the objects and verify that they are
deactivated when the transaction aborts.
>>> for i in range(0, 5, 2):
...     L[i].attr = i
>>> [L[i]._p_state for i in range(0, 5, 2)]
......
@@ -13,7 +13,7 @@
##############################################################################
"""Conventience function for creating test databases
-$Id: util.py,v 1.4 2004/04/16 15:58:11 jeremy Exp $
+$Id: util.py,v 1.5 2004/04/19 21:19:07 tim_one Exp $
"""
import time
@@ -36,7 +36,7 @@ def pack(db):
db.pack(time.time()+1)
class P(persistent.Persistent):
def __init__(self, name):
self.name = name
......
@@ -55,4 +55,3 @@ class WarningsHook:
def clear(self):
self.warnings = []
@@ -83,7 +83,7 @@ class DataRecordConvertingTxn(object):
skipped += 1
continue
sio = StringIO()
p = Pickler(sio, 1)
p.persistent_id = get_persistent_id
@@ -118,7 +118,7 @@ them on zope3-dev.
In any case, keep your original data file in case you decide to rerun
the conversion.
"""
......
@@ -55,7 +55,7 @@ class ConversionApp:
self.parse_args(args)
def run(self):
# Load server-independent site config
from zope.configuration import xmlconfig
context = xmlconfig.file('site.zcml', execute=True)
......
@@ -60,4 +60,3 @@ class CorruptedDataError(CorruptedError):
class FileStorageQuotaError(FileStorageError, StorageSystemError):
"""File storage quota exceeded."""
@@ -26,7 +26,7 @@ the public APIs of the database.
The IDatabase, IConnection, and ITransactionAttrs interfaces describe
private APIs used by the implementation.
-$Id: z4interfaces.py,v 1.2 2004/02/20 19:01:07 jeremy Exp $
+$Id: z4interfaces.py,v 1.3 2004/04/19 21:19:09 tim_one Exp $
"""
from ZODB.zodb4 import z4utils
@@ -238,7 +238,7 @@ class POSKeyError(KeyError, POSError):
##     Attributes:
##     obj is the invalid object
##     jar is the manager that attempted to store it.
##     obj._p_jar != jar
##     """
@@ -248,7 +248,7 @@ class POSKeyError(KeyError, POSError):
##     def __str__(self):
##         return "Invalid reference to object %s." % _fmt_oid(self.obj._p_jar)
##class IAppDatabase(Interface):
##    """Interface exported by database to applications.
......
@@ -13,7 +13,7 @@
##############################################################################
"""Persistence Interfaces
-$Id: interfaces.py,v 1.3 2004/02/24 13:54:05 srichter Exp $
+$Id: interfaces.py,v 1.4 2004/04/19 21:19:09 tim_one Exp $
"""
try:
from zope.interface import Interface
@@ -286,7 +286,7 @@ class ICache(Interface):
unreferenced objects in memory.  We assume that there is a good
chance the object will be used again soon, so keeping it memory
avoids the cost of recreating the object.
An ICache implementation is intended for use by an
IPersistentDataManager.
"""
@@ -307,7 +307,7 @@ class ICache(Interface):
"""Make all of the objects in oids ghosts.
`oids` is an iterable object that yields oids.
The cache must attempt to change each object to a ghost by
calling _p_deactivate().
@@ -330,7 +330,7 @@ class ICache(Interface):
def statistics():
"""Return dictionary of statistics about cache size.
Contains at least the following keys:
active -- number of active objects
ghosts -- number of ghost objects
......
@@ -351,7 +351,7 @@ class PersistentTest(Test):
# verify that the inc is reflected:
self.assertEqual(p2.x, p.x)
# This assertion would be invalid.  Interfaces
# are compared by identity and copying doesn't
# preserve identity.  We would get false negatives due
@@ -370,4 +370,3 @@ class PersistentTest(Test):
class BasePersistentTest(Test):
klass = B
has_dict = 0
@@ -16,7 +16,7 @@
This module tests and documents, through example, overriding attribute
access methods.
-$Id: test_overriding_attrs.py,v 1.6 2004/04/16 15:58:10 jeremy Exp $
+$Id: test_overriding_attrs.py,v 1.7 2004/04/19 21:19:10 tim_one Exp $
"""
from persistent import Persistent
@@ -26,7 +26,7 @@ from ZODB.tests.util import DB
class SampleOverridingGetattr(Persistent):
"""Example of overriding __getattr__
"""
def __getattr__(self, name):
"""Get attributes that can't be gotten the usual way
@@ -59,7 +59,7 @@ class SampleOverridingGetattr(Persistent):
>>> o._p_deactivate()
>>> o._p_changed
And now, if we ask for an attribute it doesn't have,
>>> o.eggs
('EGGS', False)
@@ -87,7 +87,7 @@ class SampleOverridingGetattributeSetattrAndDelattr(Persistent):
The class will have the policy that variables with names starting
with 'tmp_' will be volatile.
"""
def __init__(self, **kw):
@@ -107,7 +107,7 @@ class SampleOverridingGetattributeSetattrAndDelattr(Persistent):
>>> o._p_changed
0
>>> o._p_oid
>>> o._p_jar
>>> o.x
1
>>> o.y
@@ -116,7 +116,7 @@ class SampleOverridingGetattributeSetattrAndDelattr(Persistent):
AttributeError: y
Next, we'll save the object in a database so that we can
deactivate it:
>>> db = DB()
>>> conn = db.open()
@@ -136,10 +136,10 @@ class SampleOverridingGetattributeSetattrAndDelattr(Persistent):
0
It works for missing attribes too:
>>> o._p_deactivate()
>>> o._p_changed
>>> o.y
Traceback (most recent call last):
...
@@ -179,9 +179,9 @@ class SampleOverridingGetattributeSetattrAndDelattr(Persistent):
meth = getattr(self.__class__, name, None)
if meth is None:
raise AttributeError, name
return meth.__get__(self, self.__class__)
def __setattr__(self, name, value):
"""Set an attribute value
@@ -219,9 +219,9 @@ class SampleOverridingGetattributeSetattrAndDelattr(Persistent):
>>> 'x' in o.__dict__
False
Next, we'll save the object in a database so that we can
deactivate it:
>>> db = DB()
>>> conn = db.open()
@@ -243,7 +243,7 @@ class SampleOverridingGetattributeSetattrAndDelattr(Persistent):
1
Now, if commit:
>>> transaction.commit()
>>> o._p_changed
0
@@ -263,7 +263,7 @@ class SampleOverridingGetattributeSetattrAndDelattr(Persistent):
0
>>> o.tmp_foo
3
We always close databases after we use them:
>>> db.close()
@@ -288,7 +288,7 @@ class SampleOverridingGetattributeSetattrAndDelattr(Persistent):
if not name.startswith('tmp_'):
self._p_changed = 1
def __delattr__(self, name):
"""Delete an attribute value
@@ -321,7 +321,7 @@ class SampleOverridingGetattributeSetattrAndDelattr(Persistent):
AttributeError: x
Next, we'll save the object in a database so that we can
deactivate it:
>>> db = DB()
>>> conn = db.open()
@@ -348,7 +348,7 @@ class SampleOverridingGetattributeSetattrAndDelattr(Persistent):
3
Now, if commit:
>>> transaction.commit()
>>> o._p_changed
0
@@ -370,7 +370,7 @@ class SampleOverridingGetattributeSetattrAndDelattr(Persistent):
Traceback (most recent call last):
...
AttributeError: tmp_z
We always close databases after we use them:
>>> db.close()
@@ -392,10 +392,10 @@ class SampleOverridingGetattributeSetattrAndDelattr(Persistent):
return
del self.__dict__['__secret__'][name]
if not name.startswith('tmp_'):
self._p_changed = 1
def test_suite():
from doctest import DocTestSuite
......
@@ -13,7 +13,7 @@
##############################################################################
"""Basic pickling tests
-$Id: test_pickle.py,v 1.5 2004/02/19 18:13:34 jeremy Exp $
+$Id: test_pickle.py,v 1.6 2004/04/19 21:19:10 tim_one Exp $
"""
from persistent import Persistent
@@ -70,7 +70,7 @@ def test_basic_pickling():
1
>>> pickle.loads(pickle.dumps(x, 1)) == x
1
XXX disable until Python 2.3.4 >>> pickle.loads(pickle.dumps(x, 2)) == x
1
@@ -160,7 +160,7 @@ def test_pickling_w_slots_only():
1
>>> pickle.loads(pickle.dumps(x, 1)) == x
1
XXX disable until Python 2.3.4 >>> pickle.loads(pickle.dumps(x, 2)) == x
1
......
@@ -13,7 +13,7 @@
##############################################################################
"""ZODB-based persistent weakrefs
-$Id: wref.py,v 1.2 2004/02/19 02:59:30 jeremy Exp $
+$Id: wref.py,v 1.3 2004/04/19 21:19:09 tim_one Exp $
"""
from persistent import Persistent
@@ -46,7 +46,7 @@ class WeakRef(object):
>>> WeakRef(ob) == ref
True
>>> ob2 = persistent.list.PersistentList([1])
>>> WeakRef(ob2) == ref
False
@@ -54,7 +54,7 @@ class WeakRef(object):
Lets save the reference and the referenced object in a database:
>>> db = ZODB.tests.util.DB()
>>> conn1 = db.open()
>>> conn1.root()['ob'] = ob
>>> conn1.root()['ref'] = ref
@@ -83,9 +83,9 @@ class WeakRef(object):
KeyError: 'ob'
Trying to dereference the reference returns None:
>>> conn3.root()['ref']()
Trying to get a hash, raises a type error:
>>> hash(conn3.root()['ref'])
@@ -94,7 +94,7 @@ class WeakRef(object):
TypeError: Weakly-referenced object has gone away
Always explicitly close databases: :)
>>> db.close()
"""
@@ -133,8 +133,8 @@ class WeakRef(object):
raise TypeError('Weakly-referenced object has gone away')
return self == other
class PersistentWeakKeyDictionary(Persistent):
"""Persistent weak key dictionary
@@ -170,9 +170,9 @@ class PersistentWeakKeyDictionary(Persistent):
[True, True, True, False]
We can add the dict and the referenced objects to a database:
>>> db = ZODB.tests.util.DB()
>>> conn1 = db.open()
>>> conn1.root()['p1'] = p1
>>> conn1.root()['d'] = d
@@ -217,7 +217,7 @@ class PersistentWeakKeyDictionary(Persistent):
Now if we access the dictionary in a new connection, it no longer
has p2:
>>> conn3 = db.open()
>>> d = conn3.root()['d']
>>> l = [(str(k), d[k], d.get(k)) for k in d]
@@ -227,17 +227,17 @@ class PersistentWeakKeyDictionary(Persistent):
It's worth nothing that that the versions of the dictionary in
conn1 and conn2 still have p2, because p2 is still in the caches
for those connections.
Always explicitly close databases: :)
>>> db.close()
"""
# XXX it is expensive trying to load dead objects from the database.
# It would be helpful if the data manager/connection cached these.
def __init__(self, adict=None, **kwargs):
self.data = {}
if adict is not None:
@@ -259,13 +259,13 @@ class PersistentWeakKeyDictionary(Persistent):
if k() is not None
])
Persistent.__setstate__(self, state)
def __setitem__(self, key, value):
self.data[WeakRef(key)] = value
def __getitem__(self, key):
return self.data[WeakRef(key)]
def __delitem__(self, key):
del self.data[WeakRef(key)]
@@ -286,7 +286,7 @@ class PersistentWeakKeyDictionary(Persistent):
def __contains__(self, key):
return WeakRef(key) in self.data
def __iter__(self):
for k in self.data:
yield k()
@@ -297,6 +297,5 @@ class PersistentWeakKeyDictionary(Persistent):
else:
for k, v in adict.items():
self.data[WeakRef(k)] = v
# XXX Someone else can fill out the rest of the methods, with tests. :)
@@ -91,4 +91,3 @@ class ThreadTransactionManager(object):
tid = thread.get_ident()
L = self._synchs.get(tid)
L.remove(synch)
@@ -13,7 +13,7 @@
##############################################################################
"""Transaction Interfaces
-$Id: interfaces.py,v 1.7 2004/02/24 13:52:05 srichter Exp $
+$Id: interfaces.py,v 1.8 2004/04/19 21:19:10 tim_one Exp $
"""
try:
from zope.interface import Interface
@@ -38,7 +38,7 @@ class IDataManager(Interface):
two-phase commit.
- The savepoint api may need some more thought.
"""
def prepare(transaction):
@@ -54,7 +54,7 @@ class IDataManager(Interface):
The transaction must match that used for preceeding
savepoints, if any.
"""
# This is equivalent to zodb3's tpc_begin, commit, and
# tpc_vote combined.
@@ -77,7 +77,7 @@ class IDataManager(Interface):
The prepare method must be called, with the same transaction,
before calling commit.
"""
# This is equivalent to zodb3's tpc_finish
@@ -87,7 +87,7 @@ class IDataManager(Interface):
Should return an object implementing IRollback that can be used
to rollback to the savepoint.
Note that (unlike zodb3) this doesn't use a 2-phase commit
protocol.  If this call fails, or if a rollback call on the
result fails, the (containing) transaction should be
......
@@ -13,7 +13,7 @@
##############################################################################
"""Sample objects for use in tests
-$Id: test_SampleDataManager.py,v 1.2 2004/02/20 16:56:57 fdrake Exp $
+$Id: test_SampleDataManager.py,v 1.3 2004/04/19 21:19:11 tim_one Exp $
"""
class DataManager(object):
@@ -78,7 +78,7 @@ class DataManager(object):
>>> dm.commit(t1)
Our changes are"permanent". The state reflects the changes and the
delta has been reset to 0.
>>> dm.state
1
@@ -139,7 +139,7 @@ class DataManager(object):
TypeError: ('Transaction missmatch', '2', '1')
>>> dm.prepare(t1)
"""
if self.prepared:
raise TypeError('Already prepared')
@@ -183,7 +183,7 @@ class DataManager(object):
If savepoints are used, abort must be passed the same
transaction:
>>> dm.inc()
>>> r = dm.savepoint(t1)
>>> t2 = '2'
@@ -208,15 +208,15 @@ class DataManager(object):
Of course, the transactions passed to prepare and abort must
match:
>>> dm.prepare(t1)
>>> dm.abort(t2)
Traceback (most recent call last):
...
TypeError: ('Transaction missmatch', '2', '1')
>>> dm.abort(t1)
"""
self._checkTransaction(transaction)
@@ -262,7 +262,7 @@ class DataManager(object):
If course, the transactions given to prepare and commit must
be the same:
>>> dm.inc()
>>> t3 = '3'
>>> dm.prepare(t3)
@@ -270,7 +270,7 @@ class DataManager(object):
Traceback (most recent call last):
...
TypeError: ('Transaction missmatch', '2', '3')
"""
if not self.prepared:
raise TypeError('Not prepared to commit')
......
@@ -13,7 +13,7 @@
##############################################################################
"""Utility classes or functions
-$Id: util.py,v 1.2 2004/02/20 16:56:56 fdrake Exp $
+$Id: util.py,v 1.3 2004/04/19 21:19:10 tim_one Exp $
"""
from transaction.interfaces import IRollback
@@ -37,7 +37,7 @@ class NoSavepointSupportRollback:
NotImplementedError: """ \
"""DataManager data managers do not support """ \
"""savepoints (aka subtransactions
"""
implements(IRollback)
......
@@ -738,7 +738,7 @@ def process_args(argv=None):
elif k == "-b":
build = True
elif k == "-B":
build = build_inplace = True
elif k == "-c":
# make sure you have a recent version of pychecker
if not os.environ.get("PYCHECKER"):
......