Commit 255e256e authored by Jason Madden's avatar Jason Madden

All the unit tests pass under PyPy. (The functional tests still have some resource issues.)

The changes were mostly in the Persistent module. Here, the changes were minimal:

- Introduce zodbpickle for PyPy and Python 2.7 to fix noload.

- Centralize the construction of Picklers/Unpicklers to account for the differences between Python2/3/zodbpickle.

- A few extra gc.collect() calls.

- Some minor printing differences in the doctests due to the Python implementation of BTrees.
parent f865508e
language: python language: python
sudo: false sudo: false
python: python:
- pypy
- 2.6 - 2.6
- 2.7 - 2.7
- 3.2 - 3.2
......
...@@ -2,12 +2,16 @@ ...@@ -2,12 +2,16 @@
Change History Change History
================ ================
4.1.1 (unreleased) 4.2.0 (unreleased)
================== ==================
- Fix command-line parsing of --verbose and --verify arguments. - Fix command-line parsing of --verbose and --verify arguments.
(The short versions -v and -V were parsed correctly.) (The short versions -v and -V were parsed correctly.)
- Add support for PyPy, and fix the methods in `ZODB.serialize` that
find object references under Python 2.7. This requires the addition
of the `zodbpickle` dependency.
4.1.0 (2015-01-11) 4.1.0 (2015-01-11)
================== ==================
......
...@@ -20,7 +20,7 @@ to application logic. ZODB includes features such as a pluggable storage ...@@ -20,7 +20,7 @@ to application logic. ZODB includes features such as a pluggable storage
interface, rich transaction support, and undo. interface, rich transaction support, and undo.
""" """
VERSION = "4.1.0" VERSION = "4.2.0.dev0"
import os import os
import platform import platform
...@@ -59,6 +59,7 @@ Programming Language :: Python :: 3.2 ...@@ -59,6 +59,7 @@ Programming Language :: Python :: 3.2
Programming Language :: Python :: 3.3 Programming Language :: Python :: 3.3
Programming Language :: Python :: 3.4 Programming Language :: Python :: 3.4
Programming Language :: Python :: Implementation :: CPython Programming Language :: Python :: Implementation :: CPython
Programming Language :: Python :: Implementation :: PyPy
Topic :: Database Topic :: Database
Topic :: Software Development :: Libraries :: Python Modules Topic :: Software Development :: Libraries :: Python Modules
Operating System :: Microsoft :: Windows Operating System :: Microsoft :: Windows
...@@ -158,7 +159,7 @@ setup(name="ZODB", ...@@ -158,7 +159,7 @@ setup(name="ZODB",
tests_require = tests_require, tests_require = tests_require,
extras_require = dict(test=tests_require), extras_require = dict(test=tests_require),
install_requires = [ install_requires = [
'persistent', 'persistent', # XXX: When new persistent release is out need to add version number for PyPy
'BTrees >= 4.1.2', 'BTrees >= 4.1.2',
'ZConfig', 'ZConfig',
'transaction >= 1.4.1' if PY3 else 'transaction', 'transaction >= 1.4.1' if PY3 else 'transaction',
......
...@@ -13,13 +13,12 @@ ...@@ -13,13 +13,12 @@
############################################################################## ##############################################################################
import logging import logging
import sys
import six import six
import zope.interface import zope.interface
from ZODB.POSException import ConflictError from ZODB.POSException import ConflictError
from ZODB.loglevels import BLATHER from ZODB.loglevels import BLATHER
from ZODB._compat import BytesIO, Unpickler, Pickler, _protocol from ZODB._compat import BytesIO, PersistentUnpickler, PersistentPickler, _protocol
# Subtle: Python 2.x has pickle.PicklingError and cPickle.PicklingError, # Subtle: Python 2.x has pickle.PicklingError and cPickle.PicklingError,
# and these are unrelated classes! So we shouldn't use pickle.PicklingError, # and these are unrelated classes! So we shouldn't use pickle.PicklingError,
...@@ -74,9 +73,7 @@ def state(self, oid, serial, prfactory, p=''): ...@@ -74,9 +73,7 @@ def state(self, oid, serial, prfactory, p=''):
p = p or self.loadSerial(oid, serial) p = p or self.loadSerial(oid, serial)
p = self._crs_untransform_record_data(p) p = self._crs_untransform_record_data(p)
file = BytesIO(p) file = BytesIO(p)
unpickler = Unpickler(file) unpickler = PersistentUnpickler(find_global, prfactory.persistent_load, file)
unpickler.find_global = find_global
unpickler.persistent_load = prfactory.persistent_load
unpickler.load() # skip the class tuple unpickler.load() # skip the class tuple
return unpickler.load() return unpickler.load()
...@@ -243,9 +240,7 @@ def tryToResolveConflict(self, oid, committedSerial, oldSerial, newpickle, ...@@ -243,9 +240,7 @@ def tryToResolveConflict(self, oid, committedSerial, oldSerial, newpickle,
prfactory = PersistentReferenceFactory() prfactory = PersistentReferenceFactory()
newpickle = self._crs_untransform_record_data(newpickle) newpickle = self._crs_untransform_record_data(newpickle)
file = BytesIO(newpickle) file = BytesIO(newpickle)
unpickler = Unpickler(file) unpickler = PersistentUnpickler(find_global, prfactory.persistent_load, file)
unpickler.find_global = find_global
unpickler.persistent_load = prfactory.persistent_load
meta = unpickler.load() meta = unpickler.load()
if isinstance(meta, tuple): if isinstance(meta, tuple):
klass = meta[0] klass = meta[0]
...@@ -286,11 +281,7 @@ def tryToResolveConflict(self, oid, committedSerial, oldSerial, newpickle, ...@@ -286,11 +281,7 @@ def tryToResolveConflict(self, oid, committedSerial, oldSerial, newpickle,
resolved = resolve(old, committed, newstate) resolved = resolve(old, committed, newstate)
file = BytesIO() file = BytesIO()
pickler = Pickler(file, _protocol) pickler = PersistentPickler(persistent_id, file, _protocol)
if sys.version_info[0] < 3:
pickler.inst_persistent_id = persistent_id
else:
pickler.persistent_id = persistent_id
pickler.dump(meta) pickler.dump(meta)
pickler.dump(resolved) pickler.dump(resolved)
return self._crs_transform_record_data(file.getvalue()) return self._crs_transform_record_data(file.getvalue())
......
...@@ -439,7 +439,6 @@ class Connection(ExportImport, object): ...@@ -439,7 +439,6 @@ class Connection(ExportImport, object):
# the savepoint, then they won't have _p_oid or _p_jar after # the savepoint, then they won't have _p_oid or _p_jar after
# they've been unadded. This will make the code in _abort # they've been unadded. This will make the code in _abort
# confused. # confused.
self._abort() self._abort()
if self._savepoint_storage is not None: if self._savepoint_storage is not None:
...@@ -463,7 +462,6 @@ class Connection(ExportImport, object): ...@@ -463,7 +462,6 @@ class Connection(ExportImport, object):
if obj._p_changed: if obj._p_changed:
obj._p_changed = False obj._p_changed = False
else: else:
# Note: If we invalidate a non-ghostifiable object # Note: If we invalidate a non-ghostifiable object
# (i.e. a persistent class), the object will # (i.e. a persistent class), the object will
# immediately reread its state. That means that the # immediately reread its state. That means that the
......
...@@ -375,12 +375,12 @@ Now, we create a demostorage. ...@@ -375,12 +375,12 @@ Now, we create a demostorage.
If we ask for an oid, we'll get 1042. If we ask for an oid, we'll get 1042.
>>> u64(storage.new_oid()) >>> print(u64(storage.new_oid()))
1042 1042
oids are allocated sequentially: oids are allocated sequentially:
>>> u64(storage.new_oid()) >>> print(u64(storage.new_oid()))
1043 1043
Now, we'll save 1044 in changes so that it has to pick a new one randomly. Now, we'll save 1044 in changes so that it has to pick a new one randomly.
...@@ -388,7 +388,7 @@ Now, we'll save 1044 in changes so that it has to pick a new one randomly. ...@@ -388,7 +388,7 @@ Now, we'll save 1044 in changes so that it has to pick a new one randomly.
>>> t = transaction.get() >>> t = transaction.get()
>>> ZODB.tests.util.store(storage.changes, 1044) >>> ZODB.tests.util.store(storage.changes, 1044)
>>> u64(storage.new_oid()) >>> print(u64(storage.new_oid()))
called randint called randint
2042 2042
...@@ -400,7 +400,7 @@ to force another attempt: ...@@ -400,7 +400,7 @@ to force another attempt:
>>> oid = storage.new_oid() >>> oid = storage.new_oid()
called randint called randint
called randint called randint
>>> u64(oid) >>> print(u64(oid))
3042 3042
DemoStorage keeps up with the issued OIDs to know when not to reissue them... DemoStorage keeps up with the issued OIDs to know when not to reissue them...
...@@ -426,4 +426,3 @@ DemoStorage keeps up with the issued OIDs to know when not to reissue them... ...@@ -426,4 +426,3 @@ DemoStorage keeps up with the issued OIDs to know when not to reissue them...
.. restore time .. restore time
>>> time.time = real_time_time >>> time.time = real_time_time
...@@ -15,7 +15,6 @@ ...@@ -15,7 +15,6 @@
import logging import logging
import os import os
import sys
from tempfile import TemporaryFile from tempfile import TemporaryFile
import six import six
...@@ -25,7 +24,7 @@ from ZODB.interfaces import IBlobStorage ...@@ -25,7 +24,7 @@ from ZODB.interfaces import IBlobStorage
from ZODB.POSException import ExportError from ZODB.POSException import ExportError
from ZODB.serialize import referencesf from ZODB.serialize import referencesf
from ZODB.utils import p64, u64, cp, mktemp from ZODB.utils import p64, u64, cp, mktemp
from ZODB._compat import Pickler, Unpickler, BytesIO, _protocol from ZODB._compat import PersistentPickler, Unpickler, BytesIO, _protocol
logger = logging.getLogger('ZODB.ExportImport') logger = logging.getLogger('ZODB.ExportImport')
...@@ -178,11 +177,7 @@ class ExportImport: ...@@ -178,11 +177,7 @@ class ExportImport:
unpickler.persistent_load = persistent_load unpickler.persistent_load = persistent_load
newp = BytesIO() newp = BytesIO()
pickler = Pickler(newp, _protocol) pickler = PersistentPickler(persistent_id, newp, _protocol)
if sys.version_info[0] < 3:
pickler.inst_persistent_id = persistent_id
else:
pickler.persistent_id = persistent_id
pickler.dump(unpickler.load()) pickler.dump(unpickler.load())
pickler.dump(unpickler.load()) pickler.dump(unpickler.load())
......
...@@ -16,7 +16,7 @@ import sys ...@@ -16,7 +16,7 @@ import sys
try: try:
# Python 2.x # Python 2.x
import cPickle import cPickle
if not hasattr(cPickle.Unpickler, 'noload') or sys.version_info >= (2,7): if (hasattr(cPickle.Unpickler, 'load') and not hasattr(cPickle.Unpickler, 'noload')) or sys.version_info >= (2,7):
# PyPy doesn't have noload, and noload is broken in Python 2.7. # PyPy doesn't have noload, and noload is broken in Python 2.7.
# Get the fastest version we can (PyPy has no fastpickle) # Get the fastest version we can (PyPy has no fastpickle)
try: try:
...@@ -70,8 +70,45 @@ except ImportError: ...@@ -70,8 +70,45 @@ except ImportError:
FILESTORAGE_MAGIC = b"FS30" FILESTORAGE_MAGIC = b"FS30"
# XXX: consistent spelling of inst_persistent_id/persistent_id? def PersistentPickler(persistent_id, *args, **kwargs):
# e.g. StorageTestBase and probably elsewhere """
Returns a :class:`Pickler` that will use the given ``persistent_id``
to get persistent IDs. The remainder of the arguments are passed to the
Pickler itself.
This covers the differences between Python 2 and 3 and PyPy/zodbpickle.
"""
p = Pickler(*args, **kwargs)
if sys.version_info[0] < 3:
p.inst_persistent_id = persistent_id
# PyPy uses a python implementation of cPickle in both Python 2
# and Python 3. We can't really detect inst_persistent_id as its
# a magic attribute that's not readable, but it doesn't hurt to
# simply always assign to persistent_id also
p.persistent_id = persistent_id
else:
p.persistent_id = persistent_id
return p
def PersistentUnpickler(find_global, load_persistent, *args, **kwargs):
"""
Returns a :class:`Unpickler` that will use the given `find_global` function
to locate classes, and the given `load_persistent` function to load
objects from a persistent id.
This covers the differences between Python 2 and 3 and PyPy/zodbpickle.
"""
unpickler = Unpickler(*args, **kwargs)
if find_global is not None:
unpickler.find_global = find_global
try:
unpickler.find_class = find_global # PyPy, zodbpickle, the non-c-accelerated version
except AttributeError:
pass
if load_persistent is not None:
unpickler.persistent_load = load_persistent
return unpickler
try: try:
......
...@@ -32,7 +32,7 @@ from ZODB.interfaces import BlobError ...@@ -32,7 +32,7 @@ from ZODB.interfaces import BlobError
from ZODB import utils from ZODB import utils
from ZODB.POSException import POSKeyError from ZODB.POSException import POSKeyError
from ZODB._compat import BytesIO from ZODB._compat import BytesIO
from ZODB._compat import Unpickler from ZODB._compat import PersistentUnpickler
from ZODB._compat import decodebytes from ZODB._compat import decodebytes
from ZODB._compat import ascii_bytes from ZODB._compat import ascii_bytes
from ZODB._compat import INT_TYPES from ZODB._compat import INT_TYPES
...@@ -937,8 +937,7 @@ def is_blob_record(record): ...@@ -937,8 +937,7 @@ def is_blob_record(record):
""" """
if record and (b'ZODB.blob' in record): if record and (b'ZODB.blob' in record):
unpickler = Unpickler(BytesIO(record)) unpickler = PersistentUnpickler(find_global_Blob, None, BytesIO(record))
unpickler.find_global = find_global_Blob
try: try:
return unpickler.load() is Blob return unpickler.load() is Blob
......
...@@ -134,13 +134,12 @@ A number of legacyforms are defined: ...@@ -134,13 +134,12 @@ A number of legacyforms are defined:
""" """
import logging import logging
import sys
from persistent import Persistent from persistent import Persistent
from persistent.wref import WeakRefMarker, WeakRef from persistent.wref import WeakRefMarker, WeakRef
from ZODB import broken from ZODB import broken
from ZODB.POSException import InvalidObjectReference from ZODB.POSException import InvalidObjectReference
from ZODB._compat import Pickler, Unpickler, BytesIO, _protocol from ZODB._compat import PersistentPickler, PersistentUnpickler, BytesIO, _protocol
_oidtypes = bytes, type(None) _oidtypes = bytes, type(None)
...@@ -172,16 +171,7 @@ class ObjectWriter: ...@@ -172,16 +171,7 @@ class ObjectWriter:
def __init__(self, obj=None): def __init__(self, obj=None):
self._file = BytesIO() self._file = BytesIO()
self._p = Pickler(self._file, _protocol) self._p = PersistentPickler(self.persistent_id, self._file, _protocol)
if sys.version_info[0] < 3:
self._p.inst_persistent_id = self.persistent_id
# PyPy uses a python implementation of cPickle in both Python 2
# and Python 3. We can't really detect inst_persistent_id as its
# a magic attribute that's not readable, but it doesn't hurt to
# simply always assign to persistent_id also
self._p.persistent_id = self.persistent_id
else:
self._p.persistent_id = self.persistent_id
self._stack = [] self._stack = []
if obj is not None: if obj is not None:
self._stack.append(obj) self._stack.append(obj)
...@@ -474,19 +464,13 @@ class ObjectReader: ...@@ -474,19 +464,13 @@ class ObjectReader:
def _get_unpickler(self, pickle): def _get_unpickler(self, pickle):
file = BytesIO(pickle) file = BytesIO(pickle)
unpickler = Unpickler(file)
unpickler.persistent_load = self._persistent_load
factory = self._factory factory = self._factory
conn = self._conn conn = self._conn
def find_global(modulename, name): def find_global(modulename, name):
return factory(conn, modulename, name) return factory(conn, modulename, name)
unpickler = PersistentUnpickler(find_global, self._persistent_load, file)
unpickler.find_global = find_global
try:
unpickler.find_class = find_global # PyPy, zodbpickle, the non-c-accelerated version
except AttributeError:
pass
return unpickler return unpickler
...@@ -650,8 +634,7 @@ def referencesf(p, oids=None): ...@@ -650,8 +634,7 @@ def referencesf(p, oids=None):
""" """
refs = [] refs = []
u = Unpickler(BytesIO(p)) u = PersistentUnpickler(None, refs.append, BytesIO(p))
u.persistent_load = refs.append
u.noload() u.noload()
u.noload() u.noload()
...@@ -692,8 +675,7 @@ def get_refs(a_pickle): ...@@ -692,8 +675,7 @@ def get_refs(a_pickle):
""" """
refs = [] refs = []
u = Unpickler(BytesIO(a_pickle)) u = PersistentUnpickler(None, refs.append, BytesIO(a_pickle))
u.persistent_load = refs.append
u.noload() u.noload()
u.noload() u.noload()
......
...@@ -15,7 +15,6 @@ ...@@ -15,7 +15,6 @@
from __future__ import print_function from __future__ import print_function
import doctest import doctest
import sys
import time import time
from persistent import Persistent from persistent import Persistent
...@@ -26,7 +25,7 @@ from ZODB.serialize import referencesf ...@@ -26,7 +25,7 @@ from ZODB.serialize import referencesf
from ZODB.tests.MinPO import MinPO from ZODB.tests.MinPO import MinPO
from ZODB.tests.MTStorage import TestThread from ZODB.tests.MTStorage import TestThread
from ZODB.tests.StorageTestBase import snooze from ZODB.tests.StorageTestBase import snooze
from ZODB._compat import loads, Pickler, Unpickler, BytesIO, _protocol from ZODB._compat import loads, PersistentPickler, Pickler, Unpickler, BytesIO, _protocol
import transaction import transaction
import ZODB.interfaces import ZODB.interfaces
import ZODB.tests.util import ZODB.tests.util
...@@ -85,11 +84,7 @@ def dumps(obj): ...@@ -85,11 +84,7 @@ def dumps(obj):
return obj.getoid() return obj.getoid()
return None return None
s = BytesIO() s = BytesIO()
p = Pickler(s, _protocol) p = PersistentPickler(getpersid, s, _protocol)
if sys.version_info[0] < 3:
p.inst_persistent_id = getpersid
else:
p.persistent_id = getpersid
p.dump(obj) p.dump(obj)
p.dump(None) p.dump(None)
return s.getvalue() return s.getvalue()
......
...@@ -25,7 +25,7 @@ import transaction ...@@ -25,7 +25,7 @@ import transaction
from ZODB.utils import u64 from ZODB.utils import u64
from ZODB.tests.MinPO import MinPO from ZODB.tests.MinPO import MinPO
from ZODB._compat import Pickler, Unpickler, BytesIO, _protocol from ZODB._compat import PersistentPickler, Unpickler, BytesIO, _protocol
import ZODB.tests.util import ZODB.tests.util
...@@ -50,11 +50,7 @@ def _persistent_id(obj): ...@@ -50,11 +50,7 @@ def _persistent_id(obj):
def zodb_pickle(obj): def zodb_pickle(obj):
"""Create a pickle in the format expected by ZODB.""" """Create a pickle in the format expected by ZODB."""
f = BytesIO() f = BytesIO()
p = Pickler(f, _protocol) p = PersistentPickler(_persistent_id, f, _protocol)
if sys.version_info[0] < 3:
p.inst_persistent_id = _persistent_id
else:
p.persistent_id = _persistent_id
klass = obj.__class__ klass = obj.__class__
assert not hasattr(obj, '__getinitargs__'), "not ready for constructors" assert not hasattr(obj, '__getinitargs__'), "not ready for constructors"
args = None args = None
......
...@@ -32,7 +32,6 @@ import unittest ...@@ -32,7 +32,6 @@ import unittest
import ZODB import ZODB
import ZODB.MappingStorage import ZODB.MappingStorage
import ZODB.tests.util import ZODB.tests.util
from ZODB.tests.util import PYPY
PY2 = sys.version_info[0] == 2 PY2 = sys.version_info[0] == 2
...@@ -191,7 +190,6 @@ class DBMethods(CacheTestBase): ...@@ -191,7 +190,6 @@ class DBMethods(CacheTestBase):
class LRUCacheTests(CacheTestBase): class LRUCacheTests(CacheTestBase):
@unittest.skipIf(PYPY, "Implementation details of the PickleCache")
def testLRU(self): def testLRU(self):
# verify the LRU behavior of the cache # verify the LRU behavior of the cache
dataset_size = 5 dataset_size = 5
......
...@@ -244,10 +244,14 @@ class UserMethodTests(unittest.TestCase): ...@@ -244,10 +244,14 @@ class UserMethodTests(unittest.TestCase):
If all references to the object are released, then a new If all references to the object are released, then a new
object will be returned. The cache doesn't keep unreferenced object will be returned. The cache doesn't keep unreferenced
ghosts alive. (The next object returned may still have the ghosts alive, although on some implementations like PyPy we
same id, because Python may re-use the same memory.) need to run a garbage collection to be sure they go away. (The
next object returned may still have the same id, because Python
may re-use the same memory.)
>>> del obj, obj2 >>> del obj, obj2
>>> import gc
>>> _ = gc.collect()
>>> cn._cache.get(p64(0), None) >>> cn._cache.get(p64(0), None)
If the object is unghosted, then it will stay in the cache If the object is unghosted, then it will stay in the cache
...@@ -683,8 +687,8 @@ def doctest_proper_ghost_initialization_with_empty__p_deactivate(): ...@@ -683,8 +687,8 @@ def doctest_proper_ghost_initialization_with_empty__p_deactivate():
>>> transaction.commit() >>> transaction.commit()
>>> conn2 = db.open() >>> conn2 = db.open()
>>> conn2.root.x._p_changed >>> bool(conn2.root.x._p_changed)
False
>>> conn2.root.x.y >>> conn2.root.x.y
1 1
......
...@@ -112,10 +112,10 @@ If we provide entries that cause an unexpected error: ...@@ -112,10 +112,10 @@ If we provide entries that cause an unexpected error:
... ('sally', 10.0), ... ('sally', 10.0),
... ('bob', '20.0'), ... ('bob', '20.0'),
... ('sally', 10.0), ... ('sally', 10.0),
... ]) ... ]) #doctest: +ELLIPSIS
Updated bob Updated bob
Updated sally Updated sally
Unexpected exception unsupported operand type(s) for +=: 'float' and 'str' Unexpected exception unsupported operand type(s) for +...: 'float' and 'str'
Because the apply_entries used a savepoint for the entire function, it was Because the apply_entries used a savepoint for the entire function, it was
able to rollback the partial changes without rolling back changes made in the able to rollback the partial changes without rolling back changes made in the
...@@ -194,4 +194,3 @@ However, using a savepoint invalidates any savepoints that come after it: ...@@ -194,4 +194,3 @@ However, using a savepoint invalidates any savepoints that come after it:
InvalidSavepointRollbackError: invalidated by a later savepoint InvalidSavepointRollbackError: invalidated by a later savepoint
>>> transaction.abort() >>> transaction.abort()
...@@ -252,7 +252,7 @@ if sys.version_info >= (2, 6): ...@@ -252,7 +252,7 @@ if sys.version_info >= (2, 6):
>>> with db.transaction() as conn2: >>> with db.transaction() as conn2:
... conn2.root()['y'] = 2 ... conn2.root()['y'] = 2
... XXX ... XXX #doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last): Traceback (most recent call last):
... ...
NameError: name 'XXX' is not defined NameError: name 'XXX' is not defined
......
...@@ -177,6 +177,7 @@ def _functest_load(fqn): ...@@ -177,6 +177,7 @@ def _functest_load(fqn):
# Open the database and attempt to deserialize the tree # Open the database and attempt to deserialize the tree
# (run in separate process) # (run in separate process)
from ZODB import DB from ZODB import DB
import transaction
WORKING, FAILING = _working_failing_datetimes() WORKING, FAILING = _working_failing_datetimes()
db = DB(fqn) db = DB(fqn)
conn = db.open() conn = db.open()
...@@ -185,6 +186,7 @@ def _functest_load(fqn): ...@@ -185,6 +186,7 @@ def _functest_load(fqn):
tree = root['tree'] tree = root['tree']
assert tree[WORKING] == 'working' assert tree[WORKING] == 'working'
assert tree[FAILING] == 'failing' assert tree[FAILING] == 'failing'
transaction.abort()
finally: # Windoze finally: # Windoze
conn.close() conn.close()
db.close() db.close()
......
...@@ -14,8 +14,8 @@ ...@@ -14,8 +14,8 @@
"""Test behavior of Connection plus cPickleCache.""" """Test behavior of Connection plus cPickleCache."""
from persistent import Persistent from persistent import Persistent
from ZODB.config import databaseFromString from ZODB.config import databaseFromString
import doctest
import transaction import transaction
import doctest
class RecalcitrantObject(Persistent): class RecalcitrantObject(Persistent):
"""A Persistent object that will not become a ghost.""" """A Persistent object that will not become a ghost."""
...@@ -52,7 +52,7 @@ class RegularObject(Persistent): ...@@ -52,7 +52,7 @@ class RegularObject(Persistent):
class PersistentObject(Persistent): class PersistentObject(Persistent):
pass pass
class CacheTests: class CacheTests(object):
def test_cache(self): def test_cache(self):
r"""Test basic cache methods. r"""Test basic cache methods.
...@@ -199,12 +199,15 @@ class CacheTests: ...@@ -199,12 +199,15 @@ class CacheTests:
5 5
>>> transaction.abort() >>> transaction.abort()
>>> len(cn._cache)
6
>>> cn._cache.cache_non_ghost_count
2
>>> cn._cache.ringlen() >>> cn._cache.ringlen()
2 2
>>> RegularObject.deactivations >>> RegularObject.deactivations
4 4
""" """
def test_gc_on_open_connections(self): def test_gc_on_open_connections(self):
r"""Test that automatic GC is not applied to open connections. r"""Test that automatic GC is not applied to open connections.
......
...@@ -58,7 +58,7 @@ Trans #00000 tid=... time=... offset=<OFFSET> ...@@ -58,7 +58,7 @@ Trans #00000 tid=... time=... offset=<OFFSET>
Trans #00001 tid=... time=... offset=<OFFSET> Trans #00001 tid=... time=... offset=<OFFSET>
status=' ' user='' description='added an OOBTree' status=' ' user='' description='added an OOBTree'
data #00000 oid=0000000000000000 size=<SIZE> class=persistent.mapping.PersistentMapping data #00000 oid=0000000000000000 size=<SIZE> class=persistent.mapping.PersistentMapping
data #00001 oid=0000000000000001 size=<SIZE> class=BTrees.OOBTree.OOBTree data #00001 oid=0000000000000001 size=<SIZE> class=BTrees.OOBTree.OOBTree...
Now we see two transactions and two changed objects. Now we see two transactions and two changed objects.
......
...@@ -90,12 +90,12 @@ oid 0x00 persistent.mapping.PersistentMapping 2 revisions ...@@ -90,12 +90,12 @@ oid 0x00 persistent.mapping.PersistentMapping 2 revisions
tid user='' tid user=''
tid description='added an OOBTree' tid description='added an OOBTree'
new revision persistent.mapping.PersistentMapping at <OFFSET> new revision persistent.mapping.PersistentMapping at <OFFSET>
references 0x01 BTrees.OOBTree.OOBTree at <OFFSET> references 0x01 BTrees.OOBTree.OOBTree... at <OFFSET>
oid 0x01 BTrees.OOBTree.OOBTree 1 revision oid 0x01 BTrees.OOBTree.OOBTree... 1 revision
tid 0x... offset=<OFFSET> ... tid 0x... offset=<OFFSET> ...
tid user='' tid user=''
tid description='added an OOBTree' tid description='added an OOBTree'
new revision BTrees.OOBTree.OOBTree at <OFFSET> new revision BTrees.OOBTree.OOBTree... at <OFFSET>
referenced by 0x00 persistent.mapping.PersistentMapping at <OFFSET> referenced by 0x00 persistent.mapping.PersistentMapping at <OFFSET>
So there are two revisions of oid 0 now, and the second references oid 1. So there are two revisions of oid 0 now, and the second references oid 1.
...@@ -118,21 +118,21 @@ oid 0x00 persistent.mapping.PersistentMapping 2 revisions ...@@ -118,21 +118,21 @@ oid 0x00 persistent.mapping.PersistentMapping 2 revisions
tid user='' tid user=''
tid description='added an OOBTree' tid description='added an OOBTree'
new revision persistent.mapping.PersistentMapping at <OFFSET> new revision persistent.mapping.PersistentMapping at <OFFSET>
references 0x01 BTrees.OOBTree.OOBTree at <OFFSET> references 0x01 BTrees.OOBTree.OOBTree... at <OFFSET>
tid 0x... offset=<OFFSET> ... tid 0x... offset=<OFFSET> ...
tid user='' tid user=''
tid description='circling back to the root' tid description='circling back to the root'
referenced by 0x01 BTrees.OOBTree.OOBTree at <OFFSET> referenced by 0x01 BTrees.OOBTree.OOBTree... at <OFFSET>
oid 0x01 BTrees.OOBTree.OOBTree 2 revisions oid 0x01 BTrees.OOBTree.OOBTree... 2 revisions
tid 0x... offset=<OFFSET> ... tid 0x... offset=<OFFSET> ...
tid user='' tid user=''
tid description='added an OOBTree' tid description='added an OOBTree'
new revision BTrees.OOBTree.OOBTree at <OFFSET> new revision BTrees.OOBTree.OOBTree... at <OFFSET>
referenced by 0x00 persistent.mapping.PersistentMapping at <OFFSET> referenced by 0x00 persistent.mapping.PersistentMapping at <OFFSET>
tid 0x... offset=<OFFSET> ... tid 0x... offset=<OFFSET> ...
tid user='' tid user=''
tid description='circling back to the root' tid description='circling back to the root'
new revision BTrees.OOBTree.OOBTree at <OFFSET> new revision BTrees.OOBTree.OOBTree... at <OFFSET>
references 0x00 persistent.mapping.PersistentMapping at <OFFSET> references 0x00 persistent.mapping.PersistentMapping at <OFFSET>
oid 0x02 <unknown> 0 revisions oid 0x02 <unknown> 0 revisions
this oid was not defined (no data record for it found) this oid was not defined (no data record for it found)
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment