Kirill Smelkov / zodburi, commit 3f9350f6

Authored Apr 17, 2017 by Jim Fulton; committed via GitHub, Apr 17, 2017.

Merge pull request #15 from jimfulton/support-more-db-parameters

    Support more db parameters

Parents: 059e53f0, 69463a29

Showing 7 changed files with 163 additions and 27 deletions (+163, -27).
Files changed:

    CHANGES.txt                       +5   -0
    CONTRIBUTORS.txt                  +1   -0
    docs/index.rst                    +58  -2
    zodburi/__init__.py               +32  -6
    zodburi/resolvers.py              +4   -4
    zodburi/tests/__init__.py         +14  -8
    zodburi/tests/test_resolvers.py   +49  -7
CHANGES.txt

@@ -4,6 +4,11 @@
 2.1 (unreleased)
 ----------------

+- Add support for additional database configuration parameters:
+  pool_timeout, cache_size_bytes, historical_pool_size,
+  historical_cache_size, historical_cache_size_bytes,
+  historical_timeout, and large_record_size
+
 - Add support for Python 3.5.

 - Drop support for Python 2.6 and 3.2.
CONTRIBUTORS.txt

@@ -101,3 +101,4 @@ Contributors
 - Tres Seaver, 2012/05/27
 - Steve Piercy, 2016/07/21
 - Todd Koym, 2016/07/21
+- Jim Fulton, 2017/04/13
docs/index.rst

@@ -93,9 +93,65 @@ Connection-related

 These arguments relate to connections created from the database.

 connection_cache_size
-  integer (default 10000)
+  integer (default 10000) target size, in number of objects, of each
+  connection's object cache
+
+connection_cache_size_bytes
+  integer (default 0) target estimated size, in bytes, of each
+  connection's object cache
+
+  0 means no limit.
+
+  A suffix of KB, MB, or GB may be used to provide units.
+
+connection_historical_cache_size
+  integer (default 1000) target size, in number of objects, of each
+  historical connection's object cache
+
+connection_historical_cache_size_bytes
+  integer (default 0) target estimated size, in bytes, of each
+  historical connection's object cache
+
+  0 means no limit.
+
+  A suffix of KB, MB, or GB may be used to provide units.
+
+connection_historical_pool_size
+  integer (default 3) expected maximum total number of historical connections
+  simultaneously open
+
+connection_historical_timeout
+  integer (default 300) maximum age of inactive historical connections
+
+  When a historical connection has remained unused in a historical
+  connection pool for more than connection_historical_timeout seconds,
+  it will be discarded and its resources released.
+
+connection_large_record_size
+  integer (default 16MB) record size limit before suggesting using blobs
+
+  When object records are saved that are larger than this, a warning
+  is issued, suggesting that blobs should be used instead.
+
+  A suffix of KB, MB, or GB may be used to provide units.

 connection_pool_size
-  integer (default 7)
+  integer (default 7) expected maximum number of simultaneously open
+  connections
+
+  There is no hard limit (as many connections as are requested
+  will be opened, until system resources are exhausted). Exceeding
+  pool-size connections causes a warning message to be logged,
+  and exceeding twice pool-size connections causes a critical
+  message to be logged.
+
+connection_pool_timeout
+  integer (default unlimited) maximum age of inactive (non-historical)
+  connections
+
+  When a connection has remained unused in a connection pool for more
+  than connection_pool_timeout seconds, it will be discarded and its
+  resources released.

 Blob-related
 ++++++++++++
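Taken together, the connection_* options documented above are just query-string arguments on a storage URI. The following is a minimal sketch, not part of the patch, of how they reach ZODB: the memory:// scheme and the specific values are illustrative, and it assumes ZODB is importable alongside zodburi.

    from zodburi import resolve_uri
    import ZODB

    uri = ('memory://test'
           '?connection_cache_size=5000'
           '&connection_cache_size_bytes=100MB'
           '&connection_pool_timeout=1200'
           '&connection_large_record_size=4MB')

    # resolve_uri strips the "connection_" prefix, parses the KB/MB/GB
    # suffixes into byte counts, and returns keyword arguments for ZODB.DB.
    factory, dbkw = resolve_uri(uri)
    db = ZODB.DB(factory(), **dbkw)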
zodburi/__init__.py

+import re
 from pkg_resources import iter_entry_points

@@ -16,6 +17,28 @@ def resolve_uri(uri):
     else:
         raise KeyError('No resolver found for uri: %s' % uri)

+connection_parameters = '''
+pool_size pool_timeout cache_size cache_size_bytes
+historical_pool_size historical_cache_size historical_cache_size_bytes
+historical_timeout large_record_size
+'''.strip().split()
+
+bytes_parameters = ('cache_size_bytes', 'historical_cache_size_bytes',
+                    'large_record_size')
+
+parameters = dict(database_name='database_name')
+for parameter in connection_parameters:
+    parameters['connection_' + parameter] = parameter
+
+has_units = re.compile('\s*(\d+)\s*([kmg])b\s*$').match
+units = dict(k=1<<10, m=1<<20, g=1<<30)
+
+def _parse_bytes(s):
+    m = has_units(s.lower())
+    if m:
+        v, uname = m.group(1, 2)
+        return int(v) * units[uname]
+    else:
+        return int(s)
+
 def _get_dbkw(kw):
     dbkw = {

@@ -23,12 +46,15 @@ def _get_dbkw(kw):
         'pool_size': 7,
         'database_name': 'unnamed',
     }
-    if 'connection_cache_size' in kw:
-        dbkw['cache_size'] = int(kw.pop('connection_cache_size'))
-    if 'connection_pool_size' in kw:
-        dbkw['pool_size'] = int(kw.pop('connection_pool_size'))
-    if 'database_name' in kw:
-        dbkw['database_name'] = kw.pop('database_name')
+    for parameter in parameters:
+        if parameter in kw:
+            v = kw.pop(parameter)
+            if parameter.startswith('connection_'):
+                if parameters[parameter] in bytes_parameters:
+                    v = _parse_bytes(v)
+                else:
+                    v = int(v)
+            dbkw[parameters[parameter]] = v
+
+    if kw:
+        raise KeyError('Unrecognized database keyword(s): %s' % ', '.join(kw))
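The suffix parsing above is small enough to exercise on its own. Here is a standalone sketch that mirrors the _parse_bytes helper added in this hunk; the parse_bytes name is illustrative, not a zodburi API.

    import re

    # An optional k/m/g followed by "b" scales the value; otherwise the
    # string is treated as a plain integer (mirrors zodburi's _parse_bytes).
    has_units = re.compile(r'\s*(\d+)\s*([kmg])b\s*$').match
    units = dict(k=1 << 10, m=1 << 20, g=1 << 30)

    def parse_bytes(s):
        m = has_units(s.lower())
        if m:
            value, unit = m.group(1, 2)
            return int(value) * units[unit]
        return int(s)

    assert parse_bytes('100MB') == 100 * (1 << 20)
    assert parse_bytes('2 gb') == 2 * (1 << 30)
    assert parse_bytes('16777216') == 16777216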
zodburi/resolvers.py

@@ -194,10 +194,10 @@ class ZConfigURIResolver(object):

         if isinstance(config_item, ZODBDatabase):
             config = config_item.config
             factory = config.storage
-            dbkw = {
-                'connection_cache_size': config.cache_size,
-                'connection_pool_size': config.pool_size,
-            }
+            from zodburi import connection_parameters
+            dbkw = {'connection_' + name: getattr(config, name)
+                    for name in connection_parameters
+                    if getattr(config, name) is not None}
             if config.database_name:
                 dbkw['database_name'] = config.database_name
         else:
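For the zconfig:// scheme, the same options come from a <zodb> section rather than from the query string. Below is a minimal sketch of what the rewritten resolver returns; the path /tmp/zodb.conf, the fragment name x, and the commented configuration contents are assumptions for illustration.

    # Assumed contents of /tmp/zodb.conf (ZConfig option names use hyphens,
    # and byte sizes may carry KB/MB/GB suffixes):
    #
    #   <zodb x>
    #     <mappingstorage>
    #     </mappingstorage>
    #     database-name foo
    #     cache-size-bytes 100MB
    #     pool-timeout 1200
    #   </zodb>

    from zodburi.resolvers import ZConfigURIResolver

    resolver = ZConfigURIResolver()
    factory, dbkw = resolver('zconfig:///tmp/zodb.conf#x')
    storage = factory()   # a ZODB MappingStorage instance
    # dbkw holds 'database_name' plus a 'connection_'-prefixed entry for
    # every option in the section that is not None.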
zodburi/tests/__init__.py

@@ -17,16 +17,22 @@ class TestResolveURI(unittest.TestCase):

     @mock.patch('zodburi.resolvers.MappingStorage')
     def test_it_with_dbkw(self, MappingStorage):
-        from zodburi import resolve_uri
-        factory, dbkw = resolve_uri(
-            'memory://test?connection_cache_size=1&connection_pool_size=2&'
-            'database_name=dbname')
+        from zodburi import resolve_uri, connection_parameters, parameters
+        uri = 'memory://test?database_name=dbname'
+        for i, parameter in enumerate(connection_parameters):
+            uri += '&connection_%s=%d' % (parameter, i)
+            if parameter == 'cache_size_bytes':
+                uri += 'MB'
+        factory, dbkw = resolve_uri(uri)
         factory()
         MappingStorage.assert_called_once_with('test')
-        self.assertEqual(dbkw, {
-            'cache_size': 1,
-            'pool_size': 2,
-            'database_name': 'dbname'})
+        expect = dict(database_name='dbname')
+        for i, parameter in enumerate(connection_parameters):
+            parameter = 'connection_' + parameter
+            expect[parameters[parameter]] = i
+            if parameter == 'connection_cache_size_bytes':
+                expect[parameters[parameter]] *= 1 << 20
+        self.assertEqual(dbkw, expect)

     def test_it_cant_resolve(self):
         from zodburi import resolve_uri
zodburi/tests/test_resolvers.py

@@ -423,9 +423,15 @@ class TestZConfigURIResolver(unittest.TestCase):

         storage = factory()
         from ZODB.MappingStorage import MappingStorage
         self.assertTrue(isinstance(storage, MappingStorage))
-        self.assertEqual(dbkw, {'connection_cache_size': 5000,
-                                'connection_pool_size': 7})
+        self.assertEqual(dbkw, {
+            'connection_cache_size': 5000,
+            'connection_cache_size_bytes': 0,
+            'connection_historical_cache_size': 1000,
+            'connection_historical_cache_size_bytes': 0,
+            'connection_historical_pool_size': 3,
+            'connection_historical_timeout': 300,
+            'connection_large_record_size': 16777216,
+            'connection_pool_size': 7})

     def test_named_database(self):
         self.tmp.write(b"""

@@ -443,10 +449,46 @@ class TestZConfigURIResolver(unittest.TestCase):

         storage = factory()
         from ZODB.MappingStorage import MappingStorage
         self.assertTrue(isinstance(storage, MappingStorage))
-        self.assertEqual(dbkw, {'connection_cache_size': 20000,
-                                'connection_pool_size': 5,
-                                'database_name': 'foo'})
+        self.assertEqual(dbkw, {
+            'connection_cache_size': 20000,
+            'connection_cache_size_bytes': 0,
+            'connection_historical_cache_size': 1000,
+            'connection_historical_cache_size_bytes': 0,
+            'connection_historical_pool_size': 3,
+            'connection_historical_timeout': 300,
+            'connection_large_record_size': 16777216,
+            'connection_pool_size': 5,
+            'database_name': 'foo'})
+
+    def test_database_all_options(self):
+        from zodburi import connection_parameters, bytes_parameters
+        self.tmp.write(("""
+        <zodb x>
+          <mappingstorage>
+          </mappingstorage>
+          database-name foo
+          %s
+        </zodb>
+        """ % '\n'.join(
+            "%s %s" % (
+                name.replace('_', '-'),
+                '%sMB' % i if name in bytes_parameters else i,
+                )
+            for (i, name) in enumerate(connection_parameters)
+            )).encode())
+        self.tmp.flush()
+        resolver = self._makeOne()
+        factory, dbkw = resolver('zconfig://%s#x' % self.tmp.name)
+        storage = factory()
+        from ZODB.MappingStorage import MappingStorage
+        self.assertTrue(isinstance(storage, MappingStorage))
+        expect = dict(database_name='foo')
+        for i, parameter in enumerate(connection_parameters):
+            cparameter = 'connection_' + parameter
+            expect[cparameter] = i
+            if parameter in bytes_parameters:
+                expect[cparameter] *= 1 << 20
+        self.assertEqual(dbkw, expect)


 class TestMappingStorageURIResolver(Base, unittest.TestCase):