Laurent S / erp5 / Commits / 5645d0da

Commit 5645d0da authored Apr 26, 2012 by Julien Muchembled
CMFActivity: move SQLDict specific code out of SQLBase.getProcessableMessageList
parent 31b4bb58
Showing 3 changed files with 64 additions and 76 deletions
product/CMFActivity/Activity/SQLBase.py   +28 -43
product/CMFActivity/Activity/SQLDict.py   +36 -26
product/CMFActivity/Activity/SQLQueue.py  +0 -7
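The refactoring replaces the per-queue getDuplicateMessageUidList() hook with a getProcessableMessageLoader() factory: SQLBase now asks the concrete queue for a load(line) callable that returns a (message, uid, duplicate_uid_list) tuple, and the duplicate-merging logic that previously sat inside SQLBase.getProcessableMessageList moves into SQLDict's loader. The sketch below is only a simplified, standalone illustration of that pattern; BaseQueue, DictQueue and storage are made-up stand-ins, not the actual CMFActivity classes shown in the diffs that follow.

    # Simplified illustration of the loader hook introduced by this commit.
    # BaseQueue/DictQueue/storage are illustrative stand-ins for SQLBase,
    # SQLDict and the activity tool.
    class BaseQueue(object):

      def getProcessableMessageLoader(self, storage):
        # Default loader: do not merge anything (the behaviour SQLQueue keeps).
        def load(line):
          return storage.load(line), line.uid, ()
        return load

      def getProcessableMessageList(self, storage, line_list):
        load = self.getProcessableMessageLoader(storage)
        message_list = []
        uid_to_duplicate_uid_list_dict = {}
        for line in line_list:
          m, uid, uid_list = load(line)
          if m is None:
            # 'line' duplicates an already loaded message: record its uid
            # against the original message instead of executing it again.
            uid_to_duplicate_uid_list_dict[uid] += uid_list
            continue
          uid_to_duplicate_uid_list_dict[uid] = uid_list
          message_list.append(m)
        return message_list, uid_to_duplicate_uid_list_dict

    class DictQueue(BaseQueue):

      def getProcessableMessageLoader(self, storage):
        path_and_method_id_dict = {}
        def load(line):
          key = line.path, line.method_id
          original_uid = path_and_method_id_dict.get(key)
          if original_uid is not None:
            # Duplicate of a message already kept for this (path, method_id).
            return None, original_uid, [line.uid]
          path_and_method_id_dict[key] = line.uid
          return storage.load(line), line.uid, []
        return load

SQLQueue simply inherits the default loader, which is why its getDuplicateMessageUidList() stub can be dropped in the third file below.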
product/CMFActivity/Activity/SQLBase.py
@@ -168,6 +168,14 @@ class SQLBase(Queue):
     activity_tool.SQLBase_makeMessageListAvailable(table=self.sql_table,
                                                    uid=uid_list)
 
+  def getProcessableMessageLoader(self, activity_tool, processing_node):
+    # do not merge anything
+    def load(line):
+      uid = line.uid
+      m = self.loadMessage(line.message, uid=uid, line=line)
+      return m, uid, ()
+    return load
+
   def getProcessableMessageList(self, activity_tool, processing_node):
     """
       Always true:
@@ -206,24 +214,16 @@ class SQLBase(Queue):
       if line_list:
         self._log(TRACE, 'Reserved messages: %r' % [x.uid for x in line_list])
       return line_list
-    def getDuplicateMessageUidList(line):
-      uid_list = self.getDuplicateMessageUidList(activity_tool=activity_tool,
-        line=line, processing_node=processing_node)
-      if uid_list:
-        self._log(TRACE, 'Reserved duplicate messages: %r' % (uid_list, ))
-      return uid_list
     now_date = self.getNow(activity_tool)
     uid_to_duplicate_uid_list_dict = {}
     try:
       result = getReservedMessageList(1)
       if result:
-        line = result[0]
-        uid = line.uid
-        m = self.loadMessage(line.message, uid=uid, line=line)
+        load = self.getProcessableMessageLoader(activity_tool, processing_node)
+        m, uid, uid_list = load(result[0])
         message_list = [m]
-        uid_to_duplicate_uid_list_dict[uid] = getDuplicateMessageUidList(line)
-        group_method_id = line.group_method_id
-        activity_tool.SQLBase_processMessage(table=self.sql_table, uid=[uid])
+        uid_to_duplicate_uid_list_dict[uid] = uid_list
+        group_method_id = m.line.group_method_id
         if group_method_id != '\0':
           # Count the number of objects to prevent too many objects.
           cost = m.activity_kw.get('group_method_cost', .01)
@@ -235,39 +235,24 @@ class SQLBase(Queue):
           if limit > 1: # <=> cost * count < 1
             cost *= count
             # Retrieve objects which have the same group method.
-            result = getReservedMessageList(limit, group_method_id)
-            if self.merge_duplicate:
-              path_and_method_id_dict = {(line.path, line.method_id): uid}
-            unreserve_uid_list = []
+            result = iter(getReservedMessageList(limit, group_method_id))
             for line in result:
-              if line.uid == uid:
+              if line.uid in uid_to_duplicate_uid_list_dict:
                 continue
-              # All fetched lines have the same group_method_id and
-              # processing_node.
-              # Their dates are lower-than or equal-to now_date.
-              # We read each line once so lines have distinct uids.
-              # So what remains to be filtered on are path and method_id.
-              if self.merge_duplicate:
-                key = line.path, line.method_id
-                original_uid = path_and_method_id_dict.get(key)
-                if original_uid is not None:
-                  uid_to_duplicate_uid_list_dict[original_uid].append(line.uid)
-                  continue
-                path_and_method_id_dict[key] = line.uid
-                uid_to_duplicate_uid_list_dict[line.uid] = \
-                  getDuplicateMessageUidList(line)
-              if cost < 1:
-                m = self.loadMessage(line.message, uid=line.uid, line=line)
-                cost += len(m.getObjectList(activity_tool)) * \
-                  m.activity_kw.get('group_method_cost', .01)
-                message_list.append(m)
-              else:
-                unreserve_uid_list.append(line.uid)
-            activity_tool.SQLBase_processMessage(table=self.sql_table,
-              uid=[m.uid for m in message_list])
-            # Unreserve extra messages as soon as possible.
-            self.makeMessageListAvailable(activity_tool=activity_tool,
-                                          uid_list=unreserve_uid_list)
+              m, uid, uid_list = load(line)
+              if m is None:
+                uid_to_duplicate_uid_list_dict[uid] += uid_list
+                continue
+              uid_to_duplicate_uid_list_dict[uid] = uid_list
+              cost += len(m.getObjectList(activity_tool)) * \
+                      m.activity_kw.get('group_method_cost', .01)
+              message_list.append(m)
+              if cost >= 1:
+                # Unreserve extra messages as soon as possible.
+                self.makeMessageListAvailable(activity_tool=activity_tool,
+                  uid_list=[line.uid for line in result if line.uid != uid])
+        activity_tool.SQLBase_processMessage(table=self.sql_table,
+          uid=uid_to_duplicate_uid_list_dict.keys())
       return message_list, group_method_id, uid_to_duplicate_uid_list_dict
     except:
       self._log(WARNING, 'Exception while reserving messages.')
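A side note on the rewritten SQLBase loop above: the second getReservedMessageList() result is now wrapped in iter(), so the comprehension [line.uid for line in result if line.uid != uid] that feeds makeMessageListAvailable consumes only the lines the for loop has not reached yet; those extra reserved messages get released and the loop then ends on the exhausted iterator. A minimal, self-contained sketch of that idiom (toy values, nothing ERP5-specific):

    # Toy demonstration of draining the rest of an iterator from inside the
    # loop that consumes it, as the rewritten SQLBase code does with 'result'.
    lines = iter(range(10))   # stands in for iter(getReservedMessageList(...))
    picked = []
    cost = 0
    for line in lines:
      picked.append(line)
      cost += 1
      if cost >= 3:
        # A comprehension over the same iterator only sees the items that the
        # for loop has not consumed yet; afterwards the loop stops by itself.
        leftover = [x for x in lines]
        # (the real code hands these uids to makeMessageListAvailable)
    print(picked)    # [0, 1, 2]
    print(leftover)  # [3, 4, 5, 6, 7, 8, 9]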
product/CMFActivity/Activity/SQLDict.py
@@ -109,15 +109,24 @@ class SQLDict(SQLBase):
     message_list = activity_buffer.getMessageList(self)
     return [m for m in message_list if m.is_registered]
 
-  def getDuplicateMessageUidList(self, activity_tool, line, processing_node):
-    """
-      Reserve unreserved messages matching given line.
-      Return their uids.
-    """
-    try:
-      result = activity_tool.SQLDict_selectDuplicatedLineList(
-        path=line.path,
-        method_id=line.method_id,
-        group_method_id=line.group_method_id,
-      )
-      uid_list = [x.uid for x in result]
+  def getProcessableMessageLoader(self, activity_tool, processing_node):
+    path_and_method_id_dict = {}
+    def load(line):
+      # getProcessableMessageList already fetch messages with the same
+      # group_method_id, so what remains to be filtered on are path and
+      # method_id.
+      # XXX: What about tag ?
+      path = line.path
+      method_id = line.method_id
+      key = path, method_id
+      uid = line.uid
+      original_uid = path_and_method_id_dict.get(key)
+      if original_uid is None:
+        m = self.loadMessage(line.message, uid=uid, line=line)
+        try:
+          result = activity_tool.SQLDict_selectDuplicatedLineList(
+            path=path,
+            method_id=method_id,
+            group_method_id=line.group_method_id,
+          )
+          uid_list = [x.uid for x in result]
@@ -125,16 +134,17 @@ class SQLDict(SQLBase):
-        activity_tool.SQLDict_reserveDuplicatedLineList(
-          processing_node=processing_node, uid=uid_list)
-      else:
-        # Release locks
-        activity_tool.SQLDict_commit()
-    except:
-      # Log
-      LOG('SQLDict', WARNING, 'getDuplicateMessageUidList got an exception', error=sys.exc_info())
-      # Release lock
-      activity_tool.SQLDict_rollback()
-      # And re-raise
-      raise
-    return uid_list
+            activity_tool.SQLDict_reserveDuplicatedLineList(
+              processing_node=processing_node, uid=uid_list)
+          else:
+            # release locks
+            activity_tool.SQLDict_commit()
+        except:
+          self._log(WARNING, 'getDuplicateMessageUidList got an exception')
+          # release locks
+          activity_tool.SQLDict_rollback()
+          raise
+        if uid_list:
+          self._log(TRACE, 'Reserved duplicate messages: %r' % uid_list)
+        path_and_method_id_dict[key] = uid
+        return m, uid, uid_list
+      return None, original_uid, [uid]
+    return load
 
   def hasActivity(self, activity_tool, object, method_id=None, only_valid=None, active_process_uid=None):
     hasMessage = getattr(activity_tool, 'SQLDict_hasMessage', None)
product/CMFActivity/Activity/SQLQueue.py
@@ -82,13 +82,6 @@ class SQLQueue(SQLBase):
                                 processing_node_list=None,
                                 serialization_tag_list=serialization_tag_list)
 
-  def getDuplicateMessageUidList(self, activity_tool, line, processing_node):
-    """
-      Reserve unreserved messages matching given line.
-      Return their uids.
-    """
-    return ()
-
   def hasActivity(self, activity_tool, object, method_id=None, only_valid=None, active_process_uid=None):
     hasMessage = getattr(activity_tool, 'SQLQueue_hasMessage', None)
     if hasMessage is not None: