Commit 73d89df3 authored by Julien Muchembled

CMFActivity: optimization, cleanup, limit insertion by size in bytes instead of number of rows

parents 7fb53e8f 3ca5bf97
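
Note on the main change: prepareQueueMessageList no longer batches INSERTs by a fixed MAX_MESSAGE_LIST_SIZE row count but by a byte budget derived from the connection's max_allowed_packet. A minimal standalone sketch of that budget loop follows; the generic db handle, table and pre-quoted rows are illustrative stand-ins, not the actual CMFActivity objects.

    # Sketch of batching multi-row INSERTs by byte size, mirroring the loop
    # added in SQLBase.prepareQueueMessageList below. db, table and rows are
    # hypothetical; each row is an already-quoted "col1,col2,..." string.
    INSERT_TEMPLATE = "INSERT INTO %s (uid, path, message) VALUES\n(%s)"
    SEPARATOR = "),\n("

    def insert_by_packet_size(db, table, rows, max_allowed_packet):
        # Byte budget left for row data once the fixed part of the statement
        # is accounted for (one separator is counted back because the first
        # row does not actually need one).
        max_payload = (max_allowed_packet + len(SEPARATOR)
                       - len(INSERT_TEMPLATE % (table, '')))
        values_list = []
        budget = max_payload
        for row in rows:
            n = len(SEPARATOR) + len(row)
            budget -= n
            if budget < 0:
                if not values_list:
                    raise ValueError("max_allowed_packet too small to insert message")
                db.query(INSERT_TEMPLATE % (table, SEPARATOR.join(values_list)))
                del values_list[:]
                budget = max_payload - n
            values_list.append(row)
        if values_list:
            db.query(INSERT_TEMPLATE % (table, SEPARATOR.join(values_list)))

The real code below additionally allocates random 64-bit uids through a MySQL session variable (@uid) and retries the whole INSERT on DUP_ENTRY.
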
@@ -33,14 +33,6 @@ from zLOG import LOG, WARNING, ERROR
from ZODB.POSException import ConflictError from ZODB.POSException import ConflictError
from cStringIO import StringIO from cStringIO import StringIO
import transaction
# Error values for message validation
EXCEPTION = -1
VALID = 0
INVALID_PATH = 1
INVALID_ORDER = 2
# Time global parameters # Time global parameters
MAX_PROCESSING_TIME = 900 # in seconds MAX_PROCESSING_TIME = 900 # in seconds
VALIDATION_ERROR_DELAY = 15 # in seconds VALIDATION_ERROR_DELAY = 15 # in seconds
@@ -96,52 +88,6 @@ class Queue(object):
def distribute(self, activity_tool, node_count): def distribute(self, activity_tool, node_count):
raise NotImplementedError raise NotImplementedError
def validate(self, activity_tool, message, check_order_validation=1, **kw):
"""
This is the place where activity semantics are implemented.
**kw contains all parameters which allow implementing synchronisation,
constraints, delays, etc.
Standard synchronisation parameters:
after_method_id -- never validate message if after_method_id
is in the list of methods which are
going to be executed
after_message_uid -- never validate message if after_message_uid
is in the list of messages which are
going to be executed
after_path -- never validate message if after_path
is in the list of paths which are
going to be executed
"""
try:
if activity_tool.unrestrictedTraverse(message.object_path, None) is None:
# Do not try to call methods on objects which do not exist
LOG('CMFActivity', WARNING,
'Object %s does not exist' % '/'.join(message.object_path))
return INVALID_PATH
if check_order_validation:
for k, v in kw.iteritems():
if activity_tool.validateOrder(message, k, v):
return INVALID_ORDER
except ConflictError:
raise
except:
LOG('CMFActivity', WARNING,
'Validation of Object %s raised exception' % '/'.join(message.object_path),
error=sys.exc_info())
# Do not try to call methods on objects which cause errors
return EXCEPTION
return VALID
def getDependentMessageList(self, activity_tool, message):
message_list = []
for k, v in message.activity_kw.iteritems():
message_list += activity_tool.getDependentMessageList(message, k, v)
return message_list
def getExecutableMessageList(self, activity_tool, message, message_dict, def getExecutableMessageList(self, activity_tool, message, message_dict,
validation_text_dict, now_date=None): validation_text_dict, now_date=None):
"""Get messages which have no dependent message, and store them in the dictionary. """Get messages which have no dependent message, and store them in the dictionary.
@@ -165,8 +111,7 @@ class Queue(object):
cached_result = validation_text_dict.get(message.order_validation_text) cached_result = validation_text_dict.get(message.order_validation_text)
if cached_result is None: if cached_result is None:
message_list = self.getDependentMessageList(activity_tool, message) message_list = activity_tool.getDependentMessageList(message, self)
transaction.commit() # Release locks.
if message_list: if message_list:
# The result is not empty, so this message is not executable. # The result is not empty, so this message is not executable.
validation_text_dict[message.order_validation_text] = 0 validation_text_dict[message.order_validation_text] = 0
@@ -189,9 +134,6 @@ class Queue(object):
elif cached_result: elif cached_result:
message_dict[message.uid] = message message_dict[message.uid] = message
def hasActivity(self, activity_tool, object, processing_node=None, active_process=None, **kw):
return 0
def flush(self, activity_tool, object, **kw): def flush(self, activity_tool, object, **kw):
pass pass
@@ -201,7 +143,7 @@ class Queue(object):
key_list = message.activity_kw.keys() key_list = message.activity_kw.keys()
key_list.sort() key_list.sort()
for key in key_list: for key in key_list:
method_id = "_validate_%s" % key method_id = "_validate_" + key
if getattr(self, method_id, None) is not None: if getattr(self, method_id, None) is not None:
order_validation_item_list.append((key, message.activity_kw[key])) order_validation_item_list.append((key, message.activity_kw[key]))
if len(order_validation_item_list) == 0: if len(order_validation_item_list) == 0:
@@ -216,14 +158,6 @@ class Queue(object):
def getMessageList(self, activity_tool, processing_node=None,**kw): def getMessageList(self, activity_tool, processing_node=None,**kw):
return [] return []
def countMessage(self, activity_tool,**kw):
return 0
def countMessageWithTag(self, activity_tool,value):
"""Return the number of messages which match the given tag.
"""
return self.countMessage(activity_tool, tag=value)
# Transaction Management # Transaction Management
def prepareQueueMessageList(self, activity_tool, message_list): def prepareQueueMessageList(self, activity_tool, message_list):
# Called to prepare transaction commit for queued messages # Called to prepare transaction commit for queued messages
......
@@ -33,22 +33,19 @@ import MySQLdb
from MySQLdb.constants.ER import DUP_ENTRY from MySQLdb.constants.ER import DUP_ENTRY
from DateTime import DateTime from DateTime import DateTime
from Shared.DC.ZRDB.Results import Results from Shared.DC.ZRDB.Results import Results
from Shared.DC.ZRDB.DA import DatabaseError
from zLOG import LOG, TRACE, INFO, WARNING, ERROR, PANIC from zLOG import LOG, TRACE, INFO, WARNING, ERROR, PANIC
from ZODB.POSException import ConflictError from ZODB.POSException import ConflictError
from Products.CMFActivity.ActivityTool import ( from Products.CMFActivity.ActivityTool import (
Message, MESSAGE_NOT_EXECUTED, MESSAGE_EXECUTED, SkippedMessage) Message, MESSAGE_NOT_EXECUTED, MESSAGE_EXECUTED, SkippedMessage)
from Products.CMFActivity.ActivityRuntimeEnvironment import ( from Products.CMFActivity.ActivityRuntimeEnvironment import (
DEFAULT_MAX_RETRY, ActivityRuntimeEnvironment) DEFAULT_MAX_RETRY, ActivityRuntimeEnvironment)
from Queue import Queue, VALIDATION_ERROR_DELAY, VALID, INVALID_PATH from Queue import Queue, VALIDATION_ERROR_DELAY
from Products.CMFActivity.Errors import ActivityFlushError from Products.CMFActivity.Errors import ActivityFlushError
# Stop validating more messages when this limit is reached # Stop validating more messages when this limit is reached
MAX_VALIDATED_LIMIT = 1000 MAX_VALIDATED_LIMIT = 1000
# Read this many messages to validate. # Read this many messages to validate.
READ_MESSAGE_LIMIT = 1000 READ_MESSAGE_LIMIT = 1000
# TODO: Limit by size in bytes instead of number of rows.
MAX_MESSAGE_LIST_SIZE = 100
INVOKE_ERROR_STATE = -2 INVOKE_ERROR_STATE = -2
# Activity uids are stored as 64 bits unsigned integers. # Activity uids are stored as 64 bits unsigned integers.
# No need to depend on a database that supports unsigned integers. # No need to depend on a database that supports unsigned integers.
@@ -70,13 +67,14 @@ def sort_message_key(message):
_DequeueMessageException = Exception() _DequeueMessageException = Exception()
def render_datetime(x):
return "%.4d-%.2d-%.2d %.2d:%.2d:%09.6f" % x.toZone('UTC').parts()[:6]
# sqltest_dict ({'condition_name': <render_function>}) defines how to render # sqltest_dict ({'condition_name': <render_function>}) defines how to render
# condition statements in the SQL query used by SQLBase.getMessageList # condition statements in the SQL query used by SQLBase.getMessageList
def sqltest_dict(): def sqltest_dict():
sqltest_dict = {} sqltest_dict = {}
no_quote_type = int, float, long no_quote_type = int, float, long
def render_datetime(x):
return "%.4d-%.2d-%.2d %.2d:%.2d:%09.6f" % x.toZone('UTC').parts()[:6]
def _(name, column=None, op="="): def _(name, column=None, op="="):
if column is None: if column is None:
column = name column = name
@@ -102,7 +100,6 @@ def sqltest_dict():
_('group_method_id') _('group_method_id')
_('method_id') _('method_id')
_('path') _('path')
_('processing')
_('processing_node') _('processing_node')
_('serialization_tag') _('serialization_tag')
_('tag') _('tag')
@@ -115,103 +112,143 @@ def sqltest_dict():
return sqltest_dict return sqltest_dict
sqltest_dict = sqltest_dict() sqltest_dict = sqltest_dict()
def getNow(db):
"""
Return the UTC date from the point of view of the SQL server.
Note that this value is not cached, and is not transactional on the MySQL
side.
"""
return db.query("SELECT UTC_TIMESTAMP(6)", 0)[1][0][0]
class SQLBase(Queue): class SQLBase(Queue):
""" """
Define a set of common methods for SQL-based storage of activities. Define a set of common methods for SQL-based storage of activities.
""" """
def createTableSQL(self):
return """\
CREATE TABLE %s (
`uid` BIGINT UNSIGNED NOT NULL,
`date` DATETIME(6) NOT NULL,
`path` VARCHAR(255) NOT NULL,
`active_process_uid` INT UNSIGNED NULL,
`method_id` VARCHAR(255) NOT NULL,
`processing_node` SMALLINT NOT NULL DEFAULT -1,
`priority` TINYINT NOT NULL DEFAULT 0,
`group_method_id` VARCHAR(255) NOT NULL DEFAULT '',
`tag` VARCHAR(255) NOT NULL,
`serialization_tag` VARCHAR(255) NOT NULL,
`retry` TINYINT UNSIGNED NOT NULL DEFAULT 0,
`message` LONGBLOB NOT NULL,
PRIMARY KEY (`uid`),
KEY `processing_node_priority_date` (`processing_node`, `priority`, `date`),
KEY `node_group_priority_date` (`processing_node`, `group_method_id`, `priority`, `date`),
KEY `serialization_tag_processing_node` (`serialization_tag`, `processing_node`),
KEY (`path`),
KEY (`active_process_uid`),
KEY (`method_id`),
KEY (`tag`)
) ENGINE=InnoDB""" % self.sql_table
def initialize(self, activity_tool, clear): def initialize(self, activity_tool, clear):
folder = activity_tool.getPortalObject().portal_skins.activity db = activity_tool.getSQLConnection()
try: create = self.createTableSQL()
createMessageTable = folder.SQLBase_createMessageTable
except AttributeError:
return
if clear: if clear:
folder.SQLBase_dropMessageTable(table=self.sql_table) db.query("DROP TABLE IF EXISTS " + self.sql_table)
createMessageTable(table=self.sql_table) db.query(create)
else: else:
src = createMessageTable._upgradeSchema(create_if_not_exists=1, src = db.upgradeSchema(create, create_if_not_exists=1,
initialize=self._initialize, initialize=self._initialize)
table=self.sql_table)
if src: if src:
LOG('CMFActivity', INFO, "%r table upgraded\n%s" LOG('CMFActivity', INFO, "%r table upgraded\n%s"
% (self.sql_table, src)) % (self.sql_table, src))
self._insert_max_payload = (db.getMaxAllowedPacket()
+ len(self._insert_separator)
- len(self._insert_template % (self.sql_table, '')))
def _initialize(self, db, column_list): def _initialize(self, db, column_list):
LOG('CMFActivity', ERROR, "Non-empty %r table upgraded." LOG('CMFActivity', ERROR, "Non-empty %r table upgraded."
" The following added columns could not be initialized: %s" " The following added columns could not be initialized: %s"
% (self.sql_table, ", ".join(column_list))) % (self.sql_table, ", ".join(column_list)))
_insert_template = ("INSERT INTO %s (uid,"
" path, active_process_uid, date, method_id, processing_node,"
" priority, group_method_id, tag, serialization_tag,"
" message) VALUES\n(%s)")
_insert_separator = "),\n("
def prepareQueueMessageList(self, activity_tool, message_list): def prepareQueueMessageList(self, activity_tool, message_list):
registered_message_list = [m for m in message_list if m.is_registered] db = activity_tool.getSQLConnection()
portal = activity_tool.getPortalObject() quote = db.string_literal
for i in xrange(0, len(registered_message_list), MAX_MESSAGE_LIST_SIZE): def insert(reset_uid):
message_list = registered_message_list[i:i+MAX_MESSAGE_LIST_SIZE] values = self._insert_separator.join(values_list)
path_list = ['/'.join(m.object_path) for m in message_list] del values_list[:]
active_process_uid_list = [m.active_process_uid for m in message_list]
method_id_list = [m.method_id for m in message_list]
priority_list = [m.activity_kw.get('priority', 1) for m in message_list]
date_list = [m.activity_kw.get('at_date') for m in message_list]
group_method_id_list = [m.getGroupId() for m in message_list]
tag_list = [m.activity_kw.get('tag', '') for m in message_list]
serialization_tag_list = [m.activity_kw.get('serialization_tag', '')
for m in message_list]
processing_node_list = []
for m in message_list:
m.order_validation_text = x = self.getOrderValidationText(m)
processing_node_list.append(0 if x == 'none' else -1)
for _ in xrange(UID_ALLOCATION_TRY_COUNT): for _ in xrange(UID_ALLOCATION_TRY_COUNT):
if reset_uid:
reset_uid = False
# Overflow will result in an IntegrityError.
db.query("SET @uid := %s" % getrandbits(UID_SAFE_BITSIZE))
try: try:
portal.SQLBase_writeMessageList( db.query(self._insert_template % (self.sql_table, values))
table=self.sql_table,
uid_list=[
getrandbits(UID_SAFE_BITSIZE)
for _ in xrange(len(message_list))
],
path_list=path_list,
active_process_uid_list=active_process_uid_list,
method_id_list=method_id_list,
priority_list=priority_list,
message_list=map(Message.dump, message_list),
group_method_id_list=group_method_id_list,
date_list=date_list,
tag_list=tag_list,
processing_node_list=processing_node_list,
serialization_tag_list=serialization_tag_list)
except MySQLdb.IntegrityError, (code, _): except MySQLdb.IntegrityError, (code, _):
if code != DUP_ENTRY: if code != DUP_ENTRY:
raise raise
reset_uid = True
else: else:
break break
else: else:
raise ValueError("Maximum retry for SQLBase_writeMessageList reached") raise RuntimeError("Maximum retry for prepareQueueMessageList reached")
i = 0
def getNow(self, context): reset_uid = True
""" values_list = []
Return the current value for SQL server's NOW(). max_payload = self._insert_max_payload
Note that this value is not cached, and is not transactionnal on MySQL sep_len = len(self._insert_separator)
side. for m in message_list:
""" if m.is_registered:
result = context.SQLBase_getNow() active_process_uid = m.active_process_uid
assert len(result) == 1 order_validation_text = m.order_validation_text = \
assert len(result[0]) == 1 self.getOrderValidationText(m)
return result[0][0] date = m.activity_kw.get('at_date')
row = ','.join((
'@uid+%s' % i,
quote('/'.join(m.object_path)),
'NULL' if active_process_uid is None else str(active_process_uid),
"UTC_TIMESTAMP(6)" if date is None else quote(render_datetime(date)),
quote(m.method_id),
'0' if order_validation_text == 'none' else '-1',
str(m.activity_kw.get('priority', 1)),
quote(m.getGroupId()),
quote(m.activity_kw.get('tag', '')),
quote(m.activity_kw.get('serialization_tag', '')),
quote(Message.dump(m))))
i += 1
n = sep_len + len(row)
max_payload -= n
if max_payload < 0:
if values_list:
insert(reset_uid)
reset_uid = False
max_payload = self._insert_max_payload - n
else:
raise ValueError("max_allowed_packet too small to insert message")
values_list.append(row)
if values_list:
insert(reset_uid)
def _getMessageList(self, activity_tool, count=1000, src__=0, **kw): def _getMessageList(self, db, count=1000, src__=0, **kw):
# XXX: Because most columns have NOT NULL constraint, conditions with None # XXX: Because most columns have NOT NULL constraint, conditions with None
# value should be ignored, instead of trying to render them # value should be ignored, instead of trying to render them
# (with comparisons with NULL). # (with comparisons with NULL).
sql_connection = activity_tool.getPortalObject().cmf_activity_sql_connection q = db.string_literal
q = sql_connection.sql_quote__
sql = '\n AND '.join(sqltest_dict[k](v, q) for k, v in kw.iteritems()) sql = '\n AND '.join(sqltest_dict[k](v, q) for k, v in kw.iteritems())
sql = "SELECT * FROM %s%s\nORDER BY priority, date, uid%s" % ( sql = "SELECT * FROM %s%s\nORDER BY priority, date, uid%s" % (
self.sql_table, self.sql_table,
sql and '\nWHERE ' + sql, sql and '\nWHERE ' + sql,
'' if count is None else '\nLIMIT %d' % count, '' if count is None else '\nLIMIT %d' % count,
) )
return sql if src__ else Results(sql_connection().query(sql, max_rows=0)) return sql if src__ else Results(db.query(sql, max_rows=0))
def getMessageList(self, *args, **kw): def getMessageList(self, activity_tool, *args, **kw):
result = self._getMessageList(*args, **kw) result = self._getMessageList(activity_tool.getSQLConnection(), *args, **kw)
if type(result) is str: # src__ == 1 if type(result) is str: # src__ == 1
return result, return result,
class_name = self.__class__.__name__ class_name = self.__class__.__name__
@@ -219,61 +256,30 @@ class SQLBase(Queue):
activity=class_name, activity=class_name,
uid=line.uid, uid=line.uid,
processing_node=line.processing_node, processing_node=line.processing_node,
retry=line.retry, retry=line.retry)
processing=line.processing)
for line in result] for line in result]
def countMessage(self, activity_tool, tag=None, path=None, def countMessageSQL(self, quote, **kw):
method_id=None, message_uid=None, **kw): return "SELECT count(*) FROM %s WHERE processing_node > -10 AND %s" % (
"""Return the number of messages which match the given parameters. self.sql_table, " AND ".join(
""" sqltest_dict[k](v, quote) for (k, v) in kw.iteritems() if v
if isinstance(tag, str): ) or "1")
tag = [tag]
if isinstance(path, str): def hasActivitySQL(self, quote, only_valid=False, only_invalid=False, **kw):
path = [path] where = [sqltest_dict[k](v, quote) for (k, v) in kw.iteritems() if v]
if isinstance(method_id, str): if only_valid:
method_id = [method_id] where.append('processing_node > -2')
result = activity_tool.SQLBase_validateMessageList(table=self.sql_table, if only_invalid:
method_id=method_id, where.append('processing_node < -1')
path=path, return "SELECT 1 FROM %s WHERE %s LIMIT 1" % (
message_uid=message_uid, self.sql_table, " AND ".join(where) or "1")
tag=tag,
serialization_tag=None,
count=1)
return result[0].uid_count
def hasActivity(self, activity_tool, object, method_id=None, only_valid=None,
active_process_uid=None,
only_invalid=False):
hasMessage = getattr(activity_tool, 'SQLBase_hasMessage', None)
if hasMessage is not None:
if object is None:
path = None
else:
path = '/'.join(object.getPhysicalPath())
try:
result = hasMessage(table=self.sql_table, path=path, method_id=method_id,
only_valid=only_valid, active_process_uid=active_process_uid,
only_invalid=only_invalid)
except DatabaseError:
LOG(
'SQLBase',
ERROR,
'%r raised, considering there are no activities' % (
hasMessage,
),
error=True,
)
else:
return result[0].message_count > 0
return 0
def getPriority(self, activity_tool): def getPriority(self, activity_tool):
result = activity_tool.SQLBase_getPriority(table=self.sql_table) result = activity_tool.getSQLConnection().query(
if result: "SELECT priority, date FROM %s"
result, = result " WHERE processing_node=0 AND date <= UTC_TIMESTAMP(6)"
return result['priority'], result['date'] " ORDER BY priority, date LIMIT 1" % self.sql_table, 0)[1]
return Queue.getPriority(self, activity_tool) return result[0] if result else Queue.getPriority(self, activity_tool)
def _retryOnLockError(self, method, args=(), kw={}): def _retryOnLockError(self, method, args=(), kw={}):
while True: while True:
@@ -285,74 +291,61 @@ class SQLBase(Queue):
LOG('SQLBase', INFO, 'Got a lock error, retrying...') LOG('SQLBase', INFO, 'Got a lock error, retrying...')
# Validation private methods # Validation private methods
def _validate(self, activity_tool, method_id=None, message_uid=None, path=None, tag=None, def getValidationSQL(self, quote, activate_kw, same_queue):
serialization_tag=None): validate_list = []
if isinstance(method_id, str): for k, v in activate_kw.iteritems():
method_id = [method_id] if v is not None:
if isinstance(path, str): try:
path = [path] method = getattr(self, '_validate_' + k, None)
if isinstance(tag, str): if method:
tag = [tag] validate_list.append(' AND '.join(method(v, quote)))
except Exception:
if method_id or message_uid or path or tag or serialization_tag: LOG('CMFActivity', WARNING, 'invalid %s value: %r' % (k, v),
result = activity_tool.SQLBase_validateMessageList(table=self.sql_table, error=True)
method_id=method_id, # Prevent validation by depending on anything, at least itself.
message_uid=message_uid, validate_list = '1',
path=path, same_queue = False
tag=tag, break
count=False, if validate_list:
serialization_tag=serialization_tag) return ("SELECT '%s' as activity, uid, date, processing_node,"
message_list = [] " priority, group_method_id, message FROM %s"
for line in result: " WHERE processing_node > -10 AND (%s) LIMIT %s" % (
m = Message.load(line.message, type(self).__name__, self.sql_table,
line=line, ' OR '.join(validate_list),
uid=line.uid, READ_MESSAGE_LIMIT if same_queue else 1))
date=line.date,
processing_node=line.processing_node)
if not hasattr(m, 'order_validation_text'): # BBB
m.order_validation_text = self.getOrderValidationText(m)
message_list.append(m)
return message_list
def _validate_after_method_id(self, activity_tool, message, value): def _validate_after_method_id(self, *args):
return self._validate(activity_tool, method_id=value) return sqltest_dict['method_id'](*args),
def _validate_after_path(self, activity_tool, message, value): def _validate_after_path(self, *args):
return self._validate(activity_tool, path=value) return sqltest_dict['path'](*args),
def _validate_after_message_uid(self, activity_tool, message, value): def _validate_after_message_uid(self, *args):
return self._validate(activity_tool, message_uid=value) return sqltest_dict['uid'](*args),
def _validate_after_path_and_method_id(self, activity_tool, message, value): def _validate_after_path_and_method_id(self, value, quote):
if not (isinstance(value, (tuple, list)) and len(value) == 2): path, method_id = value
LOG('CMFActivity', WARNING, return (sqltest_dict['method_id'](method_id, quote),
'unable to recognize value for after_path_and_method_id: %r' % (value,)) sqltest_dict['path'](path, quote))
return []
return self._validate(activity_tool, path=value[0], method_id=value[1])
def _validate_after_tag(self, activity_tool, message, value): def _validate_after_tag(self, *args):
return self._validate(activity_tool, tag=value) return sqltest_dict['tag'](*args),
def _validate_after_tag_and_method_id(self, activity_tool, message, value): def _validate_after_tag_and_method_id(self, value, quote):
# Count number of occurrences of tag and method_id tag, method_id = value
if not (isinstance(value, (tuple, list)) and len(value) == 2): return (sqltest_dict['method_id'](method_id, quote),
LOG('CMFActivity', WARNING, sqltest_dict['tag'](tag, quote))
'unable to recognize value for after_tag_and_method_id: %r' % (value,))
return []
return self._validate(activity_tool, tag=value[0], method_id=value[1])
def _validate_serialization_tag(self, activity_tool, message, value): def _validate_serialization_tag(self, *args):
return self._validate(activity_tool, serialization_tag=value) return 'processing_node > -1', sqltest_dict['serialization_tag'](*args)
def _log(self, severity, summary): def _log(self, severity, summary):
LOG(self.__class__.__name__, severity, summary, LOG(self.__class__.__name__, severity, summary,
error=severity>INFO and sys.exc_info() or None) error=severity>INFO and sys.exc_info() or None)
def distribute(self, activity_tool, node_count): def distribute(self, activity_tool, node_count):
assignMessage = getattr(activity_tool, 'SQLBase_assignMessage', None) db = activity_tool.getSQLConnection()
if assignMessage is None: now_date = getNow(db)
return
now_date = self.getNow(activity_tool)
where_kw = { where_kw = {
'processing_node': -1, 'processing_node': -1,
'to_date': now_date, 'to_date': now_date,
@@ -360,7 +353,7 @@ class SQLBase(Queue):
} }
validated_count = 0 validated_count = 0
while 1: while 1:
result = self._getMessageList(activity_tool, **where_kw) result = self._getMessageList(db, **where_kw)
if not result: if not result:
return return
transaction.commit() transaction.commit()
@@ -373,6 +366,7 @@ class SQLBase(Queue):
message.order_validation_text = self.getOrderValidationText(message) message.order_validation_text = self.getOrderValidationText(message)
self.getExecutableMessageList(activity_tool, message, message_dict, self.getExecutableMessageList(activity_tool, message, message_dict,
validation_text_dict, now_date=now_date) validation_text_dict, now_date=now_date)
transaction.commit()
if message_dict: if message_dict:
distributable_uid_set = set() distributable_uid_set = set()
serialization_tag_dict = {} serialization_tag_dict = {}
@@ -395,8 +389,7 @@ class SQLBase(Queue):
distributable_uid_set.add(message.uid) distributable_uid_set.add(message.uid)
distributable_count = len(distributable_uid_set) distributable_count = len(distributable_uid_set)
if distributable_count: if distributable_count:
assignMessage(table=self.sql_table, self.assignMessageList(db, 0, distributable_uid_set)
processing_node=0, uid=tuple(distributable_uid_set))
validated_count += distributable_count validated_count += distributable_count
if validated_count >= MAX_VALIDATED_LIMIT: if validated_count >= MAX_VALIDATED_LIMIT:
return return
@@ -404,60 +397,47 @@ class SQLBase(Queue):
where_kw['from_date'] = line.date where_kw['from_date'] = line.date
where_kw['above_uid'] = line.uid where_kw['above_uid'] = line.uid
def getReservedMessageList(self, activity_tool, date, processing_node, def getReservedMessageList(self, db, date, processing_node, limit,
limit=None, group_method_id=None): group_method_id=None):
""" """
Get and reserve a list of messages. Get and reserve a list of messages.
limit limit
Maximum number of messages to fetch. Maximum number of messages to fetch.
This number is not guaranteed to be reached, because of: This number is not guaranteed to be reached, because of not enough
- not enough messages being pending execution messages being pending execution.
- race condition (other nodes reserving the same messages at the same
time)
This number is guaranteed not to be exceeded.
If None (or not given), no limit applies.
""" """
assert limit assert limit
# Do not check already-assigned messages when trying to reserve more quote = db.string_literal
# activities, because in such case we will find one reserved activity. query = db.query
result = activity_tool.SQLBase_selectReservedMessageList( args = (self.sql_table, sqltest_dict['to_date'](date, quote),
table=self.sql_table, ' AND group_method_id=' + quote(group_method_id)
count=limit, if group_method_id else '' , limit)
processing_node=processing_node,
group_method_id=group_method_id, # Get reservable messages.
) # During normal operation, sorting by date (as last criteria) is fairer
limit -= len(result) # for users and reduce the probability to do the same work several times
if limit: # (think of an object that is modified several times in a short period of
reservable = activity_tool.SQLBase_getReservableMessageList( # time).
table=self.sql_table, if 1:
count=limit, result = Results(query(
processing_node=processing_node, "SELECT * FROM %s WHERE processing_node=0 AND %s%s"
to_date=date, " ORDER BY priority, date LIMIT %s FOR UPDATE" % args, 0))
group_method_id=group_method_id, if result:
) # Reserve messages.
if reservable: uid_list = [x.uid for x in result]
activity_tool.SQLBase_reserveMessageList( self.assignMessageList(db, processing_node, uid_list)
uid=[x.uid for x in reservable], self._log(TRACE, 'Reserved messages: %r' % uid_list)
table=self.sql_table,
processing_node=processing_node,
)
# DC.ZRDB.Results.Results does not implement concatenation
# Implement an imperfect (but cheap) concatenation. Do not update
# __items__ nor _data_dictionary.
assert result._names == reservable._names, (result._names,
reservable._names)
result._data += reservable._data
return result return result
return ()
def makeMessageListAvailable(self, activity_tool, uid_list): def assignMessageList(self, db, state, uid_list):
""" """
Put messages back in processing_node=0 . Put messages back in given processing_node.
""" """
if len(uid_list): db.query("UPDATE %s SET processing_node=%s WHERE uid IN (%s)\0COMMIT" % (
activity_tool.SQLBase_makeMessageListAvailable(table=self.sql_table, self.sql_table, state, ','.join(map(str, uid_list))))
uid=uid_list)
def getProcessableMessageLoader(self, activity_tool, processing_node): def getProcessableMessageLoader(self, db, processing_node):
# do not merge anything # do not merge anything
def load(line): def load(line):
uid = line.uid uid = line.uid
@@ -472,14 +452,12 @@ class SQLBase(Queue):
reserved (definitely lost, but they are expendable since redundant).
- reserve a message - reserve a message
- set reserved message to processing=1 state
- if this message has a group_method_id: - if this message has a group_method_id:
- reserve a bunch of messages - reserve a bunch of messages
- until the total "cost" of the group goes over 1 - until the total "cost" of the group goes over 1
- get one message from the reserved bunch (this message will be
"needed")
- update the total cost - update the total cost
- set "needed" reserved messages to processing=1 state
- unreserve "unneeded" messages - unreserve "unneeded" messages
- return still-reserved message list and a group_method_id - return still-reserved message list and a group_method_id
@@ -494,21 +472,28 @@ class SQLBase(Queue):
- group_method_id - group_method_id
- uid_to_duplicate_uid_list_dict - uid_to_duplicate_uid_list_dict
""" """
def getReservedMessageList(limit, group_method_id=None): db = activity_tool.getSQLConnection()
line_list = self.getReservedMessageList(activity_tool=activity_tool, now_date = getNow(db)
date=now_date,
processing_node=processing_node,
limit=limit,
group_method_id=group_method_id)
if line_list:
self._log(TRACE, 'Reserved messages: %r' % [x.uid for x in line_list])
return line_list
now_date = self.getNow(activity_tool)
uid_to_duplicate_uid_list_dict = {} uid_to_duplicate_uid_list_dict = {}
try: try:
result = getReservedMessageList(1) while 1: # not a loop
if result: # Select messages that were either assigned manually or left
load = self.getProcessableMessageLoader(activity_tool, processing_node) # unprocessed after a shutdown. Most of the time, there's none.
# To minimize the probability of deadlocks, we also COMMIT so that a
# new transaction starts on the first 'FOR UPDATE' query, which is all
# the more important as the current one started with getPriority().
result = db.query("SELECT * FROM %s WHERE processing_node=%s"
" ORDER BY priority, date LIMIT 1\0COMMIT" % (
self.sql_table, processing_node), 0)
already_assigned = result[1]
if already_assigned:
result = Results(result)
else:
result = self.getReservedMessageList(db, now_date, processing_node,
1)
if not result:
break
load = self.getProcessableMessageLoader(db, processing_node)
m, uid, uid_list = load(result[0]) m, uid, uid_list = load(result[0])
message_list = [m] message_list = [m]
uid_to_duplicate_uid_list_dict[uid] = uid_list uid_to_duplicate_uid_list_dict[uid] = uid_list
@@ -524,7 +509,17 @@ class SQLBase(Queue):
if limit > 1: # <=> cost * count < 1 if limit > 1: # <=> cost * count < 1
cost *= count cost *= count
# Retrieve objects which have the same group method. # Retrieve objects which have the same group method.
result = iter(getReservedMessageList(limit, group_method_id)) result = iter(already_assigned
and Results(db.query("SELECT * FROM %s"
" WHERE processing_node=%s AND group_method_id=%s"
" ORDER BY priority, date LIMIT %s" % (
self.sql_table, processing_node,
db.string_literal(group_method_id), limit), 0))
# Do not optimize rare case: keep the code simple by not
# adding more results from getReservedMessageList if the
# limit is not reached.
or self.getReservedMessageList(db, now_date, processing_node,
limit, group_method_id))
for line in result: for line in result:
if line.uid in uid_to_duplicate_uid_list_dict: if line.uid in uid_to_duplicate_uid_list_dict:
continue continue
@@ -538,10 +533,9 @@ class SQLBase(Queue):
message_list.append(m) message_list.append(m)
if cost >= 1: if cost >= 1:
# Unreserve extra messages as soon as possible. # Unreserve extra messages as soon as possible.
self.makeMessageListAvailable(activity_tool=activity_tool, uid_list = [line.uid for line in result if line.uid != uid]
uid_list=[line.uid for line in result if line.uid != uid]) if uid_list:
activity_tool.SQLBase_processMessage(table=self.sql_table, self.assignMessageList(db, 0, uid_list)
uid=uid_to_duplicate_uid_list_dict.keys())
return message_list, group_method_id, uid_to_duplicate_uid_list_dict return message_list, group_method_id, uid_to_duplicate_uid_list_dict
except: except:
self._log(WARNING, 'Exception while reserving messages.') self._log(WARNING, 'Exception while reserving messages.')
@@ -550,8 +544,7 @@ class SQLBase(Queue):
for uid_list in uid_to_duplicate_uid_list_dict.itervalues(): for uid_list in uid_to_duplicate_uid_list_dict.itervalues():
to_free_uid_list += uid_list to_free_uid_list += uid_list
try: try:
self.makeMessageListAvailable(activity_tool=activity_tool, self.assignMessageList(db, 0, to_free_uid_list)
uid_list=to_free_uid_list)
except: except:
self._log(ERROR, 'Failed to free messages: %r' % to_free_uid_list) self._log(ERROR, 'Failed to free messages: %r' % to_free_uid_list)
else: else:
@@ -559,7 +552,7 @@ class SQLBase(Queue):
self._log(TRACE, 'Freed messages %r' % to_free_uid_list) self._log(TRACE, 'Freed messages %r' % to_free_uid_list)
else: else:
self._log(TRACE, '(no message was reserved)') self._log(TRACE, '(no message was reserved)')
return [], None, uid_to_duplicate_uid_list_dict return (), None, None
def _abort(self): def _abort(self):
try: try:
@@ -636,6 +629,18 @@ class SQLBase(Queue):
transaction.commit() transaction.commit()
return not message_list return not message_list
def deleteMessageList(self, db, uid_list):
db.query("DELETE FROM %s WHERE uid IN (%s)" % (
self.sql_table, ','.join(map(str, uid_list))))
def reactivateMessageList(self, db, uid_list, delay, retry):
db.query("UPDATE %s SET"
" date = DATE_ADD(UTC_TIMESTAMP(6), INTERVAL %s SECOND)"
"%s WHERE uid IN (%s)" % (
self.sql_table, delay,
", priority = priority + 1, retry = retry + 1" if retry else "",
",".join(map(str, uid_list))))
def finalizeMessageExecution(self, activity_tool, message_list, def finalizeMessageExecution(self, activity_tool, message_list,
uid_to_duplicate_uid_list_dict=None): uid_to_duplicate_uid_list_dict=None):
""" """
@@ -648,6 +653,7 @@ class SQLBase(Queue):
be put in a permanent-error state. be put in a permanent-error state.
- In all other cases, retry count is increased and message is delayed. - In all other cases, retry count is increased and message is delayed.
""" """
db = activity_tool.getSQLConnection()
deletable_uid_list = [] deletable_uid_list = []
delay_uid_list = [] delay_uid_list = []
final_error_uid_list = [] final_error_uid_list = []
@@ -692,10 +698,7 @@ class SQLBase(Queue):
delay = VALIDATION_ERROR_DELAY * (retry * retry + 1) * 2 delay = VALIDATION_ERROR_DELAY * (retry * retry + 1) * 2
try: try:
# Immediately update, because values differ for every message
activity_tool.SQLBase_reactivate(table=self.sql_table, self.reactivateMessageList(db, (uid,), delay, True)
uid=[uid],
delay=delay,
retry=1)
except: except:
self._log(WARNING, 'Failed to reactivate %r' % uid) self._log(WARNING, 'Failed to reactivate %r' % uid)
make_available_uid_list.append(uid) make_available_uid_list.append(uid)
@@ -709,9 +712,7 @@ class SQLBase(Queue):
deletable_uid_list.append(uid) deletable_uid_list.append(uid)
if deletable_uid_list: if deletable_uid_list:
try: try:
self._retryOnLockError(activity_tool.SQLBase_delMessage, self._retryOnLockError(self.deleteMessageList, (db, deletable_uid_list))
kw={'table': self.sql_table,
'uid': deletable_uid_list})
except: except:
self._log(ERROR, 'Failed to delete messages %r' % deletable_uid_list) self._log(ERROR, 'Failed to delete messages %r' % deletable_uid_list)
else: else:
@@ -719,21 +720,19 @@ class SQLBase(Queue):
if delay_uid_list: if delay_uid_list:
try: try:
# If this is a conflict error, do not increase 'retry' but only delay. # If this is a conflict error, do not increase 'retry' but only delay.
activity_tool.SQLBase_reactivate(table=self.sql_table, self.reactivateMessageList(db, delay_uid_list,
uid=delay_uid_list, delay=VALIDATION_ERROR_DELAY, retry=None) VALIDATION_ERROR_DELAY, False)
except: except:
self._log(ERROR, 'Failed to delay %r' % delay_uid_list) self._log(ERROR, 'Failed to delay %r' % delay_uid_list)
if final_error_uid_list: if final_error_uid_list:
try: try:
activity_tool.SQLBase_assignMessage(table=self.sql_table, self.assignMessageList(db, INVOKE_ERROR_STATE, final_error_uid_list)
uid=final_error_uid_list, processing_node=INVOKE_ERROR_STATE)
except: except:
self._log(ERROR, 'Failed to set message to error state for %r' self._log(ERROR, 'Failed to set message to error state for %r'
% final_error_uid_list) % final_error_uid_list)
if make_available_uid_list: if make_available_uid_list:
try: try:
self.makeMessageListAvailable(activity_tool=activity_tool, self.assignMessageList(db, 0, make_available_uid_list)
uid_list=make_available_uid_list)
except: except:
self._log(ERROR, 'Failed to unreserve %r' % make_available_uid_list) self._log(ERROR, 'Failed to unreserve %r' % make_available_uid_list)
else: else:
@@ -762,17 +761,14 @@ class SQLBase(Queue):
except AttributeError: except AttributeError:
pass pass
line = getattr(message, 'line', None) line = getattr(message, 'line', None)
validate_value = VALID if line and line.processing_node != -1 else \ if (line and line.processing_node != -1 or
message.validate(self, activity_tool) not activity_tool.getDependentMessageList(message)):
if validate_value == VALID:
# Try to invoke the message - what happens if invoke calls flushActivity ?? # Try to invoke the message - what happens if invoke calls flushActivity ??
with ActivityRuntimeEnvironment(message): with ActivityRuntimeEnvironment(message):
activity_tool.invoke(message) activity_tool.invoke(message)
if message.getExecutionState() != MESSAGE_EXECUTED: if message.getExecutionState() != MESSAGE_EXECUTED:
raise ActivityFlushError('Could not invoke %s on %s' raise ActivityFlushError('Could not invoke %s on %s'
% (message.method_id, path)) % (message.method_id, path))
elif validate_value is INVALID_PATH:
raise ActivityFlushError('The document %s does not exist' % path)
else: else:
raise ActivityFlushError('Could not validate %s on %s' raise ActivityFlushError('Could not validate %s on %s'
% (message.method_id, path)) % (message.method_id, path))
@@ -783,13 +779,14 @@ class SQLBase(Queue):
invoke(m) invoke(m)
activity_tool.unregisterMessage(self, m) activity_tool.unregisterMessage(self, m)
uid_list = [] uid_list = []
for line in self._getMessageList(activity_tool, path=path, processing=0, db = activity_tool.getSQLConnection()
for line in self._getMessageList(db, path=path,
**({'method_id': method_id} if method_id else {})): **({'method_id': method_id} if method_id else {})):
uid_list.append(line.uid) uid_list.append(line.uid)
if invoke: if invoke and line.processing_node <= 0:
invoke(Message.load(line.message, uid=line.uid, line=line)) invoke(Message.load(line.message, uid=line.uid, line=line))
if uid_list: if uid_list:
activity_tool.SQLBase_delMessage(table=self.sql_table, uid=uid_list) self.deleteMessageList(db, uid_list)
# Required for tests # Required for tests
def timeShift(self, activity_tool, delay, processing_node=None): def timeShift(self, activity_tool, delay, processing_node=None):
@@ -797,5 +794,8 @@ class SQLBase(Queue):
To simulate time shift, we simply subtract the delay from
all dates in the message(_queue) table
""" """
activity_tool.SQLBase_timeShift(table=self.sql_table, delay=delay, activity_tool.getSQLConnection().query("UPDATE %s SET"
processing_node=processing_node) " date = DATE_SUB(date, INTERVAL %s SECOND)"
% (self.sql_table, delay)
+ ('' if processing_node is None else
"WHERE processing_node=%s" % processing_node))
@@ -26,6 +26,7 @@
# #
############################################################################## ##############################################################################
from Shared.DC.ZRDB.Results import Results
from Products.CMFActivity.ActivityTool import Message from Products.CMFActivity.ActivityTool import Message
import sys import sys
#from time import time #from time import time
@@ -74,8 +75,9 @@ class SQLDict(SQLBase):
message_list = activity_buffer.getMessageList(self) message_list = activity_buffer.getMessageList(self)
return [m for m in message_list if m.is_registered] return [m for m in message_list if m.is_registered]
def getProcessableMessageLoader(self, activity_tool, processing_node): def getProcessableMessageLoader(self, db, processing_node):
path_and_method_id_dict = {} path_and_method_id_dict = {}
quote = db.string_literal
def load(line): def load(line):
# getProcessableMessageList already fetches messages with the same
# group_method_id, so what remains to be filtered on are path and
@@ -87,6 +89,8 @@ class SQLDict(SQLBase):
uid = line.uid uid = line.uid
original_uid = path_and_method_id_dict.get(key) original_uid = path_and_method_id_dict.get(key)
if original_uid is None: if original_uid is None:
sql_method_id = " AND method_id = %s AND group_method_id = %s" % (
quote(method_id), quote(line.group_method_id))
m = Message.load(line.message, uid=uid, line=line) m = Message.load(line.message, uid=uid, line=line)
merge_parent = m.activity_kw.get('merge_parent') merge_parent = m.activity_kw.get('merge_parent')
try: try:
@@ -101,11 +105,14 @@ class SQLDict(SQLBase):
path_list.append(path) path_list.append(path)
uid_list = [] uid_list = []
if path_list: if path_list:
result = activity_tool.SQLDict_selectParentMessage( # Select parent messages.
path=path_list, result = Results(db.query("SELECT * FROM message"
method_id=method_id, " WHERE processing_node IN (0, %s) AND path IN (%s)%s"
group_method_id=line.group_method_id, " ORDER BY path LIMIT 1 FOR UPDATE" % (
processing_node=processing_node) processing_node,
','.join(map(quote, path_list)),
sql_method_id,
), 0))
if result: # found a parent if result: # found a parent
# mark child as duplicate # mark child as duplicate
uid_list.append(uid) uid_list.append(uid)
@@ -115,29 +122,32 @@ class SQLDict(SQLBase):
uid = line.uid uid = line.uid
m = Message.load(line.message, uid=uid, line=line) m = Message.load(line.message, uid=uid, line=line)
# return unreserved similar children # return unreserved similar children
result = activity_tool.SQLDict_selectChildMessageList( path = line.path
path=line.path, result = db.query("SELECT uid FROM message"
method_id=method_id, " WHERE processing_node = 0 AND (path = %s OR path LIKE %s)"
group_method_id=line.group_method_id) "%s FOR UPDATE" % (
reserve_uid_list = [x.uid for x in result] quote(path), quote(path.replace('_', r'\_') + '/%'),
sql_method_id,
), 0)[1]
reserve_uid_list = [x for x, in result]
uid_list += reserve_uid_list uid_list += reserve_uid_list
if not line.processing_node: if not line.processing_node:
# reserve found parent # reserve found parent
reserve_uid_list.append(uid) reserve_uid_list.append(uid)
else: else:
result = activity_tool.SQLDict_selectDuplicatedLineList( # Select duplicates.
path=path, result = db.query("SELECT uid FROM message"
method_id=method_id, " WHERE processing_node = 0 AND path = %s%s FOR UPDATE" % (
group_method_id=line.group_method_id) quote(path), sql_method_id,
reserve_uid_list = uid_list = [x.uid for x in result] ), 0)[1]
reserve_uid_list = uid_list = [x for x, in result]
if reserve_uid_list: if reserve_uid_list:
activity_tool.SQLDict_reserveDuplicatedLineList( self.assignMessageList(db, processing_node, reserve_uid_list)
processing_node=processing_node, uid=reserve_uid_list)
else: else:
activity_tool.SQLDict_commit() # release locks db.query("COMMIT") # XXX: useful ?
except: except:
self._log(WARNING, 'getDuplicateMessageUidList got an exception') self._log(WARNING, 'Failed to reserve duplicates')
activity_tool.SQLDict_rollback() # release locks db.query("ROLLBACK")
raise raise
if uid_list: if uid_list:
self._log(TRACE, 'Reserved duplicate messages: %r' % uid_list) self._log(TRACE, 'Reserved duplicate messages: %r' % uid_list)
......
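
One detail of the new child-message query above: '_' is a single-character wildcard in SQL LIKE, so literal underscores in the parent path are escaped before appending '/%' to match descendants. A standalone illustration with a made-up path:

    # How the LIKE pattern for "children of a path" is built in the new
    # SQLDict query; the example path is hypothetical.
    def child_path_pattern(path):
        return path.replace('_', r'\_') + '/%'

    print(child_path_pattern('erp5/portal_activities'))
    # -> erp5/portal\_activities/%  (matches erp5/portal_activities/<anything>)
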
@@ -31,7 +31,7 @@ from zLOG import LOG, TRACE, INFO, WARNING, ERROR, PANIC
import MySQLdb import MySQLdb
from MySQLdb.constants.ER import DUP_ENTRY from MySQLdb.constants.ER import DUP_ENTRY
from SQLBase import ( from SQLBase import (
SQLBase, sort_message_key, MAX_MESSAGE_LIST_SIZE, SQLBase, sort_message_key,
UID_SAFE_BITSIZE, UID_ALLOCATION_TRY_COUNT, UID_SAFE_BITSIZE, UID_ALLOCATION_TRY_COUNT,
) )
from Products.CMFActivity.ActivityTool import Message from Products.CMFActivity.ActivityTool import Message
@@ -45,77 +45,103 @@ class SQLJoblib(SQLDict):
sql_table = 'message_job' sql_table = 'message_job'
uid_group = 'portal_activity_job' uid_group = 'portal_activity_job'
def initialize(self, activity_tool, clear): def createTableSQL(self):
""" return """\
Initialize the message table using MYISAM Engine CREATE TABLE %s (
""" `uid` BIGINT UNSIGNED NOT NULL,
folder = activity_tool.getPortalObject().portal_skins.activity `date` DATETIME(6) NOT NULL,
try: `path` VARCHAR(255) NOT NULL,
createMessageTable = folder.SQLJoblib_createMessageTable `active_process_uid` INT UNSIGNED NULL,
except AttributeError: `method_id` VARCHAR(255) NOT NULL,
return `processing_node` SMALLINT NOT NULL DEFAULT -1,
if clear: `priority` TINYINT NOT NULL DEFAULT 0,
folder.SQLBase_dropMessageTable(table=self.sql_table) `group_method_id` VARCHAR(255) NOT NULL DEFAULT '',
createMessageTable() `tag` VARCHAR(255) NOT NULL,
else: `signature` BINARY(16) NOT NULL,
src = createMessageTable._upgradeSchema(create_if_not_exists=1, `serialization_tag` VARCHAR(255) NOT NULL,
initialize=self._initialize, `retry` TINYINT UNSIGNED NOT NULL DEFAULT 0,
table=self.sql_table) `message` LONGBLOB NOT NULL,
if src: PRIMARY KEY (`uid`),
LOG('CMFActivity', INFO, "%r table upgraded\n%s" KEY `processing_node_priority_date` (`processing_node`, `priority`, `date`),
% (self.sql_table, src)) KEY `node_group_priority_date` (`processing_node`, `group_method_id`, `priority`, `date`),
KEY `serialization_tag_processing_node` (`serialization_tag`, `processing_node`),
KEY (`path`),
KEY (`active_process_uid`),
KEY (`method_id`),
KEY (`tag`)
) ENGINE=InnoDB""" % self.sql_table
def generateMessageUID(self, m): def generateMessageUID(self, m):
return (tuple(m.object_path), m.method_id, m.activity_kw.get('signature'), return (tuple(m.object_path), m.method_id, m.activity_kw.get('signature'),
m.activity_kw.get('tag'), m.activity_kw.get('group_id')) m.activity_kw.get('tag'), m.activity_kw.get('group_id'))
_insert_template = ("INSERT INTO %s (uid,"
" path, active_process_uid, date, method_id, processing_node,"
" priority, group_method_id, tag, signature, serialization_tag,"
" message) VALUES\n(%s)")
def prepareQueueMessageList(self, activity_tool, message_list): def prepareQueueMessageList(self, activity_tool, message_list):
registered_message_list = [m for m in message_list if m.is_registered] db = activity_tool.getSQLConnection()
portal = activity_tool.getPortalObject() quote = db.string_literal
for i in xrange(0, len(registered_message_list), MAX_MESSAGE_LIST_SIZE): def insert(reset_uid):
message_list = registered_message_list[i:i+MAX_MESSAGE_LIST_SIZE] values = self._insert_separator.join(values_list)
path_list = ['/'.join(m.object_path) for m in message_list] del values_list[:]
active_process_uid_list = [m.active_process_uid for m in message_list]
method_id_list = [m.method_id for m in message_list]
priority_list = [m.activity_kw.get('priority', 1) for m in message_list]
date_list = [m.activity_kw.get('at_date') for m in message_list]
group_method_id_list = [m.getGroupId() for m in message_list]
tag_list = [m.activity_kw.get('tag', '') for m in message_list]
signature_list=[m.activity_kw.get('signature', '') for m in message_list]
serialization_tag_list = [m.activity_kw.get('serialization_tag', '')
for m in message_list]
processing_node_list = []
for m in message_list:
m.order_validation_text = x = self.getOrderValidationText(m)
processing_node_list.append(0 if x == 'none' else -1)
for _ in xrange(UID_ALLOCATION_TRY_COUNT): for _ in xrange(UID_ALLOCATION_TRY_COUNT):
if reset_uid:
reset_uid = False
# Overflow will result in an IntegrityError.
db.query("SET @uid := %s" % getrandbits(UID_SAFE_BITSIZE))
try: try:
portal.SQLJoblib_writeMessage( db.query(self._insert_template % (self.sql_table, values))
uid_list=[
getrandbits(UID_SAFE_BITSIZE)
for _ in xrange(len(message_list))
],
path_list=path_list,
active_process_uid_list=active_process_uid_list,
method_id_list=method_id_list,
priority_list=priority_list,
message_list=map(Message.dump, message_list),
group_method_id_list=group_method_id_list,
date_list=date_list,
tag_list=tag_list,
processing_node_list=processing_node_list,
signature_list=signature_list,
serialization_tag_list=serialization_tag_list)
except MySQLdb.IntegrityError, (code, _): except MySQLdb.IntegrityError, (code, _):
if code != DUP_ENTRY: if code != DUP_ENTRY:
raise raise
reset_uid = True
else: else:
break break
else: else:
raise ValueError("Maximum retry for SQLBase_writeMessageList reached") raise ValueError("Maximum retry for prepareQueueMessageList reached")
i = 0
reset_uid = True
values_list = []
max_payload = self._insert_max_payload
sep_len = len(self._insert_separator)
for m in message_list:
if m.is_registered:
active_process_uid = m.active_process_uid
order_validation_text = m.order_validation_text = \
self.getOrderValidationText(m)
date = m.activity_kw.get('at_date')
row = ','.join((
'@uid+%s' % i,
quote('/'.join(m.object_path)),
'NULL' if active_process_uid is None else str(active_process_uid),
"UTC_TIMESTAMP(6)" if date is None else quote(render_datetime(date)),
quote(m.method_id),
'0' if order_validation_text == 'none' else '-1',
str(m.activity_kw.get('priority', 1)),
quote(m.getGroupId()),
quote(m.activity_kw.get('tag', '')),
quote(m.activity_kw.get('signature', '')),
quote(m.activity_kw.get('serialization_tag', '')),
quote(Message.dump(m))))
i += 1
n = sep_len + len(row)
max_payload -= n
if max_payload < 0:
if values_list:
insert(reset_uid)
reset_uid = False
max_payload = self._insert_max_payload - n
else:
raise ValueError("max_allowed_packet too small to insert message")
values_list.append(row)
if values_list:
insert(reset_uid)
def getProcessableMessageLoader(self, activity_tool, processing_node): def getProcessableMessageLoader(self, db, processing_node):
path_and_method_id_dict = {} path_and_method_id_dict = {}
quote = db.string_literal
def load(line): def load(line):
# getProcessableMessageList already fetches messages with the same
# group_method_id, so what remains to be filtered on are path, method_id
@@ -128,19 +154,21 @@ class SQLJoblib(SQLDict):
if original_uid is None: if original_uid is None:
m = Message.load(line.message, uid=uid, line=line) m = Message.load(line.message, uid=uid, line=line)
try: try:
result = activity_tool.SQLJoblib_selectDuplicatedLineList( # Select duplicates.
path=path, result = db.query("SELECT uid FROM message_job"
method_id=method_id, " WHERE processing_node = 0 AND path = %s AND signature = %s"
group_method_id=line.group_method_id, " AND method_id = %s AND group_method_id = %s FOR UPDATE" % (
signature=line.signature) quote(path), quote(line.signature),
reserve_uid_list = uid_list = [x.uid for x in result] quote(method_id), quote(line.group_method_id),
if reserve_uid_list: ), 0)[1]
activity_tool.SQLBase_reserveMessageList( uid_list = [x for x, in result]
table=self.sql_table, if uid_list:
processing_node=processing_node, self.assignMessageList(db, processing_node, uid_list)
uid=reserve_uid_list) else:
db.query("COMMIT") # XXX: useful ?
except: except:
self._log(WARNING, 'getDuplicateMessageUidList got an exception') self._log(WARNING, 'Failed to reserve duplicates')
db.query("ROLLBACK")
raise raise
if uid_list: if uid_list:
self._log(TRACE, 'Reserved duplicate messages: %r' % uid_list) self._log(TRACE, 'Reserved duplicate messages: %r' % uid_list)
......
@@ -57,6 +57,7 @@ from Products.ERP5Type.UnrestrictedMethod import PrivilegedUser
from zope.site.hooks import setSite from zope.site.hooks import setSite
import transaction import transaction
from App.config import getConfiguration from App.config import getConfiguration
from Shared.DC.ZRDB.Results import Results
import Products.Localizer.patches import Products.Localizer.patches
localizer_lock = Products.Localizer.patches._requests_lock localizer_lock = Products.Localizer.patches._requests_lock
@@ -191,7 +192,6 @@ class Message(BaseMessage):
call_traceback = None call_traceback = None
exc_info = None exc_info = None
is_executed = MESSAGE_NOT_EXECUTED is_executed = MESSAGE_NOT_EXECUTED
processing = None
traceback = None traceback = None
oid = None oid = None
is_registered = False is_registered = False
@@ -367,11 +367,6 @@ class Message(BaseMessage):
except: except:
self.setExecutionState(MESSAGE_NOT_EXECUTED, context=activity_tool) self.setExecutionState(MESSAGE_NOT_EXECUTED, context=activity_tool)
def validate(self, activity, activity_tool, check_order_validation=1):
return activity.validate(activity_tool, self,
check_order_validation=check_order_validation,
**self.activity_kw)
def notifyUser(self, activity_tool, retry=False): def notifyUser(self, activity_tool, retry=False):
"""Notify the user that the activity failed.""" """Notify the user that the activity failed."""
portal = activity_tool.getPortalObject() portal = activity_tool.getPortalObject()
@@ -655,11 +650,6 @@ class ActivityTool (BaseTool):
activity_timing_log = False activity_timing_log = False
cancel_and_invoke_links_hidden = False cancel_and_invoke_links_hidden = False
def SQLDict_setPriority(self, **kw):
real_SQLDict_setPriority = getattr(self.aq_parent, 'SQLDict_setPriority')
LOG('ActivityTool', 0, real_SQLDict_setPriority(src__=1, **kw))
return real_SQLDict_setPriority(**kw)
# Filter content (ZMI)
def filtered_meta_types(self, user=None): def filtered_meta_types(self, user=None):
# Filters the list of available meta types. # Filters the list of available meta types.
@@ -670,6 +660,9 @@ class ActivityTool (BaseTool):
meta_types.append(meta_type) meta_types.append(meta_type)
return meta_types return meta_types
def getSQLConnection(self):
return self.aq_inner.aq_parent.cmf_activity_sql_connection()
def maybeMigrateConnectionClass(self): def maybeMigrateConnectionClass(self):
connection_id = 'cmf_activity_sql_connection' connection_id = 'cmf_activity_sql_connection'
sql_connection = getattr(self, connection_id, None) sql_connection = getattr(self, connection_id, None)
...@@ -689,6 +682,20 @@ class ActivityTool (BaseTool): ...@@ -689,6 +682,20 @@ class ActivityTool (BaseTool):
self.maybeMigrateConnectionClass() self.maybeMigrateConnectionClass()
for activity in activity_dict.itervalues(): for activity in activity_dict.itervalues():
activity.initialize(self, clear=False) activity.initialize(self, clear=False)
# Remove old skin if any.
skins_tool = self.getPortalObject().portal_skins
name = 'activity'
if (getattr(skins_tool.get(name), '_dirpath', None)
== 'Products.CMFActivity:skins/activity'):
for selection, skins in skins_tool.getSkinPaths():
skins = skins.split(',')
try:
skins.remove(name)
except ValueError:
continue
skins_tool.manage_skinLayers(
add_skin=1, skinname=selection, skinpath=skins)
skins_tool._delObject(name)
def _callSafeFunction(self, batch_function): def _callSafeFunction(self, batch_function):
return batch_function() return batch_function()
...@@ -1127,14 +1134,16 @@ class ActivityTool (BaseTool): ...@@ -1127,14 +1134,16 @@ class ActivityTool (BaseTool):
def hasActivity(self, *args, **kw): def hasActivity(self, *args, **kw):
# Check in each queue if the object has deferred tasks # Check in each queue if the object has deferred tasks
# if not argument is provided, then check on self # if not argument is provided, then check on self
if len(args) > 0: if args:
obj = args[0] obj, = args
else: else:
obj = self obj = self
for activity in activity_dict.itervalues(): path = None if obj is None else '/'.join(obj.getPhysicalPath())
if activity.hasActivity(aq_inner(self), obj, **kw): db = self.getSQLConnection()
return True quote = db.string_literal
return False return bool(db.query("(%s)" % ") UNION ALL (".join(
activity.hasActivitySQL(quote, path=path, **kw)
for activity in activity_dict.itervalues()))[1])
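For readability, roughly what the assembled hasActivity query looks like once the per-queue fragments are joined into a single round trip; the fragment shape below is an assumption (the real one comes from each activity's hasActivitySQL()):

# sketch only: one cheap SELECT per queue table, merged with UNION ALL
sql = "(%s)" % ") UNION ALL (".join(
  "SELECT 1 FROM %s WHERE path = %s LIMIT 1" % (table, quote(path))
  for table in ('message', 'message_queue', 'message_job'))
has_activity = bool(db.query(sql)[1])  # [1] is the list of result rows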
security.declarePrivate('getActivityBuffer') security.declarePrivate('getActivityBuffer')
def getActivityBuffer(self, create_if_not_found=True): def getActivityBuffer(self, create_if_not_found=True):
...@@ -1443,8 +1452,9 @@ class ActivityTool (BaseTool): ...@@ -1443,8 +1452,9 @@ class ActivityTool (BaseTool):
""" """
if not(isinstance(message_uid_list, list)): if not(isinstance(message_uid_list, list)):
message_uid_list = [message_uid_list] message_uid_list = [message_uid_list]
self.SQLBase_makeMessageListAvailable(table=activity_dict[activity].sql_table, if message_uid_list:
uid=message_uid_list) activity_dict[activity].unreserveMessageList(self.getSQLConnection(),
0, message_uid_list)
if REQUEST is not None: if REQUEST is not None:
return REQUEST.RESPONSE.redirect('%s/%s' % ( return REQUEST.RESPONSE.redirect('%s/%s' % (
self.absolute_url(), 'view')) self.absolute_url(), 'view'))
...@@ -1470,8 +1480,8 @@ class ActivityTool (BaseTool): ...@@ -1470,8 +1480,8 @@ class ActivityTool (BaseTool):
""" """
if not(isinstance(message_uid_list, list)): if not(isinstance(message_uid_list, list)):
message_uid_list = [message_uid_list] message_uid_list = [message_uid_list]
self.SQLBase_delMessage(table=activity_dict[activity].sql_table, activity_dict[activity].deleteMessageList(
uid=message_uid_list) self.getSQLConnection(), message_uid_list)
if REQUEST is not None: if REQUEST is not None:
return REQUEST.RESPONSE.redirect('%s/%s' % ( return REQUEST.RESPONSE.redirect('%s/%s' % (
self.absolute_url(), 'view')) self.absolute_url(), 'view'))
...@@ -1523,10 +1533,7 @@ class ActivityTool (BaseTool): ...@@ -1523,10 +1533,7 @@ class ActivityTool (BaseTool):
""" """
Return the number of messages which match the given tag. Return the number of messages which match the given tag.
""" """
message_count = 0 return self.countMessage(tag=value)
for activity in activity_dict.itervalues():
message_count += activity.countMessageWithTag(aq_inner(self), value)
return message_count
security.declarePublic('countMessage') security.declarePublic('countMessage')
def countMessage(self, **kw): def countMessage(self, **kw):
...@@ -1540,10 +1547,11 @@ class ActivityTool (BaseTool): ...@@ -1540,10 +1547,11 @@ class ActivityTool (BaseTool):
tag : activities with a particular tag tag : activities with a particular tag
message_uid : activities with a particular uid message_uid : activities with a particular uid
""" """
message_count = 0 db = self.getSQLConnection()
for activity in activity_dict.itervalues(): quote = db.string_literal
message_count += activity.countMessage(aq_inner(self), **kw) return sum(x for x, in db.query("(%s)" % ") UNION ALL (".join(
return message_count activity.countMessageSQL(quote, **kw)
for activity in activity_dict.itervalues()))[1])
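countMessage follows the same single-round-trip pattern, except each fragment is assumed to return one COUNT(*) row and the results are summed in Python; a sketch with a hypothetical tag filter:

sql = "(%s)" % ") UNION ALL (".join(
  "SELECT COUNT(*) FROM %s WHERE tag IN (%s)" % (table, quote(tag))
  for table in ('message', 'message_queue', 'message_job'))
message_count = sum(x for x, in db.query(sql)[1])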
security.declareProtected( CMFCorePermissions.ManagePortal , 'newActiveProcess' ) security.declareProtected( CMFCorePermissions.ManagePortal , 'newActiveProcess' )
def newActiveProcess(self, REQUEST=None, **kw): def newActiveProcess(self, REQUEST=None, **kw):
...@@ -1554,23 +1562,31 @@ class ActivityTool (BaseTool): ...@@ -1554,23 +1562,31 @@ class ActivityTool (BaseTool):
REQUEST['RESPONSE'].redirect( 'manage_main' ) REQUEST['RESPONSE'].redirect( 'manage_main' )
return obj return obj
# Active synchronisation methods
security.declarePrivate('validateOrder')
def validateOrder(self, message, validator_id, validation_value):
message_list = self.getDependentMessageList(message, validator_id, validation_value)
return len(message_list) > 0
security.declarePrivate('getDependentMessageList') security.declarePrivate('getDependentMessageList')
def getDependentMessageList(self, message, validator_id, validation_value): def getDependentMessageList(self, message, validating_queue=None):
message_list = [] activity_kw = message.activity_kw
method_id = "_validate_" + validator_id db = self.getSQLConnection()
quote = db.string_literal
queries = []
for activity in activity_dict.itervalues(): for activity in activity_dict.itervalues():
method = getattr(activity, method_id, None) q = activity.getValidationSQL(
if method is not None: quote, activity_kw, activity is validating_queue)
result = method(aq_inner(self), message, validation_value) if q:
if result: queries.append(q)
message_list += [(activity, m) for m in result] if queries:
message_list = []
for line in Results(db.query("(%s)" % ") UNION ALL (".join(queries))):
activity = activity_dict[line.activity]
m = Message.load(line.message,
line=line,
uid=line.uid,
date=line.date,
processing_node=line.processing_node)
if not hasattr(m, 'order_validation_text'): # BBB
m.order_validation_text = activity.getOrderValidationText(m)
message_list.append((activity, m))
return message_list return message_list
return ()
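As an illustration, any fragment returned by getValidationSQL() has to expose at least the columns consumed above (activity, uid, date, processing_node, message). The WHERE clause below is a hypothetical after_tag dependency filter, not the SQL actually generated by the queues:

fragment = ("SELECT 'SQLDict' AS activity, uid, date, processing_node, message"
            " FROM message"
            " WHERE processing_node > -1 AND tag IN (%s)" % quote('my_tag'))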
# Required for tests (time shift) # Required for tests (time shift)
def timeShift(self, delay): def timeShift(self, delay):
......
#!/bin/sh #!/bin/sh
set -e set -e
# Small watching script based on Sébastien idea.
# ideas: # ideas:
# - more control on what would be displayed # - more control on what would be displayed
...@@ -32,13 +31,47 @@ INTERVAL=$2 ...@@ -32,13 +31,47 @@ INTERVAL=$2
exit 1 exit 1
} }
SELECT="" node_priority_cols="processing_node AS node, MIN(priority) AS min_pri, MAX(priority) AS max_pri"
for t in message message_queue ; do for t in message:dict message_queue:queue message_job:joblib; do
SELECT=$SELECT""" table=${t%:*}
SELECT count(*) AS $t, ${text_group:-method_id}, processing, processing_node AS node, min(priority) AS min_pri, max(priority) AS max_pri FROM $t GROUP BY ${text_group:-method_id}, processing, processing_node ORDER BY node; t=${t#*:}
SELECT count(*) AS $t, processing, processing_node, min(priority) AS min_pri, max(priority) AS max_pri FROM $t GROUP BY processing, processing_node; create=$create"
SELECT priority as pri, MIN(timediff(NOW(), date)) AS min, AVG(timediff(NOW() , date)) AS avg, MAX(timediff(NOW() , date)) AS max FROM $t GROUP BY priority; CREATE TEMPORARY TABLE _$t(
SELECT count(*) AS ${t}_count FROM $t; n INT UNSIGNED NOT NULL,
""" ${text_group:-method_id} VARCHAR(255) NOT NULL,
processing_node SMALLINT NOT NULL,
priority TINYINT NOT NULL,
min_date DATETIME(6) NOT NULL,
max_date DATETIME(6) NOT NULL,
max_retry TINYINT UNSIGNED NOT NULL
) ENGINE=MEMORY;"
collect=$collect"
INSERT INTO _$t SELECT count(*), ${text_group:-method_id},
processing_node, priority, MIN(date), MAX(date), MAX(retry) FROM $table
GROUP BY processing_node, priority, ${text_group:-method_id};"
select=$select"
SELECT IFNULL(SUM(n),0) AS $t, ${text_group:-method_id},
$node_priority_cols, MAX(max_retry) AS max_retry FROM _$t
GROUP BY processing_node, ${text_group:-method_id}
ORDER BY processing_node, ${text_group:-method_id};
SELECT priority,
TIME_FORMAT(TIMEDIFF(UTC_TIMESTAMP(6), MAX(max_date)), \"%T\") AS min,
TIME_FORMAT(TIMEDIFF(UTC_TIMESTAMP(6), MIN(min_date)), \"%T\") AS max
FROM _$t GROUP BY priority ORDER BY priority;"
[ "$count" ] && {
not_processing=$not_processing" UNION ALL "
count=$count,
}
not_processing=$not_processing"
SELECT IFNULL(SUM(n),0) AS count, $node_priority_cols,
MIN(min_date) AS min_date, MAX(max_date) AS max_date
FROM _$t WHERE processing_node<=0 GROUP BY processing_node"
count=$count"(SELECT IFNULL(SUM(n),0) AS $t FROM _$t) as $t"
total=$total+$t
done done
exec watch -n ${INTERVAL:-5} "${MYSQL:-mysql} $MYSQL_OPT --disable-pager -t -e '$SELECT' " exec watch -n ${INTERVAL:-5} "${MYSQL:-mysql} $MYSQL_OPT --disable-pager -t -e '
SET autocommit=off;$create$collect
SELECT *, $total as total FROM $count;$select
SELECT SUM(count) as count, node, MIN(min_pri) AS min_pri, MAX(max_pri) AS max_pri,
MIN(min_date) AS min_date, MAX(max_date) AS max_date
FROM ($not_processing) as t GROUP BY node;'"
...@@ -50,7 +50,6 @@ Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. ...@@ -50,7 +50,6 @@ Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
<th align="left" valign="top">Named Parameters</th> <th align="left" valign="top">Named Parameters</th>
<th align="left" valign="top">Processing Node</th> <th align="left" valign="top">Processing Node</th>
<th align="left" valign="top">Retry</th> <th align="left" valign="top">Retry</th>
<th align="left" valign="top">Processing</th>
<th align="left" valign="top">Call Traceback</th> <th align="left" valign="top">Call Traceback</th>
</tr> </tr>
<dtml-in expr="getMessageList()"> <dtml-in expr="getMessageList()">
...@@ -84,11 +83,6 @@ Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. ...@@ -84,11 +83,6 @@ Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
</td> </td>
<td align="left" valign="top"><dtml-var processing_node></td> <td align="left" valign="top"><dtml-var processing_node></td>
<td align="left" valign="top"><dtml-var retry></td> <td align="left" valign="top"><dtml-var retry></td>
<td align="left" valign="top">
<dtml-if expr="processing is not None">
<dtml-var processing>
</dtml-if>
</td>
<td align="left" valign="top"> <td align="left" valign="top">
<dtml-if expr="call_traceback is not None"> <dtml-if expr="call_traceback is not None">
<pre><dtml-var call_traceback></pre> <pre><dtml-var call_traceback></pre>
......
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:1
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>table
processing_node
uid:list
</params>
UPDATE
<dtml-var table>
SET
processing_node=<dtml-sqlvar processing_node type="int">,
processing=0
WHERE
<dtml-sqltest uid type="int" multiple>
<dtml-var sql_delimiter>
COMMIT
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:1000
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>table</params>
CREATE TABLE <dtml-var table> (
`uid` BIGINT UNSIGNED NOT NULL,
`date` DATETIME(6) NOT NULL,
`path` VARCHAR(255) NOT NULL,
`active_process_uid` INT UNSIGNED NULL,
`method_id` VARCHAR(255) NOT NULL,
`processing_node` SMALLINT NOT NULL DEFAULT -1,
`processing` TINYINT NOT NULL DEFAULT 0,
`processing_date` DATETIME(6),
`priority` TINYINT NOT NULL DEFAULT 0,
`group_method_id` VARCHAR(255) NOT NULL DEFAULT '',
`tag` VARCHAR(255) NOT NULL,
`serialization_tag` VARCHAR(255) NOT NULL,
`retry` TINYINT UNSIGNED NOT NULL DEFAULT 0,
`message` LONGBLOB NOT NULL,
PRIMARY KEY (`uid`),
KEY (`path`),
KEY (`active_process_uid`),
KEY (`method_id`),
KEY `processing_node_processing` (`processing_node`, `processing`),
KEY `processing_node_priority_date` (`processing_node`, `priority`, `date`),
KEY `node_group_priority_date` (`processing_node`, `group_method_id`, `priority`, `date`),
KEY `serialization_tag_processing_node` (`serialization_tag`, `processing_node`),
KEY (`priority`),
KEY (`tag`)
) ENGINE=InnoDB
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:1000
max_cache:100
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>table
uid:list
</params>
DELETE FROM
<dtml-var table>
WHERE
<dtml-sqltest uid type="int" multiple>
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:1000
max_cache:100
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>table</params>
DROP TABLE IF EXISTS <dtml-var table>
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:0
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params></params>
SELECT UTC_TIMESTAMP(6)
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:0
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>table
</params>
SELECT `priority`, `date` FROM
<dtml-var table>
WHERE
processing_node = 0
AND date <= UTC_TIMESTAMP(6)
ORDER BY priority, date
LIMIT 1
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:0
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>table
processing_node
to_date
count
group_method_id
</params>
SELECT
*
FROM
<dtml-var table>
WHERE
processing_node=0
AND date <= <dtml-sqlvar to_date type="datetime(6)">
<dtml-if expr="group_method_id is not None">
AND group_method_id = <dtml-sqlvar group_method_id type="string">
</dtml-if>
ORDER BY
<dtml-comment>
  During normal operation, sorting by date (as a 2nd criterion) is fairer
  for users and reduces the probability of doing the same work several times
(think of an object that is modified several times in a short period of time).
</dtml-comment>
priority, date
LIMIT <dtml-sqlvar count type="int">
FOR UPDATE
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:1
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>table
path
method_id
active_process_uid
only_valid
only_invalid</params>
SELECT count(path) as message_count FROM
<dtml-var table>
WHERE 1 = 1
<dtml-if expr="path is not None">AND path = <dtml-sqlvar path type="string"> </dtml-if>
<dtml-if expr="method_id is not None">AND method_id = <dtml-sqlvar method_id type="string"></dtml-if>
<dtml-if expr="only_valid">AND processing_node > -2</dtml-if>
<dtml-if expr="only_invalid">AND processing_node < -1</dtml-if>
<dtml-if expr="active_process_uid is not None"> AND active_process_uid = <dtml-sqlvar active_process_uid type="int"> </dtml-if>
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:0
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>table
uid</params>
UPDATE
<dtml-var table>
SET
processing_node=0,
processing=0
WHERE
<dtml-sqltest uid type="int" multiple>
<dtml-var sql_delimiter>
COMMIT
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:1
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>table
uid</params>
UPDATE
<dtml-var table>
SET
processing_date = UTC_TIMESTAMP(6),
processing = 1
WHERE
<dtml-sqltest uid type="int" multiple>
<dtml-var sql_delimiter>
COMMIT
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:1000
max_cache:100
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>table
uid:list
retry
delay
</params>
UPDATE
<dtml-var table>
SET
date = DATE_ADD(UTC_TIMESTAMP(6), INTERVAL
<dtml-sqlvar delay type="int"> SECOND)
<dtml-if expr="retry is not None">
, priority = priority + <dtml-sqlvar retry type="int">
, retry = retry + <dtml-sqlvar retry type="int">
</dtml-if>
WHERE
<dtml-sqltest uid type="int" multiple>
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:0
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>table
processing_node
uid
</params>
UPDATE
<dtml-var table>
SET
processing_node=<dtml-sqlvar processing_node type="int">
WHERE
<dtml-sqltest uid type="int" multiple>
<dtml-var sql_delimiter>
COMMIT
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:0
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>table
processing_node
group_method_id
count</params>
SELECT
*
FROM
<dtml-var table>
WHERE
processing_node = <dtml-sqlvar processing_node type="int">
<dtml-if expr="group_method_id is not None">
AND group_method_id = <dtml-sqlvar group_method_id type="string">
</dtml-if>
<dtml-if expr="count is not None">
LIMIT <dtml-sqlvar count type="int">
</dtml-if>
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:1
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>table
delay
processing_node</params>
UPDATE
<dtml-var table>
SET
date = DATE_SUB(date, INTERVAL <dtml-sqlvar delay type="int"> SECOND),
processing_date = DATE_SUB(processing_date, INTERVAL <dtml-sqlvar delay type="int"> SECOND)
<dtml-if expr="processing_node is not None">
WHERE <dtml-sqltest processing_node type="int">
</dtml-if>
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:1000
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>table
method_id
message_uid
path
tag
count
serialization_tag
</params>
SELECT
<dtml-if expr="count">
COUNT(*) AS uid_count
<dtml-else>
*
</dtml-if>
FROM
<dtml-var table>
WHERE
processing_node > -10
<dtml-if expr="method_id">
AND method_id IN (
<dtml-in method_id><dtml-sqlvar sequence-item type="string"><dtml-if sequence-end><dtml-else>,</dtml-if></dtml-in>
)
</dtml-if>
<dtml-if expr="message_uid is not None">AND uid = <dtml-sqlvar message_uid type="int"> </dtml-if>
<dtml-if expr="path">
AND path IN (
<dtml-in path><dtml-sqlvar sequence-item type="string"><dtml-if sequence-end><dtml-else>,</dtml-if></dtml-in>
)
</dtml-if>
<dtml-if expr="tag">
AND tag IN (
<dtml-in tag><dtml-sqlvar sequence-item type="string"><dtml-if sequence-end><dtml-else>,</dtml-if></dtml-in>
)
</dtml-if>
<dtml-if expr="serialization_tag is not None">
AND processing_node > -1
AND serialization_tag = <dtml-sqlvar serialization_tag type="string">
</dtml-if>
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:1000
max_cache:100
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>table
uid_list
path_list
active_process_uid_list
method_id_list
message_list
priority_list
processing_node_list
date_list
group_method_id_list
tag_list
serialization_tag_list
</params>
INSERT INTO <dtml-var table>
(uid, path, active_process_uid, date, method_id, processing_node, processing, priority, group_method_id, tag, serialization_tag, message)
VALUES
<dtml-in prefix="loop" expr="_.range(_.len(path_list))">
<dtml-if sequence-start><dtml-else>,</dtml-if>
(
<dtml-sqlvar expr="uid_list[loop_item]" type="int">,
<dtml-sqlvar expr="path_list[loop_item]" type="string">,
<dtml-sqlvar expr="active_process_uid_list[loop_item]" type="int" optional>,
<dtml-if expr="date_list[loop_item] is not None"><dtml-sqlvar expr="date_list[loop_item]" type="datetime(6)"><dtml-else>UTC_TIMESTAMP(6)</dtml-if>,
<dtml-sqlvar expr="method_id_list[loop_item]" type="string">,
<dtml-sqlvar expr="processing_node_list[loop_item]" type="int">,
0,
<dtml-sqlvar expr="priority_list[loop_item]" type="int">,
<dtml-sqlvar expr="group_method_id_list[loop_item]" type="string">,
<dtml-sqlvar expr="tag_list[loop_item]" type="string">,
<dtml-sqlvar expr="serialization_tag_list[loop_item]" type="string">,
<dtml-sqlvar expr="message_list[loop_item]" type="string">
)
</dtml-in>
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:1000
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params></params>
COMMIT
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:0
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>
processing_node
uid
</params>
UPDATE
message
SET
processing_node=<dtml-sqlvar processing_node type="int">
WHERE
<dtml-sqltest uid type="int" multiple>
<dtml-var sql_delimiter>
COMMIT
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:1000
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params></params>
ROLLBACK
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:0
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>
path
method_id
group_method_id
</params>
SELECT uid FROM
message
WHERE
processing_node = 0
AND (path = <dtml-sqlvar path type="string">
OR path LIKE <dtml-sqlvar type="string"
expr="path.replace('_', r'\_') + '/%'">)
AND method_id = <dtml-sqlvar method_id type="string">
AND group_method_id = <dtml-sqlvar group_method_id type="string">
FOR UPDATE
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:0
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>
path
method_id
group_method_id
</params>
SELECT uid FROM
message
WHERE
processing_node = 0
AND path = <dtml-sqlvar path type="string">
AND method_id = <dtml-sqlvar method_id type="string">
AND group_method_id = <dtml-sqlvar group_method_id type="string">
FOR UPDATE
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:0
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>
path
method_id
group_method_id
processing_node
</params>
SELECT * FROM
message
WHERE
processing_node IN (0, <dtml-sqlvar processing_node type="int">)
AND <dtml-sqltest path type="string" multiple>
AND method_id = <dtml-sqlvar method_id type="string">
AND group_method_id = <dtml-sqlvar group_method_id type="string">
ORDER BY path
LIMIT 1
FOR UPDATE
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:1000
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params></params>
CREATE TABLE message_job (
`uid` BIGINT UNSIGNED NOT NULL,
`date` DATETIME(6) NOT NULL,
`path` VARCHAR(255) NOT NULL,
`active_process_uid` INT UNSIGNED NULL,
`method_id` VARCHAR(255) NOT NULL,
`processing_node` SMALLINT NOT NULL DEFAULT -1,
`processing` TINYINT NOT NULL DEFAULT 0,
`processing_date` DATETIME(6),
`priority` TINYINT NOT NULL DEFAULT 0,
`group_method_id` VARCHAR(255) NOT NULL DEFAULT '',
`tag` VARCHAR(255) NOT NULL,
`signature` BINARY(16) NOT NULL,
`serialization_tag` VARCHAR(255) NOT NULL,
`retry` TINYINT UNSIGNED NOT NULL DEFAULT 0,
`message` LONGBLOB NOT NULL,
PRIMARY KEY (`uid`),
KEY (`path`),
KEY (`active_process_uid`),
KEY (`method_id`),
KEY `processing_node_processing` (`processing_node`, `processing`),
KEY `processing_node_priority_date` (`processing_node`, `priority`, `date`),
KEY `node_group_priority_date` (`processing_node`, `group_method_id`, `priority`, `date`),
KEY `serialization_tag_processing_node` (`serialization_tag`, `processing_node`),
KEY (`priority`),
KEY (`tag`)
) ENGINE=InnoDB
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:0
max_cache:0
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>
path
method_id
group_method_id
signature
</params>
SELECT uid FROM
message_job
WHERE
processing_node = 0
AND path = <dtml-sqlvar path type="string">
AND method_id = <dtml-sqlvar method_id type="string">
AND group_method_id = <dtml-sqlvar group_method_id type="string">
AND signature = <dtml-sqlvar signature type="string">
FOR UPDATE
<dtml-comment>
title:
connection_id:cmf_activity_sql_connection
max_rows:1000
max_cache:100
cache_time:0
class_name:
class_file:
</dtml-comment>
<params>
uid_list
path_list
active_process_uid_list
method_id_list
message_list
priority_list
processing_node_list
date_list
group_method_id_list
tag_list
signature_list
serialization_tag_list
</params>
INSERT INTO message_job
(uid, path, active_process_uid, date, method_id, processing_node, processing, priority, group_method_id, tag, signature, serialization_tag, message)
VALUES
<dtml-in prefix="loop" expr="_.range(_.len(path_list))">
<dtml-if sequence-start><dtml-else>,</dtml-if>
(
<dtml-sqlvar expr="uid_list[loop_item]" type="int">,
<dtml-sqlvar expr="path_list[loop_item]" type="string">,
<dtml-sqlvar expr="active_process_uid_list[loop_item]" type="int" optional>,
<dtml-if expr="date_list[loop_item] is not None"><dtml-sqlvar expr="date_list[loop_item]" type="datetime(6)"><dtml-else>UTC_TIMESTAMP(6)</dtml-if>,
<dtml-sqlvar expr="method_id_list[loop_item]" type="string">,
<dtml-sqlvar expr="processing_node_list[loop_item]" type="int">,
0,
<dtml-sqlvar expr="priority_list[loop_item]" type="int">,
<dtml-sqlvar expr="group_method_id_list[loop_item]" type="string">,
<dtml-sqlvar expr="tag_list[loop_item]" type="string">,
<dtml-sqlvar expr="signature_list[loop_item]" type="string">,
<dtml-sqlvar expr="serialization_tag_list[loop_item]" type="string">,
<dtml-sqlvar expr="message_list[loop_item]" type="string">
)
</dtml-in>
...@@ -28,23 +28,24 @@ ...@@ -28,23 +28,24 @@
import inspect import inspect
import unittest import unittest
from functools import wraps
from Products.ERP5Type.tests.utils import LogInterceptor from Products.ERP5Type.tests.utils import LogInterceptor
from Testing import ZopeTestCase from Testing import ZopeTestCase
from Products.ERP5Type.tests.ERP5TypeTestCase import ERP5TypeTestCase from Products.ERP5Type.tests.ERP5TypeTestCase import ERP5TypeTestCase
from Products.ERP5Type.tests.utils import createZODBPythonScript from Products.ERP5Type.tests.utils import createZODBPythonScript
from Products.ERP5Type.Base import Base from Products.ERP5Type.Base import Base
from Products.CMFActivity import ActivityTool
from Products.CMFActivity.Activity.SQLBase import INVOKE_ERROR_STATE from Products.CMFActivity.Activity.SQLBase import INVOKE_ERROR_STATE
from Products.CMFActivity.Activity.Queue import VALIDATION_ERROR_DELAY from Products.CMFActivity.Activity.Queue import VALIDATION_ERROR_DELAY
from Products.CMFActivity.Activity.SQLDict import SQLDict from Products.CMFActivity.Activity.SQLDict import SQLDict
import Products.CMFActivity.ActivityTool
from Products.CMFActivity.Errors import ActivityPendingError, ActivityFlushError from Products.CMFActivity.Errors import ActivityPendingError, ActivityFlushError
from erp5.portal_type import Organisation from erp5.portal_type import Organisation
from AccessControl.SecurityManagement import newSecurityManager from AccessControl.SecurityManagement import newSecurityManager
from zLOG import LOG from zLOG import LOG
from ZODB.POSException import ConflictError from ZODB.POSException import ConflictError
from DateTime import DateTime from DateTime import DateTime
from Products.CMFActivity.ActivityTool import Message from Products.CMFActivity.ActivityTool import (
cancelProcessShutdown, Message, getCurrentNode, getServerAddress)
from _mysql_exceptions import OperationalError from _mysql_exceptions import OperationalError
from Products.ZMySQLDA.db import DB from Products.ZMySQLDA.db import DB
from sklearn.externals.joblib.hashing import hash as joblib_hash from sklearn.externals.joblib.hashing import hash as joblib_hash
...@@ -53,7 +54,6 @@ import random ...@@ -53,7 +54,6 @@ import random
import threading import threading
import weakref import weakref
import transaction import transaction
from Products.CMFActivity.ActivityTool import getCurrentNode, getServerAddress
from App.config import getConfiguration from App.config import getConfiguration
from asyncore import socket_map from asyncore import socket_map
import socket import socket
...@@ -61,6 +61,15 @@ import socket ...@@ -61,6 +61,15 @@ import socket
class CommitFailed(Exception): class CommitFailed(Exception):
pass pass
def for_each_activity(wrapped):
def wrapper(self):
getMessageList = self.portal.portal_activities.getMessageList
for activity in ActivityTool.activity_dict:
wrapped(self, activity)
self.abort()
self.assertFalse(getMessageList())
return wraps(wrapped)(wrapper)
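Usage sketch for the new decorator (the test name is hypothetical): each decorated test receives one registered queue per iteration, and the wrapper then checks that no activity message is left behind after each run.

@for_each_activity
def testSomethingHypothetical(self, activity):
  # 'activity' is one of the registered queues, e.g. 'SQLDict'
  self.getOrganisation().activate(activity=activity).getTitle()
  self.tic()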
def registerFailingTransactionManager(*args, **kw): def registerFailingTransactionManager(*args, **kw):
from Shared.DC.ZRDB.TM import TM from Shared.DC.ZRDB.TM import TM
class dummy_tm(TM): class dummy_tm(TM):
...@@ -109,6 +118,30 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -109,6 +118,30 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
o1 = organisation_module.newContent(id=self.company_id) o1 = organisation_module.newContent(id=self.company_id)
self.tic() self.tic()
def tearDown(self):
# Override ERP5 tearDown to make sure that tests do not leave unprocessed
# activity messages. We are testing CMFActivity so it's important to check
# that everything works as expected on this subject.
try:
if self._resultForDoCleanups.wasSuccessful():
getMessageList = self.portal.portal_activities.getMessageList
self.assertFalse(getMessageList())
      # Also check if a test drops them without committing.
self.abort()
self.assertFalse(getMessageList())
finally:
ERP5TypeTestCase.tearDown(self)
def getMessageList(self, activity, **kw):
return ActivityTool.activity_dict[activity].getMessageList(
self.portal.portal_activities, **kw)
def deleteMessageList(self, activity, message_list):
ActivityTool.activity_dict[activity].deleteMessageList(
self.portal.portal_activities.getSQLConnection(),
[m.uid for m in message_list])
self.commit()
def login(self): def login(self):
uf = self.portal.acl_users uf = self.portal.acl_users
uf._doAddUser('seb', '', ['Manager'], []) uf._doAddUser('seb', '', ['Manager'], [])
...@@ -116,7 +149,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -116,7 +149,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
user = uf.getUserById('seb').__of__(uf) user = uf.getUserById('seb').__of__(uf)
newSecurityManager(None, user) newSecurityManager(None, user)
def InvokeAndCancelActivity(self, activity): @for_each_activity
def testInvokeAndCancelActivity(self, activity):
""" """
Simple test where we invoke and cancel an activity Simple test where we invoke and cancel an activity
""" """
...@@ -144,10 +178,9 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -144,10 +178,9 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
      # Needed so that the messages are removed from the queue # Needed so that the messages are removed from the queue
self.commit() self.commit()
self.assertEqual(self.title2,organisation.getTitle()) self.assertEqual(self.title2,organisation.getTitle())
message_list = activity_tool.getMessageList()
self.assertEqual(len(message_list),0)
def DeferredSetTitleActivity(self, activity): @for_each_activity
def testDeferredSetTitleActivity(self, activity):
""" """
We check that the title is changed only after that We check that the title is changed only after that
the activity was called the activity was called
...@@ -162,10 +195,9 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -162,10 +195,9 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.assertEqual(self.title1,organisation.getTitle()) self.assertEqual(self.title1,organisation.getTitle())
activity_tool.tic() activity_tool.tic()
self.assertEqual(self.title2,organisation.getTitle()) self.assertEqual(self.title2,organisation.getTitle())
message_list = activity_tool.getMessageList()
self.assertEqual(len(message_list),0)
def CallOnceWithActivity(self, activity): @for_each_activity
def testCallOnceWithActivity(self, activity):
""" """
With this test we can check if methods are called With this test we can check if methods are called
only once (sometimes it was twice !!!) only once (sometimes it was twice !!!)
...@@ -201,11 +233,10 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -201,11 +233,10 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
activity_tool.manageInvoke(organisation.getPhysicalPath(),'setFoobar') activity_tool.manageInvoke(organisation.getPhysicalPath(),'setFoobar')
      # Needed so that the messages are committed into the queue # Needed so that the messages are committed into the queue
self.commit() self.commit()
message_list = activity_tool.getMessageList()
self.assertEqual(len(message_list),0)
self.assertEqual(2,organisation.getFoobar()) self.assertEqual(2,organisation.getFoobar())
def TryFlushActivity(self, activity): @for_each_activity
def testTryFlushActivity(self, activity):
""" """
Check the method flush Check the method flush
""" """
...@@ -227,7 +258,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -227,7 +258,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.assertEqual(organisation.getTitle(),self.title2) self.assertEqual(organisation.getTitle(),self.title2)
self.commit() self.commit()
def TryActivateInsideFlush(self, activity): @for_each_activity
def testTryActivateInsideFlush(self, activity):
""" """
Create a new activity inside a flush action Create a new activity inside a flush action
""" """
...@@ -242,11 +274,10 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -242,11 +274,10 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.commit() self.commit()
activity_tool.tic() activity_tool.tic()
self.commit() self.commit()
message_list = activity_tool.getMessageList()
self.assertEqual(len(message_list),0)
self.assertEqual(organisation.getTitle(),self.title2) self.assertEqual(organisation.getTitle(),self.title2)
def TryTwoMethods(self, activity): @for_each_activity
def testTryTwoMethods(self, activity):
""" """
Try several activities Try several activities
""" """
...@@ -266,12 +297,11 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -266,12 +297,11 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
activity_tool.distribute() activity_tool.distribute()
activity_tool.tic() activity_tool.tic()
self.commit() self.commit()
message_list = activity_tool.getMessageList()
self.assertEqual(len(message_list),0)
self.assertEqual(organisation.getTitle(),self.title1) self.assertEqual(organisation.getTitle(),self.title1)
self.assertEqual(organisation.getDescription(),self.title1) self.assertEqual(organisation.getDescription(),self.title1)
def TryTwoMethodsAndFlushThem(self, activity): @for_each_activity
def testTryTwoMethodsAndFlushThem(self, activity):
""" """
make sure flush works with several activities make sure flush works with several activities
""" """
...@@ -292,8 +322,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -292,8 +322,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
activity_tool.distribute() activity_tool.distribute()
activity_tool.tic() activity_tool.tic()
self.commit() self.commit()
message_list = activity_tool.getMessageList()
self.assertEqual(len(message_list),0)
self.assertEqual(organisation.getTitle(),self.title1) self.assertEqual(organisation.getTitle(),self.title1)
self.assertEqual(organisation.getDescription(),self.title1) self.assertEqual(organisation.getDescription(),self.title1)
...@@ -322,12 +350,11 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -322,12 +350,11 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
activity_tool.distribute() activity_tool.distribute()
activity_tool.tic() activity_tool.tic()
self.commit() self.commit()
message_list = activity_tool.getMessageList()
self.assertEqual(len(message_list),0)
self.assertEqual(organisation.getTitle(),self.title1) self.assertEqual(organisation.getTitle(),self.title1)
self.assertEqual(organisation.getDescription(),self.title1) self.assertEqual(organisation.getDescription(),self.title1)
def TryMessageWithErrorOnActivity(self, activity): @for_each_activity
def testTryMessageWithErrorOnActivity(self, activity):
""" """
Make sure that message with errors are not deleted Make sure that message with errors are not deleted
""" """
...@@ -350,10 +377,9 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -350,10 +377,9 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
activity_tool.manageCancel(organisation.getPhysicalPath(),'crashThisActivity') activity_tool.manageCancel(organisation.getPhysicalPath(),'crashThisActivity')
      # Needed so that the messages are committed into the queue # Needed so that the messages are committed into the queue
self.commit() self.commit()
message_list = activity_tool.getMessageList()
self.assertEqual(len(message_list),0)
def DeferredSetTitleWithRenamedObject(self, activity): @for_each_activity
def testDeferredSetTitleWithRenamedObject(self, activity):
""" """
make sure that it is impossible to rename an object make sure that it is impossible to rename an object
if some activities are still waiting for this object if some activities are still waiting for this object
...@@ -386,8 +412,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -386,8 +412,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
result = active_process.getResultList()[0] result = active_process.getResultList()[0]
self.assertEqual(result.method_id , 'getTitle') self.assertEqual(result.method_id , 'getTitle')
self.assertEqual(result.result , self.title1) self.assertEqual(result.result , self.title1)
message_list = activity_tool.getMessageList()
self.assertEqual(len(message_list),0)
def TryActiveProcessWithResultDict(self, activity): def TryActiveProcessWithResultDict(self, activity):
""" """
...@@ -417,11 +441,9 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -417,11 +441,9 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
result = result_dict[3] result = result_dict[3]
self.assertEqual(result_dict[3].method_id, 'getTitle') self.assertEqual(result_dict[3].method_id, 'getTitle')
self.assertEqual(result.result , self.title1) self.assertEqual(result.result , self.title1)
message_list = activity_tool.getMessageList()
self.assertEqual(len(message_list),0)
def TryMethodAfterMethod(self, activity): @for_each_activity
def testTryMethodAfterMethod(self, activity):
""" """
Ensure the order of an execution by a method id Ensure the order of an execution by a method id
""" """
...@@ -444,7 +466,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -444,7 +466,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.tic() self.tic()
self.assertEqual(o.getTitle(), 'acb') self.assertEqual(o.getTitle(), 'acb')
def TryAfterTag(self, activity): @for_each_activity
def testTryAfterTag(self, activity):
""" """
Ensure the order of an execution by a tag Ensure the order of an execution by a tag
""" """
...@@ -468,7 +491,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -468,7 +491,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.tic() self.tic()
self.assertEqual(o.getCorporateName(), 'cd') self.assertEqual(o.getCorporateName(), 'cd')
def TryFlushActivityWithAfterTag(self, activity): @for_each_activity
def testTryFlushActivityWithAfterTag(self, activity):
""" """
Ensure the order of an execution by a tag Ensure the order of an execution by a tag
""" """
...@@ -490,11 +514,11 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -490,11 +514,11 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.assertEqual(o.getTitle(), 'a') self.assertEqual(o.getTitle(), 'a')
self.assertEqual(o.getDescription(), '?') self.assertEqual(o.getDescription(), '?')
self.tic() self.tic()
self.assertEqual(len(tool.getMessageList()),0)
self.assertEqual(o.getTitle(), 'a') self.assertEqual(o.getTitle(), 'a')
self.assertEqual(o.getDescription(), 'b') self.assertEqual(o.getDescription(), 'b')
def CheckScheduling(self, activity): @for_each_activity
def testScheduling(self, activity):
""" """
Check if active objects with different after parameters are executed in a correct order Check if active objects with different after parameters are executed in a correct order
""" """
...@@ -516,7 +540,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -516,7 +540,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.tic() self.tic()
self.assertEqual(o.getTitle(), 'cb') self.assertEqual(o.getTitle(), 'cb')
def CheckSchedulingAfterTagList(self, activity): @for_each_activity
def testSchedulingAfterTagList(self, activity):
""" """
Check if active objects with different after parameters are executed in a Check if active objects with different after parameters are executed in a
correct order, when after_tag is passed as a list correct order, when after_tag is passed as a list
...@@ -538,7 +563,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -538,7 +563,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.tic() self.tic()
self.assertEqual(o.getTitle(), 'last') self.assertEqual(o.getTitle(), 'last')
def CheckCountMessageWithTag(self, activity): @for_each_activity
def testCheckCountMessageWithTag(self, activity):
""" """
Check countMessageWithTag function. Check countMessageWithTag function.
""" """
...@@ -555,39 +581,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -555,39 +581,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.assertEqual(o.getTitle(), 'a') self.assertEqual(o.getTitle(), 'a')
self.assertEqual(activity_tool.countMessageWithTag('toto'), 0) self.assertEqual(activity_tool.countMessageWithTag('toto'), 0)
def TryConflictErrorsWhileValidating(self, activity): def testTryErrorsWhileFinishingCommitDB(self):
"""Try to execute active objects which may throw conflict errors
while validating, and check if they are still executed."""
o = self.getOrganisation()
# Monkey patch Queue to induce conflict errors artificially.
def validate(self, *args, **kwargs):
from Products.CMFActivity.Activity.Queue import Queue
if Queue.current_num_conflict_errors < Queue.conflict_errors_limit:
Queue.current_num_conflict_errors += 1
# LOG('TryConflictErrorsWhileValidating', 0, 'causing a conflict error artificially')
raise ConflictError
return self.original_validate(*args, **kwargs)
from Products.CMFActivity.Activity.Queue import Queue
Queue.original_validate = Queue.validate
Queue.validate = validate
try:
# Test some range of conflict error occurences.
for i in xrange(10):
Queue.current_num_conflict_errors = 0
Queue.conflict_errors_limit = i
o.activate(activity = activity).getId()
self.commit()
self.flushAllActivities(silent = 1, loop_size = i + 10)
self.assertFalse(self.portal.portal_activities.getMessageList())
finally:
Queue.validate = Queue.original_validate
del Queue.original_validate
del Queue.current_num_conflict_errors
del Queue.conflict_errors_limit
def TryErrorsWhileFinishingCommitDB(self, activity):
"""Try to execute active objects which may throw conflict errors """Try to execute active objects which may throw conflict errors
while validating, and check if they are still executed.""" while validating, and check if they are still executed."""
activity_tool = self.portal.portal_activities activity_tool = self.portal.portal_activities
...@@ -602,7 +596,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -602,7 +596,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
    # Test some range of conflict error occurrences. # Test some range of conflict error occurrences.
self.portal.organisation_module.reindexObject() self.portal.organisation_module.reindexObject()
self.commit() self.commit()
self.assertEqual(len(activity_tool.getMessageList()), 1) message, = activity_tool.getMessageList()
try: try:
DB.original_query = DB.query DB.original_query = DB.query
DB.query = query DB.query = query
...@@ -612,148 +606,43 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -612,148 +606,43 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
finally: finally:
DB.query = DB.original_query DB.query = DB.original_query
del DB.original_query del DB.original_query
self.assertEqual(len(activity_tool.getMessageList()), 1) self.deleteMessageList('SQLDict', [message])
  def checkIsMessageRegisteredMethod(self, activity):
    activity_tool = self.portal.portal_activities
    object_b = self.getOrganisation()
    object_a = object_b.getParentValue()
    # First case: creating the same activity twice must only register one.
    self.assertEqual(len(activity_tool.getMessageList()), 0) # Sanity check
    object_a.activate(activity=activity).getId()
    object_a.activate(activity=activity).getId()
    self.commit()
    self.assertEqual(len(activity_tool.getMessageList()), 1)
    activity_tool.manageClearActivities()
    self.commit()
    # Second case: creating activity with same tag must only register one.
    # This behaviour is actually the same as the no-tag behaviour.
    self.assertEqual(len(activity_tool.getMessageList()), 0) # Sanity check
    object_a.activate(activity=activity, tag='foo').getId()
    object_a.activate(activity=activity, tag='foo').getId()
    self.commit()
    self.assertEqual(len(activity_tool.getMessageList()), 1)
    activity_tool.manageClearActivities()
    self.commit()
    # Third case: creating activities with different tags must register both.
    self.assertEqual(len(activity_tool.getMessageList()), 0) # Sanity check
    object_a.activate(activity=activity, tag='foo').getId()
    object_a.activate(activity=activity, tag='bar').getId()
    self.commit()
    self.assertEqual(len(activity_tool.getMessageList()), 2)
    activity_tool.manageClearActivities()
    self.commit()
    # Fourth case: creating activities on different objects must register
    # both.
    self.assertEqual(len(activity_tool.getMessageList()), 0) # Sanity check
    object_a.activate(activity=activity).getId()
    object_b.activate(activity=activity).getId()
    self.commit()
    self.assertEqual(len(activity_tool.getMessageList()), 2)
    activity_tool.manageClearActivities()
    self.commit()
    # Fifth case: creating activities with different method must register
    # both.
    self.assertEqual(len(activity_tool.getMessageList()), 0) # Sanity check
    object_a.activate(activity=activity).getId()
    object_a.activate(activity=activity).getTitle()
    self.commit()
    self.assertEqual(len(activity_tool.getMessageList()), 2)
    activity_tool.manageClearActivities()
    self.commit()
  @for_each_activity
  def testIsMessageRegisteredMethod(self, activity):
    dedup = activity != 'SQLQueue'
    activity_tool = self.portal.portal_activities
    object_b = self.getOrganisation()
    object_a = object_b.getParentValue()
    def check(count):
      self.commit()
      self.assertEqual(len(activity_tool.getMessageList()), count)
      self.tic()
    # First case: creating the same activity twice must only register one
    # for queues with deduplication.
    object_a.activate(activity=activity).getId()
    object_a.activate(activity=activity).getId()
    check(1 if dedup else 2)
    # Second case: creating activity with same tag must only register one,
    # for queues with deduplication.
    # This behaviour is actually the same as the no-tag behaviour.
    object_a.activate(activity=activity, tag='foo').getId()
    object_a.activate(activity=activity, tag='foo').getId()
    check(1 if dedup else 2)
    # Third case: creating activities with different tags must register both.
    object_a.activate(activity=activity, tag='foo').getId()
    object_a.activate(activity=activity, tag='bar').getId()
    check(2)
    # Fourth case: creating activities on different objects must register
    # both.
    object_a.activate(activity=activity).getId()
    object_b.activate(activity=activity).getId()
    check(2)
    # Fifth case: creating activities with different method must register
    # both.
    object_a.activate(activity=activity).getId()
    object_a.activate(activity=activity).getTitle()
    check(2)
def test_01_DeferredSetTitleSQLDict(self):
# Test if we can add a complete sales order
self.DeferredSetTitleActivity('SQLDict')
def test_02_DeferredSetTitleSQLQueue(self):
# Test if we can add a complete sales order
self.DeferredSetTitleActivity('SQLQueue')
def test_03_DeferredSetTitleSQLJoblib(self):
# Test if we can add a complete sales order
self.DeferredSetTitleActivity('SQLJoblib')
def test_05_InvokeAndCancelSQLDict(self):
# Test if we can add a complete sales order
self.InvokeAndCancelActivity('SQLDict')
def test_06_InvokeAndCancelSQLQueue(self):
# Test if we can add a complete sales order
self.InvokeAndCancelActivity('SQLQueue')
def test_07_InvokeAndCancelSQLJoblib(self):
self.InvokeAndCancelActivity('SQLJoblib')
def test_09_CallOnceWithSQLDict(self):
# Test if we call methods only once
self.CallOnceWithActivity('SQLDict')
def test_10_CallOnceWithSQLQueue(self):
# Test if we call methods only once
self.CallOnceWithActivity('SQLQueue')
def test_11_CallOnceWithSQLJoblib(self):
self.CallOnceWithActivity('SQLJoblib')
def test_13_TryMessageWithErrorOnSQLDict(self):
# Test if we call methods only once
self.TryMessageWithErrorOnActivity('SQLDict')
def test_14_TryMessageWithErrorOnSQLQueue(self):
# Test if we call methods only once
self.TryMessageWithErrorOnActivity('SQLQueue')
def test_15_TryMessageWithErrorOnSQLJoblib(self):
self.TryMessageWithErrorOnActivity('SQLJoblib')
def test_17_TryFlushActivityWithSQLDict(self):
# Test if we call methods only once
self.TryFlushActivity('SQLDict')
def test_18_TryFlushActivityWithSQLQueue(self):
# Test if we call methods only once
self.TryFlushActivity('SQLQueue')
def test_19_TryFlushActivityWithSQLJoblib(self):
# Test if we call methods only once
self.TryFlushActivity('SQLJoblib')
def test_21_TryActivateInsideFlushWithSQLDict(self):
# Test if we call methods only once
self.TryActivateInsideFlush('SQLDict')
def test_22_TryActivateInsideFlushWithSQLQueue(self):
# Test if we call methods only once
self.TryActivateInsideFlush('SQLQueue')
def test_23_TryActivateInsideFlushWithSQLQueue(self):
# Test if we call methods only once
self.TryActivateInsideFlush('SQLJoblib')
def test_25_TryTwoMethodsWithSQLDict(self):
# Test if we call methods only once
self.TryTwoMethods('SQLDict')
def test_26_TryTwoMethodsWithSQLQueue(self):
# Test if we call methods only once
self.TryTwoMethods('SQLQueue')
def test_27_TryTwoMethodsWithSQLJoblib(self):
# Test if we call methods only once
self.TryTwoMethods('SQLJoblib')
def test_29_TryTwoMethodsAndFlushThemWithSQLDict(self):
# Test if we call methods only once
self.TryTwoMethodsAndFlushThem('SQLDict')
def test_30_TryTwoMethodsAndFlushThemWithSQLQueue(self):
# Test if we call methods only once
self.TryTwoMethodsAndFlushThem('SQLQueue')
def test_31_TryTwoMethodsAndFlushThemWithSQLJoblib(self):
# Test if we call methods only once
self.TryTwoMethodsAndFlushThem('SQLJoblib')
def test_33_TryActivateFlushActivateTicWithSQLDict(self): def test_33_TryActivateFlushActivateTicWithSQLDict(self):
# Test if we call methods only once # Test if we call methods only once
...@@ -776,18 +665,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -776,18 +665,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
# Test if we call methods only once # Test if we call methods only once
self.TryActivateFlushActivateTic('SQLQueue',commit_sub=1) self.TryActivateFlushActivateTic('SQLQueue',commit_sub=1)
def test_42_TryRenameObjectWithSQLDict(self):
# Test if we call methods only once
self.DeferredSetTitleWithRenamedObject('SQLDict')
def test_43_TryRenameObjectWithSQLQueue(self):
# Test if we call methods only once
self.DeferredSetTitleWithRenamedObject('SQLQueue')
def test_44_TryRenameObjectWithSQLJoblib(self):
# Test if we call methods only once
self.DeferredSetTitleWithRenamedObject('SQLJoblib')
def test_46_TryActiveProcessWithSQLDict(self): def test_46_TryActiveProcessWithSQLDict(self):
# Test if we call methods only once # Test if we call methods only once
self.TryActiveProcess('SQLDict') self.TryActiveProcess('SQLDict')
...@@ -800,18 +677,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -800,18 +677,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
# Test if we call methods only once # Test if we call methods only once
self.TryActiveProcessWithResultDict('SQLJoblib') self.TryActiveProcessWithResultDict('SQLJoblib')
def test_54_TryAfterMethodIdWithSQLDict(self):
# Test if after_method_id can be used
self.TryMethodAfterMethod('SQLDict')
def test_55_TryAfterMethodIdWithSQLQueue(self):
# Test if after_method_id can be used
self.TryMethodAfterMethod('SQLQueue')
def test_56_TryAfterMethodIdWithSQLJoblib(self):
# Test if after_method_id can be used
self.TryMethodAfterMethod('SQLJoblib')
def test_57_TryCallActivityWithRightUser(self): def test_57_TryCallActivityWithRightUser(self):
    # Test if we execute methods with the right user # Test if we execute methods with the right user
    # This should be independent of the activity used # This should be independent of the activity used
...@@ -828,49 +693,11 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -828,49 +693,11 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
# Then execute activities as seb # Then execute activities as seb
user = uf.getUserById('seb').__of__(uf) user = uf.getUserById('seb').__of__(uf)
newSecurityManager(None, user) newSecurityManager(None, user)
self.commit() self.tic()
activity_tool.distribute()
activity_tool.tic()
email = organisation.get('email') email = organisation.get('email')
# Check if what we did was executed as toto # Check if what we did was executed as toto
self.assertEqual(email.getOwnerInfo()['id'],'toto') self.assertEqual(email.getOwnerInfo()['id'],'toto')
def test_59_TryAfterTagWithSQLDict(self):
# Test if after_tag can be used
self.TryAfterTag('SQLDict')
def test_60_TryAfterTagWithSQLQueue(self):
# Test if after_tag can be used
self.TryAfterTag('SQLQueue')
def test_61_TryAfterTagWithSQLJoblib(self):
# Test if after_tag can be used
self.TryAfterTag('SQLJoblib')
def test_62_CheckSchedulingWithSQLDict(self):
# Test if scheduling is correct with SQLDict
self.CheckScheduling('SQLDict')
def test_63_CheckSchedulingWithSQLQueue(self):
# Test if scheduling is correct with SQLQueue
self.CheckScheduling('SQLQueue')
def test_64_CheckSchedulingWithSQLJoblib(self):
# Test if scheduling is correct with SQLQueue
self.CheckScheduling('SQLJoblib')
def test_65_CheckSchedulingAfterTagListWithSQLDict(self):
# Test if scheduling is correct with SQLDict
self.CheckSchedulingAfterTagList('SQLDict')
def test_66_CheckSchedulingWithAfterTagListSQLQueue(self):
# Test if scheduling is correct with SQLQueue
self.CheckSchedulingAfterTagList('SQLQueue')
def test_67_CheckSchedulingWithAfterTagListSQLJoblib(self):
# Test if scheduling is correct with SQLQueue
self.CheckSchedulingAfterTagList('SQLJoblib')
def flushAllActivities(self, silent=0, loop_size=1000): def flushAllActivities(self, silent=0, loop_size=1000):
"""Executes all messages until the queue only contains failed """Executes all messages until the queue only contains failed
messages. messages.
...@@ -880,10 +707,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -880,10 +707,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
activity_tool.distribute(node_count=1) activity_tool.distribute(node_count=1)
activity_tool.tic(processing_node=1) activity_tool.tic(processing_node=1)
finished = 1 finished = all(message.processing_node == INVOKE_ERROR_STATE
for message in activity_tool.getMessageList(): for message in activity_tool.getMessageList())
if message.processing_node != INVOKE_ERROR_STATE:
finished = 0
activity_tool.timeShift(3 * VALIDATION_ERROR_DELAY) activity_tool.timeShift(3 * VALIDATION_ERROR_DELAY)
self.commit() self.commit()
...@@ -910,18 +735,17 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -910,18 +735,17 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
title=original_title) title=original_title)
# Monkey patch Organisation to add a failing method # Monkey patch Organisation to add a failing method
def failingMethod(self): def failingMethod(self):
raise ValueError, 'This method always fail' raise ValueError('This method always fail')
Organisation.failingMethod = failingMethod Organisation.failingMethod = failingMethod
activity_list = ['SQLQueue', 'SQLDict', 'SQLJoblib'] for activity in ActivityTool.activity_dict:
for activity in activity_list:
# reset # reset
activity_tool.manageClearActivities() activity_tool.manageClearActivities()
obj.setTitle(original_title) obj.setTitle(original_title)
self.commit() self.commit()
# activate failing message and flush # activate failing message and flush
for fail_activity in activity_list: for fail_activity in ActivityTool.activity_dict:
obj.activate(activity = fail_activity).failingMethod() obj.activate(activity = fail_activity).failingMethod()
self.commit() self.commit()
self.flushAllActivities(silent=1, loop_size=100) self.flushAllActivities(silent=1, loop_size=100)
...@@ -942,19 +766,16 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -942,19 +766,16 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
full_message_list = activity_tool.getMessageList() full_message_list = activity_tool.getMessageList()
remaining_messages = [a for a in full_message_list if a.method_id != remaining_messages = [a for a in full_message_list if a.method_id !=
'failingMethod'] 'failingMethod']
if len(full_message_list) != 4: self.assertEqual(len(full_message_list), 4,
self.fail('failingMethod should not have been flushed') 'failingMethod should not have been flushed')
if len(remaining_messages) != 1: self.assertEqual(len(remaining_messages), 1,
self.fail('Activity tool should have one blocked setTitle activity') 'Activity tool should have one blocked setTitle activity')
self.assertEqual(remaining_messages[0].activity_kw['after_method_id'], self.assertEqual(remaining_messages[0].activity_kw['after_method_id'],
['failingMethod']) ['failingMethod'])
self.assertEqual(obj.getTitle(), original_title) self.assertEqual(obj.getTitle(), original_title)
def test_69_TestCountMessageWithTagWithSQLDict(self): activity_tool.manageClearActivities()
""" self.commit()
Test new countMessageWithTag function with SQLDict.
"""
self.CheckCountMessageWithTag('SQLDict')
def test_70_TestCancelFailedActiveObject(self): def test_70_TestCancelFailedActiveObject(self):
"""Cancel an active object to make sure that it does not refer to """Cancel an active object to make sure that it does not refer to
...@@ -969,7 +790,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -969,7 +790,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
# Monkey patch Organisation to add a failing method # Monkey patch Organisation to add a failing method
def failingMethod(self): def failingMethod(self):
raise ValueError, 'This method always fail' raise ValueError('This method always fail')
Organisation.failingMethod = failingMethod Organisation.failingMethod = failingMethod
# First, index the object. # First, index the object.
...@@ -997,11 +818,9 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -997,11 +818,9 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
message = activity_tool.getMessageList()[0] message = activity_tool.getMessageList()[0]
activity_tool.manageCancel(message.object_path, message.method_id) activity_tool.manageCancel(message.object_path, message.method_id)
self.commit() self.commit()
self.assertEqual(len(activity_tool.getMessageList()), 0)
def test_71_RetryMessageExecution(self): def test_71_RetryMessageExecution(self):
activity_tool = self.portal.portal_activities activity_tool = self.portal.portal_activities
self.assertFalse(activity_tool.getMessageList())
exec_count = [0] exec_count = [0]
# priority does not matter anymore # priority does not matter anymore
priority = random.Random().randint priority = random.Random().randint
...@@ -1015,7 +834,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1015,7 +834,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
raise ConflictError if conflict else Exception raise ConflictError if conflict else Exception
def check(retry_list, **activate_kw): def check(retry_list, **activate_kw):
fail = retry_list[-1][0] is not None and 1 or 0 fail = retry_list[-1][0] is not None and 1 or 0
for activity in 'SQLDict', 'SQLQueue', 'SQLJoblib': for activity in ActivityTool.activity_dict:
exec_count[0] = 0 exec_count[0] = 0
activity_tool.activate(activity=activity, priority=priority(1,6), activity_tool.activate(activity=activity, priority=priority(1,6),
**activate_kw).doSomething(retry_list) **activate_kw).doSomething(retry_list)
...@@ -1055,54 +874,14 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1055,54 +874,14 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
finally: finally:
del activity_tool.__class__.doSomething del activity_tool.__class__.doSomething
self.assertFalse(activity_tool.getMessageList())
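As a hedged aside on the retry behaviour checked above, activate() is assumed here to also honour a max_retry limit (as used elsewhere in CMFActivity); a rough sketch with an illustrative failing method:
def failingMethod(self):  # illustrative, monkey-patched as in other tests
  raise ValueError('always fails')
Organisation.failingMethod = failingMethod
try:
  obj = self.portal.organisation_module.newContent(portal_type='Organisation')
  self.tic()
  obj.activate(activity='SQLDict', max_retry=2).failingMethod()
  self.commit()
  self.flushAllActivities(silent=1, loop_size=100)
  # Once max_retry is exhausted, the message stays in the table flagged as
  # failed instead of being picked up again.
finally:
  del Organisation.failingMethod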
def test_72_TestConflictErrorsWhileValidatingWithSQLDict(self):
"""
Test if conflict errors spoil out active objects with SQLDict.
"""
self.TryConflictErrorsWhileValidating('SQLDict')
def test_73_TestConflictErrorsWhileValidatingWithSQLQueue(self):
"""
Test if conflict errors spoil out active objects with SQLQueue.
"""
self.TryConflictErrorsWhileValidating('SQLQueue')
def test_74_TestConflictErrorsWhileValidatingWithSQLJoblib(self):
"""
Test if conflict errors spoil out active objects with SQLJoblib.
"""
self.TryConflictErrorsWhileValidating('SQLJoblib')
def test_75_TestErrorsWhileFinishingCommitDBWithSQLDict(self):
"""
"""
self.TryErrorsWhileFinishingCommitDB('SQLDict')
def test_76_TestErrorsWhileFinishingCommitDBWithSQLQueue(self):
"""
"""
self.TryErrorsWhileFinishingCommitDB('SQLQueue')
def test_77_TryFlushActivityWithAfterTagSQLDict(self):
# Test if after_tag can be used
self.TryFlushActivityWithAfterTag('SQLDict')
def test_78_TryFlushActivityWithAfterTagWithSQLQueue(self):
# Test if after_tag can be used
self.TryFlushActivityWithAfterTag('SQLQueue')
def test_79_ActivateKwForNewContent(self): def test_79_ActivateKwForNewContent(self):
o1 = self.getOrganisationModule().newContent( o1 = self.getOrganisationModule().newContent(
activate_kw=dict(tag='The Tag')) activate_kw=dict(tag='The Tag'))
self.commit() self.commit()
messages_for_o1 = [m for m in self.getActivityTool().getMessageList() m, = self.getActivityTool().getMessageList(path=o1.getPath())
if m.object_path == o1.getPhysicalPath()]
self.assertNotEquals(0, len(messages_for_o1))
for m in messages_for_o1:
self.assertEqual(m.activity_kw.get('tag'), 'The Tag') self.assertEqual(m.activity_kw.get('tag'), 'The Tag')
self.tic()
def test_80_FlushAfterMultipleActivate(self): def test_80_FlushAfterMultipleActivate(self):
orga_module = self.getOrganisationModule() orga_module = self.getOrganisationModule()
...@@ -1116,7 +895,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1116,7 +895,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.setDescription(d+'a') self.setDescription(d+'a')
Organisation.updateDesc = updateDesc Organisation.updateDesc = updateDesc
self.assertEqual(len(activity_tool.getMessageList()), 0)
# First check dequeue read same message only once # First check dequeue read same message only once
for i in xrange(10): for i in xrange(10):
p.activate(activity="SQLDict").updateDesc() p.activate(activity="SQLDict").updateDesc()
...@@ -1134,13 +912,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1134,13 +912,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.assertEqual(len(activity_tool.getMessageList()), 10) self.assertEqual(len(activity_tool.getMessageList()), 10)
activity_tool.flush(p, invoke=0) activity_tool.flush(p, invoke=0)
self.commit() self.commit()
self.assertEqual(len(activity_tool.getMessageList()), 0)
def test_81_IsMessageRegisteredSQLDict(self):
"""
This test tests behaviour of IsMessageRegistered method.
"""
self.checkIsMessageRegisteredMethod('SQLDict')
def test_82_AbortTransactionSynchronously(self): def test_82_AbortTransactionSynchronously(self):
""" """
...@@ -1152,7 +923,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1152,7 +923,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
module = self.getOrganisationModule() module = self.getOrganisationModule()
organisation = module.newContent(portal_type = 'Organisation') organisation = module.newContent(portal_type = 'Organisation')
organisation_id = organisation.getId() organisation_id = organisation.getId()
self.commit() self.tic()
organisation = module[organisation_id] organisation = module[organisation_id]
# Now fake a read conflict. # Now fake a read conflict.
...@@ -1174,8 +945,9 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1174,8 +945,9 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.abort() self.abort()
organisation.uid organisation.uid
@for_each_activity
def callWithGroupIdParamater(self, activity): def testCallWithGroupIdParamater(self, activity):
dedup = activity != 'SQLQueue'
activity_tool = self.portal.portal_activities activity_tool = self.portal.portal_activities
organisation = self.getOrganisation() organisation = self.getOrganisation()
# Defined a group method # Defined a group method
...@@ -1202,7 +974,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1202,7 +974,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
message_list = activity_tool.getMessageList() message_list = activity_tool.getMessageList()
self.assertEqual(len(message_list),5) self.assertEqual(len(message_list),5)
activity_tool.tic() activity_tool.tic()
expected = dict(SQLDict=1, SQLQueue=5, SQLJoblib=1)[activity] expected = 1 if dedup else 5
self.assertEqual(expected, organisation.getFoobar()) self.assertEqual(expected, organisation.getFoobar())
...@@ -1233,30 +1005,10 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1233,30 +1005,10 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
message_list = activity_tool.getMessageList() message_list = activity_tool.getMessageList()
self.assertEqual(len(message_list),20) self.assertEqual(len(message_list),20)
activity_tool.tic() activity_tool.tic()
self.assertEqual(dict(SQLDict=11, SQLQueue=60, SQLJoblib=11)[activity], self.assertEqual(11 if dedup else 60,
organisation.getFoobar()) organisation.getFoobar())
self.assertEqual(dict(SQLDict=[1, 1, 1], SQLQueue=[5, 5, 10], SQLJoblib=[1,1,1])[activity], self.assertEqual([1, 1, 1] if dedup else [5, 5, 10],
sorted(foobar_list)) sorted(foobar_list))
message_list = activity_tool.getMessageList()
self.assertEqual(len(message_list), 0)
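A hedged sketch of the group_id semantics relied upon above, reusing the document's own pattern of attaching a temporary group method to portal_activities (all names below are illustrative):
invoked = []
def doSomethingGrouped(self, message_list):
  # A group method receives the whole batch of grouped messages at once.
  invoked.append(len(message_list))
activity_tool.__class__.doSomethingGrouped = doSomethingGrouped
try:
  organisation.activate(activity='SQLDict', group_id='a',
    group_method_id='portal_activities/doSomethingGrouped').getTitle()
  organisation.activate(activity='SQLDict', group_id='b',
    group_method_id='portal_activities/doSomethingGrouped').getTitle()
  self.tic()
  # Different group_id values yield two separate group calls of one message
  # each, instead of a single call receiving both messages.
  assert sorted(invoked) == [1, 1]
finally:
  del activity_tool.__class__.doSomethingGrouped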
def test_83a_CallWithGroupIdParamaterSQLDict(self):
"""
Test that group_id parameter is used to separate execution of the same method
"""
self.callWithGroupIdParamater('SQLDict')
def test_83b_CallWithGroupIdParamaterSQLQueue(self):
"""
Test that group_id parameter is used to separate execution of the same method
"""
self.callWithGroupIdParamater('SQLQueue')
def test_83c_CallWithGroupIdParamaterSQLJoblib(self):
"""
Test that group_id parameter is used to separate execution of the same method
"""
self.callWithGroupIdParamater('SQLJoblib')
def test_84_ActivateKwForWorkflowTransition(self): def test_84_ActivateKwForWorkflowTransition(self):
""" """
...@@ -1266,20 +1018,15 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1266,20 +1018,15 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.tic() self.tic()
o1.validate(activate_kw=dict(tag='The Tag')) o1.validate(activate_kw=dict(tag='The Tag'))
self.commit() self.commit()
messages_for_o1 = [m for m in self.getActivityTool().getMessageList() m, = self.getActivityTool().getMessageList(path=o1.getPath())
if m.object_path == o1.getPhysicalPath()]
self.assertNotEquals(0, len(messages_for_o1))
for m in messages_for_o1:
self.assertEqual(m.activity_kw.get('tag'), 'The Tag') self.assertEqual(m.activity_kw.get('tag'), 'The Tag')
self.tic()
def test_85_LossOfVolatileAttribute(self): def test_85_LossOfVolatileAttribute(self):
""" """
Test that the loss of volatile attribute doesn't lose activities Test that the loss of volatile attribute doesn't lose activities
""" """
self.tic()
activity_tool = self.getActivityTool() activity_tool = self.getActivityTool()
message_list = activity_tool.getMessageList()
self.assertEqual(len(message_list), 0)
def delete_volatiles(): def delete_volatiles():
for property_id in activity_tool.__dict__.keys(): for property_id in activity_tool.__dict__.keys():
if property_id.startswith('_v_'): if property_id.startswith('_v_'):
...@@ -1296,6 +1043,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1296,6 +1043,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.commit() self.commit()
message_list = activity_tool.getMessageList() message_list = activity_tool.getMessageList()
self.assertEqual(len(message_list), 2) self.assertEqual(len(message_list), 2)
self.tic()
def test_88_ProcessingMultipleMessagesMustRevertIndividualMessagesOnError(self): def test_88_ProcessingMultipleMessagesMustRevertIndividualMessagesOnError(self):
""" """
...@@ -1306,14 +1054,13 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1306,14 +1054,13 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
Queues supporting message batch processing: Queues supporting message batch processing:
- SQLQueue - SQLQueue
""" """
self.tic()
activity_tool = self.getActivityTool() activity_tool = self.getActivityTool()
obj = self.portal.organisation_module.newContent(portal_type='Organisation') obj = self.portal.organisation_module.newContent(portal_type='Organisation')
active_obj = obj.activate(activity='SQLQueue') active_obj = obj.activate(activity='SQLQueue')
def appendToTitle(self, to_append, fail=False): def appendToTitle(self, to_append, fail=False):
self.setTitle(self.getTitle() + to_append) self.setTitle(self.getTitle() + to_append)
if fail: if fail:
raise ValueError, 'This method always fail' raise ValueError('This method always fail')
try: try:
Organisation.appendToTitle = appendToTitle Organisation.appendToTitle = appendToTitle
obj.setTitle('a') obj.setTitle('a')
...@@ -1325,8 +1072,9 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1325,8 +1072,9 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.assertEqual(obj.getTitle(), 'a') self.assertEqual(obj.getTitle(), 'a')
self.assertEqual(activity_tool.countMessage(method_id='appendToTitle'), 3) self.assertEqual(activity_tool.countMessage(method_id='appendToTitle'), 3)
self.flushAllActivities(silent=1, loop_size=100) self.flushAllActivities(silent=1, loop_size=100)
self.assertEqual(activity_tool.countMessage(method_id='appendToTitle'), 1)
self.assertEqual(sorted(obj.getTitle()), ['a', 'b', 'd']) self.assertEqual(sorted(obj.getTitle()), ['a', 'b', 'd'])
message, = self.getMessageList('SQLQueue', method_id='appendToTitle')
self.deleteMessageList('SQLQueue', [message])
finally: finally:
del Organisation.appendToTitle del Organisation.appendToTitle
...@@ -1337,7 +1085,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1337,7 +1085,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
This only applies to queues supporting batch processing: This only applies to queues supporting batch processing:
- SQLQueue - SQLQueue
""" """
self.tic()
obj = self.portal.organisation_module.newContent(portal_type='Organisation', title='Pending') obj = self.portal.organisation_module.newContent(portal_type='Organisation', title='Pending')
marker_id = 'marker_%i' % (random.randint(1, 10), ) marker_id = 'marker_%i' % (random.randint(1, 10), )
def putMarkerValue(self, marker_id): def putMarkerValue(self, marker_id):
...@@ -1359,7 +1106,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1359,7 +1106,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
del Organisation.putMarkerValue del Organisation.putMarkerValue
del Organisation.checkMarkerValue del Organisation.checkMarkerValue
def TryUserNotificationOnActivityFailure(self, activity): @for_each_activity
def testTryUserNotificationOnActivityFailure(self, activity):
message_list = self.portal.MailHost._message_list message_list = self.portal.MailHost._message_list
del message_list[:] del message_list[:]
obj = self.portal.organisation_module.newContent(portal_type='Organisation') obj = self.portal.organisation_module.newContent(portal_type='Organisation')
...@@ -1388,73 +1136,37 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1388,73 +1136,37 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
finally: finally:
del Organisation.failingMethod del Organisation.failingMethod
def test_90_userNotificationOnActivityFailureWithSQLDict(self): def test_93_tryUserNotificationRaise(self):
""" activity_tool = self.portal.portal_activities
Check that a user notification method is called on message when activity
fails and will not be tried again.
"""
self.TryUserNotificationOnActivityFailure('SQLDict')
def test_91_userNotificationOnActivityFailureWithSQLJoblib(self):
"""
Check user notification sent on activity final error
"""
self.TryUserNotificationOnActivityFailure('SQLJoblib')
def test_92_userNotificationOnActivityFailureWithSQLQueue(self):
"""
Check that a user notification method is called on message when activity
fails and will not be tried again.
"""
self.TryUserNotificationOnActivityFailure('SQLQueue')
def TryUserNotificationRaise(self, activity):
self.tic()
obj = self.portal.organisation_module.newContent(portal_type='Organisation') obj = self.portal.organisation_module.newContent(portal_type='Organisation')
self.tic() self.tic()
from Products.CMFActivity.ActivityTool import Message
original_notifyUser = Message.notifyUser original_notifyUser = Message.notifyUser
def failingMethod(self, *args, **kw): def failingMethod(self, *args, **kw):
raise ValueError, 'This method always fail' raise ValueError('This method always fail')
Message.notifyUser = failingMethod Message.notifyUser = failingMethod
Organisation.failingMethod = failingMethod Organisation.failingMethod = failingMethod
getMessageList = self.getPortalObject().portal_activities.getMessageList
try: try:
for activity in ActivityTool.activity_dict:
obj.activate(activity=activity, priority=6).failingMethod() obj.activate(activity=activity, priority=6).failingMethod()
self.commit() self.commit()
self.flushAllActivities(silent=1, loop_size=100) self.flushAllActivities(silent=1, loop_size=100)
message, = getMessageList(activity=activity, method_id='failingMethod') message, = activity_tool.getMessageList(
self.assertEqual(message.processing, 0) activity=activity, method_id='failingMethod')
self.assertEqual(message.processing_node, -2)
self.assertTrue(message.retry)
activity_tool.manageDelete(message.uid, activity)
self.commit()
finally: finally:
Message.notifyUser = original_notifyUser Message.notifyUser = original_notifyUser
del Organisation.failingMethod del Organisation.failingMethod
def test_93_userNotificationRaiseWithSQLDict(self): @for_each_activity
""" def testTryActivityRaiseInCommitDoesNotStallActivityConection(self, activity):
Check that activities are not left with processing=1 when notifyUser raises.
"""
self.TryUserNotificationRaise('SQLDict')
def test_94_userNotificationRaiseWithSQLQueue(self):
"""
Check that activities are not left with processing=1 when notifyUser raises.
"""
self.TryUserNotificationRaise('SQLQueue')
def test_95_userNotificationRaiseWithSQLJoblib(self):
"""
Check that activities are not left with processing=1 when notifyUser raises.
"""
self.TryUserNotificationRaise('SQLJoblib')
def TryActivityRaiseInCommitDoesNotStallActivityConection(self, activity):
""" """
Check that an activity whose commit raises (as a regular conflict Check that an activity whose commit raises (as a regular conflict
error would in tpc_vote) does not cause the activity connection to error would in tpc_vote) does not cause the activity connection to
stall. stall.
""" """
self.tic()
activity_tool = self.getActivityTool()
try: try:
Organisation.registerFailingTransactionManager = registerFailingTransactionManager Organisation.registerFailingTransactionManager = registerFailingTransactionManager
obj = self.portal.organisation_module.newContent(portal_type='Organisation') obj = self.portal.organisation_module.newContent(portal_type='Organisation')
...@@ -1465,26 +1177,21 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1465,26 +1177,21 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.flushAllActivities(silent=1, loop_size=100) self.flushAllActivities(silent=1, loop_size=100)
self.commit() self.commit()
# Check that cmf_activity SQL connection still works # Check that cmf_activity SQL connection still works
connection_da = self.getPortalObject().cmf_activity_sql_connection() connection_da = self.portal.cmf_activity_sql_connection()
self.assertFalse(connection_da._registered) self.assertFalse(connection_da._registered)
connection_da.query('select 1') connection_da.query('select 1')
self.assertTrue(connection_da._registered) self.assertTrue(connection_da._registered)
self.commit() self.commit()
self.assertFalse(connection_da._registered) self.assertFalse(connection_da._registered)
message, = self.getMessageList(activity)
self.deleteMessageList(activity, [message])
finally: finally:
del Organisation.registerFailingTransactionManager del Organisation.registerFailingTransactionManager
def test_96_ActivityRaiseInCommitDoesNotStallActivityConectionSQLDict(self): @for_each_activity
self.TryActivityRaiseInCommitDoesNotStallActivityConection('SQLDict') def testTryActivityRaiseInCommitDoesNotLoseMessages(self, activity):
def test_97_ActivityRaiseInCommitDoesNotStallActivityConectionSQLQueue(self):
self.TryActivityRaiseInCommitDoesNotStallActivityConection('SQLQueue')
def TryActivityRaiseInCommitDoesNotLooseMessages(self, activity):
""" """
""" """
self.tic()
activity_tool = self.getActivityTool()
try: try:
Organisation.registerFailingTransactionManager = registerFailingTransactionManager Organisation.registerFailingTransactionManager = registerFailingTransactionManager
obj = self.portal.organisation_module.newContent(portal_type='Organisation') obj = self.portal.organisation_module.newContent(portal_type='Organisation')
...@@ -1494,18 +1201,14 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1494,18 +1201,14 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.commit() self.commit()
self.flushAllActivities(silent=1, loop_size=100) self.flushAllActivities(silent=1, loop_size=100)
self.commit() self.commit()
self.assertEqual(activity_tool.countMessage(method_id='registerFailingTransactionManager'), 1) message, = self.getMessageList(activity,
method_id='registerFailingTransactionManager')
self.deleteMessageList(activity, [message])
finally: finally:
del Organisation.registerFailingTransactionManager del Organisation.registerFailingTransactionManager
def test_98_ActivityRaiseInCommitDoesNotLooseMessagesSQLDict(self): @for_each_activity
self.TryActivityRaiseInCommitDoesNotLooseMessages('SQLDict') def testTryChangeSkinInActivity(self, activity):
def test_99_ActivityRaiseInCommitDoesNotLooseMessagesSQLQueue(self):
self.TryActivityRaiseInCommitDoesNotLooseMessages('SQLQueue')
def TryChangeSkinInActivity(self, activity):
self.tic()
activity_tool = self.getActivityTool() activity_tool = self.getActivityTool()
def changeSkinToNone(self): def changeSkinToNone(self):
self.getPortalObject().changeSkin(None) self.getPortalObject().changeSkin(None)
...@@ -1517,24 +1220,18 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1517,24 +1220,18 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.commit() self.commit()
self.assertEqual(len(activity_tool.getMessageList()), 1) self.assertEqual(len(activity_tool.getMessageList()), 1)
self.flushAllActivities(silent=1, loop_size=100) self.flushAllActivities(silent=1, loop_size=100)
self.assertEqual(len(activity_tool.getMessageList()), 0)
finally: finally:
del Organisation.changeSkinToNone del Organisation.changeSkinToNone
def test_100_TryChangeSkinInActivitySQLDict(self): @for_each_activity
self.TryChangeSkinInActivity('SQLDict') def testDeduplicatingQueuesDoNotDeleteSimilaritiesBeforeExecution(self,
activity):
def test_101_TryChangeSkinInActivitySQLQueue(self):
self.TryChangeSkinInActivity('SQLQueue')
def test_102_TryChangeSkinInActivitySQLJoblib(self):
self.TryChangeSkinInActivity('SQLJoblib')
def test_103_1_CheckSQLDictDoesNotDeleteSimilaritiesBeforeExecution(self):
""" """
Test that SQLDict does not delete similar messages which have the same Test that SQLDict does not delete similar messages which have the same
method_id and path but a different tag before execution. method_id and path but a different tag before execution.
""" """
if activity == 'SQLQueue':
return
activity_tool = self.getActivityTool() activity_tool = self.getActivityTool()
marker = [] marker = []
def doSomething(self, other_tag): def doSomething(self, other_tag):
...@@ -1542,22 +1239,23 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1542,22 +1239,23 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
activity_tool.__class__.doSomething = doSomething activity_tool.__class__.doSomething = doSomething
try: try:
# Adds two similar but not the same activities. # Adds two similar but not the same activities.
activity_tool.activate(activity='SQLDict', after_tag='foo', activity_tool.activate(activity=activity, after_tag='foo',
tag='a').doSomething(other_tag='b') tag='a').doSomething(other_tag='b')
activity_tool.activate(activity='SQLDict', after_tag='bar', activity_tool.activate(activity=activity, after_tag='bar',
tag='b').doSomething(other_tag='a') tag='b').doSomething(other_tag='a')
self.commit() self.commit()
activity_tool.tic() # make sure distribution phase was not skipped activity_tool.tic() # make sure distribution phase was not skipped
activity_tool.distribute() activity_tool.distribute()
# after distribute, similarities are still there. # after distribute, similarities are still there.
self.assertEqual(len(activity_tool.getMessageList()), 2) self.assertEqual(len(self.getMessageList(activity)), 2)
activity_tool.tic() activity_tool.tic()
self.assertEqual(len(activity_tool.getMessageList()), 0)
self.assertEqual(marker, [1]) self.assertEqual(marker, [1])
finally: finally:
del activity_tool.__class__.doSomething del activity_tool.__class__.doSomething
def test_103_2_CheckSQLDictDoesNotDeleteDuplicatesBeforeExecution(self): @for_each_activity
def testDeduplicatingQueuesDoNotDeleteDuplicatesBeforeExecution(self,
activity):
""" """
Test that SQLDict does not delete messages before execution Test that SQLDict does not delete messages before execution
even if messages have the same method_id and path and tag. even if messages have the same method_id and path and tag.
...@@ -1568,49 +1266,29 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1568,49 +1266,29 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
Deduplication is cheap: Deduplication is cheap:
- inside the transaction which spawned duplicate activities, because it - inside the transaction which spawned duplicate activities, because it
has to have created activities around anyway, and can keep track has to have created activities around anyway, and can keep track
- inside the CMFActvitiy-level processing surrounding activity execution - inside the CMFActivity-level processing surrounding activity execution
because it has to load the activities to process them anyway because it has to load the activities to process them anyway
""" """
if activity == 'SQLQueue':
return
activity_tool = self.getActivityTool() activity_tool = self.getActivityTool()
# Adds two same activities. # Adds two same activities.
activity_tool.activate(activity='SQLDict', after_tag='foo', priority=2, activity_tool.activate(activity=activity, after_tag='foo', priority=2,
tag='a').getId()
self.commit()
uid1, = [x.uid for x in activity_tool.getMessageList()]
activity_tool.activate(activity='SQLDict', after_tag='bar', priority=1,
tag='a').getId()
self.commit()
uid2, = [x.uid for x in activity_tool.getMessageList() if x.uid != uid1]
self.assertEqual(len(activity_tool.getMessageList()), 2)
activity_tool.distribute()
# After distribute, duplicate is still present.
self.assertItemsEqual([uid1, uid2], [x.uid for x in activity_tool.getMessageList()])
activity_tool.tic()
self.assertEqual(len(activity_tool.getMessageList()), 0)
def test_103_3_CheckSQLJoblibDoesNotDeleteDuplicatesBeforeExecution(self):
"""
(see test_103_2_CheckSQLDictDoesNotDeleteDuplicatesBeforeExecution)
"""
activity_tool = self.getActivityTool()
# Adds two same activities.
activity_tool.activate(activity='SQLJoblib', after_tag='foo', priority=2,
tag='a').getId() tag='a').getId()
self.commit() self.commit()
uid1, = [x.uid for x in activity_tool.getMessageList()] uid1, = [x.uid for x in self.getMessageList(activity)]
activity_tool.activate(activity='SQLJoblib', after_tag='bar', priority=1, activity_tool.activate(activity=activity, after_tag='bar', priority=1,
tag='a').getId() tag='a').getId()
self.commit() self.commit()
uid2, = [x.uid for x in activity_tool.getMessageList() if x.uid != uid1] uid2, = [x.uid for x in self.getMessageList(activity) if x.uid != uid1]
self.assertEqual(len(activity_tool.getMessageList()), 2) self.assertEqual(len(activity_tool.getMessageList()), 2)
activity_tool.distribute() activity_tool.distribute()
# After distribute, duplicate is still present. # After distribute, duplicate is still present.
self.assertItemsEqual([uid1, uid2], [x.uid for x in activity_tool.getMessageList()]) self.assertItemsEqual([uid1, uid2],
[x.uid for x in self.getMessageList(activity)])
activity_tool.tic() activity_tool.tic()
self.assertEqual(len(activity_tool.getMessageList()), 0)
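For context, a hedged sketch of the deduplication behaviour these two tests protect (the helper name is illustrative):
call_list = []
activity_tool.__class__.doSomethingOnce = lambda self: call_list.append(1)
try:
  # Two strictly identical activations: same path, method_id and tag.
  activity_tool.activate(activity='SQLDict', tag='dup').doSomethingOnce()
  activity_tool.activate(activity='SQLDict', tag='dup').doSomethingOnce()
  self.tic()
  # A deduplicating queue such as SQLDict executes them only once;
  # SQLQueue would execute both.
  assert call_list == [1]
finally:
  del activity_tool.__class__.doSomethingOnce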
def test_103_4_CheckSQLDictDistributeWithSerializationTagAndGroupMethodId( def testCheckSQLDictDistributeWithSerializationTagAndGroupMethodId(self):
self):
""" """
Distribution was at some point buggy with this scenario when there was Distribution was at some point buggy with this scenario when there was
activate with the same serialization_tag and one time with a group_method activate with the same serialization_tag and one time with a group_method
...@@ -1631,7 +1309,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1631,7 +1309,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
# After distribute, there is no deletion because it is different method # After distribute, there is no deletion because it is different method
self.assertEqual(len(activity_tool.getMessageList()), 2) self.assertEqual(len(activity_tool.getMessageList()), 2)
self.tic() self.tic()
self.assertEqual(len(activity_tool.getMessageList()), 0)
def test_104_interQueuePriorities(self): def test_104_interQueuePriorities(self):
""" """
...@@ -1680,7 +1357,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1680,7 +1357,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
del Organisation.mustRunBefore del Organisation.mustRunBefore
del Organisation.mustRunAfter del Organisation.mustRunAfter
def CheckActivityRuntimeEnvironment(self, activity): @for_each_activity
def testCheckActivityRuntimeEnvironment(self, activity):
document = self.portal.organisation_module document = self.portal.organisation_module
activity_result = [] activity_result = []
def extractActivityRuntimeEnvironment(self): def extractActivityRuntimeEnvironment(self):
...@@ -1708,21 +1386,11 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1708,21 +1386,11 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
finally: finally:
del document.__class__.doSomething del document.__class__.doSomething
def test_105_activityRuntimeEnvironmentSQLDict(self): @for_each_activity
self.CheckActivityRuntimeEnvironment('SQLDict') def testSerializationTag(self, activity):
def test_106_activityRuntimeEnvironmentSQLQueue(self):
self.CheckActivityRuntimeEnvironment('SQLQueue')
def test_107_activityRuntimeEnvironmentSQLJoblib(self):
self.CheckActivityRuntimeEnvironment('SQLJoblib')
def CheckSerializationTag(self, activity):
organisation = self.portal.organisation_module.newContent(portal_type='Organisation') organisation = self.portal.organisation_module.newContent(portal_type='Organisation')
self.tic() self.tic()
activity_tool = self.getActivityTool() activity_tool = self.getActivityTool()
result = activity_tool.getMessageList()
self.assertEqual(len(result), 0)
# First scenario: activate, distribute, activate, distribute # First scenario: activate, distribute, activate, distribute
# Create first activity and distribute: it must be distributed # Create first activity and distribute: it must be distributed
organisation.activate(activity=activity, serialization_tag='1').getTitle() organisation.activate(activity=activity, serialization_tag='1').getTitle()
...@@ -1741,8 +1409,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1741,8 +1409,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
result = activity_tool.getMessageList() result = activity_tool.getMessageList()
self.assertEqual(len([x for x in result if x.processing_node == 0]), 1) # Distributed message list len is still 1 self.assertEqual(len([x for x in result if x.processing_node == 0]), 1) # Distributed message list len is still 1
self.tic() self.tic()
result = activity_tool.getMessageList()
self.assertEqual(len(result), 0)
# Second scenario: activate, activate, distribute # Second scenario: activate, activate, distribute
# Both messages must be distributed (this is different from regular tags) # Both messages must be distributed (this is different from regular tags)
organisation.activate(activity=activity, serialization_tag='1', priority=2).getTitle() organisation.activate(activity=activity, serialization_tag='1', priority=2).getTitle()
...@@ -1760,19 +1426,10 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1760,19 +1426,10 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
message, = [x for x in result if x.processing_node == -1] message, = [x for x in result if x.processing_node == -1]
self.assertEqual(message.method_id, 'getTitle') self.assertEqual(message.method_id, 'getTitle')
self.tic() self.tic()
result = activity_tool.getMessageList()
self.assertEqual(len(result), 0)
# Check that giving a None value to serialization_tag does not confuse # Check that giving a None value to serialization_tag does not confuse
# CMFActivity # CMFActivity
organisation.activate(activity=activity, serialization_tag=None).getTitle() organisation.activate(activity=activity, serialization_tag=None).getTitle()
self.tic() self.tic()
self.assertEqual(len(activity_tool.getMessageList()), 0)
def test_108_checkSerializationTagSQLDict(self):
self.CheckSerializationTag('SQLDict')
def test_109_checkSerializationTagSQLQueue(self):
self.CheckSerializationTag('SQLQueue')
def test_110_testAbsoluteUrl(self): def test_110_testAbsoluteUrl(self):
# Tests that absolute_url works in activities. The URL generation is based # Tests that absolute_url works in activities. The URL generation is based
...@@ -1868,7 +1525,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1868,7 +1525,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
def first(context): def first(context):
context.changeSkin(skin_selection_name) context.changeSkin(skin_selection_name)
if getattr(context, script_id, None) is not None: if getattr(context, script_id, None) is not None:
raise Exception, '%s is not supposed to be found here.' % (script_id, ) raise Exception('%s is not supposed to be found here.' % script_id)
def second(context): def second(context):
# If the wrong skin is selected this will raise. # If the wrong skin is selected this will raise.
getattr(context, script_id) getattr(context, script_id)
...@@ -1885,7 +1542,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1885,7 +1542,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
# Forcibly restore skin selection, otherwise getMessageList would only # Forcibly restore skin selection, otherwise getMessageList would only
# emit a log when retrieving the ZSQLMethod. # emit a log when retrieving the ZSQLMethod.
portal.changeSkin(None) portal.changeSkin(None)
self.assertEqual(len(activity_tool.getMessageList()), 0)
finally: finally:
del Organisation.firstTest del Organisation.firstTest
del Organisation.secondTest del Organisation.secondTest
...@@ -1919,7 +1575,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1919,7 +1575,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
rendez_vous_event.set() rendez_vous_event.set()
# When this event is available, it means test has called process_shutdown. # When this event is available, it means test has called process_shutdown.
activity_event.wait() activity_event.wait()
from Products.CMFActivity.Activity.SQLDict import SQLDict
original_dequeue = SQLDict.dequeueMessage original_dequeue = SQLDict.dequeueMessage
queue_tic_test_dict = {} queue_tic_test_dict = {}
def dequeueMessage(self, activity_tool, processing_node): def dequeueMessage(self, activity_tool, processing_node):
...@@ -1983,7 +1638,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1983,7 +1638,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.assertEqual(len(activity_tool.getMessageList()), 1) self.assertEqual(len(activity_tool.getMessageList()), 1)
finally: finally:
# Put activity tool back in a working state # Put activity tool back in a working state
from Products.CMFActivity.ActivityTool import cancelProcessShutdown
try: try:
cancelProcessShutdown() cancelProcessShutdown()
except StandardException: except StandardException:
...@@ -1994,6 +1648,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -1994,6 +1648,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
finally: finally:
del Organisation.waitingActivity del Organisation.waitingActivity
SQLDict.dequeueMessage = original_dequeue SQLDict.dequeueMessage = original_dequeue
self.tic()
def test_hasActivity(self): def test_hasActivity(self):
active_object = self.portal.organisation_module.newContent( active_object = self.portal.organisation_module.newContent(
...@@ -2005,7 +1660,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2005,7 +1660,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.assertFalse(active_process.hasActivity()) self.assertFalse(active_process.hasActivity())
def test(obj, **kw): def test(obj, **kw):
for activity in ('SQLDict', 'SQLQueue', 'SQLJoblib'): for activity in ActivityTool.activity_dict:
active_object.activate(activity=activity, **kw).getTitle() active_object.activate(activity=activity, **kw).getTitle()
self.commit() self.commit()
self.assertTrue(obj.hasActivity(), activity) self.assertTrue(obj.hasActivity(), activity)
...@@ -2016,7 +1671,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2016,7 +1671,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
test(active_process, active_process=active_process) test(active_process, active_process=active_process)
test(active_process, active_process=active_process.getPath()) test(active_process, active_process=active_process.getPath())
def _test_hasErrorActivity_error(self, activity): @for_each_activity
def test_hasErrorActivity_error(self, activity):
# Monkey patch Organisation to add a failing method # Monkey patch Organisation to add a failing method
def failingMethod(self): def failingMethod(self):
raise ValueError('This method always fail') raise ValueError('This method always fail')
...@@ -2046,17 +1702,11 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2046,17 +1702,11 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
# assert that an error has been seen # assert that an error has been seen
self.assertTrue(active_object.hasErrorActivity()) self.assertTrue(active_object.hasErrorActivity())
self.assertTrue(active_process.hasErrorActivity()) self.assertTrue(active_process.hasErrorActivity())
message, = self.getMessageList(activity)
self.deleteMessageList(activity, [message])
def test_hasErrorActivity_error_SQLQueue(self): @for_each_activity
self._test_hasErrorActivity_error('SQLQueue') def test_hasErrorActivity(self, activity):
def test_hasErrorActivity_error_SQLDict(self):
self._test_hasErrorActivity_error('SQLDict')
def test_hasErrorActivity_error_SQLJoblib(self):
self._test_hasErrorActivity_error('SQLJoblib')
def _test_hasErrorActivity(self, activity):
active_object = self.portal.organisation_module.newContent( active_object = self.portal.organisation_module.newContent(
portal_type='Organisation') portal_type='Organisation')
active_process = self.portal.portal_activities.newActiveProcess() active_process = self.portal.portal_activities.newActiveProcess()
...@@ -2082,15 +1732,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2082,15 +1732,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.assertFalse(active_object.hasErrorActivity()) self.assertFalse(active_object.hasErrorActivity())
self.assertFalse(active_process.hasErrorActivity()) self.assertFalse(active_process.hasErrorActivity())
def test_hasErrorActivity_SQLQueue(self):
self._test_hasErrorActivity('SQLQueue')
def test_hasErrorActivity_SQLDict(self):
self._test_hasErrorActivity('SQLDict')
def test_hasErrorActivity_SQLJoblib(self):
self._test_hasErrorActivity('SQLJoblib')
def test_active_object_hasActivity_does_not_catch_exceptions(self): def test_active_object_hasActivity_does_not_catch_exceptions(self):
""" """
Some time ago, hasActivity was doing a silent try/except, and this was Some time ago, hasActivity was doing a silent try/except, and this was
...@@ -2120,30 +1761,67 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2120,30 +1761,67 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
finally: finally:
DB.query = DB.original_query DB.query = DB.original_query
del DB.original_query del DB.original_query
self.tic()
def test_MAX_MESSAGE_LIST_SIZE(self): def test_insert_max_payload(self):
from Products.CMFActivity.Activity import SQLBase activity_tool = self.portal.portal_activities
MAX_MESSAGE_LIST_SIZE = SQLBase.MAX_MESSAGE_LIST_SIZE # XXX: For unknown reasons, this test runs faster after the tables are
try: # recreated. We could also make this test run before all others.
SQLBase.MAX_MESSAGE_LIST_SIZE = 3 activity_tool.manageClearActivities()
def dummy_counter(o): self.commit()
self.__call_count += 1 max_allowed_packet = activity_tool.getSQLConnection().getMaxAllowedPacket()
o = self.portal.organisation_module.newContent(portal_type='Organisation') insert_list = []
invoke_list = []
for activity in "SQLDict", "SQLQueue", "SQLJoblib": N = 100
self.__call_count = 0 class Skip(Exception):
"""
Speed up test by interrupting the first transaction
as soon as we have the information we want.
"""
original_query = DB.query.__func__
def query(self, query_string, *args, **kw):
if query_string.startswith('INSERT'):
insert_list.append(len(query_string))
if not n:
raise Skip
return original_query(self, query_string, *args, **kw)
def check():
for i in xrange(1, N):
activity_tool.activate(activity=activity, group_id=str(i)
).doSomething(arg)
activity_tool.activate(activity=activity, group_id='~'
).doSomething(' ' * n)
self.tic()
self.assertEqual(len(invoke_list), N)
invoke_list.remove(n)
self.assertEqual(set(invoke_list), {len(arg)})
del invoke_list[:]
activity_tool.__class__.doSomething = \
lambda self, arg: invoke_list.append(len(arg))
try: try:
for i in xrange(10): DB.query = query
method_name = 'dummy_counter_%s' % i for activity in ActivityTool.activity_dict:
getattr(o.activate(activity=activity), method_name)() arg = ' ' * (max_allowed_packet // N)
setattr(Organisation, method_name, dummy_counter) # Find the size of the last message argument, such that all messages
self.flushAllActivities() # are inserted in a single query whose size is equal to the maximum allowed.
finally: n = 0
for i in xrange(10): self.assertRaises(Skip, check)
delattr(Organisation, 'dummy_counter_%s' % i) self.abort()
self.assertEqual(self.__call_count, 10) n = max_allowed_packet - insert_list.pop()
self.assertFalse(insert_list)
# Now check with the biggest insert query possible.
check()
self.assertEqual(max_allowed_packet, insert_list.pop())
self.assertFalse(insert_list)
# And check that the insert query is split
# in order not to exceed max_allowed_packet.
n += 1
check()
self.assertEqual(len(insert_list), 2)
del insert_list[:]
finally: finally:
SQLBase.MAX_MESSAGE_LIST_SIZE = MAX_MESSAGE_LIST_SIZE del activity_tool.__class__.doSomething
DB.query = original_query
def test_115_TestSerializationTagSQLDictPreventsParallelExecution(self): def test_115_TestSerializationTagSQLDictPreventsParallelExecution(self):
""" """
...@@ -2151,11 +1829,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2151,11 +1829,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
then serialization tag guarantees that only one of the same serialization then serialization tag guarantees that only one of the same serialization
tagged activities can be processed at the same time. tagged activities can be processed at the same time.
""" """
from Products.CMFActivity import ActivityTool
portal = self.portal portal = self.portal
activity_tool = portal.portal_activities activity_tool = portal.portal_activities
self.tic()
# Add 6 activities # Add 6 activities
portal.organisation_module.activate(activity='SQLDict', tag='', serialization_tag='test_115').getId() portal.organisation_module.activate(activity='SQLDict', tag='', serialization_tag='test_115').getId()
...@@ -2175,7 +1850,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2175,7 +1850,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
activity_tool.distribute() activity_tool.distribute()
self.commit() self.commit()
from Products.CMFActivity import ActivityTool
activity = ActivityTool.activity_dict['SQLDict'] activity = ActivityTool.activity_dict['SQLDict']
activity.getProcessableMessageList(activity_tool, 1) activity.getProcessableMessageList(activity_tool, 1)
self.commit() self.commit()
...@@ -2184,7 +1858,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2184,7 +1858,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
activity.getProcessableMessageList(activity_tool, 3) activity.getProcessableMessageList(activity_tool, 3)
self.commit() self.commit()
result = activity._getMessageList(activity_tool) result = activity._getMessageList(activity_tool.getSQLConnection())
try: try:
self.assertEqual(len([message self.assertEqual(len([message
for message in result for message in result
...@@ -2205,21 +1879,19 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2205,21 +1879,19 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
1) 1)
finally: finally:
# Clear activities from all nodes # Clear activities from all nodes
activity_tool.SQLBase_delMessage(table=SQLDict.sql_table, self.deleteMessageList('SQLDict', result)
uid=[message.uid for message in result])
self.commit()
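A hedged illustration of the guarantee test_115 asserts in detail, assuming the same fixtures (portal, activity_tool, self.commit(), self.tic()):
# Two messages sharing a serialization_tag: once one of them is reserved by
# a processing node, the other is not handed out until the first finishes.
portal.organisation_module.activate(
  activity='SQLDict', serialization_tag='sync').getId()
portal.organisation_module.activate(
  activity='SQLDict', serialization_tag='sync', priority=2).getTitle()
self.commit()
activity_tool.distribute()
self.tic()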
def test_116_RaiseInCommitBeforeMessageExecution(self): def test_116_RaiseInCommitBeforeMessageExecution(self):
""" """
Test behaviour of CMFActivity when the commit just before message Test behaviour of CMFActivity when the commit just before message
execution fails. In particular, CMFActivity should restart the execution fails. In particular, it should restart the messages it
activities it selected (processing=1) instead of ignoring them forever. selected (processing_node=current_node) instead of ignoring them forever.
""" """
processed = [] processed = []
activity_tool = self.portal.portal_activities activity_tool = self.portal.portal_activities
activity_tool.__class__.doSomething = processed.append activity_tool.__class__.doSomething = processed.append
try: try:
for activity in 'SQLDict', 'SQLQueue', 'SQLJoblib': for activity in ActivityTool.activity_dict:
activity_tool.activate(activity=activity).doSomething(activity) activity_tool.activate(activity=activity).doSomething(activity)
self.commit() self.commit()
# Make first commit in dequeueMessage raise # Make first commit in dequeueMessage raise
...@@ -2228,7 +1900,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2228,7 +1900,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
# Normally, the request stops here and Zope aborts the transaction # Normally, the request stops here and Zope aborts the transaction
self.abort() self.abort()
self.assertEqual(processed, []) self.assertEqual(processed, [])
# Activity is already in 'processing=1' state. Check tic reselects it. # Activity is already reserved for current node. Check tic reselects it.
activity_tool.tic() activity_tool.tic()
self.assertEqual(processed, [activity]) self.assertEqual(processed, [activity])
del processed[:] del processed[:]
...@@ -2273,14 +1945,13 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2273,14 +1945,13 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
# .. now no messages with this tag should appear # .. now no messages with this tag should appear
self.assertEqual(0, portal.portal_activities.countMessageWithTag(tag)) self.assertEqual(0, portal.portal_activities.countMessageWithTag(tag))
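A hedged sketch of the countMessageWithTag idiom used above (assuming the same portal fixture; the tag name is illustrative):
organisation = portal.organisation_module.newContent(portal_type='Organisation')
organisation.activate(tag='being_built').getTitle()
self.commit()
# One pending message carries the tag; it disappears once processed.
assert portal.portal_activities.countMessageWithTag('being_built') == 1
self.tic()
assert portal.portal_activities.countMessageWithTag('being_built') == 0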
def TryNotificationSavedOnEventLogWhenNotifyUserRaises(self, activity): @for_each_activity
activity_tool = self.getActivityTool() def testTryNotificationSavedOnEventLogWhenNotifyUserRaises(self, activity):
self.tic()
obj = self.portal.organisation_module.newContent(portal_type='Organisation') obj = self.portal.organisation_module.newContent(portal_type='Organisation')
self.tic() self.tic()
original_notifyUser = Message.notifyUser.im_func original_notifyUser = Message.notifyUser.im_func
def failSendingEmail(self, *args, **kw): def failSendingEmail(self, *args, **kw):
raise MailHostError, 'Mail is not sent' raise MailHostError('Mail is not sent')
activity_unit_test_error = Exception() activity_unit_test_error = Exception()
def failingMethod(self): def failingMethod(self):
raise activity_unit_test_error raise activity_unit_test_error
...@@ -2291,41 +1962,23 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2291,41 +1962,23 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
obj.activate(activity=activity, priority=6).failingMethod() obj.activate(activity=activity, priority=6).failingMethod()
self.commit() self.commit()
self.flushAllActivities(silent=1, loop_size=100) self.flushAllActivities(silent=1, loop_size=100)
message, = activity_tool.getMessageList() message, = self.getMessageList(activity)
self.commit() self.commit()
for log_record in self.logged: for log_record in self.logged:
if log_record.name == 'ActivityTool' and log_record.levelname == 'WARNING': if log_record.name == 'ActivityTool' and log_record.levelname == 'WARNING':
type, value, trace = log_record.exc_info type, value, trace = log_record.exc_info
self.commit() self.commit()
self.assertIs(activity_unit_test_error, value) self.assertIs(activity_unit_test_error, value)
self.deleteMessageList(activity, [message])
finally: finally:
Message.notifyUser = original_notifyUser Message.notifyUser = original_notifyUser
del Organisation.failingMethod del Organisation.failingMethod
self._ignore_log_errors() self._ignore_log_errors()
def test_118_userNotificationSavedOnEventLogWhenNotifyUserRaisesWithSQLDict(self): @for_each_activity
""" def testTryUserMessageContainingNoTracebackIsStillSent(self, activity):
Check the error is saved on event log even if the mail notification is not sent.
"""
self.TryNotificationSavedOnEventLogWhenNotifyUserRaises('SQLDict')
def test_119_userNotificationSavedOnEventLogWhenNotifyUserRaisesWithSQLQueue(self):
"""
Check the error is saved on event log even if the mail notification is not sent.
"""
self.TryNotificationSavedOnEventLogWhenNotifyUserRaises('SQLQueue')
def test_120_userNotificationSavedOnEventLogWhenNotifyUserRaisesWithSQLJoblib(self):
"""
Check the error is saved on event log even if the mail notification is not sent.
"""
self.TryNotificationSavedOnEventLogWhenNotifyUserRaises('SQLJoblib')
def TryUserMessageContainingNoTracebackIsStillSent(self, activity):
activity_tool = self.getActivityTool()
# With Message.__call__ # With Message.__call__
# 1: activity context does not exist when activity is executed # 1: activity context does not exist when activity is executed
self.tic()
obj = self.portal.organisation_module.newContent(portal_type='Organisation') obj = self.portal.organisation_module.newContent(portal_type='Organisation')
self.tic() self.tic()
notification_done = [] notification_done = []
...@@ -2334,40 +1987,25 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2334,40 +1987,25 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.traceback = None self.traceback = None
original_notifyUser = Message.notifyUser original_notifyUser = Message.notifyUser
def failingMethod(self): def failingMethod(self):
raise ValueError, "This method always fail" raise ValueError("This method always fail")
Message.notifyUser = fake_notifyUser Message.notifyUser = fake_notifyUser
Organisation.failingMethod = failingMethod Organisation.failingMethod = failingMethod
try: try:
obj.activate(activity=activity).failingMethod() obj.activate(activity=activity).failingMethod()
self.commit() self.commit()
self.flushAllActivities(silent=1, loop_size=100) self.flushAllActivities(silent=1, loop_size=100)
message_list = activity_tool.getMessageList() message, = self.getMessageList(activity)
self.assertEqual(len(message_list), 1)
self.assertEqual(len(notification_done), 1) self.assertEqual(len(notification_done), 1)
message = message_list[0]
self.assertEqual(message.traceback, None) self.assertEqual(message.traceback, None)
message(activity_tool) message(self.getActivityTool())
activity_tool.manageCancel(message.object_path, message.method_id) self.deleteMessageList(activity, [message])
self.commit()
finally: finally:
Message.notifyUser = original_notifyUser Message.notifyUser = original_notifyUser
del Organisation.failingMethod del Organisation.failingMethod
def test_121_sendMessageWithNoTracebackWithSQLQueue(self): @for_each_activity
self.TryUserMessageContainingNoTracebackIsStillSent('SQLQueue') def testTryNotificationSavedOnEventLogWhenSiteErrorLoggerRaises(self, activity):
def test_122_sendMessageWithNoTracebackWithSQLDict(self):
self.TryUserMessageContainingNoTracebackIsStillSent('SQLDict')
def test_123_sendMessageWithNoTracebackWithSQLJoblib(self):
"""
Check that message with no traceback is still sent
"""
self.TryUserMessageContainingNoTracebackIsStillSent('SQLJoblib')
def TryNotificationSavedOnEventLogWhenSiteErrorLoggerRaises(self, activity):
# Make sure that no active object is installed. # Make sure that no active object is installed.
activity_tool = self.portal.portal_activities
o = self.getOrganisation() o = self.getOrganisation()
class ActivityUnitTestError(Exception): class ActivityUnitTestError(Exception):
pass pass
...@@ -2386,7 +2024,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2386,7 +2024,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self._catch_log_errors() self._catch_log_errors()
o.activate(activity = activity).failingMethod() o.activate(activity = activity).failingMethod()
self.commit() self.commit()
self.assertEqual(len(activity_tool.getMessageList()), 1) message, = self.getMessageList(activity)
self.flushAllActivities(silent = 1) self.flushAllActivities(silent = 1)
SiteErrorLog.raising = original_raising SiteErrorLog.raising = original_raising
self.commit() self.commit()
...@@ -2394,64 +2032,12 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2394,64 +2032,12 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
if log_record.name == 'ActivityTool' and log_record.levelname == 'WARNING': if log_record.name == 'ActivityTool' and log_record.levelname == 'WARNING':
type, value, trace = log_record.exc_info type, value, trace = log_record.exc_info
self.assertIs(activity_unit_test_error, value) self.assertIs(activity_unit_test_error, value)
self.deleteMessageList(activity, [message])
finally: finally:
SiteErrorLog.raising = original_raising SiteErrorLog.raising = original_raising
del Organisation.failingMethod del Organisation.failingMethod
self._ignore_log_errors() self._ignore_log_errors()
def test_124_userNotificationSavedOnEventLogWhenSiteErrorLoggerRaisesWithSQLJoblib(self):
"""
Check that message not saved in site error logger is not lost
"""
self.TryNotificationSavedOnEventLogWhenSiteErrorLoggerRaises('SQLJoblib')
def test_125_userNotificationSavedOnEventLogWhenSiteErrorLoggerRaisesWithSQLDict(self):
"""
Check that message not saved in site error logger is not lost
"""
self.TryNotificationSavedOnEventLogWhenSiteErrorLoggerRaises('SQLDict')
def test_125_userNotificationSavedOnEventLogWhenSiteErrorLoggerRaisesWithSQLJoblib(self):
"""
Check that message not saved in site error logger is not lost
"""
self.TryNotificationSavedOnEventLogWhenSiteErrorLoggerRaises('SQLJoblib')
def test_126_userNotificationSavedOnEventLogWhenSiteErrorLoggerRaisesWithSQLQueue(self):
self.TryNotificationSavedOnEventLogWhenSiteErrorLoggerRaises('SQLQueue')
def test_127_checkConflictErrorAndNoRemainingActivities(self):
"""
When an activity creates several activities, make sure that all newly
created activities are not committed if there is a ZODB ConflictError
"""
from Products.CMFActivity.Activity import SQLBase
MAX_MESSAGE_LIST_SIZE = SQLBase.MAX_MESSAGE_LIST_SIZE
try:
SQLBase.MAX_MESSAGE_LIST_SIZE = 1
activity_tool = self.portal.portal_activities
def doSomething(self):
self.serialize()
self.activate(activity='SQLQueue').getId()
self.activate(activity='SQLQueue').getTitle()
conn = self._p_jar
tid = self._p_serial
oid = self._p_oid
try:
conn.db().invalidate({oid: tid})
except TypeError:
conn.db().invalidate(tid, {oid: tid})
activity_tool.__class__.doSomething = doSomething
activity_tool.activate(activity='SQLQueue').doSomething()
self.commit()
activity_tool.tic()
message_list = activity_tool.getMessageList()
self.assertEqual(['doSomething'],[x.method_id for x in message_list])
activity_tool.manageClearActivities()
finally:
SQLBase.MAX_MESSAGE_LIST_SIZE = MAX_MESSAGE_LIST_SIZE
def test_128_CheckDistributeWithSerializationTagAndGroupMethodId(self): def test_128_CheckDistributeWithSerializationTagAndGroupMethodId(self):
activity_tool = self.portal.portal_activities activity_tool = self.portal.portal_activities
obj1 = activity_tool.newActiveProcess() obj1 = activity_tool.newActiveProcess()
...@@ -2466,7 +2052,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2466,7 +2052,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
group_method_call_list.append(r) group_method_call_list.append(r)
activity_tool.__class__.doSomething = doSomething activity_tool.__class__.doSomething = doSomething
try: try:
for activity in 'SQLDict', 'SQLQueue', 'SQLJoblib': for activity in ActivityTool.activity_dict:
activity_kw = dict(activity=activity, serialization_tag=self.id(), activity_kw = dict(activity=activity, serialization_tag=self.id(),
group_method_id='portal_activities/doSomething') group_method_id='portal_activities/doSomething')
obj1.activate(**activity_kw).dummy(1, x=None) obj1.activate(**activity_kw).dummy(1, x=None)
...@@ -2488,11 +2074,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2488,11 +2074,8 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.assertEqual(len(activity_tool.getMessageList()), 2) self.assertEqual(len(activity_tool.getMessageList()), 2)
activity_tool.tic() activity_tool.tic()
self.assertEqual(group_method_call_list.pop(), self.assertEqual(group_method_call_list.pop(),
dict(SQLDict=[message2], [message2] if activity != 'SQLQueue' else [message1, message2])
SQLQueue=[message1, message2],
SQLJoblib=[message2])[activity])
self.assertFalse(group_method_call_list) self.assertFalse(group_method_call_list)
self.assertFalse(activity_tool.getMessageList())
finally: finally:
del activity_tool.__class__.doSomething del activity_tool.__class__.doSomething
...@@ -2600,7 +2183,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2600,7 +2183,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
transaction.get().addBeforeCommitHook(_raise, (error,)) transaction.get().addBeforeCommitHook(_raise, (error,))
obj.__class__.doSomething = doSomething obj.__class__.doSomething = doSomething
try: try:
for activity in 'SQLDict', 'SQLQueue', 'SQLJoblib': for activity in ActivityTool.activity_dict:
for conflict_error in False, True: for conflict_error in False, True:
weakref_list = [] weakref_list = []
obj.activity_count = obj.on_error_count = 0 obj.activity_count = obj.on_error_count = 0
...@@ -2731,7 +2314,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2731,7 +2314,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
kw = {} kw = {}
self._catch_log_errors(subsystem='CMFActivity') self._catch_log_errors(subsystem='CMFActivity')
try: try:
for kw['activity'] in 'SQLDict', 'SQLQueue', 'SQLJoblib': for kw['activity'] in ActivityTool.activity_dict:
for kw['group_method_id'] in '', None: for kw['group_method_id'] in '', None:
obj = activity_tool.newActiveProcess() obj = activity_tool.newActiveProcess()
self.tic() self.tic()
...@@ -2818,7 +2401,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2818,7 +2401,6 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
self.commit() self.commit()
activity_tool.timeShift(VALIDATION_ERROR_DELAY) activity_tool.timeShift(VALIDATION_ERROR_DELAY)
activity_tool.tic() activity_tool.tic()
self.assertFalse(activity_tool.getMessageList())
finally: finally:
del obj.__class__.doSomething del obj.__class__.doSomething
...@@ -2845,7 +2427,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2845,7 +2427,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
skin.manage_delObjects([script_id]) skin.manage_delObjects([script_id])
self.tic() self.tic()
def testGetCurrentNode(self): def test_getCurrentNode(self):
current_node = getattr(getConfiguration(), 'product_config', {}) \ current_node = getattr(getConfiguration(), 'product_config', {}) \
.get('cmfactivity', {}).get('node-id') .get('cmfactivity', {}).get('node-id')
if not current_node: if not current_node:
...@@ -2855,7 +2437,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor): ...@@ -2855,7 +2437,7 @@ class TestCMFActivity(ERP5TypeTestCase, LogInterceptor):
activity_node = self.portal.portal_activities.getCurrentNode() activity_node = self.portal.portal_activities.getCurrentNode()
self.assertEqual(activity_node, current_node) self.assertEqual(activity_node, current_node)
def testGetServerAddress(self): def test_getServerAddress(self):
ip = port = '' ip = port = ''
for k, v in socket_map.items(): for k, v in socket_map.items():
if hasattr(v, 'addr'): if hasattr(v, 'addr'):
......
...@@ -2211,11 +2211,8 @@ class ERP5Generator(PortalGenerator): ...@@ -2211,11 +2211,8 @@ class ERP5Generator(PortalGenerator):
createDirectoryView(ps, reg_key) createDirectoryView(ps, reg_key)
def setupDefaultSkins(self, p): def setupDefaultSkins(self, p):
from Products.CMFCore.DirectoryView import addDirectoryViews
from Products.CMFActivity import cmfactivity_globals
ps = p.portal_skins ps = p.portal_skins
self.addCMFDefaultDirectoryViews(p) self.addCMFDefaultDirectoryViews(p)
addDirectoryViews(ps, 'skins', cmfactivity_globals)
ps.manage_addProduct['OFSP'].manage_addFolder(id='external_method') ps.manage_addProduct['OFSP'].manage_addFolder(id='external_method')
ps.manage_addProduct['OFSP'].manage_addFolder(id='custom') ps.manage_addProduct['OFSP'].manage_addFolder(id='custom')
# Set the 'custom' layer a high priority, so it remains the first # Set the 'custom' layer a high priority, so it remains the first
...@@ -2223,7 +2220,6 @@ class ERP5Generator(PortalGenerator): ...@@ -2223,7 +2220,6 @@ class ERP5Generator(PortalGenerator):
ps['custom'].manage_addProperty("business_template_skin_layer_priority", 100.0, "float") ps['custom'].manage_addProperty("business_template_skin_layer_priority", 100.0, "float")
skin_folder_list = [ 'custom' skin_folder_list = [ 'custom'
, 'external_method' , 'external_method'
, 'activity'
] + self.CMFDEFAULT_FOLDER_LIST ] + self.CMFDEFAULT_FOLDER_LIST
skin_folders = ', '.join(skin_folder_list) skin_folders = ', '.join(skin_folder_list)
ps.addSkinSelection( 'View' ps.addSkinSelection( 'View'
......
return 'ActivityTool_manageDelete?uid=%s&activity=%s' % (context.uid, context.activity) return 'manageDelete?message_uid_list:int:list=%s&activity=%s' % (context.uid, context.activity)
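The delete link now posts to manageDelete on the activity tool directly and relies on ZPublisher form marshalling: the ':int:list' suffix turns the query parameter into a list of integers on the server side. A hypothetical helper (not part of the commit) showing how the same kind of link could be built for several selected messages:

# Hypothetical illustration only: each repeated "message_uid_list:int:list=<uid>"
# pair is marshalled by ZPublisher into a single Python list of ints.
def delete_link(message_list, activity):
    params = '&'.join('message_uid_list:int:list=%s' % m.uid for m in message_list)
    return 'manageDelete?%s&activity=%s' % (params, activity)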
SELECT count(*) AS message, method_id, processing, processing_node AS node, min(priority) AS min_pri, max(priority) AS max_pri FROM <dtml-var table> GROUP BY method_id, processing, processing_node ORDER BY node SELECT count(*) AS `count`, method_id, processing_node AS node, min(priority) AS min_pri, max(priority) AS max_pri FROM <dtml-var table> GROUP BY processing_node, method_id ORDER BY processing_node, method_id
\ No newline at end of file \ No newline at end of file
...@@ -133,6 +133,14 @@ ...@@ -133,6 +133,14 @@
<key> <string>id</string> </key> <key> <string>id</string> </key>
<value> <string>ActivityTool_getCurrentActivities</string> </value> <value> <string>ActivityTool_getCurrentActivities</string> </value>
</item> </item>
<item>
<key> <string>max_cache_</string> </key>
<value> <int>0</int> </value>
</item>
<item>
<key> <string>max_rows_</string> </key>
<value> <int>0</int> </value>
</item>
<item> <item>
<key> <string>title</string> </key> <key> <string>title</string> </key>
<value> <string></string> </value> <value> <string></string> </value>
......
# searching for k, v in kw.items():
# processing_node column is managed by methods called by getMessageTempObjectList if v:
if kw.get('processing_node', None) == '': if k == "str_object_path":
del kw['processing_node'] kw["path"] = v
elif k == "uid_activity":
kw["uid"] = v
elif k in ('method_id', 'processing_node', 'retry'):
continue
del kw[k]
message_kw = dict([(k,kw[k]) for k in ['uid_activity','str_object_path','method_id', message_list = context.getMessageTempObjectList(**kw)
'args','retry','processing_node', for message in message_list:
'processing'] if not(kw.get(k) in ('',None))]) message.edit(
if message_kw.has_key("str_object_path"): str_object_path = '/'.join(message.object_path),
message_kw["path"] = message_kw.pop("str_object_path") uid_activity = str(message.uid) + ' ('+ message.activity[3:] +')',
if message_kw.has_key("uid_activity"): arguments = str(message.args),
message_kw["uid"] = message_kw.pop("uid_activity") delete = '[Delete]',
restart = '[Restart]',
)
message_list = context.getMessageTempObjectList(**message_kw) return message_list
message_list_to_show = []
while len(message_list) > 0:
message = message_list.pop(0)
message.edit(str_object_path = '/'.join(str(i) for i in message.object_path))
message.edit(uid_activity = str(message.uid) + ' ('+ message.activity[3:] +')')
message.edit(arguments = str(message.args))
message.edit(delete = '[Delete]')
message.edit(restart = '[Restart]')
message_list_to_show.append(message)
return message_list_to_show
SELECT priority as pri, MIN(timediff(NOW(), date)) AS min, AVG(timediff(NOW() , date)) AS avg, MAX(timediff(NOW() , date)) AS max FROM <dtml-var table> GROUP BY priority; SELECT priority AS pri,
\ No newline at end of file TIME_FORMAT(TIMEDIFF(UTC_TIMESTAMP(6), MAX(date)), '%T') AS min,
TIME_FORMAT(TIMEDIFF(UTC_TIMESTAMP(6), AVG(date)), '%T') AS avg,
TIME_FORMAT(TIMEDIFF(UTC_TIMESTAMP(6), MIN(date)), '%T') AS max
FROM <dtml-var table> GROUP BY priority
\ No newline at end of file
...@@ -18,6 +18,14 @@ ...@@ -18,6 +18,14 @@
<key> <string>id</string> </key> <key> <string>id</string> </key>
<value> <string>ActivityTool_getSQLActivities</string> </value> <value> <string>ActivityTool_getSQLActivities</string> </value>
</item> </item>
<item>
<key> <string>max_cache_</string> </key>
<value> <int>0</int> </value>
</item>
<item>
<key> <string>max_rows_</string> </key>
<value> <int>0</int> </value>
</item>
<item> <item>
<key> <string>title</string> </key> <key> <string>title</string> </key>
<value> <string></string> </value> <value> <string></string> </value>
......
data = {}
for d, sql in [('SQLDict',context.ActivityTool_getCurrentActivities(table='message')),
('SQLQueue',context.ActivityTool_getCurrentActivities(table='message_queue'))]:
data[d] = {'line_list':[]}
for line in sql:
tmp = {}
for k in ['message','method_id','processing','node','min_pri','max_pri']:
tmp[k] = line[k]
data[d]['line_list'].append(tmp)
for d, sql in [('SQLDict2',context.ActivityTool_getSQLActivities(table='message')),
('SQLQueue2',context.ActivityTool_getSQLActivities(table='message_queue'))]:
data[d] = {'line_list':[]}
for line in sql:
tmp = {'pri':line['pri']}
for k in ['min','avg','max']:
tmp[k] = str(line[k])
data[d]['line_list'].append(tmp)
import json import json
return json.dumps(data)
return json.dumps({
q + ('2' if i else ''): {
'line_list': [dict(zip(results.names(), row)) for row in results]
}
for i, q in enumerate((context.ActivityTool_getCurrentActivities,
context.ActivityTool_getSQLActivities))
for q, results in (('SQLDict', q(table='message')),
('SQLQueue', q(table='message_queue')))
})
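For reference, the rewritten script serialises one entry per query and table; the keys follow the two loops above ('SQLDict'/'SQLQueue', plus a '2' suffix for the per-priority query) and the column names come from ActivityTool_getCurrentActivities and ActivityTool_getSQLActivities. An illustrative Python literal of the structure before json.dumps (values are invented):

# Illustrative shape only -- values are made up, keys/columns follow the SQL above.
{
  'SQLDict':   {'line_list': [{'count': 3, 'method_id': 'immediateReindexObject',
                               'node': 0, 'min_pri': 1, 'max_pri': 5}]},
  'SQLQueue':  {'line_list': []},
  'SQLDict2':  {'line_list': [{'pri': 1, 'min': '00:00:02',
                               'avg': '00:00:10', 'max': '00:01:40'}]},
  'SQLQueue2': {'line_list': []},
}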
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="PythonScript" module="Products.PythonScripts.PythonScript"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>Script_magic</string> </key>
<value> <int>3</int> </value>
</item>
<item>
<key> <string>_bind_names</string> </key>
<value>
<object>
<klass>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
</klass>
<tuple/>
<state>
<dictionary>
<item>
<key> <string>_asgns</string> </key>
<value>
<dictionary>
<item>
<key> <string>name_container</string> </key>
<value> <string>container</string> </value>
</item>
<item>
<key> <string>name_context</string> </key>
<value> <string>context</string> </value>
</item>
<item>
<key> <string>name_m_self</string> </key>
<value> <string>script</string> </value>
</item>
<item>
<key> <string>name_subpath</string> </key>
<value> <string>traverse_subpath</string> </value>
</item>
</dictionary>
</value>
</item>
</dictionary>
</state>
</object>
</value>
</item>
<item>
<key> <string>_params</string> </key>
<value> <string>uid,activity,**kw</string> </value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>ActivityTool_manageDelete</string> </value>
</item>
</dictionary>
</pickle>
</record>
</ZopeData>
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="PythonScript" module="Products.PythonScripts.PythonScript"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>Script_magic</string> </key>
<value> <int>3</int> </value>
</item>
<item>
<key> <string>_bind_names</string> </key>
<value>
<object>
<klass>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
</klass>
<tuple/>
<state>
<dictionary>
<item>
<key> <string>_asgns</string> </key>
<value>
<dictionary>
<item>
<key> <string>name_container</string> </key>
<value> <string>container</string> </value>
</item>
<item>
<key> <string>name_context</string> </key>
<value> <string>context</string> </value>
</item>
<item>
<key> <string>name_m_self</string> </key>
<value> <string>script</string> </value>
</item>
<item>
<key> <string>name_subpath</string> </key>
<value> <string>traverse_subpath</string> </value>
</item>
</dictionary>
</value>
</item>
</dictionary>
</state>
</object>
</value>
</item>
<item>
<key> <string>_params</string> </key>
<value> <string>uid,activity,**kw</string> </value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>ActivityTool_manageRestart</string> </value>
</item>
</dictionary>
</pickle>
</record>
</ZopeData>
return 'ActivityTool_manageRestart?uid=%s&activity=%s' % (context.uid, context.activity) return 'manageRestart?message_uid_list:int:list=%s&activity=%s' % (context.uid, context.activity)
...@@ -142,10 +142,6 @@ ...@@ -142,10 +142,6 @@
<string>retry</string> <string>retry</string>
<string>Retry</string> <string>Retry</string>
</tuple> </tuple>
<tuple>
<string>processing</string>
<string>Processing</string>
</tuple>
</list> </list>
</value> </value>
</item> </item>
...@@ -221,10 +217,6 @@ ...@@ -221,10 +217,6 @@
<string>retry</string> <string>retry</string>
<string>Retry</string> <string>Retry</string>
</tuple> </tuple>
<tuple>
<string>processing</string>
<string>Processing</string>
</tuple>
</list> </list>
</value> </value>
</item> </item>
...@@ -301,10 +293,6 @@ ...@@ -301,10 +293,6 @@
<string>retry</string> <string>retry</string>
<string></string> <string></string>
</tuple> </tuple>
<tuple>
<string>processing</string>
<string></string>
</tuple>
</list> </list>
</value> </value>
</item> </item>
......
...@@ -10,7 +10,7 @@ ...@@ -10,7 +10,7 @@
<table> <table>
<tr> <tr>
<th>Type</th> <th>Type</th>
<th>Message</th> <th>Count</th>
<th>Method Id</th> <th>Method Id</th>
<th>Processing Node</th> <th>Processing Node</th>
<th>Min pri</th> <th>Min pri</th>
...@@ -19,7 +19,7 @@ ...@@ -19,7 +19,7 @@
{{#each messageList1}} {{#each messageList1}}
<tr> <tr>
<td>{{this.messagetype}} </td> <td>{{this.messagetype}} </td>
<td>{{this.message}}</td> <td>{{this.count}}</td>
<td>{{this.method_id}}</td> <td>{{this.method_id}}</td>
<td>{{this.node}}</td> <td>{{this.node}}</td>
<td>{{this.min_pri}}</td> <td>{{this.min_pri}}</td>
...@@ -29,7 +29,7 @@ ...@@ -29,7 +29,7 @@
{{#each messageList2}} {{#each messageList2}}
<tr> <tr>
<td>{{this.messagetype}} </td> <td>{{this.messagetype}} </td>
<td>{{this.message}}</td> <td>{{this.count}}</td>
<td>{{this.method_id}}</td> <td>{{this.method_id}}</td>
<td>{{this.node}}</td> <td>{{this.node}}</td>
<td>{{this.min_pri}}</td> <td>{{this.min_pri}}</td>
...@@ -40,7 +40,7 @@ ...@@ -40,7 +40,7 @@
<table> <table>
<tr> <tr>
<th>Type</th> <th>Type</th>
<th>Pri</th> <th>Priority</th>
<th>Min</th> <th>Min</th>
<th>Avg</th> <th>Avg</th>
<th>Max</th> <th>Max</th>
......
...@@ -482,6 +482,10 @@ class DB(TM): ...@@ -482,6 +482,10 @@ class DB(TM):
if m[0] not in hosed_connection: if m[0] not in hosed_connection:
raise raise
def getMaxAllowedPacket(self):
# minus 2-byte overhead from the MySQL library
return self._query("SELECT @@max_allowed_packet-2").fetch_row()[0][0]
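The new helper exposes the server's max_allowed_packet (minus a small protocol overhead), which the commit uses to limit insertions by size in bytes rather than by row count. A minimal sketch, not the actual CMFActivity implementation, of how such a byte limit can drive batching of INSERT statements, assuming each value tuple is already rendered as an escaped SQL string and individually fits in one packet:

# Sketch only: split pre-rendered "(...)" value strings into INSERT statements
# that stay below the byte limit returned by getMaxAllowedPacket().
def iter_insert_statements(insert_prefix, value_sql_list, max_allowed_packet):
    batch = []
    size = len(insert_prefix)
    for values in value_sql_list:
        if batch and size + len(values) + 1 > max_allowed_packet:
            yield insert_prefix + ','.join(batch)
            batch = []
            size = len(insert_prefix)
        batch.append(values)
        size += len(values) + 1  # +1 for the separating comma
    if batch:
        yield insert_prefix + ','.join(batch)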
@contextmanager @contextmanager
def lock(self): def lock(self):
"""Lock for the connected DB""" """Lock for the connected DB"""
......