##############################################################################
#
# Copyright (c) 2007 Nexedi SARL and Contributors. All Rights Reserved.
# Aurelien Calonne <aurel@nexedi.com>
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its possible inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from AccessControl import ClassSecurityInfo
from Globals import InitializeClass, DTMLFile
from Products.ERP5Type.Tool.BaseTool import BaseTool
from Products.ERP5Type import Permissions
from Products.ERP5Type.Cache import CachingMethod, clearCache
from Products.ERP5Catalog import _dtmldir
from zLOG import LOG, INFO


class ArchiveTool(BaseTool):
"""
Archive Tool contains archive objects
"""
title = 'Archive Tool'
id = 'portal_archives'
meta_type = 'ERP5 Archive Tool'
portal_type = 'Archive Tool'
allowed_types = ('ERP5 Archive',)
# Declarative Security
security = ClassSecurityInfo()
  security.declareProtected(Permissions.ManagePortal, 'manage_overview')
  manage_overview = DTMLFile('explainArchiveTool', _dtmldir)

  def getSQLCatalogIdList(self):
    """
    Wrapper around the CatalogTool method of the same name
    """
return self.portal_catalog.getSQLCatalogIdList()

  def SQLConnectionIDs(self):
    """
    Wrapper around the CatalogTool method of the same name
    """
return self.portal_catalog.SQLConnectionIDs()

  def getArchiveIdList(self):
    """
    Return the list of usable archives, as displayed to the user
    """
return ["%s - %s" %(x.getId(), x.getTitle()) for x in \
self.portal_catalog(portal_type="Archive",
validation_state="ready")]

  def getArchiveList(self):
    """
    Return the list of archives used by the catalog
    """
    def _getArchiveList():
      return [x.getPath() for x in self.objectValues()
              if x.getValidationState() == "validated"]
# getArchiveList = CachingMethod(_getArchiveList,
# id='getArchiveList',
# cache_factory='erp5_content_short')
return _getArchiveList()

  def manage_archive(self, destination_archive_id,
                     archive_id,
                     update_destination_sql_catalog=None,
                     update_archive_sql_catalog=None,
                     clear_destination_sql_catalog=None,
                     clear_archive_sql_catalog=None,
                     REQUEST=None, RESPONSE=None):
    """
    This method is used to populate an archive from the current catalog.
    It is based on hot reindexing: starting from the current catalog, it
    creates a new current catalog plus an archive catalog.
    Archives are defined in portal_archives; they are predicates, so their
    test method is used to decide into which catalog each object must go.
    At the end it creates inventories in order to have consistent data
    within the new catalog.
    """
    # First check parameters for destination catalog
    if destination_archive_id == archive_id:
      raise ValueError, "Archive and destination archive can't be the same"
    portal_catalog = self.portal_catalog
# Guess connection id from current catalog
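    # The catalog's Z SQL Methods reference two connections: a normal one and
    # a deferred one; 'transactionless' connections are ignored here.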
source_catalog = portal_catalog.getSQLCatalog()
source_catalog_id = source_catalog.getId()
source_connection_id = None
source_deferred_connection_id = None
for method in source_catalog.objectValues():
if method.meta_type == "Z SQL Method":
if 'deferred' in method.connection_id:
source_deferred_connection_id = method.connection_id
elif 'transactionless' not in method.connection_id:
source_connection_id = method.connection_id
if source_connection_id is not None and \
source_deferred_connection_id is not None:
break
if source_connection_id is None or source_deferred_connection_id is None:
raise ValueError, "Unable to determine connection id for the current catalog"
    # Get destination properties from the destination archive
destination_archive_id = destination_archive_id.split(' - ')[0]
destination_archive = self._getOb(destination_archive_id)
destination_sql_catalog_id = destination_archive.getCatalogId()
destination_connection_id = destination_archive.getConnectionId()
destination_deferred_connection_id = destination_archive.getDeferredConnectionId()
    # Get archive properties from the archive
archive_id = archive_id.split(' - ')[0]
archive = self._getOb(archive_id)
archive_sql_catalog_id = archive.getCatalogId()
archive_connection_id = archive.getConnectionId()
archive_deferred_connection_id = archive.getDeferredConnectionId()
# Check we don't use same connection id for source and destination
if destination_sql_catalog_id == source_catalog_id:
raise ValueError, "Destination and source catalog can't be the same"
if destination_connection_id == source_connection_id:
raise ValueError, "Destination and source connection can't be the same"
if destination_deferred_connection_id == source_deferred_connection_id:
raise ValueError, "Destination and source deferred connection can't be the same"
# Same for source and archive
if archive_sql_catalog_id == source_catalog_id:
raise ValueError, "Archive and source catalog can't be the same"
if archive_connection_id == source_connection_id:
raise ValueError, "Archive and source connection can't be the same"
if archive_deferred_connection_id == source_deferred_connection_id:
raise ValueError, "Archive and source deferred connection can't be the same"
# Same for destination and archive
if archive_sql_catalog_id == destination_sql_catalog_id:
raise ValueError, "Archive and destination catalog can't be the same"
if archive_connection_id == destination_connection_id:
raise ValueError, "Archive and destination connection can't be the same"
if archive_deferred_connection_id == destination_deferred_connection_id:
raise ValueError, "Archive and destination deferred connection can't be the same"
# Update connection id in destination and archive catalog if asked
destination_sql_catalog = getattr(portal_catalog, destination_sql_catalog_id)
if update_destination_sql_catalog:
sql_connection_id_dict = {source_connection_id : destination_connection_id,
source_deferred_connection_id : destination_deferred_connection_id}
portal_catalog.changeSQLConnectionIds(destination_sql_catalog,
sql_connection_id_dict)
archive_sql_catalog = getattr(portal_catalog, archive_sql_catalog_id)
if update_archive_sql_catalog:
sql_connection_id_dict = {source_connection_id : archive_connection_id,
source_deferred_connection_id : archive_deferred_connection_id}
portal_catalog.changeSQLConnectionIds(archive_sql_catalog,
sql_connection_id_dict)
# Clear destination and archive catalog if asked
if clear_destination_sql_catalog:
portal_catalog.manage_catalogClear(sql_catalog_id=destination_sql_catalog_id)
if clear_archive_sql_catalog:
portal_catalog.manage_catalogClear(sql_catalog_id=archive_sql_catalog_id)
    # Validate the archive and the destination archive
archive.validate()
destination_archive.validate()
# Call hot reindexing
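    # manage_hotReindexAll reindexes every object; the archive given by
    # archive_path acts as a predicate whose test method decides whether an
    # object goes into the destination catalog or into the archive catalog.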
portal_catalog.manage_hotReindexAll(source_sql_catalog_id=source_catalog_id,
destination_sql_catalog_id=destination_sql_catalog_id,
archive_path=archive.getPath(),
source_sql_connection_id_list=[source_connection_id, source_deferred_connection_id],
destination_sql_connection_id_list=[destination_connection_id, destination_deferred_connection_id],
REQUEST=REQUEST, RESPONSE=RESPONSE)
    # Create inventory just before the end of hot reindexing
    inventory_date = "%s 23:59:59" % str(archive.getStopDateRangeMax().Date())
    LOG("ArchiveTool", INFO, "inventory_date = %s" % inventory_date)
    self.activate(passive_commit=1,
                  after_method_id=('playBackRecordedObjectList',),
                  priority=5).runInventoryMethod(archive.id,
                                                 source_connection_id,
                                                 destination_sql_catalog_id,
                                                 inventory_date)
if RESPONSE is not None:
URL1 = REQUEST.get('URL1')
RESPONSE.redirect(URL1 + '/portal_archives?portal_status_message=Archiving%20Started')

  def runInventoryMethod(self, archive_id, source_connection_id,
                         destination_sql_catalog_id, inventory_date):
    """
    Use a specific method to create the inventory, so that it can be
    executed through an activity
    """
#destination_sql_catalog = getattr(self.portal_catalog, destination_sql_catalog_id)
archive = self._getOb(archive_id)
inventory_method_id = archive.getInventoryMethodId()
inventory_method = getattr(archive, inventory_method_id, None)
if inventory_method is not None:
inventory_method(source_connection_id, destination_sql_catalog_id, inventory_date, tag='runInventoryMethod')


InitializeClass(ArchiveTool)