Commit c0170922 (Eteri / klaus_wendelin)
Authored Jun 11, 2023 by Martin Manchev; committed by Ivan Tyagov on Jun 11, 2023
Revert "Changes in 'erp5_wendelin' bt ..."
This reverts commit e6f7b0097f7c27109fd61c1692c3588b625bd90e.
parent 8ca6ce99

Showing 8 changed files with 19 additions and 152 deletions (+19 -152)
bt5/erp5_wendelin/DocumentTemplateItem/portal_components/document.erp5.DataArray.py            +8  -7
bt5/erp5_wendelin/DocumentTemplateItem/portal_components/document.erp5.DataArray.xml           +1  -5
bt5/erp5_wendelin/DocumentTemplateItem/portal_components/document.erp5.DataArrayView.py        +1  -1
bt5/erp5_wendelin/DocumentTemplateItem/portal_components/document.erp5.DataBucketStream.py     +3  -3
bt5/erp5_wendelin/DocumentTemplateItem/portal_components/document.erp5.DataBucketStream.xml    +4  -28
bt5/erp5_wendelin/SkinTemplateItem/portal_skins/erp5_wendelin/DataStream_convertoNumpyArray.py   +0  -44
bt5/erp5_wendelin/SkinTemplateItem/portal_skins/erp5_wendelin/DataStream_convertoNumpyArray.xml  +0  -62
bt5/erp5_wendelin/SkinTemplateItem/portal_skins/erp5_wendelin/ERP5Site_stopIngestionList.py      +2  -2
bt5/erp5_wendelin/DocumentTemplateItem/portal_components/document.erp5.DataArray.py
@@ -124,7 +124,7 @@ class DataArray(BigFile):
     zarray = self.getArray()
     if zarray is not None:
       return zarray.dtype

   security.declareProtected(Permissions.AccessContentsInformation, 'getArrayDtypeNames')
   def getArrayDtypeNames(self):
     """

@@ -142,7 +142,7 @@ class DataArray(BigFile):
     self.getArray().dtype.names = names

   security.declareProtected(Permissions.View, 'index_html')
-  def index_html(self, REQUEST, RESPONSE, format=_MARKER, inline=_MARKER, **kw):
+  def index_html(self, REQUEST, RESPONSE, format=_MARKER, inline=_MARKER, **kw): # pylint:disable=redefined-builtin
     """
     Support streaming
     """

@@ -174,15 +174,15 @@ class DataArray(BigFile):
         RESPONSE.write(self.getArray()[tuple(slice_index_list)].tobytes())
         return True

-    range = REQUEST.get_header('Range', None)
+    http_range = REQUEST.get_header('Range', None)
     request_range = REQUEST.get_header('Request-Range', None)
     if request_range is not None:
       # Netscape 2 through 4 and MSIE 3 implement a draft version
       # Later on, we need to serve a different mime-type as well.
-      range = request_range
+      http_range = request_range
     if_range = REQUEST.get_header('If-Range', None)
-    if range is not None:
-      ranges = HTTPRangeSupport.parseRange(range)
+    if http_range is not None:
+      ranges = HTTPRangeSupport.parseRange(http_range)
     array = self.getArray()

@@ -200,7 +200,7 @@ class DataArray(BigFile):
           # Date
           date = if_range.split(';')[0]
           try:
             mod_since = long(DateTime(date).timeTime())
-          except:
-            mod_since = None
+          except Exception:
+            mod_since = None
           if mod_since is not None:
             last_mod = self._data_mtime()
             if last_mod is None:

@@ -291,3 +291,4 @@ class DataArray(BigFile):
     data = '{}\r\n--{}--\r\n'.format(data, boundary)
     RESPONSE.setBody(data, lock=True)
+    return True
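For orientation, the index_html hunks above deal with HTTP byte-range serving: the method writes slices of the stored numpy array to RESPONSE and honours the Range, Request-Range and If-Range headers, parsing them with HTTPRangeSupport.parseRange. The rename to http_range (and the pylint pragma for format) only stops local names from shadowing Python built-ins; the header handling itself is unchanged. A rough, self-contained sketch of what parsing such a header involves (illustrative helper, not Zope's or Wendelin's API):

def parse_byte_range(header, size):
  """Return (start, stop) for a simple 'bytes=START-END' header, or None if absent/malformed."""
  if not header or not header.startswith('bytes='):
    return None
  spec = header[len('bytes='):].split(',')[0].strip()   # first range only
  first, _, last = spec.partition('-')
  if first:                                # 'bytes=500-999' or 'bytes=500-'
    start = int(first)
    stop = int(last) + 1 if last else size
  else:                                    # 'bytes=-500' means the last 500 bytes
    start = max(size - int(last), 0)
    stop = size
  return (start, min(stop, size))

print(parse_byte_range('bytes=0-99', 1024))    # (0, 100)
print(parse_byte_range('bytes=-100', 1024))    # (924, 1024)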
bt5/erp5_wendelin/DocumentTemplateItem/portal_components/document.erp5.DataArray.xml
@@ -39,11 +39,7 @@
         <item>
             <key> <string>text_content_warning_message</string> </key>
             <value>
-              <tuple>
-                <string>W:145, 42: Redefining built-in \'format\' (redefined-builtin)</string>
-                <string>W:177, 4: Redefining built-in \'range\' (redefined-builtin)</string>
-                <string>W:203, 10: No exception type(s) specified (bare-except)</string>
-              </tuple>
+              <tuple/>
             </value>
         </item>
         <item>
bt5/erp5_wendelin/DocumentTemplateItem/portal_components/document.erp5.DataArrayView.py
@@ -73,7 +73,7 @@ class DataArrayView(DataArray):
   Data Array like view on one or multiple Data Arrays
   """

-  def initArray(self, shape, dtype):
+  def initArray(self, shape, dimensional_type):
     """
     Not Implemented.
     """
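The DataArray hunks read zarray.dtype and assign getArray().dtype.names, and DataArrayView.initArray takes the usual numpy (shape, dtype) pair, so the objects being manipulated are numpy structured arrays. A plain numpy illustration of those two attributes, independent of Wendelin:

import numpy as np

# a structured array: every record carries two named fields
zarray = np.zeros(3, dtype=[('timestamp', 'int64'), ('value', 'float64')])
print(zarray.dtype)          # [('timestamp', '<i8'), ('value', '<f8')]
print(zarray.dtype.names)    # ('timestamp', 'value')

# renaming the fields in place, which is what the
# 'self.getArray().dtype.names = names' context line above does for the stored array
zarray.dtype.names = ('ts', 'val')
print(zarray.dtype.names)    # ('ts', 'val')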
bt5/erp5_wendelin/DocumentTemplateItem/portal_components/document.erp5.DataBucketStream.py
@@ -126,10 +126,10 @@ class DataBucketStream(Document):
                     PropertySheet.SortIndex
                     )

-  def __init__(self, id, **kw):
+  def __init__(self, identifier, **kw):
     self.initBucketTree()
     self.initIndexTree()
-    Document.__init__(self, id, **kw)
+    Document.__init__(self, identifier, **kw)

   def __len__(self):
     return len(self._tree)

@@ -192,7 +192,7 @@ class DataBucketStream(Document):
     except ValueError:
       return None

-  def _getOb(self, id, *args, **kw):
+  def _getOb(self, identifier, *args, **kw):
     return None

   def getBucketByKey(self, key=None):
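The id to identifier renames follow the same pattern as format and range in DataArray.py: they clear pylint's redefined-builtin warnings recorded in the component's text_content_warning_message (visible in the .xml diff below), because a parameter named id hides the built-in id() for the whole function body. A minimal stand-alone illustration, not Wendelin code:

def bucket_count(id, **kw):
  # pylint: redefined-builtin -- 'id' now refers to the argument, so calling
  # id(...) in this body would no longer invoke the built-in
  return "bucket %s" % id

def bucket_count_fixed(identifier, **kw):
  # after the rename the built-in stays reachable
  return "bucket %s (object id %s)" % (identifier, id(identifier))

print(bucket_count(7))
print(bucket_count_fixed(7))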
bt5/erp5_wendelin/DocumentTemplateItem/portal_components/document.erp5.DataBucketStream.xml
@@ -6,12 +6,6 @@
     </pickle>
     <pickle>
       <dictionary>
-        <item>
-            <key> <string>_recorded_property_dict</string> </key>
-            <value>
-              <persistent> <string encoding="base64">AAAAAAAAAAI=</string> </persistent>
-            </value>
-        </item>
         <item>
             <key> <string>default_reference</string> </key>
             <value> <string>DataBucketStream</string> </value>

@@ -45,10 +39,7 @@
         <item>
             <key> <string>text_content_warning_message</string> </key>
             <value>
-              <tuple>
-                <string>W:124, 21: Redefining built-in \'id\' (redefined-builtin)</string>
-                <string>W:180, 19: Redefining built-in \'id\' (redefined-builtin)</string>
-              </tuple>
+              <tuple/>
             </value>
         </item>
         <item>

@@ -58,28 +49,13 @@
         <item>
             <key> <string>workflow_history</string> </key>
             <value>
-              <persistent> <string encoding="base64">AAAAAAAAAAM=</string> </persistent>
+              <persistent> <string encoding="base64">AAAAAAAAAAI=</string> </persistent>
             </value>
         </item>
       </dictionary>
     </pickle>
   </record>
   <record id="2" aka="AAAAAAAAAAI=">
     <pickle>
       <global name="PersistentMapping" module="Persistence.mapping"/>
     </pickle>
-    <pickle>
-      <dictionary>
-        <item>
-            <key> <string>data</string> </key>
-            <value> <dictionary/> </value>
-        </item>
-      </dictionary>
-    </pickle>
-  </record>
-  <record id="3" aka="AAAAAAAAAAM=">
-    <pickle>
-      <global name="PersistentMapping" module="Persistence.mapping"/>
-    </pickle>

@@ -92,7 +68,7 @@
         <item>
             <key> <string>component_validation_workflow</string> </key>
             <value>
-              <persistent> <string encoding="base64">AAAAAAAAAAQ=</string> </persistent>
+              <persistent> <string encoding="base64">AAAAAAAAAAM=</string> </persistent>
             </value>
         </item>
       </dictionary>

@@ -101,7 +77,7 @@
       </dictionary>
     </pickle>
   </record>
-  <record id="4" aka="AAAAAAAAAAQ=">
+  <record id="3" aka="AAAAAAAAAAM=">
     <pickle>
       <global name="WorkflowHistoryList" module="Products.ERP5Type.Workflow"/>
     </pickle>
bt5/erp5_wendelin/SkinTemplateItem/portal_skins/erp5_wendelin/DataStream_convertoNumpyArray.py
deleted 100644 → 0

"""
  Get a chunks of data from a Data Stream, convert it to numpy array
  and return proper start and end for next record.
  This script assumes stream has following format.
    {dict1}{dict2}
    {dict3}
  And it's possible that last chunk in its last line is incomplete dictionary
  thus correction needed.
"""
import json

chunk_text = ''.join(chunk_list)
#context.log('%s %s %s' %(start, end, len(chunk_text)))

# remove last line as it might be uncomplete and correct start and end offsets
line_list = chunk_text.split('\n')
last_line = line_list[-1]
line_list.pop(-1)

for line in line_list:
  # must have proper format
  assert line.endswith('}')
  assert line.startswith('{')
  # fix ' -> "
  line = line.replace("'", '"')
  if line.count('{') > 1:
    # multiple concatenated dictionaries in one line, bad format ignore for now
    pass
  else:
    d = json.loads(line)
    # xxx: save this value as a Data Array identified by data_array_reference

# start and enf offsets may not match existing record structure in stream
# thus corrections in start and end offsets is needed thus we
# return transformed values which is just last line length
start -= len(last_line)
end -= len(last_line)

return start, end
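The deleted script was a Zope Python Script; its parameter list, chunk_list, start, end, data_array_reference=None, is recorded in the companion .xml export below. Its offset correction simply rolls start and end back by the length of the possibly truncated last line, so that fragment is re-read with the next chunk. A stand-alone sketch of that correction with made-up sample data:

import json

chunk_list = ['{"a": 1}\n{"b": 2}\n', '{"c"']    # last line is an incomplete dictionary
start, end = 1000, 1000 + sum(len(c) for c in chunk_list)

chunk_text = ''.join(chunk_list)
line_list = chunk_text.split('\n')
last_line = line_list.pop(-1)                    # '{"c"' -- re-read it with the next chunk

records = [json.loads(line) for line in line_list if line]   # the complete lines

start -= len(last_line)
end -= len(last_line)
print(records, start, end)                       # [{'a': 1}, {'b': 2}] 996 1018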
bt5/erp5_wendelin/SkinTemplateItem/portal_skins/erp5_wendelin/DataStream_convertoNumpyArray.xml
deleted 100644 → 0

<?xml version="1.0"?>
<ZopeData>
  <record id="1" aka="AAAAAAAAAAE=">
    <pickle>
      <global name="PythonScript" module="Products.PythonScripts.PythonScript"/>
    </pickle>
    <pickle>
      <dictionary>
        <item>
            <key> <string>_bind_names</string> </key>
            <value>
              <object>
                <klass>
                  <global name="_reconstructor" module="copy_reg"/>
                </klass>
                <tuple>
                  <global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
                  <global name="object" module="__builtin__"/>
                  <none/>
                </tuple>
                <state>
                  <dictionary>
                    <item>
                        <key> <string>_asgns</string> </key>
                        <value>
                          <dictionary>
                            <item>
                                <key> <string>name_container</string> </key>
                                <value> <string>container</string> </value>
                            </item>
                            <item>
                                <key> <string>name_context</string> </key>
                                <value> <string>context</string> </value>
                            </item>
                            <item>
                                <key> <string>name_m_self</string> </key>
                                <value> <string>script</string> </value>
                            </item>
                            <item>
                                <key> <string>name_subpath</string> </key>
                                <value> <string>traverse_subpath</string> </value>
                            </item>
                          </dictionary>
                        </value>
                    </item>
                  </dictionary>
                </state>
              </object>
            </value>
        </item>
        <item>
            <key> <string>_params</string> </key>
            <value> <string>chunk_list, start, end, data_array_reference=None</string> </value>
        </item>
        <item>
            <key> <string>id</string> </key>
            <value> <string>DataStream_convertoNumpyArray</string> </value>
        </item>
      </dictionary>
    </pickle>
  </record>
</ZopeData>
bt5/erp5_wendelin/SkinTemplateItem/portal_skins/erp5_wendelin/ERP5Site_stopIngestionList.py
 from DateTime import DateTime
 from erp5.component.module.DateUtils import addToDate
-from Products.ZSQLCatalog.SQLCatalog import Query, SimpleQuery
+from Products.ZSQLCatalog.SQLCatalog import Query

 portal_catalog = context.getPortalObject().portal_catalog

@@ -55,6 +55,6 @@ if len(parent_uid_list) != 0:
     # we need to wait until there are 2 batches until we can stop it
     # TODO: this should be implemented in transformation, not here
     continue
-  data_ingestion.setStopDate(DateTime())
+  data_ingestion.stop()
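For context, ERP5Site_stopIngestionList is a site-level Python Script, and the hunk above comes from its final loop, which after this commit stops each started Data Ingestion through the stop() workflow transition instead of stamping a stop date directly. A schematic version of such a loop, with an assumed simplified catalog query in place of the script's real parent_uid_list handling:

portal_catalog = context.getPortalObject().portal_catalog

# assumed, simplified filter -- the real script first groups candidates via parent_uid_list
for brain in portal_catalog(portal_type='Data Ingestion',
                            simulation_state='started'):
  data_ingestion = brain.getObject()
  data_ingestion.stop()   # workflow transition; the old code called setStopDate(DateTime()) instead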