Commit 7bce931e
authored Oct 01, 2018 by Klaus Wölfel
committed by Eteri on Jan 28, 2022
processing of analysis with transient items
parent 64feca5e
Showing 2 changed files with 36 additions and 14 deletions (+36, -14)

bt5/erp5_wendelin/SkinTemplateItem/portal_skins/erp5_wendelin/DataAnalysis_executeDataOperation.py  +13 -7
bt5/erp5_wendelin/SkinTemplateItem/portal_skins/erp5_wendelin/ERP5Site_createDataAnalysisList.py  +23 -7
bt5/erp5_wendelin/SkinTemplateItem/portal_skins/erp5_wendelin/DataAnalysis_executeDataOperation.py
 portal = context.getPortalObject()
 operation = None
-use = None
+use_list = []
 parameter_dict = {}
+transient_output_item = None
 context.checkConsistency(fixit=True)
 initial_product = context.getSpecialiseValue(portal_type="Data Transformation").getResourceValue()
-for analysis_line in context.objectValues(portal_type="Data Analysis Line"):
+analysis_line_list = [(a.getIntIndex(), a) for a in context.objectValues(portal_type="Data Analysis Line")]
+for int_index, analysis_line in sorted(analysis_line_list):
   resource = analysis_line.getResourceValue()
   if resource == initial_product:
-    use = analysis_line.getUse()
+    use_list = analysis_line.getUseList()
   if resource is not None:
     resource_portal_type = resource.getPortalType()
   else:
@@ -17,16 +19,16 @@ for analysis_line in context.objectValues(portal_type="Data Analysis Line"):
     operation = analysis_line.getResourceValue()
   else:
     parameter = {}
-    for portal_type in ["Data Array", "Progress Indicator"] + \
+    for portal_type in ["Data Array", "Data Array View", "Progress Indicator"] + \
        list(portal.getPortalDataSinkTypeList()) + \
        list(portal.getPortalDataDescriptorTypeList()):
       value = analysis_line.getAggregateValue(portal_type=portal_type)
       if value is not None:
         parameter[portal_type] = value
-    if analysis_line.getQuantity() < 0 and analysis_line.getUse() == "big_data/analysis/transient":
+    if analysis_line.getQuantity() < 0 and "big_data/analysis/transient" in analysis_line.getUseList():
       # at the moment we only support transient data arrays
       parameter['Data Array'] = transient_input_item
-    if analysis_line.getQuantity() > 0 and analysis_line.getUse() == "big_data/analysis/transient":
+    if analysis_line.getQuantity() > 0 and "big_data/analysis/transient" in analysis_line.getUseList():
       # at the moment we only support transient data arrays
       transient_output_item = portal.data_array_module.newContent(portal_type='Data Array', temp_object=True)
@@ -42,6 +44,10 @@ for analysis_line in context.objectValues(portal_type="Data Analysis Line"):
       parameter_dict[reference].append(parameter)
     else:
       parameter_dict[reference] = parameter
+
+if transient_output_item is not None and not consuming_analysis_list:
+  return
+
 script_id = operation.getScriptId()
 out = getattr(operation_analysis_line, script_id)(**parameter_dict)
@@ -52,5 +58,5 @@ if out == 1:
   context.activate(serialization_tag=str(context.getUid())).DataAnalysis_executeDataOperation(consuming_analysis_list)
 else:
   # only stop batch ingestions
-  if use == "big_data/ingestion/batch":
+  if "big_data/ingestion/batch" in use_list:
     context.stop()
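
The behavioural core of this first file is twofold: the script now reads the use categories of a line as a list (getUseList) and tests membership, instead of comparing the single string returned by getUse(), and it returns early when the analysis would only produce a transient Data Array that no consuming analysis has asked for. The following stand-alone Python sketch restates that logic outside of ERP5; the AnalysisLine class and the should_execute() helper are illustrative names only, not part of the script.

TRANSIENT = "big_data/analysis/transient"

class AnalysisLine(object):
    def __init__(self, quantity, use_list):
        self.quantity = quantity
        # A line may now carry several use categories, so membership is
        # tested on a list instead of comparing one string for equality.
        self.use_list = list(use_list)

    def is_transient_output(self):
        # Positive quantity marks a produced item, as in the quantity > 0 branch.
        return self.quantity > 0 and TRANSIENT in self.use_list

def should_execute(analysis_lines, consuming_analysis_list):
    # Mirrors the added early return: skip the data operation when it would
    # only produce a transient item that no consuming analysis requests.
    has_transient_output = any(l.is_transient_output() for l in analysis_lines)
    return not (has_transient_output and not consuming_analysis_list)

lines = [AnalysisLine(-1, [TRANSIENT]), AnalysisLine(1, [TRANSIENT])]
print(should_execute(lines, consuming_analysis_list=[]))      # False: no consumer, skip
print(should_execute(lines, consuming_analysis_list=["a1"]))  # True: run and feed the consumer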
bt5/erp5_wendelin/SkinTemplateItem/portal_skins/erp5_wendelin/ERP5Site_createDataAnalysisList.py
@@ -101,16 +101,22 @@ for movement in portal_catalog(query):
        resource_relative_url=resource.getRelativeUrl())
     for related_movement in related_movement_list:
       aggregate_set.update(related_movement.getAggregateSet())
-      if related_movement.getUse() == "big_data/ingestion/batch":
+      if "big_data/ingestion/batch" in related_movement.getUseList():
         related_movement.getParentValue().deliver()
     # create new item based on item_type if it is not already aggregated
     aggregate_type_set = set(
       [portal.restrictedTraverse(a).getPortalType() for a in aggregate_set])
     for item_type in transformation_line.getAggregatedPortalTypeList():
-      # create item if it does note exist yet.
-      # Except if it is a Data Array Line, then it is currently created by
-      # data operation itself (probably this exception is inconsistent)
-      if item_type not in aggregate_type_set and item_type != "Data Array Line":
+      # Create item if it does note exist yet.
+      # Do not create item if it is a Data Array Line, then it is created by
+      # data operation itself (probably this exception is inconsistent).
+      # Do not create item if it is a transient Data Array.
+      # Do not create item if it is an input Data Array
+      if item_type not in aggregate_type_set \
+          and item_type != "Data Array Line" \
+          and not (item_type == "Data Array" \
+                   and "big_data/analysis/transient" in transformation_line.getUseList()) \
+          and not (quantity < 0 and item_type == "Data Array"):
         item = portal.portal_catalog.getResultValue(
           portal_type=item_type,
           validation_state="validated",
@@ -141,7 +147,9 @@ for movement in portal_catalog(query):
         if line.getResourceValue().getPortalType() == "Data Operation":
           aggregate_set.update(line.getAggregateList())
-    data_analysis.newContent(
+    tag = "%s-%s" % (data_analysis.getUid(), transformation_line.getUid())
+    data_analysis_line = data_analysis.newContent(
+      activate_kw={'tag': tag},
       portal_type="Data Analysis Line",
       title=transformation_line.getTitle(),
       reference=transformation_line.getReference(),
@@ -150,7 +158,15 @@ for movement in portal_catalog(query):
       variation_category_list=transformation_line.getVariationCategoryList(),
       quantity=quantity,
       quantity_unit=transformation_line.getQuantityUnit(),
-      use=transformation_line.getUse(),
+      use_list=transformation_line.getUseList(),
       aggregate_set=aggregate_set)
+    # fix consistency of line and all affected items. Do it after reindexing
+    # activities of newly created Data Analysis Line finished, because check
+    # consistency script might need to find the newly created Data Analysis
+    # Line in catalog.
+    data_analysis_line.checkConsistency(fixit=True)
+    for item in data_analysis_line.getAggregateValueList():
+      item.activate(after_tag=tag).checkConsistency(fixit=True)
     data_analysis.start()
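
The second file extends the guard that decides whether a persistent item has to be created for a Data Analysis Line. Below is a stand-alone restatement of that guard as a predicate, kept deliberately close to the added condition; the function name and its argument list are illustrative only, since the script evaluates the same conditions inline against transformation_line and quantity.

TRANSIENT = "big_data/analysis/transient"

def should_create_item(item_type, aggregate_type_set, use_list, quantity):
    # An item is only created when it is not already aggregated, is not a
    # Data Array Line (the data operation creates those itself), and is not
    # a Data Array that is either transient or an input (negative quantity).
    if item_type in aggregate_type_set:
        return False
    if item_type == "Data Array Line":
        return False
    if item_type == "Data Array" and TRANSIENT in use_list:
        return False
    if item_type == "Data Array" and quantity < 0:
        return False
    return True

print(should_create_item("Data Array", set(), [TRANSIENT], quantity=1))  # False: transient output
print(should_create_item("Data Array", set(), [], quantity=1))           # True: persistent output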