Eteri / klaus_wendelin · Commits

Commit b04e1a04 authored Jul 05, 2019 by Eteri
erp5_wendelin : fix ERP5Site_createDataAnalysisList for new functionality
parent 9b8f5496

Showing 1 changed file with 42 additions and 3 deletions

bt5/erp5_wendelin/SkinTemplateItem/portal_skins/erp5_wendelin/ERP5Site_createDataAnalysisList.py  +42 -3
@@ -4,7 +4,6 @@ from Products.ERP5Type.Errors import UnsupportedWorkflowMethod
 portal = context.getPortalObject()
 portal_catalog = portal.portal_catalog
 now = DateTime()
 if not include_delivered:
@@ -49,6 +48,7 @@ for movement in portal_catalog(query):
     validation_state="validated",
     resource_relative_url=movement.getResource()))
   for transformation in transformation_list:
     is_shared_data_analysis = False
     # Check if analysis already exists
     data_analysis = portal_catalog.getResultValue(
@@ -85,6 +85,7 @@ for movement in portal_catalog(query):
         destination=delivery.getDestination(),
         destination_section=delivery.getDestinationSection(),
         destination_project=delivery.getDestinationProject())
       data_analysis.checkConsistency(fixit=True)
     # create input and output lines
     for transformation_line in transformation.objectValues(
@@ -99,6 +100,10 @@ for movement in portal_catalog(query):
       if is_shared_data_analysis and quantity > -1:
         continue
+      aggregate_set = set()
+      # manually add device to every line
+      aggregate_set.add(movement.getAggregateDevice())
       # If it is batch processing we additionally get items from the other
       # batch movements and deliver the other batch movements
       if transformation_line.getUse() == "big_data/ingestion/batch" and \
@@ -116,6 +121,41 @@ for movement in portal_catalog(query):
           #aggregate_set.update(related_movement.getAggregateSet())
           related_movement.getParentValue().deliver()
+      # create new item based on item_type if it is not already aggregated
+      aggregate_type_set = set([portal.restrictedTraverse(a).getPortalType() for a in aggregate_set])
+      for item_type in transformation_line.getAggregatedPortalTypeList():
+        # create item if it does note exist yet.
+        # Except if it is a Data Array Line, then it is currently created by
+        # data operation itself (probably this exception is inconsistent)
+        if item_type not in aggregate_type_set and item_type != "Data Array Line":
+          item = portal.portal_catalog.getResultValue(
+            portal_type=item_type,
+            validation_state="validated",
+            item_variation_text=transformation_line.getVariationText(),
+            item_device_relative_url=movement.getAggregateDevice(),
+            item_project_relative_url=data_analysis.getDestinationProject(),
+            item_resource_uid=resource.getUid(),
+            item_source_relative_url=data_analysis.getSource())
+          #if transformation_line.getRelativeUrl() == "data_transformation_module/woelfel_r0331_statistic_raw":
+          #  raise TypeError("JUST STOP")
+          if item is None:
+            module = portal.getDefaultModule(item_type)
+            item = module.newContent(
+              portal_type=item_type,
+              title=transformation.getTitle(),
+              reference="%s-%s" % (transformation.getTitle(), delivery.getReference()),
+              version='001')
+            try:
+              item.validate()
+            except AttributeError:
+              pass
+          aggregate_set.add(item.getRelativeUrl())
       data_analysis_line = data_analysis.newContent(
         portal_type="Data Analysis Line",
         title=transformation_line.getTitle(),
@@ -126,14 +166,13 @@ for movement in portal_catalog(query):
         quantity=quantity,
         quantity_unit=transformation_line.getQuantityUnit(),
         use=transformation_line.getUse(),
-        aggregate=movement.getAggregateDevice())
+        aggregate_set=aggregate_set)
       # for intput lines of first level analysis set causality and specialise
       if quantity < 0 and delivery.getPortalType() == "Data Ingestion":
         data_analysis_line.edit(
           causality_value=delivery,
           specialise_value_list=data_supply_list)
     data_analysis.checkConsistency(fixit=True)
     try:
       data_analysis.start()
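For readability, the item-aggregation logic added in this commit can also be read as a standalone sketch, pulled out of the nested movement / transformation loops. This paraphrase is not part of the commit: the helper name build_aggregate_set is hypothetical, and its parameters (portal, movement, transformation, transformation_line, data_analysis, delivery, resource) are assumed to be bound exactly as they are in the Script (Python) itself.

def build_aggregate_set(portal, movement, transformation, transformation_line,
                        data_analysis, delivery, resource):
  """Collect the relative URLs to aggregate on the new Data Analysis Line."""
  aggregate_set = set()
  # every line aggregates the device of the source movement
  aggregate_set.add(movement.getAggregateDevice())
  # portal types already covered by the aggregated documents
  aggregate_type_set = set(
      portal.restrictedTraverse(a).getPortalType() for a in aggregate_set)
  for item_type in transformation_line.getAggregatedPortalTypeList():
    # Data Array Lines are created later by the data operation itself
    if item_type in aggregate_type_set or item_type == "Data Array Line":
      continue
    # reuse a validated item if the catalog already knows a matching one
    item = portal.portal_catalog.getResultValue(
        portal_type=item_type,
        validation_state="validated",
        item_variation_text=transformation_line.getVariationText(),
        item_device_relative_url=movement.getAggregateDevice(),
        item_project_relative_url=data_analysis.getDestinationProject(),
        item_resource_uid=resource.getUid(),
        item_source_relative_url=data_analysis.getSource())
    if item is None:
      # otherwise create a fresh item and validate it when a workflow exists
      module = portal.getDefaultModule(item_type)
      item = module.newContent(
          portal_type=item_type,
          title=transformation.getTitle(),
          reference="%s-%s" % (transformation.getTitle(), delivery.getReference()),
          version='001')
      try:
        item.validate()
      except AttributeError:
        pass  # the portal type may have no validation workflow
    aggregate_set.add(item.getRelativeUrl())
  return aggregate_set

The set assembled this way is what the script now passes as aggregate_set=aggregate_set when creating each Data Analysis Line, replacing the earlier aggregate=movement.getAggregateDevice().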