Commit c794942b authored by Levin Zimmermann

erp5_wendelin: Add test_16_createDataAnalysisFromDataTransformationWithoutResolution

parent 929f7b7a
@@ -146,7 +146,6 @@ class Test(ERP5TypeTestCase):
    data_stream.setData(None)
    self.tic()

  def test_01_1_IngestionFromOldFluentd(self):
    self.test_01_IngestionFromFluentd(True)
@@ -633,3 +632,86 @@ result = [x for x in data_bucket_stream.getBucketIndexKeySequenceByIndex()]
    self.assertEqual(data_array.getArrayDtypeNames(), (dtype_name0,))
    data_array.setArrayDtypeNames((dtype_name1,))
    self.assertEqual(data_array.getArrayDtypeNames(), (dtype_name1,))

  def test_16_createDataAnalysisFromDataTransformationWithoutResolution(self):
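    # Set up the minimal documents needed so that the wendelin_handle_analysis
    # alarm can create a Data Analysis for a Data Transformation that does not
    # itself carry a resolution variation (only the initial Data Analysis
    # output line does).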
    portal = self.portal
    title_prefix = "Wendelin Test 16"
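
    # Data Operation pointing at the conversion script used by the
    # "Convert A to B" line below; the script id is assumed to refer to a
    # script provided by the erp5_wendelin test skins.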
    data_operation = portal.data_operation_module.newContent(
      portal_type="Data Operation",
      title="%s Data Operation" % title_prefix,
      script_id="DataAnalysisLine_testWendelinConvertAToB",
    )
    self.addCleanup(self._removeDocument, data_operation)
    data_operation.validate()
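
    # Data Product varied by the "resolution" individual variation base
    # category; "20S" below is the concrete resolution used by the initial
    # analysis output line.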
    resource = portal.data_product_module.newContent(
      portal_type="Data Product",
      title="%s Data Product" % title_prefix,
      individual_variation_base_category_list=["resolution"],
    )
    self.addCleanup(self._removeDocument, resource)
    resource.validate()
    resource_resolution = resource.newContent(
      portal_type="Product Individual Variation",
      title="20S",
    )
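
    # Data Transformation that the initial Data Analysis specialises.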
    specialise_data_transformation = portal.data_transformation_module.newContent(
      portal_type="Data Transformation",
      title="%s Specialise Data Transformation" % title_prefix,
      resource=resource.getRelativeUrl(),
    )
    self.addCleanup(self._removeDocument, specialise_data_transformation)
    specialise_data_transformation.validate()
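
    # Started Data Analysis that specialises the transformation above; its
    # lines below are the only place where the resolution variation is set.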
    initial_data_analysis = portal.data_analysis_module.newContent(
      portal_type="Data Analysis",
      title="%s Import Raw Data" % title_prefix,
      reference="wendelin.test.16.initial.data.analysis",
      resource=resource.getRelativeUrl(),
      specialise_value_list=[specialise_data_transformation.getRelativeUrl()],
    )
    self.addCleanup(self._removeDocument, initial_data_analysis)
    initial_data_analysis.start()
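
    # Output line carrying the resolution variation and the ingestion stream
    # use category.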
    initial_data_analysis.newContent(
      portal_type="Data Analysis Line",
      title="Raw Array",
      reference="out_array",
      resource=resource.getRelativeUrl(),
      quantity=1,
      variation_category_list=["resolution/%s" % resource_resolution.getRelativeUrl()],
      use="use/big_data/ingestion/stream",
    )
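
    # Operation line referencing the Data Operation to apply.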
    initial_data_analysis.newContent(
      portal_type="Data Analysis Line",
      title="Convert A to B",
      reference="data_operation",
      resource=data_operation.getRelativeUrl(),
      quantity=1,
    )
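
    # The Data Transformation under test: it references the Data Product but
    # defines no resolution variation (and no lines) of its own.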
    data_transformation = portal.data_transformation_module.newContent(
      portal_type="Data Transformation",
      title="%s Data Transformation" % title_prefix,
      resource=resource.getRelativeUrl(),
    )
    self.addCleanup(self._removeDocument, data_transformation)
    data_transformation.validate()
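
    # Helper: look up a Data Analysis generated for this transformation,
    # matched by title.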
    def getDataAnalysis():
      return portal.portal_catalog.getResultValue(
        portal_type="Data Analysis",
        title=data_transformation.getTitle()
      )

    self.assertEqual(getDataAnalysis(), None)
    self.tic()
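
    # Let the wendelin_handle_analysis alarm build the Data Analysis; two
    # passes are used, presumably so that documents created by the first pass
    # are processed as well.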
    for _ in range(2):
      self.portal.portal_alarms.wendelin_handle_analysis.activeSense()
      self.tic()
    data_analysis = getDataAnalysis()
    self.assertNotEqual(data_analysis, None)