Commit ce307bdc authored by Georgios Dagkakis's avatar Georgios Dagkakis

Plugin updated so that it does not add stations when the machine is inside a subline.

parent 1e254ca7
...@@ -17,65 +17,104 @@ class AddBatchStations(plugin.InputPreparationPlugin): ...@@ -17,65 +17,104 @@ class AddBatchStations(plugin.InputPreparationPlugin):
nodes=copy(data['graph']['node']) nodes=copy(data['graph']['node'])
edges=copy(data['graph']['edge']) edges=copy(data['graph']['edge'])
data_uri_encoded_input_data = data['input'].get(self.configuration_dict['input_id'], {}) data_uri_encoded_input_data = data['input'].get(self.configuration_dict['input_id'], {})
# get the number of units for a standard batch
standardBatchUnits=0
for node_id, node in nodes.iteritems():
if node['_class']=='Dream.BatchSource':
standardBatchUnits=int(node['numberOfUnits'])
# loop through the nodes to find the machines that do need addition
machinesThatNeedAddition={}
for node_id, node in nodes.iteritems(): for node_id, node in nodes.iteritems():
if node['_class']=='Dream.BatchScrapMachine' and self.checkIfMachineProcessesBatches(node_id): if node['_class']=='Dream.BatchScrapMachine' and self.checkIfMachineNeedsAddition(data,node_id,standardBatchUnits):
#create a batchDecomposition machinesThatNeedAddition[node_id]=node
batchDecompositionId=node_id+'_D'
data['graph']['node'][batchDecompositionId]={ # loop through the nodes
"name": batchDecompositionId, for node_id, node in machinesThatNeedAddition.iteritems():
"processingTime": { # find BatchScrapMachines that process batches
"Fixed": { import math
"mean": 0 workingBatchSize=int((node.get('workingBatchSize')))
} numberOfSubBatches=int((math.ceil((standardBatchUnits/float(workingBatchSize)))))
},
"numberOfSubBatches": 8, #create a batchDecomposition
"wip": [], batchDecompositionId=node_id+'_D'
"element_id": "DreamNode_39", data['graph']['node'][batchDecompositionId]={
"_class": "Dream.BatchDecompositionBlocking", "name": batchDecompositionId,
"id": batchDecompositionId "processingTime": {
} "Fixed": {
#put the batchDecomposition between the predecessor and the node "mean": 0
for edge_id, edge in edges.iteritems(): }
if edge['destination']==node_id: },
source=edge['source'] "numberOfSubBatches": numberOfSubBatches,
# remove the edge "wip": [],
data['graph']['edge'].pop(edge_id,None) "element_id": "DreamNode_39",
# add an edge from source to batchDecomposition "_class": "Dream.BatchDecompositionBlocking",
self.addEdge(data, source, batchDecompositionId) "id": batchDecompositionId
# add an edge from batchDecomposition machine }
self.addEdge(data, batchDecompositionId, node_id) #put the batchDecomposition between the predecessor and the node
#create a batchReassembly for edge_id, edge in edges.iteritems():
batchReassemblyId=node_id+'_R' if edge['destination']==node_id:
data['graph']['node'][batchReassemblyId]={ source=edge['source']
"name": batchReassemblyId, # remove the edge
"processingTime": { data['graph']['edge'].pop(edge_id,None)
"Fixed": { # add an edge from source to batchDecomposition
"mean": 0 self.addEdge(data, source, batchDecompositionId)
} # add an edge from batchDecomposition machine
}, self.addEdge(data, batchDecompositionId, node_id)
"outputResults": 1, #create a batchReassembly
"numberOfSubBatches": 8, batchReassemblyId=node_id+'_R'
"wip": [], data['graph']['node'][batchReassemblyId]={
"_class": "Dream.BatchReassemblyBlocking", "name": batchReassemblyId,
"id": batchReassemblyId "processingTime": {
} "Fixed": {
#put the batchReassembly between the node and the successor "mean": 0
for edge_id, edge in edges.iteritems(): }
if edge['source']==node_id: },
destination=edge['destination'] "outputResults": 1,
# remove the edge "numberOfSubBatches": numberOfSubBatches,
data['graph']['edge'].pop(edge_id,None) "wip": [],
# add an edge from machine to batchReassembly "_class": "Dream.BatchReassemblyBlocking",
self.addEdge(data, node_id, batchReassemblyId) "id": batchReassemblyId
# add an edge from batchReassembly to destination }
self.addEdge(data, batchReassemblyId, destination) #put the batchReassembly between the node and the successor
# dataString=json.dumps(data['graph']['edge'], indent=5) for edge_id, edge in edges.iteritems():
# print dataString if edge['source']==node_id:
destination=edge['destination']
# remove the edge
data['graph']['edge'].pop(edge_id,None)
# add an edge from machine to batchReassembly
self.addEdge(data, node_id, batchReassemblyId)
# add an edge from batchReassembly to destination
self.addEdge(data, batchReassemblyId, destination)
dataString=json.dumps(data['graph']['edge'], indent=5)
#print dataString
return data return data
# returns true if it is needed to add decomposition/reassembly
def checkIfMachineNeedsAddition(self, data, machineId, standardBatchUnits):
    """Return True if decomposition/reassembly stations must be added
    around the machine identified by machineId.

    No addition is needed when:
    * the machine already works on full batches, i.e. its
      workingBatchSize is equal to or higher than standardBatchUnits, or
    * the machine lies inside a subline, i.e. walking upstream we meet a
      Dream.BatchDecomposition before any Dream.BatchReassembly.
    """
    nodes = data['graph']['node']
    # default to the standard batch size when the machine defines none,
    # which makes the check below return False (no addition needed)
    workingBatchSize = int(nodes[machineId].get('workingBatchSize', standardBatchUnits))
    # if the workingBatchSize is equal or higher to standardBatchUnits
    # we do not need to add decomposition/reassembly
    if workingBatchSize >= standardBatchUnits:
        return False
    # walk upstream through the predecessors
    currentId = machineId
    visited = set()  # guard against cycles in the graph (original code could loop forever)
    while currentId not in visited:
        visited.add(currentId)
        predecessorIdsList = self.findPredecessors(data, currentId)
        # if there is no predecessor, i.e. the start was reached, stop searching
        if not predecessorIdsList:
            break
        # get the first. In this model every machine is fed by one point
        predecessorId = predecessorIdsList[0]
        predecessorClass = nodes[predecessorId]['_class']
        # if BatchDecomposition is reached we are in a subline: no addition
        if predecessorClass == 'Dream.BatchDecomposition':
            return False
        # if BatchReassembly is reached we are not in a subline: add stations
        elif predecessorClass == 'Dream.BatchReassembly':
            return True
        currentId = predecessorId
    # start of line reached (or cycle detected) without meeting either
    # station class: default to adding the stations
    return True
\ No newline at end of file
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment