Commit b15c7d46 authored by Yusei Tahara

erp5_wendelin_examples_keras: Add primitive examples of Keras in wendelin.

parent 65315be6
import warnings
import numpy as np
from keras import backend as K
from keras import __version__ as keras_version
from keras.models import Sequential
from keras.models import model_from_config
from keras.optimizers import optimizer_from_config
from keras import optimizers
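# save_model converts a compiled Keras model into a plain dict (architecture
# config, layer weights and, when available, optimizer state) so that it can
# be pickled into an ERP5 Data Stream. It is adapted from the HDF5-based
# keras.models.save_model, with the file replaced by a dict.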
def save_model(model, model_store=None):
    data = {}
    data['keras_version'] = keras_version
    data['model_config'] = {'class_name': model.__class__.__name__,
                            'config': model.get_config()}
    # save weights
    if hasattr(model, 'flattened_layers'):
        # Support for legacy Sequential/Merge behavior.
        flattened_layers = model.flattened_layers
    else:
        flattened_layers = model.layers
    data['layer_names'] = [layer.name for layer in flattened_layers]
    layer_group = {}
    for layer in flattened_layers:
        group = layer_group[layer.name] = {}
        symbolic_weights = layer.weights
        weight_values = K.batch_get_value(symbolic_weights)
        weight_names = []
        for i, (w, val) in enumerate(zip(symbolic_weights, weight_values)):
            if hasattr(w, 'name') and w.name:
                name = str(w.name)
            else:
                name = 'param_' + str(i)
            weight_names.append(name)
        group['weight_names'] = weight_names
        group['weight_values'] = []
        for name, val in zip(weight_names, weight_values):
            group['weight_values'].append(val.copy())
    data['model_weights'] = layer_group
    if hasattr(model, 'optimizer'):
        if isinstance(model.optimizer, optimizers.TFOptimizer):
            warnings.warn(
                'TensorFlow optimizers do not '
                'make it possible to access '
                'optimizer attributes or optimizer state '
                'after instantiation. '
                'As a result, we cannot save the optimizer '
                'as part of the model save file. '
                'You will have to compile your model again after loading it. '
                'Prefer using a Keras optimizer instead '
                '(see keras.io/optimizers).')
        else:
            data['training_config'] = {
                'optimizer_config': {
                    'class_name': model.optimizer.__class__.__name__,
                    'config': model.optimizer.get_config()},
                'loss': model.loss,
                'metrics': model.metrics,
                'sample_weight_mode': model.sample_weight_mode,
                'loss_weights': model.loss_weights,
            }
            # save optimizer weights
            symbolic_weights = getattr(model.optimizer, 'weights', None)
            if symbolic_weights:
                data['optimizer_weights'] = {}
                weight_values = K.batch_get_value(symbolic_weights)
                weight_names = []
                for i, (w, val) in enumerate(zip(symbolic_weights, weight_values)):
                    if hasattr(w, 'name') and w.name:
                        name = str(w.name)
                    else:
                        name = 'param_' + str(i)
                    weight_names.append(name)
                data['optimizer_weights']['weight_names'] = weight_names
                data['optimizer_weights']['weight_values'] = []
                for name, val in zip(weight_names, weight_values):
                    data['optimizer_weights']['weight_values'].append(val.copy())
    return data
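# load_model is the inverse of save_model: it rebuilds the model from the
# stored config, restores the layer weights in one batched backend call, and
# recompiles the model with the saved optimizer, loss and metrics when a
# training configuration is present.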
def load_model(data):
    # instantiate model
    model_config = data['model_config']
    if model_config is None:
        raise ValueError('No model found in config file.')
    model = model_from_config(model_config)
    if hasattr(model, 'flattened_layers'):
        # Support for legacy Sequential/Merge behavior.
        flattened_layers = model.flattened_layers
    else:
        flattened_layers = model.layers
    filtered_layers = []
    for layer in flattened_layers:
        weights = layer.weights
        if weights:
            filtered_layers.append(layer)
    flattened_layers = filtered_layers
    layer_names = data['layer_names']
    filtered_layer_names = []
    for name in layer_names:
        weight_dict = data['model_weights'][name]
        weight_names = weight_dict['weight_names']
        if len(weight_names):
            filtered_layer_names.append(name)
    layer_names = filtered_layer_names
    if len(layer_names) != len(flattened_layers):
        raise ValueError('You are trying to load a weight file '
                         'containing ' + str(len(layer_names)) +
                         ' layers into a model with ' +
                         str(len(flattened_layers)) + ' layers.')
    # We batch weight value assignments in a single backend call
    # which provides a speedup in TensorFlow.
    weight_value_tuples = []
    for k, name in enumerate(layer_names):
        weight_dict = data['model_weights'][name]
        weight_names = weight_dict['weight_names']
        weight_values = weight_dict['weight_values']
        layer = flattened_layers[k]
        symbolic_weights = layer.weights
        if len(weight_values) != len(symbolic_weights):
            raise ValueError('Layer #' + str(k) +
                             ' (named "' + layer.name +
                             '" in the current model) was found to '
                             'correspond to layer ' + name +
                             ' in the save file. '
                             'However the new layer ' + layer.name +
                             ' expects ' + str(len(symbolic_weights)) +
                             ' weights, but the saved weights have ' +
                             str(len(weight_values)) +
                             ' elements.')
        if layer.__class__.__name__ == 'Convolution1D':
            # This is for backwards compatibility with
            # the old Conv1D weights format.
            w = weight_values[0]
            shape = w.shape
            if shape[:2] != (layer.filter_length, 1) or shape[3] != layer.nb_filter:
                # Legacy shape:
                # (self.nb_filter, input_dim, self.filter_length, 1)
                assert shape[0] == layer.nb_filter and shape[2:] == (layer.filter_length, 1)
                w = np.transpose(w, (2, 3, 1, 0))
                weight_values[0] = w
        weight_value_tuples += zip(symbolic_weights, weight_values)
    K.batch_set_value(weight_value_tuples)
    # instantiate optimizer
    training_config = data.get('training_config')
    if training_config is None:
        warnings.warn('No training configuration found in save file: '
                      'the model was *not* compiled. Compile it manually.')
        return model
    optimizer_config = training_config['optimizer_config']
    optimizer = optimizer_from_config(optimizer_config)
    # recover loss functions and metrics
    loss = training_config['loss']
    metrics = training_config['metrics']
    sample_weight_mode = training_config['sample_weight_mode']
    loss_weights = training_config['loss_weights']
    # compile model
    model.compile(optimizer=optimizer,
                  loss=loss,
                  metrics=metrics,
                  loss_weights=loss_weights,
                  sample_weight_mode=sample_weight_mode)
    # set optimizer weights
    if 'optimizer_weights' in data:
        # build train function (to get weight updates)
        if isinstance(model, Sequential):
            model.model._make_train_function()
        else:
            model._make_train_function()
        optimizer_weights_dict = data['optimizer_weights']
        optimizer_weight_names = optimizer_weights_dict['weight_names']
        optimizer_weight_values = optimizer_weights_dict['weight_values']
        model.optimizer.set_weights(optimizer_weight_values)
    return model
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="Extension Component" module="erp5.portal_type"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>default_reference</string> </key>
<value> <string>keras_save_load</string> </value>
</item>
<item>
<key> <string>description</string> </key>
<value>
<none/>
</value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>keras_save_load</string> </value>
</item>
<item>
<key> <string>portal_type</string> </key>
<value> <string>Extension Component</string> </value>
</item>
<item>
<key> <string>sid</string> </key>
<value>
<none/>
</value>
</item>
<item>
<key> <string>text_content_error_message</string> </key>
<value>
<tuple/>
</value>
</item>
<item>
<key> <string>text_content_warning_message</string> </key>
<value>
<tuple>
<string>W:182, 4: Unused variable \'optimizer_weight_names\' (unused-variable)</string>
</tuple>
</value>
</item>
<item>
<key> <string>version</string> </key>
<value> <string>erp5</string> </value>
</item>
<item>
<key> <string>workflow_history</string> </key>
<value>
<persistent> <string encoding="base64">AAAAAAAAAAI=</string> </persistent>
</value>
</item>
</dictionary>
</pickle>
</record>
<record id="2" aka="AAAAAAAAAAI=">
<pickle>
<global name="PersistentMapping" module="Persistence.mapping"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>data</string> </key>
<value>
<dictionary>
<item>
<key> <string>component_validation_workflow</string> </key>
<value>
<persistent> <string encoding="base64">AAAAAAAAAAM=</string> </persistent>
</value>
</item>
</dictionary>
</value>
</item>
</dictionary>
</pickle>
</record>
<record id="3" aka="AAAAAAAAAAM=">
<pickle>
<global name="WorkflowHistoryList" module="Products.ERP5Type.patches.WorkflowTool"/>
</pickle>
<pickle>
<tuple>
<none/>
<list>
<dictionary>
<item>
<key> <string>action</string> </key>
<value> <string>validate</string> </value>
</item>
<item>
<key> <string>validation_state</string> </key>
<value> <string>validated</string> </value>
</item>
</dictionary>
</list>
</tuple>
</pickle>
</record>
</ZopeData>
import numpy as np
import time
import sys
import transaction
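# Progbar and ProgbarLogger below are adapted from Keras so that training
# progress is appended to an ERP5 Data Stream (through the output callable)
# instead of being written to stdout.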
class Progbar(object):
    def output(self, data):
        self.output1(str(data))
    def __init__(self, target, width=30, verbose=1, interval=0.01, output=None):
        """Displays a progress bar.
        # Arguments:
            target: Total number of steps expected.
            interval: Minimum visual progress update interval (in seconds).
        """
        self.width = width
        self.target = target
        self.sum_values = {}
        self.unique_values = []
        self.start = time.time()
        self.last_update = 0
        self.interval = interval
        self.total_width = 0
        self.seen_so_far = 0
        self.verbose = verbose
        self.output1 = output
    def update(self, current, values=[], force=False):
        """Updates the progress bar.
        # Arguments
            current: Index of current step.
            values: List of tuples (name, value_for_last_step).
                The progress bar will display averages for these values.
            force: Whether to force visual progress update.
        """
        for k, v in values:
            if k not in self.sum_values:
                self.sum_values[k] = [v * (current - self.seen_so_far),
                                      current - self.seen_so_far]
                self.unique_values.append(k)
            else:
                self.sum_values[k][0] += v * (current - self.seen_so_far)
                self.sum_values[k][1] += (current - self.seen_so_far)
        self.seen_so_far = current
        now = time.time()
        if self.verbose == 1:
            if not force and (now - self.last_update) < self.interval:
                return
            prev_total_width = self.total_width
            #self.output('\b' * prev_total_width)
            self.output('\r')
            numdigits = int(np.floor(np.log10(self.target))) + 1
            barstr = '%%%dd/%%%dd [' % (numdigits, numdigits)
            bar = barstr % (current, self.target)
            prog = float(current) / self.target
            prog_width = int(self.width * prog)
            if prog_width > 0:
                bar += ('=' * (prog_width - 1))
                if current < self.target:
                    bar += '>'
                else:
                    bar += '='
            bar += ('.' * (self.width - prog_width))
            bar += ']'
            self.output(bar)
            self.total_width = len(bar)
            if current:
                time_per_unit = (now - self.start) / current
            else:
                time_per_unit = 0
            eta = time_per_unit * (self.target - current)
            info = ''
            if current < self.target:
                info += ' - ETA: %ds' % eta
            else:
                info += ' - %ds' % (now - self.start)
            for k in self.unique_values:
                info += ' - %s:' % k
                if isinstance(self.sum_values[k], list):
                    avg = self.sum_values[k][0] / max(1, self.sum_values[k][1])
                    if abs(avg) > 1e-3:
                        info += ' %.4f' % avg
                    else:
                        info += ' %.4e' % avg
                else:
                    info += ' %s' % self.sum_values[k]
            self.total_width += len(info)
            if prev_total_width > self.total_width:
                info += ((prev_total_width - self.total_width) * ' ')
            self.output(info)
            if current >= self.target:
                self.output('\r\n')
        if self.verbose == 2:
            if current >= self.target:
                info = '%ds' % (now - self.start)
                for k in self.unique_values:
                    info += ' - %s:' % k
                    avg = self.sum_values[k][0] / max(1, self.sum_values[k][1])
                    if avg > 1e-3:
                        info += ' %.4f' % avg
                    else:
                        info += ' %.4e' % avg
                self.output(info + "\r\n")
        self.last_update = now
    def add(self, n, values=[]):
        self.update(self.seen_so_far + n, values)
from keras.callbacks import ProgbarLogger as OriginalProgbarLogger
class ProgbarLogger(OriginalProgbarLogger):
    def __init__(self, output, verbose=0):
        self.output = output
        self.verbose = verbose
    def on_epoch_begin(self, epoch, logs=None):
        if self.verbose:
            self.output('Epoch %d/%d\r\n' % (epoch + 1, self.nb_epoch))
        self.progbar = Progbar(target=self.params['nb_sample'],
                               verbose=1, output=self.output)
        self.seen = 0
    def on_epoch_end(self, epoch, logs=None):
        super(ProgbarLogger, self).on_epoch_end(epoch, logs)
        if epoch % 10 == 0:
            transaction.commit()
seed = 7
np.random.seed(seed)
from cStringIO import StringIO
import cPickle
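# save/load persist and restore the pickled model dict in the
# data_stream_module/wendelin_examples_keras_nn Data Stream.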
def save(portal, value):
    data_stream = portal.data_stream_module.wendelin_examples_keras_nn
    data_stream.edit(file=StringIO(cPickle.dumps(value)))
def load(portal):
    data_stream = portal.data_stream_module.wendelin_examples_keras_nn
    data = data_stream.getData()
    if data:
        return cPickle.loads(data)
    else:
        return None
def train(portal):
    # This is just a demo of keras.
    # 1. you can use keras.
    # 2. you can save a trained model.
    # 3. you can load a trained model.
    from cStringIO import StringIO
    import tensorflow as tf
    sess = tf.Session()
    from keras import backend as K
    K.set_session(sess)
    stream = portal.data_stream_module.wendelin_examples_keras_log
    def output(value):
        stream.appendData(value)
    saved_model_data = load(portal)
    if saved_model_data is not None:
        model = portal.keras_load_model(saved_model_data)
    else:
        from keras.models import Sequential
        from keras.layers import Dense
        model = Sequential()
        model.add(Dense(12, input_dim=8, init='uniform', activation='relu'))
        model.add(Dense(8, init='uniform', activation='relu'))
        model.add(Dense(1, init='uniform', activation='sigmoid'))
        model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
    dataset = np.loadtxt(StringIO(str(portal.portal_skins.erp5_wendelin_examples_keras['pima.csv'])), delimiter=',')
    X = dataset[:, 0:8]
    Y = dataset[:, 8]
    model.fit(X, Y, nb_epoch=20, batch_size=10, callbacks=[ProgbarLogger(output)])
    scores = model.evaluate(X, Y)
    output('%s: %.2f%%' % (model.metrics_names[1], scores[1] * 100))
    model_dict = portal.keras_save_model(model)
    K.clear_session()
    save(portal, model_dict)
    return model_dict
from keras.applications.vgg16 import VGG16, preprocess_input, decode_predictions
from keras.preprocessing import image
import numpy as np
from cStringIO import StringIO
import PIL.Image
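# The pre-trained VGG16 ImageNet model is loaded once at module import time,
# so repeated predict() calls reuse the same weights.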
model = VGG16(weights='imagenet')
def predict(image_document):
    img = PIL.Image.open(StringIO(image_document.getData()))
    img = img.resize((224, 224))
    x = image.img_to_array(img)
    x = np.expand_dims(x, axis=0)
    preds = model.predict(preprocess_input(x))
    results = decode_predictions(preds, top=5)[0]
    return results
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="Extension Component" module="erp5.portal_type"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>_recorded_property_dict</string> </key>
<value>
<persistent> <string encoding="base64">AAAAAAAAAAI=</string> </persistent>
</value>
</item>
<item>
<key> <string>default_reference</string> </key>
<value> <string>keras_vgg16_predict</string> </value>
</item>
<item>
<key> <string>description</string> </key>
<value>
<none/>
</value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>keras_vgg16_predict</string> </value>
</item>
<item>
<key> <string>portal_type</string> </key>
<value> <string>Extension Component</string> </value>
</item>
<item>
<key> <string>sid</string> </key>
<value>
<none/>
</value>
</item>
<item>
<key> <string>text_content_error_message</string> </key>
<value>
<tuple/>
</value>
</item>
<item>
<key> <string>text_content_warning_message</string> </key>
<value>
<tuple/>
</value>
</item>
<item>
<key> <string>version</string> </key>
<value> <string>erp5</string> </value>
</item>
<item>
<key> <string>workflow_history</string> </key>
<value>
<persistent> <string encoding="base64">AAAAAAAAAAM=</string> </persistent>
</value>
</item>
</dictionary>
</pickle>
</record>
<record id="2" aka="AAAAAAAAAAI=">
<pickle>
<global name="PersistentMapping" module="Persistence.mapping"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>data</string> </key>
<value>
<dictionary/>
</value>
</item>
</dictionary>
</pickle>
</record>
<record id="3" aka="AAAAAAAAAAM=">
<pickle>
<global name="PersistentMapping" module="Persistence.mapping"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>data</string> </key>
<value>
<dictionary>
<item>
<key> <string>component_validation_workflow</string> </key>
<value>
<persistent> <string encoding="base64">AAAAAAAAAAQ=</string> </persistent>
</value>
</item>
</dictionary>
</value>
</item>
</dictionary>
</pickle>
</record>
<record id="4" aka="AAAAAAAAAAQ=">
<pickle>
<global name="WorkflowHistoryList" module="Products.ERP5Type.patches.WorkflowTool"/>
</pickle>
<pickle>
<tuple>
<none/>
<list>
<dictionary>
<item>
<key> <string>action</string> </key>
<value> <string>validate</string> </value>
</item>
<item>
<key> <string>validation_state</string> </key>
<value> <string>validated</string> </value>
</item>
</dictionary>
</list>
</tuple>
</pickle>
</record>
</ZopeData>
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="Folder" module="OFS.Folder"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>_objects</string> </key>
<value>
<tuple/>
</value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>erp5_wendelin_examples_keras</string> </value>
</item>
<item>
<key> <string>title</string> </key>
<value> <string></string> </value>
</item>
</dictionary>
</pickle>
</record>
</ZopeData>
You need a Wendelin instance that contains Keras.
Use this software release to build it:
https://lab.nexedi.com/nexedi/slapos/raw/master/software/wendelin/software-kerastensorflow.cfg

call_keras_vgg16_predict
------------------------
You can use a pre-trained neural network (VGG16 with ImageNet weights) for image classification, as in the sketch below.
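For example, from a Python Script (a sketch; 'my_image' stands for an
existing document id in image_module):

  for entry in context.keras_vgg16_predict(context.image_module['my_image']):
    print entry  # an (imagenet class id, class name, score) tuple
  return printed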

call_train_keras and call_read_keras_log
----------------------------------------
You can train, save, and load a neural network. Calling call_train_keras runs a model training and saves the trained model in a data stream; the next time you call it, it loads the model from the data stream and continues training. You can use call_read_keras_log to read the Keras output, as in the sketch below.
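A minimal round trip from a Python Script (a sketch using the script names
installed by this business template; call_read_keras_log is assumed to return
the log text collected in the wendelin_examples_keras_log data stream):

  context.call_train_keras()  # first run: build, train and save the model
  context.call_train_keras()  # later runs: reload the model and continue training
  return context.call_read_keras_log()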
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="File" module="OFS.Image"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>__name__</string> </key>
<value> <string>README.erp5_wendelin_examples_keras.txt</string> </value>
</item>
<item>
<key> <string>content_type</string> </key>
<value> <string>text/plain</string> </value>
</item>
<item>
<key> <string>precondition</string> </key>
<value> <string></string> </value>
</item>
<item>
<key> <string>title</string> </key>
<value> <string></string> </value>
</item>
</dictionary>
</pickle>
</record>
</ZopeData>
for i in context.keras_vgg16_predict(context.image_module[image_document_id]):
  print i
return printed
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="PythonScript" module="Products.PythonScripts.PythonScript"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>Script_magic</string> </key>
<value> <int>3</int> </value>
</item>
<item>
<key> <string>_bind_names</string> </key>
<value>
<object>
<klass>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
</klass>
<tuple/>
<state>
<dictionary>
<item>
<key> <string>_asgns</string> </key>
<value>
<dictionary>
<item>
<key> <string>name_container</string> </key>
<value> <string>container</string> </value>
</item>
<item>
<key> <string>name_context</string> </key>
<value> <string>context</string> </value>
</item>
<item>
<key> <string>name_m_self</string> </key>
<value> <string>script</string> </value>
</item>
<item>
<key> <string>name_subpath</string> </key>
<value> <string>traverse_subpath</string> </value>
</item>
</dictionary>
</value>
</item>
</dictionary>
</state>
</object>
</value>
</item>
<item>
<key> <string>_params</string> </key>
<value> <string>image_document_id</string> </value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>call_keras_vgg16_predict</string> </value>
</item>
</dictionary>
</pickle>
</record>
</ZopeData>
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="PythonScript" module="Products.PythonScripts.PythonScript"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>Script_magic</string> </key>
<value> <int>3</int> </value>
</item>
<item>
<key> <string>_bind_names</string> </key>
<value>
<object>
<klass>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
</klass>
<tuple/>
<state>
<dictionary>
<item>
<key> <string>_asgns</string> </key>
<value>
<dictionary>
<item>
<key> <string>name_container</string> </key>
<value> <string>container</string> </value>
</item>
<item>
<key> <string>name_context</string> </key>
<value> <string>context</string> </value>
</item>
<item>
<key> <string>name_m_self</string> </key>
<value> <string>script</string> </value>
</item>
<item>
<key> <string>name_subpath</string> </key>
<value> <string>traverse_subpath</string> </value>
</item>
</dictionary>
</value>
</item>
</dictionary>
</state>
</object>
</value>
</item>
<item>
<key> <string>_params</string> </key>
<value> <string></string> </value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>call_read_keras_log</string> </value>
</item>
</dictionary>
</pickle>
</record>
</ZopeData>
return context.organisation_module.keras_train_model(context.getPortalObject())
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="PythonScript" module="Products.PythonScripts.PythonScript"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>Script_magic</string> </key>
<value> <int>3</int> </value>
</item>
<item>
<key> <string>_bind_names</string> </key>
<value>
<object>
<klass>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
</klass>
<tuple/>
<state>
<dictionary>
<item>
<key> <string>_asgns</string> </key>
<value>
<dictionary>
<item>
<key> <string>name_container</string> </key>
<value> <string>container</string> </value>
</item>
<item>
<key> <string>name_context</string> </key>
<value> <string>context</string> </value>
</item>
<item>
<key> <string>name_m_self</string> </key>
<value> <string>script</string> </value>
</item>
<item>
<key> <string>name_subpath</string> </key>
<value> <string>traverse_subpath</string> </value>
</item>
</dictionary>
</value>
</item>
</dictionary>
</state>
</object>
</value>
</item>
<item>
<key> <string>_params</string> </key>
<value> <string></string> </value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>call_train_keras</string> </value>
</item>
</dictionary>
</pickle>
</record>
</ZopeData>
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="ExternalMethod" module="Products.ExternalMethod.ExternalMethod"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>_function</string> </key>
<value> <string>load_model</string> </value>
</item>
<item>
<key> <string>_module</string> </key>
<value> <string>keras_save_load</string> </value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>keras_load_model</string> </value>
</item>
<item>
<key> <string>title</string> </key>
<value> <string></string> </value>
</item>
</dictionary>
</pickle>
</record>
</ZopeData>
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="ExternalMethod" module="Products.ExternalMethod.ExternalMethod"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>_function</string> </key>
<value> <string>save_model</string> </value>
</item>
<item>
<key> <string>_module</string> </key>
<value> <string>keras_save_load</string> </value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>keras_save_model</string> </value>
</item>
<item>
<key> <string>title</string> </key>
<value> <string></string> </value>
</item>
</dictionary>
</pickle>
</record>
</ZopeData>
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="ExternalMethod" module="Products.ExternalMethod.ExternalMethod"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>_function</string> </key>
<value> <string>train</string> </value>
</item>
<item>
<key> <string>_module</string> </key>
<value> <string>keras_train_model</string> </value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>keras_train_model</string> </value>
</item>
<item>
<key> <string>title</string> </key>
<value> <string></string> </value>
</item>
</dictionary>
</pickle>
</record>
</ZopeData>
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="ExternalMethod" module="Products.ExternalMethod.ExternalMethod"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>_function</string> </key>
<value> <string>predict</string> </value>
</item>
<item>
<key> <string>_module</string> </key>
<value> <string>keras_vgg16_predict</string> </value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>keras_vgg16_predict</string> </value>
</item>
<item>
<key> <string>title</string> </key>
<value> <string></string> </value>
</item>
</dictionary>
</pickle>
</record>
</ZopeData>
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="File" module="OFS.Image"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>__name__</string> </key>
<value> <string>pima.csv</string> </value>
</item>
<item>
<key> <string>content_type</string> </key>
<value> <string>text/csv</string> </value>
</item>
<item>
<key> <string>precondition</string> </key>
<value> <string></string> </value>
</item>
<item>
<key> <string>title</string> </key>
<value> <string></string> </value>
</item>
</dictionary>
</pickle>
</record>
</ZopeData>
Primitive examples of Keras in wendelin.
https://keras.io/
keras_save_load
keras_train_model
keras_vgg16_predict
erp5_wendelin_examples_keras
erp5_wendelin_examples_keras
0.1