in keras/engine/saving.py [0:0]
def _serialize_model(model, f, include_optimizer=True):
"""Model serialization logic.
This method is used for both writing to HDF5 file/group,
as well as pickling. This is achieved via a
`keras.utils.hdf5_utls.H5Dict` object, which can wrap HDF5
files, groups and dicts with a common API.
# Arguments
model: Keras model instance to be serialized.
f: keras.utils.io_utils.HD5Dict instance.
include_optimizer: If True, serialize optimizer's state together.
"""
def get_json_type(obj):
"""Serialize any object to a JSON-serializable structure.
# Arguments
obj: the object to serialize
# Returns
JSON-serializable structure representing `obj`.
# Raises
TypeError: if `obj` cannot be serialized.
"""
# if obj is a serializable Keras class instance
# e.g. optimizer, layer
if hasattr(obj, 'get_config'):
return {'class_name': obj.__class__.__name__,
'config': obj.get_config()}
# if obj is any numpy type
if type(obj).__module__ == np.__name__:
if isinstance(obj, np.ndarray):
return obj.tolist()
else:
return obj.item()
# misc functions (e.g. loss function)
if callable(obj):
return obj.__name__
# if obj is a python 'type'
if type(obj).__name__ == type.__name__:
return obj.__name__
raise TypeError('Not JSON Serializable: %s' % (obj,))
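# Record the Keras version and backend that produced this file.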
from .. import __version__ as keras_version
f['keras_version'] = str(keras_version).encode('utf8')
f['backend'] = K.backend().encode('utf8')
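# Serialize the model architecture (class name plus `get_config()` output)
# as UTF-8 encoded JSON.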
model_config = {}
model_config['class_name'] = model.__class__.__name__
model_config['config'] = model.get_config()
model_config = json.dumps(model_config, default=get_json_type)
model_config = model_config.encode('utf-8')
f['model_config'] = model_config
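# Store per-layer weights under the 'model_weights' group, together with
# the layer names and the backend/Keras version that produced them.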
model_weights_group = f['model_weights']
model_layers = model.layers
model_weights_group['layer_names'] = [layer.name.encode('utf8')
for layer in model_layers]
model_weights_group['backend'] = K.backend().encode('utf8')
model_weights_group['keras_version'] = str(keras_version).encode('utf8')
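# For each layer, fetch the current weight values from the backend in a
# single batched call and record them under unique per-weight names.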
for layer in model_layers:
layer_group = model_weights_group[layer.name]
symbolic_weights = layer.weights
weight_values = K.batch_get_value(symbolic_weights)
weight_names = []
for i, (w, val) in enumerate(zip(symbolic_weights, weight_values)):
if hasattr(w, 'name') and w.name:
name = str(w.name)
else:
name = 'param_' + str(i)
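# Disambiguate duplicate weight names by appending a numeric suffix.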
if name in weight_names:
idx = 2
unique_name = name + '_1'
while unique_name in weight_names:
unique_name = name + '_' + str(idx)
idx += 1
name = unique_name
weight_names.append(name.encode('utf8'))
layer_group['weight_names'] = weight_names
for name, val in zip(weight_names, weight_values):
layer_group[name] = val
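# Optionally serialize the optimizer: its configuration under
# 'training_config' and, when available, its weight values.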
if include_optimizer and model.optimizer:
if isinstance(model.optimizer, optimizers.TFOptimizer):
warnings.warn(
'TensorFlow optimizers do not '
'make it possible to access '
'optimizer attributes or optimizer state '
'after instantiation. '
'As a result, we cannot save the optimizer '
'as part of the model save file. '
'You will have to compile your model again '
'after loading it. '
'Prefer using a Keras optimizer instead '
'(see keras.io/optimizers).')
else:
f['training_config'] = json.dumps({
'optimizer_config': {
'class_name': model.optimizer.__class__.__name__,
'config': model.optimizer.get_config()
},
'loss': model.loss,
'metrics': model.metrics,
'sample_weight_mode': model.sample_weight_mode,
'loss_weights': model.loss_weights,
}, default=get_json_type).encode('utf8')
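# Save the optimizer's weight values under the 'optimizer_weights' group,
# using the same name-deduplication scheme as for layer weights.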
symbolic_weights = getattr(model.optimizer, 'weights')
if symbolic_weights:
optimizer_weights_group = f['optimizer_weights']
weight_values = K.batch_get_value(symbolic_weights)
weight_names = []
for i, (w, val) in enumerate(zip(symbolic_weights,
weight_values)):
# Theano and CNTK name symbolic weights 'variable' by default,
# so append the index to keep optimizer weight names unique.
if K.backend() == 'theano' or K.backend() == 'cntk':
if hasattr(w, 'name'):
if w.name.split('/')[-1] == 'variable':
name = str(w.name) + '_' + str(i)
else:
name = str(w.name)
else:
name = 'param_' + str(i)
else:
if hasattr(w, 'name') and w.name:
name = str(w.name)
else:
name = 'param_' + str(i)
if name in weight_names:
idx = 2
unique_name = name + '_1'
while unique_name in weight_names:
unique_name = name + '_' + str(idx)
idx += 1
name = unique_name
weight_names.append(name.encode('utf8'))
optimizer_weights_group['weight_names'] = weight_names
for name, val in zip(weight_names, weight_values):
optimizer_weights_group[name] = val
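
# Usage sketch (illustrative, not part of the original module): because
# `H5Dict` can wrap a plain dict as well as an HDF5 file/group, the function
# can serialize a model into an in-memory structure. The demo model below and
# the use of a dict target are assumptions for demonstration only.
if __name__ == '__main__':
    from keras.models import Sequential
    from keras.layers import Dense
    from keras.utils.io_utils import H5Dict

    demo_model = Sequential([Dense(2, input_shape=(3,))])
    demo_model.compile(optimizer='sgd', loss='mse')

    # Serialize into a plain dict instead of an HDF5 file.
    state = {}
    _serialize_model(demo_model, H5Dict(state), include_optimizer=True)
    print(sorted(state.keys()))  # e.g. ['backend', 'keras_version', 'model_config', ...]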