in tensorflow_examples/lite/model_maker/core/task/hub_loader.py [0:0]
def _setup_layer_v1(self, trainable=False, **kwargs):
"""Constructs keras layer with relevant weights and losses."""
# Initialize an empty layer, then add_weight() etc. as needed.
super(hub.KerasLayer, self).__init__(trainable=trainable, **kwargs)
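# Note: super(hub.KerasLayer, self) skips hub.KerasLayer.__init__ and resolves
# to the next class in the MRO (tf.keras.layers.Layer for a direct subclass),
# so the layer starts empty and its weights are attached explicitly below.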
if not self._is_hub_module_v1:
raise ValueError(
'This function only supports setting up TF1 hub modules.')
# First, register any variables exposed via the v2-style trainable_variables attribute.
if hasattr(self._func, 'trainable_variables'):
for v in self._func.trainable_variables:
self._add_existing_weight(v, trainable=True)
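# _add_existing_weight() (inherited from hub.KerasLayer) registers an
# already-created tf.Variable with this layer via add_weight(), rather than
# creating a new variable, so it shows up in the layer's weight collections.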
trainable_variables = {id(v) for v in self._func.trainable_variables}
else:
trainable_variables = set()
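# trainable_variables holds the id()s of variables registered so far, so the
# checkpoint-dependency walk below doesn't add the same variable twice.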
if not hasattr(self._func, '_self_unconditional_checkpoint_dependencies'):
raise ValueError('_func doesn\'t contain the attribute '
'_self_unconditional_checkpoint_dependencies.')
dependencies = self._func._self_unconditional_checkpoint_dependencies # pylint: disable=protected-access
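# Each dependency entry exposes a `name` and a `ref`; the entry named
# 'variables' references the TF1 module's variable collection.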
# Adds trainable variables.
for dep in dependencies:
if dep.name == 'variables':
for v in dep.ref:
if id(v) not in trainable_variables:
self._add_existing_weight(v, trainable=True)
trainable_variables.add(id(v))
# Adds non-trainable variables.
if hasattr(self._func, 'variables'):
for v in self._func.variables:
if id(v) not in trainable_variables:
self._add_existing_weight(v, trainable=False)
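# Registering these with trainable=False still lets Keras track and
# checkpoint them as part of the layer, without exposing them to gradients.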
# Forward the callable's regularization losses (if any).
if hasattr(self._func, 'regularization_losses'):
for l in self._func.regularization_losses:
if not callable(l):
raise ValueError(
'hub.KerasLayer(obj) expects obj.regularization_losses to be an '
'iterable of callables, each returning a scalar loss term.')
self.add_loss(self._call_loss_if_trainable(l)) # Supports callables.
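# _call_loss_if_trainable() (from hub.KerasLayer) wraps each loss callable so
# its value is only applied while the layer is trainable.
#
# Illustrative usage sketch (not part of this module): assuming a
# hub.KerasLayer subclass, called HubKerasLayerV1 here purely for illustration,
# that routes TF1 modules through _setup_layer_v1, fine-tuning could look like:
#
#   import tensorflow as tf
#
#   layer = HubKerasLayerV1('<tf1-hub-module-handle>', trainable=True)
#   model = tf.keras.Sequential([
#       tf.keras.layers.InputLayer(input_shape=(224, 224, 3)),
#       layer,
#       tf.keras.layers.Dense(5, activation='softmax'),
#   ])
#   model.compile(optimizer='adam', loss='sparse_categorical_crossentropy',
#                 metrics=['accuracy'])
#   # Because the module's variables were registered via
#   # _add_existing_weight(..., trainable=True), they appear in
#   # model.trainable_variables and are updated by model.fit().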