in tensorflow_hub/module.py
@contextlib.contextmanager
def eval_function_for_module(spec, tags=None):
"""Context manager that yields a function to directly evaluate a hub.Module.
Warning: Deprecated. This belongs to the hub.Module API and TF1 Hub format.
For TF2, switch to plain SavedModels and hub.load(). Eager execution in
TF2 obviates the need for this helper.
This creates a separate graph, in which all of the signatures of the module
are instantiated. Then, it creates a session and initializes the module
variables. Finally, it returns a function which can be used to evaluate the
module signatures.
The function returned by eval_function_for_module has the same syntax as
Module.__call__ , except that inputs and outputs are not tensors but actual
values as used with Session.run().
```python
with hub.eval_function_for_module("/tmp/text-embedding") as f:
# The module can be directly evaluated using f without constructing a graph.
embeddings = f(["Hello world!",], signature="mysignature")
```
THIS FUNCTION IS DEPRECATED.
  Args:
    spec: A ModuleSpec defining the Module to instantiate or a path where to
      load a ModuleSpec from via `load_module_spec`.
    tags: A set of strings specifying the graph variant to use.

  Yields:
    A function whose keyword arguments are fed into the tfhub module and which
    returns a dictionary with the values of the output tensors.

  Raises:
    RuntimeError: explaining the reason why it failed to instantiate the
      Module.
    ValueError: if the requested graph variant does not exist.
  """
# We create a separate graph and add all the signatures of the module to it.
original_graph = tf.compat.v1.get_default_graph()
with tf.Graph().as_default():
module = Module(spec, tags=tags)
input_tensors_per_signature = {}
output_tensors_per_signature = {}
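    # Both dicts are keyed by signature name: one holds the input placeholders
    # to feed and the other the output tensors to fetch when `func` (defined
    # below) is evaluated.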
for signature in module.get_signature_names():
# We scope with the signature name as different signatures will likely
# contain tensors with the same name (e.g. the input and output tensors).
with tf.compat.v1.variable_scope(signature):
input_tensors = {}
for name, tensorinfo in module.get_input_info_dict(signature).items():
          if tensorinfo.is_sparse:
            # There's a bug in sparse_placeholder that causes it to break if
            # we pass in `TensorShape(None)` -- work around it by passing in
            # `None` instead.
            shape = tensorinfo.get_shape()
            effective_shape = None if shape.dims is None else shape.as_list()
            input_tensors[name] = tf.compat.v1.sparse_placeholder(
                tensorinfo.dtype, shape=effective_shape, name=name)
          else:
            input_tensors[name] = _spec_to_placeholder(tensorinfo.type_spec,
                                                       name)
input_tensors_per_signature[signature] = input_tensors
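        # Calling the module with as_dict=True instantiates the signature in
        # this graph and returns all of its output tensors keyed by name.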
output_tensors_per_signature[signature] = module(
input_tensors_per_signature[signature],
signature=signature,
as_dict=True)
# Evaluating the tfhub module requires an active tensorflow session.
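    # SingularMonitoredSession runs the initializers of this graph, so the
    # module variables are loaded; it stays open until the caller leaves the
    # enclosing `with` block (see `yield func` below).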
with tf.compat.v1.train.SingularMonitoredSession() as sess:
def func(
inputs=None,
_sentinel=None, # pylint: disable=invalid-name
signature=None,
as_dict=None):
"""Function that directly evaluates a signature in the module."""
signature = signature or "default"
input_tensors = input_tensors_per_signature[signature]
dict_inputs = _prepare_dict_inputs(inputs, input_tensors)
# The input arguments are directly fed into the session.
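        # Values for the sparse placeholders created above can be fed as
        # tf.compat.v1.SparseTensorValue or (indices, values, dense_shape)
        # tuples.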
feed_dict = {
input_tensors[key]: value for key, value in dict_inputs.items()
}
output = output_tensors_per_signature[signature]
output = _prepare_outputs(output, as_dict)
return sess.run(output, feed_dict=feed_dict)
with original_graph.as_default():
# Yield the function since that will keep the session alive until the
# user exits the context.
yield func
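
# A minimal usage sketch (the module path below is hypothetical):
#
#   with eval_function_for_module("/tmp/text-embedding") as f:
#     # All outputs of the "default" signature, keyed by output name.
#     outputs = f(["Hello world!"], as_dict=True)
#     # Just the "default" output of the "default" signature.
#     embedding = f(["Hello world!"])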