tensorflow_federated/python/core/impl/executors/executor_serialization.py [322:434]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def _deserialize_dataset_from_zipped_saved_model(serialized_bytes):
  """Deserializes a zipped SavedModel `bytes` object to a `tf.data.Dataset`.

  DEPRECATED: this method has been replaced by
  `_deserialize_dataset_from_graph_def`.

  Args:
    serialized_bytes: `bytes` object produced by older versions of
      `tensorflow_serialization.serialize_dataset` that emitted zipped
      SavedModel `bytes` strings.

  Returns:
    A `tf.data.Dataset` instance.

  Raises:
    DatasetSerializationError: if there was an error in TensorFlow during
      deserialization.
  """
  py_typecheck.check_type(serialized_bytes, bytes)
  temp_dir = tempfile.mkdtemp('dataset')
  fd, temp_zip = tempfile.mkstemp('zip')
  os.close(fd)
  try:
    with open(temp_zip, 'wb') as f:
      f.write(serialized_bytes)
    with zipfile.ZipFile(temp_zip, 'r') as z:
      z.extractall(path=temp_dir)
    loaded = tf.saved_model.load(temp_dir)
    # TODO(b/156302055): Follow up when the bug is resolved; either remove the
    # CPU device placement if this call stops failing by default, or keep it
    # if this is working as intended.
    with tf.device('cpu'):
      ds = loaded.dataset_fn()
  except Exception as e:  # pylint: disable=broad-except
    raise DatasetSerializationError(
        'Error deserializing tff.Sequence value. Inner error: {!s}'.format(
            e)) from e
  finally:
    tf.io.gfile.rmtree(temp_dir)
    tf.io.gfile.remove(temp_zip)
  return ds
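

# Illustrative sketch only, not part of this module: approximately how the
# legacy zipped-SavedModel `bytes` consumed above were produced. The module
# layout (a tracked dataset exposed through a no-argument `dataset_fn`) is an
# assumption inferred from the loading code above, not a definitive account of
# the old `tensorflow_serialization.serialize_dataset`.
def _sketch_serialize_dataset_to_zipped_saved_model(dataset):
  """Zips a SavedModel exposing `dataset_fn` and returns its bytes."""
  module = tf.Module()
  module.dataset = dataset  # Track the dataset so the SavedModel captures it.
  module.dataset_fn = tf.function(lambda: module.dataset, input_signature=())
  export_dir = tempfile.mkdtemp('dataset_export')
  fd, temp_zip = tempfile.mkstemp('zip')
  os.close(fd)
  try:
    tf.saved_model.save(module, export_dir)
    with zipfile.ZipFile(temp_zip, 'w') as z:
      for root, _, files in os.walk(export_dir):
        for name in files:
          path = os.path.join(root, name)
          z.write(path, arcname=os.path.relpath(path, export_dir))
    with open(temp_zip, 'rb') as f:
      return f.read()
  finally:
    tf.io.gfile.rmtree(export_dir)
    tf.io.gfile.remove(temp_zip)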


def _deserialize_dataset_from_graph_def(serialized_graph_def: bytes,
                                        element_type: computation_types.Type):
  """Deserializes a serialized `tf.compat.v1.GraphDef` to a `tf.data.Dataset`.

  Args:
    serialized_graph_def: `bytes` object produced by
      `tensorflow_serialization.serialize_dataset`.
    element_type: a `tff.Type` object representing the type structure of the
      elements yielded from the dataset.

  Returns:
    A `tf.data.Dataset` instance.
  """
  py_typecheck.check_type(element_type, computation_types.Type)
  type_analysis.check_tensorflow_compatible_type(element_type)

  def transform_to_tff_known_type(
      type_spec: computation_types.Type) -> Tuple[computation_types.Type, bool]:
    """Transforms `StructType` to `StructWithPythonType`."""
    if type_spec.is_struct() and not type_spec.is_struct_with_python():
      field_is_named = tuple(
          name is not None for name, _ in structure.iter_elements(type_spec))
      has_names = any(field_is_named)
      is_all_named = all(field_is_named)
      if is_all_named:
        return computation_types.StructWithPythonType(
            elements=structure.iter_elements(type_spec),
            container_type=collections.OrderedDict), True
      elif not has_names:
        return computation_types.StructWithPythonType(
            elements=structure.iter_elements(type_spec),
            container_type=tuple), True
      else:
        raise TypeError('Cannot represent TFF type in TF because it contains '
                        f'partially named structures. Type: {type_spec}')
    return type_spec, False

  if element_type.is_struct():
    # TF doesn't support `structure.Struct` types, so we must transform the
    # `StructType` into a `StructWithPythonType` for use as the
    # `tf.data.Dataset.element_spec` later.
    tf_compatible_type, _ = type_transformations.transform_type_postorder(
        element_type, transform_to_tff_known_type)
  else:
    # We've already checked that `element_type` is either a struct or a tensor,
    # so at this point it must be a `TensorType` and can be used as-is.
    tf_compatible_type = element_type

  def type_to_tensorspec(t: computation_types.TensorType) -> tf.TensorSpec:
    return tf.TensorSpec(shape=t.shape, dtype=t.dtype)

  element_spec = type_conversions.structure_from_tensor_type_tree(
      type_to_tensorspec, tf_compatible_type)
  ds = tf.data.experimental.from_variant(
      tf.raw_ops.DatasetFromGraph(graph_def=serialized_graph_def),
      structure=element_spec)
  # If a serialized dataset had elements that were nested structures of
  # tensors (e.g. `dict`, `OrderedDict`), the deserialized dataset will yield
  # `dict`, `tuple`, or `namedtuple` elements (the `collections.OrderedDict`
  # container is lost in the conversion).
  #
  # Since the dataset will only be used inside TFF, we wrap the dictionary
  # coming from TF in an `OrderedDict` when necessary (a type that both TF and
  # TFF understand), using the field order recorded in the TFF type during
  # serialization.
  return tensorflow_utils.coerce_dataset_elements_to_tff_type_spec(
      ds, tf_compatible_type)
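

# Illustrative round-trip sketch only, not part of this module: one way to
# produce `serialized_graph_def` bytes that the function above can consume,
# assuming the wire format is the serialized `GraphDef` emitted by
# `tf.raw_ops.DatasetToGraphV2`. The exact options used by
# `tensorflow_serialization.serialize_dataset` may differ.
def _sketch_graph_def_round_trip() -> tf.data.Dataset:
  dataset = tf.data.Dataset.range(5)  # Yields scalar int64 elements.
  variant = tf.data.experimental.to_variant(dataset)
  graph_def_bytes = tf.raw_ops.DatasetToGraphV2(
      input_dataset=variant).numpy()
  element_type = computation_types.TensorType(tf.int64)
  return _deserialize_dataset_from_graph_def(graph_def_bytes, element_type)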


@tracing.trace
def _deserialize_sequence_value(
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



tensorflow_federated/python/core/impl/executors/value_serialization.py [334:446]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def _deserialize_dataset_from_zipped_saved_model(serialized_bytes):
  """Deserializes a zipped SavedModel `bytes` object to a `tf.data.Dataset`.

  DEPRECATED: this method has been replaced by
  `_deserialize_dataset_from_graph_def`.

  Args:
    serialized_bytes: `bytes` object produced by older versions of
      `tensorflow_serialization.serialize_dataset` that emitted zipped
      SavedModel `bytes` strings.

  Returns:
    A `tf.data.Dataset` instance.

  Raises:
    DatasetSerializationError: if there was an error in TensorFlow during
      deserialization.
  """
  py_typecheck.check_type(serialized_bytes, bytes)
  temp_dir = tempfile.mkdtemp('dataset')
  fd, temp_zip = tempfile.mkstemp('zip')
  os.close(fd)
  try:
    with open(temp_zip, 'wb') as f:
      f.write(serialized_bytes)
    with zipfile.ZipFile(temp_zip, 'r') as z:
      z.extractall(path=temp_dir)
    loaded = tf.saved_model.load(temp_dir)
    # TODO(b/156302055): Follow up when the bug is resolved; either remove the
    # CPU device placement if this call stops failing by default, or keep it
    # if this is working as intended.
    with tf.device('cpu'):
      ds = loaded.dataset_fn()
  except Exception as e:  # pylint: disable=broad-except
    raise DatasetSerializationError(
        'Error deserializing tff.Sequence value. Inner error: {!s}'.format(
            e)) from e
  finally:
    tf.io.gfile.rmtree(temp_dir)
    tf.io.gfile.remove(temp_zip)
  return ds
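

# Hypothetical usage sketch only, not part of this module: consuming legacy
# bytes through the deprecated path above. `legacy_bytes` is assumed to hold a
# zipped SavedModel produced by an older `serialize_dataset`.
def _sketch_read_legacy_sequence(legacy_bytes: bytes):
  ds = _deserialize_dataset_from_zipped_saved_model(legacy_bytes)
  # Materialize a few elements to confirm the dataset round-trips.
  return list(ds.take(3).as_numpy_iterator())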


def _deserialize_dataset_from_graph_def(serialized_graph_def: bytes,
                                        element_type: computation_types.Type):
  """Deserializes a serialized `tf.compat.v1.GraphDef` to a `tf.data.Dataset`.

  Args:
    serialized_graph_def: `bytes` object produced by
      `tensorflow_serialization.serialize_dataset`.
    element_type: a `tff.Type` object representing the type structure of the
      elements yielded from the dataset.

  Returns:
    A `tf.data.Dataset` instance.
  """
  py_typecheck.check_type(element_type, computation_types.Type)
  type_analysis.check_tensorflow_compatible_type(element_type)

  def transform_to_tff_known_type(
      type_spec: computation_types.Type) -> Tuple[computation_types.Type, bool]:
    """Transforms `StructType` to `StructWithPythonType`."""
    if type_spec.is_struct() and not type_spec.is_struct_with_python():
      field_is_named = tuple(
          name is not None for name, _ in structure.iter_elements(type_spec))
      has_names = any(field_is_named)
      is_all_named = all(field_is_named)
      if is_all_named:
        return computation_types.StructWithPythonType(
            elements=structure.iter_elements(type_spec),
            container_type=collections.OrderedDict), True
      elif not has_names:
        return computation_types.StructWithPythonType(
            elements=structure.iter_elements(type_spec),
            container_type=tuple), True
      else:
        raise TypeError('Cannot represent TFF type in TF because it contains '
                        f'partially named structures. Type: {type_spec}')
    return type_spec, False

  if element_type.is_struct():
    # TF doesn't support `structure.Struct` types, so we must transform the
    # `StructType` into a `StructWithPythonType` for use as the
    # `tf.data.Dataset.element_spec` later.
    tf_compatible_type, _ = type_transformations.transform_type_postorder(
        element_type, transform_to_tff_known_type)
  else:
    # We've already checked that `element_type` is either a struct or a tensor,
    # so at this point it must be a `TensorType` and can be used as-is.
    tf_compatible_type = element_type

  def type_to_tensorspec(t: computation_types.TensorType) -> tf.TensorSpec:
    return tf.TensorSpec(shape=t.shape, dtype=t.dtype)

  element_spec = type_conversions.structure_from_tensor_type_tree(
      type_to_tensorspec, tf_compatible_type)
  ds = tf.data.experimental.from_variant(
      tf.raw_ops.DatasetFromGraph(graph_def=serialized_graph_def),
      structure=element_spec)
  # If a serialized dataset had elements that were nested structures of
  # tensors (e.g. `dict`, `OrderedDict`), the deserialized dataset will yield
  # `dict`, `tuple`, or `namedtuple` elements (the `collections.OrderedDict`
  # container is lost in the conversion).
  #
  # Since the dataset will only be used inside TFF, we wrap the dictionary
  # coming from TF in an `OrderedDict` when necessary (a type that both TF and
  # TFF understand), using the field order recorded in the TFF type during
  # serialization.
  return tensorflow_utils.coerce_dataset_elements_to_tff_type_spec(
      ds, tf_compatible_type)
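

# Illustrative sketch only, not part of this module: the `element_spec` built
# above for a fully named TFF struct type. The concrete type below is an
# assumption made for this sketch.
def _sketch_element_spec_from_struct_type():
  struct_type = computation_types.StructWithPythonType(
      [('x', computation_types.TensorType(tf.int64)),
       ('y', computation_types.TensorType(tf.float32, [3]))],
      collections.OrderedDict)
  # Mirrors the `structure_from_tensor_type_tree` call above; expected to be
  # roughly:
  #   OrderedDict(x=TensorSpec([], tf.int64), y=TensorSpec([3], tf.float32))
  return type_conversions.structure_from_tensor_type_tree(
      lambda t: tf.TensorSpec(shape=t.shape, dtype=t.dtype), struct_type)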


@tracing.trace
def _deserialize_sequence_value(
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



