def _convert_helper()

in tensorflow/tensorflow/python/ops/parallel_for/pfor.py [0:0]
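This is the core worklist that vectorizes the captured graph one op (or
tensor) at a time. For orientation, a minimal sketch of how the conversion is
typically exercised from the public API (assuming the standard
tf.vectorized_map entry point, which lowers to this pfor conversion; the
function and shapes below are illustrative only):

import tensorflow as tf

def per_example(x):
  # Every op created here is vectorized by the conversion loop below rather
  # than being executed once per example in a Python loop.
  return tf.reduce_sum(x * x)

elems = tf.random.normal([8, 4])
result = tf.vectorized_map(per_example, elems)  # vectorized; shape [8]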


  def _convert_helper(self, op_or_tensor):
    stack = [op_or_tensor]
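    # Worklist-based conversion: an entry is popped only after all of its
    # inputs and control inputs have been converted; any unconverted
    # dependencies get pushed in front of it and are processed first.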
    while stack:
      y = stack[0]
      if y in self._conversion_map:
        assert isinstance(self._conversion_map[y],
                          (WrappedTensor, ops.Operation))
        stack.pop(0)
        continue
      if isinstance(y, ops.Operation):
        assert not y.outputs, (
            "We only support converting Operation objects with no outputs. "
            "Got %s" % y)
        y_op = y
      else:
        assert isinstance(y, ops.Tensor), y
        y_op = y.op

      is_while_loop = y_op.type == "Exit"
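      # ("Exit" ops are the outputs of a tf.while_loop, so y here is a
      # while_loop result.)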
      if is_while_loop:
        while_op = WhileOp(
            y, pfor_ops=self._pfor_ops, pfor_config=self._pfor_config)
        is_inside_loop = while_op.is_inside_loop
        # If all nodes in the while_loop graph were created inside the pfor, we
        # treat the whole loop subgraph as a single op (y_op) and try to convert
        # it. For while_loops created completely or partially outside, we treat
        # them as external and simply return the Exit node output as is,
        # without any conversion. Note that for
        # while_loops that are partially constructed inside, we assume they will
        # be loop invariant. If that is not the case, it will create runtime
        # errors since the converted graph would depend on the self._loop_var
        # placeholder.
        if is_inside_loop:
          y_op = while_op
      else:
        is_inside_loop = self.op_is_inside_loop(y_op)

      # If this op was not created inside the loop body, we will return as is.
      # 1. Convert inputs and control inputs.

      def _add_to_stack(x):
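        # Push x to the front of the worklist if it has not been converted
        # yet. Returns True iff x was scheduled for conversion.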
        if x not in self._conversion_map:
          stack.insert(0, x)
          return True
        else:
          return False

      if is_inside_loop:
        added_to_stack = False
        for inp in y_op.inputs:
          added_to_stack |= _add_to_stack(inp)
        for cinp in y_op.control_inputs:
          if cinp.outputs:
            for t in cinp.outputs:
              added_to_stack |= _add_to_stack(t)
          else:
            added_to_stack |= _add_to_stack(cinp)
        if added_to_stack:
          continue

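        # At this point every input and control input of y_op has an entry in
        # self._conversion_map.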
        converted_inputs = [self._conversion_map[inp] for inp in y_op.inputs]
        some_input_converted = any(self._was_converted(x) for x in y_op.inputs)
        some_input_stacked = any(x.is_stacked for x in converted_inputs)
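        # Note that an input can be converted yet remain unstacked (i.e. loop
        # invariant); only stacked inputs force a vectorizing conversion of
        # y_op below.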

        converted_control_ops = set()
        some_control_input_converted = False
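        # Control inputs that produce outputs are tracked through those output
        # tensors; Operations without outputs are looked up directly.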
        for cinp in y_op.control_inputs:
          if cinp.outputs:
            for t in cinp.outputs:
              converted_t = self._conversion_map[t]
              if self._was_converted(t):
                some_control_input_converted = True
              converted_control_ops.add(converted_t.t.op)
          else:
            converted_cinp = self._conversion_map[cinp]
            assert isinstance(converted_cinp, ops.Operation)
            if converted_cinp != cinp:
              some_control_input_converted = True
            converted_control_ops.add(converted_cinp)
        converted_control_ops = list(converted_control_ops)
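        # Stateful ops never take the reuse / simple-recreate paths below;
        # they are always sent to a registered converter (or the fallback).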
        is_stateful = _is_stateful_pfor_op(y_op)
      else:
        converted_inputs = []
        converted_control_ops = []
      logging.vlog(3, "converting op:%s\ninputs:%s\ncontrol_inputs:%s", y_op,
                   converted_inputs, converted_control_ops)

      # 2. Convert y_op
      # If converting a while_loop, we let the while_loop converter deal with
      # putting the control dependencies appropriately.
      control_dependencies = [] if is_while_loop else converted_control_ops
      with ops.control_dependencies(control_dependencies), ops.name_scope(
          y_op.name + "/pfor/"):
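        # The conversion falls into one of four cases: a reduction
        # placeholder, a loop-invariant op returned as is, an op recreated
        # with unstacked converted inputs, or dispatch to a registered per-op
        # converter.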
        # Op is a placeholder for a reduction.
        if (self._pfor_config is not None and
            self._pfor_config._lookup_reduction(y) is not None):
          # Handle reductions. Map the placeholder to the unvectorized input
          # that is being reduced.
          reduction_input = self._pfor_config._lookup_reduction(y)
          assert isinstance(reduction_input, ops.Tensor), reduction_input
          # Tensor being reduced should already be converted due to a control
          # dependency on the created placeholder.
          # Note that in cases where reduction_input is in an outer context, one
          # needs to locate the corresponding Enter node and use that to lookup
          # the conversion.
          # TODO(agarwal): handle reductions inside control flow constructs.
          assert reduction_input in self._conversion_map, (
              "Unable to handle reduction of %s, possibly as it was used "
              "inside a control flow construct. Note that reductions across "
              "pfor iterations are currently not supported inside control flow "
              "constructs." % reduction_input)
          output = self._conversion_map[reduction_input]
          # If original input is not stacked, we tile it. Also we always mark
          # output as unstacked.
          new_outputs = [wrap(self._unwrap_or_tile(output), False)]
        # None of the inputs or control inputs were converted.
        elif ((not is_inside_loop or
               (not is_stateful and not some_input_converted and
                not some_control_input_converted)) and
              y.graph == ops.get_default_graph()):
          if y is y_op:
            assert not isinstance(y_op, WhileOp)
            new_outputs = y_op
          else:
            new_outputs = [wrap(x, False) for x in y_op.outputs]
        elif not (is_stateful or is_while_loop or some_input_stacked):
          # All inputs are unstacked or unconverted, but some control inputs
          # are converted.
          # TODO(rachelim): Handle the case where some inputs are sparsely
          # stacked (i.e. any(x.is_sparse_stacked for x in converted_inputs))
          new_op = _create_op(y_op.type, [x.t for x in converted_inputs],
                              [x.dtype for x in y_op.outputs],
                              y_op.node_def.attr)
          if y is y_op:
            new_outputs = new_op
          else:
            new_outputs = [wrap(x, False) for x in new_op.outputs]
        else:
          # Either some inputs are not loop invariant or op is stateful, so
          # dispatch to a registered per-op converter (see the registration
          # sketch after this function).
          if hasattr(y_op, "pfor_converter"):
            converter = y_op.pfor_converter
          else:
            converter = _pfor_converter_registry.get(y_op.type, None)
          if converter is None:
            if flags.FLAGS.op_conversion_fallback_to_while_loop:
              converter = _fallback_converter
            else:
              raise ValueError(
                  "No converter defined for %s\n%s\ninputs: %s. "
                  "\nEither add a converter or set "
                  "--op_conversion_fallback_to_while_loop=True, "
                  "which may run slower" % (y_op.type, y_op, converted_inputs))
          # TODO(rachelim): Handle the case where some inputs are sparsely
          # stacked. We should only call the converter if it supports handling
          # those inputs.
          new_outputs = converter(_PforInput(self, y_op, converted_inputs))
          if isinstance(new_outputs, WrappedTensor):
            new_outputs = [new_outputs]
          assert isinstance(new_outputs,
                            (list, tuple, ops.Operation)), new_outputs
        logging.vlog(2, "converted %s %s", y_op, new_outputs)

        # Insert into self._conversion_map
        if y is y_op:
          assert isinstance(new_outputs, ops.Operation)
          self._add_conversion(y_op, new_outputs)
        else:
          assert len(y_op.outputs) == len(new_outputs), (
              y_op, y_op.outputs, new_outputs)
          for old_output, new_output in zip(y_op.outputs, new_outputs):
            assert isinstance(new_output, WrappedTensor), (new_output, y, y_op)
            self._add_conversion(old_output, new_output)
        stack.pop(0)

    return self._conversion_map[op_or_tensor]
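
The converter dispatch in the last branch above relies on the per-op registry
maintained elsewhere in pfor.py. A minimal sketch of what a registered
converter looks like, assuming the RegisterPFor decorator and the
wrap/_PforInput helpers defined in this file (the Relu converter shown here is
illustrative, not necessarily the exact registration shipped in the file):

from tensorflow.python.ops import nn_ops


@RegisterPFor("Relu")
def _convert_relu(pfor_input):
  # pfor_input bundles y_op with its converted inputs; stacked_input(0)
  # returns the first input with the pfor loop dimension stacked in front.
  t = pfor_input.stacked_input(0)
  # Relu is elementwise, so applying it to the stacked tensor vectorizes all
  # pfor iterations at once; wrap(..., True) marks the result as stacked.
  return wrap(nn_ops.relu(t), True)

The returned WrappedTensor(s) are what _convert_helper records in
self._conversion_map for the outputs of y_op.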