def run_internal_graph(self, inputs, masks=None)

in keras/engine/network.py


    def run_internal_graph(self, inputs, masks=None):
        """Computes output tensors for new inputs.

        # Note:
            - Expects `inputs` to be a list (potentially with 1 element).
            - Can be run on non-Keras tensors.

        # Arguments
            inputs: List of tensors.
            masks: List of masks (tensors or None).

        # Returns
            Three lists: output_tensors, output_masks, output_shapes
        """
        if masks is None:
            masks = [None for _ in range(len(inputs))]

        # Dictionary mapping reference tensors to tuples
        # (computed tensor, computed mask)
        # We assume a 1:1 mapping from tensor to mask.
        # TODO: raise exception when a `.compute_mask()` call
        # does not return a list the same size as `call`
        tensor_map = {}
        for x, y, mask in zip(self.inputs, inputs, masks):
            tensor_map[str(id(x))] = (y, mask)

        depth_keys = list(self._nodes_by_depth.keys())
        depth_keys.sort(reverse=True)
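        # Depth is greatest at the network's inputs and 0 at its outputs,
        # so iterating in decreasing depth order guarantees that every
        # node's inputs are computed before the node itself is processed.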
        for depth in depth_keys:
            nodes = self._nodes_by_depth[depth]
            for node in nodes:
                # This is always a single layer, never a list.
                layer = node.outbound_layer
                reference_input_tensors = node.input_tensors
                reference_output_tensors = node.output_tensors

                # If all previous input tensors are available in tensor_map,
                # then call node.outbound_layer on them.
                computed_data = []  # List of tuples (input, mask).
                for x in reference_input_tensors:
                    if str(id(x)) in tensor_map:
                        computed_data.append(tensor_map[str(id(x))])

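                # Nodes for which not every input tensor is already in
                # tensor_map are simply skipped; only fully computable
                # nodes are run.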
                if len(computed_data) == len(reference_input_tensors):
                    # call layer
                    with K.name_scope(layer.name):
                        if node.arguments:
                            kwargs = node.arguments
                        else:
                            kwargs = {}
                        if len(computed_data) == 1:
                            computed_tensor, computed_mask = computed_data[0]
                            if has_arg(layer.call, 'mask'):
                                if 'mask' not in kwargs:
                                    kwargs['mask'] = computed_mask
                            output_tensors = to_list(
                                layer.call(computed_tensor, **kwargs))
                            output_masks = layer.compute_mask(computed_tensor,
                                                              computed_mask)
                            if output_masks is None:
                                output_masks = [None for _ in output_tensors]
                            else:
                                output_masks = to_list(output_masks)
                            computed_tensors = [computed_tensor]

                            # computed_masks might be used in the future.
                            computed_masks = [computed_mask]
                        else:
                            computed_tensors = [x[0] for x in computed_data]
                            computed_masks = [x[1] for x in computed_data]
                            if has_arg(layer.call, 'mask'):
                                if 'mask' not in kwargs:
                                    kwargs['mask'] = computed_masks
                            output_tensors = to_list(
                                layer.call(computed_tensors, **kwargs))
                            output_masks = layer.compute_mask(computed_tensors,
                                                              computed_masks)
                            if output_masks is None:
                                output_masks = [None for _ in output_tensors]
                            else:
                                output_masks = to_list(output_masks)
                        # Apply activity regularizer if any:
                        if (hasattr(layer, 'activity_regularizer') and
                                layer.activity_regularizer is not None):
                            with K.name_scope('activity_regularizer'):
                                regularization_losses = [
                                    layer.activity_regularizer(x)
                                    for x in output_tensors]
                            layer.add_loss(regularization_losses,
                                           inputs=computed_tensors)

                        if len(output_masks) != len(output_tensors):
                            raise Exception(
                                'Layers should have equal number of output tensors '
                                'and output masks. Layer ' + str(layer.name) + ' has'
                                ' ' + str(len(output_tensors)) + ' output tensors '
                                'and ' + str(len(output_masks)) + ' output masks.')
                    # Update model updates and losses:
                    # Keep track of updates that depend on the inputs
                    # (e.g. BN updates).
                    self.add_update(layer.get_updates_for(computed_tensors), inputs)
                    # Keep track of unconditional updates (e.g. a counter).
                    self.add_update(layer.get_updates_for(None), None)
                    # Keep track of losses that depend on the inputs
                    # (e.g. activity regularizers).
                    self.add_loss(layer.get_losses_for(computed_tensors), inputs)
                    # Keep track of unconditional losses
                    # (e.g. weight regularizers).
                    self.add_loss(layer.get_losses_for(None), None)

                    # Update _keras_shape and _uses_learning_phase.
                    if all([hasattr(x, '_keras_shape') for x in computed_tensors]):
                        input_shapes = unpack_singleton(
                            [x._keras_shape for x in computed_tensors])
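                        # compute_output_shape may return a single shape
                        # tuple or a list of tuples; to_list normalizes it.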
                        shapes = to_list(layer.compute_output_shape(input_shapes))
                        uses_learning_phase = any(
                            [x._uses_learning_phase for x in computed_tensors])

                        for x, s in zip(output_tensors, shapes):
                            x._keras_shape = s
                            _u = getattr(x, '_uses_learning_phase', False)
                            x._uses_learning_phase = _u or uses_learning_phase

                    # Update tensor_map.
                    for x, y, mask in zip(reference_output_tensors,
                                          output_tensors,
                                          output_masks):
                        tensor_map[str(id(x))] = (y, mask)

        output_tensors = []
        output_masks = []
        output_shapes = []
        for x in self.outputs:
            assert str(id(x)) in tensor_map, 'Could not compute output ' + str(x)
            tensor, mask = tensor_map[str(id(x))]
            if hasattr(tensor, '_keras_shape') and output_shapes is not None:
                shape = tensor._keras_shape
                output_shapes.append(shape)
            else:
                output_shapes = None
            output_tensors.append(tensor)
            output_masks.append(mask)

        # Update cache;
        # keys are based on the ids of the input tensors and input masks.
        cache_key = object_list_uid(inputs)
        cache_key += '_' + object_list_uid(masks)
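        # Later calls of the network on the same inputs/masks hit these
        # caches instead of re-running the graph.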

        output_tensors = unpack_singleton(output_tensors)
        self._output_tensor_cache[cache_key] = output_tensors

        output_masks = unpack_singleton(output_masks)
        self._output_mask_cache[cache_key] = output_masks

        if output_shapes is not None:
            input_shapes = [x._keras_shape for x in inputs]
            cache_key = ', '.join([str(x) for x in input_shapes])

            output_shapes = unpack_singleton(output_shapes)
            self._output_shape_cache[cache_key] = output_shapes
        return output_tensors, output_masks, output_shapes
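
Usage note: `run_internal_graph` is the slow path behind calling a built
`Model` on new symbolic tensors; `Network.call` first checks the caches
populated above and only falls back to this method on a miss. A minimal
sketch of that usage (the layer sizes and names below are arbitrary
illustrations, not taken from the source):

    from keras.layers import Input, Dense
    from keras.models import Model

    # Build a small functional model; Network records its graph of nodes.
    x = Input(shape=(8,))
    y = Dense(4, activation='relu')(x)
    model = Model(x, y)

    # Calling the model on a fresh symbolic tensor reuses the recorded
    # graph: on a cache miss, run_internal_graph walks the nodes by depth
    # and rebuilds the output tensors with the same layers and weights.
    new_x = Input(shape=(8,))
    new_y = model(new_x)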