tensor2tensor/models/research/transformer_vae_flow_prior.py [117:168]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    targets_weights_fn = self._hparams.weights_fn.get(
        "targets", modalities.get_weights_fn(modality))
    if weights is None:
      loss_num, loss_den = loss(logits, feature, self._hparams, vocab_size,
                                weights_fn=targets_weights_fn)
    else:

      def weights_fn(labels):
        """Per-token loss weights.

        Combines the modality-provided weighting of `labels` with the
        explicitly supplied `weights`, broadcasting the latter along minor
        dimensions when its rank is lower than the modality mask's.
        """
        # Per-token mask produced by the target modality's weights_fn.
        mask = targets_weights_fn(labels)

        # TF broadcasts along major dimensions by default; to broadcast
        # 'weights' along minor dimensions instead, expand it to the rank
        # of the modality mask before multiplying.
        extra = weights
        if len(extra.shape) < len(mask.shape):
          extra = common_layers.expand_squeeze_to_nd(
              weights, mask.shape.ndims)

        return extra * mask

      # Ensure that target.modality_loss() supports "weights_fn" keyword
      # argument. If it doesn't and "weights" is specified, raise an exception.
      argument_names = inspect.getargspec(loss).args
      if "weights_fn" not in argument_names:
        raise ValueError(
            "Explicit 'weights' given but default loss for modality doesn't "
            "support 'weights_fn' keyword argument: %s.loss(%s)." %
            (modality, ", ".join(argument_names)))

      loss_num, loss_den = loss(
          logits, feature, self._hparams, vocab_size, weights_fn=weights_fn)

    loss_num *= self._problem_hparams.loss_multiplier

    if hasattr(self.hparams, "problem") and hasattr(
        self.hparams.problem, "task_list"):
      if weights is not None:
        raise NotImplementedError("weights not yet implemented in "
                                  "multitask setting.")
      loss_num, loss_den, summaries = multi_problem.aggregate_task_losses(
          self.hparams,
          self._problem_hparams,
          logits,
          feature_name,
          feature
      )

      for key, val in summaries:
        tf.summary.scalar(key, val)

    return loss_num, loss_den
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



tensor2tensor/utils/t2t_model.py [631:682]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    targets_weights_fn = self._hparams.weights_fn.get(
        "targets", modalities.get_weights_fn(modality))
    if weights is None:
      loss_num, loss_den = loss(logits, feature, self._hparams, vocab_size,
                                weights_fn=targets_weights_fn)
    else:

      def weights_fn(labels):
        """Per-token loss weights.

        Combines the modality-provided weighting of `labels` with the
        explicitly supplied `weights`, broadcasting the latter along minor
        dimensions when its rank is lower than the modality mask's.
        """
        # Per-token mask produced by the target modality's weights_fn.
        mask = targets_weights_fn(labels)

        # TF broadcasts along major dimensions by default; to broadcast
        # 'weights' along minor dimensions instead, expand it to the rank
        # of the modality mask before multiplying.
        extra = weights
        if len(extra.shape) < len(mask.shape):
          extra = common_layers.expand_squeeze_to_nd(
              weights, mask.shape.ndims)

        return extra * mask

      # Ensure that target.modality_loss() supports "weights_fn" keyword
      # argument. If it doesn't and "weights" is specified, raise an exception.
      argument_names = inspect.getargspec(loss).args
      if "weights_fn" not in argument_names:
        raise ValueError(
            "Explicit 'weights' given but default loss for modality doesn't "
            "support 'weights_fn' keyword argument: %s.loss(%s)." %
            (modality, ", ".join(argument_names)))

      loss_num, loss_den = loss(
          logits, feature, self._hparams, vocab_size, weights_fn=weights_fn)

    loss_num *= self._problem_hparams.loss_multiplier

    if hasattr(self.hparams, "problem") and hasattr(
        self.hparams.problem, "task_list"):
      if weights is not None:
        raise NotImplementedError("weights not yet implemented in "
                                  "multitask setting.")
      loss_num, loss_den, summaries = multi_problem.aggregate_task_losses(
          self.hparams,
          self._problem_hparams,
          logits,
          feature_name,
          feature
      )

      for key, val in summaries:
        tf.summary.scalar(key, val)

    return loss_num, loss_den
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



