curiosity/baseline_models.py [267:308]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            )

        if self._disable_facts:
            # With facts disabled, skip fact encoding and ranking and
            # contribute no fact loss
            fact_loss = 0
        else:
            if self._use_bert:
                # (batch_size, n_turns, n_words, emb_dim)
                fact_repr, fact_mask = self._bert_encoder(facts)
                fact_repr = self._dropout(fact_repr)
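                # Mask even-indexed (user) turns so facts are only
                # ranked on assistant turns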
                fact_mask[:, ::2] = 0
            else:
                # (batch_size, n_turns, n_facts)
                # Wrapping dim skips over n_turns
                fact_mask = get_text_field_mask(facts, num_wrapping_dims=1)
                # In addition to masking padded facts, also explicitly mask
                # user turns just in case
                fact_mask[:, ::2] = 0

                # (batch_size, n_turns, n_facts, n_words)
                # Wrapping dim skips over n_turns and n_facts
                fact_text_mask = get_text_field_mask(facts, num_wrapping_dims=2)
                # (batch_size, n_turns, n_facts, n_words, emb_dim)
                # Share the embedder with the utterance encoder; the extra
                # (n_turns, n_facts) dims are flattened below before encoding
                fact_embed = self._dropout(self._utter_embedder(facts))
                shape = fact_embed.shape
                word_dim = shape[-2]
                emb_dim = shape[-1]
                reshaped_facts = fact_embed.view(-1, word_dim, emb_dim)
                reshaped_fact_text_mask = fact_text_mask.view(-1, word_dim)
                reshaped_fact_repr = self._utter_context(
                    reshaped_facts, reshaped_fact_text_mask
                )
                # Back to (batch_size, n_turns, n_facts, -1): the word and
                # emb dims are replaced by the encoder's output dim
                fact_repr = reshaped_fact_repr.view(shape[:-2] + (-1,))

            fact_logits = self._fact_ranker(
                shifted_context,
                fact_repr,
            )
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
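
Both branches zero out even-indexed turns before any fact is ranked. Below is a minimal sketch of that masking convention in pure PyTorch (no AllenNLP); the toy sizes are illustrative, and the even-index = user-turn convention is taken from the comments in the snippet:

import torch

batch_size, n_turns, n_facts = 2, 4, 3

# 1 = real fact, 0 = padding, as get_text_field_mask would produce
fact_mask = torch.ones(batch_size, n_turns, n_facts, dtype=torch.long)

# Turns alternate user/assistant starting with the user, so even indices
# (0, 2, ...) are user turns; zero them so no fact is ranked there
fact_mask[:, ::2] = 0

print(fact_mask[0])
# tensor([[0, 0, 0],
#         [1, 1, 1],
#         [0, 0, 0],
#         [1, 1, 1]])

Zeroing rows of the mask, rather than slicing the tensor itself, keeps every turn aligned with its position in the dialogue, so downstream code can still index facts by turn.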



curiosity/models.py [325:366]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            )

        if self._disable_facts:
            # With facts disabled, skip fact encoding and ranking and
            # contribute no fact loss
            fact_loss = 0
        else:
            if self._use_bert:
                # (batch_size, n_turns, n_words, emb_dim)
                fact_repr, fact_mask = self._bert_encoder(facts)
                fact_repr = self._dropout(fact_repr)
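                # Mask even-indexed (user) turns so facts are only
                # ranked on assistant turns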
                fact_mask[:, ::2] = 0
            else:
                # (batch_size, n_turns, n_facts)
                # Wrapping dim skips over n_turns
                fact_mask = get_text_field_mask(facts, num_wrapping_dims=1)
                # In addition to masking padded facts, also explicitly mask
                # user turns just in case
                fact_mask[:, ::2] = 0

                # (batch_size, n_turns, n_facts, n_words)
                # Wrapping dim skips over n_turns and n_facts
                fact_text_mask = get_text_field_mask(facts, num_wrapping_dims=2)
                # (batch_size, n_turns, n_facts, n_words, emb_dim)
                # Share the embedder with the utterance encoder; the extra
                # (n_turns, n_facts) dims are flattened below before encoding
                fact_embed = self._dropout(self._utter_embedder(facts))
                shape = fact_embed.shape
                word_dim = shape[-2]
                emb_dim = shape[-1]
                reshaped_facts = fact_embed.view(-1, word_dim, emb_dim)
                reshaped_fact_text_mask = fact_text_mask.view(-1, word_dim)
                reshaped_fact_repr = self._utter_context(
                    reshaped_facts, reshaped_fact_text_mask
                )
                # Back to (batch_size, n_turns, n_facts, -1): the word and
                # emb dims are replaced by the encoder's output dim
                fact_repr = reshaped_fact_repr.view(shape[:-2] + (-1,))

            fact_logits = self._fact_ranker(
                shifted_context,
                fact_repr,
            )
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
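
In the non-BERT branch, each fact is encoded by collapsing the leading (batch_size, n_turns, n_facts) dims, running the shared utterance encoder over the resulting (N, n_words, emb_dim) batch, and restoring the leading shape afterwards. A sketch of that flatten-encode-restore pattern, with a GRU standing in for self._utter_context (an assumption here; the real model uses whichever Seq2VecEncoder it was configured with):

import torch
import torch.nn as nn

batch_size, n_turns, n_facts, n_words, emb_dim, hidden = 2, 4, 3, 5, 8, 16

# Stand-in for the self._utter_embedder output
fact_embed = torch.randn(batch_size, n_turns, n_facts, n_words, emb_dim)

# Collapse all leading dims so the encoder sees an ordinary 3D batch
shape = fact_embed.shape
reshaped = fact_embed.view(-1, shape[-2], shape[-1])  # (24, n_words, emb_dim)

gru = nn.GRU(emb_dim, hidden, batch_first=True)
_, h_n = gru(reshaped)          # h_n: (1, 24, hidden)
encoded = h_n.squeeze(0)        # (24, hidden)

# Restore (batch_size, n_turns, n_facts, hidden); the word and emb dims
# are gone, replaced by the encoder's output dim
fact_repr = encoded.view(shape[:-2] + (-1,))
assert fact_repr.shape == (batch_size, n_turns, n_facts, hidden)

This sketch skips the padding mask that the real encoder consumes; with padded facts, the flattened mask (reshaped_fact_text_mask above) would be passed alongside the tokens.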



