Models/exprsynth/graph2seqmodel.py [102:116]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        self._decoder_model.finalise_minibatch(batch_data, minibatch)
        return minibatch

    # ------- These are the bits that we only need for test-time:
    def _encode_one_test_sample(self, sample_data_dict: Dict[tf.Tensor, Any]) -> Tuple[tf.Tensor, Optional[tf.Tensor]]:
        """Run the encoder for a single tensorised test sample.

        Feeds the sample's placeholder dict through the session and returns the
        resulting decoder initial state, paired with ``None`` because this model
        produces no auxiliary per-sample encoder output.
        """
        initial_state = self.sess.run(self.ops['decoder_initial_state'],
                                      feed_dict=sample_data_dict)
        return initial_state, None

    def _tensorise_one_test_sample(self, loaded_sample: Dict[str, Any]) -> Dict[tf.Tensor, Any]:
        test_minibatch = {}
        self._init_minibatch(test_minibatch)

        # Note that we are primarily interested in the context encoding:
        super()._extend_minibatch_by_sample(test_minibatch, loaded_sample)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



Models/exprsynth/seq2seqmodel.py [86:100]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        self._decoder_model.finalise_minibatch(batch_data, minibatch)
        return minibatch

    # ------- These are the bits that we only need for test-time:
    def _encode_one_test_sample(self, sample_data_dict: Dict[tf.Tensor, Any]) -> Tuple[tf.Tensor, Optional[tf.Tensor]]:
        """Run the encoder for a single tensorised test sample.

        Feeds the sample's placeholder dict through the session and returns the
        resulting decoder initial state, paired with ``None`` because this model
        produces no auxiliary per-sample encoder output.
        """
        initial_state = self.sess.run(self.ops['decoder_initial_state'],
                                      feed_dict=sample_data_dict)
        return initial_state, None

    def _tensorise_one_test_sample(self, loaded_sample: Dict[str, Any]) -> Dict[tf.Tensor, Any]:
        test_minibatch = {}
        self._init_minibatch(test_minibatch)

        # Note that we are primarily interested in the context encoding:
        super()._extend_minibatch_by_sample(test_minibatch, loaded_sample)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



