models_mnist/generator.py [30:84]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
linear = tf.contrib.layers.fully_connected  # short alias for a fully connected layer

# generator whose behavior depends on the model type specified in params
class ProgramGenerator:
  def __init__(self, inputs, assembler, params):
    """Initialize program generator.

    Args:
      inputs: Dictionary of input tensors consumed by the seq2seq model
        (for the 'mem' generator this includes 'hist' and 'hist_len').
      assembler: Assembler that handles the predicted program tokens.
      params: Dictionary of model hyperparameters; keys read here include
        'text_vocab_size', 'text_embed_size', 'lstm_size', 'model',
        'use_gt_prog', and 'generator'.
    """

    self.params = params
    outputs = {}
    used_inputs = []

    # create embedding matrix
    with tf.variable_scope('embed', reuse=None) as embed_scope:
      size = [params['text_vocab_size'], params['text_embed_size']]
      embed_mat = tf.get_variable('embed_mat', size)

    # remember the scope for further use
    params['embed_scope'] = embed_scope

    cell = tf.contrib.rnn.BasicLSTMCell(params['lstm_size'])
    #--------------------------------------------------------

    # if program is to be predicted
    if 'prog' in params['model']:
      # wrap the ground-truth-program flag as a constant for internal use
      use_gt_prog = tf.constant(params['use_gt_prog'], dtype=tf.bool)

      # build the low-level attention seq2seq model and its internals
      self.rnn = AttSeq2Seq(inputs, use_gt_prog, assembler, params)
      # if memory based generator is used
      if params['generator'] == 'mem':
        used_inputs.extend(['hist', 'hist_len'])

      outputs['encoder_output'] = self.rnn.encoder_outputs
      outputs['pred_tokens'] = self.rnn.predicted_tokens
      outputs['neg_entropy'] = tf.reduce_mean(self.rnn.neg_entropy)

      # expose the attention history if the model computed att_history
      if hasattr(self.rnn, 'att_history'):
        outputs['att_history'] = self.rnn.att_history

      # also expose the stacked encoder LSTM states (hidden h and cell c)
      concat_list = [ii.h for ii in self.rnn.encoder_states]
      outputs['enc_dec_h'] = tf.stack(concat_list)
      concat_list = [ii.c for ii in self.rnn.encoder_states]
      outputs['enc_dec_c'] = tf.stack(concat_list)

      # alias for the attention weights of the seq2seq model
      attention = self.rnn.atts
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
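
The embedding scope is saved into params['embed_scope'] so that later modules
can share the same 'embed_mat' variable through TF 1.x variable reuse. Below is
a minimal sketch of such a consumer, assuming TF 1.x scoping semantics;
embed_tokens is a hypothetical helper, not a function from either file:

import tensorflow as tf

def embed_tokens(tokens, params):
  # re-enter the saved scope with reuse=True; tf.get_variable then returns
  # the existing 'embed_mat' instead of creating a new variable
  with tf.variable_scope(params['embed_scope'], reuse=True):
    embed_mat = tf.get_variable('embed_mat')
  # map integer token ids to their [text_embed_size] embedding vectors
  return tf.nn.embedding_lookup(embed_mat, tokens)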



models_vd/generator.py [30:84]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
linear = tf.contrib.layers.fully_connected  # short alias for a fully connected layer

# generator whose behavior depends on the model type specified in params
class ProgramGenerator:
  def __init__(self, inputs, assembler, params):
    """Initialize program generator.

    Args:
      inputs: Dictionary of input tensors consumed by the seq2seq model
        (for the 'mem' generator this includes 'hist' and 'hist_len').
      assembler: Assembler that handles the predicted program tokens.
      params: Dictionary of model hyperparameters; keys read here include
        'text_vocab_size', 'text_embed_size', 'lstm_size', 'model',
        'use_gt_prog', and 'generator'.
    """

    self.params = params
    outputs = {}
    used_inputs = []

    # create embedding matrix
    with tf.variable_scope('embed', reuse=None) as embed_scope:
      size = [params['text_vocab_size'], params['text_embed_size']]
      embed_mat = tf.get_variable('embed_mat', size)

    # remember the scope for further use
    params['embed_scope'] = embed_scope

    cell = tf.contrib.rnn.BasicLSTMCell(params['lstm_size'])
    #--------------------------------------------------------

    # if program is to be predicted
    if 'prog' in params['model']:
      # wrap the ground-truth-program flag as a constant for internal use
      use_gt_prog = tf.constant(params['use_gt_prog'], dtype=tf.bool)

      # build the low-level attention seq2seq model and its internals
      self.rnn = AttSeq2Seq(inputs, use_gt_prog, assembler, params)
      # if memory based generator is used
      if params['generator'] == 'mem':
        used_inputs.extend(['hist', 'hist_len'])

      outputs['encoder_output'] = self.rnn.encoder_outputs
      outputs['pred_tokens'] = self.rnn.predicted_tokens
      outputs['neg_entropy'] = tf.reduce_mean(self.rnn.neg_entropy)

      # expose the attention history if the model computed att_history
      if hasattr(self.rnn, 'att_history'):
        outputs['att_history'] = self.rnn.att_history

      # also expose the stacked encoder LSTM states (hidden h and cell c)
      concat_list = [ii.h for ii in self.rnn.encoder_states]
      outputs['enc_dec_h'] = tf.stack(concat_list)
      concat_list = [ii.c for ii in self.rnn.encoder_states]
      outputs['enc_dec_c'] = tf.stack(concat_list)

      # alias for the attention weights of the seq2seq model
      attention = self.rnn.atts
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
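
For reference, a sketch of constructing the generator with exactly the
parameter keys the excerpt reads; every value below is an illustrative
placeholder, not a default taken from the repository:

params = {
    'text_vocab_size': 1000,   # rows of the embedding matrix
    'text_embed_size': 300,    # embedding dimensionality
    'lstm_size': 512,          # units in the BasicLSTMCell
    'model': 'prog',           # must contain 'prog' to build this branch
    'use_gt_prog': True,       # feed ground-truth programs when available
    'generator': 'mem',        # 'mem' additionally uses 'hist' / 'hist_len'
}
# `inputs` and `assembler` are assumed to come from the surrounding pipeline
prog_gen = ProgramGenerator(inputs, assembler, params)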



