research/gam/gam/trainer/trainer_classification.py [358:400]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    self.weight_decay_var = weight_decay_var
    self.weight_decay_update = weight_decay_update
    self.iter_cls_total = iter_cls_total
    self.iter_cls_total_update = iter_cls_total_update
    self.accuracy = accuracy
    self.train_op = train_op
    self.loss_op = loss_op
    self.saver = saver
    self.batch_size_actual = tf.shape(self.predictions)[0]
    self.reset_optimizer = tf.variables_initializer(self.optimizer.variables())
    self.is_train = is_train

  def _create_weight_decay_var(self, weight_decay_initial,
                               weight_decay_schedule):
    """Creates a weight decay variable that can be updated using a schedule.

    Args:
      weight_decay_initial: Initial weight decay value, or None for no
        weight decay.
      weight_decay_schedule: Either None (constant weight decay) or
        'linear' (decay linearly to zero over
        `self.max_num_iter_cotrain` co-train iterations). Any other value
        silently yields (None, None), matching the original behavior.

    Returns:
      A tuple (weight_decay_var, weight_decay_update). Either element may
      be None; `weight_decay_update` is an op that applies one step of the
      linear schedule when run.
    """
    weight_decay_var = None
    weight_decay_update = None
    if weight_decay_schedule is None:
      if weight_decay_initial is not None:
        # Constant weight decay: a plain tensor, no update op needed.
        # (The redundant `else: weight_decay_var = None` branch was removed;
        # the variable is already initialized to None above.)
        weight_decay_var = tf.constant(
            weight_decay_initial, dtype=tf.float32, name='weight_decay')
    elif weight_decay_schedule == 'linear':
      # Mutable (resource) variable so the decay can be decremented in place.
      weight_decay_var = tf.get_variable(
          name='weight_decay',
          initializer=tf.constant(
              weight_decay_initial, name='weight_decay_initial'),
          use_resource=True,
          trainable=False)
      # Linear schedule: reaches zero after max_num_iter_cotrain updates.
      update_rate = weight_decay_initial / float(self.max_num_iter_cotrain)
      weight_decay_update = weight_decay_var.assign_sub(update_rate)
    return weight_decay_var, weight_decay_update

  def _create_counter(self):
    """Creates a cumulative iteration counter for all classification steps.

    Returns:
      A tuple (iter_cls_total, iter_cls_total_update), where
      iter_cls_total is a non-trainable resource variable initialized to 0,
      and iter_cls_total_update is an op that increments it by 1 when run.
    """
    # Resource variable so assign_add updates are well-defined in TF1 graphs;
    # non-trainable because it is bookkeeping, not a model parameter.
    iter_cls_total = tf.get_variable(
        name='iter_cls_total',
        initializer=tf.constant(0, name='iter_cls_total'),
        use_resource=True,
        trainable=False)
    iter_cls_total_update = iter_cls_total.assign_add(1)
    return iter_cls_total, iter_cls_total_update
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



research/gam/gam/trainer/trainer_classification_gcn.py [392:434]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    self.weight_decay_var = weight_decay_var
    self.weight_decay_update = weight_decay_update
    self.iter_cls_total = iter_cls_total
    self.iter_cls_total_update = iter_cls_total_update
    self.accuracy = accuracy
    self.train_op = train_op
    self.loss_op = loss_op
    self.saver = saver
    self.batch_size_actual = tf.shape(self.predictions)[0]
    self.reset_optimizer = tf.variables_initializer(self.optimizer.variables())
    self.is_train = is_train

  def _create_weight_decay_var(self, weight_decay_initial,
                               weight_decay_schedule):
    """Creates a weight decay variable that can be updated using a schedule.

    Args:
      weight_decay_initial: Initial weight decay value, or None for no
        weight decay.
      weight_decay_schedule: Either None (constant weight decay) or
        'linear' (decay linearly to zero over
        `self.max_num_iter_cotrain` co-train iterations). Any other value
        silently yields (None, None), matching the original behavior.

    Returns:
      A tuple (weight_decay_var, weight_decay_update). Either element may
      be None; `weight_decay_update` is an op that applies one step of the
      linear schedule when run.
    """
    weight_decay_var = None
    weight_decay_update = None
    if weight_decay_schedule is None:
      if weight_decay_initial is not None:
        # Constant weight decay: a plain tensor, no update op needed.
        # (The redundant `else: weight_decay_var = None` branch was removed;
        # the variable is already initialized to None above.)
        weight_decay_var = tf.constant(
            weight_decay_initial, dtype=tf.float32, name='weight_decay')
    elif weight_decay_schedule == 'linear':
      # Mutable (resource) variable so the decay can be decremented in place.
      weight_decay_var = tf.get_variable(
          name='weight_decay',
          initializer=tf.constant(
              weight_decay_initial, name='weight_decay_initial'),
          use_resource=True,
          trainable=False)
      # Linear schedule: reaches zero after max_num_iter_cotrain updates.
      update_rate = weight_decay_initial / float(self.max_num_iter_cotrain)
      weight_decay_update = weight_decay_var.assign_sub(update_rate)
    return weight_decay_var, weight_decay_update

  def _create_counter(self):
    """Creates a cumulative iteration counter for all classification steps.

    Returns:
      A tuple (iter_cls_total, iter_cls_total_update), where
      iter_cls_total is a non-trainable resource variable initialized to 0,
      and iter_cls_total_update is an op that increments it by 1 when run.
    """
    # Resource variable so assign_add updates are well-defined in TF1 graphs;
    # non-trainable because it is bookkeeping, not a model parameter.
    iter_cls_total = tf.get_variable(
        name='iter_cls_total',
        initializer=tf.constant(0, name='iter_cls_total'),
        use_resource=True,
        trainable=False)
    iter_cls_total_update = iter_cls_total.assign_add(1)
    return iter_cls_total, iter_cls_total_update
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



