in recommenders/models/deeprec/models/base_model.py [0:0]
def _fcn_net(self, model_output, layer_sizes, scope):
    """Construct the MLP (fully-connected) part of the model.

    Builds ``len(layer_sizes)`` hidden dense layers on top of ``model_output``,
    each optionally batch-normalized and passed through the activation
    configured in ``hparams.activation[idx]``, then a final linear projection
    to a single logit. Also records weight/bias histograms for TensorBoard
    and stores the result in ``self.logit`` as a side effect.

    Args:
        model_output (object): The output of upper layers, input of MLP part.
        layer_sizes (list): The number of units in each hidden layer of the MLP.
        scope (object): The variable scope name for the MLP part.

    Returns:
        object: Prediction logit after the fully connected layers.
    """
    hparams = self.hparams
    with tf.compat.v1.variable_scope(scope):
        last_layer_size = model_output.shape[-1]
        # hidden_nn_layers[idx] is the input to hidden layer idx.
        hidden_nn_layers = [model_output]
        with tf.compat.v1.variable_scope("nn_part", initializer=self.initializer):
            for idx, layer_size in enumerate(layer_sizes):
                curr_w_nn_layer = tf.compat.v1.get_variable(
                    name="w_nn_layer" + str(idx),
                    shape=[last_layer_size, layer_size],
                    dtype=tf.float32,
                )
                curr_b_nn_layer = tf.compat.v1.get_variable(
                    name="b_nn_layer" + str(idx),
                    shape=[layer_size],
                    dtype=tf.float32,
                    initializer=tf.compat.v1.zeros_initializer(),
                )
                tf.compat.v1.summary.histogram(
                    "nn_part/" + "w_nn_layer" + str(idx), curr_w_nn_layer
                )
                tf.compat.v1.summary.histogram(
                    "nn_part/" + "b_nn_layer" + str(idx), curr_b_nn_layer
                )
                # tensordot with axes=1 contracts the last axis of the input
                # against the first axis of the weight matrix, so inputs of
                # rank > 2 are supported.
                curr_hidden_nn_layer = (
                    tf.tensordot(hidden_nn_layers[idx], curr_w_nn_layer, axes=1)
                    + curr_b_nn_layer
                )
                activation = hparams.activation[idx]
                # `is True` kept deliberately: only a strict boolean True
                # enables batch norm, matching the original behavior.
                if hparams.enable_BN is True:
                    curr_hidden_nn_layer = tf.compat.v1.layers.batch_normalization(
                        curr_hidden_nn_layer,
                        momentum=0.95,
                        epsilon=0.0001,
                        training=self.is_train_stage,
                    )
                curr_hidden_nn_layer = self._active_layer(
                    logit=curr_hidden_nn_layer, activation=activation, layer_idx=idx
                )
                hidden_nn_layers.append(curr_hidden_nn_layer)
                last_layer_size = layer_size
            # Final linear projection to a single logit (no activation).
            w_nn_output = tf.compat.v1.get_variable(
                name="w_nn_output", shape=[last_layer_size, 1], dtype=tf.float32
            )
            b_nn_output = tf.compat.v1.get_variable(
                name="b_nn_output",
                shape=[1],
                dtype=tf.float32,
                initializer=tf.compat.v1.zeros_initializer(),
            )
            # Suffix is the hidden-layer count, matching the original naming
            # (the removed `layer_idx` counter equaled len(layer_sizes) here).
            tf.compat.v1.summary.histogram(
                "nn_part/" + "w_nn_output" + str(len(layer_sizes)), w_nn_output
            )
            tf.compat.v1.summary.histogram(
                "nn_part/" + "b_nn_output" + str(len(layer_sizes)), b_nn_output
            )
            nn_output = (
                tf.tensordot(hidden_nn_layers[-1], w_nn_output, axes=1)
                + b_nn_output
            )
            self.logit = nn_output
            return nn_output