recommenders/models/deeprec/models/sequential/rnn_cell_implement.py [64:129]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        if not state_is_tuple:
            logging.warn(
                "%s: Using a concatenated state is slower and will soon be "
                "deprecated.  Use state_is_tuple=True.",
                self,
            )
        if num_unit_shards is not None or num_proj_shards is not None:
            logging.warn(
                "%s: The num_unit_shards and proj_unit_shards parameters are "
                "deprecated and will be removed in Jan 2017.  "
                "Use a variable scope with a partitioner instead.",
                self,
            )

        # Store the cell configuration on the instance.
        self._num_units = num_units
        self._use_peepholes = use_peepholes
        self._cell_clip = cell_clip
        self._initializer = initializer
        self._num_proj = num_proj
        self._proj_clip = proj_clip
        self._num_unit_shards = num_unit_shards
        self._num_proj_shards = num_proj_shards
        self._forget_bias = forget_bias
        self._state_is_tuple = state_is_tuple
        self._activation = activation or math_ops.tanh

        # The optional projection layer determines the emitted width: with a
        # projection, the output (and the h half of the state) is num_proj
        # wide; otherwise both halves of the state match num_units.
        if num_proj:
            self._state_size = (
                LSTMStateTuple(num_units, num_proj)
                if state_is_tuple
                else num_units + num_proj
            )
            self._output_size = num_proj
        else:
            self._state_size = (
                LSTMStateTuple(num_units, num_units)
                if state_is_tuple
                else 2 * num_units
            )
            self._output_size = num_units
        # Slots for the gate transforms and the time-aware kernels; the
        # actual variables are created lazily when the cell is first built.
        self._linear1 = None
        self._linear2 = None
        self._time_input_w1 = None
        self._time_input_w2 = None
        self._time_kernel_w1 = None
        self._time_kernel_t1 = None
        self._time_bias1 = None
        self._time_kernel_w2 = None
        self._time_kernel_t2 = None
        self._time_bias2 = None
        self._o_kernel_t1 = None
        self._o_kernel_t2 = None
        if self._use_peepholes:
            # Peephole connections let the gates see the cell state directly.
            self._w_f_diag = None
            self._w_i_diag = None
            self._w_o_diag = None

    @property
    def state_size(self):
        return self._state_size

    @property
    def output_size(self):
        return self._output_size

    def call(self, inputs, state):
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
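
Both occurrences of this fragment derive the cell's state_size and
output_size from num_units, num_proj, and state_is_tuple. A minimal sketch of
that sizing rule in plain Python (the derive_sizes helper is illustrative,
not part of the module):

from collections import namedtuple

# Stand-in for TF's LSTMStateTuple: c is the cell state, h the hidden output.
LSTMStateTuple = namedtuple("LSTMStateTuple", ("c", "h"))

def derive_sizes(num_units, num_proj=None, state_is_tuple=True):
    """Reproduce the state/output sizing used by the constructor above."""
    if num_proj:
        # Projection shrinks the output (and the h part of the state).
        state = (LSTMStateTuple(num_units, num_proj)
                 if state_is_tuple else num_units + num_proj)
        return state, num_proj
    state = (LSTMStateTuple(num_units, num_units)
             if state_is_tuple else 2 * num_units)
    return state, num_units

print(derive_sizes(64))               # (LSTMStateTuple(c=64, h=64), 64)
print(derive_sizes(64, num_proj=32))  # (LSTMStateTuple(c=64, h=32), 32)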



recommenders/models/deeprec/models/sequential/rnn_cell_implement.py [319:384]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        if not state_is_tuple:
            logging.warn(
                "%s: Using a concatenated state is slower and will soon be "
                "deprecated.  Use state_is_tuple=True.",
                self,
            )
        if num_unit_shards is not None or num_proj_shards is not None:
            logging.warn(
                "%s: The num_unit_shards and proj_unit_shards parameters are "
                "deprecated and will be removed in Jan 2017.  "
                "Use a variable scope with a partitioner instead.",
                self,
            )

        # Store the cell configuration on the instance.
        self._num_units = num_units
        self._use_peepholes = use_peepholes
        self._cell_clip = cell_clip
        self._initializer = initializer
        self._num_proj = num_proj
        self._proj_clip = proj_clip
        self._num_unit_shards = num_unit_shards
        self._num_proj_shards = num_proj_shards
        self._forget_bias = forget_bias
        self._state_is_tuple = state_is_tuple
        self._activation = activation or math_ops.tanh

        # The optional projection layer determines the emitted width: with a
        # projection, the output (and the h half of the state) is num_proj
        # wide; otherwise both halves of the state match num_units.
        if num_proj:
            self._state_size = (
                LSTMStateTuple(num_units, num_proj)
                if state_is_tuple
                else num_units + num_proj
            )
            self._output_size = num_proj
        else:
            self._state_size = (
                LSTMStateTuple(num_units, num_units)
                if state_is_tuple
                else 2 * num_units
            )
            self._output_size = num_units
        # Slots for the gate transforms and the time-aware kernels; the
        # actual variables are created lazily when the cell is first built.
        self._linear1 = None
        self._linear2 = None
        self._time_input_w1 = None
        self._time_input_w2 = None
        self._time_kernel_w1 = None
        self._time_kernel_t1 = None
        self._time_bias1 = None
        self._time_kernel_w2 = None
        self._time_kernel_t2 = None
        self._time_bias2 = None
        self._o_kernel_t1 = None
        self._o_kernel_t2 = None
        if self._use_peepholes:
            # Peephole connections let the gates see the cell state directly.
            self._w_f_diag = None
            self._w_i_diag = None
            self._w_o_diag = None

    @property
    def state_size(self):
        return self._state_size

    @property
    def output_size(self):
        return self._output_size

    def call(self, inputs, state):
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
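
The two fragments above are byte-for-byte identical, so the clone could be
retired by hoisting the shared configuration into a common helper that both
cell classes call from their constructors. A sketch under that assumption
(the mixin name _TimeAwareCellConfigMixin and the trimmed argument list are
hypothetical, not the module's actual API):

from collections import namedtuple

LSTMStateTuple = namedtuple("LSTMStateTuple", ("c", "h"))  # stand-in for TF's

class _TimeAwareCellConfigMixin:
    """Hypothetical shared home for the duplicated constructor logic."""

    def _configure(self, num_units, num_proj=None, state_is_tuple=True,
                   use_peepholes=False, activation=None):
        self._num_units = num_units
        self._num_proj = num_proj
        self._state_is_tuple = state_is_tuple
        self._use_peepholes = use_peepholes
        self._activation = activation  # callers would default this to tanh
        # Same sizing rule as both fragments above.
        if num_proj:
            self._state_size = (LSTMStateTuple(num_units, num_proj)
                                if state_is_tuple else num_units + num_proj)
            self._output_size = num_proj
        else:
            self._state_size = (LSTMStateTuple(num_units, num_units)
                                if state_is_tuple else 2 * num_units)
            self._output_size = num_units
        # Lazily-built weight slots start out empty, as in the originals.
        for name in ("_linear1", "_linear2", "_time_input_w1", "_time_input_w2",
                     "_time_kernel_w1", "_time_kernel_t1", "_time_bias1",
                     "_time_kernel_w2", "_time_kernel_t2", "_time_bias2",
                     "_o_kernel_t1", "_o_kernel_t2"):
            setattr(self, name, None)
        if use_peepholes:
            self._w_f_diag = self._w_i_diag = self._w_o_diag = None

Each cell's __init__ would then reduce to argument validation plus one
self._configure(...) call, collapsing the duplicated block into a single
definition.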



