def create_model()

in empose/nn/models.py [0:0]


    def create_model(self):
        """Instantiate the pose/shape prediction networks and the SMPL parameter loss.

        Builds either an RNN-based or MLP-based initial estimator (depending on
        ``self.rnn_init``), plus the iterative refinement MLPs, and stores them
        as attributes on ``self``.
        """
        cfg = self.config
        use_batch_norm = not cfg.m_no_batch_norm

        def _make_mlp(in_size, out_size):
            # All four MLPs share the same hidden configuration; only the
            # input and output sizes differ per call site.
            return MLP(in_size, out_size,
                       cfg.m_hidden_size, cfg.m_num_layers,
                       cfg.m_dropout_hidden, self.skip_connections,
                       use_batch_norm)

        if self.rnn_init:
            # An RNN predicts the initial pose and shape from the inputs.
            self.rnn = RNNLayer(self.input_size, cfg.m_rnn_hidden_size, cfg.m_rnn_num_layers,
                                dropout=cfg.m_dropout, bidirectional=cfg.m_rnn_bidirectional)
            self.pose_net_init = nn.Linear(cfg.m_rnn_hidden_size, self.pose_size)
            self.shape_net_init = nn.Linear(cfg.m_rnn_hidden_size, self.shape_size)
        else:
            # The `init` networks produce a first pose/shape estimate given
            # only the inputs.
            self.pose_net_init = _make_mlp(self.input_size, self.pose_size)
            self.shape_net_init = _make_mlp(self.input_size, self.shape_size)

        # The `iter` networks refine the estimate: they take the previous
        # pose/shape estimate concatenated with the inputs.
        self.pose_net_iter = _make_mlp(self.input_iter_size, self.pose_size)
        self.shape_net_iter = _make_mlp(self.input_iter_size, self.shape_size)

        # Per-element L1 loss on the SMPL parameters; reduction is left to
        # the caller (reduction='none').
        self.smpl_loss = nn.L1Loss(reduction='none')