def get_conf()

in Model.py [0:0]


def get_conf(layer_id, layer_name, input_layer_ids, all_layer_configs, model_input_ids, use_gpu,
        conf_dict=None, shared_conf=None, succeed_embedding_flag=False, output_layer_flag=False,
        target_num=None, fixed_lengths=None, target_dict=None):
    """ get layer configuration

    Args
        layer_id: layer identifier
        layer_name: name of layer such as BiLSTM
        input_layer_ids (list): the inputs of current layer
        all_layer_configs (dict): records the conf class of each layer.
        model_input_ids (set): the inputs of the model, e.g. ['query', 'passage']
        use_gpu:
        conf_dict:
        shared_conf: if fixed_lengths is not None, the output_dim of shared_conf should be corrected!
        flag:
        output_layer_flag:
        target_num: used for inference the dimension of output space if someone declare a dimension of -1
        fixed_lengths
    Returns:
        configuration class coresponds to the layer

    """
    if shared_conf:
        conf = copy.deepcopy(shared_conf)
    else:
        try:
            conf_dict['use_gpu'] = use_gpu

            # for the Embedding layer, add the weight_on_gpu parameter
            if layer_id == EMBED_LAYER_ID:
                conf_dict['weight_on_gpu'] = conf_dict['conf']['weight_on_gpu']
                del conf_dict['conf']['weight_on_gpu']

            # For classification tasks, we usually add a Linear layer to project the output to the dimension of the number of classes.
            # If the number of classes is unknown, '-1' (or '#target#') can be used instead; it is replaced by the number of classes calculated from the corpus.
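            # Hypothetical example: a conf of {"hidden_dim": [128, -1]} becomes [128, target_num],
            # and {"hidden_dim": "#target#"} becomes target_num.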
            if layer_name == 'Linear':
                if isinstance(conf_dict['hidden_dim'], list):
                    if conf_dict['hidden_dim'][-1] == -1:
                        assert output_layer_flag is True, "hidden_dim == -1 is only allowed in the last layer!"
                        assert target_num is not None, "Number of targets should be given!"
                        conf_dict['hidden_dim'][-1] = target_num
                    elif conf_dict['hidden_dim'][-1] == '#target#':
                        logging.info('#target# position will be replaced by target num: %d' % target_num)
                        conf_dict['hidden_dim'][-1] = target_num
                elif isinstance(conf_dict['hidden_dim'], int) and conf_dict['hidden_dim'] == -1:
                    assert output_layer_flag is True, "hidden_dim == -1 is only allowed in the last layer!"
                    assert target_num is not None, "Number of targets should be given!"
                    conf_dict['hidden_dim'] = target_num
                elif isinstance(conf_dict['hidden_dim'], str) and conf_dict['hidden_dim'] == '#target#':
                    logging.info('#target# position will be replaced by target num: %d' % target_num)
                    conf_dict['hidden_dim'] = target_num
            # add the necessary attributes for the CRF layer
            if layer_name == 'CRF':
                conf_dict['target_dict'] = target_dict
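                # target_dict is the label dictionary, e.g. a tag-to-index mapping such as
                # {'O': 0, 'B-PER': 1, 'I-PER': 2} (hypothetical example)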

            conf = eval(layer_name + "Conf")(**conf_dict)
        except NameError as e:
            raise LayerConfigUndefinedError("\"%sConf\" has not been defined" % layer_name)

    # verify the rank consistency of the input layers
    if layer_name == EMBED_LAYER_NAME:
        # the embedding layer
        pass
    else:
        # make sure all the inputs to current layer exist
        for input_layer_id in input_layer_ids:
            if not (input_layer_id in all_layer_configs or input_layer_id in model_input_ids):
                raise ConfigurationError("The input %s of layer %s does not exist. Please define it before "
                    "defining layer %s!" % (input_layer_id, layer_id, layer_id))

        former_output_ranks = [all_layer_configs[input_layer_id].output_rank if input_layer_id in all_layer_configs
                else all_layer_configs[EMBED_LAYER_ID].output_rank for input_layer_id in input_layer_ids]
        # infer input_dims from the output_dims of the input layers
        conf.input_dims = [all_layer_configs[input_layer_id].output_dim if input_layer_id in all_layer_configs
                else all_layer_configs[EMBED_LAYER_ID].output_dim for input_layer_id in input_layer_ids]

        # If the single input is a model input (i.e. it comes from the embedding layer) and fixed_lengths is given, set the fixed length in input_dims
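        # e.g. fixed_lengths might look like {'query': 30, 'passage': 100} (hypothetical values)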
        if len(input_layer_ids) == 1 and input_layer_ids[0] in model_input_ids and fixed_lengths:
            conf.input_dims[0][1] = fixed_lengths[input_layer_ids[0]]

        # check and verify input ranks
        if conf.num_of_inputs > 0:
            if conf.num_of_inputs != len(input_layer_ids):
                raise ConfigurationError("%s only accept %d inputs but you feed %d inputs to it!" % \
                        (layer_name, conf.num_of_inputs, len(input_layer_ids)))
        elif conf.num_of_inputs == -1:
            conf.num_of_inputs = len(input_layer_ids)
            if isinstance(conf.input_ranks, list):
                conf.input_ranks = conf.input_ranks * conf.num_of_inputs
            else:
                logging.warning("[For developer of %s] The input_ranks attribute should be a list!" % (layer_name))
                conf.input_ranks = [conf.input_ranks] * conf.num_of_inputs

        for input_rank, former_output_rank in zip(conf.input_ranks, former_output_ranks):
            if input_rank != -1 and input_rank != former_output_rank:
                raise ConfigurationError("Input ranks of %s are inconsistent with former layers" % layer_id)
        conf.input_ranks = copy.deepcopy(former_output_ranks)

    # inference and verification inside the layer
    conf.inference()        # update attributes that rely on the input dimensions or other settings
    conf.verify()           # verify that the configuration is legal
    former_conf = None if len(all_layer_configs) == 0 else list(all_layer_configs.values())[-1]
    conf.verify_former_block(former_conf)  # check whether any attributes rely on the former block

    logging.debug('Layer id: %s; name: %s; input_dims: %s; input_ranks: %s; output_dim: %s; output_rank: %s' % (layer_id, layer_name, conf.input_dims if layer_id != 'embedding' else 'None', conf.input_ranks, conf.output_dim, conf.output_rank))

    return conf
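
Below is a minimal usage sketch, not part of Model.py: the layer id, the 'BiLSTM' block, and the conf_dict values are hypothetical, and all_layer_configs is assumed to already contain the embedding layer's conf under EMBED_LAYER_ID.

# Hypothetical call: build the conf for a 'BiLSTM' block that reads the model input 'query'.
layer_conf = get_conf(
    layer_id='bilstm_1',
    layer_name='BiLSTM',
    input_layer_ids=['query'],
    all_layer_configs=all_layer_configs,            # assumed to already hold EMBED_LAYER_ID's conf
    model_input_ids={'query', 'passage'},
    use_gpu=False,
    conf_dict={'hidden_dim': 128, 'dropout': 0.2},  # hypothetical layer parameters
    output_layer_flag=False,
    target_num=None,
    fixed_lengths=None,
    target_dict=None)
all_layer_configs['bilstm_1'] = layer_conf          # register it so later layers can reference 'bilstm_1'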