def create_pooler()

in src/SSFN/model.py [0:0]


import copy

def create_pooler(pooler_type, config, args):
    '''
    Build the pooling layer for one branch of the model.
    :param pooler_type: which branch to pool: "seq", "struct", or "embedding"
    :param config: model config supplying the hidden size for each branch
    :param args: runtime arguments supplying the per-branch pooling type
    :return: a pooling layer instance, or None if no pooling type is configured
    '''
    if pooler_type == "seq":
        pooling_type = args.seq_pooling_type
        hidden_size = config.hidden_size
    elif pooler_type == "struct":
        pooling_type = args.struct_pooling_type
        hidden_size = sum(config.struct_output_size)
        if pooling_type is None:
            pooling_type = "sum"
    elif pooler_type == "embedding":
        pooling_type = args.embedding_pooling_type
        hidden_size = config.embedding_input_size
    else:
        raise ValueError("Unsupported pooler_type=%s" % pooler_type)

    if pooling_type == "max":
        return GlobalMaskMaxPooling1D()
    elif pooling_type == "sum":
        return GlobalMaskSumPooling1D(axis=1)
    elif pooling_type == "avg":
        return GlobalMaskAvgPooling1D()
    elif pooling_type == "attention":
        return GlobalMaskContextAttentionPooling1D(embed_size=hidden_size)
    elif pooling_type == "context_attention":
        return GlobalMaskContextAttentionPooling1D(embed_size=hidden_size)
    elif pooling_type == "weighted_attention":
        return GlobalMaskWeightedAttentionPooling1D(embed_size=hidden_size)
    elif pooling_type == "value_attention":
        return GlobalMaskValueAttentionPooling1D(embed_size=hidden_size)
    elif pooling_type == "transformer":
        copy_config = copy.deepcopy(config)
        copy_config.hidden_size = hidden_size
        return GlobalMaskTransformerPooling1D(copy_config)
    else:
        # no pooling type configured for this branch (pooling_type is None):
        # return None so the caller can skip pooling
        return None
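
A minimal usage sketch, assuming argparse-style args and config objects that carry the attributes this factory reads; the SimpleNamespace stand-ins and the attribute values below are illustrative, not the repo's defaults:

from types import SimpleNamespace

# hypothetical stand-ins for the real args/config objects
args = SimpleNamespace(seq_pooling_type="value_attention",
                       struct_pooling_type=None,   # falls back to "sum"
                       embedding_pooling_type="max")
config = SimpleNamespace(hidden_size=768,
                         struct_output_size=[128, 128],
                         embedding_input_size=1024)

seq_pooler = create_pooler("seq", config, args)        # value-attention pooling, embed_size=768
struct_pooler = create_pooler("struct", config, args)  # sum pooling (None defaulted to "sum")
emb_pooler = create_pooler("embedding", config, args)  # max pooling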