backup/models.py [26:60]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    def init_bert_model(self):
        """
        Load a BERT encoder — either the base pretrained weights named by
        ``self.args.bert_model`` or, when ``load_fine_tuned`` is set, a
        locally fine-tuned checkpoint directory — then move it to the
        configured device and freeze it in eval mode.
        :return:
        """
        if self.args.load_fine_tuned:
            # Fine-tuned checkpoints live in a conventionally-named local dir.
            source = "fine_tune_{}_{}/".format(
                self.args.data_name, self.args.trained_bert_suffix
            )
        else:
            source = self.args.bert_model
        self.logbook.write_message_logs("Loading bert from {}".format(source))
        model = BertModel.from_pretrained(source)
        model.to(self.args.device)
        # eval() disables dropout so extracted features are deterministic.
        model.eval()
        self.bert = model

    def extract_sentence_bert(self, sents):
        """
        Extract the pooled BERT representation for a batch of sentences.

        :param sents: batch of tokenized sentences, each a sequence of
            integer vocabulary indices (list/sequence of sequences)
        :raises ValueError: if ``sents`` is empty
        :return: ``outs[1]`` of the BERT forward pass moved to CPU —
            presumably the pooled [CLS] vector, shape (batch, hidden);
            confirm against the installed BertModel version
        """
        # Fail with a clear message instead of the opaque ValueError that
        # max() would raise on an empty batch (same exception type, so
        # callers catching ValueError are unaffected).
        if not sents:
            raise ValueError("extract_sentence_bert: empty batch of sentences")
        max_indx_len = max(len(sent) for sent in sents)
        # All-zero segment ids: every sentence is a single segment (A).
        segments_tensor = (
            torch.zeros(len(sents), max_indx_len).long().to(self.args.device)
        )
        # Token-id matrix, right-padded with zeros to the longest sentence.
        tokens_tensor = (
            torch.zeros(len(sents), max_indx_len).long().to(self.args.device)
        )
        for i, indx in enumerate(sents):
            tokens_tensor[i][: len(indx)] = torch.LongTensor(indx)
        with torch.no_grad():
            outs = self.bert(tokens_tensor, token_type_ids=segments_tensor)
            # Move results off the device so callers can accumulate many
            # batches without exhausting GPU memory.
            return outs[1].to("cpu")
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



utils.py [218:252]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    def init_bert_model(self):
        """
        Initialize bert pretrained or finetuned model.

        Loads weights from ``self.args.bert_model``, or — when
        ``self.args.load_fine_tuned`` is set — from a local fine-tuned
        checkpoint directory, then moves the model to the configured
        device and puts it in eval mode.
        :return:
        """
        load_path = self.args.bert_model
        if self.args.load_fine_tuned:
            # Fine-tuned checkpoints are stored in a conventionally-named dir.
            load_path = "fine_tune_{}_{}/".format(
                self.args.data_name, self.args.trained_bert_suffix
            )
        self.logbook.write_message_logs("Loading bert from {}".format(load_path))
        self.bert = BertModel.from_pretrained(load_path)
        self.bert.to(self.args.device)
        # eval() disables dropout so feature extraction is deterministic.
        self.bert.eval()

    def extract_sentence_bert(self, sents):
        """
        Extract the pooled BERT representation for a batch of sentences.

        :param sents: batch of tokenized sentences, each a sequence of
            integer vocabulary indices (list/sequence of sequences)
        :raises ValueError: if ``sents`` is empty
        :return: ``outs[1]`` of the BERT forward pass moved to CPU —
            presumably the pooled [CLS] vector, shape (batch, hidden);
            confirm against the installed BertModel version
        """
        # Fail with a clear message instead of the opaque ValueError that
        # max() would raise on an empty batch (same exception type, so
        # callers catching ValueError are unaffected).
        if not sents:
            raise ValueError("extract_sentence_bert: empty batch of sentences")
        max_indx_len = max(len(sent) for sent in sents)
        # All-zero segment ids: every sentence is a single segment (A).
        segments_tensor = (
            torch.zeros(len(sents), max_indx_len).long().to(self.args.device)
        )
        # Token-id matrix, right-padded with zeros to the longest sentence.
        tokens_tensor = (
            torch.zeros(len(sents), max_indx_len).long().to(self.args.device)
        )
        for i, indx in enumerate(sents):
            tokens_tensor[i][: len(indx)] = torch.LongTensor(indx)
        with torch.no_grad():
            outs = self.bert(tokens_tensor, token_type_ids=segments_tensor)
            # Move results off the device so callers can accumulate many
            # batches without exhausting GPU memory.
            return outs[1].to("cpu")
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



