我在情绪分析和 POS 标记任务上微调了两个单独的 BERT 模型(bert-base-uncased)。现在,我想将 POS 标记器的输出(batch, seq_length, hidden_size)作为情绪模型的输入。原始的 bert-base-uncased 模型位于 “bertModel/” 文件夹中,其中包含 “model.bin” 和 “config.json”。这是我的代码:
class DeepSequentialModel(nn.Module):
    """Chain a fine-tuned POS-tagging BERT into a sentiment BERT, ending in a 1-logit head.

    Both sub-models are restored from saved state dicts and moved to *device*.
    """

    def __init__(self, sentiment_model_file, postag_model_file, device):
        super().__init__()
        # Restore each fine-tuned sub-model onto the target device.
        self.sentiment_model = SentimentModel().to(device)
        sentiment_state = torch.load(sentiment_model_file, map_location=device)
        self.sentiment_model.load_state_dict(sentiment_state)

        self.postag_model = PosTagModel().to(device)
        postag_state = torch.load(postag_model_file, map_location=device)
        self.postag_model.load_state_dict(postag_state)

        # Final binary classification head on the sentiment model's output.
        self.classificationLayer = nn.Linear(768, 1)

    def forward(self, seq, attn_masks):
        # POS tagger yields contextual token representations — assumed
        # (batch, seq_len, 768), TODO confirm — which the sentiment model
        # consumes in place of token ids.
        postag_context = self.postag_model(seq, attn_masks)
        sent_context = self.sentiment_model(postag_context, attn_masks)
        return self.classificationLayer(sent_context)
class PosTagModel(nn.Module):
    """BERT encoder fine-tuned for POS tagging.

    ``forward`` returns the contextual token representations only; the tag
    head below is kept so the saved state dict still loads.
    """

    def __init__(self):
        super().__init__()
        self.bert_layer = BertModel.from_pretrained('bertModel/')
        # 43-way tag head — unused in forward here, retained for checkpoint compatibility.
        self.classificationLayer = nn.Linear(768, 43)

    def forward(self, seq, attn_masks):
        # Keep only the per-token hidden states; the pooled output is discarded.
        cont_reps, _ = self.bert_layer(seq, attention_mask=attn_masks)
        return cont_reps
class SentimentModel(nn.Module):
    """BERT-based sentiment model that accepts precomputed embeddings instead of token ids.

    ``forward`` takes a (batch, seq_len, 768) tensor — e.g. another BERT's
    hidden states — and returns the representation at the [CLS] position.
    """

    def __init__(self):
        super().__init__()
        self.bert_layer = BertModel.from_pretrained('bertModel/')
        self.cls_layer = nn.Linear(768, 1)

    def forward(self, input, attn_masks):
        # BUG FIX: `encoder_hidden_states` is a cross-attention argument that only
        # exists when BERT is configured as a decoder, hence the
        # "unexpected keyword argument" TypeError. To feed precomputed
        # (batch, seq_len, hidden) representations into BERT, bypass the
        # embedding lookup with `inputs_embeds` and pass the mask as the
        # ordinary `attention_mask`.
        cont_reps, _ = self.bert_layer(inputs_embeds=input, attention_mask=attn_masks)
        # Summarize the sequence with the representation at position 0 ([CLS]).
        cls_rep = cont_reps[:, 0]
        return cls_rep
但我收到以下错误。如果有人可以帮助我,我将不胜感激。谢谢!
cont_reps, _ = self.bert_layer(encoder_hidden_states=input, encoder_attention_mask=attn_masks)
result = self.forward(*input, **kwargs)
TypeError: forward() got an unexpected keyword argument 'encoder_hidden_states'