in pytorch_alternatives/custom_pytorch_nlp/src/main.py
def forward(self, x):
    # x: (batch, seq_len) token indices -> (batch, seq_len, embed_dim)
    x = self.embedding(x)
    # Conv1d expects channels first: (batch, embed_dim, seq_len)
    x = torch.transpose(x, 1, 2)
    # Convolve, max-pool, then flatten to (batch, features)
    x = self.flatten1(self.max_pool1d(self.conv1(x)))
    x = self.dropout1(x)
    x = F.relu(self.fc1(x))
    x = self.fc2(x)
    # Class probabilities over the last dimension
    return F.softmax(x, dim=-1)
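For context, here is a minimal sketch of the constructor this forward pass assumes. The class name and every dimension (vocab_size, embed_dim, num_filters, kernel_size, seq_len, num_classes, dropout rate) are illustrative placeholders, not values taken from the repository; only the layer names match those used in forward().

import torch
import torch.nn as nn
import torch.nn.functional as F

class TextCNN(nn.Module):
    # Hypothetical constructor; all sizes below are assumptions for illustration.
    def __init__(self, vocab_size=10000, embed_dim=128, num_filters=64,
                 kernel_size=3, seq_len=100, num_classes=4):
        super().__init__()
        self.embedding = nn.Embedding(vocab_size, embed_dim)
        # Conv1d consumes (batch, channels, seq_len), hence the transpose in forward()
        self.conv1 = nn.Conv1d(embed_dim, num_filters, kernel_size)
        self.max_pool1d = nn.MaxPool1d(kernel_size=2)
        self.flatten1 = nn.Flatten()
        self.dropout1 = nn.Dropout(0.3)
        # Flattened feature size: num_filters * floor((seq_len - kernel_size + 1) / 2)
        pooled_len = (seq_len - kernel_size + 1) // 2
        self.fc1 = nn.Linear(num_filters * pooled_len, 128)
        self.fc2 = nn.Linear(128, num_classes)

Note that because forward() ends with F.softmax, the model returns probabilities rather than raw logits. If this model were trained with nn.CrossEntropyLoss, which applies log-softmax internally and expects logits, the softmax would typically be dropped from forward(); it is kept here to match the original code.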