diff --git a/classifier.py b/classifier.py index 570f34b..38bc7a5 100644 --- a/classifier.py +++ b/classifier.py @@ -59,6 +59,7 @@ def forward(self, input_ids, attention_mask): # HINT: you should consider what is the appropriate output to return given that # the training loop currently uses F.cross_entropy as the loss function. # Cross entropy already has a softmax therefore this should be okay + # No dropout here: this is the final layer before the softmax, and applying dropout to the logits would degrade performance result = self.bert(input_ids, attention_mask) return self.linear_layer(result['pooler_output'])