Skip to content

Commit

Permalink
[test_all] Tests should all pass
Browse files — browse the repository at this point in the history
  • Loading branch information
LysandreJik committed Mar 14, 2024
1 parent fd19ec2 commit 9e70d94
Show file tree
Hide file tree
Showing 3 changed files with 3 additions and 4 deletions.
Original file line number | Diff line number | Diff line change
Expand Up @@ -105,6 +105,8 @@ class MobileBertConfig(PretrainedConfig):
```
"""

model_type = "mobilebert"

def __init__(
self,
vocab_size=30522,
Expand Down
3 changes: 0 additions & 3 deletions src/transformers/utils/dummy_pt_objects.py
Original file line number | Diff line number | Diff line change
Expand Up @@ -6734,9 +6734,6 @@ def __init__(self, *args, **kwargs):
requires_backends(self, ["torch"])


PVT_V2_PRETRAINED_MODEL_ARCHIVE_LIST = None


class PvtV2Backbone(metaclass=DummyObject):
_backends = ["torch"]

Expand Down
2 changes: 1 addition & 1 deletion tests/models/t5/test_tokenization_t5.py
Original file line number | Diff line number | Diff line change
Expand Up @@ -227,7 +227,7 @@ def test_outputs_not_longer_than_maxlen(self):
# Since T5 does NOT have a max input length,
# this test should be changed to the following in Transformers v5:
# self.assertEqual(batch.input_ids.shape, (2, 8001))
self.assertEqual(batch.input_ids.shape, (2, 512))
self.assertEqual(batch.input_ids.shape, (2, 8001))

def test_eos_in_input(self):
tokenizer = self.t5_base_tokenizer
Expand Down

0 comments on commit 9e70d94

Please sign in to comment.