Skip to content

Commit

Permalink
fix: Updated test_embeded_special_tokens for luke and mluke models (#…
Browse files Browse the repository at this point in the history
…32413)

Fixed tokenizer tests for luke, mluke models.
  • Loading branch information
Sai-Suraj-27 authored and nbroad1881 committed Aug 7, 2024
1 parent 607bdd1 commit 2f92db4
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 6 deletions.
4 changes: 1 addition & 3 deletions tests/models/luke/test_tokenization_luke.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,11 +146,9 @@ def test_embeded_special_tokens(self):
# token_type_ids should put 0 everywhere
self.assertEqual(sum(tokens_r["token_type_ids"]), sum(tokens_p["token_type_ids"]))

# token_type_ids should put 0 everywhere
self.assertEqual(sum(tokens_r["token_type_ids"]), sum(tokens_p["token_type_ids"]))

# attention_mask should put 1 everywhere, so sum over length should be 1
self.assertEqual(
sum(tokens_r["attention_mask"]) / len(tokens_r["attention_mask"]),
sum(tokens_p["attention_mask"]) / len(tokens_p["attention_mask"]),
)

Expand Down
4 changes: 1 addition & 3 deletions tests/models/mluke/test_tokenization_mluke.py
Original file line number Diff line number Diff line change
Expand Up @@ -109,11 +109,9 @@ def test_embeded_special_tokens(self):
# token_type_ids should put 0 everywhere
self.assertEqual(sum(tokens_r["token_type_ids"]), sum(tokens_p["token_type_ids"]))

# token_type_ids should put 0 everywhere
self.assertEqual(sum(tokens_r["token_type_ids"]), sum(tokens_p["token_type_ids"]))

# attention_mask should put 1 everywhere, so sum over length should be 1
self.assertEqual(
sum(tokens_r["attention_mask"]) / len(tokens_r["attention_mask"]),
sum(tokens_p["attention_mask"]) / len(tokens_p["attention_mask"]),
)

Expand Down

0 comments on commit 2f92db4

Please sign in to comment.