Skip to content

Commit

Permalink
Mark MobileNetV1ModelTest::test_batching_equivalence as flaky (#31258)
Browse files Browse the repository at this point in the history
* Mark MobileNetV1ModelTest::test_batching_equivalence as flaky

* Add link to issue

* woops
  • Loading branch information
amyeroberts authored Jun 6, 2024
1 parent 6811839 commit c53fcd8
Showing 1 changed file with 5 additions and 1 deletion.
6 changes: 5 additions & 1 deletion tests/models/mobilenet_v1/test_modeling_mobilenet_v1.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
import unittest

from transformers import MobileNetV1Config
from transformers.testing_utils import require_torch, require_vision, slow, torch_device
from transformers.testing_utils import is_flaky, require_torch, require_vision, slow, torch_device
from transformers.utils import cached_property, is_torch_available, is_vision_available

from ...test_configuration_common import ConfigTester
Expand Down Expand Up @@ -214,6 +214,10 @@ def test_model_from_pretrained(self):
model = MobileNetV1Model.from_pretrained(model_name)
self.assertIsNotNone(model)

# Fix: the PR link in the flaky marker was mangled by a proxy rewrite
# ("github.com" is not the canonical host); point it at the real
# huggingface/transformers PR and give the marker a meaningful description.
@is_flaky(description="flaky batching equivalence, see https://github.com/huggingface/transformers/pull/31258")
def test_batching_equivalence(self):
    """Delegate to the shared batching-equivalence test from the common tester.

    Marked flaky for MobileNetV1 (occasional numerical mismatches between
    batched and unbatched forward passes); tracked in transformers PR #31258.
    """
    super().test_batching_equivalence()


# We will verify our results on an image of cute cats
def prepare_img():
Expand Down

0 comments on commit c53fcd8

Please sign in to comment.