Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

CI: avoid human error, automatically infer generative models #33212

Merged
merged 40 commits into from
Feb 13, 2025
Merged
Show file tree
Hide file tree
Changes from 36 commits
Commits
Show all changes
40 commits
Select commit Hold shift + click to select a range
5c650eb
tmp commit
gante Aug 30, 2024
dd75807
move tests to the right class
gante Aug 30, 2024
dfb8bae
remove ALL all_generative_model_classes = ...
gante Aug 30, 2024
eef5651
skip tf roberta
gante Aug 30, 2024
74e448e
skip InstructBlipForConditionalGenerationDecoderOnlyTest
gante Aug 30, 2024
1e8e794
videollava
gante Aug 30, 2024
bf7dc7f
reduce diff
gante Sep 20, 2024
8bfe7c8
reduce diff
gante Sep 20, 2024
0382898
remove on vlms
gante Sep 20, 2024
441756e
fix a few more
gante Sep 20, 2024
a77d1d9
Merge branch 'main' into run_all_generate_tests_all_times
gante Oct 7, 2024
944a715
Merge branch 'main' into run_all_generate_tests_all_times
gante Oct 25, 2024
ad80364
Merge branch 'main' into run_all_generate_tests_all_times
gante Jan 31, 2025
d83adcd
manual rebase bits
gante Jan 31, 2025
8331afd
more manual rebase
gante Jan 31, 2025
fec48e4
remove all manual generative model class test entries
gante Jan 31, 2025
92209fc
fix up to ernie
gante Jan 31, 2025
185348c
Merge branch 'main' into run_all_generate_tests_all_times
gante Feb 10, 2025
9559220
a few more removals
gante Feb 10, 2025
f7d8f0b
handle remaining cases
gante Feb 10, 2025
5491ad6
recurrent gemma
gante Feb 10, 2025
ffe0e9f
it's better here
gante Feb 10, 2025
ca73789
make fixup
gante Feb 10, 2025
5cf4c46
tf idefics is broken
gante Feb 11, 2025
78e81f0
tf bert + generate is broken
gante Feb 11, 2025
3445ac3
don't touch tf :()
gante Feb 11, 2025
2e86329
don't touch tf :(
gante Feb 11, 2025
17b654c
Merge branch 'main' into run_all_generate_tests_all_times
gante Feb 11, 2025
3879c91
Merge branch 'main' into run_all_generate_tests_all_times
gante Feb 12, 2025
1d2a574
Merge branch 'main' into run_all_generate_tests_all_times
gante Feb 12, 2025
0452e08
make fixup
gante Feb 12, 2025
d4b5f7d
better comments for test skips
gante Feb 13, 2025
36d61cd
revert tf changes
gante Feb 13, 2025
7fa987e
remove empty line removal
gante Feb 13, 2025
a3cad24
one more
gante Feb 13, 2025
2e39ac3
missing one
gante Feb 13, 2025
1ab449e
Merge branch 'main' into run_all_generate_tests_all_times
gante Feb 13, 2025
e07aff1
Merge branch 'main' into run_all_generate_tests_all_times
gante Feb 13, 2025
a223518
Merge branch 'main' into run_all_generate_tests_all_times
gante Feb 13, 2025
0a913a0
Merge branch 'main' into run_all_generate_tests_all_times
gante Feb 13, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions src/transformers/models/bert/modeling_bert.py
Original file line number Diff line number Diff line change
Expand Up @@ -1507,6 +1507,14 @@ def prepare_inputs_for_generation(self, input_ids, attention_mask=None, **model_

return {"input_ids": input_ids, "attention_mask": attention_mask}

@classmethod
def can_generate(cls) -> bool:
    """
    Legacy correction: `BertForMaskedLM` cannot generate text via `GenerationMixin.generate()`,
    so we explicitly opt out here. Drop this override after v4.50, once `PreTrainedModel`
    no longer inherits from `GenerationMixin`.
    """
    return False


@add_start_docstrings(
"""Bert Model with a `next sentence prediction (classification)` head on top.""",
Expand Down
8 changes: 8 additions & 0 deletions src/transformers/models/ernie/modeling_ernie.py
Original file line number Diff line number Diff line change
Expand Up @@ -1325,6 +1325,14 @@ def prepare_inputs_for_generation(self, input_ids, attention_mask=None, **model_

return {"input_ids": input_ids, "attention_mask": attention_mask}

@classmethod
def can_generate(cls) -> bool:
    """
    Legacy correction: `ErnieForMaskedLM` cannot generate text via `GenerationMixin.generate()`,
    so we explicitly opt out here. Drop this override after v4.50, once `PreTrainedModel`
    no longer inherits from `GenerationMixin`.
    """
    return False


@add_start_docstrings(
"""Ernie Model with a `next sentence prediction (classification)` head on top.""",
Expand Down
1 change: 0 additions & 1 deletion tests/generation/test_flax_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,6 @@ def random_attention_mask(shape, rng=None):
@require_flax
class FlaxGenerationTesterMixin:
model_tester = None
all_generative_model_classes = ()

def _get_input_ids_and_config(self):
config, inputs = self.model_tester.prepare_config_and_inputs_for_common()
Expand Down
5 changes: 1 addition & 4 deletions tests/generation/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
from packaging import version
from parameterized import parameterized

from transformers import AutoConfig, is_torch_available, pipeline
from transformers import AutoConfig, AutoProcessor, AutoTokenizer, is_torch_available, pipeline
from transformers.testing_utils import (
is_flaky,
require_accelerate,
Expand Down Expand Up @@ -61,8 +61,6 @@
AutoModelForSeq2SeqLM,
AutoModelForSpeechSeq2Seq,
AutoModelForVision2Seq,
AutoProcessor,
AutoTokenizer,
BartForConditionalGeneration,
BartTokenizer,
GPT2LMHeadModel,
Expand Down Expand Up @@ -119,7 +117,6 @@
class GenerationTesterMixin:
input_name = "input_ids"
model_tester = None
all_generative_model_classes = ()
max_new_tokens = 3

def prepare_config_and_inputs_for_generate(self, batch_size=2):
Expand Down
1 change: 0 additions & 1 deletion tests/models/aria/test_modeling_aria.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,7 +189,6 @@ class AriaForConditionalGenerationModelTest(ModelTesterMixin, GenerationTesterMi
"""

all_model_classes = (AriaForConditionalGeneration,) if is_torch_available() else ()
all_generative_model_classes = (AriaForConditionalGeneration,) if is_torch_available() else ()
test_pruning = False
test_head_masking = False
_is_composite = True
Expand Down
1 change: 0 additions & 1 deletion tests/models/autoformer/test_modeling_autoformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -205,7 +205,6 @@ def check_encoder_decoder_model_standalone(self, config, inputs_dict):
@require_torch
class AutoformerModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
all_model_classes = (AutoformerModel, AutoformerForPrediction) if is_torch_available() else ()
all_generative_model_classes = (AutoformerForPrediction,) if is_torch_available() else ()
pipeline_model_mapping = {"feature-extraction": AutoformerModel} if is_torch_available() else {}
test_pruning = False
test_head_masking = False
Expand Down
10 changes: 1 addition & 9 deletions tests/models/bamba/test_modeling_bamba.py
Original file line number Diff line number Diff line change
Expand Up @@ -257,15 +257,7 @@ def create_and_check_decoder_model_past_large_inputs(

@require_torch
class BambaModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
all_model_classes = (
(
BambaModel,
BambaForCausalLM,
)
if is_torch_available()
else ()
)
all_generative_model_classes = (BambaForCausalLM,) if is_torch_available() else ()
all_model_classes = (BambaModel, BambaForCausalLM) if is_torch_available() else ()
pipeline_model_mapping = (
{
"feature-extraction": BambaModel,
Expand Down
7 changes: 5 additions & 2 deletions tests/models/bark/test_modeling_bark.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import pytest

from transformers import (
BarkCausalModel,
BarkCoarseConfig,
BarkConfig,
BarkFineConfig,
Expand Down Expand Up @@ -53,7 +54,6 @@
import torch

from transformers import (
BarkCausalModel,
BarkCoarseModel,
BarkFineModel,
BarkModel,
Expand Down Expand Up @@ -527,6 +527,8 @@ def get_pipeline_config(self):
@require_torch
class BarkSemanticModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
all_model_classes = (BarkSemanticModel,) if is_torch_available() else ()
# `BarkSemanticModel` inherits from `BarkCausalModel`, but requires an advanced generation config.
# `BarkCausalModel` does not, so we run generation tests there.
all_generative_model_classes = (BarkCausalModel,) if is_torch_available() else ()

is_encoder_decoder = False
Expand Down Expand Up @@ -614,8 +616,9 @@ def test_generate_fp16(self):

@require_torch
class BarkCoarseModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
# Same tester as BarkSemanticModelTest, except for model_class and config_class
all_model_classes = (BarkCoarseModel,) if is_torch_available() else ()
# `BarkCoarseModel` inherits from `BarkCausalModel`, but requires an advanced generation config.
# `BarkCausalModel` does not, so we run generation tests there.
all_generative_model_classes = (BarkCausalModel,) if is_torch_available() else ()

is_encoder_decoder = False
Expand Down
2 changes: 0 additions & 2 deletions tests/models/bart/test_modeling_bart.py
Original file line number Diff line number Diff line change
Expand Up @@ -419,7 +419,6 @@ class BartModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin
if is_torch_available()
else ()
)
all_generative_model_classes = (BartForConditionalGeneration,) if is_torch_available() else ()
pipeline_model_mapping = (
{
"feature-extraction": BartModel,
Expand Down Expand Up @@ -1502,7 +1501,6 @@ def prepare_config_and_inputs_for_common(self):
@require_torch
class BartStandaloneDecoderModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
all_model_classes = (BartDecoder, BartForCausalLM) if is_torch_available() else ()
all_generative_model_classes = (BartForCausalLM,) if is_torch_available() else ()
fx_compatible = True
test_pruning = False
is_encoder_decoder = False
Expand Down
1 change: 0 additions & 1 deletion tests/models/bart/test_modeling_flax_bart.py
Original file line number Diff line number Diff line change
Expand Up @@ -336,7 +336,6 @@ class FlaxBartModelTest(FlaxModelTesterMixin, unittest.TestCase, FlaxGenerationT
if is_flax_available()
else ()
)
all_generative_model_classes = (FlaxBartForConditionalGeneration,) if is_flax_available() else ()

def setUp(self):
self.model_tester = FlaxBartModelTester(self)
Expand Down
1 change: 0 additions & 1 deletion tests/models/bert/test_modeling_bert.py
Original file line number Diff line number Diff line change
Expand Up @@ -451,7 +451,6 @@ class BertModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin
if is_torch_available()
else ()
)
all_generative_model_classes = (BertLMHeadModel,) if is_torch_available() else ()
pipeline_model_mapping = (
{
"feature-extraction": BertModel,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -243,7 +243,6 @@ def prepare_config_and_inputs_for_common(self):
@require_torch
class BertGenerationEncoderTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
all_model_classes = (BertGenerationEncoder, BertGenerationDecoder) if is_torch_available() else ()
all_generative_model_classes = (BertGenerationDecoder,) if is_torch_available() else ()
pipeline_model_mapping = (
{"feature-extraction": BertGenerationEncoder, "text-generation": BertGenerationDecoder}
if is_torch_available()
Expand Down
1 change: 0 additions & 1 deletion tests/models/big_bird/test_modeling_big_bird.py
Original file line number Diff line number Diff line change
Expand Up @@ -451,7 +451,6 @@ class BigBirdModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase)
if is_torch_available()
else ()
)
all_generative_model_classes = (BigBirdForCausalLM,) if is_torch_available() else ()
pipeline_model_mapping = (
{
"feature-extraction": BigBirdModel,
Expand Down
2 changes: 0 additions & 2 deletions tests/models/bigbird_pegasus/test_modeling_bigbird_pegasus.py
Original file line number Diff line number Diff line change
Expand Up @@ -250,7 +250,6 @@ class BigBirdPegasusModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineT
if is_torch_available()
else ()
)
all_generative_model_classes = (BigBirdPegasusForConditionalGeneration,) if is_torch_available() else ()
pipeline_model_mapping = (
{
"feature-extraction": BigBirdPegasusModel,
Expand Down Expand Up @@ -792,7 +791,6 @@ def prepare_config_and_inputs_for_common(self):
@require_torch
class BigBirdPegasusStandaloneDecoderModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
all_model_classes = (BigBirdPegasusDecoder, BigBirdPegasusForCausalLM) if is_torch_available() else ()
all_generative_model_classes = (BigBirdPegasusForCausalLM,) if is_torch_available() else ()
test_pruning = False
is_encoder_decoder = False

Expand Down
1 change: 0 additions & 1 deletion tests/models/biogpt/test_modeling_biogpt.py
Original file line number Diff line number Diff line change
Expand Up @@ -284,7 +284,6 @@ class BioGptModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMix
if is_torch_available()
else ()
)
all_generative_model_classes = (BioGptForCausalLM,) if is_torch_available() else ()
pipeline_model_mapping = (
{
"feature-extraction": BioGptModel,
Expand Down
2 changes: 0 additions & 2 deletions tests/models/blenderbot/test_modeling_blenderbot.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,7 +226,6 @@ def check_encoder_decoder_model_standalone(self, config, inputs_dict):
@require_torch
class BlenderbotModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
all_model_classes = (BlenderbotModel, BlenderbotForConditionalGeneration) if is_torch_available() else ()
all_generative_model_classes = (BlenderbotForConditionalGeneration,) if is_torch_available() else ()
pipeline_model_mapping = (
{
"feature-extraction": BlenderbotModel,
Expand Down Expand Up @@ -533,7 +532,6 @@ def prepare_config_and_inputs_for_common(self):
@require_torch
class BlenderbotStandaloneDecoderModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
all_model_classes = (BlenderbotDecoder, BlenderbotForCausalLM) if is_torch_available() else ()
all_generative_model_classes = (BlenderbotForCausalLM,) if is_torch_available() else ()
test_pruning = False
is_encoder_decoder = False

Expand Down
1 change: 0 additions & 1 deletion tests/models/blenderbot/test_modeling_flax_blenderbot.py
Original file line number Diff line number Diff line change
Expand Up @@ -319,7 +319,6 @@ class FlaxBlenderbotModelTest(FlaxModelTesterMixin, unittest.TestCase, FlaxGener
if is_flax_available()
else ()
)
all_generative_model_classes = (FlaxBlenderbotForConditionalGeneration,) if is_flax_available() else ()

def setUp(self):
self.model_tester = FlaxBlenderbotModelTester(self)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -217,7 +217,6 @@ def check_encoder_decoder_model_standalone(self, config, inputs_dict):
@require_torch
class BlenderbotSmallModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
all_model_classes = (BlenderbotSmallModel, BlenderbotSmallForConditionalGeneration) if is_torch_available() else ()
all_generative_model_classes = (BlenderbotSmallForConditionalGeneration,) if is_torch_available() else ()
pipeline_model_mapping = (
{
"feature-extraction": BlenderbotSmallModel,
Expand Down Expand Up @@ -542,7 +541,6 @@ def prepare_config_and_inputs_for_common(self):
@require_torch
class BlenderbotSmallStandaloneDecoderModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
all_model_classes = (BlenderbotSmallDecoder, BlenderbotSmallForCausalLM) if is_torch_available() else ()
all_generative_model_classes = (BlenderbotSmallForCausalLM,) if is_torch_available() else ()
test_pruning = False
is_encoder_decoder = False

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -318,7 +318,6 @@ class FlaxBlenderbotSmallModelTest(FlaxModelTesterMixin, unittest.TestCase, Flax
if is_flax_available()
else ()
)
all_generative_model_classes = (FlaxBlenderbotSmallForConditionalGeneration,) if is_flax_available() else ()

def is_pipeline_test_to_skip(
self,
Expand Down
3 changes: 2 additions & 1 deletion tests/models/blip_2/test_modeling_blip_2.py
Original file line number Diff line number Diff line change
Expand Up @@ -472,7 +472,6 @@ def prepare_config_and_inputs_for_common(self):
@require_torch
class Blip2ForConditionalGenerationDecoderOnlyTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
all_model_classes = (Blip2ForConditionalGeneration,) if is_torch_available() else ()
all_generative_model_classes = (Blip2ForConditionalGeneration,) if is_torch_available() else ()
fx_compatible = False
test_head_masking = False
test_pruning = False
Expand Down Expand Up @@ -995,6 +994,8 @@ def prepare_config_and_inputs_for_common(self):
@require_torch
class Blip2ModelTest(ModelTesterMixin, PipelineTesterMixin, GenerationTesterMixin, unittest.TestCase):
all_model_classes = (Blip2ForConditionalGeneration, Blip2Model) if is_torch_available() else ()
# Doesn't run generation tests. TODO: fix generation tests for Blip2ForConditionalGeneration
all_generative_model_classes = ()
Comment on lines +997 to +998
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think this was working on main for Blip2ForConditionalGeneration, are there many failures?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If we remove this line (= if we don't skip the tests), py.test tests/models/blip_2/test_modeling_blip_2.py rebased on main results in 21 failures :P

I've also double-checked the other models with skips on Monday. Most of them have unique model properties that do not work well with generate

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

BTW, note that there are two testers (Blip2ForConditionalGenerationDecoderOnlyTest and Blip2ModelTest), the skips are only on the latter. I don't know why the latter needs to skip, but that's beyond the scope of this PR :P

They were also being skipped before.

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

On main, Blip2ModelTest doesn't have all_generative_model_classes, which means it doesn't run generate tests, and this PR doesn't skip any extra tests for this test class

(I don't know why however)

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I see, thanks! I believe something similar for audio model since for me the skip comments weren't very clear

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

yeah, that's why I ask frequently more (detailed) comments in many PRs I reviewed 😆

pipeline_model_mapping = (
{
"feature-extraction": Blip2Model,
Expand Down
1 change: 0 additions & 1 deletion tests/models/bloom/test_modeling_bloom.py
Original file line number Diff line number Diff line change
Expand Up @@ -328,7 +328,6 @@ class BloomModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixi
else ()
)

all_generative_model_classes = (BloomForCausalLM,) if is_torch_available() else ()
pipeline_model_mapping = (
{
"feature-extraction": BloomModel,
Expand Down
2 changes: 0 additions & 2 deletions tests/models/bloom/test_modeling_flax_bloom.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,6 @@ def check_use_cache_forward_with_attn_mask(self, model_class_name, config, input
@require_flax
class FlaxBloomModelTest(FlaxModelTesterMixin, unittest.TestCase, FlaxGenerationTesterMixin):
all_model_classes = (FlaxBloomModel, FlaxBloomForCausalLM) if is_flax_available() else ()
all_generative_model_classes = () if is_flax_available() else ()

def setUp(self):
self.model_tester = FlaxBloomModelTester(self)
Expand Down Expand Up @@ -199,7 +198,6 @@ def test_model_from_pretrained(self):
@require_flax
class FlaxBloomGenerationTest(unittest.TestCase):
all_model_classes = (FlaxBloomForCausalLM,) if is_flax_available() else ()
all_generative_model_classes = () if is_flax_available() else ()

def setUp(self):
self.model_id = "bigscience/bloom-560m"
Expand Down
1 change: 0 additions & 1 deletion tests/models/bros/test_modeling_bros.py
Original file line number Diff line number Diff line change
Expand Up @@ -285,7 +285,6 @@ class BrosModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
if is_torch_available()
else ()
)
all_generative_model_classes = () if is_torch_available() else ()
pipeline_model_mapping = (
{"feature-extraction": BrosModel, "token-classification": BrosForTokenClassification}
if is_torch_available()
Expand Down
1 change: 0 additions & 1 deletion tests/models/chameleon/test_modeling_chameleon.py
Original file line number Diff line number Diff line change
Expand Up @@ -271,7 +271,6 @@ def prepare_config_and_inputs_for_common(self):
@require_torch
class ChameleonModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
all_model_classes = (ChameleonModel, ChameleonForConditionalGeneration) if is_torch_available() else ()
all_generative_model_classes = (ChameleonForConditionalGeneration,) if is_torch_available() else ()
pipeline_model_mapping = (
{
"feature-extraction": ChameleonModel,
Expand Down
1 change: 0 additions & 1 deletion tests/models/clvp/test_modeling_clvp.py
Original file line number Diff line number Diff line change
Expand Up @@ -281,7 +281,6 @@ def prepare_config_and_inputs_for_common(self):
@require_torch
class ClvpDecoderTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
all_model_classes = (ClvpModel, ClvpForCausalLM) if is_torch_available() else ()
all_generative_model_classes = (ClvpForCausalLM,) if is_torch_available() else ()
pipeline_model_mapping = {"feature-extraction": ClvpModelForConditionalGeneration} if is_torch_available() else {}

test_pruning = False
Expand Down
1 change: 0 additions & 1 deletion tests/models/codegen/test_modeling_codegen.py
Original file line number Diff line number Diff line change
Expand Up @@ -322,7 +322,6 @@ def prepare_config_and_inputs_for_common(self):
@require_torch
class CodeGenModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
all_model_classes = (CodeGenModel, CodeGenForCausalLM) if is_torch_available() else ()
all_generative_model_classes = (CodeGenForCausalLM,) if is_torch_available() else ()
pipeline_model_mapping = (
{"feature-extraction": CodeGenModel, "text-generation": CodeGenForCausalLM} if is_torch_available() else {}
)
Expand Down
1 change: 0 additions & 1 deletion tests/models/cohere/test_modeling_cohere.py
Original file line number Diff line number Diff line change
Expand Up @@ -274,7 +274,6 @@ def prepare_config_and_inputs_for_common(self):
@require_torch
class CohereModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
all_model_classes = (CohereModel, CohereForCausalLM) if is_torch_available() else ()
all_generative_model_classes = (CohereForCausalLM,) if is_torch_available() else ()
pipeline_model_mapping = (
{
"feature-extraction": CohereModel,
Expand Down
1 change: 0 additions & 1 deletion tests/models/cohere2/test_modeling_cohere2.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,6 @@ class Cohere2ModelTester(CohereModelTester):
@require_torch
class Cohere2ModelTest(CohereModelTest, unittest.TestCase):
all_model_classes = (Cohere2Model, Cohere2ForCausalLM) if is_torch_available() else ()
all_generative_model_classes = (Cohere2ForCausalLM,) if is_torch_available() else ()
pipeline_model_mapping = (
{
"feature-extraction": Cohere2Model,
Expand Down
1 change: 0 additions & 1 deletion tests/models/ctrl/test_modeling_ctrl.py
Original file line number Diff line number Diff line change
Expand Up @@ -193,7 +193,6 @@ def create_and_check_ctrl_for_sequence_classification(self, config, input_ids, h
@require_torch
class CTRLModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
all_model_classes = (CTRLModel, CTRLLMHeadModel, CTRLForSequenceClassification) if is_torch_available() else ()
all_generative_model_classes = (CTRLLMHeadModel,) if is_torch_available() else ()
pipeline_model_mapping = (
{
"feature-extraction": CTRLModel,
Expand Down
1 change: 0 additions & 1 deletion tests/models/data2vec/test_modeling_data2vec_text.py
Original file line number Diff line number Diff line change
Expand Up @@ -373,7 +373,6 @@ class Data2VecTextModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTes
if is_torch_available()
else ()
)
all_generative_model_classes = (Data2VecTextForCausalLM,) if is_torch_available() else ()
pipeline_model_mapping = (
{
"feature-extraction": Data2VecTextModel,
Expand Down
1 change: 0 additions & 1 deletion tests/models/dbrx/test_modeling_dbrx.py
Original file line number Diff line number Diff line change
Expand Up @@ -322,7 +322,6 @@ def prepare_config_and_inputs_for_common(self):
@require_torch
class DbrxModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
all_model_classes = (DbrxModel, DbrxForCausalLM) if is_torch_available() else ()
all_generative_model_classes = (DbrxForCausalLM,) if is_torch_available() else ()
pipeline_model_mapping = {"text-generation": DbrxForCausalLM} if is_torch_available() else {}
test_headmasking = False
test_pruning = False
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,6 @@ def prepare_config_and_inputs_for_common(self):
@require_torch
class DecisionTransformerModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
all_model_classes = (DecisionTransformerModel,) if is_torch_available() else ()
all_generative_model_classes = ()
pipeline_model_mapping = {"feature-extraction": DecisionTransformerModel} if is_torch_available() else {}

# Ignoring of a failing test from GenerationTesterMixin, as the model does not use inputs_ids
Expand Down
1 change: 0 additions & 1 deletion tests/models/diffllama/test_modeling_diffllama.py
Original file line number Diff line number Diff line change
Expand Up @@ -296,7 +296,6 @@ class DiffLlamaModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTester
if is_torch_available()
else ()
)
all_generative_model_classes = (DiffLlamaForCausalLM,) if is_torch_available() else ()
pipeline_model_mapping = (
{
"feature-extraction": DiffLlamaModel,
Expand Down
2 changes: 0 additions & 2 deletions tests/models/emu3/test_modeling_emu3.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,6 @@ def prepare_config_and_inputs_for_common(self):
@require_torch
class Emu3Text2TextModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
all_model_classes = (Emu3ForCausalLM,) if is_torch_available() else ()
all_generative_model_classes = (Emu3ForCausalLM,) if is_torch_available() else ()
pipeline_model_mapping = (
{
"text-generation": Emu3ForCausalLM,
Expand Down Expand Up @@ -312,7 +311,6 @@ def prepare_config_and_inputs_for_common(self):
@require_torch
class Emu3Vision2TextModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
all_model_classes = (Emu3ForConditionalGeneration,) if is_torch_available() else ()
all_generative_model_classes = (Emu3ForConditionalGeneration,) if is_torch_available() else ()
pipeline_model_mapping = {}
test_headmasking = False
test_pruning = False
Expand Down
Loading