CI: avoid human error, automatically infer generative models (#33212)
* tmp commit

* move tests to the right class

* remove ALL all_generative_model_classes = ...

* skip tf roberta

* skip InstructBlipForConditionalGenerationDecoderOnlyTest

* videollava

* reduce diff

* reduce diff

* remove  on vlms

* fix a few more

* manual rebase bits

* more manual rebase

* remove all manual generative model class test entries

* fix up to ernie

* a few more removals

* handle remaining cases

* recurrent gemma

* it's better here

* make fixup

* tf idefics is broken

* tf bert + generate is broken

* don't touch tf :()

* don't touch tf :(

* make fixup

* better comments for test skips

* revert tf changes

* remove empty line removal

* one more

* missing one
gante authored Feb 13, 2025
1 parent 06231fd commit 62c7ea0
Showing 162 changed files with 50 additions and 198 deletions.
8 changes: 8 additions & 0 deletions src/transformers/models/bert/modeling_bert.py
@@ -1507,6 +1507,14 @@ def prepare_inputs_for_generation(self, input_ids, attention_mask=None, **model_
 
         return {"input_ids": input_ids, "attention_mask": attention_mask}
 
+    @classmethod
+    def can_generate(cls) -> bool:
+        """
+        Legacy correction: BertForMaskedLM can't call `generate()` from GenerationMixin.
+        Remove after v4.50, when we stop making `PreTrainedModel` inherit from `GenerationMixin`.
+        """
+        return False
+
 
 @add_start_docstrings(
     """Bert Model with a `next sentence prediction (classification)` head on top.""",
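The override above is what the new automatic inference keys on. As a hedged illustration (not part of the diff), the effect is observable directly, since `can_generate()` is a classmethod and needs no checkpoint to be loaded:

    from transformers import BertForMaskedLM, BertLMHeadModel

    # Masked LM heads opt out of generation tests after this commit...
    print(BertForMaskedLM.can_generate())   # False
    # ...while causal LM heads are still picked up automatically.
    print(BertLMHeadModel.can_generate())   # True
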
8 changes: 8 additions & 0 deletions src/transformers/models/ernie/modeling_ernie.py
@@ -1325,6 +1325,14 @@ def prepare_inputs_for_generation(self, input_ids, attention_mask=None, **model_
 
         return {"input_ids": input_ids, "attention_mask": attention_mask}
 
+    @classmethod
+    def can_generate(cls) -> bool:
+        """
+        Legacy correction: ErnieForMaskedLM can't call `generate()` from GenerationMixin.
+        Remove after v4.50, when we stop making `PreTrainedModel` inherit from `GenerationMixin`.
+        """
+        return False
+
 
 @add_start_docstrings(
     """Ernie Model with a `next sentence prediction (classification)` head on top.""",
1 change: 0 additions & 1 deletion tests/generation/test_flax_utils.py
@@ -66,7 +66,6 @@ def random_attention_mask(shape, rng=None):
 @require_flax
 class FlaxGenerationTesterMixin:
     model_tester = None
-    all_generative_model_classes = ()
 
     def _get_input_ids_and_config(self):
         config, inputs = self.model_tester.prepare_config_and_inputs_for_common()
5 changes: 1 addition & 4 deletions tests/generation/test_utils.py
@@ -28,7 +28,7 @@
 from packaging import version
 from parameterized import parameterized
 
-from transformers import AutoConfig, is_torch_available, pipeline
+from transformers import AutoConfig, AutoProcessor, AutoTokenizer, is_torch_available, pipeline
 from transformers.testing_utils import (
     is_flaky,
     require_accelerate,
@@ -61,8 +61,6 @@
     AutoModelForSeq2SeqLM,
     AutoModelForSpeechSeq2Seq,
     AutoModelForVision2Seq,
-    AutoProcessor,
-    AutoTokenizer,
     BartForConditionalGeneration,
     BartTokenizer,
     GPT2LMHeadModel,
@@ -119,7 +117,6 @@
 class GenerationTesterMixin:
     input_name = "input_ids"
     model_tester = None
-    all_generative_model_classes = ()
     max_new_tokens = 3
 
     def prepare_config_and_inputs_for_generate(self, batch_size=2):
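With the hand-written attribute gone from `GenerationTesterMixin`, the generative classes have to be inferred. A minimal sketch of the inference, assuming a property of the same name replaces the old attribute (the commit's actual implementation may differ in detail):

    class GenerationTesterMixin:
        all_model_classes = ()  # in practice supplied by each concrete test class

        @property
        def all_generative_model_classes(self):
            # A model class is tested for generation iff it reports it can generate;
            # e.g. BertForMaskedLM.can_generate() -> False drops it automatically.
            return tuple(cls for cls in self.all_model_classes if cls.can_generate())

Because a plain class attribute in a subclass shadows a base-class property, test classes with unusual setups (see Bark and Blip2 below) can still assign `all_generative_model_classes` explicitly to override the inferred default.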
1 change: 0 additions & 1 deletion tests/models/aria/test_modeling_aria.py
@@ -189,7 +189,6 @@ class AriaForConditionalGenerationModelTest(ModelTesterMixin, GenerationTesterMi
     """
 
     all_model_classes = (AriaForConditionalGeneration,) if is_torch_available() else ()
-    all_generative_model_classes = (AriaForConditionalGeneration,) if is_torch_available() else ()
     test_pruning = False
     test_head_masking = False
     _is_composite = True
1 change: 0 additions & 1 deletion tests/models/autoformer/test_modeling_autoformer.py
@@ -205,7 +205,6 @@ def check_encoder_decoder_model_standalone(self, config, inputs_dict):
 @require_torch
 class AutoformerModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
     all_model_classes = (AutoformerModel, AutoformerForPrediction) if is_torch_available() else ()
-    all_generative_model_classes = (AutoformerForPrediction,) if is_torch_available() else ()
     pipeline_model_mapping = {"feature-extraction": AutoformerModel} if is_torch_available() else {}
     test_pruning = False
     test_head_masking = False
10 changes: 1 addition & 9 deletions tests/models/bamba/test_modeling_bamba.py
@@ -257,15 +257,7 @@ def create_and_check_decoder_model_past_large_inputs(
 
 @require_torch
 class BambaModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
-    all_model_classes = (
-        (
-            BambaModel,
-            BambaForCausalLM,
-        )
-        if is_torch_available()
-        else ()
-    )
-    all_generative_model_classes = (BambaForCausalLM,) if is_torch_available() else ()
+    all_model_classes = (BambaModel, BambaForCausalLM) if is_torch_available() else ()
     pipeline_model_mapping = (
         {
             "feature-extraction": BambaModel,
7 changes: 5 additions & 2 deletions tests/models/bark/test_modeling_bark.py
@@ -22,6 +22,7 @@
 import pytest
 
 from transformers import (
+    BarkCausalModel,
     BarkCoarseConfig,
     BarkConfig,
     BarkFineConfig,
@@ -53,7 +54,6 @@
     import torch
 
     from transformers import (
-        BarkCausalModel,
         BarkCoarseModel,
         BarkFineModel,
         BarkModel,
@@ -527,6 +527,8 @@ def get_pipeline_config(self):
 @require_torch
 class BarkSemanticModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
     all_model_classes = (BarkSemanticModel,) if is_torch_available() else ()
+    # `BarkSemanticModel` inherits from `BarkCausalModel`, but requires an advanced generation config.
+    # `BarkCausalModel` does not, so we run generation tests there.
     all_generative_model_classes = (BarkCausalModel,) if is_torch_available() else ()
 
     is_encoder_decoder = False
@@ -614,8 +616,9 @@ def test_generate_fp16(self):
 
 @require_torch
 class BarkCoarseModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
-    # Same tester as BarkSemanticModelTest, except for model_class and config_class
     all_model_classes = (BarkCoarseModel,) if is_torch_available() else ()
+    # `BarkCoarseModel` inherits from `BarkCausalModel`, but requires an advanced generation config.
+    # `BarkCausalModel` does not, so we run generation tests there.
     all_generative_model_classes = (BarkCausalModel,) if is_torch_available() else ()
 
     is_encoder_decoder = False
2 changes: 0 additions & 2 deletions tests/models/bart/test_modeling_bart.py
@@ -419,7 +419,6 @@ class BartModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin
         if is_torch_available()
         else ()
     )
-    all_generative_model_classes = (BartForConditionalGeneration,) if is_torch_available() else ()
     pipeline_model_mapping = (
         {
             "feature-extraction": BartModel,
@@ -1502,7 +1501,6 @@ def prepare_config_and_inputs_for_common(self):
 @require_torch
 class BartStandaloneDecoderModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
     all_model_classes = (BartDecoder, BartForCausalLM) if is_torch_available() else ()
-    all_generative_model_classes = (BartForCausalLM,) if is_torch_available() else ()
     fx_comptatible = True
     test_pruning = False
     is_encoder_decoder = False
1 change: 0 additions & 1 deletion tests/models/bart/test_modeling_flax_bart.py
@@ -336,7 +336,6 @@ class FlaxBartModelTest(FlaxModelTesterMixin, unittest.TestCase, FlaxGenerationT
         if is_flax_available()
         else ()
     )
-    all_generative_model_classes = (FlaxBartForConditionalGeneration,) if is_flax_available() else ()
 
     def setUp(self):
         self.model_tester = FlaxBartModelTester(self)
1 change: 0 additions & 1 deletion tests/models/bert/test_modeling_bert.py
@@ -451,7 +451,6 @@ class BertModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin
         if is_torch_available()
         else ()
     )
-    all_generative_model_classes = (BertLMHeadModel,) if is_torch_available() else ()
     pipeline_model_mapping = (
         {
             "feature-extraction": BertModel,
1 change: 0 additions & 1 deletion tests/models/bert_generation/test_modeling_bert_generation.py
@@ -243,7 +243,6 @@ def prepare_config_and_inputs_for_common(self):
 @require_torch
 class BertGenerationEncoderTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
     all_model_classes = (BertGenerationEncoder, BertGenerationDecoder) if is_torch_available() else ()
-    all_generative_model_classes = (BertGenerationDecoder,) if is_torch_available() else ()
     pipeline_model_mapping = (
         {"feature-extraction": BertGenerationEncoder, "text-generation": BertGenerationDecoder}
         if is_torch_available()
1 change: 0 additions & 1 deletion tests/models/big_bird/test_modeling_big_bird.py
@@ -451,7 +451,6 @@ class BigBirdModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase)
         if is_torch_available()
         else ()
     )
-    all_generative_model_classes = (BigBirdForCausalLM,) if is_torch_available() else ()
     pipeline_model_mapping = (
         {
             "feature-extraction": BigBirdModel,
2 changes: 0 additions & 2 deletions tests/models/bigbird_pegasus/test_modeling_bigbird_pegasus.py
@@ -250,7 +250,6 @@ class BigBirdPegasusModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineT
         if is_torch_available()
         else ()
     )
-    all_generative_model_classes = (BigBirdPegasusForConditionalGeneration,) if is_torch_available() else ()
     pipeline_model_mapping = (
         {
             "feature-extraction": BigBirdPegasusModel,
@@ -792,7 +791,6 @@ def prepare_config_and_inputs_for_common(self):
 @require_torch
 class BigBirdPegasusStandaloneDecoderModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
     all_model_classes = (BigBirdPegasusDecoder, BigBirdPegasusForCausalLM) if is_torch_available() else ()
-    all_generative_model_classes = (BigBirdPegasusForCausalLM,) if is_torch_available() else ()
     test_pruning = False
     is_encoder_decoder = False
 
1 change: 0 additions & 1 deletion tests/models/biogpt/test_modeling_biogpt.py
@@ -284,7 +284,6 @@ class BioGptModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMix
         if is_torch_available()
         else ()
     )
-    all_generative_model_classes = (BioGptForCausalLM,) if is_torch_available() else ()
     pipeline_model_mapping = (
         {
             "feature-extraction": BioGptModel,
2 changes: 0 additions & 2 deletions tests/models/blenderbot/test_modeling_blenderbot.py
@@ -226,7 +226,6 @@ def check_encoder_decoder_model_standalone(self, config, inputs_dict):
 @require_torch
 class BlenderbotModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
     all_model_classes = (BlenderbotModel, BlenderbotForConditionalGeneration) if is_torch_available() else ()
-    all_generative_model_classes = (BlenderbotForConditionalGeneration,) if is_torch_available() else ()
     pipeline_model_mapping = (
         {
             "feature-extraction": BlenderbotModel,
@@ -533,7 +532,6 @@ def prepare_config_and_inputs_for_common(self):
 @require_torch
 class BlenderbotStandaloneDecoderModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
     all_model_classes = (BlenderbotDecoder, BlenderbotForCausalLM) if is_torch_available() else ()
-    all_generative_model_classes = (BlenderbotForCausalLM,) if is_torch_available() else ()
     test_pruning = False
     is_encoder_decoder = False
 
1 change: 0 additions & 1 deletion tests/models/blenderbot/test_modeling_flax_blenderbot.py
@@ -319,7 +319,6 @@ class FlaxBlenderbotModelTest(FlaxModelTesterMixin, unittest.TestCase, FlaxGener
         if is_flax_available()
         else ()
     )
-    all_generative_model_classes = (FlaxBlenderbotForConditionalGeneration,) if is_flax_available() else ()
 
     def setUp(self):
         self.model_tester = FlaxBlenderbotModelTester(self)
2 changes: 0 additions & 2 deletions tests/models/blenderbot_small/test_modeling_blenderbot_small.py
@@ -217,7 +217,6 @@ def check_encoder_decoder_model_standalone(self, config, inputs_dict):
 @require_torch
 class BlenderbotSmallModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
     all_model_classes = (BlenderbotSmallModel, BlenderbotSmallForConditionalGeneration) if is_torch_available() else ()
-    all_generative_model_classes = (BlenderbotSmallForConditionalGeneration,) if is_torch_available() else ()
     pipeline_model_mapping = (
         {
             "feature-extraction": BlenderbotSmallModel,
@@ -542,7 +541,6 @@ def prepare_config_and_inputs_for_common(self):
 @require_torch
 class BlenderbotSmallStandaloneDecoderModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
     all_model_classes = (BlenderbotSmallDecoder, BlenderbotSmallForCausalLM) if is_torch_available() else ()
-    all_generative_model_classes = (BlenderbotSmallForCausalLM,) if is_torch_available() else ()
     test_pruning = False
     is_encoder_decoder = False
 
1 change: 0 additions & 1 deletion tests/models/blenderbot_small/test_modeling_flax_blenderbot_small.py
@@ -318,7 +318,6 @@ class FlaxBlenderbotSmallModelTest(FlaxModelTesterMixin, unittest.TestCase, Flax
         if is_flax_available()
         else ()
     )
-    all_generative_model_classes = (FlaxBlenderbotSmallForConditionalGeneration,) if is_flax_available() else ()
 
     def is_pipeline_test_to_skip(
         self,
3 changes: 2 additions & 1 deletion tests/models/blip_2/test_modeling_blip_2.py
@@ -472,7 +472,6 @@ def prepare_config_and_inputs_for_common(self):
 @require_torch
 class Blip2ForConditionalGenerationDecoderOnlyTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
     all_model_classes = (Blip2ForConditionalGeneration,) if is_torch_available() else ()
-    all_generative_model_classes = (Blip2ForConditionalGeneration,) if is_torch_available() else ()
     fx_compatible = False
     test_head_masking = False
     test_pruning = False
@@ -995,6 +994,8 @@ def prepare_config_and_inputs_for_common(self):
 @require_torch
 class Blip2ModelTest(ModelTesterMixin, PipelineTesterMixin, GenerationTesterMixin, unittest.TestCase):
     all_model_classes = (Blip2ForConditionalGeneration, Blip2Model) if is_torch_available() else ()
+    # Doesn't run generation tests. TODO: fix generation tests for Blip2ForConditionalGeneration
+    all_generative_model_classes = ()
     pipeline_model_mapping = (
         {
             "feature-extraction": Blip2Model,
1 change: 0 additions & 1 deletion tests/models/bloom/test_modeling_bloom.py
@@ -328,7 +328,6 @@ class BloomModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixi
         else ()
     )
 
-    all_generative_model_classes = (BloomForCausalLM,) if is_torch_available() else ()
     pipeline_model_mapping = (
         {
             "feature-extraction": BloomModel,
2 changes: 0 additions & 2 deletions tests/models/bloom/test_modeling_flax_bloom.py
@@ -171,7 +171,6 @@ def check_use_cache_forward_with_attn_mask(self, model_class_name, config, input
 @require_flax
 class FlaxBloomModelTest(FlaxModelTesterMixin, unittest.TestCase, FlaxGenerationTesterMixin):
     all_model_classes = (FlaxBloomModel, FlaxBloomForCausalLM) if is_flax_available() else ()
-    all_generative_model_classes = () if is_flax_available() else ()
 
     def setUp(self):
         self.model_tester = FlaxBloomModelTester(self)
@@ -199,7 +198,6 @@ def test_model_from_pretrained(self):
 @require_flax
 class FlaxBloomGenerationTest(unittest.TestCase):
     all_model_classes = (FlaxBloomForCausalLM,) if is_flax_available() else ()
-    all_generative_model_classes = () if is_flax_available() else ()
 
     def setUp(self):
         self.model_id = "bigscience/bloom-560m"
1 change: 0 additions & 1 deletion tests/models/bros/test_modeling_bros.py
@@ -285,7 +285,6 @@ class BrosModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
         if is_torch_available()
         else ()
     )
-    all_generative_model_classes = () if is_torch_available() else ()
     pipeline_model_mapping = (
         {"feature-extraction": BrosModel, "token-classification": BrosForTokenClassification}
         if is_torch_available()
1 change: 0 additions & 1 deletion tests/models/chameleon/test_modeling_chameleon.py
@@ -271,7 +271,6 @@ def prepare_config_and_inputs_for_common(self):
 @require_torch
 class ChameleonModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
     all_model_classes = (ChameleonModel, ChameleonForConditionalGeneration) if is_torch_available() else ()
-    all_generative_model_classes = (ChameleonForConditionalGeneration,) if is_torch_available() else ()
     pipeline_model_mapping = (
         {
             "feature-extraction": ChameleonModel,
1 change: 0 additions & 1 deletion tests/models/clvp/test_modeling_clvp.py
@@ -281,7 +281,6 @@ def prepare_config_and_inputs_for_common(self):
 @require_torch
 class ClvpDecoderTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
     all_model_classes = (ClvpModel, ClvpForCausalLM) if is_torch_available() else ()
-    all_generative_model_classes = (ClvpForCausalLM,) if is_torch_available() else ()
     pipeline_model_mapping = {"feature-extraction": ClvpModelForConditionalGeneration} if is_torch_available() else {}
 
     test_pruning = False
1 change: 0 additions & 1 deletion tests/models/codegen/test_modeling_codegen.py
@@ -322,7 +322,6 @@ def prepare_config_and_inputs_for_common(self):
 @require_torch
 class CodeGenModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
     all_model_classes = (CodeGenModel, CodeGenForCausalLM) if is_torch_available() else ()
-    all_generative_model_classes = (CodeGenForCausalLM,) if is_torch_available() else ()
     pipeline_model_mapping = (
         {"feature-extraction": CodeGenModel, "text-generation": CodeGenForCausalLM} if is_torch_available() else {}
     )
1 change: 0 additions & 1 deletion tests/models/cohere/test_modeling_cohere.py
@@ -274,7 +274,6 @@ def prepare_config_and_inputs_for_common(self):
 @require_torch
 class CohereModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
     all_model_classes = (CohereModel, CohereForCausalLM) if is_torch_available() else ()
-    all_generative_model_classes = (CohereForCausalLM,) if is_torch_available() else ()
     pipeline_model_mapping = (
         {
             "feature-extraction": CohereModel,
1 change: 0 additions & 1 deletion tests/models/cohere2/test_modeling_cohere2.py
@@ -54,7 +54,6 @@ class Cohere2ModelTester(CohereModelTester):
 @require_torch
 class Cohere2ModelTest(CohereModelTest, unittest.TestCase):
     all_model_classes = (Cohere2Model, Cohere2ForCausalLM) if is_torch_available() else ()
-    all_generative_model_classes = (Cohere2ForCausalLM,) if is_torch_available() else ()
     pipeline_model_mapping = (
         {
             "feature-extraction": Cohere2Model,
1 change: 0 additions & 1 deletion tests/models/ctrl/test_modeling_ctrl.py
@@ -193,7 +193,6 @@ def create_and_check_ctrl_for_sequence_classification(self, config, input_ids, h
 @require_torch
 class CTRLModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
     all_model_classes = (CTRLModel, CTRLLMHeadModel, CTRLForSequenceClassification) if is_torch_available() else ()
-    all_generative_model_classes = (CTRLLMHeadModel,) if is_torch_available() else ()
     pipeline_model_mapping = (
         {
             "feature-extraction": CTRLModel,
1 change: 0 additions & 1 deletion tests/models/data2vec/test_modeling_data2vec_text.py
@@ -373,7 +373,6 @@ class Data2VecTextModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTes
         if is_torch_available()
         else ()
     )
-    all_generative_model_classes = (Data2VecTextForCausalLM,) if is_torch_available() else ()
     pipeline_model_mapping = (
         {
             "feature-extraction": Data2VecTextModel,
1 change: 0 additions & 1 deletion tests/models/dbrx/test_modeling_dbrx.py
@@ -322,7 +322,6 @@ def prepare_config_and_inputs_for_common(self):
 @require_torch
 class DbrxModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
     all_model_classes = (DbrxModel, DbrxForCausalLM) if is_torch_available() else ()
-    all_generative_model_classes = (DbrxForCausalLM,) if is_torch_available() else ()
     pipeline_model_mapping = {"text-generation": DbrxForCausalLM} if is_torch_available() else {}
     test_headmasking = False
     test_pruning = False
1 change: 0 additions & 1 deletion tests/models/decision_transformer/test_modeling_decision_transformer.py
@@ -127,7 +127,6 @@ def prepare_config_and_inputs_for_common(self):
 @require_torch
 class DecisionTransformerModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
     all_model_classes = (DecisionTransformerModel,) if is_torch_available() else ()
-    all_generative_model_classes = ()
     pipeline_model_mapping = {"feature-extraction": DecisionTransformerModel} if is_torch_available() else {}
 
     # Ignoring of a failing test from GenerationTesterMixin, as the model does not use inputs_ids
1 change: 0 additions & 1 deletion tests/models/diffllama/test_modeling_diffllama.py
@@ -296,7 +296,6 @@ class DiffLlamaModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTester
         if is_torch_available()
         else ()
     )
-    all_generative_model_classes = (DiffLlamaForCausalLM,) if is_torch_available() else ()
     pipeline_model_mapping = (
         {
             "feature-extraction": DiffLlamaModel,
2 changes: 0 additions & 2 deletions tests/models/emu3/test_modeling_emu3.py
@@ -124,7 +124,6 @@ def prepare_config_and_inputs_for_common(self):
 @require_torch
 class Emu3Text2TextModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
     all_model_classes = (Emu3ForCausalLM,) if is_torch_available() else ()
-    all_generative_model_classes = (Emu3ForCausalLM,) if is_torch_available() else ()
     pipeline_model_mapping = (
         {
             "text-generation": Emu3ForCausalLM,
@@ -312,7 +311,6 @@ def prepare_config_and_inputs_for_common(self):
 @require_torch
 class Emu3Vision2TextModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
     all_model_classes = (Emu3ForConditionalGeneration,) if is_torch_available() else ()
-    all_generative_model_classes = (Emu3ForConditionalGeneration,) if is_torch_available() else ()
     pipeline_model_mapping = {}
     test_headmasking = False
     test_pruning = False