Remove the last few TF serving sigs (huggingface#23738)
Remove some more serving methods that (I think?) turned up while this PR was open
Rocketknight1 authored and novice03 committed Jun 23, 2023
1 parent 9e62bd8 commit a418b4e
Showing 2 changed files with 2 additions and 53 deletions.
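
The methods deleted below are per-model `serving` overrides: a `tf.function` with a fixed `input_signature` that wrapped `call` and returned `self.serving_output(...)` so the model could be exported as a TF SavedModel. After this commit, no SAM- or template-specific override remains, so export presumably relies on the default serving signature provided by the base `TFPreTrainedModel`. The sketch below is only an illustration of how an export could still look under that assumption; the checkpoint name and output directory are examples, not taken from this commit.

from transformers import TFSamModel

# Load the TF port of SAM; the checkpoint name here is illustrative.
model = TFSamModel.from_pretrained("facebook/sam-vit-base")

# save_pretrained(..., saved_model=True) writes a SavedModel alongside the weights,
# relying on the model's default serving signature rather than a hand-written
# per-model `serving` method like the ones removed in this diff.
model.save_pretrained("sam_export", saved_model=True)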
27 changes: 2 additions & 25 deletions src/transformers/models/sam/modeling_tf_sam.py
@@ -1147,12 +1147,8 @@ class TFSamPreTrainedModel(TFPreTrainedModel):

@property
def dummy_inputs(self) -> Dict[str, tf.Tensor]:
"""
Dummy inputs to build the network.
Returns:
`Dict[str, tf.Tensor]`: The dummy inputs.
"""
# We override the default dummy inputs here because SAM has some really explosive memory usage in the
# attention layers, so we want to pass the smallest possible batches
VISION_DUMMY_INPUTS = tf.random.uniform(
shape=(
1,
@@ -1164,25 +1160,6 @@ def dummy_inputs(self) -> Dict[str, tf.Tensor]:
)
return {"pixel_values": tf.constant(VISION_DUMMY_INPUTS)}

@tf.function(
input_signature=[
{
"pixel_values": tf.TensorSpec((None, None, None, None), tf.float32, name="pixel_values"),
}
]
)
def serving(self, inputs):
"""
Method used for serving the model.
Args:
inputs (`Dict[str, tf.Tensor]`):
The input of the saved model as a dictionary of tensors.
"""
output = self.call(inputs)

return self.serving_output(output)


SAM_START_DOCSTRING = r"""
This model inherits from [`TFPreTrainedModel`]. Check the superclass documentation for the generic methods the
28 changes: 0 additions & 28 deletions
@@ -1938,34 +1938,6 @@ class TF{{cookiecutter.camelcase_modelname}}PreTrainedModel(TFPreTrainedModel):
config_class = {{cookiecutter.camelcase_modelname}}Config
base_model_prefix = "model"

@property
def dummy_inputs(self):
pad_token = 1
input_ids = tf.cast(tf.convert_to_tensor(DUMMY_INPUTS), tf.int32)
decoder_input_ids = tf.cast(tf.convert_to_tensor(DUMMY_INPUTS), tf.int32)
dummy_inputs = {
"decoder_input_ids": decoder_input_ids,
"attention_mask": tf.math.not_equal(input_ids, pad_token),
"input_ids": input_ids,
}
return dummy_inputs

@tf.function(
input_signature=[
{
"input_ids": tf.TensorSpec((None, None), tf.int32, name="input_ids"),
"attention_mask": tf.TensorSpec((None, None), tf.int32, name="attention_mask"),
"decoder_input_ids": tf.TensorSpec((None, None), tf.int32, name="decoder_input_ids"),
"decoder_attention_mask": tf.TensorSpec((None, None), tf.int32, name="decoder_attention_mask"),
}
]
)
# Copied from transformers.models.bart.modeling_tf_bart.TFBartPretrainedModel.serving
def serving(self, inputs):
output = self.call(inputs)

return self.serving_output(output)


{{cookiecutter.uppercase_modelname}}_START_DOCSTRING = r"""
This model inherits from [`TFPreTrainedModel`]. Check the superclass documentation for the
