diff --git a/src/transformers/pipelines/pt_utils.py b/src/transformers/pipelines/pt_utils.py
index 4a95d050ec8c..c39f906f641e 100644
--- a/src/transformers/pipelines/pt_utils.py
+++ b/src/transformers/pipelines/pt_utils.py
@@ -73,7 +73,7 @@ def loader_batch_item(self):
         """
         if isinstance(self._loader_batch_data, torch.Tensor):
             # Batch data is simple tensor, just fetch the slice
-            result = self._loader_batch_data[self._loader_batch_index]
+            result = self._loader_batch_data[self._loader_batch_index].unsqueeze(0)
         else:
             # Batch data is assumed to be BaseModelOutput (or dict)
             loader_batched = {}
diff --git a/tests/pipelines/test_pipelines_image_to_text.py b/tests/pipelines/test_pipelines_image_to_text.py
index 21b297b1e158..e2d59968ebf4 100644
--- a/tests/pipelines/test_pipelines_image_to_text.py
+++ b/tests/pipelines/test_pipelines_image_to_text.py
@@ -142,6 +142,35 @@ def test_small_model_pt_conditional(self):
         outputs = pipe(image, prompt=prompt)
         self.assertTrue(outputs[0]["generated_text"].startswith(prompt))
 
+    @require_torch
+    def test_consistent_batching_behaviour(self):
+        pipe = pipeline("image-to-text", model="hf-internal-testing/tiny-random-BlipForConditionalGeneration")
+        image = "./tests/fixtures/tests_samples/COCO/000000039769.png"
+        prompt = "a photo of"
+
+        outputs = pipe([image, image], prompt=prompt)
+        self.assertTrue(outputs[0][0]["generated_text"].startswith(prompt))
+        self.assertTrue(outputs[1][0]["generated_text"].startswith(prompt))
+
+        outputs = pipe([image, image], prompt=prompt, batch_size=2)
+        self.assertTrue(outputs[0][0]["generated_text"].startswith(prompt))
+        self.assertTrue(outputs[1][0]["generated_text"].startswith(prompt))
+
+        from torch.utils.data import Dataset
+
+        class MyDataset(Dataset):
+            def __len__(self):
+                return 5
+
+            def __getitem__(self, i):
+                return "./tests/fixtures/tests_samples/COCO/000000039769.png"
+
+        dataset = MyDataset()
+        for batch_size in (1, 2, 4):
+            outputs = pipe(dataset, prompt=prompt, batch_size=batch_size if batch_size > 1 else None)
+            self.assertTrue(list(outputs)[0][0]["generated_text"].startswith(prompt))
+            self.assertTrue(list(outputs)[1][0]["generated_text"].startswith(prompt))
+
     @slow
     @require_torch
     def test_large_model_pt(self):
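
Note on the one-line fix (a minimal sketch, not part of the patch; the tensor values below are invented for illustration). When a pipeline runs with batch_size > 1, loader_batch_item slices the batched output tensor back into per-item results. Plain indexing drops the batch dimension, so a batched run yielded differently shaped tensors than an unbatched run, which is the inconsistency the new test_consistent_batching_behaviour guards against. The .unsqueeze(0) restores the singleton batch dimension so both code paths produce the same shape:

import torch

# Stand-in for self._loader_batch_data: generated token ids for a batch of 2.
loader_batch_data = torch.tensor([[101, 7, 8], [101, 9, 10]])

item_old = loader_batch_data[0]               # shape (3,): batch dim lost
item_new = loader_batch_data[0].unsqueeze(0)  # shape (1, 3): matches what an
                                              # unbatched forward pass yields

assert item_old.shape == (3,)
assert item_new.shape == (1, 3)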