Add support for paligemma deploy #257

Merged · 4 commits · May 24, 2024
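This PR adds "paligemma" to the model types accepted by Version.deploy and routes it to a new deploy_paligemma helper. A minimal usage sketch, assuming an existing Roboflow workspace/project/version and a local checkpoint directory (the API key, workspace, project, and path below are placeholders, not part of this PR):

from roboflow import Roboflow

# Placeholder credentials and identifiers for illustration only.
rf = Roboflow(api_key="YOUR_API_KEY")
version = rf.workspace("my-workspace").project("my-project").version(1)

# model_path should contain either a JAX .npz checkpoint or a
# Hugging Face-style safetensors export (config.json, tokenizer files, etc.).
version.deploy(model_type="paligemma", model_path="./paligemma-checkpoint")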
2 changes: 1 addition & 1 deletion roboflow/__init__.py
@@ -14,7 +14,7 @@
from roboflow.models import CLIPModel, GazeModel # noqa: F401
from roboflow.util.general import write_line

__version__ = "1.1.29"
__version__ = "1.1.30"


def check_key(api_key, model, notebook, num_retries=0):
61 changes: 58 additions & 3 deletions roboflow/core/version.py
@@ -432,11 +432,15 @@ def deploy(self, model_type: str, model_path: str, filename: str = "weights/best
            filename (str, optional): The name of the weights file. Defaults to "weights/best.pt".
        """

supported_models = ["yolov5", "yolov7-seg", "yolov8", "yolov9", "yolonas"]
supported_models = ["yolov5", "yolov7-seg", "yolov8", "yolov9", "yolonas", "paligemma"]

if not any(supported_model in model_type for supported_model in supported_models):
raise (ValueError(f"Model type {model_type} not supported. Supported models are" f" {supported_models}"))

if "paligemma" in model_type:
self.deploy_paligemma(model_type, model_path, filename)
return

if "yolonas" in model_type:
self.deploy_yolonas(model_type, model_path, filename)
return
@@ -548,6 +552,57 @@ def deploy(self, model_type: str, model_path: str, filename: str = "weights/best

        self.upload_zip(model_type, model_path)

    def deploy_paligemma(
        self, model_type: str, model_path: str, filename: str = "fine-tuned-paligemma-3b-pt-224.f16.npz"
    ) -> None:
        # Check if model_path exists
        if not os.path.exists(model_path):
            raise FileNotFoundError(f"Model path {model_path} does not exist.")
        model_files = os.listdir(model_path)
        print(f"Model files found in {model_path}: {model_files}")

        files_to_deploy = []

        # Find first .npz file in model_path
        npz_filename = next((file for file in model_files if file.endswith(".npz")), None)
        if any([file.endswith(".safetensors") for file in model_files]):
            print("Found .safetensors file in model path. Deploying PyTorch PaliGemma model.")
            necessary_files = [
                "config.json",
                "generation_config.json",
                "model.safetensors.index.json",
                "preprocessor_config.json",
                "special_tokens_map.json",
                "tokenizer_config.json",
                "tokenizer.json",
            ]
            for file in necessary_files:
                if file not in model_files:
                    print("Missing necessary file", file)
                    res = input("Do you want to continue? (y/n)")
                    if res.lower() != "y":
                        exit(1)
            for file in model_files:
                files_to_deploy.append(file)
        elif npz_filename is not None:
            print(f"Found .npz file {npz_filename} in model path. Deploying JAX PaliGemma model.")
            files_to_deploy.append(npz_filename)
        else:
            raise FileNotFoundError(f"No .npz or .safetensors file found in model path {model_path}.")

        if len(files_to_deploy) == 0:
            raise FileNotFoundError(f"No valid files found in model path {model_path}.")
        print(f"Zipping files for deploy: {files_to_deploy}")

        import tarfile

        with tarfile.open(os.path.join(model_path, "roboflow_deploy.tar"), "w") as tar:
            for file in files_to_deploy:
                tar.add(os.path.join(model_path, file), arcname=file)

        print("Uploading to Roboflow... May take several minutes.")
        self.upload_zip(model_type, model_path, "roboflow_deploy.tar")

    def deploy_yolonas(self, model_type: str, model_path: str, filename: str = "weights/best.pt") -> None:
        try:
            import torch
@@ -613,7 +668,7 @@ def deploy_yolonas(self, model_type: str, model_path: str, filename: str = "weig

        self.upload_zip(model_type, model_path)

-    def upload_zip(self, model_type: str, model_path: str):
+    def upload_zip(self, model_type: str, model_path: str, model_file_name: str = "roboflow_deploy.zip"):
        res = requests.get(
            f"{API_URL}/{self.workspace}/{self.project}/{self.version}"
            f"/uploadModel?api_key={self.__api_key}&modelType={model_type}&nocache=true"
@@ -632,7 +687,7 @@ def upload_zip(self, model_type: str, model_path: str):

        res = requests.put(
            res.json()["url"],
-            data=open(os.path.join(model_path, "roboflow_deploy.zip"), "rb"),
+            data=open(os.path.join(model_path, model_file_name), "rb"),
        )
        try:
            res.raise_for_status()
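The PyTorch (safetensors) branch of deploy_paligemma expects the Hugging Face-style config and tokenizer files listed in necessary_files, and falls back to an interactive y/n prompt when one is missing. For non-interactive scripts, a small pre-flight check such as the hypothetical helper below (not part of this PR) can report missing files before deploy is called:

import os

# Mirrors the necessary_files list checked in deploy_paligemma above.
REQUIRED_FILES = [
    "config.json",
    "generation_config.json",
    "model.safetensors.index.json",
    "preprocessor_config.json",
    "special_tokens_map.json",
    "tokenizer_config.json",
    "tokenizer.json",
]


def missing_paligemma_files(model_path: str) -> list:
    """Return the required files that are absent from model_path."""
    present = set(os.listdir(model_path))
    return [name for name in REQUIRED_FILES if name not in present]


# Example: fail fast instead of hitting the interactive prompt during deploy.
# "./paligemma-checkpoint" is a placeholder path.
missing = missing_paligemma_files("./paligemma-checkpoint")
if missing:
    raise FileNotFoundError(f"Missing files before deploy: {missing}")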