Apply black, isort, and autoflake formatting
github-actions[bot] committed May 20, 2024
1 parent 80a5f53 commit 73fb7d0
Showing 2 changed files with 33 additions and 12 deletions.
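The diff below is the kind of change these three formatters produce when run across the repository: autoflake removes unused imports, isort regroups and orders the remaining ones, and black re-wraps lines that exceed the configured length. The exact invocation used by the github-actions workflow is not shown in this commit, so the commands below are only a plausible sketch based on each tool's standard CLI (the target path and flag choices are assumptions):

    autoflake --in-place --remove-all-unused-imports --recursive .
    isort .
    black .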
4 changes: 3 additions & 1 deletion kubejobs/examples/example_pod.py
@@ -26,7 +26,9 @@
     gpu_limit=1,
     cpu_request=1,
     ram_request="1Gi",
-    volume_mounts={"nfs": {"mountPath": "/nfs", "server": "10.24.1.255", "path": "/"}},
+    volume_mounts={
+        "nfs": {"mountPath": "/nfs", "server": "10.24.1.255", "path": "/"}
+    },
 )

 pod_yaml = pod.generate_yaml()
41 changes: 30 additions & 11 deletions kubejobs/pods.py
@@ -1,9 +1,10 @@
+import logging
 import os
-import yaml
 import subprocess
 from typing import List, Optional

+import yaml
 from kubernetes import config
-import logging

 from kubejobs.jobs import fetch_user_info

@@ -119,7 +120,9 @@ def __init__(
     def _add_shm_size(self, container: dict):
         """Adds shared memory volume if shm_size is set."""
         if self.shm_size:
-            container["volumeMounts"].append({"name": "dshm", "mountPath": "/dev/shm"})
+            container["volumeMounts"].append(
+                {"name": "dshm", "mountPath": "/dev/shm"}
+            )
         return container

     def _add_env_vars(self, container: dict):
@@ -195,9 +198,13 @@ def generate_yaml(self):
container["args"] = self.args

if not (
self.gpu_type is None or self.gpu_limit is None or self.gpu_product is None
self.gpu_type is None
or self.gpu_limit is None
or self.gpu_product is None
):
container["resources"] = {"limits": {f"{self.gpu_type}": self.gpu_limit}}
container["resources"] = {
"limits": {f"{self.gpu_type}": self.gpu_limit}
}

container = self._add_shm_size(container)
container = self._add_env_vars(container)
@@ -224,10 +231,14 @@ def generate_yaml(self):
container["resources"]["limits"]["memory"] = self.ram_request

if self.storage_request is not None:
container["resources"]["requests"]["storage"] = self.storage_request
container["resources"]["requests"][
"storage"
] = self.storage_request

if self.gpu_type is not None and self.gpu_limit is not None:
container["resources"]["limits"][f"{self.gpu_type}"] = self.gpu_limit
container["resources"]["limits"][
f"{self.gpu_type}"
] = self.gpu_limit

pod = {
"apiVersion": "v1",
@@ -248,9 +259,13 @@ def generate_yaml(self):
pod["metadata"]["namespace"] = self.namespace

if not (
self.gpu_type is None or self.gpu_limit is None or self.gpu_product is None
self.gpu_type is None
or self.gpu_limit is None
or self.gpu_product is None
):
pod["spec"]["nodeSelector"] = {f"{self.gpu_type}.product": self.gpu_product}
pod["spec"]["nodeSelector"] = {
f"{self.gpu_type}.product": self.gpu_product
}

# Add shared memory volume if shm_size is set
if self.shm_size:
@@ -270,7 +285,9 @@ def generate_yaml(self):
             volume = {"name": mount_name}

             if "pvc" in mount_data:
-                volume["persistentVolumeClaim"] = {"claimName": mount_data["pvc"]}
+                volume["persistentVolumeClaim"] = {
+                    "claimName": mount_data["pvc"]
+                }
             elif "emptyDir" in mount_data:
                 volume["emptyDir"] = {}
             # Add more volume types here if needed
@@ -283,7 +300,9 @@ def generate_yaml(self):
pod["spec"]["volumes"].append(volume)

if self.image_pull_secret:
pod["spec"]["imagePullSecrets"] = [{"name": self.image_pull_secret}]
pod["spec"]["imagePullSecrets"] = [
{"name": self.image_pull_secret}
]

return yaml.dump(pod)

