Merge pull request #902 from kumaranvpl/main
Fail gracefully if ollama is already being served
ishaan-jaff authored Nov 24, 2023
2 parents 8241366 + 01fad94 commit b03a7ae
1 changed file, 13 changes (9 additions and 4 deletions): litellm/proxy/proxy_server.py
@@ -227,10 +227,15 @@ def celery_setup(use_queue: bool):
         celery_app_conn = celery_app
 
 def run_ollama_serve():
-    command = ['ollama', 'serve']
-
-    with open(os.devnull, 'w') as devnull:
-        process = subprocess.Popen(command, stdout=devnull, stderr=devnull)
+    try:
+        command = ['ollama', 'serve']
+
+        with open(os.devnull, 'w') as devnull:
+            process = subprocess.Popen(command, stdout=devnull, stderr=devnull)
+    except Exception as e:
+        print(f"""
+            LiteLLM Warning: proxy started with `ollama` model\n`ollama serve` failed with Exception{e}. \nEnsure you run `ollama serve`
+        """)
 
 def load_router_config(router: Optional[litellm.Router], config_file_path: str):
     global master_key
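For context, the change wraps the launch of `ollama serve` in a try/except so that, per the commit message, a failure to start the command (for example, when the binary is not on PATH or a server is already running) prints a warning instead of crashing the proxy. Below is a minimal standalone sketch of that pattern; it is a simplified stand-in, not the LiteLLM proxy module itself, and the warning text is illustrative.

    # Sketch of the "fail gracefully" pattern from this commit (hypothetical,
    # simplified stand-in for the proxy's run_ollama_serve).
    import os
    import subprocess

    def run_ollama_serve():
        try:
            command = ['ollama', 'serve']
            # Discard the child's output and keep a handle to the background process.
            with open(os.devnull, 'w') as devnull:
                process = subprocess.Popen(command, stdout=devnull, stderr=devnull)
            return process
        except Exception as e:
            # Catch startup failures (e.g. the binary missing from PATH) and warn
            # instead of letting the exception take down the caller.
            print(f"Warning: `ollama serve` could not be started: {e}. "
                  "Ensure `ollama serve` is running.")
            return None

    if __name__ == "__main__":
        proc = run_ollama_serve()
        print("Startup continues even if ollama could not be launched:", proc)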
