diff --git a/.env.example b/.env.example
index 992afa0e..9d4e778a 100644
--- a/.env.example
+++ b/.env.example
@@ -14,18 +14,23 @@ VERSION=latest
# GitHub repository (username/repo-name)
GITHUB_REPOSITORY=pyspur-dev/pyspur
+
+# ======================
+# Application Configuration
+# ======================
+
+# Application Host Configuration
+# This is the host that the application will be running on
+# By default, the application will run on 0.0.0.0:6080
+
+PYSPUR_HOST=0.0.0.0
+PYSPUR_PORT=6080
+
+
# Backend Configuration
-BACKEND_PORT=8000
-BACKEND_HOST=0.0.0.0
DEBUG=False
# Frontend Configuration
-FRONTEND_PORT=3000
-FRONTEND_HOST=0.0.0.0
-
-# Application Port Configuration
-# This is the port that will be exposed to access the PySpur application
-PYSPUR_PORT=6080
# ======================
# Database Settings
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 4f9be729..defefdf0 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -2,7 +2,7 @@ name: Release
on:
release:
- types: [published]
+ types: [created]
env:
REGISTRY: ghcr.io
@@ -35,6 +35,11 @@ jobs:
git commit -m "chore: update version to ${{ steps.get-version.outputs.version }}"
git push
+ - name: Update release tag
+ run: |
+ git tag -f ${{ github.event.release.tag_name }}
+ git push --force origin ${{ github.event.release.tag_name }}
+
build-and-push-docker:
needs: update-version
runs-on: ubuntu-latest
diff --git a/Dockerfile.backend b/Dockerfile.backend
index e4c7730c..3cc36898 100644
--- a/Dockerfile.backend
+++ b/Dockerfile.backend
@@ -4,10 +4,13 @@ RUN apt-get update && apt-get install -y \
gcc \
curl \
&& rm -rf /var/lib/apt/lists/*
-
+RUN pip install uv
WORKDIR /pyspur/backend
COPY backend/pyproject.toml .
-RUN pip install .
+RUN uv pip compile pyproject.toml > requirements.txt && \
+ uv pip install --system --no-cache-dir -r requirements.txt && \
+ rm requirements.txt
+
# Development stage
FROM base AS development
@@ -30,4 +33,5 @@ COPY backend/ .
RUN mkdir -p /pyspur/backend/pyspur/static
RUN rm -rf /pyspur/backend/pyspur/static/*
COPY --from=frontend-builder /pyspur/frontend/out/ /pyspur/backend/pyspur/static/
+COPY .env.example /pyspur/backend/pyspur/templates/.env.example
# Production-specific instructions here
\ No newline at end of file
diff --git a/README.md b/README.md
index 81be2af1..079090e7 100644
--- a/README.md
+++ b/README.md
@@ -81,66 +81,142 @@ https://github.com/user-attachments/assets/5bef7a16-ef9f-4650-b385-4ea70fa54c8a
# β‘ Quick start
-You can launch PySpur using pre-built docker images in the following steps:
+## Option A: Using `pyspur` Python Package (Experimental)
-1. **Clone the repository:**
+This is the quickest way to get started. Python 3.12 or higher is required.
+
+1. **Install PySpur:**
```sh
- git clone https://github.com/PySpur-com/pyspur.git
- cd pyspur
+ pip install pyspur
```
-2. **Create a .env file:**
-
- Create a `.env` file at the root of the project. You may use `.env.example` as a starting point.
+2. **Initialize a new project:**
```sh
- cp .env.example .env
+ pyspur init my-project
+ cd my-project
```
- **Please go through the .env file and change configs wherver necessary**
- **If you plan to use third party model providers, please add their API keys in the .env file in this step**.
-
-3. **Start the docker services:**
+ This will create a new directory with a `.env` file.
+3. **Start the server:**
```sh
- docker compose -f ./docker-compose.prod.yml up --build -d
+ pyspur serve --sqlite
```
+ By default, this will start PySpur app at `http://localhost:6080` using a sqlite database.
+ We recommend you configure a postgres instance URL in the `.env` file to get a more stable experience.
- This will start a local instance of PySpur that will store spurs and other state information in a postgres database. A local postgres service is used by default. Override `POSTGRES_*` variables in the `.env` file to use an external postgres database.
+4. **[Optional] Customize Your Deployment:**
+ You can customize your PySpur deployment in two ways:
+
+ a. **Through the app** (Recommended):
+ - Navigate to the API Keys tab in the app
+ - Add your API keys for various providers (OpenAI, Anthropic, etc.)
+ - Changes take effect immediately
-4. **Access the portal:**
+ b. **Manual Configuration**:
+ - Edit the `.env` file in your project directory
+ - It is recommended to configure a postgres database in .env for more reliability
+ - Restart the app with `pyspur serve`. Add `--sqlite` if you are not using postgres
- Go to `http://localhost:6080/` in your browser.
+## Option B: Using Docker (Recommended)
+
+This is the recommended way for production deployments:
+
+1. **Install Docker:**
+ First, install Docker by following the official installation guide for your operating system:
+ - [Docker for Linux](https://docs.docker.com/engine/install/)
+ - [Docker Desktop for Mac](https://docs.docker.com/desktop/install/mac-install/)
+
+2. **Create a PySpur Project:**
+ Once Docker is installed, create a new PySpur project with:
+ ```sh
+ curl -fsSL https://raw.githubusercontent.com/PySpur-com/pyspur/main/start_pyspur_docker.sh | bash -s pyspur-project
+ ```
+ This will:
+ - Start a new PySpur project in a new directory called `pyspur-project`
+ - Set up the necessary configuration files
+ - Start PySpur app automatically backed by a local postgres docker instance
+
+3. **Access PySpur:**
+ Go to `http://localhost:6080` in your browser.
+
+4. **[Optional] Customize Your Deployment:**
+ You can customize your PySpur deployment in two ways:
+
+ a. **Through the app** (Recommended):
+ - Navigate to the API Keys tab in the app
+ - Add your API keys for various providers (OpenAI, Anthropic, etc.)
+ - Changes take effect immediately
+
+ b. **Manual Configuration**:
+ - Edit the `.env` file in your project directory
+ - Restart the services with:
+ ```sh
+ docker compose up -d
+ ```
+
+That's it! Click on "New Spur" to create a workflow, or start with one of the stock templates.
+# π οΈ PySpur Development Setup
+#### [ Instructions for development on Unix-like systems. Development on Windows/PC not supported ]
-Set up is completed. Click on "New Spur" to create a workflow, or start with one of the stock templates.
+For development, follow these steps:
+1. **Clone the repository:**
+ ```sh
+ git clone https://github.com/PySpur-com/pyspur.git
+ cd pyspur
+ ```
-5. **[Optional] Manage your LLM provider keys from the app:**
+2. **Launch using docker-compose.dev.yml:**
+ ```sh
+ docker compose -f docker-compose.dev.yml up --build -d
+ ```
+ This will start a local instance of PySpur with hot-reloading enabled for development.
- Once PySpur app is running you can manage your LLM provider keys through the portal:
+3. **Customize your setup:**
+ Edit the `.env` file to configure your environment. By default, PySpur uses a local PostgreSQL database. To use an external database, modify the `POSTGRES_*` variables in `.env`.
-
+# π¦ Using PySpur with Ollama (Local Models)
- Select API keys tab
+PySpur can work with local models served using Ollama.
-
+Here are the steps to configure PySpur to work with Ollama running on the same host:
- Enter your provider's key and click save (save button will appear after you add/modify a key)
+### 1. Configure Ollama
+To ensure the Ollama API is reachable from PySpur, we need to start the Ollama service with the environment variable `OLLAMA_HOST=0.0.0.0`. This allows requests coming from PySpur docker's bridge network to get through to Ollama.
+An easy way to do this is to launch the ollama service with the following command:
+```sh
+OLLAMA_HOST="0.0.0.0" ollama serve
+```
-
+### 2. Update the PySpur .env file
+Next up we need to update the `OLLAMA_BASE_URL` environment value in the `.env` file.
+If your Ollama port is 11434 (the default port), then the entry in `.env` file should look like this:
+```sh
+OLLAMA_BASE_URL=http://host.docker.internal:11434
+```
+(Please make sure that there is no trailing slash at the end!)
+In PySpur's set up, `host.docker.internal` refers to the host machine where both PySpur and Ollama are running.
-# π οΈ PySpur Development Setup
-#### [ Instructions for development on Unix-like systems. Development on Windows/PC not tested ]
+### 3. Launch the PySpur app
+Follow the usual steps to launch the PySpur app, starting with the command:
+```sh
+docker compose -f docker-compose.prod.yml up --build -d
+```
-The steps for dev setup are same as above, except for step 3: we launch the app in the dev mode instead
+If you wish to do PySpur development with ollama please run the following command instead of above:
+```sh
+docker compose -f docker-compose.yml up --build -d
+```
-3. **Start the docker services:**
- ```sh
- docker compose up --build -d
- ```
+### 4. Using Ollama models in the app
+You will be able to select Ollama models [`ollama/llama3.2`, `ollama/llama3`, ...] from the sidebar for LLM nodes.
+Please make sure the model you select is explicitly downloaded in Ollama. That is, you will need to manually manage these models via Ollama. To download a model you can simply run `ollama pull <model-name>`.
- This will start a local instance of PySpur that will store spurs and other state information in a postgres database. A local postgres service is used by default. Override `POSTGRES_*` variables in the `.env` file to use an external postgres database.
+## Note on supported models
+PySpur only works with models that support structured-output and json mode. Most newer models should be good, but it would still be good to confirm this from Ollama documentation for the model you wish to use.
# β Support us
@@ -171,4 +247,4 @@ You can support us in our work by leaving a star! Thank you!
- [ ] Generate Spurs via AI
Your feedback will be massively appreciated.
-Please [tell us](mailto:founders@pyspur.dev?subject=Feature%20Request&body=I%20want%20this%20feature%3Ai) which features on that list you like to see next or request entirely new ones.
+Please [tell us](mailto:founders@pyspur.dev?subject=Feature%20Request&body=I%20want%20this%20feature%3Ai) which features on that list you like to see next or request entirely new ones.
\ No newline at end of file
diff --git a/backend/pyproject.toml b/backend/pyproject.toml
index edfc13a6..ba4f66de 100644
--- a/backend/pyproject.toml
+++ b/backend/pyproject.toml
@@ -66,12 +66,16 @@ dependencies = [
"httpx[http2]==0.27.2",
"sendgrid==6.11.0",
"resend==2.6.0",
+ "typer[all]==0.9.0",
]
[project.urls]
Repository = "https://github.com/pyspur-dev/pyspur"
Documentation = "https://docs.pyspur.dev"
+[project.scripts]
+pyspur = "pyspur.cli:main"
+
[project.optional-dependencies]
dev = [
"pytest>=7.0",
@@ -85,6 +89,11 @@ dev = [
[tool.hatch.build.targets.wheel]
universal = false
packages = ["pyspur"]
+zip-safe = false
+
+[tool.hatch.build.targets.wheel.force-include]
+"pyspur/templates" = "pyspur/templates/"
+"pyspur/static" = "pyspur/static/"
[tool.ruff]
line-length = 100
@@ -94,10 +103,14 @@ target-version = "py312"
select = ["E", "F", "I", "N", "W", "B", "C", "D"]
ignore = [
"D100", # Missing docstring in public module
+ "D101", # Missing docstring in public class
+ "D102", # Missing docstring in public method
+ "D103", # Missing docstring in public function
"D104", # Missing docstring in public package
+ "D105", # Missing docstring in magic method
"D107", # Missing docstring in __init__
- "D203", # Ignore 'incorrect-blank-line-before-class' in favor of D211
- "D213", # Ignore 'multi-line-summary-second-line' in favor of D212
+ "I001", # Import block is un-sorted or un-formatted
+ "E402", # Module level import not at top of file
]
[tool.black]
diff --git a/backend/pyspur/api/api_app.py b/backend/pyspur/api/api_app.py
new file mode 100644
index 00000000..f0455aa2
--- /dev/null
+++ b/backend/pyspur/api/api_app.py
@@ -0,0 +1,41 @@
+from fastapi import FastAPI
+
+from ..nodes.registry import NodeRegistry
+
+NodeRegistry.discover_nodes()
+
+from ..integrations.google.auth import router as google_auth_router
+from .dataset_management import router as dataset_management_router
+from .evals_management import router as evals_management_router
+from .file_management import router as file_management_router
+from .key_management import router as key_management_router
+from .node_management import router as node_management_router
+from .openai_compatible_api import router as openai_compatible_api_router
+from .output_file_management import router as output_file_management_router
+from .rag_management import router as rag_management_router
+from .run_management import router as run_management_router
+from .template_management import router as template_management_router
+from .workflow_management import router as workflow_management_router
+from .workflow_run import router as workflow_run_router
+
+# Create a sub-application for API routes
+api_app = FastAPI(
+ docs_url="/docs",
+ redoc_url="/redoc",
+ title="PySpur API",
+ version="1.0.0",
+)
+
+api_app.include_router(node_management_router, prefix="/node")
+api_app.include_router(workflow_management_router, prefix="/wf")
+api_app.include_router(workflow_run_router, prefix="/wf")
+api_app.include_router(dataset_management_router, prefix="/ds")
+api_app.include_router(run_management_router, prefix="/run")
+api_app.include_router(output_file_management_router, prefix="/of")
+api_app.include_router(key_management_router, prefix="/env-mgmt")
+api_app.include_router(template_management_router, prefix="/templates")
+api_app.include_router(openai_compatible_api_router, prefix="/api")
+api_app.include_router(evals_management_router, prefix="/evals")
+api_app.include_router(google_auth_router, prefix="/google")
+api_app.include_router(rag_management_router, prefix="/rag")
+api_app.include_router(file_management_router, prefix="/files")
diff --git a/backend/pyspur/api/main.py b/backend/pyspur/api/main.py
index afae42d0..b4b1a04e 100644
--- a/backend/pyspur/api/main.py
+++ b/backend/pyspur/api/main.py
@@ -1,32 +1,47 @@
from dotenv import load_dotenv
+import tempfile
+import shutil
+from contextlib import ExitStack, asynccontextmanager
+from importlib.resources import files, as_file
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles
from pathlib import Path
-from ..nodes.registry import NodeRegistry
+from .api_app import api_app
-NodeRegistry.discover_nodes()
+load_dotenv()
-from ..integrations.google.auth import router as google_auth_router
-from .dataset_management import router as dataset_management_router
-from .evals_management import router as evals_management_router
-from .file_management import router as file_management_router
-from .key_management import router as key_management_router
-from .node_management import router as node_management_router
-from .openai_compatible_api import router as openai_compatible_api_router
-from .output_file_management import router as output_file_management_router
-from .rag_management import router as rag_management_router
-from .run_management import router as run_management_router
-from .template_management import router as template_management_router
-from .workflow_management import router as workflow_management_router
-from .workflow_run import router as workflow_run_router
+# Create an ExitStack to manage resources
+exit_stack = ExitStack()
+temporary_static_dir = None
-load_dotenv()
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+ """Manage application lifespan and cleanup."""
+ global temporary_static_dir
+
+ # Setup: Create temporary directory and extract static files
+ temporary_static_dir = Path(tempfile.mkdtemp())
+
+ # Extract static files to temporary directory
+ static_files = files("pyspur").joinpath("static")
+ static_dir = exit_stack.enter_context(as_file(static_files))
+
+ # Copy static files to temporary directory
+ if static_dir.exists():
+ shutil.copytree(static_dir, temporary_static_dir, dirs_exist_ok=True)
+
+ yield
-app = FastAPI()
+ # Cleanup: Remove temporary directory and close ExitStack
+ exit_stack.close()
+ shutil.rmtree(temporary_static_dir, ignore_errors=True)
+
+
+app = FastAPI(lifespan=lifespan)
# Add CORS middleware
app.add_middleware(
@@ -37,52 +52,37 @@
allow_headers=["*"],
)
-# Create a sub-application for API routes
-api_app = FastAPI(
- docs_url="/docs",
- redoc_url="/redoc",
- title="PySpur API",
- version="1.0.0",
-)
-api_app.include_router(node_management_router, prefix="/node")
-api_app.include_router(workflow_management_router, prefix="/wf")
-api_app.include_router(workflow_run_router, prefix="/wf")
-api_app.include_router(dataset_management_router, prefix="/ds")
-api_app.include_router(run_management_router, prefix="/run")
-api_app.include_router(output_file_management_router, prefix="/of")
-api_app.include_router(key_management_router, prefix="/env-mgmt")
-api_app.include_router(template_management_router, prefix="/templates")
-api_app.include_router(openai_compatible_api_router, prefix="/api")
-api_app.include_router(evals_management_router, prefix="/evals")
-api_app.include_router(google_auth_router, prefix="/google")
-api_app.include_router(rag_management_router, prefix="/rag")
-api_app.include_router(file_management_router, prefix="/files")
-
# Mount the API routes under /api
app.mount("/api", api_app, name="api")
-# Mount static files to serve frontend
-static_dir = Path(__file__).parent.parent.joinpath("static")
-
# Optionally, mount directories for assets that you want served directly:
-if Path.joinpath(static_dir, "images").exists():
- app.mount("/images", StaticFiles(directory=str(static_dir.joinpath("images"))), name="images")
-if Path.joinpath(static_dir, "_next").exists():
- app.mount("/_next", StaticFiles(directory=str(static_dir.joinpath("_next"))), name="_next")
+if temporary_static_dir and Path.joinpath(temporary_static_dir, "images").exists():
+ app.mount(
+ "/images",
+ StaticFiles(directory=str(temporary_static_dir.joinpath("images"))),
+ name="images",
+ )
+if temporary_static_dir and Path.joinpath(temporary_static_dir, "_next").exists():
+ app.mount(
+ "/_next", StaticFiles(directory=str(temporary_static_dir.joinpath("_next"))), name="_next"
+ )
@app.get("/{full_path:path}", include_in_schema=False)
async def serve_frontend(full_path: str):
+ if not temporary_static_dir:
+ raise RuntimeError("Static directory not initialized")
+
# If the request is empty, serve index.html
if full_path == "":
- return FileResponse(static_dir.joinpath("index.html"))
+ return FileResponse(temporary_static_dir.joinpath("index.html"))
# remove trailing slash
if full_path[-1] == "/":
full_path = full_path[:-1]
# Build a candidate file path from the request.
- candidate = static_dir.joinpath(full_path)
+ candidate = temporary_static_dir.joinpath(full_path)
# If candidate is a directory, try its index.html.
if candidate.is_dir():
@@ -91,7 +91,7 @@ async def serve_frontend(full_path: str):
return FileResponse(candidate_index)
# If no direct file, try appending ".html" (for files like dashboard.html)
- candidate_html = static_dir.joinpath(full_path + ".html")
+ candidate_html = temporary_static_dir.joinpath(full_path + ".html")
if candidate_html.exists():
return FileResponse(candidate_html)
@@ -102,10 +102,10 @@ async def serve_frontend(full_path: str):
# Check if the parent directory contains a file named "[id].html"
parts = full_path.split("/")
if len(parts) >= 2:
- parent = static_dir.joinpath(*parts[:-1])
+ parent = temporary_static_dir.joinpath(*parts[:-1])
dynamic_file = parent.joinpath("[id].html")
if dynamic_file.exists():
return FileResponse(dynamic_file)
# Fallback: serve the main index.html for clientβside routing.
- return FileResponse(static_dir.joinpath("index.html"))
+ return FileResponse(temporary_static_dir.joinpath("index.html"))
diff --git a/backend/pyspur/api/template_management.py b/backend/pyspur/api/template_management.py
index 5a7734ae..20855343 100644
--- a/backend/pyspur/api/template_management.py
+++ b/backend/pyspur/api/template_management.py
@@ -1,6 +1,7 @@
import json
-from pathlib import Path
from typing import List
+from importlib.resources import files, as_file
+import contextlib
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
@@ -15,6 +16,8 @@
class TemplateSchema(BaseModel):
+ """Template schema."""
+
name: str
description: str
features: List[str]
@@ -23,9 +26,7 @@ class TemplateSchema(BaseModel):
router = APIRouter()
-TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates"
-
-print(f"TEMPLATES_DIR resolved to: {TEMPLATES_DIR.resolve()}")
+TEMPLATES_RESOURCE = files("pyspur").joinpath("templates")
@router.get(
@@ -34,32 +35,35 @@ class TemplateSchema(BaseModel):
response_model=List[TemplateSchema],
)
def list_templates() -> List[TemplateSchema]:
- if not TEMPLATES_DIR.exists():
- raise HTTPException(status_code=500, detail="Templates directory not found")
+ """List all available templates."""
+ with contextlib.ExitStack() as stack:
+ templates_dir = stack.enter_context(as_file(TEMPLATES_RESOURCE))
+ if not templates_dir.exists():
+ raise HTTPException(status_code=500, detail="Templates directory not found")
- # Sort by creation time in descending (most recent first)
- sorted_template_files = sorted(
- TEMPLATES_DIR.glob("*.json"),
- key=lambda p: p.stat().st_ctime,
- reverse=True,
- )
+ # Sort by creation time in descending (most recent first)
+ sorted_template_files = sorted(
+ templates_dir.glob("*.json"),
+ key=lambda p: p.stat().st_ctime,
+ reverse=True,
+ )
- templates: List[TemplateSchema] = []
- for template_file in sorted_template_files:
- with open(template_file, "r") as f:
- template_content = json.load(f)
- metadata = template_content.get("metadata", {})
- templates.append(
- TemplateSchema.model_validate(
- {
- "name": metadata.get("name", template_file.stem),
- "description": metadata.get("description", ""),
- "features": metadata.get("features", []),
- "file_name": template_file.name,
- }
+ templates: List[TemplateSchema] = []
+ for template_file in sorted_template_files:
+ with open(template_file, "r") as f:
+ template_content = json.load(f)
+ metadata = template_content.get("metadata", {})
+ templates.append(
+ TemplateSchema.model_validate(
+ {
+ "name": metadata.get("name", template_file.stem),
+ "description": metadata.get("description", ""),
+ "features": metadata.get("features", []),
+ "file_name": template_file.name,
+ }
+ )
)
- )
- return templates
+ return templates
@router.post(
@@ -68,22 +72,25 @@ def list_templates() -> List[TemplateSchema]:
response_model=WorkflowResponseSchema,
)
def instantiate_template(template: TemplateSchema, db: Session = Depends(get_db)):
+ """Instantiate a new workflow from a template."""
template_file_name = template.file_name
- template_path = TEMPLATES_DIR / template_file_name
- print(f"Requested template: {template_file_name}")
- print(f"Resolved template path: {template_path}")
- if not template_path.exists():
- raise HTTPException(status_code=404, detail="Template not found")
- with open(template_path, "r") as f:
- template_content = json.load(f)
- metadata = template_content.get("metadata", {})
- workflow_definition = template_content.get("definition", {})
- new_workflow = create_workflow(
- WorkflowCreateRequestSchema(
- name=metadata.get("name", "Untitled Workflow"),
- description=metadata.get("description", ""),
- definition=workflow_definition,
- ),
- db,
- )
- return new_workflow
+ with contextlib.ExitStack() as stack:
+ templates_dir = stack.enter_context(as_file(TEMPLATES_RESOURCE))
+ template_path = templates_dir / template_file_name
+ print(f"Requested template: {template_file_name}")
+ print(f"Resolved template path: {template_path}")
+ if not template_path.exists():
+ raise HTTPException(status_code=404, detail="Template not found")
+ with open(template_path, "r") as f:
+ template_content = json.load(f)
+ metadata = template_content.get("metadata", {})
+ workflow_definition = template_content.get("definition", {})
+ new_workflow = create_workflow(
+ WorkflowCreateRequestSchema(
+ name=metadata.get("name", "Untitled Workflow"),
+ description=metadata.get("description", ""),
+ definition=workflow_definition,
+ ),
+ db,
+ )
+ return new_workflow
diff --git a/backend/pyspur/api/workflow_run.py b/backend/pyspur/api/workflow_run.py
index 014de2fb..e04c52ce 100644
--- a/backend/pyspur/api/workflow_run.py
+++ b/backend/pyspur/api/workflow_run.py
@@ -33,9 +33,6 @@
router = APIRouter()
-# Define EVALS_DIR (same as in evals_management.py)
-EVALS_DIR = Path(__file__).parent.parent / "evals" / "tasks"
-
async def create_run_model(
workflow_id: str,
diff --git a/backend/pyspur/cli/__init__.py b/backend/pyspur/cli/__init__.py
new file mode 100644
index 00000000..aef4f13b
--- /dev/null
+++ b/backend/pyspur/cli/__init__.py
@@ -0,0 +1,5 @@
+"""PySpur CLI package."""
+
+from .main import main
+
+__all__ = ["main"]
diff --git a/backend/pyspur/cli/main.py b/backend/pyspur/cli/main.py
new file mode 100644
index 00000000..66c543ab
--- /dev/null
+++ b/backend/pyspur/cli/main.py
@@ -0,0 +1,116 @@
+"""Main module for the PySpur CLI."""
+
+import os
+import shutil
+from pathlib import Path
+from typing import Optional
+
+import typer
+from rich import print
+from rich.console import Console
+import uvicorn
+
+from .utils import copy_template_file, load_environment, run_migrations
+
+app = typer.Typer(
+ name="pyspur",
+ help="PySpur CLI - A tool for building and deploying AI Agents",
+ add_completion=False,
+)
+
+console = Console()
+
+
+@app.command()
+def init(
+ path: Optional[str] = typer.Argument(
+ None,
+ help="Path where to initialize PySpur project. Defaults to current directory.",
+ )
+) -> None:
+ """Initialize a new PySpur project in the specified directory."""
+ target_dir = Path(path) if path else Path.cwd()
+
+ if not target_dir.exists():
+ target_dir.mkdir(parents=True)
+
+ # Copy .env.example
+ try:
+ copy_template_file(".env.example", target_dir / ".env.example")
+ print("[green]β[/green] Created .env.example")
+
+ # Create .env if it doesn't exist
+ env_path = target_dir / ".env"
+ if not env_path.exists():
+ shutil.copy2(target_dir / ".env.example", env_path)
+ print("[green]β[/green] Created .env from template")
+
+ # add PROJECT_ROOT to .env
+ with open(env_path, "a") as f:
+            f.write("""\n# DO NOT CHANGE THIS VALUE\n""")
+ f.write("\nPROJECT_ROOT=" + str(target_dir) + "\n")
+
+ # Create a data directory
+ data_dir = target_dir / "data"
+ if not data_dir.exists():
+ data_dir.mkdir()
+ print("[green]β[/green] Created data directory")
+
+ print("\n[bold green]PySpur project initialized successfully! π[/bold green]")
+ print("\nNext steps:")
+ print("1. Review and update the .env file with your configuration")
+ print("2. Start the PySpur server with 'pyspur serve'")
+
+ except Exception as e:
+ print(f"[red]Error initializing project: {str(e)}[/red]")
+ raise typer.Exit(1) from e
+
+
+@app.command()
+def serve(
+ host: str = typer.Option(
+ None,
+ help="Host to bind the server to. Defaults to PYSPUR_HOST from environment or 0.0.0.0",
+ ),
+ port: int = typer.Option(
+ None,
+ help="Port to bind the server to. Defaults to PYSPUR_PORT from environment or 6080",
+ ),
+ sqlite: bool = typer.Option(
+ False,
+ help="Use SQLite database instead of PostgreSQL. Useful for local development.",
+ ),
+) -> None:
+ """Start the PySpur server."""
+ try:
+ # Load environment variables
+ load_environment()
+
+ # Use environment variables as defaults if not provided via CLI
+ host = host or os.getenv("PYSPUR_HOST", "0.0.0.0")
+ port = port or int(os.getenv("PYSPUR_PORT", "6080"))
+
+ if sqlite:
+ print("[yellow]Using SQLite database for local development...[/yellow]")
+ os.environ["SQLITE_OVERRIDE_DATABASE_URL"] = "sqlite:///./pyspur.db"
+
+ # Run database migrations
+ print("[yellow]Running database migrations...[/yellow]")
+ run_migrations()
+
+ # Start the server
+ print(f"\n[green]Starting PySpur server at http://{host}:{port} π[/green]")
+ uvicorn.run(
+ "pyspur.api.main:app",
+ host=host,
+ port=port,
+ )
+
+ except Exception as e:
+ print(f"[red]Error starting server: {str(e)}[/red]")
+ raise typer.Exit(1) from e
+
+
+def main() -> None:
+ """PySpur CLI."""
+ app()
diff --git a/backend/pyspur/cli/utils.py b/backend/pyspur/cli/utils.py
new file mode 100644
index 00000000..b54fd40c
--- /dev/null
+++ b/backend/pyspur/cli/utils.py
@@ -0,0 +1,109 @@
+"""Utility functions for the PySpur CLI."""
+
+from pathlib import Path
+import shutil
+from importlib import resources
+import tempfile
+
+from rich import print
+import typer
+from dotenv import load_dotenv
+from sqlalchemy import text
+from alembic import command
+from alembic.config import Config
+from alembic.runtime.migration import MigrationContext
+
+
+def copy_template_file(template_name: str, dest_path: Path) -> None:
+ """Copy a template file from the package templates directory to the destination."""
+ with resources.files("pyspur.templates").joinpath(template_name).open("rb") as src:
+ with open(dest_path, "wb") as dst:
+ shutil.copyfileobj(src, dst)
+
+
+def load_environment() -> None:
+ """Load environment variables from .env file with fallback to .env.example."""
+ env_path = Path.cwd() / ".env"
+ if env_path.exists():
+ load_dotenv(env_path)
+ print("[green]β[/green] Loaded configuration from .env")
+ else:
+ with resources.files("pyspur.templates").joinpath(".env.example").open() as f:
+ load_dotenv(stream=f)
+ print(
+ "[yellow]![/yellow] No .env file found, using default configuration from .env.example"
+ )
+ print("[yellow]![/yellow] Run 'pyspur init' to create a customizable .env file")
+
+
+def run_migrations() -> None:
+ """Run database migrations using SQLAlchemy."""
+ try:
+ from ..database import engine, database_url
+ from ..models.base_model import BaseModel
+
+ # Test connection
+ with engine.connect() as conn:
+ conn.execute(text("SELECT 1"))
+ print("[green]β[/green] Connected to database")
+
+ # If using SQLite, create the database file if it doesn't exist
+ if database_url.startswith("sqlite"):
+ try:
+ BaseModel.metadata.create_all(engine)
+ print("[yellow]![/yellow] SQLite database is not recommended for production")
+ print("[yellow]![/yellow] Please use a postgres instance instead")
+ return
+ except Exception as e:
+ print("[yellow]![/yellow] SQLite database out of sync, recreating from scratch")
+ # Ask for confirmation before dropping all tables
+ confirm = input(
+ "This will delete all data in the SQLite database. Are you sure? (y/N): "
+ )
+ if confirm.lower() != "y":
+ print("[yellow]![/yellow] Database recreation cancelled")
+ print(
+ "[yellow]![/yellow] Please revert pyspur to the original version that was used to create the database"
+ )
+ print("[yellow]![/yellow] OR use a postgres instance to support migrations")
+ return
+ BaseModel.metadata.drop_all(engine)
+ BaseModel.metadata.create_all(engine)
+ print("[green]β[/green] Created SQLite database from scratch")
+ return
+
+ # For other databases, use Alembic migrations
+ # Get migration context
+ context = MigrationContext.configure(conn)
+
+ # Get current revision
+ current_rev = context.get_current_revision()
+
+ if current_rev is None:
+ print("[yellow]![/yellow] No previous migrations found, initializing database")
+ else:
+ print(f"[green]β[/green] Current database version: {current_rev}")
+
+ # Get migration scripts directory using importlib.resources
+ script_location = resources.files("pyspur.models.management.alembic")
+ if not script_location.is_dir():
+ raise FileNotFoundError("Migration scripts not found in package")
+
+ # extract migration scripts directory to a temporary location
+ with (
+ tempfile.TemporaryDirectory() as script_temp_dir,
+ resources.as_file(script_location) as script_location_path,
+ ):
+ shutil.copytree(script_location_path, Path(script_temp_dir), dirs_exist_ok=True)
+ # Create Alembic config programmatically
+ config = Config()
+ config.set_main_option("script_location", str(script_temp_dir))
+ config.set_main_option("sqlalchemy.url", database_url)
+
+ # Run upgrade to head
+ command.upgrade(config, "head")
+ print("[green]β[/green] Database schema is up to date")
+
+ except Exception as e:
+ print(f"[red]Error running migrations: {str(e)}[/red]")
+ raise typer.Exit(1) from e
diff --git a/backend/pyspur/database.py b/backend/pyspur/database.py
index 425e5793..428e1819 100644
--- a/backend/pyspur/database.py
+++ b/backend/pyspur/database.py
@@ -1,10 +1,8 @@
import os
-
-from dotenv import load_dotenv
+from typing import Iterator
from sqlalchemy import create_engine
-from sqlalchemy.orm import sessionmaker
+from sqlalchemy.orm import sessionmaker, Session
-load_dotenv()
# Get the database URL from the environment
POSTGRES_USER = os.getenv("POSTGRES_USER")
@@ -13,18 +11,32 @@
POSTGRES_PORT = os.getenv("POSTGRES_PORT")
POSTGRES_DB = os.getenv("POSTGRES_DB")
-DATABASE_URL = f"postgresql://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{POSTGRES_HOST}:{POSTGRES_PORT}/{POSTGRES_DB}"
+database_url = f"postgresql://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{POSTGRES_HOST}:{POSTGRES_PORT}/{POSTGRES_DB}"
+
+sqlite_override_database_url = os.getenv("SQLITE_OVERRIDE_DATABASE_URL")
+if sqlite_override_database_url:
+ database_url = sqlite_override_database_url
# Create the SQLAlchemy engine
-engine = create_engine(DATABASE_URL)
+engine = create_engine(database_url)
# Create a configured "Session" class
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
-def get_db():
+def get_db() -> Iterator[Session]:
+ """Get a database connection."""
db = SessionLocal()
try:
yield db
finally:
db.close()
+
+
+def is_db_connected() -> bool:
+    """Check if the database is reachable (probe connection is closed/returned to the pool)."""
+    try:
+        with engine.connect():
+            return True
+    except Exception:
+        return False
diff --git a/backend/pyspur/integrations/google/auth.py b/backend/pyspur/integrations/google/auth.py
index cadfb40f..0ac85ef8 100644
--- a/backend/pyspur/integrations/google/auth.py
+++ b/backend/pyspur/integrations/google/auth.py
@@ -1,4 +1,5 @@
import json
+import os
import time
# Import the logger
@@ -13,7 +14,11 @@
# Define a router for Google OAuth
router = APIRouter()
-TOKEN_FILE_PATH = Path(__file__).parent / "token.json"
+PROJECT_ROOT = os.getenv("PROJECT_ROOT", os.getcwd())
+BASE_DIR = Path(PROJECT_ROOT) / "credentials" / "google"
+
+# Default file paths for credentials and tokens.
+TOKEN_FILE_PATH = BASE_DIR / "token.json"
class TokenInput(BaseModel):
diff --git a/backend/pyspur/integrations/google/client.py b/backend/pyspur/integrations/google/client.py
index 31dc3461..286fa025 100644
--- a/backend/pyspur/integrations/google/client.py
+++ b/backend/pyspur/integrations/google/client.py
@@ -1,4 +1,5 @@
import json
+import os
from pathlib import Path
from typing import Tuple
@@ -6,8 +7,8 @@
from googleapiclient.discovery import build # type: ignore
from googleapiclient.errors import HttpError # type: ignore
-# Retrieve the current directory using pathlib for better path handling.
-BASE_DIR = Path(__file__).resolve().parent
+PROJECT_ROOT = os.getenv("PROJECT_ROOT", os.getcwd())
+BASE_DIR = Path(PROJECT_ROOT) / "credentials" / "google"
# Default file paths for credentials and tokens.
TOKEN_FILE = BASE_DIR / "token.json"
@@ -47,9 +48,7 @@ def read_sheet(self, spreadsheet_id: str, range_name: str) -> Tuple[bool, str]:
result = (
sheet.values()
- .get( # type: ignore
- spreadsheetId=spreadsheet_id, range=range_name
- )
+ .get(spreadsheetId=spreadsheet_id, range=range_name) # type: ignore
.execute()
)
diff --git a/backend/pyspur/models/management/alembic/env.py b/backend/pyspur/models/management/alembic/env.py
index 4a378e62..5983bb39 100644
--- a/backend/pyspur/models/management/alembic/env.py
+++ b/backend/pyspur/models/management/alembic/env.py
@@ -4,7 +4,7 @@
from sqlalchemy import engine_from_config, pool
# Import database URL
-from pyspur.database import DATABASE_URL
+from pyspur.database import database_url
from pyspur.models.base_model import BaseModel
from pyspur.models.dataset_model import DatasetModel # type: ignore
from pyspur.models.dc_and_vi_model import (
@@ -30,7 +30,7 @@
fileConfig(config.config_file_name)
# Set the database URL in the config
-config.set_main_option("sqlalchemy.url", DATABASE_URL)
+config.set_main_option("sqlalchemy.url", database_url)
# add your model's MetaData object here
target_metadata = BaseModel.metadata
diff --git a/backend/templates/joke_generator.json b/backend/pyspur/templates/joke_generator.json
similarity index 100%
rename from backend/templates/joke_generator.json
rename to backend/pyspur/templates/joke_generator.json
diff --git a/backend/templates/ollama_model_comparison.json b/backend/pyspur/templates/ollama_model_comparison.json
similarity index 100%
rename from backend/templates/ollama_model_comparison.json
rename to backend/pyspur/templates/ollama_model_comparison.json
diff --git a/backend/pyspur/utils/path_utils.py b/backend/pyspur/utils/path_utils.py
index 5cc9578f..4cc2ba08 100644
--- a/backend/pyspur/utils/path_utils.py
+++ b/backend/pyspur/utils/path_utils.py
@@ -1,6 +1,6 @@
from pathlib import Path
-PROJECT_ROOT = Path(__file__).resolve().parent.parent.parent
+PROJECT_ROOT = Path.cwd()
def is_external_url(url: str) -> bool:
@@ -18,21 +18,14 @@ def resolve_file_path(file_path: str) -> Path | str:
"""
Resolve a file path relative to the project root.
Expects paths in format 'data/test_files/S9/20250120_121759_aialy.pdf' and resolves them to
- '/pyspur/backend/data/test_files/S9/20250120_121759_aialy.pdf'
+ 'data/test_files/S9/20250120_121759_aialy.pdf'
If the path is an external URL (starts with http:// or https://), returns it as is.
"""
# Handle external URLs
if is_external_url(file_path):
return file_path
- path = Path(file_path)
- if path.is_absolute():
- return path
-
- # Construct full path under test_files directory
- full_path = Path.joinpath(PROJECT_ROOT, "data", file_path)
-
- if not full_path.exists():
- raise FileNotFoundError(f"File not found at expected location: {full_path}")
-
- return full_path
+ path = Path.joinpath(PROJECT_ROOT, "data", Path(file_path))
+ if not path.exists():
+ raise FileNotFoundError(f"File not found at expected location: {file_path}")
+ return path
diff --git a/docker-compose.prod.yml b/docker-compose.dev.yml
similarity index 52%
rename from docker-compose.prod.yml
rename to docker-compose.dev.yml
index 1aba9c4a..db02e922 100644
--- a/docker-compose.prod.yml
+++ b/docker-compose.dev.yml
@@ -1,13 +1,26 @@
services:
- backend:
- image: ghcr.io/${GITHUB_REPOSITORY:-pyspur-dev/pyspur}-backend:${VERSION:-latest}
- command: bash /pyspur/backend/entrypoint.sh
+ nginx:
+ image: nginx:latest
ports:
- - "${PYSPUR_PORT:-6080}:8000"
+ - "${PYSPUR_PORT:-6080}:80"
+ volumes:
+ - ./nginx/conf.d:/etc/nginx/conf.d
+ depends_on:
+ - backend
+ - frontend
+ restart: on-failure
+
+ backend:
+ build:
+ context: .
+ dockerfile: Dockerfile.backend
+ target: ${ENVIRONMENT:-development}
env_file:
- ./.env.example
- ./.env
+ command: bash /pyspur/backend/entrypoint.sh
volumes:
+ - .:/pyspur
- ./.env:/pyspur/backend/.env
- pyspur_data:/pyspur/backend/data
extra_hosts:
@@ -16,6 +29,21 @@ services:
db:
condition: service_healthy
+ frontend:
+ build:
+ context: .
+ dockerfile: Dockerfile.frontend
+ target: ${ENVIRONMENT:-development}
+ env_file:
+ - ./.env.example
+ - ./.env
+ command: npm run dev
+ volumes:
+ - .:/pyspur
+ - /pyspur/frontend/node_modules
+ depends_on:
+ - backend
+
db:
image: postgres:17-alpine
restart: on-failure
@@ -30,4 +58,4 @@ services:
timeout: 5s
volumes:
postgres_data:
- pyspur_data: # Used to persist data like uploaded files, eval outputs, datasets
\ No newline at end of file
+ pyspur_data: # Used to persist data like uploaded files, eval outputs, datasets
diff --git a/docker-compose.yml b/docker-compose.yml
index 12354602..1aba9c4a 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,26 +1,13 @@
services:
- nginx:
- image: nginx:latest
- ports:
- - "${NGINX_PORT:-6080}:80"
- volumes:
- - ./nginx/conf.d:/etc/nginx/conf.d
- depends_on:
- - backend
- - frontend
- restart: on-failure
-
backend:
- build:
- context: .
- dockerfile: Dockerfile.backend
- target: ${ENVIRONMENT:-development}
+ image: ghcr.io/${GITHUB_REPOSITORY:-pyspur-dev/pyspur}-backend:${VERSION:-latest}
+ command: bash /pyspur/backend/entrypoint.sh
+ ports:
+ - "${PYSPUR_PORT:-6080}:8000"
env_file:
- ./.env.example
- ./.env
- command: bash /pyspur/backend/entrypoint.sh
volumes:
- - .:/pyspur
- ./.env:/pyspur/backend/.env
- pyspur_data:/pyspur/backend/data
extra_hosts:
@@ -29,21 +16,6 @@ services:
db:
condition: service_healthy
- frontend:
- build:
- context: .
- dockerfile: Dockerfile.frontend
- target: ${ENVIRONMENT:-development}
- env_file:
- - ./.env.example
- - ./.env
- command: npm run dev
- volumes:
- - .:/pyspur
- - /pyspur/frontend/node_modules
- depends_on:
- - backend
-
db:
image: postgres:17-alpine
restart: on-failure
@@ -58,4 +30,4 @@ services:
timeout: 5s
volumes:
postgres_data:
- pyspur_data: # Used to persist data like uploaded files, eval outputs, datasets
+    pyspur_data: # Used to persist data like uploaded files, eval outputs, datasets
diff --git a/start_pyspur_docker.sh b/start_pyspur_docker.sh
new file mode 100755
index 00000000..64dabb1a
--- /dev/null
+++ b/start_pyspur_docker.sh
@@ -0,0 +1,86 @@
+#!/bin/bash
+
+# Colors for output
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+NC='\033[0m' # No Color
+
+# Function to print colored messages
+print_message() {
+ echo -e "${2}${1}${NC}"
+}
+
+# Function to check if a command exists
+command_exists() {
+ command -v "$1" >/dev/null 2>&1
+}
+
+# Function to download a file from GitHub
+download_file() {
+ local file=$1
+ local url="https://raw.githubusercontent.com/pyspur-dev/pyspur/main/${file}"
+
+ if curl -fsSL "$url" -o "$file"; then
+ print_message "Downloaded ${file} successfully" "$GREEN"
+ return 0
+ else
+ print_message "Failed to download ${file}" "$RED"
+ return 1
+ fi
+}
+
+# Check if Docker is installed
+if ! command_exists "docker"; then
+ print_message "Docker is not installed. Please install Docker first:" "$RED"
+ print_message "https://docs.docker.com/engine/install/" "$YELLOW"
+ exit 1
+fi
+
+# Check if curl is installed
+if ! command_exists "curl"; then
+ print_message "curl is not installed. Please install curl first." "$RED"
+ exit 1
+fi
+
+# Get project name from argument or use default
+PROJECT_NAME=${1:-"pyspur-project"}
+
+# Check if directory already exists
+if [ -d "$PROJECT_NAME" ]; then
+ print_message "Directory '$PROJECT_NAME' already exists. Please choose a different name or remove the existing directory." "$RED"
+ exit 1
+fi
+
+print_message "Creating new PySpur project: $PROJECT_NAME..." "$GREEN"
+
+# Create and enter project directory
+mkdir -p "$PROJECT_NAME"
+cd "$PROJECT_NAME" || exit 1
+
+# Download docker-compose.yml (production version)
+if ! download_file "docker-compose.yml"; then
+ exit 1
+fi
+
+# Download and copy .env.example to .env
+if ! download_file ".env.example"; then
+ exit 1
+fi
+cp .env.example .env || exit 1
+
+# Start the services
+print_message "Launching PySpur services..." "$GREEN"
+if docker compose up -d; then
+ print_message "\nπ PySpur is now running!" "$GREEN"
+ print_message "\nProject created in: $(pwd)" "$GREEN"
+ print_message "Access PySpur at: http://localhost:6080" "$GREEN"
+ print_message "\nTo customize your deployment:" "$YELLOW"
+ print_message "1. Configure API keys through the portal's API Keys tab, or" "$YELLOW"
+ print_message "2. Manually edit .env and restart with:" "$YELLOW"
+ print_message " docker compose up -d" "$NC"
+ exit 0
+else
+ print_message "\nFailed to start PySpur services. Please check the error messages above." "$RED"
+ exit 1
+fi