diff --git a/app.py b/app.py
new file mode 100644
index 0000000..96b6ac1
--- /dev/null
+++ b/app.py
@@ -0,0 +1,72 @@
+from kitchenai.contrib.kitchenai_sdk.kitchenai import KitchenAIApp
+from kitchenai.contrib.kitchenai_sdk.api import QuerySchema, EmbedSchema
+from llama_index.core import VectorStoreIndex, StorageContext
+from llama_index.vector_stores.chroma import ChromaVectorStore
+from llama_index.llms.openai import OpenAI
+import os
+import chromadb
+from llama_index.llms.openai import OpenAI
+from llama_index.core.node_parser import TokenTextSplitter
+from llama_index.core.extractors import (
+    TitleExtractor,
+    QuestionsAnsweredExtractor)
+from llama_index.core import Document
+from kitchenai.contrib.kitchenai_sdk.storage.llama_parser import Parser
+
+kitchen = KitchenAIApp()
+
+chroma_client = chromadb.PersistentClient(path="chroma_db")
+chroma_collection = chroma_client.get_or_create_collection("quickstart")
+llm = OpenAI(model="gpt-4")
+chroma_collection_second_collection = chroma_client.get_or_create_collection("second_collection")
+
+@kitchen.storage("simple-vector")
+def simple_vector(dir: str, metadata: dict = {}, *args, **kwargs):
+    parser = Parser(api_key=os.environ.get("LLAMA_CLOUD_API_KEY", None))
+    response = parser.load(dir, metadata=metadata, **kwargs)
+    vector_store = ChromaVectorStore(chroma_collection=chroma_collection)
+    storage_context = StorageContext.from_defaults(vector_store=vector_store)
+    VectorStoreIndex.from_documents(
+        response["documents"], storage_context=storage_context, show_progress=True,
+        transformations=[TokenTextSplitter(), TitleExtractor(), QuestionsAnsweredExtractor()]
+    )
+    return {"response": len(response["documents"])}
+
+
+@kitchen.storage("simple-vector2")
+def simple_vector2(dir: str, metadata: dict = {}, *args, **kwargs):
+    parser = Parser(api_key=os.environ.get("LLAMA_CLOUD_API_KEY", None))
+    response = parser.load(dir, metadata=metadata, **kwargs)
+    vector_store = ChromaVectorStore(chroma_collection=chroma_collection_second_collection)
+    storage_context = StorageContext.from_defaults(vector_store=vector_store)
+    VectorStoreIndex.from_documents(
+        response["documents"], storage_context=storage_context, show_progress=True,
+        transformations=[TokenTextSplitter(), TitleExtractor(), QuestionsAnsweredExtractor()]
+    )
+    return {"response": len(response["documents"])}
+
+@kitchen.query("simple-query")
+def simple_query(data: QuerySchema):
+    vector_store = ChromaVectorStore(chroma_collection=chroma_collection)
+    index = VectorStoreIndex.from_vector_store(
+        vector_store,
+    )
+    query_engine = index.as_query_engine(chat_mode="best", llm=llm, verbose=True)
+    response = query_engine.query(data.query)
+    return {"response": response.response}
+
+@kitchen.stream("simple-stream")
+def simple_stream(data: QuerySchema):
+    vector_store = ChromaVectorStore(chroma_collection=chroma_collection)
+    index = VectorStoreIndex.from_vector_store(
+        vector_store,
+    )
+    query_engine = index.as_query_engine(streaming=True, llm=llm, similarity_top_k=1, verbose=True)
+    response = query_engine.query(data.query)
+
+    return response.response_gen
+
+@kitchen.query("non-ai")
+def non_ai(data: QuerySchema):
+    msg = "no AI is used in this function"
+    return {"response": msg}
\ No newline at end of file
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 7ca8b9c..ab317a7 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1,7 +1,7 @@
 #
 # This file is autogenerated by hatch-pip-compile with Python 3.11
 #
-# [constraints] requirements.txt (SHA256: 
bef74194f8fd80762a1ef00469312dbc8243a3854822b60f9150002d8d517687) +# [constraints] requirements.txt (SHA256: 18df5e1d5be84eeae244327525e84700b34a08f7506779624aa1f99fe146898d) # # - django-browser-reload # - django-debug-toolbar @@ -36,6 +36,7 @@ # - django-allauth[socialaccount] # - django-anymail[resend] # - django-compressor +# - django-eventstream # - django-extensions # - django-health-check # - django-htmx @@ -59,7 +60,7 @@ # - heroicons[django] # - honcho # - jupyterlab -# - kitchenai-python-sdk +# - kitchenai-python-sdk>=1.3.0 # - llama-index # - llama-index-vector-stores-chroma # - nbconvert @@ -357,8 +358,10 @@ django==5.1.2 # django-compressor # django-crispy-forms # django-debug-toolbar + # django-eventstream # django-extensions # django-fastdev + # django-grip # django-health-check # django-htmx # django-lifecycle @@ -411,6 +414,10 @@ django-crispy-forms==2.3 # crispy-tailwind django-debug-toolbar==4.4.6 # via hatch.envs.docs +django-eventstream==5.3.1 + # via + # -c requirements.txt + # hatch.envs.docs django-extensions==3.2.3 # via # -c requirements.txt @@ -418,6 +425,10 @@ django-extensions==3.2.3 # dj-notebook django-fastdev==1.12.0 # via hatch.envs.docs +django-grip==3.5.1 + # via + # -c requirements.txt + # django-eventstream django-health-check==3.18.3 # via # -c requirements.txt @@ -592,6 +603,11 @@ greenlet==3.1.1 # via # -c requirements.txt # sqlalchemy +gripcontrol==4.2.0 + # via + # -c requirements.txt + # django-eventstream + # django-grip grpcio==1.68.1 # via # -c requirements.txt @@ -826,7 +842,7 @@ jupyterlab-widgets==3.0.13 # via ipywidgets keyring==25.4.1 # via hatch -kitchenai-python-sdk==1.1.0 +kitchenai-python-sdk==1.3.0 # via # -c requirements.txt # hatch.envs.docs @@ -1230,6 +1246,11 @@ ptyprocess==0.7.0 # -c requirements.txt # pexpect # terminado +pubcontrol==3.5.0 + # via + # -c requirements.txt + # django-grip + # gripcontrol pure-eval==0.2.3 # via # -c requirements.txt @@ -1282,6 +1303,9 @@ pyjwt==2.9.0 # via # -c requirements.txt # django-allauth + # django-eventstream + # gripcontrol + # pubcontrol pypdf==5.1.0 # via # -c requirements.txt @@ -1393,6 +1417,7 @@ requests==2.32.3 # llama-index-core # llama-index-legacy # posthog + # pubcontrol # requests-oauthlib # sphinx # tiktoken @@ -1471,7 +1496,10 @@ six==1.16.0 # -c requirements.txt # asttokens # cssbeautifier + # django-eventstream + # django-grip # django-pandas + # gripcontrol # jsbeautifier # kubernetes # posthog @@ -1759,8 +1787,11 @@ websockets==14.1 # -c requirements.txt # sphinx-autobuild # uvicorn -werkzeug==3.0.4 - # via hatch.envs.docs +werkzeug==3.1.3 + # via + # -c requirements.txt + # hatch.envs.docs + # django-grip wheel==0.44.0 # via pip-tools whitenoise==6.7.0 diff --git a/kitchenai/api.py b/kitchenai/api.py index be775d2..f2aa690 100644 --- a/kitchenai/api.py +++ b/kitchenai/api.py @@ -1,16 +1,14 @@ import logging from django.apps import apps -from kitchenai.core.api import router as core_router from ninja import NinjaAPI +from kitchenai.core.router import router as core_router logger = logging.getLogger(__name__) -api = NinjaAPI(version="0.9.0") +api = NinjaAPI(version="0.10.0") -# Ensure `ready()` is called for all apps apps.get_app_configs() -# Get the app's router from `MyAppConfig` and add it to the main API -api.add_router("/core", core_router) # Add app's router here +api.add_router("/v1", core_router) diff --git a/kitchenai/cli/client/__init__.py b/kitchenai/cli/client/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kitchenai/cli/client.py 
b/kitchenai/cli/client/client.py similarity index 99% rename from kitchenai/cli/client.py rename to kitchenai/cli/client/client.py index b5c5404..a8931b7 100644 --- a/kitchenai/cli/client.py +++ b/kitchenai/cli/client/client.py @@ -245,6 +245,4 @@ def list_labels(): ) console.print(table) except Exception as e: - console.print(f"[red]Error fetching labels: {e}[/red]") - - + console.print(f"[red]Error fetching labels: {e}[/red]") \ No newline at end of file diff --git a/kitchenai/cli/main.py b/kitchenai/cli/main.py index a390425..9d3942b 100644 --- a/kitchenai/cli/main.py +++ b/kitchenai/cli/main.py @@ -1,385 +1,11 @@ -import logging -import os -import sys - -import django -import typer -from cookiecutter.main import cookiecutter from django.conf import settings from rich.console import Console -from typing import Annotated -from .cook import app as cook_app -from .client import app as client_app +from kitchenai.notebooks.cli.notebooks import app as notebook_app +from kitchenai.core.cli.core import app +from .client.client import app as client_app console = Console() -logger = logging.getLogger(__name__) - -app = typer.Typer() -app.add_typer(cook_app, name="cook") +app.add_typer(notebook_app, name="notebook") app.add_typer(client_app, name="client") - - -@app.command() -def add(module: str = typer.Argument("app.kitchen:kitchen")): - from django.core.management import execute_from_command_line - - execute_from_command_line(["manage", "add_module", module]) - -@app.command() -def init( - verbose: Annotated[int, typer.Option(help="verbosity level. default 0")] = 0, - collect_static: Annotated[bool, typer.Option("--collect-static/--no-collect-static", help="Collect static assets.")] = False, - ): - django.setup() - from django.core.management import execute_from_command_line - from kitchenai.core.models import KitchenAIManagement - from django.conf import settings - import posthog - - posthog.capture("init", "kitchenai_init") - - cmd = ["manage", "migrate","--verbosity", f"{verbose}"] - - if verbose != 1: - with console.status("Applying migrations...", spinner="dots"): - execute_from_command_line(cmd) - - with console.status("Setting up periodic tasks", spinner="dots"): - execute_from_command_line(["manage", "setup_periodic_tasks"]) - - if collect_static: - with console.status("Collecting static assets", spinner="dots"): - execute_from_command_line(["manage", "collectstatic", "--no-input"]) - else: - execute_from_command_line(cmd) - execute_from_command_line(["manage", "setup_periodic_tasks"]) - if collect_static: - execute_from_command_line(["manage", "collectstatic", "--no-input"]) - - - KitchenAIManagement.objects.all().delete() - try: - mgmt = KitchenAIManagement.objects.create( - version = settings.VERSION, - project_name = "default" - ) - except Exception as e: - logger.error(e) - return - - -@app.command() -def qcluster() -> None: - """Run Django-q cluster.""" - from django.core.management import execute_from_command_line - # execute_from_command_line(["manage", "qcluster", *argv[2:]]) - execute_from_command_line(["manage", "qcluster"]) - - -@app.command() -def runserver( - module: Annotated[str, typer.Option(help="Python module to load.")] = "", - ) -> None: - """Run Django runserver.""" - #NOTE: doing this to reset the sys.argv for gunicorn command. 
- sys.argv = [sys.argv[0]] - - django.setup() - from kitchenai.api import api - from kitchenai.core.utils import setup - - setup( - api, - module=module - ) - _run_dev_uvicorn(sys.argv) - -@app.command() -def run(module: Annotated[str, typer.Option(help="Python module to load.")] = os.environ.get("KITCHENAI_MODULE", "")) -> None: - """Run Django runserver.""" - sys.argv = [sys.argv[0]] - django.setup() - from kitchenai.api import api - from kitchenai.core.utils import setup - - setup( - api, - module=module, - ) - - _run_uvicorn(sys.argv) - - -@app.command() -def dev( - address: str ="0.0.0.0:8001", - module: Annotated[str, typer.Option(help="Python module to load.")] = "", - tailwind: Annotated[bool, typer.Option(help="Tailwind servers.")] = False, - jupyter: Annotated[bool, typer.Option(help="Jupyter Notebook servers.")] = False, - - ): - """ - Reads the kitchen config file, reads the application file and runs the KitchenAI server - """ - import posthog - import django - import uuid - - django.setup() - commands = {"server": "kitchenai runserver"} - - posthog.capture("init", "kitchenai_dev") - - if module: - commands["server"] = f"kitchenai runserver --module {module} " - - if jupyter: - #user is running jupyter alongside kitchenai - from kitchenai.core.models import KitchenAIManagement - mgmt = KitchenAIManagement.objects.filter(name="kitchenai_management").first() - notebook_id = uuid.uuid4() - mgmt.jupyter_token = notebook_id - mgmt.save() - - commands["jupyter"] = f"jupyter lab --NotebookApp.token='{notebook_id}'" - - - - if tailwind: - if "django_tailwind_cli" in settings.INSTALLED_APPS: - commands["tailwind"] = "django-admin tailwind watch" - if "tailwind" in settings.INSTALLED_APPS: - commands["tailwind"] = "django-admin tailwind start" - if "django_q" in settings.INSTALLED_APPS: - commands["qcluster"] = "kitchenai qcluster" - - typer.echo(f"[INFO] starting development server on {address}") - - # call_command("migrate") - _run_with_honcho(commands) - -@app.command() -def manage(args: list[str] = typer.Argument(None, help="Arguments for Django's manage.py")) -> None: - """ - Run Django's manage command with additional arguments. - """ - from django.core.management import execute_from_command_line - - # Build the argument list for Django - if args is None: - sys.argv = ["manage"] - else: - sys.argv = ["manage"] + args - - execute_from_command_line(sys.argv) - -@app.command() -def setup(): - """Run some project setup tasks""" - django.setup() - from django.core.management import execute_from_command_line - import os - - execute_from_command_line(["manage", "migrate"]) - execute_from_command_line(["manage", "setup_periodic_tasks"]) - - # Set environment variables for superuser credentials - email = os.environ.get("DJANGO_SUPERUSER_EMAIL", "admin@localhost") - password = os.environ.get("DJANGO_SUPERUSER_PASSWORD", "admin") - username = os.environ.get("DJANGO_SUPERUSER_USERNAME", email.split("@")[0]) - - if password == "admin": - #set it - os.environ["DJANGO_SUPERUSER_PASSWORD"] = "admin" - execute_from_command_line( - ["manage", "createsuperuser", "--noinput", "--traceback", "--email", email, "--username", username] - ) - -@app.command() -def build( - dir: str, - module: str, - admin: Annotated[bool, typer.Option("--admin/--no-admin", help="Admin status (default is True)")] = False, -): - """ - Reads the kitchen config file, reads the application file and runs the KitchenAI server. 
- """ - django.setup() - from django.template import loader - import pathlib - import subprocess - from rich.text import Text - - base_dir = pathlib.Path(dir) - - # Flip the admin flag because we want it to default to True unless the flag is passed - admin = not admin - - module_name = module.split(":")[0] - - # Save the configuration to the database - template_name = 'build_templates/Dockerfile.tmpl' - - # Check if requirements.txt and module file exist in the directory - requirements_file = base_dir / 'requirements.txt' - module_path = base_dir / f"{module_name}.py" - - if not requirements_file.exists() or not module_path.exists(): - console.print("[bold red]Error:[/bold red] Both requirements.txt and the module file must exist in the specified directory.") - raise typer.Exit(code=1) - - # Context data to pass into the template - context = { - 'module': module, - "admin": admin - } - - try: - # Load and render the template with the context data - template = loader.get_template(template_name) - rendered_content = template.render(context) - - # Write the rendered Dockerfile to the specified directory - dockerfile_path = base_dir / 'Dockerfile' - with open(dockerfile_path, 'w') as dockerfile: - dockerfile.write(rendered_content) - - console.print(Text(f"Dockerfile successfully created at {dockerfile_path}", style="green")) - - except Exception as e: - console.print(f"[bold red]Error rendering template:[/bold red] {e}", style="bold red") - raise typer.Exit(code=1) - - # Build the Docker image using the Dockerfile - try: - console.print("[cyan]Building Docker image...[/cyan]") - # Run the Docker build command - process = subprocess.Popen( - ["docker", "build", "-t", "kitchenai-app", dir], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - text=True, - bufsize=1, - universal_newlines=True, - ) - - # Stream output line by line - for stdout_line in iter(process.stdout.readline, ""): - console.print(stdout_line.strip()) - - process.stdout.close() - return_code = process.wait() - - # Check if the Docker build was successful - if return_code == 0: - console.print("[green]Docker image built successfully![/green]") - else: - # Capture and print stderr output in case of an error - for stderr_line in iter(process.stderr.readline, ""): - console.print(f"[bold red]{stderr_line.strip()}[/bold red]") - console.print("[bold red]Docker build failed.[/bold red]") - raise typer.Exit(code=1) - - except FileNotFoundError: - console.print("[bold red]Docker is not installed or not available in your PATH.[/bold red]") - raise typer.Exit(code=1) - except Exception as e: - console.print(f"[bold red]Error during Docker build:[/bold red] {e}") - raise typer.Exit(code=1) - - except FileNotFoundError: - console.print("[bold red]Docker is not installed or not available in your PATH.[/bold red]") - raise typer.Exit(code=1) - except Exception as e: - console.print(f"[bold red]Error during Docker build:[/bold red] {e}") - raise typer.Exit(code=1) - -@app.command() -def new(): - """ - Reads the kitchen config file, reads the application file and runs the KitchenAI server - """ - - cookiecutter("https://github.com/epuerta9/cookiecutter-cookbook.git", output_dir=".") - - - -def _run_with_honcho(commands: dict): - from honcho.manager import Manager - - manager = Manager() - for name, cmd in commands.items(): - manager.add_process(name, cmd) - try: - manager.loop() - finally: - manager.terminate() - - - -def _run_uvicorn(argv: list) -> None: - """ - Run gunicorn + uvicorn workers server. 
- https://docs.gunicorn.org/en/stable/settings.html - https://adamj.eu/tech/2021/12/29/set-up-a-gunicorn-configuration-file-and-test-it/ - """ - - import multiprocessing - from gunicorn.app import wsgiapp # for gunicorn - - workers = multiprocessing.cpu_count() * 2 + 1 - gunicorn_args = [ - "kitchenai.asgi:application", # Replace WSGI with ASGI app - "--bind", - "0.0.0.0:8001", - # "unix:/run/kitchenai_demo.gunicorn.sock", # Use this if you're using a socket file - "--max-requests", - "1000", - "--max-requests-jitter", - "50", - "--workers", - str(workers), - "--worker-class", - "uvicorn.workers.UvicornWorker", # Use Uvicorn worker for ASGI - "--access-logfile", - "-", - "--error-logfile", - "-", - ] - argv.extend(gunicorn_args) - - wsgiapp.run() - - -def _run_dev_uvicorn(argv: list) -> None: - """ - Run gunicorn + uvicorn workers server. - https://docs.gunicorn.org/en/stable/settings.html - https://adamj.eu/tech/2021/12/29/set-up-a-gunicorn-configuration-file-and-test-it/ - """ - from gunicorn.app import wsgiapp # for gunicorn - - workers = 2 - gunicorn_args = [ - "kitchenai.asgi:application", # Replace WSGI with ASGI app - "--bind", - "0.0.0.0:8001", - # "unix:/run/kitchenai_demo.gunicorn.sock", # Use this if you're using a socket file - "--max-requests", - "1000", - "--max-requests-jitter", - "50", - "--workers", - str(workers), - "--worker-class", - "uvicorn.workers.UvicornWorker", # Use Uvicorn worker for ASGI - "--access-logfile", - "-", - "--error-logfile", - "-", - ] - argv.extend(gunicorn_args) - - wsgiapp.run() diff --git a/kitchenai/contrib/kitchenai_sdk/kitchenai.py b/kitchenai/contrib/kitchenai_sdk/kitchenai.py index 10b5770..336f2c9 100644 --- a/kitchenai/contrib/kitchenai_sdk/kitchenai.py +++ b/kitchenai/contrib/kitchenai_sdk/kitchenai.py @@ -27,42 +27,18 @@ def __init__(self, router: Router = None, namespace: str = 'default', default_db self._default_hook = "kitchenai.contrib.kitchenai_sdk.hooks.default_hook" self._default_db = default_db self._query_handlers = {} + self._query_stream_handlers = {} self._agent_handlers = {} self._embed_tasks= {} self._embed_delete_tasks = {} # Decorators for different route types - def query(self, label: str, streaming=False, llama_stack_emit="", **route_kwargs): + def query(self, label: str, **route_kwargs): """Query is a decorator for query handlers with the ability to add middleware""" def decorator(func, **route_kwargs): @functools.wraps(func) async def wrapper(*args, **kwargs): - if streaming: - #NOTE: Streaming HTTP response is only a synchronous operation. 
Temporary solution - # async def event_generator(): - # async for event in func(*args, **kwargs): - # # Flush each chunk immediately - # yield event - def event_generator(): - # Call the synchronous function and get the generator - gen = func(*args, **kwargs) - - for event in gen: - # Yield each chunk formatted as Server-Sent Events - yield event - - - result = StreamingHttpResponse( - event_generator(), - content_type="text/event-stream", - headers={ - 'Cache-Control': 'no-cache', - 'Transfer-Encoding': 'chunked', - 'X-Accel-Buffering': 'no', - } - ) - # Non-streaming behavior - elif asyncio.iscoroutinefunction(func): + if asyncio.iscoroutinefunction(func): result = await func(*args, **kwargs) else: loop = asyncio.get_event_loop() @@ -128,29 +104,12 @@ def wrapper(*args, **kwargs): return wrapper return decorator - def agent(self, label: str, streaming=False, **route_kwargs): + def agent(self, label: str, **route_kwargs): """Agent is a decorator for agent handlers with the ability to add middleware""" def decorator(func, **route_kwargs): @functools.wraps(func) async def wrapper(*args, **kwargs): - if streaming: - #NOTE: Streaming HTTP response is only a synchronous operation - async def event_generator(): - async for event in func(*args, **kwargs): - # Flush each chunk immediately - yield event - - return StreamingHttpResponse( - event_generator(), - content_type="text/event-stream", - headers={ - 'Cache-Control': 'no-cache', - 'Transfer-Encoding': 'chunked', - 'X-Accel-Buffering': 'no', - } - ) - # Non-streaming behavior - elif asyncio.iscoroutinefunction(func): + if asyncio.iscoroutinefunction(func): return await func(*args, **kwargs) else: loop = asyncio.get_event_loop() @@ -160,6 +119,16 @@ async def event_generator(): return wrapper return decorator + + def stream(self, label: str, **route_kwargs): + """Stream is a decorator for stream handlers. 
It returns a Generator function""" + def decorator(func, **route_kwargs): + + self._query_stream_handlers[f"{self._namespace}.{label}"] = func + + return func + + return decorator def storage_create_hook(self, label: str): """Hooks are functions that are run after a storage task is successful""" @@ -228,6 +197,7 @@ def to_dict(self): return { "namespace": self._namespace, "query_handlers": list(self._query_handlers.keys()), + "query_stream_handlers": list(self._query_stream_handlers.keys()), "agent_handlers": list(self._agent_handlers.keys()), "embed_tasks": list(self._embed_tasks.keys()), "embed_delete_tasks": list(self._embed_delete_tasks.keys()), diff --git a/kitchenai/core/admin.py b/kitchenai/core/admin.py index a340e20..d727175 100644 --- a/kitchenai/core/admin.py +++ b/kitchenai/core/admin.py @@ -5,11 +5,7 @@ FileObject, KitchenAIManagement, KitchenAIModule, - KitchenAIRootModule, - CodeFunction, - CodeImport, - CodeSetup, - Notebook + KitchenAIRootModule ) @@ -32,24 +28,6 @@ class EmbedObjectAdmin(admin.ModelAdmin): class KitchenAIRootModuleAdmin(admin.ModelAdmin): pass -@admin.register(CodeFunction) -class CodeFunctionAdmin(admin.ModelAdmin): - pass - - -@admin.register(CodeImport) -class CodeImportAdmin(admin.ModelAdmin): - pass - -@admin.register(CodeSetup) -class CodeSetupAdmin(admin.ModelAdmin): - pass - - -@admin.register(Notebook) -class NotebookAdmin(admin.ModelAdmin): - pass - @admin.register(KitchenAIModule) class KitchenAIModuleAdmin(admin.ModelAdmin): diff --git a/kitchenai/core/api.py b/kitchenai/core/api.py deleted file mode 100644 index f4eb02a..0000000 --- a/kitchenai/core/api.py +++ /dev/null @@ -1,212 +0,0 @@ -from ninja import File -from ninja import Router -from ninja import Schema -from ninja.errors import HttpError -from ninja.files import UploadedFile -from ninja import Schema -from .models import FileObject, EmbedObject -from .utils import get_core_kitchenai_app -from django.http import HttpResponse -import posthog -import logging -from django.apps import apps -from typing import List -from .signals import query_output_signal, query_input_signal - -logger = logging.getLogger(__name__) -router = Router() - -# Create a Schema that represents FileObject -class FileObjectSchema(Schema): - name: str - ingest_label: str | None = None - metadata: dict[str, str] | None = None - # Add any other fields from your FileObject model that you want to include -class FileObjectResponse(Schema): - id: int - name: str - ingest_label: str - metadata: dict[str,str] - status: str - -@router.get("/health") -async def default(request): - return {"msg": "ok"} - - -@router.post("/file", response=FileObjectResponse) -async def file_upload(request, data: FileObjectSchema,file: UploadedFile = File(...)): - """main entry for any file upload. 
Will upload via django storage and emit signals to any listeners""" - file_object = await FileObject.objects.acreate( - name=data.name, - file=file, - ingest_label=data.ingest_label, - metadata=data.metadata if data.metadata else {}, - status=FileObject.Status.PENDING - ) - return file_object - - -@router.get("/file/{pk}", response=FileObjectResponse) -async def file_get(request, pk: int): - """get a file""" - try: - file_object = await FileObject.objects.aget(pk=pk) - return file_object - except FileObject.DoesNotExist: - raise HttpError(404, "File not found") - - - -@router.delete("/file/{pk}") -async def file_delete(request, pk: int): - """delete a file""" - try: - await FileObject.objects.filter(pk=pk).adelete() - return {"msg": "deleted"} - except FileObject.DoesNotExist: - raise HttpError(404, "File not found") - -@router.get("/file", response=list[FileObjectResponse]) -def files_get(request): - """get all files""" - file_objects = FileObject.objects.all() - return file_objects - - - -class EmbedSchema(Schema): - text: str - ingest_label: str | None = None - metadata: dict[str, str] | None = None - - # Add any other fields from your FileObject model that you want to include -class EmbedObjectResponse(Schema): - id: int - text: str - ingest_label: str - metadata: dict[str,str] - status: str - -#Embed Object API -@router.post("/embed", response=EmbedObjectResponse) -async def embed_create(request, data: EmbedSchema): - """Create a new embed from text""" - embed_object = await EmbedObject.objects.acreate( - text=data.text, - ingest_label=data.ingest_label, - metadata=data.metadata if data.metadata else {}, - status=EmbedObject.Status.PENDING, - ) - return embed_object - -@router.get("/embed/{pk}", response=EmbedObjectResponse) -async def embed_get(request, pk: int): - """Get an embed""" - try: - embed_object = await EmbedObject.objects.aget( - pk=pk, - ) - return embed_object - except EmbedObject.DoesNotExist: - raise HttpError(404, "Embed not found") - -@router.get("/embed", response=list[EmbedObjectResponse]) -def embeds_get(request): - """Get all embeds""" - embed_objects = EmbedObject.objects.all() - return embed_objects - -@router.delete("/embed/{pk}") -async def embed_delete(request, pk: int): - """Delete an embed""" - try: - await EmbedObject.objects.filter(pk=pk).adelete() - return {"msg": "deleted"} - except EmbedObject.DoesNotExist: - raise HttpError(404, "Embed not found") - - - -class QuerySchema(Schema): - query: str - metadata: dict[str, str] | None = None - -class QueryResponseSchema(Schema): - response: str - -class AgentResponseSchema(Schema): - response: str - -class EmbedSchema(Schema): - text: str - metadata: dict[str, str] | None = None - - -@router.post("/agent/{label}", response=AgentResponseSchema) -async def agent(request, label: str, data: QuerySchema): - """Create a new agent""" - try: - posthog.capture("kitchenai_sdk", "agent_handler") - core_app = apps.get_app_config("core") - if not core_app.kitchenai_app: - logger.error("No kitchenai app in core app config") - return HttpResponse(status=404) - agent_func = core_app.kitchenai_app._agent_handlers.get(f"{core_app.kitchenai_app._namespace}.{label}") - if not agent_func: - logger.error(f"Agent function not found for {label}") - return HttpResponse(status=404) - - return await agent_func(data) - except Exception as e: - logger.error(f"Error in agent: {e}") - return HttpError(500, "agent function not found") - -@router.post("/query/{label}", response=QueryResponseSchema) -async def query(request, label: str, 
data: QuerySchema): - """Create a new query""" - """process file async function for core app using storage task""" - try: - core_app = apps.get_app_config("core") - if not core_app.kitchenai_app: - logger.error("No kitchenai app in core app config") - return HttpResponse(status=404) - - query_func = core_app.kitchenai_app._query_handlers.get(f"{core_app.kitchenai_app._namespace}.{label}") - if not query_func: - logger.error(f"Query function not found for {label}") - return HttpResponse(status=404) - - #Signal the start of the query - #query_input_signal.send(sender="query_input", data=data) - print(f"Querying {label} with {data}") - result = await query_func(data) - #Signal the end of the query - #query_output_signal.send(sender="query_output", result=result) - return result - except Exception as e: - logger.error(f"Error in query: {e}") - return HttpError(500, "query function not found") - -class KitchenAIAppSchema(Schema): - namespace: str - query_handlers: List[str] - agent_handlers: List[str] - embed_tasks: List[str] - embed_delete_tasks: List[str] - storage_tasks: List[str] - storage_delete_tasks: List[str] - storage_create_hooks: List[str] - storage_delete_hooks: List[str] - - -@router.get("/labels", response=KitchenAIAppSchema) -async def labels(request): - """Lists all the custom kitchenai labels""" - core_app = apps.get_app_config("core") - if not core_app.kitchenai_app: - logger.error("No kitchenai app in core app config") - return HttpResponse(status=404) - - return core_app.kitchenai_app.to_dict() - diff --git a/kitchenai/core/api/__init__.py b/kitchenai/core/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kitchenai/core/api/agent.py b/kitchenai/core/api/agent.py new file mode 100644 index 0000000..1440b98 --- /dev/null +++ b/kitchenai/core/api/agent.py @@ -0,0 +1,43 @@ + +from ninja import Router +from ninja import Schema +from ninja.errors import HttpError +from ninja import Schema +from django.http import HttpResponse +import logging +import posthog +from django.apps import apps + +from django_eventstream import send_event + +logger = logging.getLogger(__name__) +router = Router() +class QuerySchema(Schema): + query: str + metadata: dict[str, str] | None = None + +class QueryResponseSchema(Schema): + response: str + +class AgentResponseSchema(Schema): + response: str + + +@router.post("/{label}", response=AgentResponseSchema) +async def agent(request, label: str, data: QuerySchema): + """Create a new agent""" + try: + posthog.capture("kitchenai_sdk", "agent_handler") + core_app = apps.get_app_config("core") + if not core_app.kitchenai_app: + logger.error("No kitchenai app in core app config") + return HttpResponse(status=404) + agent_func = core_app.kitchenai_app._agent_handlers.get(f"{core_app.kitchenai_app._namespace}.{label}") + if not agent_func: + logger.error(f"Agent function not found for {label}") + return HttpResponse(status=404) + + return await agent_func(data) + except Exception as e: + logger.error(f"Error in agent: {e}") + return HttpError(500, "agent function not found") \ No newline at end of file diff --git a/kitchenai/core/api/embedding.py b/kitchenai/core/api/embedding.py new file mode 100644 index 0000000..d860e77 --- /dev/null +++ b/kitchenai/core/api/embedding.py @@ -0,0 +1,78 @@ +from ninja import Router +from ninja import Schema +from ninja.errors import HttpError +from ninja import Schema +from ..models import EmbedObject + +import logging + + +from django_eventstream import send_event + +logger = logging.getLogger(__name__) 
+router = Router() + + + +class EmbedSchema(Schema): + text: str + ingest_label: str | None = None + metadata: dict[str, str] | None = None + + # Add any other fields from your FileObject model that you want to include +class EmbedObjectResponse(Schema): + id: int + text: str + ingest_label: str + metadata: dict[str,str] + status: str + +#Embed Object API +@router.post("/", response=EmbedObjectResponse) +async def embed_create(request, data: EmbedSchema): + """Create a new embed from text""" + try: + embed_object = await EmbedObject.objects.acreate( + text=data.text, + ingest_label=data.ingest_label, + metadata=data.metadata if data.metadata else {}, + status=EmbedObject.Status.PENDING, + ) + return embed_object + except Exception as e: + logger.error(f"Unexpected error occurred during embed creation: {e}") + raise HttpError(500, "Internal Server Error") + +@router.get("/{pk}", response=EmbedObjectResponse) +async def embed_get(request, pk: int): + """Get an embed""" + try: + embed_object = await EmbedObject.objects.aget(pk=pk) + return embed_object + except EmbedObject.DoesNotExist: + raise HttpError(404, "Embed not found") + except Exception as e: + logger.error(f"Unexpected error occurred while retrieving embed: {e}") + raise HttpError(500, "Internal Server Error") + +@router.get("/", response=list[EmbedObjectResponse]) +def embeds_get(request): + """Get all embeds""" + try: + embed_objects = EmbedObject.objects.all() + return embed_objects + except Exception as e: + logger.error(f"Unexpected error occurred while retrieving all embeds: {e}") + raise HttpError(500, "Internal Server Error") + +@router.delete("/{pk}") +async def embed_delete(request, pk: int): + """Delete an embed""" + try: + await EmbedObject.objects.filter(pk=pk).adelete() + return {"msg": "deleted"} + except EmbedObject.DoesNotExist: + raise HttpError(404, "Embed not found") + except Exception as e: + logger.error(f"Unexpected error occurred during embed deletion: {e}") + raise HttpError(500, "Internal Server Error") diff --git a/kitchenai/core/api/file.py b/kitchenai/core/api/file.py new file mode 100644 index 0000000..66bebb1 --- /dev/null +++ b/kitchenai/core/api/file.py @@ -0,0 +1,79 @@ + + +from ninja import File +from ninja import Router +from ninja import Schema +from ninja.errors import HttpError +from ninja.files import UploadedFile +from ninja import Schema +from ..models import FileObject +import logging + + +logger = logging.getLogger(__name__) +router = Router() + +# Create a Schema that represents FileObject +class FileObjectSchema(Schema): + name: str + ingest_label: str | None = None + metadata: dict[str, str] | None = None + # Add any other fields from your FileObject model that you want to include +class FileObjectResponse(Schema): + id: int + name: str + ingest_label: str + metadata: dict[str,str] + status: str + +@router.post("/", response=FileObjectResponse) +async def file_upload(request, data: FileObjectSchema,file: UploadedFile = File(...)): + """main entry for any file upload. 
Will upload via django storage and emit signals to any listeners"""
+    try:
+        file_object = await FileObject.objects.acreate(
+            name=data.name,
+            file=file,
+            ingest_label=data.ingest_label,
+            metadata=data.metadata if data.metadata else {},
+            status=FileObject.Status.PENDING
+        )
+        return file_object
+    except Exception as e:
+        logger.error(f"Error in file upload: {e}")
+        raise HttpError(500, "Error in file upload")
+
+
+@router.get("/{pk}", response=FileObjectResponse)
+async def file_get(request, pk: int):
+    """get a file"""
+    try:
+        file_object = await FileObject.objects.aget(pk=pk)
+        return file_object
+    except FileObject.DoesNotExist:
+        raise HttpError(404, "File not found")
+    except Exception as e:
+        logger.error(f"Error in file get: {e}")
+        raise HttpError(500, "Error in file get")
+
+
+@router.delete("/{pk}")
+async def file_delete(request, pk: int):
+    """delete a file"""
+    try:
+        await FileObject.objects.filter(pk=pk).adelete()
+        return {"msg": "deleted"}
+    except FileObject.DoesNotExist:
+        raise HttpError(404, "File not found")
+    except Exception as e:
+        logger.error(f"Error in file delete: {e}")
+        raise HttpError(500, "Error in file delete")
+
+@router.get("/", response=list[FileObjectResponse])
+def files_get(request):
+    """get all files"""
+    try:
+        file_objects = FileObject.objects.all()
+        return file_objects
+    except Exception as e:
+        logger.error(f"Error in files get: {e}")
+        raise HttpError(500, "Error in files get")
\ No newline at end of file
diff --git a/kitchenai/core/api/query.py b/kitchenai/core/api/query.py
new file mode 100644
index 0000000..824117b
--- /dev/null
+++ b/kitchenai/core/api/query.py
@@ -0,0 +1,49 @@
+
+from ninja import Router
+from ninja import Schema
+from ninja.errors import HttpError
+from ninja import Schema
+from django.http import HttpResponse
+import logging
+from django.apps import apps
+from ..signals import query_output_signal, query_input_signal
+
+
+logger = logging.getLogger(__name__)
+router = Router()
+
+class QuerySchema(Schema):
+    query: str
+    metadata: dict[str, str] | None = None
+
+class QueryResponseSchema(Schema):
+    response: str
+
+class AgentResponseSchema(Schema):
+    response: str
+
+
+@router.post("/{label}", response=QueryResponseSchema)
+async def query(request, label: str, data: QuerySchema):
+    """Create a new query"""
+    """process file async function for core app using storage task"""
+    try:
+        core_app = apps.get_app_config("core")
+        if not core_app.kitchenai_app:
+            logger.error("No kitchenai app in core app config")
+            return HttpResponse(status=404)
+
+        query_func = core_app.kitchenai_app._query_handlers.get(f"{core_app.kitchenai_app._namespace}.{label}")
+        if not query_func:
+            logger.error(f"Query function not found for {label}")
+            return HttpResponse(status=404)
+
+        #Signal the start of the query
+        query_input_signal.send(sender="query_input", data=data)
+        result = await query_func(data)
+        #Signal the end of the query
+        query_output_signal.send(sender="query_output", result=result)
+        return result
+    except Exception as e:
+        logger.error(f"Error in query: {e}")
+        return HttpError(500, "query function not found")
\ No newline at end of file
diff --git a/kitchenai/core/cli/__init__.py b/kitchenai/core/cli/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/kitchenai/core/cli/core.py b/kitchenai/core/cli/core.py
new file mode 100644
index 0000000..21cd49f
--- /dev/null
+++ b/kitchenai/core/cli/core.py
@@ -0,0 +1,394 @@
+import logging
+import os
+import sys
+
+import django
+import typer
+from 
cookiecutter.main import cookiecutter +from django.conf import settings +from rich.console import Console +from typing import Annotated + +app = typer.Typer() +console = Console() + + + +logger = logging.getLogger(__name__) + +@app.command() +def add(module: str = typer.Argument("app.kitchen:kitchen")): + from django.core.management import execute_from_command_line + + execute_from_command_line(["manage", "add_module", module]) + +@app.command() +def init( + verbose: Annotated[int, typer.Option(help="verbosity level. default 0")] = 0, + collect_static: Annotated[bool, typer.Option("--collect-static/--no-collect-static", help="Collect static assets.")] = False, + ): + django.setup() + from django.core.management import execute_from_command_line + from kitchenai.core.models import KitchenAIManagement + from django.conf import settings + import posthog + + posthog.capture("init", "kitchenai_init") + + cmd = ["manage", "migrate","--verbosity", f"{verbose}"] + + if verbose != 1: + with console.status("Applying migrations...", spinner="dots"): + execute_from_command_line(cmd) + + with console.status("Setting up periodic tasks", spinner="dots"): + execute_from_command_line(["manage", "setup_periodic_tasks"]) + + if collect_static: + with console.status("Collecting static assets", spinner="dots"): + execute_from_command_line(["manage", "collectstatic", "--no-input"]) + else: + execute_from_command_line(cmd) + execute_from_command_line(["manage", "setup_periodic_tasks"]) + if collect_static: + execute_from_command_line(["manage", "collectstatic", "--no-input"]) + + + KitchenAIManagement.objects.all().delete() + try: + mgmt = KitchenAIManagement.objects.create( + version = settings.VERSION, + project_name = "default" + ) + except Exception as e: + logger.error(e) + return + + +@app.command() +def qcluster() -> None: + """Run Django-q cluster.""" + from django.core.management import execute_from_command_line + # execute_from_command_line(["manage", "qcluster", *argv[2:]]) + execute_from_command_line(["manage", "qcluster"]) + + +@app.command() +def runserver( + module: Annotated[str, typer.Option(help="Python module to load.")] = "", + address: Annotated[str, typer.Option(help="Address to run the server on.")] = "0.0.0.0:8001", + stream: Annotated[bool, typer.Option(help="Stream events to the event stream.")] = False, + ) -> None: + """Run Django runserver. If stream is true, it will run the uvicorn server. + If stream is false, it will run the dev runserver. 
+ """ + + if stream: + django.setup() + from kitchenai.api import api + from kitchenai.core.utils import setup + sys.argv = [sys.argv[0]] + + setup( + api, + module=module + ) + _run_dev_uvicorn(sys.argv) + else: + from django.core.management import execute_from_command_line + args = ["manage", "runserver"] + args.append(address) + if module: + args.append("--module") + args.append(module) + + execute_from_command_line(args) + +@app.command() +def run(module: Annotated[str, typer.Option(help="Python module to load.")] = os.environ.get("KITCHENAI_MODULE", "")) -> None: + """Run Django runserver.""" + sys.argv = [sys.argv[0]] + django.setup() + from kitchenai.api import api + from kitchenai.core.utils import setup + + setup( + api, + module=module, + ) + + _run_uvicorn(sys.argv) + + +@app.command() +def dev( + address: str ="0.0.0.0:8001", + module: Annotated[str, typer.Option(help="Python module to load.")] = "", + tailwind: Annotated[bool, typer.Option(help="Tailwind servers.")] = False, + jupyter: Annotated[bool, typer.Option(help="Jupyter Notebook servers.")] = False, + stream: Annotated[bool, typer.Option(help="Stream events to the event stream.")] = False, + ): + """ + Reads the kitchen config file, reads the application file and runs the KitchenAI server + """ + import posthog + import django + import uuid + + django.setup() + commands = {"server": "kitchenai runserver"} + + posthog.capture("init", "kitchenai_dev") + + if module: + commands["server"] = f"kitchenai runserver --module {module}" + if stream: + commands["server"] = commands["server"] + " --stream" + + if jupyter: + #user is running jupyter alongside kitchenai + from kitchenai.core.models import KitchenAIManagement + mgmt = KitchenAIManagement.objects.filter(name="kitchenai_management").first() + notebook_id = uuid.uuid4() + mgmt.jupyter_token = notebook_id + mgmt.save() + + commands["jupyter"] = f"jupyter lab --NotebookApp.token='{notebook_id}'" + + + if tailwind: + if "django_tailwind_cli" in settings.INSTALLED_APPS: + commands["tailwind"] = "django-admin tailwind watch" + if "tailwind" in settings.INSTALLED_APPS: + commands["tailwind"] = "django-admin tailwind start" + if "django_q" in settings.INSTALLED_APPS: + commands["qcluster"] = "kitchenai qcluster" + + typer.echo(f"[INFO] starting development server on {address}") + + # call_command("migrate") + _run_with_honcho(commands) + +@app.command() +def manage(args: list[str] = typer.Argument(None, help="Arguments for Django's manage.py")) -> None: + """ + Run Django's manage command with additional arguments. 
+ """ + from django.core.management import execute_from_command_line + + # Build the argument list for Django + if args is None: + sys.argv = ["manage"] + else: + sys.argv = ["manage"] + args + + execute_from_command_line(sys.argv) + +@app.command() +def setup(): + """Run some project setup tasks""" + django.setup() + from django.core.management import execute_from_command_line + import os + + execute_from_command_line(["manage", "migrate"]) + execute_from_command_line(["manage", "setup_periodic_tasks"]) + + # Set environment variables for superuser credentials + email = os.environ.get("DJANGO_SUPERUSER_EMAIL", "admin@localhost") + password = os.environ.get("DJANGO_SUPERUSER_PASSWORD", "admin") + username = os.environ.get("DJANGO_SUPERUSER_USERNAME", email.split("@")[0]) + + if password == "admin": + #set it + os.environ["DJANGO_SUPERUSER_PASSWORD"] = "admin" + execute_from_command_line( + ["manage", "createsuperuser", "--noinput", "--traceback", "--email", email, "--username", username] + ) + +@app.command() +def build( + dir: str, + module: str, + admin: Annotated[bool, typer.Option("--admin/--no-admin", help="Admin status (default is True)")] = False, +): + """ + Reads the kitchen config file, reads the application file and runs the KitchenAI server. + """ + django.setup() + from django.template import loader + import pathlib + import subprocess + from rich.text import Text + + base_dir = pathlib.Path(dir) + + # Flip the admin flag because we want it to default to True unless the flag is passed + admin = not admin + + module_name = module.split(":")[0] + + # Save the configuration to the database + template_name = 'build_templates/Dockerfile.tmpl' + + # Check if requirements.txt and module file exist in the directory + requirements_file = base_dir / 'requirements.txt' + module_path = base_dir / f"{module_name}.py" + + if not requirements_file.exists() or not module_path.exists(): + console.print("[bold red]Error:[/bold red] Both requirements.txt and the module file must exist in the specified directory.") + raise typer.Exit(code=1) + + # Context data to pass into the template + context = { + 'module': module, + "admin": admin + } + + try: + # Load and render the template with the context data + template = loader.get_template(template_name) + rendered_content = template.render(context) + + # Write the rendered Dockerfile to the specified directory + dockerfile_path = base_dir / 'Dockerfile' + with open(dockerfile_path, 'w') as dockerfile: + dockerfile.write(rendered_content) + + console.print(Text(f"Dockerfile successfully created at {dockerfile_path}", style="green")) + + except Exception as e: + console.print(f"[bold red]Error rendering template:[/bold red] {e}", style="bold red") + raise typer.Exit(code=1) + + # Build the Docker image using the Dockerfile + try: + console.print("[cyan]Building Docker image...[/cyan]") + # Run the Docker build command + process = subprocess.Popen( + ["docker", "build", "-t", "kitchenai-app", dir], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + bufsize=1, + universal_newlines=True, + ) + + # Stream output line by line + for stdout_line in iter(process.stdout.readline, ""): + console.print(stdout_line.strip()) + + process.stdout.close() + return_code = process.wait() + + # Check if the Docker build was successful + if return_code == 0: + console.print("[green]Docker image built successfully![/green]") + else: + # Capture and print stderr output in case of an error + for stderr_line in iter(process.stderr.readline, ""): + 
console.print(f"[bold red]{stderr_line.strip()}[/bold red]") + console.print("[bold red]Docker build failed.[/bold red]") + raise typer.Exit(code=1) + + except FileNotFoundError: + console.print("[bold red]Docker is not installed or not available in your PATH.[/bold red]") + raise typer.Exit(code=1) + except Exception as e: + console.print(f"[bold red]Error during Docker build:[/bold red] {e}") + raise typer.Exit(code=1) + + except FileNotFoundError: + console.print("[bold red]Docker is not installed or not available in your PATH.[/bold red]") + raise typer.Exit(code=1) + except Exception as e: + console.print(f"[bold red]Error during Docker build:[/bold red] {e}") + raise typer.Exit(code=1) + +@app.command() +def new(): + """ + Reads the kitchen config file, reads the application file and runs the KitchenAI server + """ + + cookiecutter("https://github.com/epuerta9/cookiecutter-cookbook.git", output_dir=".") + + + +def _run_with_honcho(commands: dict): + from honcho.manager import Manager + + manager = Manager() + for name, cmd in commands.items(): + manager.add_process(name, cmd) + try: + manager.loop() + finally: + manager.terminate() + + + +def _run_uvicorn(argv: list) -> None: + """ + Run gunicorn + uvicorn workers server. + https://docs.gunicorn.org/en/stable/settings.html + https://adamj.eu/tech/2021/12/29/set-up-a-gunicorn-configuration-file-and-test-it/ + """ + + import multiprocessing + from gunicorn.app import wsgiapp # for gunicorn + + workers = multiprocessing.cpu_count() * 2 + 1 + gunicorn_args = [ + "kitchenai.asgi:application", # Replace WSGI with ASGI app + "--bind", + "0.0.0.0:8001", + # "unix:/run/kitchenai_demo.gunicorn.sock", # Use this if you're using a socket file + "--max-requests", + "1000", + "--max-requests-jitter", + "50", + "--workers", + str(workers), + "--worker-class", + "uvicorn.workers.UvicornWorker", # Use Uvicorn worker for ASGI + "--access-logfile", + "-", + "--error-logfile", + "-", + ] + argv.extend(gunicorn_args) + + wsgiapp.run() + + +def _run_dev_uvicorn(argv: list) -> None: + """ + Run gunicorn + uvicorn workers server. + https://docs.gunicorn.org/en/stable/settings.html + https://adamj.eu/tech/2021/12/29/set-up-a-gunicorn-configuration-file-and-test-it/ + """ + from gunicorn.app import wsgiapp # for gunicorn + + workers = 2 + gunicorn_args = [ + "kitchenai.asgi:application", # Replace WSGI with ASGI app + "--bind", + "0.0.0.0:8001", + # "unix:/run/kitchenai_demo.gunicorn.sock", # Use this if you're using a socket file + "--max-requests", + "1000", + "--max-requests-jitter", + "50", + "--workers", + str(workers), + "--worker-class", + "uvicorn.workers.UvicornWorker", # Use Uvicorn worker for ASGI + "--access-logfile", + "-", + "--error-logfile", + "-", + ] + argv.extend(gunicorn_args) + + wsgiapp.run() diff --git a/kitchenai/core/management/commands/runserver.py b/kitchenai/core/management/commands/runserver.py new file mode 100644 index 0000000..c90243e --- /dev/null +++ b/kitchenai/core/management/commands/runserver.py @@ -0,0 +1,52 @@ +""" +License +======= + +Do whatever you like. + +Usage +===== + +1. 
put it in your project + + manage.py + base/ + management/ + commands/ + __init__.py + runserver.py +""" + +from django.conf import settings +from django.core.management.commands.runserver import Command as RunserverCommand +import os + + + +class Command(RunserverCommand): + def add_arguments(self, parser): + super().add_arguments(parser) + parser.add_argument( + '--module', + dest='module_path', + default=None, + help='Specifies the kitchenai module to load' + ) + + def run(self, *args, **options): + """Runs the server""" + # Check if this is the main process + if os.environ.get('RUN_MAIN') == 'true': + if settings.KITCHENAI_DEBUG or settings.DEBUG: + from kitchenai.api import api + from kitchenai.core.utils import setup + + module = options.get('module_path') + if module: + setup(api, module=module) + self.stdout.write(self.style.SUCCESS(f"Loaded module: {module}")) + else: + raise Exception("KitchenAI is not in debug mode when running dev server. Please set KITCHENAI_DEBUG=True in your settings.py file.") + + # Always call the parent run method to start the server + super().run(*args, **options) \ No newline at end of file diff --git a/kitchenai/core/migrations/0001_initial.py b/kitchenai/core/migrations/0001_initial.py index 13d521d..4be7dc0 100644 --- a/kitchenai/core/migrations/0001_initial.py +++ b/kitchenai/core/migrations/0001_initial.py @@ -1,7 +1,8 @@ -# Generated by Django 5.1.2 on 2024-11-27 01:42 +# Generated by Django 5.1.2 on 2024-12-18 19:21 import django.db.models.deletion -import kitchenai.core.models +import kitchenai.core.models.file +import kitchenai.core.models.management from django.db import migrations, models @@ -52,7 +53,7 @@ class Migration(migrations.Migration): ( "file", models.FileField( - upload_to=kitchenai.core.models.file_object_directory_path + upload_to=kitchenai.core.models.file.file_object_directory_path ), ), ("name", models.CharField(max_length=255)), @@ -136,7 +137,7 @@ class Migration(migrations.Migration): ( "file", models.FileField( - upload_to=kitchenai.core.models.module_directory_path + upload_to=kitchenai.core.models.management.module_directory_path ), ), ( diff --git a/kitchenai/core/migrations/0003_codefunction_label.py b/kitchenai/core/migrations/0003_codefunction_label.py deleted file mode 100644 index 866c79a..0000000 --- a/kitchenai/core/migrations/0003_codefunction_label.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 5.1.2 on 2024-11-29 20:04 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("core", "0002_codefunction_codeimport_codesetup"), - ] - - operations = [ - migrations.AddField( - model_name="codefunction", - name="label", - field=models.CharField(default="", max_length=255), - ), - ] diff --git a/kitchenai/core/migrations/0004_alter_codefunction_label.py b/kitchenai/core/migrations/0004_alter_codefunction_label.py deleted file mode 100644 index fb17c92..0000000 --- a/kitchenai/core/migrations/0004_alter_codefunction_label.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 5.1.2 on 2024-11-29 20:04 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("core", "0003_codefunction_label"), - ] - - operations = [ - migrations.AlterField( - model_name="codefunction", - name="label", - field=models.CharField(max_length=255), - ), - ] diff --git a/kitchenai/core/migrations/0005_notebook.py b/kitchenai/core/migrations/0005_notebook.py deleted file mode 100644 index e7764d6..0000000 --- 
a/kitchenai/core/migrations/0005_notebook.py +++ /dev/null @@ -1,33 +0,0 @@ -# Generated by Django 5.1.2 on 2024-11-29 21:20 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("core", "0004_alter_codefunction_label"), - ] - - operations = [ - migrations.CreateModel( - name="Notebook", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ("name", models.CharField(max_length=255, unique=True)), - ], - options={ - "abstract": False, - }, - ), - ] diff --git a/kitchenai/core/migrations/0006_codefunction_notebook_codeimport_notebook_and_more.py b/kitchenai/core/migrations/0006_codefunction_notebook_codeimport_notebook_and_more.py deleted file mode 100644 index f933c31..0000000 --- a/kitchenai/core/migrations/0006_codefunction_notebook_codeimport_notebook_and_more.py +++ /dev/null @@ -1,44 +0,0 @@ -# Generated by Django 5.1.2 on 2024-11-29 21:24 - -import django.db.models.deletion -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("core", "0005_notebook"), - ] - - operations = [ - migrations.AddField( - model_name="codefunction", - name="notebook", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to="core.notebook", - ), - ), - migrations.AddField( - model_name="codeimport", - name="notebook", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to="core.notebook", - ), - ), - migrations.AddField( - model_name="codesetup", - name="notebook", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to="core.notebook", - ), - ), - ] diff --git a/kitchenai/core/migrations/0007_codeimport_label_codesetup_label.py b/kitchenai/core/migrations/0007_codeimport_label_codesetup_label.py deleted file mode 100644 index 212b156..0000000 --- a/kitchenai/core/migrations/0007_codeimport_label_codesetup_label.py +++ /dev/null @@ -1,23 +0,0 @@ -# Generated by Django 5.1.2 on 2024-12-01 17:36 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("core", "0006_codefunction_notebook_codeimport_notebook_and_more"), - ] - - operations = [ - migrations.AddField( - model_name="codeimport", - name="label", - field=models.CharField(default="", max_length=255), - ), - migrations.AddField( - model_name="codesetup", - name="label", - field=models.CharField(default="", max_length=255), - ), - ] diff --git a/kitchenai/core/migrations/0008_alter_codeimport_label_alter_codesetup_label.py b/kitchenai/core/migrations/0008_alter_codeimport_label_alter_codesetup_label.py deleted file mode 100644 index 7350448..0000000 --- a/kitchenai/core/migrations/0008_alter_codeimport_label_alter_codesetup_label.py +++ /dev/null @@ -1,23 +0,0 @@ -# Generated by Django 5.1.2 on 2024-12-01 17:36 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("core", "0007_codeimport_label_codesetup_label"), - ] - - operations = [ - migrations.AlterField( - model_name="codeimport", - name="label", - field=models.CharField(max_length=255), - ), - migrations.AlterField( - model_name="codesetup", - name="label", - field=models.CharField(max_length=255), - ), - ] diff --git 
a/kitchenai/core/models.py b/kitchenai/core/models.py deleted file mode 100644 index c5205d7..0000000 --- a/kitchenai/core/models.py +++ /dev/null @@ -1,136 +0,0 @@ -import uuid - -from django.db import models -from falco_toolbox.models import TimeStamped - -def file_object_directory_path(instance, filename): - # file will be uploaded to MEDIA_ROOT/uuid/filename - return f"kitchenai/{uuid.uuid4()}/{filename}" - -def module_directory_path(instance, filename): - # file will be uploaded to MEDIA_ROOT/uuid/filename - return f"kitchenai/modules/{filename}" - -class KitchenAIManagement(TimeStamped): - name = models.CharField(max_length=255, primary_key=True, default="kitchenai_management") - project_name = models.CharField(max_length=255) - version = models.CharField(max_length=255) - description = models.TextField(default="") - jupyter_token = models.CharField(max_length=255, default="") - jupyter_host = models.CharField(max_length=255, default="") - jupyter_port = models.CharField(max_length=255, default="8888") - jupyter_protocol = models.CharField(max_length=255, default="http") - - def __str__(self): - return self.name - - -class KitchenAIPlugins(TimeStamped): - name = models.CharField(max_length=255, unique=True) - kitchen = models.ForeignKey(KitchenAIManagement, on_delete=models.CASCADE) - - def __str__(self): - return self.name - - -class KitchenAIDependencies(TimeStamped): - name = models.CharField(max_length=255, unique=True) - kitchen = models.ForeignKey(KitchenAIManagement, on_delete=models.CASCADE) - - def __str__(self): - return self.name - - -class KitchenAIRootModule(TimeStamped): - name = models.CharField(max_length=255, unique=True) - kitchen = models.ForeignKey(KitchenAIManagement, on_delete=models.CASCADE) - -class KitchenAIModule(TimeStamped): - name = models.CharField(max_length=255) - kitchen = models.ForeignKey(KitchenAIManagement, on_delete=models.CASCADE) - jupyter_path = models.CharField(max_length=255, default="") - file = models.FileField(upload_to=module_directory_path) - - -class Notebook(TimeStamped): - name = models.CharField(max_length=255, unique=True) - - def __str__(self) -> str: - return self.name - -class CodeFunction(TimeStamped): - class FuncType(models.TextChoices): - STORAGE = "storage" - EMBEDDING = "embedding" - QUERY = "query" - AGENT = "agent" - - hash = models.CharField(max_length=255) - raw_code = models.TextField() - code = models.TextField() - type = models.CharField(max_length=255, choices=FuncType) - label = models.CharField(max_length=255) - notebook = models.ForeignKey(Notebook, on_delete=models.CASCADE, blank=True, null=True) - - def __str__(self) -> str: - return self.label - -class CodeImport(TimeStamped): - hash = models.CharField(max_length=255) - code = models.TextField() - notebook = models.ForeignKey(Notebook, on_delete=models.CASCADE, blank=True, null=True) - label = models.CharField(max_length=255) - - def __str__(self) -> str: - return f"" - -class CodeSetup(TimeStamped): - hash = models.CharField(max_length=255) - code = models.TextField() - notebook = models.ForeignKey(Notebook, on_delete=models.CASCADE, blank=True, null=True) - label = models.CharField(max_length=255) - - - def __str__(self) -> str: - return f"" - - - -class FileObject(TimeStamped): - """ - This is a model for any file that is uploaded to the system. 
- It will be used to trigger any storage tasks or other processes - """ - class Status(models.TextChoices): - PENDING = "pending" - PROCESSING = "processing" - COMPLETED = "completed" - FAILED = "failed" - - file = models.FileField(upload_to=file_object_directory_path) - name = models.CharField(max_length=255) - ingest_label = models.CharField(max_length=255) - status = models.CharField(max_length=255, default=Status.PENDING) - metadata = models.JSONField(default=dict) - - def __str__(self): - return self.name - -class EmbedObject(TimeStamped): - """ - This is a model for any embed object that is created - """ - class Status(models.TextChoices): - PENDING = "pending" - PROCESSING = "processing" - COMPLETED = "completed" - FAILED = "failed" - - text = models.CharField(max_length=255) - ingest_label = models.CharField(max_length=255) - status = models.CharField(max_length=255, default=Status.PENDING) - metadata = models.JSONField(default=dict) - - def __str__(self): - return self.text - diff --git a/kitchenai/core/models/__init__.py b/kitchenai/core/models/__init__.py new file mode 100644 index 0000000..b39edde --- /dev/null +++ b/kitchenai/core/models/__init__.py @@ -0,0 +1,3 @@ +from .embed import EmbedObject +from .file import FileObject +from .management import KitchenAIManagement, KitchenAIDependencies, KitchenAIRootModule, KitchenAIModule diff --git a/kitchenai/core/models/embed.py b/kitchenai/core/models/embed.py new file mode 100644 index 0000000..d0b7933 --- /dev/null +++ b/kitchenai/core/models/embed.py @@ -0,0 +1,21 @@ +from django.db import models +from falco_toolbox.models import TimeStamped + + +class EmbedObject(TimeStamped): + """ + This is a model for any embed object that is created + """ + class Status(models.TextChoices): + PENDING = "pending" + PROCESSING = "processing" + COMPLETED = "completed" + FAILED = "failed" + + text = models.CharField(max_length=255) + ingest_label = models.CharField(max_length=255) + status = models.CharField(max_length=255, default=Status.PENDING) + metadata = models.JSONField(default=dict) + + def __str__(self): + return self.text diff --git a/kitchenai/core/models/file.py b/kitchenai/core/models/file.py new file mode 100644 index 0000000..afe3935 --- /dev/null +++ b/kitchenai/core/models/file.py @@ -0,0 +1,29 @@ + +import uuid + +from django.db import models +from falco_toolbox.models import TimeStamped + +def file_object_directory_path(instance, filename): + # file will be uploaded to MEDIA_ROOT/uuid/filename + return f"kitchenai/{uuid.uuid4()}/{filename}" + +class FileObject(TimeStamped): + """ + This is a model for any file that is uploaded to the system. 
+ It will be used to trigger any storage tasks or other processes + """ + class Status(models.TextChoices): + PENDING = "pending" + PROCESSING = "processing" + COMPLETED = "completed" + FAILED = "failed" + + file = models.FileField(upload_to=file_object_directory_path) + name = models.CharField(max_length=255) + ingest_label = models.CharField(max_length=255) + status = models.CharField(max_length=255, default=Status.PENDING) + metadata = models.JSONField(default=dict) + + def __str__(self): + return self.name \ No newline at end of file diff --git a/kitchenai/core/models/management.py b/kitchenai/core/models/management.py new file mode 100644 index 0000000..9b86740 --- /dev/null +++ b/kitchenai/core/models/management.py @@ -0,0 +1,46 @@ +from django.db import models +from falco_toolbox.models import TimeStamped + +def module_directory_path(instance, filename): + # file will be uploaded to MEDIA_ROOT/uuid/filename + return f"kitchenai/modules/{filename}" + +class KitchenAIManagement(TimeStamped): + name = models.CharField(max_length=255, primary_key=True, default="kitchenai_management") + project_name = models.CharField(max_length=255) + version = models.CharField(max_length=255) + description = models.TextField(default="") + jupyter_token = models.CharField(max_length=255, default="") + jupyter_host = models.CharField(max_length=255, default="") + jupyter_port = models.CharField(max_length=255, default="8888") + jupyter_protocol = models.CharField(max_length=255, default="http") + + def __str__(self): + return self.name + + +class KitchenAIPlugins(TimeStamped): + name = models.CharField(max_length=255, unique=True) + kitchen = models.ForeignKey(KitchenAIManagement, on_delete=models.CASCADE) + + def __str__(self): + return self.name + + +class KitchenAIDependencies(TimeStamped): + name = models.CharField(max_length=255, unique=True) + kitchen = models.ForeignKey(KitchenAIManagement, on_delete=models.CASCADE) + + def __str__(self): + return self.name + + +class KitchenAIRootModule(TimeStamped): + name = models.CharField(max_length=255, unique=True) + kitchen = models.ForeignKey(KitchenAIManagement, on_delete=models.CASCADE) + +class KitchenAIModule(TimeStamped): + name = models.CharField(max_length=255) + kitchen = models.ForeignKey(KitchenAIManagement, on_delete=models.CASCADE) + jupyter_path = models.CharField(max_length=255, default="") + file = models.FileField(upload_to=module_directory_path) \ No newline at end of file diff --git a/kitchenai/core/router.py b/kitchenai/core/router.py new file mode 100644 index 0000000..243f5c4 --- /dev/null +++ b/kitchenai/core/router.py @@ -0,0 +1,49 @@ +from ninja import Router +from ninja import Schema +from django.http import HttpResponse +import logging +from django.apps import apps +from typing import List +from .signals import query_output_signal, query_input_signal +from .api.query import router as query_router +from .api.agent import router as agent_router +from .api.embedding import router as embedding_router +from .api.file import router as file_router +from ninja.errors import HttpError + +logger = logging.getLogger(__name__) + +router = Router() +router.add_router("/query", query_router, tags=["query"]) +router.add_router("/agent", agent_router, tags=["agent"]) +router.add_router("/embeddings", embedding_router, tags=["embeddings"]) +router.add_router("/file", file_router, tags=["file"]) + +@router.get("/health") +async def default(request): + return {"msg": "ok"} + + +class KitchenAIAppSchema(Schema): + namespace: str + query_handlers: 
List[str] + agent_handlers: List[str] + embed_tasks: List[str] + embed_delete_tasks: List[str] + storage_tasks: List[str] + storage_delete_tasks: List[str] + storage_create_hooks: List[str] + storage_delete_hooks: List[str] + + +@router.get("/labels", response=KitchenAIAppSchema) +async def labels(request): + """Lists all the custom kitchenai labels""" + core_app = apps.get_app_config("core") + if not core_app.kitchenai_app: + logger.error("No kitchenai app in core app config") + return HttpResponse(status=404) + + return core_app.kitchenai_app.to_dict() + + diff --git a/kitchenai/core/views.py b/kitchenai/core/views.py index d405833..8bcd74f 100644 --- a/kitchenai/core/views.py +++ b/kitchenai/core/views.py @@ -9,14 +9,12 @@ from .forms import KitchenAIModuleForm from .models import KitchenAIModule - async def home(request: HttpRequest): return TemplateResponse( request, "pages/home.html", ) - @for_htmx(use_partial="table") def kitchenaimodule_list(request: HttpRequest): kitchenaimodules = KitchenAIModule.objects.order_by("name") diff --git a/kitchenai/notebooks/__init__.py b/kitchenai/notebooks/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kitchenai/notebooks/admin.py b/kitchenai/notebooks/admin.py new file mode 100644 index 0000000..0cde730 --- /dev/null +++ b/kitchenai/notebooks/admin.py @@ -0,0 +1,22 @@ +from django.contrib import admin + +from .models import CodeFunction, CodeImport, CodeSetup, Notebook + + +@admin.register(CodeFunction) +class CodeFunctionAdmin(admin.ModelAdmin): + pass + + +@admin.register(CodeImport) +class CodeImportAdmin(admin.ModelAdmin): + pass + +@admin.register(CodeSetup) +class CodeSetupAdmin(admin.ModelAdmin): + pass + + +@admin.register(Notebook) +class NotebookAdmin(admin.ModelAdmin): + pass \ No newline at end of file diff --git a/kitchenai/notebooks/apps.py b/kitchenai/notebooks/apps.py new file mode 100644 index 0000000..77e952c --- /dev/null +++ b/kitchenai/notebooks/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class NotebooksConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "kitchenai.notebooks" diff --git a/kitchenai/notebooks/cli/__init__.py b/kitchenai/notebooks/cli/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kitchenai/cli/cook.py b/kitchenai/notebooks/cli/notebooks.py similarity index 98% rename from kitchenai/cli/cook.py rename to kitchenai/notebooks/cli/notebooks.py index b319323..05818d7 100644 --- a/kitchenai/cli/cook.py +++ b/kitchenai/notebooks/cli/notebooks.py @@ -68,4 +68,4 @@ def cook_select(name: str): console.print(f"[green]Downloaded {file} successfully.[/green]") else: console.print(f"[red]Error downloading {file}.[/red]") - raise typer.Exit(code=1) + raise typer.Exit(code=1) \ No newline at end of file diff --git a/kitchenai/core/migrations/0002_codefunction_codeimport_codesetup.py b/kitchenai/notebooks/migrations/0001_initial.py similarity index 58% rename from kitchenai/core/migrations/0002_codefunction_codeimport_codesetup.py rename to kitchenai/notebooks/migrations/0001_initial.py index 37df528..f80bbf4 100644 --- a/kitchenai/core/migrations/0002_codefunction_codeimport_codesetup.py +++ b/kitchenai/notebooks/migrations/0001_initial.py @@ -1,17 +1,38 @@ -# Generated by Django 5.1.2 on 2024-11-29 19:31 +# Generated by Django 5.1.2 on 2024-12-18 19:21 +import django.db.models.deletion from django.db import migrations, models class Migration(migrations.Migration): - dependencies = [ - ("core", "0001_initial"), - ] + initial = True + 
+ dependencies = [] operations = [ migrations.CreateModel( - name="CodeFunction", + name="Notebook", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("name", models.CharField(max_length=255, unique=True)), + ], + options={ + "abstract": False, + }, + ), + migrations.CreateModel( + name="CodeSetup", fields=[ ( "id", @@ -25,18 +46,15 @@ class Migration(migrations.Migration): ("created_at", models.DateTimeField(auto_now_add=True)), ("updated_at", models.DateTimeField(auto_now=True)), ("hash", models.CharField(max_length=255)), - ("raw_code", models.TextField()), ("code", models.TextField()), + ("label", models.CharField(max_length=255)), ( - "type", - models.CharField( - choices=[ - ("storage", "Storage"), - ("embedding", "Embedding"), - ("query", "Query"), - ("agent", "Agent"), - ], - max_length=255, + "notebook", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="notebooks.notebook", ), ), ], @@ -60,13 +78,23 @@ class Migration(migrations.Migration): ("updated_at", models.DateTimeField(auto_now=True)), ("hash", models.CharField(max_length=255)), ("code", models.TextField()), + ("label", models.CharField(max_length=255)), + ( + "notebook", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="notebooks.notebook", + ), + ), ], options={ "abstract": False, }, ), migrations.CreateModel( - name="CodeSetup", + name="CodeFunction", fields=[ ( "id", @@ -80,7 +108,30 @@ class Migration(migrations.Migration): ("created_at", models.DateTimeField(auto_now_add=True)), ("updated_at", models.DateTimeField(auto_now=True)), ("hash", models.CharField(max_length=255)), + ("raw_code", models.TextField()), ("code", models.TextField()), + ( + "type", + models.CharField( + choices=[ + ("storage", "Storage"), + ("embedding", "Embedding"), + ("query", "Query"), + ("agent", "Agent"), + ], + max_length=255, + ), + ), + ("label", models.CharField(max_length=255)), + ( + "notebook", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="notebooks.notebook", + ), + ), ], options={ "abstract": False, diff --git a/kitchenai/notebooks/migrations/__init__.py b/kitchenai/notebooks/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kitchenai/notebooks/models.py b/kitchenai/notebooks/models.py new file mode 100644 index 0000000..2c1999d --- /dev/null +++ b/kitchenai/notebooks/models.py @@ -0,0 +1,45 @@ +from django.db import models +from falco_toolbox.models import TimeStamped + + +class Notebook(TimeStamped): + name = models.CharField(max_length=255, unique=True) + + def __str__(self) -> str: + return self.name + +class CodeFunction(TimeStamped): + class FuncType(models.TextChoices): + STORAGE = "storage" + EMBEDDING = "embedding" + QUERY = "query" + AGENT = "agent" + + hash = models.CharField(max_length=255) + raw_code = models.TextField() + code = models.TextField() + type = models.CharField(max_length=255, choices=FuncType) + label = models.CharField(max_length=255) + notebook = models.ForeignKey(Notebook, on_delete=models.CASCADE, blank=True, null=True) + + def __str__(self) -> str: + return self.label + +class CodeImport(TimeStamped): + hash = models.CharField(max_length=255) + code = models.TextField() + notebook = models.ForeignKey(Notebook, 
on_delete=models.CASCADE, blank=True, null=True) + label = models.CharField(max_length=255) + + def __str__(self) -> str: + return f"" + +class CodeSetup(TimeStamped): + hash = models.CharField(max_length=255) + code = models.TextField() + notebook = models.ForeignKey(Notebook, on_delete=models.CASCADE, blank=True, null=True) + label = models.CharField(max_length=255) + + + def __str__(self) -> str: + return f"" diff --git a/kitchenai/notebooks/views.py b/kitchenai/notebooks/views.py new file mode 100644 index 0000000..e69de29 diff --git a/kitchenai/settings.py b/kitchenai/settings.py index 74f8439..441e501 100644 --- a/kitchenai/settings.py +++ b/kitchenai/settings.py @@ -1,16 +1,13 @@ import multiprocessing import os -import sys from email.utils import parseaddr from pathlib import Path import djp import sentry_sdk from environs import Env -from falco_toolbox.sentry import sentry_profiles_sampler -from falco_toolbox.sentry import sentry_traces_sampler -from marshmallow.validate import Email -from marshmallow.validate import OneOf +from falco_toolbox.sentry import sentry_profiles_sampler, sentry_traces_sampler +from marshmallow.validate import Email, OneOf from sentry_sdk.integrations.django import DjangoIntegration from sentry_sdk.integrations.logging import LoggingIntegration @@ -42,7 +39,11 @@ # https://docs.djangoproject.com/en/4.0/ref/settings/ -ALLOWED_HOSTS = env.list("ALLOWED_HOSTS", default=["*"] if DEBUG or KITCHENAI_DEBUG else ["localhost"], subcast=str) +ALLOWED_HOSTS = env.list( + "ALLOWED_HOSTS", + default=["*"] if DEBUG or KITCHENAI_DEBUG else ["localhost"], + subcast=str, +) ASGI_APPLICATION = "kitchenai.asgi.application" @@ -62,7 +63,9 @@ CSRF_COOKIE_SECURE = not DEBUG DATABASES = { - "default": env.dj_db_url("DATABASE_URL", default=f"sqlite:///{KITCHENAI_DB_DIR / 'db.sqlite3'}"), + "default": env.dj_db_url( + "DATABASE_URL", default=f"sqlite:///{KITCHENAI_DB_DIR / 'db.sqlite3'}" + ), } DATABASES["default"]["ATOMIC_REQUESTS"] = False @@ -123,6 +126,7 @@ LOCAL_APPS = [ "kitchenai.core", + "kitchenai.notebooks", ] if DEBUG: @@ -177,39 +181,39 @@ # } LOGGING = { - 'version': 1, - 'disable_existing_loggers': False, - 'formatters': { - 'verbose': { - 'format': '{levelname} {asctime} {module} {message}', - 'style': '{', + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "verbose": { + "format": "{levelname} {asctime} {module} {message}", + "style": "{", }, - 'simple': { - 'format': '{levelname} {message}', - 'style': '{', + "simple": { + "format": "{levelname} {message}", + "style": "{", }, }, - 'handlers': { - 'console': { - 'class': 'logging.StreamHandler', - 'formatter': 'simple', + "handlers": { + "console": { + "class": "logging.StreamHandler", + "formatter": "simple", }, }, - 'loggers': { - 'django': { - 'handlers': ['console'], - 'level': 'INFO', # Set to INFO or WARNING to suppress DEBUG logs - 'propagate': True, + "loggers": { + "django": { + "handlers": ["console"], + "level": "INFO", # Set to INFO or WARNING to suppress DEBUG logs + "propagate": True, }, - 'urllib3.connectionpool': { - 'handlers': ['console'], - 'level': 'WARNING', # Suppress DEBUG logs from urllib3 - 'propagate': False, + "urllib3.connectionpool": { + "handlers": ["console"], + "level": "WARNING", # Suppress DEBUG logs from urllib3 + "propagate": False, }, - 'chromadb': { - 'handlers': ['console'], - 'level': 'WARNING', # Suppress DEBUG logs from chromadb - 'propagate': False, + "chromadb": { + "handlers": ["console"], + "level": "WARNING", # Suppress DEBUG logs from chromadb + 
"propagate": False, }, "kitchenai": { "handlers": ["console"], @@ -217,9 +221,9 @@ "propagate": False, # Prevent propagation to the root logger }, }, - 'root': { - 'handlers': ['console'], - 'level': 'INFO', # Set the root logger level + "root": { + "handlers": ["console"], + "level": "INFO", # Set the root logger level }, } @@ -261,7 +265,9 @@ ROOT_URLCONF = "kitchenai.urls" -SECRET_KEY = env.str("SECRET_KEY", default="django-insecure-ef6nIh7LcUjPtixFdz0_aXyUwlKqvBdJEcycRR6RvRY") +SECRET_KEY = env.str( + "SECRET_KEY", default="django-insecure-ef6nIh7LcUjPtixFdz0_aXyUwlKqvBdJEcycRR6RvRY" +) SECURE_HSTS_INCLUDE_SUBDOMAINS = not (DEBUG or KITCHENAI_DEBUG) @@ -270,7 +276,9 @@ # https://docs.djangoproject.com/en/dev/ref/middleware/#http-strict-transport-security # 2 minutes to start with, will increase as HSTS is tested # example of production value: 60 * 60 * 24 * 7 = 604800 (1 week) -SECURE_HSTS_SECONDS = 0 if DEBUG or KITCHENAI_DEBUG else env.int("SECURE_HSTS_SECONDS", default=60 * 2) +SECURE_HSTS_SECONDS = ( + 0 if DEBUG or KITCHENAI_DEBUG else env.int("SECURE_HSTS_SECONDS", default=60 * 2) +) # https://noumenal.es/notes/til/django/csrf-trusted-origins/ # SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") @@ -456,22 +464,24 @@ # sentry if env.bool("KITCHENAI_SENTRY", default=False): - if (SENTRY_DSN := env.url("SENTRY_DSN", default=None)).scheme and not (DEBUG or KITCHENAI_DEBUG): + if (SENTRY_DSN := env.url("SENTRY_DSN", default=None)).scheme and not ( + DEBUG or KITCHENAI_DEBUG + ): sentry_sdk.init( - dsn=SENTRY_DSN.geturl(), - environment=env.str( - "SENTRY_ENV", - default="development", - validate=OneOf(["development", "production"]), - ), - integrations=[ - DjangoIntegration(), - LoggingIntegration(event_level=None, level=None), - ], - traces_sampler=sentry_traces_sampler, - profiles_sampler=sentry_profiles_sampler, - send_default_pii=True, - ) + dsn=SENTRY_DSN.geturl(), + environment=env.str( + "SENTRY_ENV", + default="development", + validate=OneOf(["development", "production"]), + ), + integrations=[ + DjangoIntegration(), + LoggingIntegration(event_level=None, level=None), + ], + traces_sampler=sentry_traces_sampler, + profiles_sampler=sentry_profiles_sampler, + send_default_pii=True, + ) # 4. 
Project Settings # ----------------------------------------------------------------------------------------------------- diff --git a/kitchenai/static/images/draft-builder.png b/kitchenai/static/images/draft-builder.png deleted file mode 100644 index 3965a02..0000000 Binary files a/kitchenai/static/images/draft-builder.png and /dev/null differ diff --git a/kitchenai/static/images/icon-logo.png b/kitchenai/static/images/icon-logo.png index 90ba3f4..b76b5fb 100644 Binary files a/kitchenai/static/images/icon-logo.png and b/kitchenai/static/images/icon-logo.png differ diff --git a/kitchenai/urls.py b/kitchenai/urls.py index 8b7774c..e1d1573 100644 --- a/kitchenai/urls.py +++ b/kitchenai/urls.py @@ -10,6 +10,7 @@ from django.views.generic.base import RedirectView from falco_toolbox import views as toolbox_views from health_check.views import MainView +import django_eventstream from .api import api @@ -29,7 +30,11 @@ path("health/", MainView.as_view()), path(settings.ADMIN_URL, admin.site.urls), path("accounts/", include("allauth.urls")), - path("core/", include("kitchenai.core.urls", namespace="core")), + path("dashboard/", include("kitchenai.core.urls", namespace="dashboard")), + path("stream/agent/", include(django_eventstream.urls)), + path("stream/query/", include(django_eventstream.urls)), + path("stream/chat/", include(django_eventstream.urls)), + ] + djp.urlpatterns() diff --git a/pyproject.toml b/pyproject.toml index 3e648a3..5f1b12b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,10 +60,11 @@ dependencies = [ "nbconvert", "faststream[nats]", "starlette", + "django-eventstream", "dynamic-pip", "jupyterlab", "nest_asyncio", - "kitchenai_python_sdk", + "kitchenai_python_sdk>=1.3.0", "llama-index", "chromadb", "llama-index-vector-stores-chroma", diff --git a/requirements-dev.txt b/requirements-dev.txt index 5b7ed80..f7b3a2a 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,7 +1,7 @@ # # This file is autogenerated by hatch-pip-compile with Python 3.11 # -# [constraints] requirements.txt (SHA256: bef74194f8fd80762a1ef00469312dbc8243a3854822b60f9150002d8d517687) +# [constraints] requirements.txt (SHA256: 18df5e1d5be84eeae244327525e84700b34a08f7506779624aa1f99fe146898d) # # - django-browser-reload # - django-debug-toolbar @@ -30,6 +30,7 @@ # - django-allauth[socialaccount] # - django-anymail[resend] # - django-compressor +# - django-eventstream # - django-extensions # - django-health-check # - django-htmx @@ -53,7 +54,7 @@ # - heroicons[django] # - honcho # - jupyterlab -# - kitchenai-python-sdk +# - kitchenai-python-sdk>=1.3.0 # - llama-index # - llama-index-vector-stores-chroma # - nbconvert @@ -345,8 +346,10 @@ django==5.1.2 # django-compressor # django-crispy-forms # django-debug-toolbar + # django-eventstream # django-extensions # django-fastdev + # django-grip # django-health-check # django-htmx # django-lifecycle @@ -399,6 +402,10 @@ django-crispy-forms==2.3 # crispy-tailwind django-debug-toolbar==4.4.6 # via hatch.envs.dev +django-eventstream==5.3.1 + # via + # -c requirements.txt + # hatch.envs.dev django-extensions==3.2.3 # via # -c requirements.txt @@ -406,6 +413,10 @@ django-extensions==3.2.3 # dj-notebook django-fastdev==1.12.0 # via hatch.envs.dev +django-grip==3.5.1 + # via + # -c requirements.txt + # django-eventstream django-health-check==3.18.3 # via # -c requirements.txt @@ -574,6 +585,11 @@ greenlet==3.1.1 # via # -c requirements.txt # sqlalchemy +gripcontrol==4.2.0 + # via + # -c requirements.txt + # django-eventstream + # django-grip 
grpcio==1.68.1 # via # -c requirements.txt @@ -804,7 +820,7 @@ jupyterlab-widgets==3.0.13 # via ipywidgets keyring==25.4.1 # via hatch -kitchenai-python-sdk==1.1.0 +kitchenai-python-sdk==1.3.0 # via # -c requirements.txt # hatch.envs.dev @@ -1201,6 +1217,11 @@ ptyprocess==0.7.0 # -c requirements.txt # pexpect # terminado +pubcontrol==3.5.0 + # via + # -c requirements.txt + # django-grip + # gripcontrol pure-eval==0.2.3 # via # -c requirements.txt @@ -1251,6 +1272,9 @@ pyjwt==2.9.0 # via # -c requirements.txt # django-allauth + # django-eventstream + # gripcontrol + # pubcontrol pypdf==5.1.0 # via # -c requirements.txt @@ -1361,6 +1385,7 @@ requests==2.32.3 # llama-index-core # llama-index-legacy # posthog + # pubcontrol # requests-oauthlib # tiktoken requests-oauthlib==2.0.0 @@ -1438,7 +1463,10 @@ six==1.16.0 # -c requirements.txt # asttokens # cssbeautifier + # django-eventstream + # django-grip # django-pandas + # gripcontrol # jsbeautifier # kubernetes # posthog @@ -1691,8 +1719,11 @@ websockets==14.1 # via # -c requirements.txt # uvicorn -werkzeug==3.0.4 - # via hatch.envs.dev +werkzeug==3.1.3 + # via + # -c requirements.txt + # hatch.envs.dev + # django-grip wheel==0.44.0 # via pip-tools whitenoise==6.7.0 diff --git a/requirements.txt b/requirements.txt index 3e1a9e5..f34e2ea 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,6 +10,7 @@ # - django-allauth[socialaccount] # - django-anymail[resend] # - django-compressor +# - django-eventstream # - django-extensions # - django-health-check # - django-htmx @@ -33,7 +34,7 @@ # - heroicons[django] # - honcho # - jupyterlab -# - kitchenai-python-sdk +# - kitchenai-python-sdk>=1.3.0 # - llama-index # - llama-index-vector-stores-chroma # - nbconvert @@ -203,7 +204,9 @@ django==5.1.2 # django-appconf # django-compressor # django-crispy-forms + # django-eventstream # django-extensions + # django-grip # django-health-check # django-htmx # django-lifecycle @@ -236,8 +239,12 @@ django-compressor==4.5.1 # via hatch.envs.default django-crispy-forms==2.3 # via crispy-tailwind +django-eventstream==5.3.1 + # via hatch.envs.default django-extensions==3.2.3 # via hatch.envs.default +django-grip==3.5.1 + # via django-eventstream django-health-check==3.18.3 # via hatch.envs.default django-htmx==1.19.0 @@ -315,6 +322,10 @@ googleapis-common-protos==1.66.0 # via opentelemetry-exporter-otlp-proto-grpc greenlet==3.1.1 # via sqlalchemy +gripcontrol==4.2.0 + # via + # django-eventstream + # django-grip grpcio==1.68.1 # via # chromadb @@ -426,7 +437,7 @@ jupyterlab-pygments==0.3.0 # via nbconvert jupyterlab-server==2.27.3 # via jupyterlab -kitchenai-python-sdk==1.1.0 +kitchenai-python-sdk==1.3.0 # via hatch.envs.default kubernetes==31.0.0 # via chromadb @@ -493,6 +504,7 @@ markupsafe==3.0.2 # via # jinja2 # nbconvert + # werkzeug marshmallow==3.23.0 # via # dataclasses-json @@ -673,6 +685,10 @@ ptyprocess==0.7.0 # via # pexpect # terminado +pubcontrol==3.5.0 + # via + # django-grip + # gripcontrol pure-eval==0.2.3 # via stack-data pyasn1==0.6.1 @@ -702,7 +718,11 @@ pygments==2.18.0 # nbconvert # rich pyjwt==2.9.0 - # via django-allauth + # via + # django-allauth + # django-eventstream + # gripcontrol + # pubcontrol pypdf==5.1.0 # via llama-index-readers-file pypika==0.48.9 @@ -769,6 +789,7 @@ requests==2.32.3 # llama-index-core # llama-index-legacy # posthog + # pubcontrol # requests-oauthlib # tiktoken requests-oauthlib==2.0.0 @@ -813,6 +834,9 @@ shellingham==1.5.4 six==1.16.0 # via # asttokens + # django-eventstream + # django-grip + # gripcontrol # 
kubernetes # posthog # python-dateutil @@ -965,6 +989,8 @@ websocket-client==1.8.0 # kubernetes websockets==14.1 # via uvicorn +werkzeug==3.1.3 + # via django-grip whitenoise==6.7.0 # via hatch.envs.default wrapt==1.16.0
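
A quick way to exercise the pieces introduced above: the new core router (kitchenai/core/router.py) exposes GET /health and GET /labels under the /v1 prefix, and the overridden runserver command loads a kitchenai module in debug mode via --module. The sketch below is a hypothetical smoke test, not part of the change itself; it assumes the NinjaAPI instance is served under the conventional /api/ prefix and that the dev server was started with something like "python manage.py runserver --module <your_module>" so that /labels has an app to report on — both of those details sit outside this diff.

# Hypothetical smoke test for the endpoints added in kitchenai/core/router.py.
# Assumptions (not established by this diff): the API is mounted at /api/, and a
# kitchenai module has been loaded; otherwise /labels is expected to return 404.
import requests  # already pinned in requirements.txt (requests==2.32.3)

BASE = "http://localhost:8000/api/v1"  # the /api/ prefix is an assumption

def smoke_test() -> None:
    # GET /v1/health is defined on the new core router and returns {"msg": "ok"}.
    health = requests.get(f"{BASE}/health", timeout=5)
    health.raise_for_status()
    print("health:", health.json())

    # GET /v1/labels returns the KitchenAIAppSchema (namespace plus the registered
    # query/agent/embed/storage handler labels) once an app module is loaded.
    labels = requests.get(f"{BASE}/labels", timeout=5)
    if labels.ok:
        data = labels.json()
        print("namespace:", data["namespace"])
        print("query handlers:", data["query_handlers"])
    else:
        print("no kitchenai app loaded yet, status:", labels.status_code)

if __name__ == "__main__":
    smoke_test()

If server-sent events are in play, the corresponding endpoints after this change are /stream/agent/, /stream/query/, and /stream/chat/, served by django-eventstream as wired up in kitchenai/urls.py.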