Skip to content

Commit

Permalink
wip; streaming
Browse files Browse the repository at this point in the history
  • Loading branch information
esteban-puerta-rs committed Dec 10, 2024
1 parent 5cf26bd commit 26eebdf
Show file tree
Hide file tree
Showing 2 changed files with 29 additions and 1 deletion.
Binary file removed docs/_static/videos/kitchenai-demo.webm
Binary file not shown.
30 changes: 29 additions & 1 deletion kitchenai/core/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,8 @@
from django.apps import apps
from typing import List
from .signals import query_output_signal, query_input_signal

from django.http import StreamingHttpResponse
from typing import AsyncGenerator
logger = logging.getLogger(__name__)
router = Router()

Expand Down Expand Up @@ -188,6 +189,33 @@ async def query(request, label: str, data: QuerySchema):
logger.error(f"Error in query: {e}")
return HttpError(500, "query function not found")

import asyncio
import datetime


@router.get("/stream/{label}")
async def stream(request, label: str):
    """Stream a mock Server-Sent-Events feed for *label*.

    WIP endpoint: yields a timestamped "Hello" chunk roughly once per
    second, forever; the generator is torn down when the client
    disconnects. The real implementation will stream LLM completion
    deltas instead of this mock generator.

    Returns:
        StreamingHttpResponse with ``text/event-stream`` content type.
    """
    # Use the module logger rather than print() so output goes through
    # the configured logging pipeline.
    logger.info("starting stream for label=%s", label)

    async def mock_stream() -> AsyncGenerator[str, None]:
        # Infinite mock feed; cancellation on client disconnect is the
        # only exit path.
        while True:
            await asyncio.sleep(1)
            yield f"Hello {datetime.datetime.now()}"

    return StreamingHttpResponse(
        mock_stream(),
        content_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            # NOTE: do not set Transfer-Encoding manually — it is a
            # hop-by-hop header the server owns (PEP 3333); the previous
            # explicit "chunked" value was removed.
            # Disable proxy buffering so each chunk flushes immediately
            # (nginx-specific header).
            "X-Accel-Buffering": "no",
        },
    )

class KitchenAIAppSchema(Schema):
namespace: str
query_handlers: List[str]
Expand Down

0 comments on commit 26eebdf

Please sign in to comment.