-
Notifications
You must be signed in to change notification settings - Fork 6k
/
Copy pathapp_agent.py
68 lines (57 loc) · 2.37 KB
/
app_agent.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
from typing import List, cast
import chainlit as cl
import yaml
from autogen_agentchat.agents import AssistantAgent
from autogen_agentchat.base import Response
from autogen_agentchat.messages import ModelClientStreamingChunkEvent, TextMessage
from autogen_core import CancellationToken
from autogen_core.models import ChatCompletionClient
@cl.set_starters  # type: ignore
async def set_starts() -> List[cl.Starter]:
    """Provide the starter prompts shown on the Chainlit welcome screen."""
    starters: List[cl.Starter] = [
        cl.Starter(
            label="Greetings",
            message="Hello! What can you help me with today?",
        ),
        cl.Starter(
            label="Weather",
            message="Find the weather in New York City.",
        ),
    ]
    return starters
@cl.step(type="tool")  # type: ignore
async def get_weather(city: str) -> str:
    """Mock weather tool: always reports the same forecast for *city*."""
    forecast = f"The weather in {city} is 73 degrees and Sunny."
    return forecast
@cl.on_chat_start  # type: ignore
async def start_chat() -> None:
    """Set up the per-session assistant agent when a chat session opens."""
    # Read the model client definition from disk and instantiate it.
    with open("model_config.yaml", "r") as config_file:
        config = yaml.safe_load(config_file)
    client = ChatCompletionClient.load_component(config)

    # Build an assistant wired to the weather tool. Token streaming is on,
    # and the agent reflects on tool output before replying.
    agent = AssistantAgent(
        name="assistant",
        tools=[get_weather],
        model_client=client,
        system_message="You are a helpful assistant",
        model_client_stream=True,  # Enable model client streaming.
        reflect_on_tool_use=True,  # Reflect on tool use.
    )

    # Stash session state for later message handlers.
    cl.user_session.set("prompt_history", "")  # type: ignore
    cl.user_session.set("agent", agent)  # type: ignore
@cl.on_message  # type: ignore
async def chat(message: cl.Message) -> None:
    """Relay a user message to the session's agent and stream the reply back."""
    # Retrieve the agent created in start_chat for this session.
    agent = cast(AssistantAgent, cl.user_session.get("agent"))  # type: ignore

    # The UI message we progressively fill with streamed tokens.
    reply = cl.Message(content="")

    event_stream = agent.on_messages_stream(
        messages=[TextMessage(content=message.content, source="user")],
        cancellation_token=CancellationToken(),
    )
    async for event in event_stream:
        if isinstance(event, ModelClientStreamingChunkEvent):
            # Forward each model token chunk to the UI as it arrives.
            await reply.stream_token(event.content)
        elif isinstance(event, Response):
            # Streaming finished; finalize and deliver the message.
            await reply.send()