Commit: extract client from Agent

lynxrv21 committed Oct 22, 2023
1 parent dfce35f commit aed859a
Showing 17 changed files with 172 additions and 80 deletions.
21 changes: 16 additions & 5 deletions agents/agent.py
@@ -1,20 +1,31 @@
from textwrap import dedent

from clients.base import AIClient
from constants import NO_COMMENT
from gpt.gpt_client import GPTClient

DEFAULT_SYSTEM_PROMPT = dedent(f"""\
DEFAULT_SYSTEM_PROMPT = dedent(
f"""\
Provide succinct, fact-based answers. Eliminate filler words and politeness.
Concentrate on delivering actionable insights and concrete solutions.
Avoid vague or generic statements. Stick to the topic at hand.
If your response doesn't meet these standards, reply with the exact words '{NO_COMMENT}'
"""
)


class Agent:
def __init__(self, name: str, user_prompt: str, system_prompt: str = DEFAULT_SYSTEM_PROMPT):
def __init__(
self,
client: AIClient,
name: str,
user_prompt: str,
system_prompt: str = DEFAULT_SYSTEM_PROMPT,
):
self.name = name
self.gpt_client = GPTClient(system_prompt, user_prompt)

self.client = client
self.client.common_instructions = system_prompt
self.client.user_prompt = user_prompt

def query_gpt(self, transcript: str) -> str:
return self.gpt_client.query(transcript)
return self.client.query(transcript)
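A minimal sketch, not part of the commit, of how the refactored constructor is meant to be wired: the client is injected and Agent pushes the prompts onto it instead of building a GPTClient itself. FakeClient below is hypothetical and exists only to satisfy the AIClient interface without network calls.

    # Hypothetical stand-in for an AIClient; only query() is required by the ABC,
    # and the two attributes are overwritten by Agent.__init__ on whatever client it gets.
    from agents.agent import Agent
    from clients.base import AIClient


    class FakeClient(AIClient):
        def __init__(self):
            self.common_instructions = None
            self.user_prompt = None

        def query(self, transcript: str) -> str:
            # Return a canned reply instead of calling a model.
            return f"Noted: {transcript[:40]}"


    agent = Agent(client=FakeClient(), name="CTO", user_prompt="Comment on technical risk.")
    print(agent.query_gpt("We plan to rewrite the backend over the weekend."))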
28 changes: 13 additions & 15 deletions agents/chairman.py
@@ -2,34 +2,32 @@

from agents.agent import Agent
from agents.sme import SME

from clients.base import AIClient


class Chairman(Agent):
def __init__(self, name: str, SMEs: list[SME]):
def __init__(self, client: AIClient, executives: list[SME], name: str = "Chairman"):
# Construct the user_prompt string with details of the executives
self.user_prompt = self.update_user_prompt(executives)

self.user_prompt = self.update_user_prompt(SMEs)

system_prompt = (
f"Answer with only the name and nothing else."
)
system_prompt = f"Answer with only the name and nothing else."

# Call the superclass constructor with the constructed user_prompt
super().__init__(name, self.user_prompt, system_prompt)
super().__init__(client, name, self.user_prompt, system_prompt)

self.SMEs = SMEs
self.executives = executives

def update_user_prompt(self, SMEs: list[SME]) -> str:
frequency_info_list = []
for SME in SMEs:
for sme in SMEs:
frequency_info_list.append(
f"{SME.name}: expertise: {SME.expertise}. "
f"concerns: {', '.join(SME.concerns)}. spoken count: {SME.spoken_count}.\n"
f"{sme.name}: expertise: {sme.expertise}. "
f"concerns: {', '.join(sme.concerns)}. spoken count: {sme.spoken_count}.\n"
)

return (
f"Your task is to read the transcript and decide who should speak next. Do not choose the same person all of the time.\n"
f"Your task is to read the transcript and decide who should speak next. "
f"Do not choose the same person all of the time.\n"
f"Participants:\n{''.join(frequency_info_list)} "
)

@@ -44,12 +42,12 @@ def decide_next_speaker(self, transcript_list: list[str]) -> SME:
logger.info(f"Chairman called speaker: {next_speaker}")

next_executive = next(
(exec for exec in self.SMEs if exec.name == next_speaker), None
(exec for exec in self.executives if exec.name == next_speaker), None
)

if next_executive is not None:
next_executive.spoken_count += 1 # Update the frequency count
self.user_prompt = self.update_user_prompt(self.SMEs)
self.user_prompt = self.update_user_prompt(self.executives)
self.gpt_client.user_prompt = self.user_prompt
return next_executive

22 changes: 13 additions & 9 deletions agents/idea_refiner.py
@@ -1,18 +1,22 @@
from loguru import logger
from textwrap import dedent

from agents.agent import Agent
from agents.sme import SME
from clients import AIClient


class IdeaRefiner(Agent):
def __init__(self, name: str):
# Construct the user_prompt string with details of the executives

self.user_prompt = "You are going to presented with an topic for discussion at a meeting. Your task to think deeply and refine the topic presented and note obvious high level constraints and considerations. Your output will serve as an introduction to the meeting participants."
REFINER_PROMPT = dedent(
"""\
You are going to presented with an topic for discussion at a meeting.
Your task to think deeply and refine the topic presented and note obvious
high level constraints and considerations.
Your output will serve as an introduction to the meeting participants.
"""
)


class IdeaRefiner(Agent):
def __init__(self, client: AIClient, name: str = "Refiner"):
# Call the superclass constructor with the constructed user_prompt
super().__init__(name, self.user_prompt)
super().__init__(client, name, REFINER_PROMPT)

def refine_idea(self, idea: str) -> str:
return self.query_gpt(idea)
33 changes: 23 additions & 10 deletions agents/sme.py
@@ -1,20 +1,33 @@
from textwrap import dedent

from agents.agent import Agent
from clients.base import AIClient

USER_PROMPT_TEMPLATE = dedent(
"""\
Adopt the persona of the {name}.\n
Your expertise is {expertise}.\n
Your concerns are {concerns}.\n
You will be shown a transcript of a meeting.
You have been asked to speak by the meeting chairman.
Specifically, provide insights on {concerns}
based on the meeting transcript.\n
Do not repeat points that have already been made
"""
)


class SME(Agent):
def __init__(self, name: str, expertise: str, concerns: list[str]):
def __init__(
self, client: AIClient, name: str, expertise: str, concerns: list[str]
):
# Construct the user_prompt string
user_prompt_list = [
f"Adopt the persona of the {name}.",
f"Your expertise is {expertise}.",
f"Your concerns are {', '.join(concerns)}.",
f"You will be shown a transacript of a meeting. You have been asked to speak by the meeting chairman. Specifically, provide insights on {', '.join(concerns)} based on the meeting transcript. "
"Do not repeat points that have already been made.",
]
user_prompt = " ".join(user_prompt_list)
user_prompt = USER_PROMPT_TEMPLATE.format(
name=name, expertise=expertise, concerns=", ".join(concerns)
)

# Call the superclass constructor with the constructed user_prompt
super().__init__(name, user_prompt)
super().__init__(client, name, user_prompt)
self.expertise = expertise
self.concerns = concerns
self.spoken_count = 0
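For illustration only, with invented persona values, the prompt the new template-based constructor builds can be previewed directly; this mirrors the ", ".join(concerns) call in SME.__init__.

    from agents.sme import USER_PROMPT_TEMPLATE

    # Invented persona values; the template is the one added in this commit.
    concerns = ["security", "compliance"]
    prompt = USER_PROMPT_TEMPLATE.format(
        name="CISO", expertise="information security", concerns=", ".join(concerns)
    )
    print(prompt)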
3 changes: 3 additions & 0 deletions clients/__init__.py
@@ -0,0 +1,3 @@
from .base import AIClient
from .config import AIClientConfig
from .get_client import AIClientType, GPTClient, get_ai_client
7 changes: 7 additions & 0 deletions clients/base.py
@@ -0,0 +1,7 @@
from abc import ABC, abstractmethod


class AIClient(ABC):
@abstractmethod
def query(self, transcript: str):
pass
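Any additional backend now plugs in by subclassing this ABC. A deliberately trivial, hypothetical subclass (not in the repository) is enough to satisfy the interface; AIClient itself cannot be instantiated because query is abstract.

    from clients.base import AIClient


    class CannedClient(AIClient):
        """Hypothetical backend that always returns the same reply; handy for offline tests."""

        def __init__(self, reply: str = "NO COMMENT"):
            self.reply = reply

        def query(self, transcript: str) -> str:
            return self.reply


    CannedClient().query("anything")  # -> "NO COMMENT"
    # AIClient() would raise TypeError because query() has no implementation.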
7 changes: 7 additions & 0 deletions clients/config.py
@@ -0,0 +1,7 @@
from dataclasses import dataclass


@dataclass
class AIClientConfig:
api_key: str
model: str | None
16 changes: 16 additions & 0 deletions clients/get_client.py
@@ -0,0 +1,16 @@
from enum import Enum

from clients.base import AIClient
from clients.config import AIClientConfig
from clients.gpt_client import GPTClient


class AIClientType(str, Enum):
ChatGPT = "ChatGPT"


def get_ai_client(client_type: AIClientType, config: AIClientConfig) -> AIClient:
if client_type == AIClientType.ChatGPT:
return GPTClient(config.api_key)
else:
raise ValueError(f"Unknown AI client type: {client_type}")
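A hedged usage sketch of the new factory; the environment-variable name mirrors the one main.py reads, and the key value here is a placeholder.

    import os

    from clients import AIClientConfig, AIClientType, get_ai_client

    config = AIClientConfig(api_key=os.getenv("openai.api_key", "sk-placeholder"), model=None)

    # Returns a concrete GPTClient behind the AIClient interface.
    client = get_ai_client(AIClientType.ChatGPT, config)

    # Any other client type raises ValueError("Unknown AI client type: ...").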
59 changes: 35 additions & 24 deletions gpt/gpt_client.py → clients/gpt_client.py
@@ -1,51 +1,61 @@
import json
import os
import time
from enum import Enum

import openai
from dotenv import load_dotenv
from loguru import logger

# Load environment variables from .env file
load_dotenv()
api_key = os.getenv("openai.api_key")
from .base import AIClient

if not api_key:
raise ValueError("API key not found in environment variables")

openai.api_key = api_key
class Models(str, Enum):
GPT3 = "gpt-3.5-turbo"
GPT4 = "gpt-4"

GPT3 = "gpt-3.5-turbo"
GPT4 = "gpt-4"


class GPTClient:
def __init__(
self, system_instructions: str, user_prompt: str, model: str = GPT4
):
self.system_instructions = system_instructions
self.user_prompt = user_prompt
class GPTClient(AIClient):
def __init__(self, api_key: str, model: str = Models.GPT4.value):
openai.api_key = api_key
self._system_instructions = None
self._user_prompt = None
self.model = model
self.max_tokens = 100
self.temperature: float = 0.1
# Log initial configuration on startup
logger.info(f"Initializing GPTClient with the following configuration:")
logger.info(f"System Instructions: {self.system_instructions}")
logger.info(f"User Prompt: {self.user_prompt}")
logger.info(f"Model: {self.model}")
logger.info(f"Max Tokens: {self.max_tokens}")
logger.info(f"Temperature: {self.temperature}")

@property
def system_instructions(self):
return self._system_instructions

@system_instructions.setter
def system_instructions(self, value):
logger.debug(f"Setting system instructions: {self._system_instructions}")
self._system_instructions = value

@property
def user_prompt(self):
return self._user_prompt

@user_prompt.setter
def user_prompt(self, value):
logger.debug(f"Setting user prompt: {self._user_prompt}")
self._user_prompt = value

def query(self, transcript: str) -> str:
max_retries = 6 # Number of retries
retry_delay = 10 # Delay between retries in seconds

# TODO: use backoff decorator
for i in range(max_retries):
try:
start_time = time.time()
messages = [
{"role": "system", "content": self.system_instructions},
{"role": "user", "content": self.user_prompt},
{"role": "system", "content": self._system_instructions},
{"role": "user", "content": self._user_prompt},
{"role": "assistant", "content": transcript},
]
logger.info(json.dumps(messages, indent=4).replace("\\n", "\n"))
@@ -56,16 +66,17 @@ def query(self, transcript: str) -> str:
messages=messages,
)

end_time = time.time()
elapsed_time = end_time - start_time
elapsed_time = time.time() - start_time

# Log the time taken and token usage
logger.info(f"GPT query took {elapsed_time:.2f} seconds")
logger.info(f"Tokens used in the request: {response['usage']}")

return response.choices[0].message.content.strip()
except openai.error.RateLimitError as e:
logger.warning(f"Rate limit reached. Retrying in {retry_delay} seconds. Details: {e}")
logger.warning(
f"Rate limit reached. Retrying in {retry_delay} seconds. Details: {e}"
)
time.sleep(retry_delay)

logger.error(f"Max retries reached. Could not complete the GPT query.")
2 changes: 1 addition & 1 deletion constants.py
@@ -1 +1 @@
NO_COMMENT = "NO COMMENT"
NO_COMMENT = "NO COMMENT"
Empty file removed gpt/__init__.py
7 changes: 0 additions & 7 deletions logger_config.py

This file was deleted.

31 changes: 24 additions & 7 deletions main.py
@@ -1,15 +1,19 @@
import os
from pathlib import Path

import click
from loguru import logger
from dotenv import load_dotenv

from agents.chairman import Chairman
from agents.idea_refiner import IdeaRefiner
from agents.sme import SME
from clients import AIClientConfig, AIClientType, get_ai_client
from constants import NO_COMMENT
from utils.logging import configure_logging
from utils.parse_config import parse_yaml_config
from utils.print_with_wrap import print_with_wrap
import logger_config

load_dotenv()

# typical C-suite of executives
DEFAULT_SME_DICT = (
@@ -71,18 +75,31 @@
default=None,
help="yaml file with team personalities details",
)
def main(idea: tuple[str], config: Path = None):
@click.option("-v", "--verbose", default=1, count=True)
def main(idea: str, config: Path = None, verbose: int = 1):
configure_logging(verbose)
load_dotenv()
client = get_ai_client(
AIClientType.ChatGPT, AIClientConfig(api_key=os.getenv("openai.api_key"))
)
if config:
sme_dict = parse_yaml_config(config)
else:
sme_dict = DEFAULT_SME_DICT

smes = [SME(**d) for d in sme_dict]
smes = [SME(client=client, **d) for d in sme_dict]

chairman = Chairman(client, smes)

chairman = Chairman("Chairman", smes)
refiner = IdeaRefiner("Refiner")
refiner = IdeaRefiner(client, "Refiner")

transcript = ["<TRANSCRIPT OF ONGOING MEETING>", ".", "We are here to discuss this idea:", idea, "."]
transcript = [
"<TRANSCRIPT OF ONGOING MEETING>",
".",
"We are here to discuss this idea:",
idea,
".",
]

print_with_wrap("\n".join(transcript))

3 changes: 2 additions & 1 deletion requirements.txt
@@ -4,4 +4,5 @@ python-dotenv~=1.0.0
black~=23.10.0
click~=8.1.7
loguru~=0.7.2
PyYAML~=6.0.1
PyYAML~=6.0.1
isort~=5.12.0