Skip to content

Commit

Permalink
Merge branch 'stepan-improvements' into main
Browse files Browse the repository at this point in the history
# Conflicts:
#	agents/agent.py
#	agents/chairman.py
#	agents/idea_refiner.py
#	agents/sme.py
#	gpt/gpt_client.py
#	main.py
#	utils/print_with_wrap.py
  • Loading branch information
laurence.hook committed Oct 22, 2023
2 parents 0167e2b + ab7e113 commit d6071bf
Show file tree
Hide file tree
Showing 19 changed files with 240 additions and 117 deletions.
25 changes: 25 additions & 0 deletions .github/workflows/style.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
name: style

on: [push]

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.11"]
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
          cache: 'pip'  # caching pip dependencies
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
      # Step name fixed: this runs black/isort format checks, not pylint.
      - name: Check formatting with black and isort
        run: |
          black --check --diff .
          isort --check --diff .
21 changes: 16 additions & 5 deletions agents/agent.py
Original file line number Diff line number Diff line change
@@ -1,20 +1,31 @@
from textwrap import dedent

from clients.base import AIClient
from constants import NO_COMMENT
from gpt.gpt_client import GPTClient

DEFAULT_SYSTEM_PROMPT = dedent(f"""\
DEFAULT_SYSTEM_PROMPT = dedent(
f"""\
Provide succinct, fact-based answers. Eliminate filler words and politeness.
Concentrate on delivering actionable insights and concrete solutions.
Avoid vague or generic statements. Stick to the topic at hand.
If your response doesn't meet these standards, reply with the exact words '{NO_COMMENT}'
"""
)


class Agent:
def __init__(self, name: str, user_prompt: str, system_prompt: str = DEFAULT_SYSTEM_PROMPT):
def __init__(
self,
client: AIClient,
name: str,
user_prompt: str,
system_prompt: str = DEFAULT_SYSTEM_PROMPT,
):
self.name = name
self.gpt_client = GPTClient(system_prompt, user_prompt)

self.client = client
self.client.common_instructions = system_prompt
self.client.user_prompt = user_prompt

def query_gpt(self, transcript: str) -> str:
return self.gpt_client.query(transcript)
return self.client.query(transcript)
28 changes: 13 additions & 15 deletions agents/chairman.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,34 +2,32 @@

from agents.agent import Agent
from agents.sme import SME

from clients.base import AIClient


class Chairman(Agent):
def __init__(self, name: str, SMEs: list[SME]):
def __init__(self, client: AIClient, executives: list[SME], name: str = "Chairman"):
# Construct the user_prompt string with details of the executives
self.user_prompt = self.update_user_prompt(executives)

self.user_prompt = self.update_user_prompt(SMEs)

system_prompt = (
f"Answer with only the name and nothing else."
)
system_prompt = f"Answer with only the name and nothing else."

# Call the superclass constructor with the constructed user_prompt
super().__init__(name, self.user_prompt, system_prompt)
super().__init__(client, name, self.user_prompt, system_prompt)

self.SMEs = SMEs
self.executives = executives

def update_user_prompt(self, SMEs: list[SME]) -> str:
frequency_info_list = []
for SME in SMEs:
for sme in SMEs:
frequency_info_list.append(
f"{SME.name}: expertise: {SME.expertise}. "
f"concerns: {', '.join(SME.concerns)}. spoken count: {SME.spoken_count}.\n"
f"{sme.name}: expertise: {sme.expertise}. "
f"concerns: {', '.join(sme.concerns)}. spoken count: {sme.spoken_count}.\n"
)

return (
f"Your task is to read the transcript and decide who should speak next. Do not choose the same person all of the time.\n"
f"Your task is to read the transcript and decide who should speak next. "
f"Do not choose the same person all of the time.\n"
f"Participants:\n{''.join(frequency_info_list)} "
)

Expand All @@ -44,12 +42,12 @@ def decide_next_speaker(self, transcript_list: list[str]) -> SME:
logger.info(f"Chairman called speaker: {next_speaker}")

next_executive = next(
(exec for exec in self.SMEs if exec.name == next_speaker), None
(exec for exec in self.executives if exec.name == next_speaker), None
)

if next_executive is not None:
next_executive.spoken_count += 1 # Update the frequency count
self.user_prompt = self.update_user_prompt(self.SMEs)
self.user_prompt = self.update_user_prompt(self.executives)
self.gpt_client.user_prompt = self.user_prompt
return next_executive

Expand Down
22 changes: 13 additions & 9 deletions agents/idea_refiner.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,22 @@
from loguru import logger
from textwrap import dedent

from agents.agent import Agent
from agents.sme import SME
from clients import AIClient


class IdeaRefiner(Agent):
def __init__(self, name: str):
# Construct the user_prompt string with details of the executives

self.user_prompt = "You are going to presented with an topic for discussion at a meeting. Your task to think deeply and refine the topic presented and note obvious high level constraints and considerations. Your output will serve as an introduction to the meeting participants."
REFINER_PROMPT = dedent(
"""\
You are going to presented with an topic for discussion at a meeting.
Your task to think deeply and refine the topic presented and note obvious
high level constraints and considerations.
Your output will serve as an introduction to the meeting participants.
"""
)


class IdeaRefiner(Agent):
def __init__(self, client: AIClient, name: str = "Refiner"):
# Call the superclass constructor with the constructed user_prompt
super().__init__(name, self.user_prompt)
super().__init__(client, name, REFINER_PROMPT)

def refine_idea(self, idea: str) -> str:
return self.query_gpt(idea)
33 changes: 23 additions & 10 deletions agents/sme.py
Original file line number Diff line number Diff line change
@@ -1,20 +1,33 @@
from textwrap import dedent

from agents.agent import Agent
from clients.base import AIClient

USER_PROMPT_TEMPLATE = dedent(
"""\
Adopt the persona of the {name}.\n
Your expertise is {expertise}.\n
Your concerns are {concerns}.\n
You will be shown a transcript of a meeting.
You have been asked to speak by the meeting chairman.
Specifically, provide insights on {concerns}
based on the meeting transcript.\n
Do not repeat points that have already been made
"""
)


class SME(Agent):
def __init__(self, name: str, expertise: str, concerns: list[str]):
def __init__(
self, client: AIClient, name: str, expertise: str, concerns: list[str]
):
# Construct the user_prompt string
user_prompt_list = [
f"Adopt the persona of the {name}.",
f"Your expertise is {expertise}.",
f"Your concerns are {', '.join(concerns)}.",
f"You will be shown a transacript of a meeting. You have been asked to speak by the meeting chairman. Specifically, provide insights on {', '.join(concerns)} based on the meeting transcript. "
"Do not repeat points that have already been made.",
]
user_prompt = " ".join(user_prompt_list)
user_prompt = USER_PROMPT_TEMPLATE.format(
name=name, expertise=expertise, concerns=", ".join(concerns)
)

# Call the superclass constructor with the constructed user_prompt
super().__init__(name, user_prompt)
super().__init__(client, name, user_prompt)
self.expertise = expertise
self.concerns = concerns
self.spoken_count = 0
Expand Down
3 changes: 3 additions & 0 deletions clients/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
from .base import AIClient
from .config import AIClientConfig
from .get_client import AIClientType, GPTClient, get_ai_client
7 changes: 7 additions & 0 deletions clients/base.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
from abc import ABC, abstractmethod


class AIClient(ABC):
    """Abstract interface for chat-completion backends (e.g. GPTClient)."""

    @abstractmethod
    def query(self, transcript: str) -> str:
        """Send *transcript* to the model and return its reply text."""
        pass
7 changes: 7 additions & 0 deletions clients/config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
from dataclasses import dataclass


@dataclass
class AIClientConfig:
api_key: str
model: str | None
16 changes: 16 additions & 0 deletions clients/get_client.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
from enum import Enum

from clients.base import AIClient
from clients.config import AIClientConfig
from clients.gpt_client import GPTClient


class AIClientType(str, Enum):
    """Identifiers for supported AI backends.

    The ``str`` mixin makes members compare equal to their plain string
    values, so callers may pass either the enum member or ``"ChatGPT"``.
    """

    ChatGPT = "ChatGPT"


def get_ai_client(client_type: AIClientType, config: AIClientConfig) -> AIClient:
    """Build and return the concrete client for *client_type*.

    Args:
        client_type: Which backend to instantiate.
        config: Credentials and settings for the backend.

    Raises:
        ValueError: If *client_type* is not a recognised backend.
    """
    # Guard clause: reject unknown types up front, then construct.
    if client_type != AIClientType.ChatGPT:
        raise ValueError(f"Unknown AI client type: {client_type}")
    return GPTClient(config.api_key)
59 changes: 35 additions & 24 deletions gpt/gpt_client.py → clients/gpt_client.py
Original file line number Diff line number Diff line change
@@ -1,51 +1,61 @@
import json
import os
import time
from enum import Enum

import openai
from dotenv import load_dotenv
from loguru import logger

# Load environment variables from .env file
load_dotenv()
api_key = os.getenv("openai.api_key")
from .base import AIClient

if not api_key:
raise ValueError("API key not found in environment variables")

openai.api_key = api_key
class Models(str, Enum):
GPT3 = "gpt-3.5-turbo"
GPT4 = "gpt-4"

GPT3 = "gpt-3.5-turbo"
GPT4 = "gpt-4"


class GPTClient:
def __init__(
self, system_instructions: str, user_prompt: str, model: str = GPT4
):
self.system_instructions = system_instructions
self.user_prompt = user_prompt
class GPTClient(AIClient):
def __init__(self, api_key: str, model: str = Models.GPT4.value):
openai.api_key = api_key
self._system_instructions = None
self._user_prompt = None
self.model = model
self.max_tokens = 100
self.temperature: float = 0.1
# Log initial configuration on startup
logger.info(f"Initializing GPTClient with the following configuration:")
logger.info(f"System Instructions: {self.system_instructions}")
logger.info(f"User Prompt: {self.user_prompt}")
logger.info(f"Model: {self.model}")
logger.info(f"Max Tokens: {self.max_tokens}")
logger.info(f"Temperature: {self.temperature}")

@property
def system_instructions(self):
return self._system_instructions

@system_instructions.setter
def system_instructions(self, value):
logger.debug(f"Setting system instructions: {self._system_instructions}")
self._system_instructions = value

@property
def user_prompt(self):
return self._user_prompt

@user_prompt.setter
def user_prompt(self, value):
logger.debug(f"Setting user prompt: {self._user_prompt}")
self._user_prompt = value

def query(self, transcript: str) -> str:
max_retries = 6 # Number of retries
retry_delay = 10 # Delay between retries in seconds

# TODO: use backoff decorator
for i in range(max_retries):
try:
start_time = time.time()
messages = [
{"role": "system", "content": self.system_instructions},
{"role": "user", "content": self.user_prompt},
{"role": "system", "content": self._system_instructions},
{"role": "user", "content": self._user_prompt},
{"role": "assistant", "content": transcript},
]
logger.info(json.dumps(messages, indent=4).replace("\\n", "\n"))
Expand All @@ -56,16 +66,17 @@ def query(self, transcript: str) -> str:
messages=messages,
)

end_time = time.time()
elapsed_time = end_time - start_time
elapsed_time = time.time() - start_time

# Log the time taken and token usage
logger.info(f"GPT query took {elapsed_time:.2f} seconds")
logger.info(f"Tokens used in the request: {response['usage']}")

return response.choices[0].message.content.strip()
except openai.error.RateLimitError as e:
logger.warning(f"Rate limit reached. Retrying in {retry_delay} seconds. Details: {e}")
logger.warning(
f"Rate limit reached. Retrying in {retry_delay} seconds. Details: {e}"
)
time.sleep(retry_delay)

logger.error(f"Max retries reached. Could not complete the GPT query.")
Expand Down
2 changes: 2 additions & 0 deletions constants/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
from .default_sme import DEFAULT_SME_DICT
from .strings import NO_COMMENT
42 changes: 42 additions & 0 deletions constants/default_sme.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
# Default roster of subject-matter experts used when no custom roster is
# supplied.
# NOTE(review): despite the name, this is a *tuple of dicts*, not a dict;
# each entry carries the keyword arguments (name, expertise, concerns)
# matching the SME constructor's parameters.
DEFAULT_SME_DICT = (
    {
        "name": "CEO",
        "expertise": "Corporate Strategy",
        "concerns": ["Market Entry", "Competitive Positioning"],
    },
    {
        "name": "CFO",
        "expertise": "Financial Products",
        "concerns": ["Rate Management", "Regulatory Compliance"],
    },
    {
        "name": "COO",
        "expertise": "Operational Efficiency",
        "concerns": ["Scalability", "Cost Optimization"],
    },
    {
        "name": "CMO",
        "expertise": "Customer Acquisition",
        "concerns": ["Target Market", "Onboarding Experience"],
    },
    {
        "name": "CTO",
        "expertise": "Technical Infrastructure",
        "concerns": ["Data Security", "System Integration"],
    },
    {
        "name": "CRO",
        "expertise": "Risk Management",
        "concerns": ["Fraud Detection", "Compliance"],
    },
    {
        "name": "CCO",
        "expertise": "Customer Experience",
        "concerns": ["UX/UI Design", "Customer Support"],
    },
    {
        "name": "CPO",
        "expertise": "Product Management",
        "concerns": ["Feature Rollout", "Customer Feedback"],
    },
)
1 change: 1 addition & 0 deletions constants/strings.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
NO_COMMENT = "NO COMMENT"
Empty file removed gpt/__init__.py
Empty file.
Loading

0 comments on commit d6071bf

Please sign in to comment.