diff --git a/prediction_market_agent/agents/microchain_agent/microchain_agent.py b/prediction_market_agent/agents/microchain_agent/microchain_agent.py
index 61a56d83..35aa6f44 100644
--- a/prediction_market_agent/agents/microchain_agent/microchain_agent.py
+++ b/prediction_market_agent/agents/microchain_agent/microchain_agent.py
@@ -1,3 +1,4 @@
+import typer
 from functions import MARKET_FUNCTIONS, MISC_FUNCTIONS
 from microchain import LLM, Agent, Engine, OpenAIChatGenerator
 from microchain.functions import Reasoning, Stop
@@ -8,32 +9,41 @@
 )
 from prediction_market_agent.utils import APIKeys
 
-engine = Engine()
-engine.register(Reasoning())
-engine.register(Stop())
-for function in MISC_FUNCTIONS:
-    engine.register(function())
-for function in MARKET_FUNCTIONS:
-    engine.register(function(market_type=MarketType.OMEN))
-for function in OMEN_FUNCTIONS:
-    engine.register(function())
-generator = OpenAIChatGenerator(
-    model="gpt-4-turbo-preview",
-    api_key=APIKeys().openai_api_key.get_secret_value(),
-    api_base="https://api.openai.com/v1",
-    temperature=0.7,
-)
-agent = Agent(llm=LLM(generator=generator), engine=engine)
-agent.prompt = f"""Act as a agent to maximise your profit. You can use the following functions:
-
-{engine.help}
-
-
-Only output valid Python function calls.
-
-"""
+def main(
+    api_base: str = "https://api.openai.com/v1",
+    model: str = "gpt-4-turbo-preview",
+) -> None:
+    engine = Engine()
+    engine.register(Reasoning())
+    engine.register(Stop())
+    for function in MISC_FUNCTIONS:
+        engine.register(function())
+    for function in MARKET_FUNCTIONS:
+        engine.register(function(market_type=MarketType.OMEN))
+    for function in OMEN_FUNCTIONS:
+        engine.register(function())
+
+    generator = OpenAIChatGenerator(
+        model=model,
+        api_key=APIKeys().openai_api_key.get_secret_value(),
+        api_base=api_base,
+        temperature=0.7,
+    )
+    agent = Agent(llm=LLM(generator=generator), engine=engine)
+    agent.prompt = f"""Act as an agent to maximise your profit. You can use the following functions:
+
+    {engine.help}
+
+
+    Only output valid Python function calls.
+
+    """
+
+    agent.bootstrap = ['Reasoning("I need to reason step-by-step")']
+    agent.run(iterations=10)
+    # generator.print_usage()  # Waiting for microchain release
+
 
 
-agent.bootstrap = ['Reasoning("I need to reason step-by-step")']
-agent.run(iterations=10)
-generator.print_usage()
+if __name__ == "__main__":
+    typer.run(main)
diff --git a/prediction_market_agent/agents/microchain_agent/model_notes.md b/prediction_market_agent/agents/microchain_agent/model_notes.md
new file mode 100644
index 00000000..f118ab5b
--- /dev/null
+++ b/prediction_market_agent/agents/microchain_agent/model_notes.md
@@ -0,0 +1,52 @@
+# Microchain Agent Model Behaviour Diary
+
+## Proprietary models
+
+### GPT4
+
+- Makes many reasoning steps, with coherent and good reasoning w.r.t. betting strategy
+- Almost always gets function calls correct
+- Seems keen on betting large amounts, even when instructed not to!
+- Seems keen to `Stop` the program after at most a couple of bets. Doesn't use some of the functions (selling, getting existing positions)
+
+## Local models
+
+### Setup
+
+- Instructions are for Ollama, but you can use any library that allows you to set up a local OpenAI-compatible server.
+- Download Ollama [here](https://ollama.com/download/mac)
+- In another terminal, run `ollama serve` to start the server. You can set the address and port with the `OLLAMA_HOST` env var, e.g.:
+
+```bash
+OLLAMA_HOST=127.0.0.1:11435 ollama serve
+```
+
+- Run the script, passing in the API address and model name as arguments. Note that you must have downloaded these model weights in advance via `ollama run <model-name>`.
 list[str]:
         return []
 
     def __call__(self) -> None:
-        redeem_from_all_user_positions(MicrochainAPIKeys().bet_from_private_key)
+        redeem_from_all_user_positions(APIKeys().bet_from_private_key)
 
 
 # Functions that interact exclusively with Omen prediction markets
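For illustration, a minimal invocation sketch for the new typer entry point against a local Ollama server. It assumes Ollama's OpenAI-compatible endpoint is exposed at the default `http://localhost:11434/v1`, and `mistral` is only a placeholder for whichever model weights you have pulled; `--api-base` and `--model` are the options typer derives from `main()`'s `api_base` and `model` parameters.

```bash
# Terminal 1: start the local OpenAI-compatible server (default 127.0.0.1:11434).
ollama serve

# Terminal 2: point the agent at it. "mistral" is a placeholder model name and
# must already have been downloaded, e.g. via `ollama pull mistral`.
python prediction_market_agent/agents/microchain_agent/microchain_agent.py \
    --api-base http://localhost:11434/v1 \
    --model mistral
```

Note that the script still reads `api_key` from `APIKeys()` regardless of `api_base`, so an OpenAI key must be configured even for local runs; a local server will typically ignore its value.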