From e089da2e6ed2eb10072aeac0eb5bd7a7e0a9fdba Mon Sep 17 00:00:00 2001
From: Peter Jung
Date: Wed, 10 Apr 2024 14:58:26 +0200
Subject: [PATCH 01/11] Coinflip agent deployment

---
 .dockerignore                               |  12 ++
 .gitignore                                  |   2 -
 Dockerfile                                  |  27 ++++
 build.sh                                    |   2 +
 poetry.lock                                 | 132 +++++++++---------
 .../agents/coinflip_agent/coinflip_agent.py |  12 ++
 prediction_market_agent/run_agent.py        |  30 ++++
 pyproject.toml                              |   2 +-
 8 files changed, 151 insertions(+), 68 deletions(-)
 create mode 100644 .dockerignore
 create mode 100644 Dockerfile
 create mode 100755 build.sh
 create mode 100644 prediction_market_agent/agents/coinflip_agent/coinflip_agent.py
 create mode 100644 prediction_market_agent/run_agent.py

diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 00000000..0f40e3aa
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,12 @@
+# Docker specific ignores:
+.git
+
+# .gitignore ignores:
+.env
+.agents_workspace
+.cache
+__pycache__
+*.egg-info
+logs
+.mypy_cache
+.pytest_cache
diff --git a/.gitignore b/.gitignore
index 43754f7a..55578600 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,3 @@
-requirements.txt
-.venv
 .env
 .agents_workspace
 .cache
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 00000000..f7fbcb6c
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,27 @@
+# Install Poetry and create venv in the builder step,
+# then copy the venv to the runtime image, so that the runtime image is as small as possible.
+FROM python:3.10.14-slim-bookworm AS builder
+
+RUN pip install poetry==1.8.2
+
+ENV POETRY_NO_INTERACTION=1 \
+    POETRY_VIRTUALENVS_IN_PROJECT=1 \
+    POETRY_VIRTUALENVS_CREATE=1 \
+    POETRY_CACHE_DIR=/tmp/poetry_cache
+
+WORKDIR /app
+
+COPY pyproject.toml poetry.lock ./
+
+RUN --mount=type=cache,target=$POETRY_CACHE_DIR poetry install --no-root --only main
+
+FROM python:3.10.14-slim-bookworm AS runtime
+
+ENV VIRTUAL_ENV=/app/.venv \
+    PATH="/app/.venv/bin:$PATH"
+
+WORKDIR /app
+
+COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV}
+
+COPY prediction_market_agent ./prediction_market_agent
diff --git a/build.sh b/build.sh
new file mode 100755
index 00000000..1848e380
--- /dev/null
+++ b/build.sh
@@ -0,0 +1,2 @@
+# TODO: This should be automated as part of CI/CD pipeline, but for now, execute it locally on the main branch.
+docker build . -t europe-west1-docker.pkg.dev/gnosis-ai/pma/main && docker push europe-west1-docker.pkg.dev/gnosis-ai/pma/main
diff --git a/poetry.lock b/poetry.lock
index 1a8da7c5..5022200e 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
[[package]] name = "aiohttp" @@ -1571,13 +1571,13 @@ tools = ["hypothesis (>=4.18.2,<5.0.0)"] [[package]] name = "eth-account" -version = "0.11.1" +version = "0.11.2" description = "eth-account: Sign Ethereum transactions and messages with local private keys" optional = false python-versions = "<4,>=3.8" files = [ - {file = "eth-account-0.11.1.tar.gz", hash = "sha256:df28f49c6c98fa8b513d7bbf4eae07cc1ff7e379474a957369febfde60e9f520"}, - {file = "eth_account-0.11.1-py3-none-any.whl", hash = "sha256:f832fa1d814e8f2a43d257a5f7b0149f35a9bd8e2d1ab894760a17dedc654777"}, + {file = "eth-account-0.11.2.tar.gz", hash = "sha256:b43daf2c0ae43f2a24ba754d66889f043fae4d3511559cb26eb0122bae9afbbd"}, + {file = "eth_account-0.11.2-py3-none-any.whl", hash = "sha256:95157c262a9823c1e08be826d4bc304bf32f0c32e80afb38c126a325a64f651a"}, ] [package.dependencies] @@ -1589,7 +1589,6 @@ eth-keys = ">=0.4.0" eth-rlp = ">=0.3.0" eth-utils = ">=2.0.0" hexbytes = ">=0.1.0,<0.4.0" -pydantic = ">=2.4.0" rlp = ">=1.0.0" [package.extras] @@ -1769,13 +1768,13 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc [[package]] name = "filelock" -version = "3.13.3" +version = "3.13.4" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.3-py3-none-any.whl", hash = "sha256:5ffa845303983e7a0b7ae17636509bc97997d58afeafa72fb141a17b152284cb"}, - {file = "filelock-3.13.3.tar.gz", hash = "sha256:a79895a25bbefdf55d1a2a0a80968f7dbb28edcd6d4234a0afb3f37ecde4b546"}, + {file = "filelock-3.13.4-py3-none-any.whl", hash = "sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f"}, + {file = "filelock-3.13.4.tar.gz", hash = "sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4"}, ] [package.extras] @@ -2074,13 +2073,13 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "google-cloud-aiplatform" -version = "1.46.0" +version = "1.47.0" description = "Vertex AI API client library" optional = false python-versions = ">=3.8" files = [ - {file = "google-cloud-aiplatform-1.46.0.tar.gz", hash = "sha256:9eeefcd34dbad2a1ade830d025154189d257e8dcfea45771dcac6e34a2783782"}, - {file = "google_cloud_aiplatform-1.46.0-py2.py3-none-any.whl", hash = "sha256:a30a64f8bcefa0646340cf57abca192bb9ca9c456a47b842651a9564bba0534a"}, + {file = "google-cloud-aiplatform-1.47.0.tar.gz", hash = "sha256:1c4537db09b83957bf0623fd2afb37e339f89a3afcda3efce9dce79b16ab59c7"}, + {file = "google_cloud_aiplatform-1.47.0-py2.py3-none-any.whl", hash = "sha256:454ef0c44ecaeadcffe58f565acfce49e53895fd51bb20da8af0d48202a4cb21"}, ] [package.dependencies] @@ -2099,19 +2098,22 @@ shapely = "<3.0.0dev" [package.extras] autologging = ["mlflow (>=1.27.0,<=2.1.1)"] cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] -datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)"] +datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)"] endpoint = ["requests (>=2.28.1)"] -full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<0.103.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic 
(<2)", "pyyaml (==5.3.1)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "starlette (>=0.17.1)", "tensorflow (>=2.3.0,<2.15.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)"] +full = ["cloudpickle (<3.0)", "cloudpickle (>=2.2.1,<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<0.103.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pydantic (<3)", "pyyaml (==5.3.1)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "starlette (>=0.17.1)", "tensorflow (>=2.3.0,<2.15.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)"] +langchain = ["langchain (>=0.1.13,<0.2)", "langchain-core (<0.2)", "langchain-google-vertexai (<0.2)"] lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"] metadata = ["numpy (>=1.15.0)", "pandas (>=1.0.0)"] pipelines = ["pyyaml (==5.3.1)"] prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<0.103.1)", "httpx (>=0.23.0,<0.25.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"] preview = ["cloudpickle (<3.0)", "google-cloud-logging (<4.0)"] private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"] +rapid-evaluation = ["nest-asyncio (>=1.0.0,<1.6.0)", "pandas (>=1.0.0,<2.2.0)"] ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)"] ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "ray[train] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "scikit-learn", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"] +reasoningengine = ["cloudpickle (>=2.2.1,<3.0)", "pydantic (<3)"] tensorboard = ["tensorflow (>=2.3.0,<2.15.0)"] -testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<0.103.1)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (==5.3.1)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", 
"tensorflow (>=2.3.0,<2.15.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost", "xgboost-ray"] +testing = ["bigframes", "cloudpickle (<3.0)", "cloudpickle (>=2.2.1,<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<0.103.1)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pydantic (<3)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (==5.3.1)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<2.15.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"] vizier = ["google-vizier (>=0.1.6)"] xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] @@ -3064,13 +3066,13 @@ adal = ["adal (>=1.0.2)"] [[package]] name = "langchain" -version = "0.1.14" +version = "0.1.15" description = "Building applications with LLMs through composability" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain-0.1.14-py3-none-any.whl", hash = "sha256:94f9b5df2421faaf762d4f43b9d65c270c2f701934580d281e4c6226deef7234"}, - {file = "langchain-0.1.14.tar.gz", hash = "sha256:124c6244cf3957616b98f2df07dc2992fc40dff6ed1a62d8ee8a40f1e0260a40"}, + {file = "langchain-0.1.15-py3-none-any.whl", hash = "sha256:3ac516463ae7f80047091f04592a1eea138321710bbc266005f9de238d71acd3"}, + {file = "langchain-0.1.15.tar.gz", hash = "sha256:79d43035327fdcc5ac81a3db10f2b879f2bd5db3b268ef82bac7baf3ec32954e"}, ] [package.dependencies] @@ -3078,8 +3080,8 @@ aiohttp = ">=3.8.3,<4.0.0" async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} dataclasses-json = ">=0.5.7,<0.7" jsonpatch = ">=1.33,<2.0" -langchain-community = ">=0.0.30,<0.1" -langchain-core = ">=0.1.37,<0.2.0" +langchain-community = ">=0.0.32,<0.1" +langchain-core = ">=0.1.41,<0.2.0" langchain-text-splitters = ">=0.0.1,<0.1" langsmith = ">=0.1.17,<0.2.0" numpy = ">=1,<2" @@ -3105,19 +3107,19 @@ text-helpers = ["chardet (>=5.1.0,<6.0.0)"] [[package]] name = "langchain-community" -version = "0.0.31" +version = "0.0.32" description = "Community contributed LangChain integrations." 
optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_community-0.0.31-py3-none-any.whl", hash = "sha256:905c01b978a1cef7fdcddd2d9241dedc9987db6f23ba1b58d974e38b1cdf2775"}, - {file = "langchain_community-0.0.31.tar.gz", hash = "sha256:9a970bc2bb59bb4c204b696d8c62c2534f6ddb31005005cc1b7d7f934e58a5fc"}, + {file = "langchain_community-0.0.32-py3-none-any.whl", hash = "sha256:406977009999952d0705de3806de2b4867e9bb8eda8ca154a59c7a8ed58da38d"}, + {file = "langchain_community-0.0.32.tar.gz", hash = "sha256:1510217d646c8380f54e9850351f6d2a0b0dd73c501b666c6f4b40baa8160b29"}, ] [package.dependencies] aiohttp = ">=3.8.3,<4.0.0" dataclasses-json = ">=0.5.7,<0.7" -langchain-core = ">=0.1.37,<0.2.0" +langchain-core = ">=0.1.41,<0.2.0" langsmith = ">=0.1.0,<0.2.0" numpy = ">=1,<2" PyYAML = ">=5.3" @@ -3127,17 +3129,17 @@ tenacity = ">=8.1.0,<9.0.0" [package.extras] cli = ["typer (>=0.9.0,<0.10.0)"] -extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)", "zhipuai (>=1.0.7,<2.0.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai 
(>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] [[package]] name = "langchain-core" -version = "0.1.40" +version = "0.1.41" description = "Building applications with LLMs through composability" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_core-0.1.40-py3-none-any.whl", hash = "sha256:618dbb7ab44d8b263b91e384db1ff07d0db256ae5bdafa0123a115b6a75a13f1"}, - {file = "langchain_core-0.1.40.tar.gz", hash = "sha256:34c06fc0e6d3534b738c63f85403446b4be71161665b7e091f9bb19c914ec100"}, + {file = "langchain_core-0.1.41-py3-none-any.whl", hash = "sha256:92d3da5ca0d5fb1606b3ab42bfd455c7b386fe4c38f6c0d44ba6014abf46147d"}, + {file = "langchain_core-0.1.41.tar.gz", hash = "sha256:4002304883b251af8cb781f01c59f56aeee58be42c965dbbdeca0a3bfb8f96af"}, ] [package.dependencies] @@ -3153,18 +3155,18 @@ extended-testing = ["jinja2 (>=3,<4)"] [[package]] name = "langchain-experimental" -version = "0.0.56" +version = "0.0.57" description = "Building applications with LLMs through composability" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_experimental-0.0.56-py3-none-any.whl", hash = "sha256:91fd7a723b0ef3193a63726745523efdd5dd7134116d838c312cfdbf4b354298"}, - {file = 
"langchain_experimental-0.0.56.tar.gz", hash = "sha256:ebb1c34815739d3af50c9b709c57b91d0357d567ad2042acb724853c6ba1d735"}, + {file = "langchain_experimental-0.0.57-py3-none-any.whl", hash = "sha256:96479a2d45a35722cf2fe49655639e91d3fff5ccaba498cda32b088d5b184325"}, + {file = "langchain_experimental-0.0.57.tar.gz", hash = "sha256:d1fb452aa1f04f32f0e08b83b083f35552f4ece1077c5bdcf86327f56f1758b5"}, ] [package.dependencies] -langchain = ">=0.1.14,<0.2.0" -langchain-core = ">=0.1.37,<0.2.0" +langchain = ">=0.1.15,<0.2.0" +langchain-core = ">=0.1.41,<0.2.0" [package.extras] extended-testing = ["faker (>=19.3.1,<20.0.0)", "jinja2 (>=3,<4)", "pandas (>=2.0.1,<3.0.0)", "presidio-analyzer (>=2.2.352,<3.0.0)", "presidio-anonymizer (>=2.2.352,<3.0.0)", "sentence-transformers (>=2,<3)", "tabulate (>=0.9.0,<0.10.0)", "vowpal-wabbit-next (==0.6.0)"] @@ -3205,13 +3207,13 @@ extended-testing = ["lxml (>=5.1.0,<6.0.0)"] [[package]] name = "langsmith" -version = "0.1.40" +version = "0.1.43" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.40-py3-none-any.whl", hash = "sha256:aa47d0f5a1eabd5c05ac6ce2cd3e28ccfc554d366e856a27b7c3c17c443881cb"}, - {file = "langsmith-0.1.40.tar.gz", hash = "sha256:50fdf313741cf94e978de06025fd180b56acf1d1a4549b0fd5453ef23d5461ef"}, + {file = "langsmith-0.1.43-py3-none-any.whl", hash = "sha256:c0a3658f10cbefaa2f53d15db519592982b59d99f24e018dc73aca6092b4158d"}, + {file = "langsmith-0.1.43.tar.gz", hash = "sha256:983c5a35bf191bb23d93e453c9fc6ea7cda998b1ca96f94680a1446092caf347"}, ] [package.dependencies] @@ -4719,13 +4721,13 @@ test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint" [[package]] name = "prediction-market-agent-tooling" -version = "0.9.3" +version = "0.10.0" description = "Tools to benchmark, deploy and monitor prediction market agents." 
optional = false python-versions = "<3.12,>=3.10" files = [ - {file = "prediction_market_agent_tooling-0.9.3-py3-none-any.whl", hash = "sha256:7d2b18ef01091b05d8b43bf74093948d572f166b4c317171f18783de394dff02"}, - {file = "prediction_market_agent_tooling-0.9.3.tar.gz", hash = "sha256:31988b3a8f0b780fd92f130cffd1cf15e32e6c06dbc81d7c60522dfc10bf70f2"}, + {file = "prediction_market_agent_tooling-0.10.0-py3-none-any.whl", hash = "sha256:532c971cd4961b0775eca1026781646215e48851d42faa20b1f1b0fb373195ee"}, + {file = "prediction_market_agent_tooling-0.10.0.tar.gz", hash = "sha256:50b8865f1609efab364eb25e1c52be2512da2ed3c3cc494ec82e327bdc0fc267"}, ] [package.dependencies] @@ -4926,13 +4928,13 @@ pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pyautogen" -version = "0.2.22" +version = "0.2.23" description = "Enabling Next-Gen LLM Applications via Multi-Agent Conversation Framework" optional = false python-versions = "<3.13,>=3.8" files = [ - {file = "pyautogen-0.2.22-py3-none-any.whl", hash = "sha256:022d451ea286914024c5fb0e83f5c6622df925ce02a4e10f410389e9f003cbcc"}, - {file = "pyautogen-0.2.22.tar.gz", hash = "sha256:8d25ae881779d95c13f477a42c448e91f1038c080c5bf6b905ccc1559f7ac535"}, + {file = "pyautogen-0.2.23-py3-none-any.whl", hash = "sha256:f4e2d57f60561fd34e3d9ebf31db5005ab147bb356eeaf829a1fdbbf74400f29"}, + {file = "pyautogen-0.2.23.tar.gz", hash = "sha256:88859cca22ba6718a755abb23ed0edd93ceebb9d70b822e921940c23116e5e46"}, ] [package.dependencies] @@ -5901,37 +5903,37 @@ contextlib2 = ">=0.5.5" [[package]] name = "scikit-learn" -version = "1.4.1.post1" +version = "1.4.2" description = "A set of python modules for machine learning and data mining" optional = false python-versions = ">=3.9" files = [ - {file = "scikit-learn-1.4.1.post1.tar.gz", hash = "sha256:93d3d496ff1965470f9977d05e5ec3376fb1e63b10e4fda5e39d23c2d8969a30"}, - {file = "scikit_learn-1.4.1.post1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c540aaf44729ab5cd4bd5e394f2b375e65ceaea9cdd8c195788e70433d91bbc5"}, - {file = "scikit_learn-1.4.1.post1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4310bff71aa98b45b46cd26fa641309deb73a5d1c0461d181587ad4f30ea3c36"}, - {file = "scikit_learn-1.4.1.post1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f43dd527dabff5521af2786a2f8de5ba381e182ec7292663508901cf6ceaf6e"}, - {file = "scikit_learn-1.4.1.post1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c02e27d65b0c7dc32f2c5eb601aaf5530b7a02bfbe92438188624524878336f2"}, - {file = "scikit_learn-1.4.1.post1-cp310-cp310-win_amd64.whl", hash = "sha256:629e09f772ad42f657ca60a1a52342eef786218dd20cf1369a3b8d085e55ef8f"}, - {file = "scikit_learn-1.4.1.post1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6145dfd9605b0b50ae72cdf72b61a2acd87501369a763b0d73d004710ebb76b5"}, - {file = "scikit_learn-1.4.1.post1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1afed6951bc9d2053c6ee9a518a466cbc9b07c6a3f9d43bfe734192b6125d508"}, - {file = "scikit_learn-1.4.1.post1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce03506ccf5f96b7e9030fea7eb148999b254c44c10182ac55857bc9b5d4815f"}, - {file = "scikit_learn-1.4.1.post1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ba516fcdc73d60e7f48cbb0bccb9acbdb21807de3651531208aac73c758e3ab"}, - {file = "scikit_learn-1.4.1.post1-cp311-cp311-win_amd64.whl", hash = "sha256:78cd27b4669513b50db4f683ef41ea35b5dddc797bd2bbd990d49897fd1c8a46"}, - {file = 
"scikit_learn-1.4.1.post1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a1e289f33f613cefe6707dead50db31930530dc386b6ccff176c786335a7b01c"}, - {file = "scikit_learn-1.4.1.post1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:0df87de9ce1c0140f2818beef310fb2e2afdc1e66fc9ad587965577f17733649"}, - {file = "scikit_learn-1.4.1.post1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:712c1c69c45b58ef21635360b3d0a680ff7d83ac95b6f9b82cf9294070cda710"}, - {file = "scikit_learn-1.4.1.post1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1754b0c2409d6ed5a3380512d0adcf182a01363c669033a2b55cca429ed86a81"}, - {file = "scikit_learn-1.4.1.post1-cp312-cp312-win_amd64.whl", hash = "sha256:1d491ef66e37f4e812db7e6c8286520c2c3fc61b34bf5e59b67b4ce528de93af"}, - {file = "scikit_learn-1.4.1.post1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:aa0029b78ef59af22cfbd833e8ace8526e4df90212db7ceccbea582ebb5d6794"}, - {file = "scikit_learn-1.4.1.post1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:14e4c88436ac96bf69eb6d746ac76a574c314a23c6961b7d344b38877f20fee1"}, - {file = "scikit_learn-1.4.1.post1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7cd3a77c32879311f2aa93466d3c288c955ef71d191503cf0677c3340ae8ae0"}, - {file = "scikit_learn-1.4.1.post1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a3ee19211ded1a52ee37b0a7b373a8bfc66f95353af058a210b692bd4cda0dd"}, - {file = "scikit_learn-1.4.1.post1-cp39-cp39-win_amd64.whl", hash = "sha256:234b6bda70fdcae9e4abbbe028582ce99c280458665a155eed0b820599377d25"}, + {file = "scikit-learn-1.4.2.tar.gz", hash = "sha256:daa1c471d95bad080c6e44b4946c9390a4842adc3082572c20e4f8884e39e959"}, + {file = "scikit_learn-1.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8539a41b3d6d1af82eb629f9c57f37428ff1481c1e34dddb3b9d7af8ede67ac5"}, + {file = "scikit_learn-1.4.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:68b8404841f944a4a1459b07198fa2edd41a82f189b44f3e1d55c104dbc2e40c"}, + {file = "scikit_learn-1.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81bf5d8bbe87643103334032dd82f7419bc8c8d02a763643a6b9a5c7288c5054"}, + {file = "scikit_learn-1.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36f0ea5d0f693cb247a073d21a4123bdf4172e470e6d163c12b74cbb1536cf38"}, + {file = "scikit_learn-1.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:87440e2e188c87db80ea4023440923dccbd56fbc2d557b18ced00fef79da0727"}, + {file = "scikit_learn-1.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:45dee87ac5309bb82e3ea633955030df9bbcb8d2cdb30383c6cd483691c546cc"}, + {file = "scikit_learn-1.4.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1d0b25d9c651fd050555aadd57431b53d4cf664e749069da77f3d52c5ad14b3b"}, + {file = "scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0203c368058ab92efc6168a1507d388d41469c873e96ec220ca8e74079bf62e"}, + {file = "scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44c62f2b124848a28fd695db5bc4da019287abf390bfce602ddc8aa1ec186aae"}, + {file = "scikit_learn-1.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:5cd7b524115499b18b63f0c96f4224eb885564937a0b3477531b2b63ce331904"}, + {file = "scikit_learn-1.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:90378e1747949f90c8f385898fff35d73193dfcaec3dd75d6b542f90c4e89755"}, + {file = "scikit_learn-1.4.2-cp312-cp312-macosx_12_0_arm64.whl", hash = 
"sha256:ff4effe5a1d4e8fed260a83a163f7dbf4f6087b54528d8880bab1d1377bd78be"}, + {file = "scikit_learn-1.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:671e2f0c3f2c15409dae4f282a3a619601fa824d2c820e5b608d9d775f91780c"}, + {file = "scikit_learn-1.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d36d0bc983336bbc1be22f9b686b50c964f593c8a9a913a792442af9bf4f5e68"}, + {file = "scikit_learn-1.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:d762070980c17ba3e9a4a1e043ba0518ce4c55152032f1af0ca6f39b376b5928"}, + {file = "scikit_learn-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9993d5e78a8148b1d0fdf5b15ed92452af5581734129998c26f481c46586d68"}, + {file = "scikit_learn-1.4.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:426d258fddac674fdf33f3cb2d54d26f49406e2599dbf9a32b4d1696091d4256"}, + {file = "scikit_learn-1.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5460a1a5b043ae5ae4596b3126a4ec33ccba1b51e7ca2c5d36dac2169f62ab1d"}, + {file = "scikit_learn-1.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49d64ef6cb8c093d883e5a36c4766548d974898d378e395ba41a806d0e824db8"}, + {file = "scikit_learn-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:c97a50b05c194be9146d61fe87dbf8eac62b203d9e87a3ccc6ae9aed2dfaf361"}, ] [package.dependencies] joblib = ">=1.2.0" -numpy = ">=1.19.5,<2.0" +numpy = ">=1.19.5" scipy = ">=1.6.0" threadpoolctl = ">=2.0.0" @@ -7416,4 +7418,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "~3.10.0" -content-hash = "fef761809aab417b1012bd67e68b9b52b6d33a09fb2b5904c1a54306b334bd7a" +content-hash = "b88aab899df02f0aa3c6007cc26728d3f735565734e06a74c6f8766a8a92a0dd" diff --git a/prediction_market_agent/agents/coinflip_agent/coinflip_agent.py b/prediction_market_agent/agents/coinflip_agent/coinflip_agent.py new file mode 100644 index 00000000..cac169eb --- /dev/null +++ b/prediction_market_agent/agents/coinflip_agent/coinflip_agent.py @@ -0,0 +1,12 @@ +import random + +from prediction_market_agent_tooling.deploy.agent import DeployableAgent +from prediction_market_agent_tooling.markets.agent_market import AgentMarket + + +class DeployableCoinFlipAgent(DeployableAgent): + def pick_markets(self, markets: list[AgentMarket]) -> list[AgentMarket]: + return random.sample(markets, 1) + + def answer_binary_market(self, market: AgentMarket) -> bool | None: + return random.choice([True, False]) diff --git a/prediction_market_agent/run_agent.py b/prediction_market_agent/run_agent.py new file mode 100644 index 00000000..3ed06533 --- /dev/null +++ b/prediction_market_agent/run_agent.py @@ -0,0 +1,30 @@ +""" +Entrypoint for running the agent in GKE. +If the agent adheres to PMAT standard (subclasses DeployableAgent), +simply add the agent to the `RunnableAgent` enum and then `RUNNABLE_AGENTS` dict. 
+"""
+
+import typer
+
+from enum import Enum
+from prediction_market_agent_tooling.markets.markets import MarketType
+from prediction_market_agent.agents.coinflip_agent.coinflip_agent import (
+    DeployableCoinFlipAgent,
+)
+
+
+class RunnableAgent(str, Enum):
+    coinflip = "coinflip"
+
+
+RUNNABLE_AGENTS = {
+    RunnableAgent.value: DeployableCoinFlipAgent,
+}
+
+
+def main(agent: RunnableAgent, market_type: MarketType):
+    RUNNABLE_AGENTS[agent].run(market_type)
+
+
+if __name__ == "__main__":
+    typer.run(main)
diff --git a/pyproject.toml b/pyproject.toml
index f5374f04..ee27e3a3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -35,7 +35,7 @@ poetry = "^1.7.1"
 poetry-plugin-export = "^1.6.0"
 functions-framework = "^3.5.0"
 cron-validator = "^1.0.8"
-prediction-market-agent-tooling = "^0.9.3"
+prediction-market-agent-tooling = "^0.10.0"
 pydantic-settings = "^2.1.0"
 autoflake = "^2.2.1"
 isort = "^5.13.2"

From 0a301c50bc963ffbaff0e450801e6ec9fa31bdde Mon Sep 17 00:00:00 2001
From: Peter Jung
Date: Wed, 10 Apr 2024 16:13:08 +0200
Subject: [PATCH 02/11] update repo

---
 build.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build.sh b/build.sh
index 1848e380..c40ff70c 100755
--- a/build.sh
+++ b/build.sh
@@ -1,2 +1,2 @@
 # TODO: This should be automated as part of CI/CD pipeline, but for now, execute it locally on the main branch.
-docker build . -t europe-west1-docker.pkg.dev/gnosis-ai/pma/main && docker push europe-west1-docker.pkg.dev/gnosis-ai/pma/main
+docker build . -t ghcr.io/gnosis/pma:latest && docker push ghcr.io/gnosis/pma:latest

From 520a34a0513f3ceb8c1b7bad37046791f42d32d9 Mon Sep 17 00:00:00 2001
From: Peter Jung
Date: Wed, 10 Apr 2024 16:15:05 +0200
Subject: [PATCH 03/11] fix

---
 prediction_market_agent/run_agent.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/prediction_market_agent/run_agent.py b/prediction_market_agent/run_agent.py
index 3ed06533..4e718ed2 100644
--- a/prediction_market_agent/run_agent.py
+++ b/prediction_market_agent/run_agent.py
@@ -18,7 +18,7 @@ class RunnableAgent(str, Enum):
 
 
 RUNNABLE_AGENTS = {
-    RunnableAgent.value: DeployableCoinFlipAgent,
+    RunnableAgent.coinflip: DeployableCoinFlipAgent,
 }
 
 

From 202aadeab835e927436bea38f3b142ef647f2e32 Mon Sep 17 00:00:00 2001
From: Peter Jung
Date: Thu, 11 Apr 2024 11:16:02 +0200
Subject: [PATCH 04/11] fix docker

---
 Dockerfile                           | 6 ++++--
 prediction_market_agent/run_agent.py | 2 +-
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index f7fbcb6c..ff39de80 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,6 +1,6 @@
 # Install Poetry and create venv in the builder step,
 # then copy the venv to the runtime image, so that the runtime image is as small as possible.
-FROM python:3.10.14-slim-bookworm AS builder
+FROM --platform=linux/amd64 python:3.10.14-slim-bookworm AS builder
 
 RUN pip install poetry==1.8.2
 
@@ -15,7 +15,7 @@ COPY pyproject.toml poetry.lock ./
 
 RUN --mount=type=cache,target=$POETRY_CACHE_DIR poetry install --no-root --only main
 
-FROM python:3.10.14-slim-bookworm AS runtime
+FROM --platform=linux/amd64 python:3.10.14-slim-bookworm AS runtime
 
 ENV VIRTUAL_ENV=/app/.venv \
     PATH="/app/.venv/bin:$PATH"
@@ -25,3 +25,5 @@ WORKDIR /app
 COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV}
 
 COPY prediction_market_agent ./prediction_market_agent
+
+ENV PYTHONPATH=/app
diff --git a/prediction_market_agent/run_agent.py b/prediction_market_agent/run_agent.py
index 4e718ed2..1de75762 100644
--- a/prediction_market_agent/run_agent.py
+++ b/prediction_market_agent/run_agent.py
@@ -23,7 +23,7 @@ class RunnableAgent(str, Enum):
 
 
 def main(agent: RunnableAgent, market_type: MarketType):
-    RUNNABLE_AGENTS[agent].run(market_type)
+    RUNNABLE_AGENTS[agent]().run(market_type)
 
 
 if __name__ == "__main__":

From fdfd53330fc53d52983f34410f4cef001fdc02ad Mon Sep 17 00:00:00 2001
From: Peter Jung
Date: Thu, 11 Apr 2024 11:19:41 +0200
Subject: [PATCH 05/11] Update build.sh

Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
---
 build.sh | 1 +
 1 file changed, 1 insertion(+)

diff --git a/build.sh b/build.sh
index c40ff70c..dfff0355 100755
--- a/build.sh
+++ b/build.sh
@@ -1,2 +1,3 @@
+#!/bin/bash
 # TODO: This should be automated as part of CI/CD pipeline, but for now, execute it locally on the main branch.
 docker build . -t ghcr.io/gnosis/pma:latest && docker push ghcr.io/gnosis/pma:latest

From 4b4cdd1f4a4edc64bfc17deae5244808e194b946 Mon Sep 17 00:00:00 2001
From: Peter Jung
Date: Thu, 11 Apr 2024 11:23:54 +0200
Subject: [PATCH 06/11] lint

---
 prediction_market_agent/run_agent.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/prediction_market_agent/run_agent.py b/prediction_market_agent/run_agent.py
index 1de75762..cd76bb47 100644
--- a/prediction_market_agent/run_agent.py
+++ b/prediction_market_agent/run_agent.py
@@ -4,10 +4,11 @@
 simply add the agent to the `RunnableAgent` enum and then `RUNNABLE_AGENTS` dict.
 """
 
-import typer
-
 from enum import Enum
+
+import typer
 from prediction_market_agent_tooling.markets.markets import MarketType
+
 from prediction_market_agent.agents.coinflip_agent.coinflip_agent import (
     DeployableCoinFlipAgent,
 )

From 37c117e15f1b4fcf49e4581f96141d8ec0fd9647 Mon Sep 17 00:00:00 2001
From: Peter Jung
Date: Thu, 11 Apr 2024 11:25:29 +0200
Subject: [PATCH 07/11] fixes

---
 .../agents/coinflip_agent/coinflip_agent.py | 3 ++-
 prediction_market_agent/run_agent.py        | 4 +++-
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/prediction_market_agent/agents/coinflip_agent/coinflip_agent.py b/prediction_market_agent/agents/coinflip_agent/coinflip_agent.py
index cac169eb..22c5eede 100644
--- a/prediction_market_agent/agents/coinflip_agent/coinflip_agent.py
+++ b/prediction_market_agent/agents/coinflip_agent/coinflip_agent.py
@@ -1,11 +1,12 @@
 import random
+import typing as t
 
 from prediction_market_agent_tooling.deploy.agent import DeployableAgent
 from prediction_market_agent_tooling.markets.agent_market import AgentMarket
 
 
 class DeployableCoinFlipAgent(DeployableAgent):
-    def pick_markets(self, markets: list[AgentMarket]) -> list[AgentMarket]:
+    def pick_markets(self, markets: t.Sequence[AgentMarket]) -> t.Sequence[AgentMarket]:
         return random.sample(markets, 1)
 
     def answer_binary_market(self, market: AgentMarket) -> bool | None:
         return random.choice([True, False])
diff --git a/prediction_market_agent/run_agent.py b/prediction_market_agent/run_agent.py
index cd76bb47..dc676b5e 100644
--- a/prediction_market_agent/run_agent.py
+++ b/prediction_market_agent/run_agent.py
@@ -2,6 +2,8 @@
 Entrypoint for running the agent in GKE.
 If the agent adheres to PMAT standard (subclasses DeployableAgent),
 simply add the agent to the `RunnableAgent` enum and then `RUNNABLE_AGENTS` dict.
+
+Can also be executed locally, simply by running `python prediction_market_agent/run_agent.py `.
 """

From abe8ecbdb7a5a61a2705c361d30d7ff5a573aa48 Mon Sep 17 00:00:00 2001
From: Peter Jung
Date: Thu, 11 Apr 2024 13:07:55 +0200
Subject: [PATCH 08/11] update docker

---
 Dockerfile | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/Dockerfile b/Dockerfile
index ff39de80..66faebb2 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -27,3 +27,5 @@ COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV}
 COPY prediction_market_agent ./prediction_market_agent
 
 ENV PYTHONPATH=/app
+
+CMD ["python", "prediction_market_agent/run_agent.py", $runnable_agent_name, $market_type]

From a6ef95ffb476c7a4cc690783eb9bd1d8a5f699e4 Mon Sep 17 00:00:00 2001
From: Peter Jung
Date: Thu, 11 Apr 2024 13:19:08 +0200
Subject: [PATCH 09/11] fix

---
 Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index 66faebb2..e341e9b4 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -28,4 +28,4 @@ COPY prediction_market_agent ./prediction_market_agent
 
 ENV PYTHONPATH=/app
 
-CMD ["python", "prediction_market_agent/run_agent.py", $runnable_agent_name, $market_type]
+CMD ["bash", "-c", "python prediction_market_agent/run_agent.py ${runnable_agent_name} ${market_type}"]

From 45973f27b5f26003c9fb3f6efb78af7a8c5486b0 Mon Sep 17 00:00:00 2001
From: Peter Jung
Date: Thu, 11 Apr 2024 14:36:28 +0200
Subject: [PATCH 10/11] rename

---
 .../agents/coinflip_agent/{coinflip_agent.py => deploy.py} | 0
 prediction_market_agent/run_agent.py                       | 2 +-
 2 files changed, 1 insertion(+), 1 deletion(-)
 rename prediction_market_agent/agents/coinflip_agent/{coinflip_agent.py => deploy.py} (100%)

diff --git a/prediction_market_agent/agents/coinflip_agent/coinflip_agent.py b/prediction_market_agent/agents/coinflip_agent/deploy.py
similarity index 100%
rename from prediction_market_agent/agents/coinflip_agent/coinflip_agent.py
rename to prediction_market_agent/agents/coinflip_agent/deploy.py
diff --git a/prediction_market_agent/run_agent.py b/prediction_market_agent/run_agent.py
index dc676b5e..5d9d41b6 100644
--- a/prediction_market_agent/run_agent.py
+++ b/prediction_market_agent/run_agent.py
@@ -11,7 +11,7 @@
 import typer
 from prediction_market_agent_tooling.markets.markets import MarketType
 
-from prediction_market_agent.agents.coinflip_agent.coinflip_agent import (
+from prediction_market_agent.agents.coinflip_agent.deploy import (
     DeployableCoinFlipAgent,
 )
 

From fc07332730f12a2596cc1dbc66e7977224bf6c0f Mon Sep 17 00:00:00 2001
From: Peter Jung
Date: Thu, 11 Apr 2024 14:44:19 +0200
Subject: [PATCH 11/11] lint

---
 prediction_market_agent/run_agent.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/prediction_market_agent/run_agent.py b/prediction_market_agent/run_agent.py
index 5d9d41b6..ff5eb4e8 100644
--- a/prediction_market_agent/run_agent.py
+++ b/prediction_market_agent/run_agent.py
@@ -11,9 +11,7 @@
 import typer
 from prediction_market_agent_tooling.markets.markets import MarketType
 
-from prediction_market_agent.agents.coinflip_agent.deploy import (
-    DeployableCoinFlipAgent,
-)
+from prediction_market_agent.agents.coinflip_agent.deploy import DeployableCoinFlipAgent
 
 
 class RunnableAgent(str, Enum):