From b78bec990b81c364242e716f74a2e71457495951 Mon Sep 17 00:00:00 2001
From: Cheng Qian
Date: Mon, 11 Nov 2024 00:42:20 -0500
Subject: [PATCH 1/6] feat: watsonx support

---
 .../user-guide/extensions-user-guide/index.md |   3 +-
 .../watsonx-client-integration.ipynb          | 137 ++++++++++++++++++
 2 files changed, 139 insertions(+), 1 deletion(-)
 create mode 100644 python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.ipynb

diff --git a/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/index.md b/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/index.md
index dc11a2b9fca1..9318985c0d9f 100644
--- a/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/index.md
+++ b/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/index.md
@@ -12,6 +12,7 @@ myst:
 :hidden:
 
 azure-container-code-executor
+watsonx-client-integration
 ```
 
@@ -61,7 +62,7 @@ Find community samples and examples of how to use AutoGen
 - Description should be a brief description of the project. 1 short sentence is ideal.
 -->
 
-## Built-in extenions
+## Built-in extensions
 
 Read docs for built in extensions:
 
diff --git a/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.ipynb b/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.ipynb
new file mode 100644
index 000000000000..11a4bcb79e0a
--- /dev/null
+++ b/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.ipynb
@@ -0,0 +1,137 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "a3ffff3e-a959-47db-90ab-be7245090cab",
+   "metadata": {},
+   "source": [
+    "# Watsonx.ai extension for Autogen\n",
+    "## Intro\n",
+    "\n",
+    "This is an example notebook for demonstrating how the new [autogen>=0.4](https://github.com/microsoft/autogen) can utilize the LLMs hosted on [IBM® watsonx.ai™](https://www.ibm.com/products/watsonx-ai) by using the [autogen-watsonx-client](https://github.com/tsinggggg/autogen-watsonx-client)\n",
+    "\n",
+    "### prerequisites\n",
+    "\n",
+    "- `pip install --upgrade autogen-watsonx-client`\n",
+    "- `pip install --upgrade autogen-agentchat>=0.4 --pre`\n",
+    "- access to a watsonx.ai instance, setting up environment variables `WATSONX_API_KEY`, one of `WATSONX_SPACE_ID` or `WATSONX_PROJECT_ID`, optionally `WATSONX_URL`"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "0327b180-672b-4b15-b206-d76474bd22a5",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "\n",
+    "from autogen_agentchat.agents import AssistantAgent\n",
+    "from autogen_agentchat.task import Console, TextMentionTermination\n",
+    "from autogen_agentchat.teams import RoundRobinGroupChat\n",
+    "from autogen_watsonx_client.client import WatsonXChatCompletionClient\n",
+    "from autogen_watsonx_client.config import WatsonxClientConfiguration"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "cb6261a5-6bc2-4a8e-8656-1adeee5d0e8b",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "wx_config = WatsonxClientConfiguration(\n",
+    "    model_id=\"meta-llama/llama-3-2-90b-vision-instruct\",  # pick a model you have access to on wx.ai here\n",
+    "    api_key=os.environ.get(\"WATSONX_API_KEY\"),\n",
+    "    url=os.environ.get(\"WATSONX_URL\"),\n",
+    "    space_id=os.environ.get(\"WATSONX_SPACE_ID\"),\n",
+    "    project_id=os.environ.get(\"WATSONX_PROJECT_ID\"),\n",
+    ")\n",
+    "\n",
+    "wx_client = WatsonXChatCompletionClient(**wx_config)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "id": "8bf08b8e-5849-4fcf-886d-c7911534519c",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "---------- user ----------\n",
+      "What is the weather in New York?\n",
+      "---------- weather_agent ----------\n",
+      "[FunctionCall(id='chatcmpl-tool-e29c8d140a60450799914943abffee3a', arguments='{\"city\": \"New York\"}', name='get_weather')]\n",
+      "[Prompt tokens: 237, Completion tokens: 19]\n",
+      "---------- weather_agent ----------\n",
+      "[FunctionExecutionResult(content='The weather in New York is 73 degrees and Sunny.', call_id='chatcmpl-tool-e29c8d140a60450799914943abffee3a')]\n",
+      "---------- weather_agent ----------\n",
+      "The weather in New York is 73 degrees and Sunny.\n",
+      "[Prompt tokens: 315, Completion tokens: 13]\n",
+      "---------- weather_agent ----------\n",
+      "<|python_tag|>TERMINATE\n",
+      "[Prompt tokens: 321, Completion tokens: 5]\n",
+      "---------- Summary ----------\n",
+      "Number of messages: 5\n",
+      "Finish reason: Text 'TERMINATE' mentioned\n",
+      "Total prompt tokens: 873\n",
+      "Total completion tokens: 37\n",
+      "Duration: 2.56 seconds\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Define a tool\n",
+    "async def get_weather(city: str) -> str:\n",
+    "    return f\"The weather in {city} is 73 degrees and Sunny.\"\n",
+    "\n",
+    "\n",
+    "async def main() -> None:\n",
+    "    # Define an agent\n",
+    "    weather_agent = AssistantAgent(\n",
+    "        name=\"weather_agent\",\n",
+    "        model_client=wx_client,\n",
+    "        tools=[get_weather],\n",
+    "    )\n",
+    "\n",
+    "    # Define termination condition\n",
+    "    termination = TextMentionTermination(\"TERMINATE\")\n",
+    "\n",
+    "    # Define a team\n",
+    "    agent_team = RoundRobinGroupChat([weather_agent], termination_condition=termination)\n",
+    "\n",
+    "    # Run the team and stream messages to the console\n",
+    "    stream = agent_team.run_stream(task=\"What is the weather in New York?\")\n",
+    "    await Console(stream)\n",
+    "\n",
+    "\n",
+    "# NOTE: if running this inside a Python script you'll need to use asyncio.run(main()).\n",
+    "await main()"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.11.10"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}

From 2ebd58c129d55fa3b8be673fb01119faa81eabb3 Mon Sep 17 00:00:00 2001
From: Cheng Qian
Date: Mon, 11 Nov 2024 16:40:11 -0500
Subject: [PATCH 2/6] fix: addressing comments

---
 .../watsonx-client-integration.ipynb          | 137 ------------------
 .../watsonx-client-integration.md             |   5 +
 2 files changed, 5 insertions(+), 137 deletions(-)
 delete mode 100644 python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.ipynb
 create mode 100644 python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.md

diff --git a/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.ipynb b/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.ipynb
deleted file mode 100644
index 11a4bcb79e0a..000000000000
--- a/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.ipynb
+++ /dev/null
@@ -1,137 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "id": "a3ffff3e-a959-47db-90ab-be7245090cab",
-   "metadata": {},
-   "source": [
-    "# Watsonx.ai extension for Autogen\n",
-    "## Intro\n",
-    "\n",
-    "This is an example notebook for demonstrating how the new [autogen>=0.4](https://github.com/microsoft/autogen) can utilize the LLMs hosted on [IBM® watsonx.ai™](https://www.ibm.com/products/watsonx-ai) by using the [autogen-watsonx-client](https://github.com/tsinggggg/autogen-watsonx-client)\n",
-    "\n",
-    "### prerequisites\n",
-    "\n",
-    "- `pip install --upgrade autogen-watsonx-client`\n",
-    "- `pip install --upgrade autogen-agentchat>=0.4 --pre`\n",
-    "- access to a watsonx.ai instance, setting up environment variables `WATSONX_API_KEY`, one of `WATSONX_SPACE_ID` or `WATSONX_PROJECT_ID`, optionally `WATSONX_URL`"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "0327b180-672b-4b15-b206-d76474bd22a5",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import os\n",
-    "\n",
-    "from autogen_agentchat.agents import AssistantAgent\n",
-    "from autogen_agentchat.task import Console, TextMentionTermination\n",
-    "from autogen_agentchat.teams import RoundRobinGroupChat\n",
-    "from autogen_watsonx_client.client import WatsonXChatCompletionClient\n",
-    "from autogen_watsonx_client.config import WatsonxClientConfiguration"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "id": "cb6261a5-6bc2-4a8e-8656-1adeee5d0e8b",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "wx_config = WatsonxClientConfiguration(\n",
-    "    model_id=\"meta-llama/llama-3-2-90b-vision-instruct\",  # pick a model you have access to on wx.ai here\n",
-    "    api_key=os.environ.get(\"WATSONX_API_KEY\"),\n",
-    "    url=os.environ.get(\"WATSONX_URL\"),\n",
-    "    space_id=os.environ.get(\"WATSONX_SPACE_ID\"),\n",
-    "    project_id=os.environ.get(\"WATSONX_PROJECT_ID\"),\n",
-    ")\n",
-    "\n",
-    "wx_client = WatsonXChatCompletionClient(**wx_config)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "8bf08b8e-5849-4fcf-886d-c7911534519c",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "---------- user ----------\n",
-      "What is the weather in New York?\n",
-      "---------- weather_agent ----------\n",
-      "[FunctionCall(id='chatcmpl-tool-e29c8d140a60450799914943abffee3a', arguments='{\"city\": \"New York\"}', name='get_weather')]\n",
-      "[Prompt tokens: 237, Completion tokens: 19]\n",
-      "---------- weather_agent ----------\n",
-      "[FunctionExecutionResult(content='The weather in New York is 73 degrees and Sunny.', call_id='chatcmpl-tool-e29c8d140a60450799914943abffee3a')]\n",
-      "---------- weather_agent ----------\n",
-      "The weather in New York is 73 degrees and Sunny.\n",
-      "[Prompt tokens: 315, Completion tokens: 13]\n",
-      "---------- weather_agent ----------\n",
-      "<|python_tag|>TERMINATE\n",
-      "[Prompt tokens: 321, Completion tokens: 5]\n",
-      "---------- Summary ----------\n",
-      "Number of messages: 5\n",
-      "Finish reason: Text 'TERMINATE' mentioned\n",
-      "Total prompt tokens: 873\n",
-      "Total completion tokens: 37\n",
-      "Duration: 2.56 seconds\n"
-     ]
-    }
-   ],
-   "source": [
-    "# Define a tool\n",
-    "async def get_weather(city: str) -> str:\n",
-    "    return f\"The weather in {city} is 73 degrees and Sunny.\"\n",
-    "\n",
-    "\n",
-    "async def main() -> None:\n",
-    "    # Define an agent\n",
-    "    weather_agent = AssistantAgent(\n",
-    "        name=\"weather_agent\",\n",
-    "        model_client=wx_client,\n",
-    "        tools=[get_weather],\n",
-    "    )\n",
-    "\n",
-    "    # Define termination condition\n",
-    "    termination = TextMentionTermination(\"TERMINATE\")\n",
-    "\n",
-    "    # Define a team\n",
-    "    agent_team = RoundRobinGroupChat([weather_agent], termination_condition=termination)\n",
-    "\n",
-    "    # Run the team and stream messages to the console\n",
-    "    stream = agent_team.run_stream(task=\"What is the weather in New York?\")\n",
-    "    await Console(stream)\n",
-    "\n",
-    "\n",
-    "# NOTE: if running this inside a Python script you'll need to use asyncio.run(main()).\n",
-    "await main()"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.11.10"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
diff --git a/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.md b/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.md
new file mode 100644
index 000000000000..742b9a1eead6
--- /dev/null
+++ b/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.md
@@ -0,0 +1,5 @@
+# Watsonx extension for Autogen
+
+- [autogen-watsonx-client](https://github.com/tsinggggg/autogen-watsonx-client) is a community extension for the Autogen project, specifically for the new Autogen architecture since version `0.4`. The goal is to support Watsonx.ai hosted LLMs in the Autogen framework.
+- For any bug report or issues, please create issues in the [extension's github repo](https://github.com/tsinggggg/autogen-watsonx-client) and tag [@tsinggggg](https://github.com/tsinggggg). The extension is NOT maintained by the Autogen developers/maintainers.
+- For an example of using the extension, please refer to the README [here](https://github.com/tsinggggg/autogen-watsonx-client?tab=readme-ov-file#usage)
\ No newline at end of file

From 1eeebf00a5fb1810838dcef2fe703212488d3fbe Mon Sep 17 00:00:00 2001
From: Cheng Qian
Date: Mon, 11 Nov 2024 16:44:46 -0500
Subject: [PATCH 3/6] fix: addressing comments

---
 .../extensions-user-guide/watsonx-client-integration.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.md b/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.md
index 742b9a1eead6..e36ecbacd7da 100644
--- a/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.md
+++ b/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.md
@@ -1,5 +1,5 @@
 # Watsonx extension for Autogen
 
-- [autogen-watsonx-client](https://github.com/tsinggggg/autogen-watsonx-client) is a community extension for the Autogen project, specifically for the new Autogen architecture since version `0.4`. The goal is to support Watsonx.ai hosted LLMs in the Autogen framework.
-- For any bug report or issues, please create issues in the [extension's github repo](https://github.com/tsinggggg/autogen-watsonx-client) and tag [@tsinggggg](https://github.com/tsinggggg). The extension is NOT maintained by the Autogen developers/maintainers.
+- [autogen-watsonx-client](https://github.com/tsinggggg/autogen-watsonx-client) is a community extension for the Autogen project, specifically for the new Autogen architecture since version `0.4`. The goal is to support Watsonx.ai hosted LLMs in the Autogen framework. This is available on [pypi](https://pypi.org/project/autogen-watsonx-client/).
+- For any related bug report or issues, please create issues in the [extension's github repo](https://github.com/tsinggggg/autogen-watsonx-client) and tag [@tsinggggg](https://github.com/tsinggggg). The extension is NOT maintained by the Autogen developers/maintainers.
 - For an example of using the extension, please refer to the README [here](https://github.com/tsinggggg/autogen-watsonx-client?tab=readme-ov-file#usage)
\ No newline at end of file

From 64b966ca4eb37f819b8baff85cf1e412ba08945f Mon Sep 17 00:00:00 2001
From: Cheng Qian
Date: Thu, 14 Nov 2024 19:59:52 -0500
Subject: [PATCH 4/6] fix: addressing comments

---
 .../docs/src/user-guide/extensions-user-guide/index.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/index.md b/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/index.md
index 9318985c0d9f..a8f8ab61ff3c 100644
--- a/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/index.md
+++ b/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/index.md
@@ -54,6 +54,7 @@ Find community samples and examples of how to use AutoGen
 
 | Name | Package | Description |
 |---|---|---|
+| [autogen-watsonx-client](https://github.com/tsinggggg/autogen-watsonx-client) | [PyPi](https://pypi.org/project/autogen-watsonx-client/) | Model client for [IBM watsonx.ai](https://www.ibm.com/products/watsonx-ai) |
 
 

From 51d72fc8da99f29e39aa6e8abe3cfd48c2973425 Mon Sep 17 00:00:00 2001
From: Eric Zhu
Date: Thu, 14 Nov 2024 20:30:22 -0800
Subject: [PATCH 5/6] Delete python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.md

---
 .../extensions-user-guide/watsonx-client-integration.md | 5 -----
 1 file changed, 5 deletions(-)
 delete mode 100644 python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.md

diff --git a/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.md b/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.md
deleted file mode 100644
index e36ecbacd7da..000000000000
--- a/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/watsonx-client-integration.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# Watsonx extension for Autogen
-
-- [autogen-watsonx-client](https://github.com/tsinggggg/autogen-watsonx-client) is a community extension for the Autogen project, specifically for the new Autogen architecture since version `0.4`. The goal is to support Watsonx.ai hosted LLMs in the Autogen framework. This is available on [pypi](https://pypi.org/project/autogen-watsonx-client/).
-- For any related bug report or issues, please create issues in the [extension's github repo](https://github.com/tsinggggg/autogen-watsonx-client) and tag [@tsinggggg](https://github.com/tsinggggg). The extension is NOT maintained by the Autogen developers/maintainers.
-- For an example of using the extension, please refer to the README [here](https://github.com/tsinggggg/autogen-watsonx-client?tab=readme-ov-file#usage)
\ No newline at end of file

From ec4741a63c6882a8a52c652aeb360b7e6d999edf Mon Sep 17 00:00:00 2001
From: Eric Zhu
Date: Thu, 14 Nov 2024 20:30:54 -0800
Subject: [PATCH 6/6] Update python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/index.md

---
 .../docs/src/user-guide/extensions-user-guide/index.md | 1 -
 1 file changed, 1 deletion(-)

diff --git a/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/index.md b/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/index.md
index a8f8ab61ff3c..15f1be3d2c61 100644
--- a/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/index.md
+++ b/python/packages/autogen-core/docs/src/user-guide/extensions-user-guide/index.md
@@ -12,7 +12,6 @@ myst:
 :hidden:
 
 azure-container-code-executor
-watsonx-client-integration
 ```
 
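For readers who want to run the example from PATCH 1/6 outside Jupyter: the notebook's final cell notes that a plain Python script must call `asyncio.run(main())` instead of using a top-level `await`. The sketch below only rearranges the code already shown in that notebook into script form; it assumes the same `autogen-watsonx-client` and `autogen-agentchat>=0.4` preview APIs used there, plus the `WATSONX_*` environment variables listed in the notebook's prerequisites.

```python
# Minimal script-form sketch of the notebook example from PATCH 1/6.
# Assumptions: `autogen-watsonx-client` and the `autogen-agentchat>=0.4` preview are installed,
# and WATSONX_API_KEY plus one of WATSONX_SPACE_ID / WATSONX_PROJECT_ID are set.
import asyncio
import os

from autogen_agentchat.agents import AssistantAgent
from autogen_agentchat.task import Console, TextMentionTermination
from autogen_agentchat.teams import RoundRobinGroupChat
from autogen_watsonx_client.client import WatsonXChatCompletionClient
from autogen_watsonx_client.config import WatsonxClientConfiguration


async def get_weather(city: str) -> str:
    """Toy tool the agent can call."""
    return f"The weather in {city} is 73 degrees and Sunny."


async def main() -> None:
    # Configure the watsonx.ai-backed model client (same fields as in the notebook).
    wx_config = WatsonxClientConfiguration(
        model_id="meta-llama/llama-3-2-90b-vision-instruct",  # pick a model you have access to
        api_key=os.environ.get("WATSONX_API_KEY"),
        url=os.environ.get("WATSONX_URL"),
        space_id=os.environ.get("WATSONX_SPACE_ID"),
        project_id=os.environ.get("WATSONX_PROJECT_ID"),
    )
    wx_client = WatsonXChatCompletionClient(**wx_config)

    # One tool-using agent in a round-robin team that stops when "TERMINATE" is mentioned.
    weather_agent = AssistantAgent(name="weather_agent", model_client=wx_client, tools=[get_weather])
    termination = TextMentionTermination("TERMINATE")
    team = RoundRobinGroupChat([weather_agent], termination_condition=termination)

    # Run the team and stream messages to the console.
    stream = team.run_stream(task="What is the weather in New York?")
    await Console(stream)


if __name__ == "__main__":
    # In a script (unlike the notebook), drive the event loop explicitly.
    asyncio.run(main())
```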