diff --git a/python/instrumentation/openinference-instrumentation-bedrock/LICENSE b/python/instrumentation/openinference-instrumentation-bedrock/LICENSE new file mode 100644 index 000000000..0223315cc --- /dev/null +++ b/python/instrumentation/openinference-instrumentation-bedrock/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, 
in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright The OpenTelemetry Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/python/instrumentation/openinference-instrumentation-bedrock/README.rst b/python/instrumentation/openinference-instrumentation-bedrock/README.rst new file mode 100644 index 000000000..67c9061bd --- /dev/null +++ b/python/instrumentation/openinference-instrumentation-bedrock/README.rst @@ -0,0 +1,13 @@ +OpenInference Bedrock Instrumentation +============================================= + +|pypi| + + :target: https://pypi.org/project/openinference-instrumentation-bedrock/ + +Installation +------------ + +:: + + pip install openinference-instrumentation-bedrock diff --git a/python/instrumentation/openinference-instrumentation-bedrock/examples/bedrock_example.py b/python/instrumentation/openinference-instrumentation-bedrock/examples/bedrock_example.py new file mode 100644 index 000000000..de897e793 --- /dev/null +++ b/python/instrumentation/openinference-instrumentation-bedrock/examples/bedrock_example.py @@ -0,0 +1,31 @@ +import json + +import boto3 +from openinference.instrumentation.bedrock import BedrockInstrumentor +from opentelemetry import trace as trace_api +from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter +from opentelemetry.sdk import trace as trace_sdk +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor + +resource = Resource(attributes={}) +tracer_provider = trace_sdk.TracerProvider(resource=resource) +span_console_exporter = ConsoleSpanExporter() +span_otlp_exporter = OTLPSpanExporter(endpoint="http://127.0.0.1:6006/v1/traces") +tracer_provider.add_span_processor(SimpleSpanProcessor(span_exporter=span_console_exporter)) +tracer_provider.add_span_processor(SimpleSpanProcessor(span_exporter=span_otlp_exporter)) +trace_api.set_tracer_provider(tracer_provider=tracer_provider) + +BedrockInstrumentor().instrument() + +session = boto3.session.Session() +client = session.client("bedrock-runtime") + + +if __name__ == "__main__": + prompt = ( 
b'{"prompt": "Human: Hello there, how are you? Assistant:", "max_tokens_to_sample": 1024}' + ) + response = client.invoke_model(modelId="anthropic.claude-v2", body=prompt) + response_body = json.loads(response.get("body").read()) + print(response_body["completion"]) diff --git a/python/instrumentation/openinference-instrumentation-bedrock/pyproject.toml b/python/instrumentation/openinference-instrumentation-bedrock/pyproject.toml new file mode 100644 index 000000000..70b553e32 --- /dev/null +++ b/python/instrumentation/openinference-instrumentation-bedrock/pyproject.toml @@ -0,0 +1,57 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "openinference-instrumentation-bedrock" +dynamic = ["version"] +description = "OpenInference Bedrock Instrumentation" +readme = "README.rst" +license = "Apache-2.0" +requires-python = ">=3.8, <3.12" +authors = [ + { name = "OpenInference Authors", email = "oss@arize.com" }, +] +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", +] +dependencies = [ + "opentelemetry-api", + "opentelemetry-instrumentation", + "opentelemetry-semantic-conventions", + "openinference-semantic-conventions", + "wrapt", +] + +[project.optional-dependencies] +instruments = [ + "boto3 >= 1.28.57", +] +test = [ + "boto3 == 1.28.57", + "opentelemetry-sdk", + "opentelemetry-instrumentation-httpx", +] + +[project.urls] +Homepage = "https://github.com/Arize-ai/openinference/tree/main/python/instrumentation/openinference-instrumentation-bedrock" + +[tool.hatch.version] +path = "src/openinference/instrumentation/bedrock/version.py" + +[tool.hatch.build.targets.sdist] 
+include = [ + "/src", + "/tests", +] + +[tool.hatch.build.targets.wheel] +packages = ["src/openinference"] diff --git a/python/instrumentation/openinference-instrumentation-bedrock/src/openinference/instrumentation/bedrock/__init__.py b/python/instrumentation/openinference-instrumentation-bedrock/src/openinference/instrumentation/bedrock/__init__.py new file mode 100644 index 000000000..330a42e7d --- /dev/null +++ b/python/instrumentation/openinference-instrumentation-bedrock/src/openinference/instrumentation/bedrock/__init__.py @@ -0,0 +1,157 @@ +import io +import json +from functools import wraps +from importlib import import_module +from inspect import signature +from typing import IO, Any, Callable, Collection, Dict, Optional, Tuple, TypeVar, cast + +from botocore.client import BaseClient +from botocore.response import StreamingBody +from openinference.instrumentation.bedrock.package import _instruments +from openinference.instrumentation.bedrock.version import __version__ +from openinference.semconv.trace import MessageAttributes, SpanAttributes +from opentelemetry import context as context_api +from opentelemetry import trace as trace_api +from opentelemetry.context import _SUPPRESS_INSTRUMENTATION_KEY +from opentelemetry.instrumentation.instrumentor import BaseInstrumentor # type: ignore +from opentelemetry.trace import Tracer +from opentelemetry.util.types import AttributeValue +from wrapt import wrap_function_wrapper + +ClientCreator = TypeVar("ClientCreator", bound=Callable[..., BaseClient]) + +_MODULE = "botocore.client" + + +class InstrumentedClient(BaseClient): # type: ignore + """ + Proxy class representing an instrumented boto client. 
+ """ + + invoke_model: Callable[..., Any] + _unwrapped_invoke_model: Callable[..., Any] + + +class BufferedStreamingBody(StreamingBody): # type: ignore + _raw_stream: IO[bytes] + + def __init__(self, raw_stream: IO[bytes], content_length: int) -> None: + super().__init__(raw_stream, content_length) + self._buffer: Optional[io.IOBase] = None + + def read(self, amt: Optional[int] = None) -> bytes: + if self._buffer is None: + self._buffer = io.BytesIO(self._raw_stream.read()) + + output: bytes = self._buffer.read(amt) + return output + + def reset(self) -> None: + # Reset the buffer to enable reading the stream again + if self._buffer is not None: + self._buffer.seek(0) + + +def _client_creation_wrapper(tracer: Tracer) -> Callable[[ClientCreator], ClientCreator]: + def _client_wrapper( + wrapped: ClientCreator, + instance: Optional[Any], + args: Tuple[Any, ...], + kwargs: Dict[str, Any], + ) -> BaseClient: + """Instruments boto client creation.""" + client = wrapped(*args, **kwargs) + if context_api.get_value(_SUPPRESS_INSTRUMENTATION_KEY): + return client + + call_signature = signature(wrapped) + bound_arguments = call_signature.bind(*args, **kwargs) + bound_arguments.apply_defaults() + + if bound_arguments.arguments.get("service_name") == "bedrock-runtime": + client = cast(InstrumentedClient, client) + client._unwrapped_invoke_model = client.invoke_model + client.invoke_model = _model_invocation_wrapper(tracer)(client) + return client + + return _client_wrapper # type: ignore + + +def _model_invocation_wrapper(tracer: Tracer) -> Callable[[InstrumentedClient], Callable[..., Any]]: + def _invocation_wrapper(wrapped_client: InstrumentedClient) -> Callable[..., Any]: + """Instruments a bedrock client's `invoke_model` method.""" + + @wraps(wrapped_client.invoke_model) + def instrumented_response(*args: Any, **kwargs: Any) -> Dict[str, Any]: + with tracer.start_as_current_span("bedrock.invoke_model") as span: + response = wrapped_client._unwrapped_invoke_model(*args, 
**kwargs) + response["body"] = BufferedStreamingBody( + response["body"]._raw_stream, response["body"]._content_length + ) + if raw_request_body := kwargs.get("body"): + request_body = json.loads(raw_request_body) + response_body = json.loads(response.get("body").read()) + response["body"].reset() + + prompt = request_body.pop("prompt") + invocation_parameters = json.dumps(request_body) + + _set_span_attribute(span, SpanAttributes.LLM_PROMPTS, prompt) + _set_span_attribute( + span, SpanAttributes.LLM_INVOCATION_PARAMETERS, invocation_parameters + ) + + if model_id := kwargs.get("modelId"): + _set_span_attribute(span, SpanAttributes.LLM_MODEL_NAME, model_id) + + if isinstance(model_id, str): + (vendor, *_) = model_id.split(".") + + if vendor == "ai21": + content = str(response_body.get("completions")) + elif vendor == "anthropic": + content = str(response_body.get("completion")) + elif vendor == "cohere": + content = str(response_body.get("generations")) + else: + content = "" + + if content: + _set_span_attribute(span, MessageAttributes.MESSAGE_CONTENT, content) + + return response # type: ignore + + return instrumented_response + + return _invocation_wrapper + + +class BedrockInstrumentor(BaseInstrumentor): # type: ignore + __slots__ = ("_original_client_creator",) + + def instrumentation_dependencies(self) -> Collection[str]: + return _instruments + + def _instrument(self, **kwargs: Any) -> None: + if not (tracer_provider := kwargs.get("tracer_provider")): + tracer_provider = trace_api.get_tracer_provider() + tracer = trace_api.get_tracer(__name__, __version__, tracer_provider) + + boto = import_module(_MODULE) + self._original_client_creator = boto.ClientCreator.create_client + + wrap_function_wrapper( + module=_MODULE, + name="ClientCreator.create_client", + wrapper=_client_creation_wrapper(tracer=tracer), + ) + + def _uninstrument(self, **kwargs: Any) -> None: + boto = import_module(_MODULE) + boto.ClientCreator.create_client = self._original_client_creator + 
self._original_client_creator = None + + +def _set_span_attribute(span: trace_api.Span, name: str, value: AttributeValue) -> None: + if value is not None and value != "": + span.set_attribute(name, value) diff --git a/python/instrumentation/openinference-instrumentation-bedrock/src/openinference/instrumentation/bedrock/package.py b/python/instrumentation/openinference-instrumentation-bedrock/src/openinference/instrumentation/bedrock/package.py new file mode 100644 index 000000000..f0f48c87f --- /dev/null +++ b/python/instrumentation/openinference-instrumentation-bedrock/src/openinference/instrumentation/bedrock/package.py @@ -0,0 +1,2 @@ +_instruments = ("boto3 >= 1.28.57",) +_supports_metrics = False diff --git a/python/instrumentation/openinference-instrumentation-bedrock/src/openinference/instrumentation/bedrock/py.typed b/python/instrumentation/openinference-instrumentation-bedrock/src/openinference/instrumentation/bedrock/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/python/instrumentation/openinference-instrumentation-bedrock/src/openinference/instrumentation/bedrock/version.py b/python/instrumentation/openinference-instrumentation-bedrock/src/openinference/instrumentation/bedrock/version.py new file mode 100644 index 000000000..6c8e6b979 --- /dev/null +++ b/python/instrumentation/openinference-instrumentation-bedrock/src/openinference/instrumentation/bedrock/version.py @@ -0,0 +1 @@ +__version__ = "0.0.0" diff --git a/python/instrumentation/openinference-instrumentation-bedrock/tests/test_instrumentor.py b/python/instrumentation/openinference-instrumentation-bedrock/tests/test_instrumentor.py new file mode 100644 index 000000000..209c39c5d --- /dev/null +++ b/python/instrumentation/openinference-instrumentation-bedrock/tests/test_instrumentor.py @@ -0,0 +1,82 @@ +import io +from typing import ( + Generator, +) +from unittest.mock import MagicMock + +import boto3 +import pytest +from botocore.response import StreamingBody +from 
openinference.instrumentation.bedrock import BedrockInstrumentor +from opentelemetry import trace as trace_api +from opentelemetry.sdk import trace as trace_sdk +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace.export import SimpleSpanProcessor +from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter + + +@pytest.fixture(scope="module") +def tracer_provider(in_memory_span_exporter: InMemorySpanExporter) -> trace_api.TracerProvider: + resource = Resource(attributes={}) + tracer_provider = trace_sdk.TracerProvider(resource=resource) + span_processor = SimpleSpanProcessor(span_exporter=in_memory_span_exporter) + tracer_provider.add_span_processor(span_processor=span_processor) + return tracer_provider + + +@pytest.fixture(scope="module") +def in_memory_span_exporter() -> InMemorySpanExporter: + return InMemorySpanExporter() + + +@pytest.fixture(autouse=True) +def instrument( + tracer_provider: trace_api.TracerProvider, + in_memory_span_exporter: InMemorySpanExporter, +) -> Generator[None, None, None]: + BedrockInstrumentor().instrument(tracer_provider=tracer_provider) + yield + BedrockInstrumentor().uninstrument() + in_memory_span_exporter.clear() + + +def test_invoke_client(in_memory_span_exporter: InMemorySpanExporter) -> None: + output = b'{"completion":" Hello!","stop_reason":"stop_sequence","stop":"\\n\\nHuman:"}' + streaming_body = StreamingBody(io.BytesIO(output), len(output)) + mock_response = { + "ResponseMetadata": { + "RequestId": "xxxxxxxx-yyyy-zzzz-1234-abcdefghijklmno", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "date": "Sun, 21 Jan 2024 20:00:00 GMT", + "content-type": "application/json", + "content-length": "74", + "connection": "keep-alive", + "x-amzn-requestid": "xxxxxxxx-yyyy-zzzz-1234-abcdefghijklmno", + "x-amzn-bedrock-invocation-latency": "425", + "x-amzn-bedrock-output-token-count": "6", + "x-amzn-bedrock-input-token-count": "12", + }, + "RetryAttempts": 0, + }, + "contentType": 
"application/json", + "body": streaming_body, + } + session = boto3.session.Session() + client = session.client("bedrock-runtime", region_name="us-east-1") + + # instead of mocking the HTTP response, we mock the boto client method directly to avoid + # complexities with mocking auth + client._unwrapped_invoke_model = MagicMock(return_value=mock_response) + client.invoke_model( + modelId="anthropic.claude-v2", + body=b'{"prompt": "Human: hello there? Assistant:", "max_tokens_to_sample": 1024}', + ) + spans = in_memory_span_exporter.get_finished_spans() + assert len(spans) == 1 + span = spans[0] + assert span.status.is_ok + attributes = dict(span.attributes or dict()) + assert attributes["llm.model_name"] == "anthropic.claude-v2" + assert attributes["llm.prompts"] == "Human: hello there? Assistant:" + assert attributes["message.content"] == " Hello!" diff --git a/python/mypy.ini b/python/mypy.ini index c8ed2018d..290d3ee10 100644 --- a/python/mypy.ini +++ b/python/mypy.ini @@ -9,3 +9,9 @@ exclude = (?x)( [mypy-wrapt] ignore_missing_imports = True + +[mypy-boto3] +ignore_missing_imports = True + +[mypy-botocore.*] +ignore_missing_imports = True \ No newline at end of file diff --git a/python/tox.ini b/python/tox.ini index e8f489a19..bb9a0854e 100644 --- a/python/tox.ini +++ b/python/tox.ini @@ -3,6 +3,7 @@ isolated_build = True skipsdist = True envlist = py3{8,11}-ci-semconv + py3{8,11}-ci-{bedrock,bedrock-latest} py3{8,11}-ci-{openai,openai-latest} py3{8,11}-ci-{llama_index,llama_index-latest} @@ -13,10 +14,13 @@ deps = -r dev-requirements.txt changedir = semconv: openinference-semantic-conventions/ + bedrock: instrumentation/openinference-instrumentation-bedrock/ openai: instrumentation/openinference-instrumentation-openai/ llama_index: instrumentation/openinference-instrumentation-llama-index/ commands_pre = semconv: pip install {toxinidir}/openinference-semantic-conventions + bedrock: pip install 
{toxinidir}/instrumentation/openinference-instrumentation-bedrock[test] + bedrock-latest: pip install -U boto3 openai: pip install {toxinidir}/instrumentation/openinference-instrumentation-openai[test] openai-latest: pip install -U openai llama_index: pip install {toxinidir}/instrumentation/openinference-instrumentation-llama-index[test]