diff --git a/.codegen.json b/.codegen.json index f28c71f5..8a51a674 100644 --- a/.codegen.json +++ b/.codegen.json @@ -1 +1 @@ -{ "engineHash": "f073ce3", "specHash": "544d370", "version": "1.8.0" } +{ "engineHash": "a839036", "specHash": "d7dfe68", "version": "1.8.0" } diff --git a/box_sdk_gen/managers/ai.py b/box_sdk_gen/managers/ai.py index c25e8495..16cccb1b 100644 --- a/box_sdk_gen/managers/ai.py +++ b/box_sdk_gen/managers/ai.py @@ -373,7 +373,10 @@ def create_ai_extract( """ Sends an AI request to supported Large Language Models (LLMs) and extracts metadata in form of key-value pairs. - Freeform metadata extraction does not require any metadata template setup before sending the request. + In this request, both the prompt and the output can be freeform. + + + Metadata template setup before sending the request is not required. :param prompt: The prompt provided to a Large Language Model (LLM) in the request. The prompt can be up to 10000 characters long and it can be an XML or a JSON schema. :type prompt: str @@ -414,7 +417,10 @@ def create_ai_extract_structured( """ Sends an AI request to supported Large Language Models (LLMs) and returns extracted metadata as a set of key-value pairs. - For this request, you need to use an already defined metadata template or a define a schema yourself. + For this request, you either need a metadata template or a list of fields you want to extract. + + + To ensure a structured result, the input must be **either** a metadata template or a list of fields. 
To learn more about creating templates, see [Creating metadata templates in the Admin Console](https://support.box.com/hc/en-us/articles/360044194033-Customizing-Metadata-Templates) diff --git a/box_sdk_gen/schemas/__init__.py b/box_sdk_gen/schemas/__init__.py index 8a9ec282..fe815e5f 100644 --- a/box_sdk_gen/schemas/__init__.py +++ b/box_sdk_gen/schemas/__init__.py @@ -1,5 +1,11 @@ +from box_sdk_gen.schemas.ai_agent_info import * + +from box_sdk_gen.schemas.ai_response import * + from box_sdk_gen.schemas.ai_citation import * +from box_sdk_gen.schemas.ai_response_full import * + from box_sdk_gen.schemas.ai_dialogue_history import * from box_sdk_gen.schemas.ai_extract_response import * @@ -40,10 +46,6 @@ from box_sdk_gen.schemas.ai_ask import * -from box_sdk_gen.schemas.ai_response import * - -from box_sdk_gen.schemas.ai_response_full import * - from box_sdk_gen.schemas.app_item import * from box_sdk_gen.schemas.classification import * diff --git a/box_sdk_gen/schemas/ai_agent_info.py b/box_sdk_gen/schemas/ai_agent_info.py new file mode 100644 index 00000000..fb5da926 --- /dev/null +++ b/box_sdk_gen/schemas/ai_agent_info.py @@ -0,0 +1,49 @@ +from typing import Optional + +from box_sdk_gen.internal.base_object import BaseObject + +from typing import List + +from box_sdk_gen.box.errors import BoxSDKError + + +class AiAgentInfoModelsField(BaseObject): + def __init__( + self, + *, + name: Optional[str] = None, + provider: Optional[str] = None, + supported_purpose: Optional[str] = None, + **kwargs + ): + """ + :param name: The name of the model used for the request, defaults to None + :type name: Optional[str], optional + :param provider: The provider that owns the model used for the request, defaults to None + :type provider: Optional[str], optional + :param supported_purpose: The supported purpose utilized by the model used for the request, defaults to None + :type supported_purpose: Optional[str], optional + """ + super().__init__(**kwargs) + self.name = name + 
self.provider = provider + self.supported_purpose = supported_purpose + + +class AiAgentInfo(BaseObject): + def __init__( + self, + *, + models: Optional[List[AiAgentInfoModelsField]] = None, + processor: Optional[str] = None, + **kwargs + ): + """ + :param models: The models used for the request, defaults to None + :type models: Optional[List[AiAgentInfoModelsField]], optional + :param processor: The processor used for the request, defaults to None + :type processor: Optional[str], optional + """ + super().__init__(**kwargs) + self.models = models + self.processor = processor diff --git a/box_sdk_gen/schemas/ai_response.py b/box_sdk_gen/schemas/ai_response.py index 2ba52a53..ff59efbc 100644 --- a/box_sdk_gen/schemas/ai_response.py +++ b/box_sdk_gen/schemas/ai_response.py @@ -2,6 +2,8 @@ from box_sdk_gen.internal.base_object import BaseObject +from box_sdk_gen.schemas.ai_agent_info import AiAgentInfo + from box_sdk_gen.box.errors import BoxSDKError from box_sdk_gen.internal.utils import DateTime @@ -14,6 +16,7 @@ def __init__( created_at: DateTime, *, completion_reason: Optional[str] = None, + ai_agent_info: Optional[AiAgentInfo] = None, **kwargs ): """ @@ -28,3 +31,4 @@ def __init__( self.answer = answer self.created_at = created_at self.completion_reason = completion_reason + self.ai_agent_info = ai_agent_info diff --git a/box_sdk_gen/schemas/ai_response_full.py b/box_sdk_gen/schemas/ai_response_full.py index 5da2b4fb..7c9a1c89 100644 --- a/box_sdk_gen/schemas/ai_response_full.py +++ b/box_sdk_gen/schemas/ai_response_full.py @@ -4,6 +4,8 @@ from box_sdk_gen.internal.utils import DateTime +from box_sdk_gen.schemas.ai_agent_info import AiAgentInfo + from box_sdk_gen.schemas.ai_response import AiResponse from box_sdk_gen.schemas.ai_citation import AiCitation @@ -19,6 +21,7 @@ def __init__( *, citations: Optional[List[AiCitation]] = None, completion_reason: Optional[str] = None, + ai_agent_info: Optional[AiAgentInfo] = None, **kwargs ): """ @@ -35,6 +38,7 @@ def 
__init__( answer=answer, created_at=created_at, completion_reason=completion_reason, + ai_agent_info=ai_agent_info, **kwargs ) self.citations = citations diff --git a/docs/ai.md b/docs/ai.md index 0d182c11..0cca3ac3 100644 --- a/docs/ai.md +++ b/docs/ai.md @@ -159,7 +159,8 @@ This response can be one of the following four objects: ## Extract metadata (freeform) Sends an AI request to supported Large Language Models (LLMs) and extracts metadata in form of key-value pairs. -Freeform metadata extraction does not require any metadata template setup before sending the request. +In this request, both the prompt and the output can be freeform. +Metadata template setup before sending the request is not required. This operation is performed by calling function `create_ai_extract`. @@ -196,7 +197,8 @@ A response including the answer from the LLM. ## Extract metadata (structured) Sends an AI request to supported Large Language Models (LLMs) and returns extracted metadata as a set of key-value pairs. -For this request, you need to use an already defined metadata template or a define a schema yourself. +For this request, you either need a metadata template or a list of fields you want to extract. +To ensure a structured result, the input must be **either** a metadata template or a list of fields. To learn more about creating templates, see [Creating metadata templates in the Admin Console](https://support.box.com/hc/en-us/articles/360044194033-Customizing-Metadata-Templates) or use the [metadata template API](g://metadata/templates/create).