feat: add AI LLM endpoint AWS params (box/box-openapi#478) #354

Merged on Oct 25, 2024 (18 commits)

Changes from all commits:
- 8463517 (box-sdk-build, Oct 9, 2024): chore: Update .codegen.json with commit hash of codegen and openapi spec
- 42dc955 (box-sdk-build, Oct 11, 2024): chore: Update .codegen.json with commit hash of codegen and openapi spec
- 100cab3 (box-sdk-build, Oct 11, 2024): feat: sort OpenAPI file (box/box-openapi#460)
- 19ad785 (box-sdk-build, Oct 15, 2024): chore: Update .codegen.json with commit hash of codegen and openapi spec
- ab15ab9 (box-sdk-build, Oct 17, 2024): chore: Update .codegen.json with commit hash of codegen and openapi spec
- 9677742 (box-sdk-build, Oct 18, 2024): chore: Update .codegen.json with commit hash of codegen and openapi spec
- a9d4291 (box-sdk-build, Oct 21, 2024): feat: Modify schema for teams for integration mapping api (box/box-op…
- 01fd2ac (box-sdk-build, Oct 21, 2024): chore: Update .codegen.json with commit hash of codegen and openapi spec
- b78e5fe (box-sdk-build, Oct 22, 2024): fix: update client error schema to support schema errors (box/box-ope…
- 37ddf7c (box-sdk-build, Oct 22, 2024): fix: set stream position to 0 for multipart requests (box/box-codegen…
- 284e5c9 (Oct 22, 2024): Auto resolve conflict by keeping our changes
- c29d982 (box-sdk-build, Oct 22, 2024): chore: Update .codegen.json with commit hash of codegen and openapi spec
- 330236f (box-sdk-build, Oct 23, 2024): chore: Update .codegen.json with commit hash of codegen and openapi spec
- 9dfff95 (box-sdk-build, Oct 23, 2024): fix: fix additionalProperties (box/box-openapi#477)
- 0183113 (box-sdk-build, Oct 23, 2024): fix: add Dict import to the derived type when used in base type (box/…
- 2178854 (box-sdk-build, Oct 24, 2024): chore: Update .codegen.json with commit hash of codegen and openapi spec
- 6383dda (Oct 24, 2024): Auto resolve conflict by keeping our changes
- 8fe5147 (box-sdk-build, Oct 24, 2024): feat: add AWS params (box/box-openapi#478)
2 changes: 1 addition & 1 deletion .codegen.json
@@ -1 +1 @@
{ "engineHash": "11fbad9", "specHash": "abd6037", "version": "1.5.1" }
{ "engineHash": "2efc8ab", "specHash": "90cf4e4", "version": "1.5.1" }
2 changes: 2 additions & 0 deletions box_sdk_gen/schemas/__init__.py
@@ -6,6 +6,8 @@

from box_sdk_gen.schemas.ai_item_base import *

from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import *

from box_sdk_gen.schemas.ai_llm_endpoint_params_google import *

from box_sdk_gen.schemas.ai_llm_endpoint_params_open_ai import *
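Since the new module is wildcard-imported here just like the Google and OpenAI variants, `AiLlmEndpointParamsAws` should also be importable directly from the `box_sdk_gen.schemas` package. A minimal sketch, assuming the generated module defines no restrictive `__all__`:

```python
# Package-level import made possible by the wildcard import added above.
from box_sdk_gen.schemas import AiLlmEndpointParamsAws
```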
10 changes: 8 additions & 2 deletions box_sdk_gen/schemas/ai_agent_basic_gen_tool.py
@@ -6,6 +6,8 @@

from box_sdk_gen.schemas.ai_llm_endpoint_params_google import AiLlmEndpointParamsGoogle

from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import AiLlmEndpointParamsAws

from box_sdk_gen.schemas.ai_agent_basic_text_tool_base import AiAgentBasicTextToolBase

from box_sdk_gen.schemas.ai_agent_basic_text_tool_text_gen import (
@@ -32,7 +34,11 @@ def __init__(
model: Optional[str] = None,
num_tokens_for_completion: Optional[int] = None,
llm_endpoint_params: Optional[
Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle]
Union[
AiLlmEndpointParamsOpenAi,
AiLlmEndpointParamsGoogle,
AiLlmEndpointParamsAws,
]
] = None,
**kwargs
):
@@ -53,7 +59,7 @@ def __init__(
:param num_tokens_for_completion: The number of tokens for completion., defaults to None
:type num_tokens_for_completion: Optional[int], optional
:param llm_endpoint_params: The parameters for the LLM endpoint specific to OpenAI / Google models., defaults to None
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle]], optional
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle, AiLlmEndpointParamsAws]], optional
"""
super().__init__(
embeddings=embeddings,
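With the widened `Union`, an AWS params object can now be passed anywhere the OpenAI or Google variants were accepted. A minimal sketch against the constructor shown above; the model identifier and token count are illustrative placeholders, not values taken from this diff:

```python
from box_sdk_gen.schemas.ai_agent_basic_gen_tool import AiAgentBasicGenTool
from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import AiLlmEndpointParamsAws

# AiLlmEndpointParamsAws now satisfies the llm_endpoint_params Union on this tool.
gen_tool = AiAgentBasicGenTool(
    model='aws__claude_3_haiku',     # illustrative model identifier
    num_tokens_for_completion=8000,  # illustrative value
    llm_endpoint_params=AiLlmEndpointParamsAws(temperature=0.2),
)
```

The same three-member `Union` is applied to the basic-text, text-gen, and long-text tool schemas in the files that follow, so the pattern carries over unchanged.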
10 changes: 8 additions & 2 deletions box_sdk_gen/schemas/ai_agent_basic_text_tool.py
@@ -6,6 +6,8 @@

from box_sdk_gen.schemas.ai_llm_endpoint_params_google import AiLlmEndpointParamsGoogle

from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import AiLlmEndpointParamsAws

from box_sdk_gen.schemas.ai_agent_basic_text_tool_base import AiAgentBasicTextToolBase


@@ -18,7 +20,11 @@ def __init__(
model: Optional[str] = None,
num_tokens_for_completion: Optional[int] = None,
llm_endpoint_params: Optional[
Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle]
Union[
AiLlmEndpointParamsOpenAi,
AiLlmEndpointParamsGoogle,
AiLlmEndpointParamsAws,
]
] = None,
**kwargs
):
@@ -34,7 +40,7 @@ def __init__(
:param num_tokens_for_completion: The number of tokens for completion., defaults to None
:type num_tokens_for_completion: Optional[int], optional
:param llm_endpoint_params: The parameters for the LLM endpoint specific to OpenAI / Google models., defaults to None
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle]], optional
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle, AiLlmEndpointParamsAws]], optional
"""
super().__init__(
model=model,
10 changes: 8 additions & 2 deletions box_sdk_gen/schemas/ai_agent_basic_text_tool_base.py
@@ -8,6 +8,8 @@

from box_sdk_gen.schemas.ai_llm_endpoint_params_google import AiLlmEndpointParamsGoogle

from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import AiLlmEndpointParamsAws


class AiAgentBasicTextToolBase(BaseObject):
def __init__(
@@ -16,7 +18,11 @@ def __init__(
model: Optional[str] = None,
num_tokens_for_completion: Optional[int] = None,
llm_endpoint_params: Optional[
Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle]
Union[
AiLlmEndpointParamsOpenAi,
AiLlmEndpointParamsGoogle,
AiLlmEndpointParamsAws,
]
] = None,
**kwargs
):
@@ -26,7 +32,7 @@ def __init__(
:param num_tokens_for_completion: The number of tokens for completion., defaults to None
:type num_tokens_for_completion: Optional[int], optional
:param llm_endpoint_params: The parameters for the LLM endpoint specific to OpenAI / Google models., defaults to None
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle]], optional
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle, AiLlmEndpointParamsAws]], optional
"""
super().__init__(**kwargs)
self.model = model
10 changes: 8 additions & 2 deletions box_sdk_gen/schemas/ai_agent_basic_text_tool_text_gen.py
@@ -6,6 +6,8 @@

from box_sdk_gen.schemas.ai_llm_endpoint_params_google import AiLlmEndpointParamsGoogle

from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import AiLlmEndpointParamsAws

from box_sdk_gen.schemas.ai_agent_basic_text_tool_base import AiAgentBasicTextToolBase


@@ -18,7 +20,11 @@ def __init__(
model: Optional[str] = None,
num_tokens_for_completion: Optional[int] = None,
llm_endpoint_params: Optional[
Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle]
Union[
AiLlmEndpointParamsOpenAi,
AiLlmEndpointParamsGoogle,
AiLlmEndpointParamsAws,
]
] = None,
**kwargs
):
@@ -36,7 +42,7 @@ def __init__(
:param num_tokens_for_completion: The number of tokens for completion., defaults to None
:type num_tokens_for_completion: Optional[int], optional
:param llm_endpoint_params: The parameters for the LLM endpoint specific to OpenAI / Google models., defaults to None
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle]], optional
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle, AiLlmEndpointParamsAws]], optional
"""
super().__init__(
model=model,
10 changes: 8 additions & 2 deletions box_sdk_gen/schemas/ai_agent_long_text_tool.py
@@ -8,6 +8,8 @@

from box_sdk_gen.schemas.ai_llm_endpoint_params_google import AiLlmEndpointParamsGoogle

from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import AiLlmEndpointParamsAws

from box_sdk_gen.schemas.ai_agent_basic_text_tool_base import AiAgentBasicTextToolBase

from box_sdk_gen.schemas.ai_agent_basic_text_tool import AiAgentBasicTextTool
@@ -59,7 +61,11 @@ def __init__(
model: Optional[str] = None,
num_tokens_for_completion: Optional[int] = None,
llm_endpoint_params: Optional[
Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle]
Union[
AiLlmEndpointParamsOpenAi,
AiLlmEndpointParamsGoogle,
AiLlmEndpointParamsAws,
]
] = None,
**kwargs
):
@@ -75,7 +81,7 @@ def __init__(
:param num_tokens_for_completion: The number of tokens for completion., defaults to None
:type num_tokens_for_completion: Optional[int], optional
:param llm_endpoint_params: The parameters for the LLM endpoint specific to OpenAI / Google models., defaults to None
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle]], optional
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle, AiLlmEndpointParamsAws]], optional
"""
super().__init__(
system_message=system_message,
10 changes: 8 additions & 2 deletions box_sdk_gen/schemas/ai_agent_long_text_tool_text_gen.py
@@ -8,6 +8,8 @@

from box_sdk_gen.schemas.ai_llm_endpoint_params_google import AiLlmEndpointParamsGoogle

from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import AiLlmEndpointParamsAws

from box_sdk_gen.schemas.ai_agent_basic_text_tool_base import AiAgentBasicTextToolBase

from box_sdk_gen.schemas.ai_agent_basic_text_tool_text_gen import (
@@ -61,7 +63,11 @@ def __init__(
model: Optional[str] = None,
num_tokens_for_completion: Optional[int] = None,
llm_endpoint_params: Optional[
Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle]
Union[
AiLlmEndpointParamsOpenAi,
AiLlmEndpointParamsGoogle,
AiLlmEndpointParamsAws,
]
] = None,
**kwargs
):
@@ -79,7 +85,7 @@ def __init__(
:param num_tokens_for_completion: The number of tokens for completion., defaults to None
:type num_tokens_for_completion: Optional[int], optional
:param llm_endpoint_params: The parameters for the LLM endpoint specific to OpenAI / Google models., defaults to None
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle]], optional
:type llm_endpoint_params: Optional[Union[AiLlmEndpointParamsOpenAi, AiLlmEndpointParamsGoogle, AiLlmEndpointParamsAws]], optional
"""
super().__init__(
system_message=system_message,
39 changes: 39 additions & 0 deletions box_sdk_gen/schemas/ai_llm_endpoint_params_aws.py
@@ -0,0 +1,39 @@
from enum import Enum

from typing import Optional

from box_sdk_gen.internal.base_object import BaseObject


class AiLlmEndpointParamsAwsTypeField(str, Enum):
AWS_PARAMS = 'aws_params'


class AiLlmEndpointParamsAws(BaseObject):
_discriminator = 'type', {'aws_params'}

def __init__(
self,
*,
type: AiLlmEndpointParamsAwsTypeField = AiLlmEndpointParamsAwsTypeField.AWS_PARAMS.value,
temperature: Optional[float] = None,
top_p: Optional[float] = None,
**kwargs
):
"""
:param type: The type of the AI LLM endpoint params object for AWS.
This parameter is **required**., defaults to AiLlmEndpointParamsAwsTypeField.AWS_PARAMS.value
:type type: AiLlmEndpointParamsAwsTypeField, optional
:param temperature: What sampling temperature to use, between 0 and 1. Higher values like 0.8 will make the output more random,
while lower values like 0.2 will make it more focused and deterministic.
We generally recommend altering this or `top_p` but not both., defaults to None
:type temperature: Optional[float], optional
:param top_p: An alternative to sampling with temperature, called nucleus sampling, where the model considers the results
of the tokens with `top_p` probability mass. So 0.1 means only the tokens comprising the top 10% probability
mass are considered. We generally recommend altering this or temperature but not both., defaults to None
:type top_p: Optional[float], optional
"""
super().__init__(**kwargs)
self.type = type
self.temperature = temperature
self.top_p = top_p
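The new schema mirrors its OpenAI and Google siblings: a `type` discriminator pinned to `aws_params`, plus optional `temperature` and `top_p` sampling controls, with the docstring recommending that only one of the two be adjusted. A minimal construction sketch using only what is defined in this file:

```python
from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import (
    AiLlmEndpointParamsAws,
    AiLlmEndpointParamsAwsTypeField,
)

# Tune temperature only and leave top_p unset, as the docstring suggests.
params = AiLlmEndpointParamsAws(temperature=0.2)

assert params.type == AiLlmEndpointParamsAwsTypeField.AWS_PARAMS.value  # 'aws_params'
assert params.top_p is None
```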
15 changes: 6 additions & 9 deletions box_sdk_gen/schemas/client_error.py
@@ -1,9 +1,11 @@
from enum import Enum

from box_sdk_gen.internal.base_object import BaseObject

from typing import Optional

from typing import Dict

from box_sdk_gen.internal.base_object import BaseObject


class ClientErrorTypeField(str, Enum):
ERROR = 'error'
@@ -29,11 +31,6 @@ class ClientErrorCodeField(str, Enum):
INSUFFICIENT_SCOPE = 'insufficient_scope'


class ClientErrorContextInfoField(BaseObject):
def __init__(self, **kwargs):
super().__init__(**kwargs)


class ClientError(BaseObject):
_discriminator = 'type', {'error'}

@@ -44,7 +41,7 @@ def __init__(
status: Optional[int] = None,
code: Optional[ClientErrorCodeField] = None,
message: Optional[str] = None,
context_info: Optional[ClientErrorContextInfoField] = None,
context_info: Optional[Dict] = None,
help_url: Optional[str] = None,
request_id: Optional[str] = None,
**kwargs
@@ -61,7 +58,7 @@ def __init__(
:param context_info: A free-form object that contains additional context
about the error. The possible fields are defined on
a per-endpoint basis. `message` is only one example., defaults to None
:type context_info: Optional[ClientErrorContextInfoField], optional
:type context_info: Optional[Dict], optional
:param help_url: A URL that links to more information about why this error occurred., defaults to None
:type help_url: Optional[str], optional
:param request_id: A unique identifier for this response, which can be used
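With `ClientErrorContextInfoField` removed, `context_info` is now a plain `Dict`, so callers read endpoint-specific keys off it directly instead of going through a generated wrapper class. A hedged sketch; the `conflicts` key is only an example of a per-endpoint field, not something this schema defines:

```python
from typing import Dict, Optional

from box_sdk_gen.schemas.client_error import ClientError


def conflict_item_ids(error: ClientError) -> list:
    # context_info is an ordinary dict (or None); no wrapper class is involved.
    context: Optional[Dict] = error.context_info
    return [item.get('id') for item in (context or {}).get('conflicts', [])]
```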
8 changes: 4 additions & 4 deletions box_sdk_gen/schemas/conflict_error.py
@@ -4,12 +4,12 @@

from box_sdk_gen.internal.base_object import BaseObject

from typing import Dict

from box_sdk_gen.schemas.client_error import ClientErrorTypeField

from box_sdk_gen.schemas.client_error import ClientErrorCodeField

from box_sdk_gen.schemas.client_error import ClientErrorContextInfoField

from box_sdk_gen.schemas.client_error import ClientError

from box_sdk_gen.schemas.file_conflict import FileConflict
@@ -33,7 +33,7 @@ def __init__(
status: Optional[int] = None,
code: Optional[ClientErrorCodeField] = None,
message: Optional[str] = None,
context_info: Optional[ClientErrorContextInfoField] = None,
context_info: Optional[Dict] = None,
help_url: Optional[str] = None,
request_id: Optional[str] = None,
**kwargs
@@ -50,7 +50,7 @@ def __init__(
:param context_info: A free-form object that contains additional context
about the error. The possible fields are defined on
a per-endpoint basis. `message` is only one example., defaults to None
:type context_info: Optional[ClientErrorContextInfoField], optional
:type context_info: Optional[Dict], optional
:param help_url: A URL that links to more information about why this error occurred., defaults to None
:type help_url: Optional[str], optional
:param request_id: A unique identifier for this response, which can be used