Skip to content

Commit

Permalink
feat: add AI LLM endpoint AWS params (box/box-openapi#478) (#267)
Browse files Browse the repository at this point in the history
Co-authored-by: box-sdk-build <box-sdk-build@box.com>
  • Loading branch information
box-sdk-build and box-sdk-build authored Oct 29, 2024
1 parent 4bcf843 commit 36ee37d
Show file tree
Hide file tree
Showing 11 changed files with 134 additions and 32 deletions.
2 changes: 1 addition & 1 deletion .codegen.json
Original file line number Diff line number Diff line change
@@ -1 +1 @@
{ "engineHash": "2efc8ab", "specHash": "e798cb1", "version": "0.5.0" }
{ "engineHash": "2efc8ab", "specHash": "90cf4e4", "version": "0.5.0" }
72 changes: 52 additions & 20 deletions BoxSdkGen.xcodeproj/project.pbxproj

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ public class AiAgentBasicGenTool: AiAgentLongTextToolTextGen {
/// - embeddings:
/// - contentTemplate: How the content should be included in a request to the LLM.
/// Input for `{content}` is optional, depending on the use.
public init(model: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil, systemMessage: String? = nil, promptTemplate: String? = nil, embeddings: AiAgentLongTextToolTextGenEmbeddingsField? = nil, contentTemplate: String? = nil) {
public init(model: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil, systemMessage: String? = nil, promptTemplate: String? = nil, embeddings: AiAgentLongTextToolTextGenEmbeddingsField? = nil, contentTemplate: String? = nil) {
self.contentTemplate = contentTemplate

super.init(model: model, numTokensForCompletion: numTokensForCompletion, llmEndpointParams: llmEndpointParams, systemMessage: systemMessage, promptTemplate: promptTemplate, embeddings: embeddings)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ public class AiAgentBasicTextTool: AiAgentBasicTextToolBase {
/// - promptTemplate: The prompt template contains contextual information of the request and the user prompt.
/// When passing `prompt_template` parameters, you **must include** inputs for `{user_question}` and `{content}`.
/// `{current_date}` is optional, depending on the use.
public init(model: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil, systemMessage: String? = nil, promptTemplate: String? = nil) {
public init(model: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil, systemMessage: String? = nil, promptTemplate: String? = nil) {
self.systemMessage = systemMessage
self.promptTemplate = promptTemplate

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,15 +15,15 @@ public class AiAgentBasicTextToolBase: Codable {
public let numTokensForCompletion: Int64?

/// The parameters for the LLM endpoint specific to OpenAI, Google, or AWS models.
public let llmEndpointParams: AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi?
public let llmEndpointParams: AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi?

/// Initializer for a AiAgentBasicTextToolBase.
///
/// - Parameters:
/// - model: The model used for the AI agent for basic text. For specific model values, see the [available models list](g://box-ai/supported-models).
/// - numTokensForCompletion: The number of tokens for completion.
/// - llmEndpointParams: The parameters for the LLM endpoint specific to OpenAI, Google, or AWS models.
public init(model: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil) {
public init(model: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil) {
self.model = model
self.numTokensForCompletion = numTokensForCompletion
self.llmEndpointParams = llmEndpointParams
Expand All @@ -33,7 +33,7 @@ public class AiAgentBasicTextToolBase: Codable {
let container = try decoder.container(keyedBy: CodingKeys.self)
model = try container.decodeIfPresent(String.self, forKey: .model)
numTokensForCompletion = try container.decodeIfPresent(Int64.self, forKey: .numTokensForCompletion)
llmEndpointParams = try container.decodeIfPresent(AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.self, forKey: .llmEndpointParams)
llmEndpointParams = try container.decodeIfPresent(AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.self, forKey: .llmEndpointParams)
}

public func encode(to encoder: Encoder) throws {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ public class AiAgentBasicTextToolTextGen: AiAgentBasicTextToolBase {
///
/// When using the `prompt_template` parameter, you **must include** input for `{user_question}`.
/// Inputs for `{current_date}` and `{content}` are optional, depending on the use.
public init(model: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil, systemMessage: String? = nil, promptTemplate: String? = nil) {
public init(model: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil, systemMessage: String? = nil, promptTemplate: String? = nil) {
self.systemMessage = systemMessage
self.promptTemplate = promptTemplate

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ public class AiAgentLongTextTool: AiAgentBasicTextTool {
/// When passing `prompt_template` parameters, you **must include** inputs for `{user_question}` and `{content}`.
/// `{current_date}` is optional, depending on the use.
/// - embeddings:
public init(model: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil, systemMessage: String? = nil, promptTemplate: String? = nil, embeddings: AiAgentLongTextToolEmbeddingsField? = nil) {
public init(model: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil, systemMessage: String? = nil, promptTemplate: String? = nil, embeddings: AiAgentLongTextToolEmbeddingsField? = nil) {
self.embeddings = embeddings

super.init(model: model, numTokensForCompletion: numTokensForCompletion, llmEndpointParams: llmEndpointParams, systemMessage: systemMessage, promptTemplate: promptTemplate)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ public class AiAgentLongTextToolTextGen: AiAgentBasicTextToolTextGen {
/// When using the `prompt_template` parameter, you **must include** input for `{user_question}`.
/// Inputs for `{current_date}` and `{content}` are optional, depending on the use.
/// - embeddings:
public init(model: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil, systemMessage: String? = nil, promptTemplate: String? = nil, embeddings: AiAgentLongTextToolTextGenEmbeddingsField? = nil) {
public init(model: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil, systemMessage: String? = nil, promptTemplate: String? = nil, embeddings: AiAgentLongTextToolTextGenEmbeddingsField? = nil) {
self.embeddings = embeddings

super.init(model: model, numTokensForCompletion: numTokensForCompletion, llmEndpointParams: llmEndpointParams, systemMessage: systemMessage, promptTemplate: promptTemplate)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
import Foundation

/// AI LLM endpoint params AWS object
public class AiLlmEndpointParamsAws: Codable {
    private enum CodingKeys: String, CodingKey {
        case type
        case temperature
        case topP = "top_p"
    }

    /// The type of the AI LLM endpoint params object for AWS.
    /// This parameter is **required**.
    public let type: AiLlmEndpointParamsAwsTypeField

    /// What sampling temperature to use, between 0 and 1. Higher values like 0.8 will make the output more random,
    /// while lower values like 0.2 will make it more focused and deterministic.
    /// We generally recommend altering this or `top_p` but not both.
    public let temperature: Double?

    /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results
    /// of the tokens with `top_p` probability mass. So 0.1 means only the tokens comprising the top 10% probability
    /// mass are considered. We generally recommend altering this or temperature but not both.
    public let topP: Double?

    /// Initializer for a AiLlmEndpointParamsAws.
    ///
    /// - Parameters:
    ///   - type: The type of the AI LLM endpoint params object for AWS.
    ///     This parameter is **required**.
    ///   - temperature: What sampling temperature to use, between 0 and 1. Higher values like 0.8 will make the output more random,
    ///     while lower values like 0.2 will make it more focused and deterministic.
    ///     We generally recommend altering this or `top_p` but not both.
    ///   - topP: An alternative to sampling with temperature, called nucleus sampling, where the model considers the results
    ///     of the tokens with `top_p` probability mass. So 0.1 means only the tokens comprising the top 10% probability
    ///     mass are considered. We generally recommend altering this or temperature but not both.
    public init(type: AiLlmEndpointParamsAwsTypeField = AiLlmEndpointParamsAwsTypeField.awsParams, temperature: Double? = nil, topP: Double? = nil) {
        self.topP = topP
        self.temperature = temperature
        self.type = type
    }

    required public init(from decoder: Decoder) throws {
        // Decode from the keyed representation; optional fields may be absent.
        let values = try decoder.container(keyedBy: CodingKeys.self)
        self.type = try values.decode(AiLlmEndpointParamsAwsTypeField.self, forKey: .type)
        self.temperature = try values.decodeIfPresent(Double.self, forKey: .temperature)
        self.topP = try values.decodeIfPresent(Double.self, forKey: .topP)
    }

    public func encode(to encoder: Encoder) throws {
        // Emit only the keys that carry values; `type` is always present.
        var values = encoder.container(keyedBy: CodingKeys.self)
        try values.encode(type, forKey: .type)
        try values.encodeIfPresent(temperature, forKey: .temperature)
        try values.encodeIfPresent(topP, forKey: .topP)
    }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
import Foundation

/// Discriminator value carried in the `type` field of an AWS LLM endpoint params payload.
/// The single case maps to the wire string `"aws_params"`.
/// NOTE(review): `CodableStringEnum` is a project-defined protocol — presumably a
/// raw-string Codable helper; confirm its semantics in the SDK sources.
public enum AiLlmEndpointParamsAwsTypeField: String, CodableStringEnum {
    case awsParams = "aws_params"
}
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import Foundation

public enum AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi: Codable {
public enum AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi: Codable {
case aiLlmEndpointParamsAws(AiLlmEndpointParamsAws)
case aiLlmEndpointParamsGoogle(AiLlmEndpointParamsGoogle)
case aiLlmEndpointParamsOpenAi(AiLlmEndpointParamsOpenAi)

Expand All @@ -12,6 +13,12 @@ public enum AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi: Codable {
if let container = try? decoder.container(keyedBy: DiscriminatorCodingKey.self) {
if let discriminator_0 = try? container.decode(String.self, forKey: .type) {
switch discriminator_0 {
case "aws_params":
if let content = try? AiLlmEndpointParamsAws(from: decoder) {
self = .aiLlmEndpointParamsAws(content)
return
}

case "google_params":
if let content = try? AiLlmEndpointParamsGoogle(from: decoder) {
self = .aiLlmEndpointParamsGoogle(content)
Expand All @@ -25,19 +32,21 @@ public enum AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi: Codable {
}

default:
throw DecodingError.typeMismatch(AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.self, DecodingError.Context(codingPath: decoder.codingPath, debugDescription: "The Decoded object contains an unexpected value for key type"))
throw DecodingError.typeMismatch(AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.self, DecodingError.Context(codingPath: decoder.codingPath, debugDescription: "The Decoded object contains an unexpected value for key type"))

}
}

}

throw DecodingError.typeMismatch(AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.self, DecodingError.Context(codingPath: decoder.codingPath, debugDescription: "The type of the decoded object cannot be determined."))
throw DecodingError.typeMismatch(AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.self, DecodingError.Context(codingPath: decoder.codingPath, debugDescription: "The type of the decoded object cannot be determined."))

}

public func encode(to encoder: Encoder) throws {
switch self {
case .aiLlmEndpointParamsAws(let aiLlmEndpointParamsAws):
try aiLlmEndpointParamsAws.encode(to: encoder)
case .aiLlmEndpointParamsGoogle(let aiLlmEndpointParamsGoogle):
try aiLlmEndpointParamsGoogle.encode(to: encoder)
case .aiLlmEndpointParamsOpenAi(let aiLlmEndpointParamsOpenAi):
Expand Down

0 comments on commit 36ee37d

Please sign in to comment.