fix: Add missing fields to Sign Template Signer and fix AI schema (bo…
box-sdk-build authored Aug 22, 2024
1 parent 0074ee3 commit 31920e6
Showing 17 changed files with 355 additions and 140 deletions.
2 changes: 1 addition & 1 deletion .codegen.json
@@ -1 +1 @@
-{ "engineHash": "0b023a8", "specHash": "da821cd", "version": "0.3.0" }
+{ "engineHash": "6dfffba", "specHash": "57614c2", "version": "0.3.1" }
104 changes: 84 additions & 20 deletions BoxSdkGen.xcodeproj/project.pbxproj

Large diffs are not rendered by default.

10 changes: 5 additions & 5 deletions Sources/Schemas/AiAgentAsk/AiAgentAsk.swift
@@ -15,11 +15,11 @@ public class AiAgentAsk: Codable {

 public let longText: AiAgentLongTextTool?

-public let basicText: AiAgentBasicTextToolAsk?
+public let basicText: AiAgentBasicTextTool?

 public let longTextMulti: AiAgentLongTextTool?

-public let basicTextMulti: AiAgentBasicTextToolAsk?
+public let basicTextMulti: AiAgentBasicTextTool?

 /// Initializer for a AiAgentAsk.
 ///
@@ -29,7 +29,7 @@ public class AiAgentAsk: Codable {
 /// - basicText:
 /// - longTextMulti:
 /// - basicTextMulti:
-public init(type: AiAgentAskTypeField = AiAgentAskTypeField.aiAgentAsk, longText: AiAgentLongTextTool? = nil, basicText: AiAgentBasicTextToolAsk? = nil, longTextMulti: AiAgentLongTextTool? = nil, basicTextMulti: AiAgentBasicTextToolAsk? = nil) {
+public init(type: AiAgentAskTypeField = AiAgentAskTypeField.aiAgentAsk, longText: AiAgentLongTextTool? = nil, basicText: AiAgentBasicTextTool? = nil, longTextMulti: AiAgentLongTextTool? = nil, basicTextMulti: AiAgentBasicTextTool? = nil) {
 self.type = type
 self.longText = longText
 self.basicText = basicText
@@ -41,9 +41,9 @@ public class AiAgentAsk: Codable {
 let container = try decoder.container(keyedBy: CodingKeys.self)
 type = try container.decode(AiAgentAskTypeField.self, forKey: .type)
 longText = try container.decodeIfPresent(AiAgentLongTextTool.self, forKey: .longText)
-basicText = try container.decodeIfPresent(AiAgentBasicTextToolAsk.self, forKey: .basicText)
+basicText = try container.decodeIfPresent(AiAgentBasicTextTool.self, forKey: .basicText)
 longTextMulti = try container.decodeIfPresent(AiAgentLongTextTool.self, forKey: .longTextMulti)
-basicTextMulti = try container.decodeIfPresent(AiAgentBasicTextToolAsk.self, forKey: .basicTextMulti)
+basicTextMulti = try container.decodeIfPresent(AiAgentBasicTextTool.self, forKey: .basicTextMulti)
 }

 public func encode(to encoder: Encoder) throws {
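The schema change above only swaps the declared types of the ask overrides, so most call sites keep the same labels. A minimal sketch of building an ask agent override against the new types — the module name BoxSdkGen and the model identifier are assumptions for illustration, not taken from this diff:

    import BoxSdkGen  // assumed module name for the generated SDK

    // Override only the basic-text tools; every other field keeps its default.
    let basicText = AiAgentBasicTextTool(
        model: "azure__openai__gpt_4o_mini",  // placeholder model identifier
        systemMessage: "Answer strictly from the provided file content."
    )
    let askAgent = AiAgentAsk(
        basicText: basicText,
        basicTextMulti: basicText
    )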
20 changes: 10 additions & 10 deletions Sources/Schemas/AiAgentBasicGenTool/AiAgentBasicGenTool.swift
@@ -1,34 +1,34 @@
 import Foundation

 /// AI agent basic tool used to generate text.
-public class AiAgentBasicGenTool: AiAgentLongTextTool {
+public class AiAgentBasicGenTool: AiAgentLongTextToolTextGen {
 private enum CodingKeys: String, CodingKey {
 case contentTemplate = "content_template"
 }

 /// How the content should be included in a request to the LLM.
 /// When passing this parameter, you must include `{content}`.
+/// Input for `{content}` is optional, depending on the use.
 public let contentTemplate: String?

 /// Initializer for a AiAgentBasicGenTool.
 ///
 /// - Parameters:
-/// - model: The model to be used for the AI Agent for basic text.
+/// - model: The model used for the AI Agent for basic text.
+/// - numTokensForCompletion: The number of tokens for completion.
+/// - llmEndpointParams: The parameters for the LLM endpoint specific to OpenAI / Google models.
 /// - systemMessage: System messages try to help the LLM "understand" its role and what it is supposed to do.
 /// This parameter requires using `{current_date}`.
+/// Input for `{current_date}` is optional, depending on the use.
 /// - promptTemplate: The prompt template contains contextual information of the request and the user prompt.
 ///
 /// When using the `prompt_template` parameter, you **must include** input for `{user_question}`.
-/// Inputs for `{current_date}` and`{content}` are optional, depending on the use.
-/// - numTokensForCompletion: The number of tokens for completion.
-/// - llmEndpointParams:
+/// Inputs for `{current_date}` and `{content}` are optional, depending on the use.
 /// - embeddings:
 /// - contentTemplate: How the content should be included in a request to the LLM.
 /// When passing this parameter, you must include `{content}`.
-public init(model: String? = nil, systemMessage: String? = nil, promptTemplate: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil, embeddings: AiAgentLongTextToolEmbeddingsField? = nil, contentTemplate: String? = nil) {
+/// Input for `{content}` is optional, depending on the use.
+public init(model: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil, systemMessage: String? = nil, promptTemplate: String? = nil, embeddings: AiAgentLongTextToolTextGenEmbeddingsField? = nil, contentTemplate: String? = nil) {
 self.contentTemplate = contentTemplate

-super.init(model: model, systemMessage: systemMessage, promptTemplate: promptTemplate, numTokensForCompletion: numTokensForCompletion, llmEndpointParams: llmEndpointParams, embeddings: embeddings)
+super.init(model: model, numTokensForCompletion: numTokensForCompletion, llmEndpointParams: llmEndpointParams, systemMessage: systemMessage, promptTemplate: promptTemplate, embeddings: embeddings)
 }

 required public init(from decoder: Decoder) throws {
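Because Swift resolves labeled arguments in declaration order, call sites that spell out several parameters of this initializer may need reordering after the update, and `embeddings` now expects the text-gen variant of the embeddings field. A hedged sketch of a call under the new signature (the model string and token count are placeholders, and the SDK import from the earlier sketch is assumed):

    // New label order: model, numTokensForCompletion, llmEndpointParams,
    // systemMessage, promptTemplate, embeddings, contentTemplate.
    let genTool = AiAgentBasicGenTool(
        model: "azure__openai__gpt_4o_mini",
        numTokensForCompletion: 8400,
        systemMessage: "Today's date is {current_date}.",
        promptTemplate: "Answer {user_question}.",
        contentTemplate: "{content}"
    )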
54 changes: 54 additions & 0 deletions Sources/Schemas/AiAgentBasicTextTool/AiAgentBasicTextTool.swift
@@ -0,0 +1,54 @@
import Foundation

/// AI agent tool used to handle basic text.
public class AiAgentBasicTextTool: AiAgentBasicTextToolBase {
private enum CodingKeys: String, CodingKey {
case systemMessage = "system_message"
case promptTemplate = "prompt_template"
}

/// System messages try to help the LLM "understand" its role and what it is supposed to do.
public let systemMessage: String?

/// The prompt template contains contextual information of the request and the user prompt.
///
/// When passing `prompt_template` parameters, you **must include** inputs for `{user_question}` and `{content}`.
///
/// Input for `{current_date}` is optional, depending on the use.
public let promptTemplate: String?

/// Initializer for a AiAgentBasicTextTool.
///
/// - Parameters:
/// - model: The model used for the AI Agent for basic text.
/// - numTokensForCompletion: The number of tokens for completion.
/// - llmEndpointParams: The parameters for the LLM endpoint specific to OpenAI / Google models.
/// - systemMessage: System messages try to help the LLM "understand" its role and what it is supposed to do.
/// - promptTemplate: The prompt template contains contextual information of the request and the user prompt.
///
/// When passing `prompt_template` parameters, you **must include** inputs for `{user_question}` and `{content}`.
///
/// Input for `{current_date}` is optional, depending on the use.
public init(model: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil, systemMessage: String? = nil, promptTemplate: String? = nil) {
self.systemMessage = systemMessage
self.promptTemplate = promptTemplate

super.init(model: model, numTokensForCompletion: numTokensForCompletion, llmEndpointParams: llmEndpointParams)
}

required public init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
systemMessage = try container.decodeIfPresent(String.self, forKey: .systemMessage)
promptTemplate = try container.decodeIfPresent(String.self, forKey: .promptTemplate)

try super.init(from: decoder)
}

public override func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: CodingKeys.self)
try container.encodeIfPresent(systemMessage, forKey: .systemMessage)
try container.encodeIfPresent(promptTemplate, forKey: .promptTemplate)
try super.encode(to: encoder)
}

}
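The new class keeps the generated Codable conformance, so it decodes directly from the snake_case JSON used by the API. A small sketch with illustrative field values only (the SDK import is assumed as before):

    import Foundation

    let json = Data("""
    {
        "model": "azure__openai__gpt_4o_mini",
        "num_tokens_for_completion": 8400,
        "system_message": "You are a helpful assistant.",
        "prompt_template": "Answer {user_question} using {content}."
    }
    """.utf8)

    do {
        // The base class decodes model / num_tokens_for_completion / llm_endpoint_params;
        // the subclass adds system_message and prompt_template.
        let tool = try JSONDecoder().decode(AiAgentBasicTextTool.self, from: json)
        print(tool.promptTemplate ?? "no template")
    } catch {
        print("decoding failed: \(error)")
    }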
@@ -1,64 +1,44 @@
 import Foundation

 /// AI agent tool used to handle basic text.
-public class AiAgentBasicTextToolAsk: Codable {
+public class AiAgentBasicTextToolBase: Codable {
 private enum CodingKeys: String, CodingKey {
 case model
-case systemMessage = "system_message"
-case promptTemplate = "prompt_template"
 case numTokensForCompletion = "num_tokens_for_completion"
 case llmEndpointParams = "llm_endpoint_params"
 }

 /// The model used for the AI Agent for basic text.
 public let model: String?

-/// System messages try to help the LLM "understand" its role and what it is supposed to do.
-public let systemMessage: String?

-/// The prompt template contains contextual information of the request and the user prompt.
-///
-/// When passing `prompt_template` parameters, you **must include** inputs for `{current_date}`, `{user_question}`, and `{content}`.
-public let promptTemplate: String?

 /// The number of tokens for completion.
 public let numTokensForCompletion: Int64?

 /// The parameters for the LLM endpoint specific to OpenAI / Google models.
 public let llmEndpointParams: AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi?

-/// Initializer for a AiAgentBasicTextToolAsk.
+/// Initializer for a AiAgentBasicTextToolBase.
 ///
 /// - Parameters:
 /// - model: The model used for the AI Agent for basic text.
-/// - systemMessage: System messages try to help the LLM "understand" its role and what it is supposed to do.
-/// - promptTemplate: The prompt template contains contextual information of the request and the user prompt.
-///
-/// When passing `prompt_template` parameters, you **must include** inputs for `{current_date}`, `{user_question}`, and `{content}`.
 /// - numTokensForCompletion: The number of tokens for completion.
 /// - llmEndpointParams: The parameters for the LLM endpoint specific to OpenAI / Google models.
-public init(model: String? = nil, systemMessage: String? = nil, promptTemplate: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil) {
+public init(model: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil) {
 self.model = model
-self.systemMessage = systemMessage
-self.promptTemplate = promptTemplate
 self.numTokensForCompletion = numTokensForCompletion
 self.llmEndpointParams = llmEndpointParams
 }

 required public init(from decoder: Decoder) throws {
 let container = try decoder.container(keyedBy: CodingKeys.self)
 model = try container.decodeIfPresent(String.self, forKey: .model)
-systemMessage = try container.decodeIfPresent(String.self, forKey: .systemMessage)
-promptTemplate = try container.decodeIfPresent(String.self, forKey: .promptTemplate)
 numTokensForCompletion = try container.decodeIfPresent(Int64.self, forKey: .numTokensForCompletion)
 llmEndpointParams = try container.decodeIfPresent(AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.self, forKey: .llmEndpointParams)
 }

 public func encode(to encoder: Encoder) throws {
 var container = encoder.container(keyedBy: CodingKeys.self)
 try container.encodeIfPresent(model, forKey: .model)
-try container.encodeIfPresent(systemMessage, forKey: .systemMessage)
-try container.encodeIfPresent(promptTemplate, forKey: .promptTemplate)
 try container.encodeIfPresent(numTokensForCompletion, forKey: .numTokensForCompletion)
 try container.encodeIfPresent(llmEndpointParams, forKey: .llmEndpointParams)
 }
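After the rename, the base class carries only the fields shared by every basic-text variant; the prompt-related fields move into the subclasses shown elsewhere in this commit. A quick sketch of what the base alone serializes (values are placeholders, JSON key order may vary, and the SDK import is assumed as before):

    import Foundation

    let base = AiAgentBasicTextToolBase(
        model: "azure__openai__gpt_4o_mini",
        numTokensForCompletion: 8400
    )
    if let data = try? JSONEncoder().encode(base) {
        // Emits only "model" and "num_tokens_for_completion" here,
        // since llmEndpointParams was left nil.
        print(String(decoding: data, as: UTF8.self))
    }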
@@ -1,69 +1,54 @@
 import Foundation

 /// AI agent tool used to handle basic text.
-public class AiAgentBasicTextToolTextGen: Codable {
+public class AiAgentBasicTextToolTextGen: AiAgentBasicTextToolBase {
 private enum CodingKeys: String, CodingKey {
-case model
 case systemMessage = "system_message"
 case promptTemplate = "prompt_template"
-case numTokensForCompletion = "num_tokens_for_completion"
-case llmEndpointParams = "llm_endpoint_params"
 }

-/// The model to be used for the AI Agent for basic text.
-public let model: String?

 /// System messages try to help the LLM "understand" its role and what it is supposed to do.
 /// This parameter requires using `{current_date}`.
+/// Input for `{current_date}` is optional, depending on the use.
 public let systemMessage: String?

 /// The prompt template contains contextual information of the request and the user prompt.
 ///
 /// When using the `prompt_template` parameter, you **must include** input for `{user_question}`.
-/// Inputs for `{current_date}` and`{content}` are optional, depending on the use.
+/// Inputs for `{current_date}` and `{content}` are optional, depending on the use.
 public let promptTemplate: String?

-/// The number of tokens for completion.
-public let numTokensForCompletion: Int64?

-public let llmEndpointParams: AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi?

 /// Initializer for a AiAgentBasicTextToolTextGen.
 ///
 /// - Parameters:
-/// - model: The model to be used for the AI Agent for basic text.
+/// - model: The model used for the AI Agent for basic text.
+/// - numTokensForCompletion: The number of tokens for completion.
+/// - llmEndpointParams: The parameters for the LLM endpoint specific to OpenAI / Google models.
 /// - systemMessage: System messages try to help the LLM "understand" its role and what it is supposed to do.
 /// This parameter requires using `{current_date}`.
+/// Input for `{current_date}` is optional, depending on the use.
 /// - promptTemplate: The prompt template contains contextual information of the request and the user prompt.
 ///
 /// When using the `prompt_template` parameter, you **must include** input for `{user_question}`.
-/// Inputs for `{current_date}` and`{content}` are optional, depending on the use.
-/// - numTokensForCompletion: The number of tokens for completion.
-/// - llmEndpointParams:
-public init(model: String? = nil, systemMessage: String? = nil, promptTemplate: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil) {
-self.model = model
+/// Inputs for `{current_date}` and `{content}` are optional, depending on the use.
+public init(model: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil, systemMessage: String? = nil, promptTemplate: String? = nil) {
 self.systemMessage = systemMessage
 self.promptTemplate = promptTemplate
-self.numTokensForCompletion = numTokensForCompletion
-self.llmEndpointParams = llmEndpointParams

+super.init(model: model, numTokensForCompletion: numTokensForCompletion, llmEndpointParams: llmEndpointParams)
 }

 required public init(from decoder: Decoder) throws {
 let container = try decoder.container(keyedBy: CodingKeys.self)
-model = try container.decodeIfPresent(String.self, forKey: .model)
 systemMessage = try container.decodeIfPresent(String.self, forKey: .systemMessage)
 promptTemplate = try container.decodeIfPresent(String.self, forKey: .promptTemplate)
-numTokensForCompletion = try container.decodeIfPresent(Int64.self, forKey: .numTokensForCompletion)
-llmEndpointParams = try container.decodeIfPresent(AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.self, forKey: .llmEndpointParams)

+try super.init(from: decoder)
 }

-public func encode(to encoder: Encoder) throws {
+public override func encode(to encoder: Encoder) throws {
 var container = encoder.container(keyedBy: CodingKeys.self)
-try container.encodeIfPresent(model, forKey: .model)
 try container.encodeIfPresent(systemMessage, forKey: .systemMessage)
 try container.encodeIfPresent(promptTemplate, forKey: .promptTemplate)
-try container.encodeIfPresent(numTokensForCompletion, forKey: .numTokensForCompletion)
-try container.encodeIfPresent(llmEndpointParams, forKey: .llmEndpointParams)
+try super.encode(to: encoder)
 }

 }
18 changes: 9 additions & 9 deletions Sources/Schemas/AiAgentLongTextTool/AiAgentLongTextTool.swift
@@ -1,7 +1,7 @@
 import Foundation

 /// AI agent tool used to to handle longer text.
-public class AiAgentLongTextTool: AiAgentBasicTextToolTextGen {
+public class AiAgentLongTextTool: AiAgentBasicTextTool {
 private enum CodingKeys: String, CodingKey {
 case embeddings
 }
@@ -11,20 +11,20 @@ public class AiAgentLongTextTool: AiAgentBasicTextToolTextGen {
 /// Initializer for a AiAgentLongTextTool.
 ///
 /// - Parameters:
-/// - model: The model to be used for the AI Agent for basic text.
+/// - model: The model used for the AI Agent for basic text.
+/// - numTokensForCompletion: The number of tokens for completion.
+/// - llmEndpointParams: The parameters for the LLM endpoint specific to OpenAI / Google models.
 /// - systemMessage: System messages try to help the LLM "understand" its role and what it is supposed to do.
-/// This parameter requires using `{current_date}`.
 /// - promptTemplate: The prompt template contains contextual information of the request and the user prompt.
 ///
-/// When using the `prompt_template` parameter, you **must include** input for `{user_question}`.
-/// Inputs for `{current_date}` and`{content}` are optional, depending on the use.
-/// - numTokensForCompletion: The number of tokens for completion.
-/// - llmEndpointParams:
+/// When passing `prompt_template` parameters, you **must include** inputs for `{user_question}` and `{content}`.
+///
+/// Input for `{current_date}` is optional, depending on the use.
 /// - embeddings:
-public init(model: String? = nil, systemMessage: String? = nil, promptTemplate: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil, embeddings: AiAgentLongTextToolEmbeddingsField? = nil) {
+public init(model: String? = nil, numTokensForCompletion: Int64? = nil, llmEndpointParams: AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi? = nil, systemMessage: String? = nil, promptTemplate: String? = nil, embeddings: AiAgentLongTextToolEmbeddingsField? = nil) {
 self.embeddings = embeddings

-super.init(model: model, systemMessage: systemMessage, promptTemplate: promptTemplate, numTokensForCompletion: numTokensForCompletion, llmEndpointParams: llmEndpointParams)
+super.init(model: model, numTokensForCompletion: numTokensForCompletion, llmEndpointParams: llmEndpointParams, systemMessage: systemMessage, promptTemplate: promptTemplate)
 }

 required public init(from decoder: Decoder) throws {
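With AiAgentLongTextTool now deriving from AiAgentBasicTextTool (through AiAgentBasicTextToolBase) rather than the text-gen variant, a long-text override can be passed wherever the ask schema expects a basic-text tool. A minimal sketch of that relationship, under the same assumed import:

    let longText = AiAgentLongTextTool(systemMessage: "Summarize the attached document.")
    let asBasicText: AiAgentBasicTextTool = longText  // upcast allowed by the new hierarchy
    print(asBasicText.systemMessage ?? "")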