feat: Add hubs support to /ai/ask (box/box-codegen#656) (#364)
box-sdk-build authored Feb 5, 2025
1 parent 3509566 commit 9a49864
Showing 8 changed files with 104 additions and 16 deletions.
2 changes: 1 addition & 1 deletion .codegen.json
@@ -1 +1 @@
{ "engineHash": "7874ac3", "specHash": "1fdcbef", "version": "0.6.0" }
{ "engineHash": "a88aabb", "specHash": "59747aa", "version": "0.6.0" }
32 changes: 32 additions & 0 deletions BoxSdkGen.xcodeproj/project.pbxproj

Large diffs are not rendered by default.

10 changes: 7 additions & 3 deletions Sources/Managers/Ai/AiManager.swift
@@ -15,12 +15,16 @@ public class AiManager {
/// - Parameters:
/// - requestBody: Request body of createAiAsk method
/// - headers: Headers of createAiAsk method
/// - Returns: The `AiResponseFull`.
/// - Returns: The `AiResponseFull?`.
/// - Throws: The `GeneralError`.
public func createAiAsk(requestBody: AiAsk, headers: CreateAiAskHeaders = CreateAiAskHeaders()) async throws -> AiResponseFull {
public func createAiAsk(requestBody: AiAsk, headers: CreateAiAskHeaders = CreateAiAskHeaders()) async throws -> AiResponseFull? {
let headersMap: [String: String] = Utils.Dictionary.prepareParams(map: Utils.Dictionary.merge([:], headers.extraHeaders))
let response: FetchResponse = try await self.networkSession.networkClient.fetch(options: FetchOptions(url: "\(self.networkSession.baseUrls.baseUrl)\("/2.0/ai/ask")", method: "POST", headers: headersMap, data: try requestBody.serialize(), contentType: "application/json", responseFormat: "json", auth: self.auth, networkSession: self.networkSession))
return try AiResponseFull.deserialize(from: response.data)
if Utils.Strings.toString(value: response.status) == "204" {
return nil
}

return try AiResponseFull.deserialize(from: response.data)
}

/// Sends an AI request to supported Large Language Models (LLMs) and returns generated text based on the provided prompt.
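With this change, `createAiAsk` returns `AiResponseFull?` and yields `nil` when the API responds with a 204. A minimal sketch of how a caller might adapt, assuming the standard `BoxSdkGen` client setup (the developer token, file ID, and prompt are illustrative):

```
import BoxSdkGen

// Minimal sketch: handle the optional return introduced by this commit.
// The developer token and file ID are placeholders.
func askAboutFile(fileId: String) async throws {
    let auth = BoxDeveloperTokenAuth(token: "DEVELOPER_TOKEN")
    let client = BoxClient(auth: auth)

    let request = AiAsk(
        mode: AiAskModeField.singleItemQa,
        prompt: "Summarize this document",
        items: [AiItemAsk(id: fileId, type: AiItemAskTypeField.file)]
    )

    // A 204 response (for example, a hub whose content is not indexed) surfaces as nil.
    guard let response = try await client.ai.createAiAsk(requestBody: request) else {
        print("No content was available to answer the question.")
        return
    }
    print(response.answer)
}
```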
6 changes: 3 additions & 3 deletions Sources/Schemas/AiAsk/AiAsk.swift
@@ -22,7 +22,7 @@ public class AiAsk: Codable {
/// **Note**: Box AI handles documents with text representations up to 1MB in size, or a maximum of 25 files, whichever comes first.
/// If the file size exceeds 1MB, the first 1MB of text representation will be processed.
/// If you set `mode` parameter to `single_item_qa`, the `items` array can have one element only.
public let items: [AiItemBase]
public let items: [AiItemAsk]

/// The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response.
public let dialogueHistory: [AiDialogueHistory]?
@@ -45,7 +45,7 @@
/// - dialogueHistory: The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response.
/// - includeCitations: A flag to indicate whether citations should be returned.
/// - aiAgent:
public init(mode: AiAskModeField, prompt: String, items: [AiItemBase], dialogueHistory: [AiDialogueHistory]? = nil, includeCitations: Bool? = nil, aiAgent: AiAgentAsk? = nil) {
public init(mode: AiAskModeField, prompt: String, items: [AiItemAsk], dialogueHistory: [AiDialogueHistory]? = nil, includeCitations: Bool? = nil, aiAgent: AiAgentAsk? = nil) {
self.mode = mode
self.prompt = prompt
self.items = items
@@ -58,7 +58,7 @@
let container = try decoder.container(keyedBy: CodingKeys.self)
mode = try container.decode(AiAskModeField.self, forKey: .mode)
prompt = try container.decode(String.self, forKey: .prompt)
items = try container.decode([AiItemBase].self, forKey: .items)
items = try container.decode([AiItemAsk].self, forKey: .items)
dialogueHistory = try container.decodeIfPresent([AiDialogueHistory].self, forKey: .dialogueHistory)
includeCitations = try container.decodeIfPresent(Bool.self, forKey: .includeCitations)
aiAgent = try container.decodeIfPresent(AiAgentAsk.self, forKey: .aiAgent)
46 changes: 46 additions & 0 deletions Sources/Schemas/AiItemAsk/AiItemAsk.swift
@@ -0,0 +1,46 @@
import Foundation

/// The item to be processed by the LLM for ask requests.
public class AiItemAsk: Codable {
private enum CodingKeys: String, CodingKey {
case id
case type
case content
}

/// The ID of the file.
public let id: String

/// The type of the item. A `hubs` item must be used as a single item.
public let type: AiItemAskTypeField

/// The content of the item, often the text representation.
public let content: String?

/// Initializer for an AiItemAsk.
///
/// - Parameters:
/// - id: The ID of the file.
/// - type: The type of the item. A `hubs` item must be used as a single item.
/// - content: The content of the item, often the text representation.
public init(id: String, type: AiItemAskTypeField, content: String? = nil) {
self.id = id
self.type = type
self.content = content
}

required public init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
id = try container.decode(String.self, forKey: .id)
type = try container.decode(AiItemAskTypeField.self, forKey: .type)
content = try container.decodeIfPresent(String.self, forKey: .content)
}

public func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: CodingKeys.self)
try container.encode(id, forKey: .id)
try container.encode(type, forKey: .type)
try container.encodeIfPresent(content, forKey: .content)
}

}
6 changes: 6 additions & 0 deletions Sources/Schemas/AiItemAsk/AiItemAskTypeField.swift
@@ -0,0 +1,6 @@
import Foundation

public enum AiItemAskTypeField: String, CodableStringEnum {
case file
case hubs
}
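The new `AiItemAskTypeField` adds a `hubs` case alongside `file`; per the schema above, a `hubs` item must be sent as the only element of `items`. As an illustrative sketch of the generated Codable conformance (the hub ID is a placeholder):

```
import Foundation
import BoxSdkGen

// Illustrative sketch: serializing a hubs item with the Codable conformance
// defined above. The hub ID is made up; `content` is omitted when nil.
let item = AiItemAsk(id: "987654321", type: AiItemAskTypeField.hubs)
if let data = try? JSONEncoder().encode(item),
   let json = String(data: data, encoding: .utf8) {
    print(json) // roughly {"id":"987654321","type":"hubs"}
}
```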
12 changes: 6 additions & 6 deletions Tests/Ai/AiManagerTests.swift
@@ -11,18 +11,18 @@ class AiManagerTests: XCTestCase {

public func testAskAiSingleItem() async throws {
let fileToAsk: FileFull = try await CommonsManager().uploadNewFile()
let response: AiResponseFull = try await client.ai.createAiAsk(requestBody: AiAsk(mode: AiAskModeField.singleItemQa, prompt: "which direction sun rises", items: [AiItemBase(id: fileToAsk.id, type: AiItemBaseTypeField.file, content: "Sun rises in the East")]))
XCTAssertTrue(response.answer.contains("East"))
XCTAssertTrue(response.completionReason == "done")
let response: AiResponseFull? = try await client.ai.createAiAsk(requestBody: AiAsk(mode: AiAskModeField.singleItemQa, prompt: "which direction sun rises", items: [AiItemAsk(id: fileToAsk.id, type: AiItemAskTypeField.file, content: "Sun rises in the East")]))
XCTAssertTrue(response!.answer.contains("East"))
XCTAssertTrue(response!.completionReason == "done")
try await client.files.deleteFileById(fileId: fileToAsk.id)
}

public func testAskAiMultipleItems() async throws {
let fileToAsk1: FileFull = try await CommonsManager().uploadNewFile()
let fileToAsk2: FileFull = try await CommonsManager().uploadNewFile()
let response: AiResponseFull = try await client.ai.createAiAsk(requestBody: AiAsk(mode: AiAskModeField.multipleItemQa, prompt: "Which direction sun rises?", items: [AiItemBase(id: fileToAsk1.id, type: AiItemBaseTypeField.file, content: "Earth goes around the sun"), AiItemBase(id: fileToAsk2.id, type: AiItemBaseTypeField.file, content: "Sun rises in the East in the morning")]))
XCTAssertTrue(response.answer.contains("East"))
XCTAssertTrue(response.completionReason == "done")
let response: AiResponseFull? = try await client.ai.createAiAsk(requestBody: AiAsk(mode: AiAskModeField.multipleItemQa, prompt: "Which direction sun rises?", items: [AiItemAsk(id: fileToAsk1.id, type: AiItemAskTypeField.file, content: "Earth goes around the sun"), AiItemAsk(id: fileToAsk2.id, type: AiItemAskTypeField.file, content: "Sun rises in the East in the morning")]))
XCTAssertTrue(response!.answer.contains("East"))
XCTAssertTrue(response!.completionReason == "done")
try await client.files.deleteFileById(fileId: fileToAsk1.id)
try await client.files.deleteFileById(fileId: fileToAsk2.id)
}
6 changes: 3 additions & 3 deletions docs/Ai.md
@@ -18,7 +18,7 @@ See the endpoint docs at

<!-- sample post_ai_ask -->
```
try await client.ai.createAiAsk(requestBody: AiAsk(mode: AiAskModeField.multipleItemQa, prompt: "Which direction sun rises?", items: [AiItemBase(id: fileToAsk1.id, type: AiItemBaseTypeField.file, content: "Earth goes around the sun"), AiItemBase(id: fileToAsk2.id, type: AiItemBaseTypeField.file, content: "Sun rises in the East in the morning")]))
try await client.ai.createAiAsk(requestBody: AiAsk(mode: AiAskModeField.multipleItemQa, prompt: "Which direction sun rises?", items: [AiItemAsk(id: fileToAsk1.id, type: AiItemAskTypeField.file, content: "Earth goes around the sun"), AiItemAsk(id: fileToAsk2.id, type: AiItemAskTypeField.file, content: "Sun rises in the East in the morning")]))
```

### Arguments
@@ -31,9 +31,9 @@ try await client.ai.createAiAsk(requestBody: AiAsk(mode: AiAskModeField.multiple

### Returns

This function returns a value of type `AiResponseFull`.
This function returns a value of type `AiResponseFull?`.

A successful response including the answer from the LLM.
A successful response including the answer from the LLM. If no content is available to answer the question, the call returns no body; this happens when the request item is a hub whose content is not indexed. To ensure content in the hub is indexed, make sure Box AI for Hubs was enabled in the Admin Console before the hub was created.
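As a hedged illustration of handling the `nil` (204) case for a hub item (the hub ID below is a placeholder):

```
let response: AiResponseFull? = try await client.ai.createAiAsk(requestBody: AiAsk(mode: AiAskModeField.singleItemQa, prompt: "What does this hub cover?", items: [AiItemAsk(id: "987654321", type: AiItemAskTypeField.hubs)]))
if let response {
    print(response.answer)
} else {
    print("Hub content is not indexed yet.")
}
```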


## Generate text
