diff --git a/README.md b/README.md index 12576326..8def2a60 100644 --- a/README.md +++ b/README.md @@ -52,6 +52,14 @@ This repository contains Swift community-maintained implementation over [OpenAI] - [Submit Tool Outputs for Run](#submit-tool-outputs-for-run) - [Files](#files) - [Upload File](#upload-file) + - [Retrieve File Content](#retrieve-file-content) + - [Delete File](#delete-file) + - [Batch API](#batch-api) + - [Create Batch](#create-batch) + - [Retrieve Batch](#retrieve-batch) + - [List Batches](#list-batches) + - [Cancel Batch](#cancel-batch) + - [Convenience Methods](#convenience-methods) - [Other APIs](#other-apis) - [Models](#models) - [List Models](#list-models) @@ -1478,6 +1486,230 @@ openAI.files(query: query) { result in } ``` +#### Retrieve File Content + +Download the content of a file. + +```swift +let fileData = try await openAI.retrieveFileContent(id: "file-abc123") +let content = String(data: fileData, encoding: .utf8) +``` + +#### Delete File + +Delete a file from OpenAI. + +```swift +let result = try await openAI.deleteFile(id: "file-abc123") +print("Deleted: \(result.deleted)") // true +``` + +### Batch API + +The Batch API allows you to send asynchronous groups of requests with 50% lower costs, a separate pool of significantly higher rate limits, and a clear 24-hour turnaround time. It's ideal for processing jobs that don't require immediate responses. + +Review [Batch API Documentation](https://platform.openai.com/docs/api-reference/batch) for more info. + +#### Create Batch + +Create a batch from an uploaded JSONL file of requests. + +**Request** + +```swift +public struct BatchQuery: Codable, Sendable { + /// The ID of an uploaded file containing requests for the batch. + public let inputFileId: String + /// The endpoint to use for the batch (e.g., /v1/chat/completions). + public let endpoint: BatchEndpoint + /// The time frame within which the batch should be processed. + public let completionWindow: BatchCompletionWindow + /// Optional custom metadata for the batch. + public let metadata: [String: String]? +} +``` + +**Response** + +```swift +public struct BatchResult: Codable, Sendable { + public let id: String + public let status: BatchStatus + public let outputFileId: String? + public let errorFileId: String? + public let requestCounts: RequestCounts? + // ... additional fields +} +``` + +**Example** + +```swift +// Step 1: Create JSONL input file +let requests = [ + BatchRequestLine( + customId: "request-1", + body: ChatQuery( + messages: [.user(.init(content: .string("What is 2+2?")))], + model: .gpt4_o_mini + ) + ), + BatchRequestLine( + customId: "request-2", + body: ChatQuery( + messages: [.user(.init(content: .string("What is the capital of France?")))], + model: .gpt4_o_mini + ) + ) +] + +// Encode to JSONL +let jsonlData = try requests.map { request in + let data = try JSONEncoder().encode(request) + return String(data: data, encoding: .utf8)! 
+}.joined(separator: "\n") + +// Step 2: Upload file +let fileQuery = FilesQuery( + purpose: "batch", + file: jsonlData.data(using: .utf8)!, + fileName: "batch_requests.jsonl", + contentType: "application/jsonl" +) +let fileResult = try await openAI.files(query: fileQuery) + +// Step 3: Create batch +let batchQuery = BatchQuery( + inputFileId: fileResult.id, + endpoint: .chatCompletions, + completionWindow: .twentyFourHours +) +let batch = try await openAI.createBatch(query: batchQuery) +print("Batch ID: \(batch.id), Status: \(batch.status)") +``` + +#### Retrieve Batch + +Get the status and details of a batch. + +```swift +let batch = try await openAI.retrieveBatch(id: "batch_abc123") +print("Status: \(batch.status)") + +if let counts = batch.requestCounts { + print("Progress: \(counts.completed)/\(counts.total)") +} +``` + +#### List Batches + +List all batches with optional pagination. + +```swift +let result = try await openAI.listBatches(after: nil, limit: 10) +for batch in result.data { + print("\(batch.id): \(batch.status)") +} +``` + +#### Cancel Batch + +Cancel an in-progress batch. The batch will be marked as `cancelling` and eventually `cancelled`. + +```swift +let cancelledBatch = try await openAI.cancelBatch(id: "batch_abc123") +print("Status: \(cancelledBatch.status)") // cancelling or cancelled +``` + +#### Convenience Methods + +The SDK provides two convenience methods that simplify the batch workflow: + +##### submitBatch + +Handles JSONL encoding, file upload, and batch creation in one call. + +```swift +let requests = [ + BatchRequestLine( + customId: "request-1", + body: ChatQuery( + messages: [.user(.init(content: .string("Hello!")))], + model: .gpt4_o_mini + ) + ) +] + +let batch = try await openAI.submitBatch( + requests: requests, + fileName: "my_batch.jsonl", + metadata: ["purpose": "testing"] +) +print("Batch ID: \(batch.id)") +``` + +##### waitForBatch + +Polls for batch completion and returns parsed responses. + +```swift +let responses = try await openAI.waitForBatch( + id: batch.id, + pollingInterval: 5.0, // Check every 5 seconds + timeout: 300 // 5 minute timeout +) + +for response in responses { + print("Response for \(response.customId):") + if let body = response.response { + print(" Status: \(body.statusCode)") + print(" Content: \(body.body.choices.first?.message.content ?? 
"")") + } else if let error = response.error { + print(" Error: \(error.message)") + } +} +``` + +##### Complete Example with Convenience Methods + +```swift +// Create batch requests +let requests = [ + BatchRequestLine( + customId: "math-1", + body: ChatQuery( + messages: [.user(.init(content: .string("What is 2+2?")))], + model: .gpt4_o_mini + ) + ), + BatchRequestLine( + customId: "geography-1", + body: ChatQuery( + messages: [.user(.init(content: .string("What is the capital of France?")))], + model: .gpt4_o_mini + ) + ) +] + +// Submit and wait for completion +let batch = try await openAI.submitBatch( + requests: requests, + fileName: "questions.jsonl" +) + +let responses = try await openAI.waitForBatch(id: batch.id) + +// Process responses +for response in responses { + if let content = response.response?.body.choices.first?.message.content { + print("\(response.customId): \(content)") + } +} +// Output: +// math-1: 4 +// geography-1: Paris +``` + ## Support for other providers > TL;DR Use `.relaxed` parsing option on Configuration diff --git a/Sources/OpenAI/OpenAI+OpenAIAsync.swift b/Sources/OpenAI/OpenAI+OpenAIAsync.swift index f4d47b94..13193393 100644 --- a/Sources/OpenAI/OpenAI+OpenAIAsync.swift +++ b/Sources/OpenAI/OpenAI+OpenAIAsync.swift @@ -191,7 +191,144 @@ extension OpenAI: OpenAIAsync { request: makeFilesRequest(query: query) ) } - + + public func retrieveFileContent(id: String) async throws -> Data { + try await performRawDataRequestAsync( + request: makeFileContentRequest(id: id) + ) + } + + public func deleteFile(id: String) async throws -> FileDeleteResult { + try await performRequestAsync( + request: makeFileDeleteRequest(id: id) + ) + } + + // MARK: - Batch API + + public func createBatch(query: BatchQuery) async throws -> BatchResult { + try await performRequestAsync( + request: makeBatchCreateRequest(query: query) + ) + } + + public func retrieveBatch(id: String) async throws -> BatchResult { + try await performRequestAsync( + request: makeBatchRetrieveRequest(id: id) + ) + } + + public func listBatches(after: String? = nil, limit: Int = 20) async throws -> BatchListResult { + try await performRequestAsync( + request: makeBatchListRequest(after: after, limit: limit) + ) + } + + public func cancelBatch(id: String) async throws -> BatchResult { + try await performRequestAsync( + request: makeBatchCancelRequest(id: id) + ) + } + + // MARK: - Batch API Convenience Methods + + /// Submits a batch of chat requests for async processing. + /// Handles JSONL encoding, file upload, and batch creation. + /// + /// - Parameters: + /// - requests: Array of batch request lines to process + /// - fileName: Name for the uploaded JSONL file + /// - endpoint: The API endpoint for batch requests (default: chat completions) + /// - metadata: Optional metadata to attach to the batch + /// - Returns: The created batch result with ID for tracking + public func submitBatch( + requests: [BatchRequestLine], + fileName: String, + endpoint: BatchEndpoint = .chatCompletions, + metadata: [String: String]? = nil + ) async throws -> BatchResult { + // Step 1: Encode requests to JSONL + let encoder = JSONEncoder() + let jsonlData = try requests.map { request -> String in + let data = try encoder.encode(request) + return String(data: data, encoding: .utf8)! + }.joined(separator: "\n") + + // Step 2: Upload file + let fileData = jsonlData.data(using: .utf8)! 
+        let fileQuery = FilesQuery(
+            purpose: "batch",
+            file: fileData,
+            fileName: fileName,
+            contentType: "application/jsonl"
+        )
+        let fileResult = try await files(query: fileQuery)
+
+        // Step 3: Create batch
+        let batchQuery = BatchQuery(
+            inputFileId: fileResult.id,
+            endpoint: endpoint,
+            metadata: metadata
+        )
+        return try await createBatch(query: batchQuery)
+    }
+
+    /// Waits for a batch to complete and returns the parsed responses.
+    /// Polls the batch status until it reaches a terminal state, then downloads and parses the results.
+    ///
+    /// - Parameters:
+    ///   - batchId: The batch ID to wait for
+    ///   - pollingInterval: Time between status checks (default: 5 seconds)
+    ///   - timeout: Maximum time to wait (default: 24 hours)
+    /// - Returns: Array of parsed batch response lines
+    /// - Throws: `BatchError` if the batch times out, fails, expires, or is cancelled
+    public func waitForBatch(
+        id batchId: String,
+        pollingInterval: TimeInterval = 5.0,
+        timeout: TimeInterval = 86400
+    ) async throws -> [BatchResponseLine] {
+        let startTime = Date()
+
+        // Poll until the batch reaches a terminal state
+        var batch = try await retrieveBatch(id: batchId)
+        while batch.status != .completed &&
+              batch.status != .failed &&
+              batch.status != .expired &&
+              batch.status != .cancelled &&
+              batch.status != .cancelling {
+
+            if Date().timeIntervalSince(startTime) > timeout {
+                throw BatchError.timeout(batchId: batchId, lastStatus: batch.status)
+            }
+
+            try await Task.sleep(nanoseconds: UInt64(pollingInterval * 1_000_000_000))
+            batch = try await retrieveBatch(id: batchId)
+        }
+
+        // Check for failure states
+        guard batch.status == .completed else {
+            throw BatchError.batchFailed(batchId: batchId, status: batch.status)
+        }
+
+        // Download the output file
+        guard let outputFileId = batch.outputFileId else {
+            throw BatchError.noOutputFile(batchId: batchId)
+        }
+
+        let outputData = try await retrieveFileContent(id: outputFileId)
+        guard let outputString = String(data: outputData, encoding: .utf8) else {
+            throw BatchError.invalidOutputData(batchId: batchId)
+        }
+
+        // Parse the JSONL responses
+        let decoder = JSONDecoder()
+        let lines = outputString.split(separator: "\n")
+        return try lines.map { line in
+            let lineData = line.data(using: .utf8)!
+            return try decoder.decode(BatchResponseLine.self, from: lineData)
+        }
+    }
+
     func performRequestAsync<ResultType: Codable & Sendable>(request: any URLRequestBuildable) async throws -> ResultType {
         try await asyncClient.performRequest(request: request)
     }
@@ -199,6 +336,10 @@ extension OpenAI: OpenAIAsync {
     func performSpeechRequestAsync(request: any URLRequestBuildable) async throws -> AudioSpeechResult {
         try await asyncClient.performSpeechRequest(request: request)
     }
+
+    func performRawDataRequestAsync(request: any URLRequestBuildable) async throws -> Data {
+        try await asyncClient.performRawDataRequest(request: request)
+    }
 
     func makeAsyncStream<ResultType: Codable & Sendable>(
         byWrapping call: (_ onResult: @escaping @Sendable (Result<ResultType, Error>) -> Void, _ completion: (@Sendable (Error?) -> Void)?) -> CancellableRequest
diff --git a/Sources/OpenAI/OpenAI.swift b/Sources/OpenAI/OpenAI.swift
index 4712fdda..301b5507 100644
--- a/Sources/OpenAI/OpenAI.swift
+++ b/Sources/OpenAI/OpenAI.swift
@@ -263,6 +263,36 @@ final public class OpenAI: OpenAIProtocol, @unchecked Sendable {
         )
     }
 
+    // MARK: - Batch API
+
+    public func createBatch(query: BatchQuery, completion: @escaping @Sendable (Result<BatchResult, Error>) -> Void) -> CancellableRequest {
+        performRequest(
+            request: makeBatchCreateRequest(query: query),
+            completion: completion
+        )
+    }
+
+    public func retrieveBatch(id: String, completion: @escaping @Sendable (Result<BatchResult, Error>) -> Void) -> CancellableRequest {
+        performRequest(
+            request: makeBatchRetrieveRequest(id: id),
+            completion: completion
+        )
+    }
+
+    public func listBatches(after: String? = nil, limit: Int = 20, completion: @escaping @Sendable (Result<BatchListResult, Error>) -> Void) -> CancellableRequest {
+        performRequest(
+            request: makeBatchListRequest(after: after, limit: limit),
+            completion: completion
+        )
+    }
+
+    public func cancelBatch(id: String, completion: @escaping @Sendable (Result<BatchResult, Error>) -> Void) -> CancellableRequest {
+        performRequest(
+            request: makeBatchCancelRequest(id: id),
+            completion: completion
+        )
+    }
+
     public func images(query: ImagesQuery, completion: @escaping @Sendable (Result<ImagesResult, Error>) -> Void) -> CancellableRequest {
         performRequest(request: makeImagesRequest(query: query), completion: completion)
     }
@@ -444,7 +474,16 @@ extension APIPath {
     static let images = "/images/generations"
     static let imageEdits = "/images/edits"
     static let imageVariations = "/images/variations"
-    
+
+    // Files API paths
+    static func file(_ fileId: String) -> String { "/files/\(fileId)" }
+    static func fileContent(_ fileId: String) -> String { "/files/\(fileId)/content" }
+
+    // Batch API paths
+    static let batches = "/batches"
+    static func batch(_ batchId: String) -> String { "/batches/\(batchId)" }
+    static func batchCancel(_ batchId: String) -> String { "/batches/\(batchId)/cancel" }
+
     func withPath(_ path: String) -> String {
         self + "/" + path
     }
diff --git a/Sources/OpenAI/Private/Client/AsyncClient.swift b/Sources/OpenAI/Private/Client/AsyncClient.swift
index 7bb28bac..a25bcfca 100644
--- a/Sources/OpenAI/Private/Client/AsyncClient.swift
+++ b/Sources/OpenAI/Private/Client/AsyncClient.swift
@@ -98,6 +98,41 @@ actor AsyncClient {
         }
     }
 
+    func performRawDataRequest(request: any URLRequestBuildable) async throws -> Data {
+        let urlRequest = try request.build(configuration: configuration)
+        let interceptedRequest = middlewares.reduce(urlRequest) { current, middleware in
+            middleware.intercept(request: current)
+        }
+        if #available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *) {
+            let (data, response) = try await session.data(for: interceptedRequest, delegate: nil)
+            return try responseHandler.interceptAndDecodeRaw(response: response, urlRequest: urlRequest, responseData: data)
+        } else {
+            let dataTaskStore = URLSessionDataTaskStore()
+            return try await withTaskCancellationHandler {
+                return try await withCheckedThrowingContinuation { continuation in
+                    let dataTask = self.dataTaskFactory.makeRawResponseDataTask(forRequest: interceptedRequest) { result in
+                        switch result {
+                        case .success(let success):
+                            continuation.resume(returning: success)
+                        case .failure(let failure):
+                            continuation.resume(throwing: failure)
+                        }
+                    }
+
+                    dataTask.resume()
+
+                    Task {
+                        await dataTaskStore.setDataTask(dataTask)
+                    }
+                }
+            } onCancel: {
+                Task {
+                    await dataTaskStore.getDataTask()?.cancel()
+                }
+            }
+        }
+    }
+
     private func makeDataTask<ResultType: Codable & Sendable>(
         forRequest request: URLRequest,
         completion: @escaping @Sendable (Result<ResultType, Error>) -> Void
diff --git a/Sources/OpenAI/Private/OpenAI+MakeRequest.swift b/Sources/OpenAI/Private/OpenAI+MakeRequest.swift
index ff4d6718..766c3102 100644
--- a/Sources/OpenAI/Private/OpenAI+MakeRequest.swift
+++ b/Sources/OpenAI/Private/OpenAI+MakeRequest.swift
@@ -153,4 +153,36 @@ extension OpenAI {
             body: query
         )
     }
+
+    func makeFileContentRequest(id: String) -> JSONRequest<Data> {
+        .init(url: buildURL(path: .fileContent(id)), method: "GET")
+    }
+
+    func makeFileDeleteRequest(id: String) -> JSONRequest<FileDeleteResult> {
+        .init(url: buildURL(path: .file(id)), method: "DELETE")
+    }
+
+    // MARK: - Batch API
+
+    func makeBatchCreateRequest(query: BatchQuery) -> JSONRequest<BatchResult> {
+        .init(body: query, url: buildURL(path: .batches))
+    }
+
+    func makeBatchRetrieveRequest(id: String) -> JSONRequest<BatchResult> {
+        .init(url: buildURL(path: .batch(id)), method: "GET")
+    }
+
+    func makeBatchListRequest(after: String?, limit: Int) -> JSONRequest<BatchListResult> {
+        var components = URLComponents.components(perConfiguration: configuration, path: APIPath.batches)
+        var queryItems = [URLQueryItem(name: "limit", value: String(limit))]
+        if let after = after {
+            queryItems.append(URLQueryItem(name: "after", value: after))
+        }
+        components.queryItems = queryItems
+        return .init(url: components.urlSafe, method: "GET")
+    }
+
+    func makeBatchCancelRequest(id: String) -> JSONRequest<BatchResult> {
+        .init(url: buildURL(path: .batchCancel(id)), method: "POST")
+    }
 }
diff --git a/Sources/OpenAI/Public/Models/BatchError.swift b/Sources/OpenAI/Public/Models/BatchError.swift
new file mode 100644
index 00000000..5d2cbdb4
--- /dev/null
+++ b/Sources/OpenAI/Public/Models/BatchError.swift
@@ -0,0 +1,33 @@
+//
+//  BatchError.swift
+//  OpenAI
+//
+//  Errors for batch API convenience methods.
+//
+
+import Foundation
+
+/// Errors that can occur during batch processing.
+public enum BatchError: LocalizedError, Sendable {
+    /// Batch polling timed out before completion.
+    case timeout(batchId: String, lastStatus: BatchStatus)
+    /// Batch ended in a non-completed state.
+    case batchFailed(batchId: String, status: BatchStatus)
+    /// Batch completed but no output file was returned.
+    case noOutputFile(batchId: String)
+    /// Output file data could not be decoded.
+    case invalidOutputData(batchId: String)
+
+    public var errorDescription: String? {
+        switch self {
+        case .timeout(let batchId, let lastStatus):
+            return "Batch \(batchId) timed out with status: \(lastStatus)"
+        case .batchFailed(let batchId, let status):
+            return "Batch \(batchId) failed with status: \(status)"
+        case .noOutputFile(let batchId):
+            return "Batch \(batchId) completed but no output file was returned"
+        case .invalidOutputData(let batchId):
+            return "Batch \(batchId) output file could not be decoded"
+        }
+    }
+}
diff --git a/Sources/OpenAI/Public/Models/BatchListResult.swift b/Sources/OpenAI/Public/Models/BatchListResult.swift
new file mode 100644
index 00000000..8b4e24b0
--- /dev/null
+++ b/Sources/OpenAI/Public/Models/BatchListResult.swift
@@ -0,0 +1,36 @@
+//
+//  BatchListResult.swift
+//
+//
+//  Created by Jason Flax on 12/24/2024.
+//
+
+import Foundation
+
+/// Result of listing batches.
+/// https://platform.openai.com/docs/api-reference/batch/list
+public struct BatchListResult: Codable, Equatable, Sendable {
+
+    /// The object type, which is always `list`.
+    public let object: String
+
+    /// The list of batches.
+    public let data: [BatchResult]
+
+    /// The ID of the first batch in the list.
+    public let firstId: String?
+ + /// The ID of the last batch in the list. + public let lastId: String? + + /// Whether there are more batches to retrieve. + public let hasMore: Bool + + enum CodingKeys: String, CodingKey { + case object + case data + case firstId = "first_id" + case lastId = "last_id" + case hasMore = "has_more" + } +} diff --git a/Sources/OpenAI/Public/Models/BatchQuery.swift b/Sources/OpenAI/Public/Models/BatchQuery.swift new file mode 100644 index 00000000..16b26073 --- /dev/null +++ b/Sources/OpenAI/Public/Models/BatchQuery.swift @@ -0,0 +1,95 @@ +// +// BatchQuery.swift +// +// +// Created by Jason Flax on 12/24/2024. +// + +import Foundation + +/// The endpoint to be used for batch requests. +public enum BatchEndpoint: String, Codable, Sendable { + /// Chat completions endpoint. + case chatCompletions = "/v1/chat/completions" + /// Embeddings endpoint. + case embeddings = "/v1/embeddings" + /// Legacy completions endpoint. + case completions = "/v1/completions" +} + +/// The time frame within which the batch should be processed. +public enum BatchCompletionWindow: String, Codable, Sendable { + /// 24 hour completion window (50% cost discount). + case twentyFourHours = "24h" +} + +/// Query for creating a batch job. +/// https://platform.openai.com/docs/api-reference/batch/create +public struct BatchQuery: Codable, Sendable { + + /// The ID of an uploaded file that contains requests for the new batch. + /// Your input file must be formatted as a JSONL file, and must be uploaded with the purpose `batch`. + public let inputFileId: String + + /// The endpoint to be used for all requests in the batch. + public let endpoint: BatchEndpoint + + /// The time frame within which the batch should be processed. + public let completionWindow: BatchCompletionWindow + + /// Optional custom metadata for the batch. + public let metadata: [String: String]? + + enum CodingKeys: String, CodingKey { + case inputFileId = "input_file_id" + case endpoint + case completionWindow = "completion_window" + case metadata + } + + public init( + inputFileId: String, + endpoint: BatchEndpoint = .chatCompletions, + completionWindow: BatchCompletionWindow = .twentyFourHours, + metadata: [String: String]? = nil + ) { + self.inputFileId = inputFileId + self.endpoint = endpoint + self.completionWindow = completionWindow + self.metadata = metadata + } +} + +/// A single request line for batch JSONL input file. +public struct BatchRequestLine: Codable, Sendable { + + /// A developer-provided per-request id that will be used to match outputs to inputs. + public let customId: String + + /// The HTTP method to be used for the request. Always POST. + let method: String + + /// The OpenAI API relative URL to be used for the request. + let url: BatchEndpoint + + /// The body of the request. + public let body: ChatQuery + + enum CodingKeys: String, CodingKey { + case customId = "custom_id" + case method + case url + case body + } + + public init( + customId: String, + body: ChatQuery, + endpoint: BatchEndpoint = .chatCompletions + ) { + self.customId = customId + self.method = "POST" + self.url = endpoint + self.body = body + } +} diff --git a/Sources/OpenAI/Public/Models/BatchResponseLine.swift b/Sources/OpenAI/Public/Models/BatchResponseLine.swift new file mode 100644 index 00000000..ad058971 --- /dev/null +++ b/Sources/OpenAI/Public/Models/BatchResponseLine.swift @@ -0,0 +1,60 @@ +// +// BatchResponseLine.swift +// +// +// Created by Jason Flax on 12/24/2024. 
+// + +import Foundation + +/// A single response line from a batch output JSONL file. +public struct BatchResponseLine: Codable, Equatable, Sendable { + + /// The unique identifier for this response. + public let id: String + + /// The custom_id from the corresponding request. + public let customId: String + + /// The response body, if the request was successful. + public let response: BatchResponseBody? + + /// Error information, if the request failed. + public let error: BatchResponseError? + + enum CodingKeys: String, CodingKey { + case id + case customId = "custom_id" + case response + case error + } +} + +/// The response body from a successful batch request. +public struct BatchResponseBody: Codable, Equatable, Sendable { + + /// The HTTP status code of the response. + public let statusCode: Int + + /// The request ID. + public let requestId: String + + /// The response body (ChatResult for chat completions). + public let body: ChatResult + + enum CodingKeys: String, CodingKey { + case statusCode = "status_code" + case requestId = "request_id" + case body + } +} + +/// Error information from a failed batch request. +public struct BatchResponseError: Codable, Equatable, Sendable { + + /// The error code. + public let code: String + + /// The error message. + public let message: String +} diff --git a/Sources/OpenAI/Public/Models/BatchResult.swift b/Sources/OpenAI/Public/Models/BatchResult.swift new file mode 100644 index 00000000..7f9cd7fd --- /dev/null +++ b/Sources/OpenAI/Public/Models/BatchResult.swift @@ -0,0 +1,124 @@ +// +// BatchResult.swift +// +// +// Created by Jason Flax on 12/24/2024. +// + +import Foundation + +/// Result of a batch operation. +/// https://platform.openai.com/docs/api-reference/batch/object +public struct BatchResult: Codable, Equatable, Sendable { + + /// The batch ID. + public let id: String + + /// The object type, which is always `batch`. + public let object: String + + /// The OpenAI API endpoint used by the batch. + public let endpoint: String + + /// The ID of the input file for the batch. + public let inputFileId: String + + /// The time frame within which the batch should be processed. + public let completionWindow: String + + /// The current status of the batch. + public let status: BatchStatus + + /// The ID of the file containing the outputs of successfully executed requests. + public let outputFileId: String? + + /// The ID of the file containing the outputs of requests with errors. + public let errorFileId: String? + + /// The Unix timestamp (in seconds) for when the batch was created. + public let createdAt: Int + + /// The Unix timestamp (in seconds) for when the batch started processing. + public let inProgressAt: Int? + + /// The Unix timestamp (in seconds) for when the batch will expire. + public let expiresAt: Int? + + /// The Unix timestamp (in seconds) for when the batch started finalizing. + public let finalizingAt: Int? + + /// The Unix timestamp (in seconds) for when the batch was completed. + public let completedAt: Int? + + /// The Unix timestamp (in seconds) for when the batch failed. + public let failedAt: Int? + + /// The Unix timestamp (in seconds) for when the batch expired. + public let expiredAt: Int? + + /// The Unix timestamp (in seconds) for when the batch started cancelling. + public let cancellingAt: Int? + + /// The Unix timestamp (in seconds) for when the batch was cancelled. + public let cancelledAt: Int? + + /// The request counts for different statuses within the batch. 
+ public let requestCounts: RequestCounts? + + /// Optional custom metadata for the batch. + public let metadata: [String: String]? + + enum CodingKeys: String, CodingKey { + case id + case object + case endpoint + case inputFileId = "input_file_id" + case completionWindow = "completion_window" + case status + case outputFileId = "output_file_id" + case errorFileId = "error_file_id" + case createdAt = "created_at" + case inProgressAt = "in_progress_at" + case expiresAt = "expires_at" + case finalizingAt = "finalizing_at" + case completedAt = "completed_at" + case failedAt = "failed_at" + case expiredAt = "expired_at" + case cancellingAt = "cancelling_at" + case cancelledAt = "cancelled_at" + case requestCounts = "request_counts" + case metadata + } +} + +extension BatchResult: Identifiable {} + +/// The status of a batch. +public enum BatchStatus: String, Codable, Equatable, Sendable { + /// The input file is being validated before the batch can begin. + case validating + /// The input file has failed the validation process. + case failed + /// The batch is currently being run. + case inProgress = "in_progress" + /// The batch has completed and the results are being prepared. + case finalizing + /// The batch has been completed and the results are ready. + case completed + /// The batch was not able to be completed within the 24-hour time window. + case expired + /// The batch is being cancelled (may take up to 10 minutes). + case cancelling + /// The batch was cancelled. + case cancelled +} + +/// Request counts for different statuses within a batch. +public struct RequestCounts: Codable, Equatable, Sendable { + /// Total number of requests in the batch. + public let total: Int + /// Number of requests that have been completed successfully. + public let completed: Int + /// Number of requests that have failed. + public let failed: Int +} diff --git a/Sources/OpenAI/Public/Models/FileDeleteResult.swift b/Sources/OpenAI/Public/Models/FileDeleteResult.swift new file mode 100644 index 00000000..0d7d4b7f --- /dev/null +++ b/Sources/OpenAI/Public/Models/FileDeleteResult.swift @@ -0,0 +1,18 @@ +// +// FileDeleteResult.swift +// OpenAI +// +// Result from deleting a file. +// + +import Foundation + +/// Result from deleting a file via the Files API. +public struct FileDeleteResult: Codable, Equatable, Sendable { + /// The file ID that was deleted. + public let id: String + /// The object type (always "file"). + public let object: String + /// Whether the file was successfully deleted. + public let deleted: Bool +} diff --git a/Sources/OpenAI/Public/Protocols/OpenAIAsync.swift b/Sources/OpenAI/Public/Protocols/OpenAIAsync.swift index 8a6a2431..459382a4 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIAsync.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIAsync.swift @@ -37,4 +37,18 @@ public protocol OpenAIAsync: Sendable { func threadsMessages(threadId: String, before: String?) 
async throws -> ThreadsMessagesResult
     func threadsAddMessage(threadId: String, query: MessageQuery) async throws -> ThreadAddMessageResult
     func files(query: FilesQuery) async throws -> FilesResult
+    func retrieveFileContent(id: String) async throws -> Data
+    func deleteFile(id: String) async throws -> FileDeleteResult
+
+    // MARK: - Batch API
+
+    func createBatch(query: BatchQuery) async throws -> BatchResult
+    func retrieveBatch(id: String) async throws -> BatchResult
+    func listBatches(after: String?, limit: Int) async throws -> BatchListResult
+    func cancelBatch(id: String) async throws -> BatchResult
+
+    // MARK: - Batch API Convenience
+
+    func submitBatch(requests: [BatchRequestLine], fileName: String, endpoint: BatchEndpoint, metadata: [String: String]?) async throws -> BatchResult
+    func waitForBatch(id batchId: String, pollingInterval: TimeInterval, timeout: TimeInterval) async throws -> [BatchResponseLine]
 }
diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift
index dba875b5..c547d0d2 100644
--- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift
+++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift
@@ -437,7 +437,7 @@ public protocol OpenAIProtocol: OpenAIModern {
     /**
      This function sends a purpose string, file contents, and fileName contents to the OpenAI API and returns a file id result.
-     
+
      Example: Upload file
      ```
      let query = FilesQuery(purpose: "assistants", file: fileData, fileName: url.lastPathComponent, contentType: "application/pdf")
@@ -450,4 +450,54 @@
      Returns a `Result` of type `FilesResult` if successful, or an `Error` if an error occurs.
      **/
     @discardableResult func files(query: FilesQuery, completion: @escaping @Sendable (Result<FilesResult, Error>) -> Void) -> CancellableRequest
+
+    // MARK: - Batch API
+
+    /**
+     Creates and executes a batch from an uploaded file of requests.
+     Batches are processed asynchronously and results can be retrieved when complete.
+     Batch processing is 50% cheaper than synchronous API calls.
+
+     Example:
+     ```
+     let query = BatchQuery(inputFileId: "file-abc123")
+     openAI.createBatch(query: query) { result in
+         // Handle response here
+     }
+     ```
+
+     - Parameter query: A `BatchQuery` containing the input file ID and other batch parameters.
+     - Parameter completion: The completion handler with the batch result or an error.
+     - Returns: A `CancellableRequest` that can be used to cancel the request.
+     **/
+    @discardableResult func createBatch(query: BatchQuery, completion: @escaping @Sendable (Result<BatchResult, Error>) -> Void) -> CancellableRequest
+
+    /**
+     Retrieves a batch by its ID.
+
+     - Parameter id: The ID of the batch to retrieve.
+     - Parameter completion: The completion handler with the batch result or an error.
+     - Returns: A `CancellableRequest` that can be used to cancel the request.
+     **/
+    @discardableResult func retrieveBatch(id: String, completion: @escaping @Sendable (Result<BatchResult, Error>) -> Void) -> CancellableRequest
+
+    /**
+     Lists all batches for the organization.
+
+     - Parameter after: A cursor for pagination. Pass the `lastId` from a previous response.
+     - Parameter limit: The maximum number of batches to return (default 20, max 100).
+     - Parameter completion: The completion handler with the list result or an error.
+     - Returns: A `CancellableRequest` that can be used to cancel the request.
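+
+     Example:
+     ```
+     openAI.listBatches(after: nil, limit: 20) { result in
+         // Handle response here
+     }
+     ```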
+ **/ + @discardableResult func listBatches(after: String?, limit: Int, completion: @escaping @Sendable (Result) -> Void) -> CancellableRequest + + /** + Cancels an in-progress batch. The batch will be moved to `cancelling` status + and eventually `cancelled`. + + - Parameter id: The ID of the batch to cancel. + - Parameter completion: The completion handler with the batch result or an error. + - Returns: A `CancellableRequest` that can be used to cancel the request. + **/ + @discardableResult func cancelBatch(id: String, completion: @escaping @Sendable (Result) -> Void) -> CancellableRequest } diff --git a/Tests/OpenAITests/BatchAPIIntegrationTests.swift b/Tests/OpenAITests/BatchAPIIntegrationTests.swift new file mode 100644 index 00000000..592af580 --- /dev/null +++ b/Tests/OpenAITests/BatchAPIIntegrationTests.swift @@ -0,0 +1,362 @@ +// +// BatchAPIIntegrationTests.swift +// OpenAI +// +// Integration tests for Batch API - requires valid API key. +// + +import XCTest +@testable import OpenAI + +/// Example structured output type for batch testing +struct ExtractedMovieInfo: Codable, JSONSchemaConvertible { + let title: String + let director: String + let year: Int + let genre: String + + static var example: ExtractedMovieInfo { + ExtractedMovieInfo( + title: "The Matrix", + director: "The Wachowskis", + year: 1999, + genre: "Science Fiction" + ) + } +} + +/// Integration tests that hit the real OpenAI API. +/// Set OPENAI_API_KEY environment variable to run. +class BatchAPIIntegrationTests: XCTestCase { + + var openAI: OpenAI! + + override func setUp() async throws { + guard let apiKey = ProcessInfo.processInfo.environment["OPENAI_API_KEY"], + !apiKey.isEmpty else { + throw XCTSkip("OPENAI_API_KEY not set - skipping integration test") + } + + let configuration = OpenAI.Configuration(token: apiKey) + self.openAI = OpenAI(configuration: configuration) + } + + /// Test the full batch workflow: + /// 1. Upload input file + /// 2. Create batch + /// 3. Poll for completion + /// 4. Retrieve results + func testBatchWorkflow() async throws { + // Step 1: Create JSONL input using simplified init + let requests = [ + // Simple text request + BatchRequestLine( + customId: "request-1", + body: ChatQuery( + messages: [.user(.init(content: .string("What is 2+2? Reply with just the number.")))], + model: .gpt4_o_mini + ) + ), + // Another simple text request + BatchRequestLine( + customId: "request-2", + body: ChatQuery( + messages: [.user(.init(content: .string("What is the capital of France? Reply with just the city name.")))], + model: .gpt4_o_mini + ) + ), + // Structured output request using JSON schema + BatchRequestLine( + customId: "request-3-structured", + body: ChatQuery( + messages: [ + .system(.init(content: .textContent("You extract movie information from text."))), + .user(.init(content: .string("Extract the movie info: Inception is a 2010 sci-fi film directed by Christopher Nolan."))) + ], + model: .gpt4_o_mini, + responseFormat: .jsonSchema( + ChatQuery.StructuredOutputConfigurationOptions( + name: "movie_info", + schema: .derivedJsonSchema(ExtractedMovieInfo.self), + strict: true + ) + ) + ) + ) + ] + + // Encode to JSONL + let encoder = JSONEncoder() + let jsonlData = try requests.map { request -> String in + let data = try encoder.encode(request) + return String(data: data, encoding: .utf8)! + }.joined(separator: "\n") + + print("📝 JSONL Input:\n\(jsonlData)\n") + + // Step 2: Upload file + let fileData = jsonlData.data(using: String.Encoding.utf8)! 
+ let fileQuery = FilesQuery( + purpose: "batch", + file: fileData, + fileName: "batch_test_\(Int(Date().timeIntervalSince1970)).jsonl", + contentType: "application/jsonl" + ) + + print("📤 Uploading input file...") + let fileResult = try await openAI.files(query: fileQuery) + print(" File ID: \(fileResult.id)") + + // Step 3: Create batch + let batchQuery = BatchQuery( + inputFileId: fileResult.id, + endpoint: .chatCompletions, + completionWindow: .twentyFourHours, + metadata: ["test": "batch-api-integration"] + ) + + print("🚀 Creating batch...") + let batch = try await openAI.createBatch(query: batchQuery) + print(" Batch ID: \(batch.id)") + print(" Status: \(batch.status)") + + // Step 4: Poll for completion (with timeout) + print("⏳ Waiting for batch to complete...") + let startTime = Date() + let timeout: TimeInterval = 300 // 5 minutes max + + var currentBatch = batch + while currentBatch.status != BatchStatus.completed && + currentBatch.status != BatchStatus.failed && + currentBatch.status != BatchStatus.expired && + currentBatch.status != BatchStatus.cancelled && + currentBatch.status != .cancelling { + + if Date().timeIntervalSince(startTime) > timeout { + print(" ⚠️ Timeout after \(timeout)s - batch still processing") + print(" Final status: \(currentBatch.status)") + // Don't fail - batch might still complete later + return + } + + try await Task.sleep(nanoseconds: 5_000_000_000) // 5 seconds + currentBatch = try await openAI.retrieveBatch(id: batch.id) + print(" Status: \(currentBatch.status)") + + if let counts = currentBatch.requestCounts { + print(" Progress: \(counts.completed)/\(counts.total) completed, \(counts.failed) failed") + } + } + + // Step 5: Check results + XCTAssertEqual(currentBatch.status, BatchStatus.completed, "Batch should complete successfully") + + if let counts = currentBatch.requestCounts { + print("📊 Final counts:") + print(" Total: \(counts.total)") + print(" Completed: \(counts.completed)") + print(" Failed: \(counts.failed)") + } + + // Step 6: Check for errors first + if let errorFileId = currentBatch.errorFileId { + print("⚠️ Error file exists: \(errorFileId)") + let errorData = try await openAI.retrieveFileContent(id: errorFileId) + let errorString = String(data: errorData, encoding: .utf8) ?? "?" + print("📄 Error file content:\n\(errorString)\n") + } + + // Step 7: Download and verify output file + guard let outputFileId = currentBatch.outputFileId else { + XCTFail("No output file ID returned - all requests may have failed") + return + } + + print("📥 Downloading output file: \(outputFileId)") + let outputData = try await openAI.retrieveFileContent(id: outputFileId) + let outputString = String(data: outputData, encoding: .utf8)! + print("📄 Output file content:\n\(outputString)\n") + + // Parse JSONL output + let lines = outputString.split(separator: "\n") + XCTAssertEqual(lines.count, 3, "Should have 3 response lines") + + let decoder = JSONDecoder() + + // Verify each response + var foundMathAnswer = false + var foundCapitalAnswer = false + var foundMovieInfo = false + + for line in lines { + let responseData = line.data(using: .utf8)! 
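+            // Output lines are not guaranteed to be in input order, so match responses by customId.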
+ let responseLine = try decoder.decode(BatchResponseLine.self, from: responseData) + + print("📝 Response for \(responseLine.customId):") + + if let response = responseLine.response { + XCTAssertEqual(response.statusCode, 200, "Request should succeed") + + // Get the content from the first choice + if let choice = response.body.choices.first, + let content = choice.message.content { + print(" Content: \(content)") + + switch responseLine.customId { + case "request-1": + XCTAssertTrue(content.contains("4"), "2+2 should equal 4") + foundMathAnswer = true + case "request-2": + XCTAssertTrue(content.lowercased().contains("paris"), "Capital of France is Paris") + foundCapitalAnswer = true + case "request-3-structured": + // Parse structured output + let movieInfo = try decoder.decode(ExtractedMovieInfo.self, from: content.data(using: .utf8)!) + print(" Parsed movie: \(movieInfo.title) (\(movieInfo.year)) by \(movieInfo.director)") + XCTAssertEqual(movieInfo.title, "Inception") + XCTAssertEqual(movieInfo.year, 2010) + XCTAssertEqual(movieInfo.director, "Christopher Nolan") + foundMovieInfo = true + default: + break + } + } + } else if let error = responseLine.error { + XCTFail("Request \(responseLine.customId) failed: \(error.message)") + } + } + + XCTAssertTrue(foundMathAnswer, "Should find math answer") + XCTAssertTrue(foundCapitalAnswer, "Should find capital answer") + XCTAssertTrue(foundMovieInfo, "Should find movie info") + + print("✅ All batch responses verified!") + } + + /// Simple test to list existing batches + func testListBatches() async throws { + print("📋 Listing batches...") + let result = try await openAI.listBatches(after: nil, limit: 10) + + print(" Found \(result.data.count) batches") + for batch in result.data.prefix(5) { + print(" - \(batch.id): \(batch.status) (\(batch.endpoint))") + } + + XCTAssertNotNil(result.data) + } + + /// Test canceling a batch + func testCancelBatch() async throws { + // Create a batch first + let request = BatchRequestLine( + customId: "cancel-test-1", + body: ChatQuery( + messages: [.user(.init(content: .string("What is 1+1?")))], + model: .gpt4_o_mini + ) + ) + + print("📤 Creating batch to cancel...") + let batch = try await openAI.submitBatch( + requests: [request], + fileName: "cancel_test.jsonl" + ) + print(" Batch ID: \(batch.id)") + print(" Status: \(batch.status)") + + let batchWaitTask = Task { + _ = try await openAI.waitForBatch(id: batch.id) + } + // Cancel the batch + print("🚫 Canceling batch...") + let cancelledBatch = try await openAI.cancelBatch(id: batch.id) + print(" Status after cancel: \(cancelledBatch.status)") + + do { + // Batch wait should fail due to cancellation + _ = try await batchWaitTask.value + XCTFail() + } catch let error as BatchError { + guard case let .batchFailed(batchId, status) = error else { + return XCTFail() + } + XCTAssert(status == .cancelled || status == .cancelling) + XCTAssertEqual(batchId, batch.id) + } + + // Status should be cancelling or cancelled + XCTAssertTrue( + cancelledBatch.status == .cancelling || cancelledBatch.status == .cancelled, + "Batch should be cancelling or cancelled, got: \(cancelledBatch.status)" + ) + + print("✅ Batch cancelled successfully!") + } + + /// Test deleting a file + func testDeleteFile() async throws { + // Upload a test file first + let testContent = """ + {"custom_id": "test-1", "method": "POST", "url": "/v1/chat/completions", "body": {"model": "gpt-4o-mini", "messages": [{"role": "user", "content": "test"}]}} + """ + let fileData = testContent.data(using: .utf8)! 
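+        // The JSONL above mirrors the wire format produced by encoding a BatchRequestLine.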
+ let fileQuery = FilesQuery( + purpose: "batch", + file: fileData, + fileName: "delete_test_\(Int(Date().timeIntervalSince1970)).jsonl", + contentType: "application/jsonl" + ) + + print("📤 Uploading test file...") + let fileResult = try await openAI.files(query: fileQuery) + print(" File ID: \(fileResult.id)") + + // Delete the file + print("🗑️ Deleting file...") + let deleteResult = try await openAI.deleteFile(id: fileResult.id) + print(" Deleted: \(deleteResult.deleted)") + + XCTAssertTrue(deleteResult.deleted, "File should be deleted") + XCTAssertEqual(deleteResult.id, fileResult.id, "Deleted file ID should match") + + print("✅ File deleted successfully!") + } + + /// Test the convenience methods (submitBatch + waitForBatch) + func testConvenienceMethods() async throws { + let requests = [ + BatchRequestLine( + customId: "convenience-1", + body: ChatQuery( + messages: [.user(.init(content: .string("Say 'hello'")))], + model: .gpt4_o_mini + ) + ) + ] + + print("📤 Submitting batch with convenience method...") + let batch = try await openAI.submitBatch( + requests: requests, + fileName: "convenience_test.jsonl", + metadata: ["test": "convenience"] + ) + print(" Batch ID: \(batch.id)") + + print("⏳ Waiting for batch with convenience method...") + let responses = try await openAI.waitForBatch(id: batch.id, pollingInterval: 5.0, timeout: 300) + + print("📝 Got \(responses.count) responses") + XCTAssertEqual(responses.count, 1, "Should have 1 response") + + if let response = responses.first { + XCTAssertEqual(response.customId, "convenience-1") + if let body = response.response { + XCTAssertEqual(body.statusCode, 200) + print(" Response: \(body.body.choices.first?.message.content ?? "nil")") + } + } + + print("✅ Convenience methods work!") + } +} diff --git a/Tests/OpenAITests/BatchAPITests.swift b/Tests/OpenAITests/BatchAPITests.swift new file mode 100644 index 00000000..21e121e9 --- /dev/null +++ b/Tests/OpenAITests/BatchAPITests.swift @@ -0,0 +1,425 @@ +// +// BatchAPITests.swift +// OpenAI +// +// Created for Batch API implementation. +// + +import XCTest +@testable import OpenAI + +class BatchAPITests: XCTestCase { + + private var openAI: OpenAIProtocol! 
+ private let urlSession = URLSessionMock() + + override func setUp() async throws { + let configuration = OpenAI.Configuration(token: "foo", organizationIdentifier: "bar", timeoutInterval: 14) + self.openAI = OpenAI(configuration: configuration, session: self.urlSession, streamingSessionFactory: MockStreamingSessionFactory()) + } + + // MARK: - Create Batch Tests + + func testCreateBatch() async throws { + let query = BatchQuery( + inputFileId: "file-abc123", + endpoint: .chatCompletions, + completionWindow: .twentyFourHours, + metadata: ["description": "test batch"] + ) + + let expectedResult = BatchResult( + id: "batch_abc123", + object: "batch", + endpoint: "/v1/chat/completions", + inputFileId: "file-abc123", + completionWindow: "24h", + status: .validating, + outputFileId: nil, + errorFileId: nil, + createdAt: 1711471533, + inProgressAt: nil, + expiresAt: 1711557933, + finalizingAt: nil, + completedAt: nil, + failedAt: nil, + expiredAt: nil, + cancellingAt: nil, + cancelledAt: nil, + requestCounts: nil, + metadata: ["description": "test batch"] + ) + + try self.stub(result: expectedResult) + + let result = try await openAI.createBatch(query: query) + XCTAssertEqual(result.id, expectedResult.id) + XCTAssertEqual(result.status, BatchStatus.validating) + XCTAssertEqual(result.inputFileId, "file-abc123") + } + + func testCreateBatchError() async throws { + let query = BatchQuery( + inputFileId: "file-abc123", + endpoint: .chatCompletions, + completionWindow: .twentyFourHours + ) + + let inError = APIError(message: "Invalid file ID", type: "invalid_request_error", param: "input_file_id", code: "400") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.createBatch(query: query) } + XCTAssertEqual(inError, apiError) + } + + // MARK: - Retrieve Batch Tests + + func testRetrieveBatch() async throws { + let expectedResult = BatchResult( + id: "batch_abc123", + object: "batch", + endpoint: "/v1/chat/completions", + inputFileId: "file-abc123", + completionWindow: "24h", + status: .completed, + outputFileId: "file-xyz789", + errorFileId: nil, + createdAt: 1711471533, + inProgressAt: 1711471534, + expiresAt: 1711557933, + finalizingAt: 1711493133, + completedAt: 1711493134, + failedAt: nil, + expiredAt: nil, + cancellingAt: nil, + cancelledAt: nil, + requestCounts: RequestCounts(total: 100, completed: 95, failed: 5), + metadata: nil + ) + + try self.stub(result: expectedResult) + + let result = try await openAI.retrieveBatch(id: "batch_abc123") + XCTAssertEqual(result.id, "batch_abc123") + XCTAssertEqual(result.status, .completed) + XCTAssertEqual(result.outputFileId, "file-xyz789") + XCTAssertEqual(result.requestCounts?.total, 100) + XCTAssertEqual(result.requestCounts?.completed, 95) + XCTAssertEqual(result.requestCounts?.failed, 5) + } + + func testRetrieveBatchError() async throws { + let inError = APIError(message: "Batch not found", type: "invalid_request_error", param: "batch_id", code: "404") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.retrieveBatch(id: "batch_nonexistent") } + XCTAssertEqual(inError, apiError) + } + + // MARK: - List Batches Tests + + func testListBatches() async throws { + let batch1 = BatchResult( + id: "batch_abc123", + object: "batch", + endpoint: "/v1/chat/completions", + inputFileId: "file-abc123", + completionWindow: "24h", + status: .completed, + outputFileId: "file-xyz789", + errorFileId: nil, + createdAt: 1711471533, + inProgressAt: nil, + expiresAt: 
1711557933, + finalizingAt: nil, + completedAt: 1711493134, + failedAt: nil, + expiredAt: nil, + cancellingAt: nil, + cancelledAt: nil, + requestCounts: nil, + metadata: nil + ) + + let batch2 = BatchResult( + id: "batch_def456", + object: "batch", + endpoint: "/v1/chat/completions", + inputFileId: "file-def456", + completionWindow: "24h", + status: .inProgress, + outputFileId: nil, + errorFileId: nil, + createdAt: 1711471600, + inProgressAt: 1711471601, + expiresAt: 1711558000, + finalizingAt: nil, + completedAt: nil, + failedAt: nil, + expiredAt: nil, + cancellingAt: nil, + cancelledAt: nil, + requestCounts: nil, + metadata: nil + ) + + let expectedResult = BatchListResult( + object: "list", + data: [batch1, batch2], + firstId: "batch_abc123", + lastId: "batch_def456", + hasMore: false + ) + + try self.stub(result: expectedResult) + + let result = try await openAI.listBatches(after: nil, limit: 20) + XCTAssertEqual(result.data.count, 2) + XCTAssertEqual(result.data[0].id, "batch_abc123") + XCTAssertEqual(result.data[1].id, "batch_def456") + XCTAssertFalse(result.hasMore) + } + + func testListBatchesWithPagination() async throws { + let batch = BatchResult( + id: "batch_ghi789", + object: "batch", + endpoint: "/v1/chat/completions", + inputFileId: "file-ghi789", + completionWindow: "24h", + status: .completed, + outputFileId: nil, + errorFileId: nil, + createdAt: 1711471700, + inProgressAt: nil, + expiresAt: 1711558100, + finalizingAt: nil, + completedAt: nil, + failedAt: nil, + expiredAt: nil, + cancellingAt: nil, + cancelledAt: nil, + requestCounts: nil, + metadata: nil + ) + + let expectedResult = BatchListResult( + object: "list", + data: [batch], + firstId: "batch_ghi789", + lastId: "batch_ghi789", + hasMore: true + ) + + try self.stub(result: expectedResult) + + let result = try await openAI.listBatches(after: "batch_def456", limit: 1) + XCTAssertEqual(result.data.count, 1) + XCTAssertTrue(result.hasMore) + } + + func testListBatchesError() async throws { + let inError = APIError(message: "Unauthorized", type: "authentication_error", param: nil, code: "401") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.listBatches(after: nil, limit: 20) } + XCTAssertEqual(inError, apiError) + } + + // MARK: - Cancel Batch Tests + + func testCancelBatch() async throws { + let expectedResult = BatchResult( + id: "batch_abc123", + object: "batch", + endpoint: "/v1/chat/completions", + inputFileId: "file-abc123", + completionWindow: "24h", + status: .cancelling, + outputFileId: nil, + errorFileId: nil, + createdAt: 1711471533, + inProgressAt: 1711471534, + expiresAt: 1711557933, + finalizingAt: nil, + completedAt: nil, + failedAt: nil, + expiredAt: nil, + cancellingAt: 1711475000, + cancelledAt: nil, + requestCounts: RequestCounts(total: 100, completed: 50, failed: 0), + metadata: nil + ) + + try self.stub(result: expectedResult) + + let result = try await openAI.cancelBatch(id: "batch_abc123") + XCTAssertEqual(result.id, "batch_abc123") + XCTAssertEqual(result.status, .cancelling) + XCTAssertNotNil(result.cancellingAt) + } + + func testCancelBatchError() async throws { + let inError = APIError(message: "Batch already completed", type: "invalid_request_error", param: "batch_id", code: "400") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.cancelBatch(id: "batch_completed") } + XCTAssertEqual(inError, apiError) + } + + // MARK: - Batch Status Tests + + func testBatchStatusValues() { + 
XCTAssertEqual(BatchStatus.validating.rawValue, "validating") + XCTAssertEqual(BatchStatus.failed.rawValue, "failed") + XCTAssertEqual(BatchStatus.inProgress.rawValue, "in_progress") + XCTAssertEqual(BatchStatus.finalizing.rawValue, "finalizing") + XCTAssertEqual(BatchStatus.completed.rawValue, "completed") + XCTAssertEqual(BatchStatus.expired.rawValue, "expired") + XCTAssertEqual(BatchStatus.cancelling.rawValue, "cancelling") + XCTAssertEqual(BatchStatus.cancelled.rawValue, "cancelled") + } + + // MARK: - BatchQuery Encoding Tests + + func testBatchQueryEncoding() throws { + let query = BatchQuery( + inputFileId: "file-abc123", + endpoint: .chatCompletions, + completionWindow: .twentyFourHours, + metadata: ["key": "value"] + ) + + let encoder = JSONEncoder() + let data = try encoder.encode(query) + let json = try JSONSerialization.jsonObject(with: data) as! [String: Any] + + XCTAssertEqual(json["input_file_id"] as? String, "file-abc123") + XCTAssertEqual(json["endpoint"] as? String, "/v1/chat/completions") + XCTAssertEqual(json["completion_window"] as? String, "24h") + XCTAssertEqual((json["metadata"] as? [String: String])?["key"], "value") + } + + // MARK: - BatchResponseLine Tests + + func testBatchResponseLineDecoding() throws { + let jsonString = """ + { + "id": "response_abc123", + "custom_id": "request-1", + "response": { + "status_code": 200, + "request_id": "req_abc123", + "body": { + "id": "chatcmpl-abc123", + "object": "chat.completion", + "created": 1711471533, + "model": "gpt-4o-mini", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "Hello! How can I help you?" + }, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 10, + "completion_tokens": 8, + "total_tokens": 18 + } + } + }, + "error": null + } + """ + + let decoder = JSONDecoder() + let data = jsonString.data(using: .utf8)! + let responseLine = try decoder.decode(BatchResponseLine.self, from: data) + + XCTAssertEqual(responseLine.id, "response_abc123") + XCTAssertEqual(responseLine.customId, "request-1") + XCTAssertNotNil(responseLine.response) + XCTAssertEqual(responseLine.response?.statusCode, 200) + XCTAssertEqual(responseLine.response?.requestId, "req_abc123") + XCTAssertNil(responseLine.error) + } + + func testBatchResponseLineWithError() throws { + let jsonString = """ + { + "id": "response_def456", + "custom_id": "request-2", + "response": null, + "error": { + "code": "rate_limit_exceeded", + "message": "Rate limit exceeded" + } + } + """ + + let decoder = JSONDecoder() + let data = jsonString.data(using: .utf8)! 
+        let responseLine = try decoder.decode(BatchResponseLine.self, from: data)
+
+        XCTAssertEqual(responseLine.id, "response_def456")
+        XCTAssertEqual(responseLine.customId, "request-2")
+        XCTAssertNil(responseLine.response)
+        XCTAssertNotNil(responseLine.error)
+        XCTAssertEqual(responseLine.error?.code, "rate_limit_exceeded")
+        XCTAssertEqual(responseLine.error?.message, "Rate limit exceeded")
+    }
+
+    // MARK: - URL Building Tests
+
+    func testBatchURLBuilding() {
+        let configuration = OpenAI.Configuration(token: "foo", organizationIdentifier: "bar", timeoutInterval: 14)
+        let openAI = OpenAI(configuration: configuration, session: self.urlSession, streamingSessionFactory: MockStreamingSessionFactory())
+
+        let batchesURL = openAI.buildURL(path: .batches)
+        XCTAssertEqual(batchesURL, URL(string: "https://api.openai.com:443/v1/batches"))
+
+        let batchURL = openAI.buildURL(path: .batch("batch_abc123"))
+        XCTAssertEqual(batchURL, URL(string: "https://api.openai.com:443/v1/batches/batch_abc123"))
+
+        let cancelURL = openAI.buildURL(path: .batchCancel("batch_abc123"))
+        XCTAssertEqual(cancelURL, URL(string: "https://api.openai.com:443/v1/batches/batch_abc123/cancel"))
+    }
+}
+
+// MARK: - Test Helpers
+
+extension BatchAPITests {
+
+    func stub(error: APIError) {
+        let task = DataTaskMock.failed(with: error)
+        self.urlSession.dataTask = task
+    }
+
+    func stub(result: Codable) throws {
+        let encoder = JSONEncoder()
+        let data = try encoder.encode(result)
+        let task = DataTaskMock.successful(with: data)
+        self.urlSession.dataTask = task
+    }
+
+    enum TypeError: Error {
+        case unexpectedResult(String)
+        case typeMismatch(String)
+    }
+
+    func XCTExpectError<ErrorType: Error>(execute: () async throws -> Sendable) async throws -> ErrorType {
+        do {
+            let result = try await execute()
+            throw TypeError.unexpectedResult("Error expected, but result is returned \(result)")
+        } catch let error as ErrorType {
+            return error
+        } catch let error as TypeError {
+            throw error
+        } catch {
+            throw TypeError.typeMismatch("Expected \(ErrorType.self), got instead: \(error)")
+        }
+    }
+}