Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,142 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// TODO: Remove the `#if compiler(>=6.2)` when Xcode 26 is the minimum supported version.
#if compiler(>=6.2)
import Foundation
#if canImport(FoundationModels)
import FoundationModels
#endif // canImport(FoundationModels)

/// Conforms the on-device session wrapper to the internal `ModelSession` protocol so it can be
/// used interchangeably with the backend-backed session.
extension FirebaseAI.LanguageModelSession: ModelSession {
  /// Forwards a text-only prompt to the wrapped Foundation Models session and adapts the result
  /// into a `GenerativeModelSession.Response`.
  ///
  /// - Parameters:
  ///   - prompt: Prompt parts; only plain text parts are supported by the on-device path.
  ///   - schema: Ignored here; for `Generable` content the schema comes from the type itself.
  ///   - type: `String.self` or a type conforming to `FoundationModels.Generable`; any other
  ///     type is a programmer error and traps.
  ///   - includeSchemaInPrompt: Forwarded to Foundation Models when generating `Generable`
  ///     content.
  ///   - options: Currently unused by the on-device path.
  /// - Returns: The generated content plus raw representations of the model output.
  func respond<Content>(to prompt: [any PartsRepresentable], schema: FirebaseAI.GenerationSchema?,
                        generating type: Content.Type, includeSchemaInPrompt: Bool,
                        options: GenerationConfig?) async throws
    -> GenerativeModelSession.Response<Content> {
    #if canImport(FoundationModels)
    guard #available(iOS 26.0, macOS 26.0, visionOS 26.0, *) else {
      fatalError("On-device generation requires iOS 26.0, macOS 26.0 or visionOS 26.0.")
    }

    let parts = ModelContent(parts: prompt)
    let promptParts = parts.internalParts.map { part in
      // The on-device path only understands plain text; anything else is a programmer error.
      guard !(part.isThought ?? false) else {
        fatalError("Thought parts are not supported in on-device prompts.")
      }
      guard let data = part.data else {
        fatalError("Prompt part contains no data.")
      }
      guard case let .text(string) = data else {
        fatalError("Only text prompt parts are supported on-device.")
      }

      return Prompt(string)
    }
    let prompt = Prompt {
      for part in promptParts {
        part
      }
    }

    guard let session else {
      fatalError("Foundation Models session is unavailable despite passing availability checks.")
    }

    if type == String.self {
      let response = try await session.respond(to: prompt)

      // Wrap the raw Foundation Models content, minting a fresh response ID.
      let rawContent = FirebaseAI.GeneratedContent(
        kind: response.rawContent.kind,
        id: FirebaseAI.GenerationID(
          responseID: UUID().uuidString,
          generationID: response.rawContent.id
        ),
        isComplete: response.rawContent.isComplete
      )

      // Mirror the on-device output in the backend response shape for API parity.
      let modelContent = ModelContent(
        role: "model",
        parts: [InternalPart(.text(response.content), isThought: false, thoughtSignature: nil)]
      )
      let candidate = Candidate(
        content: modelContent,
        safetyRatings: [],
        finishReason: nil,
        citationMetadata: nil
      )
      let rawResponse = GenerateContentResponse(candidates: [candidate])

      guard let content = response.content as? Content else {
        fatalError("Expected \(Content.self) but received String.")
      }

      return GenerativeModelSession.Response(
        content: content,
        rawContent: rawContent,
        rawResponse: rawResponse
      )
    } else if let contentMetatype = type as? (any FoundationModels.Generable.Type) {
      // Generic helper to explicitly bind the opened existential type to `T`.
      func fetchResponse<T: FoundationModels.Generable>(_ generableType: T
        .Type) async throws -> GenerativeModelSession.Response<Content> {
        let response = try await session.respond(
          to: prompt,
          generating: generableType,
          includeSchemaInPrompt: includeSchemaInPrompt
        )

        let rawContent = FirebaseAI.GeneratedContent(
          kind: response.rawContent.kind,
          id: FirebaseAI.GenerationID(
            responseID: UUID().uuidString,
            generationID: response.rawContent.id
          ),
          isComplete: response.rawContent.isComplete
        )
        // The raw JSON string stands in for the model text in the backend response shape.
        let modelContent = ModelContent(
          role: "model",
          parts: [
            InternalPart(
              .text(response.rawContent.jsonString),
              isThought: false,
              thoughtSignature: nil
            ),
          ]
        )
        let candidate = Candidate(
          content: modelContent,
          safetyRatings: [],
          finishReason: nil,
          citationMetadata: nil
        )
        let rawResponse = GenerateContentResponse(candidates: [candidate])

        // Cast the generated content back to the outer `Content` type.
        guard let finalContent = response.content as? Content else {
          fatalError("Expected \(Content.self) but received \(T.self)")
        }

        return GenerativeModelSession.Response(
          content: finalContent,
          rawContent: rawContent,
          rawResponse: rawResponse
        )
      }

      // Call the helper, which opens `contentMetatype` and passes it as `T`.
      return try await fetchResponse(contentMetatype)

    } else {
      fatalError("Unsupported type for generation: \(type)")
    }
    #else
    fatalError("Foundation Models not supported.")
    #endif // canImport(FoundationModels)
  }
}
#endif // compiler(>=6.2)
56 changes: 42 additions & 14 deletions FirebaseAI/Sources/GenerativeModelSession.swift
Original file line number Diff line number Diff line change
Expand Up @@ -84,13 +84,27 @@
/// Responds to a text prompt, routing to the on-device model when it is available and falling
/// back to the backend-backed path otherwise.
public nonisolated(nonsending)
func respond(to prompt: PartsRepresentable..., options: GenerationConfig? = nil) async throws
  -> GenerativeModelSession.Response<String> {
  let onDeviceModel = FirebaseAI.SystemLanguageModel()

  // NOTE: on-device is hard-preferred here for testing purposes.
  guard onDeviceModel.isAvailable else {
    // Foundation Models is unavailable; use the backend path.
    return try await respond(
      to: prompt,
      schema: nil as FirebaseAI.GenerationSchema?,
      generating: String.self,
      includeSchemaInPrompt: false,
      options: options
    )
  }

  let onDeviceSession = FirebaseAI.LanguageModelSession(model: onDeviceModel)
  return try await onDeviceSession.respond(
    to: prompt,
    schema: nil as FirebaseAI.GenerationSchema?,
    generating: String.self,
    includeSchemaInPrompt: false,
    options: options
  )
}

#if canImport(FoundationModels)
Expand Down Expand Up @@ -151,13 +165,27 @@
includeSchemaInPrompt: Bool = true,
options: GenerationConfig? = nil) async throws
-> GenerativeModelSession.Response<Content> where Content: Generable {
return try await respond(
to: prompt,
schema: FirebaseAI.GenerationSchema(Content.generationSchema),
generating: type,
includeSchemaInPrompt: includeSchemaInPrompt,
options: options
)
let model = FirebaseAI.SystemLanguageModel()

// Hardcoded Prefer On-Device for Testing
if model.isAvailable {
let session = FirebaseAI.LanguageModelSession(model: model)
return try await session.respond(
to: prompt,
schema: FirebaseAI.GenerationSchema(Content.generationSchema),
generating: type,
includeSchemaInPrompt: includeSchemaInPrompt,
options: options
)
} else {
return try await respond(
to: prompt,
schema: FirebaseAI.GenerationSchema(Content.generationSchema),
generating: type,
includeSchemaInPrompt: includeSchemaInPrompt,
options: options
)
}
}

/// Streams the model's response as `GeneratedContent`.
Expand Down
24 changes: 24 additions & 0 deletions FirebaseAI/Sources/Protocols/Internal/ModelSession.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// TODO: Remove the `#if compiler(>=6.2)` when Xcode 26 is the minimum supported version.
#if compiler(>=6.2)
/// Internal abstraction over a session that can generate a typed response from prompt parts.
///
/// `FirebaseAI.LanguageModelSession` conforms to this protocol to bridge on-device
/// Foundation Models generation; presumably the backend-backed session does as well — verify
/// against the conforming types.
protocol ModelSession {
  /// Generates a `Content` value from `prompt`.
  ///
  /// - Parameters:
  ///   - prompt: The prompt parts to send to the model.
  ///   - schema: Optional generation schema constraining the output.
  ///   - type: The type of content to generate.
  ///   - includeSchemaInPrompt: Whether the schema should be embedded in the prompt.
  ///   - options: Optional generation configuration.
  nonisolated(nonsending)
  func respond<Content>(to prompt: [PartsRepresentable], schema: FirebaseAI.GenerationSchema?,
                        generating type: Content.Type, includeSchemaInPrompt: Bool,
                        options: GenerationConfig?) async throws
    -> GenerativeModelSession.Response<Content>
}
#endif // compiler(>=6.2)
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// TODO: Remove the `#if compiler(>=6.2)` when Xcode 26 is the minimum supported version.
#if compiler(>=6.2)
#if canImport(FoundationModels)
import FoundationModels
#endif // canImport(FoundationModels)

extension FirebaseAI {
  /// `Sendable` wrapper around `FoundationModels.LanguageModelSession` that degrades gracefully
  /// on platforms or OS versions where Foundation Models is unavailable.
  final class LanguageModelSession: Sendable {
    // Type-erased storage: a `FoundationModels.LanguageModelSession` when available, else nil.
    private let _session: (any Sendable)?

    #if canImport(FoundationModels)
    /// The underlying Foundation Models session, or `nil` when Foundation Models could not be
    /// instantiated on this OS version.
    @available(iOS 26.0, macOS 26.0, visionOS 26.0, *)
    @available(tvOS, unavailable)
    @available(watchOS, unavailable)
    var session: FoundationModels.LanguageModelSession? {
      return _session as? FoundationModels.LanguageModelSession
    }
    #endif // canImport(FoundationModels)

    /// Whether the underlying session is currently responding; `false` whenever Foundation
    /// Models is unavailable.
    var isResponding: Bool {
      #if canImport(FoundationModels)
      guard #available(iOS 26.0, macOS 26.0, visionOS 26.0, *),
            let session else { return false }

      return session.isResponding
      #else
      return false
      #endif // canImport(FoundationModels)
    }

    /// Creates a session backed by `model` when Foundation Models is available; otherwise the
    /// wrapper is inert and `session` is `nil`.
    init(model: FirebaseAI.SystemLanguageModel) {
      #if canImport(FoundationModels)
      if #available(iOS 26.0, macOS 26.0, visionOS 26.0, *) {
        _session = FoundationModels.LanguageModelSession(model: model.systemModel)
      } else {
        _session = nil
      }
      #else
      _session = nil
      #endif // canImport(FoundationModels)
    }
  }
}
#endif // compiler(>=6.2)
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// TODO: Remove the `#if compiler(>=6.2)` when Xcode 26 is the minimum supported version.
#if compiler(>=6.2)
#if canImport(FoundationModels)
import FoundationModels
#endif // canImport(FoundationModels)

extension FirebaseAI {
  /// `Sendable` wrapper around `FoundationModels.SystemLanguageModel` that degrades gracefully
  /// on platforms or OS versions where Foundation Models is unavailable.
  final class SystemLanguageModel: Sendable {
    // Type-erased storage: a `FoundationModels.SystemLanguageModel` when available, else nil.
    private let _model: (any Sendable)?

    /// Creates the wrapper, instantiating the system model only when Foundation Models is
    /// available at runtime.
    init() {
      #if canImport(FoundationModels)
      if #available(iOS 26.0, macOS 26.0, visionOS 26.0, *) {
        _model = FoundationModels.SystemLanguageModel()
      } else {
        _model = nil
      }
      #else
      _model = nil
      #endif // canImport(FoundationModels)
    }

    #if canImport(FoundationModels)
    /// The underlying Foundation Models system model.
    ///
    /// `_model` should always be populated at these availability levels; the fallback exists
    /// only to avoid trapping in release builds if that invariant is ever violated.
    @available(iOS 26.0, macOS 26.0, visionOS 26.0, *)
    @available(tvOS, unavailable)
    @available(watchOS, unavailable)
    var systemModel: FoundationModels.SystemLanguageModel {
      guard let model = _model as? FoundationModels.SystemLanguageModel else {
        assertionFailure("SystemLanguageModel was not instantiated despite OS availability.")
        return FoundationModels.SystemLanguageModel()
      }
      return model
    }
    #endif // canImport(FoundationModels)

    /// Whether the on-device system model can be used; `false` whenever Foundation Models is
    /// unavailable.
    var isAvailable: Bool {
      #if canImport(FoundationModels)
      guard #available(iOS 26.0, macOS 26.0, visionOS 26.0, *) else { return false }

      return systemModel.isAvailable
      #else
      return false
      #endif // canImport(FoundationModels)
    }
  }
}
#endif // compiler(>=6.2)
Loading