Skip to content

Commit

Permalink
Fix Download and Mocking LLMLocalSession (#81)
Browse files Browse the repository at this point in the history
  • Loading branch information
LeonNissen authored Dec 18, 2024
1 parent fd9f1ca commit 26b1e07
Show file tree
Hide file tree
Showing 9 changed files with 118 additions and 167 deletions.
26 changes: 8 additions & 18 deletions Sources/SpeziLLM/Mock/LLMMockSession.swift
Original file line number Diff line number Diff line change
Expand Up @@ -52,29 +52,19 @@ public final class LLMMockSession: LLMSession, @unchecked Sendable {
return
}

/// Generate mock messages
await MainActor.run {
self.state = .generating
}
await injectAndYield("Mock ", on: continuation)

try? await Task.sleep(for: .milliseconds(500))
guard await !checkCancellation(on: continuation) else {
return
}
await injectAndYield("Message ", on: continuation)

try? await Task.sleep(for: .milliseconds(500))
guard await !checkCancellation(on: continuation) else {
return
}
await injectAndYield("from ", on: continuation)

try? await Task.sleep(for: .milliseconds(500))
guard await !checkCancellation(on: continuation) else {
return
/// Generate mock messages
let tokens = ["Mock ", "Message ", "from ", "SpeziLLM!"]
for token in tokens {
try? await Task.sleep(for: .milliseconds(500))
guard await !checkCancellation(on: continuation) else {
return
}
await injectAndYield(token, on: continuation)
}
await injectAndYield("SpeziLLM!", on: continuation)

continuation.finish()
await MainActor.run {
Expand Down
13 changes: 6 additions & 7 deletions Sources/SpeziLLMLocal/LLMLocalPlatform.swift
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,12 @@ public actor LLMLocalPlatform: LLMPlatform, DefaultInitializable {
Logger(
subsystem: "Spezi",
category: "LLMLocalPlatform"
).warning("SpeziLLMLocal is only supported on physical devices. Use `LLMMockPlatform` instead.")
).warning("SpeziLLMLocal is only supported on physical devices. A mock session will be used instead.")

Logger(
subsystem: "Spezi",
category: "LLMLocalPlatform"
).warning("\(String(localized: "LLM_MLX_NOT_SUPPORTED_WORKAROUND", bundle: .module))")
#else
if let cacheLimit = configuration.cacheLimit {
MLX.GPU.set(cacheLimit: cacheLimit * 1024 * 1024)
Expand All @@ -74,15 +79,9 @@ public actor LLMLocalPlatform: LLMPlatform, DefaultInitializable {
#endif
}

#if targetEnvironment(simulator)
public nonisolated func callAsFunction(with llmSchema: LLMLocalSchema) -> LLMLocalMockSession {
LLMLocalMockSession(self, schema: llmSchema)
}
#else
public nonisolated func callAsFunction(with llmSchema: LLMLocalSchema) -> LLMLocalSession {
LLMLocalSession(self, schema: llmSchema)
}
#endif

deinit {
MLX.GPU.clearCache()
Expand Down
28 changes: 28 additions & 0 deletions Sources/SpeziLLMLocal/LLMLocalSession+Generate.swift
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,11 @@ import SpeziLLM
extension LLMLocalSession {
// swiftlint:disable:next identifier_name function_body_length
internal func _generate(continuation: AsyncThrowingStream<String, any Error>.Continuation) async {
#if targetEnvironment(simulator)
// swiftlint:disable:next return_value_from_void_function
return await _mockGenerate(continuation: continuation)
#endif

guard let modelContainer = await self.modelContainer else {

Check warning on line 26 in Sources/SpeziLLMLocal/LLMLocalSession+Generate.swift

View workflow job for this annotation

GitHub Actions / Build and Test Swift Package iOS (Debug, SpeziLLM-iOS.xcresult, SpeziLLM-iOS.xcresult) / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 26 in Sources/SpeziLLMLocal/LLMLocalSession+Generate.swift

View workflow job for this annotation

GitHub Actions / Build and Test Swift Package iOS (Release, SpeziLLM-iOS-Release.xcresult, SpeziLLM-iOS-Release.xc... / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 26 in Sources/SpeziLLMLocal/LLMLocalSession+Generate.swift

View workflow job for this annotation

GitHub Actions / Build and Test Swift Package visionOS (Debug, SpeziLLM-visionOS.xcresult, SpeziLLM-visionOS.xcres... / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 26 in Sources/SpeziLLMLocal/LLMLocalSession+Generate.swift

View workflow job for this annotation

GitHub Actions / Build and Test Swift Package visionOS (Release, SpeziLLM-visionOS-Release.xcresult, SpeziLLM-visi... / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 26 in Sources/SpeziLLMLocal/LLMLocalSession+Generate.swift

View workflow job for this annotation

GitHub Actions / Build and Test UI Tests iOS (Debug, TestApp-iOS.xcresult, TestApp-iOS.xcresult) / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 26 in Sources/SpeziLLMLocal/LLMLocalSession+Generate.swift

View workflow job for this annotation

GitHub Actions / Build and Test UI Tests iOS (Release, TestApp-iOS-Release.xcresult, TestApp-iOS-Release.xcresult) / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 26 in Sources/SpeziLLMLocal/LLMLocalSession+Generate.swift

View workflow job for this annotation

GitHub Actions / Build and Test UI Tests iOS (Release, TestApp-iOS-Release.xcresult, TestApp-iOS-Release.xcresult) / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 26 in Sources/SpeziLLMLocal/LLMLocalSession+Generate.swift

View workflow job for this annotation

GitHub Actions / Build and Test UI Tests iPadOS (Debug, TestApp-iPad.xcresult, TestApp-iPad.xcresult) / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 26 in Sources/SpeziLLMLocal/LLMLocalSession+Generate.swift

View workflow job for this annotation

GitHub Actions / Build and Test UI Tests iPadOS (Release, TestApp-iPad-Release.xcresult, TestApp-iPad-Release.xcre... / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 26 in Sources/SpeziLLMLocal/LLMLocalSession+Generate.swift

View workflow job for this annotation

GitHub Actions / Build and Test UI Tests iPadOS (Release, TestApp-iPad-Release.xcresult, TestApp-iPad-Release.xcre... / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 26 in Sources/SpeziLLMLocal/LLMLocalSession+Generate.swift

View workflow job for this annotation

GitHub Actions / Build and Test UI Tests visionOS (Debug, TestApp-visionOS.xcresult, TestApp-visionOS.xcresult) / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 26 in Sources/SpeziLLMLocal/LLMLocalSession+Generate.swift

View workflow job for this annotation

GitHub Actions / Build and Test UI Tests visionOS (Release, TestApp-visionOS-Release.xcresult, TestApp-visionOS-Re... / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 26 in Sources/SpeziLLMLocal/LLMLocalSession+Generate.swift

View workflow job for this annotation

GitHub Actions / Build and Test UI Tests visionOS (Release, TestApp-visionOS-Release.xcresult, TestApp-visionOS-Re... / Test using xcodebuild or run fastlane

code after 'return' will never be executed
Self.logger.error("SpeziLLMLocal: Failed to load `modelContainer`")
await finishGenerationWithError(LLMLocalError.modelNotFound, on: continuation)
Expand Down Expand Up @@ -119,4 +124,27 @@ extension LLMLocalSession {
state = .ready
}
}

private func _mockGenerate(continuation: AsyncThrowingStream<String, any Error>.Continuation) async {
let tokens = [
"Mock ", "Message ", "from ", "SpeziLLM! ",
"**Using SpeziLLMLocal only works on physical devices.**",
"\n\n",
String(localized: "LLM_MLX_NOT_SUPPORTED_WORKAROUND", bundle: .module)
]

for token in tokens {
try? await Task.sleep(for: .seconds(1))
guard await !checkCancellation(on: continuation) else {
return
}
continuation.yield(token)
}

continuation.finish()
await MainActor.run {
context.completeAssistantStreaming()
self.state = .ready
}
}
}
22 changes: 22 additions & 0 deletions Sources/SpeziLLMLocal/LLMLocalSession+Setup.swift
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,10 @@ extension LLMLocalSession {

// swiftlint:disable:next identifier_name
internal func _setup(continuation: AsyncThrowingStream<String, Error>.Continuation?) async -> Bool {
#if targetEnvironment(simulator)
return await _mockSetup(continuation: continuation)
#endif

Self.logger.debug("SpeziLLMLocal: Local LLM is being initialized")

Check warning on line 34 in Sources/SpeziLLMLocal/LLMLocalSession+Setup.swift

View workflow job for this annotation

GitHub Actions / Build and Test Swift Package iOS (Debug, SpeziLLM-iOS.xcresult, SpeziLLM-iOS.xcresult) / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 34 in Sources/SpeziLLMLocal/LLMLocalSession+Setup.swift

View workflow job for this annotation

GitHub Actions / Build and Test Swift Package iOS (Release, SpeziLLM-iOS-Release.xcresult, SpeziLLM-iOS-Release.xc... / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 34 in Sources/SpeziLLMLocal/LLMLocalSession+Setup.swift

View workflow job for this annotation

GitHub Actions / Build and Test Swift Package visionOS (Debug, SpeziLLM-visionOS.xcresult, SpeziLLM-visionOS.xcres... / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 34 in Sources/SpeziLLMLocal/LLMLocalSession+Setup.swift

View workflow job for this annotation

GitHub Actions / Build and Test Swift Package visionOS (Release, SpeziLLM-visionOS-Release.xcresult, SpeziLLM-visi... / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 34 in Sources/SpeziLLMLocal/LLMLocalSession+Setup.swift

View workflow job for this annotation

GitHub Actions / Build and Test UI Tests iOS (Debug, TestApp-iOS.xcresult, TestApp-iOS.xcresult) / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 34 in Sources/SpeziLLMLocal/LLMLocalSession+Setup.swift

View workflow job for this annotation

GitHub Actions / Build and Test UI Tests iOS (Release, TestApp-iOS-Release.xcresult, TestApp-iOS-Release.xcresult) / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 34 in Sources/SpeziLLMLocal/LLMLocalSession+Setup.swift

View workflow job for this annotation

GitHub Actions / Build and Test UI Tests iOS (Release, TestApp-iOS-Release.xcresult, TestApp-iOS-Release.xcresult) / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 34 in Sources/SpeziLLMLocal/LLMLocalSession+Setup.swift

View workflow job for this annotation

GitHub Actions / Build and Test UI Tests iPadOS (Debug, TestApp-iPad.xcresult, TestApp-iPad.xcresult) / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 34 in Sources/SpeziLLMLocal/LLMLocalSession+Setup.swift

View workflow job for this annotation

GitHub Actions / Build and Test UI Tests iPadOS (Release, TestApp-iPad-Release.xcresult, TestApp-iPad-Release.xcre... / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 34 in Sources/SpeziLLMLocal/LLMLocalSession+Setup.swift

View workflow job for this annotation

GitHub Actions / Build and Test UI Tests iPadOS (Release, TestApp-iPad-Release.xcresult, TestApp-iPad-Release.xcre... / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 34 in Sources/SpeziLLMLocal/LLMLocalSession+Setup.swift

View workflow job for this annotation

GitHub Actions / Build and Test UI Tests visionOS (Debug, TestApp-visionOS.xcresult, TestApp-visionOS.xcresult) / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 34 in Sources/SpeziLLMLocal/LLMLocalSession+Setup.swift

View workflow job for this annotation

GitHub Actions / Build and Test UI Tests visionOS (Release, TestApp-visionOS-Release.xcresult, TestApp-visionOS-Re... / Test using xcodebuild or run fastlane

code after 'return' will never be executed

Check warning on line 34 in Sources/SpeziLLMLocal/LLMLocalSession+Setup.swift

View workflow job for this annotation

GitHub Actions / Build and Test UI Tests visionOS (Release, TestApp-visionOS-Release.xcresult, TestApp-visionOS-Re... / Test using xcodebuild or run fastlane

code after 'return' will never be executed

await MainActor.run {
Expand Down Expand Up @@ -62,4 +66,22 @@ extension LLMLocalSession {
Self.logger.debug("SpeziLLMLocal: Local LLM has finished initializing")
return true
}

private func _mockSetup(continuation: AsyncThrowingStream<String, Error>.Continuation?) async -> Bool {
Self.logger.debug("SpeziLLMLocal: Local Mock LLM is being initialized")

await MainActor.run {
self.state = .loading
}

try? await Task.sleep(for: .seconds(1))

await MainActor.run {
self.state = .ready
}

Self.logger.debug("SpeziLLMLocal: Local Mock LLM has finished initializing")

return true
}
}
111 changes: 0 additions & 111 deletions Sources/SpeziLLMLocal/Mock/LLMLocalMockSession.swift

This file was deleted.

11 changes: 11 additions & 0 deletions Sources/SpeziLLMLocal/Resources/Localizable.xcstrings
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,17 @@
}
}
},
"LLM_MLX_NOT_SUPPORTED_WORKAROUND" : {
"extractionState" : "manual",
"localizations" : {
"en" : {
"stringUnit" : {
"state" : "translated",
"value" : "Here are two recommended workarounds:\n1. Add the Mac (Designed for iPad) destination to your target in Xcode.\n- SpeziLLMLocal requires MLX which requires Apple silicon, with `Mac (Designed for iPad)` you build an iPad application that will run on macOS.\n- The UI may present with differences to iOS, but this will allow you to build an iOS binary that runs with a fully featured Metal GPU.\n\n2. Make a multiplatform application that can run on macOS, iOS and iPadOS.\n- With SwiftUI it is possible to do most of your development in a macOS application and fine tune it for iOS by running it on an actual device.\n\nYou can also use the simulator for developing UI features but local LLM execution is not possible."
}
}
}
},
"LLM_MODEL_NOT_FOUND_ERROR_DESCRIPTION" : {
"localizations" : {
"en" : {
Expand Down
50 changes: 29 additions & 21 deletions Sources/SpeziLLMLocalDownload/LLMLocalDownloadManager.swift
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,11 @@

import Foundation
import Hub
import MLXLLM
import Observation
import SpeziLLMLocal
import SpeziViews


/// Manages the download and storage of Large Language Models (LLM) to the local device.
///
/// One configures the ``LLMLocalDownloadManager`` via the ``LLMLocalDownloadManager/init(llmDownloadUrl:llmStorageUrl:)`` initializer,
Expand All @@ -28,7 +28,7 @@ public final class LLMLocalDownloadManager: NSObject {
public enum DownloadState: Equatable {
case idle
case downloading(progress: Progress)
case downloaded(storageUrl: URL)
case downloaded
case error(LocalizedError)


Expand All @@ -47,25 +47,25 @@ public final class LLMLocalDownloadManager: NSObject {
@ObservationIgnored private var downloadTask: Task<(), Never>?
/// Indicates the current state of the ``LLMLocalDownloadManager``.
@MainActor public var state: DownloadState = .idle
private let modelConfiguration: ModelConfiguration
private let model: LLMLocalModel

@ObservationIgnored public var modelExists: Bool {
LLMLocalDownloadManager.modelExsist(model: .custom(id: modelConfiguration.name))
    /// Whether the configured model's files are already present in the local Hub cache.
    /// Delegates to the static `modelExist(model:)` check; excluded from observation
    /// since it is derived on demand rather than stored.
    @ObservationIgnored public var modelExist: Bool {
        LLMLocalDownloadManager.modelExist(model: model)
    }

/// Initializes a ``LLMLocalDownloadManager`` instance to manage the download of Large Language Model (LLM) files from remote servers.
///
/// - Parameters:
/// - model: The local LLM model whose files need to be downloaded.
public init(model: LLMLocalModel) {
self.modelConfiguration = .init(id: model.hubID)
self.model = model
}

/// Checks if a model is already downloaded to the local device.
///
/// - Parameter model: The model to check for local existence.
/// - Returns: A Boolean value indicating whether the model exists on the device.
public static func modelExsist(model: LLMLocalModel) -> Bool {
public static func modelExist(model: LLMLocalModel) -> Bool {
let repo = Hub.Repo(id: model.hubID)
let url = HubApi.shared.localRepoLocation(repo)
let modelFileExtension = ".safetensors"
Expand All @@ -79,28 +79,23 @@ public final class LLMLocalDownloadManager: NSObject {
}

/// Starts a `URLSessionDownloadTask` to download the specified model.
public func startDownload() {
if case let .directory(url) = modelConfiguration.id {
public func startDownload() async {
if modelExist {
Task { @MainActor in
self.state = .downloaded(storageUrl: url)
self.state = .downloaded
}
return
}

downloadTask?.cancel()
await cancelDownload()
downloadTask = Task(priority: .userInitiated) {
do {
_ = try await loadModelContainer(configuration: modelConfiguration) { progress in
Task { @MainActor in
self.state = .downloading(progress: progress)
}
}

Task { @MainActor in
self.state = .downloaded(storageUrl: modelConfiguration.modelDirectory())
try await downloadWithHub()
await MainActor.run {
self.state = .downloaded
}
} catch {
Task { @MainActor in
await MainActor.run {
self.state = .error(
AnyLocalizedError(
error: error,
Expand All @@ -113,7 +108,20 @@ public final class LLMLocalDownloadManager: NSObject {
}

/// Cancels the in-flight model download task, if any, and resets the manager to `.idle`.
public func cancelDownload() {
    /// Cancels any in-flight model download and resets the published state.
    /// Cancelling `downloadTask` cooperatively stops the Hub snapshot download;
    /// `state` is reset on the main actor so UI observers see `.idle`.
    public func cancelDownload() async {
        downloadTask?.cancel()
        await MainActor.run {
            self.state = .idle
        }
    }

@MainActor
private func downloadWithHub() async throws {
let repo = Hub.Repo(id: model.hubID)
let modelFiles = ["*.safetensors", "config.json"]

try await HubApi.shared.snapshot(from: repo, matching: modelFiles) { progress in
self.state = .downloading(progress: progress)
}
}
}
Loading

0 comments on commit 26b1e07

Please sign in to comment.