in HuggingChat-Mac/LocalLLM/ModelManager.swift [160:200]
/// Starts downloading the weights for `model` from the Hugging Face Hub and
/// tracks download progress in `availableModels`.
func downloadModel(_ model: LocalModel) {
    guard let modelIndex = availableModels.firstIndex(where: { $0.id == model.id }) else { return }

    // A model can only be downloaded if it has an associated Hugging Face repo id.
    guard let hfURL = model.hfURL else {
        availableModels[modelIndex].downloadState = .error("Model has no Hugging Face repo id")
        return
    }

    // Mark the model as downloading before kicking off the async work.
    availableModels[modelIndex].downloadState = .downloading(progress: 0)

    let downloadTask = Task {
        do {
            let modelConfig = ModelConfiguration(id: hfURL, defaultPrompt: "")
            let hub = HubApi()

            // Download (or resume) the model snapshot, forwarding progress updates to the UI.
            _ = try await prepareModelDirectory(
                hub: hub,
                configuration: modelConfig
            ) { progress in
                Task { @MainActor in
                    if let idx = self.availableModels.firstIndex(where: { $0.id == model.id }) {
                        self.availableModels[idx].downloadState = .downloading(progress: progress.fractionCompleted)
                    }
                }
            }

            // Update state to downloaded on success.
            await MainActor.run {
                if let idx = self.availableModels.firstIndex(where: { $0.id == model.id }) {
                    self.availableModels[idx].downloadState = .downloaded
                }
                self.fetchAllLocalModels()
            }
        } catch {
            // Surface the failure in the UI and refresh the local model list.
            await MainActor.run {
                if let idx = self.availableModels.firstIndex(where: { $0.id == model.id }) {
                    self.availableModels[idx].downloadState = .error(error.localizedDescription)
                }
                self.fetchAllLocalModels()
            }
            throw error
        }
    }

    // Keep a handle to the task so the download can be cancelled later.
    activeDownloads[model.id] = downloadTask
}
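
// The task handle stored in `activeDownloads` is what makes an in-flight download
// cancellable. The cancellation path is not part of this excerpt; the following is a
// minimal sketch, assuming `activeDownloads` is a dictionary keyed by model id holding
// the tasks created above, and assuming a hypothetical `.notDownloaded` case on the
// download-state enum. It also relies on the underlying Hub download cooperating with
// Swift Task cancellation.
func cancelDownload(_ model: LocalModel) {
    // Request cooperative cancellation of the download task, if one is running.
    activeDownloads[model.id]?.cancel()
    activeDownloads.removeValue(forKey: model.id)

    // Reset the UI state for this model (`.notDownloaded` is an assumed case name).
    if let idx = availableModels.firstIndex(where: { $0.id == model.id }) {
        availableModels[idx].downloadState = .notDownloaded
    }
}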