Skip to content

Commit e790eef

Browse files
authored
llama.swiftui : update models layout (#4826)
* Updated Models Layout - Added a models drawer - Added downloading directly from Hugging Face - Load custom models from local folder - Delete models by swiping left * trimmed trailing white space * Updated Models Layout
1 parent 5537d9d commit e790eef

File tree

5 files changed

+333
-100
lines changed

5 files changed

+333
-100
lines changed

examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88

99
/* Begin PBXBuildFile section */
1010
549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 549479CA2AC9E16000E0F78B /* Metal.framework */; };
11+
79E1D9CD2B4CD16E005F8E46 /* InputButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79E1D9CC2B4CD16E005F8E46 /* InputButton.swift */; };
1112
7FA3D2B32B2EA2F600543F92 /* DownloadButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */; };
1213
8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */; };
1314
8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83782AC328BD0096AF73 /* ContentView.swift */; };
@@ -22,6 +23,7 @@
2223

2324
/* Begin PBXFileReference section */
2425
549479CA2AC9E16000E0F78B /* Metal.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Metal.framework; path = System/Library/Frameworks/Metal.framework; sourceTree = SDKROOT; };
26+
79E1D9CC2B4CD16E005F8E46 /* InputButton.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InputButton.swift; sourceTree = "<group>"; };
2527
7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DownloadButton.swift; sourceTree = "<group>"; };
2628
8A1C83732AC328BD0096AF73 /* llama.swiftui.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = llama.swiftui.app; sourceTree = BUILT_PRODUCTS_DIR; };
2729
8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = llama_swiftuiApp.swift; sourceTree = "<group>"; };
@@ -119,6 +121,7 @@
119121
7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */,
120122
8A1C83782AC328BD0096AF73 /* ContentView.swift */,
121123
F1FE20E12B465EC900B45541 /* LoadCustomButton.swift */,
124+
79E1D9CC2B4CD16E005F8E46 /* InputButton.swift */,
122125
);
123126
path = UI;
124127
sourceTree = "<group>";
@@ -213,6 +216,7 @@
213216
8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */,
214217
8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */,
215218
7FA3D2B32B2EA2F600543F92 /* DownloadButton.swift in Sources */,
219+
79E1D9CD2B4CD16E005F8E46 /* InputButton.swift in Sources */,
216220
);
217221
runOnlyForDeploymentPostprocessing = 0;
218222
};
@@ -345,7 +349,7 @@
345349
CLANG_ENABLE_MODULES = YES;
346350
CODE_SIGN_STYLE = Automatic;
347351
CURRENT_PROJECT_VERSION = 1;
348-
DEVELOPMENT_TEAM = STLSG3FG8Q;
352+
DEVELOPMENT_TEAM = K5UQJPP73A;
349353
ENABLE_PREVIEWS = YES;
350354
GENERATE_INFOPLIST_FILE = YES;
351355
INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
@@ -377,7 +381,7 @@
377381
CLANG_ENABLE_MODULES = YES;
378382
CODE_SIGN_STYLE = Automatic;
379383
CURRENT_PROJECT_VERSION = 1;
380-
DEVELOPMENT_TEAM = STLSG3FG8Q;
384+
DEVELOPMENT_TEAM = K5UQJPP73A;
381385
ENABLE_PREVIEWS = YES;
382386
GENERATE_INFOPLIST_FILE = YES;
383387
INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;

examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift

Lines changed: 89 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,19 @@
11
import Foundation
22

3+
struct Model: Identifiable {
4+
var id = UUID()
5+
var name: String
6+
var url: String
7+
var filename: String
8+
var status: String?
9+
}
10+
311
@MainActor
412
class LlamaState: ObservableObject {
513
@Published var messageLog = ""
614
@Published var cacheCleared = false
15+
@Published var downloadedModels: [Model] = []
16+
@Published var undownloadedModels: [Model] = []
717
let NS_PER_S = 1_000_000_000.0
818

919
private var llamaContext: LlamaContext?
@@ -13,23 +23,102 @@ class LlamaState: ObservableObject {
1323
}
1424

1525
init() {
26+
loadModelsFromDisk()
27+
loadDefaultModels()
28+
}
29+
30+
private func loadModelsFromDisk() {
31+
do {
32+
let documentsURL = getDocumentsDirectory()
33+
let modelURLs = try FileManager.default.contentsOfDirectory(at: documentsURL, includingPropertiesForKeys: nil, options: [.skipsHiddenFiles, .skipsSubdirectoryDescendants])
34+
for modelURL in modelURLs {
35+
let modelName = modelURL.deletingPathExtension().lastPathComponent
36+
downloadedModels.append(Model(name: modelName, url: "", filename: modelURL.lastPathComponent, status: "downloaded"))
37+
}
38+
} catch {
39+
print("Error loading models from disk: \(error)")
40+
}
41+
}
42+
43+
private func loadDefaultModels() {
1644
do {
1745
try loadModel(modelUrl: defaultModelUrl)
1846
} catch {
1947
messageLog += "Error!\n"
2048
}
49+
50+
for model in defaultModels {
51+
let fileURL = getDocumentsDirectory().appendingPathComponent(model.filename)
52+
if FileManager.default.fileExists(atPath: fileURL.path) {
53+
54+
} else {
55+
var undownloadedModel = model
56+
undownloadedModel.status = "download"
57+
undownloadedModels.append(undownloadedModel)
58+
}
59+
}
2160
}
2261

62+
func getDocumentsDirectory() -> URL {
63+
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
64+
return paths[0]
65+
}
66+
private let defaultModels: [Model] = [
67+
Model(name: "TinyLlama-1.1B (Q4_0, 0.6 GiB)",url: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q4_0.gguf?download=true",filename: "tinyllama-1.1b-1t-openorca.Q4_0.gguf", status: "download"),
68+
Model(
69+
name: "TinyLlama-1.1B Chat (Q8_0, 1.1 GiB)",
70+
url: "https://huggingface.co/TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF/resolve/main/tinyllama-1.1b-chat-v1.0.Q8_0.gguf?download=true",
71+
filename: "tinyllama-1.1b-chat-v1.0.Q8_0.gguf", status: "download"
72+
),
73+
74+
Model(
75+
name: "TinyLlama-1.1B (F16, 2.2 GiB)",
76+
url: "https://huggingface.co/ggml-org/models/resolve/main/tinyllama-1.1b/ggml-model-f16.gguf?download=true",
77+
filename: "tinyllama-1.1b-f16.gguf", status: "download"
78+
),
79+
80+
Model(
81+
name: "Phi-2.7B (Q4_0, 1.6 GiB)",
82+
url: "https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q4_0.gguf?download=true",
83+
filename: "phi-2-q4_0.gguf", status: "download"
84+
),
85+
86+
Model(
87+
name: "Phi-2.7B (Q8_0, 2.8 GiB)",
88+
url: "https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q8_0.gguf?download=true",
89+
filename: "phi-2-q8_0.gguf", status: "download"
90+
),
91+
92+
Model(
93+
name: "Mistral-7B-v0.1 (Q4_0, 3.8 GiB)",
94+
url: "https://huggingface.co/TheBloke/Mistral-7B-v0.1-GGUF/resolve/main/mistral-7b-v0.1.Q4_0.gguf?download=true",
95+
filename: "mistral-7b-v0.1.Q4_0.gguf", status: "download"
96+
),
97+
Model(
98+
name: "OpenHermes-2.5-Mistral-7B (Q3_K_M, 3.52 GiB)",
99+
url: "https://huggingface.co/TheBloke/OpenHermes-2.5-Mistral-7B-GGUF/resolve/main/openhermes-2.5-mistral-7b.Q3_K_M.gguf?download=true",
100+
filename: "openhermes-2.5-mistral-7b.Q3_K_M.gguf", status: "download"
101+
)
102+
]
23103
func loadModel(modelUrl: URL?) throws {
24104
if let modelUrl {
25105
messageLog += "Loading model...\n"
26106
llamaContext = try LlamaContext.create_context(path: modelUrl.path())
27107
messageLog += "Loaded model \(modelUrl.lastPathComponent)\n"
108+
109+
// Assuming that the model is successfully loaded, update the downloaded models
110+
updateDownloadedModels(modelName: modelUrl.lastPathComponent, status: "downloaded")
28111
} else {
29112
messageLog += "Load a model from the list below\n"
30113
}
31114
}
32115

116+
117+
private func updateDownloadedModels(modelName: String, status: String) {
118+
undownloadedModels.removeAll { $0.name == modelName }
119+
}
120+
121+
33122
func complete(text: String) async {
34123
guard let llamaContext else {
35124
return

0 commit comments

Comments (0)