1
1
import Foundation
2
2
3
/// A single LLM entry shown in the UI, either already on disk or offered
/// for download.
struct Model: Identifiable {
    /// Stable identity for SwiftUI lists; fresh UUID per instance.
    var id = UUID()
    /// Human-readable display name (e.g. "TinyLlama-1.1B (Q4_0, 0.6 GiB)").
    var name: String
    /// Remote download URL; empty for models discovered on disk.
    var url: String
    /// File name under the app's Documents directory.
    var filename: String
    /// UI state tag — observed values in this file: "download" / "downloaded".
    var status: String?
}
3
11
@MainActor
class LlamaState: ObservableObject {
    // Running transcript shown in the UI: load status, completions, errors.
    @Published var messageLog = ""
    @Published var cacheCleared = false
    // Models discovered in the Documents directory (status "downloaded").
    @Published var downloadedModels: [Model] = []
    // Curated default models whose files are not yet on disk (status "download").
    @Published var undownloadedModels: [Model] = []
    // Nanoseconds per second; used to convert timing measurements for display.
    let NS_PER_S = 1_000_000_000.0

    // Handle to the llama.cpp context; nil until a model is loaded.
    private var llamaContext: LlamaContext?
@@ -13,23 +23,102 @@ class LlamaState: ObservableObject {
13
23
}
14
24
15
25
    init() {
        // First index the models already present on disk, then determine which
        // curated defaults still need downloading (and try to load the default model).
        loadModelsFromDisk()
        loadDefaultModels()
    }
29
+
30
+ private func loadModelsFromDisk( ) {
31
+ do {
32
+ let documentsURL = getDocumentsDirectory ( )
33
+ let modelURLs = try FileManager . default. contentsOfDirectory ( at: documentsURL, includingPropertiesForKeys: nil , options: [ . skipsHiddenFiles, . skipsSubdirectoryDescendants] )
34
+ for modelURL in modelURLs {
35
+ let modelName = modelURL. deletingPathExtension ( ) . lastPathComponent
36
+ downloadedModels. append ( Model ( name: modelName, url: " " , filename: modelURL. lastPathComponent, status: " downloaded " ) )
37
+ }
38
+ } catch {
39
+ print ( " Error loading models from disk: \( error) " )
40
+ }
41
+ }
42
+
43
+ private func loadDefaultModels( ) {
16
44
do {
17
45
try loadModel ( modelUrl: defaultModelUrl)
18
46
} catch {
19
47
messageLog += " Error! \n "
20
48
}
49
+
50
+ for model in defaultModels {
51
+ let fileURL = getDocumentsDirectory ( ) . appendingPathComponent ( model. filename)
52
+ if FileManager . default. fileExists ( atPath: fileURL. path) {
53
+
54
+ } else {
55
+ var undownloadedModel = model
56
+ undownloadedModel. status = " download "
57
+ undownloadedModels. append ( undownloadedModel)
58
+ }
59
+ }
21
60
}
22
61
62
+ func getDocumentsDirectory( ) -> URL {
63
+ let paths = FileManager . default. urls ( for: . documentDirectory, in: . userDomainMask)
64
+ return paths [ 0 ]
65
+ }
66
+ private let defaultModels : [ Model ] = [
67
+ Model ( name: " TinyLlama-1.1B (Q4_0, 0.6 GiB) " , url: " https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q4_0.gguf?download=true " , filename: " tinyllama-1.1b-1t-openorca.Q4_0.gguf " , status: " download " ) ,
68
+ Model (
69
+ name: " TinyLlama-1.1B Chat (Q8_0, 1.1 GiB) " ,
70
+ url: " https://huggingface.co/TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF/resolve/main/tinyllama-1.1b-chat-v1.0.Q8_0.gguf?download=true " ,
71
+ filename: " tinyllama-1.1b-chat-v1.0.Q8_0.gguf " , status: " download "
72
+ ) ,
73
+
74
+ Model (
75
+ name: " TinyLlama-1.1B (F16, 2.2 GiB) " ,
76
+ url: " https://huggingface.co/ggml-org/models/resolve/main/tinyllama-1.1b/ggml-model-f16.gguf?download=true " ,
77
+ filename: " tinyllama-1.1b-f16.gguf " , status: " download "
78
+ ) ,
79
+
80
+ Model (
81
+ name: " Phi-2.7B (Q4_0, 1.6 GiB) " ,
82
+ url: " https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q4_0.gguf?download=true " ,
83
+ filename: " phi-2-q4_0.gguf " , status: " download "
84
+ ) ,
85
+
86
+ Model (
87
+ name: " Phi-2.7B (Q8_0, 2.8 GiB) " ,
88
+ url: " https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q8_0.gguf?download=true " ,
89
+ filename: " phi-2-q8_0.gguf " , status: " download "
90
+ ) ,
91
+
92
+ Model (
93
+ name: " Mistral-7B-v0.1 (Q4_0, 3.8 GiB) " ,
94
+ url: " https://huggingface.co/TheBloke/Mistral-7B-v0.1-GGUF/resolve/main/mistral-7b-v0.1.Q4_0.gguf?download=true " ,
95
+ filename: " mistral-7b-v0.1.Q4_0.gguf " , status: " download "
96
+ ) ,
97
+ Model (
98
+ name: " OpenHermes-2.5-Mistral-7B (Q3_K_M, 3.52 GiB) " ,
99
+ url: " https://huggingface.co/TheBloke/OpenHermes-2.5-Mistral-7B-GGUF/resolve/main/openhermes-2.5-mistral-7b.Q3_K_M.gguf?download=true " ,
100
+ filename: " openhermes-2.5-mistral-7b.Q3_K_M.gguf " , status: " download "
101
+ )
102
+ ]
23
103
func loadModel( modelUrl: URL ? ) throws {
24
104
if let modelUrl {
25
105
messageLog += " Loading model... \n "
26
106
llamaContext = try LlamaContext . create_context ( path: modelUrl. path ( ) )
27
107
messageLog += " Loaded model \( modelUrl. lastPathComponent) \n "
108
+
109
+ // Assuming that the model is successfully loaded, update the downloaded models
110
+ updateDownloadedModels ( modelName: modelUrl. lastPathComponent, status: " downloaded " )
28
111
} else {
29
112
messageLog += " Load a model from the list below \n "
30
113
}
31
114
}
32
115
116
+
117
+ private func updateDownloadedModels( modelName: String , status: String ) {
118
+ undownloadedModels. removeAll { $0. name == modelName }
119
+ }
120
+
121
+
33
122
func complete( text: String ) async {
34
123
guard let llamaContext else {
35
124
return
0 commit comments