git.djapps.eu Git - pkg/ggml/sources/llama.cpp/commitdiff
llama.swiftui : add bench functionality (#4483)
author: Georgi Gerganov <redacted>
Sun, 17 Dec 2023 17:38:41 +0000 (19:38 +0200)
committer: GitHub <redacted>
Sun, 17 Dec 2023 17:38:41 +0000 (19:38 +0200)
* llama.swiftui : add bench button

* llama.swiftui : initial bench functionality

* force to use n_gpu_layers on simulator

* add download buttons & expose llamaState.loadModel

* update project.pbxproj

* comment #Preview & fix editorconfig check

* gitignore : xcode stuff

* llama.swiftui : UX improvements

* llama.swiftui : avoid data copy via "downloadTask"

* llama.swiftui : remove model from project

* llama : remove "mostly" from model infos

* llama.swiftui : improve bench

---------

Co-authored-by: jhen <redacted>
.editorconfig
examples/llama.swiftui/.gitignore
examples/llama.swiftui/llama.cpp.swift/LibLlama.swift
examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj
examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift
examples/llama.swiftui/llama.swiftui/UI/ContentView.swift
examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift [new file with mode: 0644]
llama.cpp

index a56e9ccc80b912cbc94e1a998ab842372ae05694..16d16b3b55bf5575e956bebed8de6b635bee4d8c 100644 (file)
@@ -23,3 +23,6 @@ insert_final_newline = unset
 
 [examples/server/public/*]
 indent_size = 2
+
+[examples/llama.swiftui/llama.swiftui.xcodeproj/*]
+indent_style = tab
index 9bce6af399ba9609b3893868e5ab00e855c3185d..e585a2a4f4a5518081a7ff511a93f551de00fa41 100644 (file)
@@ -1 +1,2 @@
 xcuserdata
+xcshareddata
index 3754f055163ea74d194ad7ffc2f8a2291628f3f0..272e1fd8a224105a5884f42a0ef834a78d1d9c02 100644 (file)
@@ -6,16 +6,34 @@ enum LlamaError: Error {
     case couldNotInitializeContext
 }
 
+func llama_batch_clear(_ batch: inout llama_batch) {
+    batch.n_tokens = 0
+}
+
+func llama_batch_add(_ batch: inout llama_batch, _ id: llama_token, _ pos: llama_pos, _ seq_ids: [llama_seq_id], _ logits: Bool) {
+    batch.token   [Int(batch.n_tokens)] = id
+    batch.pos     [Int(batch.n_tokens)] = pos
+    batch.n_seq_id[Int(batch.n_tokens)] = Int32(seq_ids.count)
+    for i in 0..<seq_ids.count {
+        batch.seq_id[Int(batch.n_tokens)]![Int(i)] = seq_ids[i]
+    }
+    batch.logits  [Int(batch.n_tokens)] = logits ? 1 : 0
+
+    batch.n_tokens += 1
+}
+
 actor LlamaContext {
     private var model: OpaquePointer
     private var context: OpaquePointer
     private var batch: llama_batch
     private var tokens_list: [llama_token]
+
     /// This variable is used to store temporarily invalid cchars
     private var temporary_invalid_cchars: [CChar]
 
-    var n_len: Int32 = 512
+    var n_len: Int32 = 64
     var n_cur: Int32 = 0
+
     var n_decode: Int32 = 0
 
     init(model: OpaquePointer, context: OpaquePointer) {
@@ -27,25 +45,34 @@ actor LlamaContext {
     }
 
     deinit {
+        llama_batch_free(batch)
         llama_free(context)
         llama_free_model(model)
         llama_backend_free()
     }
 
-    static func createContext(path: String) throws -> LlamaContext {
+    static func create_context(path: String) throws -> LlamaContext {
         llama_backend_init(false)
-        let model_params = llama_model_default_params()
+        var model_params = llama_model_default_params()
 
+#if targetEnvironment(simulator)
+        model_params.n_gpu_layers = 0
+        print("Running on simulator, force use n_gpu_layers = 0")
+#endif
         let model = llama_load_model_from_file(path, model_params)
         guard let model else {
             print("Could not load model at \(path)")
             throw LlamaError.couldNotInitializeContext
         }
+
+        let n_threads = max(1, min(8, ProcessInfo.processInfo.processorCount - 2))
+        print("Using \(n_threads) threads")
+
         var ctx_params = llama_context_default_params()
-        ctx_params.seed = 1234
+        ctx_params.seed  = 1234
         ctx_params.n_ctx = 2048
-        ctx_params.n_threads = 8
-        ctx_params.n_threads_batch = 8
+        ctx_params.n_threads       = UInt32(n_threads)
+        ctx_params.n_threads_batch = UInt32(n_threads)
 
         let context = llama_new_context_with_model(model, ctx_params)
         guard let context else {
@@ -56,6 +83,26 @@ actor LlamaContext {
         return LlamaContext(model: model, context: context)
     }
 
+    func model_info() -> String {
+        let result = UnsafeMutablePointer<Int8>.allocate(capacity: 256)
+        result.initialize(repeating: Int8(0), count: 256)
+        defer {
+            result.deallocate()
+        }
+
+        // TODO: this is probably very stupid way to get the string from C
+
+        let nChars = llama_model_desc(model, result, 256)
+        let bufferPointer = UnsafeBufferPointer(start: result, count: Int(nChars))
+
+        var SwiftString = ""
+        for char in bufferPointer {
+            SwiftString.append(Character(UnicodeScalar(UInt8(char))))
+        }
+
+        return SwiftString
+    }
+
     func get_n_tokens() -> Int32 {
         return batch.n_tokens;
     }
@@ -79,16 +126,11 @@ actor LlamaContext {
             print(String(cString: token_to_piece(token: id) + [0]))
         }
 
-        // batch = llama_batch_init(512, 0) // done in init()
-        batch.n_tokens = Int32(tokens_list.count)
+        llama_batch_clear(&batch)
 
-        for i1 in 0..<batch.n_tokens {
+        for i1 in 0..<tokens_list.count {
             let i = Int(i1)
-            batch.token[i] = tokens_list[i]
-            batch.pos[i] = i1
-            batch.n_seq_id[Int(i)] = 1
-            batch.seq_id[Int(i)]![0] = 0
-            batch.logits[i] = 0
+            llama_batch_add(&batch, tokens_list[i], Int32(i), [0], false)
         }
         batch.logits[Int(batch.n_tokens) - 1] = 1 // true
 
@@ -141,18 +183,11 @@ actor LlamaContext {
         print(new_token_str)
         // tokens_list.append(new_token_id)
 
-        batch.n_tokens = 0
-
-        batch.token[Int(batch.n_tokens)] = new_token_id
-        batch.pos[Int(batch.n_tokens)] = n_cur
-        batch.n_seq_id[Int(batch.n_tokens)] = 1
-        batch.seq_id[Int(batch.n_tokens)]![0] = 0
-        batch.logits[Int(batch.n_tokens)] = 1 // true
-        batch.n_tokens += 1
+        llama_batch_clear(&batch)
+        llama_batch_add(&batch, new_token_id, n_cur, [0], true)
 
         n_decode += 1
-
-        n_cur += 1
+        n_cur    += 1
 
         if llama_decode(context, batch) != 0 {
             print("failed to evaluate llama!")
@@ -161,14 +196,111 @@ actor LlamaContext {
         return new_token_str
     }
 
+    func bench(pp: Int, tg: Int, pl: Int, nr: Int = 1) -> String {
+        var pp_avg: Double = 0
+        var tg_avg: Double = 0
+
+        var pp_std: Double = 0
+        var tg_std: Double = 0
+
+        for r in 0..<nr {
+            // bench prompt processing
+
+            llama_batch_clear(&batch)
+
+            let n_tokens = pp
+
+            for i in 0..<n_tokens {
+                llama_batch_add(&batch, 0, Int32(i), [0], false)
+            }
+            batch.logits[Int(batch.n_tokens) - 1] = 1 // true
+
+            llama_kv_cache_clear(context)
+
+            let t_pp_start = ggml_time_us()
+
+            if llama_decode(context, batch) != 0 {
+                print("llama_decode() failed during prompt")
+            }
+
+            let t_pp_end = ggml_time_us()
+
+            // bench text generation
+
+            llama_kv_cache_clear(context)
+
+            let t_tg_start = ggml_time_us()
+
+            for i in 0..<tg {
+                llama_batch_clear(&batch)
+
+                for j in 0..<pl {
+                    llama_batch_add(&batch, 0, Int32(i), [Int32(j)], true)
+                }
+
+                if llama_decode(context, batch) != 0 {
+                    print("llama_decode() failed during text generation")
+                }
+            }
+
+            let t_tg_end = ggml_time_us()
+
+            llama_kv_cache_clear(context)
+
+            let t_pp = Double(t_pp_end - t_pp_start) / 1000000.0
+            let t_tg = Double(t_tg_end - t_tg_start) / 1000000.0
+
+            let speed_pp = Double(pp)    / t_pp
+            let speed_tg = Double(pl*tg) / t_tg
+
+            pp_avg += speed_pp
+            tg_avg += speed_tg
+
+            pp_std += speed_pp * speed_pp
+            tg_std += speed_tg * speed_tg
+
+            print("pp \(speed_pp) t/s, tg \(speed_tg) t/s")
+        }
+
+        pp_avg /= Double(nr)
+        tg_avg /= Double(nr)
+
+        if nr > 1 {
+            pp_std = sqrt(pp_std / Double(nr - 1) - pp_avg * pp_avg * Double(nr) / Double(nr - 1))
+            tg_std = sqrt(tg_std / Double(nr - 1) - tg_avg * tg_avg * Double(nr) / Double(nr - 1))
+        } else {
+            pp_std = 0
+            tg_std = 0
+        }
+
+        let model_desc     = model_info();
+        let model_size     = String(format: "%.2f GiB", Double(llama_model_size(model)) / 1024.0 / 1024.0 / 1024.0);
+        let model_n_params = String(format: "%.2f B", Double(llama_model_n_params(model)) / 1e9);
+        let backend        = "Metal";
+        let pp_avg_str     = String(format: "%.2f", pp_avg);
+        let tg_avg_str     = String(format: "%.2f", tg_avg);
+        let pp_std_str     = String(format: "%.2f", pp_std);
+        let tg_std_str     = String(format: "%.2f", tg_std);
+
+        var result = ""
+
+        result += String("| model | size | params | backend | test | t/s |\n")
+        result += String("| --- | --- | --- | --- | --- | --- |\n")
+        result += String("| \(model_desc) | \(model_size) | \(model_n_params) | \(backend) | pp \(pp) | \(pp_avg_str) ± \(pp_std_str) |\n")
+        result += String("| \(model_desc) | \(model_size) | \(model_n_params) | \(backend) | tg \(tg) | \(tg_avg_str) ± \(tg_std_str) |\n")
+
+        return result;
+    }
+
     func clear() {
         tokens_list.removeAll()
         temporary_invalid_cchars.removeAll()
+        llama_kv_cache_clear(context)
     }
 
     private func tokenize(text: String, add_bos: Bool) -> [llama_token] {
         let utf8Count = text.utf8.count
-        let n_tokens = utf8Count + (add_bos ? 1 : 0)
+        let n_tokens = utf8Count + (add_bos ? 1 : 0) + 1
         let tokens = UnsafeMutablePointer<llama_token>.allocate(capacity: n_tokens)
         let tokenCount = llama_tokenize(model, text, Int32(utf8Count), tokens, Int32(n_tokens), add_bos, false)
 
index bc1fd15cebb3179c2e9f9a1e3a0045059f245119..2e61599282203b4498ba0518c92bc4f4c1a10d96 100644 (file)
 // !$*UTF8*$!
 {
-    archiveVersion = 1;
-    classes = {
-    };
-    objectVersion = 56;
-    objects = {
+       archiveVersion = 1;
+       classes = {
+       };
+       objectVersion = 56;
+       objects = {
 
 /* Begin PBXBuildFile section */
-        542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */ = {isa = PBXBuildFile; fileRef = 542376072B0D9BFB008E6A1C /* ggml-quants.c */; };
-        5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */ = {isa = PBXBuildFile; fileRef = 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */; };
-        542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */ = {isa = PBXBuildFile; fileRef = 549479C82AC9E10B00E0F78B /* ggml-metal.metal */; };
-        542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09B2AC8723900A8AEE9 /* ggml.c */; settings = {COMPILER_FLAGS = "-DGGML_USE_ACCELERATE -DGGML_USE_METAL -DGGML_USE_K_QUANTS -O3"; }; };
-        542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */; };
-        542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 542EA0A12AC8729100A8AEE9 /* llama.cpp */; settings = {COMPILER_FLAGS = "-DGGML_USE_K_QUANTS -DGGML_USE_METAL -O3"; }; };
-        549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 549479CA2AC9E16000E0F78B /* Metal.framework */; };
-        549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */ = {isa = PBXBuildFile; fileRef = 549479C52AC9E0F200E0F78B /* ggml-metal.m */; settings = {COMPILER_FLAGS = "-fno-objc-arc -DGGML_SWIFT -DGGML_USE_METAL -O3"; }; };
-        8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */; };
-        8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83782AC328BD0096AF73 /* ContentView.swift */; };
-        8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */; };
-        8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */; };
-        8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8A39BE092AC7601000BFEB40 /* Accelerate.framework */; };
-        8A3F84242AC4C891005E2EE8 /* models in Resources */ = {isa = PBXBuildFile; fileRef = 8A3F84232AC4C891005E2EE8 /* models */; };
-        8A907F332AC7138A006146EA /* LibLlama.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A907F322AC7134E006146EA /* LibLlama.swift */; };
-        8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */; };
+               542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */ = {isa = PBXBuildFile; fileRef = 542376072B0D9BFB008E6A1C /* ggml-quants.c */; settings = {COMPILER_FLAGS = "-O3"; }; };
+               5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */ = {isa = PBXBuildFile; fileRef = 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */; settings = {COMPILER_FLAGS = "-O3"; }; };
+               542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */ = {isa = PBXBuildFile; fileRef = 549479C82AC9E10B00E0F78B /* ggml-metal.metal */; };
+               542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09B2AC8723900A8AEE9 /* ggml.c */; settings = {COMPILER_FLAGS = "-DGGML_USE_ACCELERATE -DGGML_USE_METAL -DGGML_USE_K_QUANTS -O3"; }; };
+               542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */; settings = {COMPILER_FLAGS = "-O3"; }; };
+               542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 542EA0A12AC8729100A8AEE9 /* llama.cpp */; settings = {COMPILER_FLAGS = "-DGGML_USE_K_QUANTS -DGGML_USE_METAL -O3"; }; };
+               549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 549479CA2AC9E16000E0F78B /* Metal.framework */; };
+               549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */ = {isa = PBXBuildFile; fileRef = 549479C52AC9E0F200E0F78B /* ggml-metal.m */; settings = {COMPILER_FLAGS = "-fno-objc-arc -DGGML_SWIFT -DGGML_USE_METAL -O3"; }; };
+               7FA3D2B32B2EA2F600543F92 /* DownloadButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */; };
+               8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */; };
+               8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83782AC328BD0096AF73 /* ContentView.swift */; };
+               8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */; };
+               8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */; };
+               8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8A39BE092AC7601000BFEB40 /* Accelerate.framework */; };
+               8A3F84242AC4C891005E2EE8 /* models in Resources */ = {isa = PBXBuildFile; fileRef = 8A3F84232AC4C891005E2EE8 /* models */; };
+               8A907F332AC7138A006146EA /* LibLlama.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A907F322AC7134E006146EA /* LibLlama.swift */; };
+               8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */; };
 /* End PBXBuildFile section */
 
 /* Begin PBXFileReference section */
-        542376062B0D9BEA008E6A1C /* ggml-quants.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-quants.h"; path = "../../ggml-quants.h"; sourceTree = "<group>"; };
-        542376072B0D9BFB008E6A1C /* ggml-quants.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-quants.c"; path = "../../ggml-quants.c"; sourceTree = "<group>"; };
-        542376092B0D9C40008E6A1C /* ggml-backend.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "ggml-backend.h"; path = "../../ggml-backend.h"; sourceTree = "<group>"; };
-        5423760A2B0D9C4B008E6A1C /* ggml-backend.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-backend.c"; path = "../../ggml-backend.c"; sourceTree = "<group>"; };
-        542EA09B2AC8723900A8AEE9 /* ggml.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = ggml.c; path = ../../ggml.c; sourceTree = "<group>"; };
-        542EA09C2AC8723900A8AEE9 /* ggml.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = ggml.h; path = ../../ggml.h; sourceTree = "<group>"; };
-        542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-alloc.h"; path = "../../ggml-alloc.h"; sourceTree = "<group>"; };
-        542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-alloc.c"; path = "../../ggml-alloc.c"; sourceTree = "<group>"; };
-        542EA0A12AC8729100A8AEE9 /* llama.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = llama.cpp; path = ../../llama.cpp; sourceTree = "<group>"; };
-        542EA0A22AC8729100A8AEE9 /* llama.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = llama.h; path = ../../llama.h; sourceTree = "<group>"; };
-        549479C52AC9E0F200E0F78B /* ggml-metal.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "ggml-metal.m"; path = "../../ggml-metal.m"; sourceTree = "<group>"; };
-        549479C62AC9E0F200E0F78B /* ggml-metal.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-metal.h"; path = "../../ggml-metal.h"; sourceTree = "<group>"; };
-        549479C82AC9E10B00E0F78B /* ggml-metal.metal */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.metal; name = "ggml-metal.metal"; path = "../../ggml-metal.metal"; sourceTree = "<group>"; };
-        549479CA2AC9E16000E0F78B /* Metal.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Metal.framework; path = System/Library/Frameworks/Metal.framework; sourceTree = SDKROOT; };
-        8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "bridging-header.h"; sourceTree = "<group>"; };
-        8A1C83732AC328BD0096AF73 /* llama.swiftui.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = llama.swiftui.app; sourceTree = BUILT_PRODUCTS_DIR; };
-        8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = llama_swiftuiApp.swift; sourceTree = "<group>"; };
-        8A1C83782AC328BD0096AF73 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = "<group>"; };
-        8A1C837A2AC328BE0096AF73 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
-        8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = "<group>"; };
-        8A39BE092AC7601000BFEB40 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; };
-        8A3F841F2AC4C824005E2EE8 /* llama-2-7b-chat.Q2_K.gguf */ = {isa = PBXFileReference; lastKnownFileType = file; path = "llama-2-7b-chat.Q2_K.gguf"; sourceTree = "<group>"; };
-        8A3F84232AC4C891005E2EE8 /* models */ = {isa = PBXFileReference; lastKnownFileType = folder; name = models; path = llama.swiftui/Resources/models; sourceTree = "<group>"; };
-        8A907F322AC7134E006146EA /* LibLlama.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LibLlama.swift; sourceTree = "<group>"; };
-        8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LlamaState.swift; sourceTree = "<group>"; };
+               542376062B0D9BEA008E6A1C /* ggml-quants.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-quants.h"; path = "../../ggml-quants.h"; sourceTree = "<group>"; };
+               542376072B0D9BFB008E6A1C /* ggml-quants.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-quants.c"; path = "../../ggml-quants.c"; sourceTree = "<group>"; };
+               542376092B0D9C40008E6A1C /* ggml-backend.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "ggml-backend.h"; path = "../../ggml-backend.h"; sourceTree = "<group>"; };
+               5423760A2B0D9C4B008E6A1C /* ggml-backend.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-backend.c"; path = "../../ggml-backend.c"; sourceTree = "<group>"; };
+               542EA09B2AC8723900A8AEE9 /* ggml.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = ggml.c; path = ../../ggml.c; sourceTree = "<group>"; };
+               542EA09C2AC8723900A8AEE9 /* ggml.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = ggml.h; path = ../../ggml.h; sourceTree = "<group>"; };
+               542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-alloc.h"; path = "../../ggml-alloc.h"; sourceTree = "<group>"; };
+               542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-alloc.c"; path = "../../ggml-alloc.c"; sourceTree = "<group>"; };
+               542EA0A12AC8729100A8AEE9 /* llama.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = llama.cpp; path = ../../llama.cpp; sourceTree = "<group>"; };
+               542EA0A22AC8729100A8AEE9 /* llama.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = llama.h; path = ../../llama.h; sourceTree = "<group>"; };
+               549479C52AC9E0F200E0F78B /* ggml-metal.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "ggml-metal.m"; path = "../../ggml-metal.m"; sourceTree = "<group>"; };
+               549479C62AC9E0F200E0F78B /* ggml-metal.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-metal.h"; path = "../../ggml-metal.h"; sourceTree = "<group>"; };
+               549479C82AC9E10B00E0F78B /* ggml-metal.metal */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.metal; name = "ggml-metal.metal"; path = "../../ggml-metal.metal"; sourceTree = "<group>"; };
+               549479CA2AC9E16000E0F78B /* Metal.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Metal.framework; path = System/Library/Frameworks/Metal.framework; sourceTree = SDKROOT; };
+               7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DownloadButton.swift; sourceTree = "<group>"; };
+               8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "bridging-header.h"; sourceTree = "<group>"; };
+               8A1C83732AC328BD0096AF73 /* llama.swiftui.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = llama.swiftui.app; sourceTree = BUILT_PRODUCTS_DIR; };
+               8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = llama_swiftuiApp.swift; sourceTree = "<group>"; };
+               8A1C83782AC328BD0096AF73 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = "<group>"; };
+               8A1C837A2AC328BE0096AF73 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
+               8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = "<group>"; };
+               8A39BE092AC7601000BFEB40 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; };
+               8A3F84232AC4C891005E2EE8 /* models */ = {isa = PBXFileReference; lastKnownFileType = folder; name = models; path = llama.swiftui/Resources/models; sourceTree = "<group>"; };
+               8A907F322AC7134E006146EA /* LibLlama.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LibLlama.swift; sourceTree = "<group>"; };
+               8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LlamaState.swift; sourceTree = "<group>"; };
 /* End PBXFileReference section */
 
 /* Begin PBXFrameworksBuildPhase section */
-        8A1C83702AC328BD0096AF73 /* Frameworks */ = {
-            isa = PBXFrameworksBuildPhase;
-            buildActionMask = 2147483647;
-            files = (
-                549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */,
-                8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */,
-            );
-            runOnlyForDeploymentPostprocessing = 0;
-        };
+               8A1C83702AC328BD0096AF73 /* Frameworks */ = {
+                       isa = PBXFrameworksBuildPhase;
+                       buildActionMask = 2147483647;
+                       files = (
+                               549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */,
+                               8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */,
+                       );
+                       runOnlyForDeploymentPostprocessing = 0;
+               };
 /* End PBXFrameworksBuildPhase section */
 
 /* Begin PBXGroup section */
-        8A08D1F62AC7383900FE6CD4 /* llama.cpp */ = {
-            isa = PBXGroup;
-            children = (
-                5423760A2B0D9C4B008E6A1C /* ggml-backend.c */,
-                542376092B0D9C40008E6A1C /* ggml-backend.h */,
-                542376062B0D9BEA008E6A1C /* ggml-quants.h */,
-                542376072B0D9BFB008E6A1C /* ggml-quants.c */,
-                549479C82AC9E10B00E0F78B /* ggml-metal.metal */,
-                549479C62AC9E0F200E0F78B /* ggml-metal.h */,
-                549479C52AC9E0F200E0F78B /* ggml-metal.m */,
-                542EA09B2AC8723900A8AEE9 /* ggml.c */,
-                542EA09C2AC8723900A8AEE9 /* ggml.h */,
-                542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */,
-                542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */,
-                542EA0A12AC8729100A8AEE9 /* llama.cpp */,
-                542EA0A22AC8729100A8AEE9 /* llama.h */,
-            );
-            name = llama.cpp;
-            sourceTree = "<group>";
-        };
-        8A1C836A2AC328BD0096AF73 = {
-            isa = PBXGroup;
-            children = (
-                8A08D1F62AC7383900FE6CD4 /* llama.cpp */,
-                8A907F312AC7134E006146EA /* llama.cpp.swift */,
-                8A3F84232AC4C891005E2EE8 /* models */,
-                8A1C83752AC328BD0096AF73 /* llama.swiftui */,
-                8A1C83742AC328BD0096AF73 /* Products */,
-                8A39BE082AC7601000BFEB40 /* Frameworks */,
-            );
-            sourceTree = "<group>";
-        };
-        8A1C83742AC328BD0096AF73 /* Products */ = {
-            isa = PBXGroup;
-            children = (
-                8A1C83732AC328BD0096AF73 /* llama.swiftui.app */,
-            );
-            name = Products;
-            sourceTree = "<group>";
-        };
-        8A1C83752AC328BD0096AF73 /* llama.swiftui */ = {
-            isa = PBXGroup;
-            children = (
-                8A3F84102AC4BD85005E2EE8 /* Resources */,
-                8A9F7C4B2AC332DC008AE1EA /* Models */,
-                8A9F7C4A2AC332BF008AE1EA /* UI */,
-                8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */,
-                8A1C837A2AC328BE0096AF73 /* Assets.xcassets */,
-                8A1C837C2AC328BE0096AF73 /* Preview Content */,
-            );
-            path = llama.swiftui;
-            sourceTree = "<group>";
-        };
-        8A1C837C2AC328BE0096AF73 /* Preview Content */ = {
-            isa = PBXGroup;
-            children = (
-                8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */,
-            );
-            path = "Preview Content";
-            sourceTree = "<group>";
-        };
-        8A39BE082AC7601000BFEB40 /* Frameworks */ = {
-            isa = PBXGroup;
-            children = (
-                549479CA2AC9E16000E0F78B /* Metal.framework */,
-                8A39BE092AC7601000BFEB40 /* Accelerate.framework */,
-            );
-            name = Frameworks;
-            sourceTree = "<group>";
-        };
-        8A3F84102AC4BD85005E2EE8 /* Resources */ = {
-            isa = PBXGroup;
-            children = (
-                8A3F84112AC4BD8C005E2EE8 /* models */,
-            );
-            path = Resources;
-            sourceTree = "<group>";
-        };
-        8A3F84112AC4BD8C005E2EE8 /* models */ = {
-            isa = PBXGroup;
-            children = (
-                8A3F841F2AC4C824005E2EE8 /* llama-2-7b-chat.Q2_K.gguf */,
-            );
-            path = models;
-            sourceTree = "<group>";
-        };
-        8A907F312AC7134E006146EA /* llama.cpp.swift */ = {
-            isa = PBXGroup;
-            children = (
-                8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */,
-                8A907F322AC7134E006146EA /* LibLlama.swift */,
-            );
-            path = llama.cpp.swift;
-            sourceTree = "<group>";
-        };
-        8A9F7C4A2AC332BF008AE1EA /* UI */ = {
-            isa = PBXGroup;
-            children = (
-                8A1C83782AC328BD0096AF73 /* ContentView.swift */,
-            );
-            path = UI;
-            sourceTree = "<group>";
-        };
-        8A9F7C4B2AC332DC008AE1EA /* Models */ = {
-            isa = PBXGroup;
-            children = (
-                8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */,
-            );
-            path = Models;
-            sourceTree = "<group>";
-        };
+               8A08D1F62AC7383900FE6CD4 /* llama.cpp */ = {
+                       isa = PBXGroup;
+                       children = (
+                               5423760A2B0D9C4B008E6A1C /* ggml-backend.c */,
+                               542376092B0D9C40008E6A1C /* ggml-backend.h */,
+                               542376062B0D9BEA008E6A1C /* ggml-quants.h */,
+                               542376072B0D9BFB008E6A1C /* ggml-quants.c */,
+                               549479C82AC9E10B00E0F78B /* ggml-metal.metal */,
+                               549479C62AC9E0F200E0F78B /* ggml-metal.h */,
+                               549479C52AC9E0F200E0F78B /* ggml-metal.m */,
+                               542EA09B2AC8723900A8AEE9 /* ggml.c */,
+                               542EA09C2AC8723900A8AEE9 /* ggml.h */,
+                               542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */,
+                               542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */,
+                               542EA0A12AC8729100A8AEE9 /* llama.cpp */,
+                               542EA0A22AC8729100A8AEE9 /* llama.h */,
+                       );
+                       name = llama.cpp;
+                       sourceTree = "<group>";
+               };
+               8A1C836A2AC328BD0096AF73 = {
+                       isa = PBXGroup;
+                       children = (
+                               8A08D1F62AC7383900FE6CD4 /* llama.cpp */,
+                               8A907F312AC7134E006146EA /* llama.cpp.swift */,
+                               8A3F84232AC4C891005E2EE8 /* models */,
+                               8A1C83752AC328BD0096AF73 /* llama.swiftui */,
+                               8A1C83742AC328BD0096AF73 /* Products */,
+                               8A39BE082AC7601000BFEB40 /* Frameworks */,
+                       );
+                       sourceTree = "<group>";
+               };
+               8A1C83742AC328BD0096AF73 /* Products */ = {
+                       isa = PBXGroup;
+                       children = (
+                               8A1C83732AC328BD0096AF73 /* llama.swiftui.app */,
+                       );
+                       name = Products;
+                       sourceTree = "<group>";
+               };
+               8A1C83752AC328BD0096AF73 /* llama.swiftui */ = {
+                       isa = PBXGroup;
+                       children = (
+                               8A3F84102AC4BD85005E2EE8 /* Resources */,
+                               8A9F7C4B2AC332DC008AE1EA /* Models */,
+                               8A9F7C4A2AC332BF008AE1EA /* UI */,
+                               8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */,
+                               8A1C837A2AC328BE0096AF73 /* Assets.xcassets */,
+                               8A1C837C2AC328BE0096AF73 /* Preview Content */,
+                       );
+                       path = llama.swiftui;
+                       sourceTree = "<group>";
+               };
+               8A1C837C2AC328BE0096AF73 /* Preview Content */ = {
+                       isa = PBXGroup;
+                       children = (
+                               8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */,
+                       );
+                       path = "Preview Content";
+                       sourceTree = "<group>";
+               };
+               8A39BE082AC7601000BFEB40 /* Frameworks */ = {
+                       isa = PBXGroup;
+                       children = (
+                               549479CA2AC9E16000E0F78B /* Metal.framework */,
+                               8A39BE092AC7601000BFEB40 /* Accelerate.framework */,
+                       );
+                       name = Frameworks;
+                       sourceTree = "<group>";
+               };
+               8A3F84102AC4BD85005E2EE8 /* Resources */ = {
+                       isa = PBXGroup;
+                       children = (
+                               8A3F84112AC4BD8C005E2EE8 /* models */,
+                       );
+                       path = Resources;
+                       sourceTree = "<group>";
+               };
+               8A3F84112AC4BD8C005E2EE8 /* models */ = {
+                       isa = PBXGroup;
+                       children = (
+                       );
+                       path = models;
+                       sourceTree = "<group>";
+               };
+               8A907F312AC7134E006146EA /* llama.cpp.swift */ = {
+                       isa = PBXGroup;
+                       children = (
+                               8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */,
+                               8A907F322AC7134E006146EA /* LibLlama.swift */,
+                       );
+                       path = llama.cpp.swift;
+                       sourceTree = "<group>";
+               };
+               8A9F7C4A2AC332BF008AE1EA /* UI */ = {
+                       isa = PBXGroup;
+                       children = (
+                               7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */,
+                               8A1C83782AC328BD0096AF73 /* ContentView.swift */,
+                       );
+                       path = UI;
+                       sourceTree = "<group>";
+               };
+               8A9F7C4B2AC332DC008AE1EA /* Models */ = {
+                       isa = PBXGroup;
+                       children = (
+                               8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */,
+                       );
+                       path = Models;
+                       sourceTree = "<group>";
+               };
 /* End PBXGroup section */
 
 /* Begin PBXNativeTarget section */
-        8A1C83722AC328BD0096AF73 /* llama.swiftui */ = {
-            isa = PBXNativeTarget;
-            buildConfigurationList = 8A1C83812AC328BE0096AF73 /* Build configuration list for PBXNativeTarget "llama.swiftui" */;
-            buildPhases = (
-                8A1C836F2AC328BD0096AF73 /* Sources */,
-                8A1C83702AC328BD0096AF73 /* Frameworks */,
-                8A1C83712AC328BD0096AF73 /* Resources */,
-            );
-            buildRules = (
-            );
-            dependencies = (
-            );
-            name = llama.swiftui;
-            packageProductDependencies = (
-            );
-            productName = llama.swiftui;
-            productReference = 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */;
-            productType = "com.apple.product-type.application";
-        };
+               8A1C83722AC328BD0096AF73 /* llama.swiftui */ = {
+                       isa = PBXNativeTarget;
+                       buildConfigurationList = 8A1C83812AC328BE0096AF73 /* Build configuration list for PBXNativeTarget "llama.swiftui" */;
+                       buildPhases = (
+                               8A1C836F2AC328BD0096AF73 /* Sources */,
+                               8A1C83702AC328BD0096AF73 /* Frameworks */,
+                               8A1C83712AC328BD0096AF73 /* Resources */,
+                       );
+                       buildRules = (
+                       );
+                       dependencies = (
+                       );
+                       name = llama.swiftui;
+                       packageProductDependencies = (
+                       );
+                       productName = llama.swiftui;
+                       productReference = 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */;
+                       productType = "com.apple.product-type.application";
+               };
 /* End PBXNativeTarget section */
 
 /* Begin PBXProject section */
-        8A1C836B2AC328BD0096AF73 /* Project object */ = {
-            isa = PBXProject;
-            attributes = {
-                BuildIndependentTargetsInParallel = 1;
-                LastSwiftUpdateCheck = 1500;
-                LastUpgradeCheck = 1500;
-                TargetAttributes = {
-                    8A1C83722AC328BD0096AF73 = {
-                        CreatedOnToolsVersion = 15.0;
-                        LastSwiftMigration = 1500;
-                    };
-                };
-            };
-            buildConfigurationList = 8A1C836E2AC328BD0096AF73 /* Build configuration list for PBXProject "llama.swiftui" */;
-            compatibilityVersion = "Xcode 14.0";
-            developmentRegion = en;
-            hasScannedForEncodings = 0;
-            knownRegions = (
-                en,
-                Base,
-            );
-            mainGroup = 8A1C836A2AC328BD0096AF73;
-            packageReferences = (
-            );
-            productRefGroup = 8A1C83742AC328BD0096AF73 /* Products */;
-            projectDirPath = "";
-            projectRoot = "";
-            targets = (
-                8A1C83722AC328BD0096AF73 /* llama.swiftui */,
-            );
-        };
+               8A1C836B2AC328BD0096AF73 /* Project object */ = {
+                       isa = PBXProject;
+                       attributes = {
+                               BuildIndependentTargetsInParallel = 1;
+                               LastSwiftUpdateCheck = 1500;
+                               LastUpgradeCheck = 1500;
+                               TargetAttributes = {
+                                       8A1C83722AC328BD0096AF73 = {
+                                               CreatedOnToolsVersion = 15.0;
+                                               LastSwiftMigration = 1500;
+                                       };
+                               };
+                       };
+                       buildConfigurationList = 8A1C836E2AC328BD0096AF73 /* Build configuration list for PBXProject "llama.swiftui" */;
+                       compatibilityVersion = "Xcode 14.0";
+                       developmentRegion = en;
+                       hasScannedForEncodings = 0;
+                       knownRegions = (
+                               en,
+                               Base,
+                       );
+                       mainGroup = 8A1C836A2AC328BD0096AF73;
+                       packageReferences = (
+                       );
+                       productRefGroup = 8A1C83742AC328BD0096AF73 /* Products */;
+                       projectDirPath = "";
+                       projectRoot = "";
+                       targets = (
+                               8A1C83722AC328BD0096AF73 /* llama.swiftui */,
+                       );
+               };
 /* End PBXProject section */
 
 /* Begin PBXResourcesBuildPhase section */
-        8A1C83712AC328BD0096AF73 /* Resources */ = {
-            isa = PBXResourcesBuildPhase;
-            buildActionMask = 2147483647;
-            files = (
-                542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */,
-                8A3F84242AC4C891005E2EE8 /* models in Resources */,
-                8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */,
-                8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */,
-            );
-            runOnlyForDeploymentPostprocessing = 0;
-        };
+               8A1C83712AC328BD0096AF73 /* Resources */ = {
+                       isa = PBXResourcesBuildPhase;
+                       buildActionMask = 2147483647;
+                       files = (
+                               542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */,
+                               8A3F84242AC4C891005E2EE8 /* models in Resources */,
+                               8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */,
+                               8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */,
+                       );
+                       runOnlyForDeploymentPostprocessing = 0;
+               };
 /* End PBXResourcesBuildPhase section */
 
 /* Begin PBXSourcesBuildPhase section */
-        8A1C836F2AC328BD0096AF73 /* Sources */ = {
-            isa = PBXSourcesBuildPhase;
-            buildActionMask = 2147483647;
-            files = (
-                542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */,
-                549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */,
-                542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */,
-                8A907F332AC7138A006146EA /* LibLlama.swift in Sources */,
-                542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */,
-                8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */,
-                8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */,
-                8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */,
-                542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */,
-                5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */,
-            );
-            runOnlyForDeploymentPostprocessing = 0;
-        };
+               8A1C836F2AC328BD0096AF73 /* Sources */ = {
+                       isa = PBXSourcesBuildPhase;
+                       buildActionMask = 2147483647;
+                       files = (
+                               542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */,
+                               549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */,
+                               542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */,
+                               8A907F332AC7138A006146EA /* LibLlama.swift in Sources */,
+                               542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */,
+                               8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */,
+                               8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */,
+                               8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */,
+                               7FA3D2B32B2EA2F600543F92 /* DownloadButton.swift in Sources */,
+                               542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */,
+                               5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */,
+                       );
+                       runOnlyForDeploymentPostprocessing = 0;
+               };
 /* End PBXSourcesBuildPhase section */
 
 /* Begin XCBuildConfiguration section */
-        8A1C837F2AC328BE0096AF73 /* Debug */ = {
-            isa = XCBuildConfiguration;
-            buildSettings = {
-                ALWAYS_SEARCH_USER_PATHS = NO;
-                ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
-                CLANG_ANALYZER_NONNULL = YES;
-                CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
-                CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
-                CLANG_ENABLE_MODULES = YES;
-                CLANG_ENABLE_OBJC_ARC = YES;
-                CLANG_ENABLE_OBJC_WEAK = YES;
-                CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
-                CLANG_WARN_BOOL_CONVERSION = YES;
-                CLANG_WARN_COMMA = YES;
-                CLANG_WARN_CONSTANT_CONVERSION = YES;
-                CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
-                CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
-                CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
-                CLANG_WARN_EMPTY_BODY = YES;
-                CLANG_WARN_ENUM_CONVERSION = YES;
-                CLANG_WARN_INFINITE_RECURSION = YES;
-                CLANG_WARN_INT_CONVERSION = YES;
-                CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
-                CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
-                CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
-                CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
-                CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
-                CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
-                CLANG_WARN_STRICT_PROTOTYPES = YES;
-                CLANG_WARN_SUSPICIOUS_MOVE = YES;
-                CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
-                CLANG_WARN_UNREACHABLE_CODE = YES;
-                CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
-                COPY_PHASE_STRIP = NO;
-                DEBUG_INFORMATION_FORMAT = dwarf;
-                ENABLE_STRICT_OBJC_MSGSEND = YES;
-                ENABLE_TESTABILITY = YES;
-                ENABLE_USER_SCRIPT_SANDBOXING = YES;
-                GCC_C_LANGUAGE_STANDARD = gnu17;
-                GCC_DYNAMIC_NO_PIC = NO;
-                GCC_NO_COMMON_BLOCKS = YES;
-                GCC_OPTIMIZATION_LEVEL = 0;
-                GCC_PREPROCESSOR_DEFINITIONS = (
-                    "DEBUG=1",
-                    "$(inherited)",
-                );
-                GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
-                GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
-                GCC_WARN_UNDECLARED_SELECTOR = YES;
-                GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
-                GCC_WARN_UNUSED_FUNCTION = YES;
-                GCC_WARN_UNUSED_VARIABLE = YES;
-                IPHONEOS_DEPLOYMENT_TARGET = 17.0;
-                LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
-                MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
-                MTL_FAST_MATH = YES;
-                ONLY_ACTIVE_ARCH = YES;
-                SDKROOT = iphoneos;
-                SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
-                SWIFT_OPTIMIZATION_LEVEL = "-Onone";
-            };
-            name = Debug;
-        };
-        8A1C83802AC328BE0096AF73 /* Release */ = {
-            isa = XCBuildConfiguration;
-            buildSettings = {
-                ALWAYS_SEARCH_USER_PATHS = NO;
-                ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
-                CLANG_ANALYZER_NONNULL = YES;
-                CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
-                CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
-                CLANG_ENABLE_MODULES = YES;
-                CLANG_ENABLE_OBJC_ARC = YES;
-                CLANG_ENABLE_OBJC_WEAK = YES;
-                CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
-                CLANG_WARN_BOOL_CONVERSION = YES;
-                CLANG_WARN_COMMA = YES;
-                CLANG_WARN_CONSTANT_CONVERSION = YES;
-                CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
-                CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
-                CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
-                CLANG_WARN_EMPTY_BODY = YES;
-                CLANG_WARN_ENUM_CONVERSION = YES;
-                CLANG_WARN_INFINITE_RECURSION = YES;
-                CLANG_WARN_INT_CONVERSION = YES;
-                CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
-                CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
-                CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
-                CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
-                CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
-                CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
-                CLANG_WARN_STRICT_PROTOTYPES = YES;
-                CLANG_WARN_SUSPICIOUS_MOVE = YES;
-                CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
-                CLANG_WARN_UNREACHABLE_CODE = YES;
-                CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
-                COPY_PHASE_STRIP = NO;
-                DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
-                ENABLE_NS_ASSERTIONS = NO;
-                ENABLE_STRICT_OBJC_MSGSEND = YES;
-                ENABLE_USER_SCRIPT_SANDBOXING = YES;
-                GCC_C_LANGUAGE_STANDARD = gnu17;
-                GCC_NO_COMMON_BLOCKS = YES;
-                GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
-                GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
-                GCC_WARN_UNDECLARED_SELECTOR = YES;
-                GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
-                GCC_WARN_UNUSED_FUNCTION = YES;
-                GCC_WARN_UNUSED_VARIABLE = YES;
-                IPHONEOS_DEPLOYMENT_TARGET = 17.0;
-                LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
-                MTL_ENABLE_DEBUG_INFO = NO;
-                MTL_FAST_MATH = YES;
-                SDKROOT = iphoneos;
-                SWIFT_COMPILATION_MODE = wholemodule;
-                VALIDATE_PRODUCT = YES;
-            };
-            name = Release;
-        };
-        8A1C83822AC328BE0096AF73 /* Debug */ = {
-            isa = XCBuildConfiguration;
-            buildSettings = {
-                ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
-                ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
-                CLANG_ENABLE_MODULES = YES;
-                CODE_SIGN_STYLE = Automatic;
-                CURRENT_PROJECT_VERSION = 1;
-                DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\"";
-                DEVELOPMENT_TEAM = STLSG3FG8Q;
-                ENABLE_PREVIEWS = YES;
-                GENERATE_INFOPLIST_FILE = YES;
-                INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
-                INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
-                INFOPLIST_KEY_UILaunchScreen_Generation = YES;
-                INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
-                INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
-                IPHONEOS_DEPLOYMENT_TARGET = 16.0;
-                LD_RUNPATH_SEARCH_PATHS = (
-                    "$(inherited)",
-                    "@executable_path/Frameworks",
-                );
-                MARKETING_VERSION = 1.0;
-                PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift";
-                PRODUCT_NAME = "$(TARGET_NAME)";
-                SWIFT_EMIT_LOC_STRINGS = YES;
-                SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h";
-                SWIFT_OPTIMIZATION_LEVEL = "-Onone";
-                SWIFT_VERSION = 5.0;
-                TARGETED_DEVICE_FAMILY = "1,2";
-            };
-            name = Debug;
-        };
-        8A1C83832AC328BE0096AF73 /* Release */ = {
-            isa = XCBuildConfiguration;
-            buildSettings = {
-                ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
-                ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
-                CLANG_ENABLE_MODULES = YES;
-                CODE_SIGN_STYLE = Automatic;
-                CURRENT_PROJECT_VERSION = 1;
-                DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\"";
-                DEVELOPMENT_TEAM = STLSG3FG8Q;
-                ENABLE_PREVIEWS = YES;
-                GENERATE_INFOPLIST_FILE = YES;
-                INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
-                INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
-                INFOPLIST_KEY_UILaunchScreen_Generation = YES;
-                INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
-                INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
-                IPHONEOS_DEPLOYMENT_TARGET = 16.0;
-                LD_RUNPATH_SEARCH_PATHS = (
-                    "$(inherited)",
-                    "@executable_path/Frameworks",
-                );
-                MARKETING_VERSION = 1.0;
-                PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift";
-                PRODUCT_NAME = "$(TARGET_NAME)";
-                SWIFT_EMIT_LOC_STRINGS = YES;
-                SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h";
-                SWIFT_VERSION = 5.0;
-                TARGETED_DEVICE_FAMILY = "1,2";
-            };
-            name = Release;
-        };
+               8A1C837F2AC328BE0096AF73 /* Debug */ = {
+                       isa = XCBuildConfiguration;
+                       buildSettings = {
+                               ALWAYS_SEARCH_USER_PATHS = NO;
+                               ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
+                               CLANG_ANALYZER_NONNULL = YES;
+                               CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
+                               CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
+                               CLANG_ENABLE_MODULES = YES;
+                               CLANG_ENABLE_OBJC_ARC = YES;
+                               CLANG_ENABLE_OBJC_WEAK = YES;
+                               CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
+                               CLANG_WARN_BOOL_CONVERSION = YES;
+                               CLANG_WARN_COMMA = YES;
+                               CLANG_WARN_CONSTANT_CONVERSION = YES;
+                               CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
+                               CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+                               CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+                               CLANG_WARN_EMPTY_BODY = YES;
+                               CLANG_WARN_ENUM_CONVERSION = YES;
+                               CLANG_WARN_INFINITE_RECURSION = YES;
+                               CLANG_WARN_INT_CONVERSION = YES;
+                               CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
+                               CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
+                               CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
+                               CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+                               CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
+                               CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
+                               CLANG_WARN_STRICT_PROTOTYPES = YES;
+                               CLANG_WARN_SUSPICIOUS_MOVE = YES;
+                               CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
+                               CLANG_WARN_UNREACHABLE_CODE = YES;
+                               CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+                               COPY_PHASE_STRIP = NO;
+                               DEBUG_INFORMATION_FORMAT = dwarf;
+                               ENABLE_STRICT_OBJC_MSGSEND = YES;
+                               ENABLE_TESTABILITY = YES;
+                               ENABLE_USER_SCRIPT_SANDBOXING = YES;
+                               GCC_C_LANGUAGE_STANDARD = gnu17;
+                               GCC_DYNAMIC_NO_PIC = NO;
+                               GCC_NO_COMMON_BLOCKS = YES;
+                               GCC_OPTIMIZATION_LEVEL = 0;
+                               GCC_PREPROCESSOR_DEFINITIONS = (
+                                       "DEBUG=1",
+                                       "$(inherited)",
+                               );
+                               GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+                               GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+                               GCC_WARN_UNDECLARED_SELECTOR = YES;
+                               GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+                               GCC_WARN_UNUSED_FUNCTION = YES;
+                               GCC_WARN_UNUSED_VARIABLE = YES;
+                               IPHONEOS_DEPLOYMENT_TARGET = 17.0;
+                               LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
+                               MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
+                               MTL_FAST_MATH = YES;
+                               ONLY_ACTIVE_ARCH = YES;
+                               SDKROOT = iphoneos;
+                               SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
+                               SWIFT_OPTIMIZATION_LEVEL = "-Onone";
+                       };
+                       name = Debug;
+               };
+               8A1C83802AC328BE0096AF73 /* Release */ = {
+                       isa = XCBuildConfiguration;
+                       buildSettings = {
+                               ALWAYS_SEARCH_USER_PATHS = NO;
+                               ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
+                               CLANG_ANALYZER_NONNULL = YES;
+                               CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
+                               CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
+                               CLANG_ENABLE_MODULES = YES;
+                               CLANG_ENABLE_OBJC_ARC = YES;
+                               CLANG_ENABLE_OBJC_WEAK = YES;
+                               CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
+                               CLANG_WARN_BOOL_CONVERSION = YES;
+                               CLANG_WARN_COMMA = YES;
+                               CLANG_WARN_CONSTANT_CONVERSION = YES;
+                               CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
+                               CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+                               CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+                               CLANG_WARN_EMPTY_BODY = YES;
+                               CLANG_WARN_ENUM_CONVERSION = YES;
+                               CLANG_WARN_INFINITE_RECURSION = YES;
+                               CLANG_WARN_INT_CONVERSION = YES;
+                               CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
+                               CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
+                               CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
+                               CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+                               CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
+                               CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
+                               CLANG_WARN_STRICT_PROTOTYPES = YES;
+                               CLANG_WARN_SUSPICIOUS_MOVE = YES;
+                               CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
+                               CLANG_WARN_UNREACHABLE_CODE = YES;
+                               CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+                               COPY_PHASE_STRIP = NO;
+                               DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+                               ENABLE_NS_ASSERTIONS = NO;
+                               ENABLE_STRICT_OBJC_MSGSEND = YES;
+                               ENABLE_USER_SCRIPT_SANDBOXING = YES;
+                               GCC_C_LANGUAGE_STANDARD = gnu17;
+                               GCC_NO_COMMON_BLOCKS = YES;
+                               GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+                               GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+                               GCC_WARN_UNDECLARED_SELECTOR = YES;
+                               GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+                               GCC_WARN_UNUSED_FUNCTION = YES;
+                               GCC_WARN_UNUSED_VARIABLE = YES;
+                               IPHONEOS_DEPLOYMENT_TARGET = 17.0;
+                               LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
+                               MTL_ENABLE_DEBUG_INFO = NO;
+                               MTL_FAST_MATH = YES;
+                               SDKROOT = iphoneos;
+                               SWIFT_COMPILATION_MODE = wholemodule;
+                               VALIDATE_PRODUCT = YES;
+                       };
+                       name = Release;
+               };
+               8A1C83822AC328BE0096AF73 /* Debug */ = {
+                       isa = XCBuildConfiguration;
+                       buildSettings = {
+                               ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+                               ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
+                               CLANG_ENABLE_MODULES = YES;
+                               CODE_SIGN_STYLE = Automatic;
+                               CURRENT_PROJECT_VERSION = 1;
+                               DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\"";
+                               DEVELOPMENT_TEAM = STLSG3FG8Q;
+                               ENABLE_PREVIEWS = YES;
+                               GENERATE_INFOPLIST_FILE = YES;
+                               INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
+                               INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
+                               INFOPLIST_KEY_UILaunchScreen_Generation = YES;
+                               INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
+                               INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
+                               IPHONEOS_DEPLOYMENT_TARGET = 16.0;
+                               LD_RUNPATH_SEARCH_PATHS = (
+                                       "$(inherited)",
+                                       "@executable_path/Frameworks",
+                               );
+                               MARKETING_VERSION = 1.0;
+                               PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift";
+                               PRODUCT_NAME = "$(TARGET_NAME)";
+                               SWIFT_EMIT_LOC_STRINGS = YES;
+                               SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h";
+                               SWIFT_OPTIMIZATION_LEVEL = "-Onone";
+                               SWIFT_VERSION = 5.0;
+                               TARGETED_DEVICE_FAMILY = "1,2";
+                       };
+                       name = Debug;
+               };
+               8A1C83832AC328BE0096AF73 /* Release */ = {
+                       isa = XCBuildConfiguration;
+                       buildSettings = {
+                               ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+                               ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
+                               CLANG_ENABLE_MODULES = YES;
+                               CODE_SIGN_STYLE = Automatic;
+                               CURRENT_PROJECT_VERSION = 1;
+                               DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\"";
+                               DEVELOPMENT_TEAM = STLSG3FG8Q;
+                               ENABLE_PREVIEWS = YES;
+                               GENERATE_INFOPLIST_FILE = YES;
+                               INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
+                               INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
+                               INFOPLIST_KEY_UILaunchScreen_Generation = YES;
+                               INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
+                               INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
+                               IPHONEOS_DEPLOYMENT_TARGET = 16.0;
+                               LD_RUNPATH_SEARCH_PATHS = (
+                                       "$(inherited)",
+                                       "@executable_path/Frameworks",
+                               );
+                               MARKETING_VERSION = 1.0;
+                               PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift";
+                               PRODUCT_NAME = "$(TARGET_NAME)";
+                               SWIFT_EMIT_LOC_STRINGS = YES;
+                               SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h";
+                               SWIFT_VERSION = 5.0;
+                               TARGETED_DEVICE_FAMILY = "1,2";
+                       };
+                       name = Release;
+               };
 /* End XCBuildConfiguration section */
 
 /* Begin XCConfigurationList section */
-        8A1C836E2AC328BD0096AF73 /* Build configuration list for PBXProject "llama.swiftui" */ = {
-            isa = XCConfigurationList;
-            buildConfigurations = (
-                8A1C837F2AC328BE0096AF73 /* Debug */,
-                8A1C83802AC328BE0096AF73 /* Release */,
-            );
-            defaultConfigurationIsVisible = 0;
-            defaultConfigurationName = Release;
-        };
-        8A1C83812AC328BE0096AF73 /* Build configuration list for PBXNativeTarget "llama.swiftui" */ = {
-            isa = XCConfigurationList;
-            buildConfigurations = (
-                8A1C83822AC328BE0096AF73 /* Debug */,
-                8A1C83832AC328BE0096AF73 /* Release */,
-            );
-            defaultConfigurationIsVisible = 0;
-            defaultConfigurationName = Release;
-        };
+               8A1C836E2AC328BD0096AF73 /* Build configuration list for PBXProject "llama.swiftui" */ = {
+                       isa = XCConfigurationList;
+                       buildConfigurations = (
+                               8A1C837F2AC328BE0096AF73 /* Debug */,
+                               8A1C83802AC328BE0096AF73 /* Release */,
+                       );
+                       defaultConfigurationIsVisible = 0;
+                       defaultConfigurationName = Release;
+               };
+               8A1C83812AC328BE0096AF73 /* Build configuration list for PBXNativeTarget "llama.swiftui" */ = {
+                       isa = XCConfigurationList;
+                       buildConfigurations = (
+                               8A1C83822AC328BE0096AF73 /* Debug */,
+                               8A1C83832AC328BE0096AF73 /* Release */,
+                       );
+                       defaultConfigurationIsVisible = 0;
+                       defaultConfigurationName = Release;
+               };
 /* End XCConfigurationList section */
-    };
-    rootObject = 8A1C836B2AC328BD0096AF73 /* Project object */;
+       };
+       rootObject = 8A1C836B2AC328BD0096AF73 /* Project object */;
 }
index babc60cdcc9dcc3649c4ef0a0d56f6b9a283c1ab..3393eb242f9388f1e6d0a01ba2afb219a4937a86 100644 (file)
@@ -3,24 +3,26 @@ import Foundation
 @MainActor
 class LlamaState: ObservableObject {
     @Published var messageLog = ""
+    @Published var cacheCleared = false
 
     private var llamaContext: LlamaContext?
-    private var modelUrl: URL? {
-        Bundle.main.url(forResource: "q8_0", withExtension: "gguf", subdirectory: "models")
+    private var defaultModelUrl: URL? {
+        Bundle.main.url(forResource: "ggml-model", withExtension: "gguf", subdirectory: "models")
         // Bundle.main.url(forResource: "llama-2-7b-chat", withExtension: "Q2_K.gguf", subdirectory: "models")
     }
+
     init() {
         do {
-            try loadModel()
+            try loadModel(modelUrl: defaultModelUrl)
         } catch {
             messageLog += "Error!\n"
         }
     }
 
-    private func loadModel() throws {
+    func loadModel(modelUrl: URL?) throws {
         messageLog += "Loading model...\n"
         if let modelUrl {
-            llamaContext = try LlamaContext.createContext(path: modelUrl.path())
+            llamaContext = try LlamaContext.create_context(path: modelUrl.path())
             messageLog += "Loaded model \(modelUrl.lastPathComponent)\n"
         } else {
             messageLog += "Could not locate model\n"
@@ -31,7 +33,7 @@ class LlamaState: ObservableObject {
         guard let llamaContext else {
             return
         }
-        messageLog += "Attempting to complete text...\n"
+
         await llamaContext.completion_init(text: text)
         messageLog += "\(text)"
 
@@ -42,4 +44,42 @@ class LlamaState: ObservableObject {
         await llamaContext.clear()
         messageLog += "\n\ndone\n"
     }
+
+    func bench() async {
+        guard let llamaContext else {
+            return
+        }
+
+        messageLog += "\n"
+        messageLog += "Running benchmark...\n"
+        messageLog += "Model info: "
+        messageLog += await llamaContext.model_info() + "\n"
+
+        let t_start = DispatchTime.now().uptimeNanoseconds
+        await llamaContext.bench(pp: 8, tg: 4, pl: 1) // heat up
+        let t_end = DispatchTime.now().uptimeNanoseconds
+
+        let t_heat = Double(t_end - t_start) / 1_000_000_000.0
+        messageLog += "Heat up time: \(t_heat) seconds, please wait...\n"
+
+        // if more than 5 seconds, then we're probably running on a slow device
+        if t_heat > 5.0 {
+            messageLog += "Heat up time is too long, aborting benchmark\n"
+            return
+        }
+
+        let result = await llamaContext.bench(pp: 512, tg: 128, pl: 1, nr: 3)
+
+        messageLog += "\(result)"
+        messageLog += "\n"
+    }
+
+    func clear() async {
+        guard let llamaContext else {
+            return
+        }
+
+        await llamaContext.clear()
+        messageLog = ""
+    }
 }
index 0bd16a806d10fa4a61bf8618538376100852271c..219bf4dc19c28b5a75907082651aaec190282fce 100644 (file)
@@ -5,24 +5,97 @@ struct ContentView: View {
 
     @State private var multiLineText = ""
 
+    private static func cleanupModelCaches() {
+        // Delete all models (*.gguf)
+        let fileManager = FileManager.default
+        let documentsUrl =  FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
+        do {
+            let fileURLs = try fileManager.contentsOfDirectory(at: documentsUrl, includingPropertiesForKeys: nil)
+            for fileURL in fileURLs {
+                if fileURL.pathExtension == "gguf" {
+                    try fileManager.removeItem(at: fileURL)
+                }
+            }
+        } catch {
+            print("Error while enumerating files \(documentsUrl.path): \(error.localizedDescription)")
+        }
+    }
+
     var body: some View {
         VStack {
-            ScrollView(.vertical) {
+            ScrollView(.vertical, showsIndicators: true) {
                 Text(llamaState.messageLog)
+                .font(.system(size: 12))
+                .frame(maxWidth: .infinity, alignment: .leading)
+                .padding()
+                .onTapGesture {
+                    UIApplication.shared.sendAction(#selector(UIResponder.resignFirstResponder), to: nil, from: nil, for: nil)
+                }
             }
 
             TextEditor(text: $multiLineText)
-                .frame(height: 200)
+                .frame(height: 80)
                 .padding()
                 .border(Color.gray, width: 0.5)
-            Button(action: {
-                sendText()
-            }) {
-                Text("Send")
-                    .padding()
-                    .background(Color.blue)
-                    .foregroundColor(.white)
-                    .cornerRadius(8)
+
+            HStack {
+                Button("Send") {
+                    sendText()
+                }
+                .padding(8)
+                .background(Color.blue)
+                .foregroundColor(.white)
+                .cornerRadius(8)
+
+                Button("Bench") {
+                    bench()
+                }
+                .padding(8)
+                .background(Color.blue)
+                .foregroundColor(.white)
+                .cornerRadius(8)
+
+                Button("Clear") {
+                    clear()
+                }
+                .padding(8)
+                .background(Color.blue)
+                .foregroundColor(.white)
+                .cornerRadius(8)
+
+                Button("Copy") {
+                    UIPasteboard.general.string = llamaState.messageLog
+                }
+                .padding(8)
+                .background(Color.blue)
+                .foregroundColor(.white)
+                .cornerRadius(8)
+            }
+
+            VStack {
+                DownloadButton(
+                    llamaState: llamaState,
+                    modelName: "TinyLlama-1.1B (Q4_0)",
+                    modelUrl: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q4_0.gguf?download=true",
+                    filename: "tinyllama-1.1b-1t-openorca.Q4_0.gguf"
+                )
+                .font(.system(size: 12))
+                .padding(.top, 4)
+
+                DownloadButton(
+                    llamaState: llamaState,
+                    modelName: "TinyLlama-1.1B (Q8_0)",
+                    modelUrl: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q8_0.gguf?download=true",
+                    filename: "tinyllama-1.1b-1t-openorca.Q8_0.gguf"
+                )
+                .font(.system(size: 12))
+
+                Button("Clear downloaded models") {
+                    ContentView.cleanupModelCaches()
+                    llamaState.cacheCleared = true
+                }
+                .padding(8)
+                .font(.system(size: 12))
             }
         }
         .padding()
@@ -34,9 +107,20 @@ struct ContentView: View {
             multiLineText = ""
         }
     }
+
+    func bench() {
+        Task {
+            await llamaState.bench()
+        }
+    }
+
+    func clear() {
+        Task {
+            await llamaState.clear()
+        }
+    }
 }
-/*
-#Preview {
-    ContentView()
-}
-*/
+
+//#Preview {
+//    ContentView()
+//}
diff --git a/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift b/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift
new file mode 100644 (file)
index 0000000..4bd75cb
--- /dev/null
@@ -0,0 +1,122 @@
+import SwiftUI
+
+struct DownloadButton: View {
+    @ObservedObject private var llamaState: LlamaState
+    private var modelName: String
+    private var modelUrl: String
+    private var filename: String
+
+    @State private var status: String
+
+    @State private var downloadTask: URLSessionDownloadTask?
+    @State private var progress = 0.0
+    @State private var observation: NSKeyValueObservation?
+
+    private static func getFileURL(filename: String) -> URL {
+        FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent(filename)
+    }
+
+    private func checkFileExistenceAndUpdateStatus() {
+    }
+
+    init(llamaState: LlamaState, modelName: String, modelUrl: String, filename: String) {
+        self.llamaState = llamaState
+        self.modelName = modelName
+        self.modelUrl = modelUrl
+        self.filename = filename
+
+        let fileURL = DownloadButton.getFileURL(filename: filename)
+        status = FileManager.default.fileExists(atPath: fileURL.path) ? "downloaded" : "download"
+    }
+
+    private func download() {
+        status = "downloading"
+        print("Downloading model \(modelName) from \(modelUrl)")
+        guard let url = URL(string: modelUrl) else { return }
+        let fileURL = DownloadButton.getFileURL(filename: filename)
+
+        downloadTask = URLSession.shared.downloadTask(with: url) { temporaryURL, response, error in
+            if let error = error {
+                print("Error: \(error.localizedDescription)")
+                return
+            }
+
+            guard let response = response as? HTTPURLResponse, (200...299).contains(response.statusCode) else {
+                print("Server error!")
+                return
+            }
+
+            do {
+                if let temporaryURL = temporaryURL {
+                    try FileManager.default.copyItem(at: temporaryURL, to: fileURL)
+                    print("Writing to \(filename) completed")
+
+                    llamaState.cacheCleared = false
+
+                    status = "downloaded"
+                }
+            } catch let err {
+                print("Error: \(err.localizedDescription)")
+            }
+        }
+
+        observation = downloadTask?.progress.observe(\.fractionCompleted) { progress, _ in
+            self.progress = progress.fractionCompleted
+        }
+
+        downloadTask?.resume()
+    }
+
+    var body: some View {
+        VStack {
+            if status == "download" {
+                Button(action: download) {
+                    Text("Download " + modelName)
+                }
+            } else if status == "downloading" {
+                Button(action: {
+                    downloadTask?.cancel()
+                    status = "download"
+                }) {
+                    Text("\(modelName) (Downloading \(Int(progress * 100))%)")
+                }
+            } else if status == "downloaded" {
+                Button(action: {
+                    let fileURL = DownloadButton.getFileURL(filename: filename)
+                    if !FileManager.default.fileExists(atPath: fileURL.path) {
+                        download()
+                        return
+                    }
+                    do {
+                        try llamaState.loadModel(modelUrl: fileURL)
+                    } catch let err {
+                        print("Error: \(err.localizedDescription)")
+                    }
+                }) {
+                    Text("\(modelName) (Downloaded)")
+                }
+            } else {
+                Text("Unknown status")
+            }
+        }
+        .onDisappear() {
+            downloadTask?.cancel()
+        }
+        .onChange(of: llamaState.cacheCleared) { newValue in
+            if newValue {
+                downloadTask?.cancel()
+                let fileURL = DownloadButton.getFileURL(filename: filename)
+                status = FileManager.default.fileExists(atPath: fileURL.path) ? "downloaded" : "download"
+            }
+        }
+    }
+}
+
+// #Preview {
+//    DownloadButton(
+//        llamaState: LlamaState(),
+//        modelName: "TheBloke / TinyLlama-1.1B-1T-OpenOrca-GGUF (Q4_0)",
+//        modelUrl: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q4_0.gguf?download=true",
+//        filename: "tinyllama-1.1b-1t-openorca.Q4_0.gguf"
+//    )
+// }
index f49214c13a878462cb1bc2ee8ae67583d125d43d..fd9fd6ed9e0082f783d6315c17eab103b2ee6e3c 100644 (file)
--- a/llama.cpp
+++ b/llama.cpp
@@ -2397,25 +2397,25 @@ static std::string llama_model_ftype_name(llama_ftype ftype) {
 
     switch (ftype) {
         case LLAMA_FTYPE_ALL_F32:     return "all F32";
-        case LLAMA_FTYPE_MOSTLY_F16:  return "mostly F16";
-        case LLAMA_FTYPE_MOSTLY_Q4_0: return "mostly Q4_0";
-        case LLAMA_FTYPE_MOSTLY_Q4_1: return "mostly Q4_1";
+        case LLAMA_FTYPE_MOSTLY_F16:  return "F16";
+        case LLAMA_FTYPE_MOSTLY_Q4_0: return "Q4_0";
+        case LLAMA_FTYPE_MOSTLY_Q4_1: return "Q4_1";
         case LLAMA_FTYPE_MOSTLY_Q4_1_SOME_F16:
-                                      return "mostly Q4_1, some F16";
-        case LLAMA_FTYPE_MOSTLY_Q5_0: return "mostly Q5_0";
-        case LLAMA_FTYPE_MOSTLY_Q5_1: return "mostly Q5_1";
-        case LLAMA_FTYPE_MOSTLY_Q8_0: return "mostly Q8_0";
+                                      return "Q4_1, some F16";
+        case LLAMA_FTYPE_MOSTLY_Q5_0: return "Q5_0";
+        case LLAMA_FTYPE_MOSTLY_Q5_1: return "Q5_1";
+        case LLAMA_FTYPE_MOSTLY_Q8_0: return "Q8_0";
 
         // K-quants
-        case LLAMA_FTYPE_MOSTLY_Q2_K:   return "mostly Q2_K";
-        case LLAMA_FTYPE_MOSTLY_Q3_K_S: return "mostly Q3_K - Small";
-        case LLAMA_FTYPE_MOSTLY_Q3_K_M: return "mostly Q3_K - Medium";
-        case LLAMA_FTYPE_MOSTLY_Q3_K_L: return "mostly Q3_K - Large";
-        case LLAMA_FTYPE_MOSTLY_Q4_K_S: return "mostly Q4_K - Small";
-        case LLAMA_FTYPE_MOSTLY_Q4_K_M: return "mostly Q4_K - Medium";
-        case LLAMA_FTYPE_MOSTLY_Q5_K_S: return "mostly Q5_K - Small";
-        case LLAMA_FTYPE_MOSTLY_Q5_K_M: return "mostly Q5_K - Medium";
-        case LLAMA_FTYPE_MOSTLY_Q6_K:   return "mostly Q6_K";
+        case LLAMA_FTYPE_MOSTLY_Q2_K:   return "Q2_K";
+        case LLAMA_FTYPE_MOSTLY_Q3_K_S: return "Q3_K - Small";
+        case LLAMA_FTYPE_MOSTLY_Q3_K_M: return "Q3_K - Medium";
+        case LLAMA_FTYPE_MOSTLY_Q3_K_L: return "Q3_K - Large";
+        case LLAMA_FTYPE_MOSTLY_Q4_K_S: return "Q4_K - Small";
+        case LLAMA_FTYPE_MOSTLY_Q4_K_M: return "Q4_K - Medium";
+        case LLAMA_FTYPE_MOSTLY_Q5_K_S: return "Q5_K - Small";
+        case LLAMA_FTYPE_MOSTLY_Q5_K_M: return "Q5_K - Medium";
+        case LLAMA_FTYPE_MOSTLY_Q6_K:   return "Q6_K";
 
         default: return "unknown, may not work";
     }
@@ -2533,6 +2533,7 @@ static void llm_load_hparams(
                 ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps);
 
                 switch (hparams.n_layer) {
+                    case 22: model.type = e_model::MODEL_1B; break;
                     case 26: model.type = e_model::MODEL_3B; break;
                     case 32: model.type = e_model::MODEL_7B; break;
                     case 40: model.type = e_model::MODEL_13B; break;