git.djapps.eu Git - pkg/ggml/sources/whisper.cpp/commitdiff
whisper.swiftui : add model download list & bench methods (#2546)
authorJhen-Jie Hong <redacted>
Wed, 13 Nov 2024 19:51:34 +0000 (03:51 +0800)
committerGitHub <redacted>
Wed, 13 Nov 2024 19:51:34 +0000 (21:51 +0200)
* swift : fix resources & exclude build

* whisper : impl whisper_timings struct & api

* whisper.swiftui : model list & bench methods

* whisper : return ptr for whisper_get_timings

* revert unnecessary change

* whisper : avoid designated initializer

* whisper.swiftui: code style changes

* whisper.swiftui : get device name / os from UIDevice

* whisper.swiftui : fix UIDevice usage

* whisper.swiftui : add memcpy and ggml_mul_mat (commented)

Package.swift
examples/whisper.swiftui/whisper.cpp.swift/LibWhisper.swift
examples/whisper.swiftui/whisper.swiftui.demo/Models/Model.swift [new file with mode: 0644]
examples/whisper.swiftui/whisper.swiftui.demo/Models/WhisperState.swift
examples/whisper.swiftui/whisper.swiftui.demo/UI/ContentView.swift
examples/whisper.swiftui/whisper.swiftui.demo/UI/DownloadButton.swift [new file with mode: 0644]
examples/whisper.swiftui/whisper.swiftui.xcodeproj/project.pbxproj
include/whisper.h
src/whisper.cpp

index e360ae8ab73555f864c1922ae880f5971dad6a8d..04128f7b2a3de43e26156df00a0e9c4800622e2d 100644 (file)
@@ -18,16 +18,17 @@ let package = Package(
             name: "whisper",
             path: ".",
             exclude: [
+               "build",
                "bindings",
                "cmake",
-               "coreml",
                "examples",
-               "extra",
+               "scripts",
                "models",
                "samples",
                "tests",
                "CMakeLists.txt",
-               "Makefile"
+               "Makefile",
+               "ggml/src/ggml-metal-embed.metal"
             ],
             sources: [
                 "ggml/src/ggml.c",
@@ -38,7 +39,7 @@ let package = Package(
                 "ggml/src/ggml-quants.c",
                 "ggml/src/ggml-metal.m"
             ],
-            resources: [.process("ggml-metal.metal")],
+            resources: [.process("ggml/src/ggml-metal.metal")],
             publicHeadersPath: "spm-headers",
             cSettings: [
                 .unsafeFlags(["-Wno-shorten-64-to-32", "-O3", "-DNDEBUG"]),
index a71175d9aef6555ed8a9b000c8218b2da5df21fe..54cb3bd41e0ee3ec8e21ff31982f6fa432c9bfda 100644 (file)
@@ -1,4 +1,5 @@
 import Foundation
+import UIKit
 import whisper
 
 enum WhisperError: Error {
@@ -55,11 +56,93 @@ actor WhisperContext {
         return transcription
     }
 
+    static func benchMemcpy(nThreads: Int32) async -> String {
+        return String.init(cString: whisper_bench_memcpy_str(nThreads))
+    }
+
+    static func benchGgmlMulMat(nThreads: Int32) async -> String {
+        return String.init(cString: whisper_bench_ggml_mul_mat_str(nThreads))
+    }
+
+    private func systemInfo() -> String {
+        var info = ""
+        if (ggml_cpu_has_neon() != 0) { info += "NEON " }
+        if (ggml_cpu_has_metal() != 0) { info += "METAL " }
+        if (ggml_cpu_has_blas() != 0) { info += "BLAS " }
+        return String(info.dropLast())
+    }
+
+    func benchFull(modelName: String, nThreads: Int32) async -> String {
+        let nMels = whisper_model_n_mels(context)
+        if (whisper_set_mel(context, nil, 0, nMels) != 0) {
+            return "error: failed to set mel"
+        }
+        
+        // heat encoder
+        if (whisper_encode(context, 0, nThreads) != 0) {
+            return "error: failed to encode"
+        }
+        
+        var tokens = [whisper_token](repeating: 0, count: 512)
+        
+        // prompt heat
+        if (whisper_decode(context, &tokens, 256, 0, nThreads) != 0) {
+            return "error: failed to decode"
+        }
+        
+        // text-generation heat
+        if (whisper_decode(context, &tokens, 1, 256, nThreads) != 0) {
+            return "error: failed to decode"
+        }
+        
+        whisper_reset_timings(context)
+        
+        // actual run
+        if (whisper_encode(context, 0, nThreads) != 0) {
+            return "error: failed to encode"
+        }
+        
+        // text-generation
+        for i in 0..<256 {
+            if (whisper_decode(context, &tokens, 1, Int32(i), nThreads) != 0) {
+                return "error: failed to decode"
+            }
+        }
+        
+        // batched decoding
+        for _ in 0..<64 {
+            if (whisper_decode(context, &tokens, 5, 0, nThreads) != 0) {
+                return "error: failed to decode"
+            }
+        }
+        
+        // prompt processing
+        for _ in 0..<16 {
+            if (whisper_decode(context, &tokens, 256, 0, nThreads) != 0) {
+                return "error: failed to decode"
+            }
+        }
+
+        whisper_print_timings(context)
+
+        let deviceModel = await UIDevice.current.model
+        let systemName = await UIDevice.current.systemName
+        let systemInfo = self.systemInfo()
+        let timings: whisper_timings = whisper_get_timings(context).pointee
+        let encodeMs = String(format: "%.2f", timings.encode_ms)
+        let decodeMs = String(format: "%.2f", timings.decode_ms)
+        let batchdMs = String(format: "%.2f", timings.batchd_ms)
+        let promptMs = String(format: "%.2f", timings.prompt_ms)
+        return "| \(deviceModel) | \(systemName) | \(systemInfo) | \(modelName) | \(nThreads) | 1 | \(encodeMs) | \(decodeMs) | \(batchdMs) | \(promptMs) | <todo> |"
+    }
+
     static func createContext(path: String) throws -> WhisperContext {
         var params = whisper_context_default_params()
 #if targetEnvironment(simulator)
         params.use_gpu = false
         print("Running on the simulator, using CPU")
+#else
+        params.flash_attn = true // Enabled by default for Metal
 #endif
         let context = whisper_init_from_file_with_params(path, params)
         if let context {
diff --git a/examples/whisper.swiftui/whisper.swiftui.demo/Models/Model.swift b/examples/whisper.swiftui/whisper.swiftui.demo/Models/Model.swift
new file mode 100644 (file)
index 0000000..3df4dbc
--- /dev/null
@@ -0,0 +1,17 @@
+import Foundation
+
+struct Model: Identifiable {
+    var id = UUID()
+    var name: String
+    var info: String
+    var url: String
+
+    var filename: String
+    var fileURL: URL {
+        FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent(filename)
+    }
+
+    func fileExists() -> Bool {
+        FileManager.default.fileExists(atPath: fileURL.path)
+    }
+}
index 59fba8971e0c8c0ed9cdd618d865ba169fb93d77..5c4863bf3da5fcf286f4f1bc819fae10675145ed 100644 (file)
@@ -14,7 +14,7 @@ class WhisperState: NSObject, ObservableObject, AVAudioRecorderDelegate {
     private var recordedFile: URL? = nil
     private var audioPlayer: AVAudioPlayer?
     
-    private var modelUrl: URL? {
+    private var builtInModelUrl: URL? {
         Bundle.main.url(forResource: "ggml-base.en", withExtension: "bin", subdirectory: "models")
     }
     
@@ -28,23 +28,59 @@ class WhisperState: NSObject, ObservableObject, AVAudioRecorderDelegate {
     
     override init() {
         super.init()
+        loadModel()
+    }
+    
+    func loadModel(path: URL? = nil, log: Bool = true) {
         do {
-            try loadModel()
+            whisperContext = nil
+            if (log) { messageLog += "Loading model...\n" }
+            let modelUrl = path ?? builtInModelUrl
+            if let modelUrl {
+                whisperContext = try WhisperContext.createContext(path: modelUrl.path())
+                if (log) { messageLog += "Loaded model \(modelUrl.lastPathComponent)\n" }
+            } else {
+                if (log) { messageLog += "Could not locate model\n" }
+            }
             canTranscribe = true
         } catch {
             print(error.localizedDescription)
-            messageLog += "\(error.localizedDescription)\n"
+            if (log) { messageLog += "\(error.localizedDescription)\n" }
         }
     }
-    
-    private func loadModel() throws {
-        messageLog += "Loading model...\n"
-        if let modelUrl {
-            whisperContext = try WhisperContext.createContext(path: modelUrl.path())
-            messageLog += "Loaded model \(modelUrl.lastPathComponent)\n"
-        } else {
-            messageLog += "Could not locate model\n"
+
+    func benchCurrentModel() async {
+        if whisperContext == nil {
+            messageLog += "Cannot bench without loaded model\n"
+            return
         }
+        messageLog += "Running benchmark for loaded model\n"
+        let result = await whisperContext?.benchFull(modelName: "<current>", nThreads: Int32(min(4, cpuCount())))
+        if (result != nil) { messageLog += result! + "\n" }
+    }
+
+    func bench(models: [Model]) async {
+        let nThreads = Int32(min(4, cpuCount()))
+
+//        messageLog += "Running memcpy benchmark\n"
+//        messageLog += await WhisperContext.benchMemcpy(nThreads: nThreads) + "\n"
+//
+//        messageLog += "Running ggml_mul_mat benchmark with \(nThreads) threads\n"
+//        messageLog += await WhisperContext.benchGgmlMulMat(nThreads: nThreads) + "\n"
+
+        messageLog += "Running benchmark for all downloaded models\n"
+        messageLog += "| CPU | OS | Config | Model | Th | FA | Enc. | Dec. | Bch5 | PP | Commit |\n"
+        messageLog += "| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |\n"
+        for model in models {
+            loadModel(path: model.fileURL, log: false)
+            if whisperContext == nil {
+                messageLog += "Cannot bench without loaded model\n"
+                break
+            }
+            let result = await whisperContext?.benchFull(modelName: model.name, nThreads: nThreads)
+            if (result != nil) { messageLog += result! + "\n" }
+        }
+        messageLog += "Benchmarking completed\n"
     }
     
     func transcribeSample() async {
@@ -160,3 +196,8 @@ class WhisperState: NSObject, ObservableObject, AVAudioRecorderDelegate {
         isRecording = false
     }
 }
+
+
+fileprivate func cpuCount() -> Int {
+    ProcessInfo.processInfo.processorCount
+}
index d8ef47e6fd1b769dcd349b0b32e5fb119f359019..6a1448bcd3a53e231fa790c1595a28b55d5b666b 100644 (file)
@@ -1,5 +1,6 @@
 import SwiftUI
 import AVFoundation
+import Foundation
 
 struct ContentView: View {
     @StateObject var whisperState = WhisperState()
@@ -29,15 +30,125 @@ struct ContentView: View {
                     Text(verbatim: whisperState.messageLog)
                         .frame(maxWidth: .infinity, alignment: .leading)
                 }
+                .font(.footnote)
+                .padding()
+                .background(Color.gray.opacity(0.1))
+                .cornerRadius(10)
+
+                HStack {
+                    Button("Clear Logs", action: {
+                        whisperState.messageLog = ""
+                    })
+                    .font(.footnote)
+                    .buttonStyle(.bordered)
+
+                    Button("Copy Logs", action: {
+                        UIPasteboard.general.string = whisperState.messageLog
+                    })
+                    .font(.footnote)
+                    .buttonStyle(.bordered)
+
+                    Button("Bench", action: {
+                        Task {
+                            await whisperState.benchCurrentModel()
+                        }
+                    })
+                    .font(.footnote)
+                    .buttonStyle(.bordered)
+                    .disabled(!whisperState.canTranscribe)
+
+                    Button("Bench All", action: {
+                        Task {
+                            await whisperState.bench(models: ModelsView.getDownloadedModels())
+                        }
+                    })
+                    .font(.footnote)
+                    .buttonStyle(.bordered)
+                    .disabled(!whisperState.canTranscribe)
+                }
+
+                NavigationLink(destination: ModelsView(whisperState: whisperState)) {
+                    Text("View Models")
+                }
+                .font(.footnote)
+                .padding()
             }
             .navigationTitle("Whisper SwiftUI Demo")
             .padding()
         }
     }
-}
 
-struct ContentView_Previews: PreviewProvider {
-    static var previews: some View {
-        ContentView()
+    struct ModelsView: View {
+        @ObservedObject var whisperState: WhisperState
+        @Environment(\.dismiss) var dismiss
+        
+        private static let models: [Model] = [
+            Model(name: "tiny", info: "(F16, 75 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny.bin", filename: "tiny.bin"),
+            Model(name: "tiny-q5_1", info: "(31 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny-q5_1.bin", filename: "tiny-q5_1.bin"),
+            Model(name: "tiny-q8_0", info: "(42 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny-q8_0.bin", filename: "tiny-q8_0.bin"),
+            Model(name: "tiny.en", info: "(F16, 75 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny.en.bin", filename: "tiny.en.bin"),
+            Model(name: "tiny.en-q5_1", info: "(31 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny.en-q5_1.bin", filename: "tiny.en-q5_1.bin"),
+            Model(name: "tiny.en-q8_0", info: "(42 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny.en-q8_0.bin", filename: "tiny.en-q8_0.bin"),
+            Model(name: "base", info: "(F16, 142 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.bin", filename: "base.bin"),
+            Model(name: "base-q5_1", info: "(57 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base-q5_1.bin", filename: "base-q5_1.bin"),
+            Model(name: "base-q8_0", info: "(78 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base-q8_0.bin", filename: "base-q8_0.bin"),
+            Model(name: "base.en", info: "(F16, 142 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.en.bin", filename: "base.en.bin"),
+            Model(name: "base.en-q5_1", info: "(57 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.en-q5_1.bin", filename: "base.en-q5_1.bin"),
+            Model(name: "base.en-q8_0", info: "(78 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.en-q8_0.bin", filename: "base.en-q8_0.bin"),
+            Model(name: "small", info: "(F16, 466 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small.bin", filename: "small.bin"),
+            Model(name: "small-q5_1", info: "(181 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small-q5_1.bin", filename: "small-q5_1.bin"),
+            Model(name: "small-q8_0", info: "(252 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small-q8_0.bin", filename: "small-q8_0.bin"),
+            Model(name: "small.en", info: "(F16, 466 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small.en.bin", filename: "small.en.bin"),
+            Model(name: "small.en-q5_1", info: "(181 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small.en-q5_1.bin", filename: "small.en-q5_1.bin"),
+            Model(name: "small.en-q8_0", info: "(252 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small.en-q8_0.bin", filename: "small.en-q8_0.bin"),
+            Model(name: "medium", info: "(F16, 1.5 GiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-medium.bin", filename: "medium.bin"),
+            Model(name: "medium-q5_0", info: "(514 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-medium-q5_0.bin", filename: "medium-q5_0.bin"),
+            Model(name: "medium-q8_0", info: "(785 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-medium-q8_0.bin", filename: "medium-q8_0.bin"),
+            Model(name: "medium.en", info: "(F16, 1.5 GiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-medium.en.bin", filename: "medium.en.bin"),
+            Model(name: "medium.en-q5_0", info: "(514 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-medium.en-q5_0.bin", filename: "medium.en-q5_0.bin"),
+            Model(name: "medium.en-q8_0", info: "(785 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-medium.en-q8_0.bin", filename: "medium.en-q8_0.bin"),
+            Model(name: "large-v1", info: "(F16, 2.9 GiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large.bin", filename: "large.bin"),
+            Model(name: "large-v2", info: "(F16, 2.9 GiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v2.bin", filename: "large-v2.bin"),
+            Model(name: "large-v2-q5_0", info: "(1.1 GiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v2-q5_0.bin", filename: "large-v2-q5_0.bin"),
+            Model(name: "large-v2-q8_0", info: "(1.5 GiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v2-q8_0.bin", filename: "large-v2-q8_0.bin"),
+            Model(name: "large-v3", info: "(F16, 2.9 GiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v3.bin", filename: "large-v3.bin"),
+            Model(name: "large-v3-q5_0", info: "(1.1 GiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v3-q5_0.bin", filename: "large-v3-q5_0.bin"),
+            Model(name: "large-v3-turbo", info: "(F16, 1.5 GiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v3-turbo.bin", filename: "large-v3-turbo.bin"),
+            Model(name: "large-v3-turbo-q5_0", info: "(547 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v3-turbo-q5_0.bin", filename: "large-v3-turbo-q5_0.bin"),
+            Model(name: "large-v3-turbo-q8_0", info: "(834 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v3-turbo-q8_0.bin", filename: "large-v3-turbo-q8_0.bin"),
+        ]
+
+        static func getDownloadedModels() -> [Model] {
+            // Filter models that have been downloaded
+            return models.filter {
+                FileManager.default.fileExists(atPath: $0.fileURL.path())
+            }
+        }
+
+        func loadModel(model: Model) {
+            Task {
+                dismiss()
+                whisperState.loadModel(path: model.fileURL)
+            }
+        }
+
+        var body: some View {
+            List {
+                Section(header: Text("Models")) {
+                    ForEach(ModelsView.models) { model in
+                        DownloadButton(model: model)
+                            .onLoad(perform: loadModel)
+                    }
+                }
+            }
+            .listStyle(GroupedListStyle())
+            .navigationBarTitle("Models", displayMode: .inline).toolbar {}
+        }
     }
 }
+
+//struct ContentView_Previews: PreviewProvider {
+//    static var previews: some View {
+//        ContentView()
+//    }
+//}
diff --git a/examples/whisper.swiftui/whisper.swiftui.demo/UI/DownloadButton.swift b/examples/whisper.swiftui/whisper.swiftui.demo/UI/DownloadButton.swift
new file mode 100644 (file)
index 0000000..d02cc07
--- /dev/null
@@ -0,0 +1,102 @@
+import SwiftUI
+
+struct DownloadButton: View {
+    private var model: Model
+
+    @State private var status: String
+
+    @State private var downloadTask: URLSessionDownloadTask?
+    @State private var progress = 0.0
+    @State private var observation: NSKeyValueObservation?
+
+    private var onLoad: ((_ model: Model) -> Void)?
+
+    init(model: Model) {
+        self.model = model
+        status = model.fileExists() ? "downloaded" : "download"
+    }
+
+    func onLoad(perform action: @escaping (_ model: Model) -> Void) -> DownloadButton {
+        var button = self
+        button.onLoad = action
+        return button
+    }
+
+    private func download() {
+        status = "downloading"
+        print("Downloading model \(model.name) from \(model.url)")
+        guard let url = URL(string: model.url) else { return }
+
+        downloadTask = URLSession.shared.downloadTask(with: url) { temporaryURL, response, error in
+            if let error = error {
+                print("Error: \(error.localizedDescription)")
+                return
+            }
+
+            guard let response = response as? HTTPURLResponse, (200...299).contains(response.statusCode) else {
+                print("Server error!")
+                return
+            }
+
+            do {
+                if let temporaryURL = temporaryURL {
+                    try FileManager.default.copyItem(at: temporaryURL, to: model.fileURL)
+                    print("Writing to \(model.filename) completed")
+                    status = "downloaded"
+                }
+            } catch let err {
+                print("Error: \(err.localizedDescription)")
+            }
+        }
+
+        observation = downloadTask?.progress.observe(\.fractionCompleted) { progress, _ in
+            self.progress = progress.fractionCompleted
+        }
+
+        downloadTask?.resume()
+    }
+
+    var body: some View {
+        VStack {
+            Button(action: {
+                if (status == "download") {
+                    download()
+                } else if (status == "downloading") {
+                    downloadTask?.cancel()
+                    status = "download"
+                } else if (status == "downloaded") {
+                    if !model.fileExists() {
+                        download()
+                    }
+                    onLoad?(model)
+                }
+            }) {
+                let title = "\(model.name) \(model.info)"
+                if (status == "download") {
+                    Text("Download \(title)")
+                } else if (status == "downloading") {
+                    Text("\(title) (Downloading \(Int(progress * 100))%)")
+                } else if (status == "downloaded") {
+                    Text("Load \(title)")
+                } else {
+                    Text("Unknown status")
+                }
+            }.swipeActions {
+                if (status == "downloaded") {
+                    Button("Delete") {
+                        do {
+                            try FileManager.default.removeItem(at: model.fileURL)
+                        } catch {
+                            print("Error deleting file: \(error)")
+                        }
+                        status = "download"
+                    }
+                    .tint(.red)
+                }
+            }
+        }
+        .onDisappear() {
+            downloadTask?.cancel()
+        }
+    }
+}
index d5efc0f5e63e8201562d2366e54df29466446753..db23f6e5c8d88ab3f338c7c68369c6ac30d8e5da 100644 (file)
@@ -17,6 +17,8 @@
                0AAC5D9F29539CD0003032C3 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 0AAC5D9E29539CD0003032C3 /* Assets.xcassets */; };
                0AAC5DCE2953A05C003032C3 /* WhisperState.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0AAC5DCD2953A05C003032C3 /* WhisperState.swift */; };
                0AAC5DD12953A394003032C3 /* LibWhisper.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0AAC5DD02953A394003032C3 /* LibWhisper.swift */; };
+               7F79E0EE2CE0A78000ACD7BF /* DownloadButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7F79E0ED2CE0A78000ACD7BF /* DownloadButton.swift */; };
+               7F79E0F02CE0C6F700ACD7BF /* Model.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7F79E0EF2CE0C6F700ACD7BF /* Model.swift */; };
                E3F92DC52AFA8E3800A6A9D4 /* whisper in Frameworks */ = {isa = PBXBuildFile; productRef = E3F92DC42AFA8E3800A6A9D4 /* whisper */; };
 /* End PBXBuildFile section */
 
@@ -33,6 +35,8 @@
                0AAC5DA029539CD0003032C3 /* WhisperCppDemo.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = WhisperCppDemo.entitlements; sourceTree = "<group>"; };
                0AAC5DCD2953A05C003032C3 /* WhisperState.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WhisperState.swift; sourceTree = "<group>"; };
                0AAC5DD02953A394003032C3 /* LibWhisper.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LibWhisper.swift; sourceTree = "<group>"; };
+               7F79E0ED2CE0A78000ACD7BF /* DownloadButton.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DownloadButton.swift; sourceTree = "<group>"; };
+               7F79E0EF2CE0C6F700ACD7BF /* Model.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Model.swift; sourceTree = "<group>"; };
                E3F92DC22AFA8DD800A6A9D4 /* whisper.cpp */ = {isa = PBXFileReference; lastKnownFileType = wrapper; name = whisper.cpp; path = ../..; sourceTree = "<group>"; };
 /* End PBXFileReference section */
 
@@ -52,6 +56,7 @@
                        isa = PBXGroup;
                        children = (
                                0AAC5DCD2953A05C003032C3 /* WhisperState.swift */,
+                               7F79E0EF2CE0C6F700ACD7BF /* Model.swift */,
                        );
                        path = Models;
                        sourceTree = "<group>";
                        isa = PBXGroup;
                        children = (
                                0AAC5D9C29539CCF003032C3 /* ContentView.swift */,
+                               7F79E0ED2CE0A78000ACD7BF /* DownloadButton.swift */,
                        );
                        path = UI;
                        sourceTree = "<group>";
                                0AAC5DCE2953A05C003032C3 /* WhisperState.swift in Sources */,
                                0AAC5DD12953A394003032C3 /* LibWhisper.swift in Sources */,
                                0AA7514C2953B569001EE061 /* RiffWaveUtils.swift in Sources */,
+                               7F79E0EE2CE0A78000ACD7BF /* DownloadButton.swift in Sources */,
                                0AA7514E2953D958001EE061 /* Recorder.swift in Sources */,
+                               7F79E0F02CE0C6F700ACD7BF /* Model.swift in Sources */,
                        );
                        runOnlyForDeploymentPostprocessing = 0;
                };
index 2ce43702686ee003f0007e60fec1fb322b4343f9..a7ed5a11c37605a8f8080efe8d13daf9f1f7de59 100644 (file)
@@ -423,6 +423,14 @@ extern "C" {
     WHISPER_API whisper_token whisper_token_transcribe(struct whisper_context * ctx);
 
     // Performance information from the default state.
+    struct whisper_timings {
+        float sample_ms;
+        float encode_ms;
+        float decode_ms;
+        float batchd_ms;
+        float prompt_ms;
+    };
+    WHISPER_API struct whisper_timings * whisper_get_timings(struct whisper_context * ctx);
     WHISPER_API void whisper_print_timings(struct whisper_context * ctx);
     WHISPER_API void whisper_reset_timings(struct whisper_context * ctx);
 
index d2913146233ce32ce640511dc3d82006aba916ca..754ff096d65ce1fa50757a035148026894235c87 100644 (file)
@@ -4186,6 +4186,19 @@ whisper_token whisper_token_transcribe(struct whisper_context * ctx) {
     return ctx->vocab.token_transcribe;
 }
 
+struct whisper_timings * whisper_get_timings(struct whisper_context * ctx) {
+    if (ctx->state == nullptr) {
+        return nullptr;
+    }
+    whisper_timings * timings = new whisper_timings;
+    timings->sample_ms = 1e-3f * ctx->state->t_sample_us / std::max(1, ctx->state->n_sample);
+    timings->encode_ms = 1e-3f * ctx->state->t_encode_us / std::max(1, ctx->state->n_encode);
+    timings->decode_ms = 1e-3f * ctx->state->t_decode_us / std::max(1, ctx->state->n_decode);
+    timings->batchd_ms = 1e-3f * ctx->state->t_batchd_us / std::max(1, ctx->state->n_batchd);
+    timings->prompt_ms = 1e-3f * ctx->state->t_prompt_us / std::max(1, ctx->state->n_prompt);
+    return timings;
+}
+
 void whisper_print_timings(struct whisper_context * ctx) {
     const int64_t t_end_us = ggml_time_us();