openvino : fix convert-whisper-to-openvino.py (#1890)
author st-gr <redacted>
Thu, 22 Feb 2024 13:11:35 +0000 (05:11 -0800)
committer GitHub <redacted>
Thu, 22 Feb 2024 13:11:35 +0000 (15:11 +0200)
Fix issue #1870: conversion from Whisper to OpenVINO failed

convert-whisper-to-openvino.py stopped working with OpenVINO version 2023.0.0-10926-b4452d56304-releases/2023/0.

Error was: TypeError: load(): incompatible function arguments. The following argument types are supported:
    1. (self: openvino._pyopenvino.FrontEnd, path: object) -> ov::frontend::InputModel

Tested successfully with a large-v3 conversion.
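
The patch stops going through openvino.tools.mo and instead drives the ONNX
frontend directly. A minimal, self-contained sketch of that conversion flow
(the file names here are placeholders; the script derives its own from the
model name):

    from openvino.frontend import FrontEndManager
    from openvino.runtime import serialize

    # Pick the ONNX frontend explicitly rather than relying on mo.convert_model()
    fem = FrontEndManager()
    onnx_fe = fem.load_by_framework("onnx")

    # load() takes the path to the exported ONNX file and returns an InputModel,
    # which convert() turns into an OpenVINO Model
    input_model = onnx_fe.load("whisper_encoder.onnx")   # placeholder path
    ov_model = onnx_fe.convert(input_model)

    # serialize() writes the IR pair: the .xml graph and the .bin weights beside it
    serialize(ov_model, xml_path="whisper-encoder-openvino.xml")  # placeholder path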

Co-authored-by: Stefan Grundmann <redacted>
models/convert-whisper-to-openvino.py

index 1a4ad304df015fb5bf199e6827aea7dee241e4b7..5df0be78d9b4a17e0ffd1899f521e1eae7583877 100644
@@ -3,6 +3,7 @@ import torch
 from whisper import load_model
 import os
 from openvino.tools import mo
+from openvino.frontend import FrontEndManager
 from openvino.runtime import serialize
 import shutil
 
@@ -11,7 +12,7 @@ def convert_encoder(hparams, encoder, mname):
 
     mel = torch.zeros((1, hparams.n_mels, 3000))
 
-    onnx_folder=os.path.join(os.path.dirname(__file__),"onnx_encoder")
+    onnx_folder = os.path.join(os.path.dirname(__file__), "onnx_encoder")
 
     #create a directory to store the onnx model, and other collateral that is saved during onnx export procedure
     if not os.path.isdir(onnx_folder):
@@ -19,6 +20,7 @@ def convert_encoder(hparams, encoder, mname):
 
     onnx_path = os.path.join(onnx_folder, "whisper_encoder.onnx")
 
+    # Export the PyTorch model to ONNX
     torch.onnx.export(
         encoder,
         mel,
@@ -27,11 +29,16 @@ def convert_encoder(hparams, encoder, mname):
         output_names=["output_features"]
     )
 
-    # use model optimizer to convert onnx to OpenVINO IR format
-    encoder_model = mo.convert_model(onnx_path, compress_to_fp16=True)
-    serialize(encoder_model, xml_path=os.path.join(os.path.dirname(__file__),"ggml-" + mname + "-encoder-openvino.xml"))
+    # Convert ONNX to OpenVINO IR format using the frontend
+    fem = FrontEndManager()
+    onnx_fe = fem.load_by_framework("onnx")
+    onnx_model = onnx_fe.load(onnx_path)
+    ov_model = onnx_fe.convert(onnx_model)
 
-    #cleanup
+    # Serialize the OpenVINO model to XML and BIN files
+    serialize(ov_model, xml_path=os.path.join(os.path.dirname(__file__), "ggml-" + mname + "-encoder-openvino.xml"))
+
+    # Cleanup
     if os.path.isdir(onnx_folder):
         shutil.rmtree(onnx_folder)
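
With the frontend-based conversion in place, the script can be re-run as before.
Assuming the script's existing --model argument, for example:

    python models/convert-whisper-to-openvino.py --model large-v3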