From: tc-mb
Date: Tue, 26 Aug 2025 08:05:55 +0000 (+0800)
Subject: model : support MiniCPM-V 4.5 (#15575)
X-Git-Tag: upstream/0.0.6527~245
X-Git-Url: https://git.djapps.eu/?a=commitdiff_plain;h=c4e9239064a564de7b94ee2b401ae907235a8fca;p=pkg%2Fggml%2Fsources%2Fllama.cpp

model : support MiniCPM-V 4.5 (#15575)
---

diff --git a/docs/multimodal/minicpmv4.0.md b/docs/multimodal/minicpmv4.0.md
index 65887d96..d04cb338 100644
--- a/docs/multimodal/minicpmv4.0.md
+++ b/docs/multimodal/minicpmv4.0.md
@@ -6,7 +6,7 @@ Download [MiniCPM-V-4](https://huggingface.co/openbmb/MiniCPM-V-4) PyTorch model
 
 
 ### Build llama.cpp
-Readme modification time: 20250206
+Readme modification time: 20250731
 
 If there are differences in usage, please refer to the official build [documentation](https://github.com/ggerganov/llama.cpp/blob/master/docs/build.md)
 
diff --git a/docs/multimodal/minicpmv4.5.md b/docs/multimodal/minicpmv4.5.md
new file mode 100644
index 00000000..8fea5e61
--- /dev/null
+++ b/docs/multimodal/minicpmv4.5.md
@@ -0,0 +1,47 @@
+## MiniCPM-V 4.5
+
+### Prepare models and code
+
+Download the [MiniCPM-V-4_5](https://huggingface.co/openbmb/MiniCPM-V-4_5) PyTorch model from Hugging Face into the "MiniCPM-V-4_5" folder.
+
+
+### Build llama.cpp
+Readme modification time: 20250826
+
+If there are differences in usage, please refer to the official build [documentation](https://github.com/ggerganov/llama.cpp/blob/master/docs/build.md)
+
+Clone llama.cpp:
+```bash
+git clone https://github.com/ggerganov/llama.cpp
+cd llama.cpp
+```
+
+Build llama.cpp using `CMake`:
+```bash
+cmake -B build
+cmake --build build --config Release
+```
+
+
+### Usage of MiniCPM-V 4.5
+
+Convert the PyTorch model to gguf files (you can also download our pre-converted [gguf](https://huggingface.co/openbmb/MiniCPM-V-4_5-gguf) files):
+
+```bash
+python ./tools/mtmd/legacy-models/minicpmv-surgery.py -m ../MiniCPM-V-4_5
+python ./tools/mtmd/legacy-models/minicpmv-convert-image-encoder-to-gguf.py -m ../MiniCPM-V-4_5 --minicpmv-projector ../MiniCPM-V-4_5/minicpmv.projector --output-dir ../MiniCPM-V-4_5/ --minicpmv_version 6
+python ./convert_hf_to_gguf.py ../MiniCPM-V-4_5/model
+
+# quantize int4 version
+./build/bin/llama-quantize ../MiniCPM-V-4_5/model/ggml-model-f16.gguf ../MiniCPM-V-4_5/model/ggml-model-Q4_K_M.gguf Q4_K_M
+```
+
+
+Inference on Linux or Mac:
+```bash
+# run in single-turn mode
+./build/bin/llama-mtmd-cli -m ../MiniCPM-V-4_5/model/ggml-model-f16.gguf --mmproj ../MiniCPM-V-4_5/mmproj-model-f16.gguf -c 4096 --temp 0.7 --top-p 0.8 --top-k 100 --repeat-penalty 1.05 --image xx.jpg -p "What is in the image?"
+
+# run in conversation mode
+./build/bin/llama-mtmd-cli -m ../MiniCPM-V-4_5/model/ggml-model-Q4_K_M.gguf --mmproj ../MiniCPM-V-4_5/mmproj-model-f16.gguf
+```
diff --git a/tools/mtmd/clip.cpp b/tools/mtmd/clip.cpp
index b3628db6..0e76b9c5 100644
--- a/tools/mtmd/clip.cpp
+++ b/tools/mtmd/clip.cpp
@@ -2202,6 +2202,8 @@ struct clip_model_loader {
                     hparams.minicpmv_query_num = 64;
                 } else if (hparams.minicpmv_version == 5) {
                     hparams.minicpmv_query_num = 64;
+                } else if (hparams.minicpmv_version == 6) {
+                    hparams.minicpmv_query_num = 64;
                 } else {
                     hparams.minicpmv_query_num = 96;
                 }
@@ -3685,6 +3687,9 @@ int clip_n_output_tokens(const struct clip_ctx * ctx, struct clip_image_f32 * im
         } else if (params.minicpmv_version == 5) {
             // MiniCPM-V 4.0
             n_patches = 64;
+        } else if (params.minicpmv_version == 6) {
+            // MiniCPM-V 4.5
+            n_patches = 64;
         } else {
             GGML_ABORT("Unknown minicpmv version");
         }
diff --git a/tools/mtmd/legacy-models/minicpmv-convert-image-encoder-to-gguf.py b/tools/mtmd/legacy-models/minicpmv-convert-image-encoder-to-gguf.py
index 4dda60a2..f34d858d 100644
--- a/tools/mtmd/legacy-models/minicpmv-convert-image-encoder-to-gguf.py
+++ b/tools/mtmd/legacy-models/minicpmv-convert-image-encoder-to-gguf.py
@@ -607,6 +607,9 @@ else:
     elif minicpmv_version == 5:
         emb_dim = 2560
         block_count = 27
+    elif minicpmv_version == 6:
+        emb_dim = 4096
+        block_count = 27
 
 default_vision_config = {
         "hidden_size": 1152,
@@ -630,6 +633,10 @@ elif minicpmv_version == 5:
     default_vision_config["model_type"] = "siglip_vision_model"
     vision_config = SiglipVisionConfig(**default_vision_config)
     model = SiglipVisionTransformer(vision_config)
+elif minicpmv_version == 6:
+    default_vision_config["model_type"] = "siglip_vision_model"
+    vision_config = SiglipVisionConfig(**default_vision_config)
+    model = SiglipVisionTransformer(vision_config)
 
 processor = None
 # if model.attn_pool is not None:
diff --git a/tools/mtmd/mtmd.cpp b/tools/mtmd/mtmd.cpp
index a05373d5..cd022c5e 100644
--- a/tools/mtmd/mtmd.cpp
+++ b/tools/mtmd/mtmd.cpp
@@ -207,7 +207,7 @@ struct mtmd_context {
             tok_row_end_trail = false; // no trailing end-of-row token
             ov_img_first      = true;
 
-        } else if (minicpmv_version == 3 || minicpmv_version == 4 || minicpmv_version == 5) {
+        } else if (minicpmv_version == 3 || minicpmv_version == 4 || minicpmv_version == 5 || minicpmv_version == 6) {
            // minicpmv 2.6 format:
            // <image> (overview) </image><slice> (slice) (slice) </slice>\n ...
            slice_tmpl        = MTMD_SLICE_TMPL_MINICPMV_2_6;
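
For quick reference, the sketch below (illustrative only, not code from llama.cpp; `minicpmv_params` and `minicpmv_params_for` are hypothetical names) collects the constants the hunks above wire up for `minicpmv_version == 6`: MiniCPM-V 4.5 reuses the 64 resampler query tokens of version 5, while the legacy converter uses an embedding dim of 4096 and a block count of 27 for its projector.

```cpp
// Illustrative summary of the per-version constants touched by this patch.
// Values for versions 5 and 6 come from the diff; other versions are omitted.
#include <cstdio>

struct minicpmv_params {
    int query_num;   // resampler query tokens (also the clip_n_output_tokens value)
    int emb_dim;     // projector embedding dim used by the legacy converter
    int block_count; // projector block count used by the legacy converter
};

static minicpmv_params minicpmv_params_for(int version) {
    switch (version) {
        case 5:  return {64, 2560, 27}; // MiniCPM-V 4.0 (existing branch)
        case 6:  return {64, 4096, 27}; // MiniCPM-V 4.5 (added by this patch)
        default: return {96,   -1, -1}; // other versions not covered in this sketch;
                                        // clip.cpp's final else-branch uses 96 query tokens
    }
}

int main() {
    const minicpmv_params p = minicpmv_params_for(6);
    std::printf("v6: query_num=%d emb_dim=%d block_count=%d\n",
                p.query_num, p.emb_dim, p.block_count);
    return 0;
}
```

Version 6 also joins versions 3, 4, and 5 in selecting the MiniCPM-V 2.6 slice template (`MTMD_SLICE_TMPL_MINICPMV_2_6`) in mtmd.cpp, as the last hunk shows.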