From: Jie Fu (傅杰)
Date: Wed, 17 Sep 2025 07:29:00 +0000 (+0800)
Subject: examples : support encoder-decoder models in the simple example (#16002)
X-Git-Tag: upstream/0.0.6527~34
X-Git-Url: https://git.djapps.eu/?a=commitdiff_plain;h=1cbd80f8cf80a817715b1ccc5680fe2a3c5172c8;p=pkg%2Fggml%2Fsources%2Fllama.cpp

examples : support encoder-decoder models in the simple example (#16002)

Signed-off-by: Jie Fu
---

diff --git a/examples/simple/simple.cpp b/examples/simple/simple.cpp
index 633b87e5..d09771d1 100644
--- a/examples/simple/simple.cpp
+++ b/examples/simple/simple.cpp
@@ -145,6 +145,20 @@ int main(int argc, char ** argv) {
 
     llama_batch batch = llama_batch_get_one(prompt_tokens.data(), prompt_tokens.size());
 
+    if (llama_model_has_encoder(model)) {
+        if (llama_encode(ctx, batch)) {
+            fprintf(stderr, "%s : failed to eval\n", __func__);
+            return 1;
+        }
+
+        llama_token decoder_start_token_id = llama_model_decoder_start_token(model);
+        if (decoder_start_token_id == LLAMA_TOKEN_NULL) {
+            decoder_start_token_id = llama_vocab_bos(vocab);
+        }
+
+        batch = llama_batch_get_one(&decoder_start_token_id, 1);
+    }
+
     // main loop
 
     const auto t_main_start = ggml_time_us();
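
Note (not part of the patch): for encoder-decoder models the patch runs the tokenized
prompt through llama_encode() once, then replaces the batch with a single decoder-start
token (falling back to BOS when the model defines none) before the normal generation
loop begins. The sketch below illustrates, under the assumption that ctx, vocab, smpl,
batch and n_predict are set up as in examples/simple/simple.cpp, how that single-token
batch is then consumed by the decode loop; it is a simplified outline, not the exact
loop in the example.

    // minimal sketch of the generation loop that consumes the batch prepared above;
    // for encoder-decoder models the first llama_decode() call sees only the
    // decoder-start token, since the prompt was already handled by llama_encode()
    for (int n_decode = 0; n_decode < n_predict; ) {
        if (llama_decode(ctx, batch)) {
            fprintf(stderr, "%s : failed to eval\n", __func__);
            return 1;
        }

        // sample the next token and stop at an end-of-generation token
        llama_token new_token_id = llama_sampler_sample(smpl, ctx, -1);
        if (llama_vocab_is_eog(vocab, new_token_id)) {
            break;
        }

        // feed the sampled token back in as the next single-token batch
        batch = llama_batch_get_one(&new_token_id, 1);
        n_decode += 1;
    }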