git.djapps.eu Git - pkg/ggml/sources/llama.cpp/commitdiff
common : replace wrap_for_generation with a prefix convenience function and fix gpt...
authorAldehir Rojas <redacted>
Tue, 24 Mar 2026 03:21:47 +0000 (22:21 -0500)
committerGitHub <redacted>
Tue, 24 Mar 2026 03:21:47 +0000 (22:21 -0500)
common/chat-auto-parser-generator.cpp
common/chat-auto-parser-helpers.cpp
common/chat-auto-parser-helpers.h
common/chat-peg-parser.cpp
common/chat-peg-parser.h
common/chat.cpp

index aa03aea5a916d8874bc75f694191cfb7a4f670e9..bf44091d6781d773e123321e4631a035305dfbd7 100644 (file)
@@ -112,8 +112,7 @@ common_peg_arena autoparser::build_parser(const generation_params & inputs) cons
         } else {
             parser = content.build_parser(ctx);
         }
-        parser = wrap_for_generation_prompt(p, parser, inputs, reasoning.start);
-        return parser;
+        return p.prefix(inputs.generation_prompt, reasoning.start) + parser;
     });
 }
 
index 3a7a5c13a7053225d1ab3df660dbc8920f4af6be..2499464cd82ffdb167b6291e8549438435b48074 100644 (file)
@@ -308,22 +308,6 @@ std::vector<segment> prune_whitespace_segments(const std::vector<segment> & segm
     return result;
 }
 
-common_peg_parser wrap_for_generation_prompt(common_chat_peg_builder &             p,
-                                             const common_peg_parser &             prs,
-                                             const autoparser::generation_params & inputs,
-                                             const std::string &                   reasoning_start) {
-    auto parser = prs;
-    if (!inputs.generation_prompt.empty()) {
-        size_t end_pos = inputs.generation_prompt.size();
-        if (!reasoning_start.empty() && inputs.generation_prompt.find(reasoning_start) != std::string::npos) {
-            end_pos = inputs.generation_prompt.find(reasoning_start);
-        }
-        std::string cut_genprompt = inputs.generation_prompt.substr(0, end_pos);
-        parser                    = p.literal(cut_genprompt) + parser;
-    }
-    return parser;
-}
-
 namespace autoparser {
 
 std::string apply_template(const common_chat_template & tmpl, const template_params & params) {
index e13581e58fc3a9a43cc3338bc7af769aed8614a9..7cd031c4d6622108f9dd3bfd4133bc317272edf2 100644 (file)
@@ -58,11 +58,6 @@ std::vector<segment> segmentize_markers(const std::string & text);
 //                                   (MARKER, "</function>"), (MARKER, "</tool_call>") ]
 std::vector<segment> prune_whitespace_segments(const std::vector<segment> & segments);
 
-// Wrap parser with generation prompt parser
-common_peg_parser wrap_for_generation_prompt(common_chat_peg_builder &             p,
-                                             const common_peg_parser &             prs,
-                                             const autoparser::generation_params & inputs,
-                                             const std::string &                   reasoning_start = {});
 namespace autoparser {
 
 // Apply a template with the given parameters, returning the rendered string (empty on failure)
index 5f7d422b4157842a04ccd0acf24fe6dd5084bd59..07b487e15cc9f867996b1a7243fa20e2ad871710 100644 (file)
@@ -802,6 +802,16 @@ common_peg_parser common_chat_peg_builder::build_json_tools_flat_keys(
     return tool_choices;
 }
 
+common_peg_parser common_chat_peg_builder::prefix(const std::string & s, const std::string & delimiter) {
+    if (s.empty()) {
+        return eps();
+    }
+    if (delimiter.empty()) {
+        return literal(s);
+    }
+    return literal(s.substr(0, s.rfind(delimiter)));
+}
+
 common_peg_parser common_chat_peg_builder::standard_json_tools(
                                                        const std::string &              section_start,
                                                        const std::string &              section_end,
index a497508d2f254aee7d319772238b230959605969..62402923c566e1f3e793720b090ddaed60764295 100644 (file)
@@ -82,6 +82,10 @@ class common_chat_peg_builder : public common_peg_parser_builder {
     common_peg_parser tool_arg_string_value(const common_peg_parser & p) { return tag(TOOL_ARG_STRING_VALUE, p); }
     common_peg_parser tool_arg_json_value(const common_peg_parser & p) { return atomic(tag(TOOL_ARG_VALUE, p)); }
 
+
+    // Return a parser that parses the prefix of a string, up to a given delimiter.
+    common_peg_parser prefix(const std::string & s, const std::string & delimiter = {});
+
     // Legacy-compatible helper for building standard JSON tool calls
     // Used by tests and manual parsers
     // name_key/args_key: JSON key names for function name and arguments
index a79d564b34f7b9c632aafca6b3b688ad340bd30a..078b44ee1b4039777d6f3798f2554cb4ea844754 100644 (file)
@@ -872,14 +872,14 @@ static common_chat_params common_chat_params_init_ministral_3(const common_chat_
     };
 
     auto parser = build_chat_peg_parser([&](common_chat_peg_builder & p) {
+        auto generation_prompt = p.prefix(inputs.generation_prompt, "[THINK]");
         auto reasoning =
             extract_reasoning ? p.optional("[THINK]" + p.reasoning(p.until("[/THINK]")) + "[/THINK]") : p.eps();
 
         // Response format parser
         if (inputs.json_schema.is_object() && !inputs.json_schema.empty()) {
             // Ministral wants to emit json surrounded by code fences
-            return wrap_for_generation_prompt(p, reasoning << "```json" << p.content(p.schema(p.json(), "response-format", inputs.json_schema)) << "```",
-                inputs, "[THINK]");
+            return generation_prompt + (reasoning << "```json" << p.content(p.schema(p.json(), "response-format", inputs.json_schema)) << "```");
         }
 
         // Tool call parser
@@ -899,13 +899,12 @@ static common_chat_params common_chat_params_init_ministral_3(const common_chat_
             auto max_calls  = inputs.parallel_tool_calls ? -1 : 1;
             auto tool_calls = p.trigger_rule("tool-call", p.repeat("[TOOL_CALLS]" + tool_choice, min_calls, max_calls));
 
-            return wrap_for_generation_prompt(p, reasoning << p.content(p.until("[TOOL_CALLS]")) << tool_calls,
-                inputs, "[THINK]");
+            return generation_prompt + (reasoning << p.content(p.until("[TOOL_CALLS]")) << tool_calls);
         }
 
         // Content only parser
         include_grammar = false;
-        return wrap_for_generation_prompt(p, reasoning << p.content(p.rest()), inputs, "[THINK]");
+        return generation_prompt + (reasoning << p.content(p.rest()));
     });
 
     data.parser = parser.save();
@@ -991,8 +990,7 @@ static common_chat_params common_chat_params_init_gpt_oss(const common_chat_temp
                 p.literal("<|channel|>final") + constraint + p.literal("<|message|>") +
                 p.content(p.schema(p.json(), "response-format-schema", inputs.json_schema)));
 
-            return wrap_for_generation_prompt(p, response_format | (analysis + p.zero_or_more(start + analysis) + start + response_format),
-                inputs, "<|channel|>");
+            return p.zero_or_more(start + analysis) + start + response_format;
         }
 
         if (has_tools && inputs.tool_choice != COMMON_CHAT_TOOL_CHOICE_NONE) {
@@ -1021,15 +1019,13 @@ static common_chat_params common_chat_params_init_gpt_oss(const common_chat_temp
             auto tool_call  = p.trigger_rule("tool-call", tool_choice);
 
             if (inputs.tool_choice == COMMON_CHAT_TOOL_CHOICE_REQUIRED) {
-                return tool_call | ( any + p.zero_or_more(start + any) + start + tool_call);
+                return p.zero_or_more(start + any) + start + tool_call;
             }
 
-            return wrap_for_generation_prompt(p, tool_call | final_msg | (any + p.zero_or_more(start + any) + start + (tool_call | final_msg)),
-                inputs, "<|channel|>");
+            return p.zero_or_more(start + any) + start + (tool_call | final_msg);
         }
 
-        return wrap_for_generation_prompt(p, final_msg | (any + p.zero_or_more(start + any) + start + final_msg),
-            inputs, "<|channel|>");
+        return p.zero_or_more(start + any) + start + final_msg;
     });
 
     data.parser = parser.save();
@@ -1080,11 +1076,12 @@ static common_chat_params common_chat_params_init_functionary_v3_2(const common_
         // When no tools, content goes until end
         auto content_until_tool = p.literal("all\n") + p.content(p.until(">>>"));
         auto content_until_end  = p.literal("all\n") + p.content(p.rest());
+        auto generation_prompt  = p.literal(inputs.generation_prompt);
 
         // If no tools or tool_choice is NONE, just parse content
         if (!has_tools || inputs.tool_choice == COMMON_CHAT_TOOL_CHOICE_NONE) {
             // When no tools, just match the prefix and capture everything after
-            return wrap_for_generation_prompt(p, content_until_end + p.end(), inputs);
+            return generation_prompt + content_until_end + p.end();
         }
 
         // Build tool call parsers for each available function
@@ -1120,7 +1117,7 @@ static common_chat_params common_chat_params_init_functionary_v3_2(const common_
             auto content_and_tool = content_until_tool + tool_choice;
             ret = p.choice({ content_and_tool, content_only, tool_choice }) + p.end();
         }
-        return wrap_for_generation_prompt(p, ret, inputs);
+        return generation_prompt + ret;
     });
 
     data.parser = parser.save();
@@ -1201,12 +1198,12 @@ static common_chat_params common_chat_params_init_kimi_k2(const common_chat_temp
         auto reasoning = extract_reasoning ? p.optional(THINK_START + p.reasoning(
             p.until_one_of({ THINK_END, "<|tool_calls_section_begin|>", "<|tool_call_begin|>" })) +
             p.optional(p.literal(THINK_END))) : p.eps();
+        auto generation_prompt = p.prefix(inputs.generation_prompt, THINK_START);
 
 
         // Content only parser (no tools)
         if (!has_tools || inputs.tool_choice == COMMON_CHAT_TOOL_CHOICE_NONE) {
-            return wrap_for_generation_prompt(p, reasoning + p.content(p.rest()) + end,
-                inputs, THINK_START);
+            return generation_prompt + reasoning + p.content(p.rest()) + end;
         }
 
         // Build tool call parsers for each available function
@@ -1242,8 +1239,7 @@ static common_chat_params common_chat_params_init_kimi_k2(const common_chat_temp
 
         auto content_before_tools = p.content(p.until_one_of({ SECTION_BEGIN, CALL_BEGIN }));
 
-        return wrap_for_generation_prompt(p, reasoning + content_before_tools + tool_calls + end,
-            inputs, THINK_START);
+        return generation_prompt + reasoning + content_before_tools + tool_calls + end;
     });
 
     data.parser = parser.save();
@@ -1301,6 +1297,7 @@ static common_chat_params common_chat_params_init_lfm2(const common_chat_templat
     data.thinking_end_tag   = THINK_END;
 
     auto parser = build_chat_peg_parser([&](common_chat_peg_builder & p) {
+        auto generation_prompt = p.prefix(inputs.generation_prompt, THINK_START);
         auto end = p.end();
 
         auto reasoning = p.eps();
@@ -1309,8 +1306,7 @@ static common_chat_params common_chat_params_init_lfm2(const common_chat_templat
         }
 
         if (!has_tools || inputs.tool_choice == COMMON_CHAT_TOOL_CHOICE_NONE) {
-            return wrap_for_generation_prompt(p, reasoning + p.content(p.rest()) + end, inputs,
-                THINK_START);
+            return generation_prompt + reasoning + p.content(p.rest()) + end;
         }
 
         auto tool_calls = p.rule("tool-calls",
@@ -1322,8 +1318,7 @@ static common_chat_params common_chat_params_init_lfm2(const common_chat_templat
 
         auto content = p.content(p.until(TOOL_CALL_START));
 
-        return wrap_for_generation_prompt(p, reasoning + content + tool_calls + end, inputs,
-            THINK_START);
+        return generation_prompt + reasoning + content + tool_calls + end;
     });
 
     data.parser = parser.save();
@@ -1396,7 +1391,7 @@ static common_chat_params common_chat_params_init_gigachat_v3(
             ret = p.content(p.rest());
         }
 
-        return wrap_for_generation_prompt(p, ret, inputs);
+        return p.literal(inputs.generation_prompt) + ret;
     });
 
     data.parser = parser.save();
@@ -1621,7 +1616,7 @@ static common_chat_params common_chat_templates_apply_jinja(const struct common_
         data.format                    = COMMON_CHAT_FORMAT_PEG_NATIVE;
         data.generation_prompt         = params.generation_prompt;
         auto parser                    = build_chat_peg_parser([&params](common_chat_peg_builder &p) {
-            return wrap_for_generation_prompt(p, p.content(p.rest()), params);
+            return p.prefix(params.generation_prompt) + p.content(p.rest());
         });
         data.parser                    = parser.save();
         return data;