llama-chat : Do not throw when tool parsing fails (#14012)
author Piotr <redacted>
Sat, 14 Jun 2025 16:25:15 +0000 (18:25 +0200)
committer GitHub <redacted>
Sat, 14 Jun 2025 16:25:15 +0000 (17:25 +0100)
Currently, when a model generates output that looks like a tool call
but is invalid, an exception is thrown and not handled, causing the
CLI or llama-server to bail. Instead, handle the chat parser exception
and simply return the generated text in such cases.

Signed-off-by: Piotr Stankiewicz <redacted>
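
For context, here is a minimal, self-contained sketch of the fallback pattern this commit introduces. The types are simplified stand-ins for illustration only; the real change, using common_chat_msg_parser, is in the diffs below:

// A minimal sketch of the fallback pattern (the types below are
// simplified stand-ins for illustration, not llama.cpp's real parser
// classes): if structured tool-call parsing throws, reset the
// half-built state and return the raw text as plain content.
#include <iostream>
#include <stdexcept>
#include <string>
#include <vector>

struct parsed_msg {
    std::string              content;
    std::vector<std::string> tool_calls;
};

// Stand-in for the role common_chat_msg_parser plays in the diff below.
struct msg_builder {
    std::string input;
    parsed_msg  result;

    void clear_tools() { result.tool_calls.clear(); }

    void parse_tool_calls() {
        // Treat any "<tool_call>" prefix without a valid payload as broken,
        // leaving a partially-built tool call behind before throwing.
        if (input.rfind("<tool_call>", 0) == 0) {
            result.tool_calls.push_back("partial");
            throw std::runtime_error("malformed tool call");
        }
        result.content = input;
    }
};

parsed_msg parse(const std::string & input) {
    msg_builder builder{input};
    try {
        builder.parse_tool_calls();
    } catch (const std::exception & ex) {
        // Previously the exception would propagate and crash the caller;
        // now drop the half-parsed tool calls and keep the raw text.
        std::cerr << "tool-call parse failed: " << ex.what() << "\n";
        builder.clear_tools();
        builder.result.content = input;
    }
    return builder.result;
}

int main() {
    // Prints the raw text instead of aborting on the malformed tool call.
    std::cout << parse("<tool_call>{oops").content << "\n";
}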
common/chat-parser.cpp
common/chat-parser.h
common/chat.cpp

diff --git a/common/chat-parser.cpp b/common/chat-parser.cpp
index 65b664cb37da44d28fba63d6e39dccbf734f4613..18a30e49aa578f89d8fd0f22a17dc38fd8764b50 100644
@@ -49,6 +49,7 @@ bool common_chat_msg_parser::add_tool_call(const std::string & name, const std::
 
     // LOG_DBG("Tool call arguments:\n\traw: %s\n\tresult: %s\n", arguments.c_str(), tool_call.arguments.c_str());
     result_.tool_calls.emplace_back(tool_call);
+
     return true;
 }
 bool common_chat_msg_parser::add_tool_call(const json & tool_call) {
@@ -378,3 +379,7 @@ std::optional<common_chat_msg_parser::consume_json_result> common_chat_msg_parse
         /* .is_partial = */ found_healing_marker,
     };
 }
+
+void common_chat_msg_parser::clear_tools() {
+    result_.tool_calls.clear();
+}
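
The new clear_tools() helper discards any tool calls accumulated before the parse failed, so the content-only fallback added in chat.cpp below starts from a clean result.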
diff --git a/common/chat-parser.h b/common/chat-parser.h
index 7ee355056b30a910fc825314aaeaf4def1263846..0e64c341a50aae93f3468ffcc69839775daeb399 100644
@@ -115,4 +115,6 @@ class common_chat_msg_parser {
         const std::vector<std::vector<std::string>> & args_paths = {},
         const std::vector<std::vector<std::string>> & content_paths = {}
     );
+
+    void clear_tools();
 };
diff --git a/common/chat.cpp b/common/chat.cpp
index 1d6974a8c563bc245df746c4490d6ab18c94e62f..0dad14fba9ba595bfff352b23a6a973305cc16e3 100644
@@ -1921,7 +1921,9 @@ common_chat_msg common_chat_parse(const std::string & input, bool is_partial, co
     } catch (const common_chat_msg_partial_exception & ex) {
         LOG_DBG("Partial parse: %s\n", ex.what());
         if (!is_partial) {
-            throw std::runtime_error(ex.what());
+            builder.clear_tools();
+            builder.move_to(0);
+            common_chat_parse_content_only(builder);
         }
     }
     auto msg = builder.result();
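
With this change, a failed tool-call parse on a complete (non-partial) response no longer raises std::runtime_error. Instead, the builder drops the half-parsed tool calls, rewinds to the start of the input with move_to(0), and re-parses the whole output as plain content, so callers receive the generated text as-is.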