model, mtmd: fix gguf conversion for audio/vision mmproj (#21309)

* fix gguf conversion for audio/vision mmproj

* fix test
This commit was authored by Xuan-Son Nguyen on 2026-04-02 17:10:32 +02:00 and committed via GitHub.
parent 223373742b
commit 63f8fe0ef4
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
27 changed files with 1462 additions and 41 deletions

View file

@ -169,6 +169,8 @@ common_peg_parser analyze_tools::build_parser(parser_build_context & ctx) const
return build_tool_parser_tag_json(ctx);
case tool_format::TAG_WITH_TAGGED:
return build_tool_parser_tag_tagged(ctx);
case tool_format::TAG_WITH_GEMMA4_DICT:
return build_tool_parser_tag_gemma4_dict(ctx);
default:
LOG_ERR("[ERROR] Template seems to support tool calls, but failed to determine tool format. Tool calling will not work properly. "
"Check for a fixed template for your model in the models/templates directory of your llama.cpp installation or "
@ -433,4 +435,113 @@ common_peg_parser analyze_tools::build_tool_parser_tag_tagged(parser_build_conte
p.end();
}
// Build a PEG parser for the Gemma4 custom dict tool-call format:
//   <|tool_call>call:name{key:<|"|>string val<|"|>,num_key:123}<tool_call|>
// Unlike JSON, string values are delimited by the <|"|> token and keys are
// bare identifiers. One alternative is generated per declared tool; the
// result is wrapped in the per-call start/end tags and optional leading
// free-form content.
common_peg_parser analyze_tools::build_tool_parser_tag_gemma4_dict(parser_build_context & ctx) const {
auto & p = ctx.p;
const auto & inputs = ctx.inputs;
// When tool_choice is "required", plain content output is not allowed.
bool force_tools = inputs.tool_choice == COMMON_CHAT_TOOL_CHOICE_REQUIRED;
// The Gemma4 string quote token used in place of JSON "
static const std::string QUOTE = "<|\"|>";
common_peg_parser tool_choice = p.choice();
foreach_function(inputs.tools, [&](const json & tool) {
const auto & func = tool.at("function");
std::string name = func.at("name");
const auto & params = func.at("parameters");
if (!params.contains("properties") || !params.at("properties").is_object()) {
// No arguments - just match the function name with empty braces
// NOTE(review): `function.name_prefix` is presumably a class member
// set by the template patch (e.g. "call:") — confirm in the header.
auto func_parser = p.atomic(
p.tool_open(p.literal(function.name_prefix) + p.tool_name(p.literal(name)) + p.literal("{")) +
p.tool_args(p.eps()) +
p.tool_close(p.literal("}")));
tool_choice |= p.rule("tool-" + name, func_parser);
return;
}
const auto & properties = params.at("properties");
// Names of required parameters per the tool's JSON schema; collected but
// not enforced here — any subset of arguments is accepted by args_seq.
std::set<std::string> required;
if (params.contains("required") && params.at("required").is_array()) {
params.at("required").get_to(required);
}
// Build per-argument parsers, sorted alphabetically (matching template's dictsort)
struct arg_entry {
std::string param_name;
common_peg_parser parser;
};
std::vector<arg_entry> arg_entries;
for (const auto & [param_name, param_schema] : properties.items()) {
// Default to "object" when the schema omits or mistypes "type".
std::string type = "object";
auto type_v = param_schema.contains("type") ? param_schema.at("type") : json::object();
if (type_v.is_string()) type_v.get_to(type);
common_peg_parser value_parser = p.eps();
if (type == "string") {
// String values are delimited by <|"|>...<|"|>
value_parser =
p.literal(QUOTE) +
p.tool_arg_string_value(p.schema(p.until(QUOTE),
"tool-" + name + "-arg-" + param_name + "-schema", param_schema, true)) +
p.literal(QUOTE);
} else {
// Numbers, booleans: raw text up to the next comma or closing brace
value_parser = p.tool_arg_value(p.until_one_of({",", "}"}));
}
// An argument is `name:` followed by its value; there is no explicit
// closing delimiter in this format, hence tool_arg_close(eps()).
auto arg = p.tool_arg(
p.tool_arg_open(p.tool_arg_name(p.literal(param_name)) + p.literal(":")) +
value_parser +
p.tool_arg_close(p.eps()));
arg_entries.push_back({param_name, p.rule("tool-" + name + "-arg-" + param_name, arg)});
}
// Sort alphabetically to match Jinja's dictsort
std::sort(arg_entries.begin(), arg_entries.end(), [](const auto & a, const auto & b) {
return a.param_name < b.param_name;
});
// Build arg sequence: any arg, then zero-or-more comma-separated additional args
// NOTE(review): the repeat bound limits the total count to the number of
// declared args, but does not prevent the same argument from matching
// twice — this relies on the model emitting each key at most once.
common_peg_parser args_seq = p.eps();
if (!arg_entries.empty()) {
common_peg_parser any_arg = p.choice();
for (auto & entry : arg_entries) {
any_arg |= entry.parser;
}
args_seq = p.optional(
any_arg + p.repeat(p.literal(",") + any_arg, 0, (int) arg_entries.size() - 1));
}
// Full parser: call:name{args}
auto func_parser = p.atomic(
p.tool_open(p.literal(function.name_prefix) + p.tool_name(p.literal(name)) + p.literal("{")) +
p.tool_args(args_seq) +
p.tool_close(p.literal("}")));
tool_choice |= p.rule("tool-" + name, func_parser);
});
// Wrap each call in <|tool_call>...</tool_call|>
// NOTE(review): `format.per_call_start/end` are presumably class members
// set by the template patch ("<|tool_call>" / "<tool_call|>") — confirm.
auto wrapped_call = p.literal(format.per_call_start) + tool_choice + p.literal(format.per_call_end);
common_peg_parser tool_calls = p.eps();
if (inputs.parallel_tool_calls) {
// Allow several whitespace-separated calls in one response.
tool_calls = p.trigger_rule("tool-call", wrapped_call + p.zero_or_more(p.space() + wrapped_call));
} else {
tool_calls = p.trigger_rule("tool-call", wrapped_call);
}
if (!force_tools) {
// Tool calls are optional unless tool_choice == required.
tool_calls = p.optional(tool_calls);
}
// Free-form assistant content may precede the first tool call.
auto content_before_tools = p.until(format.per_call_start);
return ctx.reasoning_parser +
(force_tools ? p.eps() : p.optional(p.content(content_before_tools))) +
tool_calls + p.end();
}
} // namespace autoparser

View file

@ -144,6 +144,7 @@ enum class tool_format {
JSON_NATIVE, // Pure JSON: {"name": "X", "arguments": {...}}
TAG_WITH_JSON, // Tag-based with JSON args: <function=X>{...}</function>
TAG_WITH_TAGGED, // Tag-based with tagged args: <param=key>value</param>
TAG_WITH_GEMMA4_DICT, // Gemma4 custom dict: <|tool_call>call:name{key:<|"|>val<|"|>}<tool_call|>
};
inline std::ostream & operator<<(std::ostream & os, const tool_format & format) {
@ -156,6 +157,8 @@ inline std::ostream & operator<<(std::ostream & os, const tool_format & format)
return os << "TAG_WITH_JSON";
case tool_format::TAG_WITH_TAGGED:
return os << "TAG_WITH_TAGGED";
case tool_format::TAG_WITH_GEMMA4_DICT:
return os << "TAG_WITH_GEMMA4_DICT";
default:
return os << "UNKNOWN";
}
@ -350,6 +353,7 @@ struct analyze_tools : analyze_base {
common_peg_parser build_tool_parser_json_native(parser_build_context & ctx) const;
common_peg_parser build_tool_parser_tag_json(parser_build_context & ctx) const;
common_peg_parser build_tool_parser_tag_tagged(parser_build_context & ctx) const;
common_peg_parser build_tool_parser_tag_gemma4_dict(parser_build_context & ctx) const;
};
// ============================================================================

View file

@ -92,6 +92,33 @@ static std::vector<std::function<void(const common_chat_template & tmpl, autopar
LOG_DBG(ANSI_ORANGE "[Patch: Functionary 3.1]\n" ANSI_RESET);
}
},
// Gemma4 - custom dict format: <|tool_call>call:name{key:<|"|>val<|"|>}<tool_call|>
[](const common_chat_template & tmpl, autoparser & analysis) -> void {
    // Detect the Gemma4 template by its distinctive tool-call literal.
    if (tmpl.src.find("'<|tool_call>call:'") == std::string::npos) {
        return;
    }
    auto & tools = analysis.tools;
    tools.format.mode           = tool_format::TAG_WITH_GEMMA4_DICT;
    tools.format.per_call_start = "<|tool_call>";
    tools.format.per_call_end   = "<tool_call|>";
    tools.format.section_start  = "";
    tools.format.section_end    = "";
    tools.function.name_prefix  = "call:";
    tools.function.name_suffix  = "";
    tools.arguments.start       = "{";
    tools.arguments.end         = "}";
    tools.arguments.name_suffix = ":";
    tools.arguments.separator   = ",";
    // Reasoning is emitted inside a <|channel>thought ... <channel|> span.
    analysis.reasoning.mode  = reasoning_mode::TAG_BASED;
    analysis.reasoning.start = "<|channel>thought\n";
    analysis.reasoning.end   = "<channel|>";
    // Special tokens the tokenizer must keep intact for the parser to match.
    analysis.preserved_tokens.clear();
    for (const char * tok : {
             "<|tool_call>",
             "<tool_call|>",
             "<|tool_response>",
             "<tool_response|>",
             "<|\"|>",
             "<|turn>",
         }) {
        analysis.preserved_tokens.push_back(tok);
    }
    LOG_DBG(ANSI_ORANGE "[Patch: Gemma4]\n" ANSI_RESET);
},
// DeepSeek-R1-Distill-Qwen
[](const common_chat_template & tmpl, autoparser & analysis) -> void {
if (tmpl.src.find(

View file

@ -1545,6 +1545,50 @@ static void requires_non_null_content(json & messages) {
}
}
// Gemma4 uses a custom tool_responses field instead of role:tool messages.
// Convert consecutive role:tool messages into a single user message with tool_responses.
static void convert_tool_responses_gemma4(json & messages) {
    json out = json::array();
    const size_t count = messages.size();

    // True when messages[k] exists and carries role == "tool".
    auto is_tool_msg = [&](size_t k) {
        return k < count && messages[k].contains("role") && messages[k].at("role") == "tool";
    };

    size_t idx = 0;
    while (idx < count) {
        if (!is_tool_msg(idx)) {
            // Non-tool messages pass through untouched.
            out.push_back(messages[idx]);
            ++idx;
            continue;
        }
        // Collapse a run of consecutive tool messages into one tool_responses array.
        json responses = json::array();
        for (; is_tool_msg(idx); ++idx) {
            const auto & msg = messages[idx];
            // Prefer tool_call_id as the response name; fall back to name.
            std::string fn_name;
            if (msg.contains("tool_call_id") && msg.at("tool_call_id").is_string()) {
                fn_name = msg.at("tool_call_id");
            } else if (msg.contains("name") && msg.at("name").is_string()) {
                fn_name = msg.at("name");
            }
            json payload; // stays null when the message has no content
            if (msg.contains("content")) {
                const auto & content = msg.at("content");
                if (content.is_string()) {
                    // Try to parse the content as JSON; fall back to raw string
                    try {
                        payload = json::parse(content.get<std::string>());
                    } catch (...) {
                        payload = content;
                    }
                } else {
                    payload = content;
                }
            }
            responses.push_back({{"name", fn_name}, {"response", payload}});
        }
        out.push_back({{"role", "user"}, {"tool_responses", responses}});
    }
    messages = out;
}
static void func_args_not_string(json & messages) {
GGML_ASSERT(messages.is_array());
for (auto & message : messages) {
@ -1673,6 +1717,10 @@ static common_chat_params common_chat_templates_apply_jinja(const struct common_
workaround::func_args_not_string(params.messages);
}
if (src.find("'<|tool_call>call:'") != std::string::npos) {
workaround::convert_tool_responses_gemma4(params.messages);
}
params.add_generation_prompt = false;
std::string no_gen_prompt = common_chat_template_direct_apply(tmpl, params);
params.add_generation_prompt = true;