@@ -50,8 +50,8 @@ static const std::map<std::string, llm_chat_template> LLM_CHAT_TEMPLATES = {
     { "deepseek3",         LLM_CHAT_TEMPLATE_DEEPSEEK_3        },
     { "command-r",         LLM_CHAT_TEMPLATE_COMMAND_R         },
     { "llama3",            LLM_CHAT_TEMPLATE_LLAMA_3           },
-    { "chatglm3",          LLM_CHAT_TEMPLATE_CHATGML_3         },
-    { "chatglm4",          LLM_CHAT_TEMPLATE_CHATGML_4         },
+    { "chatglm3",          LLM_CHAT_TEMPLATE_CHATGLM_3         },
+    { "chatglm4",          LLM_CHAT_TEMPLATE_CHATGLM_4         },
     { "glmedge",           LLM_CHAT_TEMPLATE_GLMEDGE           },
     { "minicpm",           LLM_CHAT_TEMPLATE_MINICPM           },
     { "exaone3",           LLM_CHAT_TEMPLATE_EXAONE_3          },
@@ -123,7 +123,7 @@ llm_chat_template llm_chat_detect_template(const std::string & tmpl) {
     } else if (tmpl_contains("<|assistant|>") && tmpl_contains("<|end|>")) {
         return LLM_CHAT_TEMPLATE_PHI_3;
     } else if (tmpl_contains("[gMASK]<sop>")) {
-        return LLM_CHAT_TEMPLATE_CHATGML_4;
+        return LLM_CHAT_TEMPLATE_CHATGLM_4;
     } else if (tmpl_contains("<|assistant|>") && tmpl_contains("<|user|>")) {
         return tmpl_contains("</s>") ? LLM_CHAT_TEMPLATE_FALCON_3 : LLM_CHAT_TEMPLATE_GLMEDGE;
     } else if (tmpl_contains("<|{{ item['role'] }}|>") && tmpl_contains("<|begin_of_image|>")) {
@@ -156,7 +156,7 @@ llm_chat_template llm_chat_detect_template(const std::string & tmpl) {
         return LLM_CHAT_TEMPLATE_LLAMA_3;
     } else if (tmpl_contains("[gMASK]sop")) {
         // chatglm3-6b
-        return LLM_CHAT_TEMPLATE_CHATGML_3;
+        return LLM_CHAT_TEMPLATE_CHATGLM_3;
     } else if (tmpl_contains(LU8("<用户>"))) {
         // MiniCPM-3B-OpenHermes-2.5-v2-GGUF
         return LLM_CHAT_TEMPLATE_MINICPM;
@@ -437,7 +437,7 @@ int32_t llm_chat_apply_template(
         if (add_ass) {
             ss << "<|start_header_id|>assistant<|end_header_id|>\n\n";
         }
-    } else if (tmpl == LLM_CHAT_TEMPLATE_CHATGML_3) {
+    } else if (tmpl == LLM_CHAT_TEMPLATE_CHATGLM_3) {
         // chatglm3-6b
         ss << "[gMASK]" << "sop";
         for (auto message : chat) {
@@ -447,7 +447,7 @@ int32_t llm_chat_apply_template(
         if (add_ass) {
             ss << "<|assistant|>";
         }
-    } else if (tmpl == LLM_CHAT_TEMPLATE_CHATGML_4) {
+    } else if (tmpl == LLM_CHAT_TEMPLATE_CHATGLM_4) {
         ss << "[gMASK]" << "<sop>";
         for (auto message : chat) {
             std::string role(message->role);
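
For reference, a minimal sketch of how the renamed enum values flow through detection and application. This is illustrative and not part of the change: it assumes the file is built inside the llama.cpp tree so the internal llama-chat.h header is visible, and the Jinja snippet and message contents are made up; only the "[gMASK]<sop>" marker matters for detection.

// Illustrative sketch: detect the ChatGLM-4 template and format a one-message chat.
#include "llama-chat.h"
#include "llama.h"

#include <cassert>
#include <string>
#include <vector>

int main() {
    // Hypothetical template string; detection keys off the "[gMASK]<sop>" marker.
    const std::string jinja = "[gMASK]<sop>{% for message in messages %}...{% endfor %}";
    llm_chat_template tmpl = llm_chat_detect_template(jinja);
    assert(tmpl == LLM_CHAT_TEMPLATE_CHATGLM_4);  // was LLM_CHAT_TEMPLATE_CHATGML_4 before the rename

    llama_chat_message msgs[] = {
        { "user", "Hello" },
    };
    std::vector<const llama_chat_message *> chat = { &msgs[0] };

    std::string prompt;
    llm_chat_apply_template(tmpl, chat, prompt, /*add_ass=*/true);
    // prompt should start with "[gMASK]<sop>" and, with add_ass, end with "<|assistant|>".
    return 0;
}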