Commit a9bb2e2

add chat template
1 parent 1595b69 commit a9bb2e2

File tree

1 file changed: +16 -0 lines changed


src/llama.cpp

Lines changed: 16 additions & 0 deletions
@@ -19047,6 +19047,22 @@ static int32_t llama_chat_apply_template_internal(
         if (add_ass) {
             ss << "Assistant:";
         }
+    } else if (tmpl == "exaone3" || (tmpl_contains("[|system|]") && tmpl_contains("[|assistant|]") && tmpl_contains("[|endofturn|]"))) {
+        // ref: https://huggingface.co/LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct/discussions/8#66bae61b1893d14ee8ed85bb
+        // EXAONE-3.0-7.8B-Instruct
+        for (auto message : chat) {
+            std::string role(message->role);
+            if (role == "system") {
+                ss << "[|system|]" << trim(message->content) << "[|endofturn|]\n";
+            } else if (role == "user") {
+                ss << "[|user|]" << trim(message->content) << "\n";
+            } else if (role == "assistant") {
+                ss << "[|assistant|]" << trim(message->content) << "[|endofturn|]\n";
+            }
+        }
+        if (add_ass) {
+            ss << "[|assistant|]";
+        }
     } else {
         // template not supported
         return -1;
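
For reference, the added branch renders a conversation into EXAONE-3.0's turn format. Below is a minimal standalone sketch of that formatting logic, not the llama.cpp API: the chat_message struct, the local trim() helper, and the sample conversation are illustrative stand-ins mirroring the diff above.

    #include <iostream>
    #include <sstream>
    #include <string>
    #include <vector>

    // Simplified stand-ins for llama.cpp's chat message type and trim() helper
    // (names are illustrative, not the library's types).
    struct chat_message {
        std::string role;
        std::string content;
    };

    static std::string trim(const std::string & s) {
        const size_t b = s.find_first_not_of(" \t\n");
        if (b == std::string::npos) {
            return "";
        }
        const size_t e = s.find_last_not_of(" \t\n");
        return s.substr(b, e - b + 1);
    }

    int main() {
        const std::vector<chat_message> chat = {
            { "system",    "You are a helpful assistant." },
            { "user",      "Hello." },
            { "assistant", "Hi, how can I help?" },
            { "user",      "Summarize this commit." },
        };
        const bool add_ass = true; // append the assistant prefix so generation continues the next turn

        std::stringstream ss;
        for (const auto & message : chat) {
            if (message.role == "system") {
                ss << "[|system|]" << trim(message.content) << "[|endofturn|]\n";
            } else if (message.role == "user") {
                ss << "[|user|]" << trim(message.content) << "\n";
            } else if (message.role == "assistant") {
                ss << "[|assistant|]" << trim(message.content) << "[|endofturn|]\n";
            }
        }
        if (add_ass) {
            ss << "[|assistant|]";
        }

        std::cout << ss.str() << std::endl;
        return 0;
    }

With the sample conversation above, the printed prompt interleaves [|system|], [|user|], and [|assistant|] turns, closes system and assistant turns with [|endofturn|], and ends with a bare [|assistant|] prefix because add_ass is true.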
