1 file changed: 16 additions, 0 deletions

@@ -19047,6 +19047,22 @@ static int32_t llama_chat_apply_template_internal(
         if (add_ass) {
             ss << "Assistant:";
         }
+    } else if (tmpl == "exaone3" || (tmpl_contains("[|system|]") && tmpl_contains("[|assistant|]") && tmpl_contains("[|endofturn|]"))) {
+        // ref: https://huggingface.co/LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct/discussions/8#66bae61b1893d14ee8ed85bb
+        // EXAONE-3.0-7.8B-Instruct
+        for (auto message : chat) {
+            std::string role(message->role);
+            if (role == "system") {
+                ss << "[|system|]" << trim(message->content) << "[|endofturn|]\n";
+            } else if (role == "user") {
+                ss << "[|user|]" << trim(message->content) << "\n";
+            } else if (role == "assistant") {
+                ss << "[|assistant|]" << trim(message->content) << "[|endofturn|]\n";
+            }
+        }
+        if (add_ass) {
+            ss << "[|assistant|]";
+        }
     } else {
         // template not supported
         return -1;
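
For illustration, here is a minimal standalone sketch of the prompt string the new branch produces. It is not part of the patch and does not use the llama.cpp API: the `msg` struct and the local `trim()` helper are stand-ins for the library's `llama_chat_message` and internal trim, and the sample conversation is invented.

```cpp
// Standalone sketch of the EXAONE-3.0 formatting added in the diff above.
// The struct and trim() here are local stand-ins, not llama.cpp types.
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

struct msg { std::string role; std::string content; };

static std::string trim(const std::string & s) {
    size_t b = s.find_first_not_of(" \t\n");
    size_t e = s.find_last_not_of(" \t\n");
    return b == std::string::npos ? "" : s.substr(b, e - b + 1);
}

int main() {
    std::vector<msg> chat = {
        {"system",    "You are a helpful assistant."},
        {"user",      "Hello!"},
        {"assistant", "Hi, how can I help?"},
        {"user",      "Tell me a joke."},
    };
    const bool add_ass = true; // append the assistant prefix so generation starts a new turn

    std::stringstream ss;
    for (const auto & m : chat) {
        if (m.role == "system") {
            ss << "[|system|]" << trim(m.content) << "[|endofturn|]\n";
        } else if (m.role == "user") {
            ss << "[|user|]" << trim(m.content) << "\n";
        } else if (m.role == "assistant") {
            ss << "[|assistant|]" << trim(m.content) << "[|endofturn|]\n";
        }
    }
    if (add_ass) {
        ss << "[|assistant|]";
    }

    std::cout << ss.str() << std::endl;
    return 0;
}
```

With `add_ass` set to true, the output ends with a bare `[|assistant|]` prefix, matching how the template is expected to prime the model for its next reply; assistant turns that already contain content are closed with `[|endofturn|]`.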