
Commit 73fc9d8

token healing : handle more special tokens
Infill tokens were being rolled back in certain cases.
1 parent 2442cae · commit 73fc9d8

File tree

1 file changed: +10 −1 lines changed

common/sampling.cpp

Lines changed: 10 additions & 1 deletion
@@ -67,7 +67,16 @@ static llama_token_healing_output llama_token_healing_get_prefix(
 
     const llama_model * model = llama_get_model(ctx_main);
     auto is_special_token = [&](const llama_token token_id) {
-        return llama_token_is_control(model, token_id) || llama_token_is_eog(model, token_id);
+        return llama_token_is_control(model, token_id)
+            || llama_token_bos    (model) == token_id
+            || llama_token_eos    (model) == token_id
+            || llama_token_cls    (model) == token_id
+            || llama_token_sep    (model) == token_id
+            || llama_token_pad    (model) == token_id
+            || llama_token_prefix (model) == token_id
+            || llama_token_middle (model) == token_id
+            || llama_token_suffix (model) == token_id
+            || llama_token_eot    (model) == token_id;
     };
 
     if (th_type == llama_token_healing_type::DYNAMIC_ONCE || th_type == llama_token_healing_type::DYNAMIC_MULTI) {
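
Note: the lambda in this hunk is the predicate that decides where token healing stops rolling back the end of the prompt. Below is a minimal, illustrative sketch of how such a predicate can guard a rollback loop. It is not the actual llama_token_healing_get_prefix implementation; the helper name heal_prefix_sketch and its parameters are hypothetical, and only the llama.cpp accessors already visible in the diff (plus the common helper llama_token_to_piece) are assumed.

    #include <string>
    #include <vector>

    #include "llama.h"
    #include "common.h"   // for llama_token_to_piece

    // Illustrative sketch only; not the actual llama.cpp implementation.
    static std::string heal_prefix_sketch(llama_context * ctx_main,
                                          std::vector<llama_token> & tokens,
                                          size_t max_to_remove) {
        const llama_model * model = llama_get_model(ctx_main);

        // Same idea as the patched predicate: control tokens, BOS/EOS and the
        // infill (FIM prefix/middle/suffix, EOT) tokens must never be removed.
        auto is_special_token = [&](const llama_token token_id) {
            return llama_token_is_control(model, token_id)
                || llama_token_bos   (model) == token_id
                || llama_token_eos   (model) == token_id
                || llama_token_prefix(model) == token_id
                || llama_token_middle(model) == token_id
                || llama_token_suffix(model) == token_id
                || llama_token_eot   (model) == token_id;
        };

        std::string removed;
        size_t n_removed = 0;
        // Walk back from the end of the prompt. A special token is a hard stop,
        // so infill markers are never rolled back and re-sampled.
        while (!tokens.empty() && n_removed < max_to_remove && !is_special_token(tokens.back())) {
            removed = llama_token_to_piece(ctx_main, tokens.back()) + removed;
            tokens.pop_back();
            n_removed++;
        }
        return removed; // the text that constrained sampling is expected to regenerate
    }

This is why the commit widens the predicate: with only llama_token_is_control and llama_token_is_eog, models whose infill markers are not flagged as control tokens could have those markers rolled back, which is the misbehavior described in the commit message.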
