
Commit ac86ac0

Author: Don Mahurin

low_level_api_chat_cpp.py: Fix missing antiprompt output in chat.

1 parent f1dcbb4 · commit ac86ac0


examples/low_level_api_chat_cpp.py

Lines changed: 6 additions & 3 deletions
@@ -409,12 +409,15 @@ def generate(self):
             # replace end of text token with newline token when in interactive mode
             if (id == llama_cpp.llama_token_eos() and self.params.interactive and not self.params.instruct):
                 id = self.llama_token_newline[0]
+                self.embd.append(id)
                 if (self.use_antiprompt()):
                     # tokenize and inject first reverse prompt
                     self.embd_inp += self.first_antiprompt[0]
-
-            # add it to the context
-            self.embd.append(id)
+                    for id in self.first_antiprompt[0]:
+                        self.embd.append(id)
+            else:
+                # add it to the context
+                self.embd.append(id)
 
             # echo this to console
             self.output_echo = True
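For context: before this commit, when an end-of-text token was swapped for a newline in interactive mode, the reverse prompt was appended only to self.embd_inp (pending input) and never to self.embd, the buffer that is actually echoed, so the antiprompt was dropped from the chat output. The patch appends the newline and the antiprompt tokens to self.embd as well, and moves the original "add it to the context" append into an else branch for ordinary tokens. Below is a minimal, runnable sketch of that control flow; it uses plain lists and invented token ids, and handle_token is a hypothetical helper for illustration, not part of the llama_cpp API.

# Minimal sketch of the post-commit control flow. Only the embd / embd_inp /
# first_antiprompt names come from the diff; everything else is illustrative.

def handle_token(id, eos_id, newline_id, interactive, instruct,
                 first_antiprompt, embd, embd_inp):
    if id == eos_id and interactive and not instruct:
        # replace end of text token with newline token when in interactive mode
        id = newline_id
        embd.append(id)
        if first_antiprompt:
            # queue the first reverse prompt as pending input...
            embd_inp += first_antiprompt[0]
            # ...and also push its tokens into the context so they get echoed (the fix)
            for id in first_antiprompt[0]:
                embd.append(id)
    else:
        # add it to the context
        embd.append(id)


# Illustrative usage with made-up token ids:
embd, embd_inp = [], []
handle_token(id=2, eos_id=2, newline_id=13, interactive=True, instruct=False,
             first_antiprompt=[[25, 26]], embd=embd, embd_inp=embd_inp)
print(embd)      # [13, 25, 26]  newline plus antiprompt tokens now reach the output
print(embd_inp)  # [25, 26]      antiprompt also queued as pending input

Here embd stands in for the tokens that reach the console echo and embd_inp for the tokens still to be consumed as input, which is how the diff's surrounding comments suggest the two buffers are used.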
