mirror of https://github.com/abetlen/llama-cpp-python.git
synced 2023-09-07 17:34:22 +03:00
low_level_api_chat_cpp.py: Fix missing antiprompt output in chat.
@@ -382,10 +382,13 @@ n_keep = {self.params.n_keep}
                 # replace end of text token with newline token when in interactive mode
                 if (id == llama_cpp.llama_token_eos() and self.params.interactive and not self.params.instruct):
                     id = self.llama_token_newline[0]
+                    self.embd.append(id)
                     if (self.use_antiprompt()):
                         # tokenize and inject first reverse prompt
                         self.embd_inp += self.first_antiprompt[0]

+                        for id in self.first_antiprompt[0]:
+                            self.embd.append(id)
                 else:
                     # add it to the context
                     self.embd.append(id)
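The change makes the substituted newline token and the injected reverse-prompt tokens go into self.embd (the buffer that is evaluated and echoed to the chat) rather than only into self.embd_inp (the pending-input queue), so the antiprompt actually appears in the output. Below is a minimal, self-contained sketch of that control flow; the token ids, the handle_token helper, and the module-level buffers are illustrative assumptions, not the project's actual API.

    # Minimal sketch of the patched flow (assumed token ids and helper; not the project's API)
    EOS_TOKEN = 2                        # assumed end-of-sequence token id
    NEWLINE_TOKEN = 13                   # assumed newline token id
    first_antiprompt = [3148, 29901]     # assumed token ids for a reverse prompt such as "User:"

    embd = []       # tokens that will be evaluated and echoed this step
    embd_inp = []   # pending input tokens waiting to be consumed

    def handle_token(tok_id, interactive=True, instruct=False, use_antiprompt=True):
        # In interactive (non-instruct) mode, replace EOS with a newline and
        # inject the first reverse prompt into BOTH buffers so it gets printed.
        if tok_id == EOS_TOKEN and interactive and not instruct:
            tok_id = NEWLINE_TOKEN
            embd.append(tok_id)               # added by the fix: echo the newline
            if use_antiprompt:
                embd_inp.extend(first_antiprompt)
                for t in first_antiprompt:    # added by the fix: echo the antiprompt tokens
                    embd.append(t)
        else:
            embd.append(tok_id)               # normal path: add the sampled token to the context

    handle_token(EOS_TOKEN)
    print(embd)      # [13, 3148, 29901] -> newline + antiprompt tokens now reach the output
    print(embd_inp)  # [3148, 29901]     -> antiprompt is also queued as pending input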