Mirror of https://github.com/abetlen/llama-cpp-python.git (synced 2023-09-07 17:34:22 +03:00)
Fix stripping instruction prompt
@@ -75,7 +75,7 @@ class LLaMAInteract:
 
         # add instruction as antiprompt
         if (self.instruct):
-            self.first_antiprompt.append(self.inp_prefix.strip())
+            self.first_antiprompt.append(self._tokenize(self.inp_prefix.strip()))
 
         # primer feed
         if (len(primer) > 0):
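For context, a minimal sketch of why the fix matters: the interactive chat loop compares recently generated token IDs against each stored antiprompt, so appending the raw prefix string (as before the fix) could never match; the prefix has to be stripped and tokenized first. The tokenize helper, the prefix string, and the should_stop check below are illustrative assumptions, not the repository's actual code.

# Illustrative sketch only; stand-ins for self._tokenize / self.inp_prefix /
# self.first_antiprompt in the real LLaMAInteract class.

def tokenize(text: str) -> list[int]:
    # Stand-in for llama.cpp tokenization; purely illustrative.
    return [ord(c) for c in text]

inp_prefix = "\n\n### Instruction:\n\n"   # assumed instruction prefix
first_antiprompt: list[list[int]] = []

# Before the fix, the raw string was appended, which can never equal a list
# of output token IDs. After the fix, the prefix is stripped and tokenized:
first_antiprompt.append(tokenize(inp_prefix.strip()))

def should_stop(last_n_tokens: list[int]) -> bool:
    # Stop generating once the most recent tokens end with any antiprompt.
    return any(
        len(anti) <= len(last_n_tokens) and last_n_tokens[-len(anti):] == anti
        for anti in first_antiprompt
    )

# Example: generation would stop once the model starts echoing the prefix.
print(should_stop(tokenize("some output\n\n### Instruction:")))  # True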