Mirror of https://github.com/abetlen/llama-cpp-python.git (synced 2023-09-07 17:34:22 +03:00)
Remove try catch from chat
@@ -356,10 +356,7 @@ n_keep = {self.params.n_keep}
     def output(self):
         self.remaining_tokens = self.params.n_predict
         for id in self.generate():
-            try:
-                yield llama_cpp.llama_token_to_str(self.ctx, id).decode("utf-8", errors="ignore")
-            except UnicodeDecodeError:
-                pass
+            yield llama_cpp.llama_token_to_str(self.ctx, id).decode("utf-8", errors="ignore")
 
     # read user input
     def read_input(self):
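
A likely rationale for this change (an assumption, not stated in the commit message): bytes.decode("utf-8", errors="ignore") silently drops bytes it cannot decode rather than raising UnicodeDecodeError, so the try/except around the yield could never fire. A minimal sketch illustrating the difference; the example bytes are hypothetical token output, not taken from the repository:

# With errors="ignore", decode never raises UnicodeDecodeError;
# invalid byte sequences are simply dropped.
token_bytes = b"\xe4\xbd"  # truncated multi-byte UTF-8 sequence (hypothetical)

try:
    text = token_bytes.decode("utf-8")  # strict mode: raises on the truncated sequence
except UnicodeDecodeError:
    text = "<decode error>"
print(text)                                                 # -> <decode error>

print(repr(token_bytes.decode("utf-8", errors="ignore")))   # -> '' (no exception, bytes dropped)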