Mirror of https://github.com/abetlen/llama-cpp-python.git, synced 2023-09-07 17:34:22 +03:00.
Commit: "Fix decode errors permanently"
This commit is contained in:
@@ -70,7 +70,7 @@ while remaining_tokens > 0:
     if not input_noecho:
         for id in embd:
             print(
-                llama_cpp.llama_token_to_str(ctx, id).decode("utf-8"),
+                llama_cpp.llama_token_to_str(ctx, id).decode("utf-8", errors="ignore"),
                 end="",
                 flush=True,
             )
Reference in New Issue
Block a user