From 02f9fb82fbfe8b33dba3f39a81625837dca34e02 Mon Sep 17 00:00:00 2001
From: Andrei Betlen
Date: Sat, 15 Apr 2023 11:39:52 -0400
Subject: [PATCH] Bugfix

---
 llama_cpp/llama.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/llama_cpp/llama.py b/llama_cpp/llama.py
index 54a2f4a..e570236 100644
--- a/llama_cpp/llama.py
+++ b/llama_cpp/llama.py
@@ -695,10 +695,7 @@ class Llama:
         Returns:
             Generated chat completion or a stream of chat completion chunks.
         """
-        stop = stop if not None else []
-        instructions = """Complete the following chat conversation between the user and the assistant. System messages should be strictly followed as additional instructions."""
-        chat_history = "\n".join(
-            f'{message["role"]} {message.get("user", "")}: {message["content"]}'
+        stop = stop if stop is not None else []
             for message in messages
         )
         PROMPT = f" \n\n### Instructions:{instructions}\n\n### Inputs:{chat_history}\n\n### Response:\nassistant: "