From 83b2be6dc4e88154a72f221420823702bae6a1bc Mon Sep 17 00:00:00 2001
From: Andrei Betlen
Date: Sat, 15 Apr 2023 11:58:43 -0400
Subject: [PATCH] Update chat parameters

---
 llama_cpp/llama.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/llama_cpp/llama.py b/llama_cpp/llama.py
index 578dcb6..63c7b53 100644
--- a/llama_cpp/llama.py
+++ b/llama_cpp/llama.py
@@ -672,12 +672,12 @@ class Llama:
     def create_chat_completion(
         self,
         messages: List[ChatCompletionMessage],
-        temperature: float = 0.8,
+        temperature: float = 0.2,
         top_p: float = 0.95,
         top_k: int = 40,
         stream: bool = False,
         stop: Optional[List[str]] = [],
-        max_tokens: int = 128,
+        max_tokens: int = 256,
         repeat_penalty: float = 1.1,
     ) -> Union[ChatCompletion, Iterator[ChatCompletionChunk]]:
         """Generate a chat completion from a list of messages.
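
A minimal usage sketch of the changed defaults follows. The method name and the temperature/max_tokens parameters come from the hunk above; the Llama constructor arguments, the model path, the dict-style messages, and the response indexing are assumptions about the surrounding llama-cpp-python API and may differ between versions.

    from llama_cpp import Llama

    # Hypothetical model path; point this at a local model file.
    llm = Llama(model_path="./models/7B/ggml-model.bin")

    # With this patch applied, the call below samples at temperature=0.2
    # and generates up to max_tokens=256 unless the caller overrides them.
    completion = llm.create_chat_completion(
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": "Name the planets of the solar system."},
        ],
    )
    print(completion["choices"][0]["message"]["content"])

    # Callers that prefer the previous behaviour can still pass the old
    # values explicitly, since the parameters remain overridable per call.
    completion = llm.create_chat_completion(
        messages=[{"role": "user", "content": "Write a short poem."}],
        temperature=0.8,
        max_tokens=128,
    )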