Mirror of https://github.com/abetlen/llama-cpp-python.git, synced 2023-09-07 17:34:22 +03:00
Enable logprobs on completion endpoint
@@ -118,7 +118,6 @@ def create_completion(request: CreateCompletionRequest, llama: llama_cpp.Llama=D
             exclude={
                 "model",
                 "n",
-                "logprobs",
                 "frequency_penalty",
                 "presence_penalty",
                 "best_of",
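The hunk only shows the exclude set, not the surrounding call. As a minimal sketch of the effect (not the server's actual code), the snippet below assumes the handler forwards the non-excluded request fields to the llama call as keyword arguments via pydantic's request.dict(exclude=...); the trimmed-down field list is illustrative.

# Sketch only: illustrative fields, hypothetical return value in place of llama(**kwargs).
from typing import Optional

from pydantic import BaseModel


class CreateCompletionRequest(BaseModel):
    prompt: str
    model: Optional[str] = None       # excluded: resolved server-side, not passed to the model
    logprobs: Optional[int] = None    # no longer excluded after this commit
    temperature: float = 0.8


def create_completion(request: CreateCompletionRequest) -> dict:
    # Before this commit, "logprobs" sat in the exclude set, so a value the
    # client sent never reached the model; dropping it forwards it through.
    kwargs = request.dict(exclude={"model"})
    return kwargs  # stand-in for llama(**kwargs)


print(create_completion(CreateCompletionRequest(prompt="Hi", logprobs=5)))
# {'prompt': 'Hi', 'logprobs': 5, 'temperature': 0.8}

In short, removing a field from the exclude set is all that is needed to let the endpoint honor it, since everything not excluded is passed straight through to the completion call.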