mirror of https://github.com/abetlen/llama-cpp-python.git
Add openai frequency and presence penalty parameters. Closes #169
@@ -214,8 +214,6 @@ def create_completion(
         exclude={
             "model",
             "n",
-            "frequency_penalty",
-            "presence_penalty",
             "best_of",
             "logit_bias",
             "user",
@@ -315,8 +313,6 @@ def create_chat_completion(
         exclude={
             "model",
             "n",
-            "presence_penalty",
-            "frequency_penalty",
             "logit_bias",
             "user",
         }
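For context, a minimal sketch of what removing the two fields from the exclude set accomplishes, assuming the server builds the completion kwargs from a Pydantic request model via request.dict(exclude=...) as the hunks above suggest. The class and field defaults below are illustrative, not the repository's exact code: once frequency_penalty and presence_penalty are no longer excluded, they are forwarded to the underlying completion call instead of being dropped.

# Hypothetical sketch of the exclude-set change; names are illustrative.
from pydantic import BaseModel

class CreateCompletionRequest(BaseModel):
    prompt: str
    model: str = "local"
    n: int = 1
    frequency_penalty: float = 0.0  # OpenAI-style penalty on token frequency
    presence_penalty: float = 0.0   # OpenAI-style penalty on tokens already present
    user: str = ""

request = CreateCompletionRequest(prompt="Hello", frequency_penalty=0.5)

# Before this commit: the penalty fields were listed in `exclude`,
# so they never reached the completion call.
kwargs_before = request.dict(
    exclude={"model", "n", "frequency_penalty", "presence_penalty", "user"}
)

# After this commit: only the unsupported fields are excluded, so the
# penalties pass through, e.g. llama(**kwargs_after) would receive them.
kwargs_after = request.dict(exclude={"model", "n", "user"})

assert "frequency_penalty" not in kwargs_before
assert kwargs_after["frequency_penalty"] == 0.5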