Fix typescript hints for max_tokens

Kyle Corbitt
2023-07-21 12:04:58 -07:00
parent af9943eefc
commit 213a00a8e6
2 changed files with 8 additions and 1 deletion

@@ -150,7 +150,6 @@
     },
     "max_tokens": {
       "description": "The maximum number of [tokens](/tokenizer) to generate in the chat completion.\n\nThe total length of input tokens and generated tokens is limited by the model's context length. [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb) for counting tokens.\n",
-      "default": "inf",
       "type": "integer"
     },
     "presence_penalty": {