From b9098b0ef7309b63ebff99cdfadf641223c15025 Mon Sep 17 00:00:00 2001
From: Lucas Doyle
Date: Tue, 2 May 2023 14:08:51 -0700
Subject: [PATCH] llama_cpp server: prompt is a string

Not sure why this union type was here, but looking at llama.py, prompt is
only ever processed as a string for completion.

This was breaking types when generating an OpenAPI client.
---
 llama_cpp/server/app.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/llama_cpp/server/app.py b/llama_cpp/server/app.py
index ef8aa4e..595476f 100644
--- a/llama_cpp/server/app.py
+++ b/llama_cpp/server/app.py
@@ -126,7 +126,7 @@ repeat_penalty_field = Field(
 )
 
 class CreateCompletionRequest(BaseModel):
-    prompt: Union[str, List[str]] = Field(
+    prompt: Optional[str] = Field(
         default="", description="The prompt to generate completions for."
     )
 
@@ -175,9 +175,6 @@ CreateCompletionResponse = create_model_from_typeddict(llama_cpp.Completion)
 def create_completion(
     request: CreateCompletionRequest, llama: llama_cpp.Llama = Depends(get_llama)
 ):
-    if isinstance(request.prompt, list):
-        request.prompt = "".join(request.prompt)
-
     completion_or_chunks = llama(
         **request.dict(
             exclude={
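
Note (not part of the patch): a minimal sketch of why the union type was
breaking generated clients, assuming pydantic v1 as the server used at the
time; the Before/After model names are illustrative only.

    from typing import List, Optional, Union

    from pydantic import BaseModel, Field


    class Before(BaseModel):
        # The old field: a Union renders as `anyOf` in the JSON schema,
        # which many OpenAPI client generators turn into awkward wrapper types.
        prompt: Union[str, List[str]] = Field(default="")


    class After(BaseModel):
        # The new field: an optional string maps to a plain `string` schema.
        prompt: Optional[str] = Field(default="")


    print(Before.schema()["properties"]["prompt"])
    # {'title': 'Prompt', 'default': '', 'anyOf': [{'type': 'string'},
    #  {'type': 'array', 'items': {'type': 'string'}}]}

    print(After.schema()["properties"]["prompt"])
    # {'title': 'Prompt', 'default': '', 'type': 'string'}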