mirror of
https://github.com/abetlen/llama-cpp-python.git
synced 2023-09-07 17:34:22 +03:00
llama_cpp server: fix to ChatCompletionRequestMessage
When I generate a client, it breaks because it fails to process the schema of `ChatCompletionRequestMessage`. These changes fix that: - `Union[Literal["user"], Literal["channel"], ...]` is equivalent to `Literal["user", "channel", ...]` (per PEP 586), so the shorter form is used. - It turns out the default value `Literal["user"]` isn't JSON serializable, so it is replaced with the plain string `"user"`.
This commit is contained in:
@@ -58,7 +58,7 @@ class Completion(TypedDict):


 class ChatCompletionMessage(TypedDict):
-    role: Union[Literal["assistant"], Literal["user"], Literal["system"]]
+    role: Literal["assistant", "user", "system"]
     content: str


 class ChatCompletionChoice(TypedDict):
|||||||
@@ -215,8 +215,8 @@ def create_embedding(


 class ChatCompletionRequestMessage(BaseModel):
-    role: Union[Literal["system"], Literal["user"], Literal["assistant"]] = Field(
-        default=Literal["user"], description="The role of the message."
+    role: Literal["system", "user", "assistant"] = Field(
+        default="user", description="The role of the message."
     )
     content: str = Field(default="", description="The content of the message.")
Reference in New Issue
Block a user