Mirror of https://github.com/abetlen/llama-cpp-python.git (synced 2023-09-07 17:34:22 +03:00)
Bugfix for models endpoint
@@ -357,7 +357,9 @@ GetModelResponse = create_model_from_typeddict(ModelList)
 
 
 @router.get("/v1/models", response_model=GetModelResponse)
-def get_models() -> ModelList:
+def get_models(
+    llama: llama_cpp.Llama = Depends(get_llama),
+) -> ModelList:
     return {
         "object": "list",
         "data": [
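The fix adds a `llama: llama_cpp.Llama = Depends(get_llama)` parameter so FastAPI injects the loaded model into the handler; previously the signature provided no `llama` object for the body to report on. The sketch below shows how such an endpoint can be wired up end to end. It is an illustration under stated assumptions, not the repository's exact code: `ModelData`, the placeholder `get_llama`, and the fields inside `"data"` (which the diff truncates) are assumptions.

    from typing import List, Optional, TypedDict

    import llama_cpp
    from fastapi import APIRouter, Depends

    router = APIRouter()


    class ModelData(TypedDict):
        # Hypothetical subset of the fields returned per model.
        id: str
        object: str


    class ModelList(TypedDict):
        object: str
        data: List[ModelData]


    # In the real server a shared Llama instance is created at startup and
    # handed out through get_llama(); a module-level placeholder stands in here.
    _llama: Optional[llama_cpp.Llama] = None


    def get_llama() -> llama_cpp.Llama:
        assert _llama is not None, "model has not been loaded yet"
        return _llama


    # The repository also passes response_model=GetModelResponse (a pydantic
    # model built from ModelList); omitted to keep the sketch short.
    @router.get("/v1/models")
    def get_models(
        # The bugfix: inject the loaded model via FastAPI's dependency system
        # instead of leaving `llama` unavailable inside the handler.
        llama: llama_cpp.Llama = Depends(get_llama),
    ) -> ModelList:
        return {
            "object": "list",
            "data": [
                {
                    "id": llama.model_path,  # illustrative; actual payload may differ
                    "object": "model",
                }
            ],
        }

With the server running (e.g. via `python -m llama_cpp.server`, which by default listens on port 8000), the endpoint can be exercised with `curl http://localhost:8000/v1/models`.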