Mirror of https://github.com/abetlen/llama-cpp-python.git, synced 2023-09-07 17:34:22 +03:00
tests: simple test for server module
@@ -128,3 +128,24 @@ def test_utf8(monkeypatch):
     n = 0  # reset
     completion = llama.create_completion("", max_tokens=1)
     assert completion["choices"][0]["text"] == ""
+
+
+def test_llama_server():
+    from fastapi.testclient import TestClient
+    import os
+    os.environ["MODEL"] = MODEL
+    os.environ["VOCAB_ONLY"] = "true"
+    from llama_cpp.server.app import app
+    client = TestClient(app)
+    response = client.get("/v1/models")
+    assert response.json() == {
+        "object": "list",
+        "data": [
+            {
+                "id": MODEL,
+                "object": "model",
+                "owned_by": "me",
+                "permissions": [],
+            }
+        ],
+    }
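Note (not part of the commit): the environment variables are set before importing llama_cpp.server.app because, at the time of this commit, the server module appears to read its settings and construct the underlying Llama instance at import time; with VOCAB_ONLY=true only the tokenizer vocabulary is loaded, so the test does not require full model weights. A minimal standalone sketch of the same check follows; the model path is a hypothetical stand-in, not taken from the repository.

# Standalone sketch of the server check above (assumptions: the model path is
# hypothetical, and the app reads MODEL / VOCAB_ONLY from the environment when
# the module is imported, which is why the env vars are set first).
import os

os.environ["MODEL"] = "./models/ggml-vocab.bin"  # hypothetical vocab-only model file
os.environ["VOCAB_ONLY"] = "true"                # load only the vocabulary, not full weights

from fastapi.testclient import TestClient
from llama_cpp.server.app import app  # import after the env vars are set

client = TestClient(app)
response = client.get("/v1/models")
print(response.json())  # expected shape: {"object": "list", "data": [{"id": ..., ...}]}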