update for ollama backend

TCUDIKEL
2025-05-11 19:10:45 +03:00
parent 123d50a08a
commit d2dfb36d6f
2 changed files with 8 additions and 5 deletions

View File

@@ -1,3 +1,4 @@
+import os
 import time
 import threading
 import asyncio
@@ -92,13 +93,15 @@ class MCPHostManager:
     def _build_command(self) -> List[str]:
         """Build the command to start mcphost"""
+        os.environ["OLLAMA_HOST"] = settings.ollama_host
+        os.environ["OLLAMA_NUM_CTX"] = str(settings.ollama_num_ctx)
         command = [
             settings.mcphost_path,
             '--config', settings.mcphost_config,
             '--model', settings.mcphost_model,
-            '--openai-url', settings.openai_url,
-            '--openai-api-key', settings.openai_api_key
         ]
+        # '--openai-url', settings.openai_url,
+        # '--openai-api-key', settings.openai_api_key
         if settings.debug:
             command.insert(1, '--debug')

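For context, here is a minimal, stdlib-only sketch of what _build_command does after this change. The field names on settings mirror the diff; the dataclass shape, the default values, and the ollama:qwen3 model id are assumptions for illustration, not the project's actual settings module.

import os
from dataclasses import dataclass
from typing import List

@dataclass
class Settings:
    # Field names mirror the diff; defaults and the model id are hypothetical.
    mcphost_path: str = "mcphost"
    mcphost_config: str = "config.json"
    mcphost_model: str = "ollama:qwen3"
    ollama_host: str = "http://localhost:11434"
    ollama_num_ctx: int = 8192
    debug: bool = False

settings = Settings()

def build_command() -> List[str]:
    """Sketch of _build_command after this commit: the Ollama backend is
    configured via environment variables instead of --openai-* flags."""
    os.environ["OLLAMA_HOST"] = settings.ollama_host
    os.environ["OLLAMA_NUM_CTX"] = str(settings.ollama_num_ctx)
    command = [
        settings.mcphost_path,
        '--config', settings.mcphost_config,
        '--model', settings.mcphost_model,
    ]
    if settings.debug:
        command.insert(1, '--debug')
    return command

Because os.environ is mutated in place, both variables are inherited by the mcphost subprocess when it is later spawned; child processes receive the parent's environment by default.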
View File

@@ -8,7 +8,7 @@
 # http://localhost:8000
 curl -X 'POST' \
-  'http://0.0.0.0:8000/v1/chat/completions' \
+  'http://10.8.0.10:33759/v1/chat/completions' \
   -H 'accept: application/json' \
   -H 'Content-Type: application/json' \
   -d '{
@@ -16,10 +16,10 @@ curl -X 'POST' \
   "messages": [
     {
       "role": "user",
-      "content": "can you give me your previous answer in JSON format? /no_think"
+      "content": "sum all first 10 digits of pi? /no_think"
     }
   ],
   "temperature": 0.7,
   "stream": false,
-  "max_tokens": 1024
+  "max_tokens": 2058
 }'
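
For reference, the same request expressed as a Python sketch using the requests library. The endpoint, headers, and body fields are taken from the curl example above; the "model" value is a placeholder, since that field sits outside the hunk shown in this diff.

import requests

# Mirrors the curl example above; "MODEL_NAME" is a hypothetical placeholder
# because the real "model" field lies outside the diff hunk shown here.
response = requests.post(
    "http://10.8.0.10:33759/v1/chat/completions",
    headers={"accept": "application/json",
             "Content-Type": "application/json"},
    json={
        "model": "MODEL_NAME",
        "messages": [
            {"role": "user",
             "content": "sum all first 10 digits of pi? /no_think"},
        ],
        "temperature": 0.7,
        "stream": False,
        "max_tokens": 2058,
    },
    timeout=60,
)
print(response.json()["choices"][0]["message"]["content"])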