From d2dfb36d6f7df77d8d41cf08dbd23ef8cf5d6b5e Mon Sep 17 00:00:00 2001
From: TCUDIKEL
Date: Sun, 11 May 2025 19:10:45 +0300
Subject: [PATCH] update for ollama backend

---
 commons/mcp_manager.py | 7 +++++--
 test.sh                | 6 +++---
 2 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/commons/mcp_manager.py b/commons/mcp_manager.py
index 27ebcd4..e8fd3b0 100644
--- a/commons/mcp_manager.py
+++ b/commons/mcp_manager.py
@@ -1,3 +1,4 @@
+import os
 import time
 import threading
 import asyncio
@@ -92,13 +93,15 @@ class MCPHostManager:
 
     def _build_command(self) -> List[str]:
         """Build the command to start mcphost"""
+        os.environ["OLLAMA_HOST"] = settings.ollama_host
+        os.environ["OLLAMA_NUM_CTX"] = str(settings.ollama_num_ctx)
         command = [
             settings.mcphost_path,
             '--config', settings.mcphost_config,
             '--model', settings.mcphost_model,
-            '--openai-url', settings.openai_url,
-            '--openai-api-key', settings.openai_api_key
         ]
+        # '--openai-url', settings.openai_url,
+        # '--openai-api-key', settings.openai_api_key
 
         if settings.debug:
             command.insert(1, '--debug')
diff --git a/test.sh b/test.sh
index c6a9f96..4bb6b7f 100755
--- a/test.sh
+++ b/test.sh
@@ -8,7 +8,7 @@
 # http://localhost:8000
 
 curl -X 'POST' \
-  'http://0.0.0.0:8000/v1/chat/completions' \
+  'http://10.8.0.10:33759/v1/chat/completions' \
   -H 'accept: application/json' \
   -H 'Content-Type: application/json' \
   -d '{
@@ -16,10 +16,10 @@ curl -X 'POST' \
   "messages": [
     {
       "role": "user",
-      "content": "can you give me your previous answer in JSON format? /no_think"
+      "content": "sum all first 10 digits of pi? /no_think"
     }
   ],
   "temperature": 0.7,
   "stream": false,
-  "max_tokens": 1024
+  "max_tokens": 2058
 }'
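Note: after this patch, _build_command reads settings.ollama_host and settings.ollama_num_ctx, which are not defined anywhere in the diff. A minimal, hypothetical sketch of the two fields the change assumes on the settings object (a plain dataclass used purely for illustration; the project's real config class, existing field names, and default values may differ):

import os
from dataclasses import dataclass

@dataclass
class Settings:
    # Fields _build_command already referenced before this patch
    mcphost_path: str = "mcphost"
    mcphost_config: str = "mcp.json"
    mcphost_model: str = "qwen3"          # hypothetical model name
    debug: bool = False
    # New fields this patch expects for the Ollama backend
    ollama_host: str = os.getenv("OLLAMA_HOST", "http://localhost:11434")
    ollama_num_ctx: int = int(os.getenv("OLLAMA_NUM_CTX", "8192"))

settings = Settings()

With values like these, the patched _build_command exports OLLAMA_HOST and OLLAMA_NUM_CTX into the environment before launching mcphost, instead of passing the now-commented --openai-url/--openai-api-key flags.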