From 399710b8cc55421322cc3ff0408c001ecb8c7810 Mon Sep 17 00:00:00 2001
From: Alihan
Date: Sat, 10 May 2025 22:55:57 +0300
Subject: [PATCH 1/2] fixed bugs on settings

---
 .gitignore                         | 3 ++-
 commons/logging_utils.py           | 4 ++--
 commons/mcp_manager.py             | 4 ++--
 serve_mcphost_openai_compatible.py | 4 ++--
 4 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/.gitignore b/.gitignore
index bbba6c2..8db3fe7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
+supervisor
 .venv
 venv
 config.json
@@ -5,4 +6,4 @@ bins/mcphost
 mcphost_openai_api.log.*
 settings.py
 .idea
-__pycache__
\ No newline at end of file
+__pycache__
diff --git a/commons/logging_utils.py b/commons/logging_utils.py
index a4ccb5e..b36f51a 100644
--- a/commons/logging_utils.py
+++ b/commons/logging_utils.py
@@ -3,7 +3,7 @@ import time
 from functools import wraps
 
 from loguru import logger
-from settings import settings
+from commons.settings import settings
 
 
 def log_performance(func):
@@ -40,4 +40,4 @@ def setup_logger():
         diagnose=True
     )
 
-    return logger
\ No newline at end of file
+    return logger
diff --git a/commons/mcp_manager.py b/commons/mcp_manager.py
index e99554c..84eefda 100644
--- a/commons/mcp_manager.py
+++ b/commons/mcp_manager.py
@@ -5,7 +5,7 @@ from typing import List, Optional
 
 import pexpect
 from loguru import logger
-from settings import settings
+from commons.settings import settings
 from commons.response_cleaners import clean_response
 from commons.logging_utils import log_performance
 
@@ -181,4 +181,4 @@ class MCPHostManager:
                 break
 
         logger.debug("Collected response length: {} characters", len(response))
-        return response
\ No newline at end of file
+        return response
diff --git a/serve_mcphost_openai_compatible.py b/serve_mcphost_openai_compatible.py
index f8bb7b7..f4babf2 100644
--- a/serve_mcphost_openai_compatible.py
+++ b/serve_mcphost_openai_compatible.py
@@ -11,7 +11,7 @@ from commons.mcp_manager import MCPHostManager
 from commons.logging_utils import setup_logger
 from commons.openai_models import ChatMessage, ChatCompletionRequest, AVAILABLE_MODELS
 from commons.openai_utils import generate_id, stream_response
-from settings import settings
+from commons.settings import settings
 
 # Setup logger
 logger = setup_logger()
@@ -127,4 +127,4 @@ async def health_check():
 if __name__ == "__main__":
     import uvicorn
 
-    uvicorn.run(app, host="0.0.0.0", port=8000)
+    uvicorn.run(app, host=settings.host, port=settings.port)

From eeae82619b792eda2c5502ff47b04f6031931cf8 Mon Sep 17 00:00:00 2001
From: Alihan
Date: Sat, 10 May 2025 23:45:28 +0300
Subject: [PATCH 2/2] .

---
 .gitignore | 1 +
 logs.sh    | 3 +++
 2 files changed, 4 insertions(+)
 create mode 100644 logs.sh

diff --git a/.gitignore b/.gitignore
index 8db3fe7..f8d88a4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
+supervisor.conf
 supervisor
 .venv
 venv
diff --git a/logs.sh b/logs.sh
new file mode 100644
index 0000000..cbf96a6
--- /dev/null
+++ b/logs.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+tail -f supervisor/logs/llm-api-mcphost.log
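
Note: both patches import settings from commons.settings and read settings.host and settings.port, but the module itself never appears in the series (the settings.py entry in .gitignore keeps it out of version control). A minimal sketch of what commons/settings.py might look like, assuming pydantic-settings and defaults matching the values removed from serve_mcphost_openai_compatible.py; the class name, base class, and defaults are assumptions, not part of the patches:

# commons/settings.py (hypothetical sketch, not included in the patch series).
# The field names host and port are taken from the uvicorn.run() call in
# serve_mcphost_openai_compatible.py; the pydantic-settings base class and
# the default values are assumptions.
from pydantic_settings import BaseSettings


class Settings(BaseSettings):
    # Bind address and port for the uvicorn server.
    host: str = "0.0.0.0"
    port: int = 8000


# Module-level singleton imported as: from commons.settings import settings
settings = Settings()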