first commit
.gitignore (vendored, new file, +8 lines)
@@ -0,0 +1,8 @@
.venv
venv
config.json
mcphost
mcphost_openai_api.log.*
helpers/settings.py
.idea
__pycache__
.idea/inspectionProfiles/Project_Default.xml (generated, new file, +31 lines)
@@ -0,0 +1,31 @@
<component name="InspectionProjectProfileManager">
  <profile version="1.0">
    <option name="myName" value="Project Default" />
    <inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
      <option name="ignoredPackages">
        <value>
          <list size="18">
            <item index="0" class="java.lang.String" itemvalue="pandas" />
            <item index="1" class="java.lang.String" itemvalue="tiktoken" />
            <item index="2" class="java.lang.String" itemvalue="gensim" />
            <item index="3" class="java.lang.String" itemvalue="pydantic" />
            <item index="4" class="java.lang.String" itemvalue="configparser" />
            <item index="5" class="java.lang.String" itemvalue="future" />
            <item index="6" class="java.lang.String" itemvalue="tenacity" />
            <item index="7" class="java.lang.String" itemvalue="python-dotenv" />
            <item index="8" class="java.lang.String" itemvalue="aiohttp" />
            <item index="9" class="java.lang.String" itemvalue="pipmaster" />
            <item index="10" class="java.lang.String" itemvalue="setuptools" />
            <item index="11" class="java.lang.String" itemvalue="xlsxwriter" />
            <item index="12" class="java.lang.String" itemvalue="numpy" />
            <item index="13" class="java.lang.String" itemvalue="scipy" />
            <item index="14" class="java.lang.String" itemvalue="filelock" />
            <item index="15" class="java.lang.String" itemvalue="pycryptodomex" />
            <item index="16" class="java.lang.String" itemvalue="lxml" />
            <item index="17" class="java.lang.String" itemvalue="blobfile" />
          </list>
        </value>
      </option>
    </inspection_tool>
  </profile>
</component>
.idea/inspectionProfiles/profiles_settings.xml (generated, new file, +6 lines)
@@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
  <settings>
    <option name="USE_PROJECT_PROFILE" value="false" />
    <version value="1.0" />
  </settings>
</component>
.idea/mcphost-api.iml (generated, new file, +10 lines)
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$">
      <excludeFolder url="file://$MODULE_DIR$/.venv" />
    </content>
    <orderEntry type="jdk" jdkName="Python 3.12 (mcphost-api)" jdkType="Python SDK" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>
.idea/modules.xml (generated, new file, +8 lines)
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/mcphost-api.iml" filepath="$PROJECT_DIR$/.idea/mcphost-api.iml" />
    </modules>
  </component>
</project>
.idea/vcs.xml (generated, new file, +6 lines)
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="$PROJECT_DIR$" vcs="Git" />
  </component>
</project>
.idea/workspace.xml (generated, new file, +102 lines)
@@ -0,0 +1,102 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="AutoImportSettings">
    <option name="autoReloadType" value="SELECTIVE" />
  </component>
  <component name="ChangeListManager">
    <list default="true" id="bbbc2257-1e9d-40b2-a56b-3daf4fc0a636" name="Changes" comment="">
      <change afterPath="$PROJECT_DIR$/.idea/inspectionProfiles/Project_Default.xml" afterDir="false" />
      <change afterPath="$PROJECT_DIR$/.idea/inspectionProfiles/profiles_settings.xml" afterDir="false" />
      <change afterPath="$PROJECT_DIR$/.idea/mcphost-api.iml" afterDir="false" />
      <change afterPath="$PROJECT_DIR$/.idea/modules.xml" afterDir="false" />
      <change afterPath="$PROJECT_DIR$/.idea/vcs.xml" afterDir="false" />
      <change afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
      <change afterPath="$PROJECT_DIR$/helpers/__init__.py" afterDir="false" />
      <change afterPath="$PROJECT_DIR$/test.sh" afterDir="false" />
    </list>
    <option name="SHOW_DIALOG" value="false" />
    <option name="HIGHLIGHT_CONFLICTS" value="true" />
    <option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
    <option name="LAST_RESOLUTION" value="IGNORE" />
  </component>
  <component name="FileTemplateManagerImpl">
    <option name="RECENT_TEMPLATES">
      <list>
        <option value="Python Script" />
      </list>
    </option>
  </component>
  <component name="Git.Settings">
    <option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$" />
  </component>
  <component name="ProjectColorInfo"><![CDATA[{
  "associatedIndex": 5
}]]></component>
  <component name="ProjectId" id="2wuM0HE6IiWx1ypaydX36jB6Bh3" />
  <component name="ProjectLevelVcsManager" settingsEditedManually="true" />
  <component name="ProjectViewState">
    <option name="hideEmptyMiddlePackages" value="true" />
    <option name="showLibraryContents" value="true" />
  </component>
  <component name="PropertiesComponent"><![CDATA[{
  "keyToString": {
    "Python.serve_openai_compatible.executor": "Debug",
    "RunOnceActivity.ShowReadmeOnStart": "true",
    "RunOnceActivity.git.unshallow": "true",
    "git-widget-placeholder": "main"
  }
}]]></component>
  <component name="RecentsManager">
    <key name="MoveFile.RECENT_KEYS">
      <recent name="$PROJECT_DIR$/helpers" />
      <recent name="$PROJECT_DIR$/utils" />
    </key>
  </component>
  <component name="RunManager">
    <configuration name="serve_openai_compatible" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
      <module name="mcphost-api" />
      <option name="ENV_FILES" value="" />
      <option name="INTERPRETER_OPTIONS" value="" />
      <option name="PARENT_ENVS" value="true" />
      <envs>
        <env name="PYTHONUNBUFFERED" value="1" />
      </envs>
      <option name="SDK_HOME" value="" />
      <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
      <option name="IS_MODULE_SDK" value="true" />
      <option name="ADD_CONTENT_ROOTS" value="true" />
      <option name="ADD_SOURCE_ROOTS" value="true" />
      <option name="SCRIPT_NAME" value="$PROJECT_DIR$/serve_openai_compatible.py" />
      <option name="PARAMETERS" value="" />
      <option name="SHOW_COMMAND_LINE" value="false" />
      <option name="EMULATE_TERMINAL" value="false" />
      <option name="MODULE_MODE" value="false" />
      <option name="REDIRECT_INPUT" value="false" />
      <option name="INPUT_FILE" value="" />
      <method v="2" />
    </configuration>
    <recent_temporary>
      <list>
        <item itemvalue="Python.serve_openai_compatible" />
      </list>
    </recent_temporary>
  </component>
  <component name="SharedIndexes">
    <attachedChunks>
      <set>
        <option value="bundled-python-sdk-4f4e415b4190-aa17d162503b-com.jetbrains.pycharm.community.sharedIndexes.bundled-PC-243.26053.29" />
      </set>
    </attachedChunks>
  </component>
  <component name="SpellCheckerSettings" RuntimeDictionaries="0" Folders="0" CustomDictionaries="0" DefaultDictionary="application-level" UseSingleDictionary="true" transferred="true" />
  <component name="TaskManager">
    <task active="true" id="Default" summary="Default task">
      <changelist id="bbbc2257-1e9d-40b2-a56b-3daf4fc0a636" name="Changes" comment="" />
      <created>1746886781301</created>
      <option name="number" value="Default" />
      <option name="presentableId" value="Default" />
      <updated>1746886781301</updated>
    </task>
    <servers />
  </component>
</project>
helpers/__init__.py (new file, empty)
helpers/local_logger.py (new file, +15 lines)
@@ -0,0 +1,15 @@
class LoggerWriter:
    """Minimal file-like adapter that forwards writes to a logger function."""

    def __init__(self, logger_func):
        self.logger_func = logger_func

    def write(self, message):
        # Remove trailing newlines for cleaner logs
        if message.strip():
            self.logger_func(message.rstrip())

    def flush(self):
        # Required for file-like objects
        pass
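Because the class implements just write() and flush(), it satisfies Python's minimal write-only file-like contract and can stand in for any stream-shaped sink. A small illustrative sketch (redirecting print() into loguru; not something this commit does, just a demonstration of the contract):

import sys
from loguru import logger

from helpers.local_logger import LoggerWriter

# Anything print()ed now flows through logger.info; blank writes are dropped.
sys.stdout = LoggerWriter(logger.info)
print("hello from stdout")
sys.stdout = sys.__stdout__  # restore the real stream afterwards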
helpers/models.py (new file, +15 lines)
@@ -0,0 +1,15 @@
from typing import Optional, List

from pydantic import BaseModel


# OpenAI-compatible models
class ChatMessage(BaseModel):
    role: str
    content: str


class ChatCompletionRequest(BaseModel):
    model: str = "mcphost-model"
    messages: List[ChatMessage]
    max_tokens: Optional[int] = 512
    temperature: Optional[float] = 0.1
    stream: Optional[bool] = False
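A quick sketch of how this model validates an incoming OpenAI-style payload; the payload values below are made up for illustration:

from helpers.models import ChatCompletionRequest

# Hypothetical client payload in the OpenAI chat-completions shape.
payload = {
    "model": "mcphost-model",
    "messages": [{"role": "user", "content": "Hello, how are you?"}],
    "stream": False,
}

req = ChatCompletionRequest(**payload)      # nested dicts coerce to ChatMessage
print(req.messages[0].content)              # -> "Hello, how are you?"
print(req.max_tokens, req.temperature)      # defaults apply: 512 0.1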
mcphost_openai_api.log (new file, +548 lines)
@@ -0,0 +1,548 @@
2025-05-10 17:10:23.482 | INFO | __main__:<module>:378 - Starting OpenAI-compatible MCPhost API server...
2025-05-10 17:10:23.482 | INFO | __main__:<module>:379 - Configuration:
2025-05-10 17:10:23.483 | INFO | __main__:<module>:380 - MCPhost Path: ./mcphost
2025-05-10 17:10:23.483 | INFO | __main__:<module>:381 - Model: openai:qwen3:8b-q8_0
2025-05-10 17:10:23.483 | INFO | __main__:<module>:382 - OpenAI URL: http://0.0.0.0:33740/v1
2025-05-10 17:10:23.483 | INFO | __main__:<module>:383 - Debug: True
2025-05-10 17:10:25.677 | ERROR | __main__:chat_completions:216 - MCPhost process object is None
2025-05-10 17:11:54.622 | INFO | __main__:<module>:367 - Starting OpenAI-compatible MCPhost API server...
2025-05-10 17:11:54.622 | INFO | __main__:<module>:368 - Configuration:
2025-05-10 17:11:54.622 | INFO | __main__:<module>:369 - MCPhost Path: ./mcphost
2025-05-10 17:11:54.622 | INFO | __main__:<module>:370 - Model: openai:qwen3:8b-q8_0
2025-05-10 17:11:54.622 | INFO | __main__:<module>:371 - OpenAI URL: http://0.0.0.0:33740/v1
2025-05-10 17:11:54.622 | INFO | __main__:<module>:372 - Debug: True
2025-05-10 17:11:56.959 | ERROR | __main__:chat_completions:205 - MCPhost process object is None
2025-05-10 17:14:38.976 | INFO | __main__:<module>:367 - Starting OpenAI-compatible MCPhost API server...
2025-05-10 17:14:38.976 | INFO | __main__:<module>:368 - Configuration:
2025-05-10 17:14:38.976 | INFO | __main__:<module>:369 - MCPhost Path: ./mcphost
2025-05-10 17:14:38.976 | INFO | __main__:<module>:370 - Model: openai:qwen3:8b-q8_0
2025-05-10 17:14:38.976 | INFO | __main__:<module>:371 - OpenAI URL: http://0.0.0.0:33740/v1
2025-05-10 17:14:38.976 | INFO | __main__:<module>:372 - Debug: True
2025-05-10 17:14:38.984 | INFO | __main__:lifespan:74 - Starting MCPhost with command: ./mcphost --debug --config ./config.json --model openai:qwen3:8b-q8_0 --openai-url http://0.0.0.0:33740/v1 --openai-api-key anything
2025-05-10 17:14:38.984 | INFO | __main__:lifespan:75 - Environment: OLLAMA_NUM_CTX=40960
2025-05-10 17:14:39.002 | INFO | __main__:lifespan:80 - MCPhost process spawned with PID: 565922
2025-05-10 17:14:39.003 | INFO | __main__:lifespan:89 - Waiting for model to load...
2025-05-10 17:14:39.019 | ERROR | __main__:lifespan:142 - Failed to start MCPhost: 'function' object has no attribute 'write'
2025-05-10 17:14:39.019 | ERROR | __main__:lifespan:144 - MCPhost final output:
2025-05-10 17:14:39.019 | ERROR | __main__:lifespan:145 - Process alive: True
2025-05-10 17:14:39.019 | INFO | __main__:lifespan:151 - Shutting down MCPhost...
2025-05-10 17:14:39.019 | INFO | __main__:lifespan:159 - Terminating MCPhost process...
2025-05-10 17:14:39.120 | INFO | __main__:lifespan:162 - MCPhost process stopped
2025-05-10 17:16:06.172 | INFO | __main__:<module>:367 - Starting OpenAI-compatible MCPhost API server...
2025-05-10 17:16:06.172 | INFO | __main__:<module>:368 - Configuration:
2025-05-10 17:16:06.172 | INFO | __main__:<module>:369 - MCPhost Path: ./mcphost
2025-05-10 17:16:06.172 | INFO | __main__:<module>:370 - Model: openai:qwen3:8b-q8_0
2025-05-10 17:16:06.172 | INFO | __main__:<module>:371 - OpenAI URL: http://0.0.0.0:33740/v1
2025-05-10 17:16:06.173 | INFO | __main__:<module>:372 - Debug: True
2025-05-10 17:16:06.180 | INFO | __main__:lifespan:74 - Starting MCPhost with command: ./mcphost --debug --config ./config.json --model openai:qwen3:8b-q8_0 --openai-url http://0.0.0.0:33740/v1 --openai-api-key anything
2025-05-10 17:16:06.180 | INFO | __main__:lifespan:75 - Environment: OLLAMA_NUM_CTX=40960
2025-05-10 17:16:06.199 | INFO | __main__:lifespan:80 - MCPhost process spawned with PID: 565949
2025-05-10 17:16:06.199 | ERROR | __main__:lifespan:142 - Failed to start MCPhost: name 'LoggerWriter' is not defined
2025-05-10 17:16:06.199 | ERROR | __main__:lifespan:144 - MCPhost final output: None
2025-05-10 17:16:06.199 | ERROR | __main__:lifespan:145 - Process alive: True
2025-05-10 17:16:06.199 | INFO | __main__:lifespan:151 - Shutting down MCPhost...
2025-05-10 17:16:06.200 | INFO | __main__:lifespan:159 - Terminating MCPhost process...
2025-05-10 17:16:06.300 | INFO | __main__:lifespan:162 - MCPhost process stopped
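The two failures above are a pexpect API detail: the logfile_read attribute must be a file-like object exposing write() and flush(), so assigning the bare logger.debug callable fails as soon as pexpect tries to call .write() on it (the 17:14 run), and the first attempt at the wrapper apparently ran before LoggerWriter was imported (the 17:16:06 run). A minimal sketch of the broken and working assignments; the spawn command here is illustrative:

import pexpect
from loguru import logger

from helpers.local_logger import LoggerWriter

child = pexpect.spawn("./mcphost --debug", timeout=30, encoding="utf-8")

# Broken: pexpect calls logfile_read.write(...), and a bare function has no .write().
# child.logfile_read = logger.debug   # AttributeError: 'function' object has no attribute 'write'

# Working: wrap the callable in an object with write()/flush().
child.logfile_read = LoggerWriter(logger.debug)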
2025-05-10 17:16:52.374 | INFO | __main__:<module>:368 - Starting OpenAI-compatible MCPhost API server...
2025-05-10 17:16:52.374 | INFO | __main__:<module>:369 - Configuration:
2025-05-10 17:16:52.374 | INFO | __main__:<module>:370 - MCPhost Path: ./mcphost
2025-05-10 17:16:52.374 | INFO | __main__:<module>:371 - Model: openai:qwen3:8b-q8_0
2025-05-10 17:16:52.374 | INFO | __main__:<module>:372 - OpenAI URL: http://0.0.0.0:33740/v1
2025-05-10 17:16:52.374 | INFO | __main__:<module>:373 - Debug: True
2025-05-10 17:16:52.382 | INFO | __main__:lifespan:75 - Starting MCPhost with command: ./mcphost --debug --config ./config.json --model openai:qwen3:8b-q8_0 --openai-url http://0.0.0.0:33740/v1 --openai-api-key anything
2025-05-10 17:16:52.382 | INFO | __main__:lifespan:76 - Environment: OLLAMA_NUM_CTX=40960
2025-05-10 17:16:52.403 | INFO | __main__:lifespan:81 - MCPhost process spawned with PID: 565987
2025-05-10 17:16:52.403 | INFO | __main__:lifespan:90 - Waiting for model to load...
2025-05-10 17:16:52.420 | DEBUG | local_logger:write:11 - ]11;?\[6n
2025-05-10 17:16:57.428 | DEBUG | local_logger:write:11 - ]10;?\[6n
2025-05-10 17:17:02.434 | DEBUG | local_logger:write:11 - ]11;?\[6n
2025-05-10 17:17:07.440 | DEBUG | local_logger:write:11 - 2025/05/10 17:17:07 INFO <cmd/root.go:495> Model loaded provider=openai model=qwen3:8b-q8_0
2025-05-10 17:17:07.440 | INFO | __main__:lifespan:98 - MCPhost model loaded successfully
2025-05-10 17:17:07.440 | INFO | __main__:lifespan:109 - Waiting for prompt...
2025-05-10 17:17:07.441 | DEBUG | local_logger:write:11 - 2025/05/10 17:17:07 INFO <cmd/mcp.go:180> Created default config file path=./config.json
2025-05-10 17:17:07.458 | DEBUG | local_logger:write:11 -
┃ Enter your prompt (Type /help for commands, Ctrl+C to quit)
alt+enter / ctrl+j new line • ctrl+e open editor • enter submit
[... repeated TUI box redraws and cursor-movement escape sequences elided ...]
2025-05-10 17:17:17.441 | ERROR | __main__:lifespan:119 - Timeout waiting for prompt
2025-05-10 17:17:17.441 | ERROR | __main__:lifespan:120 - Output so far: provider=openai model=qwen3:8b-q8_0
[... captured TUI prompt box dump elided ...]
2025-05-10 17:17:17.442 | ERROR | __main__:lifespan:143 - Failed to start MCPhost: Timeout waiting for prompt
2025-05-10 17:17:17.442 | ERROR | __main__:lifespan:145 - MCPhost final output: provider=openai model=qwen3:8b-q8_0
[... captured TUI prompt box dump elided ...]
2025-05-10 17:17:17.442 | ERROR | __main__:lifespan:146 - Process alive: True
2025-05-10 17:17:17.442 | INFO | __main__:lifespan:152 - Shutting down MCPhost...
2025-05-10 17:17:17.442 | INFO | __main__:lifespan:160 - Terminating MCPhost process...
2025-05-10 17:17:17.543 | INFO | __main__:lifespan:163 - MCPhost process stopped
2025-05-10 17:22:30.034 | INFO | __main__:<module>:368 - Starting OpenAI-compatible MCPhost API server...
2025-05-10 17:22:30.034 | INFO | __main__:<module>:369 - Configuration:
2025-05-10 17:22:30.034 | INFO | __main__:<module>:370 - MCPhost Path: ./mcphost
2025-05-10 17:22:30.035 | INFO | __main__:<module>:371 - Model: openai:qwen3:8b-q8_0
2025-05-10 17:22:30.035 | INFO | __main__:<module>:372 - OpenAI URL: http://0.0.0.0:33740/v1
2025-05-10 17:22:30.035 | INFO | __main__:<module>:373 - Debug: True
2025-05-10 17:22:30.207 | INFO | __main__:lifespan:75 - Starting MCPhost with command: ./mcphost --debug --config ./config.json --model openai:qwen3:8b-q8_0 --openai-url http://0.0.0.0:33740/v1 --openai-api-key anything
2025-05-10 17:22:30.207 | INFO | __main__:lifespan:76 - Environment: OLLAMA_NUM_CTX=40960
2025-05-10 17:22:30.227 | ERROR | __main__:lifespan:83 - Failed to spawn MCPhost process: [Errno 8] Exec format error: b'./mcphost'
2025-05-10 17:22:30.228 | ERROR | __main__:lifespan:143 - Failed to start MCPhost: [Errno 8] Exec format error: b'./mcphost'
2025-05-10 17:22:30.228 | INFO | __main__:lifespan:152 - Shutting down MCPhost...
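[Errno 8] Exec format error from exec() generally means the file at ./mcphost is not a valid executable for the host platform, for example a binary built for another architecture or a non-binary placeholder; the run a minute later succeeds with a fresh PID, consistent with the binary having been replaced. A small diagnostic sketch, assuming a Linux host where a native binary starts with the ELF magic bytes:

# Check whether ./mcphost looks like a native Linux executable (ELF magic).
with open("./mcphost", "rb") as f:
    magic = f.read(4)

if magic != b"\x7fELF":
    print(f"not an ELF binary (first bytes: {magic!r}); wrong file or wrong platform?")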
2025-05-10 17:23:22.399 | INFO | __main__:<module>:368 - Starting OpenAI-compatible MCPhost API server...
2025-05-10 17:23:22.400 | INFO | __main__:<module>:369 - Configuration:
2025-05-10 17:23:22.400 | INFO | __main__:<module>:370 - MCPhost Path: ./mcphost
2025-05-10 17:23:22.400 | INFO | __main__:<module>:371 - Model: openai:qwen3:8b-q8_0
2025-05-10 17:23:22.400 | INFO | __main__:<module>:372 - OpenAI URL: http://0.0.0.0:33740/v1
2025-05-10 17:23:22.400 | INFO | __main__:<module>:373 - Debug: True
2025-05-10 17:23:22.545 | INFO | __main__:lifespan:75 - Starting MCPhost with command: ./mcphost --debug --config ./config.json --model openai:qwen3:8b-q8_0 --openai-url http://0.0.0.0:33740/v1 --openai-api-key anything
2025-05-10 17:23:22.546 | INFO | __main__:lifespan:76 - Environment: OLLAMA_NUM_CTX=40960
2025-05-10 17:23:22.571 | INFO | __main__:lifespan:81 - MCPhost process spawned with PID: 4385
2025-05-10 17:23:22.572 | INFO | __main__:lifespan:90 - Waiting for model to load...
2025-05-10 17:23:22.574 | DEBUG | local_logger:write:11 - pydev debugger: bytes arguments were passed to a new process creation function. Breakpoints may not work correctly.
2025-05-10 17:23:23.188 | DEBUG | local_logger:write:11 - ]11;?\[6n
2025-05-10 17:23:28.196 | DEBUG | local_logger:write:11 - ]10;?\[6n
2025-05-10 17:23:33.199 | DEBUG | local_logger:write:11 - ]11;?\[6n
2025-05-10 17:23:38.203 | DEBUG | local_logger:write:11 - 2025/05/10 17:23:38 INFO <cmd/root.go:495> Model loaded provider=openai model=qwen3:8b-q8_0
2025-05-10 17:23:38.204 | INFO | __main__:lifespan:98 - MCPhost model loaded successfully
2025-05-10 17:23:38.205 | INFO | __main__:lifespan:109 - Waiting for prompt...
2025-05-10 17:23:38.206 | DEBUG | local_logger:write:11 - [?25l[?2004h
2025-05-10 17:23:38.222 | DEBUG | local_logger:write:11 -
┃ Enter your prompt (Type /help for commands, Ctrl+C to quit)
alt+enter / ctrl+j new line • ctrl+e open editor • enter submit
[... TUI box borders and clear-line escape sequences elided ...]
2025-05-10 17:23:48.211 | ERROR | __main__:lifespan:119 - Timeout waiting for prompt
2025-05-10 17:23:48.211 | ERROR | __main__:lifespan:120 - Output so far: provider=openai model=qwen3:8b-q8_0
[... captured TUI prompt box dump elided ...]
2025-05-10 17:23:48.212 | ERROR | __main__:lifespan:143 - Failed to start MCPhost: Timeout waiting for prompt
2025-05-10 17:23:48.212 | ERROR | __main__:lifespan:145 - MCPhost final output: provider=openai model=qwen3:8b-q8_0
[... captured TUI prompt box dump elided ...]
2025-05-10 17:23:48.212 | ERROR | __main__:lifespan:146 - Process alive: True
2025-05-10 17:23:48.213 | INFO | __main__:lifespan:152 - Shutting down MCPhost...
2025-05-10 17:23:48.213 | INFO | __main__:lifespan:160 - Terminating MCPhost process...
2025-05-10 17:23:48.315 | INFO | __main__:lifespan:163 - MCPhost process stopped
2025-05-10 17:27:44.919 | INFO | __main__:<module>:375 - Starting OpenAI-compatible MCPhost API server...
2025-05-10 17:27:44.920 | INFO | __main__:<module>:376 - Configuration:
2025-05-10 17:27:44.921 | INFO | __main__:<module>:377 - MCPhost Path: ./mcphost
2025-05-10 17:27:44.921 | INFO | __main__:<module>:378 - Model: openai:qwen3:8b-q8_0
2025-05-10 17:27:44.921 | INFO | __main__:<module>:379 - OpenAI URL: http://0.0.0.0:33740/v1
2025-05-10 17:27:44.921 | INFO | __main__:<module>:380 - Debug: True
2025-05-10 17:27:45.074 | INFO | __main__:lifespan:75 - Starting MCPhost with command: ./mcphost --debug --config ./config.json --model openai:qwen3:8b-q8_0 --openai-url http://0.0.0.0:33740/v1 --openai-api-key anything
2025-05-10 17:27:45.074 | INFO | __main__:lifespan:76 - Environment: OLLAMA_NUM_CTX=40960
2025-05-10 17:27:45.094 | INFO | __main__:lifespan:81 - MCPhost process spawned with PID: 4500
2025-05-10 17:27:45.095 | INFO | __main__:lifespan:90 - Waiting for model to load...
2025-05-10 17:27:45.096 | DEBUG | local_logger:write:11 - pydev debugger: bytes arguments were passed to a new process creation function. Breakpoints may not work correctly.
2025-05-10 17:27:45.185 | DEBUG | local_logger:write:11 - ]11;?\[6n
2025-05-10 17:27:50.203 | DEBUG | local_logger:write:11 - ]10;?\[6n
2025-05-10 17:27:55.206 | DEBUG | local_logger:write:11 - ]11;?\[6n
2025-05-10 17:28:00.209 | DEBUG | local_logger:write:11 - 2025/05/10 17:28:00 INFO <cmd/root.go:495> Model loaded provider=openai model=qwen3:8b-q8_0
2025-05-10 17:28:00.210 | INFO | __main__:lifespan:98 - MCPhost model loaded successfully
2025-05-10 17:28:00.211 | INFO | __main__:lifespan:109 - Waiting for prompt...
2025-05-10 17:28:00.213 | DEBUG | local_logger:write:11 - [?25l[?2004h
2025-05-10 17:28:00.230 | DEBUG | local_logger:write:11 -
┃ Enter your prompt (Type /help for commands, Ctrl+C to quit)
alt+enter / ctrl+j new line • ctrl+e open editor • enter submit
[... TUI box borders and clear-line escape sequences elided ...]
2025-05-10 17:28:00.231 | INFO | __main__:lifespan:128 - MCPhost process started and ready
2025-05-10 17:28:00.232 | INFO | __main__:monitor_process:39 - MCPhost process status - Alive: True, Exit status: None
2025-05-10 17:28:00.232 | INFO | __main__:lifespan:137 - Started process monitoring thread
2025-05-10 17:28:00.234 | INFO | __main__:lifespan:141 - MCPhost process is alive and ready to accept requests
[... monitor_process heartbeats every 5 s (17:28:05 through 17:28:20, Alive: True, Exit status: None) elided ...]
2025-05-10 17:28:23.421 | DEBUG | __main__:chat_completions:231 - Sending to MCPhost: Hello, how are you?
[... ~90 DEBUG lines elided: the TUI echoed the prompt back one character or escape code per write ("[", "8", "A", ..., "H", "e", "l", "l", "o", ...) ...]
[... monitor_process heartbeats every 5 s (17:28:25 through 17:28:50) elided ...]
2025-05-10 17:28:53.604 | DEBUG | __main__:chat_completions:299 - MCPhost response: [8A[B[B┃ Hello, [0K [B[B[B[B[B[80D[8A[B[B┃ Hello, how are you? [0K [B[B[B[B[B[80D
[... monitor_process heartbeats every 5 s (17:28:55 through 17:30:00) elided ...]
2025-05-10 17:30:03.910 | INFO | __main__:lifespan:158 - Shutting down MCPhost...
2025-05-10 17:30:04.668 | INFO | __main__:lifespan:166 - Terminating MCPhost process...
2025-05-10 17:30:05.273 | INFO | __main__:lifespan:169 - MCPhost process stopped
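The 17:28 run works end to end, but the extracted response is still wrapped in cursor-movement escape codes ([8A[B[B┃ Hello, ...), while the next run logs a clean "MCPhost response: Hello, how are you?". One way to recover clean text from a TUI stream, and plausibly why serve_openai_compatible.py imports pyte, is to feed the raw output through an in-memory terminal emulator and read back the rendered screen; a sketch under that assumption, not necessarily the committed approach:

import pyte

def render_terminal_output(raw: str, cols: int = 80, rows: int = 24) -> str:
    """Feed raw TUI output (ANSI escapes included) into an in-memory
    terminal and return the visible screen text."""
    screen = pyte.Screen(cols, rows)
    stream = pyte.Stream(screen)
    stream.feed(raw)
    # screen.display is the list of rendered rows; keep non-empty ones.
    return "\n".join(row.rstrip() for row in screen.display if row.strip())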
2025-05-10 17:31:20.410 | INFO | __main__:<module>:404 - Starting OpenAI-compatible MCPhost API server...
2025-05-10 17:31:20.411 | INFO | __main__:<module>:405 - Configuration:
2025-05-10 17:31:20.411 | INFO | __main__:<module>:406 - MCPhost Path: ./mcphost
2025-05-10 17:31:20.411 | INFO | __main__:<module>:407 - Model: openai:qwen3:8b-q8_0
2025-05-10 17:31:20.412 | INFO | __main__:<module>:408 - OpenAI URL: http://0.0.0.0:33740/v1
2025-05-10 17:31:20.412 | INFO | __main__:<module>:409 - Debug: True
2025-05-10 17:31:20.550 | INFO | __main__:lifespan:75 - Starting MCPhost with command: ./mcphost --debug --config ./config.json --model openai:qwen3:8b-q8_0 --openai-url http://0.0.0.0:33740/v1 --openai-api-key anything
2025-05-10 17:31:20.551 | INFO | __main__:lifespan:76 - Environment: OLLAMA_NUM_CTX=40960
2025-05-10 17:31:20.574 | INFO | __main__:lifespan:81 - MCPhost process spawned with PID: 4603
2025-05-10 17:31:20.574 | INFO | __main__:lifespan:90 - Waiting for model to load...
2025-05-10 17:31:20.575 | DEBUG | helpers.local_logger:write:11 - pydev debugger: bytes arguments were passed to a new process creation function. Breakpoints may not work correctly.
2025-05-10 17:31:20.678 | DEBUG | helpers.local_logger:write:11 - ]11;?\[6n
2025-05-10 17:31:25.705 | DEBUG | helpers.local_logger:write:11 - ]10;?\[6n
2025-05-10 17:31:30.709 | DEBUG | helpers.local_logger:write:11 - ]11;?\[6n
2025-05-10 17:31:35.713 | DEBUG | helpers.local_logger:write:11 - 2025/05/10 17:31:35 INFO <cmd/root.go:495> Model loaded provider=openai model=qwen3:8b-q8_0
2025-05-10 17:31:35.714 | INFO | __main__:lifespan:98 - MCPhost model loaded successfully
2025-05-10 17:31:35.715 | INFO | __main__:lifespan:109 - Waiting for prompt...
2025-05-10 17:31:35.718 | DEBUG | helpers.local_logger:write:11 - [?25l[?2004h
2025-05-10 17:31:35.735 | DEBUG | helpers.local_logger:write:11 -
┃ Enter your prompt (Type /help for commands, Ctrl+C to quit)
alt+enter / ctrl+j new line • ctrl+e open editor • enter submit
[... TUI box borders and clear-line escape sequences elided ...]
2025-05-10 17:31:35.735 | INFO | __main__:lifespan:128 - MCPhost process started and ready
2025-05-10 17:31:35.736 | INFO | __main__:lifespan:137 - Started process monitoring thread
2025-05-10 17:31:35.736 | INFO | __main__:monitor_process:38 - MCPhost process status - Alive: True, Exit status: None
2025-05-10 17:31:35.737 | INFO | __main__:lifespan:141 - MCPhost process is alive and ready to accept requests
[... monitor_process heartbeats every 5 s (17:31:40 through 17:31:50) elided ...]
2025-05-10 17:31:55.321 | DEBUG | __main__:chat_completions:233 - Sending to MCPhost: Hello, how are you?
2025-05-10 17:31:55.485 | DEBUG | helpers.local_logger:write:11 - [8A[B[B┃ Hello, how are you? [0K [B[B[B[B[B[80D
2025-05-10 17:31:55.808 | INFO | __main__:monitor_process:38 - MCPhost process status - Alive: True, Exit status: None
2025-05-10 17:31:56.641 | DEBUG | __main__:chat_completions:325 - MCPhost response: Hello, how are you?
[... monitor_process heartbeats every 5 s (17:32:00 through 17:32:25) elided ...]
2025-05-10 17:32:27.241 | INFO | __main__:lifespan:158 - Shutting down MCPhost...
2025-05-10 17:32:27.900 | INFO | __main__:lifespan:166 - Terminating MCPhost process...
2025-05-10 17:32:28.505 | INFO | __main__:lifespan:169 - MCPhost process stopped
2025-05-10 17:32:32.580 | INFO | __main__:<module>:404 - Starting OpenAI-compatible MCPhost API server...
2025-05-10 17:32:32.580 | INFO | __main__:<module>:405 - Configuration:
2025-05-10 17:32:32.580 | INFO | __main__:<module>:406 - MCPhost Path: ../mcphost
2025-05-10 17:32:32.580 | INFO | __main__:<module>:407 - Model: openai:qwen3:8b-q8_0
2025-05-10 17:32:32.581 | INFO | __main__:<module>:408 - OpenAI URL: http://10.8.0.10:33740/v1
2025-05-10 17:32:32.581 | INFO | __main__:<module>:409 - Debug: True
2025-05-10 17:32:32.745 | INFO | __main__:lifespan:75 - Starting MCPhost with command: ../mcphost --debug --config ./config.json --model openai:qwen3:8b-q8_0 --openai-url http://10.8.0.10:33740/v1 --openai-api-key anything
2025-05-10 17:32:32.745 | INFO | __main__:lifespan:76 - Environment: OLLAMA_NUM_CTX=40960
2025-05-10 17:32:32.751 | ERROR | __main__:lifespan:83 - Failed to spawn MCPhost process: The command was not found or was not executable: ../mcphost.
2025-05-10 17:32:32.751 | ERROR | __main__:lifespan:149 - Failed to start MCPhost: The command was not found or was not executable: ../mcphost.
2025-05-10 17:32:32.752 | INFO | __main__:lifespan:158 - Shutting down MCPhost...
2025-05-10 17:32:43.168 | INFO | __main__:<module>:404 - Starting OpenAI-compatible MCPhost API server...
2025-05-10 17:32:43.168 | INFO | __main__:<module>:405 - Configuration:
2025-05-10 17:32:43.169 | INFO | __main__:<module>:406 - MCPhost Path: ./mcphost
2025-05-10 17:32:43.169 | INFO | __main__:<module>:407 - Model: openai:qwen3:8b-q8_0
2025-05-10 17:32:43.169 | INFO | __main__:<module>:408 - OpenAI URL: http://10.8.0.10:33740/v1
2025-05-10 17:32:43.169 | INFO | __main__:<module>:409 - Debug: True
2025-05-10 17:32:43.293 | INFO | __main__:lifespan:75 - Starting MCPhost with command: ./mcphost --debug --config ./config.json --model openai:qwen3:8b-q8_0 --openai-url http://10.8.0.10:33740/v1 --openai-api-key anything
2025-05-10 17:32:43.293 | INFO | __main__:lifespan:76 - Environment: OLLAMA_NUM_CTX=40960
2025-05-10 17:32:43.313 | INFO | __main__:lifespan:81 - MCPhost process spawned with PID: 4681
2025-05-10 17:32:43.314 | INFO | __main__:lifespan:90 - Waiting for model to load...
2025-05-10 17:32:43.315 | DEBUG | helpers.local_logger:write:11 - pydev debugger: bytes arguments were passed to a new process creation function. Breakpoints may not work correctly.
2025-05-10 17:32:43.349 | DEBUG | helpers.local_logger:write:11 - ]11;?\[6n
2025-05-10 17:32:48.357 | DEBUG | helpers.local_logger:write:11 - ]10;?\[6n
2025-05-10 17:32:53.358 | DEBUG | helpers.local_logger:write:11 - ]11;?\[6n
2025-05-10 17:32:58.364 | DEBUG | helpers.local_logger:write:11 - 2025/05/10 17:32:58 INFO <cmd/root.go:495> Model loaded provider=openai model=qwen3:8b-q8_0
2025-05-10 17:32:58.364 | INFO | __main__:lifespan:98 - MCPhost model loaded successfully
2025-05-10 17:32:58.365 | INFO | __main__:lifespan:109 - Waiting for prompt...
2025-05-10 17:32:58.366 | DEBUG | helpers.local_logger:write:11 - [?25l[?2004h
2025-05-10 17:32:58.383 | DEBUG | helpers.local_logger:write:11 -
┃ Enter your prompt (Type /help for commands, Ctrl+C to quit)
alt+enter / ctrl+j new line • ctrl+e open editor • enter submit
[... TUI box borders and clear-line escape sequences elided ...]
2025-05-10 17:32:58.384 | INFO | __main__:lifespan:128 - MCPhost process started and ready
2025-05-10 17:32:58.385 | INFO | __main__:monitor_process:38 - MCPhost process status - Alive: True, Exit status: None
2025-05-10 17:32:58.386 | INFO | __main__:lifespan:137 - Started process monitoring thread
2025-05-10 17:32:58.386 | INFO | __main__:lifespan:141 - MCPhost process is alive and ready to accept requests
2025-05-10 17:33:03.402 | INFO | __main__:monitor_process:38 - MCPhost process status - Alive: True, Exit status: None
2025-05-10 17:33:08.050 | DEBUG | __main__:chat_completions:233 - Sending to MCPhost: Hello, how are you?
2025-05-10 17:33:08.216 | DEBUG | helpers.local_logger:write:11 - [8A[B[B┃ Hello, how are you? [0K [B[B[B[B[B[80D
2025-05-10 17:33:08.420 | INFO | __main__:monitor_process:38 - MCPhost process status - Alive: True, Exit status: None
2025-05-10 17:33:09.367 | DEBUG | __main__:chat_completions:325 - MCPhost response: Hello, how are you?
[... monitor_process heartbeats every 5 s (17:33:13 through 17:33:33) elided ...]
2025-05-10 17:33:36.958 | DEBUG | __main__:chat_completions:233 - Sending to MCPhost: Tell me a joke.
2025-05-10 17:33:37.116 | DEBUG | helpers.local_logger:write:11 - [8A[B[B[B┃ Tell me a joke. [0K [B[B[B[B[80D
2025-05-10 17:33:38.258 | DEBUG | __main__:chat_completions:325 - MCPhost response: Tell me a joke.
[... monitor_process heartbeats every 5 s (17:33:38 through 17:33:48) elided ...]
2025-05-10 17:33:50.324 | DEBUG | __main__:chat_completions:233 - Sending to MCPhost: Tell me a joke.
2025-05-10 17:33:50.483 | DEBUG | helpers.local_logger:write:11 - [8A[B[B[B[B┃ Tell me a joke. [0K [B[B[B[80D
2025-05-10 17:33:51.627 | DEBUG | __main__:chat_completions:325 - MCPhost response: Tell me a joke.
[... monitor_process heartbeats every 5 s (17:33:53 through 17:34:38) elided ...]
2025-05-10 17:34:42.098 | INFO | __main__:lifespan:158 - Shutting down MCPhost...
2025-05-10 17:34:42.734 | INFO | __main__:lifespan:166 - Terminating MCPhost process...
2025-05-10 17:34:43.337 | INFO | __main__:lifespan:169 - MCPhost process stopped
2025-05-10 17:34:50.679 | INFO | __main__:<module>:404 - Starting OpenAI-compatible MCPhost API server...
2025-05-10 17:34:50.679 | INFO | __main__:<module>:405 - Configuration:
2025-05-10 17:34:50.679 | INFO | __main__:<module>:406 - MCPhost Path: ./mcphost
2025-05-10 17:34:50.679 | INFO | __main__:<module>:407 - Model: openai:qwen3:8b-q8_0
2025-05-10 17:34:50.679 | INFO | __main__:<module>:408 - OpenAI URL: http://10.8.0.10:33740/v1
2025-05-10 17:34:50.680 | INFO | __main__:<module>:409 - Debug: True
2025-05-10 17:34:50.814 | INFO | __main__:lifespan:75 - Starting MCPhost with command: ./mcphost --debug --config ./config.json --model openai:qwen3:8b-q8_0 --openai-url http://10.8.0.10:33740/v1 --openai-api-key anything
2025-05-10 17:34:50.814 | INFO | __main__:lifespan:76 - Environment: OLLAMA_NUM_CTX=40960
2025-05-10 17:34:50.834 | INFO | __main__:lifespan:81 - MCPhost process spawned with PID: 4756
2025-05-10 17:34:50.834 | INFO | __main__:lifespan:90 - Waiting for model to load...
2025-05-10 17:34:50.835 | DEBUG | helpers.local_logger:write:11 - pydev debugger: bytes arguments were passed to a new process creation function. Breakpoints may not work correctly.
2025-05-10 17:34:50.870 | DEBUG | helpers.local_logger:write:11 - ]11;?\[6n
2025-05-10 17:34:55.879 | DEBUG | helpers.local_logger:write:11 - ]10;?\[6n
2025-05-10 17:35:00.884 | DEBUG | helpers.local_logger:write:11 - ]11;?\[6n
2025-05-10 17:35:05.887 | DEBUG | helpers.local_logger:write:11 - 2025/05/10 17:35:05 INFO <cmd/root.go:495> Model loaded provider=openai model=qwen3:8b-q8_0
2025-05-10 17:35:05.887 | INFO | __main__:lifespan:98 - MCPhost model loaded successfully
2025-05-10 17:35:05.888 | INFO | __main__:lifespan:109 - Waiting for prompt...
2025-05-10 17:35:05.888 | DEBUG | helpers.local_logger:write:11 - [?25l[?2004h
2025-05-10 17:35:05.905 | DEBUG | helpers.local_logger:write:11 -
┃ Enter your prompt (Type /help for commands, Ctrl+C to quit)
[... remainder of mcphost_openai_api.log (548 lines total) truncated in the diff view ...]
requirements.txt (new file, +20 lines)
@@ -0,0 +1,20 @@
annotated-types==0.7.0
anyio==4.9.0
click==8.1.8
fastapi==0.115.12
fsspec==2024.6.1
h11==0.16.0
idna==3.10
loguru==0.7.3
pexpect==4.9.0
ptyprocess==0.7.0
pydantic==2.11.4
pydantic-settings==2.9.1
pydantic_core==2.33.2
python-dotenv==1.1.0
sniffio==1.3.1
starlette==0.46.2
tqdm==4.66.4
typing-inspection==0.4.0
typing_extensions==4.12.2
uvicorn==0.34.2
serve_openai_compatible.py (new file, +411 lines)
@@ -0,0 +1,411 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
OpenAI-compatible MCPhost API wrapper with enhanced debugging
|
||||
Usage: python mcphost_openai_api.py
|
||||
"""
|
||||
|
||||
import json
|
||||
import time
|
||||
import asyncio
|
||||
import os
|
||||
from typing import Optional
|
||||
from contextlib import asynccontextmanager
|
||||
import threading
|
||||
|
||||
from fastapi import FastAPI, HTTPException
|
||||
from starlette.responses import StreamingResponse
|
||||
import uvicorn
|
||||
from loguru import logger
|
||||
import pexpect
|
||||
import pyte
|
||||
|
||||
from helpers.settings import settings
|
||||
from helpers.models import ChatCompletionRequest
|
||||
from helpers.local_logger import LoggerWriter
|
||||
|
||||
# Global process variable
|
||||
mcp_process: Optional[pexpect.spawn] = None
|
||||
process_monitor_thread = None
|
||||
stop_monitoring = threading.Event()
|
||||
|
||||
|
||||
def monitor_process():
|
||||
"""Background thread to monitor MCPhost process status"""
|
||||
while not stop_monitoring.is_set():
|
||||
if mcp_process:
|
||||
is_alive = mcp_process.isalive()
|
||||
exit_status = mcp_process.exitstatus if not is_alive else None
|
||||
logger.info(f"MCPhost process status - Alive: {is_alive}, Exit status: {exit_status}")
|
||||
|
||||
if not is_alive and exit_status is not None:
|
||||
logger.error(f"MCPhost process died with exit code: {exit_status}")
|
||||
else:
|
||||
logger.warning("MCPhost process is None")
|
||||
|
||||
# Wait 5 seconds before next check
|
||||
for _ in range(5):
|
||||
if stop_monitoring.is_set():
|
||||
break
|
||||
time.sleep(1)
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    # Startup: Start MCPhost subprocess
    global mcp_process, process_monitor_thread

    try:
        # Build command
        cmd = [settings.mcphost_path]

        if settings.debug:
            cmd.append("--debug")

        cmd.extend([
            "--config", settings.mcphost_config,
            "--model", settings.mcphost_model,
            "--openai-url", settings.openai_url,
            "--openai-api-key", settings.openai_api_key
        ])

        # Set environment
        env = os.environ.copy()
        env["OLLAMA_NUM_CTX"] = str(settings.ollama_num_ctx)

        logger.info(f"Starting MCPhost with command: {' '.join(cmd)}")
        logger.info(f"Environment: OLLAMA_NUM_CTX={settings.ollama_num_ctx}")

        # Use pexpect to spawn MCPhost in interactive mode
        # (joining with spaces assumes no argument contains whitespace)
        try:
            mcp_process = pexpect.spawn(" ".join(cmd), timeout=30, encoding='utf-8', env=env)
            logger.info(f"MCPhost process spawned with PID: {mcp_process.pid}")
        except Exception as spawn_error:
            logger.error(f"Failed to spawn MCPhost process: {spawn_error}")
            raise

        # Enable detailed logging of everything MCPhost writes to the pty
        mcp_process.logfile_read = LoggerWriter(logger.debug)

        # Wait for model to load
        logger.info("Waiting for model to load...")
        index = mcp_process.expect([
            "Model loaded",
            pexpect.TIMEOUT,
            pexpect.EOF
        ], timeout=30)

        if index == 0:
            logger.info("MCPhost model loaded successfully")
        elif index == 1:
            logger.error("Timeout waiting for model to load")
            logger.error(f"Output so far: {mcp_process.before}")
            raise RuntimeError("Timeout waiting for model to load")
        elif index == 2:
            logger.error("MCPhost process ended unexpectedly")
            logger.error(f"Output: {mcp_process.before}")
            raise RuntimeError("MCPhost process ended unexpectedly")

        # Wait for prompt interface
        logger.info("Waiting for prompt...")

        # Read output until we see the prompt interface
        seen_prompt = False
        start_time = time.time()

        while time.time() - start_time < 10:  # 10 second timeout
            try:
                output = mcp_process.read_nonblocking(size=1000, timeout=0.5)
                if "Enter your prompt" in output or "┃" in output:
                    seen_prompt = True
                    break
            except pexpect.TIMEOUT:
                continue
            except pexpect.EOF:
                logger.error("MCPhost process ended unexpectedly")
                raise RuntimeError("MCPhost process ended unexpectedly")

        if seen_prompt:
            logger.info("MCPhost process started and ready")
        else:
            logger.error("Timeout waiting for prompt")
            raise RuntimeError("Timeout waiting for prompt")

        # Start process monitoring thread
        stop_monitoring.clear()
        process_monitor_thread = threading.Thread(target=monitor_process, daemon=True)
        process_monitor_thread.start()
        logger.info("Started process monitoring thread")

        # Final check
        if mcp_process.isalive():
            logger.info("MCPhost process is alive and ready to accept requests")
        else:
            logger.error("MCPhost process is not alive after startup")
            raise RuntimeError("MCPhost process died during startup")

        yield

    except Exception as e:
        logger.error(f"Failed to start MCPhost: {e}")
        if mcp_process:
            logger.error(f"MCPhost final output: {mcp_process.before}")
            logger.error(f"Process alive: {mcp_process.isalive()}")
            if not mcp_process.isalive():
                logger.error(f"Exit status: {mcp_process.exitstatus}")
        raise
    finally:
        # Shutdown: Clean up subprocess
        logger.info("Shutting down MCPhost...")
        stop_monitoring.set()

        if process_monitor_thread:
            process_monitor_thread.join(timeout=2)

        if mcp_process:
            if mcp_process.isalive():
                logger.info("Terminating MCPhost process...")
                mcp_process.terminate()
                mcp_process.wait()
            logger.info("MCPhost process stopped")


# Create FastAPI app
app = FastAPI(title="OpenAI-compatible MCPhost API", lifespan=lifespan)


async def _resp_async_generator(response_text: str, model: str):
    """Generator for streaming responses in OpenAI format"""
    # Whitespace-split words stand in for model tokens here
    tokens = response_text.split(" ")

    for i, token in enumerate(tokens):
        chunk = {
            "id": f"chatcmpl-{int(time.time())}-{i}",
            "object": "chat.completion.chunk",
            "created": int(time.time()),
            "model": model,
            "choices": [{
                "index": 0,
                "delta": {"content": token + " "},
                "finish_reason": None
            }]
        }
        yield f"data: {json.dumps(chunk)}\n\n"
        await asyncio.sleep(0.01)

    final_chunk = {
        "id": f"chatcmpl-{int(time.time())}-final",
        "object": "chat.completion.chunk",
        "created": int(time.time()),
        "model": model,
        "choices": [{
            "index": 0,
            "delta": {},
            "finish_reason": "stop"
        }]
    }
    yield f"data: {json.dumps(final_chunk)}\n\n"
    yield "data: [DONE]\n\n"


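# For reference, the generator above emits server-sent events of this shape
# (id/created values illustrative):
#
#   data: {"id": "chatcmpl-1715350000-0", "object": "chat.completion.chunk",
#          "created": 1715350000, "model": "qwen3:8b-q8_0",
#          "choices": [{"index": 0, "delta": {"content": "Hello "},
#                       "finish_reason": null}]}
#   ...
#   data: [DONE]
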
@app.post("/v1/chat/completions")
@app.post("/chat/completions")
async def chat_completions(request: ChatCompletionRequest):
    """OpenAI-compatible chat completions endpoint"""
    if not mcp_process:
        logger.error("MCPhost process object is None")
        raise HTTPException(status_code=500, detail="MCPhost process not initialized")

    if not mcp_process.isalive():
        logger.error(f"MCPhost process not running. Exit status: {mcp_process.exitstatus}")
        raise HTTPException(status_code=500, detail="MCPhost process not running")

    try:
        # Extract the last user message
        user_message = ""
        for message in reversed(request.messages):
            if message.role == "user":
                user_message = message.content
                break

        if not user_message:
            user_message = request.messages[-1].content if request.messages else ""

        logger.debug(f"Sending to MCPhost: {user_message}")

        # Clear any pending output (non-blocking read)
        try:
            mcp_process.read_nonblocking(size=1000, timeout=0.1)
        except (pexpect.TIMEOUT, pexpect.EOF):
            pass

        # Send message to MCPhost
        mcp_process.sendline(user_message)

        # Create virtual terminal to parse output
        screen = pyte.Screen(80, 24)
        stream = pyte.ByteStream(screen)

        # Wait for the response
        response_text = ""
        last_screen_content = ""

        try:
            start_time = time.time()
            no_change_count = 0

            while time.time() - start_time < 30:  # 30 second timeout
                try:
                    # Read available data
                    data = mcp_process.read_nonblocking(size=1024, timeout=0.1)

                    # Feed data to virtual terminal
                    stream.feed(data.encode('utf-8'))

                    # Get current screen content
                    current_screen = '\n'.join(screen.display).strip()

                    # Treat ~1 second without screen changes as end of response
                    if current_screen == last_screen_content:
                        no_change_count += 1
                        if no_change_count > 10:  # No change for 1 second
                            break
                    else:
                        no_change_count = 0
                        last_screen_content = current_screen

                    # Extract response text from screen
                    lines = current_screen.split('\n')
                    response_lines = []

                    for line in lines:
                        # Skip empty lines and prompt-related lines
                        if line.strip() and "Enter your prompt" not in line and not line.strip().startswith("alt+enter"):
                            # Remove the prompt box character if present
                            if line.startswith("┃"):
                                clean_line = line[1:].strip()
                                if clean_line and "Enter your prompt" not in clean_line:
                                    response_lines.append(clean_line)
                            else:
                                response_lines.append(line.strip())

                    # Update response text
                    response_text = ' '.join(response_lines)

                except pexpect.TIMEOUT:
                    # If we haven't received anything yet, continue waiting
                    if not response_text:
                        continue
                    else:
                        no_change_count += 1
                        if no_change_count > 10:
                            break
                except pexpect.EOF:
                    logger.error("MCPhost process ended unexpectedly")
                    break

        except Exception as e:
            logger.warning(f"Error reading response: {e}")

        response_text = response_text.strip()

        # Clean up the response text - drop immediately repeated words, which
        # terminal redraws sometimes duplicate
        if response_text:
            words = response_text.split()
            clean_words = []
            for i, word in enumerate(words):
                if i == 0 or word != words[i - 1]:
                    clean_words.append(word)
            response_text = ' '.join(clean_words)

        if not response_text:
            response_text = "No response received from MCPhost"

        logger.debug(f"MCPhost response: {response_text}")

        # Handle streaming response
        if request.stream:
            return StreamingResponse(
                _resp_async_generator(response_text, request.model),
                media_type="text/event-stream"
            )

        # Non-streaming response (word counts approximate token usage)
        return {
            "id": f"chatcmpl-{int(time.time())}",
            "object": "chat.completion",
            "created": int(time.time()),
            "model": request.model,
            "choices": [{
                "index": 0,
                "message": {
                    "role": "assistant",
                    "content": response_text
                },
                "finish_reason": "stop"
            }],
            "usage": {
                "prompt_tokens": len(user_message.split()),
                "completion_tokens": len(response_text.split()),
                "total_tokens": len(user_message.split()) + len(response_text.split())
            }
        }

    except Exception as e:
        logger.exception("Error in chat completion")
        raise HTTPException(status_code=500, detail=str(e))


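# For reference, a non-streaming call above returns a single OpenAI-style
# completion object (values illustrative):
#
#   {"id": "chatcmpl-1715350000", "object": "chat.completion",
#    "created": 1715350000, "model": "mcphost-model",
#    "choices": [{"index": 0,
#                 "message": {"role": "assistant", "content": "..."},
#                 "finish_reason": "stop"}],
#    "usage": {"prompt_tokens": 4, "completion_tokens": 12, "total_tokens": 16}}
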
@app.get("/v1/models")
@app.get("/models")
async def list_models():
    """List available models (OpenAI-compatible endpoint)"""
    return {
        "object": "list",
        "data": [{
            "id": settings.mcphost_model,
            "object": "model",
            "created": int(time.time()),
            "owned_by": "mcphost",
            "permission": [],
            "root": settings.mcphost_model,
            "parent": None
        }]
    }


@app.get("/health")
async def health_check():
    """Health check endpoint"""
    if not mcp_process:
        logger.warning("Health check: MCPhost process is None")
        return {"status": "unhealthy", "detail": "Process not initialized"}

    is_healthy = mcp_process.isalive()
    status = "healthy" if is_healthy else "unhealthy"
    detail = {"pid": mcp_process.pid}

    if not is_healthy:
        detail["exit_status"] = mcp_process.exitstatus

    logger.info(f"Health check: {status}, details: {detail}")
    return {"status": status, "detail": detail}


if __name__ == "__main__":
    logger.add(
        "mcphost_openai_api.log",
        rotation="10 MB",
        retention="10 days",
        level="DEBUG"
    )

    logger.info("Starting OpenAI-compatible MCPhost API server...")
    logger.info("Configuration:")
    logger.info(f"  MCPhost Path: {settings.mcphost_path}")
    logger.info(f"  Model: {settings.mcphost_model}")
    logger.info(f"  OpenAI URL: {settings.openai_url}")
    logger.info(f"  Debug: {settings.debug}")

    uvicorn.run(app, host=settings.host, port=settings.port, log_config=None)
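For completeness, a minimal sketch of a Python client for the streaming endpoint, assuming the requests package is available (it is not in the pin list above); host, port, and model name mirror the curl call in test.sh below:

import json
import requests  # assumed available; not part of the pinned requirements

payload = {
    "model": "mcphost-model",
    "messages": [{"role": "user", "content": "Tell me a joke."}],
    "stream": True,
}

# Consume the SSE stream and reassemble the delta chunks into one string.
with requests.post("http://0.0.0.0:8000/v1/chat/completions", json=payload, stream=True) as resp:
    resp.raise_for_status()
    for line in resp.iter_lines(decode_unicode=True):
        if not line or not line.startswith("data: "):
            continue
        data = line[len("data: "):]
        if data == "[DONE]":
            break
        chunk = json.loads(data)
        print(chunk["choices"][0]["delta"].get("content", ""), end="", flush=True)
print()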
10
test.sh
Executable file
@@ -0,0 +1,10 @@
#!/bin/bash

curl -X POST http://0.0.0.0:8000/v1/chat/completions -H "Content-Type: application/json" -H "Authorization: Bearer fake-api-key" -d '{
  "model": "mcphost-model",
  "messages": [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Tell me a joke."}
  ],
  "temperature": 0.7
}'