Working simple API bridge done

.gitignore (vendored, 2 lines changed)
@@ -3,6 +3,6 @@ venv
 config.json
 mcphost
 mcphost_openai_api.log.*
-helpers/settings.py
+settings.py
 .idea
 __pycache__

.idea/inspectionProfiles/Project_Default.xml (generated, deleted)
@@ -1,31 +0,0 @@
<component name="InspectionProjectProfileManager">
  <profile version="1.0">
    <option name="myName" value="Project Default" />
    <inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
      <option name="ignoredPackages">
        <value>
          <list size="18">
            <item index="0" class="java.lang.String" itemvalue="pandas" />
            <item index="1" class="java.lang.String" itemvalue="tiktoken" />
            <item index="2" class="java.lang.String" itemvalue="gensim" />
            <item index="3" class="java.lang.String" itemvalue="pydantic" />
            <item index="4" class="java.lang.String" itemvalue="configparser" />
            <item index="5" class="java.lang.String" itemvalue="future" />
            <item index="6" class="java.lang.String" itemvalue="tenacity" />
            <item index="7" class="java.lang.String" itemvalue="python-dotenv" />
            <item index="8" class="java.lang.String" itemvalue="aiohttp" />
            <item index="9" class="java.lang.String" itemvalue="pipmaster" />
            <item index="10" class="java.lang.String" itemvalue="setuptools" />
            <item index="11" class="java.lang.String" itemvalue="xlsxwriter" />
            <item index="12" class="java.lang.String" itemvalue="numpy" />
            <item index="13" class="java.lang.String" itemvalue="scipy" />
            <item index="14" class="java.lang.String" itemvalue="filelock" />
            <item index="15" class="java.lang.String" itemvalue="pycryptodomex" />
            <item index="16" class="java.lang.String" itemvalue="lxml" />
            <item index="17" class="java.lang.String" itemvalue="blobfile" />
          </list>
        </value>
      </option>
    </inspection_tool>
  </profile>
</component>

.idea/inspectionProfiles/profiles_settings.xml (generated, deleted)
@@ -1,6 +0,0 @@
<component name="InspectionProjectProfileManager">
  <settings>
    <option name="USE_PROJECT_PROFILE" value="false" />
    <version value="1.0" />
  </settings>
</component>

.idea/mcphost-api.iml (generated, deleted)
@@ -1,10 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$">
      <excludeFolder url="file://$MODULE_DIR$/.venv" />
    </content>
    <orderEntry type="jdk" jdkName="Python 3.12 (mcphost-api)" jdkType="Python SDK" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>

.idea/modules.xml (generated, deleted)
@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/mcphost-api.iml" filepath="$PROJECT_DIR$/.idea/mcphost-api.iml" />
    </modules>
  </component>
</project>

.idea/vcs.xml (generated, deleted)
@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="$PROJECT_DIR$" vcs="Git" />
  </component>
</project>

.idea/workspace.xml (generated, deleted)
@@ -1,102 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="AutoImportSettings">
    <option name="autoReloadType" value="SELECTIVE" />
  </component>
  <component name="ChangeListManager">
    <list default="true" id="bbbc2257-1e9d-40b2-a56b-3daf4fc0a636" name="Changes" comment="">
      <change afterPath="$PROJECT_DIR$/.idea/inspectionProfiles/Project_Default.xml" afterDir="false" />
      <change afterPath="$PROJECT_DIR$/.idea/inspectionProfiles/profiles_settings.xml" afterDir="false" />
      <change afterPath="$PROJECT_DIR$/.idea/mcphost-api.iml" afterDir="false" />
      <change afterPath="$PROJECT_DIR$/.idea/modules.xml" afterDir="false" />
      <change afterPath="$PROJECT_DIR$/.idea/vcs.xml" afterDir="false" />
      <change afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
      <change afterPath="$PROJECT_DIR$/helpers/__init__.py" afterDir="false" />
      <change afterPath="$PROJECT_DIR$/test.sh" afterDir="false" />
    </list>
    <option name="SHOW_DIALOG" value="false" />
    <option name="HIGHLIGHT_CONFLICTS" value="true" />
    <option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
    <option name="LAST_RESOLUTION" value="IGNORE" />
  </component>
  <component name="FileTemplateManagerImpl">
    <option name="RECENT_TEMPLATES">
      <list>
        <option value="Python Script" />
      </list>
    </option>
  </component>
  <component name="Git.Settings">
    <option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$" />
  </component>
  <component name="ProjectColorInfo"><![CDATA[{
  "associatedIndex": 5
}]]></component>
  <component name="ProjectId" id="2wuM0HE6IiWx1ypaydX36jB6Bh3" />
  <component name="ProjectLevelVcsManager" settingsEditedManually="true" />
  <component name="ProjectViewState">
    <option name="hideEmptyMiddlePackages" value="true" />
    <option name="showLibraryContents" value="true" />
  </component>
  <component name="PropertiesComponent"><![CDATA[{
  "keyToString": {
    "Python.serve_openai_compatible.executor": "Debug",
    "RunOnceActivity.ShowReadmeOnStart": "true",
    "RunOnceActivity.git.unshallow": "true",
    "git-widget-placeholder": "main"
  }
}]]></component>
  <component name="RecentsManager">
    <key name="MoveFile.RECENT_KEYS">
      <recent name="$PROJECT_DIR$/helpers" />
      <recent name="$PROJECT_DIR$/utils" />
    </key>
  </component>
  <component name="RunManager">
    <configuration name="serve_openai_compatible" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
      <module name="mcphost-api" />
      <option name="ENV_FILES" value="" />
      <option name="INTERPRETER_OPTIONS" value="" />
      <option name="PARENT_ENVS" value="true" />
      <envs>
        <env name="PYTHONUNBUFFERED" value="1" />
      </envs>
      <option name="SDK_HOME" value="" />
      <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
      <option name="IS_MODULE_SDK" value="true" />
      <option name="ADD_CONTENT_ROOTS" value="true" />
      <option name="ADD_SOURCE_ROOTS" value="true" />
      <option name="SCRIPT_NAME" value="$PROJECT_DIR$/serve_openai_compatible.py" />
      <option name="PARAMETERS" value="" />
      <option name="SHOW_COMMAND_LINE" value="false" />
      <option name="EMULATE_TERMINAL" value="false" />
      <option name="MODULE_MODE" value="false" />
      <option name="REDIRECT_INPUT" value="false" />
      <option name="INPUT_FILE" value="" />
      <method v="2" />
    </configuration>
    <recent_temporary>
      <list>
        <item itemvalue="Python.serve_openai_compatible" />
      </list>
    </recent_temporary>
  </component>
  <component name="SharedIndexes">
    <attachedChunks>
      <set>
        <option value="bundled-python-sdk-4f4e415b4190-aa17d162503b-com.jetbrains.pycharm.community.sharedIndexes.bundled-PC-243.26053.29" />
      </set>
    </attachedChunks>
  </component>
  <component name="SpellCheckerSettings" RuntimeDictionaries="0" Folders="0" CustomDictionaries="0" DefaultDictionary="application-level" UseSingleDictionary="true" transferred="true" />
  <component name="TaskManager">
    <task active="true" id="Default" summary="Default task">
      <changelist id="bbbc2257-1e9d-40b2-a56b-3daf4fc0a636" name="Changes" comment="" />
      <created>1746886781301</created>
      <option name="number" value="Default" />
      <option name="presentableId" value="Default" />
      <updated>1746886781301</updated>
    </task>
    <servers />
  </component>
</project>

helpers/local_logger.py (deleted; file name per the old wrapper's import)
@@ -1,15 +0,0 @@


class LoggerWriter:
    def __init__(self, logger_func):
        self.logger_func = logger_func

    def write(self, message):
        # Remove trailing newlines for cleaner logs
        if message.strip():
            self.logger_func(message.rstrip())

    def flush(self):
        # Required for file-like objects
        pass

helpers/models.py (deleted; file name per the old wrapper's import)
@@ -1,15 +0,0 @@
from typing import Optional, List
from pydantic import BaseModel


# OpenAI-compatible models
class ChatMessage(BaseModel):
    role: str
    content: str

class ChatCompletionRequest(BaseModel):
    model: str = "mcphost-model"
    messages: List[ChatMessage]
    max_tokens: Optional[int] = 512
    temperature: Optional[float] = 0.1
    stream: Optional[bool] = False
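
These deleted models mirrored the OpenAI chat schema. For reference, a hypothetical instantiation, reusing the classes from the hunk above with the same payload the old test.sh sent:

# Hypothetical usage of the deleted request model; assumes ChatMessage and
# ChatCompletionRequest as defined in the hunk above are importable.
req = ChatCompletionRequest(
    model="mcphost-model",
    messages=[
        ChatMessage(role="system", content="You are a helpful assistant."),
        ChatMessage(role="user", content="Tell me a joke."),
    ],
    temperature=0.7,
)
assert req.stream is False  # streaming defaulted to off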

serve.py (new file)
@@ -0,0 +1,425 @@
import json
import re
import sys
import threading
import time
from http.server import HTTPServer, BaseHTTPRequestHandler
from typing import Optional, List
from functools import partial, wraps

import pexpect
from loguru import logger

from settings import settings


def log_performance(func):
    """Decorator to log function performance"""

    @wraps(func)
    def wrapper(*args, **kwargs):
        start_time = time.time()
        logger.debug("Starting {}", func.__name__)

        try:
            result = func(*args, **kwargs)
            duration = time.time() - start_time
            logger.debug("{} completed in {:.2f}s", func.__name__, duration)
            return result
        except Exception as e:
            duration = time.time() - start_time
            logger.error("{} failed after {:.2f}s: {}", func.__name__, duration, str(e))
            raise

    return wrapper


# Configure loguru
logger.remove()  # Remove default handler

# Console handler only
logger.add(
    sys.stderr,
    level="DEBUG" if settings.debug else "INFO",
    format="<green>{time:YYYY-MM-DD HH:mm:ss}</green> | <level>{level: <8}</level> | <cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - <level>{message}</level>",
    colorize=True,
    backtrace=True,
    diagnose=True
)


class Config:
    """Configuration constants for the application"""
    SPAWN_TIMEOUT = 60
    ECHO_DELAY = 0.5
    READ_TIMEOUT = 0.1
    RESPONSE_WAIT_TIME = 2
    CHUNK_SIZE = 1000
    MAX_READ_SIZE = 10000
    PROMPT_INDICATOR = "Enter your prompt"
    ANSI_PATTERN = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])')
    TUI_BORDER = '┃'
    SKIP_PATTERNS = ['alt+enter', 'Enter your prompt']


class ResponseCleaner:
    """Handles cleaning and formatting of MCP responses"""

    def __init__(self):
        self.ansi_pattern = Config.ANSI_PATTERN
        self.tui_border = Config.TUI_BORDER
        self.skip_patterns = Config.SKIP_PATTERNS

    def clean(self, response: str, original_prompt: str) -> str:
        """Clean response with clear steps"""
        if not response:
            return ""

        response = self._remove_ansi(response)
        lines = self._extract_content_lines(response, original_prompt)
        return '\n'.join(lines)

    def _remove_ansi(self, text: str) -> str:
        """Remove ANSI escape sequences"""
        return self.ansi_pattern.sub('', text)

    def _extract_content_lines(self, response: str, original_prompt: str) -> List[str]:
        """Extract meaningful content lines from response"""
        lines = response.split('\n')
        cleaned_lines = []

        for line in lines:
            cleaned_line = self._process_line(line, original_prompt)
            if cleaned_line is not None:
                cleaned_lines.append(cleaned_line)

        return cleaned_lines

    def _process_line(self, line: str, original_prompt: str) -> Optional[str]:
        """Process a single line and return cleaned content or None to skip"""
        stripped = line.strip()

        # Skip empty lines
        if not stripped:
            return None

        # Skip the original prompt
        if stripped == original_prompt:
            return None

        # Handle TUI decorations
        if stripped.startswith(self.tui_border):
            content = stripped.strip(self.tui_border).strip()
            if content and content != original_prompt:
                return content
            return None

        # Skip navigation hints
        if any(pattern in line for pattern in self.skip_patterns):
            return None

        # Return non-empty, non-decoration lines
        return stripped


class ProcessManager:
    """Manages the MCP process lifecycle"""

    def __init__(self):
        self.child: Optional[pexpect.spawn] = None
        self.config = Config()

    @log_performance
    def start(self) -> bool:
        """Start the mcphost process"""
        command = self._build_command()
        logger.info("Starting mcphost: {}", ' '.join(command))

        try:
            self.child = pexpect.spawn(
                ' '.join(command),
                timeout=self.config.SPAWN_TIMEOUT,
                encoding='utf-8'
            )
            self._configure_process()
            return self._wait_for_ready()
        except Exception as e:
            logger.error("Error starting mcphost: {}", e)
            logger.exception("Full traceback:")
            return False

    def is_alive(self) -> bool:
        """Check if the process is running"""
        return self.child is not None and self.child.isalive()

    def shutdown(self):
        """Shutdown mcphost gracefully"""
        if self.is_alive():
            logger.info("Shutting down mcphost...")
            self.child.sendcontrol('c')
            self.child.close()
            logger.info("MCPHost shutdown complete")

    def _build_command(self) -> List[str]:
        """Build the command to start mcphost"""
        command = [
            settings.mcphost_path,
            '--config', settings.mcphost_config,
            '--model', settings.mcphost_model,
            '--openai-url', settings.openai_url,
            '--openai-api-key', settings.openai_api_key
        ]

        if settings.debug:
            command.insert(1, '--debug')

        return command

    def _configure_process(self):
        """Configure the spawned process"""
        self.child.setecho(False)

    def _wait_for_ready(self) -> bool:
        """Wait for the process to be ready"""
        try:
            self.child.expect(self.config.PROMPT_INDICATOR)
            logger.success("MCPHost started and ready")
            self._clear_buffer()
            return True
        except Exception as e:
            logger.error("Error waiting for prompt: {}", e)
            return False

    def _clear_buffer(self):
        """Clear any remaining output in the buffer"""
        time.sleep(self.config.ECHO_DELAY)
        try:
            self.child.read_nonblocking(
                size=self.config.MAX_READ_SIZE,
                timeout=self.config.READ_TIMEOUT
            )
        except (pexpect.TIMEOUT, pexpect.EOF):  # narrowed from a bare except; buffer is already empty
            pass


class MCPCommunicator:
    """Handles communication with the MCP process"""

    def __init__(self, process: ProcessManager, cleaner: ResponseCleaner):
        self.process = process
        self.cleaner = cleaner
        self.config = Config()

    def send_prompt(self, prompt: str) -> str:
        """Send a prompt and receive response"""
        if not self.process.is_alive():
            raise RuntimeError("MCP process is not running")

        self._clear_pending_output()
        self._send_command(prompt)
        response = self._collect_response()
        return self.cleaner.clean(response, prompt)

    def _clear_pending_output(self):
        """Clear any pending output from the process"""
        try:
            self.process.child.read_nonblocking(
                size=self.config.MAX_READ_SIZE,
                timeout=self.config.READ_TIMEOUT
            )
        except (pexpect.TIMEOUT, pexpect.EOF):  # narrowed from a bare except; nothing pending to discard
            pass

    def _send_command(self, prompt: str):
        """Send a command to the process"""
        logger.debug("Sending prompt: {}", prompt)
        self.process.child.send(prompt)
        self.process.child.send('\r')

        # Wait for the model to process
        time.sleep(self.config.RESPONSE_WAIT_TIME)

    def _collect_response(self) -> str:
        """Collect response from the process"""
        response = ""
        response_complete = False

        with logger.catch(message="Error during response collection"):
            while not response_complete:
                try:
                    chunk = self.process.child.read_nonblocking(
                        size=self.config.CHUNK_SIZE,
                        timeout=3
                    )

                    if chunk:
                        response += chunk
                        logger.trace("Received chunk: {}", chunk[:50] + "..." if len(chunk) > 50 else chunk)
                        if self.config.PROMPT_INDICATOR in chunk:
                            response_complete = True
                    else:
                        break

                except pexpect.TIMEOUT:
                    if response and self.config.PROMPT_INDICATOR in response:
                        response_complete = True
                    elif response:
                        logger.debug("Waiting for more response data...")
                        time.sleep(1)
                        continue
                    else:
                        break
                except Exception as e:
                    logger.error("Error reading response: {}", e)
                    break

        logger.debug("Collected response length: {} characters", len(response))
        return response


class MCPHostManager:
    """Main manager that orchestrates process and communication"""

    def __init__(self):
        self.process = ProcessManager()
        self.cleaner = ResponseCleaner()
        self.communicator = MCPCommunicator(self.process, self.cleaner)
        self.lock = threading.Lock()

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.shutdown()

    def start(self) -> bool:
        """Start the MCP host"""
        return self.process.start()

    @log_performance
    def send_prompt(self, prompt: str) -> str:
        """Send a prompt to mcphost and get the response"""
        with self.lock:
            if not self.process.is_alive():
                logger.warning("MCPHost not running, attempting to restart...")
                if not self.start():
                    return "Error: Failed to restart MCPHost"

            try:
                return self.communicator.send_prompt(prompt)
            except Exception as e:
                logger.exception("Exception in send_prompt: {}", str(e))
                return f"Error: {str(e)}"

    def shutdown(self):
        """Shutdown mcphost gracefully"""
        self.process.shutdown()


class SubprocessHandler(BaseHTTPRequestHandler):
    """HTTP request handler with dependency injection"""

    def __init__(self, *args, manager: MCPHostManager, **kwargs):
        self.manager = manager
        super().__init__(*args, **kwargs)

    @logger.catch
    def do_POST(self):
        """Handle POST requests"""
        try:
            content_length = self._get_content_length()
            input_data = self._read_request_body(content_length)
            prompt = self._parse_prompt(input_data)

            logger.info("Received prompt: {}", prompt)
            response = self.manager.send_prompt(prompt)
            logger.info("Response: {}", response)

            self._send_success_response(response)
        except ValueError as e:
            logger.warning("Bad request: {}", str(e))
            self._send_error_response(400, str(e))
        except Exception as e:
            logger.exception("Unexpected error in POST handler")
            self._send_error_response(500, "Internal server error")

    def do_GET(self):
        """Handle GET requests (not supported)"""
        self._send_error_response(404, "Not Found")

    def log_message(self, format, *args):
        """Override to prevent default logging"""
        pass

    def _get_content_length(self) -> int:
        """Get content length from headers"""
        return int(self.headers.get('Content-Length', 0))

    def _read_request_body(self, content_length: int) -> str:
        """Read request body"""
        if content_length == 0:
            raise ValueError("Empty request body")
        return self.rfile.read(content_length).decode('utf-8')

    def _parse_prompt(self, input_data: str) -> str:
        """Parse prompt from input data"""
        try:
            data = json.loads(input_data)
            return data.get('prompt', input_data)
        except json.JSONDecodeError:
            return input_data

    def _send_success_response(self, content: str):
        """Send successful response"""
        self.send_response(200)
        self.send_header('Content-type', 'text/plain')
        self.end_headers()
        self.wfile.write(content.encode('utf-8'))

    def _send_error_response(self, code: int, message: str):
        """Send error response"""
        self.send_response(code)
        self.send_header('Content-type', 'text/plain')
        self.end_headers()
        self.wfile.write(message.encode('utf-8'))


def create_server(manager: MCPHostManager) -> HTTPServer:
    """Factory function to create HTTP server with manager"""
    handler = partial(SubprocessHandler, manager=manager)
    return HTTPServer((settings.host, settings.port), handler)


@logger.catch
def main():
    """Main function with clean structure"""
    # Startup banner
    logger.info("=" * 50)
    logger.info("MCP Host Server v1.0")
    logger.info("Debug Mode: {}", "ON" if settings.debug else "OFF")
    logger.info("=" * 50)

    logger.info("Initializing MCPHost...")

    with MCPHostManager() as manager:
        server = create_server(manager)

        try:
            logger.success("Server started at {}:{}", settings.host, settings.port)
            logger.info("Ready to accept requests.")
            logger.info("Press Ctrl+C to shutdown")
            server.serve_forever()
        except KeyboardInterrupt:
            logger.info("Received shutdown signal, shutting down gracefully...")
        except Exception as e:
            logger.error("Server error: {}", e)
            logger.exception("Full traceback:")
        finally:
            logger.info("Shutting down server...")
            server.shutdown()
            logger.success("Server shutdown complete")


if __name__ == '__main__':
    main()
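
A minimal client sketch for the new bridge above. The URL is an assumption taken from test.sh; settings.py, which actually defines host and port, is not part of this diff:

# Minimal client sketch for serve.py; BASE_URL is an assumed default.
import json
import urllib.request

BASE_URL = "http://localhost:8000"  # assumption, per test.sh


def ask(prompt: str) -> str:
    # The handler accepts raw text or JSON with a "prompt" key
    # (see SubprocessHandler._parse_prompt) and replies with text/plain.
    body = json.dumps({"prompt": prompt}).encode("utf-8")
    req = urllib.request.Request(BASE_URL, data=body, method="POST")
    with urllib.request.urlopen(req, timeout=60) as resp:
        return resp.read().decode("utf-8")


if __name__ == "__main__":
    print(ask("When is your knowledge cut off?"))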

serve_openai_compatible.py (deleted; file name per the IDE run configuration above)
@@ -1,411 +0,0 @@
#!/usr/bin/env python3
"""
OpenAI-compatible MCPhost API wrapper with enhanced debugging
Usage: python mcphost_openai_api.py
"""

import json
import time
import asyncio
import os
from typing import Optional
from contextlib import asynccontextmanager
import threading

from fastapi import FastAPI, HTTPException
from starlette.responses import StreamingResponse
import uvicorn
from loguru import logger
import pexpect
import pyte

from helpers.settings import settings
from helpers.models import ChatCompletionRequest
from helpers.local_logger import LoggerWriter

# Global process variable
mcp_process: Optional[pexpect.spawn] = None
process_monitor_thread = None
stop_monitoring = threading.Event()


def monitor_process():
    """Background thread to monitor MCPhost process status"""
    while not stop_monitoring.is_set():
        if mcp_process:
            is_alive = mcp_process.isalive()
            exit_status = mcp_process.exitstatus if not is_alive else None
            logger.info(f"MCPhost process status - Alive: {is_alive}, Exit status: {exit_status}")

            if not is_alive and exit_status is not None:
                logger.error(f"MCPhost process died with exit code: {exit_status}")
        else:
            logger.warning("MCPhost process is None")

        # Wait 5 seconds before next check
        for _ in range(5):
            if stop_monitoring.is_set():
                break
            time.sleep(1)


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Startup: Start MCPhost subprocess
    global mcp_process, process_monitor_thread

    try:
        # Build command
        cmd = [settings.mcphost_path]

        if settings.debug:
            cmd.append("--debug")

        cmd.extend([
            "--config", settings.mcphost_config,
            "--model", settings.mcphost_model,
            "--openai-url", settings.openai_url,
            "--openai-api-key", settings.openai_api_key
        ])

        # Set environment
        env = os.environ.copy()
        env["OLLAMA_NUM_CTX"] = str(settings.ollama_num_ctx)

        logger.info(f"Starting MCPhost with command: {' '.join(cmd)}")
        logger.info(f"Environment: OLLAMA_NUM_CTX={settings.ollama_num_ctx}")

        # Use pexpect to spawn MCPhost in interactive mode
        try:
            mcp_process = pexpect.spawn(" ".join(cmd), timeout=30, encoding='utf-8', env=env)
            logger.info(f"MCPhost process spawned with PID: {mcp_process.pid}")
        except Exception as spawn_error:
            logger.error(f"Failed to spawn MCPhost process: {spawn_error}")
            raise

        # Enable detailed logging
        mcp_process.logfile_read = LoggerWriter(logger.debug)

        # Wait for model to load
        logger.info("Waiting for model to load...")
        index = mcp_process.expect([
            "Model loaded",
            pexpect.TIMEOUT,
            pexpect.EOF
        ], timeout=30)

        if index == 0:
            logger.info("MCPhost model loaded successfully")
        elif index == 1:
            logger.error("Timeout waiting for model to load")
            logger.error(f"Output so far: {mcp_process.before}")
            raise RuntimeError("Timeout waiting for model to load")
        elif index == 2:
            logger.error("MCPhost process ended unexpectedly")
            logger.error(f"Output: {mcp_process.before}")
            raise RuntimeError("MCPhost process ended unexpectedly")

        # Wait for prompt interface
        logger.info("Waiting for prompt...")

        # Read output until we see the prompt interface
        seen_prompt = False
        start_time = time.time()

        while time.time() - start_time < 10:  # 10 second timeout
            try:
                output = mcp_process.read_nonblocking(size=1000, timeout=0.5)
                if "Enter your prompt" in output or "┃" in output:
                    seen_prompt = True
                    break
            except pexpect.TIMEOUT:
                continue
            except pexpect.EOF:
                logger.error("MCPhost process ended unexpectedly")
                raise RuntimeError("MCPhost process ended unexpectedly")

        if seen_prompt:
            logger.info("MCPhost process started and ready")
        else:
            logger.error("Timeout waiting for prompt")
            raise RuntimeError("Timeout waiting for prompt")

        # Start process monitoring thread
        stop_monitoring.clear()
        process_monitor_thread = threading.Thread(target=monitor_process, daemon=True)
        process_monitor_thread.start()
        logger.info("Started process monitoring thread")

        # Final check
        if mcp_process.isalive():
            logger.info("MCPhost process is alive and ready to accept requests")
        else:
            logger.error("MCPhost process is not alive after startup")
            raise RuntimeError("MCPhost process died during startup")

        yield

    except Exception as e:
        logger.error(f"Failed to start MCPhost: {e}")
        if mcp_process:
            logger.error(f"MCPhost final output: {mcp_process.before}")
            logger.error(f"Process alive: {mcp_process.isalive()}")
            if not mcp_process.isalive():
                logger.error(f"Exit status: {mcp_process.exitstatus}")
        raise
    finally:
        # Shutdown: Clean up subprocess
        logger.info("Shutting down MCPhost...")
        stop_monitoring.set()

        if process_monitor_thread:
            process_monitor_thread.join(timeout=2)

        if mcp_process:
            if mcp_process.isalive():
                logger.info("Terminating MCPhost process...")
                mcp_process.terminate()
                mcp_process.wait()
            logger.info("MCPhost process stopped")


# Create FastAPI app
app = FastAPI(title="OpenAI-compatible MCPhost API", lifespan=lifespan)


async def _resp_async_generator(response_text: str, model: str):
    """Generator for streaming responses in OpenAI format"""
    tokens = response_text.split(" ")

    for i, token in enumerate(tokens):
        chunk = {
            "id": f"chatcmpl-{int(time.time())}-{i}",
            "object": "chat.completion.chunk",
            "created": int(time.time()),
            "model": model,
            "choices": [{
                "index": 0,
                "delta": {"content": token + " "},
                "finish_reason": None
            }]
        }
        yield f"data: {json.dumps(chunk)}\n\n"
        await asyncio.sleep(0.01)

    final_chunk = {
        "id": f"chatcmpl-{int(time.time())}-final",
        "object": "chat.completion.chunk",
        "created": int(time.time()),
        "model": model,
        "choices": [{
            "index": 0,
            "delta": {},
            "finish_reason": "stop"
        }]
    }
    yield f"data: {json.dumps(final_chunk)}\n\n"
    yield "data: [DONE]\n\n"


@app.post("/v1/chat/completions")
@app.post("/chat/completions")
async def chat_completions(request: ChatCompletionRequest):
    """OpenAI-compatible chat completions endpoint"""
    if not mcp_process:
        logger.error("MCPhost process object is None")
        raise HTTPException(status_code=500, detail="MCPhost process not initialized")

    if not mcp_process.isalive():
        logger.error(f"MCPhost process not running. Exit status: {mcp_process.exitstatus}")
        raise HTTPException(status_code=500, detail="MCPhost process not running")

    try:
        # Extract the last user message
        user_message = ""
        for message in reversed(request.messages):
            if message.role == "user":
                user_message = message.content
                break

        if not user_message:
            user_message = request.messages[-1].content if request.messages else ""

        logger.debug(f"Sending to MCPhost: {user_message}")

        # Clear any pending output (non-blocking read)
        try:
            mcp_process.read_nonblocking(size=1000, timeout=0.1)
        except:
            pass

        # Send message to MCPhost
        mcp_process.sendline(user_message)

        # Create virtual terminal to parse output
        screen = pyte.Screen(80, 24)
        stream = pyte.ByteStream(screen)

        # Wait for the response
        response_text = ""
        last_screen_content = ""

        try:
            start_time = time.time()
            no_change_count = 0

            while time.time() - start_time < 30:  # 30 second timeout
                try:
                    # Read available data
                    data = mcp_process.read_nonblocking(size=1024, timeout=0.1)

                    # Feed data to virtual terminal
                    stream.feed(data.encode('utf-8'))

                    # Get current screen content
                    current_screen = '\n'.join(screen.display).strip()

                    # Check if screen content has changed
                    if current_screen == last_screen_content:
                        no_change_count += 1
                        if no_change_count > 10:  # No change for 1 second
                            break
                    else:
                        no_change_count = 0
                        last_screen_content = current_screen

                    # Extract response text from screen
                    lines = current_screen.split('\n')
                    response_lines = []

                    for line in lines:
                        # Skip empty lines and prompt-related lines
                        if line.strip() and not "Enter your prompt" in line and not line.strip().startswith(
                                "alt+enter"):
                            # Remove the prompt box character if present
                            if line.startswith("┃"):
                                clean_line = line[1:].strip()
                                if clean_line and not "Enter your prompt" in clean_line:
                                    response_lines.append(clean_line)
                            else:
                                response_lines.append(line.strip())

                    # Update response text
                    response_text = ' '.join(response_lines)

                except pexpect.TIMEOUT:
                    # If we haven't received anything yet, continue waiting
                    if not response_text:
                        continue
                    else:
                        no_change_count += 1
                        if no_change_count > 10:
                            break
                except pexpect.EOF:
                    logger.error("MCPhost process ended unexpectedly")
                    break

        except Exception as e:
            logger.warning(f"Error reading response: {e}")

        response_text = response_text.strip()

        # Clean up the response text - remove any duplicates or artifacts
        if response_text:
            # Sometimes the terminal might show duplicated content
            words = response_text.split()
            clean_words = []
            for i, word in enumerate(words):
                if i == 0 or word != words[i - 1]:
                    clean_words.append(word)
            response_text = ' '.join(clean_words)

        if not response_text:
            response_text = "No response received from MCPhost"

        logger.debug(f"MCPhost response: {response_text}")

        # Handle streaming response
        if request.stream:
            return StreamingResponse(
                _resp_async_generator(response_text, request.model),
                media_type="text/event-stream"
            )

        # Non-streaming response
        return {
            "id": f"chatcmpl-{int(time.time())}",
            "object": "chat.completion",
            "created": int(time.time()),
            "model": request.model,
            "choices": [{
                "index": 0,
                "message": {
                    "role": "assistant",
                    "content": response_text
                },
                "finish_reason": "stop"
            }],
            "usage": {
                "prompt_tokens": len(user_message.split()),
                "completion_tokens": len(response_text.split()),
                "total_tokens": len(user_message.split()) + len(response_text.split())
            }
        }

    except Exception as e:
        logger.exception(f"Error in chat completion")
        raise HTTPException(status_code=500, detail=str(e))


@app.get("/v1/models")
@app.get("/models")
async def list_models():
    """List available models (OpenAI-compatible endpoint)"""
    return {
        "object": "list",
        "data": [{
            "id": settings.mcphost_model,
            "object": "model",
            "created": int(time.time()),
            "owned_by": "mcphost",
            "permission": [],
            "root": settings.mcphost_model,
            "parent": None
        }]
    }


@app.get("/health")
async def health_check():
    """Health check endpoint"""
    if not mcp_process:
        logger.warning("Health check: MCPhost process is None")
        return {"status": "unhealthy", "detail": "Process not initialized"}

    is_healthy = mcp_process.isalive()
    status = "healthy" if is_healthy else "unhealthy"
    detail = {"pid": mcp_process.pid if mcp_process else None}

    if not is_healthy and mcp_process:
        detail["exit_status"] = mcp_process.exitstatus

    logger.info(f"Health check: {status}, details: {detail}")
    return {"status": status, "detail": detail}


if __name__ == "__main__":
    logger.add(
        "mcphost_openai_api.log",
        rotation="10 MB",
        retention="10 days",
        level="DEBUG"
    )

    logger.info("Starting OpenAI-compatible MCPhost API server...")
    logger.info(f"Configuration:")
    logger.info(f"  MCPhost Path: {settings.mcphost_path}")
    logger.info(f"  Model: {settings.mcphost_model}")
    logger.info(f"  OpenAI URL: {settings.openai_url}")
    logger.info(f"  Debug: {settings.debug}")

    uvicorn.run(app, host=settings.host, port=settings.port, log_config=None)

test.sh (23 lines changed)
@@ -1,10 +1,17 @@
 #!/bin/bash
 
-curl -X POST http://0.0.0.0:8000/v1/chat/completions   -H "Content-Type: application/json"   -H "Authorization: Bearer fake-api-key"   -d '{
-    "model": "mcphost-model",
-    "messages": [
-      {"role": "system", "content": "You are a helpful assistant."},
-      {"role": "user", "content": "Tell me a joke."}
-    ],
-    "temperature": 0.7
-  }'
+#clear
+curl -X POST \
+  -H "Content-Type: text/plain" \
+  -d "When is your knowledge cut off? /no_think" \
+  http://localhost:8000
+
+#curl -X POST http://0.0.0.0:8000/v1/chat/completions   -H "Content-Type: application/json"   -H "Authorization: Bearer fake-api-key"   -d '{
+#    "model": "mcphost-model",
+#    "messages": [
+#      {"role": "system", "content": "You are a helpful assistant."},
+#      {"role": "user", "content": "Tell me a joke."}
+#    ],
+#    "temperature": 0.7
+#  }'
+