Merge pull request #143 from pad918/main

Features/mistralai integrate
This commit is contained in:
warmshao
2025-02-01 23:38:38 +08:00
committed by GitHub
3 changed files with 28 additions and 3 deletions

View File

@@ -2,3 +2,4 @@ browser-use==0.1.29
pyperclip==1.9.0
gradio==5.10.0
json-repair
langchain-mistralai==0.2.4

View File

@@ -5,6 +5,7 @@ from pathlib import Path
from typing import Dict, Optional
from langchain_anthropic import ChatAnthropic
from langchain_mistralai import ChatMistralAI
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_ollama import ChatOllama
from langchain_openai import AzureChatOpenAI, ChatOpenAI
@@ -46,6 +47,22 @@ def get_llm_model(provider: str, **kwargs):
base_url=base_url,
api_key=api_key,
)
elif provider == 'mistral':
if not kwargs.get("base_url", ""):
base_url = os.getenv("MISTRAL_ENDPOINT", "https://api.mistral.ai/v1")
else:
base_url = kwargs.get("base_url")
if not kwargs.get("api_key", ""):
api_key = os.getenv("MISTRAL_API_KEY", "")
else:
api_key = kwargs.get("api_key")
return ChatMistralAI(
model=kwargs.get("model_name", "mistral-large-latest"),
temperature=kwargs.get("temperature", 0.0),
base_url=base_url,
api_key=api_key,
)
elif provider == "openai":
if not kwargs.get("base_url", ""):
base_url = os.getenv("OPENAI_ENDPOINT", "https://api.openai.com/v1")
@@ -127,7 +144,8 @@ model_names = {
"deepseek": ["deepseek-chat", "deepseek-reasoner"],
"gemini": ["gemini-2.0-flash-exp", "gemini-2.0-flash-thinking-exp", "gemini-1.5-flash-latest", "gemini-1.5-flash-8b-latest", "gemini-2.0-flash-thinking-exp-1219" ],
"ollama": ["qwen2.5:7b", "llama2:7b", "deepseek-r1:14b", "deepseek-r1:32b"],
"azure_openai": ["gpt-4o", "gpt-4", "gpt-3.5-turbo"]
"azure_openai": ["gpt-4o", "gpt-4", "gpt-3.5-turbo"],
"mistral": ["pixtral-large-latest", "mistral-large-latest", "mistral-small-latest", "ministral-8b-latest"]
}
# Callback to update the model name dropdown based on the selected provider

View File

@@ -38,7 +38,8 @@ def get_env_value(key, provider):
"openai": {"api_key": "OPENAI_API_KEY", "base_url": "OPENAI_ENDPOINT"},
"azure_openai": {"api_key": "AZURE_OPENAI_API_KEY", "base_url": "AZURE_OPENAI_ENDPOINT"},
"gemini": {"api_key": "GOOGLE_API_KEY"},
"deepseek": {"api_key": "DEEPSEEK_API_KEY", "base_url": "DEEPSEEK_ENDPOINT"}
"deepseek": {"api_key": "DEEPSEEK_API_KEY", "base_url": "DEEPSEEK_ENDPOINT"},
"mistral": {"api_key": "MISTRAL_API_KEY", "base_url": "MISTRAL_ENDPOINT"},
}
if provider in env_mappings and key in env_mappings[provider]:
@@ -116,11 +117,16 @@ def test_deepseek_r1_ollama_model():
config = LLMConfig(provider="ollama", model_name="deepseek-r1:14b")
test_llm(config, "How many 'r's are in the word 'strawberry'?")
def test_mistral_model():
config = LLMConfig(provider="mistral", model_name="pixtral-large-latest")
test_llm(config, "Describe this image", "assets/examples/test.png")
if __name__ == "__main__":
# test_openai_model()
# test_gemini_model()
# test_azure_openai_model()
test_deepseek_model()
#test_deepseek_model()
# test_ollama_model()
# test_deepseek_r1_model()
# test_deepseek_r1_ollama_model()
test_mistral_model()