feat(ui): display missing API key errors in UI

This PR introduces proper error handling in the UI for missing API keys, addressing issue #188. Previously, a missing API key produced a traceback in the console but no clear error in the UI, which made it hard to tell what had gone wrong. API key checks are now performed at the start of the `get_llm_model` function in `src/utils/utils.py`. A new `PROVIDER_DISPLAY_NAMES` constant supplies user-friendly provider names for error messages, and a new `handle_api_key_error` function raises `gr.Error` so that a clear message is displayed directly in the UI. If an API key is missing, the error now appears right away. The `run_browser_agent` and `run_with_stream` functions in `webui.py` have been adjusted so that these `gr.Error` exceptions are handled explicitly rather than being swallowed by the generic exception handler.
Author: marginal23326
Date: 2025-01-30 02:36:47 +06:00
Parent: dc41476f37
Commit: 82844a4182
2 changed files with 35 additions and 24 deletions
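For context, this is roughly how a `gr.Error` raised inside an event handler surfaces in the UI. A minimal, standalone sketch (not code from this repo; the provider/env-var handling is simplified and the component names are illustrative):

```python
import os

import gradio as gr

def check_key(provider: str) -> str:
    # Same naming convention as the new check in get_llm_model
    # (in the real code, Gemini maps to GOOGLE_API_KEY instead).
    env_var = f"{provider.upper()}_API_KEY"
    if not os.getenv(env_var, ""):
        # Gradio catches gr.Error and shows the message in the UI
        # instead of leaving a bare traceback in the console.
        raise gr.Error(f"{provider} API key not found! Please set `{env_var}`.")
    return f"{env_var} is set."

with gr.Blocks() as demo:
    provider = gr.Textbox(label="Provider", value="openai")
    result = gr.Textbox(label="Result")
    gr.Button("Check").click(check_key, inputs=provider, outputs=result)

# demo.launch()
```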

src/utils/utils.py

@@ -12,6 +12,14 @@ import gradio as gr
from .llm import DeepSeekR1ChatOpenAI, DeepSeekR1ChatOllama
PROVIDER_DISPLAY_NAMES = {
    "openai": "OpenAI",
    "azure_openai": "Azure OpenAI",
    "anthropic": "Anthropic",
    "deepseek": "DeepSeek",
    "gemini": "Gemini"
}
def get_llm_model(provider: str, **kwargs):
"""
获取LLM 模型
@@ -19,17 +27,19 @@ def get_llm_model(provider: str, **kwargs):
    :param kwargs:
    :return:
    """
    if provider not in ["ollama"]:
        env_var = "GOOGLE_API_KEY" if provider == "gemini" else f"{provider.upper()}_API_KEY"
        api_key = kwargs.get("api_key", "") or os.getenv(env_var, "")
        if not api_key:
            handle_api_key_error(provider, env_var)
        kwargs["api_key"] = api_key
    if provider == "anthropic":
        if not kwargs.get("base_url", ""):
            base_url = "https://api.anthropic.com"
        else:
            base_url = kwargs.get("base_url")
        if not kwargs.get("api_key", ""):
            api_key = os.getenv("ANTHROPIC_API_KEY", "")
        else:
            api_key = kwargs.get("api_key")
        return ChatAnthropic(
            model_name=kwargs.get("model_name", "claude-3-5-sonnet-20240620"),
            temperature=kwargs.get("temperature", 0.0),
@@ -42,11 +52,6 @@ def get_llm_model(provider: str, **kwargs):
        else:
            base_url = kwargs.get("base_url")
        if not kwargs.get("api_key", ""):
            api_key = os.getenv("OPENAI_API_KEY", "")
        else:
            api_key = kwargs.get("api_key")
        return ChatOpenAI(
            model=kwargs.get("model_name", "gpt-4o"),
            temperature=kwargs.get("temperature", 0.0),
@@ -59,11 +64,6 @@ def get_llm_model(provider: str, **kwargs):
        else:
            base_url = kwargs.get("base_url")
        if not kwargs.get("api_key", ""):
            api_key = os.getenv("DEEPSEEK_API_KEY", "")
        else:
            api_key = kwargs.get("api_key")
        if kwargs.get("model_name", "deepseek-chat") == "deepseek-reasoner":
            return DeepSeekR1ChatOpenAI(
                model=kwargs.get("model_name", "deepseek-reasoner"),
@@ -79,10 +79,6 @@ def get_llm_model(provider: str, **kwargs):
                api_key=api_key,
            )
    elif provider == "gemini":
        if not kwargs.get("api_key", ""):
            api_key = os.getenv("GOOGLE_API_KEY", "")
        else:
            api_key = kwargs.get("api_key")
        return ChatGoogleGenerativeAI(
            model=kwargs.get("model_name", "gemini-2.0-flash-exp"),
            temperature=kwargs.get("temperature", 0.0),
@@ -114,10 +110,6 @@ def get_llm_model(provider: str, **kwargs):
            base_url = os.getenv("AZURE_OPENAI_ENDPOINT", "")
        else:
            base_url = kwargs.get("base_url")
        if not kwargs.get("api_key", ""):
            api_key = os.getenv("AZURE_OPENAI_API_KEY", "")
        else:
            api_key = kwargs.get("api_key")
        return AzureChatOpenAI(
            model=kwargs.get("model_name", "gpt-4o"),
            temperature=kwargs.get("temperature", 0.0),
@@ -154,7 +146,17 @@ def update_model_dropdown(llm_provider, api_key=None, base_url=None):
        return gr.Dropdown(choices=model_names[llm_provider], value=model_names[llm_provider][0], interactive=True)
    else:
        return gr.Dropdown(choices=[], value="", interactive=True, allow_custom_value=True)
def handle_api_key_error(provider: str, env_var: str):
    """
    Handles the missing API key error by raising a gr.Error with a clear message.
    """
    provider_display = PROVIDER_DISPLAY_NAMES.get(provider, provider.upper())
    raise gr.Error(
        f"💥 {provider_display} API key not found! 🔑 Please set the "
        f"`{env_var}` environment variable or provide it in the UI."
    )
def encode_image(img_path):
    if not img_path:
        return None
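For reference, the key-resolution logic added at the top of `get_llm_model` can be read on its own. A small sketch that mirrors the diff above (the standalone `resolve_api_key` helper exists only for illustration; in the actual change this code lives inline in `get_llm_model`):

```python
import os

import gradio as gr

PROVIDER_DISPLAY_NAMES = {"openai": "OpenAI", "gemini": "Gemini"}  # trimmed

def handle_api_key_error(provider: str, env_var: str):
    provider_display = PROVIDER_DISPLAY_NAMES.get(provider, provider.upper())
    raise gr.Error(
        f"💥 {provider_display} API key not found! 🔑 Please set the "
        f"`{env_var}` environment variable or provide it in the UI."
    )

def resolve_api_key(provider: str, **kwargs) -> str:
    """Illustrative helper mirroring the check at the top of get_llm_model."""
    if provider in ("ollama",):
        return ""  # local provider, no key required
    # Gemini keys live in GOOGLE_API_KEY; everything else uses <PROVIDER>_API_KEY.
    env_var = "GOOGLE_API_KEY" if provider == "gemini" else f"{provider.upper()}_API_KEY"
    # A key supplied through the UI takes precedence over the environment.
    api_key = kwargs.get("api_key", "") or os.getenv(env_var, "")
    if not api_key:
        handle_api_key_error(provider, env_var)
    return api_key
```

The precedence matters: a key pasted into the UI always wins over the environment variable, and an empty value in both places is what triggers the error.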

webui.py

@@ -184,6 +184,9 @@ async def run_browser_agent(
            gr.update(interactive=True)  # Re-enable run button
        )
    except gr.Error:
        raise
    except Exception as e:
        import traceback
        traceback.print_exc()
@@ -535,6 +538,12 @@ async def run_with_stream(
        try:
            result = await agent_task
            final_result, errors, model_actions, model_thoughts, latest_videos, trace, history_file, stop_button, run_button = result
        except gr.Error:
            final_result = ""
            model_actions = ""
            model_thoughts = ""
            latest_videos = trace = history_file = None
        except Exception as e:
            errors = f"Agent error: {str(e)}"