diff --git a/src/utils/utils.py b/src/utils/utils.py
index c4218cd..e627014 100644
--- a/src/utils/utils.py
+++ b/src/utils/utils.py
@@ -12,6 +12,14 @@ import gradio as gr
 
 from .llm import DeepSeekR1ChatOpenAI, DeepSeekR1ChatOllama
 
+PROVIDER_DISPLAY_NAMES = {
+    "openai": "OpenAI",
+    "azure_openai": "Azure OpenAI",
+    "anthropic": "Anthropic",
+    "deepseek": "DeepSeek",
+    "gemini": "Gemini"
+}
+
 def get_llm_model(provider: str, **kwargs):
     """
     获取LLM 模型
@@ -19,17 +27,19 @@ def get_llm_model(provider: str, **kwargs):
     :param kwargs:
     :return:
     """
+    if provider not in ["ollama"]:
+        env_var = "GOOGLE_API_KEY" if provider == "gemini" else f"{provider.upper()}_API_KEY"
+        api_key = kwargs.get("api_key", "") or os.getenv(env_var, "")
+        if not api_key:
+            handle_api_key_error(provider, env_var)
+        kwargs["api_key"] = api_key
+
     if provider == "anthropic":
         if not kwargs.get("base_url", ""):
             base_url = "https://api.anthropic.com"
         else:
             base_url = kwargs.get("base_url")
 
-        if not kwargs.get("api_key", ""):
-            api_key = os.getenv("ANTHROPIC_API_KEY", "")
-        else:
-            api_key = kwargs.get("api_key")
-
         return ChatAnthropic(
             model_name=kwargs.get("model_name", "claude-3-5-sonnet-20240620"),
             temperature=kwargs.get("temperature", 0.0),
@@ -42,11 +52,6 @@ def get_llm_model(provider: str, **kwargs):
         else:
             base_url = kwargs.get("base_url")
 
-        if not kwargs.get("api_key", ""):
-            api_key = os.getenv("OPENAI_API_KEY", "")
-        else:
-            api_key = kwargs.get("api_key")
-
         return ChatOpenAI(
             model=kwargs.get("model_name", "gpt-4o"),
             temperature=kwargs.get("temperature", 0.0),
@@ -59,11 +64,6 @@ def get_llm_model(provider: str, **kwargs):
         else:
             base_url = kwargs.get("base_url")
 
-        if not kwargs.get("api_key", ""):
-            api_key = os.getenv("DEEPSEEK_API_KEY", "")
-        else:
-            api_key = kwargs.get("api_key")
-
         if kwargs.get("model_name", "deepseek-chat") == "deepseek-reasoner":
             return DeepSeekR1ChatOpenAI(
                 model=kwargs.get("model_name", "deepseek-reasoner"),
@@ -79,10 +79,6 @@ def get_llm_model(provider: str, **kwargs):
                 api_key=api_key,
             )
     elif provider == "gemini":
-        if not kwargs.get("api_key", ""):
-            api_key = os.getenv("GOOGLE_API_KEY", "")
-        else:
-            api_key = kwargs.get("api_key")
         return ChatGoogleGenerativeAI(
             model=kwargs.get("model_name", "gemini-2.0-flash-exp"),
             temperature=kwargs.get("temperature", 0.0),
@@ -114,10 +110,6 @@ def get_llm_model(provider: str, **kwargs):
             base_url = os.getenv("AZURE_OPENAI_ENDPOINT", "")
         else:
             base_url = kwargs.get("base_url")
-        if not kwargs.get("api_key", ""):
-            api_key = os.getenv("AZURE_OPENAI_API_KEY", "")
-        else:
-            api_key = kwargs.get("api_key")
         return AzureChatOpenAI(
             model=kwargs.get("model_name", "gpt-4o"),
             temperature=kwargs.get("temperature", 0.0),
@@ -154,7 +146,17 @@ def update_model_dropdown(llm_provider, api_key=None, base_url=None):
         return gr.Dropdown(choices=model_names[llm_provider], value=model_names[llm_provider][0], interactive=True)
     else:
         return gr.Dropdown(choices=[], value="", interactive=True, allow_custom_value=True)
-    
+
+def handle_api_key_error(provider: str, env_var: str):
+    """
+    Handles the missing API key error by raising a gr.Error with a clear message.
+    """
+    provider_display = PROVIDER_DISPLAY_NAMES.get(provider, provider.upper())
+    raise gr.Error(
+        f"💥 {provider_display} API key not found! 🔑 Please set the "
+        f"`{env_var}` environment variable or provide it in the UI."
+    )
+
 def encode_image(img_path):
     if not img_path:
         return None
diff --git a/webui.py b/webui.py
index c6808ab..f760aab 100644
--- a/webui.py
+++ b/webui.py
@@ -184,6 +184,9 @@ async def run_browser_agent(
             gr.update(interactive=True)  # Re-enable run button
         )
 
+    except gr.Error:
+        raise
+
     except Exception as e:
         import traceback
         traceback.print_exc()
@@ -535,6 +538,12 @@ async def run_with_stream(
         try:
             result = await agent_task
             final_result, errors, model_actions, model_thoughts, latest_videos, trace, history_file, stop_button, run_button = result
+        except gr.Error:
+            final_result = ""
+            model_actions = ""
+            model_thoughts = ""
+            latest_videos = trace = history_file = None
+
         except Exception as e:
             errors = f"Agent error: {str(e)}"
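A minimal usage sketch (not part of the patch) of how the centralized key check above behaves; it assumes the snippet is run from the repository root so that `src/utils/utils.py` is importable as `src.utils.utils`. With no key in `kwargs` or the environment, calling `get_llm_model` for a non-Ollama provider now raises `gr.Error` via `handle_api_key_error` instead of failing later inside the provider client:

```python
# Illustration only; assumes the repository root is on sys.path.
import os

import gradio as gr

from src.utils.utils import get_llm_model

os.environ.pop("OPENAI_API_KEY", None)  # make sure no key is available

try:
    get_llm_model("openai", model_name="gpt-4o")
except gr.Error as err:
    # Expected message: "💥 OpenAI API key not found! 🔑 Please set the
    # `OPENAI_API_KEY` environment variable or provide it in the UI."
    print(err)
```

In `webui.py`, the new `except gr.Error: raise` branch lets this error propagate so Gradio can surface the message in the UI, while all other exceptions keep the existing traceback handling.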