diff --git a/backend/src/agent/graph.py b/backend/src/agent/graph.py
index 2d4f9d9..0f19c3f 100644
--- a/backend/src/agent/graph.py
+++ b/backend/src/agent/graph.py
@@ -42,9 +42,9 @@ genai_client = Client(api_key=os.getenv("GEMINI_API_KEY"))
 
 # Nodes
 def generate_query(state: OverallState, config: RunnableConfig) -> QueryGenerationState:
-    """LangGraph node that generates a search queries based on the User's question.
+    """LangGraph node that generates search queries based on the User's question.
 
-    Uses Gemini 2.0 Flash to create an optimized search query for web research based on
+    Uses Gemini 2.0 Flash to create optimized search queries for web research based on
     the User's question.
 
     Args:
@@ -52,7 +52,7 @@ def generate_query(state: OverallState, config: RunnableConfig) -> QueryGenerati
         config: Configuration for the runnable, including LLM provider settings
 
     Returns:
-        Dictionary with state update, including search_query key containing the generated query
+        Dictionary with state update, including search_query key containing the generated queries
     """
 
     configurable = Configuration.from_runnable_config(config)
diff --git a/backend/src/agent/prompts.py b/backend/src/agent/prompts.py
index 4c8af2f..75857e0 100644
--- a/backend/src/agent/prompts.py
+++ b/backend/src/agent/prompts.py
@@ -87,7 +87,7 @@ Instructions:
 - You have access to all the information gathered from the previous steps.
 - You have access to the user's question.
 - Generate a high-quality answer to the user's question based on the provided summaries and the user's question.
-- you MUST include all the citations from the summaries in the answer correctly.
+- You MUST include all the citations from the summaries in the answer correctly.
 
 User Context:
 - {research_topic}