Merge branch 'main' into main

This commit is contained in:
Philipp Schmid
2025-06-18 14:52:46 +02:00
committed by GitHub
11 changed files with 75 additions and 42 deletions

View File

@@ -28,7 +28,7 @@ Follow these steps to get the application running locally for development and te
 **1. Prerequisites:**
 - Node.js and npm (or yarn/pnpm)
-- Python 3.8+
+- Python 3.11+
 - **`GEMINI_API_KEY`**: The backend agent requires a Google Gemini API key.
     1. Navigate to the `backend/` directory.
     2. Create a file named `.env` by copying the `backend/.env.example` file.

View File

@@ -1,8 +1,7 @@
 # mypy: disable-error-code = "no-untyped-def,misc"
 import pathlib

-from fastapi import FastAPI, Request, Response
+from fastapi import FastAPI, Response
 from fastapi.staticfiles import StaticFiles
-import fastapi.exceptions

 # Define the FastAPI app
 app = FastAPI()
@@ -18,7 +17,6 @@ def create_frontend_router(build_dir="../frontend/dist"):
         A Starlette application serving the frontend.
     """
     build_path = pathlib.Path(__file__).parent.parent.parent / build_dir
-    static_files_path = build_path / "assets"  # Vite uses 'assets' subdir

     if not build_path.is_dir() or not (build_path / "index.html").is_file():
         print(
@@ -36,21 +34,7 @@ def create_frontend_router(build_dir="../frontend/dist"):
         return Route("/{path:path}", endpoint=dummy_frontend)

-    build_dir = pathlib.Path(build_dir)
-
-    react = FastAPI(openapi_url="")
-    react.mount(
-        "/assets", StaticFiles(directory=static_files_path), name="static_assets"
-    )
-
-    @react.get("/{path:path}")
-    async def handle_catch_all(request: Request, path: str):
-        fp = build_path / path
-        if not fp.exists() or not fp.is_file():
-            fp = build_path / "index.html"
-        return fastapi.responses.FileResponse(fp)
-
-    return react
+    return StaticFiles(directory=build_path, html=True)


 # Mount the frontend under /app to not conflict with the LangGraph API routes
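The net effect of this hunk: the hand-rolled SPA sub-application with its catch-all route is replaced by Starlette's built-in `html=True` mode on `StaticFiles`. A minimal sketch of the resulting setup, assuming the frontend has been built so `build_path` resolves to the Vite `dist` output, and using the `/app` mount path mentioned in the comment above:

```python
import pathlib

from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles

app = FastAPI()
build_path = pathlib.Path(__file__).parent.parent.parent / "../frontend/dist"

# With html=True, StaticFiles serves build_path/index.html for "/" and for
# directory requests, so no custom catch-all route is needed.
app.mount("/app", StaticFiles(directory=build_path, html=True), name="frontend")
```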

View File

@@ -16,14 +16,14 @@ class Configuration(BaseModel):
     )

     reflection_model: str = Field(
-        default="gemini-2.5-flash-preview-04-17",
+        default="gemini-2.5-flash",
         metadata={
             "description": "The name of the language model to use for the agent's reflection."
         },
     )

     answer_model: str = Field(
-        default="gemini-2.5-pro-preview-05-06",
+        default="gemini-2.5-pro",
         metadata={
             "description": "The name of the language model to use for the agent's answer."
         },
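Both defaults move from dated preview checkpoints to the stable model names. The defaults remain per-run overridable; a small sketch, assuming the standard LangGraph `configurable` convention that `Configuration.from_runnable_config` (used in the graph hunks below) follows, with a hypothetical override value and import path:

```python
from agent.configuration import Configuration  # import path is an assumption

# Hypothetical override via RunnableConfig: a field default above applies
# only when "configurable" carries no entry for that field.
config = {"configurable": {"reflection_model": "gemini-2.5-flash-lite"}}
configurable = Configuration.from_runnable_config(config)

assert configurable.reflection_model == "gemini-2.5-flash-lite"  # overridden
assert configurable.answer_model == "gemini-2.5-pro"             # default kept
```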

View File

@@ -78,7 +78,7 @@ def generate_query(state: OverallState, config: RunnableConfig) -> QueryGenerati
     )

     # Generate the search queries
     result = structured_llm.invoke(formatted_prompt)
-    return {"query_list": result.query}
+    return {"search_query": result.query}


 def continue_to_web_research(state: QueryGenerationState):
@@ -88,7 +88,7 @@ def continue_to_web_research(state: QueryGenerationState):
     """
     return [
         Send("web_research", {"search_query": search_query, "id": int(idx)})
-        for idx, search_query in enumerate(state["query_list"])
+        for idx, search_query in enumerate(state["search_query"])
     ]
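The `query_list` → `search_query` rename must stay consistent across the node's return value (previous hunk), this fan-out, and the `QueryGenerationState` TypedDict (state file below). A minimal sketch of the data flow under that assumption, with hypothetical query strings:

```python
from langgraph.types import Send  # import path may vary by langgraph version

# generate_query now returns {"search_query": [...]}, and the router fans
# out one Send per query under the same renamed key.
state = {"search_query": ["gemini 2.5 pro benchmarks", "langgraph send api"]}

sends = [
    Send("web_research", {"search_query": q, "id": int(idx)})
    for idx, q in enumerate(state["search_query"])
]
```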
@@ -153,7 +153,7 @@ def reflection(state: OverallState, config: RunnableConfig) -> ReflectionState:
     configurable = Configuration.from_runnable_config(config)

     # Increment the research loop count and get the reasoning model
     state["research_loop_count"] = state.get("research_loop_count", 0) + 1
-    reasoning_model = state.get("reasoning_model") or configurable.reasoning_model
+    reasoning_model = state.get("reasoning_model", configurable.reflection_model)

     # Format the prompt
     current_date = get_current_date()
@@ -231,7 +231,7 @@ def finalize_answer(state: OverallState, config: RunnableConfig):
         Dictionary with state update, including running_summary key containing the formatted final summary with sources
     """
     configurable = Configuration.from_runnable_config(config)
-    reasoning_model = state.get("reasoning_model") or configurable.reasoning_model
+    reasoning_model = state.get("reasoning_model") or configurable.answer_model

     # Format the prompt
     current_date = get_current_date()
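Note that the two rewritten fallbacks above use different idioms that are not equivalent: `dict.get(key, default)` falls back only when the key is absent, while `x or default` also falls back when the value is `None` or empty. A two-assert illustration:

```python
state = {"reasoning_model": None}  # key present, but value is None

assert state.get("reasoning_model", "fallback") is None          # default NOT used
assert (state.get("reasoning_model") or "fallback") == "fallback"  # or-fallback used
```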

View File

@@ -37,7 +37,7 @@ class Query(TypedDict):


 class QueryGenerationState(TypedDict):
-    query_list: list[Query]
+    search_query: list[Query]


 class WebSearchState(TypedDict):

View File

@@ -4,6 +4,7 @@ volumes:
 services:
   langgraph-redis:
     image: docker.io/redis:6
+    container_name: langgraph-redis
     healthcheck:
       test: redis-cli ping
       interval: 5s
@@ -11,6 +12,7 @@ services:
retries: 5
langgraph-postgres:
image: docker.io/postgres:16
container_name: langgraph-postgres
ports:
- "5433:5432"
environment:
@@ -27,6 +29,7 @@ services:
       interval: 5s
   langgraph-api:
     image: gemini-fullstack-langgraph
+    container_name: langgraph-api
     ports:
       - "8123:8000"
     depends_on:
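Pinning `container_name` on all three services gives them stable names, so ad-hoc commands such as `docker logs langgraph-api` or `docker exec langgraph-redis redis-cli ping` can address the containers directly instead of depending on Compose's generated names.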

View File

@@ -4,6 +4,7 @@ import { useState, useEffect, useRef, useCallback } from "react";
 import { ProcessedEvent } from "@/components/ActivityTimeline";
 import { WelcomeScreen } from "@/components/WelcomeScreen";
 import { ChatMessagesView } from "@/components/ChatMessagesView";
+import { Button } from "@/components/ui/button";

 export default function App() {
   const [processedEventsTimeline, setProcessedEventsTimeline] = useState<
@@ -14,7 +15,7 @@ export default function App() {
   >({});
   const scrollAreaRef = useRef<HTMLDivElement>(null);
   const hasFinalizeEventOccurredRef = useRef(false);
+  const [error, setError] = useState<string | null>(null);

   const thread = useStream<{
     messages: Message[];
     initial_search_query_count: number;
@@ -26,15 +27,12 @@ export default function App() {
       : "http://localhost:8123",
     assistantId: "agent",
     messagesKey: "messages",
-    onFinish: (event: any) => {
-      console.log(event);
-    },
     onUpdateEvent: (event: any) => {
       let processedEvent: ProcessedEvent | null = null;
       if (event.generate_query) {
         processedEvent = {
           title: "Generating Search Queries",
-          data: event.generate_query.query_list.join(", "),
+          data: event.generate_query?.search_query?.join(", ") || "",
         };
       } else if (event.web_research) {
         const sources = event.web_research.sources_gathered || [];
@@ -52,11 +50,7 @@ export default function App() {
       } else if (event.reflection) {
         processedEvent = {
           title: "Reflection",
-          data: event.reflection.is_sufficient
-            ? "Search successful, generating final answer."
-            : `Need more information, searching for ${event.reflection.follow_up_queries.join(
-                ", "
-              )}`,
+          data: "Analysing Web Research Results",
         };
       } else if (event.finalize_answer) {
         processedEvent = {
@@ -72,6 +66,9 @@ export default function App() {
         ]);
       }
     },
+    onError: (error: any) => {
+      setError(error.message);
+    },
   });

   useEffect(() => {
@@ -161,6 +158,20 @@ export default function App() {
             isLoading={thread.isLoading}
             onCancel={handleCancel}
           />
+        ) : error ? (
+          <div className="flex flex-col items-center justify-center h-full">
+            <div className="flex flex-col items-center justify-center gap-4">
+              <h1 className="text-2xl text-red-400 font-bold">Error</h1>
+              <p className="text-red-400">{JSON.stringify(error)}</p>
+              <Button
+                variant="destructive"
+                onClick={() => window.location.reload()}
+              >
+                Retry
+              </Button>
+            </div>
+          </div>
         ) : (
           <ChatMessagesView
             messages={thread.messages}

View File

@@ -203,7 +203,9 @@ const AiMessageBubble: React.FC<AiMessageBubbleProps> = ({
       </ReactMarkdown>
       <Button
         variant="default"
-        className="cursor-pointer bg-neutral-700 border-neutral-600 text-neutral-300 self-end"
+        className={`cursor-pointer bg-neutral-700 border-neutral-600 text-neutral-300 self-end ${
+          message.content.length > 0 ? "visible" : "hidden"
+        }`}
         onClick={() =>
           handleCopy(
             typeof message.content === "string"
@@ -250,7 +252,6 @@ export function ChatMessagesView({
       console.error("Failed to copy text: ", err);
     }
   };

   return (
     <div className="flex flex-col h-full">
       <ScrollArea className="flex-1 overflow-y-auto" ref={scrollAreaRef}>

View File

@@ -49,7 +49,7 @@ export const InputForm: React.FC<InputFormProps> = ({
   return (
     <form
       onSubmit={handleInternalSubmit}
-      className={`flex flex-col gap-2 p-3 `}
+      className={`flex flex-col gap-2 p-3 pb-4`}
     >
       <div
         className={`flex flex-row items-center justify-between text-white rounded-3xl rounded-bl-sm ${

View File

@@ -17,6 +17,7 @@ function ScrollArea({
       <ScrollAreaPrimitive.Viewport
         data-slot="scroll-area-viewport"
         className="focus-visible:ring-ring/50 size-full rounded-[inherit] transition-[color,box-shadow] outline-none focus-visible:ring-[3px] focus-visible:outline-1"
+        style={{ overscrollBehavior: 'none' }}
       >
         {children}
       </ScrollAreaPrimitive.Viewport>
@@ -38,16 +39,16 @@ function ScrollBar({
       className={cn(
         "flex touch-none p-px transition-colors select-none",
         orientation === "vertical" &&
-          "h-full w-2.5 border-l border-l-transparent",
+          "h-full w-1.5 border-l border-l-transparent",
         orientation === "horizontal" &&
-          "h-2.5 flex-col border-t border-t-transparent",
+          "h-1.5 flex-col border-t border-t-transparent",
         className
       )}
       {...props}
     >
       <ScrollAreaPrimitive.ScrollAreaThumb
         data-slot="scroll-area-thumb"
-        className="bg-border relative flex-1 rounded-full"
+        className="bg-neutral-600/30 relative flex-1 rounded-full"
       />
     </ScrollAreaPrimitive.ScrollAreaScrollbar>
   )

View File

@@ -116,6 +116,13 @@
   }

   body {
     @apply bg-background text-foreground;
+    /* Prevent scroll bounce/overscroll on mobile */
+    overscroll-behavior: none;
+    -webkit-overflow-scrolling: touch;
   }
+  html {
+    /* Prevent scroll bounce on the entire page */
+    overscroll-behavior: none;
+  }
 }
@@ -150,5 +157,31 @@
   animation: fadeInUpSmooth 0.3s ease-out forwards;
 }
+
+/* Prevent scroll bounce on scroll areas */
+[data-radix-scroll-area-viewport] {
+  overscroll-behavior: none !important;
+  -webkit-overflow-scrolling: touch;
+}
+
+/* Hide any white space that might appear during scroll bounce */
+[data-radix-scroll-area-viewport]::-webkit-scrollbar {
+  width: 0px;
+  background: transparent;
+}
+
+/* Subtle scroll bar styling */
+[data-slot="scroll-area-scrollbar"] {
+  opacity: 0.3;
+  transition: opacity 0.2s ease;
+}
+
+[data-slot="scroll-area"]:hover [data-slot="scroll-area-scrollbar"] {
+  opacity: 0.6;
+}
+
+[data-slot="scroll-area-thumb"] {
+  background-color: rgb(115 115 115 / 0.2) !important;
+}
+
+/* Ensure your body or html has a dark background if not already set, e.g.: */
+/* body { background-color: #0c0c0d; } */ /* This is similar to neutral-950 */