mirror of
https://github.com/bytedance/deer-flow.git
synced 2026-05-10 02:38:26 +00:00
fix(frontend): defer thread id to onStart to avoid 404 on new chat (#2749)
* fix(frontend): defer thread id to onStart to avoid 404 on new chat
The LangGraph SDK's useStream eagerly fetches /threads/{id}/history the
moment it receives a thread id, and the local useThreadRuns issues
GET /threads/{id}/runs for the same reason. The chats page used to flip
isNewThread=false (and forward the client-generated thread id) inside
the synchronous onSend callback, before thread.submit had created the
thread on the backend. The two queries therefore raced ahead of
POST /runs/stream and returned 404 on the very first send.
Drop the onSend handler so isNewThread stays true until onStart fires
from useStream's onCreated — by then the backend has the thread, and
the SDK's submittingRef guard naturally suppresses the redundant
history fetch. The agent chat page already uses this pattern, so this
also unifies the two flows.
Adds an E2E regression that records request ordering and asserts
GET /history and GET /runs are never issued before POST /runs/stream
on the first send from /chats/new.
Closes #2746
* fix(frontend): split welcome layout from backend thread state
Removing onSend kept GET /history and GET /runs from racing ahead of
POST /runs/stream, but it also coupled the welcome layout (centered
input, hero, quick actions) to backend thread creation. Until onCreated
returned, the user's optimistic message and the welcome hero rendered on
top of each other.
Introduce a dedicated `isWelcomeMode` UI flag, separate from
`isNewThread`:
- `isNewThread` still tracks "backend has no thread yet" and gates the
thread id forwarded to useStream.
- `isWelcomeMode` drives the visual layout (header background, input
box position, max width, hero, quick actions, autoFocus) and flips to
false inside onSend so the layout animates immediately.
`isWelcomeMode` is kept in sync with `isNewThread` via an effect so
sidebar navigation and "new chat" still behave correctly. All 15 E2E
tests pass, including the ordering regression added in the previous
commit.
* test(e2e): use monotonic sequence for thread-init ordering check
Date.now() is millisecond-resolution, so two requests emitted within
the same tick would share a timestamp and slip past the strict `<`
ordering assertions. Replace the timestamp with a monotonic counter
that increments on every observed request/requestfinished event so the
ordering check is robust regardless of scheduling.
Per PR #2749 review feedback from copilot-pull-request-reviewer.
* refactor(input-box): rename isNewThread prop to isWelcomeMode
Inside InputBox, the prop named `isNewThread` is only ever consulted
for visual layout decisions — gating follow-up suggestions, the bottom
background strip, and the welcome-mode quick-action SuggestionList. It
never reflects "the backend has created the thread" — after #2746 that
state is tracked separately as the `isNewThread` state inside the chat pages themselves.
Rename the prop to `isWelcomeMode` and update both call sites
(workspace chats page and agent chats page) so the prop name matches
its actual semantics. No behavior change.
Per PR #2749 review feedback from @WillemJiang.
This commit is contained in:
parent
cef4224381
commit
27559f3675
@ -191,7 +191,7 @@ export default function AgentChatPage() {
|
||||
|
||||
<InputBox
|
||||
className={cn("bg-background/5 w-full -translate-y-4")}
|
||||
isNewThread={isNewThread}
|
||||
isWelcomeMode={isNewThread}
|
||||
threadId={threadId}
|
||||
autoFocus={isNewThread}
|
||||
status={
|
||||
|
||||
@ -35,6 +35,12 @@ export default function ChatPage() {
|
||||
const [showFollowups, setShowFollowups] = useState(false);
|
||||
const { threadId, setThreadId, isNewThread, setIsNewThread, isMock } =
|
||||
useThreadChat();
|
||||
// `isNewThread` tracks whether the backend has the thread yet — gates the
|
||||
// SDK's history fetch (see issue #2746). `isWelcomeMode` is the visual
|
||||
// welcome layout (centered input, hero, quick actions); we flip it to false
|
||||
// the moment the user submits so the UI animates immediately, even though
|
||||
// `isNewThread` stays true until the backend actually creates the thread.
|
||||
const [isWelcomeMode, setIsWelcomeMode] = useState(isNewThread);
|
||||
const [settings, setSettings] = useThreadSettings(threadId);
|
||||
const [localSettings, setLocalSettings] = useLocalSettings();
|
||||
const { tokenUsageEnabled } = useModels();
|
||||
@ -45,6 +51,14 @@ export default function ChatPage() {
|
||||
mountedRef.current = true;
|
||||
}, []);
|
||||
|
||||
// Keep welcome layout in sync when navigating between threads (sidebar
|
||||
// clicks, "new chat" button). Submitting in /chats/new flips the layout
|
||||
// via onSend below — `isNewThread` stays true until onStart, so this effect
|
||||
// is harmless during the submit transition.
|
||||
useEffect(() => {
|
||||
setIsWelcomeMode(isNewThread);
|
||||
}, [isNewThread]);
|
||||
|
||||
const { showNotification } = useNotification();
|
||||
|
||||
const {
|
||||
@ -58,9 +72,11 @@ export default function ChatPage() {
|
||||
threadId: isNewThread ? undefined : threadId,
|
||||
context: settings.context,
|
||||
isMock,
|
||||
onSend: (_threadId) => {
|
||||
setThreadId(_threadId);
|
||||
setIsNewThread(false);
|
||||
// onSend only animates the UI; do NOT flip `isNewThread` here — the
|
||||
// LangGraph SDK eagerly fetches /history the moment it receives a
|
||||
// thread id and assumes the thread exists on the backend (issue #2746).
|
||||
onSend: () => {
|
||||
setIsWelcomeMode(false);
|
||||
},
|
||||
onStart: (createdThreadId) => {
|
||||
setThreadId(createdThreadId);
|
||||
@ -111,7 +127,7 @@ export default function ChatPage() {
|
||||
<header
|
||||
className={cn(
|
||||
"absolute top-0 right-0 left-0 z-30 flex h-12 shrink-0 items-center px-4",
|
||||
isNewThread
|
||||
isWelcomeMode
|
||||
? "bg-background/0 backdrop-blur-none"
|
||||
: "bg-background/80 shadow-xs backdrop-blur",
|
||||
)}
|
||||
@ -135,7 +151,7 @@ export default function ChatPage() {
|
||||
<main className="flex min-h-0 max-w-full grow flex-col">
|
||||
<div className="flex size-full justify-center">
|
||||
<MessageList
|
||||
className={cn("size-full", !isNewThread && "pt-10")}
|
||||
className={cn("size-full", !isWelcomeMode && "pt-10")}
|
||||
threadId={threadId}
|
||||
thread={thread}
|
||||
paddingBottom={messageListPaddingBottom}
|
||||
@ -149,8 +165,8 @@ export default function ChatPage() {
|
||||
<div
|
||||
className={cn(
|
||||
"relative w-full",
|
||||
isNewThread && "-translate-y-[calc(50vh-96px)]",
|
||||
isNewThread
|
||||
isWelcomeMode && "-translate-y-[calc(50vh-96px)]",
|
||||
isWelcomeMode
|
||||
? "max-w-(--container-width-sm)"
|
||||
: "max-w-(--container-width-md)",
|
||||
)}
|
||||
@ -169,9 +185,9 @@ export default function ChatPage() {
|
||||
{mountedRef.current ? (
|
||||
<InputBox
|
||||
className={cn("bg-background/5 w-full -translate-y-4")}
|
||||
isNewThread={isNewThread}
|
||||
isWelcomeMode={isWelcomeMode}
|
||||
threadId={threadId}
|
||||
autoFocus={isNewThread}
|
||||
autoFocus={isWelcomeMode}
|
||||
status={
|
||||
thread.error
|
||||
? "error"
|
||||
@ -181,7 +197,7 @@ export default function ChatPage() {
|
||||
}
|
||||
context={settings.context}
|
||||
extraHeader={
|
||||
isNewThread && <Welcome mode={settings.context.mode} />
|
||||
isWelcomeMode && <Welcome mode={settings.context.mode} />
|
||||
}
|
||||
disabled={
|
||||
env.NEXT_PUBLIC_STATIC_WEBSITE_ONLY === "true" ||
|
||||
|
||||
@ -106,7 +106,7 @@ export function InputBox({
|
||||
status = "ready",
|
||||
context,
|
||||
extraHeader,
|
||||
isNewThread,
|
||||
isWelcomeMode,
|
||||
threadId,
|
||||
initialValue,
|
||||
onContextChange,
|
||||
@ -126,7 +126,12 @@ export function InputBox({
|
||||
reasoning_effort?: "minimal" | "low" | "medium" | "high";
|
||||
};
|
||||
extraHeader?: React.ReactNode;
|
||||
isNewThread?: boolean;
|
||||
/**
|
||||
* Whether to render the input in welcome layout (vertically centered,
|
||||
* with hero + quick action suggestions). This is purely a visual flag,
|
||||
* decoupled from "the backend has created the thread" — see issue #2746.
|
||||
*/
|
||||
isWelcomeMode?: boolean;
|
||||
threadId: string;
|
||||
initialValue?: string;
|
||||
onContextChange?: (
|
||||
@ -341,7 +346,7 @@ export function InputBox({
|
||||
|
||||
const showFollowups =
|
||||
!disabled &&
|
||||
!isNewThread &&
|
||||
!isWelcomeMode &&
|
||||
!followupsHidden &&
|
||||
(followupsLoading || followups.length > 0);
|
||||
|
||||
@ -846,12 +851,12 @@ export function InputBox({
|
||||
/>
|
||||
</PromptInputTools>
|
||||
</PromptInputFooter>
|
||||
{!isNewThread && (
|
||||
{!isWelcomeMode && (
|
||||
<div className="bg-background absolute right-0 -bottom-[17px] left-0 z-0 h-4"></div>
|
||||
)}
|
||||
</PromptInput>
|
||||
|
||||
{isNewThread && searchParams.get("mode") !== "skill" && (
|
||||
{isWelcomeMode && searchParams.get("mode") !== "skill" && (
|
||||
<div className="flex items-center justify-center pt-2">
|
||||
<SuggestionList />
|
||||
</div>
|
||||
|
||||
115
frontend/tests/e2e/chat-thread-init-ordering.spec.ts
Normal file
115
frontend/tests/e2e/chat-thread-init-ordering.spec.ts
Normal file
@ -0,0 +1,115 @@
|
||||
import { expect, test } from "@playwright/test";
|
||||
|
||||
import { handleRunStream, mockLangGraphAPI } from "./utils/mock-api";
|
||||
|
||||
/**
|
||||
* Regression for https://github.com/bytedance/deer-flow/issues/2746.
|
||||
*
|
||||
* On a brand-new chat, the LangGraph SDK's useStream eagerly fetches
|
||||
* `/threads/{id}/history` the moment it receives a thread id, and the
|
||||
* frontend's own `useThreadRuns` fires `GET /threads/{id}/runs` for the same
|
||||
* reason. Both endpoints assume the thread already exists on the backend;
|
||||
* if the frontend forwards the (client-generated) thread id before
|
||||
* `POST /runs/stream` has actually created the thread, both calls 404 in
|
||||
* production. This test pins the request ordering so the regression cannot
|
||||
* re-appear silently.
|
||||
*/
|
||||
test.describe("Chat: thread API request ordering on first send", () => {
|
||||
test("does not call /history or GET /runs before /runs/stream is initiated", async ({
|
||||
page,
|
||||
}) => {
|
||||
type EventLog = {
|
||||
phase: "sent" | "done";
|
||||
url: string;
|
||||
method: string;
|
||||
seq: number;
|
||||
};
|
||||
const events: EventLog[] = [];
|
||||
// Monotonic sequence number — Date.now() is millisecond-resolution and
|
||||
// would let two requests share a timestamp, which would defeat the
|
||||
// strict-ordering assertions below.
|
||||
let nextSeq = 0;
|
||||
|
||||
page.on("request", (req) => {
|
||||
events.push({
|
||||
phase: "sent",
|
||||
url: req.url(),
|
||||
method: req.method(),
|
||||
seq: nextSeq++,
|
||||
});
|
||||
});
|
||||
page.on("requestfinished", (req) => {
|
||||
events.push({
|
||||
phase: "done",
|
||||
url: req.url(),
|
||||
method: req.method(),
|
||||
seq: nextSeq++,
|
||||
});
|
||||
});
|
||||
|
||||
mockLangGraphAPI(page);
|
||||
|
||||
// Slow down /runs/stream so any pre-create /history or /runs request
|
||||
// would land well before the stream returns metadata, widening the
|
||||
// race window the bug used to exploit.
|
||||
await page.route(
|
||||
"**/api/langgraph/threads/*/runs/stream",
|
||||
async (route) => {
|
||||
await new Promise((r) => setTimeout(r, 250));
|
||||
return handleRunStream(route);
|
||||
},
|
||||
);
|
||||
await page.route("**/api/langgraph/runs/stream", async (route) => {
|
||||
await new Promise((r) => setTimeout(r, 250));
|
||||
return handleRunStream(route);
|
||||
});
|
||||
|
||||
await page.goto("/workspace/chats/new");
|
||||
|
||||
const textarea = page.getByPlaceholder(/how can i assist you/i);
|
||||
await expect(textarea).toBeVisible({ timeout: 15_000 });
|
||||
await textarea.fill("Hello");
|
||||
await textarea.press("Enter");
|
||||
|
||||
// Wait for streaming response so all init requests have a chance to fire.
|
||||
await expect(page.getByText("Hello from DeerFlow!")).toBeVisible({
|
||||
timeout: 15_000,
|
||||
});
|
||||
|
||||
const isHistory = (url: string) =>
|
||||
/\/api\/langgraph\/threads\/[^/]+\/history/.test(url);
|
||||
const isRunsList = (url: string, method: string) =>
|
||||
method === "GET" &&
|
||||
/\/api\/langgraph\/threads\/[^/]+\/runs(\?|$)/.test(url);
|
||||
const isRunsStream = (url: string, method: string) =>
|
||||
method === "POST" && /\/runs\/stream(\?|$)/.test(url);
|
||||
|
||||
const runsStreamSent = events.find(
|
||||
(e) => e.phase === "sent" && isRunsStream(e.url, e.method),
|
||||
);
|
||||
expect(
|
||||
runsStreamSent,
|
||||
"Expected POST /runs/stream to be issued during send",
|
||||
).toBeDefined();
|
||||
|
||||
const earlyHistory = events.filter(
|
||||
(e) =>
|
||||
e.phase === "sent" && isHistory(e.url) && e.seq < runsStreamSent!.seq,
|
||||
);
|
||||
const earlyRunsList = events.filter(
|
||||
(e) =>
|
||||
e.phase === "sent" &&
|
||||
isRunsList(e.url, e.method) &&
|
||||
e.seq < runsStreamSent!.seq,
|
||||
);
|
||||
|
||||
expect(
|
||||
earlyHistory.map((e) => e.url),
|
||||
"GET /history must not be issued before POST /runs/stream — see issue #2746",
|
||||
).toEqual([]);
|
||||
expect(
|
||||
earlyRunsList.map((e) => e.url),
|
||||
"GET /runs must not be issued before POST /runs/stream — see issue #2746",
|
||||
).toEqual([]);
|
||||
});
|
||||
});
|
||||
Loading…
x
Reference in New Issue
Block a user