mirror of
https://github.com/bytedance/deer-flow.git
synced 2026-05-11 11:13:51 +00:00
* fix(frontend): defer thread id to onStart to avoid 404 on new chat
The LangGraph SDK's useStream eagerly fetches /threads/{id}/history the
moment it receives a thread id, and the local useThreadRuns issues
GET /threads/{id}/runs for the same reason. The chats page used to flip
isNewThread=false (and forward the client-generated thread id) inside
the synchronous onSend callback, before thread.submit had created the
thread on the backend. The two queries therefore raced ahead of
POST /runs/stream and returned 404 on the very first send.
Drop the onSend handler so isNewThread stays true until onStart fires
from useStream's onCreated — by then the backend has the thread, and
the SDK's submittingRef guard naturally suppresses the redundant
history fetch. The agent chat page already uses this pattern, so this
also unifies the two flows.
Adds an E2E regression that records request ordering and asserts
GET /history and GET /runs are never issued before POST /runs/stream
on the first send from /chats/new.
Closes #2746
* fix(frontend): split welcome layout from backend thread state
Removing onSend kept GET /history and GET /runs from racing ahead of
POST /runs/stream, but it also coupled the welcome layout (centered
input, hero, quick actions) to backend thread creation. Until onCreated
returned, the user's optimistic message and the welcome hero rendered on
top of each other.
Introduce a dedicated `isWelcomeMode` UI flag, separate from
`isNewThread`:
- `isNewThread` still tracks "backend has no thread yet" and gates the
thread id forwarded to useStream.
- `isWelcomeMode` drives the visual layout (header background, input
box position, max width, hero, quick actions, autoFocus) and flips to
false inside onSend so the layout animates immediately.
`isWelcomeMode` is kept in sync with `isNewThread` via an effect so
sidebar navigation and "new chat" still behave correctly. All 15 E2E
tests pass, including the ordering regression added in the previous
commit.
* test(e2e): use monotonic sequence for thread-init ordering check
Date.now() is millisecond-resolution, so two requests emitted within
the same tick would share a timestamp and slip past the strict `<`
ordering assertions. Replace the timestamp with a monotonic counter
that increments on every observed request/requestfinished event so the
ordering check is robust regardless of scheduling.
Per PR #2749 review feedback from copilot-pull-request-reviewer.
* refactor(input-box): rename isNewThread prop to isWelcomeMode
Inside InputBox, the prop named `isNewThread` is only ever consulted
for visual layout decisions — gating follow-up suggestions, the bottom
background strip, and the welcome-mode quick-action SuggestionList. It
never reflects "the backend has created the thread", which after #2746
is tracked separately via `isNewThread` in the chat pages themselves.
Rename the prop to `isWelcomeMode` and update both call sites
(workspace chats page and agent chats page) so the prop name matches
its actual semantics. No behavior change.
Per PR #2749 review feedback from @WillemJiang.
116 lines · 3.8 KiB · TypeScript
import { expect, test } from "@playwright/test";
|
|
|
|
import { handleRunStream, mockLangGraphAPI } from "./utils/mock-api";
|
|
|
|
/**
|
|
* Regression for https://github.com/bytedance/deer-flow/issues/2746.
|
|
*
|
|
* On a brand-new chat, the LangGraph SDK's useStream eagerly fetches
|
|
* `/threads/{id}/history` the moment it receives a thread id, and the
|
|
* frontend's own `useThreadRuns` fires `GET /threads/{id}/runs` for the same
|
|
* reason. Both endpoints assume the thread already exists on the backend;
|
|
* if the frontend forwards the (client-generated) thread id before
|
|
* `POST /runs/stream` has actually created the thread, both calls 404 in
|
|
* production. This test pins the request ordering so the regression cannot
|
|
* re-appear silently.
|
|
*/
|
|
test.describe("Chat: thread API request ordering on first send", () => {
|
|
test("does not call /history or GET /runs before /runs/stream is initiated", async ({
|
|
page,
|
|
}) => {
|
|
type EventLog = {
|
|
phase: "sent" | "done";
|
|
url: string;
|
|
method: string;
|
|
seq: number;
|
|
};
|
|
const events: EventLog[] = [];
|
|
// Monotonic sequence number — Date.now() is millisecond-resolution and
|
|
// would let two requests share a timestamp, which would defeat the
|
|
// strict-ordering assertions below.
|
|
let nextSeq = 0;
|
|
|
|
page.on("request", (req) => {
|
|
events.push({
|
|
phase: "sent",
|
|
url: req.url(),
|
|
method: req.method(),
|
|
seq: nextSeq++,
|
|
});
|
|
});
|
|
page.on("requestfinished", (req) => {
|
|
events.push({
|
|
phase: "done",
|
|
url: req.url(),
|
|
method: req.method(),
|
|
seq: nextSeq++,
|
|
});
|
|
});
|
|
|
|
mockLangGraphAPI(page);
|
|
|
|
// Slow down /runs/stream so any pre-create /history or /runs request
|
|
// would land well before the stream returns metadata, widening the
|
|
// race window the bug used to exploit.
|
|
await page.route(
|
|
"**/api/langgraph/threads/*/runs/stream",
|
|
async (route) => {
|
|
await new Promise((r) => setTimeout(r, 250));
|
|
return handleRunStream(route);
|
|
},
|
|
);
|
|
await page.route("**/api/langgraph/runs/stream", async (route) => {
|
|
await new Promise((r) => setTimeout(r, 250));
|
|
return handleRunStream(route);
|
|
});
|
|
|
|
await page.goto("/workspace/chats/new");
|
|
|
|
const textarea = page.getByPlaceholder(/how can i assist you/i);
|
|
await expect(textarea).toBeVisible({ timeout: 15_000 });
|
|
await textarea.fill("Hello");
|
|
await textarea.press("Enter");
|
|
|
|
// Wait for streaming response so all init requests have a chance to fire.
|
|
await expect(page.getByText("Hello from DeerFlow!")).toBeVisible({
|
|
timeout: 15_000,
|
|
});
|
|
|
|
const isHistory = (url: string) =>
|
|
/\/api\/langgraph\/threads\/[^/]+\/history/.test(url);
|
|
const isRunsList = (url: string, method: string) =>
|
|
method === "GET" &&
|
|
/\/api\/langgraph\/threads\/[^/]+\/runs(\?|$)/.test(url);
|
|
const isRunsStream = (url: string, method: string) =>
|
|
method === "POST" && /\/runs\/stream(\?|$)/.test(url);
|
|
|
|
const runsStreamSent = events.find(
|
|
(e) => e.phase === "sent" && isRunsStream(e.url, e.method),
|
|
);
|
|
expect(
|
|
runsStreamSent,
|
|
"Expected POST /runs/stream to be issued during send",
|
|
).toBeDefined();
|
|
|
|
const earlyHistory = events.filter(
|
|
(e) =>
|
|
e.phase === "sent" && isHistory(e.url) && e.seq < runsStreamSent!.seq,
|
|
);
|
|
const earlyRunsList = events.filter(
|
|
(e) =>
|
|
e.phase === "sent" &&
|
|
isRunsList(e.url, e.method) &&
|
|
e.seq < runsStreamSent!.seq,
|
|
);
|
|
|
|
expect(
|
|
earlyHistory.map((e) => e.url),
|
|
"GET /history must not be issued before POST /runs/stream — see issue #2746",
|
|
).toEqual([]);
|
|
expect(
|
|
earlyRunsList.map((e) => e.url),
|
|
"GET /runs must not be issued before POST /runs/stream — see issue #2746",
|
|
).toEqual([]);
|
|
});
|
|
});
|