From b6456549232a93bca7647001a3b13973bae69182 Mon Sep 17 00:00:00 2001 From: Peter Steinberger Date: Mon, 2 Mar 2026 19:37:03 +0000 Subject: [PATCH] fix: avoid stale followup drain callbacks (#31902) (thanks @Lanfei) --- CHANGELOG.md | 1 + src/auto-reply/reply/queue/drain.ts | 6 +++--- src/auto-reply/reply/reply-flow.test.ts | 27 +++++++++++++++++++++++++ 3 files changed, 31 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index aae18333e..698fda8d6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -47,6 +47,7 @@ Docs: https://docs.openclaw.ai ### Fixes +- Auto-reply/followup queue: avoid stale callback reuse across idle-window restarts by caching the followup runner only when a drain actually starts, preserving enqueue ordering after empty-finalize paths. (#31902) Thanks @Lanfei. - Gateway/Heartbeat model reload: treat `models.*` and `agents.defaults.model` config updates as heartbeat hot-reload triggers so heartbeat picks up model changes without a full gateway restart. (#32046) Thanks @stakeswky. - Slack/inbound debounce routing: isolate top-level non-DM message debounce keys by message timestamp to avoid cross-thread collisions, preserve DM batching, and flush pending top-level buffers before immediate non-debounce follow-ups to keep ordering stable. (#31951) Thanks @scoootscooob. - OpenRouter/x-ai compatibility: skip `reasoning.effort` injection for `x-ai/*` models (for example Grok) so OpenRouter requests no longer fail with invalid-arguments errors on unsupported reasoning params. (#32054) Thanks @scoootscooob. 
diff --git a/src/auto-reply/reply/queue/drain.ts b/src/auto-reply/reply/queue/drain.ts index 3846b2eaa..e8e93b3dd 100644 --- a/src/auto-reply/reply/queue/drain.ts +++ b/src/auto-reply/reply/queue/drain.ts @@ -67,13 +67,13 @@ export function scheduleFollowupDrain( key: string, runFollowup: (run: FollowupRun) => Promise<void>, ): void { - // Cache the callback so enqueueFollowupRun can restart drain after the queue - // has been deleted and recreated (the post-drain idle window race condition). - FOLLOWUP_RUN_CALLBACKS.set(key, runFollowup); const queue = beginQueueDrain(FOLLOWUP_QUEUES, key); if (!queue) { return; } + // Cache callback only when a drain actually starts. Avoid keeping stale + // callbacks around from finalize calls where no queue work is pending. + FOLLOWUP_RUN_CALLBACKS.set(key, runFollowup); void (async () => { try { const collectState = { forceIndividualCollect: false }; diff --git a/src/auto-reply/reply/reply-flow.test.ts b/src/auto-reply/reply/reply-flow.test.ts index c854672a9..2842924b2 100644 --- a/src/auto-reply/reply/reply-flow.test.ts +++ b/src/auto-reply/reply/reply-flow.test.ts @@ -1097,6 +1097,33 @@ describe("followup queue collect routing", () => { }); describe("followup queue drain restart after idle window", () => { + it("does not retain stale callbacks when scheduleFollowupDrain runs with an empty queue", async () => { + const key = `test-no-stale-callback-${Date.now()}`; + const settings: QueueSettings = { mode: "followup", debounceMs: 0, cap: 50 }; + const staleCalls: FollowupRun[] = []; + const freshCalls: FollowupRun[] = []; + const drained = createDeferred<void>(); + + // Simulate finalizeWithFollowup calling schedule without pending queue items. 
+ scheduleFollowupDrain(key, async (run) => { + staleCalls.push(run); + }); + + enqueueFollowupRun(key, createRun({ prompt: "after-empty-schedule" }), settings); + await new Promise((resolve) => setImmediate(resolve)); + expect(staleCalls).toHaveLength(0); + + scheduleFollowupDrain(key, async (run) => { + freshCalls.push(run); + drained.resolve(); + }); + await drained.promise; + + expect(staleCalls).toHaveLength(0); + expect(freshCalls).toHaveLength(1); + expect(freshCalls[0]?.prompt).toBe("after-empty-schedule"); + }); + it("processes a message enqueued after the drain empties and deletes the queue", async () => { const key = `test-idle-window-race-${Date.now()}`; const calls: FollowupRun[] = [];