feat(agents): flush reply pipeline before compaction wait (#35489)

Merged via squash.

Prepared head SHA: 7dbbcc510b74b0e8d35eb750d24575e34b5d769a
Co-authored-by: Sid-Qin <201593046+Sid-Qin@users.noreply.github.com>
Co-authored-by: jalehman <550978+jalehman@users.noreply.github.com>
Reviewed-by: @jalehman
This commit is contained in:
Sid
2026-03-06 10:22:19 +08:00
committed by GitHub
parent 6084c26d00
commit 7a22b3fa0b
4 changed files with 16 additions and 2 deletions

View File

@@ -160,6 +160,7 @@ Docs: https://docs.openclaw.ai
- Discord/inbound timeout isolation: separate inbound worker timeout tracking from listener timeout budgets so queued Discord replies are no longer dropped when listener watchdog windows expire mid-run. (#36602) Thanks @dutifulbob.
- Memory/doctor SecretRef handling: treat SecretRef-backed memory-search API keys as configured, and fail embedding setup with explicit unresolved-secret errors instead of crashing. (#36835) Thanks @joshavant.
- Memory/flush default prompt: ban timestamped variant filenames during default memory flush runs so durable notes stay in the canonical daily `memory/YYYY-MM-DD.md` file. (#34951) Thanks @zerone0x.
- Agents/reply delivery timing: flush embedded Pi block replies before waiting on compaction retries so already-generated assistant replies reach channels before compaction wait completes. (#35489) Thanks @Sid-Qin.
## 2026.3.2

View File

@@ -26,8 +26,8 @@ async function loadRunEmbeddedPiAgent(): Promise<RunEmbeddedPiAgentFn> {
// Bundled install (built)
// NOTE: there is no src/ tree in a packaged install. Prefer a stable internal entrypoint.
const distModulePath = "../../../dist/extensionAPI.js";
const mod = await import(distModulePath);
const distExtensionApi = "../../../dist/extensionAPI.js";
const mod = (await import(distExtensionApi)) as { runEmbeddedPiAgent?: unknown };
// oxlint-disable-next-line typescript/no-explicit-any
const fn = (mod as any).runEmbeddedPiAgent;
if (typeof fn !== "function") {

View File

@@ -1688,6 +1688,14 @@ export async function runEmbeddedAttempt(
const preCompactionSessionId = activeSession.sessionId;
try {
// Flush buffered block replies before waiting for compaction so the
// user receives the assistant response immediately. Without this,
// coalesced/buffered blocks stay in the pipeline until compaction
// finishes — which can take minutes on large contexts (#35074).
if (params.onBlockReplyFlush) {
await params.onBlockReplyFlush();
}
await abortable(waitForCompactionRetry());
} catch (err) {
if (isRunnerAbortError(err)) {

View File

@@ -73,6 +73,11 @@ export function handleAgentEnd(ctx: EmbeddedPiSubscribeContext) {
}
ctx.flushBlockReplyBuffer();
// Flush the reply pipeline so the response reaches the channel before
// compaction wait blocks the run. This mirrors the pattern used by
// handleToolExecutionStart and ensures delivery is not held hostage to
// long-running compaction (#35074).
void ctx.params.onBlockReplyFlush?.();
ctx.state.blockState.thinking = false;
ctx.state.blockState.final = false;