Refactor release hardening follow-ups (#39959)

* build: fail fast on stale host-env swift policy

* build: sync generated host env swift policy

* build: guard bundled extension root dependency gaps

* refactor: centralize provider capability quirks

* test: table-drive provider regression coverage

* fix: block merge when prep branch has unpushed commits

* refactor: simplify models config merge preservation
This commit is contained in:
Peter Steinberger
2026-03-08 14:49:58 +00:00
committed by GitHub
parent 27558806b5
commit eba9dcc67a
13 changed files with 425 additions and 110 deletions

View File

@@ -22,7 +22,7 @@ enum HostEnvSecurityPolicy {
"PS4",
"GCONV_PATH",
"IFS",
"SSLKEYLOGFILE",
"SSLKEYLOGFILE"
]
static let blockedOverrideKeys: Set<String> = [
@@ -50,17 +50,17 @@ enum HostEnvSecurityPolicy {
"OPENSSL_ENGINES",
"PYTHONSTARTUP",
"WGETRC",
"CURL_HOME",
"CURL_HOME"
]
static let blockedOverridePrefixes: [String] = [
"GIT_CONFIG_",
"NPM_CONFIG_",
"NPM_CONFIG_"
]
static let blockedPrefixes: [String] = [
"DYLD_",
"LD_",
"BASH_FUNC_",
"BASH_FUNC_"
]
}

View File

@@ -227,7 +227,7 @@
"build:plugin-sdk:dts": "tsc -p tsconfig.plugin-sdk.dts.json",
"build:strict-smoke": "pnpm canvas:a2ui:bundle && node scripts/tsdown-build.mjs && node scripts/copy-plugin-sdk-root-alias.mjs && pnpm build:plugin-sdk:dts",
"canvas:a2ui:bundle": "bash scripts/bundle-a2ui.sh",
"check": "pnpm format:check && pnpm tsgo && pnpm lint && pnpm lint:tmp:no-random-messaging && pnpm lint:tmp:channel-agnostic-boundaries && pnpm lint:tmp:no-raw-channel-fetch && pnpm lint:agent:ingress-owner && pnpm lint:plugins:no-register-http-handler && pnpm lint:plugins:no-monolithic-plugin-sdk-entry-imports && pnpm lint:webhook:no-low-level-body-read && pnpm lint:auth:no-pairing-store-group && pnpm lint:auth:pairing-account-scope && pnpm check:host-env-policy:swift",
"check": "pnpm check:host-env-policy:swift && pnpm format:check && pnpm tsgo && pnpm lint && pnpm lint:tmp:no-random-messaging && pnpm lint:tmp:channel-agnostic-boundaries && pnpm lint:tmp:no-raw-channel-fetch && pnpm lint:agent:ingress-owner && pnpm lint:plugins:no-register-http-handler && pnpm lint:plugins:no-monolithic-plugin-sdk-entry-imports && pnpm lint:webhook:no-low-level-body-read && pnpm lint:auth:no-pairing-store-group && pnpm lint:auth:pairing-account-scope",
"check:docs": "pnpm format:docs:check && pnpm lint:docs && pnpm docs:check-links",
"check:host-env-policy:swift": "node scripts/generate-host-env-security-policy-swift.mjs --check",
"check:loc": "node --import tsx scripts/check-ts-max-loc.ts --max 500",

View File

@@ -229,6 +229,30 @@ checkout_prep_branch() {
git checkout "$prep_branch"
}
# Guard against merging when the local prep branch no longer points at the
# commit recorded during prepare-push.
# Args: $1 = PR number, $2 = prepared head SHA captured at prepare-push time.
# Exits 1 with remediation guidance when the branch head has moved.
verify_prep_branch_matches_prepared_head() {
local pr="$1"
local prepared_head_sha="$2"
# prep-context.env is produced by the earlier prepare steps; bail if absent.
require_artifact .local/prep-context.env
checkout_prep_branch "$pr"
local prep_branch_head_sha
prep_branch_head_sha=$(git rev-parse HEAD)
# Fast path: branch head is exactly the SHA we prepared and pushed.
if [ "$prep_branch_head_sha" = "$prepared_head_sha" ]; then
return 0
fi
echo "Local prep branch moved after prepare-push (expected $prepared_head_sha, got $prep_branch_head_sha)."
# If the prepared SHA is still an ancestor, the branch only gained new
# (unpushed) commits on top of it — list them and point at the sync command.
if git merge-base --is-ancestor "$prepared_head_sha" "$prep_branch_head_sha" 2>/dev/null; then
echo "Unpushed local commits on prep branch:"
git log --oneline "${prepared_head_sha}..${prep_branch_head_sha}" | sed 's/^/ /' || true
echo "Run scripts/pr prepare-sync-head $pr to push them before merge."
else
# History was rewritten/reset: the prepared commit is gone entirely.
echo "Prep branch no longer contains the prepared head. Re-run prepare-init."
fi
exit 1
}
resolve_head_push_url() {
# shellcheck disable=SC1091
source .local/pr-meta.env
@@ -1667,6 +1691,7 @@ merge_verify() {
require_artifact .local/prep.env
# shellcheck disable=SC1091
source .local/prep.env
verify_prep_branch_matches_prepared_head "$pr" "$PREP_HEAD_SHA"
local json
json=$(pr_meta_json "$pr")

View File

@@ -8,6 +8,17 @@ import { sparkleBuildFloorsFromShortVersion, type SparkleBuildFloors } from "./s
type PackFile = { path: string };
type PackResult = { files?: PackFile[] };
// Minimal subset of package.json fields this release script reads.
type PackageJson = {
name?: string;
version?: string;
// Dependency maps consumed by the bundled-extension root mirror check.
dependencies?: Record<string, string>;
optionalDependencies?: Record<string, string>;
// OpenClaw extension metadata; a set npmSpec marks an npm-installable extension.
openclaw?: {
install?: {
npmSpec?: string;
};
};
};
const requiredPathGroups = [
["dist/index.js", "dist/index.mjs"],
@@ -108,11 +119,6 @@ const appcastPath = resolve("appcast.xml");
const laneBuildMin = 1_000_000_000;
const laneFloorAdoptionDateKey = 20260227;
type PackageJson = {
name?: string;
version?: string;
};
function normalizePluginSyncVersion(version: string): string {
const normalized = version.trim().replace(/^v/, "");
const base = /^([0-9]+\.[0-9]+\.[0-9]+)/.exec(normalized)?.[1];
@@ -122,6 +128,92 @@ function normalizePluginSyncVersion(version: string): string {
return normalized.replace(/[-+].*$/, "");
}
const ALLOWLISTED_BUNDLED_EXTENSION_ROOT_DEP_GAPS: Record<string, string[]> = {
googlechat: ["google-auth-library"],
matrix: ["@matrix-org/matrix-sdk-crypto-nodejs", "@vector-im/matrix-bot-sdk", "music-metadata"],
msteams: ["@microsoft/agents-hosting"],
nostr: ["nostr-tools"],
tlon: ["@tloncorp/api", "@tloncorp/tlon-skill", "@urbit/aura"],
zalouser: ["zca-js"],
};
export function collectBundledExtensionRootDependencyGapErrors(params: {
rootPackage: PackageJson;
extensions: Array<{ id: string; packageJson: PackageJson }>;
}): string[] {
const rootDeps = {
...params.rootPackage.dependencies,
...params.rootPackage.optionalDependencies,
};
const errors: string[] = [];
for (const extension of params.extensions) {
if (!extension.packageJson.openclaw?.install?.npmSpec) {
continue;
}
const missing = Object.keys(extension.packageJson.dependencies ?? {})
.filter((dep) => dep !== "openclaw" && !rootDeps[dep])
.toSorted();
const allowlisted = [
...(ALLOWLISTED_BUNDLED_EXTENSION_ROOT_DEP_GAPS[extension.id] ?? []),
].toSorted();
if (missing.join("\n") !== allowlisted.join("\n")) {
const unexpected = missing.filter((dep) => !allowlisted.includes(dep));
const resolved = allowlisted.filter((dep) => !missing.includes(dep));
const parts = [
`bundled extension '${extension.id}' root dependency mirror drift`,
`missing in root package: ${missing.length > 0 ? missing.join(", ") : "(none)"}`,
];
if (unexpected.length > 0) {
parts.push(`new gaps: ${unexpected.join(", ")}`);
}
if (resolved.length > 0) {
parts.push(`remove stale allowlist entries: ${resolved.join(", ")}`);
}
errors.push(parts.join(" | "));
}
}
return errors;
}
/**
 * Scans the local extensions/ directory and loads each subdirectory's
 * package.json. Entries without a readable, parseable package.json are
 * skipped silently (best effort).
 */
function collectBundledExtensions(): Array<{ id: string; packageJson: PackageJson }> {
  const extensionsDir = resolve("extensions");
  const bundled: Array<{ id: string; packageJson: PackageJson }> = [];
  for (const entry of readdirSync(extensionsDir, { withFileTypes: true })) {
    if (!entry.isDirectory()) {
      continue;
    }
    const packagePath = join(extensionsDir, entry.name, "package.json");
    try {
      const packageJson = JSON.parse(readFileSync(packagePath, "utf8")) as PackageJson;
      bundled.push({ id: entry.name, packageJson });
    } catch {
      // Missing or malformed package.json: not a checkable bundled extension.
    }
  }
  return bundled;
}
// Release gate: exits the process with code 1 when any bundled extension
// declares a runtime dependency that the root package does not mirror (and
// that is not allowlisted), printing one line per drifted extension.
function checkBundledExtensionRootDependencyMirrors() {
const rootPackage = JSON.parse(readFileSync(resolve("package.json"), "utf8")) as PackageJson;
const errors = collectBundledExtensionRootDependencyGapErrors({
rootPackage,
extensions: collectBundledExtensions(),
});
if (errors.length > 0) {
console.error("release-check: bundled extension root dependency mirror validation failed:");
for (const error of errors) {
console.error(` - ${error}`);
}
process.exit(1);
}
}
function runPackDry(): PackResult[] {
const raw = execSync("npm pack --dry-run --json --ignore-scripts", {
encoding: "utf8",
@@ -321,6 +413,7 @@ function main() {
checkPluginVersions();
checkAppcastSparkleVersions();
checkPluginSdkExports();
checkBundledExtensionRootDependencyMirrors();
const results = runPackDry();
const files = results.flatMap((entry) => entry.files ?? []);

View File

@@ -246,6 +246,21 @@ describe("models-config", () => {
});
});
// Regression: a previously merged baseUrl must NOT be preserved when the
// seeded provider's api differs from the current config's api — the stale
// endpoint is replaced while the real apiKey is still carried over.
it("replaces stale merged baseUrl when the provider api changes", async () => {
await withTempHome(async () => {
const parsed = await runCustomProviderMergeTest({
seedProvider: {
baseUrl: "https://agent.example/v1",
apiKey: "AGENT_KEY", // pragma: allowlist secret
api: "openai-completions",
models: [{ id: "agent-model", name: "Agent model", input: ["text"] }],
},
});
expect(parsed.providers.custom?.apiKey).toBe("AGENT_KEY");
expect(parsed.providers.custom?.baseUrl).toBe("https://config.example/v1");
});
});
it("replaces stale merged apiKey when provider is SecretRef-managed in current config", async () => {
await withTempHome(async () => {
await writeAgentModelsJson({

View File

@@ -19,10 +19,14 @@ import {
} from "./models-config.providers.js";
type ModelsConfig = NonNullable<OpenClawConfig["models"]>;
type ExistingProviderConfig = NonNullable<ModelsConfig["providers"]>[string] & {
apiKey?: string;
baseUrl?: string;
api?: string;
};
const DEFAULT_MODE: NonNullable<ModelsConfig["mode"]> = "merge";
const MODELS_JSON_WRITE_LOCKS = new Map<string, Promise<void>>();
const AUTHORITATIVE_IMPLICIT_BASEURL_PROVIDERS = new Set(["openai-codex"]);
function isPositiveFiniteTokenLimit(value: unknown): value is number {
return typeof value === "number" && Number.isFinite(value) && value > 0;
@@ -142,18 +146,10 @@ async function readJson(pathname: string): Promise<unknown> {
async function resolveProvidersForModelsJson(params: {
cfg: OpenClawConfig;
agentDir: string;
}): Promise<{
providers: Record<string, ProviderConfig>;
authoritativeImplicitBaseUrlProviders: ReadonlySet<string>;
}> {
}): Promise<Record<string, ProviderConfig>> {
const { cfg, agentDir } = params;
const explicitProviders = cfg.models?.providers ?? {};
const implicitProviders = await resolveImplicitProviders({ agentDir, explicitProviders });
const authoritativeImplicitBaseUrlProviders = new Set<string>(
[...AUTHORITATIVE_IMPLICIT_BASEURL_PROVIDERS].filter((key) =>
Boolean(implicitProviders?.[key]),
),
);
const providers: Record<string, ProviderConfig> = mergeProviders({
implicit: implicitProviders,
explicit: explicitProviders,
@@ -171,52 +167,80 @@ async function resolveProvidersForModelsJson(params: {
if (implicitCopilot && !providers["github-copilot"]) {
providers["github-copilot"] = implicitCopilot;
}
return { providers, authoritativeImplicitBaseUrlProviders };
return providers;
}
/** Extracts a provider entry's `api` identifier; blank or non-string values count as absent. */
function resolveProviderApi(entry: { api?: unknown } | undefined): string | undefined {
  const raw = entry?.api;
  if (typeof raw !== "string") {
    return undefined;
  }
  const trimmed = raw.trim();
  return trimmed.length > 0 ? trimmed : undefined;
}
/**
 * True when a previously-written apiKey should survive the merge: the
 * provider is not SecretRef-managed and the stored key is a non-empty string
 * that does not look like a non-secret placeholder marker.
 */
function shouldPreserveExistingApiKey(params: {
  providerKey: string;
  existing: ExistingProviderConfig;
  secretRefManagedProviders: ReadonlySet<string>;
}): boolean {
  const { providerKey, existing, secretRefManagedProviders } = params;
  // SecretRef-managed providers never keep a literal key from disk.
  if (secretRefManagedProviders.has(providerKey)) {
    return false;
  }
  const storedKey = existing.apiKey;
  if (typeof storedKey !== "string" || storedKey.length === 0) {
    return false;
  }
  return !isNonSecretApiKeyMarker(storedKey, { includeEnvVarName: false });
}
/**
 * True when a previously-merged baseUrl should be kept. Providers with an
 * explicitly configured baseUrl never preserve the stored one, and a stored
 * baseUrl is dropped when both sides declare an api and the apis differ
 * (the old endpoint is considered stale).
 */
function shouldPreserveExistingBaseUrl(params: {
  providerKey: string;
  existing: ExistingProviderConfig;
  nextEntry: ProviderConfig;
  explicitBaseUrlProviders: ReadonlySet<string>;
}): boolean {
  const { providerKey, existing, nextEntry, explicitBaseUrlProviders } = params;
  // Explicit config wins outright.
  if (explicitBaseUrlProviders.has(providerKey)) {
    return false;
  }
  const storedBaseUrl = existing.baseUrl;
  if (typeof storedBaseUrl !== "string" || storedBaseUrl.length === 0) {
    return false;
  }
  const previousApi = resolveProviderApi(existing);
  const incomingApi = resolveProviderApi(nextEntry);
  // Preserve unless both apis are known and disagree.
  return !previousApi || !incomingApi || previousApi === incomingApi;
}
function mergeWithExistingProviderSecrets(params: {
nextProviders: Record<string, ProviderConfig>;
existingProviders: Record<string, NonNullable<ModelsConfig["providers"]>[string]>;
existingProviders: Record<string, ExistingProviderConfig>;
secretRefManagedProviders: ReadonlySet<string>;
explicitBaseUrlProviders: ReadonlySet<string>;
authoritativeImplicitBaseUrlProviders: ReadonlySet<string>;
}): Record<string, ProviderConfig> {
const {
nextProviders,
existingProviders,
secretRefManagedProviders,
explicitBaseUrlProviders,
authoritativeImplicitBaseUrlProviders,
} = params;
const { nextProviders, existingProviders, secretRefManagedProviders, explicitBaseUrlProviders } =
params;
const mergedProviders: Record<string, ProviderConfig> = {};
for (const [key, entry] of Object.entries(existingProviders)) {
mergedProviders[key] = entry;
}
for (const [key, newEntry] of Object.entries(nextProviders)) {
const existing = existingProviders[key] as
| (NonNullable<ModelsConfig["providers"]>[string] & {
apiKey?: string;
baseUrl?: string;
})
| undefined;
const existing = existingProviders[key];
if (!existing) {
mergedProviders[key] = newEntry;
continue;
}
const preserved: Record<string, unknown> = {};
if (
!secretRefManagedProviders.has(key) &&
typeof existing.apiKey === "string" &&
existing.apiKey &&
!isNonSecretApiKeyMarker(existing.apiKey, { includeEnvVarName: false })
) {
if (shouldPreserveExistingApiKey({ providerKey: key, existing, secretRefManagedProviders })) {
preserved.apiKey = existing.apiKey;
}
if (
!authoritativeImplicitBaseUrlProviders.has(key) &&
!explicitBaseUrlProviders.has(key) &&
typeof existing.baseUrl === "string" &&
existing.baseUrl
shouldPreserveExistingBaseUrl({
providerKey: key,
existing,
nextEntry: newEntry,
explicitBaseUrlProviders,
})
) {
preserved.baseUrl = existing.baseUrl;
}
@@ -231,7 +255,6 @@ async function resolveProvidersForMode(params: {
providers: Record<string, ProviderConfig>;
secretRefManagedProviders: ReadonlySet<string>;
explicitBaseUrlProviders: ReadonlySet<string>;
authoritativeImplicitBaseUrlProviders: ReadonlySet<string>;
}): Promise<Record<string, ProviderConfig>> {
if (params.mode !== "merge") {
return params.providers;
@@ -246,10 +269,9 @@ async function resolveProvidersForMode(params: {
>;
return mergeWithExistingProviderSecrets({
nextProviders: params.providers,
existingProviders,
existingProviders: existingProviders as Record<string, ExistingProviderConfig>,
secretRefManagedProviders: params.secretRefManagedProviders,
explicitBaseUrlProviders: params.explicitBaseUrlProviders,
authoritativeImplicitBaseUrlProviders: params.authoritativeImplicitBaseUrlProviders,
});
}
@@ -316,8 +338,7 @@ export async function ensureOpenClawModelsJson(
// through the full loadConfig() pipeline which applies these.
applyConfigEnvVars(cfg);
const { providers, authoritativeImplicitBaseUrlProviders } =
await resolveProvidersForModelsJson({ cfg, agentDir });
const providers = await resolveProvidersForModelsJson({ cfg, agentDir });
if (Object.keys(providers).length === 0) {
return { agentDir, wrote: false };
@@ -348,7 +369,6 @@ export async function ensureOpenClawModelsJson(
providers: normalizedProviders,
secretRefManagedProviders,
explicitBaseUrlProviders,
authoritativeImplicitBaseUrlProviders,
});
const next = `${JSON.stringify({ providers: mergedProviders }, null, 2)}\n`;
const existingRaw = await readRawFile(targetPath);

View File

@@ -803,7 +803,11 @@ describe("applyExtraParamsToAgent", () => {
});
});
it("normalizes anthropic tool_choice modes for kimi-coding endpoints", () => {
it.each([
{ input: { type: "auto" }, expected: "auto" },
{ input: { type: "none" }, expected: "none" },
{ input: { type: "required" }, expected: "required" },
])("normalizes anthropic tool_choice %j for kimi-coding endpoints", ({ input, expected }) => {
const payloads: Record<string, unknown>[] = [];
const baseStreamFn: StreamFn = (_model, _context, options) => {
const payload: Record<string, unknown> = {
@@ -814,7 +818,7 @@ describe("applyExtraParamsToAgent", () => {
input_schema: { type: "object", properties: {} },
},
],
tool_choice: { type: "auto" },
tool_choice: input,
};
options?.onPayload?.(payload);
payloads.push(payload);
@@ -834,7 +838,7 @@ describe("applyExtraParamsToAgent", () => {
void agent.streamFn?.(model, context, {});
expect(payloads).toHaveLength(1);
expect(payloads[0]?.tool_choice).toBe("auto");
expect(payloads[0]?.tool_choice).toBe(expected);
});
it("does not rewrite anthropic tool schema for non-kimi endpoints", () => {

View File

@@ -3,6 +3,10 @@ import type { SimpleStreamOptions } from "@mariozechner/pi-ai";
import { streamSimple } from "@mariozechner/pi-ai";
import type { ThinkLevel } from "../../auto-reply/thinking.js";
import type { OpenClawConfig } from "../../config/config.js";
import {
usesOpenAiFunctionAnthropicToolSchema,
usesOpenAiStringModeAnthropicToolChoice,
} from "../provider-capabilities.js";
import { log } from "./logger.js";
const OPENROUTER_APP_HEADERS: Record<string, string> = {
@@ -786,7 +790,7 @@ function createMoonshotThinkingWrapper(
};
}
function isKimiCodingAnthropicEndpoint(model: {
function requiresAnthropicToolPayloadCompatibility(model: {
api?: unknown;
provider?: unknown;
baseUrl?: unknown;
@@ -795,7 +799,7 @@ function isKimiCodingAnthropicEndpoint(model: {
return false;
}
if (typeof model.provider === "string" && model.provider.trim().toLowerCase() === "kimi-coding") {
if (typeof model.provider === "string" && usesOpenAiFunctionAnthropicToolSchema(model.provider)) {
return true;
}
@@ -814,7 +818,9 @@ function isKimiCodingAnthropicEndpoint(model: {
}
}
function normalizeKimiCodingToolDefinition(tool: unknown): Record<string, unknown> | undefined {
function normalizeOpenAiFunctionAnthropicToolDefinition(
tool: unknown,
): Record<string, unknown> | undefined {
if (!tool || typeof tool !== "object" || Array.isArray(tool)) {
return undefined;
}
@@ -852,7 +858,7 @@ function normalizeKimiCodingToolDefinition(tool: unknown): Record<string, unknow
};
}
function normalizeKimiCodingToolChoice(toolChoice: unknown): unknown {
function normalizeOpenAiStringModeAnthropicToolChoice(toolChoice: unknown): unknown {
if (!toolChoice || typeof toolChoice !== "object" || Array.isArray(toolChoice)) {
return toolChoice;
}
@@ -881,24 +887,43 @@ function normalizeKimiCodingToolChoice(toolChoice: unknown): unknown {
}
/**
* Kimi Coding's anthropic-messages endpoint expects OpenAI-style tool payloads
* (`tools[].function`) even when messages use Anthropic request framing.
* Some anthropic-messages providers accept Anthropic framing but still expect
* OpenAI-style tool payloads (`tools[].function`, string tool_choice modes).
*/
function createKimiCodingAnthropicToolSchemaWrapper(baseStreamFn: StreamFn | undefined): StreamFn {
function createAnthropicToolPayloadCompatibilityWrapper(
baseStreamFn: StreamFn | undefined,
): StreamFn {
const underlying = baseStreamFn ?? streamSimple;
return (model, context, options) => {
const originalOnPayload = options?.onPayload;
return underlying(model, context, {
...options,
onPayload: (payload) => {
if (payload && typeof payload === "object" && isKimiCodingAnthropicEndpoint(model)) {
if (
payload &&
typeof payload === "object" &&
requiresAnthropicToolPayloadCompatibility(model)
) {
const payloadObj = payload as Record<string, unknown>;
if (Array.isArray(payloadObj.tools)) {
if (
Array.isArray(payloadObj.tools) &&
usesOpenAiFunctionAnthropicToolSchema(
typeof model.provider === "string" ? model.provider : undefined,
)
) {
payloadObj.tools = payloadObj.tools
.map((tool) => normalizeKimiCodingToolDefinition(tool))
.map((tool) => normalizeOpenAiFunctionAnthropicToolDefinition(tool))
.filter((tool): tool is Record<string, unknown> => !!tool);
}
payloadObj.tool_choice = normalizeKimiCodingToolChoice(payloadObj.tool_choice);
if (
usesOpenAiStringModeAnthropicToolChoice(
typeof model.provider === "string" ? model.provider : undefined,
)
) {
payloadObj.tool_choice = normalizeOpenAiStringModeAnthropicToolChoice(
payloadObj.tool_choice,
);
}
}
originalOnPayload?.(payload);
},
@@ -1245,7 +1270,7 @@ export function applyExtraParamsToAgent(
agent.streamFn = createMoonshotThinkingWrapper(agent.streamFn, moonshotThinkingType);
}
agent.streamFn = createKimiCodingAnthropicToolSchemaWrapper(agent.streamFn);
agent.streamFn = createAnthropicToolPayloadCompatibilityWrapper(agent.streamFn);
if (provider === "openrouter") {
log.debug(`applying OpenRouter app attribution headers for ${provider}/${modelId}`);

View File

@@ -0,0 +1,23 @@
import { describe, expect, it } from "vitest";
import { resolveProviderCapabilities } from "./provider-capabilities.js";
// Unit tests for the centralized provider capability/quirk table.
describe("resolveProviderCapabilities", () => {
// Unknown/ordinary providers fall back to the native-Anthropic defaults.
it("returns native anthropic defaults for ordinary providers", () => {
expect(resolveProviderCapabilities("anthropic")).toEqual({
anthropicToolSchemaMode: "native",
anthropicToolChoiceMode: "native",
preserveAnthropicThinkingSignatures: true,
});
});
// "kimi-code" is an alias that must resolve to the same quirk set as
// "kimi-coding" via provider-id normalization.
it("normalizes kimi aliases to the same capability set", () => {
expect(resolveProviderCapabilities("kimi-coding")).toEqual(
resolveProviderCapabilities("kimi-code"),
);
expect(resolveProviderCapabilities("kimi-code")).toEqual({
anthropicToolSchemaMode: "openai-functions",
anthropicToolChoiceMode: "openai-string-modes",
preserveAnthropicThinkingSignatures: false,
});
});
});

View File

@@ -0,0 +1,41 @@
import { normalizeProviderId } from "./model-selection.js";
/**
 * Per-provider capability quirks for providers that speak the Anthropic
 * messages API but deviate from Anthropic's native payload conventions.
 */
export type ProviderCapabilities = {
anthropicToolSchemaMode: "native" | "openai-functions";
anthropicToolChoiceMode: "native" | "openai-string-modes";
preserveAnthropicThinkingSignatures: boolean;
};
// Baseline assumed for any provider without an explicit quirk entry.
const DEFAULT_PROVIDER_CAPABILITIES: ProviderCapabilities = {
anthropicToolSchemaMode: "native",
anthropicToolChoiceMode: "native",
preserveAnthropicThinkingSignatures: true,
};
// Quirk overrides keyed by NORMALIZED provider id (see normalizeProviderId).
const PROVIDER_CAPABILITIES: Record<string, Partial<ProviderCapabilities>> = {
"kimi-coding": {
anthropicToolSchemaMode: "openai-functions",
anthropicToolChoiceMode: "openai-string-modes",
preserveAnthropicThinkingSignatures: false,
},
};
/**
 * Resolves the full capability set for a provider, merging any quirk
 * overrides over the defaults. Aliases collapse via provider-id
 * normalization; null/undefined providers get the defaults.
 */
export function resolveProviderCapabilities(provider?: string | null): ProviderCapabilities {
const normalized = normalizeProviderId(provider ?? "");
return {
...DEFAULT_PROVIDER_CAPABILITIES,
...PROVIDER_CAPABILITIES[normalized],
};
}
/** Whether re-sent thinkingSignature blobs are safe for this provider. */
export function preservesAnthropicThinkingSignatures(provider?: string | null): boolean {
return resolveProviderCapabilities(provider).preserveAnthropicThinkingSignatures;
}
/** Whether anthropic-messages tool definitions must use OpenAI `tools[].function` shape. */
export function usesOpenAiFunctionAnthropicToolSchema(provider?: string | null): boolean {
return resolveProviderCapabilities(provider).anthropicToolSchemaMode === "openai-functions";
}
/** Whether anthropic-messages tool_choice must be flattened to OpenAI string modes. */
export function usesOpenAiStringModeAnthropicToolChoice(provider?: string | null): boolean {
return resolveProviderCapabilities(provider).anthropicToolChoiceMode === "openai-string-modes";
}

View File

@@ -78,57 +78,58 @@ describe("resolveTranscriptPolicy", () => {
expect(policy.sanitizeMode).toBe("full");
});
it("preserves thinking signatures for Anthropic provider (#32526)", () => {
const policy = resolveTranscriptPolicy({
it.each([
{
title: "Anthropic provider",
provider: "anthropic",
modelId: "claude-opus-4-5",
modelApi: "anthropic-messages",
});
expect(policy.preserveSignatures).toBe(true);
});
it("preserves thinking signatures for Bedrock Anthropic (#32526)", () => {
const policy = resolveTranscriptPolicy({
modelApi: "anthropic-messages" as const,
preserveSignatures: true,
},
{
title: "Bedrock Anthropic",
provider: "amazon-bedrock",
modelId: "us.anthropic.claude-opus-4-6-v1",
modelApi: "bedrock-converse-stream",
});
expect(policy.preserveSignatures).toBe(true);
});
it("does not preserve signatures for Google provider (#32526)", () => {
const policy = resolveTranscriptPolicy({
modelApi: "bedrock-converse-stream" as const,
preserveSignatures: true,
},
{
title: "Google provider",
provider: "google",
modelId: "gemini-2.0-flash",
modelApi: "google-generative-ai",
});
expect(policy.preserveSignatures).toBe(false);
});
it("does not preserve signatures for OpenAI provider (#32526)", () => {
const policy = resolveTranscriptPolicy({
modelApi: "google-generative-ai" as const,
preserveSignatures: false,
},
{
title: "OpenAI provider",
provider: "openai",
modelId: "gpt-4o",
modelApi: "openai",
});
expect(policy.preserveSignatures).toBe(false);
});
it("does not preserve signatures for Mistral provider (#32526)", () => {
const policy = resolveTranscriptPolicy({
modelApi: "openai" as const,
preserveSignatures: false,
},
{
title: "Mistral provider",
provider: "mistral",
modelId: "mistral-large-latest",
});
expect(policy.preserveSignatures).toBe(false);
});
it("does not preserve signatures for kimi-coding provider (#39798)", () => {
const policy = resolveTranscriptPolicy({
preserveSignatures: false,
},
{
title: "kimi-coding provider",
provider: "kimi-coding",
modelId: "k2p5",
modelApi: "anthropic-messages",
});
expect(policy.preserveSignatures).toBe(false);
modelApi: "anthropic-messages" as const,
preserveSignatures: false,
},
{
title: "kimi-code alias",
provider: "kimi-code",
modelId: "k2p5",
modelApi: "anthropic-messages" as const,
preserveSignatures: false,
},
])("sets preserveSignatures for $title (#32526, #39798)", ({ preserveSignatures, ...input }) => {
const policy = resolveTranscriptPolicy(input);
expect(policy.preserveSignatures).toBe(preserveSignatures);
});
it("enables turn-ordering and assistant-merge for strict OpenAI-compatible providers (#38962)", () => {

View File

@@ -1,5 +1,6 @@
import { normalizeProviderId } from "./model-selection.js";
import { isGoogleModelApi } from "./pi-embedded-helpers/google.js";
import { preservesAnthropicThinkingSignatures } from "./provider-capabilities.js";
import type { ToolCallIdMode } from "./tool-call-id.js";
export type TranscriptSanitizeMode = "full" | "images-only";
@@ -39,8 +40,6 @@ const OPENAI_MODEL_APIS = new Set([
]);
const OPENAI_PROVIDERS = new Set(["openai", "openai-codex"]);
const OPENAI_COMPAT_TURN_MERGE_EXCLUDED_PROVIDERS = new Set(["openrouter", "opencode"]);
// Providers that use anthropic-messages API but cannot handle re-sent thinkingSignature blobs (#39798)
const ANTHROPIC_API_SIGNATURE_EXCLUDED_PROVIDERS = new Set(["kimi-coding"]);
function isOpenAiApi(modelApi?: string | null): boolean {
if (!modelApi) {
@@ -125,7 +124,7 @@ export function resolveTranscriptPolicy(params: {
(!isOpenAi && sanitizeToolCallIds) || requiresOpenAiCompatibleToolIdSanitization,
toolCallIdMode,
repairToolUseResultPairing,
preserveSignatures: isAnthropic && !ANTHROPIC_API_SIGNATURE_EXCLUDED_PROVIDERS.has(provider),
preserveSignatures: isAnthropic && preservesAnthropicThinkingSignatures(provider),
sanitizeThoughtSignatures: isOpenAi ? undefined : sanitizeThoughtSignatures,
sanitizeThinkingSignatures: false,
dropThinkingBlocks,

View File

@@ -1,5 +1,8 @@
import { describe, expect, it } from "vitest";
import { collectAppcastSparkleVersionErrors } from "../scripts/release-check.ts";
import {
collectAppcastSparkleVersionErrors,
collectBundledExtensionRootDependencyGapErrors,
} from "../scripts/release-check.ts";
function makeItem(shortVersion: string, sparkleVersion: string): string {
return `<item><title>${shortVersion}</title><sparkle:shortVersionString>${shortVersion}</sparkle:shortVersionString><sparkle:version>${sparkleVersion}</sparkle:version></item>`;
@@ -26,3 +29,69 @@ describe("collectAppcastSparkleVersionErrors", () => {
expect(collectAppcastSparkleVersionErrors(xml)).toEqual([]);
});
});
// Tests for the bundled-extension root dependency mirror gate: the observed
// gap set must match the allowlist exactly in both directions.
describe("collectBundledExtensionRootDependencyGapErrors", () => {
// googlechat's gap is allowlisted and stays silent; feishu's is not and errors.
it("allows known gaps but still flags unallowlisted ones", () => {
expect(
collectBundledExtensionRootDependencyGapErrors({
rootPackage: { dependencies: {} },
extensions: [
{
id: "googlechat",
packageJson: {
dependencies: { "google-auth-library": "^1.0.0" },
openclaw: { install: { npmSpec: "@openclaw/googlechat" } },
},
},
{
id: "feishu",
packageJson: {
dependencies: { "@larksuiteoapi/node-sdk": "^1.59.0" },
openclaw: { install: { npmSpec: "@openclaw/feishu" } },
},
},
],
}),
).toEqual([
"bundled extension 'feishu' root dependency mirror drift | missing in root package: @larksuiteoapi/node-sdk | new gaps: @larksuiteoapi/node-sdk",
]);
});
// A dependency added on top of an allowlisted gap is reported as a new gap.
it("flags newly introduced bundled extension dependency gaps", () => {
expect(
collectBundledExtensionRootDependencyGapErrors({
rootPackage: { dependencies: {} },
extensions: [
{
id: "googlechat",
packageJson: {
dependencies: { "google-auth-library": "^1.0.0", undici: "^7.0.0" },
openclaw: { install: { npmSpec: "@openclaw/googlechat" } },
},
},
],
}),
).toEqual([
"bundled extension 'googlechat' root dependency mirror drift | missing in root package: google-auth-library, undici | new gaps: undici",
]);
});
// Once the root mirrors the dep, the allowlist entry itself becomes drift.
it("flags stale allowlist entries once a gap is resolved", () => {
expect(
collectBundledExtensionRootDependencyGapErrors({
rootPackage: { dependencies: { "google-auth-library": "^1.0.0" } },
extensions: [
{
id: "googlechat",
packageJson: {
dependencies: { "google-auth-library": "^1.0.0" },
openclaw: { install: { npmSpec: "@openclaw/googlechat" } },
},
},
],
}),
).toEqual([
"bundled extension 'googlechat' root dependency mirror drift | missing in root package: (none) | remove stale allowlist entries: google-auth-library",
]);
});
});