fix(models): normalize trailing @profile parsing across resolver paths

Co-authored-by: Vincent Koc <vincentkoc@ieee.org>
Co-authored-by: Marcus Castro <mcaxtr@gmail.com>
Co-authored-by: Brandon Wise <brandonawise@gmail.com>
This commit is contained in:
Peter Steinberger
2026-02-26 14:31:57 +01:00
parent 00e8e88a7c
commit 4b259ab81b
8 changed files with 208 additions and 14 deletions

View File

@@ -27,6 +27,7 @@ Docs: https://docs.openclaw.ai
- Podman/Default bind: change `run-openclaw-podman.sh` default gateway bind from `lan` to `loopback` and document explicit LAN opt-in with Control UI origin configuration. (#27491) thanks @robbyczgw-cla.
- Auto-reply/Streaming: suppress only exact `NO_REPLY` final replies while still filtering streaming partial sentinel fragments (`NO_`, `NO_RE`, `HEARTBEAT_...`) so substantive replies ending with `NO_REPLY` are delivered and partial silent tokens do not leak during streaming. (#19576) thanks @aldoeliacim.
- LINE/Inline directives auth: gate directive parsing (`/model`, `/think`, `/verbose`, `/reasoning`, `/queue`) on resolved authorization (`command.isAuthorizedSender`) so `commands.allowFrom`-authorized LINE senders are not silently stripped when raw `CommandAuthorized` is unset. Landed from contributor PR #27248 by @kevinWangSheng. (#27240)
- Models/Profile suffix parsing: centralize trailing `@profile` parsing and only treat `@` as a profile separator when it appears after the final `/`, preserving model IDs like `openai/@cf/...` and `openrouter/@preset/...` across `/model` directive parsing and allowlist model resolution, with regression coverage.
- Doctor/State integrity: ignore metadata-only slash routing sessions when checking recent missing transcripts so `openclaw doctor` no longer reports false-positive transcript-missing warnings for `*:slash:*` keys. (#27375) thanks @gumadeiras.
- Channels/Multi-account config: when adding a non-default channel account to a single-account top-level channel setup, move existing account-scoped top-level single-account values into `channels.<channel>.accounts.default` before writing the new account so the original account keeps working without duplicated account values at channel root; `openclaw doctor --fix` now repairs previously mixed channel account shapes the same way. (#27334) thanks @gumadeiras.
- Feishu/Doc tools: route `feishu_doc` and `feishu_app_scopes` through the active agent account context (with explicit `accountId` override support) so multi-account agents no longer default to the first configured app, with regression coverage for context routing and explicit override behavior. (#27338) thanks @AaronL725.

View File

@@ -0,0 +1,49 @@
import { describe, expect, it } from "vitest";

import { splitTrailingAuthProfile } from "./model-ref-profile.js";

describe("splitTrailingAuthProfile", () => {
  // Table-driven cases: each entry pins one parsing rule for the trailing
  // `@profile` separator (only meaningful after the final `/`).
  const cases: Array<{
    title: string;
    input: string;
    expected: { model: string; profile?: string };
  }> = [
    {
      title: "returns trimmed model when no profile suffix exists",
      input: " openai/gpt-5 ",
      expected: { model: "openai/gpt-5" },
    },
    {
      title: "splits trailing @profile suffix",
      input: "openai/gpt-5@work",
      expected: { model: "openai/gpt-5", profile: "work" },
    },
    {
      title: "keeps @-prefixed path segments in model ids",
      input: "openai/@cf/openai/gpt-oss-20b",
      expected: { model: "openai/@cf/openai/gpt-oss-20b" },
    },
    {
      title: "supports trailing profile override after @-prefixed path segments",
      input: "openai/@cf/openai/gpt-oss-20b@cf:default",
      expected: { model: "openai/@cf/openai/gpt-oss-20b", profile: "cf:default" },
    },
    {
      title: "keeps openrouter preset paths without profile override",
      input: "openrouter/@preset/kimi-2-5",
      expected: { model: "openrouter/@preset/kimi-2-5" },
    },
    {
      title: "supports openrouter preset profile overrides",
      input: "openrouter/@preset/kimi-2-5@work",
      expected: { model: "openrouter/@preset/kimi-2-5", profile: "work" },
    },
    {
      title: "does not split when suffix after @ contains slash",
      input: "provider/foo@bar/baz",
      expected: { model: "provider/foo@bar/baz" },
    },
  ];

  for (const { title, input, expected } of cases) {
    it(title, () => {
      expect(splitTrailingAuthProfile(input)).toEqual(expected);
    });
  }
});

View File

@@ -0,0 +1,23 @@
/**
 * Splits an optional trailing `@profile` auth-profile override off a raw
 * model reference string.
 *
 * `@` only acts as a separator when it appears after the final `/`, so
 * path-style model ids such as `openai/@cf/openai/gpt-oss-20b` or
 * `openrouter/@preset/...` are preserved intact. When either side of the
 * separator would be empty, the input is returned unchanged as the model.
 *
 * @param raw - Raw model reference, possibly with surrounding whitespace.
 * @returns The trimmed model id, plus the trimmed profile when one was split.
 */
export function splitTrailingAuthProfile(raw: string): {
  model: string;
  profile?: string;
} {
  const value = raw.trim();
  if (value.length === 0) {
    return { model: "" };
  }
  const atPos = value.lastIndexOf("@");
  // A leading "@" (position 0) or an "@" inside a path segment (i.e. at or
  // before the last "/") is part of the model id, not a profile separator.
  const isSeparator = atPos > 0 && atPos > value.lastIndexOf("/");
  if (!isSeparator) {
    return { model: value };
  }
  const candidateModel = value.slice(0, atPos).trim();
  const candidateProfile = value.slice(atPos + 1).trim();
  return candidateModel && candidateProfile
    ? { model: candidateModel, profile: candidateProfile }
    : { model: value };
}

View File

@@ -304,6 +304,30 @@ describe("model-selection", () => {
ref: { provider: "anthropic", model: "claude-sonnet-4-6" },
});
});
it("strips trailing auth profile suffix before allowlist matching", () => {
const cfg: OpenClawConfig = {
agents: {
defaults: {
models: {
"openai/@cf/openai/gpt-oss-20b": {},
},
},
},
} as OpenClawConfig;
const result = resolveAllowedModelRef({
cfg,
catalog: [],
raw: "openai/@cf/openai/gpt-oss-20b@cf:default",
defaultProvider: "anthropic",
});
expect(result).toEqual({
key: "openai/@cf/openai/gpt-oss-20b",
ref: { provider: "openai", model: "@cf/openai/gpt-oss-20b" },
});
});
});
describe("resolveModelRefFromString", () => {
@@ -332,6 +356,78 @@ describe("model-selection", () => {
});
expect(resolved?.ref).toEqual({ provider: "openai", model: "gpt-4" });
});
it("strips trailing profile suffix for simple model refs", () => {
const resolved = resolveModelRefFromString({
raw: "gpt-5@myprofile",
defaultProvider: "openai",
});
expect(resolved?.ref).toEqual({ provider: "openai", model: "gpt-5" });
});
it("strips trailing profile suffix for provider/model refs", () => {
const resolved = resolveModelRefFromString({
raw: "google/gemini-flash-latest@google:bevfresh",
defaultProvider: "anthropic",
});
expect(resolved?.ref).toEqual({
provider: "google",
model: "gemini-flash-latest",
});
});
it("preserves Cloudflare @cf model segments", () => {
const resolved = resolveModelRefFromString({
raw: "openai/@cf/openai/gpt-oss-20b",
defaultProvider: "anthropic",
});
expect(resolved?.ref).toEqual({
provider: "openai",
model: "@cf/openai/gpt-oss-20b",
});
});
it("preserves OpenRouter @preset model segments", () => {
const resolved = resolveModelRefFromString({
raw: "openrouter/@preset/kimi-2-5",
defaultProvider: "anthropic",
});
expect(resolved?.ref).toEqual({
provider: "openrouter",
model: "@preset/kimi-2-5",
});
});
it("splits trailing profile suffix after OpenRouter preset paths", () => {
const resolved = resolveModelRefFromString({
raw: "openrouter/@preset/kimi-2-5@work",
defaultProvider: "anthropic",
});
expect(resolved?.ref).toEqual({
provider: "openrouter",
model: "@preset/kimi-2-5",
});
});
it("strips profile suffix before alias resolution", () => {
const index = {
byAlias: new Map([
["kimi", { alias: "kimi", ref: { provider: "nvidia", model: "moonshotai/kimi-k2.5" } }],
]),
byKey: new Map(),
};
const resolved = resolveModelRefFromString({
raw: "kimi@nvidia:default",
defaultProvider: "openai",
aliasIndex: index,
});
expect(resolved?.ref).toEqual({
provider: "nvidia",
model: "moonshotai/kimi-k2.5",
});
expect(resolved?.alias).toBe("kimi");
});
});
describe("resolveConfiguredModelRef", () => {

View File

@@ -4,6 +4,7 @@ import { createSubsystemLogger } from "../logging/subsystem.js";
import { resolveAgentConfig, resolveAgentEffectiveModelPrimary } from "./agent-scope.js";
import { DEFAULT_MODEL, DEFAULT_PROVIDER } from "./defaults.js";
import type { ModelCatalogEntry } from "./model-catalog.js";
import { splitTrailingAuthProfile } from "./model-ref-profile.js";
import { normalizeGoogleModelId } from "./models-config.providers.js";
const log = createSubsystemLogger("model-selection");
@@ -283,18 +284,18 @@ export function resolveModelRefFromString(params: {
defaultProvider: string;
aliasIndex?: ModelAliasIndex;
}): { ref: ModelRef; alias?: string } | null {
const trimmed = params.raw.trim();
if (!trimmed) {
const { model } = splitTrailingAuthProfile(params.raw);
if (!model) {
return null;
}
if (!trimmed.includes("/")) {
const aliasKey = normalizeAliasKey(trimmed);
if (!model.includes("/")) {
const aliasKey = normalizeAliasKey(model);
const aliasMatch = params.aliasIndex?.byAlias.get(aliasKey);
if (aliasMatch) {
return { ref: aliasMatch.ref, alias: aliasMatch.alias };
}
}
const parsed = parseModelRef(trimmed, params.defaultProvider);
const parsed = parseModelRef(model, params.defaultProvider);
if (!parsed) {
return null;
}

View File

@@ -50,6 +50,20 @@ describe("extractModelDirective", () => {
expect(result.rawProfile).toBe("work");
});
it("keeps Cloudflare @cf path segments inside model ids", () => {
const result = extractModelDirective("/model openai/@cf/openai/gpt-oss-20b");
expect(result.hasDirective).toBe(true);
expect(result.rawModel).toBe("openai/@cf/openai/gpt-oss-20b");
expect(result.rawProfile).toBeUndefined();
});
it("allows profile overrides after Cloudflare @cf path segments", () => {
const result = extractModelDirective("/model openai/@cf/openai/gpt-oss-20b@cf:default");
expect(result.hasDirective).toBe(true);
expect(result.rawModel).toBe("openai/@cf/openai/gpt-oss-20b");
expect(result.rawProfile).toBe("cf:default");
});
it("returns no directive for plain text", () => {
const result = extractModelDirective("hello world");
expect(result.hasDirective).toBe(false);

View File

@@ -1,3 +1,4 @@
import { splitTrailingAuthProfile } from "../agents/model-ref-profile.js";
import { escapeRegExp } from "../utils.js";
export function extractModelDirective(
@@ -34,15 +35,9 @@ export function extractModelDirective(
let rawModel = raw;
let rawProfile: string | undefined;
if (raw) {
const atIndex = raw.lastIndexOf("@");
if (atIndex > 0) {
const candidateModel = raw.slice(0, atIndex).trim();
const candidateProfile = raw.slice(atIndex + 1).trim();
if (candidateModel && candidateProfile && !candidateProfile.includes("/")) {
rawModel = candidateModel;
rawProfile = candidateProfile;
}
}
const split = splitTrailingAuthProfile(raw);
rawModel = split.model;
rawProfile = split.profile;
}
const cleaned = match ? body.replace(match[0], " ").replace(/\s+/g, " ").trim() : body.trim();

View File

@@ -172,6 +172,21 @@ describe("/model chat UX", () => {
isDefault: false,
});
});
it("keeps cloudflare @cf model segments for exact selections", () => {
const resolved = resolveModelSelectionForCommand({
command: "/model openai/@cf/openai/gpt-oss-20b",
allowedModelKeys: new Set(["openai/@cf/openai/gpt-oss-20b"]),
allowedModelCatalog: [],
});
expect(resolved.errorText).toBeUndefined();
expect(resolved.modelSelection).toEqual({
provider: "openai",
model: "@cf/openai/gpt-oss-20b",
isDefault: false,
});
});
});
describe("handleDirectiveOnly model persist behavior (fixes #1435)", () => {