feat: modularize provider plugin architecture

This commit is contained in:
Peter Steinberger
2026-03-12 22:24:22 +00:00
parent bf89947a8e
commit d83491e751
41 changed files with 1734 additions and 260 deletions

View File

@@ -0,0 +1,3 @@
# Ollama Provider
Bundled provider plugin for Ollama discovery and setup.

115
extensions/ollama/index.ts Normal file
View File

@@ -0,0 +1,115 @@
import {
buildOllamaProvider,
emptyPluginConfigSchema,
ensureOllamaModelPulled,
OLLAMA_DEFAULT_BASE_URL,
promptAndConfigureOllama,
type OpenClawPluginApi,
type ProviderAuthContext,
type ProviderAuthResult,
type ProviderDiscoveryContext,
} from "openclaw/plugin-sdk/core";
// Canonical provider id used for registration, credential lookup, and model refs.
const PROVIDER_ID = "ollama";
// Placeholder credential for local instances; a real key is not required for
// local Ollama. NOTE(review): presumably treated as a marker value downstream — confirm.
const DEFAULT_API_KEY = "ollama-local";

/**
 * Bundled Ollama provider plugin.
 *
 * Registers the "ollama" provider with:
 * - a custom auth method that runs the interactive Ollama setup and stores a
 *   placeholder API-key profile,
 * - late-order implicit discovery that honors explicit config first and
 *   otherwise probes a local/remote Ollama instance,
 * - onboarding / model-picker wizard metadata, and
 * - a post-selection hook that ensures the chosen Ollama model is pulled.
 */
const ollamaPlugin = {
  id: "ollama",
  name: "Ollama Provider",
  description: "Bundled Ollama provider plugin",
  configSchema: emptyPluginConfigSchema(),
  register(api: OpenClawPluginApi) {
    api.registerProvider({
      id: PROVIDER_ID,
      label: "Ollama",
      docsPath: "/providers/ollama",
      envVars: ["OLLAMA_API_KEY"],
      auth: [
        {
          id: "local",
          label: "Ollama",
          hint: "Cloud and local open models",
          kind: "custom",
          // Prompt-driven setup; returns one profile plus a config patch and
          // the selected default model (namespaced as "ollama/<model>").
          run: async (ctx: ProviderAuthContext): Promise<ProviderAuthResult> => {
            const result = await promptAndConfigureOllama({
              cfg: ctx.config,
              prompter: ctx.prompter,
            });
            return {
              profiles: [
                {
                  profileId: "ollama:default",
                  credential: {
                    type: "api_key",
                    provider: PROVIDER_ID,
                    key: DEFAULT_API_KEY,
                  },
                },
              ],
              configPatch: result.config,
              defaultModel: `ollama/${result.defaultModelId}`,
            };
          },
        },
      ],
      discovery: {
        order: "late",
        run: async (ctx: ProviderDiscoveryContext) => {
          const explicit = ctx.config.models?.providers?.ollama;
          const hasExplicitModels = Array.isArray(explicit?.models) && explicit.models.length > 0;
          const ollamaKey = ctx.resolveProviderApiKey(PROVIDER_ID).apiKey;
          // An explicit model list in config wins: reuse it as-is, only
          // normalizing the base URL (trim + strip trailing slashes) and
          // filling in api/apiKey defaults.
          if (hasExplicitModels && explicit) {
            return {
              provider: {
                ...explicit,
                baseUrl:
                  typeof explicit.baseUrl === "string" && explicit.baseUrl.trim()
                    ? explicit.baseUrl.trim().replace(/\/+$/, "")
                    : OLLAMA_DEFAULT_BASE_URL,
                api: explicit.api ?? "ollama",
                apiKey: ollamaKey ?? explicit.apiKey ?? DEFAULT_API_KEY,
              },
            };
          }
          // Otherwise probe the instance for models; stay quiet when nothing
          // suggests the user intends to use Ollama (no key, no explicit entry).
          const provider = await buildOllamaProvider(explicit?.baseUrl, {
            quiet: !ollamaKey && !explicit,
          });
          // No models found and no hint of intent -> do not register the provider.
          if (provider.models.length === 0 && !ollamaKey && !explicit?.apiKey) {
            return null;
          }
          return {
            provider: {
              ...provider,
              apiKey: ollamaKey ?? explicit?.apiKey ?? DEFAULT_API_KEY,
            },
          };
        },
      },
      wizard: {
        onboarding: {
          choiceId: "ollama",
          choiceLabel: "Ollama",
          choiceHint: "Cloud and local open models",
          groupId: "ollama",
          groupLabel: "Ollama",
          groupHint: "Cloud and local open models",
          methodId: "local",
        },
        modelPicker: {
          label: "Ollama (custom)",
          hint: "Detect models from a local or remote Ollama instance",
          methodId: "local",
        },
      },
      // After any wizard model selection, make sure Ollama-namespaced models
      // are actually pulled locally before use.
      onModelSelected: async ({ config, model, prompter }) => {
        if (!model.startsWith("ollama/")) {
          return;
        }
        await ensureOllamaModelPulled({ config, prompter });
      },
    });
  },
};
export default ollamaPlugin;

View File

@@ -0,0 +1,9 @@
{
"id": "ollama",
"providers": ["ollama"],
"configSchema": {
"type": "object",
"additionalProperties": false,
"properties": {}
}
}

View File

@@ -0,0 +1,12 @@
{
"name": "@openclaw/ollama-provider",
"version": "2026.3.12",
"private": true,
"description": "OpenClaw Ollama provider plugin",
"type": "module",
"openclaw": {
"extensions": [
"./index.ts"
]
}
}

View File

@@ -0,0 +1,3 @@
# SGLang Provider
Bundled provider plugin for SGLang discovery and setup.

View File

@@ -0,0 +1,92 @@
import {
buildSglangProvider,
emptyPluginConfigSchema,
promptAndConfigureOpenAICompatibleSelfHostedProvider,
type OpenClawPluginApi,
type ProviderAuthContext,
type ProviderAuthResult,
type ProviderDiscoveryContext,
} from "openclaw/plugin-sdk/core";
const PROVIDER_ID = "sglang";
const DEFAULT_BASE_URL = "http://127.0.0.1:30000/v1";

/**
 * Auth flow: prompt for the SGLang server URL, API key, and model, then
 * return the resulting credential profile, config patch, and default model.
 */
const runSglangAuth = async (ctx: ProviderAuthContext): Promise<ProviderAuthResult> => {
  const configured = await promptAndConfigureOpenAICompatibleSelfHostedProvider({
    cfg: ctx.config,
    prompter: ctx.prompter,
    providerId: PROVIDER_ID,
    providerLabel: "SGLang",
    defaultBaseUrl: DEFAULT_BASE_URL,
    defaultApiKeyEnvVar: "SGLANG_API_KEY",
    modelPlaceholder: "Qwen/Qwen3-8B",
  });
  return {
    profiles: [
      {
        profileId: configured.profileId,
        credential: configured.credential,
      },
    ],
    configPatch: configured.config,
    defaultModel: configured.modelRef,
  };
};

/**
 * Late-order implicit discovery: only runs when no explicit sglang provider
 * is configured and an API key can be resolved.
 */
const runSglangDiscovery = async (ctx: ProviderDiscoveryContext) => {
  if (ctx.config.models?.providers?.sglang) {
    return null;
  }
  const resolved = ctx.resolveProviderApiKey(PROVIDER_ID);
  if (!resolved.apiKey) {
    return null;
  }
  const built = await buildSglangProvider({ apiKey: resolved.discoveryApiKey });
  return {
    provider: {
      ...built,
      apiKey: resolved.apiKey,
    },
  };
};

/** Bundled SGLang provider plugin: registers auth, discovery, and wizard hooks. */
const sglangPlugin = {
  id: "sglang",
  name: "SGLang Provider",
  description: "Bundled SGLang provider plugin",
  configSchema: emptyPluginConfigSchema(),
  register(api: OpenClawPluginApi) {
    api.registerProvider({
      id: PROVIDER_ID,
      label: "SGLang",
      docsPath: "/providers/sglang",
      envVars: ["SGLANG_API_KEY"],
      auth: [
        {
          id: "custom",
          label: "SGLang",
          hint: "Fast self-hosted OpenAI-compatible server",
          kind: "custom",
          run: runSglangAuth,
        },
      ],
      discovery: {
        order: "late",
        run: runSglangDiscovery,
      },
      wizard: {
        onboarding: {
          choiceId: "sglang",
          choiceLabel: "SGLang",
          choiceHint: "Fast self-hosted OpenAI-compatible server",
          groupId: "sglang",
          groupLabel: "SGLang",
          groupHint: "Fast self-hosted server",
          methodId: "custom",
        },
        modelPicker: {
          label: "SGLang (custom)",
          hint: "Enter SGLang URL + API key + model",
          methodId: "custom",
        },
      },
    });
  },
};
export default sglangPlugin;

View File

@@ -0,0 +1,9 @@
{
"id": "sglang",
"providers": ["sglang"],
"configSchema": {
"type": "object",
"additionalProperties": false,
"properties": {}
}
}

View File

@@ -0,0 +1,12 @@
{
"name": "@openclaw/sglang-provider",
"version": "2026.3.12",
"private": true,
"description": "OpenClaw SGLang provider plugin",
"type": "module",
"openclaw": {
"extensions": [
"./index.ts"
]
}
}

View File

@@ -0,0 +1,3 @@
# vLLM Provider
Bundled provider plugin for vLLM discovery and setup.

92
extensions/vllm/index.ts Normal file
View File

@@ -0,0 +1,92 @@
import {
buildVllmProvider,
emptyPluginConfigSchema,
promptAndConfigureOpenAICompatibleSelfHostedProvider,
type OpenClawPluginApi,
type ProviderAuthContext,
type ProviderAuthResult,
type ProviderDiscoveryContext,
} from "openclaw/plugin-sdk/core";
const PROVIDER_ID = "vllm";
const DEFAULT_BASE_URL = "http://127.0.0.1:8000/v1";

/** Bundled vLLM provider plugin: auth prompts, model discovery, and wizard hooks. */
const vllmPlugin = {
  id: "vllm",
  name: "vLLM Provider",
  description: "Bundled vLLM provider plugin",
  configSchema: emptyPluginConfigSchema(),
  register(api: OpenClawPluginApi) {
    api.registerProvider({
      id: PROVIDER_ID,
      label: "vLLM",
      docsPath: "/providers/vllm",
      envVars: ["VLLM_API_KEY"],
      auth: [
        {
          id: "custom",
          label: "vLLM",
          hint: "Local/self-hosted OpenAI-compatible server",
          kind: "custom",
          // Interactive setup: base URL + API key + model id.
          run: async (authCtx: ProviderAuthContext): Promise<ProviderAuthResult> => {
            const configured = await promptAndConfigureOpenAICompatibleSelfHostedProvider({
              cfg: authCtx.config,
              prompter: authCtx.prompter,
              providerId: PROVIDER_ID,
              providerLabel: "vLLM",
              defaultBaseUrl: DEFAULT_BASE_URL,
              defaultApiKeyEnvVar: "VLLM_API_KEY",
              modelPlaceholder: "meta-llama/Meta-Llama-3-8B-Instruct",
            });
            return {
              profiles: [
                {
                  profileId: configured.profileId,
                  credential: configured.credential,
                },
              ],
              configPatch: configured.config,
              defaultModel: configured.modelRef,
            };
          },
        },
      ],
      discovery: {
        order: "late",
        // Implicit discovery is skipped when an explicit vllm provider is
        // configured or no API key resolves.
        run: async (discoveryCtx: ProviderDiscoveryContext) => {
          if (discoveryCtx.config.models?.providers?.vllm) {
            return null;
          }
          const resolved = discoveryCtx.resolveProviderApiKey(PROVIDER_ID);
          if (!resolved.apiKey) {
            return null;
          }
          const built = await buildVllmProvider({ apiKey: resolved.discoveryApiKey });
          return {
            provider: {
              ...built,
              apiKey: resolved.apiKey,
            },
          };
        },
      },
      wizard: {
        onboarding: {
          choiceId: "vllm",
          choiceLabel: "vLLM",
          choiceHint: "Local/self-hosted OpenAI-compatible server",
          groupId: "vllm",
          groupLabel: "vLLM",
          groupHint: "Local/self-hosted OpenAI-compatible",
          methodId: "custom",
        },
        modelPicker: {
          label: "vLLM (custom)",
          hint: "Enter vLLM URL + API key + model",
          methodId: "custom",
        },
      },
    });
  },
};
export default vllmPlugin;

View File

@@ -0,0 +1,9 @@
{
"id": "vllm",
"providers": ["vllm"],
"configSchema": {
"type": "object",
"additionalProperties": false,
"properties": {}
}
}

View File

@@ -0,0 +1,12 @@
{
"name": "@openclaw/vllm-provider",
"version": "2026.3.12",
"private": true,
"description": "OpenClaw vLLM provider plugin",
"type": "module",
"openclaw": {
"extensions": [
"./index.ts"
]
}
}

View File

@@ -35,6 +35,7 @@ export const PROVIDER_ENV_API_KEY_CANDIDATES: Record<string, string[]> = {
qianfan: ["QIANFAN_API_KEY"],
modelstudio: ["MODELSTUDIO_API_KEY"],
ollama: ["OLLAMA_API_KEY"],
sglang: ["SGLANG_API_KEY"],
vllm: ["VLLM_API_KEY"],
kilocode: ["KILOCODE_API_KEY"],
};

View File

@@ -31,6 +31,16 @@ const log = createSubsystemLogger("agents/model-providers");
const OLLAMA_SHOW_CONCURRENCY = 8;
const OLLAMA_SHOW_MAX_MODELS = 200;
// Default local SGLang endpoint (OpenAI-compatible /v1 API).
const SGLANG_BASE_URL = "http://127.0.0.1:30000/v1";
// Fallback limits applied to every discovered model; the /models endpoint
// does not report context/output sizes.
const SGLANG_DEFAULT_CONTEXT_WINDOW = 128000;
const SGLANG_DEFAULT_MAX_TOKENS = 8192;
// Self-hosted servers have no metered billing, so all costs are zero.
const SGLANG_DEFAULT_COST = {
  input: 0,
  output: 0,
  cacheRead: 0,
  cacheWrite: 0,
};
const VLLM_BASE_URL = "http://127.0.0.1:8000/v1";
const VLLM_DEFAULT_CONTEXT_WINDOW = 128000;
const VLLM_DEFAULT_MAX_TOKENS = 8192;
@@ -47,6 +57,12 @@ type VllmModelsResponse = {
}>;
};
/** Shape of the model-list response returned by an SGLang `/models` endpoint. */
type SglangModelsResponse = {
  data?: Array<{
    id?: string;
  }>;
};
async function discoverOllamaModels(
baseUrl?: string,
opts?: { quiet?: boolean },
@@ -145,6 +161,55 @@ async function discoverVllmModels(
}
}
/**
 * Query an SGLang server's OpenAI-compatible `/models` endpoint and map the
 * response into model definitions with conservative default limits.
 *
 * Never throws: unreachable servers, non-OK responses, and empty listings all
 * log a warning and yield an empty list. Skipped entirely under test runners
 * to avoid network access.
 *
 * @param baseUrl server base URL; trailing slashes are stripped.
 * @param apiKey  optional bearer token sent as an Authorization header.
 */
async function discoverSglangModels(
  baseUrl: string,
  apiKey?: string,
): Promise<ModelDefinitionConfig[]> {
  if (process.env.VITEST || process.env.NODE_ENV === "test") {
    return [];
  }
  const trimmedBaseUrl = baseUrl.trim().replace(/\/+$/, "");
  const url = `${trimmedBaseUrl}/models`;
  try {
    const trimmedApiKey = apiKey?.trim();
    const response = await fetch(url, {
      headers: trimmedApiKey ? { Authorization: `Bearer ${trimmedApiKey}` } : undefined,
      // Bounded wait so startup discovery cannot hang on a dead server.
      signal: AbortSignal.timeout(5000),
    });
    if (!response.ok) {
      log.warn(`Failed to discover SGLang models: ${response.status}`);
      return [];
    }
    const data = (await response.json()) as SglangModelsResponse;
    const models = data.data ?? [];
    if (models.length === 0) {
      log.warn("No SGLang models found on local instance");
      return [];
    }
    // Single pass: trim ids, drop blank entries, and build the definitions
    // (replaces the previous map -> filter -> map chain).
    return models.flatMap((model) => {
      const modelId = typeof model.id === "string" ? model.id.trim() : "";
      if (!modelId) {
        return [];
      }
      return [
        {
          id: modelId,
          name: modelId,
          reasoning: isReasoningModelHeuristic(modelId),
          input: ["text"],
          cost: SGLANG_DEFAULT_COST,
          contextWindow: SGLANG_DEFAULT_CONTEXT_WINDOW,
          maxTokens: SGLANG_DEFAULT_MAX_TOKENS,
        } satisfies ModelDefinitionConfig,
      ];
    });
  } catch (error) {
    log.warn(`Failed to discover SGLang models: ${String(error)}`);
    return [];
  }
}
export async function buildVeniceProvider(): Promise<ProviderConfig> {
const models = await discoverVeniceModels();
return {
@@ -200,6 +265,19 @@ export async function buildVllmProvider(params?: {
};
}
/**
 * Build the SGLang provider config: normalize the base URL (default local
 * endpoint, trailing slashes stripped) and discover the models the server
 * exposes via its OpenAI-compatible API.
 */
export async function buildSglangProvider(params?: {
  baseUrl?: string;
  apiKey?: string;
}): Promise<ProviderConfig> {
  const requestedBase = params?.baseUrl?.trim();
  const baseUrl = (requestedBase ? requestedBase : SGLANG_BASE_URL).replace(/\/+$/, "");
  const models = await discoverSglangModels(baseUrl, params?.apiKey);
  return {
    baseUrl,
    api: "openai-completions",
    models,
  };
}
/**
* Build the Kilocode provider with dynamic model discovery from the gateway
* API. Falls back to the static catalog on failure.

View File

@@ -15,10 +15,8 @@ import {
import {
buildHuggingfaceProvider,
buildKilocodeProviderWithDiscovery,
buildOllamaProvider,
buildVeniceProvider,
buildVercelAiGatewayProvider,
buildVllmProvider,
resolveOllamaApiBase,
} from "./models-config.providers.discovery.js";
import {
@@ -57,9 +55,13 @@ export {
QIANFAN_DEFAULT_MODEL_ID,
XIAOMI_DEFAULT_MODEL_ID,
} from "./models-config.providers.static.js";
import {
groupPluginDiscoveryProvidersByOrder,
normalizePluginDiscoveryResult,
resolvePluginDiscoveryProviders,
} from "../plugins/provider-discovery.js";
import {
MINIMAX_OAUTH_MARKER,
OLLAMA_LOCAL_AUTH_MARKER,
QWEN_OAUTH_MARKER,
isNonSecretApiKeyMarker,
resolveNonEnvSecretRefApiKeyMarker,
@@ -587,6 +589,7 @@ type ImplicitProviderParams = {
agentDir: string;
config?: OpenClawConfig;
env?: NodeJS.ProcessEnv;
workspaceDir?: string;
explicitProviders?: Record<string, ProviderConfig> | null;
};
@@ -796,56 +799,35 @@ async function resolveCloudflareAiGatewayImplicitProvider(
return undefined;
}
async function resolveOllamaImplicitProvider(
async function resolvePluginImplicitProviders(
ctx: ImplicitProviderContext,
order: import("../plugins/types.js").ProviderDiscoveryOrder,
): Promise<Record<string, ProviderConfig> | undefined> {
const ollamaKey = ctx.resolveProviderApiKey("ollama").apiKey;
const explicitOllama = ctx.explicitProviders?.ollama;
const hasExplicitModels =
Array.isArray(explicitOllama?.models) && explicitOllama.models.length > 0;
if (hasExplicitModels && explicitOllama) {
return {
ollama: {
...explicitOllama,
baseUrl: resolveOllamaApiBase(explicitOllama.baseUrl),
api: explicitOllama.api ?? "ollama",
apiKey: ollamaKey ?? explicitOllama.apiKey ?? OLLAMA_LOCAL_AUTH_MARKER,
},
};
}
const ollamaBaseUrl = explicitOllama?.baseUrl;
const hasExplicitOllamaConfig = Boolean(explicitOllama);
const ollamaProvider = await buildOllamaProvider(ollamaBaseUrl, {
quiet: !ollamaKey && !hasExplicitOllamaConfig,
const providers = resolvePluginDiscoveryProviders({
config: ctx.config,
workspaceDir: ctx.workspaceDir,
env: ctx.env,
});
if (ollamaProvider.models.length === 0 && !ollamaKey && !explicitOllama?.apiKey) {
return undefined;
const byOrder = groupPluginDiscoveryProvidersByOrder(providers);
const discovered: Record<string, ProviderConfig> = {};
for (const provider of byOrder[order]) {
const result = await provider.discovery?.run({
config: ctx.config ?? {},
agentDir: ctx.agentDir,
workspaceDir: ctx.workspaceDir,
env: ctx.env,
resolveProviderApiKey: (providerId) =>
ctx.resolveProviderApiKey(providerId?.trim() || provider.id),
});
mergeImplicitProviderSet(
discovered,
normalizePluginDiscoveryResult({
provider,
result,
}),
);
}
return {
ollama: {
...ollamaProvider,
apiKey: ollamaKey ?? explicitOllama?.apiKey ?? OLLAMA_LOCAL_AUTH_MARKER,
},
};
}
async function resolveVllmImplicitProvider(
ctx: ImplicitProviderContext,
): Promise<Record<string, ProviderConfig> | undefined> {
if (ctx.explicitProviders?.vllm) {
return undefined;
}
const { apiKey: vllmKey, discoveryApiKey } = ctx.resolveProviderApiKey("vllm");
if (!vllmKey) {
return undefined;
}
return {
vllm: {
...(await buildVllmProvider({ apiKey: discoveryApiKey })),
apiKey: vllmKey,
},
};
return Object.keys(discovered).length > 0 ? discovered : undefined;
}
export async function resolveImplicitProviders(
@@ -882,15 +864,17 @@ export async function resolveImplicitProviders(
for (const loader of SIMPLE_IMPLICIT_PROVIDER_LOADERS) {
mergeImplicitProviderSet(providers, await loader(context));
}
mergeImplicitProviderSet(providers, await resolvePluginImplicitProviders(context, "simple"));
for (const loader of PROFILE_IMPLICIT_PROVIDER_LOADERS) {
mergeImplicitProviderSet(providers, await loader(context));
}
mergeImplicitProviderSet(providers, await resolvePluginImplicitProviders(context, "profile"));
for (const loader of PAIRED_IMPLICIT_PROVIDER_LOADERS) {
mergeImplicitProviderSet(providers, await loader(context));
}
mergeImplicitProviderSet(providers, await resolvePluginImplicitProviders(context, "paired"));
mergeImplicitProviderSet(providers, await resolveCloudflareAiGatewayImplicitProvider(context));
mergeImplicitProviderSet(providers, await resolveOllamaImplicitProvider(context));
mergeImplicitProviderSet(providers, await resolveVllmImplicitProvider(context));
mergeImplicitProviderSet(providers, await resolvePluginImplicitProviders(context, "late"));
if (!providers["github-copilot"]) {
const implicitCopilot = await resolveImplicitCopilotProvider({

View File

@@ -266,6 +266,7 @@ export async function agentsAddCommand(
prompter,
store: authStore,
includeSkip: true,
config: nextConfig,
});
const authResult = await applyAuthChoice({

View File

@@ -1,11 +1,19 @@
import { describe, expect, it } from "vitest";
import { describe, expect, it, vi } from "vitest";
import type { AuthProfileStore } from "../agents/auth-profiles.js";
import type { ProviderWizardOption } from "../plugins/provider-wizard.js";
import {
buildAuthChoiceGroups,
buildAuthChoiceOptions,
formatAuthChoiceChoicesForCli,
} from "./auth-choice-options.js";
const resolveProviderWizardOptions = vi.hoisted(() =>
vi.fn<() => ProviderWizardOption[]>(() => []),
);
vi.mock("../plugins/provider-wizard.js", () => ({
resolveProviderWizardOptions,
}));
const EMPTY_STORE: AuthProfileStore = { version: 1, profiles: {} };
function getOptions(includeSkip = false) {
@@ -17,6 +25,29 @@ function getOptions(includeSkip = false) {
describe("buildAuthChoiceOptions", () => {
it("includes core and provider-specific auth choices", () => {
resolveProviderWizardOptions.mockReturnValue([
{
value: "ollama",
label: "Ollama",
hint: "Cloud and local open models",
groupId: "ollama",
groupLabel: "Ollama",
},
{
value: "vllm",
label: "vLLM",
hint: "Local/self-hosted OpenAI-compatible server",
groupId: "vllm",
groupLabel: "vLLM",
},
{
value: "sglang",
label: "SGLang",
hint: "Fast self-hosted OpenAI-compatible server",
groupId: "sglang",
groupLabel: "SGLang",
},
]);
const options = getOptions();
for (const value of [
@@ -24,9 +55,9 @@ describe("buildAuthChoiceOptions", () => {
"token",
"zai-api-key",
"xiaomi-api-key",
"minimax-api",
"minimax-api-key-cn",
"minimax-api-lightning",
"minimax-global-api",
"minimax-cn-api",
"minimax-global-oauth",
"moonshot-api-key",
"moonshot-api-key-cn",
"kimi-code-api-key",
@@ -43,6 +74,7 @@ describe("buildAuthChoiceOptions", () => {
"vllm",
"opencode-go",
"ollama",
"sglang",
]) {
expect(options.some((opt) => opt.value === value)).toBe(true);
}
@@ -96,6 +128,15 @@ describe("buildAuthChoiceOptions", () => {
});
it("shows Ollama in grouped provider selection", () => {
resolveProviderWizardOptions.mockReturnValue([
{
value: "ollama",
label: "Ollama",
hint: "Cloud and local open models",
groupId: "ollama",
groupLabel: "Ollama",
},
]);
const { groups } = buildAuthChoiceGroups({
store: EMPTY_STORE,
includeSkip: false,

View File

@@ -1,4 +1,6 @@
import type { AuthProfileStore } from "../agents/auth-profiles.js";
import type { OpenClawConfig } from "../config/config.js";
import { resolveProviderWizardOptions } from "../plugins/provider-wizard.js";
import { AUTH_CHOICE_LEGACY_ALIASES_FOR_CLI } from "./auth-choice-legacy.js";
import { ONBOARD_PROVIDER_AUTH_FLAGS } from "./onboard-provider-auth-flags.js";
import type { AuthChoice, AuthChoiceGroupId } from "./onboard-types.js";
@@ -41,18 +43,6 @@ const AUTH_CHOICE_GROUP_DEFS: {
hint: "OAuth",
choices: ["chutes"],
},
{
value: "vllm",
label: "vLLM",
hint: "Local/self-hosted OpenAI-compatible",
choices: ["vllm"],
},
{
value: "ollama",
label: "Ollama",
hint: "Cloud and local open models",
choices: ["ollama"],
},
{
value: "minimax",
label: "MiniMax",
@@ -239,16 +229,6 @@ const BASE_AUTH_CHOICE_OPTIONS: ReadonlyArray<AuthChoiceOption> = [
label: "OpenAI Codex (ChatGPT OAuth)",
},
{ value: "chutes", label: "Chutes (OAuth)" },
{
value: "vllm",
label: "vLLM (custom URL + model)",
hint: "Local/self-hosted OpenAI-compatible server",
},
{
value: "ollama",
label: "Ollama",
hint: "Cloud and local open models",
},
...buildProviderAuthChoiceOptions(),
{
value: "moonshot-api-key-cn",
@@ -336,13 +316,27 @@ const BASE_AUTH_CHOICE_OPTIONS: ReadonlyArray<AuthChoiceOption> = [
{ value: "custom-api-key", label: "Custom Provider" },
];
/** Collect the deduplicated choice values contributed by provider plugins. */
function resolveDynamicProviderCliChoices(params?: {
  config?: OpenClawConfig;
  workspaceDir?: string;
  env?: NodeJS.ProcessEnv;
}): string[] {
  const seen = new Set<string>();
  for (const option of resolveProviderWizardOptions(params ?? {})) {
    seen.add(option.value);
  }
  return Array.from(seen);
}
export function formatAuthChoiceChoicesForCli(params?: {
includeSkip?: boolean;
includeLegacyAliases?: boolean;
config?: OpenClawConfig;
workspaceDir?: string;
env?: NodeJS.ProcessEnv;
}): string {
const includeSkip = params?.includeSkip ?? true;
const includeLegacyAliases = params?.includeLegacyAliases ?? false;
const values = BASE_AUTH_CHOICE_OPTIONS.map((opt) => opt.value);
const values = [
...BASE_AUTH_CHOICE_OPTIONS.map((opt) => opt.value),
...resolveDynamicProviderCliChoices(params),
];
if (includeSkip) {
values.push("skip");
@@ -357,9 +351,29 @@ export function formatAuthChoiceChoicesForCli(params?: {
export function buildAuthChoiceOptions(params: {
store: AuthProfileStore;
includeSkip: boolean;
config?: OpenClawConfig;
workspaceDir?: string;
env?: NodeJS.ProcessEnv;
}): AuthChoiceOption[] {
void params.store;
const options: AuthChoiceOption[] = [...BASE_AUTH_CHOICE_OPTIONS];
const seen = new Set(options.map((option) => option.value));
for (const option of resolveProviderWizardOptions({
config: params.config,
workspaceDir: params.workspaceDir,
env: params.env,
})) {
if (seen.has(option.value as AuthChoice)) {
continue;
}
options.push({
value: option.value as AuthChoice,
label: option.label,
hint: option.hint,
});
seen.add(option.value as AuthChoice);
}
if (params.includeSkip) {
options.push({ value: "skip", label: "Skip for now" });
@@ -368,7 +382,13 @@ export function buildAuthChoiceOptions(params: {
return options;
}
export function buildAuthChoiceGroups(params: { store: AuthProfileStore; includeSkip: boolean }): {
export function buildAuthChoiceGroups(params: {
store: AuthProfileStore;
includeSkip: boolean;
config?: OpenClawConfig;
workspaceDir?: string;
env?: NodeJS.ProcessEnv;
}): {
groups: AuthChoiceGroup[];
skipOption?: AuthChoiceOption;
} {
@@ -380,12 +400,42 @@ export function buildAuthChoiceGroups(params: { store: AuthProfileStore; include
options.map((opt) => [opt.value, opt]),
);
const groups = AUTH_CHOICE_GROUP_DEFS.map((group) => ({
const groups: AuthChoiceGroup[] = AUTH_CHOICE_GROUP_DEFS.map((group) => ({
...group,
options: group.choices
.map((choice) => optionByValue.get(choice))
.filter((opt): opt is AuthChoiceOption => Boolean(opt)),
}));
const staticGroupIds = new Set(groups.map((group) => group.value));
for (const option of resolveProviderWizardOptions({
config: params.config,
workspaceDir: params.workspaceDir,
env: params.env,
})) {
const existing = groups.find((group) => group.value === option.groupId);
const nextOption = optionByValue.get(option.value as AuthChoice) ?? {
value: option.value as AuthChoice,
label: option.label,
hint: option.hint,
};
if (existing) {
if (!existing.options.some((candidate) => candidate.value === nextOption.value)) {
existing.options.push(nextOption);
}
continue;
}
if (staticGroupIds.has(option.groupId as AuthChoiceGroupId)) {
continue;
}
groups.push({
value: option.groupId as AuthChoiceGroupId,
label: option.groupLabel,
hint: option.groupHint,
options: [nextOption],
});
staticGroupIds.add(option.groupId as AuthChoiceGroupId);
}
const skipOption = params.includeSkip
? ({ value: "skip", label: "Skip for now" } satisfies AuthChoiceOption)

View File

@@ -1,4 +1,5 @@
import type { AuthProfileStore } from "../agents/auth-profiles.js";
import type { OpenClawConfig } from "../config/config.js";
import type { WizardPrompter } from "../wizard/prompts.js";
import { buildAuthChoiceGroups } from "./auth-choice-options.js";
import type { AuthChoice } from "./onboard-types.js";
@@ -9,6 +10,9 @@ export async function promptAuthChoiceGrouped(params: {
prompter: WizardPrompter;
store: AuthProfileStore;
includeSkip: boolean;
config?: OpenClawConfig;
workspaceDir?: string;
env?: NodeJS.ProcessEnv;
}): Promise<AuthChoice> {
const { groups, skipOption } = buildAuthChoiceGroups(params);
const availableGroups = groups.filter((group) => group.options.length > 0);
@@ -55,6 +59,6 @@ export async function promptAuthChoiceGrouped(params: {
continue;
}
return methodSelection as AuthChoice;
return methodSelection;
}
}

View File

@@ -12,7 +12,6 @@ export async function applyAuthChoiceOllama(
const { config, defaultModelId } = await promptAndConfigureOllama({
cfg: params.config,
prompter: params.prompter,
agentDir: params.agentDir,
});
// Set an Ollama default so the model picker pre-selects an Ollama model.

View File

@@ -0,0 +1,140 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import type { ProviderPlugin } from "../plugins/types.js";
import type { ProviderAuthMethod } from "../plugins/types.js";
import type { ApplyAuthChoiceParams } from "./auth-choice.apply.js";
import { applyAuthChoiceLoadedPluginProvider } from "./auth-choice.apply.plugin-provider.js";
const resolvePluginProviders = vi.hoisted(() => vi.fn<() => ProviderPlugin[]>(() => []));
vi.mock("../plugins/providers.js", () => ({
resolvePluginProviders,
}));
const resolveProviderPluginChoice = vi.hoisted(() =>
vi.fn<() => { provider: ProviderPlugin; method: ProviderAuthMethod } | null>(),
);
const runProviderModelSelectedHook = vi.hoisted(() => vi.fn(async () => {}));
vi.mock("../plugins/provider-wizard.js", () => ({
resolveProviderPluginChoice,
runProviderModelSelectedHook,
}));
const upsertAuthProfile = vi.hoisted(() => vi.fn());
vi.mock("../agents/auth-profiles.js", () => ({
upsertAuthProfile,
}));
const resolveDefaultAgentId = vi.hoisted(() => vi.fn(() => "default"));
const resolveAgentWorkspaceDir = vi.hoisted(() => vi.fn(() => "/tmp/workspace"));
const resolveAgentDir = vi.hoisted(() => vi.fn(() => "/tmp/agent"));
vi.mock("../agents/agent-scope.js", () => ({
resolveDefaultAgentId,
resolveAgentDir,
resolveAgentWorkspaceDir,
}));
const resolveDefaultAgentWorkspaceDir = vi.hoisted(() => vi.fn(() => "/tmp/workspace"));
vi.mock("../agents/workspace.js", () => ({
resolveDefaultAgentWorkspaceDir,
}));
const resolveOpenClawAgentDir = vi.hoisted(() => vi.fn(() => "/tmp/agent"));
vi.mock("../agents/agent-paths.js", () => ({
resolveOpenClawAgentDir,
}));
const applyAuthProfileConfig = vi.hoisted(() => vi.fn((config) => config));
vi.mock("./onboard-auth.js", () => ({
applyAuthProfileConfig,
}));
const isRemoteEnvironment = vi.hoisted(() => vi.fn(() => false));
vi.mock("./oauth-env.js", () => ({
isRemoteEnvironment,
}));
const createVpsAwareOAuthHandlers = vi.hoisted(() => vi.fn());
vi.mock("./oauth-flow.js", () => ({
createVpsAwareOAuthHandlers,
}));
const openUrl = vi.hoisted(() => vi.fn(async () => {}));
vi.mock("./onboard-helpers.js", () => ({
openUrl,
}));
// Fixture: a minimal Ollama-like ProviderPlugin with a single "local" auth
// method whose run() returns a canned credential profile and default model.
function buildProvider(): ProviderPlugin {
  return {
    id: "ollama",
    label: "Ollama",
    auth: [
      {
        id: "local",
        label: "Ollama",
        kind: "custom",
        run: async () => ({
          profiles: [
            {
              profileId: "ollama:default",
              credential: {
                type: "api_key",
                provider: "ollama",
                key: "ollama-local",
              },
            },
          ],
          defaultModel: "ollama/qwen3:4b",
        }),
      },
    ],
  };
}
// Fixture: ApplyAuthChoiceParams with a stubbed prompter/runtime; individual
// tests override fields via `overrides`.
function buildParams(overrides: Partial<ApplyAuthChoiceParams> = {}): ApplyAuthChoiceParams {
  return {
    authChoice: "ollama",
    config: {},
    prompter: {
      note: vi.fn(async () => {}),
    } as unknown as ApplyAuthChoiceParams["prompter"],
    runtime: {} as ApplyAuthChoiceParams["runtime"],
    setDefaultModel: true,
    ...overrides,
  };
}
describe("applyAuthChoiceLoadedPluginProvider", () => {
beforeEach(() => {
vi.clearAllMocks();
});
it("applies the default model and runs provider post-setup hooks", async () => {
const provider = buildProvider();
resolvePluginProviders.mockReturnValue([provider]);
resolveProviderPluginChoice.mockReturnValue({
provider,
method: provider.auth[0],
});
const result = await applyAuthChoiceLoadedPluginProvider(buildParams());
expect(result?.config.agents?.defaults?.model).toEqual({
primary: "ollama/qwen3:4b",
});
expect(upsertAuthProfile).toHaveBeenCalledWith({
profileId: "ollama:default",
credential: {
type: "api_key",
provider: "ollama",
key: "ollama-local",
},
agentDir: "/tmp/agent",
});
expect(runProviderModelSelectedHook).toHaveBeenCalledWith({
config: result?.config,
model: "ollama/qwen3:4b",
prompter: expect.objectContaining({ note: expect.any(Function) }),
agentDir: undefined,
workspaceDir: "/tmp/workspace",
});
});
});

View File

@@ -7,7 +7,12 @@ import {
import { upsertAuthProfile } from "../agents/auth-profiles.js";
import { resolveDefaultAgentWorkspaceDir } from "../agents/workspace.js";
import { enablePluginInConfig } from "../plugins/enable.js";
import {
resolveProviderPluginChoice,
runProviderModelSelectedHook,
} from "../plugins/provider-wizard.js";
import { resolvePluginProviders } from "../plugins/providers.js";
import type { ProviderAuthMethod } from "../plugins/types.js";
import type { ApplyAuthChoiceParams, ApplyAuthChoiceResult } from "./auth-choice.apply.js";
import { isRemoteEnvironment } from "./oauth-env.js";
import { createVpsAwareOAuthHandlers } from "./oauth-flow.js";
@@ -28,6 +33,124 @@ export type PluginProviderAuthChoiceOptions = {
label: string;
};
/**
 * Execute a provider plugin auth method and fold its results into config.
 *
 * Resolves agent/workspace directories when not supplied, runs the method
 * with the prompter and OAuth helpers, merges any returned config patch,
 * persists each returned credential profile, and surfaces provider notes
 * (unless `emitNotes` is false).
 *
 * @returns the updated config plus the provider's suggested default model.
 */
export async function runProviderPluginAuthMethod(params: {
  config: ApplyAuthChoiceParams["config"];
  runtime: ApplyAuthChoiceParams["runtime"];
  prompter: ApplyAuthChoiceParams["prompter"];
  method: ProviderAuthMethod;
  agentDir?: string;
  agentId?: string;
  workspaceDir?: string;
  emitNotes?: boolean;
}): Promise<{ config: ApplyAuthChoiceParams["config"]; defaultModel?: string }> {
  // Resolve the default agent id once and reuse it (was resolved twice).
  const defaultAgentId = resolveDefaultAgentId(params.config);
  const agentId = params.agentId ?? defaultAgentId;
  // The default agent stores state in the global OpenClaw dir; named agents
  // get their own directory.
  const agentDir =
    params.agentDir ??
    (agentId === defaultAgentId
      ? resolveOpenClawAgentDir()
      : resolveAgentDir(params.config, agentId));
  const workspaceDir =
    params.workspaceDir ??
    resolveAgentWorkspaceDir(params.config, agentId) ??
    resolveDefaultAgentWorkspaceDir();
  const isRemote = isRemoteEnvironment();
  const result = await params.method.run({
    config: params.config,
    agentDir,
    workspaceDir,
    prompter: params.prompter,
    runtime: params.runtime,
    isRemote,
    openUrl: async (url) => {
      await openUrl(url);
    },
    oauth: {
      createVpsAwareHandlers: (opts) => createVpsAwareOAuthHandlers(opts),
    },
  });
  let nextConfig = params.config;
  if (result.configPatch) {
    nextConfig = mergeConfigPatch(nextConfig, result.configPatch);
  }
  for (const profile of result.profiles) {
    upsertAuthProfile({
      profileId: profile.profileId,
      credential: profile.credential,
      agentDir,
    });
    nextConfig = applyAuthProfileConfig(nextConfig, {
      profileId: profile.profileId,
      provider: profile.credential.provider,
      // NOTE(review): both ternary arms yield credential.type — presumably a
      // type-narrowing aid for `mode`; confirm whether a mapping was intended.
      mode: profile.credential.type === "token" ? "token" : profile.credential.type,
      ...("email" in profile.credential && profile.credential.email
        ? { email: profile.credential.email }
        : {}),
    });
  }
  if (params.emitNotes !== false && result.notes && result.notes.length > 0) {
    await params.prompter.note(result.notes.join("\n"), "Provider notes");
  }
  return {
    config: nextConfig,
    defaultModel: result.defaultModel,
  };
}
/**
 * Handle an auth choice that maps onto a loaded provider plugin.
 * Returns null when the choice does not correspond to any plugin provider,
 * letting callers fall through to the built-in handlers.
 */
export async function applyAuthChoiceLoadedPluginProvider(
  params: ApplyAuthChoiceParams,
): Promise<ApplyAuthChoiceResult | null> {
  const agentId = params.agentId ?? resolveDefaultAgentId(params.config);
  const workspaceDir =
    resolveAgentWorkspaceDir(params.config, agentId) ?? resolveDefaultAgentWorkspaceDir();
  const resolved = resolveProviderPluginChoice({
    providers: resolvePluginProviders({ config: params.config, workspaceDir }),
    choice: params.authChoice,
  });
  if (!resolved) {
    return null;
  }
  const applied = await runProviderPluginAuthMethod({
    config: params.config,
    runtime: params.runtime,
    prompter: params.prompter,
    method: resolved.method,
    agentDir: params.agentDir,
    agentId: params.agentId,
    workspaceDir,
  });
  // No suggested model: hand back the (possibly patched) config unchanged.
  if (!applied.defaultModel) {
    return { config: applied.config, agentModelOverride: undefined };
  }
  // Caller opted out of setting the global default: surface the model as a
  // per-agent override instead.
  if (!params.setDefaultModel) {
    return { config: applied.config, agentModelOverride: applied.defaultModel };
  }
  const nextConfig = applyDefaultModel(applied.config, applied.defaultModel);
  await runProviderModelSelectedHook({
    config: nextConfig,
    model: applied.defaultModel,
    prompter: params.prompter,
    agentDir: params.agentDir,
    workspaceDir,
  });
  await params.prompter.note(
    `Default model set to ${applied.defaultModel}`,
    "Model configured",
  );
  return { config: nextConfig };
}
export async function applyAuthChoicePluginProvider(
params: ApplyAuthChoiceParams,
options: PluginProviderAuthChoiceOptions,
@@ -70,60 +193,40 @@ export async function applyAuthChoicePluginProvider(
return { config: nextConfig };
}
const isRemote = isRemoteEnvironment();
const result = await method.run({
const applied = await runProviderPluginAuthMethod({
config: nextConfig,
agentDir,
workspaceDir,
prompter: params.prompter,
runtime: params.runtime,
isRemote,
openUrl: async (url) => {
await openUrl(url);
},
oauth: {
createVpsAwareHandlers: (opts) => createVpsAwareOAuthHandlers(opts),
},
prompter: params.prompter,
method,
agentDir,
agentId,
workspaceDir,
});
if (result.configPatch) {
nextConfig = mergeConfigPatch(nextConfig, result.configPatch);
}
for (const profile of result.profiles) {
upsertAuthProfile({
profileId: profile.profileId,
credential: profile.credential,
agentDir,
});
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: profile.profileId,
provider: profile.credential.provider,
mode: profile.credential.type === "token" ? "token" : profile.credential.type,
...("email" in profile.credential && profile.credential.email
? { email: profile.credential.email }
: {}),
});
}
nextConfig = applied.config;
let agentModelOverride: string | undefined;
if (result.defaultModel) {
if (applied.defaultModel) {
if (params.setDefaultModel) {
nextConfig = applyDefaultModel(nextConfig, result.defaultModel);
await params.prompter.note(`Default model set to ${result.defaultModel}`, "Model configured");
} else if (params.agentId) {
agentModelOverride = result.defaultModel;
nextConfig = applyDefaultModel(nextConfig, applied.defaultModel);
await runProviderModelSelectedHook({
config: nextConfig,
model: applied.defaultModel,
prompter: params.prompter,
agentDir,
workspaceDir,
});
await params.prompter.note(
`Default model set to ${result.defaultModel} for agent "${params.agentId}".`,
`Default model set to ${applied.defaultModel}`,
"Model configured",
);
} else if (params.agentId) {
agentModelOverride = applied.defaultModel;
await params.prompter.note(
`Default model set to ${applied.defaultModel} for agent "${params.agentId}".`,
"Model configured",
);
}
}
if (result.notes && result.notes.length > 0) {
await params.prompter.note(result.notes.join("\n"), "Provider notes");
}
return { config: nextConfig, agentModelOverride };
}

View File

@@ -9,10 +9,9 @@ import { applyAuthChoiceGitHubCopilot } from "./auth-choice.apply.github-copilot
import { applyAuthChoiceGoogleGeminiCli } from "./auth-choice.apply.google-gemini-cli.js";
import { applyAuthChoiceMiniMax } from "./auth-choice.apply.minimax.js";
import { applyAuthChoiceOAuth } from "./auth-choice.apply.oauth.js";
import { applyAuthChoiceOllama } from "./auth-choice.apply.ollama.js";
import { applyAuthChoiceOpenAI } from "./auth-choice.apply.openai.js";
import { applyAuthChoiceLoadedPluginProvider } from "./auth-choice.apply.plugin-provider.js";
import { applyAuthChoiceQwenPortal } from "./auth-choice.apply.qwen-portal.js";
import { applyAuthChoiceVllm } from "./auth-choice.apply.vllm.js";
import { applyAuthChoiceVolcengine } from "./auth-choice.apply.volcengine.js";
import { applyAuthChoiceXAI } from "./auth-choice.apply.xai.js";
import type { AuthChoice, OnboardOptions } from "./onboard-types.js";
@@ -37,9 +36,8 @@ export async function applyAuthChoice(
params: ApplyAuthChoiceParams,
): Promise<ApplyAuthChoiceResult> {
const handlers: Array<(p: ApplyAuthChoiceParams) => Promise<ApplyAuthChoiceResult | null>> = [
applyAuthChoiceLoadedPluginProvider,
applyAuthChoiceAnthropic,
applyAuthChoiceVllm,
applyAuthChoiceOllama,
applyAuthChoiceOpenAI,
applyAuthChoiceOAuth,
applyAuthChoiceApiProviders,

View File

@@ -34,7 +34,6 @@ export async function applyAuthChoiceVllm(
const { config: nextConfig, modelRef } = await promptAndConfigureVllm({
cfg: params.config,
prompter: params.prompter,
agentDir: params.agentDir,
});
if (!params.setDefaultModel) {

View File

@@ -1,3 +1,6 @@
import type { OpenClawConfig } from "../config/config.js";
import { resolveProviderPluginChoice } from "../plugins/provider-wizard.js";
import { resolvePluginProviders } from "../plugins/providers.js";
import type { AuthChoice } from "./onboard-types.js";
const PREFERRED_PROVIDER_BY_AUTH_CHOICE: Partial<Record<AuthChoice, string>> = {
@@ -6,8 +9,6 @@ const PREFERRED_PROVIDER_BY_AUTH_CHOICE: Partial<Record<AuthChoice, string>> = {
"claude-cli": "anthropic",
token: "anthropic",
apiKey: "anthropic",
vllm: "vllm",
ollama: "ollama",
"openai-codex": "openai-codex",
"codex-cli": "openai-codex",
chutes: "chutes",
@@ -22,6 +23,8 @@ const PREFERRED_PROVIDER_BY_AUTH_CHOICE: Partial<Record<AuthChoice, string>> = {
"gemini-api-key": "google",
"google-gemini-cli": "google-gemini-cli",
"mistral-api-key": "mistral",
ollama: "ollama",
sglang: "sglang",
"zai-api-key": "zai",
"zai-coding-global": "zai",
"zai-coding-cn": "zai",
@@ -47,8 +50,27 @@ const PREFERRED_PROVIDER_BY_AUTH_CHOICE: Partial<Record<AuthChoice, string>> = {
"byteplus-api-key": "byteplus",
"qianfan-api-key": "qianfan",
"custom-api-key": "custom",
vllm: "vllm",
};
export function resolvePreferredProviderForAuthChoice(choice: AuthChoice): string | undefined {
return PREFERRED_PROVIDER_BY_AUTH_CHOICE[choice];
/**
 * Maps an onboarding auth choice to the provider id it should prefer.
 *
 * Built-in choices resolve through the static table above; any other choice
 * is matched against loaded provider plugins — which is why `config`,
 * `workspaceDir`, and `env` are accepted, as they scope plugin resolution.
 * Returns `undefined` when no provider matches the choice.
 */
export function resolvePreferredProviderForAuthChoice(params: {
  choice: AuthChoice;
  config?: OpenClawConfig;
  workspaceDir?: string;
  env?: NodeJS.ProcessEnv;
}): string | undefined {
  const preferred = PREFERRED_PROVIDER_BY_AUTH_CHOICE[params.choice];
  if (preferred) {
    return preferred;
  }
  // Not a built-in choice: check dynamically loaded provider plugins.
  const providers = resolvePluginProviders({
    config: params.config,
    workspaceDir: params.workspaceDir,
    env: params.env,
  });
  return resolveProviderPluginChoice({
    providers,
    choice: params.choice,
  })?.provider.id;
}

View File

@@ -1369,7 +1369,7 @@ describe("resolvePreferredProviderForAuthChoice", () => {
{ authChoice: "unknown" as AuthChoice, expectedProvider: undefined },
] as const;
for (const scenario of scenarios) {
expect(resolvePreferredProviderForAuthChoice(scenario.authChoice)).toBe(
expect(resolvePreferredProviderForAuthChoice({ choice: scenario.authChoice })).toBe(
scenario.expectedProvider,
);
}

View File

@@ -1,4 +1,5 @@
import { ensureAuthProfileStore } from "../agents/auth-profiles.js";
import { resolveDefaultAgentWorkspaceDir } from "../agents/workspace.js";
import type { OpenClawConfig, GatewayAuthConfig } from "../config/config.js";
import { isSecretRef, type SecretInput } from "../config/types.secrets.js";
import type { RuntimeEnv } from "../runtime.js";
@@ -86,6 +87,7 @@ export async function promptAuthConfig(
allowKeychainPrompt: false,
}),
includeSkip: true,
config: cfg,
});
let next = cfg;
@@ -107,7 +109,13 @@ export async function promptAuthConfig(
prompter,
allowKeep: true,
ignoreAllowlist: true,
preferredProvider: resolvePreferredProviderForAuthChoice(authChoice),
includeProviderPluginSetups: true,
preferredProvider: resolvePreferredProviderForAuthChoice({
choice: authChoice,
config: next,
}),
workspaceDir: resolveDefaultAgentWorkspaceDir(),
runtime,
});
if (modelSelection.config) {
next = modelSelection.config;

View File

@@ -21,12 +21,10 @@ const ensureAuthProfileStore = vi.hoisted(() =>
);
const listProfilesForProvider = vi.hoisted(() => vi.fn(() => []));
const upsertAuthProfile = vi.hoisted(() => vi.fn());
const upsertAuthProfileWithLock = vi.hoisted(() => vi.fn(async () => {}));
vi.mock("../agents/auth-profiles.js", () => ({
ensureAuthProfileStore,
listProfilesForProvider,
upsertAuthProfile,
upsertAuthProfileWithLock,
}));
const resolveEnvApiKey = vi.hoisted(() => vi.fn(() => undefined));
@@ -36,6 +34,25 @@ vi.mock("../agents/model-auth.js", () => ({
hasUsableCustomProviderApiKey,
}));
const resolveProviderModelPickerEntries = vi.hoisted(() => vi.fn(() => []));
const resolveProviderPluginChoice = vi.hoisted(() => vi.fn());
const runProviderModelSelectedHook = vi.hoisted(() => vi.fn(async () => {}));
vi.mock("../plugins/provider-wizard.js", () => ({
resolveProviderModelPickerEntries,
resolveProviderPluginChoice,
runProviderModelSelectedHook,
}));
const resolvePluginProviders = vi.hoisted(() => vi.fn(() => []));
vi.mock("../plugins/providers.js", () => ({
resolvePluginProviders,
}));
const runProviderPluginAuthMethod = vi.hoisted(() => vi.fn());
vi.mock("./auth-choice.apply.plugin-provider.js", () => ({
runProviderPluginAuthMethod,
}));
const OPENROUTER_CATALOG = [
{
provider: "openrouter",
@@ -69,17 +86,40 @@ describe("promptDefaultModel", () => {
name: "Claude Sonnet 4.5",
},
]);
resolveProviderModelPickerEntries.mockReturnValue([
{ value: "vllm", label: "vLLM (custom)", hint: "Enter vLLM URL + API key + model" },
] as never);
resolvePluginProviders.mockReturnValue([{ id: "vllm" }] as never);
resolveProviderPluginChoice.mockReturnValue({
provider: { id: "vllm", label: "vLLM", auth: [] },
method: { id: "custom", label: "vLLM", kind: "custom" },
});
runProviderPluginAuthMethod.mockResolvedValue({
config: {
models: {
providers: {
vllm: {
baseUrl: "http://127.0.0.1:8000/v1",
api: "openai-completions",
apiKey: "VLLM_API_KEY",
models: [
{
id: "meta-llama/Meta-Llama-3-8B-Instruct",
name: "meta-llama/Meta-Llama-3-8B-Instruct",
},
],
},
},
},
},
defaultModel: "vllm/meta-llama/Meta-Llama-3-8B-Instruct",
});
const select = vi.fn(async (params) => {
const vllm = params.options.find((opt: { value: string }) => opt.value === "__vllm__");
const vllm = params.options.find((opt: { value: string }) => opt.value === "vllm");
return (vllm?.value ?? "") as never;
});
const text = vi
.fn()
.mockResolvedValueOnce("http://127.0.0.1:8000/v1")
.mockResolvedValueOnce("sk-vllm-test")
.mockResolvedValueOnce("meta-llama/Meta-Llama-3-8B-Instruct");
const prompter = makePrompter({ select, text: text as never });
const prompter = makePrompter({ select });
const config = { agents: { defaults: {} } } as OpenClawConfig;
const result = await promptDefaultModel({
@@ -87,17 +127,13 @@ describe("promptDefaultModel", () => {
prompter,
allowKeep: false,
includeManual: false,
includeVllm: true,
includeProviderPluginSetups: true,
ignoreAllowlist: true,
agentDir: "/tmp/openclaw-agent",
runtime: {} as never,
});
expect(upsertAuthProfileWithLock).toHaveBeenCalledWith(
expect.objectContaining({
profileId: "vllm:default",
credential: expect.objectContaining({ provider: "vllm" }),
}),
);
expect(runProviderPluginAuthMethod).toHaveBeenCalledOnce();
expect(result.model).toBe("vllm/meta-llama/Meta-Llama-3-8B-Instruct");
expect(result.config?.models?.providers?.vllm).toMatchObject({
baseUrl: "http://127.0.0.1:8000/v1",

View File

@@ -11,14 +11,19 @@ import {
} from "../agents/model-selection.js";
import type { OpenClawConfig } from "../config/config.js";
import { resolveAgentModelPrimaryValue } from "../config/model-input.js";
import {
resolveProviderPluginChoice,
resolveProviderModelPickerEntries,
runProviderModelSelectedHook,
} from "../plugins/provider-wizard.js";
import { resolvePluginProviders } from "../plugins/providers.js";
import type { WizardPrompter, WizardSelectOption } from "../wizard/prompts.js";
import { runProviderPluginAuthMethod } from "./auth-choice.apply.plugin-provider.js";
import { formatTokenK } from "./models/shared.js";
import { OPENAI_CODEX_DEFAULT_MODEL } from "./openai-codex-model-default.js";
import { promptAndConfigureVllm } from "./vllm-setup.js";
const KEEP_VALUE = "__keep__";
const MANUAL_VALUE = "__manual__";
const VLLM_VALUE = "__vllm__";
const PROVIDER_FILTER_THRESHOLD = 30;
// Models that are internal routing features and should not be shown in selection lists.
@@ -31,10 +36,13 @@ type PromptDefaultModelParams = {
prompter: WizardPrompter;
allowKeep?: boolean;
includeManual?: boolean;
includeVllm?: boolean;
includeProviderPluginSetups?: boolean;
ignoreAllowlist?: boolean;
preferredProvider?: string;
agentDir?: string;
workspaceDir?: string;
env?: NodeJS.ProcessEnv;
runtime?: import("../runtime.js").RuntimeEnv;
message?: string;
};
@@ -180,7 +188,7 @@ export async function promptDefaultModel(
const cfg = params.config;
const allowKeep = params.allowKeep ?? true;
const includeManual = params.includeManual ?? true;
const includeVllm = params.includeVllm ?? false;
const includeProviderPluginSetups = params.includeProviderPluginSetups ?? false;
const ignoreAllowlist = params.ignoreAllowlist ?? false;
const preferredProviderRaw = params.preferredProvider?.trim();
const preferredProvider = preferredProviderRaw
@@ -227,19 +235,19 @@ export async function promptDefaultModel(
});
}
const providers = Array.from(new Set(models.map((entry) => entry.provider))).toSorted((a, b) =>
const providerIds = Array.from(new Set(models.map((entry) => entry.provider))).toSorted((a, b) =>
a.localeCompare(b),
);
const hasPreferredProvider = preferredProvider ? providers.includes(preferredProvider) : false;
const hasPreferredProvider = preferredProvider ? providerIds.includes(preferredProvider) : false;
const shouldPromptProvider =
!hasPreferredProvider && providers.length > 1 && models.length > PROVIDER_FILTER_THRESHOLD;
!hasPreferredProvider && providerIds.length > 1 && models.length > PROVIDER_FILTER_THRESHOLD;
if (shouldPromptProvider) {
const selection = await params.prompter.select({
message: "Filter models by provider",
options: [
{ value: "*", label: "All providers" },
...providers.map((provider) => {
...providerIds.map((provider) => {
const count = models.filter((entry) => entry.provider === provider).length;
return {
value: provider,
@@ -286,12 +294,14 @@ export async function promptDefaultModel(
if (includeManual) {
options.push({ value: MANUAL_VALUE, label: "Enter model manually" });
}
if (includeVllm && agentDir) {
options.push({
value: VLLM_VALUE,
label: "vLLM (custom)",
hint: "Enter vLLM URL + API key + model",
});
if (includeProviderPluginSetups && agentDir) {
options.push(
...resolveProviderModelPickerEntries({
config: cfg,
workspaceDir: params.workspaceDir,
env: params.env,
}),
);
}
const seen = new Set<string>();
@@ -337,23 +347,65 @@ export async function promptDefaultModel(
initialValue: configuredRaw || resolvedKey || undefined,
});
}
if (selection === VLLM_VALUE) {
if (!agentDir) {
const pluginProviders = resolvePluginProviders({
config: cfg,
workspaceDir: params.workspaceDir,
env: params.env,
});
const pluginResolution = selection.startsWith("provider-plugin:")
? selection
: selection.includes("/")
? null
: pluginProviders.some(
(provider) => normalizeProviderId(provider.id) === normalizeProviderId(selection),
)
? selection
: null;
if (pluginResolution) {
if (!agentDir || !params.runtime) {
await params.prompter.note(
"vLLM setup requires an agent directory context.",
"vLLM not available",
"Provider setup requires agent and runtime context.",
"Provider setup unavailable",
);
return {};
}
const { config: nextConfig, modelRef } = await promptAndConfigureVllm({
cfg,
prompter: params.prompter,
agentDir,
const resolved = resolveProviderPluginChoice({
providers: pluginProviders,
choice: pluginResolution,
});
return { model: modelRef, config: nextConfig };
if (!resolved) {
return {};
}
const applied = await runProviderPluginAuthMethod({
config: cfg,
runtime: params.runtime,
prompter: params.prompter,
method: resolved.method,
agentDir,
workspaceDir: params.workspaceDir,
});
if (applied.defaultModel) {
await runProviderModelSelectedHook({
config: applied.config,
model: applied.defaultModel,
prompter: params.prompter,
agentDir,
workspaceDir: params.workspaceDir,
env: params.env,
});
}
return { model: applied.defaultModel, config: applied.config };
}
return { model: String(selection) };
const model = String(selection);
await runProviderModelSelectedHook({
config: cfg,
model,
prompter: params.prompter,
agentDir,
workspaceDir: params.workspaceDir,
env: params.env,
});
return { model };
}
export async function promptModelAllowlist(params: {

View File

@@ -289,7 +289,6 @@ async function storeOllamaCredential(agentDir?: string): Promise<void> {
export async function promptAndConfigureOllama(params: {
cfg: OpenClawConfig;
prompter: WizardPrompter;
agentDir?: string;
}): Promise<{ config: OpenClawConfig; defaultModelId: string }> {
const { prompter } = params;
@@ -395,8 +394,6 @@ export async function promptAndConfigureOllama(params: {
...modelNames.filter((name) => !suggestedModels.includes(name)),
];
await storeOllamaCredential(params.agentDir);
const defaultModelId = suggestedModels[0] ?? OLLAMA_DEFAULT_MODEL;
const config = applyOllamaProviderConfig(
params.cfg,

View File

@@ -2,15 +2,13 @@ import type { ChannelId } from "../channels/plugins/types.js";
import type { GatewayDaemonRuntime } from "./daemon-runtime.js";
export type OnboardMode = "local" | "remote";
export type AuthChoice =
export type BuiltInAuthChoice =
// Legacy alias for `setup-token` (kept for backwards CLI compatibility).
| "oauth"
| "setup-token"
| "claude-cli"
| "token"
| "chutes"
| "vllm"
| "ollama"
| "openai-codex"
| "openai-api-key"
| "openrouter-api-key"
@@ -53,12 +51,12 @@ export type AuthChoice =
| "modelstudio-api-key"
| "custom-api-key"
| "skip";
export type AuthChoiceGroupId =
export type AuthChoice = BuiltInAuthChoice | (string & {});
export type BuiltInAuthChoiceGroupId =
| "openai"
| "anthropic"
| "chutes"
| "vllm"
| "ollama"
| "google"
| "copilot"
| "openrouter"
@@ -83,6 +81,7 @@ export type AuthChoiceGroupId =
| "volcengine"
| "byteplus"
| "custom";
export type AuthChoiceGroupId = BuiltInAuthChoiceGroupId | (string & {});
export type GatewayAuthChoice = "token" | "password";
export type ResetScope = "config" | "config+creds+sessions" | "full";
export type GatewayBind = "loopback" | "lan" | "auto" | "custom" | "tailnet";

View File

@@ -0,0 +1,119 @@
import type { AuthProfileCredential } from "../agents/auth-profiles/types.js";
import type { OpenClawConfig } from "../config/config.js";
import type { WizardPrompter } from "../wizard/prompts.js";
// Conservative defaults for self-hosted OpenAI-compatible providers whose
// real limits are unknown: 128k context window, 8k max output tokens, and
// zero cost entries (local inference is not metered).
export const SELF_HOSTED_DEFAULT_CONTEXT_WINDOW = 128000;
export const SELF_HOSTED_DEFAULT_MAX_TOKENS = 8192;
export const SELF_HOSTED_DEFAULT_COST = {
  input: 0,
  output: 0,
  cacheRead: 0,
  cacheWrite: 0,
};
/**
 * Returns a copy of the config whose default agent model points at
 * `modelRef`, preserving any previously configured fallback chain.
 * The input config is not mutated.
 */
export function applyProviderDefaultModel(cfg: OpenClawConfig, modelRef: string): OpenClawConfig {
  const current = cfg.agents?.defaults?.model;
  // Fallbacks only exist on the object form of the model setting; the
  // string form carries no extra data worth preserving.
  let fallbacks: string[] | undefined;
  if (current && typeof current === "object" && "fallbacks" in current) {
    fallbacks = (current as { fallbacks?: string[] }).fallbacks;
  }
  const model = fallbacks ? { fallbacks, primary: modelRef } : { primary: modelRef };
  return {
    ...cfg,
    agents: {
      ...cfg.agents,
      defaults: {
        ...cfg.agents?.defaults,
        model,
      },
    },
  };
}
/**
 * Interactive setup for an OpenAI-completions-compatible self-hosted provider.
 *
 * Prompts for base URL, API key, and model id, then returns an updated config
 * with the provider registered plus the credential/profile metadata the
 * caller is expected to persist. The API key itself is NOT stored here — the
 * config records only the env-var NAME (`defaultApiKeyEnvVar`).
 *
 * @returns The new config, the api_key credential, and the model/profile ids.
 */
export async function promptAndConfigureOpenAICompatibleSelfHostedProvider(params: {
  cfg: OpenClawConfig;
  prompter: WizardPrompter;
  providerId: string;
  providerLabel: string;
  defaultBaseUrl: string;
  defaultApiKeyEnvVar: string;
  modelPlaceholder: string;
  input?: Array<"text" | "image">;
  reasoning?: boolean;
  contextWindow?: number;
  maxTokens?: number;
}): Promise<{
  config: OpenClawConfig;
  credential: AuthProfileCredential;
  modelId: string;
  modelRef: string;
  profileId: string;
}> {
  const { prompter, providerId, providerLabel } = params;
  // Collapse a possibly-undefined prompt answer into a trimmed string.
  const clean = (value: unknown) => String(value ?? "").trim();

  const baseUrlAnswer = await prompter.text({
    message: `${providerLabel} base URL`,
    initialValue: params.defaultBaseUrl,
    placeholder: params.defaultBaseUrl,
    validate: (value) => (value?.trim() ? undefined : "Required"),
  });
  const apiKeyAnswer = await prompter.text({
    message: `${providerLabel} API key`,
    placeholder: "sk-... (or any non-empty string)",
    validate: (value) => (value?.trim() ? undefined : "Required"),
  });
  const modelAnswer = await prompter.text({
    message: `${providerLabel} model`,
    placeholder: params.modelPlaceholder,
    validate: (value) => (value?.trim() ? undefined : "Required"),
  });

  // Strip trailing slashes so later URL joins don't double up separators.
  const baseUrl = clean(baseUrlAnswer).replace(/\/+$/, "");
  const apiKey = clean(apiKeyAnswer);
  const modelId = clean(modelAnswer);

  const fallbackInput: Array<"text" | "image"> = ["text"];
  const modelEntry = {
    id: modelId,
    name: modelId,
    reasoning: params.reasoning ?? false,
    input: params.input ?? fallbackInput,
    cost: SELF_HOSTED_DEFAULT_COST,
    contextWindow: params.contextWindow ?? SELF_HOSTED_DEFAULT_CONTEXT_WINDOW,
    maxTokens: params.maxTokens ?? SELF_HOSTED_DEFAULT_MAX_TOKENS,
  };
  const config: OpenClawConfig = {
    ...params.cfg,
    models: {
      ...params.cfg.models,
      // Keep whatever merge mode the user already chose; default to "merge".
      mode: params.cfg.models?.mode ?? "merge",
      providers: {
        ...params.cfg.models?.providers,
        [providerId]: {
          baseUrl,
          api: "openai-completions",
          // Env-var NAME, not the secret; the key goes into the auth profile.
          apiKey: params.defaultApiKeyEnvVar,
          models: [modelEntry],
        },
      },
    },
  };

  return {
    config,
    credential: { type: "api_key", provider: providerId, key: apiKey },
    modelId,
    modelRef: `${providerId}/${modelId}`,
    profileId: `${providerId}:default`,
  };
}

View File

@@ -1,78 +1,36 @@
import { upsertAuthProfileWithLock } from "../agents/auth-profiles.js";
import type { OpenClawConfig } from "../config/config.js";
import type { WizardPrompter } from "../wizard/prompts.js";
import {
applyProviderDefaultModel,
promptAndConfigureOpenAICompatibleSelfHostedProvider,
SELF_HOSTED_DEFAULT_CONTEXT_WINDOW,
SELF_HOSTED_DEFAULT_COST,
SELF_HOSTED_DEFAULT_MAX_TOKENS,
} from "./self-hosted-provider-setup.js";
export const VLLM_DEFAULT_BASE_URL = "http://127.0.0.1:8000/v1";
export const VLLM_DEFAULT_CONTEXT_WINDOW = 128000;
export const VLLM_DEFAULT_MAX_TOKENS = 8192;
export const VLLM_DEFAULT_COST = {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
};
export const VLLM_DEFAULT_CONTEXT_WINDOW = SELF_HOSTED_DEFAULT_CONTEXT_WINDOW;
export const VLLM_DEFAULT_MAX_TOKENS = SELF_HOSTED_DEFAULT_MAX_TOKENS;
export const VLLM_DEFAULT_COST = SELF_HOSTED_DEFAULT_COST;
export async function promptAndConfigureVllm(params: {
cfg: OpenClawConfig;
prompter: WizardPrompter;
agentDir?: string;
}): Promise<{ config: OpenClawConfig; modelId: string; modelRef: string }> {
const baseUrlRaw = await params.prompter.text({
message: "vLLM base URL",
initialValue: VLLM_DEFAULT_BASE_URL,
placeholder: VLLM_DEFAULT_BASE_URL,
validate: (value) => (value?.trim() ? undefined : "Required"),
const result = await promptAndConfigureOpenAICompatibleSelfHostedProvider({
cfg: params.cfg,
prompter: params.prompter,
providerId: "vllm",
providerLabel: "vLLM",
defaultBaseUrl: VLLM_DEFAULT_BASE_URL,
defaultApiKeyEnvVar: "VLLM_API_KEY",
modelPlaceholder: "meta-llama/Meta-Llama-3-8B-Instruct",
});
const apiKeyRaw = await params.prompter.text({
message: "vLLM API key",
placeholder: "sk-... (or any non-empty string)",
validate: (value) => (value?.trim() ? undefined : "Required"),
});
const modelIdRaw = await params.prompter.text({
message: "vLLM model",
placeholder: "meta-llama/Meta-Llama-3-8B-Instruct",
validate: (value) => (value?.trim() ? undefined : "Required"),
});
const baseUrl = String(baseUrlRaw ?? "")
.trim()
.replace(/\/+$/, "");
const apiKey = String(apiKeyRaw ?? "").trim();
const modelId = String(modelIdRaw ?? "").trim();
const modelRef = `vllm/${modelId}`;
await upsertAuthProfileWithLock({
profileId: "vllm:default",
credential: { type: "api_key", provider: "vllm", key: apiKey },
agentDir: params.agentDir,
});
const nextConfig: OpenClawConfig = {
...params.cfg,
models: {
...params.cfg.models,
mode: params.cfg.models?.mode ?? "merge",
providers: {
...params.cfg.models?.providers,
vllm: {
baseUrl,
api: "openai-completions",
apiKey: "VLLM_API_KEY",
models: [
{
id: modelId,
name: modelId,
reasoning: false,
input: ["text"],
cost: VLLM_DEFAULT_COST,
contextWindow: VLLM_DEFAULT_CONTEXT_WINDOW,
maxTokens: VLLM_DEFAULT_MAX_TOKENS,
},
],
},
},
},
return {
config: result.config,
modelId: result.modelId,
modelRef: result.modelRef,
};
return { config: nextConfig, modelId, modelRef };
}
export { applyProviderDefaultModel as applyVllmDefaultModel };

View File

@@ -1,6 +1,7 @@
export type {
AnyAgentTool,
OpenClawPluginApi,
ProviderDiscoveryContext,
OpenClawPluginService,
ProviderAuthContext,
ProviderAuthResult,
@@ -12,6 +13,32 @@ export type { GatewayRequestHandlerOptions } from "../gateway/server-methods/typ
export { emptyPluginConfigSchema } from "../plugins/config-schema.js";
export { buildOauthProviderAuthResult } from "./provider-auth-result.js";
export {
applyProviderDefaultModel,
promptAndConfigureOpenAICompatibleSelfHostedProvider,
SELF_HOSTED_DEFAULT_CONTEXT_WINDOW,
SELF_HOSTED_DEFAULT_COST,
SELF_HOSTED_DEFAULT_MAX_TOKENS,
} from "../commands/self-hosted-provider-setup.js";
export {
OLLAMA_DEFAULT_BASE_URL,
OLLAMA_DEFAULT_MODEL,
configureOllamaNonInteractive,
ensureOllamaModelPulled,
promptAndConfigureOllama,
} from "../commands/ollama-setup.js";
export {
VLLM_DEFAULT_BASE_URL,
VLLM_DEFAULT_CONTEXT_WINDOW,
VLLM_DEFAULT_COST,
VLLM_DEFAULT_MAX_TOKENS,
promptAndConfigureVllm,
} from "../commands/vllm-setup.js";
export {
buildOllamaProvider,
buildSglangProvider,
buildVllmProvider,
} from "../agents/models-config.providers.discovery.js";
export {
approveDevicePairing,

View File

@@ -820,6 +820,33 @@ export type { ContextEngineFactory } from "../context-engine/registry.js";
// agentDir/store) rather than importing raw helpers directly.
export { requireApiKey } from "../agents/model-auth.js";
export type { ResolvedProviderAuth } from "../agents/model-auth.js";
export type { ProviderDiscoveryContext } from "../plugins/types.js";
export {
applyProviderDefaultModel,
promptAndConfigureOpenAICompatibleSelfHostedProvider,
SELF_HOSTED_DEFAULT_CONTEXT_WINDOW,
SELF_HOSTED_DEFAULT_COST,
SELF_HOSTED_DEFAULT_MAX_TOKENS,
} from "../commands/self-hosted-provider-setup.js";
export {
OLLAMA_DEFAULT_BASE_URL,
OLLAMA_DEFAULT_MODEL,
configureOllamaNonInteractive,
ensureOllamaModelPulled,
promptAndConfigureOllama,
} from "../commands/ollama-setup.js";
export {
VLLM_DEFAULT_BASE_URL,
VLLM_DEFAULT_CONTEXT_WINDOW,
VLLM_DEFAULT_COST,
VLLM_DEFAULT_MAX_TOKENS,
promptAndConfigureVllm,
} from "../commands/vllm-setup.js";
export {
buildOllamaProvider,
buildSglangProvider,
buildVllmProvider,
} from "../agents/models-config.providers.discovery.js";
// Security utilities
export { redactSensitiveText } from "../logging/redact.js";

View File

@@ -25,8 +25,11 @@ export type NormalizedPluginsConfig = {
export const BUNDLED_ENABLED_BY_DEFAULT = new Set<string>([
"device-pair",
"ollama",
"phone-control",
"sglang",
"talk-voice",
"vllm",
]);
const normalizeList = (value: unknown): string[] => {

View File

@@ -0,0 +1,90 @@
import { describe, expect, it } from "vitest";
import type { ModelProviderConfig } from "../config/types.js";
import {
groupPluginDiscoveryProvidersByOrder,
normalizePluginDiscoveryResult,
} from "./provider-discovery.js";
import type { ProviderDiscoveryOrder, ProviderPlugin } from "./types.js";
// Builds a minimal ProviderPlugin stub with a no-op discovery hook.
// `order` is only attached when provided, so the "defaults to late" path in
// groupPluginDiscoveryProvidersByOrder can be exercised by omitting it.
function makeProvider(params: {
  id: string;
  label?: string;
  order?: ProviderDiscoveryOrder;
}): ProviderPlugin {
  return {
    id: params.id,
    label: params.label ?? params.id,
    auth: [],
    discovery: {
      ...(params.order ? { order: params.order } : {}),
      run: async () => null,
    },
  };
}
// Minimal ModelProviderConfig fixture; `overrides` lets individual tests
// tweak fields without repeating the required baseUrl/models boilerplate.
function makeModelProviderConfig(overrides?: Partial<ModelProviderConfig>): ModelProviderConfig {
  return {
    baseUrl: "http://127.0.0.1:8000/v1",
    models: [],
    ...overrides,
  };
}
describe("groupPluginDiscoveryProvidersByOrder", () => {
  it("groups providers by declared order and sorts labels within each group", () => {
    const grouped = groupPluginDiscoveryProvidersByOrder([
      makeProvider({ id: "late-b", label: "Zulu" }),
      makeProvider({ id: "late-a", label: "Alpha" }),
      makeProvider({ id: "paired", label: "Paired", order: "paired" }),
      makeProvider({ id: "profile", label: "Profile", order: "profile" }),
      makeProvider({ id: "simple", label: "Simple", order: "simple" }),
    ]);
    expect(grouped.simple.map((provider) => provider.id)).toEqual(["simple"]);
    expect(grouped.profile.map((provider) => provider.id)).toEqual(["profile"]);
    expect(grouped.paired.map((provider) => provider.id)).toEqual(["paired"]);
    // The two order-less providers land in "late" and are sorted by label
    // ("Alpha" before "Zulu"), not by id or insertion order.
    expect(grouped.late.map((provider) => provider.id)).toEqual(["late-a", "late-b"]);
  });
});
describe("normalizePluginDiscoveryResult", () => {
  it("maps a single provider result to the plugin id", () => {
    // Mixed-case plugin id should be normalized into the result key.
    const provider = makeProvider({ id: "Ollama" });
    const normalized = normalizePluginDiscoveryResult({
      provider,
      result: {
        provider: makeModelProviderConfig({
          baseUrl: "http://127.0.0.1:11434",
          api: "ollama",
        }),
      },
    });
    expect(normalized).toEqual({
      ollama: {
        baseUrl: "http://127.0.0.1:11434",
        api: "ollama",
        models: [],
      },
    });
  });
  it("normalizes keys for multi-provider discovery results", () => {
    // Keys are trimmed/normalized; empty keys are dropped entirely.
    const normalized = normalizePluginDiscoveryResult({
      provider: makeProvider({ id: "ignored" }),
      result: {
        providers: {
          " VLLM ": makeModelProviderConfig(),
          "": makeModelProviderConfig({ baseUrl: "http://ignored" }),
        },
      },
    });
    expect(normalized).toEqual({
      vllm: {
        baseUrl: "http://127.0.0.1:8000/v1",
        models: [],
      },
    });
  });
});

View File

@@ -0,0 +1,65 @@
import { normalizeProviderId } from "../agents/model-selection.js";
import type { OpenClawConfig } from "../config/config.js";
import type { ModelProviderConfig } from "../config/types.js";
import { resolvePluginProviders } from "./providers.js";
import type { ProviderDiscoveryOrder, ProviderPlugin } from "./types.js";
// Canonical processing order for the discovery passes.
const DISCOVERY_ORDER: readonly ProviderDiscoveryOrder[] = ["simple", "profile", "paired", "late"];

/** Returns only the plugin providers that declare a discovery hook. */
export function resolvePluginDiscoveryProviders(params: {
  config?: OpenClawConfig;
  workspaceDir?: string;
  env?: NodeJS.ProcessEnv;
}): ProviderPlugin[] {
  const all = resolvePluginProviders(params);
  return all.filter((provider) => Boolean(provider.discovery));
}

/**
 * Buckets discovery-capable providers by their declared order (defaulting to
 * "late") and sorts each bucket alphabetically by label for stable output.
 */
export function groupPluginDiscoveryProvidersByOrder(
  providers: ProviderPlugin[],
): Record<ProviderDiscoveryOrder, ProviderPlugin[]> {
  const buckets: Record<ProviderDiscoveryOrder, ProviderPlugin[]> = {
    simple: [],
    profile: [],
    paired: [],
    late: [],
  };
  for (const provider of providers) {
    buckets[provider.discovery?.order ?? "late"].push(provider);
  }
  for (const order of DISCOVERY_ORDER) {
    buckets[order].sort((left, right) => left.label.localeCompare(right.label));
  }
  return buckets;
}
/**
 * Flattens a plugin discovery result into a map of normalized provider id →
 * provider config. A single-provider result is keyed by the plugin's own id;
 * a multi-provider result keeps its own keys, normalized, dropping entries
 * whose key normalizes to empty or whose config is missing.
 */
export function normalizePluginDiscoveryResult(params: {
  provider: ProviderPlugin;
  result:
    | { provider: ModelProviderConfig }
    | { providers: Record<string, ModelProviderConfig> }
    | null
    | undefined;
}): Record<string, ModelProviderConfig> {
  const { provider, result } = params;
  if (!result) {
    return {};
  }
  if ("provider" in result) {
    return { [normalizeProviderId(provider.id)]: result.provider };
  }
  const out: Record<string, ModelProviderConfig> = {};
  for (const [rawKey, config] of Object.entries(result.providers)) {
    const key = normalizeProviderId(rawKey);
    if (key && config) {
      out[key] = config;
    }
  }
  return out;
}

View File

@@ -0,0 +1,243 @@
import { DEFAULT_PROVIDER } from "../agents/defaults.js";
import { parseModelRef } from "../agents/model-selection.js";
import { normalizeProviderId } from "../agents/model-selection.js";
import type { OpenClawConfig } from "../config/config.js";
import type { WizardPrompter } from "../wizard/prompts.js";
import { resolvePluginProviders } from "./providers.js";
import type {
ProviderAuthMethod,
ProviderPlugin,
ProviderPluginWizardModelPicker,
ProviderPluginWizardOnboarding,
} from "./types.js";
export const PROVIDER_PLUGIN_CHOICE_PREFIX = "provider-plugin:";
export type ProviderWizardOption = {
value: string;
label: string;
hint?: string;
groupId: string;
groupLabel: string;
groupHint?: string;
};
export type ProviderModelPickerEntry = {
value: string;
label: string;
hint?: string;
};
/** Canonicalizes a wizard choice id by stripping surrounding whitespace. */
function normalizeChoiceId(choiceId: string): string {
  const canonical = choiceId.trim();
  return canonical;
}
/**
 * Picks the choice id for a provider's onboarding wizard entry: an explicit
 * `choiceId` wins, then an explicit `methodId` builds a method-scoped choice,
 * then a single-method provider uses its bare id, and finally the first auth
 * method (or "default") is used.
 */
function resolveWizardOnboardingChoiceId(
  provider: ProviderPlugin,
  wizard: ProviderPluginWizardOnboarding,
): string {
  const fromChoice = wizard.choiceId?.trim();
  const fromMethod = wizard.methodId?.trim();
  if (fromChoice) {
    return fromChoice;
  }
  if (fromMethod) {
    return buildProviderPluginMethodChoice(provider.id, fromMethod);
  }
  return provider.auth.length === 1
    ? provider.id
    : buildProviderPluginMethodChoice(provider.id, provider.auth[0]?.id ?? "default");
}
/**
 * Finds a provider auth method by case-insensitive id. With no id given,
 * the provider's first auth method is the default.
 */
function resolveMethodById(
  provider: ProviderPlugin,
  methodId?: string,
): ProviderAuthMethod | undefined {
  const wanted = methodId?.trim().toLowerCase();
  if (!wanted) {
    return provider.auth[0];
  }
  return provider.auth.find((candidate) => candidate.id.trim().toLowerCase() === wanted);
}
/**
 * Build the wizard option shown for one provider auth method.
 *
 * Label precedence: explicit wizard choiceLabel → provider label when the
 * provider has exactly one auth method → the method's own label. Hints and
 * group fields likewise prefer the wizard's explicit (trimmed) overrides.
 */
function buildOnboardingOptionForMethod(params: {
  provider: ProviderPlugin;
  wizard: ProviderPluginWizardOnboarding;
  method: ProviderAuthMethod;
  value: string;
}): ProviderWizardOption {
  const normalizedGroupId = params.wizard.groupId?.trim() || params.provider.id;
  return {
    value: normalizeChoiceId(params.value),
    label:
      params.wizard.choiceLabel?.trim() ||
      (params.provider.auth.length === 1 ? params.provider.label : params.method.label),
    hint: params.wizard.choiceHint?.trim() || params.method.hint,
    groupId: normalizedGroupId,
    groupLabel: params.wizard.groupLabel?.trim() || params.provider.label,
    // Fix: a whitespace-only groupHint previously leaked "" into the option;
    // normalize empty to undefined, consistent with the other fields above.
    groupHint: params.wizard.groupHint?.trim() || undefined,
  };
}
/** Compose the compound "provider-plugin:<provider>:<method>" choice value. */
export function buildProviderPluginMethodChoice(providerId: string, methodId: string): string {
  const provider = providerId.trim();
  const method = methodId.trim();
  return `${PROVIDER_PLUGIN_CHOICE_PREFIX}${provider}:${method}`;
}
export function resolveProviderWizardOptions(params: {
config?: OpenClawConfig;
workspaceDir?: string;
env?: NodeJS.ProcessEnv;
}): ProviderWizardOption[] {
const providers = resolvePluginProviders(params);
const options: ProviderWizardOption[] = [];
for (const provider of providers) {
const wizard = provider.wizard?.onboarding;
if (!wizard) {
continue;
}
const explicitMethod = resolveMethodById(provider, wizard.methodId);
if (explicitMethod) {
options.push(
buildOnboardingOptionForMethod({
provider,
wizard,
method: explicitMethod,
value: resolveWizardOnboardingChoiceId(provider, wizard),
}),
);
continue;
}
for (const method of provider.auth) {
options.push(
buildOnboardingOptionForMethod({
provider,
wizard,
method,
value: buildProviderPluginMethodChoice(provider.id, method.id),
}),
);
}
}
return options;
}
/**
 * Choice value used when a provider appears in the model picker:
 * explicit picker methodId → bare provider id (single auth method) →
 * compound choice for the first auth method (or "default").
 */
function resolveModelPickerChoiceValue(
  provider: ProviderPlugin,
  modelPicker: ProviderPluginWizardModelPicker,
): string {
  const methodId = modelPicker.methodId?.trim();
  if (methodId) {
    return buildProviderPluginMethodChoice(provider.id, methodId);
  }
  if (provider.auth.length !== 1) {
    return buildProviderPluginMethodChoice(provider.id, provider.auth[0]?.id ?? "default");
  }
  return provider.id;
}
/**
 * Collect model-picker entries for provider plugins that declare
 * `wizard.modelPicker` metadata; all other providers are skipped.
 */
export function resolveProviderModelPickerEntries(params: {
  config?: OpenClawConfig;
  workspaceDir?: string;
  env?: NodeJS.ProcessEnv;
}): ProviderModelPickerEntry[] {
  const entries: ProviderModelPickerEntry[] = [];
  for (const provider of resolvePluginProviders(params)) {
    const picker = provider.wizard?.modelPicker;
    if (!picker) {
      continue;
    }
    entries.push({
      value: resolveModelPickerChoiceValue(provider, picker),
      // Default label mirrors the provider name, marked as a custom setup.
      label: picker.label?.trim() || `${provider.label} (custom)`,
      hint: picker.hint?.trim(),
    });
  }
  return entries;
}
/**
 * Map a wizard choice string back to the provider plugin and auth method it
 * designates. Accepts the compound "provider-plugin:<id>[:<method>]" form,
 * a provider's onboarding choice id, or a bare provider id; returns null
 * when the choice matches nothing.
 */
export function resolveProviderPluginChoice(params: {
  providers: ProviderPlugin[];
  choice: string;
}): { provider: ProviderPlugin; method: ProviderAuthMethod } | null {
  const choice = params.choice.trim();
  if (!choice) {
    return null;
  }

  if (choice.startsWith(PROVIDER_PLUGIN_CHOICE_PREFIX)) {
    // Compound form: everything after the prefix up to the first ":" is the
    // provider id; the remainder (if any) is the method id.
    const payload = choice.slice(PROVIDER_PLUGIN_CHOICE_PREFIX.length);
    const sep = payload.indexOf(":");
    const providerId = sep < 0 ? payload : payload.slice(0, sep);
    const methodId = sep < 0 ? undefined : payload.slice(sep + 1);
    const wantedId = normalizeProviderId(providerId);
    const provider = params.providers.find((entry) => normalizeProviderId(entry.id) === wantedId);
    if (!provider) {
      return null;
    }
    const method = resolveMethodById(provider, methodId);
    return method ? { provider, method } : null;
  }

  const wantedProviderId = normalizeProviderId(choice);
  for (const provider of params.providers) {
    const onboarding = provider.wizard?.onboarding;
    if (
      onboarding &&
      normalizeChoiceId(resolveWizardOnboardingChoiceId(provider, onboarding)) === choice
    ) {
      const method = resolveMethodById(provider, onboarding.methodId);
      if (method) {
        return { provider, method };
      }
    }
    // Bare provider id: default to the provider's first auth method.
    if (normalizeProviderId(provider.id) === wantedProviderId && provider.auth.length > 0) {
      return { provider, method: provider.auth[0] };
    }
  }
  return null;
}
/**
 * Invoke the matching provider plugin's `onModelSelected` hook after the
 * wizard picks a model. No-ops when the model ref fails to parse or the
 * resolved provider does not declare the hook.
 */
export async function runProviderModelSelectedHook(params: {
  config: OpenClawConfig;
  model: string;
  prompter: WizardPrompter;
  agentDir?: string;
  workspaceDir?: string;
  env?: NodeJS.ProcessEnv;
}): Promise<void> {
  const parsed = parseModelRef(params.model, DEFAULT_PROVIDER);
  if (!parsed) {
    return;
  }
  const wantedId = normalizeProviderId(parsed.provider);
  const match = resolvePluginProviders({
    config: params.config,
    workspaceDir: params.workspaceDir,
    env: params.env,
  }).find((entry) => normalizeProviderId(entry.id) === wantedId);
  if (!match?.onModelSelected) {
    return;
  }
  await match.onModelSelected({
    config: params.config,
    model: params.model,
    prompter: params.prompter,
    agentDir: params.agentDir,
    workspaceDir: params.workspaceDir,
  });
}

View File

@@ -119,6 +119,59 @@ export type ProviderAuthMethod = {
run: (ctx: ProviderAuthContext) => Promise<ProviderAuthResult>;
};
/** Phase in which a plugin's discovery hook runs — presumably controls scheduling relative to other discovery passes; confirm against the discovery runner. */
export type ProviderDiscoveryOrder = "simple" | "profile" | "paired" | "late";
/** Context handed to a provider plugin's discovery hook. */
export type ProviderDiscoveryContext = {
  config: OpenClawConfig;
  agentDir?: string;
  workspaceDir?: string;
  env: NodeJS.ProcessEnv;
  // Resolves the API key for a provider id; providerId presumably defaults
  // to the plugin's own id when omitted — verify against the caller.
  resolveProviderApiKey: (providerId?: string) => {
    apiKey: string | undefined;
    discoveryApiKey?: string;
  };
};
/**
 * Outcome of a discovery run: a single provider config, a map of provider
 * id → config, or null/undefined when discovery found nothing.
 */
export type ProviderDiscoveryResult =
  | { provider: ModelProviderConfig }
  | { providers: Record<string, ModelProviderConfig> }
  | null
  | undefined;
/** A provider plugin's discovery hook and its optional scheduling phase. */
export type ProviderPluginDiscovery = {
  order?: ProviderDiscoveryOrder;
  run: (ctx: ProviderDiscoveryContext) => Promise<ProviderDiscoveryResult>;
};
/**
 * Optional overrides for how a provider is presented in the onboarding
 * wizard. All fields fall back to provider/method metadata when omitted.
 */
export type ProviderPluginWizardOnboarding = {
  choiceId?: string;
  choiceLabel?: string;
  choiceHint?: string;
  groupId?: string;
  groupLabel?: string;
  groupHint?: string;
  // Pins the onboarding entry to one specific auth method by id.
  methodId?: string;
};
/** Optional overrides for a provider's entry in the model picker. */
export type ProviderPluginWizardModelPicker = {
  label?: string;
  hint?: string;
  methodId?: string;
};
/** Wizard-related metadata a provider plugin may declare. */
export type ProviderPluginWizard = {
  onboarding?: ProviderPluginWizardOnboarding;
  modelPicker?: ProviderPluginWizardModelPicker;
};
/** Context passed to a provider plugin's onModelSelected hook. */
export type ProviderModelSelectedContext = {
  config: OpenClawConfig;
  // The selected model ref (e.g. "provider/model" — inferred from usage; confirm format).
  model: string;
  prompter: WizardPrompter;
  agentDir?: string;
  workspaceDir?: string;
};
export type ProviderPlugin = {
id: string;
label: string;
@@ -127,8 +180,11 @@ export type ProviderPlugin = {
envVars?: string[];
models?: ModelProviderConfig;
auth: ProviderAuthMethod[];
discovery?: ProviderPluginDiscovery;
wizard?: ProviderPluginWizard;
formatApiKey?: (cred: AuthProfileCredential) => string;
refreshOAuth?: (cred: OAuthCredential) => Promise<OAuthCredential>;
onModelSelected?: (ctx: ProviderModelSelectedContext) => Promise<void>;
};
export type OpenClawPluginGatewayMethod = {

View File

@@ -426,6 +426,8 @@ export async function runOnboardingWizard(
prompter,
store: authStore,
includeSkip: true,
config: nextConfig,
workspaceDir,
}));
if (authChoice === "custom-api-key") {
@@ -442,7 +444,7 @@ export async function runOnboardingWizard(
config: nextConfig,
prompter,
runtime,
setDefaultModel: !(authChoiceFromPrompt && authChoice === "ollama"),
setDefaultModel: true,
opts: {
tokenProvider: opts.tokenProvider,
token: opts.authChoice === "apiKey" && opts.token ? opts.token : undefined,
@@ -461,8 +463,14 @@ export async function runOnboardingWizard(
prompter,
allowKeep: true,
ignoreAllowlist: true,
includeVllm: true,
preferredProvider: resolvePreferredProviderForAuthChoice(authChoice),
includeProviderPluginSetups: true,
preferredProvider: resolvePreferredProviderForAuthChoice({
choice: authChoice,
config: nextConfig,
workspaceDir,
}),
workspaceDir,
runtime,
});
if (modelSelection.config) {
nextConfig = modelSelection.config;
@@ -472,11 +480,6 @@ export async function runOnboardingWizard(
}
}
if (authChoice === "ollama") {
const { ensureOllamaModelPulled } = await import("../commands/ollama-setup.js");
await ensureOllamaModelPulled({ config: nextConfig, prompter });
}
await warnIfModelConfigLooksOff(nextConfig, prompter);
const { configureGatewayForOnboarding } = await import("./onboarding.gateway-config.js");