feat: modularize provider plugin architecture

This commit is contained in:
Peter Steinberger
2026-03-12 22:24:22 +00:00
parent bf89947a8e
commit d83491e751
41 changed files with 1734 additions and 260 deletions

View File

@@ -0,0 +1,3 @@
# Ollama Provider
Bundled provider plugin for Ollama discovery and setup.

115
extensions/ollama/index.ts Normal file
View File

@@ -0,0 +1,115 @@
import {
buildOllamaProvider,
emptyPluginConfigSchema,
ensureOllamaModelPulled,
OLLAMA_DEFAULT_BASE_URL,
promptAndConfigureOllama,
type OpenClawPluginApi,
type ProviderAuthContext,
type ProviderAuthResult,
type ProviderDiscoveryContext,
} from "openclaw/plugin-sdk/core";
const PROVIDER_ID = "ollama";
const DEFAULT_API_KEY = "ollama-local";

/** Trim whitespace and trailing slashes from a configured base URL; fall back to the SDK default. */
function normalizedBaseUrl(raw: unknown): string {
  if (typeof raw === "string" && raw.trim()) {
    return raw.trim().replace(/\/+$/, "");
  }
  return OLLAMA_DEFAULT_BASE_URL;
}

/** Interactive auth: run the Ollama setup prompts and emit the default local credential profile. */
async function authenticate(ctx: ProviderAuthContext): Promise<ProviderAuthResult> {
  const setup = await promptAndConfigureOllama({
    cfg: ctx.config,
    prompter: ctx.prompter,
  });
  return {
    profiles: [
      {
        profileId: "ollama:default",
        credential: {
          type: "api_key",
          provider: PROVIDER_ID,
          key: DEFAULT_API_KEY,
        },
      },
    ],
    configPatch: setup.config,
    defaultModel: `ollama/${setup.defaultModelId}`,
  };
}

/**
 * Discovery: if the user explicitly configured an ollama provider with models,
 * honor it verbatim (normalizing the base URL and filling key/api defaults);
 * otherwise probe the instance and return null when nothing was found and no
 * key or config suggests Ollama is in use.
 */
async function discover(ctx: ProviderDiscoveryContext) {
  const explicit = ctx.config.models?.providers?.ollama;
  const hasExplicitModels = Array.isArray(explicit?.models) && explicit.models.length > 0;
  const ollamaKey = ctx.resolveProviderApiKey(PROVIDER_ID).apiKey;
  if (explicit && hasExplicitModels) {
    return {
      provider: {
        ...explicit,
        baseUrl: normalizedBaseUrl(explicit.baseUrl),
        api: explicit.api ?? "ollama",
        apiKey: ollamaKey ?? explicit.apiKey ?? DEFAULT_API_KEY,
      },
    };
  }
  // Stay quiet when nothing hints that Ollama is configured at all.
  const probed = await buildOllamaProvider(explicit?.baseUrl, {
    quiet: !ollamaKey && !explicit,
  });
  if (probed.models.length === 0 && !ollamaKey && !explicit?.apiKey) {
    return null;
  }
  return {
    provider: {
      ...probed,
      apiKey: ollamaKey ?? explicit?.apiKey ?? DEFAULT_API_KEY,
    },
  };
}

/** Bundled Ollama provider plugin: registers auth, discovery, and wizard wiring. */
const ollamaPlugin = {
  id: PROVIDER_ID,
  name: "Ollama Provider",
  description: "Bundled Ollama provider plugin",
  configSchema: emptyPluginConfigSchema(),
  register(api: OpenClawPluginApi) {
    api.registerProvider({
      id: PROVIDER_ID,
      label: "Ollama",
      docsPath: "/providers/ollama",
      envVars: ["OLLAMA_API_KEY"],
      auth: [
        {
          id: "local",
          label: "Ollama",
          hint: "Cloud and local open models",
          kind: "custom",
          run: authenticate,
        },
      ],
      discovery: {
        order: "late",
        run: discover,
      },
      wizard: {
        onboarding: {
          choiceId: "ollama",
          choiceLabel: "Ollama",
          choiceHint: "Cloud and local open models",
          groupId: "ollama",
          groupLabel: "Ollama",
          groupHint: "Cloud and local open models",
          methodId: "local",
        },
        modelPicker: {
          label: "Ollama (custom)",
          hint: "Detect models from a local or remote Ollama instance",
          methodId: "local",
        },
      },
      // Ensure the chosen model is actually pulled when an ollama/* model is selected.
      onModelSelected: async ({ config, model, prompter }) => {
        if (model.startsWith("ollama/")) {
          await ensureOllamaModelPulled({ config, prompter });
        }
      },
    });
  },
};
export default ollamaPlugin;

View File

@@ -0,0 +1,9 @@
{
"id": "ollama",
"providers": ["ollama"],
"configSchema": {
"type": "object",
"additionalProperties": false,
"properties": {}
}
}

View File

@@ -0,0 +1,12 @@
{
"name": "@openclaw/ollama-provider",
"version": "2026.3.12",
"private": true,
"description": "OpenClaw Ollama provider plugin",
"type": "module",
"openclaw": {
"extensions": [
"./index.ts"
]
}
}

View File

@@ -0,0 +1,3 @@
# SGLang Provider
Bundled provider plugin for SGLang discovery and setup.

View File

@@ -0,0 +1,92 @@
import {
buildSglangProvider,
emptyPluginConfigSchema,
promptAndConfigureOpenAICompatibleSelfHostedProvider,
type OpenClawPluginApi,
type ProviderAuthContext,
type ProviderAuthResult,
type ProviderDiscoveryContext,
} from "openclaw/plugin-sdk/core";
const PROVIDER_ID = "sglang";
const DEFAULT_BASE_URL = "http://127.0.0.1:30000/v1";

/**
 * Interactive auth: prompt for an OpenAI-compatible self-hosted SGLang endpoint
 * and translate the wizard outcome into a credential profile plus config patch.
 */
async function authenticate(ctx: ProviderAuthContext): Promise<ProviderAuthResult> {
  const outcome = await promptAndConfigureOpenAICompatibleSelfHostedProvider({
    cfg: ctx.config,
    prompter: ctx.prompter,
    providerId: PROVIDER_ID,
    providerLabel: "SGLang",
    defaultBaseUrl: DEFAULT_BASE_URL,
    defaultApiKeyEnvVar: "SGLANG_API_KEY",
    modelPlaceholder: "Qwen/Qwen3-8B",
  });
  return {
    profiles: [{ profileId: outcome.profileId, credential: outcome.credential }],
    configPatch: outcome.config,
    defaultModel: outcome.modelRef,
  };
}

/**
 * Discovery: skip when the user already declared an sglang provider block or
 * when no API key resolves; otherwise build the provider from the live server.
 */
async function discover(ctx: ProviderDiscoveryContext) {
  if (ctx.config.models?.providers?.sglang) {
    return null;
  }
  const resolved = ctx.resolveProviderApiKey(PROVIDER_ID);
  if (!resolved.apiKey) {
    return null;
  }
  const base = await buildSglangProvider({ apiKey: resolved.discoveryApiKey });
  return {
    provider: {
      ...base,
      apiKey: resolved.apiKey,
    },
  };
}

/** Bundled SGLang provider plugin: registers auth, discovery, and wizard wiring. */
const sglangPlugin = {
  id: PROVIDER_ID,
  name: "SGLang Provider",
  description: "Bundled SGLang provider plugin",
  configSchema: emptyPluginConfigSchema(),
  register(api: OpenClawPluginApi) {
    api.registerProvider({
      id: PROVIDER_ID,
      label: "SGLang",
      docsPath: "/providers/sglang",
      envVars: ["SGLANG_API_KEY"],
      auth: [
        {
          id: "custom",
          label: "SGLang",
          hint: "Fast self-hosted OpenAI-compatible server",
          kind: "custom",
          run: authenticate,
        },
      ],
      discovery: {
        order: "late",
        run: discover,
      },
      wizard: {
        onboarding: {
          choiceId: "sglang",
          choiceLabel: "SGLang",
          choiceHint: "Fast self-hosted OpenAI-compatible server",
          groupId: "sglang",
          groupLabel: "SGLang",
          groupHint: "Fast self-hosted server",
          methodId: "custom",
        },
        modelPicker: {
          label: "SGLang (custom)",
          hint: "Enter SGLang URL + API key + model",
          methodId: "custom",
        },
      },
    });
  },
};
export default sglangPlugin;

View File

@@ -0,0 +1,9 @@
{
"id": "sglang",
"providers": ["sglang"],
"configSchema": {
"type": "object",
"additionalProperties": false,
"properties": {}
}
}

View File

@@ -0,0 +1,12 @@
{
"name": "@openclaw/sglang-provider",
"version": "2026.3.12",
"private": true,
"description": "OpenClaw SGLang provider plugin",
"type": "module",
"openclaw": {
"extensions": [
"./index.ts"
]
}
}

View File

@@ -0,0 +1,3 @@
# vLLM Provider
Bundled provider plugin for vLLM discovery and setup.

92
extensions/vllm/index.ts Normal file
View File

@@ -0,0 +1,92 @@
import {
buildVllmProvider,
emptyPluginConfigSchema,
promptAndConfigureOpenAICompatibleSelfHostedProvider,
type OpenClawPluginApi,
type ProviderAuthContext,
type ProviderAuthResult,
type ProviderDiscoveryContext,
} from "openclaw/plugin-sdk/core";
const PROVIDER_ID = "vllm";
const DEFAULT_BASE_URL = "http://127.0.0.1:8000/v1";

/**
 * Interactive auth: prompt for an OpenAI-compatible self-hosted vLLM endpoint
 * and translate the wizard outcome into a credential profile plus config patch.
 */
async function authenticate(ctx: ProviderAuthContext): Promise<ProviderAuthResult> {
  const outcome = await promptAndConfigureOpenAICompatibleSelfHostedProvider({
    cfg: ctx.config,
    prompter: ctx.prompter,
    providerId: PROVIDER_ID,
    providerLabel: "vLLM",
    defaultBaseUrl: DEFAULT_BASE_URL,
    defaultApiKeyEnvVar: "VLLM_API_KEY",
    modelPlaceholder: "meta-llama/Meta-Llama-3-8B-Instruct",
  });
  return {
    profiles: [{ profileId: outcome.profileId, credential: outcome.credential }],
    configPatch: outcome.config,
    defaultModel: outcome.modelRef,
  };
}

/**
 * Discovery: skip when the user already declared a vllm provider block or
 * when no API key resolves; otherwise build the provider from the live server.
 */
async function discover(ctx: ProviderDiscoveryContext) {
  if (ctx.config.models?.providers?.vllm) {
    return null;
  }
  const resolved = ctx.resolveProviderApiKey(PROVIDER_ID);
  if (!resolved.apiKey) {
    return null;
  }
  const base = await buildVllmProvider({ apiKey: resolved.discoveryApiKey });
  return {
    provider: {
      ...base,
      apiKey: resolved.apiKey,
    },
  };
}

/** Bundled vLLM provider plugin: registers auth, discovery, and wizard wiring. */
const vllmPlugin = {
  id: PROVIDER_ID,
  name: "vLLM Provider",
  description: "Bundled vLLM provider plugin",
  configSchema: emptyPluginConfigSchema(),
  register(api: OpenClawPluginApi) {
    api.registerProvider({
      id: PROVIDER_ID,
      label: "vLLM",
      docsPath: "/providers/vllm",
      envVars: ["VLLM_API_KEY"],
      auth: [
        {
          id: "custom",
          label: "vLLM",
          hint: "Local/self-hosted OpenAI-compatible server",
          kind: "custom",
          run: authenticate,
        },
      ],
      discovery: {
        order: "late",
        run: discover,
      },
      wizard: {
        onboarding: {
          choiceId: "vllm",
          choiceLabel: "vLLM",
          choiceHint: "Local/self-hosted OpenAI-compatible server",
          groupId: "vllm",
          groupLabel: "vLLM",
          groupHint: "Local/self-hosted OpenAI-compatible",
          methodId: "custom",
        },
        modelPicker: {
          label: "vLLM (custom)",
          hint: "Enter vLLM URL + API key + model",
          methodId: "custom",
        },
      },
    });
  },
};
export default vllmPlugin;

View File

@@ -0,0 +1,9 @@
{
"id": "vllm",
"providers": ["vllm"],
"configSchema": {
"type": "object",
"additionalProperties": false,
"properties": {}
}
}

View File

@@ -0,0 +1,12 @@
{
"name": "@openclaw/vllm-provider",
"version": "2026.3.12",
"private": true,
"description": "OpenClaw vLLM provider plugin",
"type": "module",
"openclaw": {
"extensions": [
"./index.ts"
]
}
}