diff --git a/flake.nix b/flake.nix index 3ca2458..af5fbe6 100644 --- a/flake.nix +++ b/flake.nix @@ -31,6 +31,9 @@ gcc clang libclang + + # TypeScript / OpenClaw plugin + nodejs_22 # Useful tools jq diff --git a/justfile b/justfile index e4bbefc..39999bf 100644 --- a/justfile +++ b/justfile @@ -256,3 +256,51 @@ ghcr-pull tag="latest": @echo "๐Ÿ“ฅ Pulling from GitHub Container Registry..." @{{container}} pull ghcr.io/opensecretcloud/maple-proxy:{{tag}} @echo "โœ… Pulled ghcr.io/opensecretcloud/maple-proxy:{{tag}}" + +# === OpenClaw Plugin === + +# Install plugin dependencies +plugin-install: + @echo "๐Ÿ“ฆ Installing plugin dependencies..." + @cd openclaw-plugin && npm install + @echo "โœ… Plugin dependencies installed" + +# Build plugin (TypeScript -> JS) +plugin-build: + @echo "๐Ÿ”จ Building OpenClaw plugin..." + @cd openclaw-plugin && npm run build + @echo "โœ… Plugin built" + +# Lint plugin +plugin-lint: + @echo "๐Ÿ” Linting plugin..." + @cd openclaw-plugin && npm run lint + @echo "โœ… Plugin linted" + +# Test plugin +plugin-test: + @echo "๐Ÿงช Testing plugin..." + @cd openclaw-plugin && npm test + @echo "โœ… Plugin tests passed" + +# Check all (Rust + plugin) +check-all: check plugin-lint plugin-test + @echo "โœ… All checks passed (Rust + Plugin)" + +# Link plugin locally for OpenClaw development +plugin-link: + @echo "๐Ÿ”— Linking plugin to OpenClaw extensions..." + @openclaw plugins install -l ./openclaw-plugin + @echo "โœ… Plugin linked" + +# Pack plugin for npm publishing +plugin-pack: + @echo "๐Ÿ“ฆ Packing plugin for npm..." + @cd openclaw-plugin && npm pack + @echo "โœ… Plugin packed" + +# Publish plugin to npm +plugin-publish: + @echo "๐Ÿš€ Publishing plugin to npm..." 
+ @cd openclaw-plugin && npm publish --access public + @echo "โœ… Plugin published" diff --git a/openclaw-plugin/.gitignore b/openclaw-plugin/.gitignore new file mode 100644 index 0000000..1ab415f --- /dev/null +++ b/openclaw-plugin/.gitignore @@ -0,0 +1,3 @@ +node_modules/ +dist/ +*.tgz diff --git a/openclaw-plugin/README.md b/openclaw-plugin/README.md new file mode 100644 index 0000000..2e9dd43 --- /dev/null +++ b/openclaw-plugin/README.md @@ -0,0 +1,199 @@ +# @opensecret/maple-openclaw-plugin + +OpenClaw plugin that automatically downloads, configures, and runs [maple-proxy](https://github.com/OpenSecretCloud/maple-proxy) as a background service. All AI inference runs inside Maple's TEE (Trusted Execution Environment) secure enclaves. + +## Quick Start (Recommended) + +Install the plugin and let your agent handle the rest: + +```bash +openclaw plugins install @opensecret/maple-openclaw-plugin +``` + +Then tell your agent: + +> Install and configure maple-proxy with my API key: `YOUR_MAPLE_API_KEY` + +The plugin bundles a skill that teaches the agent how to set up the maple provider, configure models, and enable embeddings. After a gateway restart, the agent will have all the context it needs from the skill to complete the setup. If the plugin isn't configured yet, the `maple_proxy_status` tool also returns step-by-step instructions. + +## Manual Setup + +If you prefer to configure everything yourself, follow these steps after installing the plugin. + +### 1. Configure the plugin + +Set your Maple API key in `openclaw.json`: + +```json +{ + "plugins": { + "entries": { + "maple-openclaw-plugin": { + "enabled": true, + "config": { + "apiKey": "YOUR_MAPLE_API_KEY" + } + } + } + } +} +``` + +### 2. 
Add the Maple provider + +Add a `maple` provider so OpenClaw can route requests to the local proxy (default port **8787**): + +```json +{ + "models": { + "providers": { + "maple": { + "baseUrl": "http://127.0.0.1:8787/v1", + "apiKey": "YOUR_MAPLE_API_KEY", + "api": "openai-completions", + "models": [ + { "id": "kimi-k2-5", "name": "Kimi K2.5 (recommended)" }, + { "id": "deepseek-r1-0528", "name": "DeepSeek R1" }, + { "id": "gpt-oss-120b", "name": "GPT-OSS 120B" }, + { "id": "llama-3.3-70b", "name": "Llama 3.3 70B" }, + { "id": "qwen3-vl-30b", "name": "Qwen3 VL 30B" } + ] + } + } + } +} +``` + +Use the same Maple API key in both places. To discover all available models, call `GET http://127.0.0.1:8787/v1/models` after startup. + +### 3. Add models to the allowlist (if applicable) + +If you have an `agents.defaults.models` section in your config, add the maple models you want. If you don't have this section, skip this step -- all models are allowed by default. + +```json +{ + "agents": { + "defaults": { + "models": { + "maple/kimi-k2-5": {}, + "maple/deepseek-r1-0528": {}, + "maple/gpt-oss-120b": {}, + "maple/llama-3.3-70b": {}, + "maple/qwen3-vl-30b": {} + } + } + } +} +``` + +### 4. Restart the gateway + +```bash +systemctl restart openclaw.service +``` + +Plugin config changes always require a full gateway restart. Model and provider config changes hot-apply without a restart. + +## Usage + +Use maple models by prefixing with `maple/`: + +- `maple/kimi-k2-5` (recommended) +- `maple/deepseek-r1-0528` +- `maple/gpt-oss-120b` +- `maple/llama-3.3-70b` +- `maple/qwen3-vl-30b` + +The plugin also registers a `maple_proxy_status` tool that shows the proxy's health, port, version, and available endpoints. If the plugin isn't configured yet, the tool returns setup instructions. + +## Embeddings & Memory Search + +maple-proxy serves an OpenAI-compatible embeddings endpoint using the `nomic-embed-text` model. 
You can use this for OpenClaw's memory search so embeddings are generated inside the TEE -- no cloud embedding provider needed. + +### Enable the memory-core plugin + +The `memory_search` and `memory_get` tools come from OpenClaw's `memory-core` plugin. It ships as a stock plugin but **must be explicitly enabled**: + +```json +{ + "plugins": { + "allow": ["memory-core"], + "entries": { + "memory-core": { + "enabled": true + } + } + } +} +``` + +### Configure memorySearch + +> **Important**: The model field must be `nomic-embed-text` (without a `maple/` prefix). Using `maple/nomic-embed-text` will cause 400 errors. + +```json +{ + "agents": { + "defaults": { + "memorySearch": { + "enabled": true, + "provider": "openai", + "model": "nomic-embed-text", + "remote": { + "baseUrl": "http://127.0.0.1:8787/v1/", + "apiKey": "YOUR_MAPLE_API_KEY" + } + } + } + } +} +``` + +### Restart and reindex + +```bash +systemctl restart openclaw.service +openclaw memory index --verbose +openclaw memory status --deep +``` + +The status output should show **Embeddings: available** and **Vector: ready**. 
+ +### Troubleshooting + +| Problem | Cause | Fix | +|---|---|---| +| "memory slot plugin not found" | `memory-core` not enabled | Add to `plugins.allow` and `plugins.entries`, restart | +| Embeddings 400 error | Model has provider prefix | Change `maple/nomic-embed-text` to `nomic-embed-text` | +| Embeddings 401 error | Wrong API key | Check the key is the actual value, not a placeholder | +| "Batch: disabled" in status | Too many embedding failures | Fix config, restart to reset failure counter | +| Only some files indexed | Embeddings were failing during indexing | Fix config, restart, run `openclaw memory index --verbose` | + +## Plugin Config Options + +| Option | Default | Description | +|---|---|---| +| `apiKey` | (required) | Your Maple API key | +| `port` | `8787` | Local port for the proxy | +| `backendUrl` | `https://enclave.trymaple.ai` | Maple TEE backend URL | +| `debug` | `false` | Enable debug logging | +| `version` | (latest) | Pin to a specific maple-proxy version | + +## Updating + +```bash +openclaw plugins update maple-openclaw-plugin +``` + +> **Note**: `openclaw plugins update` works for stable releases. 
To move between beta versions, reinstall with the full version: `openclaw plugins install @opensecret/maple-openclaw-plugin@0.1.0-beta.4` + +## Direct API Access + +- `GET http://127.0.0.1:8787/v1/models` -- List available models +- `POST http://127.0.0.1:8787/v1/chat/completions` -- Chat completions (streaming and non-streaming) +- `POST http://127.0.0.1:8787/v1/embeddings` -- Generate embeddings (model: `nomic-embed-text`) +- `GET http://127.0.0.1:8787/health` -- Health check + +## License + +MIT diff --git a/openclaw-plugin/index.ts b/openclaw-plugin/index.ts new file mode 100644 index 0000000..16480cb --- /dev/null +++ b/openclaw-plugin/index.ts @@ -0,0 +1,186 @@ +import { ensureBinary } from "./lib/downloader.js"; +import { startProxy, type RunningProxy } from "./lib/process.js"; + +interface PluginConfig { + apiKey: string; + port?: number; + backendUrl?: string; + debug?: boolean; + version?: string; +} + +interface PluginApi { + config: { plugins: { entries: Record } }; + logger: { info: (msg: string) => void; error: (msg: string) => void }; + registerService: (service: { + id: string; + start: () => Promise; + stop: () => Promise; + }) => void; + registerTool: ( + tool: { + name: string; + description: string; + parameters: Record; + execute: ( + id: string, + params: Record + ) => Promise<{ content: Array<{ type: string; text: string }> }>; + }, + opts?: { optional?: boolean } + ) => void; +} + +export const id = "maple-openclaw-plugin"; +export const name = "Maple Proxy"; + +const PLUGIN_CONFIG_KEY = "maple-openclaw-plugin"; + +export default function register(api: PluginApi) { + let proxy: RunningProxy | null = null; + let starting = false; + + api.registerTool({ + name: "maple_proxy_status", + description: + "Check the status of the local maple-proxy server. 
" + + "Returns the port, version, and health status.", + parameters: { + type: "object", + properties: {}, + }, + async execute() { + const pluginConfig = + api.config.plugins.entries[PLUGIN_CONFIG_KEY]?.config; + + if (!pluginConfig?.apiKey) { + return { + content: [ + { + type: "text", + text: JSON.stringify({ + running: false, + error: "maple-proxy is not configured", + setup: { + step1: 'Set your Maple API key: plugins.entries["maple-openclaw-plugin"].config.apiKey', + step2: "Add a maple provider to models.providers with baseUrl http://127.0.0.1:8787/v1 and your Maple API key", + step3: "If you have agents.defaults.models, add the maple models (e.g. maple/kimi-k2-5)", + step4: "Restart the gateway", + }, + }), + }, + ], + }; + } + + if (!proxy) { + return { + content: [ + { + type: "text", + text: JSON.stringify({ + running: false, + error: "maple-proxy is not running. The API key is configured but the service failed to start. Check gateway logs for details.", + }), + }, + ], + }; + } + + let healthy = false; + try { + const res = await fetch( + `http://127.0.0.1:${proxy.port}/health` + ); + healthy = res.ok; + } catch { + // Not healthy + } + + return { + content: [ + { + type: "text", + text: JSON.stringify({ + running: true, + healthy, + port: proxy.port, + version: proxy.version, + endpoint: `http://127.0.0.1:${proxy.port}/v1`, + modelsUrl: `http://127.0.0.1:${proxy.port}/v1/models`, + chatUrl: `http://127.0.0.1:${proxy.port}/v1/chat/completions`, + }), + }, + ], + }; + }, + }); + + api.registerService({ + id: "maple-proxy-service", + + async start() { + if (starting) { + api.logger.info("maple-proxy start already in progress, skipping"); + return; + } + starting = true; + + try { + if (proxy) { + api.logger.info("Stopping existing maple-proxy before restart..."); + proxy.kill(); + proxy = null; + } + + const pluginConfig = + api.config.plugins.entries[PLUGIN_CONFIG_KEY]?.config; + + if (!pluginConfig?.apiKey) { + api.logger.error( + `${PLUGIN_CONFIG_KEY}: 
no apiKey configured. ` + + `Set plugins.entries["${PLUGIN_CONFIG_KEY}"].config.apiKey in openclaw.json` + ); + return; + } + + const { binaryPath, version } = await ensureBinary( + api.logger, + pluginConfig.version + ); + api.logger.info(`maple-proxy binary: ${version} at ${binaryPath}`); + + proxy = await startProxy( + { + binaryPath, + apiKey: pluginConfig.apiKey, + port: pluginConfig.port, + backendUrl: pluginConfig.backendUrl, + debug: pluginConfig.debug, + }, + version, + api.logger + ); + + api.logger.info( + `maple-proxy is OpenAI-compatible at http://127.0.0.1:${proxy.port}/v1 ` + + `-- configure as maple provider or use directly` + ); + } catch (err) { + api.logger.error( + `${PLUGIN_CONFIG_KEY}: failed to start: ${err instanceof Error ? err.message : err}` + ); + } finally { + starting = false; + } + }, + + async stop() { + if (proxy) { + api.logger.info("Stopping maple-proxy..."); + proxy.kill(); + proxy = null; + } + }, + }); +} diff --git a/openclaw-plugin/lib/downloader.test.ts b/openclaw-plugin/lib/downloader.test.ts new file mode 100644 index 0000000..c8527f9 --- /dev/null +++ b/openclaw-plugin/lib/downloader.test.ts @@ -0,0 +1,65 @@ +import { describe, it } from "node:test"; +import assert from "node:assert/strict"; +import { compareVersionsDesc } from "./downloader.js"; + +describe("compareVersionsDesc", () => { + it("sorts simple versions in descending order", () => { + const input = ["v0.1.0", "v0.2.0", "v0.1.5"]; + const result = [...input].sort(compareVersionsDesc); + assert.deepStrictEqual(result, ["v0.2.0", "v0.1.5", "v0.1.0"]); + }); + + it("handles v0.9.0 vs v0.10.0 correctly (not lexicographic)", () => { + const input = ["v0.9.0", "v0.10.0", "v0.2.0"]; + const result = [...input].sort(compareVersionsDesc); + assert.deepStrictEqual(result, ["v0.10.0", "v0.9.0", "v0.2.0"]); + }); + + it("handles major version differences", () => { + const input = ["v1.0.0", "v2.0.0", "v0.9.0"]; + const result = [...input].sort(compareVersionsDesc); + 
assert.deepStrictEqual(result, ["v2.0.0", "v1.0.0", "v0.9.0"]); + }); + + it("handles patch version differences", () => { + const input = ["v0.1.1", "v0.1.3", "v0.1.2"]; + const result = [...input].sort(compareVersionsDesc); + assert.deepStrictEqual(result, ["v0.1.3", "v0.1.2", "v0.1.1"]); + }); + + it("handles double-digit version components", () => { + const input = ["v1.2.3", "v1.12.0", "v1.2.30"]; + const result = [...input].sort(compareVersionsDesc); + assert.deepStrictEqual(result, ["v1.12.0", "v1.2.30", "v1.2.3"]); + }); + + it("keeps equal versions stable", () => { + const input = ["v0.1.6", "v0.1.6"]; + const result = [...input].sort(compareVersionsDesc); + assert.deepStrictEqual(result, ["v0.1.6", "v0.1.6"]); + }); + + it("handles single element", () => { + const input = ["v0.1.0"]; + const result = [...input].sort(compareVersionsDesc); + assert.deepStrictEqual(result, ["v0.1.0"]); + }); + + it("handles empty array", () => { + const input: string[] = []; + const result = [...input].sort(compareVersionsDesc); + assert.deepStrictEqual(result, []); + }); + + it("handles realistic release sequence", () => { + const input = ["v0.1.0", "v0.1.6", "v0.1.5", "v0.2.0", "v0.1.10"]; + const result = [...input].sort(compareVersionsDesc); + assert.deepStrictEqual(result, [ + "v0.2.0", + "v0.1.10", + "v0.1.6", + "v0.1.5", + "v0.1.0", + ]); + }); +}); diff --git a/openclaw-plugin/lib/downloader.ts b/openclaw-plugin/lib/downloader.ts new file mode 100644 index 0000000..107f026 --- /dev/null +++ b/openclaw-plugin/lib/downloader.ts @@ -0,0 +1,218 @@ +import fs from "node:fs"; +import fsp from "node:fs/promises"; +import path from "node:path"; +import { execFile } from "node:child_process"; +import { promisify } from "node:util"; +import { + getArtifact, + getReleaseUrl, + getChecksumUrl, + getBinaryPath, + getCacheDir, + getLatestVersion, +} from "./platform.js"; + +const execFileAsync = promisify(execFile); + +const VERSION_CHECK_TTL_MS = 24 * 60 * 60 * 1000; // 24 hours 
+const MAX_KEPT_VERSIONS = 2; // current + one previous + +function parseVer(v: string): number[] { + return v.replace(/^v/, "").split(".").map(Number); +} + +export function compareVersionsDesc(a: string, b: string): number { + const [aMaj = 0, aMin = 0, aPat = 0] = parseVer(a); + const [bMaj = 0, bMin = 0, bPat = 0] = parseVer(b); + return bMaj - aMaj || bMin - aMin || bPat - aPat; +} + +async function downloadFile(url: string, dest: string): Promise { + const res = await fetch(url, { redirect: "follow" }); + if (!res.ok) { + throw new Error(`Download failed: ${res.status} ${res.statusText} (${url})`); + } + const buffer = Buffer.from(await res.arrayBuffer()); + await fsp.writeFile(dest, buffer); +} + +async function verifyChecksum( + filePath: string, + checksumUrl: string, + logger: { info: (msg: string) => void } +): Promise { + const res = await fetch(checksumUrl, { redirect: "follow" }); + if (!res.ok) { + if (res.status === 404) { + logger.info( + `Warning: checksum file not found (404), skipping verification for ${path.basename(filePath)}` + ); + return; + } + throw new Error( + `Failed to fetch checksum for ${path.basename(filePath)}: ${res.status} ${res.statusText}. 
` + + `This may indicate GitHub rate limiting or a server error.` + ); + } + + const expectedLine = (await res.text()).trim(); + const expectedHash = expectedLine.split(/\s+/)[0]; + + const { createHash } = await import("node:crypto"); + const fileBuffer = await fsp.readFile(filePath); + const actualHash = createHash("sha256").update(fileBuffer).digest("hex"); + + if (actualHash !== expectedHash) { + await fsp.unlink(filePath); + throw new Error( + `Checksum mismatch for ${path.basename(filePath)}: ` + + `expected ${expectedHash}, got ${actualHash}` + ); + } +} + +async function extractTarGz(archivePath: string, destDir: string): Promise { + await execFileAsync("tar", ["-xzf", archivePath, "-C", destDir]); +} + +async function extractZip(archivePath: string, destDir: string): Promise { + if (process.platform === "win32") { + await execFileAsync("powershell", [ + "-NoProfile", + "-Command", + "Expand-Archive", + "-Path", + archivePath, + "-DestinationPath", + destDir, + "-Force", + ]); + } else { + await execFileAsync("unzip", ["-o", archivePath, "-d", destDir]); + } +} + +async function resolveVersion( + logger: { info: (msg: string) => void }, + pinnedVersion?: string +): Promise { + if (pinnedVersion) { + return pinnedVersion; + } + + const cacheDir = getCacheDir(); + const cacheFile = path.join(cacheDir, ".latest-version"); + + try { + const stat = await fsp.stat(cacheFile); + const age = Date.now() - stat.mtimeMs; + if (age < VERSION_CHECK_TTL_MS) { + const cached = (await fsp.readFile(cacheFile, "utf-8")).trim(); + if (cached) { + logger.info(`Using cached latest version: ${cached} (checked ${Math.round(age / 60000)}m ago)`); + return cached; + } + } + } catch { + // No cache file or unreadable + } + + logger.info("Checking GitHub for latest maple-proxy release..."); + const version = await getLatestVersion(); + + await fsp.mkdir(cacheDir, { recursive: true }); + await fsp.writeFile(cacheFile, version, "utf-8"); + + return version; +} + +async function 
cleanupOldVersions( + currentVersion: string, + logger: { info: (msg: string) => void } +): Promise { + const cacheDir = getCacheDir(); + + let entries: string[]; + try { + entries = await fsp.readdir(cacheDir); + } catch { + return; + } + + const versionDirs = entries + .filter((e) => e.startsWith("v")) + .sort(compareVersionsDesc); + + if (versionDirs.length <= MAX_KEPT_VERSIONS) { + return; + } + + // Always keep the current version; keep most recent others up to MAX_KEPT_VERSIONS + const toKeep = new Set([currentVersion]); + for (const dir of versionDirs) { + if (toKeep.size >= MAX_KEPT_VERSIONS) break; + toKeep.add(dir); + } + + for (const dir of versionDirs) { + if (toKeep.has(dir)) continue; + const dirPath = path.join(cacheDir, dir); + try { + await fsp.rm(dirPath, { recursive: true, force: true }); + logger.info(`Cleaned up old maple-proxy version: ${dir}`); + } catch { + // Best-effort cleanup + } + } +} + +export interface DownloadResult { + binaryPath: string; + version: string; +} + +export async function ensureBinary( + logger: { info: (msg: string) => void }, + pinnedVersion?: string +): Promise { + const version = await resolveVersion(logger, pinnedVersion); + const binaryPath = getBinaryPath(version); + + if (fs.existsSync(binaryPath)) { + logger.info(`maple-proxy ${version} already cached at ${binaryPath}`); + await cleanupOldVersions(version, logger); + return { binaryPath, version }; + } + + const artifact = getArtifact(); + const cacheDir = getCacheDir(); + const versionDir = path.join(cacheDir, version); + await fsp.mkdir(versionDir, { recursive: true }); + + const ext = artifact.archiveType === "zip" ? 
"zip" : "tar.gz"; + const archivePath = path.join(versionDir, `${artifact.name}.${ext}`); + + logger.info(`Downloading maple-proxy ${version} for ${artifact.name}...`); + const releaseUrl = getReleaseUrl(version, artifact); + await downloadFile(releaseUrl, archivePath); + + const checksumUrl = getChecksumUrl(version, artifact); + await verifyChecksum(archivePath, checksumUrl, logger); + + logger.info(`Extracting to ${versionDir}...`); + if (artifact.archiveType === "zip") { + await extractZip(archivePath, versionDir); + } else { + await extractTarGz(archivePath, versionDir); + } + + await fsp.unlink(archivePath); + + if (process.platform !== "win32") { + await fsp.chmod(binaryPath, 0o755); + } + + logger.info(`maple-proxy ${version} ready at ${binaryPath}`); + await cleanupOldVersions(version, logger); + return { binaryPath, version }; +} diff --git a/openclaw-plugin/lib/platform.ts b/openclaw-plugin/lib/platform.ts new file mode 100644 index 0000000..faaad99 --- /dev/null +++ b/openclaw-plugin/lib/platform.ts @@ -0,0 +1,66 @@ +import os from "node:os"; +import path from "node:path"; + +const GITHUB_REPO = "OpenSecretCloud/maple-proxy"; + +export interface PlatformArtifact { + name: string; + archiveType: "tar.gz" | "zip"; +} + +export function getArtifact(): PlatformArtifact { + const platform = os.platform(); + const arch = os.arch(); + + if (platform === "linux" && arch === "x64") { + return { name: "maple-proxy-linux-x86_64", archiveType: "tar.gz" }; + } + if (platform === "linux" && arch === "arm64") { + return { name: "maple-proxy-linux-aarch64", archiveType: "tar.gz" }; + } + if (platform === "darwin" && arch === "arm64") { + return { name: "maple-proxy-macos-aarch64", archiveType: "tar.gz" }; + } + if (platform === "win32" && arch === "x64") { + return { name: "maple-proxy-windows-x86_64", archiveType: "zip" }; + } + + throw new Error( + `Unsupported platform: ${platform}/${arch}. 
` + + `Supported: linux/x64, linux/arm64, darwin/arm64, win32/x64` + ); +} + +export function getReleaseUrl(version: string, artifact: PlatformArtifact): string { + const ext = artifact.archiveType === "zip" ? "zip" : "tar.gz"; + return `https://github.com/${GITHUB_REPO}/releases/download/${version}/${artifact.name}.${ext}`; +} + +export function getChecksumUrl(version: string, artifact: PlatformArtifact): string { + const ext = artifact.archiveType === "zip" ? "zip" : "tar.gz"; + return `https://github.com/${GITHUB_REPO}/releases/download/${version}/${artifact.name}.${ext}.sha256`; +} + +export function getCacheDir(): string { + return path.join(os.homedir(), ".openclaw", "tools", "maple-proxy"); +} + +export function getBinaryName(): string { + return os.platform() === "win32" ? "maple-proxy.exe" : "maple-proxy"; +} + +export function getBinaryPath(version: string): string { + return path.join(getCacheDir(), version, getBinaryName()); +} + +export async function getLatestVersion(): Promise { + const url = `https://api.github.com/repos/${GITHUB_REPO}/releases/latest`; + const res = await fetch(url, { + headers: { Accept: "application/vnd.github.v3+json" }, + }); + if (!res.ok) { + throw new Error(`Failed to fetch latest release: ${res.status} ${res.statusText}`); + } + const data = (await res.json()) as { tag_name: string }; + return data.tag_name; +} diff --git a/openclaw-plugin/lib/process.ts b/openclaw-plugin/lib/process.ts new file mode 100644 index 0000000..68b7c7a --- /dev/null +++ b/openclaw-plugin/lib/process.ts @@ -0,0 +1,216 @@ +import { spawn, type ChildProcess } from "node:child_process"; +import net from "node:net"; + +const DEFAULT_PORT = 8787; +const HEALTH_TIMEOUT_MS = 10000; +const MAX_RESTART_ATTEMPTS = 3; +const RESTART_BACKOFF_MS = 2000; + +export interface ProxyConfig { + binaryPath: string; + apiKey: string; + port?: number; + backendUrl?: string; + debug?: boolean; +} + +export interface RunningProxy { + readonly process: ChildProcess; + 
port: number; + version: string; + kill: () => void; +} + +function checkPortAvailable(port: number): Promise { + return new Promise((resolve, reject) => { + const server = net.createServer(); + server.listen(port, "127.0.0.1", () => { + server.close(() => resolve(true)); + }); + server.on("error", (err: NodeJS.ErrnoException) => { + if (err.code === "EADDRINUSE") { + resolve(false); + } else { + reject(err); + } + }); + }); +} + +async function waitForHealth(port: number): Promise { + const start = Date.now(); + while (Date.now() - start < HEALTH_TIMEOUT_MS) { + try { + const res = await fetch(`http://127.0.0.1:${port}/health`); + if (res.ok) return; + } catch { + // Not ready yet + } + await new Promise((r) => setTimeout(r, 200)); + } + throw new Error(`maple-proxy did not become healthy within ${HEALTH_TIMEOUT_MS}ms`); +} + +function spawnProxy( + config: ProxyConfig, + port: number, + logger: { info: (msg: string) => void; error: (msg: string) => void } +): ChildProcess { + const env: Record = { + ...(process.env as Record), + MAPLE_HOST: "127.0.0.1", + MAPLE_PORT: String(port), + MAPLE_API_KEY: config.apiKey, + }; + + if (config.backendUrl) { + env.MAPLE_BACKEND_URL = config.backendUrl; + } + if (config.debug) { + env.MAPLE_DEBUG = "true"; + } + + const child = spawn(config.binaryPath, [], { + env, + stdio: ["ignore", "pipe", "pipe"], + }); + + child.stdout?.on("data", (data: Buffer) => { + logger.info(`[maple-proxy] ${data.toString().trim()}`); + }); + + child.stderr?.on("data", (data: Buffer) => { + logger.error(`[maple-proxy] ${data.toString().trim()}`); + }); + + return child; +} + +export async function startProxy( + config: ProxyConfig, + version: string, + logger: { info: (msg: string) => void; error: (msg: string) => void } +): Promise { + const port = config.port ?? DEFAULT_PORT; + + const available = await checkPortAvailable(port); + if (!available) { + throw new Error( + `Port ${port} is already in use. 
` + + `Set a different port in plugins.entries["maple-openclaw-plugin"].config.port` + ); + } + + let child = spawnProxy(config, port, logger); + let stopped = false; + let exited = false; + let restartAttempts = 0; + + const trackExit = (proc: ChildProcess) => { + proc.on("exit", () => { + exited = true; + }); + }; + trackExit(child); + + const setupCrashRecovery = (proc: ChildProcess) => { + proc.on("exit", (code, signal) => { + if (stopped) return; + if (signal === "SIGINT" || signal === "SIGTERM" || signal === "SIGKILL") return; + + const crashed = + (code !== null && code !== 0) || + (code === null && signal !== null); + + if (crashed) { + const reason = + code !== null + ? `exit code ${code}` + : `signal ${signal}`; + logger.error(`maple-proxy crashed (${reason})`); + + if (restartAttempts < MAX_RESTART_ATTEMPTS) { + restartAttempts++; + const delay = RESTART_BACKOFF_MS * restartAttempts; + logger.info( + `Restarting maple-proxy in ${delay}ms (attempt ${restartAttempts}/${MAX_RESTART_ATTEMPTS})...` + ); + setTimeout(async () => { + if (stopped) return; + const spawned = spawnProxy(config, port, logger); + child = spawned; + exited = false; + trackExit(spawned); + setupCrashRecovery(spawned); + try { + await waitForHealth(port); + logger.info(`maple-proxy restarted on http://127.0.0.1:${port}`); + restartAttempts = 0; + } catch (err) { + logger.error( + `Failed to restart maple-proxy: ${err instanceof Error ? err.message : err}` + ); + if (!spawned.killed) { + spawned.kill("SIGKILL"); + } + } + }, delay).unref(); + } else { + logger.error( + `maple-proxy crashed ${MAX_RESTART_ATTEMPTS} times, giving up. ` + + `Restart the gateway to try again.` + ); + } + } + }); + }; + + setupCrashRecovery(child); + + // Race health check against spawn errors so we fail fast if the binary + // is missing, not executable, or crashes immediately on startup. 
+ try { + await Promise.race([ + waitForHealth(port), + new Promise((_, reject) => { + child.on("error", (err) => { + reject(new Error(`maple-proxy failed to spawn: ${err.message}`)); + }); + child.on("exit", (code, signal) => { + if (code !== null && code !== 0) { + reject(new Error(`maple-proxy exited immediately with code ${code}`)); + } else if (signal) { + reject(new Error(`maple-proxy killed by signal ${signal} during startup`)); + } + }); + }), + ]); + } catch (err) { + // Clean up the child process if it's still around + if (!child.killed) { + child.kill("SIGKILL"); + } + stopped = true; + throw err; + } + + logger.info(`maple-proxy running on http://127.0.0.1:${port}`); + + return { + get process() { + return child; + }, + port, + version, + kill: () => { + stopped = true; + if (exited) return; + child.kill("SIGINT"); + setTimeout(() => { + if (!exited) { + child.kill("SIGKILL"); + } + }, 3000).unref(); + }, + }; +} diff --git a/openclaw-plugin/openclaw.plugin.json b/openclaw-plugin/openclaw.plugin.json new file mode 100644 index 0000000..619ae05 --- /dev/null +++ b/openclaw-plugin/openclaw.plugin.json @@ -0,0 +1,26 @@ +{ + "id": "maple-openclaw-plugin", + "name": "Maple Proxy", + "description": "Run Maple TEE-backed AI models locally via maple-proxy", + "version": "0.1.0", + "configSchema": { + "type": "object", + "additionalProperties": false, + "properties": { + "apiKey": { "type": "string" }, + "port": { "type": "number" }, + "backendUrl": { "type": "string" }, + "debug": { "type": "boolean" }, + "version": { "type": "string" } + }, + "required": ["apiKey"] + }, + "uiHints": { + "apiKey": { "label": "Maple API Key", "sensitive": true }, + "port": { "label": "Local Port", "placeholder": "8787" }, + "backendUrl": { "label": "Backend URL", "placeholder": "https://enclave.trymaple.ai" }, + "debug": { "label": "Debug Logging" }, + "version": { "label": "Binary Version", "placeholder": "latest" } + }, + "skills": ["./skills/maple-proxy-skill"] +} diff --git 
a/openclaw-plugin/package-lock.json b/openclaw-plugin/package-lock.json new file mode 100644 index 0000000..d099faf --- /dev/null +++ b/openclaw-plugin/package-lock.json @@ -0,0 +1,56 @@ +{ + "name": "@opensecret/maple-openclaw-plugin", + "version": "0.1.1", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@opensecret/maple-openclaw-plugin", + "version": "0.1.1", + "license": "MIT", + "devDependencies": { + "@types/node": "^22.0.0", + "typescript": "^5.7.0" + }, + "peerDependencies": { + "openclaw": ">=2026.1.0" + }, + "peerDependenciesMeta": { + "openclaw": { + "optional": true + } + } + }, + "node_modules/@types/node": { + "version": "22.19.11", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.11.tgz", + "integrity": "sha512-BH7YwL6rA93ReqeQS1c4bsPpcfOmJasG+Fkr6Y59q83f9M1WcBRHR2vM+P9eOisYRcN3ujQoiZY8uk5W+1WL8w==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": true, + "license": "MIT" + } + } +} diff --git a/openclaw-plugin/package.json b/openclaw-plugin/package.json new file mode 100644 index 0000000..334c42e --- /dev/null +++ b/openclaw-plugin/package.json @@ -0,0 +1,38 @@ +{ + "name": "@opensecret/maple-openclaw-plugin", + "version": "0.1.1", + "description": "OpenClaw plugin that runs Maple TEE-backed AI models via 
maple-proxy", + "type": "module", + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/OpenSecretCloud/maple-proxy", + "directory": "openclaw-plugin" + }, + "openclaw": { + "extensions": ["./index.ts"] + }, + "files": [ + "index.ts", + "lib", + "skills", + "openclaw.plugin.json" + ], + "scripts": { + "build": "tsc", + "lint": "tsc --noEmit", + "test": "tsc && node --test dist/lib/downloader.test.js" + }, + "peerDependencies": { + "openclaw": ">=2026.1.0" + }, + "peerDependenciesMeta": { + "openclaw": { + "optional": true + } + }, + "devDependencies": { + "typescript": "^5.7.0", + "@types/node": "^22.0.0" + } +} diff --git a/openclaw-plugin/skills/maple-proxy-skill/SKILL.md b/openclaw-plugin/skills/maple-proxy-skill/SKILL.md new file mode 100644 index 0000000..24494b1 --- /dev/null +++ b/openclaw-plugin/skills/maple-proxy-skill/SKILL.md @@ -0,0 +1,203 @@ +--- +name: maple-proxy-skill +description: Use Maple TEE-backed AI models via the local maple-proxy +metadata: {"openclaw": {"requires": {"config": ["plugins.entries.maple-openclaw-plugin.enabled"]}, "primaryEnv": "MAPLE_API_KEY", "emoji": "๐Ÿ"}} +--- + +# Maple Proxy + +The maple-openclaw-plugin manages a local OpenAI-compatible proxy server that forwards requests to Maple's TEE (Trusted Execution Environment) backend. All AI inference runs inside secure enclaves. + +## Setup + +### 1. Add the Maple provider + +Add a `maple` provider to your `openclaw.json` with your Maple API key and the models you want to use. maple-proxy runs on port **8787** by default. 
+ +```json +{ + "models": { + "providers": { + "maple": { + "baseUrl": "http://127.0.0.1:8787/v1", + "apiKey": "YOUR_MAPLE_API_KEY", + "api": "openai-completions", + "models": [ + { "id": "kimi-k2-5", "name": "Kimi K2.5 (recommended)" }, + { "id": "deepseek-r1-0528", "name": "DeepSeek R1" }, + { "id": "gpt-oss-120b", "name": "GPT-OSS 120B" }, + { "id": "llama-3.3-70b", "name": "Llama 3.3 70B" }, + { "id": "qwen3-vl-30b", "name": "Qwen3 VL 30B" } + ] + } + } + } +} +``` + +Use the same Maple API key you configured in the plugin config -- maple-proxy forwards the `Authorization: Bearer` header to the TEE backend for authentication. + +To discover available models, use the `maple_proxy_status` tool or call `GET http://127.0.0.1:8787/v1/models` directly. + +### 2. Add models to the allowlist + +If you have an `agents.defaults.models` section in your config, you must add the maple models you want to use. If you don't have this section at all, skip this step -- all models are allowed by default. + +Add each model you want to use as `maple/<model-id>`. Check available models via `GET http://127.0.0.1:8787/v1/models` or the `maple_proxy_status` tool. + +```json +{ + "agents": { + "defaults": { + "models": { + "maple/kimi-k2-5": {}, + "maple/deepseek-r1-0528": {}, + "maple/gpt-oss-120b": {}, + "maple/llama-3.3-70b": {}, + "maple/qwen3-vl-30b": {} + } + } + } +} +``` + +### 3. Restart the gateway + +Restart the OpenClaw gateway to pick up the new provider and model config. + +## Using Maple Models + +Use maple models by prefixing with `maple/`: + +- `maple/kimi-k2-5` (recommended) +- `maple/deepseek-r1-0528` +- `maple/gpt-oss-120b` +- `maple/llama-3.3-70b` +- `maple/qwen3-vl-30b` + +To spawn a subagent on a Maple model: + +``` +Use sessions_spawn with model: "maple/kimi-k2-5" to run tasks on Maple TEE models. +``` + +## Status Tool + +Use the `maple_proxy_status` tool to check if the proxy is running, which port it is on, its health status, and the available models endpoint. 
+ +## Embeddings & Memory Search + +maple-proxy serves an OpenAI-compatible embeddings endpoint using the `nomic-embed-text` model. You can use this for OpenClaw's memory search so that embeddings are generated inside the TEE โ€” no cloud embedding provider needed. + +### 1. Enable the memory-core plugin + +The `memory_search` and `memory_get` tools are provided by OpenClaw's `memory-core` plugin. It ships as a stock plugin but must be explicitly enabled. Add it to `plugins.allow` and `plugins.entries`: + +```json +{ + "plugins": { + "allow": ["memory-core"], + "entries": { + "memory-core": { + "enabled": true + } + } + } +} +``` + +This requires a **full gateway restart** (not just SIGUSR1) since it's a plugin change. + +### 2. Configure memorySearch to use maple-proxy + +Point `memorySearch.remote` at the local maple-proxy endpoint. **Important**: the `model` field must be `nomic-embed-text` (without a `maple/` provider prefix) โ€” the proxy does not strip provider prefixes for embedding requests. + +```json +{ + "agents": { + "defaults": { + "memorySearch": { + "enabled": true, + "provider": "openai", + "model": "nomic-embed-text", + "remote": { + "baseUrl": "http://127.0.0.1:8787/v1/", + "apiKey": "YOUR_MAPLE_API_KEY" + } + } + } + } +} +``` + +Use the same Maple API key you configured in the plugin config. This replaces the need for a separate OpenAI, Gemini, or Voyage API key for embeddings. + +> **Common mistake**: Setting the model to `maple/nomic-embed-text` will cause 400 errors from the proxy. Use `nomic-embed-text` (no prefix). + +### 3. 
Restart and reindex + +After updating the config, do a full gateway restart, then build the vector index: + +```bash +# Full restart (plugin changes require this) +systemctl restart openclaw.service + +# Index memory files and generate embeddings +openclaw memory index --verbose + +# Verify everything is working +openclaw memory status --deep +``` + +The status output should show: +- **Provider**: `openai` (this is the API format, not the actual provider) +- **Model**: `nomic-embed-text` +- **Embeddings**: `available` (not `unavailable`) +- **Vector**: `ready` + +### 4. Test with the CLI and tool + +Test from the command line first: + +```bash +openclaw memory search "your query here" +``` + +Once that works, the `memory_search` tool will also be available to the agent in chat. The agent can call `memory_search` to semantically search across `MEMORY.md` and `memory/*.md` files, with results ranked by relevance and cited with source paths. + +### Troubleshooting + +- **"memory slot plugin not found"** in logs โ†’ `memory-core` is not in `plugins.allow` or `plugins.entries`, or hasn't been restarted after adding it +- **Embeddings 400 error** โ†’ model name includes provider prefix (`maple/nomic-embed-text`), change to `nomic-embed-text` +- **Embeddings 401 error** โ†’ wrong API key, or key is a literal string like `${MAPLE_API_KEY}` instead of the actual key value +- **"Batch: disabled"** in status โ†’ embeddings failed too many times, fix the config and restart to reset the failure counter +- **Only 1/7 files indexed** โ†’ embeddings were failing, fix config, restart, then run `openclaw memory index --verbose` + +## Direct API Access + +- `GET http://127.0.0.1:8787/v1/models` - List available models +- `POST http://127.0.0.1:8787/v1/chat/completions` - Chat completions (streaming and non-streaming) +- `POST http://127.0.0.1:8787/v1/embeddings` - Generate embeddings (model: `nomic-embed-text`) +- `GET http://127.0.0.1:8787/health` - Health check + +## Port Override 
+ +The default port is 8787. To change it: + +```json +{ + "plugins": { + "entries": { + "maple-openclaw-plugin": { + "config": { "port": 9000 } + } + } + } +} +``` + +If you change the port, update your `models.providers.maple.baseUrl` and `memorySearch.remote.baseUrl` to match. + +## Configuration Changes + +Plugin config changes (port, API key, backend URL) require a full gateway restart to take effect. Model and provider config changes hot-apply without a restart. diff --git a/openclaw-plugin/tsconfig.json b/openclaw-plugin/tsconfig.json new file mode 100644 index 0000000..0dc3f72 --- /dev/null +++ b/openclaw-plugin/tsconfig.json @@ -0,0 +1,16 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "Node16", + "moduleResolution": "Node16", + "outDir": "dist", + "rootDir": ".", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "declaration": true, + "sourceMap": true + }, + "include": ["index.ts", "lib/**/*.ts", "lib/**/*.test.ts"], + "exclude": ["node_modules", "dist"] +}