Onboarding: add vLLM provider support

gejifeng
2026-02-09 10:20:45 +00:00
committed by Peter Steinberger
parent 54bf5d0f41
commit e73d881c50
19 changed files with 555 additions and 3 deletions

@@ -0,0 +1,33 @@
import { mkdtempSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { describe, expect, it } from "vitest";

import { resolveImplicitProviders } from "./models-config.providers.js";

describe("vLLM provider", () => {
  it("should not include vllm when no API key is configured", async () => {
    const agentDir = mkdtempSync(join(tmpdir(), "openclaw-test-"));
    const providers = await resolveImplicitProviders({ agentDir });
    expect(providers?.vllm).toBeUndefined();
  });

  it("should include vllm when VLLM_API_KEY is set", async () => {
    const agentDir = mkdtempSync(join(tmpdir(), "openclaw-test-"));
    process.env.VLLM_API_KEY = "test-key";
    try {
      const providers = await resolveImplicitProviders({ agentDir });
      expect(providers?.vllm).toBeDefined();
      expect(providers?.vllm?.apiKey).toBe("VLLM_API_KEY");
      expect(providers?.vllm?.baseUrl).toBe("http://127.0.0.1:8000/v1");
      expect(providers?.vllm?.api).toBe("openai-completions");
      // Note: discovery is disabled in test environments (VITEST check)
      expect(providers?.vllm?.models).toEqual([]);
    } finally {
      delete process.env.VLLM_API_KEY;
    }
  });
});
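
For context, the behavior these tests pin down is: the vllm provider only appears when `VLLM_API_KEY` is set, it defaults to the local vLLM OpenAI-compatible server at `http://127.0.0.1:8000/v1`, and model discovery is skipped when running under Vitest. The sketch below is an illustrative reconstruction of that logic, not the actual code from this commit; the config shape, `resolveVllmProvider`, and `discoverVllmModels` are assumptions inferred from the test expectations.

```ts
// Illustrative sketch, assuming a simple provider-config shape.
interface VllmProviderConfig {
  apiKey: string; // name of the env var holding the key, not the key value itself
  baseUrl: string;
  api: string;
  models: string[];
}

async function resolveVllmProvider(): Promise<VllmProviderConfig | undefined> {
  // Only surface the provider when the user has opted in via VLLM_API_KEY.
  if (!process.env.VLLM_API_KEY) return undefined;

  // Discovery is skipped under Vitest, which is why the tests expect an empty list.
  const models = process.env.VITEST ? [] : await discoverVllmModels();

  return {
    apiKey: "VLLM_API_KEY",              // env var name, resolved lazily at request time
    baseUrl: "http://127.0.0.1:8000/v1", // default local vLLM server address
    api: "openai-completions",           // vLLM exposes an OpenAI-compatible API
    models,
  };
}

// Hypothetical discovery helper: ask the server's /v1/models endpoint what is loaded.
async function discoverVllmModels(): Promise<string[]> {
  try {
    const res = await fetch("http://127.0.0.1:8000/v1/models", {
      headers: { Authorization: `Bearer ${process.env.VLLM_API_KEY}` },
    });
    if (!res.ok) return [];
    const body = (await res.json()) as { data?: { id: string }[] };
    return (body.data ?? []).map((m) => m.id);
  } catch {
    return [];
  }
}
```

Under this reading, enabling the provider is a matter of exporting `VLLM_API_KEY` and running a vLLM server on the default port; no explicit provider entry is required in the config file.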