fix(memory): align auto checks and ollama fallback defaults

This commit is contained in:
Gustavo Madeira Santana
2026-03-02 20:37:15 -05:00
parent c27274efd1
commit 9be045e26b
5 changed files with 82 additions and 4 deletions

View File

@@ -235,6 +235,32 @@ describe("noteMemorySearchHealth", () => {
const message = String(note.mock.calls[0]?.[0] ?? "");
expect(message).toContain("openclaw configure --section model");
});
// In auto mode an ollama-only credential must NOT satisfy the remote-key
// probe: the health check should still emit its configuration warning, and
// the credential sweep should cover only the remote providers (ollama is
// deliberately excluded from the loop).
// NOTE(review): the sweep asserts "google" while the implementation iterates
// "gemini" — presumably key resolution maps gemini -> google; verify.
it("still warns in auto mode when only ollama credentials exist", async () => {
resolveMemorySearchConfig.mockReturnValue({
provider: "auto",
local: {},
remote: {},
});
const ollamaCredential = {
apiKey: "ollama-local",
source: "env: OLLAMA_API_KEY",
mode: "api-key",
};
resolveApiKeyForProvider.mockImplementation(async ({ provider }: { provider: string }) => {
if (provider !== "ollama") {
throw new Error("missing key");
}
return ollamaCredential;
});
await noteMemorySearchHealth(cfg);
// Exactly one warning fired, and only remote providers were probed.
expect(note).toHaveBeenCalledTimes(1);
const providersChecked = resolveApiKeyForProvider.mock.calls.map(
([arg]: [{ provider: string }]) => arg.provider,
);
expect(providersChecked).toEqual(["openai", "google", "voyage", "mistral"]);
});
});
describe("detectLegacyWorkspaceDirs", () => {

View File

@@ -117,7 +117,7 @@ export async function noteMemorySearchHealth(
if (hasLocalEmbeddings(resolved.local)) {
return;
}
for (const provider of ["openai", "gemini", "voyage", "mistral", "ollama"] as const) {
for (const provider of ["openai", "gemini", "voyage", "mistral"] as const) {
if (hasRemoteApiKey || (await hasApiKeyForProvider(provider, cfg, agentDir))) {
return;
}

View File

@@ -14,7 +14,7 @@ export type OllamaEmbeddingClient = {
};
type OllamaEmbeddingClientConfig = Omit<OllamaEmbeddingClient, "embedBatch">;
const DEFAULT_OLLAMA_EMBEDDING_MODEL = "nomic-embed-text";
export const DEFAULT_OLLAMA_EMBEDDING_MODEL = "nomic-embed-text";
const DEFAULT_OLLAMA_BASE_URL = "http://127.0.0.1:11434";
function sanitizeAndNormalizeEmbedding(vec: number[]): number[] {

View File

@@ -13,6 +13,7 @@ import { onSessionTranscriptUpdate } from "../sessions/transcript-events.js";
import { resolveUserPath } from "../utils.js";
import { DEFAULT_GEMINI_EMBEDDING_MODEL } from "./embeddings-gemini.js";
import { DEFAULT_MISTRAL_EMBEDDING_MODEL } from "./embeddings-mistral.js";
import { DEFAULT_OLLAMA_EMBEDDING_MODEL } from "./embeddings-ollama.js";
import { DEFAULT_OPENAI_EMBEDDING_MODEL } from "./embeddings-openai.js";
import { DEFAULT_VOYAGE_EMBEDDING_MODEL } from "./embeddings-voyage.js";
import {
@@ -980,7 +981,9 @@ export abstract class MemoryManagerSyncOps {
? DEFAULT_VOYAGE_EMBEDDING_MODEL
: fallback === "mistral"
? DEFAULT_MISTRAL_EMBEDDING_MODEL
: this.settings.model;
: fallback === "ollama"
? DEFAULT_OLLAMA_EMBEDDING_MODEL
: this.settings.model;
const fallbackResult = await createEmbeddingProvider({
config: this.cfg,

View File

@@ -3,10 +3,12 @@ import os from "node:os";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import type { OpenClawConfig } from "../config/config.js";
import { DEFAULT_OLLAMA_EMBEDDING_MODEL } from "./embeddings-ollama.js";
import type {
EmbeddingProvider,
EmbeddingProviderResult,
MistralEmbeddingClient,
OllamaEmbeddingClient,
OpenAiEmbeddingClient,
} from "./embeddings.js";
import { getMemorySearchManager, type MemoryIndexManager } from "./index.js";
@@ -36,7 +38,7 @@ function buildConfig(params: {
workspaceDir: string;
indexPath: string;
provider: "openai" | "mistral";
fallback?: "none" | "mistral";
fallback?: "none" | "mistral" | "ollama";
}): OpenClawConfig {
return {
agents: {
@@ -144,4 +146,51 @@ describe("memory manager mistral provider wiring", () => {
expect(internal.openAi).toBeUndefined();
expect(internal.mistral).toBe(mistralClient);
});
// Activating the ollama fallback should swap the primary (openai) client out
// for the ollama client and request the default ollama embedding model on the
// second provider resolution.
it("uses default ollama model when activating ollama fallback", async () => {
const primaryClient: OpenAiEmbeddingClient = {
baseUrl: "https://api.openai.com/v1",
headers: { authorization: "Bearer openai-key" },
model: "text-embedding-3-small",
};
const fallbackClient: OllamaEmbeddingClient = {
baseUrl: "http://127.0.0.1:11434",
headers: {},
model: DEFAULT_OLLAMA_EMBEDDING_MODEL,
embedBatch: async (texts: string[]) => texts.map(() => [0.1, 0.2, 0.3]),
};
// First resolution serves the primary provider, second serves the fallback.
createEmbeddingProviderMock.mockResolvedValueOnce({
requestedProvider: "openai",
provider: createProvider("openai"),
openAi: primaryClient,
} as EmbeddingProviderResult);
createEmbeddingProviderMock.mockResolvedValueOnce({
requestedProvider: "ollama",
provider: createProvider("ollama"),
ollama: fallbackClient,
} as EmbeddingProviderResult);
const cfg = buildConfig({ workspaceDir, indexPath, provider: "openai", fallback: "ollama" });
const result = await getMemorySearchManager({ cfg, agentId: "main" });
if (!result.manager) {
throw new Error(`manager missing: ${result.error ?? "no error provided"}`);
}
manager = result.manager as unknown as MemoryIndexManager;
// Reach the private fallback hook through a structural cast.
const internal = manager as unknown as {
activateFallbackProvider: (reason: string) => Promise<boolean>;
openAi?: OpenAiEmbeddingClient;
ollama?: OllamaEmbeddingClient;
};
const activated = await internal.activateFallbackProvider("forced ollama fallback");
expect(activated).toBe(true);
expect(internal.openAi).toBeUndefined();
expect(internal.ollama).toBe(fallbackClient);
// The second createEmbeddingProvider call must target ollama with its default model.
const fallbackCall = createEmbeddingProviderMock.mock.calls[1]?.[0] as
| { provider?: string; model?: string }
| undefined;
expect(fallbackCall?.provider).toBe("ollama");
expect(fallbackCall?.model).toBe(DEFAULT_OLLAMA_EMBEDDING_MODEL);
});
});