feat(memory): native Voyage AI support (#7078)

* feat(memory): add native Voyage AI embedding support with batching

Cherry-picked from PR #2519; resolved a conflict in memory-search.ts
(the hasRemote -> hasRemoteConfig rename, plus the added "voyage" provider).

* fix(memory): optimize voyage batch memory usage with streaming and deduplicate code

Cherry-picked from PR #2519. Fixed a lint error: changed this.runWithConcurrency
to use the imported runWithConcurrency function after its extraction to internal.ts.
This commit is contained in:
Jake
2026-02-07 10:09:32 +13:00
committed by GitHub
parent e3d3893d5d
commit 6965a2cc9d
11 changed files with 879 additions and 58 deletions

View File

@@ -9,7 +9,7 @@ export type ResolvedMemorySearchConfig = {
enabled: boolean;
sources: Array<"memory" | "sessions">;
extraPaths: string[];
provider: "openai" | "local" | "gemini" | "auto";
provider: "openai" | "local" | "gemini" | "voyage" | "auto";
remote?: {
baseUrl?: string;
apiKey?: string;
@@ -25,7 +25,7 @@ export type ResolvedMemorySearchConfig = {
experimental: {
sessionMemory: boolean;
};
fallback: "openai" | "gemini" | "local" | "none";
fallback: "openai" | "gemini" | "local" | "voyage" | "none";
model: string;
local: {
modelPath?: string;
@@ -72,6 +72,7 @@ export type ResolvedMemorySearchConfig = {
const DEFAULT_OPENAI_MODEL = "text-embedding-3-small";
const DEFAULT_GEMINI_MODEL = "gemini-embedding-001";
const DEFAULT_VOYAGE_MODEL = "voyage-4-large";
const DEFAULT_CHUNK_TOKENS = 400;
const DEFAULT_CHUNK_OVERLAP = 80;
const DEFAULT_WATCH_DEBOUNCE_MS = 1500;
@@ -136,7 +137,11 @@ function mergeConfig(
defaultRemote?.headers,
);
const includeRemote =
hasRemoteConfig || provider === "openai" || provider === "gemini" || provider === "auto";
hasRemoteConfig ||
provider === "openai" ||
provider === "gemini" ||
provider === "voyage" ||
provider === "auto";
const batch = {
enabled: overrideRemote?.batch?.enabled ?? defaultRemote?.batch?.enabled ?? true,
wait: overrideRemote?.batch?.wait ?? defaultRemote?.batch?.wait ?? true,
@@ -163,7 +168,9 @@ function mergeConfig(
? DEFAULT_GEMINI_MODEL
: provider === "openai"
? DEFAULT_OPENAI_MODEL
: undefined;
: provider === "voyage"
? DEFAULT_VOYAGE_MODEL
: undefined;
const model = overrides?.model ?? defaults?.model ?? modelDefault ?? "";
const local = {
modelPath: overrides?.local?.modelPath ?? defaults?.local?.modelPath,