mirror of
https://github.com/openclaw/openclaw.git
synced 2026-05-05 21:39:34 +00:00
feat(memory): native Voyage AI support (#7078)
* feat(memory): add native Voyage AI embedding support with batching. Cherry-picked from PR #2519; resolved conflict in memory-search.ts (hasRemote -> hasRemoteConfig rename + added voyage provider). * fix(memory): optimize Voyage batch memory usage with streaming and deduplicate code. Cherry-picked from PR #2519. Fixed lint error: changed this.runWithConcurrency to use the imported runWithConcurrency function after its extraction to internal.ts.
This commit is contained in:
@@ -234,7 +234,7 @@ export type MemorySearchConfig = {
     sessionMemory?: boolean;
   };
   /** Embedding provider mode. */
-  provider?: "openai" | "gemini" | "local";
+  provider?: "openai" | "gemini" | "local" | "voyage";
   remote?: {
     baseUrl?: string;
     apiKey?: string;
@@ -253,7 +253,7 @@ export type MemorySearchConfig = {
     };
   };
   /** Fallback behavior when embeddings fail. */
-  fallback?: "openai" | "gemini" | "local" | "none";
+  fallback?: "openai" | "gemini" | "local" | "voyage" | "none";
   /** Embedding model id (remote) or alias (local). */
   model?: string;
   /** Local embedding settings (node-llama-cpp). */
Reference in New Issue
Block a user