refactor: unify queueing and normalize Telegram/Slack flows

This commit is contained in:
Peter Steinberger
2026-03-02 20:55:10 +00:00
parent 320920d523
commit 3a08e69a05
21 changed files with 627 additions and 266 deletions

View File

@@ -1,11 +1,13 @@
import { describe, expect, it } from "vitest";
import {
buildModelSelectionCallbackData,
buildModelsKeyboard,
buildProviderKeyboard,
buildBrowseProvidersButton,
buildProviderKeyboard,
calculateTotalPages,
getModelsPageSize,
parseModelCallbackData,
resolveModelSelection,
type ProviderInfo,
} from "./model-buttons.js";
@@ -52,6 +54,79 @@ describe("parseModelCallbackData", () => {
});
});
describe("resolveModelSelection", () => {
  it("returns explicit provider selections unchanged", () => {
    // When the callback already names a provider, resolution is a pass-through.
    const catalog = new Map([
      ["openai", new Set(["gpt-4.1"])],
      ["anthropic", new Set(["claude-sonnet-4-5"])],
    ]);
    const resolved = resolveModelSelection({
      callback: { type: "select", provider: "openai", model: "gpt-4.1" },
      providers: ["openai", "anthropic"],
      byProvider: catalog,
    });
    expect(resolved).toEqual({ kind: "resolved", provider: "openai", model: "gpt-4.1" });
  });
  it("resolves compact callbacks when exactly one provider matches", () => {
    // Compact callbacks omit the provider; a unique model match pins it down.
    const catalog = new Map([
      ["openai", new Set(["shared"])],
      ["anthropic", new Set(["other"])],
    ]);
    const resolved = resolveModelSelection({
      callback: { type: "select", model: "shared" },
      providers: ["openai", "anthropic"],
      byProvider: catalog,
    });
    expect(resolved).toEqual({ kind: "resolved", provider: "openai", model: "shared" });
  });
  it("returns ambiguous result when zero or multiple providers match", () => {
    // Two providers both offer "shared" — every matching provider is reported.
    const sharedByBoth = resolveModelSelection({
      callback: { type: "select", model: "shared" },
      providers: ["openai", "anthropic"],
      byProvider: new Map([
        ["openai", new Set(["shared"])],
        ["anthropic", new Set(["shared"])],
      ]),
    });
    expect(sharedByBoth).toEqual({
      kind: "ambiguous",
      model: "shared",
      matchingProviders: ["openai", "anthropic"],
    });
    // No provider offers "missing" — still ambiguous, with an empty match list.
    const missingEverywhere = resolveModelSelection({
      callback: { type: "select", model: "missing" },
      providers: ["openai", "anthropic"],
      byProvider: new Map([
        ["openai", new Set(["gpt-4.1"])],
        ["anthropic", new Set(["claude-sonnet-4-5"])],
      ]),
    });
    expect(missingEverywhere).toEqual({
      kind: "ambiguous",
      model: "missing",
      matchingProviders: [],
    });
  });
});
describe("buildModelSelectionCallbackData", () => {
  it("uses standard callback when under limit and compact callback when needed", () => {
    // Short provider/model pairs fit the full "mdl_sel_<provider>/<model>" form.
    const standard = buildModelSelectionCallbackData({ provider: "openai", model: "gpt-4.1" });
    expect(standard).toBe("mdl_sel_openai/gpt-4.1");
    // A long Bedrock-style model id forces the provider-less compact form.
    const longModel = "us.anthropic.claude-3-5-sonnet-20240620-v1:0";
    const compact = buildModelSelectionCallbackData({
      provider: "amazon-bedrock",
      model: longModel,
    });
    expect(compact).toBe(`mdl_sel/${longModel}`);
  });
  it("returns null when even compact callback exceeds Telegram limit", () => {
    // An 80-char model cannot fit even without the provider prefix.
    const oversizedModel = "x".repeat(80);
    expect(
      buildModelSelectionCallbackData({ provider: "openai", model: oversizedModel }),
    ).toBeNull();
  });
});
describe("buildProviderKeyboard", () => {
it("lays out providers in two-column rows", () => {
const cases = [