refactor(test): share google assistant message builders

This commit is contained in:
Peter Steinberger
2026-02-15 15:50:24 +00:00
parent c6c6e9f741
commit 0c57f5e62e
3 changed files with 93 additions and 215 deletions

View File

@@ -1,7 +1,13 @@
import type { Context } from "@mariozechner/pi-ai/dist/types.js";
import { convertMessages } from "@mariozechner/pi-ai/dist/providers/google-shared.js";
import { describe, expect, it } from "vitest";
import { asRecord, makeGeminiCliModel, makeModel } from "./google-shared.test-helpers.js";
import {
asRecord,
makeGeminiCliAssistantMessage,
makeGeminiCliModel,
makeGoogleAssistantMessage,
makeModel,
} from "./google-shared.test-helpers.js";
describe("google-shared convertTools", () => {
it("ensures function call comes after user turn, not after model turn", () => {
@@ -12,59 +18,15 @@ describe("google-shared convertTools", () => {
role: "user",
content: "Hello",
},
{
role: "assistant",
content: [{ type: "text", text: "Hi!" }],
api: "google-generative-ai",
provider: "google",
model: "gemini-1.5-pro",
usage: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
totalTokens: 0,
cost: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
total: 0,
},
makeGoogleAssistantMessage(model.id, [{ type: "text", text: "Hi!" }]),
makeGoogleAssistantMessage(model.id, [
{
type: "toolCall",
id: "call_1",
name: "myTool",
arguments: {},
},
stopReason: "stop",
timestamp: 0,
},
{
role: "assistant",
content: [
{
type: "toolCall",
id: "call_1",
name: "myTool",
arguments: {},
},
],
api: "google-generative-ai",
provider: "google",
model: "gemini-1.5-pro",
usage: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
totalTokens: 0,
cost: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
total: 0,
},
},
stopReason: "stop",
timestamp: 0,
},
]),
],
} as unknown as Context;
@@ -88,37 +50,15 @@ describe("google-shared convertTools", () => {
role: "user",
content: "Use a tool",
},
{
role: "assistant",
content: [
{
type: "toolCall",
id: "call_1",
name: "myTool",
arguments: { arg: "value" },
thoughtSignature: "dGVzdA==",
},
],
api: "google-gemini-cli",
provider: "google-gemini-cli",
model: "gemini-3-flash",
usage: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
totalTokens: 0,
cost: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
total: 0,
},
makeGeminiCliAssistantMessage(model.id, [
{
type: "toolCall",
id: "call_1",
name: "myTool",
arguments: { arg: "value" },
thoughtSignature: "dGVzdA==",
},
stopReason: "stop",
timestamp: 0,
},
]),
{
role: "toolResult",
toolCallId: "call_1",

View File

@@ -1,7 +1,12 @@
import type { Context, Tool } from "@mariozechner/pi-ai/dist/types.js";
import { convertMessages, convertTools } from "@mariozechner/pi-ai/dist/providers/google-shared.js";
import { describe, expect, it } from "vitest";
import { asRecord, getFirstToolParameters, makeModel } from "./google-shared.test-helpers.js";
import {
asRecord,
getFirstToolParameters,
makeGoogleAssistantMessage,
makeModel,
} from "./google-shared.test-helpers.js";
describe("google-shared convertTools", () => {
it("preserves parameters when type is missing", () => {
@@ -122,35 +127,13 @@ describe("google-shared convertMessages", () => {
const model = makeModel("gemini-1.5-pro");
const context = {
messages: [
{
role: "assistant",
content: [
{
type: "thinking",
thinking: "hidden",
thinkingSignature: "c2ln",
},
],
api: "google-generative-ai",
provider: "google",
model: "gemini-1.5-pro",
usage: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
totalTokens: 0,
cost: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
total: 0,
},
makeGoogleAssistantMessage(model.id, [
{
type: "thinking",
thinking: "hidden",
thinkingSignature: "c2ln",
},
stopReason: "stop",
timestamp: 0,
},
]),
],
} as unknown as Context;
@@ -167,35 +150,13 @@ describe("google-shared convertMessages", () => {
const model = makeModel("claude-3-opus");
const context = {
messages: [
{
role: "assistant",
content: [
{
type: "thinking",
thinking: "structured",
thinkingSignature: "c2ln",
},
],
api: "google-generative-ai",
provider: "google",
model: "claude-3-opus",
usage: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
totalTokens: 0,
cost: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
total: 0,
},
makeGoogleAssistantMessage(model.id, [
{
type: "thinking",
thinking: "structured",
thinkingSignature: "c2ln",
},
stopReason: "stop",
timestamp: 0,
},
]),
],
} as unknown as Context;
@@ -262,52 +223,8 @@ describe("google-shared convertMessages", () => {
role: "user",
content: "Hello",
},
{
role: "assistant",
content: [{ type: "text", text: "Hi there!" }],
api: "google-generative-ai",
provider: "google",
model: "gemini-1.5-pro",
usage: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
totalTokens: 0,
cost: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
total: 0,
},
},
stopReason: "stop",
timestamp: 0,
},
{
role: "assistant",
content: [{ type: "text", text: "How can I help?" }],
api: "google-generative-ai",
provider: "google",
model: "gemini-1.5-pro",
usage: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
totalTokens: 0,
cost: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
total: 0,
},
},
stopReason: "stop",
timestamp: 0,
},
makeGoogleAssistantMessage(model.id, [{ type: "text", text: "Hi there!" }]),
makeGoogleAssistantMessage(model.id, [{ type: "text", text: "How can I help?" }]),
],
} as unknown as Context;
@@ -328,36 +245,14 @@ describe("google-shared convertMessages", () => {
role: "user",
content: "Use a tool",
},
{
role: "assistant",
content: [
{
type: "toolCall",
id: "call_1",
name: "myTool",
arguments: { arg: "value" },
},
],
api: "google-generative-ai",
provider: "google",
model: "gemini-1.5-pro",
usage: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
totalTokens: 0,
cost: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
total: 0,
},
makeGoogleAssistantMessage(model.id, [
{
type: "toolCall",
id: "call_1",
name: "myTool",
arguments: { arg: "value" },
},
stopReason: "stop",
timestamp: 0,
},
]),
{
role: "toolResult",
toolCallId: "call_1",

View File

@@ -47,3 +47,46 @@ export const makeGeminiCliModel = (id: string): Model<"google-gemini-cli"> =>
contextWindow: 1,
maxTokens: 1,
}) as Model<"google-gemini-cli">;
/**
 * Returns a usage record with every token counter and cost field zeroed.
 * Synthetic test messages never need real accounting, so the builders below
 * share this instead of repeating the literal in every fixture.
 */
function makeZeroUsage() {
	const cost = { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 };
	return { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, totalTokens: 0, cost };
}
/**
 * Builds a minimal assistant message attributed to the plain Google
 * generative-AI provider. Usage and cost are zeroed so tests only have to
 * care about `content` and `model`.
 */
export const makeGoogleAssistantMessage = (model: string, content: unknown) => ({
	role: "assistant",
	content,
	api: "google-generative-ai",
	provider: "google",
	model,
	usage: makeZeroUsage(),
	stopReason: "stop",
	timestamp: 0,
});
/**
 * Builds a minimal assistant message attributed to the Gemini CLI provider
 * (`google-gemini-cli` api/provider pair). Usage and cost are zeroed so
 * tests only have to care about `content` and `model`.
 */
export const makeGeminiCliAssistantMessage = (model: string, content: unknown) => ({
	role: "assistant",
	content,
	api: "google-gemini-cli",
	provider: "google-gemini-cli",
	model,
	usage: makeZeroUsage(),
	stopReason: "stop",
	timestamp: 0,
});