fix: object generation errors; easily switchable model

master
Avraham Sakal 2 months ago
parent 341cf5bff1
commit cb749072f2
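
This commit does two things: the model id used by every generateObject/generateText call now comes from a single MODEL_NAME constant exported from provider.js, and the structured-output (generateObject) calls catch generation errors and fall back to an empty object instead of rejecting the whole request. Below is a minimal sketch of that pattern, assuming the Vercel AI SDK ("ai") and OpenRouter provider already used in this repo; the extractFactTriggers helper, its schema, and the @openrouter/ai-sdk-provider import path are illustrative, not the repository's exact code:

```ts
// Sketch only: MODEL_NAME, openrouter, generateObject, and jsonSchema mirror the
// diff below; everything else (helper name, schema, env access) is illustrative.
import { createOpenRouter } from "@openrouter/ai-sdk-provider"; // assumed package name
import { generateObject, jsonSchema } from "ai";

// provider.ts: one exported constant, so swapping models is a one-line change.
export const openrouter = createOpenRouter({
  apiKey: process.env.OPENROUTER_API_KEY ?? "",
});
export const MODEL_NAME = "openai/gpt-5-mini";

// Callers read MODEL_NAME and degrade gracefully when object generation fails
// (malformed JSON, schema mismatch, provider error) instead of throwing.
export async function extractFactTriggers(userMessage: string) {
  const result = await generateObject({
    model: openrouter(MODEL_NAME),
    messages: [{ role: "user" as const, content: userMessage }],
    schema: jsonSchema<{ factTriggers: Array<string> }>({
      type: "object",
      properties: {
        factTriggers: { type: "array", items: { type: "string" } },
      },
      required: ["factTriggers"],
    }),
    maxRetries: 0, // fail fast; the fallback below supplies a usable shape
  }).catch((err) => {
    console.error(err);
    return { object: { factTriggers: [] as Array<string> } };
  });
  return result.object.factTriggers;
}
```

With this in place, switching models is a one-line edit to MODEL_NAME, and a malformed model response degrades to an empty factTriggers (or facts) list rather than an error reaching the tRPC caller.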

@@ -5,7 +5,7 @@ import {
} from "../../trpc/server.js";
import { db, type Fact } from "../../database/lowdb.js";
import type { DraftMessage } from "../../types.js";
import { openrouter } from "./provider.js";
import { openrouter, MODEL_NAME } from "./provider.js";
import { generateObject, generateText, jsonSchema } from "ai";
const factTriggersSystemPrompt = ({
@@ -119,7 +119,7 @@ export const factTriggers = router({
},
}) => {
const factTriggers = await generateObject({
model: openrouter("mistralai/mistral-nemo"),
model: openrouter(MODEL_NAME),
messages: [
{
role: "system" as const,
@@ -148,9 +148,17 @@ export const factTriggers = router({
},
},
},
required: ["factTriggers"],
}),
// maxSteps: 3,
maxRetries: 0,
// tools: undefined,
}).catch((err) => {
console.error(err);
return {
object: {
factTriggers: [] as Array<string>,
},
};
});
return factTriggers;
}

@@ -5,7 +5,7 @@ import {
} from "../../trpc/server.js";
import { db, type Fact } from "../../database/lowdb.js";
import type { DraftMessage } from "../../types.js";
import { openrouter } from "./provider.js";
import { MODEL_NAME, openrouter } from "./provider.js";
import { generateObject, generateText, jsonSchema } from "ai";
const factsFromNewMessagesSystemPrompt = ({
@@ -116,7 +116,7 @@ export const facts = router({
},
}) => {
const factsFromUserMessageResponse = await generateObject({
model: openrouter("mistralai/mistral-nemo"),
model: openrouter(MODEL_NAME),
messages: [
{
role: "system" as const,
@@ -144,8 +144,17 @@ export const facts = router({
},
},
},
required: ["facts"],
}),
temperature: 0.4,
maxRetries: 0,
}).catch((err) => {
console.error(err);
return {
object: {
facts: [] as Array<string>,
},
};
});
return factsFromUserMessageResponse;
}

@@ -4,7 +4,7 @@ import {
createCallerFactory,
} from "../../trpc/server";
import { createCaller as createConversationsCaller } from "./conversations.js";
import { openrouter } from "./provider.js";
import { MODEL_NAME, openrouter } from "./provider.js";
import { generateObject, generateText, jsonSchema } from "ai";
import type { DraftMessage } from "../../types.js";
@@ -75,7 +75,7 @@ export const messages = router({
},
}) => {
const runningSummaryResponse = await generateText({
model: openrouter("mistralai/mistral-nemo"),
model: openrouter(MODEL_NAME),
messages: [
{
role: "system" as const,

@@ -3,3 +3,7 @@ import { env } from "../../server/env.js";
export const openrouter = createOpenRouter({
apiKey: env.OPENROUTER_API_KEY,
});
// export const MODEL_NAME = "mistralai/mistral-nemo";
// export const MODEL_NAME = "openai/gpt-oss-20b";
export const MODEL_NAME = "openai/gpt-5-mini";

@@ -22,7 +22,7 @@ import { facts, createCaller as createCallerFacts } from "./facts.js";
import { createCaller as createCallerMessages } from "./messages.js";
import { createCaller as createCallerFactTriggers } from "./fact-triggers.js";
import { factTriggers } from "./fact-triggers.js";
import { openrouter } from "./provider.js";
import { MODEL_NAME, openrouter } from "./provider.js";
const factsCaller = createCallerFacts({});
const messagesCaller = createCallerMessages({});
@@ -99,7 +99,7 @@ export const chat = router({
* language which this system can execute; usually an interpreted
* language like Python or JavaScript. */
const mainResponse = await generateText({
model: openrouter("mistralai/mistral-nemo"),
model: openrouter(MODEL_NAME),
messages: [
previousRunningSummary === ""
? {
