Each generation/completion uses one system prompt and one user prompt, for maximum clarity for the model.

master
Avraham Sakal 3 months ago
parent ee9e9424ab
commit caf36cf67b

@ -34,21 +34,78 @@ const factsFromUserMessageSystemPrompt = ({
previousRunningSummary,
}: {
previousRunningSummary: string;
}) => `Given the following summary of a conversation, coupled with the messages exchanged since that summary was produced, extract new facts that can be gleaned from the conversation.
}) => `You are an expert at extracting facts from conversations. Given the following summary of a conversation, coupled with the messages exchanged since that summary was produced (which will be provided by the user), extract new facts that can be gleaned from the messages exchanged since the summary was produced.
<running_summary>
${previousRunningSummary}
</running_summary>
`;
/**
 * Builds the user prompt asking the model to extract facts from the
 * messages exchanged since the previous running summary. Each message is
 * wrapped in role-specific XML tags (e.g. `<user_message>…</user_message>`)
 * so the model can tell who said what.
 */
const factsFromUserMessageUserPrompt = ({
  messagesSincePreviousRunningSummary,
}: {
  messagesSincePreviousRunningSummary: Array<DraftMessage>;
}) =>
  // `.join("\n")` is required: interpolating the array directly would fall
  // back to Array.prototype.toString, comma-separating the tagged messages.
  `${messagesSincePreviousRunningSummary
    .map(
      (message) =>
        `<${message.role}_message>${message.content}</${message.role}_message>`,
    )
    .join("\n")}
Extract new facts from these messages.`;
/**
 * System prompt for the assistant-response fact-extraction step: primes the
 * model with the previous running summary so it can pull new facts out of
 * the final assistant response (supplied later in the user prompt).
 */
const factsFromAssistantMessageSystemPrompt = (args: {
  previousRunningSummary: string;
}): string => `You are an insightful fact extractor. Given the following summary of a conversation, coupled with the messages exchanged since that summary was produced (which will be provided by the user), extract new facts that can be gleaned from the final assistant response.
<running_summary>
${args.previousRunningSummary}
</running_summary>
`;
/**
 * Builds the user prompt asking the model to extract facts from the final
 * assistant response. The intermediate messages give context (each wrapped
 * in role-specific XML tags) and the assistant's response is wrapped in
 * `<assistant_response>` tags so the model treats it as input to analyze,
 * not as the user speaking.
 */
const factsFromAssistantMessageUserPrompt = ({
  messagesSincePreviousRunningSummary,
  mainResponseContent,
}: {
  messagesSincePreviousRunningSummary: Array<DraftMessage>;
  mainResponseContent: string;
}) =>
  // `.join("\n")` is required: interpolating the array directly would fall
  // back to Array.prototype.toString, comma-separating the tagged messages.
  `${messagesSincePreviousRunningSummary
    .map(
      (message) =>
        `<${message.role}_message>${message.content}</${message.role}_message>`,
    )
    .join("\n")}
<assistant_response>
${mainResponseContent}
</assistant_response>
Extract facts from the assistant's response.`;
const runningSummarySystemPrompt = ({
previousRunningSummary,
}: {
previousRunningSummary: string;
}) => `Given the following summary of a conversation, coupled with the messages exchanged since that summary was produced, produce a new summary of the conversation.
}) => `You are an expert at summarizing conversations. Given the following summary of a conversation, coupled with the messages exchanged since that summary was produced (which will be provided by the user), produce a new summary of the conversation.
<running_summary>
${previousRunningSummary}
</running_summary>
`;
/**
 * Builds the user prompt asking the model for a new running summary. The
 * messages since the previous summary give context (each wrapped in
 * role-specific XML tags) and the assistant's latest response is wrapped in
 * `<assistant_response>` tags so it is treated as content to summarize.
 */
const runningSummaryUserPrompt = ({
  messagesSincePreviousRunningSummary,
  mainResponseContent,
}: {
  messagesSincePreviousRunningSummary: Array<DraftMessage>;
  mainResponseContent: string;
}) =>
  // `.join("\n")` is required: interpolating the array directly would fall
  // back to Array.prototype.toString, comma-separating the tagged messages.
  `${messagesSincePreviousRunningSummary
    .map(
      (message) =>
        `<${message.role}_message>${message.content}</${message.role}_message>`,
    )
    .join("\n")}
<assistant_response>
${mainResponseContent}
</assistant_response>
Generate a new running summary of the conversation.`;
/** OpenRouter client used for every model call in this router; the API key
 * comes from the environment (`env.OPENROUTER_API_KEY`). */
const openrouter = createOpenRouter({
  apiKey: env.OPENROUTER_API_KEY,
});
@ -145,6 +202,9 @@ export const chat = router({
? ((messages[previousRunningSummaryIndex] as CommittedMessage)
.runningSummary as string)
: "";
const messagesSincePreviousRunningSummary = messages.slice(
previousRunningSummaryIndex + 1,
);
/** Save the incoming message to the database. */
const insertedUserMessage: CommittedMessage = {
id: nanoid(),
@ -176,7 +236,7 @@ export const chat = router({
previousRunningSummary,
}),
},
...messages.slice(previousRunningSummaryIndex + 1),
...messagesSincePreviousRunningSummary,
],
maxSteps: 3,
tools: undefined,
@ -201,7 +261,12 @@ export const chat = router({
previousRunningSummary,
}),
},
...messages.slice(previousRunningSummaryIndex + 1),
{
role: "user" as const,
content: factsFromUserMessageUserPrompt({
messagesSincePreviousRunningSummary,
}),
},
],
schema: jsonSchema({
type: "object",
@ -218,7 +283,7 @@ export const chat = router({
tools: undefined,
...parameters,
});
const insertedFacts: Array<Fact> =
const insertedFactsFromUserMessage: Array<Fact> =
factsFromUserMessageResponse.object.facts.map((fact) => ({
id: nanoid(),
userId: "1",
@ -226,67 +291,82 @@ export const chat = router({
content: fact,
createdAt: new Date().toISOString(),
}));
db.data.facts.push(...insertedFacts);
db.data.facts.push(...insertedFactsFromUserMessage);
/** Extract Facts from the model's response, and add them to the database,
* linking the Facts with the messages they came from. */
/** For each Fact produced in the two fact-extraction steps, generate
* FactTriggers and add them to the database, linking the FactTriggers
* with the Facts they came from. A FactTrigger is a natural language
* phrase that describes a situation in which it would be useful to invoke
* the Fact. (e.g., "When food preferences are discussed"). */
/** Produce a running summary of the conversation, and save that along
* with the model's response to the database. The new running summary is
 * based on the previous running summary combined with all the messages
* since that summary was produced. */
const runningSummaryResponse = previousRunningSummary
? await generateText({
const factsFromAssistantMessageResponse = await generateObject<{
facts: Array<string>;
}>({
model: openrouter("mistralai/mistral-nemo"),
messages: [
{
role: "system" as const,
content: runningSummarySystemPrompt({
content: factsFromAssistantMessageSystemPrompt({
previousRunningSummary,
}),
},
...messages.slice(previousRunningSummaryIndex + 1),
{
role: "assistant" as const,
content: mainResponse.text,
} as UIMessage,
/** I might need this next message, because models are trained to
* respond when the final message in `messages` is from the `user`,
* but in our case it's an `assistant` message, so I'm artificially
* adding a `user` message to the end of the conversation. */
/** Yes, the next message is a `user` message, because models are
* trained to respond to `user` messages. So we wrap the assistant
* response in XML tags to show that it's not the user speaking,
* rather it's input for the model to process. The user is only
* saying "Extract facts..." */
{
role: "user" as const,
content: "What is the new summary of the conversation?",
} as UIMessage,
content: factsFromAssistantMessageUserPrompt({
messagesSincePreviousRunningSummary,
mainResponseContent: mainResponse.text,
}),
},
],
schema: jsonSchema({
type: "object",
properties: {
facts: {
type: "array",
items: {
type: "string",
},
},
},
}),
maxSteps: 3,
tools: undefined,
...parameters,
})
: await generateText({
});
const insertedFactsFromAssistantMessage: Array<Fact> =
factsFromAssistantMessageResponse.object.facts.map((factContent) => ({
id: nanoid(),
userId: "1",
sourceMessageId: insertedAssistantMessage.id,
content: factContent,
createdAt: new Date().toISOString(),
}));
db.data.facts.push(...insertedFactsFromAssistantMessage);
/** For each Fact produced in the two fact-extraction steps, generate
* FactTriggers and add them to the database, linking the FactTriggers
* with the Facts they came from. A FactTrigger is a natural language
* phrase that describes a situation in which it would be useful to invoke
* the Fact. (e.g., "When food preferences are discussed"). */
/** Produce a running summary of the conversation, and save that along
* with the model's response to the database. The new running summary is
 * based on the previous running summary combined with all the messages
* since that summary was produced. */
const runningSummaryResponse = await generateText({
model: openrouter("mistralai/mistral-nemo"),
messages: [
{
role: "system" as const,
content:
"Given the following messages of a conversation, produce a summary of the conversation.",
content: runningSummarySystemPrompt({
previousRunningSummary,
}),
},
...messages,
{
role: "assistant" as const,
content: mainResponse.text,
} as UIMessage,
/** I might need this next message, because models are trained to
* respond when the final message in `messages` is from the `user`,
* but in our case it's an `assistant` message, so I'm artificially
* adding a `user` message to the end of the conversation. */
{
role: "user" as const,
content: "What is the new summary of the conversation?",
} as UIMessage,
content: runningSummaryUserPrompt({
messagesSincePreviousRunningSummary,
mainResponseContent: mainResponse.text,
}),
},
],
maxSteps: 3,
tools: undefined,
@ -308,7 +388,10 @@ export const chat = router({
return {
insertedAssistantMessage,
insertedUserMessage,
insertedFacts,
insertedFacts: [
...insertedFactsFromUserMessage,
...insertedFactsFromAssistantMessage,
],
};
},
),

Loading…
Cancel
Save