mirror of
https://github.com/Youzini-afk/ST-Bionic-Memory-Ecology.git
synced 2026-05-15 22:30:38 +08:00
recall: sectioned recentMessages with context/target split for LLM prompt
- prompt-builder.js: add RECALL_TARGET_CONTENT_HEADER; update splitSectionedTranscriptPayloadMessage to recognize the recall-specific target header
- retriever.js: add the buildRecallSectionedTranscript helper; format recentMessages as a sectioned transcript with context-review and recall-target headers for prompt building, while keeping the flat string[] for ranking
- p0-regressions.mjs: add the testRecallUsesSectionedPromptMessagesForContextAndTarget regression, asserting two system messages with the correct transcriptSection and headers
This commit is contained in:
@@ -16,6 +16,8 @@ import {
|
||||
buildTaskExecutionDebugContext,
|
||||
buildTaskLlmPayload,
|
||||
buildTaskPrompt,
|
||||
EXTRACTION_CONTEXT_REVIEW_HEADER,
|
||||
RECALL_TARGET_CONTENT_HEADER,
|
||||
} from "../prompting/prompt-builder.js";
|
||||
import {
|
||||
applyCooccurrenceBoost,
|
||||
@@ -93,6 +95,32 @@ function resolveTaskLlmSystemPrompt(promptPayload, fallbackSystemPrompt = "") {
|
||||
return String(promptPayload?.systemPrompt || fallbackSystemPrompt || "");
|
||||
}
|
||||
|
||||
/**
 * Build a sectioned transcript for the recall prompt from recent messages.
 *
 * The last non-empty message becomes the recall target section; everything
 * before it becomes the context-review section. Sections are separated by a
 * blank line, and messages within a section are separated by "---" dividers.
 *
 * @param {string[]} [recentMessages] - Recent chat messages, oldest first.
 * @returns {string} The sectioned transcript, or "" when no usable messages.
 */
function buildRecallSectionedTranscript(recentMessages = []) {
  // Normalize: tolerate non-array input, coerce entries to trimmed strings,
  // and drop anything empty/falsy.
  const source = Array.isArray(recentMessages) ? recentMessages : [];
  const cleaned = [];
  for (const entry of source) {
    const text = String(entry || "").trim();
    if (text) {
      cleaned.push(text);
    }
  }
  if (cleaned.length === 0) {
    return "";
  }

  // Split: newest message is the recall target; the rest is review context.
  const target = cleaned[cleaned.length - 1];
  const context = cleaned.slice(0, -1);

  const parts = [];
  if (context.length > 0) {
    parts.push(
      [EXTRACTION_CONTEXT_REVIEW_HEADER, context.join("\n---\n")].join("\n\n"),
    );
  }
  // cleaned is non-empty, so a target section is always emitted.
  parts.push([RECALL_TARGET_CONTENT_HEADER, target].join("\n\n"));

  return parts.join("\n\n");
}
|
||||
|
||||
function buildRecallFallbackReason(llmResult) {
|
||||
const failureType = String(llmResult?.errorType || "").trim();
|
||||
const failureReason = String(llmResult?.failureReason || "").trim();
|
||||
@@ -2153,6 +2181,8 @@ async function llmRecall(
|
||||
) {
|
||||
throwIfAborted(signal);
|
||||
const contextStr = recentMessages.join("\n---\n");
|
||||
const sectionedContextStr =
|
||||
buildRecallSectionedTranscript(recentMessages) || contextStr;
|
||||
const sceneOwnerCandidateText = buildSceneOwnerCandidateText(sceneOwnerCandidates);
|
||||
const {
|
||||
candidateKeyToNodeId,
|
||||
@@ -2177,7 +2207,7 @@ async function llmRecall(
|
||||
|
||||
const recallPromptBuild = await buildTaskPrompt(settings, "recall", {
|
||||
taskName: "recall",
|
||||
recentMessages: contextStr || "(无)",
|
||||
recentMessages: sectionedContextStr || "(无)",
|
||||
userMessage,
|
||||
candidateNodes: candidateDescriptions,
|
||||
candidateText: candidateDescriptions,
|
||||
@@ -2212,7 +2242,7 @@ async function llmRecall(
|
||||
activeStoryTimeLabel || "(未确定)",
|
||||
"",
|
||||
"## 最近对话上下文",
|
||||
contextStr || "(无)",
|
||||
sectionedContextStr || contextStr || "(无)",
|
||||
"",
|
||||
"## 用户最新输入",
|
||||
userMessage,
|
||||
|
||||
Reference in New Issue
Block a user