feat: 完整 Prompt 配置页 - 全部 6 个 prompt 可自定义

- panel.html: 6 个折叠式 textarea(提取/召回/进化/压缩/概要/反思)
- style.css: details/summary 折叠组样式
- panel.js: 全部 6 个 prompt 双向数据绑定
- index.js: 所有调用点传入 customPrompt
- extractor.js / retriever.js / evolution.js / compressor.js: 接受并使用自定义 prompt
- 留空时走内置默认 prompt,完全向后兼容
This commit is contained in:
Youzini-afk
2026-03-24 17:21:40 +08:00
parent 963e4f3b7b
commit 58304e7253
8 changed files with 106 additions and 18 deletions

View File

@@ -16,7 +16,7 @@ import { isDirectVectorConfig } from './vector-index.js';
* @param {boolean} [params.force=false] - 忽略阈值强制压缩
* @returns {Promise<{created: number, archived: number}>}
*/
export async function compressType({ graph, typeDef, embeddingConfig, force = false }) {
export async function compressType({ graph, typeDef, embeddingConfig, force = false, customPrompt }) {
const compression = typeDef.compression;
if (!compression || compression.mode !== 'hierarchical') {
return { created: 0, archived: 0 };
@@ -33,6 +33,7 @@ export async function compressType({ graph, typeDef, embeddingConfig, force = fa
level,
embeddingConfig,
force,
customPrompt,
});
totalCreated += result.created;
@@ -48,7 +49,7 @@ export async function compressType({ graph, typeDef, embeddingConfig, force = fa
/**
* 压缩特定层级的节点
*/
async function compressLevel({ graph, typeDef, level, embeddingConfig, force }) {
async function compressLevel({ graph, typeDef, level, embeddingConfig, force, customPrompt }) {
const compression = typeDef.compression;
// 获取该层级的活跃叶子节点
@@ -79,7 +80,7 @@ async function compressLevel({ graph, typeDef, level, embeddingConfig, force })
if (batch.length < 2) break; // 至少 2 个才压缩
// 调用 LLM 总结
const summaryResult = await summarizeBatch(batch, typeDef);
const summaryResult = await summarizeBatch(batch, typeDef, customPrompt);
if (!summaryResult) continue;
// 创建压缩节点
@@ -152,7 +153,7 @@ function migrateBatchEdges(graph, batch, compressedNode) {
/**
* 调用 LLM 总结一批节点
*/
async function summarizeBatch(nodes, typeDef) {
async function summarizeBatch(nodes, typeDef, customPrompt) {
const nodeDescriptions = nodes.map((n, i) => {
const fieldsStr = Object.entries(n.fields)
.filter(([_, v]) => v)
@@ -163,7 +164,7 @@ async function summarizeBatch(nodes, typeDef) {
const instruction = typeDef.compression.instruction || '将以下节点压缩总结为一条精炼记录。';
const systemPrompt = [
const systemPrompt = customPrompt || [
'你是一个记忆压缩器。将多个同类型节点总结为一条更高层级的压缩节点。',
instruction,
'',
@@ -190,13 +191,13 @@ async function summarizeBatch(nodes, typeDef) {
* @param {boolean} [force=false]
* @returns {Promise<{created: number, archived: number}>}
*/
export async function compressAll(graph, schema, embeddingConfig, force = false) {
export async function compressAll(graph, schema, embeddingConfig, force = false, customPrompt) {
let totalCreated = 0;
let totalArchived = 0;
for (const typeDef of schema) {
if (typeDef.compression?.mode === 'hierarchical') {
const result = await compressType({ graph, typeDef, embeddingConfig, force });
const result = await compressType({ graph, typeDef, embeddingConfig, force, customPrompt });
totalCreated += result.created;
totalArchived += result.archived;
}

View File

@@ -56,6 +56,7 @@ export async function evolveMemories({
newNodeIds,
embeddingConfig,
options = {},
customPrompt,
}) {
const neighborCount = options.neighborCount ?? 5;
const stats = { evolved: 0, connections: 0, updates: 0 };
@@ -114,7 +115,7 @@ export async function evolveMemories({
try {
const decision = await callLLMForJSON({
systemPrompt: EVOLUTION_SYSTEM_PROMPT,
systemPrompt: customPrompt || EVOLUTION_SYSTEM_PROMPT,
userPrompt,
maxRetries: 1,
});

View File

@@ -617,7 +617,7 @@ async function mem0ConflictCheck(
* @param {number} params.currentSeq
* @returns {Promise<void>}
*/
export async function generateSynopsis({ graph, schema, currentSeq }) {
export async function generateSynopsis({ graph, schema, currentSeq, customPrompt }) {
const eventNodes = getActiveNodes(graph, "event").sort(
(a, b) => a.seq - b.seq,
);
@@ -639,7 +639,7 @@ export async function generateSynopsis({ graph, schema, currentSeq }) {
.join("; ");
const result = await callLLMForJSON({
systemPrompt: [
systemPrompt: customPrompt || [
"你是故事概要生成器。根据事件线、角色和主线生成简洁的前情提要。",
'输出 JSON{"summary": "前情提要文本200字以内"}',
"要求:涵盖核心冲突、关键转折、主要角色当前状态。",
@@ -686,7 +686,7 @@ export async function generateSynopsis({ graph, schema, currentSeq }) {
}
}
export async function generateReflection({ graph, currentSeq }) {
export async function generateReflection({ graph, currentSeq, customPrompt }) {
const recentEvents = getActiveNodes(graph, "event")
.sort((a, b) => b.seq - a.seq)
.slice(0, 6)
@@ -726,7 +726,7 @@ export async function generateReflection({ graph, currentSeq }) {
.join("\n");
const result = await callLLMForJSON({
systemPrompt: [
systemPrompt: customPrompt || [
"你是 RP 长期记忆系统的反思生成器。",
'输出严格 JSON{"insight":"...","trigger":"...","suggestion":"...","importance":1-10}',
"insight 应总结最近情节中最值得长期保留的变化、关系趋势或潜在线索。",

View File

@@ -722,6 +722,7 @@ async function handleExtractionSuccess(result, endIdx, settings) {
newNodeIds: result.newNodeIds,
embeddingConfig: getEmbeddingConfig(),
options: { neighborCount: settings.evoNeighborCount },
customPrompt: settings.evolutionPrompt || undefined,
});
postProcessArtifacts.push("evolution");
} catch (e) {
@@ -735,6 +736,7 @@ async function handleExtractionSuccess(result, endIdx, settings) {
graph: currentGraph,
schema: getSchema(),
currentSeq: endIdx,
customPrompt: settings.synopsisPrompt || undefined,
});
postProcessArtifacts.push("synopsis");
} catch (e) {
@@ -750,6 +752,7 @@ async function handleExtractionSuccess(result, endIdx, settings) {
await generateReflection({
graph: currentGraph,
currentSeq: endIdx,
customPrompt: settings.reflectionPrompt || undefined,
});
postProcessArtifacts.push("reflection");
} catch (e) {
@@ -770,6 +773,8 @@ async function handleExtractionSuccess(result, endIdx, settings) {
currentGraph,
getSchema(),
getEmbeddingConfig(),
false,
settings.compressPrompt || undefined,
);
if (compressionResult.created > 0 || compressionResult.archived > 0) {
postProcessArtifacts.push("compression");
@@ -1189,6 +1194,7 @@ async function runRecall() {
topK: settings.recallTopK,
maxRecallNodes: settings.recallMaxNodes,
enableLLMRecall: settings.recallEnableLLM,
recallPrompt: settings.recallPrompt || undefined,
weights: {
graphWeight: settings.graphWeight,
vectorWeight: settings.vectorWeight,

View File

@@ -319,12 +319,38 @@
<div class="bme-config-card">
<div class="bme-section-header">系统提示词</div>
<div class="bme-config-help">
留空时使用内置默认提取系统提示词。这里只覆盖“记忆提取”提示词,召回/概要/反思仍走内置模板
</div>
<div class="bme-config-row">
<label for="bme-setting-extract-prompt">记忆提取系统提示词</label>
<textarea id="bme-setting-extract-prompt" class="bme-config-textarea" placeholder="留空则使用默认提取系统提示词"></textarea>
留空时使用内置默认提示词。自定义后将完全替换对应环节的 system prompt
</div>
<details class="bme-prompt-group">
<summary>📝 记忆提取</summary>
<textarea id="bme-setting-extract-prompt" class="bme-config-textarea" placeholder="留空 = 默认提取 prompt"></textarea>
</details>
<details class="bme-prompt-group">
<summary>🔍 智能召回</summary>
<textarea id="bme-setting-recall-prompt" class="bme-config-textarea" placeholder="留空 = 默认召回 prompt"></textarea>
</details>
<details class="bme-prompt-group">
<summary>🧬 记忆进化</summary>
<textarea id="bme-setting-evolution-prompt" class="bme-config-textarea" placeholder="留空 = 默认进化 prompt"></textarea>
</details>
<details class="bme-prompt-group">
<summary>📦 记忆压缩</summary>
<textarea id="bme-setting-compress-prompt" class="bme-config-textarea" placeholder="留空 = 默认压缩 prompt"></textarea>
</details>
<details class="bme-prompt-group">
<summary>📜 全局概要</summary>
<textarea id="bme-setting-synopsis-prompt" class="bme-config-textarea" placeholder="留空 = 默认概要 prompt"></textarea>
</details>
<details class="bme-prompt-group">
<summary>💭 反思生成</summary>
<textarea id="bme-setting-reflection-prompt" class="bme-config-textarea" placeholder="留空 = 默认反思 prompt"></textarea>
</details>
</div>
<div class="bme-config-card">

View File

@@ -531,6 +531,11 @@ function _refreshConfigTab() {
);
_setInputValue("bme-setting-extract-prompt", settings.extractPrompt || "");
_setInputValue("bme-setting-recall-prompt", settings.recallPrompt || "");
_setInputValue("bme-setting-evolution-prompt", settings.evolutionPrompt || "");
_setInputValue("bme-setting-compress-prompt", settings.compressPrompt || "");
_setInputValue("bme-setting-synopsis-prompt", settings.synopsisPrompt || "");
_setInputValue("bme-setting-reflection-prompt", settings.reflectionPrompt || "");
_setInputValue("bme-setting-panel-theme", settings.panelTheme || "crimson");
}
@@ -603,6 +608,21 @@ function _bindConfigControls() {
bindText("bme-setting-extract-prompt", (value) =>
_updateSettings?.({ extractPrompt: value }),
);
bindText("bme-setting-recall-prompt", (value) =>
_updateSettings?.({ recallPrompt: value }),
);
bindText("bme-setting-evolution-prompt", (value) =>
_updateSettings?.({ evolutionPrompt: value }),
);
bindText("bme-setting-compress-prompt", (value) =>
_updateSettings?.({ compressPrompt: value }),
);
bindText("bme-setting-synopsis-prompt", (value) =>
_updateSettings?.({ synopsisPrompt: value }),
);
bindText("bme-setting-reflection-prompt", (value) =>
_updateSettings?.({ reflectionPrompt: value }),
);
bindText("bme-setting-panel-theme", (value) =>
_updateSettings?.({ panelTheme: value }),
);

View File

@@ -218,6 +218,7 @@ export async function retrieve({
graph,
schema,
normalizedMaxRecallNodes,
options.recallPrompt,
);
} else {
selectedNodeIds = scoredNodes
@@ -331,6 +332,7 @@ async function llmRecall(
graph,
schema,
maxNodes,
customPrompt,
) {
const contextStr = recentMessages.join("\n---\n");
const candidateDescriptions = candidates
@@ -345,7 +347,7 @@ async function llmRecall(
})
.join("\n");
const systemPrompt = [
const systemPrompt = customPrompt || [
"你是一个记忆召回分析器。",
"根据用户最新输入和对话上下文,从候选记忆节点中选择最相关的节点。",
"优先选择:(1) 直接相关的当前场景节点, (2) 因果关系连续性节点, (3) 有潜在影响的背景节点。",

View File

@@ -733,6 +733,38 @@
font-family: 'Cascadia Code', 'Fira Code', monospace;
}
.bme-prompt-group {
margin-top: 8px;
border: 1px solid var(--bme-border);
border-radius: 6px;
overflow: hidden;
}
.bme-prompt-group summary {
padding: 8px 12px;
cursor: pointer;
font-size: 12px;
font-weight: 500;
color: var(--bme-on-surface);
background: var(--bme-surface-lowest);
user-select: none;
transition: background 0.15s;
}
.bme-prompt-group summary:hover {
background: var(--bme-surface-low);
}
.bme-prompt-group[open] summary {
border-bottom: 1px solid var(--bme-border);
}
.bme-prompt-group .bme-config-textarea {
border: none;
border-radius: 0;
min-height: 120px;
}
.bme-config-actions {
display: flex;
justify-content: flex-end;