From c31af1d1a4aca7792fe8ee62559df4f36f07a2df Mon Sep 17 00:00:00 2001
From: Youzini-afk <13153778771cx@gmail.com>
Date: Fri, 27 Mar 2026 01:26:56 +0800
Subject: [PATCH] Harden runtime debug and task pipeline
---
compressor.js | 15 +-
consolidator.js | 32 ++-
embedding.js | 17 +-
extractor.js | 26 +-
index.js | 228 +++++++++++++++---
llm.js | 221 ++++++++++++++++-
panel.js | 290 +++++++++++++++--------
prompt-builder.js | 85 +++++++
request-timeout.js | 9 +
retriever.js | 15 +-
task-ejs.js | 41 +++-
task-regex.js | 20 +-
task-worldinfo.js | 357 +++++++++++++++++++++++-----
tests/p0-regressions.mjs | 497 ++++++++++++++++++++++++++++++++++++++-
tests/task-regex.mjs | 102 ++++++++
tests/task-worldinfo.mjs | 20 +-
vector-index.js | 13 +-
17 files changed, 1750 insertions(+), 238 deletions(-)
create mode 100644 request-timeout.js
diff --git a/compressor.js b/compressor.js
index dd97282..0029dcc 100644
--- a/compressor.js
+++ b/compressor.js
@@ -11,7 +11,7 @@ import {
getNode,
} from "./graph.js";
import { callLLMForJSON } from "./llm.js";
-import { buildTaskPrompt } from "./prompt-builder.js";
+import { buildTaskExecutionDebugContext, buildTaskPrompt } from "./prompt-builder.js";
import { getSTContextForPrompt } from "./st-context.js";
import { applyTaskRegex } from "./task-regex.js";
import { isDirectVectorConfig } from "./vector-index.js";
@@ -22,6 +22,12 @@ function createAbortError(message = "操作已终止") {
return error;
}
+function createTaskLlmDebugContext(promptBuild, regexInput) {
+ return typeof buildTaskExecutionDebugContext === "function"
+ ? buildTaskExecutionDebugContext(promptBuild, { regexInput })
+ : null;
+}
+
function throwIfAborted(signal) {
if (signal?.aborted) {
throw signal.reason instanceof Error ? signal.reason : createAbortError();
@@ -249,6 +255,7 @@ async function summarizeBatch(
graphStats: `node_count=${nodes.length}, node_type=${typeDef.id}`,
...getSTContextForPrompt(),
});
+ const compressRegexInput = { entries: [] };
const systemPrompt = applyTaskRegex(
settings,
"compress",
@@ -267,6 +274,8 @@ async function summarizeBatch(
"- 去除重复和低信息密度内容",
"- 压缩后文本应精炼,目标 150 字左右",
].join("\n"),
+ compressRegexInput,
+ "system",
);
const userPrompt = `请压缩以下 ${nodes.length} 个 "${typeDef.label}" 节点:\n\n${nodeDescriptions}`;
@@ -277,6 +286,10 @@ async function summarizeBatch(
maxRetries: 1,
signal,
taskType: "compress",
+ debugContext: createTaskLlmDebugContext(
+ compressPromptBuild,
+ compressRegexInput,
+ ),
additionalMessages:
compressPromptBuild.privateTaskMessages || [
...(compressPromptBuild.customMessages || []),
diff --git a/consolidator.js b/consolidator.js
index f98009e..dc314e9 100644
--- a/consolidator.js
+++ b/consolidator.js
@@ -5,7 +5,7 @@
import { embedBatch, searchSimilar } from "./embedding.js";
import { addEdge, createEdge, getActiveNodes, getNode } from "./graph.js";
import { callLLMForJSON } from "./llm.js";
-import { buildTaskPrompt } from "./prompt-builder.js";
+import { buildTaskExecutionDebugContext, buildTaskPrompt } from "./prompt-builder.js";
import { getSTContextForPrompt } from "./st-context.js";
import { applyTaskRegex } from "./task-regex.js";
import {
@@ -21,6 +21,12 @@ function createAbortError(message = "操作已终止") {
return error;
}
+function createTaskLlmDebugContext(promptBuild, regexInput) {
+ return typeof buildTaskExecutionDebugContext === "function"
+ ? buildTaskExecutionDebugContext(promptBuild, { regexInput })
+ : null;
+}
+
function isAbortError(error) {
return error?.name === "AbortError";
}
@@ -301,20 +307,28 @@ export async function consolidateMemories({
graphStats: `new_entries=${newEntries.length}, threshold=${conflictThreshold}`,
...getSTContextForPrompt(),
});
+ const consolidationRegexInput = { entries: [] };
+ const consolidationSystemPrompt = applyTaskRegex(
+ settings,
+ "consolidation",
+ "finalPrompt",
+ consolidationPromptBuild.systemPrompt ||
+ customPrompt ||
+ CONSOLIDATION_SYSTEM_PROMPT,
+ consolidationRegexInput,
+ "system",
+ );
try {
decision = await callLLMForJSON({
- systemPrompt: applyTaskRegex(
- settings,
- "consolidation",
- "finalPrompt",
- consolidationPromptBuild.systemPrompt ||
- customPrompt ||
- CONSOLIDATION_SYSTEM_PROMPT,
- ),
+ systemPrompt: consolidationSystemPrompt,
userPrompt,
maxRetries: 1,
signal,
taskType: "consolidation",
+ debugContext: createTaskLlmDebugContext(
+ consolidationPromptBuild,
+ consolidationRegexInput,
+ ),
additionalMessages:
consolidationPromptBuild.privateTaskMessages || [
...(consolidationPromptBuild.customMessages || []),
diff --git a/embedding.js b/embedding.js
index 38ebac1..1a41f59 100644
--- a/embedding.js
+++ b/embedding.js
@@ -7,6 +7,7 @@
*/
import { extension_settings } from "../../../extensions.js";
+import { resolveConfiguredTimeoutMs } from "./request-timeout.js";
const MODULE_NAME = "st_bme";
const EMBEDDING_REQUEST_TIMEOUT_MS = 300000;
@@ -19,10 +20,14 @@ function getEmbeddingTestOverride(name) {
function getConfiguredTimeoutMs(
settings = extension_settings[MODULE_NAME] || {},
) {
- const timeoutMs = Number(settings?.timeoutMs);
- return Number.isFinite(timeoutMs) && timeoutMs > 0
- ? timeoutMs
- : EMBEDDING_REQUEST_TIMEOUT_MS;
+ return typeof resolveConfiguredTimeoutMs === "function"
+ ? resolveConfiguredTimeoutMs(settings, EMBEDDING_REQUEST_TIMEOUT_MS)
+ : (() => {
+ const timeoutMs = Number(settings?.timeoutMs);
+ return Number.isFinite(timeoutMs) && timeoutMs > 0
+ ? timeoutMs
+ : EMBEDDING_REQUEST_TIMEOUT_MS;
+ })();
}
function isAbortError(error) {
@@ -131,7 +136,7 @@ export async function embedText(text, config, { signal } = {}) {
input: text,
}),
},
- getConfiguredTimeoutMs(),
+ getConfiguredTimeoutMs(config),
);
if (!response.ok) {
@@ -196,7 +201,7 @@ export async function embedBatch(texts, config, { signal } = {}) {
input: texts,
}),
},
- getConfiguredTimeoutMs(),
+ getConfiguredTimeoutMs(config),
);
if (!response.ok) {
diff --git a/extractor.js b/extractor.js
index 9b9350f..22495fc 100644
--- a/extractor.js
+++ b/extractor.js
@@ -16,7 +16,7 @@ import {
} from "./graph.js";
import { callLLMForJSON } from "./llm.js";
import { ensureEventTitle, getNodeDisplayName } from "./node-labels.js";
-import { buildTaskPrompt } from "./prompt-builder.js";
+import { buildTaskExecutionDebugContext, buildTaskPrompt } from "./prompt-builder.js";
import { RELATION_TYPES } from "./schema.js";
import { applyTaskRegex } from "./task-regex.js";
import { getSTContextForPrompt } from "./st-context.js";
@@ -28,6 +28,12 @@ function createAbortError(message = "操作已终止") {
return error;
}
+function createTaskLlmDebugContext(promptBuild, regexInput) {
+ return typeof buildTaskExecutionDebugContext === "function"
+ ? buildTaskExecutionDebugContext(promptBuild, { regexInput })
+ : null;
+}
+
function isAbortError(error) {
return error?.name === "AbortError";
}
@@ -122,6 +128,7 @@ export async function extractMemories({
});
// 系统提示词
+ const extractRegexInput = { entries: [] };
const systemPrompt = applyTaskRegex(
settings,
"extract",
@@ -129,6 +136,8 @@ export async function extractMemories({
promptBuild.systemPrompt ||
extractPrompt ||
buildDefaultExtractPrompt(schema),
+ extractRegexInput,
+ "system",
);
// 用户提示词
@@ -152,6 +161,7 @@ export async function extractMemories({
maxRetries: 2,
signal,
taskType: "extract",
+ debugContext: createTaskLlmDebugContext(promptBuild, extractRegexInput),
additionalMessages:
promptBuild.privateTaskMessages || [
...(promptBuild.customMessages || []),
@@ -641,6 +651,7 @@ export async function generateSynopsis({
graphStats: `event=${eventNodes.length}, character=${characterNodes.length}, thread=${threadNodes.length}`,
...getSTContextForPrompt(),
});
+ const synopsisRegexInput = { entries: [] };
const synopsisSystemPrompt = applyTaskRegex(
settings,
"synopsis",
@@ -652,6 +663,8 @@ export async function generateSynopsis({
'输出 JSON:{"summary": "前情提要文本(200字以内)"}',
"要求:涵盖核心冲突、关键转折、主要角色当前状态。",
].join("\n"),
+ synopsisRegexInput,
+ "system",
);
const result = await callLLMForJSON({
@@ -669,6 +682,10 @@ export async function generateSynopsis({
maxRetries: 1,
signal,
taskType: "synopsis",
+ debugContext: createTaskLlmDebugContext(
+ synopsisPromptBuild,
+ synopsisRegexInput,
+ ),
additionalMessages:
synopsisPromptBuild.privateTaskMessages || [
...(synopsisPromptBuild.customMessages || []),
@@ -759,6 +776,7 @@ export async function generateReflection({
graphStats: `event=${recentEvents.length}, character=${recentCharacters.length}, thread=${recentThreads.length}`,
...getSTContextForPrompt(),
});
+ const reflectionRegexInput = { entries: [] };
const reflectionSystemPrompt = applyTaskRegex(
settings,
"reflection",
@@ -773,6 +791,8 @@ export async function generateReflection({
"suggestion 给出后续检索或叙事上值得关注的提示。",
"不要复述全部事件,要提炼高层结论。",
].join("\n"),
+ reflectionRegexInput,
+ "system",
);
const result = await callLLMForJSON({
@@ -793,6 +813,10 @@ export async function generateReflection({
maxRetries: 1,
signal,
taskType: "reflection",
+ debugContext: createTaskLlmDebugContext(
+ reflectionPromptBuild,
+ reflectionRegexInput,
+ ),
additionalMessages:
reflectionPromptBuild.privateTaskMessages || [
...(reflectionPromptBuild.customMessages || []),
diff --git a/index.js b/index.js
index 47f70b5..0911ec4 100644
--- a/index.js
+++ b/index.js
@@ -49,6 +49,7 @@ import {
import { retrieve } from "./retriever.js";
import {
appendBatchJournal,
+ buildReverseJournalRecoveryPlan,
buildRecoveryResult,
clearHistoryDirty,
cloneGraphSnapshot,
@@ -72,6 +73,7 @@ import {
testVectorConnection,
validateVectorConfig,
} from "./vector-index.js";
+import { resolveConfiguredTimeoutMs } from "./request-timeout.js";
// 操控面板模块(动态加载,防止加载失败崩溃整个扩展)
let _panelModule = null;
@@ -820,10 +822,14 @@ function getSchema() {
}
function getConfiguredTimeoutMs(settings = getSettings()) {
- const timeoutMs = Number(settings?.timeoutMs);
- return Number.isFinite(timeoutMs) && timeoutMs > 0
- ? timeoutMs
- : LOCAL_VECTOR_TIMEOUT_MS;
+ return typeof resolveConfiguredTimeoutMs === "function"
+ ? resolveConfiguredTimeoutMs(settings, LOCAL_VECTOR_TIMEOUT_MS)
+ : (() => {
+ const timeoutMs = Number(settings?.timeoutMs);
+ return Number.isFinite(timeoutMs) && timeoutMs > 0
+ ? timeoutMs
+ : LOCAL_VECTOR_TIMEOUT_MS;
+ })();
}
function getEmbeddingConfig(mode = null) {
@@ -2929,6 +2935,120 @@ function rollbackAffectedJournals(graph, affectedJournals = []) {
: [];
}
+function pruneProcessedMessageHashesFromFloor(graph, fromFloor) {
+ if (!graph?.historyState?.processedMessageHashes) return;
+ if (!Number.isFinite(fromFloor)) return;
+
+ const hashes = graph.historyState.processedMessageHashes;
+ for (const key of Object.keys(hashes)) {
+ if (Number(key) >= fromFloor) {
+ delete hashes[key];
+ }
+ }
+}
+
+async function rollbackGraphForReroll(targetFloor, context = getContext()) {
+ ensureCurrentGraphRuntimeState();
+ const chatId = getCurrentChatId(context);
+ const recoveryPoint = findJournalRecoveryPoint(currentGraph, targetFloor);
+
+ if (!recoveryPoint) {
+ return {
+ success: false,
+ rollbackPerformed: false,
+ extractionTriggered: false,
+ requestedFloor: targetFloor,
+ effectiveFromFloor: null,
+ recoveryPath: "unavailable",
+ affectedBatchCount: 0,
+ error:
+ "未找到可用的回滚点,无法安全重新提取。请先执行一次历史恢复或重新提取更早的批次。",
+ };
+ }
+
+ clearInjectionState();
+ lastExtractedItems = [];
+
+ const config = getEmbeddingConfig();
+ const recoveryPath = recoveryPoint.path || "unknown";
+ const affectedBatchCount = recoveryPoint.affectedBatchCount || 0;
+
+ if (recoveryPath === "reverse-journal") {
+ const recoveryPlan = buildReverseJournalRecoveryPlan(
+ recoveryPoint.affectedJournals,
+ targetFloor,
+ );
+ rollbackAffectedJournals(currentGraph, recoveryPoint.affectedJournals);
+ currentGraph = normalizeGraphRuntimeState(currentGraph, chatId);
+ extractionCount = currentGraph.historyState.extractionCount || 0;
+ applyRecoveryPlanToVectorState(recoveryPlan, targetFloor);
+
+ if (
+ isBackendVectorConfig(config) &&
+ recoveryPlan.backendDeleteHashes.length > 0
+ ) {
+ await deleteBackendVectorHashesForRecovery(
+ currentGraph.vectorIndexState.collectionId,
+ config,
+ recoveryPlan.backendDeleteHashes,
+ );
+ }
+
+ await prepareVectorStateForReplay(false, undefined, {
+ skipBackendPurge: isBackendVectorConfig(config),
+ });
+ } else if (recoveryPath === "legacy-snapshot") {
+ currentGraph = normalizeGraphRuntimeState(recoveryPoint.snapshotBefore, chatId);
+ extractionCount = currentGraph.historyState.extractionCount || 0;
+ await prepareVectorStateForReplay(false);
+ } else {
+ return {
+ success: false,
+ rollbackPerformed: false,
+ extractionTriggered: false,
+ requestedFloor: targetFloor,
+ effectiveFromFloor: null,
+ recoveryPath,
+ affectedBatchCount,
+ error: `不支持的回滚路径: ${recoveryPath}`,
+ };
+ }
+
+ const effectiveFromFloor = Number.isFinite(
+ currentGraph.historyState?.lastProcessedAssistantFloor,
+ )
+ ? currentGraph.historyState.lastProcessedAssistantFloor + 1
+ : 0;
+
+ pruneProcessedMessageHashesFromFloor(currentGraph, effectiveFromFloor);
+ currentGraph.lastProcessedSeq =
+ currentGraph.historyState?.lastProcessedAssistantFloor ?? -1;
+ clearHistoryDirty(
+ currentGraph,
+ buildRecoveryResult("reroll-rollback", {
+ fromFloor: targetFloor,
+ effectiveFromFloor,
+ path: recoveryPath,
+ affectedBatchCount,
+ detectionSource: "manual-reroll",
+ reason: "manual-reroll",
+ }),
+ );
+ saveGraphToChat();
+ refreshPanelLiveState();
+
+ return {
+ success: true,
+ rollbackPerformed: true,
+ extractionTriggered: false,
+ requestedFloor: targetFloor,
+ effectiveFromFloor,
+ recoveryPath,
+ affectedBatchCount,
+ error: "",
+ };
+}
+
async function recoverHistoryIfNeeded(trigger = "history-recovery") {
if (!currentGraph || isRecoveringHistory) {
return !isRecoveringHistory;
@@ -4002,18 +4122,45 @@ async function onManualExtract() {
async function onReroll({ fromFloor } = {}) {
if (isExtracting) {
toastr.info("记忆提取正在进行中,请稍候");
- return;
+ return {
+ success: false,
+ rollbackPerformed: false,
+ extractionTriggered: false,
+ requestedFloor: null,
+ effectiveFromFloor: null,
+ recoveryPath: "busy",
+ affectedBatchCount: 0,
+ error: "记忆提取正在进行中",
+ };
}
if (!currentGraph) {
toastr.info("图谱为空,无需重 Roll");
- return;
+ return {
+ success: false,
+ rollbackPerformed: false,
+ extractionTriggered: false,
+ requestedFloor: null,
+ effectiveFromFloor: null,
+ recoveryPath: "empty-graph",
+ affectedBatchCount: 0,
+ error: "图谱为空",
+ };
}
const context = getContext();
const chat = context.chat;
if (!Array.isArray(chat) || chat.length === 0) {
toastr.info("当前聊天为空");
- return;
+ return {
+ success: false,
+ rollbackPerformed: false,
+ extractionTriggered: false,
+ requestedFloor: null,
+ effectiveFromFloor: null,
+ recoveryPath: "empty-chat",
+ affectedBatchCount: 0,
+ error: "当前聊天为空",
+ };
}
// 确定回滚起点
@@ -4023,7 +4170,16 @@ async function onReroll({ fromFloor } = {}) {
const assistantTurns = getAssistantTurns(chat);
if (assistantTurns.length === 0) {
toastr.info("聊天中没有 AI 回复");
- return;
+ return {
+ success: false,
+ rollbackPerformed: false,
+ extractionTriggered: false,
+ requestedFloor: null,
+ effectiveFromFloor: null,
+ recoveryPath: "no-assistant-turn",
+ affectedBatchCount: 0,
+ error: "聊天中没有 AI 回复",
+ };
}
targetFloor = assistantTurns[assistantTurns.length - 1];
}
@@ -4037,42 +4193,40 @@ async function onReroll({ fromFloor } = {}) {
timeOut: 2000,
});
await onManualExtract();
- return;
+ return {
+ success: true,
+ rollbackPerformed: false,
+ extractionTriggered: true,
+ requestedFloor: targetFloor,
+ effectiveFromFloor: lastProcessed + 1,
+ recoveryPath: "direct-extract",
+ affectedBatchCount: 0,
+ extractionStatus: lastExtractionStatus?.level || "idle",
+ error: "",
+ };
}
console.log(`[ST-BME] 重 Roll 开始,目标楼层: ${targetFloor}`);
-
- // 1. 找到受影响的 journal 并回滚
- const recovery = findJournalRecoveryPoint(currentGraph, targetFloor);
- if (recovery && recovery.affectedJournals?.length > 0) {
- rollbackAffectedJournals(currentGraph, recovery.affectedJournals);
- console.log(`[ST-BME] 已回滚 ${recovery.affectedJournals.length} 个 batch`);
+ const rollbackResult = await rollbackGraphForReroll(targetFloor, context);
+ if (!rollbackResult.success) {
+ toastr.error(rollbackResult.error, "ST-BME 重 Roll");
+ return rollbackResult;
}
- // 2. 重置提取指针
- const newFloor = targetFloor - 1;
- currentGraph.historyState.lastProcessedAssistantFloor = newFloor;
- currentGraph.lastProcessedSeq = newFloor;
+ const rerollDesc =
+ rollbackResult.effectiveFromFloor !== targetFloor
+ ? `已按批次边界回滚到楼层 ${rollbackResult.effectiveFromFloor} 开始重新提取…`
+ : `已回滚到楼层 ${targetFloor} 开始重新提取…`;
+ toastr.info(rerollDesc, "ST-BME 重 Roll", {
+ timeOut: 2500,
+ });
- // 3. 清理 processedMessageHashes 中 >= targetFloor 的条目
- const hashes = currentGraph.historyState.processedMessageHashes || {};
- for (const key of Object.keys(hashes)) {
- if (Number(key) >= targetFloor) {
- delete hashes[key];
- }
- }
-
- // 4. 保存回滚后的状态
- saveGraph();
-
- toastr.info(
- `已回滚到楼层 ${targetFloor} 之前,开始重新提取…`,
- "ST-BME 重 Roll",
- { timeOut: 2000 },
- );
-
- // 5. 触发重新提取(复用手动提取逻辑)
await onManualExtract();
+ return {
+ ...rollbackResult,
+ extractionTriggered: true,
+ extractionStatus: lastExtractionStatus?.level || "idle",
+ };
}
async function onManualSleep() {
diff --git a/llm.js b/llm.js
index 3d7d780..5e83ac1 100644
--- a/llm.js
+++ b/llm.js
@@ -5,12 +5,16 @@ import { getRequestHeaders } from "../../../../script.js";
import { extension_settings } from "../../../extensions.js";
import { chat_completion_sources, sendOpenAIRequest } from "../../../openai.js";
import { resolveTaskGenerationOptions } from "./generation-options.js";
+import { resolveConfiguredTimeoutMs } from "./request-timeout.js";
+import { applyTaskRegex } from "./task-regex.js";
const MODULE_NAME = "st_bme";
const LLM_REQUEST_TIMEOUT_MS = 300000;
const DEFAULT_TEXT_COMPLETION_TOKENS = 64000;
const DEFAULT_JSON_COMPLETION_TOKENS = 64000;
const RETRY_JSON_COMPLETION_TOKENS = 3200;
+const SENSITIVE_DEBUG_KEY_PATTERN =
+ /^(authorization|proxy_password|api[_-]?key|access[_-]?token|refresh[_-]?token|secret|password)$/i;
function cloneRuntimeDebugValue(value, fallback = null) {
if (value == null) {
@@ -24,6 +28,57 @@ function cloneRuntimeDebugValue(value, fallback = null) {
}
}
+function redactSensitiveString(value) {
+ return String(value ?? "")
+ .replace(/(Bearer\s+)[^\s"'\r\n]+/gi, "$1[REDACTED]")
+ .replace(
+ /(Authorization\s*:\s*Bearer\s+)[^\s"'\r\n]+/gi,
+ "$1[REDACTED]",
+ )
+ .replace(/(proxy_password\s*:\s*)[^\r\n]+/gi, "$1[REDACTED]");
+}
+
+function redactSensitiveValue(value, currentKey = "") {
+ if (value == null) {
+ return value;
+ }
+
+ if (Array.isArray(value)) {
+ return value.map((item) => redactSensitiveValue(item, currentKey));
+ }
+
+ if (typeof value === "object") {
+ return Object.fromEntries(
+ Object.entries(value).map(([key, entryValue]) => [
+ key,
+ redactSensitiveValue(entryValue, key),
+ ]),
+ );
+ }
+
+ if (typeof value === "string") {
+ if (SENSITIVE_DEBUG_KEY_PATTERN.test(String(currentKey || ""))) {
+ return value ? "[REDACTED]" : "";
+ }
+ return redactSensitiveString(value);
+ }
+
+ if (SENSITIVE_DEBUG_KEY_PATTERN.test(String(currentKey || ""))) {
+ return "[REDACTED]";
+ }
+
+ return value;
+}
+
+function sanitizeLlmDebugSnapshot(snapshot = {}) {
+ const cloned = cloneRuntimeDebugValue(snapshot, {});
+ const redacted = redactSensitiveValue(cloned);
+ if (redacted && typeof redacted === "object" && !Array.isArray(redacted)) {
+ redacted.redacted = true;
+ }
+ return redacted;
+}
+
function getRuntimeDebugState() {
const stateKey = "__stBmeRuntimeDebugState";
if (
@@ -41,12 +96,17 @@ function getRuntimeDebugState() {
return globalThis[stateKey];
}
-function recordTaskLlmRequest(taskType, snapshot = {}) {
+function recordTaskLlmRequest(taskType, snapshot = {}, options = {}) {
const normalizedTaskType = String(taskType || "").trim() || "unknown";
const state = getRuntimeDebugState();
+ const shouldMerge = options?.merge === true;
+ const previousSnapshot = shouldMerge
+ ? cloneRuntimeDebugValue(state.taskLlmRequests[normalizedTaskType], {})
+ : {};
state.taskLlmRequests[normalizedTaskType] = {
+ ...previousSnapshot,
updatedAt: new Date().toISOString(),
- ...cloneRuntimeDebugValue(snapshot, {}),
+ ...sanitizeLlmDebugSnapshot(snapshot),
};
state.updatedAt = new Date().toISOString();
}
@@ -67,10 +127,131 @@ function getMemoryLLMConfig() {
}
function getConfiguredTimeoutMs(settings = {}) {
- const timeoutMs = Number(settings?.timeoutMs);
- return Number.isFinite(timeoutMs) && timeoutMs > 0
- ? timeoutMs
- : LLM_REQUEST_TIMEOUT_MS;
+ return typeof resolveConfiguredTimeoutMs === "function"
+ ? resolveConfiguredTimeoutMs(settings, LLM_REQUEST_TIMEOUT_MS)
+ : (() => {
+ const timeoutMs = Number(settings?.timeoutMs);
+ return Number.isFinite(timeoutMs) && timeoutMs > 0
+ ? timeoutMs
+ : LLM_REQUEST_TIMEOUT_MS;
+ })();
+}
+
+function normalizeRegexDebugEntries(debugCollector = null) {
+ if (!Array.isArray(debugCollector?.entries)) {
+ return [];
+ }
+ return debugCollector.entries.map((entry) => ({
+ taskType: String(entry?.taskType || ""),
+ stage: String(entry?.stage || ""),
+ enabled: entry?.enabled !== false,
+ appliedRules: Array.isArray(entry?.appliedRules)
+ ? entry.appliedRules.map((rule) => ({
+ id: String(rule?.id || ""),
+ source: String(rule?.source || ""),
+ error: String(rule?.error || ""),
+ }))
+ : [],
+ sourceCount: {
+ tavern: Number(entry?.sourceCount?.tavern || 0),
+ local: Number(entry?.sourceCount?.local || 0),
+ },
+ }));
+}
+
+function applyTaskOutputRegexStages(taskType, text) {
+ const normalizedTaskType = String(taskType || "").trim();
+ const rawText = typeof text === "string" ? text : "";
+ if (!normalizedTaskType || !rawText) {
+ return {
+ cleanedText: rawText,
+ debug: {
+ changed: false,
+ applied: false,
+ stages: [],
+ rawLength: rawText.length,
+ cleanedLength: rawText.length,
+ },
+ };
+ }
+
+ const settings = extension_settings[MODULE_NAME] || {};
+ const regexDebug = { entries: [] };
+ const afterRawStage = applyTaskRegex(
+ settings,
+ normalizedTaskType,
+ "output.rawResponse",
+ rawText,
+ regexDebug,
+ "assistant",
+ );
+ const cleanedText = applyTaskRegex(
+ settings,
+ normalizedTaskType,
+ "output.beforeParse",
+ afterRawStage,
+ regexDebug,
+ "assistant",
+ );
+ const normalizedEntries = normalizeRegexDebugEntries(regexDebug);
+ const applied = normalizedEntries.some(
+ (entry) => entry.appliedRules.length > 0,
+ );
+
+ return {
+ cleanedText,
+ debug: {
+ changed: cleanedText !== rawText,
+ applied,
+ rawLength: rawText.length,
+ cleanedLength: cleanedText.length,
+ stages: normalizedEntries,
+ },
+ };
+}
+
+function buildEffectiveLlmRoute(
+ hasDedicatedConfig,
+ privateRequestSource,
+ taskType = "",
+) {
+ const dedicated = Boolean(hasDedicatedConfig);
+ return {
+ taskType: String(taskType || "").trim(),
+ requestSource: String(privateRequestSource || "").trim(),
+ llm: dedicated ? "dedicated-memory-llm" : "sillytavern-current-model",
+ transport: dedicated
+ ? "dedicated-openai-compatible"
+ : "sillytavern-current-model",
+ };
+}
+
+function buildPromptExecutionSummary(debugContext = null) {
+ if (!debugContext || typeof debugContext !== "object") {
+ return null;
+ }
+
+ return {
+ promptAssembly:
+ debugContext.promptAssembly && typeof debugContext.promptAssembly === "object"
+ ? cloneRuntimeDebugValue(debugContext.promptAssembly, {})
+ : null,
+ promptBuild:
+ debugContext.promptBuild && typeof debugContext.promptBuild === "object"
+ ? cloneRuntimeDebugValue(debugContext.promptBuild, {})
+ : null,
+ effectiveDelivery:
+ debugContext.effectiveDelivery &&
+ typeof debugContext.effectiveDelivery === "object"
+ ? cloneRuntimeDebugValue(debugContext.effectiveDelivery, {})
+ : null,
+ ejsRuntimeStatus: String(debugContext.ejsRuntimeStatus || ""),
+ worldInfo:
+ debugContext.worldInfo && typeof debugContext.worldInfo === "object"
+ ? cloneRuntimeDebugValue(debugContext.worldInfo, {})
+ : null,
+ regexInput: normalizeRegexDebugEntries(debugContext.regexInput),
+ };
}
function normalizeOpenAICompatibleBaseUrl(value) {
@@ -423,6 +604,11 @@ async function callDedicatedOpenAICompatible(
filteredGeneration: generationResolved.filtered || {},
removedGeneration: generationResolved.removed || [],
capabilityMode: generationResolved.capabilityMode || "",
+ effectiveRoute: buildEffectiveLlmRoute(
+ hasDedicatedConfig,
+ privateRequestSource,
+ taskType,
+ ),
maxCompletionTokens,
});
if (!hasDedicatedConfig) {
@@ -521,6 +707,11 @@ async function callDedicatedOpenAICompatible(
removedGeneration: generationResolved.removed || [],
capabilityMode: generationResolved.capabilityMode || "",
resolvedCompletionTokens,
+ effectiveRoute: buildEffectiveLlmRoute(
+ true,
+ privateRequestSource,
+ taskType,
+ ),
requestBody: body,
});
@@ -605,6 +796,7 @@ export async function callLLMForJSON({
taskType = "",
requestSource = "",
additionalMessages = [],
+ debugContext = null,
} = {}) {
const override = getLlmTestOverride("callLLMForJSON");
if (override) {
@@ -616,6 +808,7 @@ export async function callLLMForJSON({
taskType,
requestSource,
additionalMessages,
+ debugContext,
});
}
@@ -624,6 +817,7 @@ export async function callLLMForJSON({
requestSource,
);
let lastFailureReason = "";
+ const promptExecutionSummary = buildPromptExecutionSummary(debugContext);
for (let attempt = 0; attempt <= maxRetries; attempt++) {
try {
@@ -645,6 +839,17 @@ export async function callLLMForJSON({
: RETRY_JSON_COMPLETION_TOKENS,
});
const responseText = response?.content || "";
+ const outputCleanup = applyTaskOutputRegexStages(taskType, responseText);
+ recordTaskLlmRequest(
+ taskType || privateRequestSource,
+ {
+ responseCleaning: outputCleanup.debug,
+ promptExecution: promptExecutionSummary,
+ },
+ {
+ merge: true,
+ },
+ );
if (!responseText || typeof responseText !== "string") {
console.warn(`[ST-BME] LLM 返回空响应 (尝试 ${attempt + 1})`);
@@ -653,14 +858,14 @@ export async function callLLMForJSON({
}
// 尝试解析 JSON
- const parsed = extractJSON(responseText);
+ const parsed = extractJSON(outputCleanup.cleanedText);
if (parsed !== null) {
return parsed;
}
const truncated =
response.finishReason === "length" ||
- looksLikeTruncatedJson(responseText);
+ looksLikeTruncatedJson(outputCleanup.cleanedText);
lastFailureReason = truncated
? "输出因长度限制被截断,请重新输出更紧凑的完整 JSON"
: "输出不是有效 JSON,请严格返回紧凑 JSON 对象";
diff --git a/panel.js b/panel.js
index 271d287..8d192c9 100644
--- a/panel.js
+++ b/panel.js
@@ -444,23 +444,41 @@ function _renderRecentList(elementId, items) {
if (!listEl) return;
if (!items.length) {
- listEl.innerHTML =
- '<li class="bme-recent-item"><div class="bme-recent-text" style="color: var(--bme-on-surface-dim)">暂无数据</div></li>';
+ const li = document.createElement("li");
+ li.className = "bme-recent-item";
+ const text = document.createElement("div");
+ text.className = "bme-recent-text";
+ text.style.color = "var(--bme-on-surface-dim)";
+ text.textContent = "暂无数据";
+ li.appendChild(text);
+ listEl.replaceChildren(li);
return;
}
- listEl.innerHTML = items
- .map((item) => {
- const secondary = item.meta || item.time || "";
- return `
- ${_typeLabel(item.type)}
-
-
${_escHtml(item.name || "—")}
-
${_escHtml(secondary)}
-
- `;
- })
- .join("");
+ const fragment = document.createDocumentFragment();
+ items.forEach((item) => {
+ const secondary = item.meta || item.time || "";
+ const li = document.createElement("li");
+ li.className = "bme-recent-item";
+
+ const badge = document.createElement("span");
+ badge.className = `bme-type-badge ${_safeCssToken(item.type)}`;
+ badge.textContent = _typeLabel(item.type);
+ li.appendChild(badge);
+
+ const content = document.createElement("div");
+ const title = document.createElement("div");
+ title.className = "bme-recent-text";
+ title.textContent = item.name || "—";
+ const meta = document.createElement("div");
+ meta.className = "bme-recent-meta";
+ meta.textContent = secondary;
+ content.append(title, meta);
+ li.appendChild(content);
+
+ fragment.appendChild(li);
+ });
+ listEl.replaceChildren(fragment);
}
// ==================== 记忆浏览器 ====================
@@ -497,25 +515,43 @@ function _refreshMemoryBrowser() {
return (b.seqRange?.[1] ?? b.seq ?? 0) - (a.seqRange?.[1] ?? a.seq ?? 0);
});
- listEl.innerHTML = nodes
- .slice(0, 100)
- .map((node) => {
- const name = getNodeDisplayName(node);
- const snippet = _getNodeSnippet(node);
- return `
- ${_typeLabel(node.type)}
-
-
${_escHtml(name)}
-
${_escHtml(snippet)}
-
- imp: ${node.importance || 5}
- acc: ${node.accessCount || 0}
- seq: ${node.seqRange?.[1] ?? node.seq ?? 0}
-
-
- `;
- })
- .join("");
+ const fragment = document.createDocumentFragment();
+ nodes.slice(0, 100).forEach((node) => {
+ const name = getNodeDisplayName(node);
+ const snippet = _getNodeSnippet(node);
+ const li = document.createElement("li");
+ li.className = "bme-memory-item";
+ li.dataset.nodeId = String(node.id || "");
+
+ const badge = document.createElement("span");
+ badge.className = `bme-type-badge ${_safeCssToken(node.type)}`;
+ badge.textContent = _typeLabel(node.type);
+ li.appendChild(badge);
+
+ const content = document.createElement("div");
+ const title = document.createElement("div");
+ title.className = "bme-memory-name";
+ title.textContent = name;
+ const body = document.createElement("div");
+ body.className = "bme-memory-content";
+ body.textContent = snippet;
+ const meta = document.createElement("div");
+ meta.className = "bme-memory-meta";
+ ["imp", "acc", "seq"].forEach((key, index) => {
+ const span = document.createElement("span");
+ span.textContent =
+ index === 0
+ ? `imp: ${node.importance || 5}`
+ : index === 1
+ ? `acc: ${node.accessCount || 0}`
+ : `seq: ${node.seqRange?.[1] ?? node.seq ?? 0}`;
+ meta.appendChild(span);
+ });
+ content.append(title, body, meta);
+ li.appendChild(content);
+ fragment.appendChild(li);
+ });
+ listEl.replaceChildren(fragment);
listEl.querySelectorAll(".bme-memory-item").forEach((el) => {
el.addEventListener("click", () => {
@@ -547,8 +583,11 @@ async function _refreshInjectionPreview() {
const injection = String(_getLastInjection?.() || "").trim();
if (!injection) {
- container.innerHTML =
- '<div class="bme-injection-preview" style="color: var(--bme-on-surface-dim)">暂无注入内容。先完成一次召回或正常生成后再查看。</div>';
+ const empty = document.createElement("div");
+ empty.className = "bme-injection-preview";
+ empty.style.color = "var(--bme-on-surface-dim)";
+ empty.textContent = "暂无注入内容。先完成一次召回或正常生成后再查看。";
+ container.replaceChildren(empty);
if (tokenEl) tokenEl.textContent = "";
return;
}
@@ -556,10 +595,17 @@ async function _refreshInjectionPreview() {
try {
const { estimateTokens } = await import("./injector.js");
const totalTokens = estimateTokens(injection);
- container.innerHTML = `<div class="bme-injection-preview">${_escHtml(injection)}</div>`;
+ const preview = document.createElement("div");
+ preview.className = "bme-injection-preview";
+ preview.textContent = injection;
+ container.replaceChildren(preview);
if (tokenEl) tokenEl.textContent = `≈ ${totalTokens} tokens`;
} catch (error) {
- container.innerHTML = `<div class="bme-injection-preview" style="color: var(--bme-accent3)">预览生成失败: ${_escHtml(error.message)}</div>`;
+ const failure = document.createElement("div");
+ failure.className = "bme-injection-preview";
+ failure.style.color = "var(--bme-accent3)";
+ failure.textContent = `预览生成失败: ${error.message}`;
+ container.replaceChildren(failure);
if (tokenEl) tokenEl.textContent = "";
}
}
@@ -589,14 +635,18 @@ function _buildLegend() {
{ key: "reflection", label: "反思" },
];
- legendEl.innerHTML = types
- .map(
- (type) => `
-
- ${type.label}
- `,
- )
- .join("");
+ const fragment = document.createDocumentFragment();
+ types.forEach((type) => {
+ const item = document.createElement("span");
+ item.className = "bme-legend-item";
+ const dot = document.createElement("span");
+ dot.className = "bme-legend-dot";
+ dot.style.background = colors[type.key] || "";
+ item.appendChild(dot);
+ item.append(document.createTextNode(type.label));
+ fragment.appendChild(item);
+ });
+ legendEl.replaceChildren(fragment);
}
function _bindGraphControls() {
@@ -648,14 +698,19 @@ function _showNodeDetail(node) {
});
}
- bodyEl.innerHTML = items
- .map(
- (item) => `
-
-
${_escHtml(String(item.value ?? "—"))}
-
`,
- )
- .join("");
+ const fragment = document.createDocumentFragment();
+ items.forEach((item) => {
+ const row = document.createElement("div");
+ row.className = "bme-node-detail-field";
+ const label = document.createElement("label");
+ label.textContent = item.label;
+ const value = document.createElement("div");
+ value.className = "value";
+ value.textContent = String(item.value ?? "—");
+ row.append(label, value);
+ fragment.appendChild(row);
+ });
+ bodyEl.replaceChildren(fragment);
detailEl.classList.add("open");
}
@@ -1856,7 +1911,7 @@ function _renderTaskProfileWorkspace(state) {