Mirror of https://github.com/Youzini-afk/ST-Bionic-Memory-Ecology.git
Synced 2026-05-15 22:30:38 +08:00
Refine extraction stage notices for post-processing
index.js (43)
index.js (43)
index.js
@@ -7685,12 +7685,27 @@ async function handleExtractionSuccess(
     typeof recordMaintenanceAction === "function"
       ? recordMaintenanceAction
       : () => null;
+  const updateExtractionPostProcessStatus = (
+    text,
+    meta,
+    { noticeMarquee = false } = {},
+  ) => {
+    if (typeof setLastExtractionStatus !== "function") return;
+    setLastExtractionStatus(text, meta, "running", {
+      syncRuntime: true,
+      noticeMarquee,
+    });
+  };
   throwIfAborted(signal, "提取已终止");
   extractionCount++;
   ensureCurrentGraphRuntimeState();
   currentGraph.historyState.extractionCount = extractionCount;
   updateLastExtractedItems(result.newNodeIds || []);
   setBatchStageOutcome(status, "core", "success");
+  updateExtractionPostProcessStatus(
+    "提取收尾中",
+    `已抽取 ${newNodeCount} 个新节点,正在处理后续阶段`,
+  );
 
   if (settings.enableConsolidation && result.newNodeIds?.length > 0) {
     let consolidationAnalysis = null;
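Note: the helper added in this hunk is a thin guard over setLastExtractionStatus that pins every post-process notice to the "running" state and always requests a runtime sync, presumably so no intermediate notice can be mistaken for a terminal batch outcome. A minimal standalone sketch of the same pattern follows; the recorded store and the English sample strings are illustrative stand-ins, not code from the extension:

// Sketch of the status-wrapper pattern from the hunk above; only the
// wrapper body mirrors the commit, everything else is a stand-in.
const recorded = [];
function setLastExtractionStatus(text, meta, state, opts) {
  recorded.push({ text, meta, state, ...opts });
}
const updateExtractionPostProcessStatus = (
  text,
  meta,
  { noticeMarquee = false } = {},
) => {
  if (typeof setLastExtractionStatus !== "function") return;
  setLastExtractionStatus(text, meta, "running", {
    syncRuntime: true,
    noticeMarquee,
  });
};
updateExtractionPostProcessStatus("finalizing extraction", "3 new nodes");
console.log(recorded[0]);
// { text: "finalizing extraction", meta: "3 new nodes",
//   state: "running", syncRuntime: true, noticeMarquee: false }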
@@ -7702,6 +7717,10 @@ async function handleExtractionSuccess(
       ),
     );
     if (newNodeCount < minNewNodes) {
+      updateExtractionPostProcessStatus(
+        "整合判定中",
+        `本批新增 ${newNodeCount} 个节点,正在检查是否需要自动整合/进化`,
+      );
       consolidationAnalysis = await analyzeConsolidationGate({
         graph: currentGraph,
         newNodeIds: result.newNodeIds,
@@ -7729,6 +7748,10 @@ async function handleExtractionSuccess(
       pushBatchStageArtifact(status, "structural", "consolidation-skipped");
     } else {
       try {
+        updateExtractionPostProcessStatus(
+          "整合/进化中",
+          String(gate.reason || "").trim() || "正在自动整合新旧记忆",
+        );
         const beforeSnapshot = cloneMaintenanceSnapshot(currentGraph);
         const consolidationResult = await consolidateMemories({
           graph: currentGraph,
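Note: the notice text for this stage prefers the gate's own reason and falls back to a generic label, 正在自动整合新旧记忆 (roughly "auto-consolidating old and new memories"), when the reason is empty or whitespace. The idiom in isolation; noticeFor is a hypothetical name for illustration:

// Fallback idiom from the hunk above; noticeFor is hypothetical.
const noticeFor = (gate) =>
  String(gate?.reason || "").trim() || "正在自动整合新旧记忆";
console.log(noticeFor({ reason: "   " })); // falls back to the default label
console.log(noticeFor({ reason: "duplicate risk high" })); // keeps the reason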
@@ -7772,6 +7795,10 @@ async function handleExtractionSuccess(
     extractionCount % settings.synopsisEveryN === 0
   ) {
     try {
+      updateExtractionPostProcessStatus(
+        "概要更新中",
+        `第 ${extractionCount} 次提取,正在生成全局概要`,
+      );
       await generateSynopsis({
         graph: currentGraph,
         schema: getSchema(),
@@ -7799,6 +7826,10 @@ async function handleExtractionSuccess(
     extractionCount % settings.reflectEveryN === 0
   ) {
     try {
+      updateExtractionPostProcessStatus(
+        "反思生成中",
+        `第 ${extractionCount} 次提取,正在生成长期反思`,
+      );
       await generateReflection({
         graph: currentGraph,
         currentSeq: endIdx,
@@ -7825,6 +7856,10 @@ async function handleExtractionSuccess(
     extractionCount % settings.sleepEveryN === 0
   ) {
     try {
+      updateExtractionPostProcessStatus(
+        "主动遗忘中",
+        `第 ${extractionCount} 次提取,正在归档低价值记忆`,
+      );
       const beforeSnapshot = cloneMaintenanceSnapshot(currentGraph);
       const sleepResult = sleepCycle(currentGraph, settings);
       if ((sleepResult?.forgotten || 0) > 0) {
@@ -7872,6 +7907,10 @@ async function handleExtractionSuccess(
         "已到自动压缩周期,但当前没有达到内部压缩阈值的候选组";
       pushBatchStageArtifact(status, "structural", "compression-skipped");
     } else {
+      updateExtractionPostProcessStatus(
+        "自动压缩中",
+        `已到第 ${extractionCount} 次提取周期,正在压缩层级记忆`,
+      );
       status.autoCompressionSkippedReason = "";
       const beforeSnapshot = cloneMaintenanceSnapshot(currentGraph);
       const compressionResult = await compressAll(
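Note: the synopsis, reflection, sleep-cycle, and compression hunks above all share one trigger shape: the stage is enabled and extractionCount is an exact multiple of its EveryN setting. A sketch of that schedule; dueStages is a hypothetical helper, but the settings keys match the ones used by the new test further down:

// Sketch of the shared every-N trigger behind the four periodic stages.
function dueStages(extractionCount, settings) {
  const plan = [
    ["synopsis", settings.enableSynopsis, settings.synopsisEveryN],
    ["reflection", settings.enableReflection, settings.reflectEveryN],
    ["sleep", settings.enableSleepCycle, settings.sleepEveryN],
    ["compression", settings.enableAutoCompression, settings.compressionEveryN],
  ];
  return plan
    .filter(
      ([, enabled, everyN]) =>
        enabled && everyN > 0 && extractionCount % everyN === 0,
    )
    .map(([name]) => name);
}

console.log(
  dueStages(2, {
    enableSynopsis: true,
    synopsisEveryN: 1,
    enableReflection: true,
    reflectEveryN: 3,
    enableSleepCycle: true,
    sleepEveryN: 2,
    enableAutoCompression: false,
    compressionEveryN: 1,
  }),
); // [ 'synopsis', 'sleep' ]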
@@ -7916,6 +7955,10 @@ async function handleExtractionSuccess(
 
   let vectorSync = null;
   try {
+    updateExtractionPostProcessStatus(
+      "向量同步中",
+      "正在同步本批提取后的向量索引",
+    );
     vectorSync = await syncVectorState({ signal });
   } catch (error) {
     if (isAbortError(error)) throw error;
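Note: the abort handling in the vector-sync hunk is worth flagging. Abort errors are re-thrown so cancellation unwinds the whole batch, while any other failure stays contained to its stage. A sketch of the idiom; this isAbortError assumes the DOM AbortError naming convention, and the extension's own helper may differ:

// Abort errors must bubble; ordinary stage failures are contained per stage.
function isAbortError(error) {
  return error?.name === "AbortError"; // stand-in for the extension helper
}
async function runStage(name, stage) {
  try {
    await stage();
  } catch (error) {
    if (isAbortError(error)) throw error; // cancellation unwinds the batch
    console.warn(`${name} failed, continuing batch:`, error.message);
  }
}
runStage("vector-sync", async () => {
  throw new Error("index unavailable");
}); // logs a warning instead of rejecting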

index.js
@@ -236,6 +236,7 @@ function createBatchStageHarness() {
     result: null,
     extractionCount: 0,
     currentGraph: null,
+    extractionStatuses: [],
     consolidateMemories: async () => {},
     generateSynopsis: async () => {},
     generateReflection: async () => {},
@@ -271,6 +272,9 @@ function createBatchStageHarness() {
     pushBatchStageArtifact,
     finalizeBatchStatus,
     createUiStatus,
+    setLastExtractionStatus(...args) {
+      context.extractionStatuses.push(args);
+    },
   };
   vm.createContext(context);
   vm.runInContext(
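Note: this harness change is small but load-bearing. The module under test runs inside a vm context, so a recording stub injected as setLastExtractionStatus intercepts every status update the production code emits. A self-contained sketch of the same capture trick:

// Minimal sketch of the vm-based capture used by the harness.
const vm = require("node:vm");
const context = { extractionStatuses: [] };
context.setLastExtractionStatus = (...args) => {
  context.extractionStatuses.push(args);
};
vm.createContext(context);
vm.runInContext(
  'setLastExtractionStatus("向量同步中", "syncing", "running")',
  context,
);
console.log(context.extractionStatuses);
// [ [ '向量同步中', 'syncing', 'running' ] ]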
@@ -2604,6 +2608,73 @@ async function testBatchStatusSemanticFailureDoesNotHideCoreSuccess() {
   assert.match(effects.batchStatus.errors[0], /概要生成失败/);
 }
 
+async function testExtractionPostProcessStatusesExposeMaintenancePhases() {
+  const harness = await createBatchStageHarness();
+  const { createBatchStatusSkeleton, handleExtractionSuccess } = harness.result;
+  harness.currentGraph = {
+    historyState: { extractionCount: 0 },
+    vectorIndexState: {},
+  };
+  harness.ensureCurrentGraphRuntimeState = () => {
+    harness.currentGraph.historyState ||= {};
+    harness.currentGraph.vectorIndexState ||= {};
+  };
+  harness.consolidateMemories = async () => ({
+    merged: 1,
+    skipped: 0,
+    kept: 0,
+    evolved: 1,
+    connections: 0,
+    updates: 0,
+  });
+  harness.generateSynopsis = async () => ({ ok: true });
+  harness.generateReflection = async () => ({ ok: true });
+  harness.sleepCycle = () => ({ forgotten: 0 });
+  harness.inspectAutoCompressionCandidates = () => ({
+    hasCandidates: true,
+    reason: "",
+  });
+  harness.compressAll = async () => ({ created: 1, archived: 2 });
+  harness.syncVectorState = async () => ({
+    insertedHashes: ["hash-stage"],
+    stats: { pending: 0, indexed: 3 },
+  });
+
+  const batchStatus = createBatchStatusSkeleton({
+    processedRange: [8, 8],
+    extractionCountBefore: 0,
+  });
+  await handleExtractionSuccess(
+    {
+      newNodeIds: ["node-stage"],
+    },
+    8,
+    {
+      enableConsolidation: true,
+      consolidationAutoMinNewNodes: 1,
+      enableSynopsis: true,
+      synopsisEveryN: 1,
+      enableReflection: true,
+      reflectEveryN: 1,
+      enableSleepCycle: true,
+      sleepEveryN: 1,
+      enableAutoCompression: true,
+      compressionEveryN: 1,
+    },
+    undefined,
+    batchStatus,
+  );
+
+  const statusTexts = harness.extractionStatuses.map((entry) => entry[0]);
+  assert.ok(statusTexts.includes("提取收尾中"));
+  assert.ok(statusTexts.includes("整合/进化中"));
+  assert.ok(statusTexts.includes("概要更新中"));
+  assert.ok(statusTexts.includes("反思生成中"));
+  assert.ok(statusTexts.includes("主动遗忘中"));
+  assert.ok(statusTexts.includes("自动压缩中"));
+  assert.ok(statusTexts.includes("向量同步中"));
+}
+
 async function testAutoConsolidationRunsOnHighDuplicateRiskSingleNode() {
   const harness = await createBatchStageHarness();
   const { createBatchStatusSkeleton, handleExtractionSuccess } = harness.result;
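Note: the test pins every stage interval to 1 and consolidationAutoMinNewNodes to 1, so a single handleExtractionSuccess call with one new node ("node-stage") walks through every maintenance phase in one sweep. That is what lets the assertions demand all seven notice texts at once, from 提取收尾中 (finalizing extraction) through 向量同步中 (syncing vectors).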
@@ -5474,6 +5545,7 @@ await testReverseJournalRollbackStateFormsReplayClosure();
 await testReverseJournalRecoveryPlanMixedLegacyAndCurrentRetainsRepairSet();
 await testBatchStatusStructuralPartialRemainsRecoverable();
 await testBatchStatusSemanticFailureDoesNotHideCoreSuccess();
+await testExtractionPostProcessStatusesExposeMaintenancePhases();
 await testAutoConsolidationRunsOnHighDuplicateRiskSingleNode();
 await testAutoConsolidationSkipsLowRiskSingleNode();
 await testAutoCompressionRunsOnlyOnConfiguredInterval();