Refine automatic consolidation and compression triggers

This commit is contained in:
Youzini-afk
2026-04-04 15:54:39 +08:00
parent 4bbbd4d09d
commit f367b8989c
7 changed files with 977 additions and 98 deletions

View File

@@ -34,7 +34,53 @@ this.defaultSettings = defaultSettings;
return context.defaultSettings;
}
/**
 * Loads the settings-compat helpers by slicing their definitions verbatim out
 * of ../index.js and evaluating them inside a sandboxed vm context, so the
 * test exercises the exact shipped migration/merge code without importing the
 * whole module (and its side effects).
 *
 * @returns {Promise<{mergePersistedSettings: Function}>} the merge helper
 *   extracted from index.js.
 * @throws {Error} when any of the three expected source snippets is missing.
 */
async function loadSettingsCompatHelpers() {
  const __dirname = path.dirname(fileURLToPath(import.meta.url));
  const source = await fs.readFile(path.resolve(__dirname, "../index.js"), "utf8");
  // Grab the three definitions as raw text; the regexes anchor on the
  // closing brace at column 0 to capture each whole top-level definition.
  const settingsSnippet = source.match(/const defaultSettings = \{[\s\S]*?^\};/m);
  const migrateSnippet = source.match(
    /function migrateLegacyAutoMaintenanceSettings\(loaded = \{\}\) \{[\s\S]*?^}\n/m,
  );
  const mergeSnippet = source.match(
    /function mergePersistedSettings\(loaded = \{\}\) \{[\s\S]*?^}\n/m,
  );
  if (!settingsSnippet || !migrateSnippet || !mergeSnippet) {
    throw new Error("无法从 index.js 提取设置兼容辅助函数");
  }
  // Minimal sandbox: only the two helpers the extracted code calls.
  const context = vm.createContext({
    // Integer clamp with fallback for non-finite input — mirrors the shape the
    // extracted settings code expects.
    clampInt: (value, fallback = 0, min = 0, max = 9999) => {
      const numeric = Number(value);
      return Number.isFinite(numeric)
        ? Math.min(max, Math.max(min, Math.trunc(numeric)))
        : fallback;
    },
    // Empty per-task profile containers for every known task kind.
    createDefaultTaskProfiles() {
      const tasks = [
        "extract",
        "recall",
        "compress",
        "synopsis",
        "reflection",
        "consolidation",
      ];
      return Object.fromEntries(
        tasks.map((task) => [task, { activeProfileId: "default", profiles: [] }]),
      );
    },
  });
  // Concatenate the snippets and export the merge helper onto the context.
  new vm.Script(`
${settingsSnippet[0]}
${migrateSnippet[0]}
${mergeSnippet[0]}
this.mergePersistedSettings = mergePersistedSettings;
`).runInContext(context);
  return { mergePersistedSettings: context.mergePersistedSettings };
}
// Load the shipped defaults and the compat helpers extracted from index.js.
const defaultSettings = await loadDefaultSettings();
const { mergePersistedSettings } = await loadSettingsCompatHelpers();
// Spot-check core extraction/recall defaults.
assert.equal(defaultSettings.extractContextTurns, 2);
assert.equal(defaultSettings.recallTopK, 20);
@@ -79,11 +125,20 @@ assert.equal(defaultSettings.injectObjectiveGlobalMemory, true);
assert.equal(defaultSettings.injectDepth, 9999);
assert.equal(defaultSettings.enabled, true);
assert.equal(defaultSettings.enableReflection, true);
// The legacy maintenanceAutoMinNewNodes key no longer exists in the defaults;
// its role is split between consolidationAutoMinNewNodes and compressionEveryN.
// (An assert reading the legacy key's old value here would see `undefined`
// and fail, contradicting the `in` check below — so it must not be present.)
assert.equal(defaultSettings.consolidationAutoMinNewNodes, 2);
assert.equal(defaultSettings.compressionEveryN, 10);
assert.equal("maintenanceAutoMinNewNodes" in defaultSettings, false);
assert.equal(defaultSettings.embeddingTransportMode, "direct");
assert.equal(defaultSettings.taskProfilesVersion, 3);
assert.ok(defaultSettings.taskProfiles);
assert.ok(defaultSettings.taskProfiles.extract);
assert.ok(defaultSettings.taskProfiles.recall);
// Migration: a persisted legacy key is renamed onto the new setting and the
// legacy key itself is dropped from the merged result.
const migratedSettings = mergePersistedSettings({
  maintenanceAutoMinNewNodes: 7,
});
assert.equal(migratedSettings.consolidationAutoMinNewNodes, 7);
assert.equal(migratedSettings.compressionEveryN, 10);
assert.equal("maintenanceAutoMinNewNodes" in migratedSettings, false);
console.log("default-settings tests passed");

View File

@@ -231,6 +231,16 @@ function createBatchStageHarness() {
getSchema: () => schema,
getEmbeddingConfig: () => null,
getVectorIndexStats: () => ({ pending: 0 }),
// Conservative default: the consolidation gate never triggers in the base
// harness; tests override this stub when they need a fired gate.
analyzeAutoConsolidationGate: async () => ({
triggered: false,
reason: "本批新增少且无明显重复风险,跳过自动整合",
matchedScore: null,
matchedNodeId: "",
}),
// Conservative default: no compression candidate groups, so any scheduled
// compression cycle becomes a skip unless a test overrides this stub.
inspectAutoCompressionCandidates: () => ({
hasCandidates: false,
reason: "已到自动压缩周期,但当前没有达到内部压缩阈值的候选组",
}),
updateLastExtractedItems: () => {},
ensureCurrentGraphRuntimeState: () => {},
throwIfAborted: () => {},
@@ -2530,6 +2540,10 @@ async function testBatchStatusStructuralPartialRemainsRecoverable() {
harness.currentGraph.historyState ||= {};
harness.currentGraph.vectorIndexState ||= {};
};
// Report candidates so the compression path actually runs and hits the
// simulated failure below.
harness.inspectAutoCompressionCandidates = () => ({
hasCandidates: true,
reason: "",
});
// Simulated hard failure inside compression — the test verifies the batch
// remains recoverable afterwards.
harness.compressAll = async () => {
throw new Error("compression down");
};
@@ -2550,6 +2564,7 @@ async function testBatchStatusStructuralPartialRemainsRecoverable() {
enableSynopsis: false,
enableReflection: false,
enableSleepCycle: false,
// Compress on every extraction so the failure path fires on this batch.
compressionEveryN: 1,
synopsisEveryN: 1,
reflectEveryN: 1,
sleepEveryN: 1,
@@ -2614,6 +2629,333 @@ async function testBatchStatusSemanticFailureDoesNotHideCoreSuccess() {
assert.match(effects.batchStatus.errors[0], /概要生成失败/);
}
/**
 * A single new node that is highly similar to existing memory must trigger
 * the auto-consolidation gate exactly once and run consolidateMemories once,
 * while auto-compression stays off (compressionEveryN = 0) with a matching
 * skip reason.
 */
async function testAutoConsolidationRunsOnHighDuplicateRiskSingleNode() {
  const stage = await createBatchStageHarness();
  const { createBatchStatusSkeleton, handleExtractionSuccess } = stage.result;
  stage.currentGraph = {
    historyState: { extractionCount: 0 },
    vectorIndexState: {},
  };
  stage.ensureCurrentGraphRuntimeState = () => {
    stage.currentGraph.historyState ||= {};
    stage.currentGraph.vectorIndexState ||= {};
  };
  let gateInvocations = 0;
  let consolidateInvocations = 0;
  // Gate override: reports a high-similarity duplicate so consolidation fires.
  stage.analyzeAutoConsolidationGate = async () => {
    gateInvocations += 1;
    return {
      triggered: true,
      reason:
        "本批仅新增 1 个节点但与旧记忆高度相似0.930 >= 0.85),已触发自动整合",
      matchedScore: 0.93,
      matchedNodeId: "old-1",
    };
  };
  stage.consolidateMemories = async () => {
    consolidateInvocations += 1;
    return { merged: 1, skipped: 0, kept: 0, evolved: 0, connections: 0, updates: 0 };
  };
  stage.syncVectorState = async () => ({ insertedHashes: [], stats: { pending: 0 } });
  const batchStatus = createBatchStatusSkeleton({
    processedRange: [6, 6],
    extractionCountBefore: 0,
  });
  // compressionEveryN: 0 disables auto-compression for this scenario.
  const settings = {
    enableConsolidation: true,
    consolidationAutoMinNewNodes: 2,
    consolidationThreshold: 0.85,
    compressionEveryN: 0,
    enableSynopsis: false,
    enableReflection: false,
    enableSleepCycle: false,
    synopsisEveryN: 1,
    reflectEveryN: 1,
    sleepEveryN: 1,
  };
  const effects = await handleExtractionSuccess(
    { newNodeIds: ["node-dup"] },
    6,
    settings,
    undefined,
    batchStatus,
  );
  assert.equal(gateInvocations, 1);
  assert.equal(consolidateInvocations, 1);
  assert.equal(effects.batchStatus.consolidationGateTriggered, true);
  assert.equal(effects.batchStatus.consolidationGateMatchedNodeId, "old-1");
  assert.equal(effects.batchStatus.consolidationGateSimilarity, 0.93);
  assert.match(effects.batchStatus.consolidationGateReason, /高度相似/);
  assert.equal(effects.batchStatus.autoCompressionScheduled, false);
  assert.match(effects.batchStatus.autoCompressionSkippedReason, /自动压缩已关闭/);
}
/**
 * A low-similarity single-node batch must NOT trigger auto-consolidation:
 * the gate verdict is still recorded on the batch status, a
 * "consolidation-skipped" artifact is appended, and consolidateMemories is
 * never invoked.
 */
async function testAutoConsolidationSkipsLowRiskSingleNode() {
  const stage = await createBatchStageHarness();
  const { createBatchStatusSkeleton, handleExtractionSuccess } = stage.result;
  stage.currentGraph = {
    historyState: { extractionCount: 0 },
    vectorIndexState: {},
  };
  stage.ensureCurrentGraphRuntimeState = () => {
    stage.currentGraph.historyState ||= {};
    stage.currentGraph.vectorIndexState ||= {};
  };
  let consolidateInvocations = 0;
  // Gate override: similarity well below threshold, so no trigger.
  stage.analyzeAutoConsolidationGate = async () => ({
    triggered: false,
    reason:
      "本批新增少且最高相似度 0.420 未达到阈值 0.85,跳过自动整合",
    matchedScore: 0.42,
    matchedNodeId: "old-2",
  });
  stage.consolidateMemories = async () => {
    consolidateInvocations += 1;
    return { merged: 0, skipped: 0, kept: 1, evolved: 0, connections: 0, updates: 0 };
  };
  stage.syncVectorState = async () => ({ insertedHashes: [], stats: { pending: 0 } });
  const batchStatus = createBatchStatusSkeleton({
    processedRange: [7, 7],
    extractionCountBefore: 0,
  });
  const settings = {
    enableConsolidation: true,
    consolidationAutoMinNewNodes: 2,
    consolidationThreshold: 0.85,
    compressionEveryN: 0,
    enableSynopsis: false,
    enableReflection: false,
    enableSleepCycle: false,
    synopsisEveryN: 1,
    reflectEveryN: 1,
    sleepEveryN: 1,
  };
  const effects = await handleExtractionSuccess(
    { newNodeIds: ["node-low-risk"] },
    7,
    settings,
    undefined,
    batchStatus,
  );
  assert.equal(consolidateInvocations, 0);
  assert.equal(effects.batchStatus.consolidationGateTriggered, false);
  assert.equal(effects.batchStatus.consolidationGateMatchedNodeId, "old-2");
  assert.equal(effects.batchStatus.consolidationGateSimilarity, 0.42);
  assert.match(effects.batchStatus.consolidationGateReason, /跳过自动整合/);
  assert.equal(
    effects.batchStatus.stages.structural.artifacts.includes("consolidation-skipped"),
    true,
  );
}
/**
 * Auto-compression runs when the extraction counter reaches the configured
 * compressionEveryN interval (here the 10th extraction: 9 prior + this one),
 * and the next scheduled compression point advances to 20.
 */
async function testAutoCompressionRunsOnlyOnConfiguredInterval() {
  const stage = await createBatchStageHarness();
  const { createBatchStatusSkeleton, handleExtractionSuccess } = stage.result;
  stage.currentGraph = {
    historyState: { extractionCount: 9 },
    vectorIndexState: {},
  };
  stage.ensureCurrentGraphRuntimeState = () => {
    stage.currentGraph.historyState ||= {};
    stage.currentGraph.vectorIndexState ||= {};
  };
  stage.extractionCount = 9;
  let compressionInvocations = 0;
  // Candidates exist, so the scheduled cycle should actually compress.
  stage.inspectAutoCompressionCandidates = () => ({
    hasCandidates: true,
    reason: "",
  });
  stage.compressAll = async () => {
    compressionInvocations += 1;
    return { created: 1, archived: 2 };
  };
  stage.syncVectorState = async () => ({ insertedHashes: [], stats: { pending: 0 } });
  const batchStatus = createBatchStatusSkeleton({
    processedRange: [8, 8],
    extractionCountBefore: 9,
  });
  const settings = {
    enableConsolidation: false,
    compressionEveryN: 10,
    enableSynopsis: false,
    enableReflection: false,
    enableSleepCycle: false,
    synopsisEveryN: 1,
    reflectEveryN: 1,
    sleepEveryN: 1,
  };
  const effects = await handleExtractionSuccess(
    { newNodeIds: ["node-for-compress"] },
    8,
    settings,
    undefined,
    batchStatus,
  );
  assert.equal(compressionInvocations, 1);
  assert.equal(effects.batchStatus.autoCompressionScheduled, true);
  assert.equal(effects.batchStatus.nextCompressionAtExtractionCount, 20);
  assert.equal(effects.batchStatus.autoCompressionSkippedReason, "");
}
/**
 * Two skip paths for auto-compression:
 *  1. Off-cycle — the counter has not reached compressionEveryN, so nothing
 *     is scheduled and the skip reason cites the 10-extraction period.
 *  2. On-cycle without candidates — compression is scheduled, but with no
 *     candidate groups compressAll still must not run, and a
 *     "compression-skipped" artifact is recorded.
 */
async function testAutoCompressionSkipsWhenNotScheduledOrNoCandidates() {
  // Fresh settings object per call, mirroring the independent literals the
  // two scenarios pass in.
  const makeSettings = () => ({
    enableConsolidation: false,
    compressionEveryN: 10,
    enableSynopsis: false,
    enableReflection: false,
    enableSleepCycle: false,
    synopsisEveryN: 1,
    reflectEveryN: 1,
    sleepEveryN: 1,
  });
  // Scenario 1: off-cycle (counter at 0 of 10).
  const offCycle = await createBatchStageHarness();
  const {
    createBatchStatusSkeleton: createOffCycleStatus,
    handleExtractionSuccess: runOffCycleExtraction,
  } = offCycle.result;
  offCycle.currentGraph = {
    historyState: { extractionCount: 0 },
    vectorIndexState: {},
  };
  offCycle.ensureCurrentGraphRuntimeState = () => {
    offCycle.currentGraph.historyState ||= {};
    offCycle.currentGraph.vectorIndexState ||= {};
  };
  let offCycleCompressions = 0;
  offCycle.compressAll = async () => {
    offCycleCompressions += 1;
    return { created: 1, archived: 1 };
  };
  offCycle.syncVectorState = async () => ({ insertedHashes: [], stats: { pending: 0 } });
  const offCycleStatus = createOffCycleStatus({
    processedRange: [9, 9],
    extractionCountBefore: 0,
  });
  const offCycleEffects = await runOffCycleExtraction(
    { newNodeIds: ["node-off-cycle"] },
    9,
    makeSettings(),
    undefined,
    offCycleStatus,
  );
  assert.equal(offCycleCompressions, 0);
  assert.equal(offCycleEffects.batchStatus.autoCompressionScheduled, false);
  assert.match(
    offCycleEffects.batchStatus.autoCompressionSkippedReason,
    /未到每 10 次自动压缩周期/,
  );
  assert.equal(offCycleEffects.batchStatus.nextCompressionAtExtractionCount, 10);
  // Scenario 2: on-cycle (counter reaches 10) but no candidate groups.
  const onCycle = await createBatchStageHarness();
  const {
    createBatchStatusSkeleton: createOnCycleStatus,
    handleExtractionSuccess: runOnCycleExtraction,
  } = onCycle.result;
  onCycle.currentGraph = {
    historyState: { extractionCount: 9 },
    vectorIndexState: {},
  };
  onCycle.ensureCurrentGraphRuntimeState = () => {
    onCycle.currentGraph.historyState ||= {};
    onCycle.currentGraph.vectorIndexState ||= {};
  };
  onCycle.extractionCount = 9;
  let onCycleCompressions = 0;
  onCycle.inspectAutoCompressionCandidates = () => ({
    hasCandidates: false,
    reason: "已到自动压缩周期,但当前没有达到内部压缩阈值的候选组",
  });
  onCycle.compressAll = async () => {
    onCycleCompressions += 1;
    return { created: 1, archived: 1 };
  };
  onCycle.syncVectorState = async () => ({ insertedHashes: [], stats: { pending: 0 } });
  const onCycleStatus = createOnCycleStatus({
    processedRange: [10, 10],
    extractionCountBefore: 9,
  });
  const onCycleEffects = await runOnCycleExtraction(
    { newNodeIds: ["node-scheduled"] },
    10,
    makeSettings(),
    undefined,
    onCycleStatus,
  );
  assert.equal(onCycleCompressions, 0);
  assert.equal(onCycleEffects.batchStatus.autoCompressionScheduled, true);
  assert.match(
    onCycleEffects.batchStatus.autoCompressionSkippedReason,
    /没有达到内部压缩阈值的候选组/,
  );
  assert.equal(
    onCycleEffects.batchStatus.stages.structural.artifacts.includes("compression-skipped"),
    true,
  );
}
async function testBatchStatusFinalizeFailureIsNotCompleteSuccess() {
const harness = await createBatchStageHarness();
const { createBatchStatusSkeleton, handleExtractionSuccess } = harness.result;
@@ -4685,6 +5027,10 @@ await testReverseJournalRollbackStateFormsReplayClosure();
await testReverseJournalRecoveryPlanMixedLegacyAndCurrentRetainsRepairSet();
await testBatchStatusStructuralPartialRemainsRecoverable();
await testBatchStatusSemanticFailureDoesNotHideCoreSuccess();
// Coverage for the automatic consolidation / compression triggers.
await testAutoConsolidationRunsOnHighDuplicateRiskSingleNode();
await testAutoConsolidationSkipsLowRiskSingleNode();
await testAutoCompressionRunsOnlyOnConfiguredInterval();
await testAutoCompressionSkipsWhenNotScheduledOrNoCandidates();
await testBatchStatusFinalizeFailureIsNotCompleteSuccess();
await testProcessedHistoryAdvanceTracksCoreExtractionSuccess();
await testGenerationRecallTransactionDedupesDoubleHookBySameKey();