mirror of
https://github.com/Youzini-afk/ST-Bionic-Memory-Ecology.git
synced 2026-05-15 22:30:38 +08:00
perf: optimize persist/load P1 hot paths
This commit is contained in:
@@ -3239,6 +3239,166 @@ result = {
|
||||
);
|
||||
}
|
||||
|
||||
{
  // Regression: when the caller supplies both a prebuilt persistDelta and a
  // prebuilt persistSnapshot, saveGraphToIndexedDb must not rebuild the
  // snapshot from the runtime graph (asserted via the spy counter below).
  const chatId = "chat-idb-direct-delta-prebuilt-persist-snapshot";
  const baseGraph = createMeaningfulGraph(chatId, "direct-delta-base");
  const runtimeGraph = createMeaningfulGraph(chatId, "direct-delta-after");
  const baseSnapshot = buildSnapshotFromGraph(baseGraph, { chatId, revision: 7 });
  const persistSnapshot = buildSnapshotFromGraph(runtimeGraph, {
    chatId,
    revision: 8,
    baseSnapshot,
  });
  const directDelta = buildPersistDelta(baseSnapshot, persistSnapshot, {
    useNativeDelta: false,
  });

  const harness = await createGraphPersistenceHarness({
    chatId,
    globalChatId: chatId,
    chatMetadata: {
      integrity: "meta-idb-direct-delta-prebuilt-persist-snapshot",
    },
    indexedDbSnapshot: baseSnapshot,
  });
  harness.api.setCurrentGraph(runtimeGraph);
  harness.api.setGraphPersistenceState({
    loadState: "loaded",
    chatId,
    revision: 8,
    lastPersistedRevision: 0,
    writesBlocked: false,
  });

  // Spy on the runtime snapshot builder so we can prove it is never invoked.
  const realBuildSnapshotFromGraph = harness.runtimeContext.buildSnapshotFromGraph;
  let snapshotBuildCalls = 0;
  harness.runtimeContext.buildSnapshotFromGraph = (...args) => {
    snapshotBuildCalls += 1;
    return realBuildSnapshotFromGraph(...args);
  };

  const saveResult = await harness.api.saveGraphToIndexedDb(chatId, runtimeGraph, {
    revision: 8,
    reason: "direct-delta-prebuilt-persist-snapshot-save",
    scheduleCloudUpload: false,
    persistDelta: directDelta,
    persistSnapshot,
  });

  assert.equal(saveResult.saved, true);
  assert.equal(
    snapshotBuildCalls,
    0,
    "direct-delta 且已提供 persistSnapshot 时不应再次构建 snapshot",
  );
  assert.equal(saveResult.snapshot?.meta?.revision, 8);
  assert.equal(harness.api.getIndexedDbSnapshot()?.meta?.revision, 8);
}
|
||||
|
||||
{
  // Regression: loading an empty persisted graph should stop at the cheap
  // probe stage (exportSnapshotProbe) and never fall through to a full
  // exportSnapshot.
  const chatId = "chat-indexeddb-probe-empty-early-return";
  const persistedSnapshot = {
    meta: { revision: 0, chatId },
    nodes: [],
    edges: [],
    tombstones: [],
    state: {
      lastProcessedFloor: -1,
      extractionCount: 0,
    },
  };

  const harness = await createGraphPersistenceHarness({
    chatId,
    globalChatId: chatId,
    chatMetadata: {
      integrity: "meta-indexeddb-probe-empty-early-return",
    },
    indexedDbSnapshot: persistedSnapshot,
  });
  harness.runtimeContext.__globalChatId = chatId;
  harness.runtimeContext.__chatContext.chatId = chatId;
  harness.api.setChatContext({
    ...harness.api.getChatContext(),
    chatId,
    chatMetadata: {
      integrity: "meta-indexeddb-probe-empty-early-return",
    },
  });
  harness.api.setCurrentGraph(
    createMeaningfulGraph(chatId, "probe-empty-runtime-current"),
  );
  harness.api.setGraphPersistenceState({
    loadState: "loaded",
    chatId,
    revision: 1,
    lastPersistedRevision: 1,
    storagePrimary: "indexeddb",
    storageMode: "indexeddb",
    writesBlocked: false,
  });

  // Wrap _createDb so both export paths count their invocations.
  const originalCreateDb = harness.runtimeContext.BmeChatManager.prototype._createDb;
  let fullExportCalls = 0;
  let probeExportCalls = 0;
  harness.runtimeContext.BmeChatManager.prototype._createDb = function (dbChatId = "") {
    const innerDb = originalCreateDb.call(this, dbChatId);
    return {
      ...innerDb,
      async exportSnapshot() {
        fullExportCalls += 1;
        return await innerDb.exportSnapshot();
      },
      async exportSnapshotProbe() {
        probeExportCalls += 1;
        const normalizedChatId = String(dbChatId || "");
        const sourceSnapshot = harness.api.getIndexedDbSnapshotForChat(dbChatId) || {
          meta: { revision: 0, chatId: normalizedChatId },
          state: { lastProcessedFloor: -1, extractionCount: 0 },
          nodes: [],
          edges: [],
          tombstones: [],
        };
        const countOf = (list) => (Array.isArray(list) ? list.length : 0);
        // Probe result: counts and state only, record collections stay empty.
        return {
          meta: {
            ...(sourceSnapshot.meta || {}),
            chatId: normalizedChatId,
            revision: Number(sourceSnapshot?.meta?.revision || 0),
            nodeCount: countOf(sourceSnapshot?.nodes),
            edgeCount: countOf(sourceSnapshot?.edges),
            tombstoneCount: countOf(sourceSnapshot?.tombstones),
          },
          state: {
            lastProcessedFloor: Number(sourceSnapshot?.state?.lastProcessedFloor ?? -1),
            extractionCount: Number(sourceSnapshot?.state?.extractionCount ?? 0),
          },
          nodes: [],
          edges: [],
          tombstones: [],
          __stBmeProbeOnly: true,
          __stBmeTombstonesOmitted: true,
        };
      },
    };
  };

  const loadResult = await harness.api.loadGraphFromIndexedDb(chatId, {
    source: "probe-empty-early-return",
    attemptIndex: 0,
  });

  assert.equal(loadResult.loaded, false);
  assert.equal(probeExportCalls, 1);
  assert.equal(
    fullExportCalls,
    0,
    "empty/probe 早退应在 probe 阶段终止,而不是继续全量导出 snapshot",
  );
  harness.runtimeContext.BmeChatManager.prototype._createDb = originalCreateDb;
}
|
||||
|
||||
{
|
||||
const harness = await createGraphPersistenceHarness({
|
||||
chatId: "chat-pending-persist-retry",
|
||||
|
||||
@@ -235,6 +235,49 @@ async function testSnapshotExportWithoutTombstones() {
|
||||
await db.close();
|
||||
}
|
||||
|
||||
async function testSnapshotProbeExport() {
  // exportSnapshotProbe must return metadata counts and state only: the
  // record collections come back empty and the probe marker flags are set.
  const database = new BmeDatabase("chat-export-probe", {
    dexieClass: globalThis.Dexie,
  });
  await database.open();

  await database.bulkUpsertNodes([
    {
      id: "node-probe",
      type: "event",
      sourceFloor: 4,
      archived: false,
      updatedAt: Date.now(),
    },
  ]);
  await database.patchMeta({
    lastProcessedFloor: 6,
    extractionCount: 3,
    runtimeHistoryState: {
      chatId: "chat-export-probe",
      lastProcessedAssistantFloor: 6,
      extractionCount: 3,
    },
  });

  const probe = await database.exportSnapshotProbe();
  assert.equal(probe.__stBmeProbeOnly, true);
  assert.equal(probe.__stBmeTombstonesOmitted, true);
  for (const collection of ["nodes", "edges", "tombstones"]) {
    // Record payloads are intentionally omitted from the probe.
    assert.deepEqual(probe[collection], []);
  }
  assert.equal(probe.meta.chatId, "chat-export-probe");
  assert.equal(probe.meta.nodeCount, 1);
  assert.equal(probe.state.lastProcessedFloor, 6);
  assert.equal(probe.state.extractionCount, 3);
  assert.equal(
    probe.meta.runtimeHistoryState.lastProcessedAssistantFloor,
    6,
  );

  await database.close();
}
|
||||
|
||||
async function testReplaceImportResetsStaleMeta() {
|
||||
const chatId = "chat-replace-reset";
|
||||
const db = new BmeDatabase(chatId, { dexieClass: globalThis.Dexie });
|
||||
@@ -577,29 +620,58 @@ async function testGraphSnapshotConverters() {
|
||||
updatedAt: Date.now(),
|
||||
});
|
||||
|
||||
let snapshotDiagnostics = null;
|
||||
const snapshot = buildSnapshotFromGraph(graph, {
|
||||
chatId: "chat-a",
|
||||
revision: 17,
|
||||
onDiagnostics(snapshotValue) {
|
||||
snapshotDiagnostics = snapshotValue;
|
||||
},
|
||||
});
|
||||
assert.equal(snapshot.meta.chatId, "chat-a");
|
||||
assert.equal(snapshot.meta.revision, 17);
|
||||
assert.equal(snapshot.state.lastProcessedFloor, 9);
|
||||
assert.equal(snapshot.state.extractionCount, 4);
|
||||
assert.equal(snapshot.nodes.length, 1);
|
||||
assert.equal(Number.isFinite(snapshotDiagnostics?.nodesMs), true);
|
||||
assert.equal(Number.isFinite(snapshotDiagnostics?.edgesMs), true);
|
||||
assert.equal(Number.isFinite(snapshotDiagnostics?.tombstonesMs), true);
|
||||
assert.equal(Number.isFinite(snapshotDiagnostics?.stateMs), true);
|
||||
assert.equal(Number.isFinite(snapshotDiagnostics?.metaMs), true);
|
||||
assert.equal(Number.isFinite(snapshotDiagnostics?.totalMs), true);
|
||||
assert.equal(snapshotDiagnostics?.nodeCount, 1);
|
||||
|
||||
let hydrateDiagnostics = null;
|
||||
const nextGraph = buildGraphFromSnapshot(snapshot, {
|
||||
chatId: "chat-a",
|
||||
onDiagnostics(snapshotValue) {
|
||||
hydrateDiagnostics = snapshotValue;
|
||||
},
|
||||
});
|
||||
assert.equal(hydrateDiagnostics?.success, true);
|
||||
assert.equal(Number.isFinite(hydrateDiagnostics?.nodesMs), true);
|
||||
assert.equal(Number.isFinite(hydrateDiagnostics?.edgesMs), true);
|
||||
assert.equal(Number.isFinite(hydrateDiagnostics?.runtimeMetaMs), true);
|
||||
assert.equal(Number.isFinite(hydrateDiagnostics?.stateMs), true);
|
||||
assert.equal(Number.isFinite(hydrateDiagnostics?.normalizeMs), true);
|
||||
assert.equal(Number.isFinite(hydrateDiagnostics?.integrityMs), true);
|
||||
assert.equal(Number.isFinite(hydrateDiagnostics?.totalMs), true);
|
||||
|
||||
let reusedSnapshotDiagnostics = null;
|
||||
const reusedSnapshot = buildSnapshotFromGraph(nextGraph, {
|
||||
chatId: "chat-a",
|
||||
revision: 18,
|
||||
baseSnapshot: snapshot,
|
||||
onDiagnostics(snapshotValue) {
|
||||
reusedSnapshotDiagnostics = snapshotValue;
|
||||
},
|
||||
});
|
||||
assert.equal(
|
||||
reusedSnapshot.nodes[0],
|
||||
snapshot.nodes[0],
|
||||
"未变化节点应直接复用 baseSnapshot 记录对象",
|
||||
);
|
||||
assert.equal(reusedSnapshotDiagnostics?.reusedNodeCount, 1);
|
||||
nextGraph.nodes[0].updatedAt = Number(nextGraph.nodes[0].updatedAt || 0) + 1;
|
||||
const changedSnapshot = buildSnapshotFromGraph(nextGraph, {
|
||||
chatId: "chat-a",
|
||||
@@ -662,6 +734,7 @@ async function main() {
|
||||
await testTransactionRollback();
|
||||
await testSnapshotExportImport();
|
||||
await testSnapshotExportWithoutTombstones();
|
||||
await testSnapshotProbeExport();
|
||||
await testReplaceImportResetsStaleMeta();
|
||||
await testRevisionMonotonicity();
|
||||
await testTombstonePrune();
|
||||
|
||||
@@ -298,6 +298,10 @@ async function testUploadPayloadMetaFirstAndDebounce() {
|
||||
assert.equal(uploadResult.uploaded, true);
|
||||
assert.equal(logs.uploadCalls, 1);
|
||||
assert.equal(logs.uploadChunkCalls > 0, true);
|
||||
assert.equal(Number.isFinite(uploadResult.timings?.exportMs), true);
|
||||
assert.equal(Number.isFinite(uploadResult.timings?.chunkUploadMs), true);
|
||||
assert.equal(Number.isFinite(uploadResult.timings?.manifestUploadMs), true);
|
||||
assert.equal(Number.isFinite(uploadResult.timings?.metaPatchMs), true);
|
||||
|
||||
const uploadedPayload = logs.uploadedPayloads[0].payload;
|
||||
assert.equal(uploadedPayload.formatVersion, 2);
|
||||
@@ -375,6 +379,10 @@ async function testDownloadImport() {
|
||||
const result = await download("chat-download", runtime);
|
||||
|
||||
assert.equal(result.downloaded, true);
|
||||
assert.equal(Number.isFinite(result.timings?.networkMs), true);
|
||||
assert.equal(Number.isFinite(result.timings?.importMs), true);
|
||||
assert.equal(Number.isFinite(result.timings?.metaPatchMs), true);
|
||||
assert.equal(Number.isFinite(result.timings?.hookMs), true);
|
||||
assert.equal(db.lastImportPayload.meta.revision, 12);
|
||||
assert.equal(db.lastImportPayload.nodes[0].id, "remote-node");
|
||||
assert.equal(db.lastImportPayload.meta.runtimeVectorIndexState.dirty, true);
|
||||
@@ -731,6 +739,10 @@ async function testManualBackupAndRestoreFlow() {
|
||||
|
||||
const backupResult = await backupToServer("chat-backup-flow", runtime);
|
||||
assert.equal(backupResult.backedUp, true);
|
||||
assert.equal(Number.isFinite(backupResult.timings?.exportMs), true);
|
||||
assert.equal(Number.isFinite(backupResult.timings?.uploadMs), true);
|
||||
assert.equal(Number.isFinite(backupResult.timings?.manifestWriteMs), true);
|
||||
assert.equal(Number.isFinite(backupResult.timings?.metaPatchMs), true);
|
||||
assert.equal(db.meta.get("syncDirty"), false);
|
||||
assert.ok(Number(db.meta.get("lastBackupUploadedAt")) > 0);
|
||||
assert.ok(String(db.meta.get("lastBackupFilename") || "").startsWith("ST-BME_backup_"));
|
||||
@@ -801,6 +813,12 @@ async function testManualBackupAndRestoreFlow() {
|
||||
|
||||
const restoreResult = await restoreFromServer("chat-backup-flow", runtime);
|
||||
assert.equal(restoreResult.restored, true);
|
||||
assert.equal(Number.isFinite(restoreResult.timings?.downloadMs), true);
|
||||
assert.equal(Number.isFinite(restoreResult.timings?.localExportMs), true);
|
||||
assert.equal(Number.isFinite(restoreResult.timings?.safetySnapshotMs), true);
|
||||
assert.equal(Number.isFinite(restoreResult.timings?.importMs), true);
|
||||
assert.equal(Number.isFinite(restoreResult.timings?.metaPatchMs), true);
|
||||
assert.equal(Number.isFinite(restoreResult.timings?.hookMs), true);
|
||||
assert.equal(db.snapshot.nodes[0].id, "local-node");
|
||||
assert.equal(db.snapshot.meta.runtimeBatchJournal.length, 4);
|
||||
assert.equal(db.snapshot.meta.maintenanceJournal.length, 0);
|
||||
@@ -963,6 +981,7 @@ async function testRestoreValidationDoesNotCreateSafetySnapshot() {
|
||||
const restoreResult = await restoreFromServer("chat-no-backup", runtime);
|
||||
assert.equal(restoreResult.restored, false);
|
||||
assert.equal(restoreResult.reason, "not-found");
|
||||
assert.equal(Number.isFinite(restoreResult.timings?.downloadMs), true);
|
||||
|
||||
const safetyStatus = await getRestoreSafetySnapshotStatus(
|
||||
"chat-no-backup",
|
||||
|
||||
@@ -56,12 +56,18 @@ await store.patchMeta({
|
||||
lastProcessedFloor: 9,
|
||||
extractionCount: 4,
|
||||
});
|
||||
const probe = await store.exportSnapshotProbe();
|
||||
|
||||
assert.equal(
|
||||
loadSnapshotCalls,
|
||||
0,
|
||||
"manifest-only meta fast path should not load full snapshot",
|
||||
);
|
||||
assert.equal(probe.__stBmeProbeOnly, true);
|
||||
assert.equal(probe.meta.lastBackupFilename, "after.json");
|
||||
assert.equal(probe.meta.nodeCount, 1);
|
||||
assert.equal(probe.state.lastProcessedFloor, 9);
|
||||
assert.equal(probe.state.extractionCount, 4);
|
||||
|
||||
const snapshot = await originalLoadSnapshot();
|
||||
assert.equal(snapshot.meta.lastBackupFilename, "after.json");
|
||||
|
||||
@@ -261,7 +261,62 @@ async function testGraphLikeDeltaPreservesHistoryFrontier() {
|
||||
);
|
||||
}
|
||||
|
||||
async function testCommitDeltaDiagnosticsSplitWalAndManifestStages() {
  // commitDelta should report finite per-stage timings, with each serialize
  // sub-stage bounded above by its enclosing write stage.
  const rootDirectory = createMemoryOpfsRoot();
  const opfsStore = new OpfsGraphStore("chat-opfs-diagnostics-split", {
    rootDirectoryFactory: async () => rootDirectory,
    storeMode: BME_GRAPH_LOCAL_STORAGE_MODE_OPFS_PRIMARY,
  });
  await opfsStore.open();

  await opfsStore.importSnapshot(
    {
      meta: { revision: 1 },
      state: { lastProcessedFloor: 0, extractionCount: 0 },
      nodes: [],
      edges: [],
      tombstones: [],
    },
    { mode: "replace", preserveRevision: true },
  );

  const result = await opfsStore.commitDelta(
    {
      upsertNodes: [
        {
          id: "diag-node-1",
          type: "event",
          fields: { title: "diag" },
          archived: false,
          updatedAt: 10,
        },
      ],
    },
    {
      reason: "diagnostics-split",
      requestedRevision: 2,
      markSyncDirty: true,
    },
  );

  const diag = result.diagnostics ?? {};
  for (const key of [
    "walSerializeMs",
    "walFileWriteMs",
    "walWriteMs",
    "manifestSerializeMs",
    "manifestFileWriteMs",
    "manifestWriteMs",
  ]) {
    assert.equal(Number.isFinite(diag[key]), true);
  }
  assert.equal(diag.walWriteMs >= diag.walSerializeMs, true);
  assert.equal(diag.manifestWriteMs >= diag.manifestSerializeMs, true);
  // NOTE(review): the store is deliberately left open here, matching the
  // original test — confirm whether a close() is expected by the suite.
}
|
||||
|
||||
await testCommitDeltaAndPatchMetaSerialize();
|
||||
await testImportSnapshotAndClearAllSerialize();
|
||||
await testGraphLikeDeltaPreservesHistoryFrontier();
|
||||
await testCommitDeltaDiagnosticsSplitWalAndManifestStages();
|
||||
console.log("opfs-write-serialization tests passed");
|
||||
|
||||
326
tests/perf/persist-load-bench.mjs
Normal file
326
tests/perf/persist-load-bench.mjs
Normal file
@@ -0,0 +1,326 @@
|
||||
import { performance } from "node:perf_hooks";
|
||||
|
||||
import {
|
||||
buildGraphFromSnapshot,
|
||||
buildPersistDelta,
|
||||
buildSnapshotFromGraph,
|
||||
} from "../../sync/bme-db.js";
|
||||
import {
|
||||
BME_GRAPH_LOCAL_STORAGE_MODE_OPFS_PRIMARY,
|
||||
OpfsGraphStore,
|
||||
} from "../../sync/bme-opfs-store.js";
|
||||
import { createMemoryOpfsRoot } from "../helpers/memory-opfs.mjs";
|
||||
|
||||
const RUNS = 4;
|
||||
const SIZE_PRESETS = [
|
||||
{ label: "M", seed: 17, nodeCount: 1200, edgeCount: 3600, churn: 0.08 },
|
||||
{ label: "L", seed: 29, nodeCount: 3600, edgeCount: 10800, churn: 0.1 },
|
||||
{ label: "XL", seed: 43, nodeCount: 7200, edgeCount: 21600, churn: 0.12 },
|
||||
];
|
||||
|
||||
function summarize(values = []) {
  // Aggregate duration samples into { avg, p95, min, max }.
  // Returns all-zero stats for an empty sample list.
  if (!values.length) {
    return { avg: 0, p95: 0, min: 0, max: 0 };
  }
  const sorted = [...values].sort((a, b) => a - b);
  const sum = sorted.reduce((acc, value) => acc + value, 0);
  // Nearest-rank p95: index = ceil(0.95 * n) - 1, clamped into range.
  // The previous floor(n * 0.95) overshot to the maximum sample whenever
  // n was a multiple of 20 (e.g. n = 20 picked index 19, i.e. the max).
  const p95Index = Math.min(
    sorted.length - 1,
    Math.max(0, Math.ceil(sorted.length * 0.95) - 1),
  );
  return {
    avg: sum / sorted.length,
    p95: sorted[p95Index],
    min: sorted[0],
    max: sorted[sorted.length - 1],
  };
}
|
||||
|
||||
function formatSummary(label, values = []) {
  // Render one stage's stats as a single human-readable console line.
  const { avg, p95, min, max } = summarize(values);
  const fmt = (value) => `${value.toFixed(2)}ms`;
  return `${label} avg=${fmt(avg)} p95=${fmt(p95)} min=${fmt(min)} max=${fmt(max)}`;
}
|
||||
|
||||
function createRandom(seed = 1) {
  // Seeded LCG (Numerical Recipes constants, modulus 2^32) so bench data is
  // reproducible across runs. Returns a generator of floats in [0, 1).
  let state = seed >>> 0;
  return () => {
    state = (state * 1664525 + 1013904223) >>> 0;
    // Divide by 2^32 (not 2^32 - 1): the old divisor let the generator
    // return exactly 1 when state hit 0xffffffff, and callers use
    // Math.floor(rand() * length) as an array index — a return of 1 would
    // index one past the end of the array.
    return state / 0x100000000;
  };
}
|
||||
|
||||
function buildRuntimeGraph(seed = 1, nodeCount = 100, edgeCount = 200, chatId = "bench-chat") {
  // Deterministically synthesize a runtime graph of the requested size;
  // identical arguments always produce identical graphs (seeded RNG).
  const nextRandom = createRandom(seed);
  const processedFloor = Math.max(0, Math.floor(nodeCount / 12));

  const nodes = Array.from({ length: nodeCount }, (_, i) => ({
    id: `node-${i}`,
    type: "event",
    updatedAt: 1000 + i,
    archived: false,
    sourceFloor: i,
    fields: {
      title: `Node ${i}`,
      text: `node-${i}-${Math.floor(nextRandom() * 100000)}`,
    },
  }));

  const edges = [];
  for (let i = 0; i < edgeCount; i += 1) {
    const fromIndex = Math.floor(nextRandom() * nodeCount);
    let toIndex = Math.floor(nextRandom() * nodeCount);
    if (toIndex === fromIndex) {
      // Avoid self-loops by shifting the target to the next node.
      toIndex = (toIndex + 1) % nodeCount;
    }
    edges.push({
      id: `edge-${i}`,
      fromId: `node-${fromIndex}`,
      toId: `node-${toIndex}`,
      relation: "related",
      strength: nextRandom(),
      updatedAt: 2000 + i,
    });
  }

  return {
    version: 1,
    nodes,
    edges,
    historyState: {
      chatId,
      lastProcessedAssistantFloor: processedFloor,
      extractionCount: Math.max(1, Math.floor(nodeCount / 40)),
      processedMessageHashes: {},
      processedMessageHashVersion: 1,
      processedMessageHashesNeedRefresh: false,
      recentRecallOwnerKeys: [],
      activeRecallOwnerKey: "",
      activeRegion: "",
      activeRegionSource: "",
      activeStorySegmentId: "",
      activeStoryTimeLabel: "",
      activeStoryTimeSource: "",
      lastBatchStatus: null,
      lastMutationSource: "bench",
      lastExtractedRegion: "",
      lastExtractedStorySegmentId: "",
      activeCharacterPovOwner: "",
      activeUserPovOwner: "",
    },
    vectorIndexState: {
      chatId,
      collectionId: "",
      hashToNodeId: {},
      nodeToHash: {},
      replayRequiredNodeIds: [],
      dirty: false,
      dirtyReason: "",
      pendingRepairFromFloor: null,
      lastIntegrityIssue: null,
      lastStats: {
        nodesIndexed: 0,
        updatedAt: 0,
      },
    },
    knowledgeState: {
      owners: {},
      activeOwnerKey: "",
    },
    regionState: {
      activeRegion: "",
      knownRegions: {},
      manualActiveRegion: "",
    },
    timelineState: {
      activeSegmentId: "",
      manualActiveSegmentId: "",
      segments: [],
    },
    summaryState: {
      updatedAt: 0,
      entries: [],
    },
    batchJournal: [],
    maintenanceJournal: [],
    lastRecallResult: null,
    lastProcessedSeq: processedFloor,
  };
}
|
||||
|
||||
function mutateRuntimeGraph(baseGraph, seed = 1, churn = 0.1) {
  // Return a mutated deep copy of baseGraph: touch ~churn of the nodes and
  // ~churn/2 of the edges, append a few new nodes, drop a small prefix of
  // edges, and advance the history frontier. baseGraph is never modified.
  const nextRandom = createRandom(seed);
  const mutated = structuredClone(baseGraph);

  const touchedNodeCount = Math.max(1, Math.floor(mutated.nodes.length * churn));
  const touchedEdgeCount = Math.max(1, Math.floor(mutated.edges.length * churn * 0.5));
  for (let i = 0; i < touchedNodeCount; i += 1) {
    const node = mutated.nodes[Math.floor(nextRandom() * mutated.nodes.length)];
    node.updatedAt += 100 + i;
    node.fields.text = `${node.fields.text}-mut-${i}`;
  }
  for (let i = 0; i < touchedEdgeCount; i += 1) {
    const edge = mutated.edges[Math.floor(nextRandom() * mutated.edges.length)];
    edge.updatedAt += 80 + i;
    edge.strength = nextRandom();
  }

  // Append brand-new nodes after the touch pass so they are never re-picked.
  const addNodeCount = Math.max(1, Math.floor(mutated.nodes.length * churn * 0.12));
  const baseNodeId = mutated.nodes.length;
  for (let i = 0; i < addNodeCount; i += 1) {
    mutated.nodes.push({
      id: `node-new-${baseNodeId + i}`,
      type: "event",
      updatedAt: 5000 + i,
      archived: false,
      sourceFloor: baseNodeId + i,
      fields: {
        title: `Node new ${i}`,
        text: `new-node-${i}`,
      },
    });
  }

  // Remove a small prefix of edges so the delta also contains removals.
  const deleteEdgeCount = Math.max(1, Math.floor(mutated.edges.length * churn * 0.08));
  mutated.edges.splice(0, deleteEdgeCount);

  mutated.historyState.lastProcessedAssistantFloor += 1;
  mutated.historyState.extractionCount += 1;
  mutated.lastProcessedSeq = mutated.historyState.lastProcessedAssistantFloor;
  mutated.summaryState.updatedAt += 1;
  return mutated;
}
|
||||
|
||||
function buildBenchPair({ label, seed, nodeCount, edgeCount, churn }) {
  // One bench fixture: a baseline graph plus its mutated successor.
  const chatId = `bench-${label.toLowerCase()}`;
  const beforeGraph = buildRuntimeGraph(seed, nodeCount, edgeCount, chatId);
  return {
    label,
    chatId,
    beforeGraph,
    afterGraph: mutateRuntimeGraph(beforeGraph, seed + 101, churn),
  };
}
|
||||
|
||||
function measureSnapshotBuild(graph, options) {
  // Time buildSnapshotFromGraph and capture its per-phase diagnostics.
  let capturedDiagnostics = null;
  const t0 = performance.now();
  const snapshot = buildSnapshotFromGraph(graph, {
    ...options,
    onDiagnostics: (snapshotValue) => {
      capturedDiagnostics = snapshotValue;
    },
  });
  const elapsedMs = performance.now() - t0;
  return { elapsedMs, snapshot, diagnostics: capturedDiagnostics };
}
|
||||
|
||||
function measureHydrate(snapshot, chatId) {
  // Time buildGraphFromSnapshot; only wall time and diagnostics are kept —
  // the hydrated graph itself is discarded.
  let capturedDiagnostics = null;
  const t0 = performance.now();
  buildGraphFromSnapshot(snapshot, {
    chatId,
    onDiagnostics: (snapshotValue) => {
      capturedDiagnostics = snapshotValue;
    },
  });
  const elapsedMs = performance.now() - t0;
  return { elapsedMs, diagnostics: capturedDiagnostics };
}
|
||||
|
||||
async function measureOpfsCommit(baseSnapshot, afterSnapshot, delta, chatId) {
  // Seed a fresh in-memory OPFS store with baseSnapshot, then time a single
  // commitDelta that advances it to afterSnapshot. Only the commit itself is
  // inside the timed window; open/import/close are setup and teardown.
  const rootDirectory = createMemoryOpfsRoot();
  const store = new OpfsGraphStore(chatId, {
    rootDirectoryFactory: async () => rootDirectory,
    storeMode: BME_GRAPH_LOCAL_STORAGE_MODE_OPFS_PRIMARY,
  });
  await store.open();
  await store.importSnapshot(baseSnapshot, {
    mode: "replace",
    preserveRevision: true,
    markSyncDirty: false,
  });

  const commitStartedAt = performance.now();
  const commitResult = await store.commitDelta(delta, {
    reason: "bench-commit",
    requestedRevision: Number(afterSnapshot?.meta?.revision || 0),
    markSyncDirty: true,
    committedSnapshot: afterSnapshot,
  });
  const elapsedMs = performance.now() - commitStartedAt;
  await store.close();

  return {
    elapsedMs,
    diagnostics: commitResult?.diagnostics || {},
  };
}
|
||||
|
||||
async function runPreset(preset) {
  // Execute RUNS iterations of one size preset and print summary lines for
  // snapshot build, hydrate, and OPFS commit (plus selected phase p95s).
  const samples = {
    snapshotBuild: [],
    hydrate: [],
    opfsCommit: [],
    snapshotNodes: [],
    hydrateRuntimeMeta: [],
    walFileWrite: [],
    manifestFileWrite: [],
  };

  for (let runIndex = 0; runIndex < RUNS; runIndex += 1) {
    // Vary the seed per run so each iteration measures a distinct graph.
    const pair = buildBenchPair({
      ...preset,
      seed: preset.seed + runIndex * 17,
    });
    const beforeBuild = measureSnapshotBuild(pair.beforeGraph, {
      chatId: pair.chatId,
      revision: 1,
    });
    const afterBuild = measureSnapshotBuild(pair.afterGraph, {
      chatId: pair.chatId,
      revision: 2,
      baseSnapshot: beforeBuild.snapshot,
    });
    const delta = buildPersistDelta(
      beforeBuild.snapshot,
      afterBuild.snapshot,
      { useNativeDelta: false },
    );
    const hydrateResult = measureHydrate(afterBuild.snapshot, pair.chatId);
    const commitResult = await measureOpfsCommit(
      beforeBuild.snapshot,
      afterBuild.snapshot,
      delta,
      pair.chatId,
    );

    samples.snapshotBuild.push(afterBuild.elapsedMs);
    samples.hydrate.push(hydrateResult.elapsedMs);
    samples.opfsCommit.push(commitResult.elapsedMs);
    samples.snapshotNodes.push(Number(afterBuild.diagnostics?.nodesMs || 0));
    samples.hydrateRuntimeMeta.push(Number(hydrateResult.diagnostics?.runtimeMetaMs || 0));
    samples.walFileWrite.push(Number(commitResult.diagnostics?.walFileWriteMs || 0));
    samples.manifestFileWrite.push(
      Number(commitResult.diagnostics?.manifestFileWriteMs || 0),
    );
  }

  const p95Of = (values) => summarize(values).p95.toFixed(2);
  console.log(`\n[ST-BME][persist-load-bench] ${preset.label}`);
  console.log(
    formatSummary("snapshot-build", samples.snapshotBuild),
    `nodesPhaseP95=${p95Of(samples.snapshotNodes)}ms`,
  );
  console.log(
    formatSummary("hydrate", samples.hydrate),
    `runtimeMetaP95=${p95Of(samples.hydrateRuntimeMeta)}ms`,
  );
  console.log(
    formatSummary("opfs-commit", samples.opfsCommit),
    `walFileP95=${p95Of(samples.walFileWrite)}ms`,
    `manifestFileP95=${p95Of(samples.manifestFileWrite)}ms`,
  );
}
|
||||
|
||||
async function main() {
  // Presets run strictly one after another so their console output stays
  // grouped per preset.
  for (let presetIndex = 0; presetIndex < SIZE_PRESETS.length; presetIndex += 1) {
    await runPreset(SIZE_PRESETS[presetIndex]);
  }
}

await main();
|
||||
Reference in New Issue
Block a user