fix: deep repair p0-p1 persistence runtime merge and integrity

This commit is contained in:
Youzini-afk
2026-03-31 02:38:27 +08:00
parent 10c6db258b
commit a2bed39e28
8 changed files with 1152 additions and 54 deletions

View File

@@ -28,6 +28,30 @@ export const BME_DB_TABLE_SCHEMAS = Object.freeze({
tombstones: "&id, kind, targetId, deletedAt, sourceDeviceId, [kind+targetId]",
});
/**
 * Builds the baseline meta record stored alongside a chat's persistence data.
 * All counters start at zero, string fields start empty, and timestamps are
 * normalized so a fresh store is always in a well-defined state.
 *
 * @param {string} [chatId=""] - Chat identifier; normalized (trimmed) before use.
 * @param {number} [nowMs=Date.now()] - Timestamp used for `lastModified`.
 * @returns {object} A complete default meta-value record.
 */
function createDefaultMetaValues(chatId = "", nowMs = Date.now()) {
  return {
    chatId: normalizeChatId(chatId),
    revision: 0,
    lastProcessedFloor: META_DEFAULT_LAST_PROCESSED_FLOOR,
    extractionCount: META_DEFAULT_EXTRACTION_COUNT,
    lastModified: normalizeTimestamp(nowMs),
    lastSyncUploadedAt: 0,
    lastSyncDownloadedAt: 0,
    lastSyncedRevision: 0,
    deviceId: "",
    nodeCount: 0,
    edgeCount: 0,
    tombstoneCount: 0,
    schemaVersion: BME_DB_SCHEMA_VERSION,
    syncDirty: false,
    migrationCompletedAt: 0,
    migrationSource: "",
    legacyRetentionUntil: 0,
  };
}
// Coerces any chat-id candidate to a trimmed string; null/undefined become "".
function normalizeChatId(chatId) {
  const raw = chatId ?? "";
  return String(raw).trim();
}
@@ -1010,6 +1034,7 @@ export class BmeDatabase {
edges: 0,
tombstones: 0,
};
let revisionFloor = 0;
await db.transaction(
"rw",
@@ -1018,11 +1043,14 @@ export class BmeDatabase {
db.table("tombstones"),
db.table("meta"),
async () => {
revisionFloor = normalizeRevision((await db.table("meta").get("revision"))?.value);
if (mode === "replace") {
await Promise.all([
db.table("nodes").clear(),
db.table("edges").clear(),
db.table("tombstones").clear(),
db.table("meta").clear(),
]);
}
@@ -1044,6 +1072,7 @@ export class BmeDatabase {
}
const metaPatch = {
...(mode === "replace" ? createDefaultMetaValues(this.chatId, nowMs) : {}),
...normalizedSnapshot.meta,
...(normalizedSnapshot.state || {}),
chatId: this.chatId,
@@ -1059,9 +1088,12 @@ export class BmeDatabase {
counts = await this._updateCountMetaInTx(db, nowMs);
const currentRevision = normalizeRevision(
const persistedRevision = normalizeRevision(
(await db.table("meta").get("revision"))?.value,
);
const currentRevision =
mode === "replace" ? Math.max(revisionFloor, persistedRevision) : persistedRevision;
const incomingRevision = normalizeRevision(normalizedSnapshot.meta?.revision);
const explicitRevision = normalizeRevision(options.revision);
const requestedRevision = Number.isFinite(Number(options.revision))
@@ -1199,25 +1231,7 @@ export class BmeDatabase {
async _ensureMetaDefaults() {
const db = await this.open();
const nowMs = Date.now();
const defaultMeta = {
chatId: this.chatId,
revision: 0,
lastProcessedFloor: META_DEFAULT_LAST_PROCESSED_FLOOR,
extractionCount: META_DEFAULT_EXTRACTION_COUNT,
lastModified: nowMs,
lastSyncUploadedAt: 0,
lastSyncDownloadedAt: 0,
lastSyncedRevision: 0,
deviceId: "",
nodeCount: 0,
edgeCount: 0,
tombstoneCount: 0,
schemaVersion: BME_DB_SCHEMA_VERSION,
syncDirty: false,
migrationCompletedAt: 0,
migrationSource: "",
legacyRetentionUntil: 0,
};
const defaultMeta = createDefaultMetaValues(this.chatId, nowMs);
await db.transaction("rw", db.table("meta"), async () => {
for (const [key, value] of Object.entries(defaultMeta)) {

View File

@@ -11,6 +11,14 @@ const sanitizedFilenameByChatId = new Map();
let visibilitySyncInstalled = false;
let lastVisibilityState = "visible";
const RUNTIME_HISTORY_META_KEY = "runtimeHistoryState";
const RUNTIME_VECTOR_META_KEY = "runtimeVectorIndexState";
const RUNTIME_BATCH_JOURNAL_META_KEY = "runtimeBatchJournal";
const RUNTIME_LAST_RECALL_META_KEY = "runtimeLastRecallResult";
const RUNTIME_LAST_PROCESSED_SEQ_META_KEY = "runtimeLastProcessedSeq";
const RUNTIME_GRAPH_VERSION_META_KEY = "runtimeGraphVersion";
const RUNTIME_BATCH_JOURNAL_LIMIT = 96;
// Normalizes a chat identifier: nullish values become "", everything else is
// stringified and trimmed.
function normalizeChatId(chatId) {
  if (chatId == null) return "";
  return String(chatId).trim();
}
@@ -280,6 +288,523 @@ function mergeRecordCollectionById(localRecords = [], remoteRecords = []) {
return Array.from(mergedById.values());
}
/**
 * Coerces `value` to a non-negative integer. Non-finite or negative inputs
 * fall back to `fallback`, itself clamped to a non-negative integer.
 */
function normalizeNonNegativeInteger(value, fallback = 0) {
  const candidate = Number(value);
  if (Number.isFinite(candidate) && candidate >= 0) {
    return Math.floor(candidate);
  }
  const safeFallback = Number(fallback) || 0;
  return safeFallback > 0 ? Math.floor(safeFallback) : 0;
}
/**
 * Parses an optional floor index: returns a non-negative integer for finite
 * numeric inputs, or null when no usable number is present.
 */
function normalizeOptionalFloor(value) {
  const parsed = Number(value);
  if (Number.isFinite(parsed)) {
    const floored = Math.floor(parsed);
    return floored > 0 ? floored : 0;
  }
  return null;
}
/**
 * Produces a string→string map from an arbitrary record: keys and values are
 * trimmed, and any entry whose key or value ends up empty (including falsy
 * values such as 0/false) is dropped. Non-object inputs yield {}.
 */
function normalizeStringMap(record = {}) {
  const isPlainObject = record && typeof record === "object" && !Array.isArray(record);
  if (!isPlainObject) return {};
  const result = {};
  Object.entries(record).forEach(([rawKey, rawValue]) => {
    const key = String(rawKey || "").trim();
    const mapped = String(rawValue || "").trim();
    if (key && mapped) {
      result[key] = mapped;
    }
  });
  return result;
}
/**
 * Normalizes a floor→hash record: floors are parsed as base-10 integers and
 * must be >= 0; hashes are trimmed strings. Invalid or empty entries are
 * dropped; non-object inputs yield {}.
 */
function normalizeProcessedMessageHashes(record = {}) {
  const isPlainObject = record && typeof record === "object" && !Array.isArray(record);
  if (!isPlainObject) return {};
  const cleaned = {};
  for (const [rawFloor, rawHash] of Object.entries(record)) {
    const floor = Number.parseInt(rawFloor, 10);
    if (!Number.isFinite(floor) || floor < 0) continue;
    const hash = String(rawHash || "").trim();
    if (hash) {
      cleaned[String(floor)] = hash;
    }
  }
  return cleaned;
}
/**
 * Rebuilds a floor→hash record with keys in ascending numeric order.
 * Non-numeric keys are dropped; values are looked up by the integer-string
 * form of each parsed key.
 */
function sortProcessedMessageHashes(record = {}) {
  const numericKeys = Object.keys(record)
    .map((key) => Number.parseInt(key, 10))
    .filter(Number.isFinite);
  numericKeys.sort((a, b) => a - b);
  return numericKeys.reduce((acc, key) => {
    acc[String(key)] = record[String(key)];
    return acc;
  }, {});
}
// Normalizes to a deduplicated array of trimmed, non-empty strings
// (first-occurrence order). Non-array inputs yield [].
function normalizeStringArray(value) {
  if (!Array.isArray(value)) return [];
  const seen = new Set();
  for (const item of value) {
    const text = String(item || "").trim();
    if (text) seen.add(text);
  }
  return [...seen];
}
/**
 * Serializes a value to a canonical JSON string for equality comparison.
 *
 * Fixes two defects of a bare `JSON.stringify`:
 * 1. Plain `JSON.stringify` depends on object key insertion order, so two
 *    deeply-equal objects could serialize differently; keys are now sorted
 *    recursively before serialization, making the output genuinely stable.
 * 2. `JSON.stringify` returns `undefined` (not a string) for inputs such as
 *    `undefined`, functions, or symbols; those are coerced to "".
 *
 * @param {*} value - Arbitrary value to serialize.
 * @returns {string} Canonical JSON string, or "" when serialization fails
 *   (cyclic structures, BigInt, non-serializable roots).
 */
function stableSerialize(value) {
  const withSortedKeys = (input) => {
    if (Array.isArray(input)) return input.map(withSortedKeys);
    if (input && typeof input === "object") {
      const ordered = {};
      for (const key of Object.keys(input).sort()) {
        ordered[key] = withSortedKeys(input[key]);
      }
      return ordered;
    }
    return input;
  };
  try {
    // Cycles overflow the stack inside withSortedKeys; the RangeError is
    // caught below, matching the original "return empty on failure" contract.
    return JSON.stringify(withSortedKeys(value)) ?? "";
  } catch {
    return "";
  }
}
/**
 * Extracts the best-available positive timestamp (ms) from a runtime payload.
 * Checks a fixed list of well-known fields in priority order; each field is
 * first tried as a number, then (for strings) via Date.parse. Returns 0 when
 * no field yields a positive timestamp or the input is not a plain object.
 */
function readRuntimeTimestamp(value) {
  const isPlainObject = value && typeof value === "object" && !Array.isArray(value);
  if (!isPlainObject) return 0;
  const fields = ["updatedAt", "at", "createdAt", "completedAt", "lastUpdatedAt", "timestamp"];
  for (const field of fields) {
    const candidate = value[field];
    const numeric = Number(candidate);
    if (Number.isFinite(numeric) && numeric > 0) {
      return Math.floor(numeric);
    }
    if (typeof candidate === "string") {
      const parsedDate = Date.parse(candidate);
      if (Number.isFinite(parsedDate) && parsedDate > 0) {
        return Math.floor(parsedDate);
      }
    }
  }
  return 0;
}
/**
 * Picks the "newer" of two runtime payloads after serialization-normalizing
 * both. Nullish side loses; identical serializations return the local copy;
 * otherwise the side with the larger embedded timestamp wins. Returns null
 * when both differ but carry equal timestamps (an unresolvable tie).
 */
function chooseNewerRuntimePayload(localValue, remoteValue) {
  const local = toSerializableData(localValue, null);
  const remote = toSerializableData(remoteValue, null);
  if (local == null) return remote;
  if (remote == null) return local;
  if (stableSerialize(local) === stableSerialize(remote)) return local;
  const localAt = readRuntimeTimestamp(local);
  const remoteAt = readRuntimeTimestamp(remote);
  if (localAt === remoteAt) return null;
  return remoteAt > localAt ? remote : local;
}
// Returns the smallest finite number in `values`, or `fallbackValue` when no
// element is a finite number.
function pickMinFinite(values = [], fallbackValue = null) {
  let best = null;
  for (const value of values) {
    if (!Number.isFinite(value)) continue;
    if (best === null || value < best) {
      best = value;
    }
  }
  return best === null ? fallbackValue : best;
}
/**
 * Normalizes a runtimeHistoryState meta record to a canonical shape: numeric
 * fields coerced/clamped, string fields type-checked, nested payloads passed
 * through toSerializableData. Unknown keys on the input are preserved via
 * spread; the known fields below always override them.
 */
function normalizeRuntimeHistoryMeta(value = {}, fallbackChatId = "") {
  const isPlainObject = value && typeof value === "object" && !Array.isArray(value);
  const input = isPlainObject ? toSerializableData(value, {}) : {};
  const floorCandidate = Number(input.lastProcessedAssistantFloor);
  const asString = (candidate) => (typeof candidate === "string" ? candidate : "");
  return {
    ...input,
    chatId: normalizeChatId(input.chatId || fallbackChatId),
    // -1 means "no assistant floor processed yet".
    lastProcessedAssistantFloor: Number.isFinite(floorCandidate)
      ? Math.floor(floorCandidate)
      : -1,
    extractionCount: normalizeNonNegativeInteger(input.extractionCount, 0),
    processedMessageHashes: normalizeProcessedMessageHashes(input.processedMessageHashes),
    historyDirtyFrom: normalizeOptionalFloor(input.historyDirtyFrom),
    lastMutationReason: asString(input.lastMutationReason),
    lastMutationSource: asString(input.lastMutationSource),
    lastRecoveryResult: toSerializableData(input.lastRecoveryResult, null),
    lastBatchStatus: toSerializableData(input.lastBatchStatus, null),
  };
}
/**
 * Merges local and remote runtimeHistoryState records, detecting per-floor
 * processed-hash conflicts and computing a floor below which both sides agree.
 *
 * @param {object} localMeta - Local runtimeHistoryState value.
 * @param {object} remoteMeta - Remote runtimeHistoryState value.
 * @param {object} options - `chatId`, `fallbackLastProcessedFloor`,
 *   `fallbackExtractionCount` supplied by the snapshot-level merge.
 * @returns {{history: object, hasIntegrityConflict: boolean,
 *   safeLastProcessedFloor: number, conflictFloors: number[]}}
 */
function mergeRuntimeHistoryMeta(localMeta = {}, remoteMeta = {}, options = {}) {
  const localHistory = normalizeRuntimeHistoryMeta(localMeta, options.chatId);
  const remoteHistory = normalizeRuntimeHistoryMeta(remoteMeta, options.chatId);
  // Fallback floor defaults to -1 ("nothing processed") when not a finite number.
  const fallbackLastProcessedFloor = Number.isFinite(Number(options.fallbackLastProcessedFloor))
    ? Math.floor(Number(options.fallbackLastProcessedFloor))
    : -1;
  const fallbackExtractionCount = normalizeNonNegativeInteger(options.fallbackExtractionCount, 0);
  // Optimistic merge: take the furthest-ahead floor across both sides and the fallback.
  const baseLastProcessedFloor = Math.max(
    localHistory.lastProcessedAssistantFloor,
    remoteHistory.lastProcessedAssistantFloor,
    fallbackLastProcessedFloor,
  );
  const mergedHashes = {};
  const conflictFloors = [];
  // Union of every floor tracked on either side, in ascending numeric order.
  const floorSet = new Set([
    ...Object.keys(localHistory.processedMessageHashes),
    ...Object.keys(remoteHistory.processedMessageHashes),
  ]);
  const sortedFloors = Array.from(floorSet)
    .map((value) => Number.parseInt(value, 10))
    .filter(Number.isFinite)
    .sort((left, right) => left - right);
  for (const floor of sortedFloors) {
    const floorKey = String(floor);
    const localHash = localHistory.processedMessageHashes[floorKey];
    const remoteHash = remoteHistory.processedMessageHashes[floorKey];
    // Both sides processed the floor but disagree on its content hash:
    // record the conflict and exclude the floor from the merged map.
    if (localHash && remoteHash && localHash !== remoteHash) {
      conflictFloors.push(floor);
      continue;
    }
    if (localHash || remoteHash) {
      mergedHashes[floorKey] = localHash || remoteHash;
    }
  }
  let safeLastProcessedFloor = baseLastProcessedFloor;
  const hasIntegrityConflict = conflictFloors.length > 0;
  if (hasIntegrityConflict) {
    // NOTE(review): despite the name, this is the highest *tracked* floor
    // (conflicting floors included); the `firstConflictFloor - 1` term below
    // is what actually caps the result below the first conflict — confirm
    // the naming is intentional.
    const highestConflictFreeFloor = sortedFloors.length
      ? sortedFloors[sortedFloors.length - 1]
      : -1;
    const firstConflictFloor = Math.min(...conflictFloors);
    // Retreat the processed floor to just below the earliest disagreement.
    safeLastProcessedFloor = Math.min(
      baseLastProcessedFloor,
      highestConflictFreeFloor,
      firstConflictFloor - 1,
    );
  }
  safeLastProcessedFloor = Math.max(-1, safeLastProcessedFloor);
  // Earliest floor needing replay: either side's dirty marker, or (on
  // conflict) the floor right after the safe watermark.
  const historyDirtyFrom = pickMinFinite(
    [
      localHistory.historyDirtyFrom,
      remoteHistory.historyDirtyFrom,
      hasIntegrityConflict ? Math.max(0, safeLastProcessedFloor + 1) : null,
    ],
    null,
  );
  const firstConflictFloor = hasIntegrityConflict ? Math.min(...conflictFloors) : null;
  // Remote keys win on plain spread; the explicit fields below always override.
  const mergedHistory = {
    ...localHistory,
    ...remoteHistory,
    chatId: normalizeChatId(remoteHistory.chatId || localHistory.chatId || options.chatId),
    lastProcessedAssistantFloor: safeLastProcessedFloor,
    extractionCount: Math.max(
      localHistory.extractionCount,
      remoteHistory.extractionCount,
      fallbackExtractionCount,
    ),
    processedMessageHashes: sortProcessedMessageHashes(mergedHashes),
    historyDirtyFrom,
    lastMutationReason: hasIntegrityConflict
      ? `sync-merge:processed-hash-conflict@${firstConflictFloor}`
      : String(remoteHistory.lastMutationReason || localHistory.lastMutationReason || ""),
    lastMutationSource: hasIntegrityConflict
      ? "sync-merge"
      : String(remoteHistory.lastMutationSource || localHistory.lastMutationSource || ""),
    lastRecoveryResult: chooseNewerRuntimePayload(
      localHistory.lastRecoveryResult,
      remoteHistory.lastRecoveryResult,
    ),
    lastBatchStatus: chooseNewerRuntimePayload(
      localHistory.lastBatchStatus,
      remoteHistory.lastBatchStatus,
    ),
  };
  return {
    history: mergedHistory,
    hasIntegrityConflict,
    safeLastProcessedFloor,
    conflictFloors,
  };
}
/**
 * Normalizes a runtimeVectorIndexState record: string fields type-checked,
 * hash/node maps sanitized, stats coerced to non-negative integers. Unknown
 * keys survive via spread; the explicit fields below always override.
 *
 * @param {object} value - Raw vector-index meta value (any shape tolerated).
 * @returns {object} Canonical vector-index meta record.
 */
function normalizeRuntimeVectorMeta(value = {}) {
  const input =
    value && typeof value === "object" && !Array.isArray(value)
      ? toSerializableData(value, {})
      : {};
  // lastStats must be a plain object; anything else normalizes to empty stats.
  const localStats =
    input.lastStats && typeof input.lastStats === "object" && !Array.isArray(input.lastStats)
      ? input.lastStats
      : {};
  return {
    ...input,
    mode: typeof input.mode === "string" ? input.mode : "",
    collectionId: typeof input.collectionId === "string" ? input.collectionId : "",
    source: typeof input.source === "string" ? input.source : "",
    modelScope: typeof input.modelScope === "string" ? input.modelScope : "",
    // Bidirectional content-hash <-> node-id maps; entries with empty
    // keys/values are dropped by normalizeStringMap.
    hashToNodeId: normalizeStringMap(input.hashToNodeId),
    nodeToHash: normalizeStringMap(input.nodeToHash),
    dirty: Boolean(input.dirty),
    replayRequiredNodeIds: normalizeStringArray(input.replayRequiredNodeIds),
    dirtyReason: typeof input.dirtyReason === "string" ? input.dirtyReason : "",
    pendingRepairFromFloor: normalizeOptionalFloor(input.pendingRepairFromFloor),
    lastSyncAt: normalizeTimestamp(input.lastSyncAt, 0),
    lastStats: {
      total: normalizeNonNegativeInteger(localStats.total, 0),
      indexed: normalizeNonNegativeInteger(localStats.indexed, 0),
      stale: normalizeNonNegativeInteger(localStats.stale, 0),
      pending: normalizeNonNegativeInteger(localStats.pending, 0),
    },
    lastWarning: typeof input.lastWarning === "string" ? input.lastWarning : "",
  };
}
/**
 * Merges local and remote vector-index meta, reconciling the node<->hash
 * mappings against the set of nodes that survived the graph merge. Any node
 * whose hash the two sides disagree on, or any hash claimed by multiple
 * nodes, is dropped from the merged mapping and queued for replay.
 *
 * @param {object} localMeta - Local runtimeVectorIndexState value.
 * @param {object} remoteMeta - Remote runtimeVectorIndexState value.
 * @param {object} options - `mergedNodes` (post-merge node list) and
 *   `fallbackLastProcessedFloor` (repair starting floor on conflict).
 * @returns {object} Merged vector-index meta record.
 */
function mergeRuntimeVectorMeta(localMeta = {}, remoteMeta = {}, options = {}) {
  const localVector = normalizeRuntimeVectorMeta(localMeta);
  const remoteVector = normalizeRuntimeVectorMeta(remoteMeta);
  // Only nodes present in the merged graph may keep vector mappings.
  const aliveNodeIds = new Set(
    (Array.isArray(options.mergedNodes) ? options.mergedNodes : [])
      .map((node) => String(node?.id || "").trim())
      .filter(Boolean),
  );
  const conflictNodeIds = new Set();
  const candidateHashByNode = new Map();
  // Records one hash candidate per node; a second, different hash for the
  // same node marks that node as conflicted and evicts it.
  const registerCandidate = (nodeId, hash) => {
    const normalizedNodeId = String(nodeId || "").trim();
    const normalizedHash = String(hash || "").trim();
    if (!normalizedNodeId || !normalizedHash || !aliveNodeIds.has(normalizedNodeId)) return;
    if (conflictNodeIds.has(normalizedNodeId)) return;
    const existingHash = candidateHashByNode.get(normalizedNodeId);
    if (!existingHash) {
      candidateHashByNode.set(normalizedNodeId, normalizedHash);
      return;
    }
    if (existingHash !== normalizedHash) {
      conflictNodeIds.add(normalizedNodeId);
      candidateHashByNode.delete(normalizedNodeId);
    }
  };
  // Feed candidates from both directions of both sides' maps.
  for (const [nodeId, hash] of Object.entries(localVector.nodeToHash)) {
    registerCandidate(nodeId, hash);
  }
  for (const [nodeId, hash] of Object.entries(remoteVector.nodeToHash)) {
    registerCandidate(nodeId, hash);
  }
  for (const [hash, nodeId] of Object.entries(localVector.hashToNodeId)) {
    registerCandidate(nodeId, hash);
  }
  for (const [hash, nodeId] of Object.entries(remoteVector.hashToNodeId)) {
    registerCandidate(nodeId, hash);
  }
  // Defensive sweep: ensure no conflicted node retains a candidate.
  for (const nodeId of conflictNodeIds) {
    candidateHashByNode.delete(nodeId);
  }
  // Second pass: group surviving candidates by hash so a hash claimed by
  // more than one node is also treated as a conflict.
  const hashBuckets = new Map();
  for (const [nodeId, hash] of candidateHashByNode.entries()) {
    const bucket = hashBuckets.get(hash) || new Set();
    bucket.add(nodeId);
    hashBuckets.set(hash, bucket);
  }
  const mergedNodeToHash = {};
  const mergedHashToNodeId = {};
  for (const [hash, bucket] of hashBuckets.entries()) {
    const nodeIds = Array.from(bucket).filter((nodeId) => aliveNodeIds.has(nodeId));
    if (nodeIds.length !== 1) {
      for (const nodeId of nodeIds) {
        conflictNodeIds.add(nodeId);
      }
      continue;
    }
    const nodeId = nodeIds[0];
    mergedNodeToHash[nodeId] = hash;
    mergedHashToNodeId[hash] = nodeId;
  }
  // Replay queue: union of both sides' queues plus every conflicted node,
  // restricted to nodes that still exist.
  const replayRequiredNodeIds = normalizeStringArray([
    ...localVector.replayRequiredNodeIds,
    ...remoteVector.replayRequiredNodeIds,
    ...Array.from(conflictNodeIds),
  ]).filter((nodeId) => aliveNodeIds.has(nodeId));
  const hasMappingConflict = conflictNodeIds.size > 0;
  const inheritedDirty = Boolean(localVector.dirty || remoteVector.dirty);
  const dirty = inheritedDirty || hasMappingConflict || replayRequiredNodeIds.length > 0;
  const fallbackRepairFloor = Number.isFinite(Number(options.fallbackLastProcessedFloor))
    ? Math.max(0, Math.floor(Number(options.fallbackLastProcessedFloor)))
    : 0;
  // Repair floor is only meaningful while dirty; take the earliest signal.
  const pendingRepairFromFloor = dirty
    ? pickMinFinite(
        [
          localVector.pendingRepairFromFloor,
          remoteVector.pendingRepairFromFloor,
          hasMappingConflict ? fallbackRepairFloor : null,
        ],
        null,
      )
    : null;
  const mappingCount = Object.keys(mergedNodeToHash).length;
  const total = Math.max(mappingCount, localVector.lastStats.total, remoteVector.lastStats.total);
  const indexed = mappingCount;
  const stale = Math.max(0, total - indexed);
  const pending = dirty
    ? Math.max(
        replayRequiredNodeIds.length,
        localVector.lastStats.pending,
        remoteVector.lastStats.pending,
        hasMappingConflict ? 1 : 0,
      )
    : 0;
  // Remote keys win on plain spread; explicit fields below always override.
  return {
    ...localVector,
    ...remoteVector,
    mode: String(remoteVector.mode || localVector.mode || "").trim(),
    source: String(remoteVector.source || localVector.source || "").trim(),
    modelScope: String(remoteVector.modelScope || localVector.modelScope || "").trim(),
    collectionId: String(remoteVector.collectionId || localVector.collectionId || "").trim(),
    hashToNodeId: mergedHashToNodeId,
    nodeToHash: mergedNodeToHash,
    replayRequiredNodeIds,
    dirty,
    dirtyReason: hasMappingConflict
      ? "sync-merge-vector-conflict"
      : dirty
        ? String(
            remoteVector.dirtyReason ||
              localVector.dirtyReason ||
              "sync-merge-vector-replay-required",
          )
        : "",
    pendingRepairFromFloor,
    lastSyncAt: Math.max(localVector.lastSyncAt, remoteVector.lastSyncAt),
    lastStats: {
      total,
      indexed,
      stale,
      pending,
    },
    lastWarning: hasMappingConflict
      ? "同步合并检测到向量映射冲突,已标记待重建"
      : String(remoteVector.lastWarning || localVector.lastWarning || ""),
  };
}
/**
 * Validates and canonicalizes one batch-journal entry. Returns null when the
 * entry is not a plain object, lacks a non-empty id, or has an invalid
 * processedRange ([start, end] with finite start <= end).
 */
function normalizeJournalEntry(entry) {
  const isPlainObject = entry && typeof entry === "object" && !Array.isArray(entry);
  if (!isPlainObject) return null;
  const id = String(entry.id || "").trim();
  if (!id) return null;
  const range = Array.isArray(entry.processedRange) ? entry.processedRange : [];
  const start = Number(range[0]);
  const end = Number(range[1]);
  if (!Number.isFinite(start) || !Number.isFinite(end) || start > end) {
    return null;
  }
  return {
    ...toSerializableData(entry, entry),
    id,
    // Legacy entries stored the timestamp under `at`.
    createdAt: normalizeTimestamp(entry.createdAt ?? entry.at, 0),
    processedRange: [Math.floor(start), Math.floor(end)],
  };
}
/**
 * Picks the winning journal entry between a local and remote candidate:
 * newer createdAt wins, then larger processedRange end; on a full tie the
 * remote entry wins. A missing side yields the other (or null if both miss).
 */
function chooseJournalEntryWinner(localEntry, remoteEntry) {
  if (!localEntry) return remoteEntry || null;
  if (!remoteEntry) return localEntry || null;
  if (remoteEntry.createdAt > localEntry.createdAt) return remoteEntry;
  if (localEntry.createdAt > remoteEntry.createdAt) return localEntry;
  const endOf = (entry) => Number(entry.processedRange?.[1] ?? -1);
  const localEnd = endOf(localEntry);
  const remoteEnd = endOf(remoteEntry);
  if (remoteEnd > localEnd) return remoteEntry;
  if (localEnd > remoteEnd) return localEntry;
  return remoteEntry;
}
/**
 * Merges two batch-journal arrays: entries are deduplicated by id (remote
 * entries registered second, so ties resolve per chooseJournalEntryWinner),
 * optionally trimmed to a trusted floor, sorted deterministically, and capped
 * at RUNTIME_BATCH_JOURNAL_LIMIT (oldest dropped).
 *
 * @param {Array} localJournal - Local journal entries.
 * @param {Array} remoteJournal - Remote journal entries.
 * @param {object} options - `maxTrustedFloor`: entries whose processedRange
 *   ends above this floor are discarded (when finite).
 * @returns {Array} Merged, sorted, size-limited journal.
 */
function mergeRuntimeBatchJournal(localJournal = [], remoteJournal = [], options = {}) {
  const journalById = new Map();
  const register = (entry) => {
    const normalizedEntry = normalizeJournalEntry(entry);
    if (!normalizedEntry) return;
    const existing = journalById.get(normalizedEntry.id);
    const winner = chooseJournalEntryWinner(existing, normalizedEntry);
    if (winner) journalById.set(normalizedEntry.id, winner);
  };
  for (const entry of Array.isArray(localJournal) ? localJournal : []) {
    register(entry);
  }
  for (const entry of Array.isArray(remoteJournal) ? remoteJournal : []) {
    register(entry);
  }
  let merged = Array.from(journalById.values());
  const maxTrustedFloor = Number.isFinite(Number(options.maxTrustedFloor))
    ? Math.floor(Number(options.maxTrustedFloor))
    : null;
  // Drop entries that claim to have processed beyond the trusted watermark
  // (e.g. floors rolled back by a history-hash conflict).
  if (Number.isFinite(maxTrustedFloor)) {
    merged = merged.filter((entry) => Number(entry.processedRange?.[1]) <= maxTrustedFloor);
  }
  // Deterministic order: range start, then range end, then createdAt, then id.
  merged.sort((left, right) => {
    const leftStart = Number(left.processedRange?.[0] ?? -1);
    const rightStart = Number(right.processedRange?.[0] ?? -1);
    const leftEnd = Number(left.processedRange?.[1] ?? -1);
    const rightEnd = Number(right.processedRange?.[1] ?? -1);
    return (
      leftStart - rightStart ||
      leftEnd - rightEnd ||
      left.createdAt - right.createdAt ||
      left.id.localeCompare(right.id)
    );
  });
  // Keep only the newest (tail) entries when over the cap.
  if (merged.length > RUNTIME_BATCH_JOURNAL_LIMIT) {
    merged = merged.slice(-RUNTIME_BATCH_JOURNAL_LIMIT);
  }
  return merged.map((entry) => toSerializableData(entry, entry));
}
/**
 * Merges the "last recall result" meta value from two snapshots: payload-level
 * recency wins first; an unresolvable payload tie falls back to the snapshots'
 * meta.lastModified timestamps.
 *
 * NOTE(review): when both payload timestamps and lastModified tie, the result
 * is dropped (null) — presumably regenerated on the next recall; confirm this
 * is acceptable for callers.
 *
 * @param {object} localSnapshot - Local snapshot (reads meta).
 * @param {object} remoteSnapshot - Remote snapshot (reads meta).
 * @returns {object|null} Winning recall payload, or null on a full tie.
 */
function mergeRuntimeLastRecallResult(localSnapshot, remoteSnapshot) {
  const localRecall = toSerializableData(localSnapshot?.meta?.[RUNTIME_LAST_RECALL_META_KEY], null);
  const remoteRecall = toSerializableData(remoteSnapshot?.meta?.[RUNTIME_LAST_RECALL_META_KEY], null);
  const mergedByPayload = chooseNewerRuntimePayload(localRecall, remoteRecall);
  if (mergedByPayload != null) {
    return mergedByPayload;
  }
  // Payloads differ but carry equal embedded timestamps: arbitrate by the
  // snapshots' own modification times.
  const localModified = normalizeTimestamp(localSnapshot?.meta?.lastModified, 0);
  const remoteModified = normalizeTimestamp(remoteSnapshot?.meta?.lastModified, 0);
  if (remoteModified > localModified) return remoteRecall;
  if (localModified > remoteModified) return localRecall;
  return null;
}
async function getDb(chatId, options = {}) {
const normalizedChatId = normalizeChatId(chatId);
if (!normalizedChatId) {
@@ -312,6 +837,24 @@ async function patchDbMeta(db, patch = {}) {
}
}
/**
 * Invokes the optional `onSyncApplied` callback with a shallow copy of the
 * payload. Hook failures are logged and swallowed so they can never break
 * the sync flow that triggered them. No-op when no hook is configured.
 */
async function invokeSyncAppliedHook(options = {}, payload = {}) {
  if (typeof options.onSyncApplied !== "function") return;
  try {
    await options.onSyncApplied({ ...(payload || {}) });
  } catch (error) {
    console.warn("[ST-BME] 同步后运行时刷新回调失败:", {
      chatId: String(payload?.chatId || ""),
      action: String(payload?.action || ""),
      error,
    });
  }
}
async function sanitizeFilename(fileName, options = {}) {
const fallbackSanitized = String(fileName || "")
.replace(/[<>:"/\\|?*\x00-\x1F]/g, "_")
@@ -675,6 +1218,12 @@ export async function download(chatId, options = {}) {
syncDirtyReason: "",
});
await invokeSyncAppliedHook(options, {
chatId: normalizedChatId,
action: "download",
revision: remoteRevision,
});
return {
downloaded: true,
exists: true,
@@ -720,7 +1269,7 @@ export function mergeSnapshots(localSnapshot, remoteSnapshot, options = {}) {
const remoteRevision = normalizeRevision(remote.meta.revision);
const mergedRevision = Math.max(localRevision, remoteRevision) + 1;
const mergedState = {
const baseMergedState = {
lastProcessedFloor: Math.max(
Number(local.state?.lastProcessedFloor ?? -1),
Number(remote.state?.lastProcessedFloor ?? -1),
@@ -731,9 +1280,88 @@ export function mergeSnapshots(localSnapshot, remoteSnapshot, options = {}) {
),
};
const mergedHistoryResult = mergeRuntimeHistoryMeta(
local.meta?.[RUNTIME_HISTORY_META_KEY],
remote.meta?.[RUNTIME_HISTORY_META_KEY],
{
chatId: normalizedChatId,
fallbackLastProcessedFloor: baseMergedState.lastProcessedFloor,
fallbackExtractionCount: baseMergedState.extractionCount,
},
);
const mergedLastProcessedFloor = Math.min(
Number(baseMergedState.lastProcessedFloor ?? -1),
Number(mergedHistoryResult.safeLastProcessedFloor ?? -1),
);
const mergedState = {
lastProcessedFloor: Number.isFinite(mergedLastProcessedFloor)
? Math.floor(mergedLastProcessedFloor)
: -1,
extractionCount: Math.max(
Number(baseMergedState.extractionCount ?? 0),
Number(mergedHistoryResult.history?.extractionCount ?? 0),
),
};
const mergedHistoryState = {
...mergedHistoryResult.history,
chatId: normalizedChatId,
lastProcessedAssistantFloor: mergedState.lastProcessedFloor,
extractionCount: mergedState.extractionCount,
processedMessageHashes: sortProcessedMessageHashes(
Object.fromEntries(
Object.entries(mergedHistoryResult.history?.processedMessageHashes || {}).filter(
([floorKey]) => {
const floor = Number.parseInt(floorKey, 10);
return Number.isFinite(floor) && floor >= 0 && floor <= mergedState.lastProcessedFloor;
},
),
),
),
};
const mergedVectorState = mergeRuntimeVectorMeta(
local.meta?.[RUNTIME_VECTOR_META_KEY],
remote.meta?.[RUNTIME_VECTOR_META_KEY],
{
mergedNodes,
fallbackLastProcessedFloor: mergedState.lastProcessedFloor,
},
);
const mergedBatchJournal = mergeRuntimeBatchJournal(
local.meta?.[RUNTIME_BATCH_JOURNAL_META_KEY],
remote.meta?.[RUNTIME_BATCH_JOURNAL_META_KEY],
{
maxTrustedFloor: mergedState.lastProcessedFloor,
},
);
const mergedLastRecallResult = mergeRuntimeLastRecallResult(local, remote);
const mergedLastProcessedSeq = Math.max(
normalizeNonNegativeInteger(local.meta?.[RUNTIME_LAST_PROCESSED_SEQ_META_KEY], 0),
normalizeNonNegativeInteger(remote.meta?.[RUNTIME_LAST_PROCESSED_SEQ_META_KEY], 0),
normalizeNonNegativeInteger(mergedState.lastProcessedFloor, 0),
);
const mergedRuntimeGraphVersion = Math.max(
normalizeNonNegativeInteger(local.meta?.[RUNTIME_GRAPH_VERSION_META_KEY], 0),
normalizeNonNegativeInteger(remote.meta?.[RUNTIME_GRAPH_VERSION_META_KEY], 0),
normalizeNonNegativeInteger(mergedRevision, 0),
);
const mergedMeta = {
...local.meta,
...remote.meta,
[RUNTIME_HISTORY_META_KEY]: mergedHistoryState,
[RUNTIME_VECTOR_META_KEY]: mergedVectorState,
[RUNTIME_BATCH_JOURNAL_META_KEY]: mergedBatchJournal,
[RUNTIME_LAST_RECALL_META_KEY]: mergedLastRecallResult,
[RUNTIME_LAST_PROCESSED_SEQ_META_KEY]: mergedLastProcessedSeq,
[RUNTIME_GRAPH_VERSION_META_KEY]: mergedRuntimeGraphVersion,
schemaVersion: Math.max(
Number(local.meta?.schemaVersion || 1),
Number(remote.meta?.schemaVersion || 1),
@@ -749,6 +1377,10 @@ export function mergeSnapshots(localSnapshot, remoteSnapshot, options = {}) {
nodeCount: mergedNodes.length,
edgeCount: mergedEdges.length,
tombstoneCount: mergedTombstones.length,
syncDirty: false,
syncDirtyReason: "",
lastProcessedFloor: mergedState.lastProcessedFloor,
extractionCount: mergedState.extractionCount,
};
return {
@@ -858,6 +1490,12 @@ export async function syncNow(chatId, options = {}) {
syncDirtyReason: "",
});
await invokeSyncAppliedHook(options, {
chatId: normalizedChatId,
action: "merge",
revision: normalizeRevision(mergedSnapshot.meta.revision),
});
return {
synced: true,
chatId: normalizedChatId,

121
index.js
View File

@@ -1918,8 +1918,58 @@ function getCurrentChatId(context = getContext()) {
return resolveCurrentChatIdentity(context).chatId;
}
function buildBmeSyncRuntimeOptions(extra = {}) {
/**
 * Reloads the runtime graph from IndexedDB after a sync "download" or
 * "merge" lands, unless the user has since switched to a different chat.
 *
 * @param {object} syncPayload - `{ chatId, action }` from the sync layer.
 * @returns {Promise<object>} Result with `refreshed` flag and a `reason`
 *   when the refresh was skipped, plus the load result when performed.
 */
async function refreshRuntimeGraphAfterSyncApplied(syncPayload = {}) {
  const action = String(syncPayload?.action || "")
    .trim()
    .toLowerCase();
  // Only sync outcomes that change persisted state warrant a runtime reload.
  const isRefreshableAction = action === "download" || action === "merge";
  if (!isRefreshableAction) {
    return {
      refreshed: false,
      reason: "action-not-supported",
      action,
    };
  }
  const syncedChatId = normalizeChatIdCandidate(syncPayload?.chatId);
  const activeChatId = normalizeChatIdCandidate(getCurrentChatId());
  const targetChatId = syncedChatId || activeChatId;
  if (!targetChatId) {
    return {
      refreshed: false,
      reason: "missing-chat-id",
      action,
    };
  }
  // Never clobber the runtime graph of a chat the user has navigated to.
  if (activeChatId && targetChatId !== activeChatId) {
    return {
      refreshed: false,
      reason: "chat-switched",
      action,
      chatId: targetChatId,
      activeChatId,
    };
  }
  const loadResult = await loadGraphFromIndexedDb(targetChatId, {
    source: `sync-post-refresh:${action}`,
    allowOverride: true,
    applyEmptyState: true,
  });
  const refreshed = Boolean(loadResult?.loaded || loadResult?.emptyConfirmed);
  return {
    refreshed,
    action,
    chatId: targetChatId,
    ...loadResult,
  };
}
function buildBmeSyncRuntimeOptions(extra = {}) {
const normalizedExtra =
extra && typeof extra === "object" && !Array.isArray(extra) ? extra : {};
const defaultOptions = {
getDb: async (chatId) => {
const manager = ensureBmeChatManager();
if (!manager) {
@@ -1929,7 +1979,25 @@ function buildBmeSyncRuntimeOptions(extra = {}) {
},
getCurrentChatId: () => getCurrentChatId(),
getRequestHeaders,
...extra,
onSyncApplied: async (payload = {}) => {
await refreshRuntimeGraphAfterSyncApplied(payload);
},
};
if (typeof normalizedExtra.onSyncApplied !== "function") {
return {
...defaultOptions,
...normalizedExtra,
};
}
return {
...defaultOptions,
...normalizedExtra,
onSyncApplied: async (payload = {}) => {
await defaultOptions.onSyncApplied(payload);
await normalizedExtra.onSyncApplied(payload);
},
};
}
@@ -3984,17 +4052,30 @@ function loadGraphFromChat(options = {}) {
lastExtractedItems = [];
updateLastRecalledItems(currentGraph.lastRecallResult || []);
lastInjectionContent = "";
runtimeStatus = createUiStatus("待命", "已从兼容 metadata 加载图谱", "idle");
lastExtractionStatus = createUiStatus("待命", "已加载聊天图谱,等待下一次提取", "idle");
lastVectorStatus = createUiStatus(
runtimeStatus = createUiStatus(
"图谱加载中",
"已从兼容 metadata 暂载图谱,等待 IndexedDB 权威确认",
"running",
);
lastExtractionStatus = createUiStatus(
"待命",
currentGraph.vectorIndexState?.lastWarning || "已加载聊天图谱,等待下一次向量任务",
"兼容图谱暂载中,等待 IndexedDB 确认后再执行提取",
"idle",
);
lastRecallStatus = createUiStatus("待命", "已加载聊天图谱,等待下一次召回", "idle");
applyGraphLoadState(GRAPH_LOAD_STATES.LOADED, {
lastVectorStatus = createUiStatus(
"待命",
currentGraph.vectorIndexState?.lastWarning ||
"兼容图谱暂载中,等待 IndexedDB 确认后再执行向量任务",
"idle",
);
lastRecallStatus = createUiStatus(
"待命",
"兼容图谱暂载中,等待 IndexedDB 确认后再执行召回",
"idle",
);
applyGraphLoadState(GRAPH_LOAD_STATES.LOADING, {
chatId,
reason: `${source}:metadata-compat`,
reason: `${source}:metadata-compat-provisional`,
attemptIndex,
revision: officialRevision,
lastPersistedRevision: officialRevision,
@@ -4005,15 +4086,23 @@ function loadGraphFromChat(options = {}) {
shadowSnapshotRevision: 0,
shadowSnapshotUpdatedAt: "",
shadowSnapshotReason: "",
dbReady: true,
writesBlocked: false,
dbReady: false,
writesBlocked: true,
});
updateGraphPersistenceState({
metadataIntegrity: getChatMetadataIntegrity(context),
storagePrimary: "metadata",
storageMode: "metadata",
dbReady: true,
storagePrimary: "indexeddb",
storageMode: "indexeddb",
dbReady: false,
indexedDbLastError: "",
dualWriteLastResult: {
action: "load",
source: `${source}:metadata-compat`,
success: true,
provisional: true,
revision: officialRevision,
at: Date.now(),
},
});
scheduleIndexedDbGraphProbe(chatId, {
@@ -4027,8 +4116,8 @@ function loadGraphFromChat(options = {}) {
return {
success: true,
loaded: true,
loadState: GRAPH_LOAD_STATES.LOADED,
reason: `${source}:metadata-compat`,
loadState: GRAPH_LOAD_STATES.LOADING,
reason: `${source}:metadata-compat-provisional`,
chatId,
attemptIndex,
};

View File

@@ -219,19 +219,47 @@ export function snapshotProcessedMessageHashes(
export function detectHistoryMutation(chat, historyState) {
const lastProcessedAssistantFloor =
historyState?.lastProcessedAssistantFloor ?? -1;
const processedMessageHashes = historyState?.processedMessageHashes || {};
const processedMessageHashes =
historyState?.processedMessageHashes &&
typeof historyState.processedMessageHashes === "object" &&
!Array.isArray(historyState.processedMessageHashes)
? historyState.processedMessageHashes
: {};
if (!Array.isArray(chat) || lastProcessedAssistantFloor < 0) {
return { dirty: false, earliestAffectedFloor: null, reason: "" };
}
if (lastProcessedAssistantFloor >= chat.length) {
return {
dirty: true,
earliestAffectedFloor: chat.length,
reason: "已处理楼层超出当前聊天长度,检测到历史截断",
};
}
const trackedFloors = Object.keys(processedMessageHashes)
.map((value) => Number.parseInt(value, 10))
.filter(Number.isFinite)
.sort((a, b) => a - b);
if (trackedFloors.length === 0) {
return { dirty: false, earliestAffectedFloor: null, reason: "" };
if (trackedFloors.length === 0 && lastProcessedAssistantFloor >= 0) {
return {
dirty: true,
earliestAffectedFloor: 0,
reason: "已处理楼层存在,但 processedMessageHashes 缺失,执行保守重放",
};
}
for (let floor = 0; floor <= lastProcessedAssistantFloor; floor++) {
if (!Object.prototype.hasOwnProperty.call(processedMessageHashes, String(floor))) {
return {
dirty: true,
earliestAffectedFloor: floor,
reason: `楼层 ${floor} 缺少已处理哈希,执行保守重放`,
};
}
}
for (const floor of trackedFloors) {
@@ -253,14 +281,6 @@ export function detectHistoryMutation(chat, historyState) {
}
}
if (lastProcessedAssistantFloor >= chat.length) {
return {
dirty: true,
earliestAffectedFloor: chat.length,
reason: "已处理楼层超出当前聊天长度,检测到历史截断",
};
}
return { dirty: false, earliestAffectedFloor: null, reason: "" };
}

View File

@@ -455,6 +455,7 @@ result = {
loadGraphFromChat,
saveGraphToChat,
syncGraphLoadFromLiveContext,
buildBmeSyncRuntimeOptions,
onMessageReceived,
applyGraphLoadState,
maybeFlushQueuedGraphPersist,
@@ -541,11 +542,17 @@ result = {
source: "global-chat-id",
});
assert.equal(result.loadState, "loaded");
assert.equal(result.loadState, "loading");
assert.equal(result.reason, "global-chat-id:metadata-compat-provisional");
assert.equal(
harness.api.getCurrentGraph().historyState.chatId,
"chat-global",
);
assert.equal(harness.api.getGraphPersistenceState().dbReady, false);
assert.equal(
harness.api.getGraphPersistenceLiveState().writesBlocked,
true,
);
}
{
@@ -839,6 +846,42 @@ result = {
);
}
{
  // Regression scenario: the runtime graph is stale while IndexedDB holds a
  // fresher snapshot; a sync "download" must refresh the in-memory graph.
  const harness = await createGraphPersistenceHarness({
    chatId: "chat-sync-refresh",
    chatMetadata: {
      integrity: "chat-sync-refresh-ready",
    },
  });
  // Seed the runtime with a deliberately stale graph variant.
  harness.api.setCurrentGraph(
    normalizeGraphRuntimeState(createMeaningfulGraph("chat-sync-refresh", "stale-runtime"), "chat-sync-refresh"),
  );
  // Persistence state claims "loaded" at revision 2 with writes unblocked.
  harness.api.setGraphPersistenceState({
    loadState: "loaded",
    chatId: "chat-sync-refresh",
    reason: "runtime-stale",
    revision: 2,
    lastPersistedRevision: 2,
    dbReady: true,
    writesBlocked: false,
  });
  // IndexedDB carries a fresher snapshot (revision 7 > runtime revision 2).
  harness.api.setIndexedDbSnapshot(
    buildSnapshotFromGraph(createMeaningfulGraph("chat-sync-refresh", "fresh-indexeddb"), {
      chatId: "chat-sync-refresh",
      revision: 7,
    }),
  );
  // Fire the onSyncApplied hook as the sync layer would after a download;
  // the runtime graph should then reflect the "fresh-indexeddb" variant.
  const runtimeOptions = harness.api.buildBmeSyncRuntimeOptions();
  await runtimeOptions.onSyncApplied({ chatId: "chat-sync-refresh", action: "download" });
  assert.equal(
    harness.api.getCurrentGraph().nodes[0]?.fields?.title,
    "事件-fresh-indexeddb",
    "download/merge 后应刷新当前运行时图谱",
  );
}
{
const sharedSession = new Map();
const writer = await createGraphPersistenceHarness({
@@ -901,7 +944,7 @@ result = {
source: "official-load",
});
assert.equal(result.loadState, "loaded");
assert.equal(result.loadState, "loading");
assert.equal(
reader.api.getCurrentGraph().nodes[0]?.fields?.title,
"事件-official",
@@ -943,8 +986,8 @@ result = {
source: "official-older-than-shadow",
});
assert.equal(result.loadState, "loaded");
assert.equal(result.reason, "official-older-than-shadow:metadata-compat");
assert.equal(result.loadState, "loading");
assert.equal(result.reason, "official-older-than-shadow:metadata-compat-provisional");
assert.equal(
reader.api.getCurrentGraph().nodes[0]?.fields?.title,
"事件-official-older",
@@ -1166,7 +1209,7 @@ result = {
source: "load-official-decoupled",
});
assert.equal(result.loadState, "loaded");
assert.equal(result.loadState, "loading");
const runtimeGraph = harness.api.getCurrentGraph();
const persistedGraph =
harness.runtimeContext.__chatContext.chatMetadata.st_bme_graph;
@@ -1233,7 +1276,7 @@ result = {
source: "load-shadow-decoupled",
});
assert.equal(result.loadState, "loaded");
assert.equal(result.loadState, "loading");
const runtimeGraph = reader.api.getCurrentGraph();
const persistedGraph =
reader.runtimeContext.__chatContext.chatMetadata.st_bme_graph;

View File

@@ -19,6 +19,7 @@ const chatIdsForCleanup = new Set([
"chat-b",
"chat-manager-a",
"chat-manager-b",
"chat-replace-reset",
]);
async function setupIndexedDbTestEnv() {
@@ -194,6 +195,81 @@ async function testSnapshotExportImport() {
await db.close();
}
/**
 * Verifies that a replace-mode snapshot import fully resets stale meta:
 * runtime keys and unknown custom keys are purged, imported meta/state
 * values win, defaults are restored, and the revision stays monotonic.
 */
async function testReplaceImportResetsStaleMeta() {
  const targetChatId = "chat-replace-reset";
  const database = new BmeDatabase(targetChatId, { dexieClass: globalThis.Dexie });
  await database.open();
  // Seed stale runtime state plus a leaked custom key that replace must purge.
  await database.patchMeta({
    runtimeHistoryState: {
      chatId: targetChatId,
      lastProcessedAssistantFloor: 99,
      processedMessageHashes: {
        99: "stale-hash",
      },
    },
    runtimeVectorIndexState: {
      hashToNodeId: {
        "stale-hash": "node-stale",
      },
      nodeToHash: {
        "node-stale": "stale-hash",
      },
      dirty: true,
      pendingRepairFromFloor: 88,
    },
    runtimeBatchJournal: [{ id: "stale-journal", processedRange: [90, 99] }],
    runtimeLastRecallResult: { updatedAt: 123456, nodes: ["node-stale"] },
    runtimeLastProcessedSeq: 999,
    runtimeGraphVersion: 999,
    migrationCompletedAt: 987654321,
    legacyRetentionUntil: 987654321,
    customLeakField: "stale-value",
  });
  const revisionBefore = await database.getRevision();
  const importResult = await database.importSnapshot(
    {
      meta: {
        chatId: targetChatId,
        revision: 1,
        deviceId: "device-replace-new",
      },
      state: {
        lastProcessedFloor: 3,
        extractionCount: 2,
      },
      nodes: [],
      edges: [],
      tombstones: [],
    },
    {
      mode: "replace",
      preserveRevision: true,
      markSyncDirty: false,
    },
  );
  assert.ok(importResult.revision > revisionBefore, "replace 导入后 revision 必须单调递增");
  // Imported meta/state values and reset defaults should win after the replace.
  assert.equal(await database.getMeta("chatId", ""), targetChatId);
  assert.equal(await database.getMeta("lastProcessedFloor", -1), 3);
  assert.equal(await database.getMeta("extractionCount", 0), 2);
  assert.equal(await database.getMeta("deviceId", ""), "device-replace-new");
  assert.equal(await database.getMeta("migrationCompletedAt", -1), 0);
  assert.equal(await database.getMeta("legacyRetentionUntil", -1), 0);
  // Every stale runtime key (and the leaked custom key) must be gone entirely.
  const purgedKeys = [
    "runtimeHistoryState",
    "runtimeVectorIndexState",
    "runtimeBatchJournal",
    "runtimeLastRecallResult",
    "runtimeLastProcessedSeq",
    "runtimeGraphVersion",
    "customLeakField",
  ];
  for (const key of purgedKeys) {
    assert.equal(await database.getMeta(key, "__missing__"), "__missing__");
  }
  assert.equal(await database.getMeta("syncDirty", true), false);
  await database.close();
}
async function testRevisionMonotonicity() {
const db = new BmeDatabase("chat-a", { dexieClass: globalThis.Dexie });
await db.open();
@@ -395,6 +471,7 @@ async function main() {
await testCrudAndMeta();
await testTransactionRollback();
await testSnapshotExportImport();
await testReplaceImportResetsStaleMeta();
await testRevisionMonotonicity();
await testTombstonePrune();
await testChatIsolationAndManager();

View File

@@ -371,6 +371,128 @@ async function testMergeRules() {
assert.equal(merged.state.extractionCount, 3);
}
/**
 * Exercises the per-key merge policies for runtime meta fields in
 * mergeSnapshots: conservative rollback on conflicting processed hashes,
 * intersection-based vector-index reconciliation, journal intersection,
 * nulled recall cache, max() for sequence counters, and a bumped graph
 * version. All expectations below are pinned against the asserts at the end.
 */
async function testMergeRuntimeMetaPolicies() {
  // Local side: floors 1-4 and 6 processed, with "local-h4" at floor 4
  // (this deliberately conflicts with the remote's "remote-h4").
  const local = {
    meta: {
      chatId: "chat-merge-meta",
      revision: 7,
      lastModified: 200,
      deviceId: "local-device",
      schemaVersion: 1,
      runtimeHistoryState: {
        chatId: "chat-merge-meta",
        lastProcessedAssistantFloor: 6,
        extractionCount: 6,
        processedMessageHashes: {
          1: "h1",
          2: "h2",
          3: "h3",
          4: "local-h4",
          6: "h6",
        },
      },
      // "hash-shared-b" -> "node-b" agrees with the remote; the node-a
      // mapping differs per side and is expected to be dropped on merge.
      runtimeVectorIndexState: {
        hashToNodeId: {
          "hash-local-a": "node-a",
          "hash-shared-b": "node-b",
        },
        nodeToHash: {
          "node-a": "hash-local-a",
          "node-b": "hash-shared-b",
        },
      },
      // "journal-shared" exists on both sides; "journal-drop-local" only here.
      runtimeBatchJournal: [
        { id: "journal-shared", processedRange: [0, 2], createdAt: 100 },
        { id: "journal-drop-local", processedRange: [4, 5], createdAt: 110 },
      ],
      runtimeLastRecallResult: { nodes: ["local-only"] },
      runtimeLastProcessedSeq: 2,
      runtimeGraphVersion: 10,
    },
    nodes: [
      { id: "node-a", updatedAt: 100 },
      { id: "node-b", updatedAt: 100 },
    ],
    edges: [],
    tombstones: [],
    state: {
      lastProcessedFloor: 6,
      extractionCount: 3,
    },
  };
  // Remote side: floors 1-5 processed with the conflicting "remote-h4",
  // a higher revision (10), and a lower graph version (7).
  const remote = {
    meta: {
      chatId: "chat-merge-meta",
      revision: 10,
      lastModified: 200,
      deviceId: "remote-device",
      schemaVersion: 1,
      runtimeHistoryState: {
        chatId: "chat-merge-meta",
        lastProcessedAssistantFloor: 5,
        extractionCount: 7,
        processedMessageHashes: {
          1: "h1",
          2: "h2",
          3: "h3",
          4: "remote-h4",
          5: "h5",
        },
      },
      runtimeVectorIndexState: {
        hashToNodeId: {
          "hash-remote-a": "node-a",
          "hash-shared-b": "node-b",
        },
        nodeToHash: {
          "node-a": "hash-remote-a",
          "node-b": "hash-shared-b",
        },
      },
      runtimeBatchJournal: [
        { id: "journal-shared", processedRange: [0, 3], createdAt: 210 },
        { id: "journal-drop-remote", processedRange: [3, 4], createdAt: 220 },
      ],
      runtimeLastRecallResult: { nodes: ["remote-only"] },
      runtimeLastProcessedSeq: 9,
      runtimeGraphVersion: 7,
    },
    nodes: [
      { id: "node-a", updatedAt: 200 },
      { id: "node-b", updatedAt: 200 },
    ],
    edges: [],
    tombstones: [],
    state: {
      lastProcessedFloor: 5,
      extractionCount: 2,
    },
  };
  const merged = mergeSnapshots(local, remote, { chatId: "chat-merge-meta" });
  // History: the hash conflict at floor 4 rolls the merged floor back to 3,
  // keeps only the agreed hashes for floors 1-3, and records the conflict.
  assert.equal(merged.state.lastProcessedFloor, 3, "冲突哈希楼层应触发保守回退");
  assert.equal(merged.state.extractionCount, 7);
  assert.deepEqual(Object.keys(merged.meta.runtimeHistoryState.processedMessageHashes), ["1", "2", "3"]);
  assert.equal(merged.meta.runtimeHistoryState.historyDirtyFrom, 4);
  assert.ok(String(merged.meta.runtimeHistoryState.lastMutationReason).includes("processed-hash-conflict@4"));
  // Vector index: only mappings identical on both sides survive; the
  // divergent node-a mapping is dropped and scheduled for replay.
  assert.equal(merged.meta.runtimeVectorIndexState.nodeToHash["node-a"], undefined);
  assert.equal(merged.meta.runtimeVectorIndexState.nodeToHash["node-b"], "hash-shared-b");
  assert.equal(merged.meta.runtimeVectorIndexState.hashToNodeId["hash-local-a"], undefined);
  assert.equal(merged.meta.runtimeVectorIndexState.hashToNodeId["hash-remote-a"], undefined);
  assert.equal(merged.meta.runtimeVectorIndexState.hashToNodeId["hash-shared-b"], "node-b");
  assert.equal(merged.meta.runtimeVectorIndexState.dirty, true);
  assert.ok(merged.meta.runtimeVectorIndexState.replayRequiredNodeIds.includes("node-a"));
  assert.equal(merged.meta.runtimeVectorIndexState.pendingRepairFromFloor, 3);
  // Journal: only the shared entry remains, taking the newer remote payload.
  assert.equal(merged.meta.runtimeBatchJournal.length, 1);
  assert.equal(merged.meta.runtimeBatchJournal[0].id, "journal-shared");
  assert.deepEqual(merged.meta.runtimeBatchJournal[0].processedRange, [0, 3]);
  // Recall cache is invalidated; seq takes max(2, 9); graph version is
  // bumped past both sides (max(10, 7) + 1).
  assert.equal(merged.meta.runtimeLastRecallResult, null);
  assert.equal(merged.meta.runtimeLastProcessedSeq, 9);
  assert.equal(merged.meta.runtimeGraphVersion, 11);
}
async function testSyncNowLockAndAutoSync() {
const { fetch, remoteFiles, logs } = createMockFetchEnvironment();
const dbByChatId = new Map();
@@ -518,6 +640,81 @@ async function testSyncNowRemoteReadErrorPath() {
assert.equal(result.reason, "http-error");
}
/**
 * Ensures syncNow invokes the optional onSyncApplied hook after both a
 * "download" (remote strictly newer, empty local) and a "merge" (dirty
 * local with a same-revision remote), recording the actions in order.
 */
async function testSyncAppliedHook() {
  const { fetch: mockFetch, remoteFiles } = createMockFetchEnvironment();
  const dbByChatId = new Map();
  const observedHooks = [];
  // Builds a local FakeDb with the shared meta shape used by both cases.
  const makeLocalDb = (chatId, { revision, lastModified, nodeCount, nodes, state }) =>
    new FakeDb(chatId, {
      meta: {
        schemaVersion: 1,
        chatId,
        revision,
        lastModified,
        deviceId: "",
        nodeCount,
        edgeCount: 0,
        tombstoneCount: 0,
      },
      nodes,
      edges: [],
      tombstones: [],
      state,
    });
  // Empty local db: the newer remote file below forces a pure download.
  dbByChatId.set(
    "chat-hook-download",
    makeLocalDb("chat-hook-download", {
      revision: 1,
      lastModified: 10,
      nodeCount: 0,
      nodes: [],
      state: { lastProcessedFloor: -1, extractionCount: 0 },
    }),
  );
  // Local db with content: marked dirty later so syncNow must merge.
  dbByChatId.set(
    "chat-hook-merge",
    makeLocalDb("chat-hook-merge", {
      revision: 4,
      lastModified: 20,
      nodeCount: 1,
      nodes: [{ id: "local-merge", updatedAt: 20 }],
      state: { lastProcessedFloor: 1, extractionCount: 1 },
    }),
  );
  remoteFiles.set("ST-BME_sync_chat-hook-download.json", {
    meta: {
      schemaVersion: 1,
      chatId: "chat-hook-download",
      revision: 3,
      lastModified: 30,
      deviceId: "remote",
      nodeCount: 1,
      edgeCount: 0,
      tombstoneCount: 0,
    },
    nodes: [{ id: "remote-download", updatedAt: 30 }],
    edges: [],
    tombstones: [],
    state: { lastProcessedFloor: 2, extractionCount: 1 },
  });
  remoteFiles.set("ST-BME_sync_chat-hook-merge.json", {
    meta: {
      schemaVersion: 1,
      chatId: "chat-hook-merge",
      revision: 4,
      lastModified: 25,
      deviceId: "remote",
      nodeCount: 1,
      edgeCount: 0,
      tombstoneCount: 0,
    },
    nodes: [{ id: "remote-merge", updatedAt: 25 }],
    edges: [],
    tombstones: [],
    state: { lastProcessedFloor: 3, extractionCount: 2 },
  });
  const runtimeWithHook = {
    ...buildRuntimeOptions({ dbByChatId, fetch: mockFetch }),
    onSyncApplied: async (payload) => observedHooks.push({ ...payload }),
  };
  const downloadResult = await syncNow("chat-hook-download", runtimeWithHook);
  assert.equal(downloadResult.action, "download");
  // Flag the merge db dirty so the same-revision remote triggers a merge.
  dbByChatId.get("chat-hook-merge").meta.set("syncDirty", true);
  const mergeResult = await syncNow("chat-hook-merge", runtimeWithHook);
  assert.equal(mergeResult.action, "merge");
  assert.deepEqual(observedHooks.map((call) => call.action), ["download", "merge"]);
}
async function main() {
console.log(`${PREFIX} debounce=${BME_SYNC_UPLOAD_DEBOUNCE_MS}`);
await testDeviceId();
@@ -525,10 +722,12 @@ async function main() {
await testUploadPayloadMetaFirstAndDebounce();
await testDownloadImport();
await testMergeRules();
await testMergeRuntimeMetaPolicies();
await testSyncNowLockAndAutoSync();
await testDeleteRemoteSyncFile();
await testAutoSyncOnVisibility();
await testSyncNowRemoteReadErrorPath();
await testSyncAppliedHook();
console.log("indexeddb-sync tests passed");
}

View File

@@ -24,6 +24,24 @@ const cleanDetection = detectHistoryMutation(chat, {
});
assert.equal(cleanDetection.dirty, false);
// With a processed floor recorded but no stored hashes at all, detection must
// flag the history dirty and replay conservatively from floor 0.
const noHashesDetection = detectHistoryMutation(chat, {
  lastProcessedAssistantFloor: 3,
  processedMessageHashes: {},
});
assert.equal(noHashesDetection.dirty, true);
assert.equal(noHashesDetection.earliestAffectedFloor, 0);
// A gap in the hash map (floor 1 missing) should pinpoint the first
// uncovered floor as the earliest affected one.
const gapHashesDetection = detectHistoryMutation(chat, {
  lastProcessedAssistantFloor: 3,
  processedMessageHashes: {
    0: hashes[0],
    2: hashes[2],
    3: hashes[3],
  },
});
assert.equal(gapHashesDetection.dirty, true);
assert.equal(gapHashesDetection.earliestAffectedFloor, 1);
const editedChat = structuredClone(chat);
editedChat[1].mes = "我改过内容了。";
const editedDetection = detectHistoryMutation(editedChat, {