fix: decouple the extraction pipeline from the vector pre-repair step

Youzini-afk
2026-03-24 23:19:18 +08:00
parent 826ef78f18
commit e15abec208
2 changed files with 80 additions and 21 deletions

View File

@@ -178,6 +178,7 @@ let isRecoveringHistory = false;
 let lastHistoryWarningAt = 0;
 let lastRecallFallbackNoticeAt = 0;
 let lastExtractionWarningAt = 0;
+const LOCAL_VECTOR_TIMEOUT_MS = 30000;

 function getNodeDisplayName(node) {
     return (
@@ -328,6 +329,29 @@ function notifyExtractionIssue(message, title = "ST-BME 提取提示") {
     toastr.warning(message, title, { timeOut: 4500 });
 }

+async function fetchLocalWithTimeout(url, options = {}, timeoutMs = LOCAL_VECTOR_TIMEOUT_MS) {
+    const controller = new AbortController();
+    const timeout = setTimeout(() => controller.abort(), timeoutMs);
+    let signal = controller.signal;
+    if (options.signal) {
+        if (typeof AbortSignal !== "undefined" && typeof AbortSignal.any === "function") {
+            signal = AbortSignal.any([options.signal, controller.signal]);
+        } else {
+            signal = controller.signal;
+            options.signal.addEventListener("abort", () => controller.abort(), { once: true });
+        }
+    }
+    try {
+        return await fetch(url, {
+            ...options,
+            signal,
+        });
+    } finally {
+        clearTimeout(timeout);
+    }
+}
+
 function snapshotRuntimeUiState() {
     return {
         extractionCount,
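Note: for reference, a minimal sketch of how the new helper is meant to be called. The wrapper name `purgeWithCancel` is hypothetical and not part of this commit; the caller-supplied signal is merged with the 30-second timeout inside fetchLocalWithTimeout, and whichever aborts first rejects the fetch with an AbortError.

// Hypothetical caller (illustrative only): combines its own cancellation
// with the helper's 30s default timeout.
async function purgeWithCancel(collectionId, externalSignal) {
    const response = await fetchLocalWithTimeout("/api/vector/purge", {
        method: "POST",
        headers: getRequestHeaders(),
        body: JSON.stringify({ collectionId }),
        signal: externalSignal, // merged with the timeout signal inside the helper
    });
    if (!response.ok) throw new Error(`vector purge failed: ${response.status}`);
    return response;
}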
@@ -1002,7 +1026,7 @@ function inspectHistoryMutation(trigger = "history-change") {

 async function purgeCurrentVectorCollection() {
     if (!currentGraph?.vectorIndexState?.collectionId) return;
-    const response = await fetch("/api/vector/purge", {
+    const response = await fetchLocalWithTimeout("/api/vector/purge", {
         method: "POST",
         headers: getRequestHeaders(),
         body: JSON.stringify({
@@ -1245,10 +1269,6 @@ async function runExtraction() {
     const settings = getSettings();
     if (!settings.enabled) return;
     if (!(await recoverHistoryIfNeeded("auto-extract"))) return;
-    const vectorPrep = await ensureVectorReadyIfNeeded("pre-extract");
-    if (vectorPrep?.error) {
-        notifyExtractionIssue(`提取前向量修复失败: ${vectorPrep.error}`);
-    }

     const context = getContext();
     const chat = context.chat;
@@ -1481,7 +1501,7 @@ async function onBeforeCombinePrompts() {
     await runRecall();
 }

-async function onMessageReceived() {
+function onMessageReceived() {
     // 新消息到达,图状态可能需要更新
     if (currentGraph) {
         saveGraphToChat();
@@ -1494,7 +1514,12 @@ async function onMessageReceived() {
         : null;

     if (isAssistantChatMessage(lastMessage)) {
-        await runExtraction();
+        queueMicrotask(() => {
+            void runExtraction().catch((error) => {
+                console.error("[ST-BME] 异步自动提取失败:", error);
+                notifyExtractionIssue(error?.message || String(error) || "自动提取失败");
+            });
+        });
     }
 }
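Note: the hunk above is the core of the decoupling. The handler is no longer async and no longer awaits extraction; the slow work is deferred with queueMicrotask so the event handler returns immediately, which means errors must be caught on the detached promise itself. A minimal, self-contained sketch of that fire-and-forget shape (names are illustrative, not from the codebase):

// Illustrative only: the general fire-and-forget shape used above.
function demoFireAndForget() {
    const doSlowWork = () => new Promise((resolve) => setTimeout(resolve, 1000));
    queueMicrotask(() => {
        void doSlowWork().catch((error) => {
            // Nothing upstream awaits this promise, so errors must be handled here.
            console.error("background task failed:", error);
        });
    });
    // Returns immediately; the slow work keeps running in the background.
}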
@@ -1729,7 +1754,6 @@ async function onManualExtract() {
         return;
     }

     if (!(await recoverHistoryIfNeeded("manual-extract"))) return;
-    const vectorPrep = await ensureVectorReadyIfNeeded("manual-extract");
     if (!currentGraph) currentGraph = normalizeGraphRuntimeState(createEmptyGraph(), getCurrentChatId());
     const context = getContext();
@@ -1757,10 +1781,6 @@ async function onManualExtract() {
     };

     const warnings = [];
-    if (vectorPrep?.error) {
-        warnings.push(`预检向量修复失败: ${vectorPrep.error}`);
-    }
-
     isExtracting = true;
     try {
         while (true) {

View File

@@ -33,6 +33,7 @@ const MODEL_LIST_ENDPOINTS = {
     nanogpt: "/api/openai/nanogpt/models/embedding",
     electronhub: "/api/openai/electronhub/models",
 };

+const VECTOR_REQUEST_TIMEOUT_MS = 30000;
 const BACKEND_STATUS_MODEL_SOURCES = {
     openai: "openai",
@@ -53,6 +54,44 @@ export const BACKEND_DEFAULT_MODELS = {
     vllm: "BAAI/bge-m3",
 };

+function createCombinedAbortSignal(...signals) {
+    const validSignals = signals.filter(Boolean);
+    if (validSignals.length <= 1) {
+        return validSignals[0] || undefined;
+    }
+    if (typeof AbortSignal !== "undefined" && typeof AbortSignal.any === "function") {
+        return AbortSignal.any(validSignals);
+    }
+    const controller = new AbortController();
+    for (const signal of validSignals) {
+        if (signal.aborted) {
+            controller.abort();
+            return controller.signal;
+        }
+        signal.addEventListener("abort", () => controller.abort(), { once: true });
+    }
+    return controller.signal;
+}
+
+async function fetchWithTimeout(url, options = {}, timeoutMs = VECTOR_REQUEST_TIMEOUT_MS) {
+    const controller = new AbortController();
+    const timeout = setTimeout(() => controller.abort(), timeoutMs);
+    const signal = options.signal
+        ? createCombinedAbortSignal(options.signal, controller.signal)
+        : controller.signal;
+    try {
+        return await fetch(url, {
+            ...options,
+            signal,
+        });
+    } finally {
+        clearTimeout(timeout);
+    }
+}
+
 export function normalizeOpenAICompatibleBaseUrl(value, autoSuffix = true) {
     let normalized = String(value || "")
         .trim()
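Note: a quick sanity sketch of the combined-signal behaviour added above, assuming a caller that wants to give up before the 30-second timeout (the demo function and the 5-second figure are illustrative, not part of this commit). Whichever source aborts first, the caller's controller or the internal timeout, ends the request with an AbortError.

// Illustrative only: caller cancellation and the 30s timeout race each other.
async function demoCombinedAbort(collectionId) {
    const caller = new AbortController();
    const giveUp = setTimeout(() => caller.abort(), 5000); // caller bails out after 5s
    try {
        return await fetchWithTimeout("/api/vector/purge", {
            method: "POST",
            headers: getRequestHeaders(),
            body: JSON.stringify({ collectionId }),
            signal: caller.signal,
        });
    } catch (error) {
        if (error?.name === "AbortError") return null; // cancelled or timed out
        throw error;
    } finally {
        clearTimeout(giveUp);
    }
}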
@@ -300,7 +339,7 @@ function computeVectorStats(graph, desiredEntries) {
 }

 async function purgeVectorCollection(collectionId) {
-    const response = await fetch("/api/vector/purge", {
+    const response = await fetchWithTimeout("/api/vector/purge", {
         method: "POST",
         headers: getRequestHeaders(),
         body: JSON.stringify({ collectionId }),
@@ -315,7 +354,7 @@ async function purgeVectorCollection(collectionId) {

 async function deleteVectorHashes(collectionId, config, hashes) {
     if (!Array.isArray(hashes) || hashes.length === 0) return;
-    const response = await fetch("/api/vector/delete", {
+    const response = await fetchWithTimeout("/api/vector/delete", {
         method: "POST",
         headers: getRequestHeaders(),
         body: JSON.stringify({
@@ -334,7 +373,7 @@ async function deleteVectorHashes(collectionId, config, hashes) {

 async function insertVectorEntries(collectionId, config, entries) {
     if (!Array.isArray(entries) || entries.length === 0) return;
-    const response = await fetch("/api/vector/insert", {
+    const response = await fetchWithTimeout("/api/vector/insert", {
         method: "POST",
         headers: getRequestHeaders(),
         body: JSON.stringify({
@@ -567,7 +606,7 @@ export async function findSimilarNodesByText(
     const validation = validateVectorConfig(config);
     if (!validation.valid) return [];

-    const response = await fetch("/api/vector/query", {
+    const response = await fetchWithTimeout("/api/vector/query", {
         method: "POST",
         headers: getRequestHeaders(),
         body: JSON.stringify({
@@ -618,7 +657,7 @@ export async function testVectorConnection(config, chatId = "connection-test") {
     }

     try {
-        const response = await fetch("/api/vector/query", {
+        const response = await fetchWithTimeout("/api/vector/query", {
            method: "POST",
            headers: getRequestHeaders(),
            body: JSON.stringify({
@@ -696,7 +735,7 @@ function normalizeModelOptions(items = [], { embeddingOnly = false } = {}) {
 }

 async function fetchJsonEndpoint(url, { method = "POST" } = {}) {
-    const response = await fetch(url, {
+    const response = await fetchWithTimeout(url, {
         method,
         headers: getRequestHeaders({ omitContentType: true }),
     });
@@ -719,7 +758,7 @@ async function fetchBackendStatusModelList(source) {
         throw new Error("当前后端向量源暂不支持自动拉取模型,请手动填写");
     }

-    const response = await fetch("/api/backends/chat-completions/status", {
+    const response = await fetchWithTimeout("/api/backends/chat-completions/status", {
         method: "POST",
         headers: getRequestHeaders(),
         body: JSON.stringify({
@@ -743,7 +782,7 @@ async function fetchOpenAICompatibleModelList(apiUrl, apiKey = "") {
         throw new Error("请先填写 API 地址");
     }

-    const response = await fetch(`${normalizedUrl}/models`, {
+    const response = await fetchWithTimeout(`${normalizedUrl}/models`, {
         method: "GET",
         headers: {
             ...(apiKey ? { Authorization: `Bearer ${apiKey}` } : {}),
@@ -764,7 +803,7 @@ async function fetchOllamaModelList(apiUrl) {
         throw new Error("请先填写 Ollama API 地址");
     }

-    const response = await fetch(`${normalizedUrl}/api/tags`, { method: "GET" });
+    const response = await fetchWithTimeout(`${normalizedUrl}/api/tags`, { method: "GET" });
     const payload = await response.json().catch(() => ({}));
     if (!response.ok) {
         throw new Error(payload?.error || payload?.message || response.statusText);
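Note: one consequence of routing the model-list calls through fetchWithTimeout is that a hung backend now surfaces as an AbortError after 30 seconds instead of blocking indefinitely. A hedged sketch of how a caller might translate that into a clearer message (the wrapper name is hypothetical, not part of this commit):

// Hypothetical wrapper (illustrative only): distinguish a timeout from a backend error.
async function tryFetchOllamaModels(apiUrl) {
    try {
        return await fetchOllamaModelList(apiUrl);
    } catch (error) {
        if (error?.name === "AbortError") {
            throw new Error("Ollama model list request timed out after 30s");
        }
        throw error;
    }
}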