Merge pull request #30 from Youzini-afk/dev

Dev
This commit is contained in:
youzini
2026-04-11 23:22:52 +08:00
committed by GitHub
15 changed files with 1250 additions and 87 deletions

View File

@@ -2,6 +2,206 @@ function normalizeLlmConfigValue(value) {
return String(value || "").trim();
}
// Display labels for every recognized OpenAI-compatible provider id.
// Keys mirror the ids produced by resolveKnownOpenAiCompatibleProviderId;
// the "custom-openai-compatible" entry doubles as the generic fallback label.
const OPENAI_COMPATIBLE_PROVIDER_LABELS = {
openai: "OpenAI",
openrouter: "OpenRouter",
deepseek: "DeepSeek",
xai: "xAI",
mistral: "Mistral",
moonshot: "Moonshot",
zai: "Z.AI",
groq: "Groq",
siliconflow: "SiliconFlow",
aimlapi: "AI/ML API",
fireworks: "Fireworks",
nanogpt: "NanoGPT",
chutes: "Chutes",
electronhub: "ElectronHub",
"volcengine-ark": "火山方舟 Ark",
"custom-openai-compatible": "自定义 OpenAI 兼容渠道",
};
// Parse a configured value as a URL after trimming; returns null when the
// value is empty or cannot be parsed instead of letting URL() throw.
function tryParseLlmUrl(value) {
  const normalized = normalizeLlmConfigValue(value);
  if (normalized === "") {
    return null;
  }
  let parsed = null;
  try {
    parsed = new URL(normalized);
  } catch {
    parsed = null;
  }
  return parsed;
}
// Serialize a parsed URL with its query string and fragment removed and any
// trailing slashes stripped; falsy input yields the empty string.
function normalizeParsedUrlString(parsedUrl) {
  if (!parsedUrl) {
    return "";
  }
  const stripped = new URL(parsedUrl.toString());
  stripped.hash = "";
  stripped.search = "";
  return stripped.toString().replace(/\/+$/, "");
}
// Drop a trailing OpenAI-style endpoint path (chat/completions,
// text/completions, completions, embeddings, models) and any trailing
// slashes, leaving the base URL string.
function stripOpenAiCompatibleEndpointSuffix(value) {
  const text = String(value || "");
  const withoutEndpoint = text.replace(
    /\/+((chat|text)\/completions|completions|embeddings|models)$/i,
    "",
  );
  return withoutEndpoint.replace(/\/+$/, "");
}
// Remove a trailing "/messages" endpoint segment (case-insensitive) and any
// trailing slashes from an Anthropic base URL string.
function stripAnthropicEndpointSuffix(value) {
  const text = String(value || "");
  return text.replace(/\/+messages$/i, "").replace(/\/+$/, "");
}
// Remove a trailing Google AI Studio endpoint such as
// "/v1beta/models" or "/v1beta/models/<model>:generateContent"
// (or :streamGenerateContent), plus any trailing slashes.
function stripGoogleAiStudioEndpointSuffix(value) {
  const endpointPattern =
    /\/+v\d+(?:beta)?\/models(?:\/[^/:?#]+:(?:streamGenerateContent|generateContent))?$/i;
  const text = String(value || "");
  return text.replace(endpointPattern, "").replace(/\/+$/, "");
}
// Map a parsed URL onto a known OpenAI-compatible provider id. Matchers are
// evaluated in priority order and the first hit wins; anything unrecognized
// (including a missing hostname) resolves to "custom-openai-compatible".
function resolveKnownOpenAiCompatibleProviderId(parsedUrl) {
  const hostname = String(parsedUrl?.hostname || "").trim().toLowerCase();
  const pathname = String(parsedUrl?.pathname || "").trim().toLowerCase();
  if (hostname === "") {
    return "custom-openai-compatible";
  }
  const matchers = [
    { id: "openai", test: () => hostname.includes("openai.com") },
    { id: "openrouter", test: () => hostname.includes("openrouter.ai") },
    { id: "deepseek", test: () => hostname.includes("deepseek.com") },
    {
      // Exact / suffix match: a bare substring check on "x.ai" would be
      // far too permissive.
      id: "xai",
      test: () =>
        hostname === "x.ai" || hostname === "api.x.ai" || hostname.endsWith(".x.ai"),
    },
    { id: "mistral", test: () => hostname.includes("mistral.ai") },
    { id: "moonshot", test: () => hostname.includes("moonshot.ai") },
    {
      id: "zai",
      test: () => hostname === "api.z.ai" || hostname.endsWith(".z.ai"),
    },
    { id: "groq", test: () => hostname.includes("groq.com") },
    { id: "siliconflow", test: () => hostname.includes("siliconflow.com") },
    { id: "aimlapi", test: () => hostname.includes("aimlapi.com") },
    { id: "fireworks", test: () => hostname.includes("fireworks.ai") },
    { id: "nanogpt", test: () => hostname.includes("nano-gpt.com") },
    { id: "chutes", test: () => hostname.includes("chutes.ai") },
    { id: "electronhub", test: () => hostname.includes("electronhub.ai") },
    {
      // Volcengine Ark is also recognized by its coding API path prefix.
      id: "volcengine-ark",
      test: () =>
        hostname.includes("volces.com") ||
        hostname.startsWith("ark.") ||
        pathname.includes("/api/coding/v3"),
    },
  ];
  const matched = matchers.find((entry) => entry.test());
  return matched ? matched.id : "custom-openai-compatible";
}
// Build a fully-populated resolved-provider record. Every field is present
// with an inert default; `overrides` replaces matching fields.
function createResolvedDedicatedProviderConfig(overrides = {}) {
  const defaults = {
    inputUrl: "",
    apiUrl: "",
    providerId: "",
    providerLabel: "",
    transportId: "",
    transportLabel: "",
    hostSource: "",
    hostSourceConst: "",
    routeMode: "",
    supportsModelFetch: false,
    statusStrategies: [],
    isKnownProvider: false,
    isOpenAiCompatible: false,
  };
  return Object.assign(defaults, overrides);
}
// Resolve a user-supplied LLM API URL into a full provider/transport profile:
// provider id + label, transport id + label, route mode ("custom" vs
// "reverse-proxy"), host source, status-probe strategies, and whether model
// listing is supported. Unrecognized or unparseable input falls back to the
// generic custom OpenAI-compatible channel.
export function resolveDedicatedLlmProviderConfig(value = "") {
const normalizedInput = normalizeLlmConfigValue(value);
if (!normalizedInput) {
// Empty input: return the all-defaults record (nothing resolved).
return createResolvedDedicatedProviderConfig();
}
const parsedUrl = tryParseLlmUrl(normalizedInput);
if (!parsedUrl) {
// Not parseable as a URL: treat the raw text (minus trailing slashes)
// as a custom OpenAI-compatible base URL.
return createResolvedDedicatedProviderConfig({
inputUrl: normalizedInput,
apiUrl: normalizedInput.replace(/\/+$/, ""),
providerId: "custom-openai-compatible",
providerLabel: OPENAI_COMPATIBLE_PROVIDER_LABELS["custom-openai-compatible"],
transportId: "dedicated-openai-compatible",
transportLabel: "专用 OpenAI 兼容接口",
hostSource: "custom",
hostSourceConst: "CUSTOM",
routeMode: "custom",
supportsModelFetch: true,
statusStrategies: ["custom", "openai-reverse-proxy"],
isKnownProvider: false,
isOpenAiCompatible: true,
});
}
const normalizedUrl = normalizeParsedUrlString(parsedUrl);
const hostname = String(parsedUrl.hostname || "").trim().toLowerCase();
if (hostname.includes("anthropic.com")) {
// Anthropic: routed via reverse proxy; no status strategies are offered
// and model listing is unsupported (supportsModelFetch: false).
const apiUrl = stripAnthropicEndpointSuffix(normalizedUrl) || normalizedUrl;
return createResolvedDedicatedProviderConfig({
inputUrl: normalizedInput,
apiUrl,
providerId: "anthropic-claude",
providerLabel: "Anthropic Claude",
transportId: "dedicated-anthropic-claude",
transportLabel: "Anthropic Claude 接口",
hostSource: "claude",
hostSourceConst: "CLAUDE",
routeMode: "reverse-proxy",
supportsModelFetch: false,
statusStrategies: [],
isKnownProvider: true,
isOpenAiCompatible: false,
});
}
if (hostname.includes("generativelanguage.googleapis.com")) {
// Google AI Studio / Gemini: reverse-proxy route with its own status
// strategy; model listing is supported.
const apiUrl = stripGoogleAiStudioEndpointSuffix(normalizedUrl) || normalizedUrl;
return createResolvedDedicatedProviderConfig({
inputUrl: normalizedInput,
apiUrl,
providerId: "google-ai-studio",
providerLabel: "Google AI Studio / Gemini",
transportId: "dedicated-google-ai-studio",
transportLabel: "Google AI Studio / Gemini 接口",
hostSource: "makersuite",
hostSourceConst: "MAKERSUITE",
routeMode: "reverse-proxy",
supportsModelFetch: true,
statusStrategies: ["makersuite-reverse-proxy"],
isKnownProvider: true,
isOpenAiCompatible: false,
});
}
// Everything else is treated as OpenAI-compatible; the provider id only
// affects the display label, not the transport.
const providerId = resolveKnownOpenAiCompatibleProviderId(parsedUrl);
const apiUrl = stripOpenAiCompatibleEndpointSuffix(normalizedUrl) || normalizedUrl;
return createResolvedDedicatedProviderConfig({
inputUrl: normalizedInput,
apiUrl,
providerId,
providerLabel:
OPENAI_COMPATIBLE_PROVIDER_LABELS[providerId] ||
OPENAI_COMPATIBLE_PROVIDER_LABELS["custom-openai-compatible"],
transportId: "dedicated-openai-compatible",
transportLabel: "专用 OpenAI 兼容接口",
hostSource: "custom",
hostSourceConst: "CUSTOM",
routeMode: "custom",
supportsModelFetch: true,
statusStrategies: ["custom", "openai-reverse-proxy"],
isKnownProvider: providerId !== "custom-openai-compatible",
isOpenAiCompatible: true,
});
}
export function createLlmConfigSnapshot(source = {}) {
return {
llmApiUrl: normalizeLlmConfigValue(source?.llmApiUrl),

View File

@@ -6,7 +6,10 @@ import { extension_settings } from "../../../../extensions.js";
import { chat_completion_sources, sendOpenAIRequest } from "../../../../openai.js";
import { debugLog, debugWarn } from "../runtime/debug-logging.js";
import { resolveTaskGenerationOptions } from "../runtime/generation-options.js";
import { resolveLlmConfigSelection } from "./llm-preset-utils.js";
import {
resolveDedicatedLlmProviderConfig,
resolveLlmConfigSelection,
} from "./llm-preset-utils.js";
import { getActiveTaskProfile } from "../prompting/prompt-profiles.js";
import { resolveConfiguredTimeoutMs } from "../runtime/request-timeout.js";
import { applyTaskRegex } from "../prompting/task-regex.js";
@@ -206,11 +209,27 @@ function getMemoryLLMConfig(taskType = "") {
? activeProfile.generation.llm_preset
: "";
const selection = resolveLlmConfigSelection(settings, selectedPresetName);
const resolvedProvider = resolveDedicatedLlmProviderConfig(
selection.config?.llmApiUrl,
);
return {
apiUrl: normalizeOpenAICompatibleBaseUrl(selection.config?.llmApiUrl),
inputApiUrl: resolvedProvider.inputUrl || "",
apiUrl: resolvedProvider.apiUrl || "",
apiKey: String(selection.config?.llmApiKey || "").trim(),
model: String(selection.config?.llmModel || "").trim(),
timeoutMs: getConfiguredTimeoutMs(settings),
llmProvider: resolvedProvider.providerId || "",
llmProviderLabel: resolvedProvider.providerLabel || "",
llmTransport: resolvedProvider.transportId || "",
llmTransportLabel: resolvedProvider.transportLabel || "",
llmRouteMode: resolvedProvider.routeMode || "",
llmHostSource: resolvedProvider.hostSource || "",
llmHostSourceConst: resolvedProvider.hostSourceConst || "",
llmSupportsModelFetch: resolvedProvider.supportsModelFetch === true,
llmStatusStrategies: Array.isArray(resolvedProvider.statusStrategies)
? [...resolvedProvider.statusStrategies]
: [],
llmChannel: resolvedProvider,
llmConfigSource: selection.source || "global",
llmConfigSourceLabel: formatLlmConfigSourceLabel(selection.source),
llmPresetName: selection.presetName || "",
@@ -418,6 +437,7 @@ function buildEffectiveLlmRoute(
hasDedicatedConfig,
privateRequestSource,
taskType = "",
config = null,
) {
const dedicated = Boolean(hasDedicatedConfig);
return {
@@ -425,8 +445,18 @@ function buildEffectiveLlmRoute(
requestSource: String(privateRequestSource || "").trim(),
llm: dedicated ? "dedicated-memory-llm" : "sillytavern-current-model",
transport: dedicated
? "dedicated-openai-compatible"
? String(config?.llmTransport || "dedicated-openai-compatible")
: "sillytavern-current-model",
transportLabel: dedicated
? String(
config?.llmTransportLabel || config?.llmProviderLabel || "专用记忆模型",
)
: "酒馆当前模型",
provider: dedicated ? String(config?.llmProvider || "") : "",
providerLabel: dedicated ? String(config?.llmProviderLabel || "") : "",
routeMode: dedicated ? String(config?.llmRouteMode || "") : "",
inputApiUrl: dedicated ? String(config?.inputApiUrl || "") : "",
apiUrl: dedicated ? String(config?.apiUrl || "") : "",
};
}
@@ -665,10 +695,8 @@ function buildResponseErrorMessage(response, responseText = "") {
}
function normalizeOpenAICompatibleBaseUrl(value) {
return String(value || "")
.trim()
.replace(/\/+(chat\/completions|embeddings)$/i, "")
.replace(/\/+$/, "");
const resolved = resolveDedicatedLlmProviderConfig(value);
return resolved.apiUrl || String(value || "").trim().replace(/\/+$/, "");
}
function hasDedicatedLLMConfig(config = getMemoryLLMConfig()) {
@@ -739,28 +767,87 @@ function buildDedicatedAuthHeaderString(apiKey = "") {
return normalized ? `Authorization: Bearer ${normalized}` : "";
}
function buildDedicatedStatusRequestVariants(config = getMemoryLLMConfig()) {
const customVariant = {
// Translate a chat_completion_sources constant name (e.g. "OPENAI",
// "MAKERSUITE") into its runtime value, falling back to the supplied literal
// when the constant map is unavailable or does not carry the key.
//
// Fix: guard the lookup with Object.hasOwn so inherited keys such as
// "constructor" or "toString" cannot leak from the prototype chain and be
// returned (stringified) instead of the intended fallback.
function resolveChatCompletionSourceValue(sourceConst = "", fallback = "") {
  const normalizedConst = String(sourceConst || "").trim();
  if (
    normalizedConst &&
    chat_completion_sources &&
    typeof chat_completion_sources === "object" &&
    Object.hasOwn(chat_completion_sources, normalizedConst) &&
    chat_completion_sources[normalizedConst]
  ) {
    return String(chat_completion_sources[normalizedConst]).trim();
  }
  return String(fallback || "").trim();
}
// Status-probe request variant that uses SillyTavern's "custom" chat
// completion source: the dedicated endpoint URL plus a bearer-auth header
// string (reverse_proxy mirrors the URL; no proxy password).
function buildDedicatedCustomStatusVariant(config = getMemoryLLMConfig()) {
  const body = {
    chat_completion_source: resolveChatCompletionSourceValue("CUSTOM", "custom"),
    custom_url: config.apiUrl,
    custom_include_headers: buildDedicatedAuthHeaderString(config.apiKey),
    reverse_proxy: config.apiUrl,
    proxy_password: "",
  };
  return { mode: "custom", body };
}
const legacyOpenAiVariant = {
mode: "openai-reverse-proxy",
// Status-probe request variant that routes through SillyTavern's
// reverse-proxy path for the given chat completion source; the API key is
// sent as the proxy password.
function buildDedicatedReverseProxyStatusVariant(
  mode,
  sourceConst,
  fallbackSource,
  config = getMemoryLLMConfig(),
) {
  const source = resolveChatCompletionSourceValue(sourceConst, fallbackSource);
  const body = {
    chat_completion_source: source,
    reverse_proxy: config.apiUrl,
    proxy_password: config.apiKey || "",
  };
  return { mode, body };
}
return [customVariant, legacyOpenAiVariant];
// Expand the resolved provider's status strategies into concrete request
// variants, preserving strategy order while dropping unknown strategies and
// duplicate modes. Defaults to the legacy custom + openai probe order when
// no strategy list is configured.
function buildDedicatedStatusRequestVariants(config = getMemoryLLMConfig()) {
  const strategies = Array.isArray(config.llmStatusStrategies)
    ? config.llmStatusStrategies
    : ["custom", "openai-reverse-proxy"];
  // Dispatch table: strategy name -> variant builder.
  const builders = new Map([
    ["custom", () => buildDedicatedCustomStatusVariant(config)],
    [
      "openai-reverse-proxy",
      () =>
        buildDedicatedReverseProxyStatusVariant(
          "openai-reverse-proxy",
          "OPENAI",
          "openai",
          config,
        ),
    ],
    [
      "makersuite-reverse-proxy",
      () =>
        buildDedicatedReverseProxyStatusVariant(
          "makersuite-reverse-proxy",
          "MAKERSUITE",
          "makersuite",
          config,
        ),
    ],
  ]);
  const seenModes = new Set();
  const variants = [];
  for (const strategy of strategies) {
    const variant = builders.get(strategy)?.();
    if (!variant?.mode || seenModes.has(variant.mode)) {
      continue;
    }
    seenModes.add(variant.mode);
    variants.push(variant);
  }
  return variants;
}
async function requestDedicatedStatusModels(
@@ -1444,6 +1531,64 @@ async function executeDedicatedRequest(
}
}
// True when the dedicated request must be forced to non-streaming: the
// reverse-proxy route for the "claude" and "makersuite" host sources does
// not use the streaming path.
function shouldForceDedicatedNonStream(config = getMemoryLLMConfig()) {
  const routeMode = String(config.llmRouteMode || "").trim();
  if (routeMode !== "reverse-proxy") {
    return false;
  }
  const hostSource = String(config.llmHostSource || "").trim().toLowerCase();
  return hostSource === "claude" || hostSource === "makersuite";
}
// Assemble the SillyTavern backend request body for a dedicated LLM call.
// Common OpenAI-style sampling fields are always present; route-specific
// fields depend on config.llmRouteMode:
//   - "reverse-proxy": chat_completion_source from the host-source constant,
//     reverse_proxy URL + proxy_password, optional json_schema for JSON mode.
//   - anything else ("custom"): CUSTOM source with custom_url, a YAML header
//     block carrying the bearer token, and (when the module-level
//     _jsonModeSupported flag is set) a response_format body override.
function buildDedicatedRequestBody(
config,
transportMessages,
filteredGeneration,
resolvedCompletionTokens,
{ jsonMode = false } = {},
) {
// Missing/blank route mode falls back to "custom" (the `|| "custom"` after
// trim() catches whitespace-only values).
const routeMode = String(config?.llmRouteMode || "custom").trim() || "custom";
const body = {
model: config.model,
messages: transportMessages,
temperature: filteredGeneration.temperature ?? 1,
max_tokens: resolvedCompletionTokens,
stream: filteredGeneration.stream ?? false,
frequency_penalty: filteredGeneration.frequency_penalty ?? 0,
presence_penalty: filteredGeneration.presence_penalty ?? 0,
top_p: filteredGeneration.top_p ?? 1,
};
if (routeMode === "reverse-proxy") {
body.chat_completion_source = resolveChatCompletionSourceValue(
config.llmHostSourceConst,
config.llmHostSource || "custom",
);
body.reverse_proxy = config.apiUrl;
body.proxy_password = config.apiKey || "";
if (jsonMode) {
// NOTE(review): reverse-proxy JSON mode uses json_schema regardless of
// _jsonModeSupported — presumably intentional; confirm against backend.
body.json_schema = createGenericJsonSchema();
}
} else {
body.chat_completion_source = resolveChatCompletionSourceValue("CUSTOM", "custom");
body.custom_url = config.apiUrl;
body.custom_include_headers = config.apiKey
? buildYamlObject({
Authorization: `Bearer ${config.apiKey}`,
})
: "";
if (jsonMode && _jsonModeSupported) {
body.custom_include_body = buildYamlObject({
response_format: {
type: "json_object",
},
});
}
}
return body;
}
async function callDedicatedOpenAICompatible(
messages,
{
@@ -1487,8 +1632,15 @@ async function callDedicatedOpenAICompatible(
};
const taskKey = taskType || privateRequestSource;
const initialFilteredGeneration = generationResolved.filtered || {};
const filteredGeneration = {
...initialFilteredGeneration,
};
const forceNonStream = hasDedicatedConfig && shouldForceDedicatedNonStream(config);
if (forceNonStream && filteredGeneration.stream === true) {
filteredGeneration.stream = false;
}
const streamRequested =
hasDedicatedConfig && initialFilteredGeneration.stream === true;
hasDedicatedConfig && filteredGeneration.stream === true;
const streamState = createStreamDebugState({
requested: streamRequested,
});
@@ -1499,10 +1651,17 @@ async function callDedicatedOpenAICompatible(
jsonMode,
dedicatedConfig: hasDedicatedConfig,
route: hasDedicatedConfig
? "dedicated-openai-compatible"
? config.llmTransport || "dedicated-openai-compatible"
: "sillytavern-current-model",
routeLabel: hasDedicatedConfig ? config.llmTransportLabel || "" : "酒馆当前模型",
model: hasDedicatedConfig ? config.model : "sillytavern-current-model",
inputApiUrl: hasDedicatedConfig ? config.inputApiUrl || "" : "",
apiUrl: hasDedicatedConfig ? config.apiUrl : "",
llmProvider: config.llmProvider || "",
llmProviderLabel: config.llmProviderLabel || "",
llmTransport: config.llmTransport || "",
llmTransportLabel: config.llmTransportLabel || "",
llmRouteMode: config.llmRouteMode || "",
llmConfigSource: config.llmConfigSource || "global",
llmConfigSourceLabel: config.llmConfigSourceLabel || "",
llmPresetName: config.llmPresetName || "",
@@ -1511,13 +1670,15 @@ async function callDedicatedOpenAICompatible(
messages,
transportMessages,
generation: generationResolved.generation || {},
filteredGeneration: generationResolved.filtered || {},
filteredGeneration,
removedGeneration: generationResolved.removed || [],
capabilityMode: generationResolved.capabilityMode || "",
streamForceDisabled: forceNonStream,
effectiveRoute: buildEffectiveLlmRoute(
hasDedicatedConfig,
privateRequestSource,
taskType,
config,
),
maxCompletionTokens,
...buildStreamDebugSnapshot(streamState),
@@ -1546,30 +1707,19 @@ async function callDedicatedOpenAICompatible(
: jsonMode
? DEFAULT_JSON_COMPLETION_TOKENS
: DEFAULT_TEXT_COMPLETION_TOKENS;
const filteredGeneration = generationResolved.filtered || {};
const resolvedCompletionTokens = Number.isFinite(
filteredGeneration.max_completion_tokens,
)
? filteredGeneration.max_completion_tokens
: completionTokens;
const body = {
chat_completion_source: chat_completion_sources.CUSTOM,
custom_url: config.apiUrl,
custom_include_headers: config.apiKey
? buildYamlObject({
Authorization: `Bearer ${config.apiKey}`,
})
: "",
model: config.model,
messages: transportMessages,
temperature: filteredGeneration.temperature ?? 1,
max_tokens: resolvedCompletionTokens,
stream: filteredGeneration.stream ?? false,
frequency_penalty: filteredGeneration.frequency_penalty ?? 0,
presence_penalty: filteredGeneration.presence_penalty ?? 0,
top_p: filteredGeneration.top_p ?? 1,
};
const body = buildDedicatedRequestBody(
config,
transportMessages,
filteredGeneration,
resolvedCompletionTokens,
{ jsonMode },
);
const optionalGenerationFields = [
"top_p",
@@ -1596,12 +1746,8 @@ async function callDedicatedOpenAICompatible(
body[field] = filteredGeneration[field];
}
if (jsonMode && _jsonModeSupported) {
body.custom_include_body = buildYamlObject({
response_format: {
type: "json_object",
},
});
if (Object.prototype.hasOwnProperty.call(filteredGeneration, "request_thoughts")) {
body.include_reasoning = Boolean(filteredGeneration.request_thoughts);
}
recordTaskLlmRequest(taskKey, {
@@ -1609,9 +1755,16 @@ async function callDedicatedOpenAICompatible(
taskType: String(taskType || "").trim(),
jsonMode,
dedicatedConfig: true,
route: "dedicated-openai-compatible",
route: config.llmTransport || "dedicated-openai-compatible",
routeLabel: config.llmTransportLabel || "",
model: config.model,
inputApiUrl: config.inputApiUrl || "",
apiUrl: config.apiUrl,
llmProvider: config.llmProvider || "",
llmProviderLabel: config.llmProviderLabel || "",
llmTransport: config.llmTransport || "",
llmTransportLabel: config.llmTransportLabel || "",
llmRouteMode: config.llmRouteMode || "",
llmConfigSource: config.llmConfigSource || "global",
llmConfigSourceLabel: config.llmConfigSourceLabel || "",
llmPresetName: config.llmPresetName || "",
@@ -1624,10 +1777,12 @@ async function callDedicatedOpenAICompatible(
removedGeneration: generationResolved.removed || [],
capabilityMode: generationResolved.capabilityMode || "",
resolvedCompletionTokens,
streamForceDisabled: forceNonStream,
effectiveRoute: buildEffectiveLlmRoute(
true,
privateRequestSource,
taskType,
config,
),
requestBody: body,
...buildStreamDebugSnapshot(streamState),
@@ -1983,7 +2138,7 @@ export async function callLLM(systemPrompt, userPrompt, options = {}) {
export async function testLLMConnection() {
const config = getMemoryLLMConfig();
const mode = hasDedicatedLLMConfig(config)
? `dedicated:${config.model}`
? `dedicated:${config.llmProviderLabel || config.llmTransportLabel || config.model}:${config.model}`
: "sillytavern-current-model";
try {
@@ -2013,7 +2168,22 @@ export async function fetchMemoryLLMModels() {
};
}
if (config.llmSupportsModelFetch !== true) {
return {
success: false,
models: [],
error: `${config.llmProviderLabel || "当前渠道"} 暂不支持自动拉取模型,请手动填写模型名`,
};
}
const variants = buildDedicatedStatusRequestVariants(config);
if (!variants.length) {
return {
success: false,
models: [],
error: `${config.llmProviderLabel || "当前渠道"} 暂无可用的模型探测策略,请手动填写模型名`,
};
}
const errors = [];
try {

View File

@@ -6,6 +6,6 @@
"js": "index.js",
"css": "style.css",
"author": "Youzini",
"version": "4.6.2",
"version": "4.6.6",
"homePage": "https://github.com/Youzini-afk/ST-Bionic-Memory-Ecology"
}

View File

@@ -3977,6 +3977,11 @@
overflow: auto;
}
#bme-mobile-graph-pane {
overflow: hidden;
overscroll-behavior: contain;
}
.bme-mobile-graph-pane.active {
display: flex;
}

View File

@@ -0,0 +1,29 @@
// Unit tests for the pure graph-renderer helpers: canvas-size gating and
// point remapping between two rectangles. Bare asserts; throws on failure.
import assert from "node:assert/strict";
import {
isUsableGraphCanvasSize,
remapPositionBetweenRects,
} from "../ui/graph-renderer-utils.js";
// Sizes below the 48px default threshold (on either axis) are unusable.
assert.equal(isUsableGraphCanvasSize(0, 0), false);
assert.equal(isUsableGraphCanvasSize(47, 120), false);
assert.equal(isUsableGraphCanvasSize(120, 47), false);
assert.equal(isUsableGraphCanvasSize(48, 48), true);
assert.equal(isUsableGraphCanvasSize(320, 180), true);
// Midpoint of the old rect maps to the midpoint of the new rect.
assert.deepEqual(
remapPositionBetweenRects(60, 35, { x: 10, y: 10, w: 100, h: 50 }, { x: 20, y: 20, w: 200, h: 100 }),
{ x: 120, y: 70 },
);
// Out-of-rect points clamp to the new rect's edges.
assert.deepEqual(
remapPositionBetweenRects(-50, 300, { x: 10, y: 10, w: 100, h: 50 }, { x: 20, y: 20, w: 200, h: 100 }),
{ x: 20, y: 120 },
);
// Missing rects pass the point through unchanged.
assert.deepEqual(
remapPositionBetweenRects(42, 84, null, { x: 20, y: 20, w: 200, h: 100 }),
{ x: 42, y: 84 },
);
console.log("graph-renderer-utils tests passed");

View File

@@ -110,6 +110,23 @@ async function withModelFetchSettings(run) {
}
}
// Like withModelFetchSettings, but layers test-specific setting overrides on
// top of the baseline model-fetch settings. Snapshots the current extension
// settings (deep copy via JSON round-trip) and always restores them, even
// when `run` throws.
async function withModelFetchSettingsOverrides(overrides, run) {
const previousSettings = JSON.parse(
JSON.stringify(extensionsApi.extension_settings.st_bme || {}),
);
extensionsApi.extension_settings.st_bme = {
...previousSettings,
...buildModelFetchSettings(),
...(overrides || {}),
};
try {
await run();
} finally {
extensionsApi.extension_settings.st_bme = previousSettings;
}
}
async function testFetchMemoryModelsUsesCustomStatusFirst() {
const originalFetch = globalThis.fetch;
const seenBodies = [];
@@ -238,8 +255,70 @@ async function testFetchMemoryModelsParsesNestedPayload() {
}
}
// Verify that a Google AI Studio URL makes fetchMemoryLLMModels use the
// makersuite reverse-proxy status route (single request, API key sent as the
// proxy password) and that the returned model ids are normalized.
async function testFetchMemoryModelsUsesGoogleStatusRoute() {
const originalFetch = globalThis.fetch;
const seenBodies = [];
// Stub fetch: capture each request body and answer with one Gemini model.
globalThis.fetch = async (_url, options = {}) => {
seenBodies.push(JSON.parse(String(options.body || "{}")));
return new Response(
JSON.stringify({
data: [{ id: "gemini-2.5-pro" }],
}),
{
status: 200,
headers: {
"Content-Type": "application/json",
},
},
);
};
try {
await withModelFetchSettingsOverrides(
{
llmApiUrl:
"https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-pro:generateContent",
llmApiKey: "gemini-secret",
},
async () => {
const result = await llm.fetchMemoryLLMModels();
assert.equal(result.success, true);
assert.deepEqual(result.models, [
{ id: "gemini-2.5-pro", label: "gemini-2.5-pro" },
]);
// Exactly one probe: the makersuite route, with the endpoint suffix
// stripped down to the base host.
assert.equal(seenBodies.length, 1);
assert.equal(seenBodies[0].chat_completion_source, "makersuite");
assert.equal(seenBodies[0].reverse_proxy, "https://generativelanguage.googleapis.com");
assert.equal(seenBodies[0].proxy_password, "gemini-secret");
},
);
} finally {
globalThis.fetch = originalFetch;
}
}
// Anthropic does not support model listing; fetchMemoryLLMModels must fail
// fast with a provider-labelled message telling the user to enter the model
// name manually (no network request is expected).
async function testFetchMemoryModelsReturnsHelpfulMessageForAnthropic() {
await withModelFetchSettingsOverrides(
{
llmApiUrl: "https://api.anthropic.com/v1/messages",
llmApiKey: "anthropic-secret",
llmModel: "claude-sonnet-4-5",
},
async () => {
const result = await llm.fetchMemoryLLMModels();
assert.equal(result.success, false);
assert.equal(result.models.length, 0);
assert.match(result.error, /Anthropic Claude/);
assert.match(result.error, /手动填写模型名/);
},
);
}
await testFetchMemoryModelsUsesCustomStatusFirst();
await testFetchMemoryModelsFallsBackToLegacyStatus();
await testFetchMemoryModelsParsesNestedPayload();
await testFetchMemoryModelsUsesGoogleStatusRoute();
await testFetchMemoryModelsReturnsHelpfulMessageForAnthropic();
console.log("llm-model-fetch tests passed");

View File

@@ -5,6 +5,7 @@ import {
isSameLlmConfigSnapshot,
isUsableLlmConfigSnapshot,
normalizeLlmPresetMap,
resolveDedicatedLlmProviderConfig,
resolveLlmConfigSelection,
resolveActiveLlmPresetName,
sanitizeLlmPresetSettings,
@@ -226,4 +227,31 @@ assert.deepEqual(invalidTaskPresetSelection.config, {
llmModel: "model-global",
});
const arkProvider = resolveDedicatedLlmProviderConfig(
"https://ark.cn-beijing.volces.com/api/coding/v3/chat/completions",
);
assert.equal(arkProvider.providerId, "volcengine-ark");
assert.equal(arkProvider.transportId, "dedicated-openai-compatible");
assert.equal(arkProvider.routeMode, "custom");
assert.equal(arkProvider.apiUrl, "https://ark.cn-beijing.volces.com/api/coding/v3");
assert.equal(arkProvider.supportsModelFetch, true);
const anthropicProvider = resolveDedicatedLlmProviderConfig(
"https://api.anthropic.com/v1/messages",
);
assert.equal(anthropicProvider.providerId, "anthropic-claude");
assert.equal(anthropicProvider.transportId, "dedicated-anthropic-claude");
assert.equal(anthropicProvider.routeMode, "reverse-proxy");
assert.equal(anthropicProvider.apiUrl, "https://api.anthropic.com/v1");
assert.equal(anthropicProvider.supportsModelFetch, false);
const geminiProvider = resolveDedicatedLlmProviderConfig(
"https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-pro:generateContent",
);
assert.equal(geminiProvider.providerId, "google-ai-studio");
assert.equal(geminiProvider.transportId, "dedicated-google-ai-studio");
assert.equal(geminiProvider.routeMode, "reverse-proxy");
assert.equal(geminiProvider.apiUrl, "https://generativelanguage.googleapis.com");
assert.equal(geminiProvider.supportsModelFetch, true);
console.log("llm-preset-utils tests passed");

View File

@@ -83,7 +83,7 @@ if (originalSendOpenAIRequest === undefined) {
globalThis.__llmStreamingSendOpenAIRequest = originalSendOpenAIRequest;
}
function buildStreamingSettings(generation = {}) {
function buildStreamingSettings(generation = {}, overrides = {}) {
const taskProfiles = createDefaultTaskProfiles();
taskProfiles.extract.profiles[0].generation = {
...taskProfiles.extract.profiles[0].generation,
@@ -96,6 +96,7 @@ function buildStreamingSettings(generation = {}) {
timeoutMs: 1234,
taskProfilesVersion: 3,
taskProfiles,
...(overrides || {}),
};
}
@@ -125,13 +126,13 @@ function getSnapshot(taskKey = "extract") {
return globalThis.__stBmeRuntimeDebugState?.taskLlmRequests?.[taskKey] || null;
}
async function withStreamingSettings(generation, run) {
async function withStreamingSettings(generation, run, overrides = {}) {
const previousSettings = JSON.parse(
JSON.stringify(extensionsApi.extension_settings.st_bme || {}),
);
extensionsApi.extension_settings.st_bme = {
...previousSettings,
...buildStreamingSettings(generation),
...buildStreamingSettings(generation, overrides),
};
delete globalThis.__stBmeRuntimeDebugState;
@@ -415,9 +416,72 @@ async function testJsonRetryKeepsProfileCompletionTokens() {
}
}
// End-to-end check of the Anthropic dedicated route: even with streaming
// requested in the profile, the request must go out via the claude
// reverse-proxy source with stream forced off, carry a json_schema in JSON
// mode, and record the forced-non-stream state in the debug snapshot.
async function testAnthropicRouteUsesReverseProxyAndDisablesStreaming() {
const originalFetch = globalThis.fetch;
let requestBody = null;
// Stub fetch: capture the outgoing body and answer with a minimal
// OpenAI-shaped JSON completion.
globalThis.fetch = async (_url, options = {}) => {
requestBody = JSON.parse(String(options.body || "{}"));
return new Response(
JSON.stringify({
choices: [
{
message: {
content: '{"ok":true}',
},
finish_reason: "stop",
},
],
}),
{
status: 200,
headers: {
"Content-Type": "application/json",
},
},
);
};
try {
await withStreamingSettings(
{ stream: true },
async () => {
const result = await llm.callLLMForJSON({
systemPrompt: "system",
userPrompt: "user",
maxRetries: 0,
taskType: "extract",
requestSource: "test:anthropic-route",
});
assert.deepEqual(result, { ok: true });
// Reverse-proxy routing with the endpoint suffix stripped.
assert.equal(requestBody?.chat_completion_source, "claude");
assert.equal(requestBody?.reverse_proxy, "https://api.anthropic.com/v1");
assert.equal(requestBody?.proxy_password, "sk-stream-secret");
// Streaming must have been force-disabled for this host source.
assert.equal(requestBody?.stream, false);
assert.ok(requestBody?.json_schema);
const snapshot = getSnapshot("extract");
assert.ok(snapshot);
assert.equal(snapshot.route, "dedicated-anthropic-claude");
assert.equal(snapshot.llmProviderLabel, "Anthropic Claude");
assert.equal(snapshot.streamRequested, false);
assert.equal(snapshot.streamForceDisabled, true);
},
{
llmApiUrl: "https://api.anthropic.com/v1/messages",
llmModel: "claude-sonnet-4-5",
},
);
} finally {
globalThis.fetch = originalFetch;
}
}
await testDedicatedStreamingSuccess();
await testDedicatedStreamingFallsBackToNonStream();
await testDedicatedStreamingAbortDoesNotLeaveActiveState();
await testJsonRetryKeepsProfileCompletionTokens();
await testAnthropicRouteUsesReverseProxyAndDisablesStreaming();
console.log("llm-streaming tests passed");

View File

@@ -0,0 +1,186 @@
// Unit tests for panel graph refresh helpers: workspace-mode resolution
// (hidden / desktop:* / mobile:*) and the refresh-token builder whose value
// must change whenever any input that affects the visible graph changes.
import assert from "node:assert/strict";
import {
buildVisibleGraphRefreshToken,
resolveVisibleGraphWorkspaceMode,
} from "../ui/panel-graph-refresh-utils.js";
// Overlay inactive or a non-graph tab resolves to "hidden".
assert.equal(
resolveVisibleGraphWorkspaceMode({
overlayActive: false,
isMobile: false,
currentTabId: "dashboard",
currentGraphView: "graph",
}),
"hidden",
);
assert.equal(
resolveVisibleGraphWorkspaceMode({
overlayActive: true,
isMobile: false,
currentTabId: "config",
currentGraphView: "graph",
}),
"hidden",
);
// Desktop modes follow currentGraphView regardless of the active tab.
assert.equal(
resolveVisibleGraphWorkspaceMode({
overlayActive: true,
isMobile: false,
currentTabId: "dashboard",
currentGraphView: "graph",
}),
"desktop:graph",
);
assert.equal(
resolveVisibleGraphWorkspaceMode({
overlayActive: true,
isMobile: false,
currentTabId: "memory",
currentGraphView: "cognition",
}),
"desktop:cognition",
);
assert.equal(
resolveVisibleGraphWorkspaceMode({
overlayActive: true,
isMobile: false,
currentTabId: "actions",
currentGraphView: "summary",
}),
"desktop:summary",
);
// Mobile only shows a graph when the "graph" tab itself is active.
assert.equal(
resolveVisibleGraphWorkspaceMode({
overlayActive: true,
isMobile: true,
currentTabId: "dashboard",
currentMobileGraphView: "graph",
}),
"hidden",
);
assert.equal(
resolveVisibleGraphWorkspaceMode({
overlayActive: true,
isMobile: true,
currentTabId: "graph",
currentMobileGraphView: "graph",
}),
"mobile:graph",
);
assert.equal(
resolveVisibleGraphWorkspaceMode({
overlayActive: true,
isMobile: true,
currentTabId: "graph",
currentMobileGraphView: "cognition",
}),
"mobile:cognition",
);
assert.equal(
resolveVisibleGraphWorkspaceMode({
overlayActive: true,
isMobile: true,
currentTabId: "graph",
currentMobileGraphView: "summary",
}),
"mobile:summary",
);
// A hidden workspace always yields the sentinel token "hidden".
assert.equal(
buildVisibleGraphRefreshToken({
visibleMode: "hidden",
chatId: "chat-main",
loadState: "loaded",
revision: 12,
nodeCount: 40,
edgeCount: 55,
lastProcessedSeq: 9,
}),
"hidden",
);
// The token is stable for identical inputs...
const baseToken = buildVisibleGraphRefreshToken({
visibleMode: "desktop:graph",
chatId: "chat-main",
loadState: "loaded",
revision: 12,
nodeCount: 40,
edgeCount: 55,
lastProcessedSeq: 9,
});
assert.equal(
baseToken,
buildVisibleGraphRefreshToken({
visibleMode: "desktop:graph",
chatId: "chat-main",
loadState: "loaded",
revision: 12,
nodeCount: 40,
edgeCount: 55,
lastProcessedSeq: 9,
}),
);
// ...and changes when revision, mode, chat id, or node count changes.
assert.notEqual(
baseToken,
buildVisibleGraphRefreshToken({
visibleMode: "desktop:graph",
chatId: "chat-main",
loadState: "loaded",
revision: 13,
nodeCount: 40,
edgeCount: 55,
lastProcessedSeq: 9,
}),
);
assert.notEqual(
baseToken,
buildVisibleGraphRefreshToken({
visibleMode: "desktop:cognition",
chatId: "chat-main",
loadState: "loaded",
revision: 12,
nodeCount: 40,
edgeCount: 55,
lastProcessedSeq: 9,
}),
);
assert.notEqual(
baseToken,
buildVisibleGraphRefreshToken({
visibleMode: "desktop:graph",
chatId: "chat-side",
loadState: "loaded",
revision: 12,
nodeCount: 40,
edgeCount: 55,
lastProcessedSeq: 9,
}),
);
assert.notEqual(
baseToken,
buildVisibleGraphRefreshToken({
visibleMode: "desktop:graph",
chatId: "chat-main",
loadState: "loaded",
revision: 12,
nodeCount: 41,
edgeCount: 55,
lastProcessedSeq: 9,
}),
);
console.log("panel-graph-refresh tests passed");

View File

@@ -284,6 +284,36 @@ async function testSkipFlagTtlExpires() {
}
}
// When a Tavern-Helper "Prompt Viewer" dialog is open (detected via a fake
// dialog with the spinning refresh icon), a synthetic generation must be
// flagged as a trivial skip and recall must not run before combine.
async function testPromptViewerSyntheticGenerationSkipsRecall() {
const harness = await createGenerationRecallHarness();
// Minimal dialog stub: reports the Prompt Viewer title and pretends the
// spinner element exists; every other selector misses.
const fakeDialog = {
textContent: "Prompt Viewer",
querySelector(selector) {
if (selector === ".fa-rotate-right.animate-spin") {
return {};
}
return null;
},
};
harness.document.querySelectorAll = (selector) =>
selector === '[role="dialog"]' ? [fakeDialog] : [];
harness.__sendTextareaValue = "hello world";
const startResult = harness.result.onGenerationStarted("normal", {}, false);
assert.equal(startResult, null);
assert.equal(
harness.result.getCurrentGenerationTrivialSkip()?.reason,
"tavern-helper-prompt-viewer",
);
const beforeCombine = await harness.result.onBeforeCombinePrompts();
assert.deepEqual(beforeCombine, {
skipped: true,
reason: "tavern-helper-prompt-viewer",
});
// Recall must never have been invoked for the skipped generation.
assert.equal(harness.runRecallCalls.length, 0);
}
await Promise.resolve();
testIsTrivialUserInputTable();
await testSlashCommandSkipsRecallAndExtraction();
@@ -297,5 +327,6 @@ await testNonTrivialGenerationClearsResidualTrivialSkip();
await testNonTargetMessageIdDoesNotConsumeFlag();
await testNullMessageIdFallsBackToLastAssistantIndex();
await testSkipFlagTtlExpires();
await testPromptViewerSyntheticGenerationSkipsRecall();
console.log("trivial-user-input tests passed");

View File

@@ -0,0 +1,46 @@
/**
 * Coerce `value` to a number and clamp it into the unit interval [0, 1].
 * Any non-finite input (NaN, Infinity, unparseable strings) maps to 0.
 *
 * @param {*} value - candidate value; coerced with Number().
 * @returns {number} a finite number in [0, 1].
 */
function clampUnit(value) {
  const n = Number(value);
  if (!Number.isFinite(n)) return 0;
  if (n <= 0) return 0;
  return n > 1 ? 1 : n;
}
/**
 * Report whether a canvas of `width` x `height` CSS pixels is large enough
 * to lay out and render a graph into.
 *
 * @param {number} width - candidate canvas width.
 * @param {number} height - candidate canvas height.
 * @param {number} minDimension - minimum acceptable side length; coerced to
 *   a finite number floored at 1, falling back to 48 when non-finite.
 * @returns {boolean} true when both sides are finite and meet the threshold.
 */
export function isUsableGraphCanvasSize(width = 0, height = 0, minDimension = 48) {
  const w = Number(width);
  const h = Number(height);
  const rawMin = Number(minDimension);
  const threshold = Number.isFinite(rawMin) ? Math.max(1, rawMin) : 48;
  if (!Number.isFinite(w) || !Number.isFinite(h)) return false;
  return w >= threshold && h >= threshold;
}
/**
 * Translate a point expressed inside `prevRect` to the equivalent relative
 * position inside `nextRect`.
 *
 * The point is converted to a per-axis fraction of the previous rect
 * (clamped to [0, 1]) and re-projected into the next rect. When either rect
 * is missing the point is returned untouched; non-finite coordinates
 * collapse to 0 and rect sizes are floored at 1 to avoid division by zero.
 *
 * @param {number} x - point x in prevRect space.
 * @param {number} y - point y in prevRect space.
 * @param {{x:number,y:number,w:number,h:number}|null} prevRect
 * @param {{x:number,y:number,w:number,h:number}|null} nextRect
 * @returns {{x: number, y: number}} the remapped point.
 */
export function remapPositionBetweenRects(x = 0, y = 0, prevRect = null, nextRect = null) {
  const toFinite = (value) => {
    const n = Number(value);
    return Number.isFinite(n) ? n : 0;
  };
  const px = toFinite(x);
  const py = toFinite(y);
  if (!prevRect || !nextRect) {
    return { x: px, y: py };
  }
  const clamp01 = (n) => (n <= 0 ? 0 : n >= 1 ? 1 : n);
  const fromX = toFinite(prevRect.x);
  const fromY = toFinite(prevRect.y);
  const fromW = Math.max(1, toFinite(prevRect.w));
  const fromH = Math.max(1, toFinite(prevRect.h));
  const toX = toFinite(nextRect.x);
  const toY = toFinite(nextRect.y);
  const toW = Math.max(1, toFinite(nextRect.w));
  const toH = Math.max(1, toFinite(nextRect.h));
  const fracX = clamp01((px - fromX) / fromW);
  const fracY = clamp01((py - fromY) / fromH);
  return {
    x: toX + fracX * toW,
    y: toY + fracY * toH,
  };
}

View File

@@ -2,6 +2,10 @@
// 零依赖:客观层 / 角色 POV / 用户 POV 分区内 Vogel 初值 + 一次性力导向稳定,无帧循环抖动
import { getNodeColors } from './themes.js';
import {
isUsableGraphCanvasSize,
remapPositionBetweenRects,
} from './graph-renderer-utils.js';
import { getGraphNodeLabel, getNodeDisplayName } from '../graph/node-labels.js';
import { normalizeMemoryScope } from '../graph/memory-scope.js';
import {
@@ -40,6 +44,8 @@ const DEFAULT_LAYOUT_CONFIG = {
neuralMinGap: 12,
};
const MIN_USABLE_CANVAS_DIMENSION = 48;
/** 兼容旧版 forceConfig召回卡片等 */
function layoutKeysFromForceConfig(fc) {
if (!fc || typeof fc !== 'object') return {};
@@ -192,6 +198,10 @@ export class GraphRenderer {
this._regionPanels = [];
this._lastGraph = null;
this._lastLayoutHints = {};
this._lastCanvasCssWidth = 0;
this._lastCanvasCssHeight = 0;
this._lastDevicePixelRatio = window.devicePixelRatio || 1;
// View transform
this.scale = 1;
@@ -234,6 +244,9 @@ export class GraphRenderer {
const prevSelectedId = this.selectedNode?.id || null;
this.nodeMap.clear();
this._lastGraph = graph;
this._lastLayoutHints = layoutHints && typeof layoutHints === 'object'
? { ...layoutHints }
: {};
if (layoutHints && Object.prototype.hasOwnProperty.call(layoutHints, 'userPovAliases')) {
this._userPovAliasSet = buildUserPovAliasNormalizedSet(
layoutHints.userPovAliases,
@@ -438,6 +451,40 @@ export class GraphRenderer {
}
}
// Re-partition nodes into scope regions for a new viewport size (W x H) and
// carry each node to the same relative position inside its region's new rect.
// NOTE(review): assumes this._computeRegionPanels refreshes node.regionRect as
// a side effect before the second loop reads it — confirm against renderer.
_rebuildLayoutForCurrentViewport(W, H) {
  // Snapshot the first rect seen per region before panels are recomputed.
  const previousRectsByRegion = new Map();
  for (const node of this.nodes) {
    if (!node?.regionKey || previousRectsByRegion.has(node.regionKey) || !node.regionRect) {
      continue;
    }
    previousRectsByRegion.set(node.regionKey, {
      x: node.regionRect.x,
      y: node.regionRect.y,
      w: node.regionRect.w,
      h: node.regionRect.h,
    });
  }
  const parts = partitionNodesByScope(this.nodes, this._userPovAliasSet);
  this._regionPanels = this._computeRegionPanels(W, H, parts);
  for (const node of this.nodes) {
    const nextRect = node.regionRect;
    // Fall back to the new rect when no previous rect was recorded, making
    // the remap effectively position-preserving for that node.
    const previousRect = previousRectsByRegion.get(node.regionKey) || nextRect;
    const nextPosition = remapPositionBetweenRects(
      node.x,
      node.y,
      previousRect,
      nextRect,
    );
    node.x = nextPosition.x;
    node.y = nextPosition.y;
    // Zero the velocities so the force simulation does not jolt after resize.
    node.vx = 0;
    node.vy = 0;
    this._clampNodeToRegion(node);
  }
}
/**
* 椭圆 Vogel 螺旋初值有机疏密Deterministic无网格感
*/
@@ -634,9 +681,10 @@ export class GraphRenderer {
const W = this.canvas.width / dpr;
const H = this.canvas.height / dpr;
ctx.setTransform(1, 0, 0, 1, 0, 0);
ctx.clearRect(0, 0, this.canvas.width, this.canvas.height);
ctx.save();
ctx.scale(dpr, dpr);
ctx.setTransform(dpr, 0, 0, dpr, 0, 0);
ctx.translate(this.offsetX, this.offsetY);
ctx.scale(this.scale, this.scale);
@@ -703,6 +751,14 @@ export class GraphRenderer {
ctx.restore();
}
_scheduleRender() {
if (this.animId) return;
this.animId = requestAnimationFrame(() => {
this.animId = null;
this._render();
});
}
_drawGrid(W, H) {
const sp = this.config.gridSpacing;
if (!sp || sp <= 0) return;
@@ -813,7 +869,7 @@ export class GraphRenderer {
this.offsetY += dy;
this._touchSession.lastX = t.clientX;
this._touchSession.lastY = t.clientY;
this._render();
this._scheduleRender();
}, { passive: false });
c.addEventListener('touchend', (e) => {
if (!this._touchSession) return;
@@ -891,17 +947,17 @@ export class GraphRenderer {
this.dragNode.x = x;
this.dragNode.y = y;
this._clampNodeToRegion(this.dragNode);
this._render();
this._scheduleRender();
} else if (this.isPanning) {
this.offsetX += e.clientX - this.lastMouse.x;
this.offsetY += e.clientY - this.lastMouse.y;
this._render();
this._scheduleRender();
} else {
const node = this._findNodeAt(x, y);
if (node !== this.hoveredNode) {
this.hoveredNode = node;
this.canvas.style.cursor = node ? 'pointer' : 'grab';
this._render();
this._scheduleRender();
}
}
this.lastMouse = { x: e.clientX, y: e.clientY };
@@ -981,15 +1037,34 @@ export class GraphRenderer {
const dpr = window.devicePixelRatio || 1;
const parent = this.canvas.parentElement;
if (!parent) return;
const w = parent.clientWidth;
const h = parent.clientHeight;
const w = Math.round(parent.clientWidth || 0);
const h = Math.round(parent.clientHeight || 0);
if (!isUsableGraphCanvasSize(w, h, MIN_USABLE_CANVAS_DIMENSION)) {
return;
}
if (
w === this._lastCanvasCssWidth
&& h === this._lastCanvasCssHeight
&& dpr === this._lastDevicePixelRatio
) {
return;
}
this._lastCanvasCssWidth = w;
this._lastCanvasCssHeight = h;
this._lastDevicePixelRatio = dpr;
this.canvas.width = w * dpr;
this.canvas.height = h * dpr;
this.canvas.style.width = w + 'px';
this.canvas.style.height = h + 'px';
if (this._lastGraph) {
this.loadGraph(this._lastGraph);
if (this.nodes.length > 0 && this._regionPanels.length > 0) {
this._rebuildLayoutForCurrentViewport(w, h);
this._render();
} else if (this._lastGraph) {
this.loadGraph(this._lastGraph, this._lastLayoutHints);
} else {
this._render();
}

View File

@@ -0,0 +1,59 @@
/**
 * Decide which graph workspace (if any) is currently visible to the user.
 *
 * @param {object} [state]
 * @param {boolean} [state.overlayActive] - whether the panel overlay is shown.
 * @param {boolean} [state.isMobile] - mobile layout flag.
 * @param {string} [state.currentTabId] - active side-panel tab.
 * @param {string} [state.currentGraphView] - desktop workspace sub-view.
 * @param {string} [state.currentMobileGraphView] - mobile graph-tab sub-view.
 * @returns {string} one of "hidden", "mobile:graph", "mobile:cognition",
 *   "mobile:summary", "desktop:graph", "desktop:cognition", "desktop:summary".
 */
export function resolveVisibleGraphWorkspaceMode({
  overlayActive = false,
  isMobile = false,
  currentTabId = "dashboard",
  currentGraphView = "graph",
  currentMobileGraphView = "graph",
} = {}) {
  if (!overlayActive) return "hidden";
  // Unknown / blank view names fall back to the plain graph view.
  const pickView = (raw) => {
    const view = String(raw || "graph").trim() || "graph";
    if (view === "cognition") return "cognition";
    if (view === "summary") return "summary";
    return "graph";
  };
  if (isMobile) {
    // On mobile the graph is only visible when its bottom tab is selected.
    if (currentTabId !== "graph") return "hidden";
    return `mobile:${pickView(currentMobileGraphView)}`;
  }
  // On desktop the config tab replaces the graph workspace entirely.
  if (currentTabId === "config") return "hidden";
  return `desktop:${pickView(currentGraphView)}`;
}
/**
 * Build a stable string token summarising everything the visible graph
 * workspace renders from; two equal tokens mean a refresh can be skipped.
 * A hidden workspace collapses to the constant token "hidden".
 *
 * @param {object} [snapshot]
 * @param {string} [snapshot.visibleMode] - workspace mode or "hidden".
 * @param {string} [snapshot.chatId] - active chat identifier.
 * @param {string} [snapshot.loadState] - persistence load state ("unknown" when blank).
 * @param {number} [snapshot.revision] - persistence revision (0 when non-numeric).
 * @param {number} [snapshot.nodeCount] - graph node count (-1 when unknown).
 * @param {number} [snapshot.edgeCount] - graph edge count (-1 when unknown).
 * @param {number} [snapshot.lastProcessedSeq] - last processed floor (-1 when unknown).
 * @returns {string} "hidden" or a "|"-joined token of the normalized fields.
 */
export function buildVisibleGraphRefreshToken({
  visibleMode = "hidden",
  chatId = "",
  loadState = "",
  revision = 0,
  nodeCount = -1,
  edgeCount = -1,
  lastProcessedSeq = -1,
} = {}) {
  const mode = String(visibleMode || "hidden").trim() || "hidden";
  if (mode === "hidden") return "hidden";
  // Truncate numeric fields to integers; non-finite values use the fallback.
  const toInt = (value, fallback) => {
    const n = Number(value);
    return Number.isFinite(n) ? Math.trunc(n) : fallback;
  };
  const fields = [
    mode,
    String(chatId || "").trim(),
    String(loadState || "").trim() || "unknown",
    toInt(revision, 0),
    toInt(nodeCount, -1),
    toInt(edgeCount, -1),
    toInt(lastProcessedSeq, -1),
  ];
  return fields.join("|");
}

View File

@@ -782,8 +782,7 @@
<div>
<div class="bme-config-card-title">记忆 LLM</div>
<div class="bme-config-card-subtitle">
留空时复用当前聊天模型;填写后走现有代理转发到 OpenAI
兼容接口。
留空时复用当前聊天模型;填写后会自动识别常见渠道,未识别时按自定义 OpenAI 兼容接口处理。
</div>
</div>
</div>
@@ -836,6 +835,9 @@
placeholder="https://api.openai.com/v1"
/>
</div>
<div class="bme-config-help" id="bme-memory-llm-provider-help">
留空时复用当前聊天模型。支持自动识别 OpenAI 兼容渠道、Anthropic Claude、Google AI Studio / Gemini填写完整 endpoint 时会自动规整为可复用的 base URL。
</div>
<div class="bme-config-row">
<label for="bme-setting-llm-key">LLM API Key</label>
<input

View File

@@ -1,6 +1,10 @@
// ST-BME: 操控面板交互逻辑
import { GraphRenderer } from "./graph-renderer.js";
import {
buildVisibleGraphRefreshToken,
resolveVisibleGraphWorkspaceMode,
} from "./panel-graph-refresh-utils.js";
import { getNodeDisplayName } from "../graph/node-labels.js";
import {
buildRegionLine,
@@ -21,6 +25,7 @@ import {
} from "../graph/summary-state.js";
import {
resolveActiveLlmPresetName,
resolveDedicatedLlmProviderConfig,
sanitizeLlmPresetSettings,
} from "../llm/llm-preset-utils.js";
import {
@@ -53,6 +58,48 @@ import {
let defaultPromptCache = null;
/**
 * Refresh the helper text under the memory-LLM URL input to describe which
 * provider / transport resolveDedicatedLlmProviderConfig detected.
 *
 * @param {string|null} urlValue - optional URL override; when null the value
 *   is read from the input element, then from persisted settings.
 */
function _refreshMemoryLlmProviderHelp(urlValue = null) {
  const helpEl = document.getElementById("bme-memory-llm-provider-help");
  if (!helpEl) return;
  const settings = _getSettings?.() || {};
  // Resolution order: explicit argument → live input value → saved setting.
  const rawUrl = String(
    urlValue ??
      document.getElementById("bme-setting-llm-url")?.value ??
      settings.llmApiUrl ??
      "",
  ).trim();
  if (!rawUrl) {
    // No URL configured: restore the generic guidance text.
    helpEl.textContent =
      "留空时复用当前聊天模型。支持自动识别 OpenAI 兼容渠道、Anthropic Claude、Google AI Studio / Gemini填写完整 endpoint 时会自动规整为可复用的 base URL。";
    return;
  }
  const resolved = resolveDedicatedLlmProviderConfig(rawUrl);
  const parts = [];
  if (resolved.isKnownProvider) {
    parts.push(`已识别渠道:${resolved.providerLabel || resolved.providerId || "未知渠道"}`);
  } else {
    parts.push("未识别为特定渠道,将按自定义 OpenAI 兼容接口处理");
  }
  if (resolved.transportLabel) {
    parts.push(`请求通道:${resolved.transportLabel}`);
  }
  // Only surface the normalized URL when it differs from what the user typed.
  if (resolved.apiUrl && resolved.apiUrl !== rawUrl) {
    parts.push(`规范化地址:${resolved.apiUrl}`);
  }
  if (resolved.supportsModelFetch !== true) {
    parts.push("该渠道暂不支持自动拉取模型,请手动填写模型名");
  }
  // NOTE(review): parts are joined with an empty separator, so the sentences
  // run together in the UI — confirm whether a "；" separator was intended.
  helpEl.textContent = parts.join("");
}
function getDefaultPrompts() {
if (defaultPromptCache) {
return defaultPromptCache;
@@ -292,6 +339,12 @@ let fetchedBackendEmbeddingModels = [];
let fetchedDirectEmbeddingModels = [];
let viewportSyncBound = false;
let popupRuntimePromise = null;
const GRAPH_LIVE_REFRESH_THROTTLE_MS = 240;
let pendingVisibleGraphRefreshTimer = null;
let pendingVisibleGraphRefreshToken = "";
let pendingVisibleGraphRefreshForce = false;
let lastVisibleGraphRefreshToken = "";
let lastVisibleGraphRefreshAt = 0;
// 由 index.js 注入的引用
let _getGraph = null;
@@ -641,6 +694,143 @@ function bindViewportSync() {
window.visualViewport?.addEventListener("scroll", update);
}
// Resolve the currently-visible graph workspace mode from live panel state.
function _getVisibleGraphWorkspaceMode() {
  const overlayActive = overlayEl?.classList.contains("active") === true;
  return resolveVisibleGraphWorkspaceMode({
    overlayActive,
    isMobile: _isMobile(),
    currentTabId,
    currentGraphView,
    currentMobileGraphView,
  });
}
// Compute the refresh token describing what the visible graph workspace would
// render right now (mode + chat + persistence revision + graph dimensions).
function _getCurrentGraphRefreshToken() {
  const graph = _getGraph?.();
  const persistence = _getGraphPersistenceSnapshot();
  return buildVisibleGraphRefreshToken({
    visibleMode: _getVisibleGraphWorkspaceMode(),
    chatId: persistence?.chatId,
    loadState: persistence?.loadState,
    // Prefer the live revision, falling back through accepted/synced ones.
    revision:
      persistence?.revision ??
      persistence?.lastAcceptedRevision ??
      persistence?.lastSyncedRevision ??
      0,
    // -1 marks "graph not loaded" so it hashes differently from an empty graph.
    nodeCount: Array.isArray(graph?.nodes) ? graph.nodes.length : -1,
    edgeCount: Array.isArray(graph?.edges) ? graph.edges.length : -1,
    lastProcessedSeq: graph?.historyState?.lastProcessedAssistantFloor ?? -1,
  });
}
// Cancel any pending throttled refresh and reset its bookkeeping state.
function _clearScheduledVisibleGraphRefresh() {
  const timer = pendingVisibleGraphRefreshTimer;
  pendingVisibleGraphRefreshTimer = null;
  pendingVisibleGraphRefreshToken = "";
  pendingVisibleGraphRefreshForce = false;
  if (timer) {
    clearTimeout(timer);
  }
}
/**
 * Refresh whichever graph workspace is currently visible, but only when the
 * refresh token (mode/chat/revision/graph-size snapshot) has changed since
 * the last successful refresh — unless `force` is set.
 *
 * @param {{force?: boolean}} [options]
 * @returns {{refreshed: boolean, reason: string, token?: string, visibleMode?: string}}
 */
function _refreshVisibleGraphWorkspace({ force = false } = {}) {
  const visibleMode = _getVisibleGraphWorkspaceMode();
  if (visibleMode === "hidden") {
    return { refreshed: false, reason: "hidden" };
  }
  const graph = _getGraph?.();
  const nextToken = _getCurrentGraphRefreshToken();
  // Dedupe: an identical token means nothing the workspace renders changed.
  if (!force && nextToken === lastVisibleGraphRefreshToken) {
    return { refreshed: false, reason: "unchanged", token: nextToken };
  }
  const hints = { userPovAliases: _hostUserPovAliasHintsForGraph() };
  // Dispatch to the renderer/refresher that owns the visible workspace.
  if (visibleMode === "desktop:graph") {
    if (graph && graphRenderer) {
      graphRenderer.loadGraph(graph, hints);
    }
  } else if (visibleMode === "desktop:cognition") {
    _refreshCognitionWorkspace();
  } else if (visibleMode === "desktop:summary") {
    _refreshSummaryWorkspace();
  } else if (visibleMode === "mobile:graph") {
    if (graph && mobileGraphRenderer) {
      mobileGraphRenderer.loadGraph(graph, hints);
    }
    _buildMobileLegend();
  } else if (visibleMode === "mobile:cognition") {
    _refreshMobileCognitionFull();
  } else if (visibleMode === "mobile:summary") {
    _refreshMobileSummaryFull();
  }
  // Record token/time even for forced refreshes so throttling stays accurate.
  lastVisibleGraphRefreshToken = nextToken;
  lastVisibleGraphRefreshAt = Date.now();
  return {
    refreshed: true,
    reason: force ? "forced" : "changed",
    token: nextToken,
    visibleMode,
  };
}
// Run the pending refresh immediately, preserving its queued force flag.
function _flushScheduledVisibleGraphRefresh() {
  const force = pendingVisibleGraphRefreshForce === true;
  _clearScheduledVisibleGraphRefresh();
  const result = _refreshVisibleGraphWorkspace({ force });
  return result;
}
/**
 * Schedule a throttled refresh of the visible graph workspace. Refreshes are
 * deduped by token and spaced at least GRAPH_LIVE_REFRESH_THROTTLE_MS apart;
 * `force` bypasses both dedupe and throttling and refreshes synchronously.
 *
 * @param {{force?: boolean}} [options]
 * @returns {object} either a scheduling descriptor ({scheduled, reason, ...})
 *   or, when executed synchronously, the _refreshVisibleGraphWorkspace result.
 */
function _scheduleVisibleGraphWorkspaceRefresh({ force = false } = {}) {
  const nextToken = _getCurrentGraphRefreshToken();
  // Hidden workspace: nothing to draw; drop any queued work.
  if (nextToken === "hidden") {
    _clearScheduledVisibleGraphRefresh();
    return { scheduled: false, reason: "hidden" };
  }
  if (force) {
    _clearScheduledVisibleGraphRefresh();
    return _refreshVisibleGraphWorkspace({ force: true });
  }
  if (nextToken === lastVisibleGraphRefreshToken) {
    return { scheduled: false, reason: "unchanged", token: nextToken };
  }
  // A timer is already queued for this exact token: nothing more to do.
  if (
    pendingVisibleGraphRefreshTimer &&
    pendingVisibleGraphRefreshToken === nextToken &&
    pendingVisibleGraphRefreshForce !== true
  ) {
    return { scheduled: true, reason: "pending", token: nextToken };
  }
  // Respect the minimum interval since the last actual refresh.
  const delay = Math.max(
    0,
    GRAPH_LIVE_REFRESH_THROTTLE_MS - (Date.now() - lastVisibleGraphRefreshAt),
  );
  pendingVisibleGraphRefreshToken = nextToken;
  pendingVisibleGraphRefreshForce = false;
  // Replace any stale timer queued for a different token.
  if (pendingVisibleGraphRefreshTimer) {
    clearTimeout(pendingVisibleGraphRefreshTimer);
    pendingVisibleGraphRefreshTimer = null;
  }
  if (delay <= 0) {
    // Throttle window already elapsed: refresh synchronously.
    return _flushScheduledVisibleGraphRefresh();
  }
  pendingVisibleGraphRefreshTimer = setTimeout(() => {
    _flushScheduledVisibleGraphRefresh();
  }, delay);
  return {
    scheduled: true,
    reason: "throttled",
    token: nextToken,
    delay,
  };
}
/**
* 初始化面板(由 index.js 调用一次)
*/
@@ -938,7 +1128,6 @@ export function openPanel() {
panelEl?.querySelector(".bme-tab-btn.active")?.dataset.tab || currentTabId;
_switchTab(activeTabId);
_refreshRuntimeStatus();
_refreshGraph();
_buildLegend();
}
@@ -948,6 +1137,8 @@ export function openPanel() {
export function closePanel() {
if (!overlayEl) return;
overlayEl.classList.remove("active");
_clearScheduledVisibleGraphRefresh();
lastVisibleGraphRefreshToken = "";
}
/**
@@ -989,7 +1180,7 @@ export function refreshLiveState() {
_refreshMessageTraceWorkspace();
}
_refreshGraph();
_scheduleVisibleGraphWorkspaceRefresh();
}
// ==================== Tab 切换 ====================
@@ -1004,6 +1195,7 @@ function _bindTabs() {
}
function _switchTab(tabId) {
const previousVisibleGraphMode = _getVisibleGraphWorkspaceMode();
let next = tabId || "dashboard";
// 「图谱」仅移动端底部 Tab 可用;桌面端图谱在右侧主工作区,侧栏不设该 Tab
if (!_isMobile() && next === "graph") {
@@ -1035,11 +1227,17 @@ function _switchTab(tabId) {
_refreshConfigTab();
break;
case "graph":
_refreshMobileGraphTab();
break;
default:
break;
}
const nextVisibleGraphMode = _getVisibleGraphWorkspaceMode();
if (nextVisibleGraphMode !== previousVisibleGraphMode) {
_scheduleVisibleGraphWorkspaceRefresh({ force: true });
} else {
_scheduleVisibleGraphWorkspaceRefresh();
}
}
function _getPlannerApi() {
@@ -1118,8 +1316,7 @@ function _switchGraphView(view) {
if (cogWorkspace) cogWorkspace.style.display = isCognition ? "" : "none";
if (summaryWorkspace) summaryWorkspace.style.display = isSummary ? "" : "none";
if (isCognition) _refreshCognitionWorkspace();
if (isSummary) _refreshSummaryWorkspace();
_refreshGraph({ force: true });
}
// ==================== 移动端图谱 Tab ====================
@@ -1144,14 +1341,7 @@ function _switchMobileGraphSubView(view) {
}
function _refreshMobileGraphTab() {
if (currentMobileGraphView === "graph") {
_refreshGraph();
_buildMobileLegend();
} else if (currentMobileGraphView === "cognition") {
_refreshMobileCognitionFull();
} else if (currentMobileGraphView === "summary") {
_refreshMobileSummaryFull();
}
_refreshGraph({ force: true });
}
function _buildMobileLegend() {
@@ -2947,20 +3137,8 @@ function _hostUserPovAliasHintsForGraph() {
return getHostUserAliasHints();
}
function _refreshGraph() {
const graph = _getGraph?.();
if (!graph) return;
const hints = { userPovAliases: _hostUserPovAliasHintsForGraph() };
graphRenderer?.loadGraph(graph, hints);
mobileGraphRenderer?.loadGraph(graph, hints);
if (currentGraphView === "cognition") {
_refreshCognitionWorkspace();
} else if (currentGraphView === "summary") {
_refreshSummaryWorkspace();
}
if (currentTabId === "graph") {
_refreshMobileGraphTab();
}
// Back-compat shim: legacy _refreshGraph() call sites now route through the
// token-gated workspace refresh. Force defaults to TRUE (only an explicit
// { force: false } opts out), preserving the old always-redraw semantics.
function _refreshGraph(options = {}) {
  const force = options.force !== false;
  return _refreshVisibleGraphWorkspace({ force });
}
function _buildLegend() {
@@ -4544,6 +4722,7 @@ function _refreshConfigTab() {
_setInputValue("bme-setting-llm-url", settings.llmApiUrl || "");
_setInputValue("bme-setting-llm-key", settings.llmApiKey || "");
_setInputValue("bme-setting-llm-model", settings.llmModel || "");
_refreshMemoryLlmProviderHelp(settings.llmApiUrl || "");
_populateLlmPresetSelect(settings.llmPresets || {}, resolvedActiveLlmPreset);
_syncLlmPresetControls(resolvedActiveLlmPreset);
_setInputValue("bme-setting-timeout-ms", settings.timeoutMs ?? 300000);
@@ -5073,6 +5252,7 @@ function _bindConfigControls() {
_setInputValue("bme-setting-llm-url", preset.llmApiUrl);
_setInputValue("bme-setting-llm-key", preset.llmApiKey);
_setInputValue("bme-setting-llm-model", preset.llmModel);
_refreshMemoryLlmProviderHelp(preset.llmApiUrl);
_clearFetchedLlmModels();
_syncLlmPresetControls(selectedName);
});
@@ -5167,6 +5347,7 @@ function _bindConfigControls() {
bindText("bme-setting-llm-url", (value) => {
_patchSettings({ llmApiUrl: value.trim() });
_refreshMemoryLlmProviderHelp(value);
_markLlmPresetDirty({ clearFetchedModels: true });
});
bindText("bme-setting-llm-key", (value) => {
@@ -6176,6 +6357,8 @@ function _getMonitorRouteLabel(value = "") {
if (!normalized) return "";
const labels = {
"dedicated-openai-compatible": "专用 OpenAI 兼容接口",
"dedicated-anthropic-claude": "Anthropic Claude 接口",
"dedicated-google-ai-studio": "Google AI Studio / Gemini 接口",
"sillytavern-current-model": "酒馆当前模型",
"dedicated-memory-llm": "专用记忆模型",
global: "跟随当前 API",
@@ -6227,7 +6410,8 @@ function _getMonitorEjsStatusLabel(status = "") {
function _formatMonitorRouteInfo(entry = {}) {
const parts = [
_getMonitorRouteLabel(entry?.route),
_getMonitorRouteLabel(entry?.routeLabel || entry?.route),
String(entry?.llmProviderLabel || "").trim(),
_getMonitorRouteLabel(entry?.llmConfigSourceLabel),
String(entry?.model || "").trim() ? `模型:${String(entry.model).trim()}` : "",
].filter(Boolean);
@@ -7914,7 +8098,11 @@ function _renderTaskDebugLlmCard(taskType, llmRequest) {
</div>
<div class="bme-debug-kv-item">
<span class="bme-debug-kv-key">请求路径</span>
<span class="bme-debug-kv-value">${_escHtml(llmRequest.route || "—")}</span>
<span class="bme-debug-kv-value">${_escHtml(llmRequest.routeLabel || _getMonitorRouteLabel(llmRequest.route || "") || llmRequest.route || "—")}</span>
</div>
<div class="bme-debug-kv-item">
<span class="bme-debug-kv-key">识别渠道</span>
<span class="bme-debug-kv-value">${_escHtml(llmRequest.llmProviderLabel || llmRequest.llmProvider || "—")}</span>
</div>
<div class="bme-debug-kv-item">
<span class="bme-debug-kv-key">模型</span>
@@ -9538,6 +9726,7 @@ function _setText(id, text) {
function _getGraphPersistenceSnapshot() {
return _getGraphPersistenceState?.() || {
revision: 0,
loadState: "no-chat",
reason: "",
writesBlocked: true,