Mirror of https://github.com/Youzini-afk/ST-Bionic-Memory-Ecology.git (synced 2026-05-15 14:20:35 +08:00)
fix: route luker extraction llm requests
llm/llm.js (83 changed lines)
@@ -10,6 +10,7 @@ import {
   resolveDedicatedLlmProviderConfig,
   resolveLlmConfigSelection,
 } from "./llm-preset-utils.js";
+import { getBmeHostAdapter } from "../host/runtime-host-adapter.js";
 import { getActiveTaskProfile } from "../prompting/prompt-profiles.js";
 import { resolveConfiguredTimeoutMs } from "../runtime/request-timeout.js";
 import { applyTaskRegex } from "../prompting/task-regex.js";
@@ -524,6 +525,74 @@ function getMemoryLLMConfig(taskType = "") {
   };
 }
 
+function resolveHostChatCompletionRouting(taskType = "", options = {}) {
+  const adapter =
+    typeof getBmeHostAdapter === "function" ? getBmeHostAdapter() : null;
+  if (!adapter || String(adapter.hostProfile || "") !== "luker") {
+    return {
+      hostProfile: String(adapter?.hostProfile || "generic-st"),
+      requestApi: "",
+      apiSettingsOverride: null,
+      requestScope: "chat",
+      routeApplied: false,
+      routeReason: "not-luker",
+    };
+  }
+
+  const context =
+    adapter.context && typeof adapter.context === "object"
+      ? adapter.context
+      : {};
+  const resolver =
+    typeof adapter.resolveChatCompletionRequestProfile === "function"
+      ? adapter.resolveChatCompletionRequestProfile.bind(adapter)
+      : null;
+  if (!resolver) {
+    return {
+      hostProfile: "luker",
+      requestApi: "",
+      apiSettingsOverride: null,
+      requestScope: "extension_internal",
+      routeApplied: false,
+      routeReason: "resolver-unavailable",
+    };
+  }
+
+  const profileName = String(options?.profileName || "").trim();
+  const resolution =
+    resolver({
+      profileName,
+      defaultApi: String(context?.mainApi || "openai").trim() || "openai",
+      defaultSource: String(
+        context?.chatCompletionSettings?.chat_completion_source || "",
+      ).trim(),
+      taskType: String(taskType || "").trim(),
+    }) || null;
+
+  return {
+    hostProfile: "luker",
+    requestApi: String(
+      resolution?.requestApi ||
+        context?.mainApi ||
+        "openai",
+    ).trim() || "openai",
+    apiSettingsOverride:
+      resolution?.apiSettingsOverride &&
+      typeof resolution.apiSettingsOverride === "object"
+        ? cloneRuntimeDebugValue(resolution.apiSettingsOverride, null)
+        : null,
+    requestScope: "extension_internal",
+    routeApplied: Boolean(
+      resolution?.apiSettingsOverride &&
+        typeof resolution.apiSettingsOverride === "object",
+    ),
+    routeReason:
+      resolution && typeof resolution === "object"
+        ? "profile-resolved"
+        : "profile-resolution-empty",
+  };
+}
+
 function getConfiguredTimeoutMs(settings = {}) {
   return typeof resolveConfiguredTimeoutMs === "function"
     ? resolveConfiguredTimeoutMs(settings, LLM_REQUEST_TIMEOUT_MS)
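The new resolver only reroutes when the host adapter identifies itself as the luker host and exposes resolveChatCompletionRequestProfile. For orientation, a minimal sketch of an adapter object that would take the routed path, assuming only the property reads visible in the hunk above (concrete values are illustrative, not from this commit):

// Sketch only: a host adapter shaped the way resolveHostChatCompletionRouting
// expects. Field names mirror the reads in the hunk above; values are placeholders.
const sketchAdapter = {
  hostProfile: "luker",
  context: {
    mainApi: "openai",
    chatCompletionSettings: { chat_completion_source: "openai" },
  },
  resolveChatCompletionRequestProfile({ profileName, defaultApi, defaultSource, taskType }) {
    // Returning a non-null apiSettingsOverride object is what makes
    // routeApplied come out true with routeReason "profile-resolved".
    return {
      requestApi: defaultApi,
      apiSettingsOverride: { chat_completion_source: defaultSource || "custom" },
    };
  },
};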
@@ -1892,6 +1961,9 @@ async function callDedicatedOpenAICompatible(
   );
   const transportMessages = buildTransportMessages(messages);
   const config = getMemoryLLMConfig(taskType);
+  const hostRouting = resolveHostChatCompletionRouting(taskType, {
+    profileName: "",
+  });
   const settings = extension_settings[MODULE_NAME] || {};
   const hasDedicatedConfig = hasDedicatedLLMConfig(config);
   if (taskType && config.llmPresetFallbackReason) {
@@ -1966,6 +2038,11 @@ async function callDedicatedOpenAICompatible(
       taskType,
       config,
     ),
+    hostProfile: hostRouting.hostProfile,
+    hostRequestApi: hostRouting.requestApi,
+    hostRouteApplied: hostRouting.routeApplied,
+    hostRouteReason: hostRouting.routeReason,
+    apiSettingsOverride: hostRouting.apiSettingsOverride,
     maxCompletionTokens,
     ...buildStreamDebugSnapshot(streamState),
   });
@@ -1974,7 +2051,11 @@ async function callDedicatedOpenAICompatible(
     "quiet",
     transportMessages,
     signal,
-    jsonMode ? { jsonSchema: createGenericJsonSchema() } : {},
+    {
+      ...(jsonMode ? { jsonSchema: createGenericJsonSchema() } : {}),
+      apiSettingsOverride: hostRouting.apiSettingsOverride,
+      requestScope: hostRouting.requestScope,
+    },
   );
   const normalized = normalizeLLMResponsePayload(payload);
   if (
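Net effect of the last hunk: the fourth positional argument, previously just the optional jsonSchema bag, becomes an options object that also carries the host routing fields. Based on the sendOpenAIRequest shim in the test below (not the verbatim call site), the dispatched request looks roughly like:

// Sketch, assuming sendOpenAIRequest(type, messages, signal, options)
// as modeled by the test shim below.
await sendOpenAIRequest("quiet", transportMessages, signal, {
  ...(jsonMode ? { jsonSchema: createGenericJsonSchema() } : {}),
  // luker per-profile connection settings, or null on non-luker hosts
  apiSettingsOverride: hostRouting.apiSettingsOverride,
  // "extension_internal" on luker hosts, "chat" otherwise
  requestScope: hostRouting.requestScope,
});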
tests/luker-llm-routing.mjs (new file, 162 lines)
@@ -0,0 +1,162 @@
+import assert from "node:assert/strict";
+import { createRequire } from "node:module";
+import {
+  installResolveHooks,
+  toDataModuleUrl,
+} from "./helpers/register-hooks-compat.mjs";
+
+const extensionsShimSource = [
+  "export const extension_settings = globalThis.__lukerLlmRoutingExtensionSettings || {};",
+  "export function getContext() {",
+  "  return null;",
+  "}",
+].join("\n");
+const scriptShimSource = [
+  "export function getRequestHeaders() {",
+  "  return { 'Content-Type': 'application/json' };",
+  "}",
+].join("\n");
+const openAiShimSource = [
+  "export const chat_completion_sources = { CUSTOM: 'custom', OPENAI: 'openai' };",
+  "export async function sendOpenAIRequest(...args) {",
+  "  if (typeof globalThis.__lukerLlmRoutingSendOpenAIRequest === 'function') {",
+  "    return await globalThis.__lukerLlmRoutingSendOpenAIRequest(...args);",
+  "  }",
+  "  return { choices: [{ message: { content: '{}' } }] };",
+  "}",
+].join("\n");
+
+installResolveHooks([
+  {
+    specifiers: [
+      "../../../extensions.js",
+      "../../../../extensions.js",
+      "../../../../../extensions.js",
+    ],
+    url: toDataModuleUrl(extensionsShimSource),
+  },
+  {
+    specifiers: [
+      "../../../../script.js",
+      "../../../../../script.js",
+    ],
+    url: toDataModuleUrl(scriptShimSource),
+  },
+  {
+    specifiers: [
+      "../../../openai.js",
+      "../../../../openai.js",
+    ],
+    url: toDataModuleUrl(openAiShimSource),
+  },
+]);
+
+const require = createRequire(import.meta.url);
+const originalRequire = globalThis.require;
+const originalExtensionSettings = globalThis.__lukerLlmRoutingExtensionSettings;
+const originalSendOpenAIRequest = globalThis.__lukerLlmRoutingSendOpenAIRequest;
+const originalLuker = globalThis.Luker;
+
+globalThis.__lukerLlmRoutingExtensionSettings = {
+  st_bme: {},
+};
+globalThis.require = require;
+
+const llm = await import("../llm/llm.js");
+const extensionsApi = await import("../../../../extensions.js");
+
+if (originalRequire === undefined) {
+  delete globalThis.require;
+} else {
+  globalThis.require = originalRequire;
+}
+
+if (originalExtensionSettings === undefined) {
+  delete globalThis.__lukerLlmRoutingExtensionSettings;
+} else {
+  globalThis.__lukerLlmRoutingExtensionSettings = originalExtensionSettings;
+}
+
+let capturedOptions = null;
+let capturedMessages = null;
+
+globalThis.Luker = {
+  getContext() {
+    return {
+      mainApi: "openai",
+      chatCompletionSettings: {
+        chat_completion_source: "openai",
+      },
+      getChatState() {},
+      updateChatState() {},
+      getChatStateBatch() {},
+      resolveChatCompletionRequestProfile() {
+        return {
+          requestApi: "openai",
+          apiSettingsOverride: {
+            chat_completion_source: "openai",
+            reverse_proxy: "https://example-luker-route.test/v1",
+            proxy_password: "sk-luker-route",
+            secret_id: "luker-secret-1",
+          },
+        };
+      },
+    };
+  },
+};
+
+globalThis.__lukerLlmRoutingSendOpenAIRequest = async (
+  type,
+  messages,
+  signal,
+  options = {},
+) => {
+  capturedOptions = { ...(options || {}) };
+  capturedMessages = Array.isArray(messages) ? [...messages] : messages;
+  return {
+    choices: [
+      {
+        message: {
+          content: '{"operations":[]}',
+        },
+      },
+    ],
+  };
+};
+
+extensionsApi.extension_settings.st_bme = {};
+
+try {
+  const result = await llm.callLLMForJSON({
+    systemPrompt: "system",
+    userPrompt: "user",
+    maxRetries: 0,
+    taskType: "extract",
+    requestSource: "test:luker-route",
+  });
+
+  assert.deepEqual(result, { operations: [] });
+  assert.ok(Array.isArray(capturedMessages));
+  assert.equal(capturedMessages.length >= 2, true);
+  assert.equal(capturedOptions?.requestScope, "extension_internal");
+  assert.deepEqual(capturedOptions?.apiSettingsOverride, {
+    chat_completion_source: "openai",
+    reverse_proxy: "https://example-luker-route.test/v1",
+    proxy_password: "sk-luker-route",
+    secret_id: "luker-secret-1",
+  });
+} finally {
+  if (originalSendOpenAIRequest === undefined) {
+    delete globalThis.__lukerLlmRoutingSendOpenAIRequest;
+  } else {
+    globalThis.__lukerLlmRoutingSendOpenAIRequest = originalSendOpenAIRequest;
+  }
+
+  if (originalLuker === undefined) {
+    delete globalThis.Luker;
+  } else {
+    globalThis.Luker = originalLuker;
+  }
+}
+
+console.log("luker-llm-routing tests passed");
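The test stubs SillyTavern's extensions.js, script.js, and openai.js by redirecting their relative import specifiers to in-memory modules, so it can run outside a SillyTavern runtime (e.g. node tests/luker-llm-routing.mjs). The helpers come from tests/helpers/register-hooks-compat.mjs, whose implementation is not part of this diff; toDataModuleUrl plausibly reduces to something like the following sketch, shown only to make the shim mechanism concrete:

// Sketch only (an assumption, the real helper may differ): encode shim source
// as a data: URL that Node's ESM loader can import without touching disk.
export function toDataModuleUrl(source) {
  const base64 = Buffer.from(source, "utf8").toString("base64");
  return `data:text/javascript;base64,${base64}`;
}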