fix: 移除重复max_tokens参数避免API报错

This commit is contained in:
Youzini-afk
2026-03-26 14:59:37 +08:00
parent 5e8fd63722
commit 09d1af787b

1 changed file: llm.js (View File)

@@ -377,7 +377,6 @@ async function callDedicatedOpenAICompatible(
model: config.model,
messages,
temperature: filteredGeneration.temperature ?? (jsonMode ? 0 : 0.2),
max_tokens: resolvedCompletionTokens,
max_completion_tokens: resolvedCompletionTokens,
stream: filteredGeneration.stream ?? false,
};