fix: OpenAI o1 models do not support the max_tokens param

close #1378
close #1340
close #1197
close #491
This commit is contained in:
kangfenmao 2025-02-12 15:58:42 +08:00
parent 6b2452422e
commit f749bef2fd

View File

@ -123,7 +123,9 @@ export default class OpenAIProvider extends BaseProvider {
return assistant?.settings?.temperature return assistant?.settings?.temperature
} }
private getProviderSpecificParameters(model: Model) { private getProviderSpecificParameters(assistant: Assistant, model: Model) {
const { maxTokens } = getAssistantSettings(assistant)
if (this.provider.id === 'openrouter') { if (this.provider.id === 'openrouter') {
if (model.id.includes('deepseek-r1')) { if (model.id.includes('deepseek-r1')) {
return { return {
@ -132,6 +134,13 @@ export default class OpenAIProvider extends BaseProvider {
} }
} }
if (this.isOpenAIo1(model)) {
return {
max_tokens: undefined,
max_completion_tokens: maxTokens
}
}
return {} return {}
} }
@ -155,6 +164,10 @@ export default class OpenAIProvider extends BaseProvider {
return {} return {}
} }
/** True when the given model is an OpenAI o1-series model (id begins with "o1"). */
private isOpenAIo1(model: Model) {
  const o1Prefix = 'o1'
  return model.id.slice(0, o1Prefix.length) === o1Prefix
}
async completions({ messages, assistant, onChunk, onFilterMessages }: CompletionsParams): Promise<void> { async completions({ messages, assistant, onChunk, onFilterMessages }: CompletionsParams): Promise<void> {
const defaultModel = getDefaultModel() const defaultModel = getDefaultModel()
const model = assistant.model || defaultModel const model = assistant.model || defaultModel
@ -184,7 +197,7 @@ export default class OpenAIProvider extends BaseProvider {
userMessages.push(await this.getMessageParam(message, model)) userMessages.push(await this.getMessageParam(message, model))
} }
const isOpenAIo1 = model.id.startsWith('o1') const isOpenAIo1 = this.isOpenAIo1(model)
const isSupportStreamOutput = () => { const isSupportStreamOutput = () => {
if (isOpenAIo1) { if (isOpenAIo1) {
@ -208,7 +221,7 @@ export default class OpenAIProvider extends BaseProvider {
stream: isSupportStreamOutput(), stream: isSupportStreamOutput(),
...this.getReasoningEffort(assistant, model), ...this.getReasoningEffort(assistant, model),
...getOpenAIWebSearchParams(assistant, model), ...getOpenAIWebSearchParams(assistant, model),
...this.getProviderSpecificParameters(model), ...this.getProviderSpecificParameters(assistant, model),
...this.getCustomParameters(assistant) ...this.getCustomParameters(assistant)
}) })
@ -271,7 +284,7 @@ export default class OpenAIProvider extends BaseProvider {
{ role: 'user', content: message.content } { role: 'user', content: message.content }
] ]
const isOpenAIo1 = model.id.startsWith('o1') const isOpenAIo1 = this.isOpenAIo1(model)
const isSupportedStreamOutput = () => { const isSupportedStreamOutput = () => {
if (!onResponse) { if (!onResponse) {