fix: OpenAI o1 models do not support the max_tokens param

close #1378
close #1340
close #1197
close #491
This commit is contained in:
kangfenmao 2025-02-12 15:58:42 +08:00
parent 6b2452422e
commit f749bef2fd

View File

@ -123,7 +123,9 @@ export default class OpenAIProvider extends BaseProvider {
return assistant?.settings?.temperature
}
private getProviderSpecificParameters(model: Model) {
private getProviderSpecificParameters(assistant: Assistant, model: Model) {
const { maxTokens } = getAssistantSettings(assistant)
if (this.provider.id === 'openrouter') {
if (model.id.includes('deepseek-r1')) {
return {
@ -132,6 +134,13 @@ export default class OpenAIProvider extends BaseProvider {
}
}
if (this.isOpenAIo1(model)) {
return {
max_tokens: undefined,
max_completion_tokens: maxTokens
}
}
return {}
}
@ -155,6 +164,10 @@ export default class OpenAIProvider extends BaseProvider {
return {}
}
// True for OpenAI o1-family models (model id begins with "o1").
// These models reject `max_tokens`; the request must send
// `max_completion_tokens` instead (see getProviderSpecificParameters).
private isOpenAIo1(model: Model) {
return model.id.startsWith('o1')
}
async completions({ messages, assistant, onChunk, onFilterMessages }: CompletionsParams): Promise<void> {
const defaultModel = getDefaultModel()
const model = assistant.model || defaultModel
@ -184,7 +197,7 @@ export default class OpenAIProvider extends BaseProvider {
userMessages.push(await this.getMessageParam(message, model))
}
const isOpenAIo1 = model.id.startsWith('o1')
const isOpenAIo1 = this.isOpenAIo1(model)
const isSupportStreamOutput = () => {
if (isOpenAIo1) {
@ -208,7 +221,7 @@ export default class OpenAIProvider extends BaseProvider {
stream: isSupportStreamOutput(),
...this.getReasoningEffort(assistant, model),
...getOpenAIWebSearchParams(assistant, model),
...this.getProviderSpecificParameters(model),
...this.getProviderSpecificParameters(assistant, model),
...this.getCustomParameters(assistant)
})
@ -271,7 +284,7 @@ export default class OpenAIProvider extends BaseProvider {
{ role: 'user', content: message.content }
]
const isOpenAIo1 = model.id.startsWith('o1')
const isOpenAIo1 = this.isOpenAIo1(model)
const isSupportedStreamOutput = () => {
if (!onResponse) {