fix: qwenlm context error

This commit is contained in:
Nanami 2025-01-15 01:09:06 +08:00 committed by 亢奋猫
parent eff639ddf9
commit a6795289da

View File

@ -183,6 +183,10 @@ export default class OpenAIProvider extends BaseProvider {
    const _messages = filterContextMessages(takeRight(messages, contextCount + 1))
    onFilterMessages(_messages)
    if (this.provider.id === 'qwenlm' && _messages[0]?.role !== 'user') {
      userMessages.push({ role: 'user', content: '' })
    }
    for (const message of _messages) {
      userMessages.push(await this.getMessageParam(message, model))
    }