refactor(GeminiProvider): Enhance message handling for Gemma models
commit 38b52a2ee6
parent 442ef89ce0
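The hunks below stop passing the SDK's systemInstruction field for Gemma models and instead fold the system prompt into the user turn using Gemma's chat markup. A minimal TypeScript sketch of that wrapping; the buildGemmaContent helper name and the example strings are illustrative only, not part of this commit:

// Hypothetical helper mirroring the wrapping applied in the hunks below.
// Gemma models take no separate system instruction, so the system prompt is
// emitted as an additional user turn in Gemma's <start_of_turn> format.
function buildGemmaContent(systemPrompt: string | undefined, userText: string): string {
  if (!systemPrompt) return userText
  return `<start_of_turn>user\n${systemPrompt}<end_of_turn>\n<start_of_turn>user\n${userText}<end_of_turn>`
}

// Example: buildGemmaContent('Be concise.', 'Hello') yields
// '<start_of_turn>user\nBe concise.<end_of_turn>\n<start_of_turn>user\nHello<end_of_turn>'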
@@ -366,7 +366,7 @@ export default class GeminiProvider extends BaseProvider {
     const geminiModel = this.sdk.getGenerativeModel(
       {
         model: model.id,
-        systemInstruction: assistant.prompt,
+        ...(isGemmaModel(model) ? {} : { systemInstruction: assistant.prompt }),
         generationConfig: {
           maxOutputTokens: maxTokens,
           temperature: assistant?.settings?.temperature
@@ -375,12 +375,17 @@ export default class GeminiProvider extends BaseProvider {
       this.requestOptions
     )

+    const content =
+      isGemmaModel(model) && assistant.prompt
+        ? `<start_of_turn>user\n${assistant.prompt}<end_of_turn>\n<start_of_turn>user\n${message.content}<end_of_turn>`
+        : message.content
+
     if (!onResponse) {
-      const { response } = await geminiModel.generateContent(message.content)
+      const { response } = await geminiModel.generateContent(content)
       return response.text()
     }

-    const response = await geminiModel.generateContentStream(message.content)
+    const response = await geminiModel.generateContentStream(content)

     let text = ''

@@ -426,7 +431,7 @@ export default class GeminiProvider extends BaseProvider {
     const geminiModel = this.sdk.getGenerativeModel(
       {
         model: model.id,
-        systemInstruction: systemMessage.content,
+        ...(isGemmaModel(model) ? {} : { systemInstruction: systemMessage.content }),
         generationConfig: {
           temperature: assistant?.settings?.temperature
         }
@@ -435,8 +440,11 @@ export default class GeminiProvider extends BaseProvider {
     )

     const chat = await geminiModel.startChat()
+    const content = isGemmaModel(model)
+      ? `<start_of_turn>user\n${systemMessage.content}<end_of_turn>\n<start_of_turn>user\n${userMessage.content}<end_of_turn>`
+      : userMessage.content

-    const { response } = await chat.sendMessage(userMessage.content)
+    const { response } = await chat.sendMessage(content)

     return removeSpecialCharactersForTopicName(response.text())
   }
@@ -451,10 +459,20 @@ export default class GeminiProvider extends BaseProvider {
     const model = getDefaultModel()
     const systemMessage = { role: 'system', content: prompt }

-    const geminiModel = this.sdk.getGenerativeModel({ model: model.id }, this.requestOptions)
+    const geminiModel = this.sdk.getGenerativeModel(
+      {
+        model: model.id,
+        ...(isGemmaModel(model) ? {} : { systemInstruction: systemMessage.content })
+      },
+      this.requestOptions
+    )

-    const chat = await geminiModel.startChat({ systemInstruction: systemMessage.content })
-    const { response } = await chat.sendMessage(content)
+    const chat = await geminiModel.startChat()
+    const messageContent = isGemmaModel(model)
+      ? `<start_of_turn>user\n${prompt}<end_of_turn>\n<start_of_turn>user\n${content}<end_of_turn>`
+      : content
+
+    const { response } = await chat.sendMessage(messageContent)

     return response.text()
   }
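The hunks reference an isGemmaModel helper that is not part of this diff. A plausible sketch, assuming the check only inspects the model id; the real implementation in the codebase may differ:

// Hypothetical check; the actual helper is imported from the project's model config.
function isGemmaModel(model: { id: string }): boolean {
  return model.id.toLowerCase().includes('gemma')
}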