fix: add X-Api-Key headers #246

This commit is contained in:
kangfenmao 2024-10-28 23:33:20 +08:00
parent 6f9eb2ae75
commit 2143a6614e
2 changed files with 62 additions and 33 deletions

View File

@@ -17,6 +17,12 @@ export default abstract class BaseProvider {
    return host.endsWith('/') ? host : `${host}/v1/`
  }
public getHeaders() {
  // Some OpenAI-compatible backends authenticate with an
  // X-Api-Key header instead of (or alongside) Authorization,
  // so expose the configured provider key in that form.
  const { apiKey } = this.provider
  return { 'X-Api-Key': apiKey }
}
  public get keepAliveTime() {
    return this.provider.id === 'ollama' ? getOllamaKeepAliveTime() : undefined
  }

View File

@@ -138,16 +138,21 @@ export default class OpenAIProvider extends BaseProvider {
    const isSupportStreamOutput = streamOutput && this.isSupportStreamOutput(model.id)
    // @ts-ignore key is not typed
-   const stream = await this.sdk.chat.completions.create({
-     model: model.id,
-     messages: [isOpenAIo1 ? undefined : systemMessage, ...userMessages].filter(
-       Boolean
-     ) as ChatCompletionMessageParam[],
-     temperature: isOpenAIo1 ? 1 : assistant?.settings?.temperature,
-     max_tokens: maxTokens,
-     keep_alive: this.keepAliveTime,
-     stream: isSupportStreamOutput
-   })
+   const stream = await this.sdk.chat.completions.create(
+     {
+       model: model.id,
+       messages: [isOpenAIo1 ? undefined : systemMessage, ...userMessages].filter(
+         Boolean
+       ) as ChatCompletionMessageParam[],
+       temperature: isOpenAIo1 ? 1 : assistant?.settings?.temperature,
+       max_tokens: maxTokens,
+       keep_alive: this.keepAliveTime,
+       stream: isSupportStreamOutput
+     },
+     {
+       headers: this.getHeaders()
+     }
+   )
    if (!isSupportStreamOutput) {
      return onChunk({
@@ -177,12 +182,17 @@ export default class OpenAIProvider extends BaseProvider {
    ]
    // @ts-ignore key is not typed
-   const response = await this.sdk.chat.completions.create({
-     model: model.id,
-     messages: messages as ChatCompletionMessageParam[],
-     stream: false,
-     keep_alive: this.keepAliveTime
-   })
+   const response = await this.sdk.chat.completions.create(
+     {
+       model: model.id,
+       messages: messages as ChatCompletionMessageParam[],
+       stream: false,
+       keep_alive: this.keepAliveTime
+     },
+     {
+       headers: this.getHeaders()
+     }
+   )
    return response.choices[0].message?.content || ''
  }
@@ -213,13 +223,18 @@ export default class OpenAIProvider extends BaseProvider {
    }
    // @ts-ignore key is not typed
-   const response = await this.sdk.chat.completions.create({
-     model: model.id,
-     messages: [systemMessage, userMessage] as ChatCompletionMessageParam[],
-     stream: false,
-     keep_alive: this.keepAliveTime,
-     max_tokens: 1000
-   })
+   const response = await this.sdk.chat.completions.create(
+     {
+       model: model.id,
+       messages: [systemMessage, userMessage] as ChatCompletionMessageParam[],
+       stream: false,
+       keep_alive: this.keepAliveTime,
+       max_tokens: 1000
+     },
+     {
+       headers: this.getHeaders()
+     }
+   )
    return removeQuotes(response.choices[0].message?.content?.substring(0, 50) || '')
  }
@@ -227,14 +242,19 @@ export default class OpenAIProvider extends BaseProvider {
  public async generateText({ prompt, content }: { prompt: string; content: string }): Promise<string> {
    const model = getDefaultModel()
-   const response = await this.sdk.chat.completions.create({
-     model: model.id,
-     stream: false,
-     messages: [
-       { role: 'system', content: prompt },
-       { role: 'user', content }
-     ]
-   })
+   const response = await this.sdk.chat.completions.create(
+     {
+       model: model.id,
+       stream: false,
+       messages: [
+         { role: 'system', content: prompt },
+         { role: 'user', content }
+       ]
+     },
+     {
+       headers: this.getHeaders()
+     }
+   )
    return response.choices[0].message?.content || ''
  }
@@ -249,6 +269,7 @@ export default class OpenAIProvider extends BaseProvider {
    const response: any = await this.sdk.request({
      method: 'post',
      path: '/advice_questions',
+     headers: this.getHeaders(),
      body: {
        messages: messages.filter((m) => m.role === 'user').map((m) => ({ role: m.role, content: m.content })),
        model: model.id,
@@ -272,7 +293,9 @@ export default class OpenAIProvider extends BaseProvider {
    }
    try {
-     const response = await this.sdk.chat.completions.create(body as ChatCompletionCreateParamsNonStreaming)
+     const response = await this.sdk.chat.completions.create(body as ChatCompletionCreateParamsNonStreaming, {
+       headers: this.getHeaders()
+     })
      return {
        valid: Boolean(response?.choices[0].message),
@@ -294,7 +317,7 @@ export default class OpenAIProvider extends BaseProvider {
      query.type = 'text'
    }
-   const response = await this.sdk.models.list({ query })
+   const response = await this.sdk.models.list({ query, headers: this.getHeaders() })
    if (this.provider.id === 'github') {
      // @ts-ignore key is not typed