feat: support DeepSeek, OpenRouter & Ollama providers
This commit is contained in:
@@ -1,19 +1,45 @@
|
||||
import { createDeepSeek } from '@ai-sdk/deepseek'
|
||||
import { extractReasoningMiddleware, wrapLanguageModel } from 'ai'
|
||||
import { createOpenRouter } from '@openrouter/ai-sdk-provider'
|
||||
import { createOpenAI } from '@ai-sdk/openai'
|
||||
import {
|
||||
extractReasoningMiddleware,
|
||||
wrapLanguageModel,
|
||||
type LanguageModelV1,
|
||||
} from 'ai'
|
||||
|
||||
export const useAiModel = () => {
|
||||
const config = useConfigStore()
|
||||
let model: LanguageModelV1
|
||||
|
||||
switch (config.config.ai.provider) {
|
||||
case 'openai-compatible':
|
||||
const deepseek = createDeepSeek({
|
||||
case 'openrouter': {
|
||||
const openRouter = createOpenRouter({
|
||||
apiKey: config.config.ai.apiKey,
|
||||
baseURL: config.aiApiBase,
|
||||
})
|
||||
return wrapLanguageModel({
|
||||
model: deepseek(config.config.ai.model),
|
||||
middleware: extractReasoningMiddleware({ tagName: 'think' }),
|
||||
model = openRouter(config.config.ai.model, {
|
||||
includeReasoning: true,
|
||||
})
|
||||
default:
|
||||
throw new Error(`Unknown AI provider: ${config.config.ai.provider}`)
|
||||
}
|
||||
case 'deepseek': {
|
||||
const deepSeek = createDeepSeek({
|
||||
apiKey: config.config.ai.apiKey,
|
||||
baseURL: config.aiApiBase,
|
||||
})
|
||||
model = deepSeek(config.config.ai.model)
|
||||
}
|
||||
case 'openai-compatible':
|
||||
default: {
|
||||
const openai = createOpenAI({
|
||||
apiKey: config.config.ai.apiKey,
|
||||
baseURL: config.aiApiBase,
|
||||
})
|
||||
model = openai(config.config.ai.model)
|
||||
}
|
||||
}
|
||||
|
||||
return wrapLanguageModel({
|
||||
model,
|
||||
middleware: extractReasoningMiddleware({ tagName: 'think' }),
|
||||
})
|
||||
}
|
||||
|
Reference in New Issue
Block a user