diff --git a/README.md b/README.md
index 9eb3619..3d81251 100644
--- a/README.md
+++ b/README.md
@@ -15,7 +15,7 @@ Features:
Currently available providers:
-- AI: OpenAI compatible
+- AI: OpenAI compatible, DeepSeek, OpenRouter, Ollama
- Web Search: Tavily (similar to Firecrawl, but with more free quota (1000 credits / month))
Please give a 🌟 Star if you like this project!
@@ -24,6 +24,13 @@ Please give a 🌟 Star if you like this project!
## Recent updates
+25/02/15
+
+- Added provider support for DeepSeek, OpenRouter and Ollama
+- Supported checking project updates
+- Supported regenerating reports
+- General fixes
+
25/02/14
- Supported reasoning models like DeepSeek R1
@@ -37,6 +44,7 @@ Please give a 🌟 Star if you like this project!
- Fixed "export as PDF" issues
25/02/12
+
- Added Chinese translation. The models will respond in the user's language.
- Various fixes
diff --git a/README_zh.md b/README_zh.md
index af0cb90..7209108 100644
--- a/README_zh.md
+++ b/README_zh.md
@@ -3,6 +3,7 @@
本项目是 https://github.com/dzhng/deep-research 的可视化版本,并做了一些改进。
特色:
+
- 🚀 **隐私安全**:所有配置和 API 请求均在浏览器端完成
- 🕙 **实时反馈**:流式传输 AI 响应并在界面实时展示
- 🌳 **搜索可视化**:使用树状结构展示研究过程,支持使用英文搜索词
@@ -11,14 +12,19 @@
当前支持的供应商:
-- AI 服务:任意兼容 OpenAPI 的供应商
+- AI 服务:OpenAI 兼容、DeepSeek、OpenRouter、Ollama
- 网络搜索:Tavily(类似 Firecrawl,提供每月 1000 次免费搜索)
-喜欢本项目请点 ⭐ 收藏!
+喜欢本项目请点 ⭐ 收藏!
-
+## 近期更新
-## 最近更新
+25/02/15
+
+- AI 提供商支持 DeepSeek、OpenRouter 和 Ollama
+- 支持检查项目更新
+- 支持重新生成报告
+- 一般性优化和改进
25/02/14
@@ -33,6 +39,7 @@
- 修复“导出 PDF”不可用的问题
25/02/12
+
- 添加中文支持。模型会自动使用用户的语言回答了。
- 修复一些 bug
diff --git a/components/ConfigManager.vue b/components/ConfigManager.vue
index 593838b..e27037e 100644
--- a/components/ConfigManager.vue
+++ b/components/ConfigManager.vue
@@ -24,11 +24,20 @@
{
label: t('settings.ai.providers.openaiCompatible.title'),
help: t('settings.ai.providers.openaiCompatible.description'),
- apiBasePlaceholder: t(
- 'settings.ai.providers.openaiCompatible.apiBasePlaceholder',
- ),
value: 'openai-compatible',
},
+ {
+ label: 'DeepSeek',
+ value: 'deepseek',
+ },
+ {
+ label: 'OpenRouter',
+ value: 'openrouter',
+ },
+ {
+ label: 'Ollama',
+ value: 'ollama',
+ },
])
const selectedAiProvider = computed(() =>
aiProviderOptions.value.find((o) => o.value === config.value.ai.provider),
@@ -110,15 +119,22 @@
{{ $t('settings.ai.provider') }}
- {{ selectedAiProvider.help }}
+
+ {{ selectedAiProvider.help }}
+
-
+
-
-
+
+
+
diff --git a/components/ResearchFeedback.vue b/components/ResearchFeedback.vue
index a43dec1..4d584ed 100644
--- a/components/ResearchFeedback.vue
+++ b/components/ResearchFeedback.vue
@@ -94,7 +94,7 @@
}
} catch (e: any) {
console.error('Error getting feedback:', e)
- if (e.message.includes('Failed to fetch')) {
+ if (e.message?.includes('Failed to fetch')) {
e.message += `\n${t('error.requestBlockedByCORS')}`
}
error.value = t('modelFeedback.error', [e.message])
diff --git a/composables/useAiProvider.ts b/composables/useAiProvider.ts
index c880098..f1e2c0d 100644
--- a/composables/useAiProvider.ts
+++ b/composables/useAiProvider.ts
@@ -1,19 +1,45 @@
import { createDeepSeek } from '@ai-sdk/deepseek'
-import { extractReasoningMiddleware, wrapLanguageModel } from 'ai'
+import { createOpenRouter } from '@openrouter/ai-sdk-provider'
+import { createOpenAI } from '@ai-sdk/openai'
+import {
+ extractReasoningMiddleware,
+ wrapLanguageModel,
+ type LanguageModelV1,
+} from 'ai'
export const useAiModel = () => {
const config = useConfigStore()
+ let model: LanguageModelV1
+
switch (config.config.ai.provider) {
- case 'openai-compatible':
- const deepseek = createDeepSeek({
+ case 'openrouter': {
+ const openRouter = createOpenRouter({
apiKey: config.config.ai.apiKey,
baseURL: config.aiApiBase,
})
- return wrapLanguageModel({
- model: deepseek(config.config.ai.model),
- middleware: extractReasoningMiddleware({ tagName: 'think' }),
+ model = openRouter(config.config.ai.model, {
+ includeReasoning: true,
})
- default:
- throw new Error(`Unknown AI provider: ${config.config.ai.provider}`)
+      break
+    }
+    case 'deepseek': {
+      // note: each case must end with break — otherwise control falls through
+      const deepSeek = createDeepSeek({ apiKey: config.config.ai.apiKey, baseURL: config.aiApiBase })
+      model = deepSeek(config.config.ai.model)
+      break
+    }
+ case 'openai-compatible':
+ default: {
+ const openai = createOpenAI({
+ apiKey: config.config.ai.apiKey,
+ baseURL: config.aiApiBase,
+ })
+ model = openai(config.config.ai.model)
+ }
}
+
+ return wrapLanguageModel({
+ model,
+ middleware: extractReasoningMiddleware({ tagName: 'think' }),
+ })
}
diff --git a/i18n/en.json b/i18n/en.json
index 047c846..121e9b6 100644
--- a/i18n/en.json
+++ b/i18n/en.json
@@ -22,8 +22,7 @@
"providers": {
"openaiCompatible": {
"title": "OpenAI Compatible",
- "description": "Currently only supports OpenAI compatible providers, e.g. Gemini, Together AI, DeepSeek, SiliconCloud, ...",
- "apiBasePlaceholder": "https://api.openai.com/v1"
+ "description": "e.g. OpenAI, Gemini, Together AI, SiliconCloud, ...\n(Note: DeepSeek, OpenRouter and Ollama now have their own providers.)"
}
}
},
diff --git a/i18n/zh.json b/i18n/zh.json
index 031f4a4..d4e28d3 100644
--- a/i18n/zh.json
+++ b/i18n/zh.json
@@ -21,9 +21,8 @@
"contextSizeHelp": "上下文的最大大小(以 token 计)。这是将发送给模型的最大 token 数量。默认值为 128,000 个 token。",
"providers": {
"openaiCompatible": {
- "title": "OpenAI Compatiible",
- "description": "目前仅支持与 OpenAI 兼容的提供商,如 Gemini、Together AI、DeepSeek、SiliconCloud……",
- "apiBasePlaceholder": "https://api.openai.com/v1"
+ "title": "OpenAI Compatible",
+ "description": "如 OpenAI、Gemini、Together AI、SiliconCloud……\n注:DeepSeek、OpenRouter 和 Ollama 现在已经有了独立选项,请切换使用。"
}
}
},
diff --git a/lib/feedback.ts b/lib/feedback.ts
index 29b9261..17e79e6 100644
--- a/lib/feedback.ts
+++ b/lib/feedback.ts
@@ -38,7 +38,7 @@ export function generateFeedback({
prompt,
onError({ error }) {
console.error(`generateFeedback`, error)
- throw error
+ throw error instanceof Error ? error : new Error(String(error))
},
})
diff --git a/package.json b/package.json
index 4edda1b..f53698f 100644
--- a/package.json
+++ b/package.json
@@ -20,6 +20,7 @@
"@nuxt/ui": "3.0.0-alpha.12",
"@nuxtjs/color-mode": "^3.5.2",
"@nuxtjs/i18n": "9.2.0",
+ "@openrouter/ai-sdk-provider": "^0.2.1",
"@pinia/nuxt": "^0.10.1",
"@tailwindcss/typography": "^0.5.16",
"@tavily/core": "^0.3.1",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index b7bd628..9fd5ebc 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -32,6 +32,9 @@ importers:
'@nuxtjs/i18n':
specifier: 9.2.0
version: 9.2.0(@vue/compiler-dom@3.5.13)(eslint@9.20.1(jiti@2.4.2))(magicast@0.3.5)(rollup@4.34.6)(typescript@5.7.3)(vue@3.5.13(typescript@5.7.3))
+ '@openrouter/ai-sdk-provider':
+ specifier: ^0.2.1
+ version: 0.2.1(zod@3.24.2)
'@pinia/nuxt':
specifier: ^0.10.1
version: 0.10.1(magicast@0.3.5)(pinia@3.0.1(typescript@5.7.3)(vue@3.5.13(typescript@5.7.3)))
@@ -108,6 +111,15 @@ packages:
peerDependencies:
zod: ^3.0.0
+ '@ai-sdk/provider-utils@2.1.5':
+ resolution: {integrity: sha512-PcNR7E4ovZGV/J47gUqaFlvzorgca6uUfN5WzfXJSFWeOeLunN+oxRVwgUOwj0zbmO0yGQTHQD+FHVw8s3Rz8w==}
+ engines: {node: '>=18'}
+ peerDependencies:
+ zod: ^3.0.0
+ peerDependenciesMeta:
+ zod:
+ optional: true
+
'@ai-sdk/provider-utils@2.1.8':
resolution: {integrity: sha512-1j9niMUAFlCBdYRYJr1yoB5kwZcRFBVuBiL1hhrf0ONFNrDiJYA6F+gROOuP16NHhezMfTo60+GeeV1xprHFjg==}
engines: {node: '>=18'}
@@ -117,6 +129,10 @@ packages:
zod:
optional: true
+ '@ai-sdk/provider@1.0.6':
+ resolution: {integrity: sha512-hwj/gFNxpDgEfTaYzCYoslmw01IY9kWLKl/wf8xuPvHtQIzlfXWmmUwc8PnCwxyt8cKzIuV0dfUghCf68HQ0SA==}
+ engines: {node: '>=18'}
+
'@ai-sdk/provider@1.0.7':
resolution: {integrity: sha512-q1PJEZ0qD9rVR+8JFEd01/QM++csMT5UVwYXSN2u54BrVw/D8TZLTeg2FEfKK00DgAx0UtWd8XOhhwITP9BT5g==}
engines: {node: '>=18'}
@@ -777,6 +793,12 @@ packages:
resolution: {integrity: sha512-R7azgNji8jIZdimlylK1CU4plO1OjRPZduTyjS9SHHasMCzxrPM+LBJLRzjt9NUNatquLeCcVfHAYvxIxPHCmg==}
engines: {node: ^14.16.0 || >=16.11.0}
+ '@openrouter/ai-sdk-provider@0.2.1':
+ resolution: {integrity: sha512-Iz+wpGR6001OfbYPp+VmXFZBNpF6a3uN5gzgEBkNCqwZUzuYANO03d4eSgqFrDvfsenG7eE9hpKHB4zIg8YmKA==}
+ engines: {node: '>=18'}
+ peerDependencies:
+ zod: ^3.0.0
+
'@opentelemetry/api@1.9.0':
resolution: {integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==}
engines: {node: '>=8.0.0'}
@@ -4293,6 +4315,15 @@ snapshots:
'@ai-sdk/provider-utils': 2.1.8(zod@3.24.2)
zod: 3.24.2
+ '@ai-sdk/provider-utils@2.1.5(zod@3.24.2)':
+ dependencies:
+ '@ai-sdk/provider': 1.0.6
+ eventsource-parser: 3.0.0
+ nanoid: 3.3.8
+ secure-json-parse: 2.7.0
+ optionalDependencies:
+ zod: 3.24.2
+
'@ai-sdk/provider-utils@2.1.8(zod@3.24.2)':
dependencies:
'@ai-sdk/provider': 1.0.7
@@ -4302,6 +4333,10 @@ snapshots:
optionalDependencies:
zod: 3.24.2
+ '@ai-sdk/provider@1.0.6':
+ dependencies:
+ json-schema: 0.4.0
+
'@ai-sdk/provider@1.0.7':
dependencies:
json-schema: 0.4.0
@@ -5329,6 +5364,12 @@ snapshots:
- typescript
- vue
+ '@openrouter/ai-sdk-provider@0.2.1(zod@3.24.2)':
+ dependencies:
+ '@ai-sdk/provider': 1.0.6
+ '@ai-sdk/provider-utils': 2.1.5(zod@3.24.2)
+ zod: 3.24.2
+
'@opentelemetry/api@1.9.0': {}
'@parcel/watcher-android-arm64@2.5.1':
diff --git a/public/version.json b/public/version.json
index 578757c..032eb1f 100644
--- a/public/version.json
+++ b/public/version.json
@@ -1,3 +1,3 @@
{
- "version": "1.0.4"
+ "version": "1.0.5"
}
\ No newline at end of file
diff --git a/stores/config.ts b/stores/config.ts
index 050c9c8..009431b 100644
--- a/stores/config.ts
+++ b/stores/config.ts
@@ -1,7 +1,12 @@
import { skipHydrate } from 'pinia'
import type { Locale } from '~/components/LangSwitcher.vue'
-export type ConfigAiProvider = 'openai-compatible'
+export type ConfigAiProvider =
+ | 'openai-compatible'
+ | 'openrouter'
+ | 'deepseek'
+ | 'ollama'
+
export interface ConfigAi {
provider: ConfigAiProvider
apiKey?: string
@@ -39,6 +44,15 @@ export const useConfigStore = defineStore('config', () => {
)
const aiApiBase = computed(() => {
+ if (config.value.ai.provider === 'openrouter') {
+ return config.value.ai.apiBase || 'https://openrouter.ai/api/v1'
+ }
+ if (config.value.ai.provider === 'deepseek') {
+ return config.value.ai.apiBase || 'https://api.deepseek.com/v1'
+ }
+ if (config.value.ai.provider === 'ollama') {
+ return config.value.ai.apiBase || 'http://localhost:11434/v1'
+ }
return config.value.ai.apiBase || 'https://api.openai.com/v1'
})