From 4b75c254a588bdcf03c0ccac128adfa0d84e6457 Mon Sep 17 00:00:00 2001
From: AnotiaWang
Date: Wed, 12 Feb 2025 00:05:27 +0800
Subject: [PATCH] feat: config manager modal

---
 components/ConfigManager.vue | 108 +++++++++++++++++++++++++++++++++++
 components/PasswordInput.vue |  25 ++++++++
 composables/useAiProvider.ts |  15 +++++
 composables/useTavily.ts     |   9 +++
 lib/ai/providers.ts          |  24 ++------
 lib/deep-research.ts         |  38 ++++++------
 lib/feedback.ts              |   4 +-
 pages/index.vue              |  40 +++++++++++--
 stores/config.ts             |  37 ++++++++++--
 9 files changed, 251 insertions(+), 49 deletions(-)
 create mode 100644 components/ConfigManager.vue
 create mode 100644 components/PasswordInput.vue
 create mode 100644 composables/useAiProvider.ts
 create mode 100644 composables/useTavily.ts

diff --git a/components/ConfigManager.vue b/components/ConfigManager.vue
new file mode 100644
index 0000000..8cd7412
--- /dev/null
+++ b/components/ConfigManager.vue
@@ -0,0 +1,108 @@
[The 108 added lines were lost in extraction: the component's <template> and
<script setup> blocks were stripped as markup. See the hedged sketch after
the patch.]

diff --git a/components/PasswordInput.vue b/components/PasswordInput.vue
new file mode 100644
index 0000000..c337ab0
--- /dev/null
+++ b/components/PasswordInput.vue
@@ -0,0 +1,25 @@
[The 25 added lines were stripped in the same way; see the sketch after the
patch.]

diff --git a/composables/useAiProvider.ts b/composables/useAiProvider.ts
new file mode 100644
index 0000000..3de3a40
--- /dev/null
+++ b/composables/useAiProvider.ts
@@ -0,0 +1,15 @@
+import { createOpenAI } from '@ai-sdk/openai'
+
+export const useAiModel = () => {
+  const config = useConfigStore()
+  switch (config.config.ai.provider) {
+    case 'openai-compatible':
+      const openai = createOpenAI({
+        apiKey: config.config.ai.apiKey,
+        baseURL: config.config.ai.apiBase || 'https://api.openai.com/v1',
+      })
+      return openai(config.config.ai.model)
+    default:
+      throw new Error(`Unknown AI provider: ${config.config.ai.provider}`)
+  }
+}
diff --git a/composables/useTavily.ts b/composables/useTavily.ts
new file mode 100644
index 0000000..c41b26f
--- /dev/null
+++ b/composables/useTavily.ts
@@ -0,0 +1,9 @@
+import { tavily } from '@tavily/core'
+
+export const useTavily = () => {
+  const config = useConfigStore()
+  const tvly = tavily({
+    apiKey: config.config.webSearch.apiKey,
+  })
+  return tvly
+}
diff --git a/lib/ai/providers.ts b/lib/ai/providers.ts
index 7045d0c..5e938bd 100644
--- a/lib/ai/providers.ts
+++ b/lib/ai/providers.ts
@@ -3,33 +3,19 @@
 import { getEncoding } from 'js-tiktoken'
 
 import { RecursiveCharacterTextSplitter } from './text-splitter'
 
-// Providers
-const openai = createOpenAI({
-  apiKey: import.meta.env.VITE_OPENAI_API_KEY!,
-  baseURL: import.meta.env.VITE_OPENAI_ENDPOINT || 'https://api.openai.com/v1',
-})
-
-const customModel = import.meta.env.VITE_OPENAI_MODEL || 'o3-mini'
-
-// Models
-
-export const o3MiniModel = openai(customModel, {
-  // reasoningEffort: customModel.startsWith('o') ? 'medium' : undefined,
-  structuredOutputs: true,
-})
-
 const MinChunkSize = 140
 const encoder = getEncoding('o200k_base')
 
 // trim prompt to maximum context size
-export function trimPrompt(
-  prompt: string,
-  contextSize = Number(import.meta.env.VITE_CONTEXT_SIZE) || 128_000,
-) {
+export function trimPrompt(prompt: string, contextSize?: number) {
   if (!prompt) {
     return ''
   }
 
+  if (!contextSize) {
+    contextSize = useConfigStore().config.ai.contextSize || 128_000
+  }
+
   const length = encoder.encode(prompt).length
   if (length <= contextSize) {
     return prompt
diff --git a/lib/deep-research.ts b/lib/deep-research.ts
index 2737698..bd688e7 100644
--- a/lib/deep-research.ts
+++ b/lib/deep-research.ts
@@ -1,13 +1,15 @@
-import { generateObject, streamText } from 'ai'
+import { streamText } from 'ai'
 import { compact } from 'lodash-es'
 import pLimit from 'p-limit'
 import { z } from 'zod'
 
 import { parseStreamingJson, type DeepPartial } from '~/utils/json'
-import { o3MiniModel, trimPrompt } from './ai/providers'
+import { trimPrompt } from './ai/providers'
 import { systemPrompt } from './prompt'
 import zodToJsonSchema from 'zod-to-json-schema'
-import { tavily, type TavilySearchResponse } from '@tavily/core'
+import { type TavilySearchResponse } from '@tavily/core'
+import { useTavily } from '~/composables/useTavily'
+import { useAiModel } from '~/composables/useAiProvider'
 
 export type ResearchResult = {
   learnings: string[]
@@ -53,16 +55,6 @@ export type ResearchStep =
 // increase this if you have higher API rate limits
 const ConcurrencyLimit = 2
 
-// Initialize Firecrawl with optional API key and optional base url
-
-// const firecrawl = new FirecrawlApp({
-//   apiKey: process.env.FIRECRAWL_KEY ?? '',
-//   apiUrl: process.env.FIRECRAWL_BASE_URL,
-// });
-const tvly = tavily({
-  apiKey: import.meta.env.VITE_TAVILY_API_KEY ?? '',
-})
-
 /**
  * Schema for {@link generateSearchQueries} without dynamic descriptions
  */
@@ -105,12 +97,14 @@ export function generateSearchQueries({
   const prompt = [
     `Given the following prompt from the user, generate a list of SERP queries to research the topic. Return a maximum of ${numQueries} queries, but feel free to return less if the original prompt is clear. Make sure each query is unique and not similar to each other: <prompt>${query}</prompt>\n\n`,
     learnings
-      ? `Here are some learnings from previous research, use them to generate more specific queries: ${learnings.join('\n')}`
+      ? `Here are some learnings from previous research, use them to generate more specific queries: ${learnings.join(
+          '\n',
+        )}`
       : '',
     `You MUST respond in JSON with the following schema: ${jsonSchema}`,
   ].join('\n\n')
   return streamText({
-    model: o3MiniModel,
+    model: useAiModel(),
     system: systemPrompt(),
     prompt,
   })
@@ -147,12 +141,14 @@ function processSearchResult({
   )
   const prompt = [
     `Given the following contents from a SERP search for the query <query>${query}</query>, generate a list of learnings from the contents. Return a maximum of ${numLearnings} learnings, but feel free to return less if the contents are clear. Make sure each learning is unique and not similar to each other. The learnings should be concise and to the point, as detailed and information dense as possible. Make sure to include any entities like people, places, companies, products, things, etc in the learnings, as well as any exact metrics, numbers, or dates. The learnings will be used to research the topic further.`,
-    `<contents>${contents.map((content) => `<content>\n${content}\n</content>`).join('\n')}</contents>`,
+    `<contents>${contents
+      .map((content) => `<content>\n${content}\n</content>`)
+      .join('\n')}</contents>`,
     `You MUST respond in JSON with the following schema: ${jsonSchema}`,
   ].join('\n\n')
 
   return streamText({
-    model: o3MiniModel,
+    model: useAiModel(),
     abortSignal: AbortSignal.timeout(60_000),
     system: systemPrompt(),
     prompt,
@@ -179,7 +175,7 @@ export function writeFinalReport({
   ].join('\n\n')
 
   return streamText({
-    model: o3MiniModel,
+    model: useAiModel(),
     system: systemPrompt(),
     prompt: _prompt,
   })
@@ -263,7 +259,7 @@ export async function deepResearch({
         //   limit: 5,
         //   scrapeOptions: { formats: ['markdown'] },
         // });
-        const result = await tvly.search(searchQuery.query, {
+        const result = await useTavily().search(searchQuery.query, {
           maxResults: 5,
         })
         console.log(
@@ -331,7 +327,9 @@ export async function deepResearch({
           const nextQuery = `
           Previous research goal: ${searchQuery.researchGoal}
-          Follow-up research directions: ${searchResult.followUpQuestions.map((q) => `\n${q}`).join('')}
+          Follow-up research directions: ${searchResult.followUpQuestions
+            .map((q) => `\n${q}`)
+            .join('')}
           `.trim()
 
           return deepResearch({
diff --git a/lib/feedback.ts b/lib/feedback.ts
index 13a8642..36bd870 100644
--- a/lib/feedback.ts
+++ b/lib/feedback.ts
@@ -2,8 +2,8 @@ import { streamText } from 'ai'
 import { z } from 'zod'
 import { zodToJsonSchema } from 'zod-to-json-schema'
 
-import { o3MiniModel } from './ai/providers'
 import { systemPrompt } from './prompt'
+import { useAiModel } from '~/composables/useAiProvider'
 
 type PartialFeedback = DeepPartial>
@@ -32,7 +32,7 @@ export function generateFeedback({
   ].join('\n\n')
 
   const stream = streamText({
-    model: o3MiniModel,
+    model: useAiModel(),
     system: systemPrompt(),
     prompt,
   })
diff --git a/pages/index.vue b/pages/index.vue
index 6542a8c..91b2cc4 100644
--- a/pages/index.vue
+++ b/pages/index.vue
@@ -2,12 +2,26 @@
[This hunk's body was damaged in extraction: the template's Vue/HTML tags
were stripped, leaving bare diff markers. The recoverable changes are the
reflowed heading and a new intro block, shown below without their original
markup; a <ConfigManager /> element is presumably mounted here too, since
the script section references configManagerRef — see the sketch after the
patch.]
-      Deep Research Assistant
+      Deep Research Assistant
+
+      This is a web UI for dzhng/deep-research. It features streaming AI
+      responses for realtime feedback, and visualization of the research
+      process using a tree structure.
+
+      All API requests are sent directly from your browser; no data is
+      stored remotely.
@@ ... @@ [script-section hunk: its header and some added setup lines were
lost in extraction; the surviving lines add a ref for the new modal]
+const configManagerRef = ref<InstanceType<typeof ConfigManager>>()
 const formRef = ref<InstanceType<typeof ResearchForm>>()
 const feedbackRef = ref<InstanceType<typeof ResearchFeedback>>()
 const deepResearchRef = ref<InstanceType<typeof DeepResearch>>()
@@ -51,14 +70,27 @@ const researchResult = ref<ResearchResult>()
 
 function getCombinedQuery() {
-  return `
-Initial Query: ${formRef.value?.form.query}
+  return `Initial Query: ${formRef.value?.form.query}
 Follow-up Questions and Answers:
-${feedback.value.map((qa) => `Q: ${qa.assistantQuestion}\nA: ${qa.userAnswer}`).join('\n')}
+${feedback.value
+  .map((qa) => `Q: ${qa.assistantQuestion}\nA: ${qa.userAnswer}`)
+  .join('\n')}
 `
 }
 
 async function generateFeedback(data: ResearchInputData) {
+  const aiConfig = config.config.ai
+  const webSearchConfig = config.config.webSearch
+
+  if (!aiConfig.model || !aiConfig.apiKey || !webSearchConfig.apiKey) {
+    toast.add({
+      title: 'Config not set',
+      description: 'Please configure AI and Web Search settings.',
+      color: 'error',
+    })
+    configManagerRef.value?.show()
+    return
+  }
   feedbackRef.value?.getFeedback(data.query, data.numQuestions)
 }
diff --git a/stores/config.ts b/stores/config.ts
index 3b3fe19..401c598 100644
--- a/stores/config.ts
+++ b/stores/config.ts
@@ -1,5 +1,34 @@
-export interface Config {
-  ai: {
-    provider: 'openai-compatible'
-  }
+import {} from '@pinia/nuxt'
+
+export type ConfigAiProvider = 'openai-compatible'
+export interface ConfigAi {
+  provider: ConfigAiProvider
+  apiKey?: string
+  apiBase?: string
+  model: string
+  contextSize?: number
 }
+export interface ConfigWebSearch {
+  provider: 'tavily'
+  apiKey?: string
+}
+
+export interface Config {
+  ai: ConfigAi
+  webSearch: ConfigWebSearch
+}
+
+export const useConfigStore = defineStore('config', () => {
+  const config = useLocalStorage<Config>('deep-research-config', {
+    ai: {
+      provider: 'openai-compatible',
+      model: '',
+      contextSize: 128_000,
+    },
+    webSearch: {
+      provider: 'tavily',
+    },
+  })
+
+  return { config }
+})
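
The 25-line body of components/PasswordInput.vue did not survive extraction
(its placeholder is marked in the patch above). As a rough guide only, here
is a minimal sketch of what such a component typically contains, written
with plain HTML controls; the showPlain flag and the Show/Hide button are
assumptions, and the committed file may use the project's UI library
instead:

<!-- PasswordInput.vue — hypothetical sketch, not the committed file -->
<script setup lang="ts">
import { ref } from 'vue'

// v-model passthrough for the secret value (Vue 3.4+ defineModel macro)
const model = defineModel<string>()

// whether the secret is currently shown in clear text
const showPlain = ref(false)
</script>

<template>
  <div>
    <input
      v-model="model"
      :type="showPlain ? 'text' : 'password'"
      autocomplete="off"
    />
    <button type="button" @click="showPlain = !showPlain">
      {{ showPlain ? 'Hide' : 'Show' }}
    </button>
  </div>
</template>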
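
The 108-line body of components/ConfigManager.vue was stripped the same way,
but the rest of the patch pins down its contract: pages/index.vue calls
configManagerRef.value?.show(), and the fields it edits must match the
Config shape persisted by stores/config.ts. A minimal sketch under those
constraints, again with plain HTML controls and hypothetical labels:

<!-- ConfigManager.vue — hypothetical sketch, not the committed file -->
<script setup lang="ts">
import { ref } from 'vue'

// Pinia store from stores/config.ts (auto-imported in a Nuxt project)
const config = useConfigStore()
const isOpen = ref(false)

// pages/index.vue calls configManagerRef.value?.show(), so the component
// must expose a show() method
defineExpose({
  show: () => (isOpen.value = true),
})
</script>

<template>
  <dialog :open="isOpen">
    <h2>Settings</h2>

    <label>
      AI provider
      <select v-model="config.config.ai.provider">
        <option value="openai-compatible">OpenAI compatible</option>
      </select>
    </label>
    <label>
      API key
      <PasswordInput v-model="config.config.ai.apiKey" />
    </label>
    <label>
      API base URL
      <input v-model="config.config.ai.apiBase" placeholder="https://api.openai.com/v1" />
    </label>
    <label>
      Model
      <input v-model="config.config.ai.model" />
    </label>
    <label>
      Context size
      <input v-model.number="config.config.ai.contextSize" type="number" />
    </label>
    <label>
      Tavily API key
      <PasswordInput v-model="config.config.webSearch.apiKey" />
    </label>

    <button type="button" @click="isOpen = false">Done</button>
  </dialog>
</template>

Because the store's config ref is backed by useLocalStorage, edits made
through v-model persist as they are typed; a separate save step is not
strictly required.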