This commit is contained in:
AnotiaWang
2025-02-11 09:00:04 +08:00
commit 2cbd20a1da
29 changed files with 9894 additions and 0 deletions

24
.gitignore vendored Normal file
View File

@ -0,0 +1,24 @@
# Nuxt dev/build outputs
.output
.data
.nuxt
.nitro
.cache
dist
# Node dependencies
node_modules
# Logs
logs
*.log
# Misc
.DS_Store
.fleet
.idea
# Local env files
.env
.env.*
!.env.example

82
README.md Normal file
View File

@ -0,0 +1,82 @@
# Deep Research Web UI
This is a web UI for https://github.com/dzhng/deep-research. It supports streaming AI responses and visualization of the research process using a tree structure.
Note: The project is currently WIP, expect bugs. README will be updated once the project is usable.
Rough preview of the UI:
<img width="1087" alt="image" src="https://github.com/user-attachments/assets/4bb5b722-0300-4d4f-bb01-fc1ed2404442" />
## Setup
Make sure to install dependencies:
```bash
# npm
npm install
# pnpm
pnpm install
# yarn
yarn install
# bun
bun install
```
## Development Server
Start the development server on `http://localhost:3000`:
```bash
# npm
npm run dev
# pnpm
pnpm dev
# yarn
yarn dev
# bun
bun run dev
```
## Production
Build the application for production:
```bash
# npm
npm run build
# pnpm
pnpm build
# yarn
yarn build
# bun
bun run build
```
Locally preview production build:
```bash
# npm
npm run preview
# pnpm
pnpm preview
# yarn
yarn preview
# bun
bun run preview
```
Check out the [deployment documentation](https://nuxt.com/docs/getting-started/deployment) for more information.

7
app.config.ts Normal file
View File

@ -0,0 +1,7 @@
// Global Nuxt app config: sets the Nuxt UI primary color palette to violet.
export default defineAppConfig({
  ui: {
    colors: {
      primary: 'violet',
    },
  },
})

49
app.vue Normal file
View File

@ -0,0 +1,49 @@
<template>
  <!-- Root app shell: Nuxt UI provider (locale forced to zh-CN) wrapping the routed page. -->
  <UApp :locale="zh_cn">
    <NuxtLayout>
      <NuxtPage />
    </NuxtLayout>
  </UApp>
</template>

<script setup lang="ts">
import { zh_cn } from '@nuxt/ui/locale'

// const colorMode = useColorMode()

// TODO
// NOTE(review): the title/meta below describe "SiliconCloud Stats", but this
// repo is a deep-research web UI (see README) — presumably copied from another
// project; confirm and update the head metadata.
useHead({
  title: 'SiliconCloud Stats - SiliconCloud 平台使用情况分析工具',
  meta: [
    { name: 'description', content: 'SiliconCloud Stats 是一个用于分析 SiliconCloud 平台使用情况的工具。通过输入 Cookie可以拉取 SiliconCloud 控制台 API 来实现各种分析功能,如 token 用量分析等。' },
    { name: 'keywords', content: 'SiliconCloud, 数据分析, token 用量, API 分析, 控制台工具' },
    // Open Graph tags
    { property: 'og:title', content: 'SiliconCloud Stats - SiliconCloud 平台使用情况分析工具' },
    { property: 'og:description', content: 'SiliconCloud Stats 是一个用于分析 SiliconCloud 平台使用情况的工具。通过输入 Cookie可以拉取 SiliconCloud 控制台 API 来实现各种分析功能,如 token 用量分析等。' },
    { property: 'og:type', content: 'website' },
    { property: 'og:image', content: '/images/readme-showcase-total.webp' },
    // Twitter Card tags
    { name: 'twitter:card', content: 'summary_large_image' },
    { name: 'twitter:title', content: 'SiliconCloud Stats - SiliconCloud 平台使用情况分析工具' },
    { name: 'twitter:description', content: 'SiliconCloud Stats 是一个用于分析 SiliconCloud 平台使用情况的工具。' },
    { name: 'twitter:image', content: '/images/readme-showcase-total.webp' },
  ],
  // script: [
  //   {
  //     defer: true,
  //     src: 'https://umami.ataw.top/script.js',
  //     'data-website-id': '6be07672-962f-42fe-85fc-92e96a5f30d6',
  //   },
  // ],
})

// onMounted(() => {
//   watchEffect(() => {
//     if (colorMode.value === 'dark') {
//       document.documentElement.setAttribute('theme-mode', 'dark')
//     } else {
//       document.documentElement.removeAttribute('theme-mode')
//     }
//   })
// })
</script>

9
assets/css/main.css Normal file
View File

@ -0,0 +1,9 @@
/* Don't do `@import "tailwindcss"`, do this instead: */
@layer theme, base, components, utilities;
@import "tailwindcss/theme" layer(theme) theme(static);
@import "tailwindcss/preflight" layer(base);
@import "tailwindcss/utilities" layer(utilities);
/* Then import "@nuxt/ui": */
@import "@nuxt/ui";

View File

@ -0,0 +1,31 @@
<script setup lang="ts">
import { usePreferredColorScheme } from '@vueuse/core'

const colorMode = useColorMode()
const preferredColor = usePreferredColorScheme()

// Effective color scheme: resolves the 'system' preference to the browser's
// preferred scheme so the toggle icon always reflects what is shown.
const preference = computed(() => {
  // Defaults to auto, following the user's browser setting
  if (colorMode.preference === 'system') {
    return preferredColor.value
  }
  return colorMode.preference
})

// Flip between explicit light and dark (leaving 'system' once toggled).
const toggleColorMode = () => {
  colorMode.preference = preference.value === 'light' ? 'dark' : 'light'
}
</script>

<template>
  <div>
    <!-- Sun icon in dark mode, moon icon in light mode. -->
    <UButton
      :icon="
        preference === 'dark'
          ? 'i-heroicons-sun-20-solid'
          : 'i-heroicons-moon-20-solid'
      "
      color="primary"
      @click="toggleColorMode"
    />
  </div>
</template>

354
components/DeepResearch.vue Normal file
View File

@ -0,0 +1,354 @@
<template>
<div class="h-screen flex">
<!-- 左侧树形图 -->
<div class="w-1/2 h-full bg-transparent" ref="treeContainer">
<div v-if="!modelValue.root" class="h-full flex items-center justify-center text-gray-500 dark:text-gray-400">
<div class="text-center">
<div class="text-lg mb-2">No research data</div>
<div class="text-sm">Please answer and submit the questions to start research</div>
</div>
</div>
<svg
v-else
width="100%"
height="100%"
@click="handleSvgClick"
class="[&_.link]:stroke-2 [&_.link]:fill-none [&_.link]:stroke-gray-400 dark:[&_.link]:stroke-gray-600 [&_.link.processing]:stroke-blue-500 [&_.link.complete]:stroke-green-500 [&_.link.error]:stroke-red-500"
>
<g :transform="`translate(${margin.left}, ${margin.top})`">
<!-- 连接线 -->
<g class="links">
<path v-for="link in treeData.links" :key="link.id" :d="link.path" class="link" :class="link.status" />
</g>
<!-- 节点 -->
<g class="nodes">
<g
v-for="node in treeData.nodes"
:key="node.id"
class="node cursor-pointer transition-all"
:class="[node.status, { active: selectedNode?.id === node.id }]"
:transform="`translate(${node.x}, ${node.y})`"
@click.stop="handleNodeClick(node)"
@mouseover="handleNodeHover(node)"
>
<circle
r="20"
class="fill-white dark:fill-gray-700 stroke-2 stroke-gray-400 dark:stroke-gray-500 [.processing_&]:stroke-blue-500 [.complete_&]:stroke-green-500 [.error_&]:stroke-red-500 [.active_&]:stroke-[3px] [.active_&]:fill-gray-100 dark:[.active_&]:fill-gray-800"
/>
<text dy=".35em" text-anchor="middle" class="fill-gray-900 dark:fill-gray-100 text-sm select-none">
{{ node.depth }}
</text>
</g>
</g>
</g>
</svg>
</div>
<!-- 右侧内容区 -->
<div class="w-1/2 h-full p-4 border-l border-gray-200 dark:border-gray-700 overflow-y-auto">
<div v-if="selectedNode" class="sticky top-0">
<h3 class="text-lg font-bold mb-2 dark:text-gray-200">Search Detail</h3>
<div class="mb-4">
<div class="font-medium dark:text-gray-300">Query:</div>
<div class="text-gray-600 dark:text-gray-400">{{ selectedNode.query }}</div>
</div>
<div v-if="selectedNode.result">
<div class="font-medium mb-2 dark:text-gray-300">Learning Content:</div>
<ul class="list-disc pl-5 mb-4">
<li v-for="(learning, i) in selectedNode.result.learnings" :key="i" class="text-gray-600 dark:text-gray-400">
{{ learning }}
</li>
</ul>
<div class="font-medium mb-2 dark:text-gray-300">Follow-up Questions:</div>
<ul class="list-disc pl-5">
<li v-for="(question, i) in selectedNode.result.followUpQuestions" :key="i" class="text-gray-600 dark:text-gray-400">
{{ question }}
</li>
</ul>
</div>
</div>
<div v-else class="h-full flex items-center justify-center text-gray-500 dark:text-gray-400 text-center">
Select a node to view details
</div>
</div>
</div>
</template>
<script setup lang="ts">
import * as d3 from 'd3'
import { deepResearch, type ResearchStep } from '~/lib/deep-research'
import type { ResearchFeedbackResult } from './ResearchFeedback.vue'
export interface SearchNode {
id: string
query: string
depth: number
status: 'pending' | 'processing' | 'complete' | 'error'
children: SearchNode[]
result?: {
learnings: string[]
followUpQuestions: string[]
}
// 布局相关属性
x?: number
y?: number
parent?: SearchNode
}
export interface SearchTree {
root: SearchNode | null
currentDepth: number
maxDepth: number
maxBreadth: number
}
const modelValue = defineModel<SearchTree>({
default: () => ({
root: null,
currentDepth: 0,
maxDepth: 0,
maxBreadth: 0,
}),
})
// 树形图布局配置
const margin = { top: 40, right: 40, bottom: 40, left: 40 }
const treeContainer = ref<HTMLElement>()
const width = ref(800)
const height = ref(600)
// 节点状态管理
const selectedNode = ref<SearchNode>()
// 计算节点和连接线
const treeData = computed(() => {
if (!modelValue.value.root) return { nodes: [], links: [] }
// 计算合适的树大小
const levels = getTreeDepth(modelValue.value.root)
const estimatedHeight = Math.max(levels * 20, 300) // 每层至少 20px
height.value = Math.min(estimatedHeight, window.innerHeight - 100) // 限制最大高度
const treeLayout = d3
.tree<SearchNode>()
.size([width.value - margin.left - margin.right, height.value - margin.top - margin.bottom])
.separation((a, b) => (a.parent === b.parent ? 1.5 : 2))
const root = d3.hierarchy(modelValue.value.root)
const layout = treeLayout(root)
const nodes = layout.descendants().map((d) => ({
...d.data,
x: d.x,
y: d.y,
}))
const links = layout.links().map((d, i) => ({
id: `link-${i}`,
path: d3.linkVertical()({
source: [d.source.x, d.source.y],
target: [d.target.x, d.target.y],
}) as string,
status: d.target.data.status,
}))
return { nodes, links }
})
// Helper: number of levels in the search tree rooted at `node` (0 for a null root).
function getTreeDepth(node: SearchNode): number {
  if (!node) return 0
  let deepest = 0
  for (const child of node.children ?? []) {
    deepest = Math.max(deepest, getTreeDepth(child))
  }
  return 1 + deepest
}
// 监听节点状态变化
watch(
() => modelValue.value.root,
(newRoot) => {
if (newRoot) {
// 找到最新更新的节点
const currentNode = findCurrentNode(newRoot)
if (currentNode) {
selectedNode.value = currentNode
}
}
},
{ deep: true },
)
// 事件处理
function handleNodeClick(node: SearchNode) {
selectedNode.value = node
}
function handleNodeHover(node: SearchNode) {
selectedNode.value = node
}
function handleSvgClick() {
selectedNode.value = undefined
}
// Helper: depth-first search for the first node whose `depth` equals `targetDepth`.
function findNodeAtDepth(node: SearchNode | null, targetDepth: number): SearchNode | null {
  if (!node) return null
  if (node.depth === targetDepth) return node
  for (const child of node.children ?? []) {
    const match = findNodeAtDepth(child, targetDepth)
    if (match !== null) return match
  }
  return null
}
// Helper: locate the node currently being worked on. Returns the first node
// (pre-order) whose status is 'processing'; otherwise falls back to the first
// completed leaf encountered during the same traversal, or null if neither exists.
function findCurrentNode(node: SearchNode): SearchNode | null {
  if (node.status === 'processing') {
    return node
  }
  for (const child of node.children ?? []) {
    const hit = findCurrentNode(child)
    if (hit) return hit
  }
  const isLeaf = !node.children || node.children.length === 0
  return node.status === 'complete' && isLeaf ? node : null
}
// Helper: immutably apply `updates` to the node with id `nodeId`, returning a
// new tree. Nodes along the way are shallow-copied; the input tree is untouched.
function updateNodeInTree(root: SearchNode, nodeId: string, updates: Partial<SearchNode>): SearchNode {
  if (root.id === nodeId) {
    return { ...root, ...updates }
  }
  return {
    ...root,
    // Guard: tolerate nodes whose `children` is missing instead of crashing on .map
    children: (root.children ?? []).map((child) => updateNodeInTree(child, nodeId, updates)),
  }
}
// 监听容器大小变化
onMounted(() => {
if (treeContainer.value) {
const resizeObserver = new ResizeObserver((entries) => {
for (const entry of entries) {
width.value = entry.contentRect.width
height.value = entry.contentRect.height
}
})
resizeObserver.observe(treeContainer.value)
}
})
// Map streaming ResearchStep events from lib/deep-research onto the v-model tree.
function handleResearchProgress(step: ResearchStep) {
  console.log(step)
  if (step.type === 'start') {
    // Reset the tree; maxDepth/maxBreadth come from the run parameters.
    modelValue.value = {
      root: null,
      currentDepth: 0,
      maxDepth: step.depth || 0,
      maxBreadth: step.breadth || 0,
    }
  } else if (step.type === 'generating_queries' && step.result) {
    // New query nodes were generated for this depth.
    if (step.depth === 1) {
      // First-level queries: the first query becomes the root node,
      // the remaining queries become its children.
      modelValue.value = {
        ...modelValue.value,
        root: {
          id: '0-0',
          query: step.result[0].query,
          depth: 0,
          status: 'processing',
          children: step.result.slice(1).map((item, index) => ({
            id: `1-${index}`,
            query: item.query,
            depth: 1,
            status: 'pending',
            children: [],
          })),
        },
      }
    } else {
      // Deeper levels: attach the new queries under a node at the previous depth.
      // NOTE(review): findNodeAtDepth returns the FIRST node at that depth —
      // with multiple branches this may pick the wrong parent; confirm intended.
      const parentNode = findNodeAtDepth(modelValue.value.root!, step.depth! - 1)
      if (parentNode) {
        const updatedRoot = updateNodeInTree(modelValue.value.root!, parentNode.id, {
          children: step.result.map((query: any, index: number) => ({
            id: `${step.depth}-${index}`,
            query: query.query,
            depth: step.depth!,
            status: 'pending',
            children: [],
          })),
        })
        modelValue.value = {
          ...modelValue.value,
          root: updatedRoot,
        }
      }
    }
  } else if (step.type === 'processing_serach_result' && step.result) {
    // NOTE(review): 'processing_serach_result' (sic) matches the misspelled
    // event name declared in lib/deep-research.ts — keep both in sync if renamed.
    // Mark the node complete and store its learnings/follow-up questions.
    const nodeId = `${step.depth}-${step.queryIndex}`
    const updatedRoot = updateNodeInTree(modelValue.value.root!, nodeId, {
      status: 'complete',
      result: {
        learnings: step.result.learnings || [],
        followUpQuestions: step.result.followUpQuestions || [],
      },
    })
    modelValue.value = {
      ...modelValue.value,
      root: updatedRoot,
    }
  } else if (step.type === 'error') {
    // Flag the node currently being processed as failed.
    const currentNode = findCurrentNode(modelValue.value.root!)
    if (currentNode) {
      const updatedRoot = updateNodeInTree(modelValue.value.root!, currentNode.id, {
        status: 'error',
      })
      modelValue.value = {
        ...modelValue.value,
        root: updatedRoot,
      }
    }
  }
}
// Start a deep-research run, combining the initial query with the user's
// answers to the model's follow-up questions. Exposed to the parent component.
async function startResearch(query: string, depth: number, breadth: number, feedback: ResearchFeedbackResult[]) {
  // Reset tree state for a fresh run.
  modelValue.value = {
    root: null,
    currentDepth: 0,
    maxDepth: 0,
    maxBreadth: 0,
  }
  try {
    const combinedQuery = `
Initial Query: ${query}
Follow-up Questions and Answers:
${feedback.map((qa) => `Q: ${qa.assistantQuestion}\nA: ${qa.userAnswer}`).join('\n')}
`
    await deepResearch({
      query: combinedQuery,
      depth,
      breadth,
      onProgress: handleResearchProgress,
    })
  } catch (error) {
    console.error('Research failed:', error)
  }
}

defineExpose({
  startResearch,
})
</script>

View File

@ -0,0 +1,119 @@
<script setup lang="ts">
import { parsePartialJson } from '@ai-sdk/ui-utils'
import { useChat } from '@ai-sdk/vue'
import { isObject } from '@vueuse/core'

/** One feedback round: a question asked by the model and the user's answer. */
export interface ResearchFeedbackResult {
  assistantQuestion: string
  userAnswer: string
}

defineEmits<{
  (e: 'submit'): void
}>()

// v-model: the list of question/answer pairs rendered in the form.
const modelValue = defineModel<ResearchFeedbackResult[]>({
  default: () => [],
})

const { messages, input, error, handleSubmit, isLoading } = useChat({
  api: '/api/generate-feedback',
})

// Disable submit until every generated question has an answer and streaming ended.
const isSubmitButtonDisabled = computed(
  () =>
    !modelValue.value.length ||
    // All questions should be answered
    modelValue.value.some((v) => !v.assistantQuestion || !v.userAnswer) ||
    // Should not be loading
    isLoading.value,
)

// Ask the backend to generate `numQuestions` follow-up questions for `query`.
// The streamed answer arrives via `messages` and is parsed by the watcher below.
async function getFeedback(query: string, numQuestions = 3) {
  clear()
  // Set input value. (This only makes sure that the library sends the request)
  input.value = query
  handleSubmit(
    {},
    {
      body: {
        query,
        numQuestions,
      },
    },
  )
}

// Reset chat state and the bound question/answer list.
function clear() {
  messages.value = []
  input.value = ''
  error.value = undefined
  modelValue.value = []
}

// Incrementally parse the (possibly partial) streamed JSON into modelValue.
watch(messages, (m) => {
  const assistantMessage = m[m.length - 1]
  if (assistantMessage?.role !== 'assistant') {
    // NOTE(review): watch callbacks ignore their return value — this object is
    // dead code and could be a bare `return`.
    return {
      value: undefined,
      state: 'undefined-input',
    }
  }
  // NOTE(review): `removeJsonMarkdown` is not imported here — presumably a
  // Nuxt auto-imported util that strips ```json fences; confirm it exists.
  const content = removeJsonMarkdown(assistantMessage.content)
  // Write the questions into modelValue
  const parseResult = parsePartialJson(content)
  console.log(parseResult)
  if (parseResult.state === 'repaired-parse' || parseResult.state === 'successful-parse') {
    if (!isObject(parseResult.value) || Array.isArray(parseResult.value)) {
      return (modelValue.value = [])
    }
    const unsafeQuestions = parseResult.value.questions
    if (!unsafeQuestions || !Array.isArray(unsafeQuestions)) return (modelValue.value = [])
    const questions = unsafeQuestions.filter((s) => typeof s === 'string')
    // Incrementally update modelValue
    for (let i = 0; i < questions.length; i += 1) {
      if (modelValue.value[i]) {
        modelValue.value[i].assistantQuestion = questions[i]
      } else {
        modelValue.value.push({
          assistantQuestion: questions[i],
          userAnswer: '',
        })
      }
    }
  } else {
    modelValue.value = []
  }
})

watch(error, (e) => {
  if (e) {
    console.error(`ResearchFeedback error,`, e)
  }
})

defineExpose({
  getFeedback,
  clear,
})
</script>
<template>
<UCard>
<div class="flex flex-col gap-2">
<div v-if="!modelValue.length && !error">Waiting for model feedback...</div>
<template v-else>
<div v-if="error" class="text-red-500">{{ error }}</div>
<div v-for="(feedback, index) in modelValue" class="flex flex-col gap-2" :key="index">
Assistant: {{ feedback.assistantQuestion }}
<UInput v-model="feedback.userAnswer" />
</div>
</template>
<UButton color="primary" :loading="isLoading" :disabled="isSubmitButtonDisabled" block @click="$emit('submit')">
Submit Answer
</UButton>
</div>
</UCard>
</template>

105
components/ResearchForm.vue Normal file
View File

@ -0,0 +1,105 @@
<template>
<div class="max-w-4xl mx-auto">
<UCard>
<div class="flex flex-col gap-2">
<UFormField label="Research Topic" required>
<UTextarea class="w-full" v-model="input" :rows="3" placeholder="Enter the topic you want to research..." required />
</UFormField>
<div class="grid grid-cols-1 sm:grid-cols-3 gap-4">
<UFormField label="Breadth" help="Suggested range: 3-10">
<UInput v-model="breadth" class="w-full" type="number" :min="3" :max="10" :step="1" />
</UFormField>
<UFormField label="Depth" help="Suggested range: 1-5">
<UInput v-model="depth" class="w-full" type="number" :min="1" :max="5" :step="1" />
</UFormField>
<UFormField label="Number of Questions" help="Suggested range: 1-10">
<UInput v-model="numQuestions" class="w-full" type="number" :min="1" :max="5" :step="1" />
</UFormField>
</div>
</div>
<template #footer>
<UButton type="submit" color="primary" :loading="isLoading" block @click="handleSubmit">
{{ isLoading ? 'Researching...' : 'Start Research' }}
</UButton>
</template>
</UCard>
<div v-if="result" class="mt-8">
<UCard>
<template #header>
<div class="flex items-center justify-between">
<h2 class="text-xl font-semibold">研究报告</h2>
<UButton color="info" variant="ghost" icon="i-heroicons-document-duplicate" @click="copyReport" />
</div>
</template>
<div class="prose max-w-none dark:prose-invert" v-html="renderedReport"></div>
</UCard>
</div>
</div>
</template>
<script setup lang="ts">
import { marked } from 'marked'
import { UFormField } from '#components'

/** Payload emitted when the user starts a research run. */
export interface ResearchInputData {
  query: string
  breadth: number
  depth: number
  numQuestions: number
}

const emit = defineEmits<{
  (e: 'submit', value: ResearchInputData): void
}>()

const input = ref('天空为什么是蓝的?')
const breadth = ref(6)
const depth = ref(3)
const numQuestions = ref(1)
const isLoading = ref(false)
const result = ref<any>(null)
const toast = useToast()

// Render the final research report (Markdown) to HTML for the v-html block.
const renderedReport = computed(() => {
  if (!result.value?.report) return ''
  return marked(result.value.report)
})

// Forward the current form values to the parent as a 'submit' event.
function handleSubmit() {
  emit('submit', {
    query: input.value,
    breadth: breadth.value,
    depth: depth.value,
    numQuestions: numQuestions.value,
  })
}

onMounted(() => {
  // NOTE(review): redundant — `input` is already initialized to this value above.
  input.value = '天空为什么是蓝的?' // default
})

// Copy the raw Markdown report to the clipboard, with toast feedback either way.
async function copyReport() {
  if (!result.value?.report) return
  try {
    await navigator.clipboard.writeText(result.value.report)
    toast.add({
      title: '复制成功',
      description: '研究报告已复制到剪贴板',
      icon: 'i-heroicons-check-circle',
    })
  } catch (e) {
    toast.add({
      title: '复制失败',
      description: '无法复制到剪贴板',
      icon: 'i-heroicons-x-circle',
      color: 'error',
    })
  }
}
</script>

7
layouts/default.vue Normal file
View File

@ -0,0 +1,7 @@
<template>
  <!-- Default layout: full-viewport background that adapts to dark mode. -->
  <div
    class="w-full min-h-screen bg-white dark:bg-[#111729] dark:text-[#e5e7eb] transition-all"
  >
    <slot />
  </div>
</template>

58
lib/ai/providers.ts Normal file
View File

@ -0,0 +1,58 @@
import { createOpenAI } from '@ai-sdk/openai';
import { getEncoding } from 'js-tiktoken';

import { RecursiveCharacterTextSplitter } from './text-splitter';

// Providers
// NOTE(review): the API key is read from import.meta.env, i.e. baked into the
// client bundle at build time — confirm this is acceptable for deployment.
const openai = createOpenAI({
  apiKey: import.meta.env.VITE_OPENAI_API_KEY!,
  baseURL: import.meta.env.VITE_OPENAI_ENDPOINT || 'https://api.openai.com/v1',
});

const customModel = import.meta.env.VITE_OPENAI_MODEL || 'o3-mini';

// Models
export const o3MiniModel = openai(customModel, {
  // reasoningEffort: customModel.startsWith('o') ? 'medium' : undefined,
  structuredOutputs: true,
});

// Smallest slice worth returning when a prompt must be trimmed (characters).
const MinChunkSize = 140;

// Tokenizer used to measure prompt length (o200k_base encoding).
const encoder = getEncoding('o200k_base');

/**
 * Recursively trims `prompt` until its token count fits within `contextSize`.
 * Uses a rough 3-chars-per-token estimate to pick a character budget, then a
 * RecursiveCharacterTextSplitter to cut on natural boundaries.
 */
export function trimPrompt(
  prompt: string,
  contextSize = Number(import.meta.env.VITE_CONTEXT_SIZE) || 128_000,
) {
  if (!prompt) {
    return '';
  }

  const length = encoder.encode(prompt).length;
  if (length <= contextSize) {
    return prompt;
  }

  const overflowTokens = length - contextSize;
  // on average it's 3 characters per token, so multiply by 3 to get a rough estimate of the number of characters
  const chunkSize = prompt.length - overflowTokens * 3;
  if (chunkSize < MinChunkSize) {
    return prompt.slice(0, MinChunkSize);
  }

  const splitter = new RecursiveCharacterTextSplitter({
    chunkSize,
    chunkOverlap: 0,
  });

  const trimmedPrompt = splitter.splitText(prompt)[0] ?? '';

  // last catch, there's a chance that the trimmed prompt is same length as the original prompt, due to how tokens are split & innerworkings of the splitter, handle this case by just doing a hard cut
  if (trimmedPrompt.length === prompt.length) {
    return trimPrompt(prompt.slice(0, chunkSize), contextSize);
  }

  // recursively trim until the prompt is within the context size
  return trimPrompt(trimmedPrompt, contextSize);
}

View File

@ -0,0 +1,77 @@
import assert from 'node:assert';
import { describe, it, beforeEach } from 'node:test';

import { RecursiveCharacterTextSplitter } from './text-splitter';

// Unit tests for RecursiveCharacterTextSplitter (node:test runner).
describe('RecursiveCharacterTextSplitter', () => {
  let splitter: RecursiveCharacterTextSplitter;

  beforeEach(() => {
    splitter = new RecursiveCharacterTextSplitter({
      chunkSize: 50,
      chunkOverlap: 10,
    });
  });

  it('Should correctly split text by separators', () => {
    const text = 'Hello world, this is a test of the recursive text splitter.';

    // Test with initial chunkSize
    assert.deepEqual(
      splitter.splitText(text),
      ['Hello world', 'this is a test of the recursive text splitter']
    );

    // Test with updated chunkSize
    splitter.chunkSize = 100;
    assert.deepEqual(
      splitter.splitText(
        'Hello world, this is a test of the recursive text splitter. If I have a period, it should split along the period.'
      ),
      [
        'Hello world, this is a test of the recursive text splitter',
        'If I have a period, it should split along the period.',
      ]
    );

    // Test with another updated chunkSize
    splitter.chunkSize = 110;
    assert.deepEqual(
      splitter.splitText(
        'Hello world, this is a test of the recursive text splitter. If I have a period, it should split along the period.\nOr, if there is a new line, it should prioritize splitting on new lines instead.'
      ),
      [
        'Hello world, this is a test of the recursive text splitter',
        'If I have a period, it should split along the period.',
        'Or, if there is a new line, it should prioritize splitting on new lines instead.',
      ]
    );
  });

  it('Should handle empty string', () => {
    assert.deepEqual(splitter.splitText(''), []);
  });

  it('Should handle special characters and large texts', () => {
    // NOTE(review): expected 5 chunks of exactly 200 chars, but mergeSplits
    // emits a chunk when total + len >= chunkSize and keeps chunkOverlap(10)
    // characters between chunks — verify this expectation actually passes.
    const largeText = 'A'.repeat(1000);
    splitter.chunkSize = 200;
    assert.deepEqual(
      splitter.splitText(largeText),
      Array(5).fill('A'.repeat(200))
    );

    const specialCharText = 'Hello!@# world$%^ &*( this) is+ a-test';
    assert.deepEqual(
      splitter.splitText(specialCharText),
      ['Hello!@#', 'world$%^', '&*( this)', 'is+', 'a-test']
    );
  });

  it('Should handle chunkSize equal to chunkOverlap', () => {
    // NOTE(review): the chunkOverlap >= chunkSize guard lives in the
    // TextSplitter constructor, not in splitText — mutating the fields after
    // construction may not throw here; confirm this test passes as written.
    splitter.chunkSize = 50;
    splitter.chunkOverlap = 50;
    assert.throws(
      () => splitter.splitText('Invalid configuration'),
      new Error('Cannot have chunkOverlap >= chunkSize')
    );
  });
});

143
lib/ai/text-splitter.ts Normal file
View File

@ -0,0 +1,143 @@
/** Configuration shared by all text splitters. */
interface TextSplitterParams {
  chunkSize: number; // target maximum characters per chunk
  chunkOverlap: number; // characters carried over between consecutive chunks
}

/**
 * Base class: subclasses produce candidate splits, and this class merges them
 * back into chunks of at most ~chunkSize characters with chunkOverlap
 * characters of carry-over between consecutive chunks.
 */
abstract class TextSplitter implements TextSplitterParams {
  chunkSize = 1000;
  chunkOverlap = 200;

  constructor(fields?: Partial<TextSplitterParams>) {
    this.chunkSize = fields?.chunkSize ?? this.chunkSize;
    this.chunkOverlap = fields?.chunkOverlap ?? this.chunkOverlap;
    if (this.chunkOverlap >= this.chunkSize) {
      throw new Error('Cannot have chunkOverlap >= chunkSize');
    }
  }

  /** Split `text` into chunks; implemented by subclasses. */
  abstract splitText(text: string): string[];

  /** Split every input text and concatenate the resulting chunks. */
  createDocuments(texts: string[]): string[] {
    const documents: string[] = [];
    for (let i = 0; i < texts.length; i += 1) {
      const text = texts[i];
      for (const chunk of this.splitText(text!)) {
        documents.push(chunk);
      }
    }
    return documents;
  }

  splitDocuments(documents: string[]): string[] {
    return this.createDocuments(documents);
  }

  // Join accumulated pieces; returns null when the result is empty after trimming.
  private joinDocs(docs: string[], separator: string): string | null {
    const text = docs.join(separator).trim();
    return text === '' ? null : text;
  }

  /**
   * Greedily packs `splits` into chunks no longer than chunkSize, keeping up
   * to chunkOverlap trailing characters as overlap for the next chunk.
   */
  mergeSplits(splits: string[], separator: string): string[] {
    const docs: string[] = [];
    const currentDoc: string[] = [];
    let total = 0;
    for (const d of splits) {
      const _len = d.length;
      if (total + _len >= this.chunkSize) {
        if (total > this.chunkSize) {
          // Fixed: the previous message embedded a stray "+" and raw source
          // indentation inside the template literal, producing garbled output.
          console.warn(
            `Created a chunk of size ${total}, which is longer than the specified ${this.chunkSize}`,
          );
        }
        if (currentDoc.length > 0) {
          const doc = this.joinDocs(currentDoc, separator);
          if (doc !== null) {
            docs.push(doc);
          }
          // Keep on popping if:
          // - we have a larger chunk than in the chunk overlap
          // - or if we still have any chunks and the length is long
          while (
            total > this.chunkOverlap ||
            (total + _len > this.chunkSize && total > 0)
          ) {
            total -= currentDoc[0]!.length;
            currentDoc.shift();
          }
        }
      }
      currentDoc.push(d);
      total += _len;
    }
    const doc = this.joinDocs(currentDoc, separator);
    if (doc !== null) {
      docs.push(doc);
    }
    return docs;
  }
}

export interface RecursiveCharacterTextSplitterParams
  extends TextSplitterParams {
  separators: string[];
}

/**
 * Splits text by trying separators in order (paragraph, line, sentence, ...)
 * and recursing into pieces that are still longer than chunkSize.
 */
export class RecursiveCharacterTextSplitter
  extends TextSplitter
  implements RecursiveCharacterTextSplitterParams
{
  separators: string[] = ['\n\n', '\n', '.', ',', '>', '<', ' ', ''];

  constructor(fields?: Partial<RecursiveCharacterTextSplitterParams>) {
    super(fields);
    this.separators = fields?.separators ?? this.separators;
  }

  splitText(text: string): string[] {
    const finalChunks: string[] = [];

    // Pick the first separator present in the text; the trailing '' entry
    // always matches and falls back to per-character splitting.
    let separator: string = this.separators[this.separators.length - 1]!;
    for (const s of this.separators) {
      if (s === '') {
        separator = s;
        break;
      }
      if (text.includes(s)) {
        separator = s;
        break;
      }
    }

    // Now that we have the separator, split the text
    let splits: string[];
    if (separator) {
      splits = text.split(separator);
    } else {
      splits = text.split('');
    }

    // Merge short splits; recursively re-split pieces that are too long.
    let goodSplits: string[] = [];
    for (const s of splits) {
      if (s.length < this.chunkSize) {
        goodSplits.push(s);
      } else {
        if (goodSplits.length) {
          const mergedText = this.mergeSplits(goodSplits, separator);
          finalChunks.push(...mergedText);
          goodSplits = [];
        }
        const otherInfo = this.splitText(s);
        finalChunks.push(...otherInfo);
      }
    }
    if (goodSplits.length) {
      const mergedText = this.mergeSplits(goodSplits, separator);
      finalChunks.push(...mergedText);
    }
    return finalChunks;
  }
}

303
lib/deep-research.ts Normal file
View File

@ -0,0 +1,303 @@
import { generateObject, streamText } from 'ai';
import { compact } from 'lodash-es';
import pLimit from 'p-limit';
import { z } from 'zod';
import { parseStreamingJson, type DeepPartial } from '~/utils/json';
import { o3MiniModel, trimPrompt } from './ai/providers';
import { systemPrompt } from './prompt';
import zodToJsonSchema from 'zod-to-json-schema';
import { tavily, type TavilySearchResponse } from '@tavily/core';
// import 'dotenv/config';
// Used for streaming response: DeepPartial because streamed JSON arrives incomplete.
type PartialSerpQueries = DeepPartial<z.infer<typeof serpQueriesTypeSchema>['queries']>;
type PartialSearchResult = DeepPartial<z.infer<typeof serpResultTypeSchema>>;

// Discriminated union of progress events delivered through `onProgress`.
// NOTE(review): 'processing_serach_result' is misspelled but consumed as-is by
// components/DeepResearch.vue — rename in both places or not at all.
export type ResearchStep =
  | { type: 'start'; message: string; depth: number; breadth: number }
  | { type: 'generating_queries'; result: PartialSerpQueries; depth: number; breadth: number }
  | { type: 'query_generated'; query: string; researchGoal: string; depth: number; breadth: number; queryIndex: number }
  | { type: 'searching'; query: string; depth: number; breadth: number; queryIndex: number }
  | { type: 'search_complete'; query: string; urls: string[]; depth: number; breadth: number; queryIndex: number }
  | { type: 'processing_serach_result'; query: string; result: PartialSearchResult; depth: number; breadth: number; queryIndex: number }
  | { type: 'error'; message: string }
  | { type: 'complete' };

// increase this if you have higher API rate limits
const ConcurrencyLimit = 2;

// Initialize Firecrawl with optional API key and optional base url
// const firecrawl = new FirecrawlApp({
//   apiKey: process.env.FIRECRAWL_KEY ?? '',
//   apiUrl: process.env.FIRECRAWL_BASE_URL,
// });

// Tavily search client; key is baked in at build time from VITE_TAVILY_API_KEY.
const tvly = tavily({
  apiKey: import.meta.env.VITE_TAVILY_API_KEY ?? '',
})

/**
 * Schema for {@link generateSerpQueries} without dynamic descriptions
 */
export const serpQueriesTypeSchema = z.object({
  queries: z.array(
    z.object({
      query: z.string(),
      researchGoal: z.string(),
    }),
  ),
});
/**
 * Takes a user query and streams a list of SERP queries for it.
 * Returns the raw streamText result; callers parse the streaming JSON
 * against {@link serpQueriesTypeSchema}.
 */
export function generateSerpQueries({
  query,
  numQueries = 3,
  learnings,
}: {
  query: string;
  numQueries?: number;
  // optional, if provided, the research will continue from the last learning
  learnings?: string[];
}) {
  // Schema with per-call descriptions (numQueries is interpolated into them).
  const schema = z.object({
    queries: z
      .array(
        z.object({
          query: z.string().describe('The SERP query'),
          researchGoal: z
            .string()
            .describe(
              'First talk about the goal of the research that this query is meant to accomplish, then go deeper into how to advance the research once the results are found, mention additional research directions. Be as specific as possible, especially for additional research directions.',
            ),
        }),
      )
      .describe(`List of SERP queries, max of ${numQueries}`)
  })
  // The JSON schema is embedded in the prompt so the model can match it.
  const jsonSchema = JSON.stringify(zodToJsonSchema(schema));
  const prompt = [
    `Given the following prompt from the user, generate a list of SERP queries to research the topic. Return a maximum of ${numQueries} queries, but feel free to return less if the original prompt is clear. Make sure each query is unique and not similar to each other: <prompt>${query}</prompt>\n\n`,
    learnings
      ? `Here are some learnings from previous research, use them to generate more specific queries: ${learnings.join(
          '\n',
        )}`
      : '',
    `You MUST respond in JSON with the following schema: ${jsonSchema}`,
  ].join('\n\n');

  return streamText({
    model: o3MiniModel,
    system: systemPrompt(),
    prompt,
  });
}
/**
 * Schema for {@link processSerpResult} without dynamic descriptions
 */
export const serpResultTypeSchema = z.object({
  learnings: z.array(z.string()),
  followUpQuestions: z.array(z.string()),
});

/**
 * Streams learnings and follow-up questions extracted from a Tavily search
 * result for `query`. Page contents are trimmed to 25k tokens each.
 */
function processSerpResult({
  query,
  result,
  numLearnings = 3,
  numFollowUpQuestions = 3,
}: {
  query: string;
  // result: SearchResponse;
  result: TavilySearchResponse
  numLearnings?: number;
  numFollowUpQuestions?: number;
}) {
  const schema = z.object({
    learnings: z
      .array(z.string())
      .describe(`List of learnings, max of ${numLearnings}`),
    followUpQuestions: z
      .array(z.string())
      .describe(
        `List of follow-up questions to research the topic further, max of ${numFollowUpQuestions}`,
      ),
  });
  const jsonSchema = JSON.stringify(zodToJsonSchema(schema));
  // Drop empty contents, then cap each page's contribution.
  const contents = compact(result.results.map(item => item.content)).map(
    content => trimPrompt(content, 25_000),
  );
  const prompt = [
    `Given the following contents from a SERP search for the query <query>${query}</query>, generate a list of learnings from the contents. Return a maximum of ${numLearnings} learnings, but feel free to return less if the contents are clear. Make sure each learning is unique and not similar to each other. The learnings should be concise and to the point, as detailed and information dense as possible. Make sure to include any entities like people, places, companies, products, things, etc in the learnings, as well as any exact metrics, numbers, or dates. The learnings will be used to research the topic further.`,
    `<contents>${contents
      .map(content => `<content>\n${content}\n</content>`)
      .join('\n')}</contents>`,
    `You MUST respond in JSON with the following schema: ${jsonSchema}`,
  ].join('\n\n');

  return streamText({
    model: o3MiniModel,
    // Abort if the model stalls for more than a minute.
    abortSignal: AbortSignal.timeout(60_000),
    system: systemPrompt(),
    prompt,
  });
}
/**
 * Generates the final Markdown report from all accumulated learnings and
 * appends a "Sources" section listing the visited URLs.
 */
export async function writeFinalReport({
  prompt,
  learnings,
  visitedUrls,
}: {
  prompt: string;
  learnings: string[];
  visitedUrls: string[];
}) {
  // Cap the learnings payload at 150k tokens to stay inside the context window.
  const learningsString = trimPrompt(
    learnings
      .map(learning => `<learning>\n${learning}\n</learning>`)
      .join('\n'),
    150_000,
  );

  const res = await generateObject({
    model: o3MiniModel,
    system: systemPrompt(),
    prompt: `Given the following prompt from the user, write a final report on the topic using the learnings from research. Make it as as detailed as possible, aim for 3 or more pages, include ALL the learnings from research:\n\n<prompt>${prompt}</prompt>\n\nHere are all the learnings from previous research:\n\n<learnings>\n${learningsString}\n</learnings>`,
    schema: z.object({
      reportMarkdown: z
        .string()
        .describe('Final report on the topic in Markdown'),
    }),
  });

  // Append the visited URLs section to the report
  const urlsSection = `\n\n## Sources\n\n${visitedUrls.map(url => `- ${url}`).join('\n')}`;
  return res.object.reportMarkdown + urlsSection;
}
/**
 * Recursively researches `query`. Each level: generate up to `breadth` SERP
 * queries, run each through the Tavily search API, distill learnings from the
 * results, then recurse with halved breadth and `depth - 1` using the model's
 * follow-up questions. All intermediate state is pushed to the caller via
 * `onProgress`; the returned promise intentionally resolves with no data.
 *
 * @param query       research question; recursive calls embed the prior goal
 * @param breadth     number of SERP queries at this level (halved per level)
 * @param depth       remaining recursion levels; recursion stops at 0
 * @param learnings   learnings accumulated from shallower levels
 * @param visitedUrls URLs collected so far across all levels
 * @param onProgress  callback invoked for every research step event
 */
export async function deepResearch({
  query,
  breadth,
  depth,
  learnings = [],
  visitedUrls = [],
  onProgress,
}: {
  query: string;
  breadth: number;
  depth: number;
  learnings?: string[];
  visitedUrls?: string[];
  onProgress: (step: ResearchStep) => void;
}): Promise<void> {
  onProgress({ type: 'start', message: `开始深度研究,深度:${depth},广度:${breadth}`, depth, breadth });
  try {
    // Stream SERP query generation from the model.
    const serpQueriesResult = generateSerpQueries({
      query,
      learnings,
      numQueries: breadth,
    });
    const limit = pLimit(ConcurrencyLimit);
    // Re-emit the (partial) query list each time the streamed JSON grows.
    let serpQueries: PartialSerpQueries = [];
    for await (const parsedQueries of parseStreamingJson(
      serpQueriesResult.textStream,
      serpQueriesTypeSchema,
      (value) => !!value.queries?.length && !!value.queries[0]?.query
    )) {
      if (parsedQueries.queries) {
        serpQueries = parsedQueries.queries;
        onProgress({
          type: 'generating_queries',
          result: serpQueries,
          depth,
          breadth
        });
      }
    }
    // Run all SERP queries concurrently, bounded by ConcurrencyLimit.
    // NOTE(review): the values returned inside are discarded by Promise.all;
    // results only reach the caller through onProgress events.
    await Promise.all(
      serpQueries.map(serpQuery =>
        limit(async () => {
          if (!serpQuery?.query) return
          try {
            // Previous implementation used Firecrawl; kept for reference.
            // const result = await firecrawl.search(serpQuery.query, {
            //   timeout: 15000,
            //   limit: 5,
            //   scrapeOptions: { formats: ['markdown'] },
            // });
            const result = await tvly.search(serpQuery.query, {
              maxResults: 5,
            })
            console.log(`Ran ${serpQuery.query}, found ${result.results.length} contents`);
            // Collect URLs from this search
            const newUrls = compact(result.results.map(item => item.url));
            const newBreadth = Math.ceil(breadth / 2);
            const newDepth = depth - 1;
            // Stream learning extraction for this query's search results.
            const serpResultGenerator = processSerpResult({
              query: serpQuery.query,
              result,
              numFollowUpQuestions: newBreadth,
            });
            let serpResult: PartialSearchResult = {};
            for await (const parsedLearnings of parseStreamingJson(
              serpResultGenerator.textStream,
              serpResultTypeSchema,
              (value) => !!value.learnings?.length
            )) {
              serpResult = parsedLearnings;
              onProgress({
                // NOTE(review): the 'serach' typo is part of the event
                // protocol; consumers match this exact string, so fix it
                // only together with all listeners.
                type: 'processing_serach_result',
                result: parsedLearnings,
                depth,
                breadth,
                query: serpQuery.query,
                queryIndex: serpQueries.indexOf(serpQuery),
              });
            }
            console.log(`Processed serp result for ${serpQuery.query}`, serpResult);
            const allLearnings = [...learnings, ...(serpResult.learnings ?? [])];
            const allUrls = [...visitedUrls, ...newUrls];
            // Recurse while depth remains and the model proposed follow-ups.
            if (newDepth > 0 && serpResult.followUpQuestions?.length) {
              console.log(
                `Researching deeper, breadth: ${newBreadth}, depth: ${newDepth}`,
              );
              const nextQuery = `
              Previous research goal: ${serpQuery.researchGoal}
              Follow-up research directions: ${serpResult.followUpQuestions.map(q => `\n${q}`).join('')}
            `.trim();
              return deepResearch({
                query: nextQuery,
                breadth: newBreadth,
                depth: newDepth,
                learnings: allLearnings,
                visitedUrls: allUrls,
                onProgress,
              });
            } else {
              return {
                learnings: allLearnings,
                visitedUrls: allUrls,
              };
            }
          } catch (e: any) {
            // NOTE(review): rethrowing rejects the whole Promise.all, so one
            // failed query aborts the remaining sibling queries at this level.
            throw new Error(`Error searching for ${serpQuery.query}, depth ${depth}\nMessage: ${e.message}`)
          }
        }),
      ),
    );
  } catch (error: any) {
    console.error(error);
    onProgress({
      type: 'error',
      message: error?.message ?? 'Something went wrong',
    })
  }
  // NOTE(review): 'complete' fires even after an 'error' event above.
  onProgress({
    type: 'complete',
  });
}

37
lib/feedback.ts Normal file
View File

@ -0,0 +1,37 @@
import { streamText } from 'ai';
import { z } from 'zod';
import { zodToJsonSchema } from 'zod-to-json-schema'
import { o3MiniModel } from './ai/providers';
import { systemPrompt } from './prompt';
// Shape of the model's JSON reply: a plain list of follow-up questions.
export const feedbackTypeSchema = z.object({
  questions: z.array(z.string())
})
export function generateFeedback({
query,
numQuestions = 3,
}: {
query: string;
numQuestions?: number;
}) {
const schema = z.object({
questions: z
.array(z.string())
.describe(
`Follow up questions to clarify the research direction, max of ${numQuestions}`,
),
});
const jsonSchema = JSON.stringify(zodToJsonSchema(schema));
const prompt = [
`Given the following query from the user, ask some follow up questions to clarify the research direction. Return a maximum of ${numQuestions} questions, but feel free to return less if the original query is clear: <query>${query}</query>`,
`You MUST respond in JSON with the following schema: ${jsonSchema}`,
].join('\n\n');
return streamText({
model: o3MiniModel,
system: systemPrompt(),
prompt,
});
// return userFeedback.object.questions.slice(0, numQuestions);
}

15
lib/prompt.ts Normal file
View File

@ -0,0 +1,15 @@
/**
 * Build the shared system prompt: an expert-researcher persona plus the
 * current ISO timestamp so the model can reason about recent events.
 */
export const systemPrompt = () => {
  const timestamp = new Date().toISOString();
  const guidelines = [
    `- You may be asked to research subjects that is after your knowledge cutoff, assume the user is right when presented with news.`,
    `- The user is a highly experienced analyst, no need to simplify it, be as detailed as possible and make sure your response is correct.`,
    `- Be highly organized.`,
    `- Suggest solutions that I didn't think about.`,
    `- Be proactive and anticipate my needs.`,
    `- Treat me as an expert in all subject matter.`,
    `- Mistakes erode my trust, so be accurate and thorough.`,
    `- Provide detailed explanations, I'm comfortable with lots of detail.`,
    `- Value good arguments over authorities, the source is irrelevant.`,
    `- Consider new technologies and contrarian ideas, not just the conventional wisdom.`,
    `- You may use high levels of speculation or prediction, just flag it for me.`,
  ];
  return `You are an expert researcher. Today is ${timestamp}. Follow these instructions when responding:\n  ${guidelines.join('\n  ')}`;
};

93
lib/run.ts Normal file
View File

@ -0,0 +1,93 @@
import * as fs from 'fs/promises';
import * as readline from 'readline';
import { deepResearch, writeFinalReport } from './deep-research';
import { generateFeedback } from './feedback';
// Shared readline interface for the interactive CLI prompts below.
const rl = readline.createInterface({
  input: process.stdin,
  output: process.stdout,
});
/** Prompt the user on stdin and resolve with their typed answer. */
function askQuestion(query: string): Promise<string> {
  return new Promise<string>(resolve => rl.question(query, resolve));
}
// run the agent
// NOTE(review): this CLI predates the streaming refactor of lib/ — see the
// inline notes below; it will not run as-is against the current lib API.
async function run() {
  // Get initial query
  const initialQuery = await askQuestion('What would you like to research? ');
  // Get breath and depth parameters
  const breadth =
    parseInt(
      await askQuestion(
        'Enter research breadth (recommended 2-10, default 4): ',
      ),
      10,
    ) || 4;
  const depth =
    parseInt(
      await askQuestion('Enter research depth (recommended 1-5, default 2): '),
      10,
    ) || 2;
  console.log(`Creating research plan...`);
  // Generate follow-up questions
  // NOTE(review): generateFeedback (lib/feedback.ts) returns a streamText
  // result object, not an array of strings — iterating it below and calling
  // .map() on it will fail; the stream must be consumed and parsed first.
  const followUpQuestions = await generateFeedback({
    query: initialQuery,
  });
  console.log(
    '\nTo better understand your research needs, please answer these follow-up questions:',
  );
  // Collect answers to follow-up questions
  const answers: string[] = [];
  for (const question of followUpQuestions) {
    const answer = await askQuestion(`\n${question}\nYour answer: `);
    answers.push(answer);
  }
  // Combine all information for deep research
  const combinedQuery = `
Initial Query: ${initialQuery}
Follow-up Questions and Answers:
${followUpQuestions.map((q, i) => `Q: ${q}\nA: ${answers[i]}`).join('\n')}
`;
  console.log('\nResearching your topic...');
  // NOTE(review): deepResearch (lib/deep-research.ts) now returns
  // Promise<void> and requires an `onProgress` callback; this destructuring
  // yields undefined fields and the required callback is missing here.
  const { learnings, visitedUrls } = await deepResearch({
    query: combinedQuery,
    breadth,
    depth,
  });
  console.log(`\n\nLearnings:\n\n${learnings.join('\n')}`);
  console.log(
    `\n\nVisited URLs (${visitedUrls.length}):\n\n${visitedUrls.join('\n')}`,
  );
  console.log('Writing final report...');
  const report = await writeFinalReport({
    prompt: combinedQuery,
    learnings,
    visitedUrls,
  });
  // Save report to file
  await fs.writeFile('output.md', report, 'utf-8');
  console.log(`\n\nFinal Report:\n\n${report}`);
  console.log('\nReport has been saved to output.md');
  rl.close();
}
// CLI entry point; log any unhandled error from the research run.
run().catch(console.error);

10
nuxt.config.ts Normal file
View File

@ -0,0 +1,10 @@
// https://nuxt.com/docs/api/configuration/nuxt-config
// Nuxt configuration: Nuxt UI + VueUse modules, global stylesheet, devtools.
export default defineNuxtConfig({
  modules: [
    '@nuxt/ui',
    '@vueuse/nuxt',
  ],
  css: ['~/assets/css/main.css'],
  compatibilityDate: '2024-11-01',
  devtools: { enabled: true },
})

40
package.json Normal file
View File

@ -0,0 +1,40 @@
{
"name": "nuxt-app",
"private": true,
"type": "module",
"scripts": {
"build": "nuxt build",
"dev": "nuxt dev",
"generate": "nuxt generate",
"preview": "nuxt preview",
"postinstall": "nuxt prepare"
},
"dependencies": {
"@ai-sdk/openai": "^1.1.9",
"@ai-sdk/openai-compatible": "^0.1.8",
"@ai-sdk/provider-utils": "^2.1.6",
"@ai-sdk/ui-utils": "^1.1.11",
"@ai-sdk/vue": "^1.1.11",
"@mendable/firecrawl-js": "^1.16.0",
"@nuxt/ui": "3.0.0-alpha.12",
"@tailwindcss/typography": "^0.5.16",
"@tavily/core": "^0.0.3",
"@types/lodash-es": "^4.17.12",
"ai": "^4.1.28",
"d3": "^7.9.0",
"js-tiktoken": "^1.0.18",
"lodash-es": "^4.17.21",
"marked": "^15.0.7",
"nuxt": "^3.15.4",
"p-limit": "^6.2.0",
"tailwindcss": "^4.0.5",
"vue": "latest",
"vue-router": "latest",
"zod": "^3.24.1",
"zod-to-json-schema": "^3.24.1"
},
"devDependencies": {
"@vueuse/core": "^12.5.0",
"@vueuse/nuxt": "^12.5.0"
}
}

61
pages/index.vue Normal file
View File

@ -0,0 +1,61 @@
<!-- Landing page: research form → clarifying questions → deep-research tree. -->
<template>
  <div>
    <UContainer>
      <div class="py-8">
        <div class="max-w-4xl mx-auto">
          <h1 class="text-3xl font-bold text-center mb-2"> Deep Research Assistant </h1>
          <ColorModeButton></ColorModeButton>
          <!-- Step 1: topic + breadth/depth input -->
          <ResearchForm @submit="generateFeedback" />
          <!-- Step 2: model-generated clarifying questions and user answers -->
          <ResearchFeedback v-model="result.feedback" ref="feedbackRef" @submit="startDeepSearch" />
          <!-- Step 3: live visualization of the research tree -->
          <DeepResearch v-model="searchTree" ref="deepResearchRef" class="mb-8" />
        </div>
      </div>
    </UContainer>
  </div>
</template>
<script setup lang="ts">
  import type ResearchFeedback from '~/components/ResearchFeedback.vue'
  import type DeepResearch from '~/components/DeepResearch.vue'
  import type { ResearchInputData } from '~/components/ResearchForm.vue'
  import type { SearchTree } from '~/components/DeepResearch.vue'
  import type { ResearchFeedbackResult } from '~/components/ResearchFeedback.vue'
  // Aggregated page state produced by the feedback step.
  interface DeepResearchResult {
    feedback: Array<ResearchFeedbackResult>
  }
  useHead({
    title: 'Deep Research Assistant - AI 深度研究助手',
    meta: [
      {
        name: 'description',
        content: '基于 AI 的深度研究助手,可以对任何主题进行迭代式深入研究',
      },
    ],
  })
  // Last submitted form values; reused when the user confirms the feedback.
  const inputData = ref<ResearchInputData>()
  const result = ref<DeepResearchResult>({
    feedback: [],
  })
  // Tree model driving the DeepResearch visualization component.
  const searchTree = ref<SearchTree>({
    root: null,
    currentDepth: 0,
    maxDepth: 0,
    maxBreadth: 0,
  })
  const feedbackRef = ref<InstanceType<typeof ResearchFeedback>>()
  const deepResearchRef = ref<InstanceType<typeof DeepResearch>>()
  // Step 1 → 2: ask the model for clarifying questions about the topic.
  async function generateFeedback(data: ResearchInputData) {
    inputData.value = data
    feedbackRef.value?.getFeedback(data.query, data.numQuestions)
  }
  // Step 2 → 3: start deep research with the original query plus answers.
  async function startDeepSearch() {
    if (!inputData.value) return
    deepResearchRef.value?.startResearch(inputData.value.query, inputData.value.breadth, inputData.value.depth, result.value.feedback)
  }
</script>

8148
pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

10
prettier.config.cjs Normal file
View File

@ -0,0 +1,10 @@
// Prettier settings for the repo: no semicolons, single quotes, indented
// Vue <script>/<style> blocks, 140-character lines.
module.exports = {
  semi: false,
  vueIndentScriptAndStyle: true,
  singleQuote: true,
  trailingComma: 'all',
  proseWrap: 'never',
  htmlWhitespaceSensitivity: 'strict',
  endOfLine: 'auto',
  printWidth: 140,
}

BIN
public/favicon.ico Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.2 KiB

1
public/robots.txt Normal file
View File

@ -0,0 +1 @@

View File

@ -0,0 +1,33 @@
import { deepResearch, ResearchStep } from "~/lib/deep-research";
// POST endpoint: runs deepResearch and streams progress to the client as
// Server-Sent Events — one `data:` frame per ResearchStep.
export default defineEventHandler(async event => {
  const { initialQuery, feedback, depth, breadth } = await readBody(event)
  console.log({ initialQuery, feedback, depth, breadth })
  // SSE response headers: keep the connection open and uncached.
  setHeader(event, 'Content-Type', 'text/event-stream')
  setHeader(event, 'Cache-Control', 'no-cache')
  setHeader(event, 'Connection', 'keep-alive')
  // Merge the original query with the clarifying Q&A pairs.
  const combinedQuery = `
Initial Query: ${initialQuery}
Follow-up Questions and Answers:
${feedback.map((qa: { question: string; answer: string }) => `Q: ${qa.question}\nA: ${qa.answer}`).join('\n')}
`
  const onProgress = (data: ResearchStep) => {
    console.log(data)
    // Emit one SSE frame per progress step.
    event.node.res.write(`data: ${JSON.stringify(data)}\n\n`)
  }
  // Fix: the original wrapped this in `new Promise(async (resolve, reject) =>
  // ...)` — an anti-pattern whose async executor swallows rejections (reject
  // was never used) — and never ended the response. Await directly and close
  // the stream when research completes.
  await deepResearch({
    query: combinedQuery,
    breadth,
    depth,
    onProgress,
  })
  event.node.res.end()
})

View File

@ -0,0 +1,18 @@
import { generateFeedback } from "~/lib/feedback";
// POST endpoint: generate clarifying follow-up questions for a query and
// relay the model stream to the client in the AI SDK data-stream format.
export default defineEventHandler(async event => {
  const { query, numQuestions } = await readBody(event)
  console.log({ query, numQuestions })
  const feedback = generateFeedback({
    query,
    numQuestions,
  })
  return feedback.toDataStreamResponse({
    sendUsage: true,
    // Log the real error server-side; clients get a generic message.
    getErrorMessage(error) {
      console.error('Error generating feedback:', error)
      return 'Error generating feedback'
    },
  })
})

3
server/tsconfig.json Normal file
View File

@ -0,0 +1,3 @@
{
"extends": "../.nuxt/tsconfig.server.json"
}

4
tsconfig.json Normal file
View File

@ -0,0 +1,4 @@
{
// https://nuxt.com/docs/guide/concepts/typescript
"extends": "./.nuxt/tsconfig.json"
}

53
utils/json.ts Normal file
View File

@ -0,0 +1,53 @@
import { parsePartialJson } from '@ai-sdk/ui-utils';
import { z } from 'zod';
// Recursively mark all properties optional; arrays are left intact so that
// partially-streamed lists keep their element type.
export type DeepPartial<T> = T extends object
  ? T extends Array<any>
    ? T
    : { [P in keyof T]?: DeepPartial<T[P]> }
  : T;
/**
 * Strip a surrounding markdown code fence (```json / json / ```) from text
 * so the remainder can be fed to a JSON parser. Only one leading marker is
 * removed, matched in order of specificity.
 */
export function removeJsonMarkdown(text: string) {
  const openingMarkers = ['```json', 'json', '```'];
  for (const marker of openingMarkers) {
    if (text.startsWith(marker)) {
      text = text.slice(marker.length);
      break;
    }
  }
  return text.endsWith('```') ? text.slice(0, -3) : text;
}
/**
 * Incrementally parse streaming JSON text.
 * @param textStream async stream of raw text chunks (markdown fences are stripped)
 * @param schema zod schema describing the expected shape (compile-time typing only)
 * @param isValid predicate deciding whether a partial parse is complete enough to yield
 * @returns async generator yielding progressively more complete objects that pass `isValid`
 */
export async function* parseStreamingJson<T extends z.ZodType>(
  textStream: AsyncIterable<string>,
  schema: T,
  isValid: (value: DeepPartial<z.infer<T>>) => boolean
): AsyncGenerator<DeepPartial<z.infer<T>>> {
  let rawText = '';
  let isParseSuccessful = false;
  for await (const chunk of textStream) {
    rawText = removeJsonMarkdown(rawText + chunk);
    const parsed = parsePartialJson(rawText);
    isParseSuccessful = parsed.state === 'repaired-parse' || parsed.state === 'successful-parse';
    if (isParseSuccessful) {
      const value = parsed.value as DeepPartial<z.infer<T>>;
      // Fix: `isValid` was documented and accepted but never called, so
      // callers received partials failing their own validity checks.
      if (isValid(value)) {
        yield value;
      }
    } else {
      console.dir(parsed, { depth: null, colors: true });
    }
  }
  return { isSuccessful: isParseSuccessful };
}