refactor: make requests purely client-side

This commit is contained in:
AnotiaWang
2025-02-11 22:28:58 +08:00
parent 066aafa9b2
commit 84f63abb3d
4 changed files with 43 additions and 66 deletions

View File

@ -1,7 +1,5 @@
<script setup lang="ts">
import { parsePartialJson } from '@ai-sdk/ui-utils'
import { useChat } from '@ai-sdk/vue'
import { isObject } from '@vueuse/core'
import { generateFeedback } from '~/lib/feedback'
export interface ResearchFeedbackResult {
assistantQuestion: string
@ -18,9 +16,8 @@
const feedback = ref<ResearchFeedbackResult[]>([])
const { messages, input, error, handleSubmit, isLoading } = useChat({
api: '/api/generate-feedback',
})
const isLoading = ref(false)
const error = ref('')
const isSubmitButtonDisabled = computed(
() =>
@ -34,67 +31,37 @@
async function getFeedback(query: string, numQuestions = 3) {
clear()
// Set input value. (This only makes sure that the library sends the request)
input.value = query
handleSubmit(
{},
{
body: {
query,
numQuestions,
},
},
)
}
function clear() {
messages.value = []
input.value = ''
error.value = undefined
feedback.value = []
}
watch(messages, (m) => {
const assistantMessage = m[m.length - 1]
if (assistantMessage?.role !== 'assistant') {
return {
value: undefined,
state: 'undefined-input',
}
}
const content = removeJsonMarkdown(assistantMessage.content)
// Write the questions into modelValue
const parseResult = parsePartialJson(content)
if (parseResult.state === 'repaired-parse' || parseResult.state === 'successful-parse') {
if (!isObject(parseResult.value) || Array.isArray(parseResult.value)) {
return (feedback.value = [])
}
const unsafeQuestions = parseResult.value.questions
if (!unsafeQuestions || !Array.isArray(unsafeQuestions)) return (feedback.value = [])
const questions = unsafeQuestions.filter((s) => typeof s === 'string')
// Incrementally update modelValue
for (let i = 0; i < questions.length; i += 1) {
if (feedback.value[i]) {
feedback.value[i].assistantQuestion = questions[i]
} else {
feedback.value.push({
assistantQuestion: questions[i],
userAnswer: '',
})
isLoading.value = true
try {
for await (const f of generateFeedback({
query,
numQuestions,
})) {
const questions = f.questions!.filter((s) => typeof s === 'string')
// Incrementally update modelValue
for (let i = 0; i < questions.length; i += 1) {
if (feedback.value[i]) {
feedback.value[i].assistantQuestion = questions[i]
} else {
feedback.value.push({
assistantQuestion: questions[i],
userAnswer: '',
})
}
}
}
} else {
feedback.value = []
} catch (e: any) {
console.error('Error getting feedback:', e)
error.value = e.message
} finally {
isLoading.value = false
}
})
}
// Surface any feedback-generation error in the console for debugging.
watch(error, (e) => {
  if (!e) return
  console.error(`ResearchFeedback error,`, e)
})
/** Reset component state: drop any previous error and collected feedback. */
function clear() {
  error.value = ''
  feedback.value = []
}
defineExpose({
getFeedback,
@ -111,6 +78,7 @@
</template>
<div class="flex flex-col gap-2">
<p v-if="error" class="text-red-500">{{ error }}</p>
<div v-if="!feedback.length && !error">Waiting for model feedback...</div>
<template v-else>
<div v-if="error" class="text-red-500">{{ error }}</div>

View File

@ -19,7 +19,7 @@ export interface WriteFinalReportParams {
prompt: string;
learnings: string[];
}
// useRuntimeConfig()
// Used for streaming response
// One element of the schema's `queries` array, fully parsed.
export type SearchQuery = z.infer<typeof searchQueriesTypeSchema>['queries'][0];
// Streaming variant of SearchQuery: nested fields may still be absent
// while the JSON response is only partially received.
export type PartialSearchQuery = DeepPartial<SearchQuery>;

View File

@ -5,6 +5,8 @@ import { zodToJsonSchema } from 'zod-to-json-schema'
import { o3MiniModel } from './ai/providers';
import { systemPrompt } from './prompt';
// Streaming variant of the feedback payload: while the model's JSON is
// still arriving, any field (including `questions`) may be missing.
type PartialFeedback = DeepPartial<z.infer<typeof feedbackTypeSchema>>
// Expected shape of the model's JSON reply: a list of follow-up questions.
export const feedbackTypeSchema = z.object({
questions: z.array(z.string())
})
@ -28,10 +30,16 @@ export function generateFeedback({
`Given the following query from the user, ask some follow up questions to clarify the research direction. Return a maximum of ${numQuestions} questions, but feel free to return less if the original query is clear: <query>${query}</query>`,
`You MUST respond in JSON with the following schema: ${jsonSchema}`,
].join('\n\n');
return streamText({
const stream = streamText({
model: o3MiniModel,
system: systemPrompt(),
prompt,
});
// return userFeedback.object.questions.slice(0, numQuestions);
return parseStreamingJson(
stream.textStream,
feedbackTypeSchema,
(value: PartialFeedback) => !!value.questions && value.questions.length > 0
)
}

View File

@ -1,3 +1,4 @@
// This file is currently unused
import { generateFeedback } from "~/lib/feedback";
export default defineEventHandler(async event => {