refactor: move general utils to shared directory

This commit is contained in:
AnotiaWang
2025-03-23 14:55:19 +08:00
parent ff1e79603d
commit 60f6f227c3
11 changed files with 124 additions and 63 deletions

4
shared/types/types.d.ts vendored Normal file
View File

@ -0,0 +1,4 @@
import type { NuxtApp } from "#app";
/** The tuple of locale codes registered on the Nuxt app's i18n instance. */
export type AvailableLocales = NuxtApp['$i18n']['availableLocales']
/** One registered locale code (element type of {@link AvailableLocales}). */
export type Locale = AvailableLocales[number]

63
shared/utils/ai-model.ts Normal file
View File

@ -0,0 +1,63 @@
import { createDeepSeek } from "@ai-sdk/deepseek"
import { createOpenAI } from "@ai-sdk/openai"
import { createOpenRouter } from "@openrouter/ai-sdk-provider"
import { wrapLanguageModel, extractReasoningMiddleware } from "ai"
import type { LanguageModelV1 } from "ai"
export function getLanguageModel(config: ConfigAi) {
const apiBase = getApiBase(config)
let model: LanguageModelV1
if (config.provider === 'openrouter') {
const openRouter = createOpenRouter({
apiKey: config.apiKey,
baseURL: apiBase,
})
model = openRouter(config.model, {
includeReasoning: true,
})
} else if (
config.provider === 'deepseek' ||
config.provider === 'siliconflow' ||
config.provider === 'infiniai' ||
// Special case if model name includes 'deepseek'
// This ensures compatibilty with providers like Siliconflow
config.model?.toLowerCase().includes('deepseek')
) {
const deepSeek = createDeepSeek({
apiKey: config.apiKey,
baseURL: apiBase,
})
model = deepSeek(config.model)
} else {
const openai = createOpenAI({
apiKey: config.apiKey,
baseURL: apiBase,
})
model = openai(config.model)
}
return wrapLanguageModel({
model,
middleware: extractReasoningMiddleware({ tagName: 'think' }),
})
}
/**
 * Resolve the effective API base URL: an explicitly configured `apiBase`
 * always wins; otherwise fall back to the provider's well-known default,
 * or the OpenAI endpoint for unrecognized providers.
 */
export function getApiBase(config: ConfigAi) {
  const defaultBases: Record<string, string> = {
    openrouter: 'https://openrouter.ai/api/v1',
    deepseek: 'https://api.deepseek.com/v1',
    ollama: 'http://localhost:11434/v1',
    siliconflow: 'https://api.siliconflow.cn/v1',
    infiniai: 'https://cloud.infini-ai.com/maas/v1',
  }
  // `||` (not `??`) on purpose: an empty-string apiBase falls back too.
  return config.apiBase || defaultBases[config.provider] || 'https://api.openai.com/v1'
}

52
shared/utils/errors.ts Normal file
View File

@ -0,0 +1,52 @@
import { APICallError, RetryError } from 'ai'
/**
 * Log a structured view of an AI SDK `APICallError`, then re-throw it as a
 * plain `Error` whose message condenses the status code, cause, response
 * body, and URL into a human-readable, multi-line string.
 *
 * Declared `: never` because it unconditionally throws — this lets TypeScript
 * treat code after a call to it as unreachable.
 *
 * @param operation Label identifying the caller; used as the log prefix.
 * @param error The API call error reported by the AI SDK.
 * @throws {Error} Always.
 */
function handleApiCallError(operation: string, error: APICallError): never {
  let message = error.message
  if (error.statusCode) message += ` (${error.statusCode})`
  if (error.cause) message += `\nCause: ${error.cause}`
  if (error.responseBody) message += `\nResponse: ${error.responseBody}`
  if (error.url) message += `\nURL: ${error.url}`
  // Keep the full error object in the console for debugging; the thrown
  // message is the condensed, user-facing form.
  console.error(`[${operation}]`, error, {
    statusCode: error.statusCode,
    response: error.responseBody,
    cause: error.cause,
    stack: error.stack,
    isRetryable: error.isRetryable,
    url: error.url,
  })
  throw new Error(message)
}
/**
 * Log and re-throw an AI SDK `RetryError` with a human-readable message.
 * If the final failed attempt was an `APICallError`, it is delegated to
 * `handleApiCallError` so the thrown message describes the root failure.
 *
 * Declared `: never` because every path ends in a throw.
 *
 * @param operation Label identifying the caller; used as the log prefix.
 * @param error The retry error reported by the AI SDK.
 * @throws {Error} Always.
 */
function handleRetryError(operation: string, error: RetryError): never {
  if (APICallError.isInstance(error.lastError)) {
    // Throws — the root-cause API error produces the better message.
    handleApiCallError(operation, error.lastError)
  }
  let message = error.message
  if (error.cause) message += `\nCause: ${error.cause}`
  if (error.stack) message += `\nStack: ${error.stack}`
  if (error.reason) message += `\nReason: ${error.reason}`
  console.error(`[${operation}]`, error, {
    cause: error.cause,
    stack: error.stack,
    lastError: error.lastError,
    reason: error.reason,
    errors: error.errors,
  })
  throw new Error(message)
}
/**
 * Parse an error thrown by the AI SDK, and re-throw it with a human-readable message.
 *
 * Known AI SDK error types (`APICallError`, `RetryError`) are converted into
 * plain `Error`s with detailed messages; anything else is logged and
 * re-thrown as-is. Declared `: never` because the trailing `throw error`
 * guarantees no path returns — callers can rely on this for control-flow
 * narrowing.
 *
 * @param operation Label identifying the caller; used as the log prefix.
 * @param error The value caught from an AI SDK call.
 * @throws {Error} Always.
 */
export function throwAiError(operation: string, error: unknown): never {
  if (APICallError.isInstance(error)) {
    handleApiCallError(operation, error)
  } else if (RetryError.isInstance(error)) {
    handleRetryError(operation, error)
  } else {
    // Unknown error shape: log it verbatim and fall through to the re-throw.
    console.error(`[${operation}]`, error)
  }
  throw error
}

87
shared/utils/json.ts Normal file
View File

@ -0,0 +1,87 @@
import { parsePartialJson } from '@ai-sdk/ui-utils'
import type { TextStreamPart } from 'ai'
import { z } from 'zod'
/**
 * Recursively makes every property of an object type optional.
 * Arrays are passed through unchanged (their elements are not deep-partialed).
 */
export type DeepPartial<T> = T extends object
  ? T extends Array<any>
    ? T
    : { [P in keyof T]?: DeepPartial<T[P]> }
  : T
/** Events yielded by `parseStreamingJson` while consuming a model stream. */
export type ParseStreamingJsonEvent<T> =
  /** A successfully (possibly partially) parsed snapshot of the JSON so far */
  | { type: 'object'; value: DeepPartial<T> }
  /** An incremental piece of the model's reasoning text */
  | { type: 'reasoning'; delta: string }
  /** An error chunk surfaced by the underlying stream */
  | { type: 'error'; message: string }
  /** The call finished with invalid content that can't be parsed as JSON */
  | { type: 'bad-end'; rawText: string }
/**
 * Strip a Markdown code-fence wrapper (```json … ``` or a bare `json` /
 * ``` prefix) from model output, returning the trimmed inner text.
 */
export function removeJsonMarkdown(text: string) {
  let result = text.trim()
  // At most one opening marker is removed; order matters so that
  // '```json' is not mistaken for a plain '```' fence.
  for (const prefix of ['```json', 'json', '```']) {
    if (result.startsWith(prefix)) {
      result = result.slice(prefix.length)
      break
    }
  }
  if (result.endsWith('```')) {
    result = result.slice(0, -3)
  }
  return result.trim()
}
/**
 * Parse streaming JSON text
 * @param fullStream Returned by AI SDK
 * @param _schema zod schema for type definition (used only for typing; no
 *   runtime validation is performed against it — see `isValid`)
 * @param isValid Custom validation function to check if the parsed JSON is valid
 */
export async function* parseStreamingJson<T extends z.ZodType>(
  fullStream: AsyncIterable<TextStreamPart<any>>,
  _schema: T,
  isValid: (value: DeepPartial<z.infer<T>>) => boolean,
): AsyncGenerator<ParseStreamingJsonEvent<z.infer<T>>> {
  // Accumulates all text deltas; re-parsed in full on every delta.
  let rawText = ''
  // Tracks whether the MOST RECENT re-parse succeeded — i.e. the state of
  // the stream's final text, which decides the 'bad-end' emission below.
  let isParseSuccessful = false
  for await (const chunk of fullStream) {
    // Reasoning ("think") tokens are forwarded as-is, not accumulated.
    if (chunk.type === 'reasoning') {
      yield { type: 'reasoning', delta: chunk.textDelta }
      continue
    }
    // Stream-level errors are surfaced but do not abort the loop.
    if (chunk.type === 'error') {
      yield {
        type: 'error',
        message:
          chunk.error instanceof Error
            ? chunk.error.message
            : String(chunk.error),
      }
      continue
    }
    if (chunk.type === 'text-delta') {
      rawText += chunk.textDelta
      // Strip any markdown fence before attempting a (partial) parse.
      const parsed = parsePartialJson(removeJsonMarkdown(rawText))
      isParseSuccessful =
        parsed.state === 'repaired-parse' || parsed.state === 'successful-parse'
      // Only emit snapshots the caller considers well-formed enough to use;
      // invalid intermediate states are silently skipped.
      if (isParseSuccessful && isValid(parsed.value as any)) {
        yield {
          type: 'object',
          value: parsed.value as DeepPartial<z.infer<T>>,
        }
      }
    }
  }
  // If the final accumulated text failed to parse, signal a bad end so the
  // consumer can distinguish "no usable output" from a clean finish.
  if (!isParseSuccessful) {
    console.warn(
      `[parseStreamingJson] Failed to parse JSON: ${removeJsonMarkdown(rawText)}`,
    )
    yield {
      type: 'bad-end',
      rawText,
    }
  }
}