feat: config manager modal
components/ConfigManager.vue (new file)
@@ -0,0 +1,108 @@
<script setup lang="ts">
const { config } = useConfigStore()
const showModal = ref(false)

defineExpose({
  show() {
    showModal.value = true
  },
})
</script>

<template>
  <div>
    <UModal v-model:open="showModal" title="Settings">
      <UButton icon="i-lucide-settings" />

      <template #body>
        <div class="space-y-2">
          <!-- AI provider -->
          <h3 class="font-bold">AI Provider</h3>
          <UFormField label="Provider">
            <template #help>
              Currently only OpenAI compatible providers are supported, e.g.
              Gemini, Together AI, SiliconCloud, ...
            </template>
            <USelect
              v-model="config.ai.provider"
              :items="[
                { label: 'OpenAI Compatible', value: 'openai-compatible' },
              ]"
            />
          </UFormField>
          <div
            v-if="config.ai.provider === 'openai-compatible'"
            class="space-y-2"
          >
            <UFormField label="API Key" required>
              <PasswordInput
                v-model="config.ai.apiKey"
                class="w-full"
                placeholder="API Key"
              />
            </UFormField>
            <UFormField label="API Base URL">
              <UInput
                v-model="config.ai.apiBase"
                class="w-full"
                placeholder="https://api.openai.com/v1"
              />
            </UFormField>
            <UFormField label="Model" required>
              <UInput
                v-model="config.ai.model"
                class="w-full"
                placeholder="Model name"
              />
            </UFormField>
          </div>

          <USeparator class="my-4" />

          <!-- Web search provider -->
          <h3 class="font-bold">Web Search Provider</h3>
          <UFormField label="Provider">
            <template #help>
              Tavily is similar to Firecrawl, but with a larger free quota
              (1,000 credits / month). Get an API key at
              <UButton
                class="!p-0"
                to="https://app.tavily.com/home"
                target="_blank"
                variant="link"
              >
                app.tavily.com
              </UButton>
              .
            </template>
            <USelect
              v-model="config.webSearch.provider"
              :items="[{ label: 'Tavily', value: 'tavily' }]"
            />
          </UFormField>
          <UFormField label="API Key" required>
            <PasswordInput
              v-model="config.webSearch.apiKey"
              class="w-full"
              placeholder="API Key"
            />
          </UFormField>
        </div>
      </template>
      <template #footer>
        <div class="flex items-center justify-between gap-2 w-full">
          <p class="text-sm text-gray-500">
            Settings are stored locally in your browser.
          </p>
          <UButton
            color="primary"
            icon="i-lucide-check"
            @click="showModal = false"
          >
            Save
          </UButton>
        </div>
      </template>
    </UModal>
  </div>
</template>
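Since the modal's visibility is internal state, the component exposes show() through defineExpose so a parent can open it imperatively via a template ref. A minimal sketch of the parent-side call (the ref name matches the one used in pages/index.vue further down):

<script setup lang="ts">
import type ConfigManager from '~/components/ConfigManager.vue'

// Template ref typed against the component's exposed API
const configManagerRef = ref<InstanceType<typeof ConfigManager>>()

function openSettings() {
  // Opens the settings modal; no props or events needed
  configManagerRef.value?.show()
}
</script>

<template>
  <ConfigManager ref="configManagerRef" />
</template>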
components/PasswordInput.vue (new file)
@@ -0,0 +1,25 @@
<script setup lang="ts">
const show = ref(false)
const password = defineModel<string>()
</script>

<template>
  <UInput
    v-model="password"
    :type="show ? 'text' : 'password'"
    :ui="{ trailing: 'pe-1' }"
  >
    <template #trailing>
      <UButton
        color="neutral"
        variant="link"
        size="sm"
        :icon="show ? 'i-lucide-eye-off' : 'i-lucide-eye'"
        :aria-label="show ? 'Hide' : 'Show'"
        :aria-pressed="show"
        aria-controls="password"
        @click="show = !show"
      />
    </template>
  </UInput>
</template>
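defineModel<string>() (Vue 3.4+) wires up the two-way binding without a hand-written prop/emit pair. Roughly what it replaces, as a sketch for comparison (not the exact compiler output):

const props = defineProps<{ modelValue?: string }>()
const emit = defineEmits<{ (e: 'update:modelValue', value?: string): void }>()

// defineModel gives you this for free: a writable computed bridging prop and emit
const password = computed({
  get: () => props.modelValue,
  set: (value) => emit('update:modelValue', value),
})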
composables/useAiProvider.ts (new file)
@@ -0,0 +1,15 @@
import { createOpenAI } from '@ai-sdk/openai'

export const useAiModel = () => {
  const config = useConfigStore()
  switch (config.config.ai.provider) {
    case 'openai-compatible':
      const openai = createOpenAI({
        apiKey: config.config.ai.apiKey,
        baseURL: config.config.ai.apiBase || 'https://api.openai.com/v1',
      })
      return openai(config.config.ai.model)
    default:
      throw new Error(`Unknown AI provider: ${config.config.ai.provider}`)
  }
}
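The composable returns a Vercel AI SDK model instance resolved from user config at call time, so settings changed in the modal apply to the next request without a reload. A minimal usage sketch (prompt text is illustrative):

import { streamText } from 'ai'
import { useAiModel } from '~/composables/useAiProvider'

// Same pattern the diffs below use to replace the hard-coded o3MiniModel
const result = streamText({
  model: useAiModel(),
  system: 'You are a helpful research assistant.',
  prompt: 'Summarize the key idea of deep research in one sentence.',
})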
composables/useTavily.ts (new file)
@@ -0,0 +1,9 @@
import { tavily } from '@tavily/core'

export const useTavily = () => {
  const config = useConfigStore()
  const tvly = tavily({
    apiKey: config.config.webSearch.apiKey,
  })
  return tvly
}
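The client is rebuilt from the stored key on each call, mirroring useAiModel. Usage matches the deepResearch change further down; a sketch (the query string is illustrative):

// Inside an async function; returns a TavilySearchResponse
const result = await useTavily().search('history of deep learning', {
  maxResults: 5,
})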
@@ -3,33 +3,19 @@ import { getEncoding } from 'js-tiktoken'
 
 import { RecursiveCharacterTextSplitter } from './text-splitter'
 
-// Providers
-const openai = createOpenAI({
-  apiKey: import.meta.env.VITE_OPENAI_API_KEY!,
-  baseURL: import.meta.env.VITE_OPENAI_ENDPOINT || 'https://api.openai.com/v1',
-})
-
-const customModel = import.meta.env.VITE_OPENAI_MODEL || 'o3-mini'
-
-// Models
-
-export const o3MiniModel = openai(customModel, {
-  // reasoningEffort: customModel.startsWith('o') ? 'medium' : undefined,
-  structuredOutputs: true,
-})
-
 const MinChunkSize = 140
 const encoder = getEncoding('o200k_base')
 
 // trim prompt to maximum context size
-export function trimPrompt(
-  prompt: string,
-  contextSize = Number(import.meta.env.VITE_CONTEXT_SIZE) || 128_000,
-) {
+export function trimPrompt(prompt: string, contextSize?: number) {
   if (!prompt) {
     return ''
   }
 
+  if (!contextSize) {
+    contextSize = useConfigStore().config.ai.contextSize || 128_000
+  }
+
   const length = encoder.encode(prompt).length
   if (length <= contextSize) {
     return prompt
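trimPrompt now reads its default context size from the config store instead of a build-time env var, so callers can keep omitting the second argument. A sketch of both call styles (longPrompt is a placeholder):

// Falls back to config.ai.contextSize (or 128_000) when omitted
const trimmed = trimPrompt(longPrompt)

// Explicit override still works, e.g. for a smaller model
const trimmedSmall = trimPrompt(longPrompt, 32_000)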
@@ -1,13 +1,15 @@
-import { generateObject, streamText } from 'ai'
+import { streamText } from 'ai'
 import { compact } from 'lodash-es'
 import pLimit from 'p-limit'
 import { z } from 'zod'
 import { parseStreamingJson, type DeepPartial } from '~/utils/json'
 
-import { o3MiniModel, trimPrompt } from './ai/providers'
+import { trimPrompt } from './ai/providers'
 import { systemPrompt } from './prompt'
 import zodToJsonSchema from 'zod-to-json-schema'
-import { tavily, type TavilySearchResponse } from '@tavily/core'
+import { type TavilySearchResponse } from '@tavily/core'
+import { useTavily } from '~/composables/useTavily'
+import { useAiModel } from '~/composables/useAiProvider'
 
 export type ResearchResult = {
   learnings: string[]
@@ -53,16 +55,6 @@ export type ResearchStep =
 // increase this if you have higher API rate limits
 const ConcurrencyLimit = 2
 
-// Initialize Firecrawl with optional API key and optional base url
-
-// const firecrawl = new FirecrawlApp({
-//   apiKey: process.env.FIRECRAWL_KEY ?? '',
-//   apiUrl: process.env.FIRECRAWL_BASE_URL,
-// });
-const tvly = tavily({
-  apiKey: import.meta.env.VITE_TAVILY_API_KEY ?? '',
-})
-
 /**
  * Schema for {@link generateSearchQueries} without dynamic descriptions
  */
@@ -105,12 +97,14 @@ export function generateSearchQueries({
   const prompt = [
     `Given the following prompt from the user, generate a list of SERP queries to research the topic. Return a maximum of ${numQueries} queries, but feel free to return less if the original prompt is clear. Make sure each query is unique and not similar to each other: <prompt>${query}</prompt>\n\n`,
     learnings
-      ? `Here are some learnings from previous research, use them to generate more specific queries: ${learnings.join('\n')}`
+      ? `Here are some learnings from previous research, use them to generate more specific queries: ${learnings.join(
+          '\n',
+        )}`
       : '',
     `You MUST respond in JSON with the following schema: ${jsonSchema}`,
   ].join('\n\n')
   return streamText({
-    model: o3MiniModel,
+    model: useAiModel(),
     system: systemPrompt(),
     prompt,
   })
@@ -147,12 +141,14 @@ function processSearchResult({
   )
   const prompt = [
     `Given the following contents from a SERP search for the query <query>${query}</query>, generate a list of learnings from the contents. Return a maximum of ${numLearnings} learnings, but feel free to return less if the contents are clear. Make sure each learning is unique and not similar to each other. The learnings should be concise and to the point, as detailed and information dense as possible. Make sure to include any entities like people, places, companies, products, things, etc in the learnings, as well as any exact metrics, numbers, or dates. The learnings will be used to research the topic further.`,
-    `<contents>${contents.map((content) => `<content>\n${content}\n</content>`).join('\n')}</contents>`,
+    `<contents>${contents
+      .map((content) => `<content>\n${content}\n</content>`)
+      .join('\n')}</contents>`,
     `You MUST respond in JSON with the following schema: ${jsonSchema}`,
   ].join('\n\n')
 
   return streamText({
-    model: o3MiniModel,
+    model: useAiModel(),
     abortSignal: AbortSignal.timeout(60_000),
     system: systemPrompt(),
     prompt,
@@ -179,7 +175,7 @@ export function writeFinalReport({
   ].join('\n\n')
 
   return streamText({
-    model: o3MiniModel,
+    model: useAiModel(),
     system: systemPrompt(),
     prompt: _prompt,
   })
@@ -263,7 +259,7 @@ export async function deepResearch({
       //   limit: 5,
       //   scrapeOptions: { formats: ['markdown'] },
       // });
-      const result = await tvly.search(searchQuery.query, {
+      const result = await useTavily().search(searchQuery.query, {
         maxResults: 5,
       })
       console.log(
@@ -331,7 +327,9 @@ export async function deepResearch({
 
         const nextQuery = `
 Previous research goal: ${searchQuery.researchGoal}
-Follow-up research directions: ${searchResult.followUpQuestions.map((q) => `\n${q}`).join('')}
+Follow-up research directions: ${searchResult.followUpQuestions
+          .map((q) => `\n${q}`)
+          .join('')}
 `.trim()
 
         return deepResearch({
@@ -2,8 +2,8 @@ import { streamText } from 'ai'
 import { z } from 'zod'
 import { zodToJsonSchema } from 'zod-to-json-schema'
 
-import { o3MiniModel } from './ai/providers'
 import { systemPrompt } from './prompt'
+import { useAiModel } from '~/composables/useAiProvider'
 
 type PartialFeedback = DeepPartial<z.infer<typeof feedbackTypeSchema>>
 
@@ -32,7 +32,7 @@ export function generateFeedback({
   ].join('\n\n')
 
   const stream = streamText({
-    model: o3MiniModel,
+    model: useAiModel(),
     system: systemPrompt(),
     prompt,
   })
@@ -2,12 +2,26 @@
   <div>
     <UContainer>
       <div class="max-w-4xl mx-auto py-8 space-y-4">
-        <div class="flex items-center justify-between">
+        <div class="flex items-center gap-2">
           <h1 class="text-3xl font-bold text-center mb-2">
             Deep Research Assistant
           </h1>
+          <ConfigManager ref="configManagerRef" class="ml-auto" />
           <ColorModeButton />
         </div>
 
+        <div>
+          This is a web UI for
+          <ULink target="_blank" href="https://github.com/dzhng/deep-research">
+            dzhng/deep-research
+          </ULink>
+          . It features streaming AI responses for realtime feedback, and
+          visualization of the research process using a tree structure.
+          <br />
+          All API requests are sent directly from your browser. No data is
+          stored remotely.
+        </div>
+
         <ResearchForm
           :is-loading-feedback="!!feedbackRef?.isLoading"
           ref="formRef"
@@ -34,6 +48,7 @@
 import type ResearchFeedback from '~/components/ResearchFeedback.vue'
 import type DeepResearch from '~/components/DeepResearch.vue'
 import type ResearchReport from '~/components/ResearchReport.vue'
+import type ConfigManager from '~/components/ConfigManager.vue'
 import type { ResearchInputData } from '~/components/ResearchForm.vue'
 import type { ResearchFeedbackResult } from '~/components/ResearchFeedback.vue'
 import type { ResearchResult } from '~/lib/deep-research'
@@ -42,6 +57,10 @@
   title: 'Deep Research Web UI',
 })
 
+const config = useConfigStore()
+const toast = useToast()
+
+const configManagerRef = ref<InstanceType<typeof ConfigManager>>()
 const formRef = ref<InstanceType<typeof ResearchForm>>()
 const feedbackRef = ref<InstanceType<typeof ResearchFeedback>>()
 const deepResearchRef = ref<InstanceType<typeof DeepResearch>>()
@@ -51,14 +70,27 @@
 const researchResult = ref<ResearchResult>()
 
 function getCombinedQuery() {
-  return `
-Initial Query: ${formRef.value?.form.query}
+  return `Initial Query: ${formRef.value?.form.query}
 Follow-up Questions and Answers:
-${feedback.value.map((qa) => `Q: ${qa.assistantQuestion}\nA: ${qa.userAnswer}`).join('\n')}
+${feedback.value
+  .map((qa) => `Q: ${qa.assistantQuestion}\nA: ${qa.userAnswer}`)
+  .join('\n')}
 `
 }
 
 async function generateFeedback(data: ResearchInputData) {
+  const aiConfig = config.config.ai
+  const webSearchConfig = config.config.webSearch
+
+  if (!aiConfig.model || !aiConfig.apiKey || !webSearchConfig.apiKey) {
+    toast.add({
+      title: 'Config not set',
+      description: 'Please configure AI and Web Search settings.',
+      color: 'error',
+    })
+    configManagerRef.value?.show()
+    return
+  }
   feedbackRef.value?.getFeedback(data.query, data.numQuestions)
 }
 
@@ -1,5 +1,34 @@
+import {} from '@pinia/nuxt'
+
+export type ConfigAiProvider = 'openai-compatible'
+export interface ConfigAi {
+  provider: ConfigAiProvider
+  apiKey?: string
+  apiBase?: string
+  model: string
+  contextSize?: number
+}
+export interface ConfigWebSearch {
+  provider: 'tavily'
+  apiKey?: string
+}
+
 export interface Config {
+  ai: ConfigAi
+  webSearch: ConfigWebSearch
+}
+
+export const useConfigStore = defineStore('config', () => {
+  const config = useLocalStorage<Config>('deep-research-config', {
     ai: {
-    provider: 'openai-compatible'
-  }
-}
+      provider: 'openai-compatible',
+      model: '',
+      contextSize: 128_000,
+    },
+    webSearch: {
+      provider: 'tavily',
+    },
+  })
+
+  return { config }
+})