stream response

Philipinho
2025-10-09 01:58:44 +01:00
parent 863eab68de
commit 68177d6f34
5 changed files with 123 additions and 23 deletions

View File

@@ -8,17 +8,29 @@ import { markdownToHtml } from "@docmost/editor-ext";
 import DOMPurify from "dompurify";
 interface AiSearchResultProps {
-  result: IAiSearchResponse;
+  result?: IAiSearchResponse;
   isLoading?: boolean;
+  streamingAnswer?: string;
+  streamingSources?: any[];
 }
-export function AiSearchResult({ result, isLoading }: AiSearchResultProps) {
+export function AiSearchResult({
+  result,
+  isLoading,
+  streamingAnswer = "",
+  streamingSources = [],
+}: AiSearchResultProps) {
+  // Use streaming data if available, otherwise fall back to result
+  const answer = streamingAnswer || result?.answer || "";
+  const sources =
+    streamingSources.length > 0 ? streamingSources : result?.sources || [];
   // Deduplicate sources by pageId, keeping the one with highest similarity
   const deduplicatedSources = useMemo(() => {
-    if (!result?.sources) return [];
+    if (!sources || sources.length === 0) return [];
     const pageMap = new Map();
-    result.sources.forEach((source) => {
+    sources.forEach((source) => {
       const existing = pageMap.get(source.pageId);
       if (!existing || source.similarity > existing.similarity) {
         pageMap.set(source.pageId, source);
@@ -26,9 +38,9 @@ export function AiSearchResult({ result, isLoading }: AiSearchResultProps) {
     });
     return Array.from(pageMap.values());
-  }, [result?.sources]);
+  }, [sources]);

-  if (isLoading) {
+  if (isLoading && !answer) {
     return (
       <Paper p="md" radius="md" withBorder>
         <Group>
@@ -39,7 +51,7 @@ export function AiSearchResult({ result, isLoading }: AiSearchResultProps) {
     );
   }

-  if (!result) {
+  if (!answer && !isLoading) {
     return null;
   }
@@ -51,12 +63,11 @@ export function AiSearchResult({ result, isLoading }: AiSearchResultProps) {
         <Text fw={600} size="sm">
           AI Answer
         </Text>
+        {isLoading && <Loader size="xs" />}
       </Group>
-      <Text
-        size="sm"
-        style={{ whiteSpace: "pre-wrap" }}
+      <div
         dangerouslySetInnerHTML={{
-          __html: DOMPurify.sanitize(markdownToHtml(result.answer) as string),
+          __html: DOMPurify.sanitize(markdownToHtml(answer) as string),
         }}
       />
     </Paper>
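A minimal sketch (not part of the commit, hypothetical sample data) of the fallback and dedup rules introduced above: streaming values win over the final result, and per pageId only the highest-similarity source is kept.

// Hypothetical inputs; field names follow the component props above.
const result = { answer: "final answer", sources: [{ pageId: "a", similarity: 0.8 }] };
const streamingAnswer = "partial answer";
const streamingSources = [
  { pageId: "a", similarity: 0.7 },
  { pageId: "a", similarity: 0.9 },
];

// Same expressions as in the component body.
const answer = streamingAnswer || result?.answer || ""; // "partial answer"
const sources = streamingSources.length > 0 ? streamingSources : result?.sources || [];

const pageMap = new Map<string, { pageId: string; similarity: number }>();
sources.forEach((source) => {
  const existing = pageMap.get(source.pageId);
  if (!existing || source.similarity > existing.similarity) {
    pageMap.set(source.pageId, source);
  }
});
console.log(Array.from(pageMap.values())); // [{ pageId: "a", similarity: 0.9 }]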

View File

@@ -48,7 +48,16 @@ export function SearchSpotlight({ spaceId }: SearchSpotlightProps) {
     searchParams,
     !isAiMode // Disable regular search when in AI mode
   );
-  const { data: aiSearchResult, isPending: isAiLoading, mutate: triggerAiSearchMutation } = useAiSearch();
+  const {
+    //@ts-ignore
+    data: aiSearchResult,
+    //@ts-ignore
+    isPending: isAiLoading,
+    //@ts-ignore
+    mutate: triggerAiSearchMutation,
+    streamingAnswer,
+    streamingSources,
+  } = useAiSearch();

   // Determine result type for rendering
   const isAttachmentSearch =
@@ -134,10 +143,12 @@ export function SearchSpotlight({ spaceId }: SearchSpotlightProps) {
           {query.length === 0 && (
             <Spotlight.Empty>{t("Ask a question...")}</Spotlight.Empty>
           )}
-          {query.length > 0 && (isAiLoading || aiSearchResult) && (
+          {query.length > 0 && (isAiLoading || aiSearchResult || streamingAnswer) && (
             <AiSearchResult
               result={aiSearchResult}
               isLoading={isAiLoading}
+              streamingAnswer={streamingAnswer}
+              streamingSources={streamingSources}
             />
           )}
           {query.length > 0 && !isAiLoading && !aiSearchResult && (

View File

@@ -1,11 +1,39 @@
 import { useMutation, UseMutationResult } from "@tanstack/react-query";
+import { useState } from "react";
 import { askAi, IAiSearchResponse } from "@/features/search/services/ai-search-service";
 import { IPageSearchParams } from "@/features/search/types/search.types";

-export function useAiSearch(): UseMutationResult<IAiSearchResponse, Error, IPageSearchParams> {
-  return useMutation({
-    mutationFn: async (params: IPageSearchParams) => {
-      return await askAi(params);
+// @ts-ignore
+interface UseAiSearchResult extends UseMutationResult<IAiSearchResponse, Error, IPageSearchParams> {
+  streamingAnswer: string;
+  streamingSources: any[];
+}
+export function useAiSearch(): UseAiSearchResult {
+  const [streamingAnswer, setStreamingAnswer] = useState("");
+  const [streamingSources, setStreamingSources] = useState<any[]>([]);
+  const mutation = useMutation({
+    mutationFn: async (params: IPageSearchParams & { contentType?: string }) => {
+      setStreamingAnswer("");
+      setStreamingSources([]);
+      const { contentType, ...apiParams } = params;
+      return await askAi(apiParams, (chunk) => {
+        if (chunk.content) {
+          setStreamingAnswer((prev) => prev + chunk.content);
+        }
+        if (chunk.sources) {
+          setStreamingSources(chunk.sources);
+        }
+      });
     },
   });
-}
+  return {
+    ...mutation,
+    streamingAnswer,
+    streamingSources,
+  };
+}
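A minimal consumer sketch of the extended hook (not part of the commit; the component name, import path, and request fields are illustrative, and SearchSpotlight above is the real integration). It shows the streamed state being read alongside the usual mutation API:

// Hypothetical consumer of useAiSearch.
import { useAiSearch } from "./use-ai-search"; // path illustrative
export function AskAiPanel({ query }: { query: string }) {
  const { mutate, isPending, streamingAnswer, streamingSources } = useAiSearch();
  return (
    <div>
      {/* IPageSearchParams is assumed to carry the search query */}
      <button onClick={() => mutate({ query })}>Ask AI</button>
      {isPending && !streamingAnswer && <span>Thinking…</span>}
      {/* streamingAnswer grows chunk by chunk while the request is in flight */}
      <p>{streamingAnswer}</p>
      <small>{streamingSources.length} source(s) received</small>
    </div>
  );
}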

View File

@@ -1,5 +1,5 @@
 import api from "@/lib/api-client";
-import { IPageSearchParams } from '@/features/search/types/search.types';
+import { IPageSearchParams } from "@/features/search/types/search.types";

 export interface IAiSearchResponse {
   answer: string;
@@ -17,7 +17,57 @@ export interface IAiSearchResponse {
 export async function askAi(
   params: IPageSearchParams,
+  onChunk?: (chunk: { content?: string; sources?: any[] }) => void,
 ): Promise<IAiSearchResponse> {
-  const req = await api.post<IAiSearchResponse>("/ai/ask", params);
-  return req.data;
-}
+  const response = await fetch("/api/ai/ask", {
+    method: "POST",
+    headers: {
+      "Content-Type": "application/json",
+    },
+    credentials: "include",
+    body: JSON.stringify(params),
+  });
+  if (!response.ok) {
+    throw new Error(`HTTP error! status: ${response.status}`);
+  }
+  const reader = response.body?.getReader();
+  const decoder = new TextDecoder();
+  let answer = "";
+  let sources: any[] = [];
+  if (reader) {
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done) break;
+      const chunk = decoder.decode(value);
+      const lines = chunk.split("\n");
+      for (const line of lines) {
+        if (line.startsWith("data: ")) {
+          const data = line.slice(6);
+          if (data === "[DONE]") break;
+          try {
+            const parsed = JSON.parse(data);
+            if (parsed.content) {
+              answer += parsed.content;
+              onChunk?.({ content: parsed.content });
+            }
+            if (parsed.sources) {
+              sources = parsed.sources;
+              onChunk?.({ sources: parsed.sources });
+            }
+          } catch (e) {
+            // Skip invalid JSON
+          }
+        }
+      }
+    }
+  }
+  return { answer, sources };
+}
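For reference, the reader loop above assumes an SSE-style response body in which every frame is a "data: <json>" line, the literal "[DONE]" terminates the stream, and the only payload fields read are "content" and "sources". A hypothetical stream (illustrative values, not captured from the API) would look like:

// Sample frames the parser accepts; field values are made up.
data: {"sources":[{"pageId":"abc123","title":"Some page","similarity":0.91}]}
data: {"content":"Streaming "}
data: {"content":"continues chunk by chunk."}
data: [DONE]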