mirror of
https://github.com/docmost/docmost.git
synced 2025-11-19 03:02:09 +10:00
AI module - init
This commit is contained in:
61
apps/client/src/ee/ai/hooks/use-ai.ts
Normal file
61
apps/client/src/ee/ai/hooks/use-ai.ts
Normal file
@ -0,0 +1,61 @@
|
||||
import { useState, useCallback, useRef } from "react";
|
||||
import { useAiGenerateStreamMutation } from "@/ee/ai/queries/ai-query.ts";
|
||||
import { AiGenerateDto } from "@/ee/ai/types/ai.types.ts";
|
||||
|
||||
export function useAiStream() {
|
||||
const [content, setContent] = useState("");
|
||||
const [isStreaming, setIsStreaming] = useState(false);
|
||||
const abortControllerRef = useRef<AbortController | null>(null);
|
||||
const mutation = useAiGenerateStreamMutation();
|
||||
|
||||
const startStream = useCallback(
|
||||
async (data: AiGenerateDto) => {
|
||||
setContent("");
|
||||
setIsStreaming(true);
|
||||
|
||||
try {
|
||||
const controller = await mutation.mutateAsync({
|
||||
...data,
|
||||
onChunk: (chunk) => {
|
||||
setContent((prev) => prev + chunk.content);
|
||||
},
|
||||
onError: (error) => {
|
||||
console.error("AI stream error:", error);
|
||||
setIsStreaming(false);
|
||||
},
|
||||
onComplete: () => {
|
||||
setIsStreaming(false);
|
||||
},
|
||||
});
|
||||
|
||||
abortControllerRef.current = controller;
|
||||
} catch (error) {
|
||||
console.error("Failed to start stream:", error);
|
||||
setIsStreaming(false);
|
||||
}
|
||||
},
|
||||
[mutation]
|
||||
);
|
||||
|
||||
const stopStream = useCallback(() => {
|
||||
if (abortControllerRef.current) {
|
||||
abortControllerRef.current.abort();
|
||||
abortControllerRef.current = null;
|
||||
setIsStreaming(false);
|
||||
}
|
||||
}, []);
|
||||
|
||||
const resetContent = useCallback(() => {
|
||||
setContent("");
|
||||
}, []);
|
||||
|
||||
return {
|
||||
content,
|
||||
isStreaming,
|
||||
startStream,
|
||||
stopStream,
|
||||
resetContent,
|
||||
isLoading: mutation.isPending,
|
||||
error: mutation.error,
|
||||
};
|
||||
}
|
||||
45
apps/client/src/ee/ai/queries/ai-query.ts
Normal file
45
apps/client/src/ee/ai/queries/ai-query.ts
Normal file
@ -0,0 +1,45 @@
|
||||
import {
|
||||
useMutation,
|
||||
UseMutationResult,
|
||||
useQuery,
|
||||
UseQueryResult,
|
||||
} from "@tanstack/react-query";
|
||||
import {
|
||||
generateAiContent,
|
||||
generateAiContentStream,
|
||||
getAiConfig,
|
||||
} from "@/ee/ai/services/ai-service.ts";
|
||||
import {
|
||||
AiConfigResponse,
|
||||
AiContentResponse,
|
||||
AiGenerateDto,
|
||||
AiStreamChunk,
|
||||
AiStreamError,
|
||||
} from "@/ee/ai/types/ai.types.ts";
|
||||
|
||||
export function useAiGenerateMutation(): UseMutationResult<
|
||||
AiContentResponse,
|
||||
Error,
|
||||
AiGenerateDto
|
||||
> {
|
||||
return useMutation({
|
||||
mutationFn: (data: AiGenerateDto) => generateAiContent(data),
|
||||
});
|
||||
}
|
||||
|
||||
// Callbacks invoked while an AI generation stream is in flight.
interface StreamCallbacks {
  // Called once per streamed content fragment, in arrival order.
  onChunk: (chunk: AiStreamChunk) => void;
  // Called when the stream reports or encounters an error.
  onError?: (error: AiStreamError) => void;
  // Called when the stream finishes normally.
  onComplete?: () => void;
}
|
||||
|
||||
export function useAiGenerateStreamMutation(): UseMutationResult<
|
||||
AbortController,
|
||||
Error,
|
||||
AiGenerateDto & StreamCallbacks
|
||||
> {
|
||||
return useMutation({
|
||||
mutationFn: ({ onChunk, onError, onComplete, ...data }) =>
|
||||
generateAiContentStream(data, onChunk, onError, onComplete),
|
||||
});
|
||||
}
|
||||
89
apps/client/src/ee/ai/services/ai-service.ts
Normal file
89
apps/client/src/ee/ai/services/ai-service.ts
Normal file
@ -0,0 +1,89 @@
|
||||
import api from "@/lib/api-client.ts";
|
||||
import {
|
||||
AiGenerateDto,
|
||||
AiContentResponse,
|
||||
AiStreamChunk,
|
||||
AiStreamError,
|
||||
} from "@/ee/ai/types/ai.types.ts";
|
||||
|
||||
export async function generateAiContent(
|
||||
data: AiGenerateDto,
|
||||
): Promise<AiContentResponse> {
|
||||
const req = await api.post<AiContentResponse>("/ai/generate", data);
|
||||
return req.data;
|
||||
}
|
||||
|
||||
export async function generateAiContentStream(
|
||||
data: AiGenerateDto,
|
||||
onChunk: (chunk: AiStreamChunk) => void,
|
||||
onError?: (error: AiStreamError) => void,
|
||||
onComplete?: () => void,
|
||||
): Promise<AbortController> {
|
||||
const abortController = new AbortController();
|
||||
try {
|
||||
const response = await fetch("/api/ai/generate/stream", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(data),
|
||||
signal: abortController.signal,
|
||||
credentials: "include", // This ensures cookies are sent, matching axios withCredentials
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP error! status: ${response.status}`);
|
||||
}
|
||||
|
||||
const reader = response.body?.getReader();
|
||||
const decoder = new TextDecoder();
|
||||
|
||||
if (!reader) {
|
||||
throw new Error("Response body is not readable");
|
||||
}
|
||||
|
||||
const processStream = async () => {
|
||||
try {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
|
||||
const chunk = decoder.decode(value, { stream: true });
|
||||
const lines = chunk.split("\n");
|
||||
|
||||
for (const line of lines) {
|
||||
if (line.startsWith("data: ")) {
|
||||
const data = line.slice(6);
|
||||
if (data === "[DONE]") {
|
||||
onComplete?.();
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const parsed = JSON.parse(data);
|
||||
if (parsed.error) {
|
||||
onError?.(parsed);
|
||||
} else {
|
||||
onChunk(parsed);
|
||||
}
|
||||
} catch (e) {
|
||||
// Ignore parse errors for incomplete chunks
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
if (error.name !== "AbortError") {
|
||||
onError?.({ error: error.message });
|
||||
}
|
||||
} finally {
|
||||
reader.releaseLock();
|
||||
}
|
||||
};
|
||||
|
||||
processStream();
|
||||
} catch (error) {
|
||||
onError?.({ error: error.message });
|
||||
}
|
||||
|
||||
return abortController;
|
||||
}
|
||||
40
apps/client/src/ee/ai/types/ai.types.ts
Normal file
40
apps/client/src/ee/ai/types/ai.types.ts
Normal file
@ -0,0 +1,40 @@
|
||||
// AI text operations understood by the server; the string values are the
// wire identifiers sent in AiGenerateDto.action.
export enum AiAction {
  IMPROVE_WRITING = "improve_writing",
  FIX_SPELLING_GRAMMAR = "fix_spelling_grammar",
  MAKE_SHORTER = "make_shorter",
  MAKE_LONGER = "make_longer",
  SIMPLIFY = "simplify",
  CHANGE_TONE = "change_tone",
  SUMMARIZE = "summarize",
  CONTINUE_WRITING = "continue_writing",
  TRANSLATE = "translate",
  // Free-form instruction; presumably paired with AiGenerateDto.prompt —
  // TODO confirm against the server implementation.
  CUSTOM = "custom",
}
|
||||
|
||||
// Request payload for the AI generation endpoints.
export interface AiGenerateDto {
  // Predefined operation to apply to `content`.
  action?: AiAction;
  // Source text the AI operates on.
  content: string;
  // Additional instruction text — presumably used with AiAction.CUSTOM;
  // verify against the server-side handler.
  prompt?: string;
}
|
||||
|
||||
// Result of a completed (non-streaming) AI generation request.
export interface AiContentResponse {
  // The generated text.
  content: string;
  // Token accounting, when the provider reports it.
  usage?: {
    promptTokens: number;
    completionTokens: number;
    totalTokens: number;
  };
}
|
||||
|
||||
// Server-side AI configuration exposed to the client.
export interface AiConfigResponse {
  // Whether an AI provider is configured and usable.
  configured: boolean;
  // Actions the server will accept.
  availableActions: AiAction[];
}
|
||||
|
||||
// One content fragment delivered over the SSE stream.
export interface AiStreamChunk {
  content: string;
}
|
||||
|
||||
// Error payload delivered over the SSE stream or synthesized from a
// transport failure.
export interface AiStreamError {
  error: string;
}
|
||||
Submodule apps/server/src/ee updated: fbc01d808f...4100345c18
Reference in New Issue
Block a user