Philipinho
2025-06-08 18:55:57 -07:00
parent 097e30e992
commit cbaf3394c0
11 changed files with 166 additions and 74 deletions

View File

@ -31,6 +31,8 @@ import { getFileImportSizeLimit, isCloud } from "@/lib/config.ts";
import { formatBytes } from "@/lib";
import { workspaceAtom } from "@/features/user/atoms/current-user-atom.ts";
import { getFileTaskById } from "@/features/file-task/services/file-task-service.ts";
import { queryClient } from "@/main.tsx";
import { useQueryEmit } from "@/features/websocket/use-query-emit.ts";
interface PageImportModalProps {
spaceId: string;
@ -79,6 +81,7 @@ function ImportFormatSelection({ spaceId, onClose }: ImportFormatSelection) {
const [treeData, setTreeData] = useAtom(treeDataAtom);
const [workspace] = useAtom(workspaceAtom);
const [fileTaskId, setFileTaskId] = useState<string | null>(null);
const emit = useQueryEmit();
const canUseConfluence = isCloud() || workspace?.hasLicenseKey;
@ -94,16 +97,13 @@ function ImportFormatSelection({ spaceId, onClose }: ImportFormatSelection) {
notifications.show({
id: "import",
title: t("Importing pages"),
message: t(
"Page import is in progress. Refresh this tab after a while.",
),
message: t("Page import is in progress."),
loading: true,
withCloseButton: false,
autoClose: false,
});
setFileTaskId(importTask.id);
console.log("taskId set", importTask.id);
} catch (err) {
console.log("Failed to import page", err);
notifications.update({
@ -140,6 +140,17 @@ function ImportFormatSelection({ spaceId, onClose }: ImportFormatSelection) {
});
clearInterval(intervalId);
setFileTaskId(null);
await queryClient.refetchQueries({
queryKey: ["root-sidebar-pages", fileTask.spaceId],
});
setTimeout(() => {
emit({
operation: "refetchRootTreeNodeEvent",
spaceId: spaceId,
});
}, 50);
}
if (status === "failed") {

View File

@ -24,7 +24,10 @@ import {
IconPointFilled,
IconTrash,
} from "@tabler/icons-react";
import { appendNodeChildrenAtom, treeDataAtom } from "@/features/page/tree/atoms/tree-data-atom.ts";
import {
appendNodeChildrenAtom,
treeDataAtom,
} from "@/features/page/tree/atoms/tree-data-atom.ts";
import clsx from "clsx";
import EmojiPicker from "@/components/ui/emoji-picker.tsx";
import { useTreeMutation } from "@/features/page/tree/hooks/use-tree-mutation.ts";
@ -32,6 +35,7 @@ import {
appendNodeChildren,
buildTree,
buildTreeWithChildren,
mergeRootTrees,
updateTreeNodeIcon,
} from "@/features/page/tree/utils/utils.ts";
import { SpaceTreeNode } from "@/features/page/tree/types.ts";
@ -104,17 +108,17 @@ export default function SpaceTree({ spaceId, readOnly }: SpaceTreeProps) {
const allItems = pagesData.pages.flatMap((page) => page.items);
const treeData = buildTree(allItems);
if (data.length < 1 || data?.[0].spaceId !== spaceId) {
// Thoughts
// don't reset if there is data in state
// we only expect to call this once on initial load
// even if we decide to refetch, it should only update
// and append root pages instead of resetting the entire tree
// which loses async-loaded children too
setData(treeData);
setData((prev) => {
// fresh space; full reset
if (prev.length === 0 || prev[0]?.spaceId !== spaceId) {
setIsDataLoaded(true);
setOpenTreeNodes({});
return treeData;
}
// same space; append only missing roots
return mergeRootTrees(prev, treeData);
});
}
}, [pagesData, hasNextPage]);
@ -297,14 +301,16 @@ function Node({ node, style, dragHandle, tree }: NodeRendererProps<any>) {
const handleEmojiSelect = (emoji: { native: string }) => {
handleUpdateNodeIcon(node.id, emoji.native);
updatePageMutation.mutateAsync({ pageId: node.id, icon: emoji.native }).then((data) => {
updatePageMutation
.mutateAsync({ pageId: node.id, icon: emoji.native })
.then((data) => {
setTimeout(() => {
emit({
operation: "updateOne",
spaceId: node.data.spaceId,
entity: ["pages"],
id: node.id,
payload: { icon: emoji.native, parentPageId: data.parentPageId},
payload: { icon: emoji.native, parentPageId: data.parentPageId },
});
}, 50);
});
@ -570,7 +576,7 @@ interface PageArrowProps {
function PageArrow({ node, onExpandTree }: PageArrowProps) {
useEffect(() => {
if(node.isOpen){
if (node.isOpen) {
onExpandTree();
}
}, []);

View File

@ -121,7 +121,6 @@ export const deleteTreeNode = (
.filter((node) => node !== null);
};
export function buildTreeWithChildren(items: SpaceTreeNode[]): SpaceTreeNode[] {
const nodeMap = {};
let result: SpaceTreeNode[] = [];
@ -167,10 +166,12 @@ export function appendNodeChildren(
// Preserve deeper children if they exist and remove node if deleted
return treeItems.map((node) => {
if (node.id === nodeId) {
const newIds = new Set(children.map(c => c.id));
const newIds = new Set(children.map((c) => c.id));
const existingMap = new Map(
(node.children ?? []).filter(c => newIds.has(c.id)).map(c => [c.id, c])
(node.children ?? [])
.filter((c) => newIds.has(c.id))
.map((c) => [c.id, c]),
);
const merged = children.map((newChild) => {
@ -196,3 +197,21 @@ export function appendNodeChildren(
return node;
});
}
/**
* Merge root nodes; keep existing ones intact and append new ones.
*/
export function mergeRootTrees(
prevRoots: SpaceTreeNode[],
incomingRoots: SpaceTreeNode[],
): SpaceTreeNode[] {
const seen = new Set(prevRoots.map((r) => r.id));
// add new roots that were not present before
const merged = [...prevRoots];
incomingRoots.forEach((node) => {
if (!seen.has(node.id)) merged.push(node);
});
return sortPositionKeys(merged);
}
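For context, a minimal usage sketch of the merge (node shapes trimmed to a few fields for readability, so not the full SpaceTreeNode; ids and positions are made up):

// roots already held in the tree atom, with async-loaded children attached
const prev = [
  { id: "a", name: "Alpha", position: "a0", children: [{ id: "a1", name: "Child" }] },
];
// roots rebuilt from a fresh "root-sidebar-pages" fetch for the same space
const incoming = [
  { id: "a", name: "Alpha", position: "a0" },
  { id: "b", name: "Beta", position: "a1" },
];
// "a" is kept untouched (its loaded children survive), "b" is appended,
// and the merged list is re-sorted by position key via sortPositionKeys.
const merged = mergeRootTrees(prev, incoming);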

View File

@ -47,15 +47,28 @@ export type MoveTreeNodeEvent = {
parentId: string;
index: number;
position: string;
}
};
};
export type DeleteTreeNodeEvent = {
operation: "deleteTreeNode";
spaceId: string;
payload: {
node: SpaceTreeNode
}
node: SpaceTreeNode;
};
};
export type WebSocketEvent = InvalidateEvent | InvalidateCommentsEvent | UpdateEvent | DeleteEvent | AddTreeNodeEvent | MoveTreeNodeEvent | DeleteTreeNodeEvent;
export type RefetchRootTreeNodeEvent = {
operation: "refetchRootTreeNodeEvent";
spaceId: string;
};
export type WebSocketEvent =
| InvalidateEvent
| InvalidateCommentsEvent
| UpdateEvent
| DeleteEvent
| AddTreeNodeEvent
| MoveTreeNodeEvent
| DeleteTreeNodeEvent
| RefetchRootTreeNodeEvent;
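Since every variant carries a distinct `operation` literal, the union stays discriminated and handlers can narrow on it; a small illustrative sketch (not the actual subscription hook):

function handleEvent(event: WebSocketEvent) {
  switch (event.operation) {
    case "refetchRootTreeNodeEvent":
      // event is narrowed to RefetchRootTreeNodeEvent here, so only spaceId is available
      console.log("refetch root tree for space", event.spaceId);
      break;
    // ...remaining operations
  }
}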

View File

@ -12,6 +12,7 @@ import {
invalidateOnUpdatePage,
} from "../page/queries/page-query";
import { RQ_KEY } from "../comment/queries/comment-query";
import { queryClient } from "@/main.tsx";
export const useQuerySubscription = () => {
const queryClient = useQueryClient();
@ -84,6 +85,17 @@ export const useQuerySubscription = () => {
);
*/
break;
case "refetchRootTreeNodeEvent": {
const spaceId = data.spaceId;
queryClient.refetchQueries({
queryKey: ["root-sidebar-pages", spaceId],
});
queryClient.invalidateQueries({
queryKey: ["recent-changes", spaceId],
});
break;
}
}
});
}, [queryClient, socket]);

View File

@ -5,3 +5,14 @@ export class FileTaskIdDto {
@IsUUID()
fileTaskId: string;
}
export type ImportPageNode = {
id: string;
slugId: string;
name: string;
content: string;
position?: string | null;
parentPageId: string | null;
fileExtension: string;
filePath: string;
};
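Roughly, each entry the importer builds looks like this (all values illustrative; the exact content format at this stage is an assumption):

const node: ImportPageNode = {
  id: "0197a6c1-0000-7000-8000-000000000000", // generated page id
  slugId: "x7Kq2LmN9pQr",
  name: "Getting Started",
  content: "# Getting Started\n...",          // raw file contents before conversion
  position: null,                             // filled in later by the position-key pass
  parentPageId: null,                         // null marks a root page of the import
  fileExtension: ".md",
  filePath: "docs/getting-started.md",
};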

View File

@ -6,6 +6,7 @@ import { FileTaskService } from './services/file-task.service';
import { FileTaskProcessor } from './processors/file-task.processor';
import { ImportAttachmentService } from './services/import-attachment.service';
import { FileTaskController } from './file-task.controller';
import { PageModule } from '../../core/page/page.module';
@Module({
providers: [
@ -16,6 +17,6 @@ import { FileTaskController } from './file-task.controller';
],
exports: [ImportService, ImportAttachmentService],
controllers: [ImportController, FileTaskController],
imports: [StorageModule],
imports: [StorageModule, PageModule],
})
export class ImportModule {}

View File

@ -44,18 +44,14 @@ export class FileTaskProcessor extends WorkerHost implements OnModuleDestroy {
`Error processing ${job.name} job. Reason: ${job.failedReason}`,
);
const MAX_JOB_ATTEMPTS = 3;
try {
const fileTaskId = job.data.fileTaskId;
if (job.attemptsMade >= MAX_JOB_ATTEMPTS) {
this.logger.error(`Max import attempts reached for Task ${fileTaskId}.`);
await this.fileTaskService.updateTaskStatus(
fileTaskId,
FileTaskStatus.Failed,
job.failedReason,
);
try {
const fileTask = await this.fileTaskService.getFileTask(fileTaskId);
if (fileTask) {
await this.storageService.delete(fileTask.filePath);
@ -64,14 +60,6 @@ export class FileTaskProcessor extends WorkerHost implements OnModuleDestroy {
this.logger.error(err);
}
}
}
@OnWorkerEvent('stalled')
async onStalled(job: Job) {
this.logger.error(
`Stalled processing ${job.name} job. Reason: ${job.failedReason}`,
);
}
@OnWorkerEvent('completed')
onCompleted(job: Job) {

View File

@ -29,6 +29,8 @@ import { executeTx } from '@docmost/db/utils';
import { BacklinkRepo } from '@docmost/db/repos/backlink/backlink.repo';
import { ImportAttachmentService } from './import-attachment.service';
import { ModuleRef } from '@nestjs/core';
import { PageService } from '../../../core/page/services/page.service';
import { ImportPageNode } from '../dto/file-task-dto';
@Injectable()
export class FileTaskService {
@ -37,6 +39,7 @@ export class FileTaskService {
constructor(
private readonly storageService: StorageService,
private readonly importService: ImportService,
private readonly pageService: PageService,
private readonly backlinkRepo: BacklinkRepo,
@InjectKysely() private readonly db: KyselyDB,
private readonly importAttachmentService: ImportAttachmentService,
@ -55,6 +58,10 @@ export class FileTaskService {
return;
}
if (fileTask.status === FileTaskStatus.Failed) {
return;
}
if (fileTask.status === FileTaskStatus.Success) {
this.logger.log('Imported task already processed.');
return;
@ -118,6 +125,8 @@ export class FileTaskService {
}
try {
await this.updateTaskStatus(fileTaskId, FileTaskStatus.Success, null);
await cleanupTmpFile();
await cleanupTmpDir();
// delete stored file on success
await this.storageService.delete(fileTask.filePath);
} catch (err) {
@ -142,19 +151,7 @@ export class FileTaskService {
const allFiles = await collectMarkdownAndHtmlFiles(extractDir);
const attachmentCandidates = await buildAttachmentCandidates(extractDir);
const pagesMap = new Map<
string,
{
id: string;
slugId: string;
name: string;
content: string;
position?: string | null;
parentPageId: string | null;
fileExtension: string;
filePath: string;
}
>();
const pagesMap = new Map<string, ImportPageNode>();
for (const absPath of allFiles) {
const relPath = path
@ -201,14 +198,42 @@ export class FileTaskService {
});
// generate position keys
const siblingsMap = new Map<string | null, typeof Array.prototype>();
const siblingsMap = new Map<string | null, ImportPageNode[]>();
pagesMap.forEach((page) => {
const sibs = siblingsMap.get(page.parentPageId) || [];
sibs.push(page);
siblingsMap.set(page.parentPageId, sibs);
const group = siblingsMap.get(page.parentPageId) ?? [];
group.push(page);
siblingsMap.set(page.parentPageId, group);
});
siblingsMap.forEach((sibs) => {
// get root pages
const rootSibs = siblingsMap.get(null);
if (rootSibs?.length) {
rootSibs.sort((a, b) => a.name.localeCompare(b.name));
// get first position key from the server
const nextPosition = await this.pageService.nextPagePosition(
fileTask.spaceId,
);
let prevPos: string | null = null;
rootSibs.forEach((page, idx) => {
if (idx === 0) {
page.position = nextPosition;
} else {
page.position = generateJitteredKeyBetween(prevPos, null);
}
prevPos = page.position;
});
}
// non-root buckets (children & deeper levels)
siblingsMap.forEach((sibs, parentId) => {
if (parentId === null) return; // root already done
sibs.sort((a, b) => a.name.localeCompare(b.name));
let prevPos: string | null = null;
for (const page of sibs) {
page.position = generateJitteredKeyBetween(prevPos, null);
@ -216,6 +241,7 @@ export class FileTaskService {
}
});
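// Illustrative sketch of how the keys above come out (values made up; assumes
// generateJitteredKeyBetween is the fractional-indexing helper this service already imports):
//   rootSibs[0].position = nextPosition;                            // e.g. "a2", from pageService.nextPagePosition()
//   rootSibs[1].position = generateJitteredKeyBetween("a2", null);  // a jittered key that sorts after "a2"
//   firstChild.position  = generateJitteredKeyBetween(null, null);  // each non-root bucket restarts with prevPos = null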
// internal page links
const filePathToPageMetaMap = new Map<
string,
{ id: string; title: string; slugId: string }

View File

@ -260,7 +260,11 @@ export class ImportAttachmentService {
}
// wait for all uploads & DB inserts
try {
await Promise.all(attachmentTasks);
} catch (err) {
this.logger.log('Import attachment upload error', err);
}
return $.root().html() || '';
}
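Catching here means one failed upload is logged and the page import still completes. A variant that also reports every individual failure could use Promise.allSettled (an alternative sketch, not what this commit does):

const results = await Promise.allSettled(attachmentTasks);
results
  .filter((r): r is PromiseRejectedResult => r.status === "rejected")
  .forEach((r) => this.logger.log("Import attachment upload error", r.reason));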

View File

@ -54,6 +54,7 @@ import { BacklinksProcessor } from './processors/backlinks.processor';
defaultJobOptions: {
removeOnComplete: true,
removeOnFail: true,
attempts: 1,
},
}),
],