mirror of https://github.com/docmost/docmost.git
WIP
@@ -31,6 +31,8 @@ import { getFileImportSizeLimit, isCloud } from "@/lib/config.ts";
 import { formatBytes } from "@/lib";
 import { workspaceAtom } from "@/features/user/atoms/current-user-atom.ts";
 import { getFileTaskById } from "@/features/file-task/services/file-task-service.ts";
+import { queryClient } from "@/main.tsx";
+import { useQueryEmit } from "@/features/websocket/use-query-emit.ts";
 
 interface PageImportModalProps {
   spaceId: string;
@@ -79,6 +81,7 @@ function ImportFormatSelection({ spaceId, onClose }: ImportFormatSelection) {
   const [treeData, setTreeData] = useAtom(treeDataAtom);
   const [workspace] = useAtom(workspaceAtom);
   const [fileTaskId, setFileTaskId] = useState<string | null>(null);
+  const emit = useQueryEmit();
 
   const canUseConfluence = isCloud() || workspace?.hasLicenseKey;
 
@@ -94,16 +97,13 @@ function ImportFormatSelection({ spaceId, onClose }: ImportFormatSelection) {
       notifications.show({
         id: "import",
         title: t("Importing pages"),
-        message: t(
-          "Page import is in progress. Refresh this tab after a while.",
-        ),
+        message: t("Page import is in progress."),
         loading: true,
         withCloseButton: false,
         autoClose: false,
       });
 
       setFileTaskId(importTask.id);
-      console.log("taskId set", importTask.id);
     } catch (err) {
       console.log("Failed to import page", err);
       notifications.update({
@@ -140,6 +140,17 @@ function ImportFormatSelection({ spaceId, onClose }: ImportFormatSelection) {
         });
         clearInterval(intervalId);
         setFileTaskId(null);
+
+        await queryClient.refetchQueries({
+          queryKey: ["root-sidebar-pages", fileTask.spaceId],
+        });
+
+        setTimeout(() => {
+          emit({
+            operation: "refetchRootTreeNodeEvent",
+            spaceId: spaceId,
+          });
+        }, 50);
       }
 
       if (status === "failed") {
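Note (not part of the diff): a minimal sketch of the polling effect this completion branch appears to live in, assuming getFileTaskById(fileTaskId) resolves to an object with status and spaceId fields; the "success" status string and the interval length are guesses.

// Illustrative sketch only — a fragment of the component, not the committed code.
useEffect(() => {
  if (!fileTaskId) return;

  const intervalId = setInterval(async () => {
    const fileTask = await getFileTaskById(fileTaskId); // assumed shape: { status, spaceId, ... }
    const status = fileTask.status;

    if (status === "success") {
      clearInterval(intervalId);
      setFileTaskId(null);

      // refetch the importing client's own sidebar tree
      await queryClient.refetchQueries({
        queryKey: ["root-sidebar-pages", fileTask.spaceId],
      });

      // then tell other clients in the space to refetch theirs
      setTimeout(() => {
        emit({ operation: "refetchRootTreeNodeEvent", spaceId: spaceId });
      }, 50);
    }

    if (status === "failed") {
      clearInterval(intervalId);
      setFileTaskId(null);
    }
  }, 3000); // polling interval is a guess

  return () => clearInterval(intervalId);
}, [fileTaskId]);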
@@ -24,7 +24,10 @@ import {
   IconPointFilled,
   IconTrash,
 } from "@tabler/icons-react";
-import { appendNodeChildrenAtom, treeDataAtom } from "@/features/page/tree/atoms/tree-data-atom.ts";
+import {
+  appendNodeChildrenAtom,
+  treeDataAtom,
+} from "@/features/page/tree/atoms/tree-data-atom.ts";
 import clsx from "clsx";
 import EmojiPicker from "@/components/ui/emoji-picker.tsx";
 import { useTreeMutation } from "@/features/page/tree/hooks/use-tree-mutation.ts";
@@ -32,6 +35,7 @@ import {
   appendNodeChildren,
   buildTree,
   buildTreeWithChildren,
+  mergeRootTrees,
   updateTreeNodeIcon,
 } from "@/features/page/tree/utils/utils.ts";
 import { SpaceTreeNode } from "@/features/page/tree/types.ts";
@@ -104,17 +108,17 @@ export default function SpaceTree({ spaceId, readOnly }: SpaceTreeProps) {
       const allItems = pagesData.pages.flatMap((page) => page.items);
       const treeData = buildTree(allItems);
 
-      if (data.length < 1 || data?.[0].spaceId !== spaceId) {
-        //Thoughts
-        // don't reset if there is data in state
-        // we only expect to call this once on initial load
-        // even if we decide to refetch, it should only update
-        // and append root pages instead of resetting the entire tree
-        // which looses async loaded children too
-        setData(treeData);
-        setIsDataLoaded(true);
-        setOpenTreeNodes({});
-      }
+      setData((prev) => {
+        // fresh space; full reset
+        if (prev.length === 0 || prev[0]?.spaceId !== spaceId) {
+          setIsDataLoaded(true);
+          setOpenTreeNodes({});
+          return treeData;
+        }
+
+        // same space; append only missing roots
+        return mergeRootTrees(prev, treeData);
+      });
     }
   }, [pagesData, hasNextPage]);
 
@@ -297,17 +301,19 @@ function Node({ node, style, dragHandle, tree }: NodeRendererProps<any>) {
 
   const handleEmojiSelect = (emoji: { native: string }) => {
     handleUpdateNodeIcon(node.id, emoji.native);
-    updatePageMutation.mutateAsync({ pageId: node.id, icon: emoji.native }).then((data) => {
-      setTimeout(() => {
-        emit({
-          operation: "updateOne",
-          spaceId: node.data.spaceId,
-          entity: ["pages"],
-          id: node.id,
-          payload: { icon: emoji.native, parentPageId: data.parentPageId},
-        });
-      }, 50);
-    });
+    updatePageMutation
+      .mutateAsync({ pageId: node.id, icon: emoji.native })
+      .then((data) => {
+        setTimeout(() => {
+          emit({
+            operation: "updateOne",
+            spaceId: node.data.spaceId,
+            entity: ["pages"],
+            id: node.id,
+            payload: { icon: emoji.native, parentPageId: data.parentPageId },
+          });
+        }, 50);
+      });
   };
 
   const handleRemoveEmoji = () => {
@@ -570,7 +576,7 @@ interface PageArrowProps {
 
 function PageArrow({ node, onExpandTree }: PageArrowProps) {
   useEffect(() => {
-    if(node.isOpen){
+    if (node.isOpen) {
       onExpandTree();
     }
   }, []);
@@ -121,7 +121,6 @@ export const deleteTreeNode = (
     .filter((node) => node !== null);
 };
 
-
 export function buildTreeWithChildren(items: SpaceTreeNode[]): SpaceTreeNode[] {
   const nodeMap = {};
   let result: SpaceTreeNode[] = [];
@@ -167,10 +166,12 @@ export function appendNodeChildren(
   // Preserve deeper children if they exist and remove node if deleted
   return treeItems.map((node) => {
     if (node.id === nodeId) {
-      const newIds = new Set(children.map(c => c.id));
+      const newIds = new Set(children.map((c) => c.id));
 
       const existingMap = new Map(
-        (node.children ?? []).filter(c => newIds.has(c.id)).map(c => [c.id, c])
+        (node.children ?? [])
+          .filter((c) => newIds.has(c.id))
+          .map((c) => [c.id, c]),
       );
 
       const merged = children.map((newChild) => {
@@ -196,3 +197,21 @@ export function appendNodeChildren(
     return node;
   });
 }
+
+/**
+ * Merge root nodes; keep existing ones intact, append new ones,
+ */
+export function mergeRootTrees(
+  prevRoots: SpaceTreeNode[],
+  incomingRoots: SpaceTreeNode[],
+): SpaceTreeNode[] {
+  const seen = new Set(prevRoots.map((r) => r.id));
+
+  // add new roots that were not present before
+  const merged = [...prevRoots];
+  incomingRoots.forEach((node) => {
+    if (!seen.has(node.id)) merged.push(node);
+  });
+
+  return sortPositionKeys(merged);
+}
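Note (not part of the diff): a minimal sketch of how the new mergeRootTrees helper is expected to behave, assuming sortPositionKeys orders nodes by their fractional position key; the node literals are illustrative.

// Illustrative sketch only.
const prevRoots = [
  { id: "a", position: "a0", children: [{ id: "a-child" }] },
] as unknown as SpaceTreeNode[];

const incomingRoots = [
  { id: "a", position: "a0", children: [] }, // already-known root: ignored, children kept
  { id: "b", position: "a1" },               // new root: appended
] as unknown as SpaceTreeNode[];

const merged = mergeRootTrees(prevRoots, incomingRoots);
// merged contains the original "a" node (async-loaded children preserved) plus "b",
// sorted by position key.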
@@ -47,15 +47,28 @@ export type MoveTreeNodeEvent = {
     parentId: string;
     index: number;
     position: string;
-  }
+  };
 };
 
 export type DeleteTreeNodeEvent = {
   operation: "deleteTreeNode";
   spaceId: string;
   payload: {
-    node: SpaceTreeNode
-  }
+    node: SpaceTreeNode;
+  };
 };
 
-export type WebSocketEvent = InvalidateEvent | InvalidateCommentsEvent | UpdateEvent | DeleteEvent | AddTreeNodeEvent | MoveTreeNodeEvent | DeleteTreeNodeEvent;
+export type RefetchRootTreeNodeEvent = {
+  operation: "refetchRootTreeNodeEvent";
+  spaceId: string;
+};
+
+export type WebSocketEvent =
+  | InvalidateEvent
+  | InvalidateCommentsEvent
+  | UpdateEvent
+  | DeleteEvent
+  | AddTreeNodeEvent
+  | MoveTreeNodeEvent
+  | DeleteTreeNodeEvent
+  | RefetchRootTreeNodeEvent;
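Note (not part of the diff): the union members are discriminated by their operation literal, so a switch on that field narrows the type; a minimal sketch:

// Illustrative sketch only.
function describeEvent(event: WebSocketEvent): string {
  switch (event.operation) {
    case "refetchRootTreeNodeEvent":
      // narrowed to RefetchRootTreeNodeEvent, so spaceId is known to exist
      return `refetch root tree for space ${event.spaceId}`;
    default:
      return "other event";
  }
}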
@@ -12,6 +12,7 @@ import {
   invalidateOnUpdatePage,
 } from "../page/queries/page-query";
 import { RQ_KEY } from "../comment/queries/comment-query";
+import { queryClient } from "@/main.tsx";
 
 export const useQuerySubscription = () => {
   const queryClient = useQueryClient();
@@ -84,6 +85,17 @@ export const useQuerySubscription = () => {
           );
           */
           break;
+        case "refetchRootTreeNodeEvent": {
+          const spaceId = data.spaceId;
+          queryClient.refetchQueries({
+            queryKey: ["root-sidebar-pages", spaceId],
+          });
+
+          queryClient.invalidateQueries({
+            queryKey: ["recent-changes", spaceId],
+          });
+          break;
+        }
       }
     });
   }, [queryClient, socket]);
@@ -5,3 +5,14 @@ export class FileTaskIdDto {
   @IsUUID()
   fileTaskId: string;
 }
+
+export type ImportPageNode = {
+  id: string;
+  slugId: string;
+  name: string;
+  content: string;
+  position?: string | null;
+  parentPageId: string | null;
+  fileExtension: string;
+  filePath: string;
+};
@@ -6,6 +6,7 @@ import { FileTaskService } from './services/file-task.service';
 import { FileTaskProcessor } from './processors/file-task.processor';
 import { ImportAttachmentService } from './services/import-attachment.service';
 import { FileTaskController } from './file-task.controller';
+import { PageModule } from '../../core/page/page.module';
 
 @Module({
   providers: [
@@ -16,6 +17,6 @@ import { FileTaskController } from './file-task.controller';
   ],
   exports: [ImportService, ImportAttachmentService],
   controllers: [ImportController, FileTaskController],
-  imports: [StorageModule],
+  imports: [StorageModule, PageModule],
 })
 export class ImportModule {}
@@ -44,35 +44,23 @@ export class FileTaskProcessor extends WorkerHost implements OnModuleDestroy {
       `Error processing ${job.name} job. Reason: ${job.failedReason}`,
     );
 
-    const MAX_JOB_ATTEMPTS = 3;
-    const fileTaskId = job.data.fileTaskId;
-
-    if (job.attemptsMade >= MAX_JOB_ATTEMPTS) {
-      this.logger.error(`Max import attempts reached for Task ${fileTaskId}.`);
+    try {
+      const fileTaskId = job.data.fileTaskId;
       await this.fileTaskService.updateTaskStatus(
         fileTaskId,
         FileTaskStatus.Failed,
         job.failedReason,
       );
 
-      try {
-        const fileTask = await this.fileTaskService.getFileTask(fileTaskId);
-        if (fileTask) {
-          await this.storageService.delete(fileTask.filePath);
-        }
-      } catch (err) {
-        this.logger.error(err);
-      }
+      const fileTask = await this.fileTaskService.getFileTask(fileTaskId);
+      if (fileTask) {
+        await this.storageService.delete(fileTask.filePath);
+      }
+    } catch (err) {
+      this.logger.error(err);
     }
   }
 
-  @OnWorkerEvent('stalled')
-  async onStalled(job: Job) {
-    this.logger.error(
-      `Stalled processing ${job.name} job. Reason: ${job.failedReason}`,
-    );
-  }
-
   @OnWorkerEvent('completed')
   onCompleted(job: Job) {
     this.logger.log(
@@ -29,6 +29,8 @@ import { executeTx } from '@docmost/db/utils';
 import { BacklinkRepo } from '@docmost/db/repos/backlink/backlink.repo';
 import { ImportAttachmentService } from './import-attachment.service';
 import { ModuleRef } from '@nestjs/core';
+import { PageService } from '../../../core/page/services/page.service';
+import { ImportPageNode } from '../dto/file-task-dto';
 
 @Injectable()
 export class FileTaskService {
@@ -37,6 +39,7 @@ export class FileTaskService {
   constructor(
     private readonly storageService: StorageService,
     private readonly importService: ImportService,
+    private readonly pageService: PageService,
     private readonly backlinkRepo: BacklinkRepo,
     @InjectKysely() private readonly db: KyselyDB,
     private readonly importAttachmentService: ImportAttachmentService,
@@ -55,6 +58,10 @@ export class FileTaskService {
       return;
     }
 
+    if (fileTask.status === FileTaskStatus.Failed) {
+      return;
+    }
+
     if (fileTask.status === FileTaskStatus.Success) {
       this.logger.log('Imported task already processed.');
       return;
@@ -118,6 +125,8 @@ export class FileTaskService {
     }
     try {
       await this.updateTaskStatus(fileTaskId, FileTaskStatus.Success, null);
+      await cleanupTmpFile();
+      await cleanupTmpDir();
       // delete stored file on success
       await this.storageService.delete(fileTask.filePath);
     } catch (err) {
@@ -142,19 +151,7 @@ export class FileTaskService {
     const allFiles = await collectMarkdownAndHtmlFiles(extractDir);
     const attachmentCandidates = await buildAttachmentCandidates(extractDir);
 
-    const pagesMap = new Map<
-      string,
-      {
-        id: string;
-        slugId: string;
-        name: string;
-        content: string;
-        position?: string | null;
-        parentPageId: string | null;
-        fileExtension: string;
-        filePath: string;
-      }
-    >();
+    const pagesMap = new Map<string, ImportPageNode>();
 
     for (const absPath of allFiles) {
       const relPath = path
@@ -201,14 +198,42 @@ export class FileTaskService {
     });
 
     // generate position keys
-    const siblingsMap = new Map<string | null, typeof Array.prototype>();
+    const siblingsMap = new Map<string | null, ImportPageNode[]>();
+
     pagesMap.forEach((page) => {
-      const sibs = siblingsMap.get(page.parentPageId) || [];
-      sibs.push(page);
-      siblingsMap.set(page.parentPageId, sibs);
+      const group = siblingsMap.get(page.parentPageId) ?? [];
+      group.push(page);
+      siblingsMap.set(page.parentPageId, group);
     });
-    siblingsMap.forEach((sibs) => {
+
+    // get root pages
+    const rootSibs = siblingsMap.get(null);
+
+    if (rootSibs?.length) {
+      rootSibs.sort((a, b) => a.name.localeCompare(b.name));
+
+      // get first position key from the server
+      const nextPosition = await this.pageService.nextPagePosition(
+        fileTask.spaceId,
+      );
+
+      let prevPos: string | null = null;
+      rootSibs.forEach((page, idx) => {
+        if (idx === 0) {
+          page.position = nextPosition;
+        } else {
+          page.position = generateJitteredKeyBetween(prevPos, null);
+        }
+        prevPos = page.position;
+      });
+    }
+
+    // non-root buckets (children & deeper levels)
+    siblingsMap.forEach((sibs, parentId) => {
+      if (parentId === null) return; // root already done
+
       sibs.sort((a, b) => a.name.localeCompare(b.name));
+
       let prevPos: string | null = null;
       for (const page of sibs) {
         page.position = generateJitteredKeyBetween(prevPos, null);
@ -216,6 +241,7 @@ export class FileTaskService {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// internal page links
|
||||||
const filePathToPageMetaMap = new Map<
|
const filePathToPageMetaMap = new Map<
|
||||||
string,
|
string,
|
||||||
{ id: string; title: string; slugId: string }
|
{ id: string; title: string; slugId: string }
|
||||||
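Note (not part of the diff): a minimal sketch of how the sibling loops chain fractional position keys, assuming generateJitteredKeyBetween(a, b) returns a key that sorts between a and b (fractional indexing) and that keys compare as plain strings.

// Illustrative sketch only.
let prevPos: string | null = null;
const positions: string[] = [];

for (let i = 0; i < 3; i++) {
  // null upper bound = "anywhere after prevPos"
  const pos = generateJitteredKeyBetween(prevPos, null);
  positions.push(pos);
  prevPos = pos;
}

// positions[0] < positions[1] < positions[2], so siblings keep the
// alphabetical order they were sorted into above.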
@@ -260,7 +260,11 @@ export class ImportAttachmentService {
     }
 
     // wait for all uploads & DB inserts
-    await Promise.all(attachmentTasks);
+    try {
+      await Promise.all(attachmentTasks);
+    } catch (err) {
+      this.logger.log('Import attachment upload error', err);
+    }
 
     return $.root().html() || '';
   }
@@ -54,6 +54,7 @@ import { BacklinksProcessor } from './processors/backlinks.processor';
       defaultJobOptions: {
         removeOnComplete: true,
         removeOnFail: true,
+        attempts: 1,
       },
     }),
   ],