mirror of https://github.com/docmost/docmost.git (synced 2025-11-18 03:31:10 +10:00)
Merge branch 'main' into ai-vector
@@ -1,7 +1,7 @@
 {
   "name": "client",
   "private": true,
-  "version": "0.23.1",
+  "version": "0.23.2",
   "scripts": {
     "dev": "vite",
     "build": "tsc && vite build",
@@ -10,7 +10,7 @@ import {
   TextInput,
   Tooltip,
 } from "@mantine/core";
-import { IconExternalLink, IconWorld } from "@tabler/icons-react";
+import { IconExternalLink, IconWorld, IconLock } from "@tabler/icons-react";
 import React, { useEffect, useMemo, useState } from "react";
 import {
   useCreateShareMutation,
@@ -18,23 +18,27 @@ import {
   useShareForPageQuery,
   useUpdateShareMutation,
 } from "@/features/share/queries/share-query.ts";
-import { Link, useParams } from "react-router-dom";
+import { Link, useNavigate, useParams } from "react-router-dom";
 import { extractPageSlugId, getPageIcon } from "@/lib";
 import { useTranslation } from "react-i18next";
 import CopyTextButton from "@/components/common/copy.tsx";
-import { getAppUrl } from "@/lib/config.ts";
+import { getAppUrl, isCloud } from "@/lib/config.ts";
 import { buildPageUrl } from "@/features/page/page.utils.ts";
 import classes from "@/features/share/components/share.module.css";
+import useTrial from "@/ee/hooks/use-trial.tsx";
+import { getCheckoutLink } from "@/ee/billing/services/billing-service.ts";

 interface ShareModalProps {
   readOnly: boolean;
 }
 export default function ShareModal({ readOnly }: ShareModalProps) {
   const { t } = useTranslation();
+  const navigate = useNavigate();
   const { pageSlug } = useParams();
   const pageId = extractPageSlugId(pageSlug);
   const { data: share } = useShareForPageQuery(pageId);
   const { spaceSlug } = useParams();
+  const { isTrial } = useTrial();
   const createShareMutation = useCreateShareMutation();
   const updateShareMutation = useUpdateShareMutation();
   const deleteShareMutation = useDeleteShareMutation();
@@ -61,7 +65,7 @@ export default function ShareModal({ readOnly }: ShareModalProps) {
       createShareMutation.mutateAsync({
         pageId: pageId,
         includeSubPages: true,
-        searchIndexing: true,
+        searchIndexing: false,
       });
       setIsPagePublic(value);
     } else {
@@ -92,26 +96,29 @@ export default function ShareModal({ readOnly }: ShareModalProps) {
     });
   };

-  const shareLink = useMemo(() => (
-    <Group my="sm" gap={4} wrap="nowrap">
-      <TextInput
-        variant="filled"
-        value={publicLink}
-        readOnly
-        rightSection={<CopyTextButton text={publicLink} />}
-        style={{ width: "100%" }}
-      />
-      <ActionIcon
-        component="a"
-        variant="default"
-        target="_blank"
-        href={publicLink}
-        size="sm"
-      >
-        <IconExternalLink size={16} />
-      </ActionIcon>
-    </Group>
-  ), [publicLink]);
+  const shareLink = useMemo(
+    () => (
+      <Group my="sm" gap={4} wrap="nowrap">
+        <TextInput
+          variant="filled"
+          value={publicLink}
+          readOnly
+          rightSection={<CopyTextButton text={publicLink} />}
+          style={{ width: "100%" }}
+        />
+        <ActionIcon
+          component="a"
+          variant="default"
+          target="_blank"
+          href={publicLink}
+          size="sm"
+        >
+          <IconExternalLink size={16} />
+        </ActionIcon>
+      </Group>
+    ),
+    [publicLink],
+  );

   return (
     <Popover width={350} position="bottom" withArrow shadow="md">
@@ -135,7 +142,28 @@ export default function ShareModal({ readOnly }: ShareModalProps) {
         </Button>
       </Popover.Target>
       <Popover.Dropdown style={{ userSelect: "none" }}>
-        {isDescendantShared ? (
+        {isCloud() && isTrial ? (
+          <>
+            <Group justify="center" mb="sm">
+              <IconLock size={20} stroke={1.5} />
+            </Group>
+            <Text size="sm" ta="center" fw={500} mb="xs">
+              {t("Upgrade to share pages")}
+            </Text>
+            <Text size="sm" c="dimmed" ta="center" mb="sm">
+              {t(
+                "Page sharing is available on paid plans. Upgrade to share your pages publicly.",
+              )}
+            </Text>
+            <Button
+              size="xs"
+              onClick={() => navigate("/settings/billing")}
+              fullWidth
+            >
+              {t("Upgrade Plan")}
+            </Button>
+          </>
+        ) : isDescendantShared ? (
           <>
             <Text size="sm">{t("Inherits public sharing from")}</Text>
             <Anchor
@@ -152,13 +152,36 @@ export function useDeleteSpaceMutation() {
        });
      }

-      const spaces = queryClient.getQueryData(["spaces"]) as any;
+      // Remove space-specific queries
+      if (variables.id) {
+        queryClient.removeQueries({
+          queryKey: ["space", variables.id],
+          exact: true,
+        });
+
+        // Invalidate recent changes
+        queryClient.invalidateQueries({
+          queryKey: ["recent-changes"],
+        });
+
+        queryClient.invalidateQueries({
+          queryKey: ["recent-changes", variables.id],
+        });
+      }
+
+      // Update spaces list cache
+      /* const spaces = queryClient.getQueryData(["spaces"]) as any;
       if (spaces) {
         spaces.items = spaces.items?.filter(
           (space: ISpace) => space.id !== variables.id,
         );
         queryClient.setQueryData(["spaces"], spaces);
-      }
+      }*/
+
+      // Invalidate all spaces queries to refresh lists
+      queryClient.invalidateQueries({
+        predicate: (item) => ["spaces"].includes(item.queryKey[0] as string),
+      });
     },
     onError: (error) => {
       const errorMessage = error["response"]?.data?.message;
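A note on the cache strategy in the hunk above: rather than hand-patching the cached `["spaces"]` list (that code is now left commented out), the mutation drops the deleted space's detail query and invalidates every query rooted at `"spaces"`. A minimal sketch of the pattern, assuming the TanStack Query client API this codebase uses; the function and variable names here are illustrative:

```typescript
import { QueryClient } from "@tanstack/react-query";

const queryClient = new QueryClient();

function onSpaceDeleted(spaceId: string) {
  // Drop the cached detail query for the deleted space entirely.
  queryClient.removeQueries({ queryKey: ["space", spaceId], exact: true });

  // Refetch every query whose first key segment is "spaces",
  // e.g. ["spaces"] and ["spaces", { page: 2 }].
  queryClient.invalidateQueries({
    predicate: (query) => query.queryKey[0] === "spaces",
  });
}
```

Predicate-based invalidation also catches paginated or filtered variants of the list key that an exact-match invalidation would miss.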
@@ -1,6 +1,6 @@
 {
   "name": "server",
-  "version": "0.23.1",
+  "version": "0.23.2",
   "description": "",
   "author": "",
   "private": true,
Submodule apps/server/src/ee updated: fd34d4183a...3af21def15
@@ -47,15 +47,23 @@ export class FileTaskProcessor extends WorkerHost implements OnModuleDestroy {
     await this.handleFailedJob(job);
   }

-  @OnWorkerEvent('stalled')
-  async onStalled(job: Job) {
-    this.logger.error(
-      `Job ${job.name} stalled. . Import Task ID: ${job.data.fileTaskId}.. Job ID: ${job.id}`,
+  @OnWorkerEvent('completed')
+  async onCompleted(job: Job) {
+    this.logger.log(
+      `Completed ${job.name} job for File task ID ${job.data.fileTaskId}`,
     );

-    // Set failedReason for stalled jobs since it's not automatically set
-    job.failedReason = 'Job stalled and was marked as failed';
-    await this.handleFailedJob(job);
+    try {
+      const fileTask = await this.fileTaskService.getFileTask(
+        job.data.fileTaskId,
+      );
+      if (fileTask) {
+        await this.storageService.delete(fileTask.filePath);
+        this.logger.debug(`Deleted imported zip file: ${fileTask.filePath}`);
+      }
+    } catch (err) {
+      this.logger.error(`Failed to delete imported zip file:`, err);
+    }
   }

   private async handleFailedJob(job: Job) {
@@ -78,25 +86,6 @@ export class FileTaskProcessor extends WorkerHost implements OnModuleDestroy {
     }
   }

-  @OnWorkerEvent('completed')
-  async onCompleted(job: Job) {
-    this.logger.log(
-      `Completed ${job.name} job for File task ID ${job.data.fileTaskId}`,
-    );
-
-    try {
-      const fileTask = await this.fileTaskService.getFileTask(
-        job.data.fileTaskId,
-      );
-      if (fileTask) {
-        await this.storageService.delete(fileTask.filePath);
-        this.logger.debug(`Deleted imported zip file: ${fileTask.filePath}`);
-      }
-    } catch (err) {
-      this.logger.error(`Failed to delete imported zip file:`, err);
-    }
-  }
-
   async onModuleDestroy(): Promise<void> {
     if (this.worker) {
       await this.worker.close();
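Net effect of the two processor hunks: the stalled-job handler (which force-marked the job failed) is removed, and the zip cleanup moves into an `onCompleted` handler placed earlier in the class. A minimal sketch of the `@nestjs/bullmq` worker-event wiring involved; the queue name and process body are placeholders, not docmost's actual ones:

```typescript
import { Processor, WorkerHost, OnWorkerEvent } from '@nestjs/bullmq';
import { Logger } from '@nestjs/common';
import { Job } from 'bullmq';

@Processor('file-task-queue') // hypothetical queue name
export class ExampleProcessor extends WorkerHost {
  private readonly logger = new Logger(ExampleProcessor.name);

  async process(job: Job): Promise<void> {
    // ...do the import work...
  }

  // Fires after a job finishes successfully; a natural place for cleanup
  // such as deleting the uploaded zip, as the diff above does.
  @OnWorkerEvent('completed')
  async onCompleted(job: Job) {
    this.logger.log(`Completed ${job.name} (job ${job.id})`);
  }

  @OnWorkerEvent('failed')
  async onFailed(job: Job) {
    this.logger.error(`Job ${job.name} failed: ${job.failedReason}`);
  }
}
```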
@@ -24,6 +24,7 @@ import { formatImportHtml } from '../utils/import-formatter';
 import {
   buildAttachmentCandidates,
   collectMarkdownAndHtmlFiles,
+  stripNotionID,
 } from '../utils/import.utils';
 import { executeTx } from '@docmost/db/utils';
 import { BacklinkRepo } from '@docmost/db/repos/backlink/backlink.repo';
@@ -159,17 +160,12 @@ export class FileImportTaskService {
         .split(path.sep)
         .join('/'); // normalize to forward-slashes
       const ext = path.extname(relPath).toLowerCase();
-      let content = await fs.readFile(absPath, 'utf-8');
-
-      if (ext.toLowerCase() === '.md') {
-        content = await markdownToHtml(content);
-      }

       pagesMap.set(relPath, {
         id: v7(),
         slugId: generateSlugId(),
-        name: path.basename(relPath, ext),
-        content,
+        name: stripNotionID(path.basename(relPath, ext)),
+        content: '',
         parentPageId: null,
         fileExtension: ext,
         filePath: relPath,
@@ -254,71 +250,160 @@ export class FileImportTaskService {
       });
     });

-    const pageResults = await Promise.all(
-      Array.from(pagesMap.values()).map(async (page) => {
-        const htmlContent =
-          await this.importAttachmentService.processAttachments({
-            html: page.content,
-            pageRelativePath: page.filePath,
-            extractDir,
-            pageId: page.id,
-            fileTask,
-            attachmentCandidates,
-          });
+    // Group pages by level (topological sort for parent-child relationships)
+    const pagesByLevel = new Map<number, Array<[string, ImportPageNode]>>();
+    const pageLevel = new Map<string, number>();

-        const { html, backlinks, pageIcon } = await formatImportHtml({
-          html: htmlContent,
-          currentFilePath: page.filePath,
-          filePathToPageMetaMap: filePathToPageMetaMap,
-          creatorId: fileTask.creatorId,
-          sourcePageId: page.id,
-          workspaceId: fileTask.workspaceId,
-        });
+    // Calculate levels using BFS
+    const calculateLevels = () => {
+      const queue: Array<{ filePath: string; level: number }> = [];

-        const pmState = getProsemirrorContent(
-          await this.importService.processHTML(html),
-        );
+      // Start with root pages (no parent)
+      for (const [filePath, page] of pagesMap.entries()) {
+        if (!page.parentPageId) {
+          queue.push({ filePath, level: 0 });
+          pageLevel.set(filePath, 0);
+        }
+      }
+
+      // BFS to assign levels
+      while (queue.length > 0) {
+        const { filePath, level } = queue.shift()!;
+        const currentPage = pagesMap.get(filePath)!;
+
+        // Find children of current page
+        for (const [childFilePath, childPage] of pagesMap.entries()) {
+          if (
+            childPage.parentPageId === currentPage.id &&
+            !pageLevel.has(childFilePath)
+          ) {
+            pageLevel.set(childFilePath, level + 1);
+            queue.push({ filePath: childFilePath, level: level + 1 });
+          }
+        }
+      }
+
+      // Group pages by level
+      for (const [filePath, page] of pagesMap.entries()) {
+        const level = pageLevel.get(filePath) || 0;
+        if (!pagesByLevel.has(level)) {
+          pagesByLevel.set(level, []);
+        }
+        pagesByLevel.get(level)!.push([filePath, page]);
+      }
+    };
+
+    calculateLevels();
+
+    if (pagesMap.size < 1) return;
+
+    // Process pages level by level sequentially to respect foreign key constraints
+    const allBacklinks: any[] = [];
+    const validPageIds = new Set<string>();
+    let totalPagesProcessed = 0;
+
+    // Sort levels to process in order
+    const sortedLevels = Array.from(pagesByLevel.keys()).sort((a, b) => a - b);
+
+    try {
+      await executeTx(this.db, async (trx) => {
+        // Process pages level by level sequentially within the transaction
+        for (const level of sortedLevels) {
+          const levelPages = pagesByLevel.get(level)!;
+
+          for (const [filePath, page] of levelPages) {
+            const absPath = path.join(extractDir, filePath);
+            let content = await fs.readFile(absPath, 'utf-8');
+
+            if (page.fileExtension.toLowerCase() === '.md') {
+              content = await markdownToHtml(content);
+            }
+
+            const htmlContent =
+              await this.importAttachmentService.processAttachments({
+                html: content,
+                pageRelativePath: page.filePath,
+                extractDir,
+                pageId: page.id,
+                fileTask,
+                attachmentCandidates,
+              });
+
+            const { html, backlinks, pageIcon } = await formatImportHtml({
+              html: htmlContent,
+              currentFilePath: page.filePath,
+              filePathToPageMetaMap: filePathToPageMetaMap,
+              creatorId: fileTask.creatorId,
+              sourcePageId: page.id,
+              workspaceId: fileTask.workspaceId,
+            });
+
+            const pmState = getProsemirrorContent(
+              await this.importService.processHTML(html),
+            );
+
+            const { title, prosemirrorJson } =
+              this.importService.extractTitleAndRemoveHeading(pmState);
+
+            const insertablePage: InsertablePage = {
+              id: page.id,
+              slugId: page.slugId,
+              title: title || page.name,
+              icon: pageIcon || null,
+              content: prosemirrorJson,
+              textContent: jsonToText(prosemirrorJson),
+              ydoc: await this.importService.createYdoc(prosemirrorJson),
+              position: page.position!,
+              spaceId: fileTask.spaceId,
+              workspaceId: fileTask.workspaceId,
+              creatorId: fileTask.creatorId,
+              lastUpdatedById: fileTask.creatorId,
+              parentPageId: page.parentPageId,
+            };
+
+            await trx.insertInto('pages').values(insertablePage).execute();
+
+            // Track valid page IDs and collect backlinks
+            validPageIds.add(insertablePage.id);
+            allBacklinks.push(...backlinks);
+            totalPagesProcessed++;
+
+            // Log progress periodically
+            if (totalPagesProcessed % 50 === 0) {
+              this.logger.debug(`Processed ${totalPagesProcessed} pages...`);
+            }
+          }
+        }
+
+        const filteredBacklinks = allBacklinks.filter(
+          ({ sourcePageId, targetPageId }) =>
+            validPageIds.has(sourcePageId) && validPageIds.has(targetPageId),
+        );

-        const { title, prosemirrorJson } =
-          this.importService.extractTitleAndRemoveHeading(pmState);
+        // Insert backlinks in batches
+        if (filteredBacklinks.length > 0) {
+          const BACKLINK_BATCH_SIZE = 100;
+          for (
+            let i = 0;
+            i < filteredBacklinks.length;
+            i += BACKLINK_BATCH_SIZE
+          ) {
+            const backlinkChunk = filteredBacklinks.slice(
+              i,
+              Math.min(i + BACKLINK_BATCH_SIZE, filteredBacklinks.length),
+            );
+            await this.backlinkRepo.insertBacklink(backlinkChunk, trx);
+          }
+        }

-        const insertablePage: InsertablePage = {
-          id: page.id,
-          slugId: page.slugId,
-          title: title || page.name,
-          icon: pageIcon || null,
-          content: prosemirrorJson,
-          textContent: jsonToText(prosemirrorJson),
-          ydoc: await this.importService.createYdoc(prosemirrorJson),
-          position: page.position!,
-          spaceId: fileTask.spaceId,
-          workspaceId: fileTask.workspaceId,
-          creatorId: fileTask.creatorId,
-          lastUpdatedById: fileTask.creatorId,
-          parentPageId: page.parentPageId,
-        };
-
-        return { insertablePage, backlinks };
-      }),
-    );
-
-    const insertablePages = pageResults.map((r) => r.insertablePage);
-    const insertableBacklinks = pageResults.flatMap((r) => r.backlinks);
-
-    if (insertablePages.length < 1) return;
-    const validPageIds = new Set(insertablePages.map((row) => row.id));
-    const filteredBacklinks = insertableBacklinks.filter(
-      ({ sourcePageId, targetPageId }) =>
-        validPageIds.has(sourcePageId) && validPageIds.has(targetPageId),
-    );
-
-    await executeTx(this.db, async (trx) => {
-      await trx.insertInto('pages').values(insertablePages).execute();
-
-      if (filteredBacklinks.length > 0) {
-        await this.backlinkRepo.insertBacklink(filteredBacklinks, trx);
-      }
-    });
+        this.logger.log(
+          `Successfully imported ${totalPagesProcessed} pages with ${filteredBacklinks.length} backlinks`,
+        );
+      });
+    } catch (error) {
+      this.logger.error('Failed to import files:', error);
+      throw new Error(`File import failed: ${error?.['message']}`);
+    }
+  }

   async getFileTask(fileTaskId: string) {
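The core of this refactor: pages were previously built in parallel with Promise.all and bulk-inserted, which could insert a child row before its parent existed. The new code assigns every page a depth with a BFS over parentPageId and inserts level by level inside a single transaction, so the parent foreign key is always satisfied. A self-contained sketch of that level assignment; the types are simplified placeholders, not the service's actual signatures:

```typescript
interface PageNode {
  id: string;
  parentPageId: string | null;
}

function groupByLevel(pages: Map<string, PageNode>): Map<number, string[]> {
  const level = new Map<string, number>();
  const queue: Array<{ key: string; depth: number }> = [];

  // Roots (no parent) start at level 0.
  for (const [key, page] of pages) {
    if (!page.parentPageId) {
      level.set(key, 0);
      queue.push({ key, depth: 0 });
    }
  }

  // BFS: each child sits one level below its parent.
  while (queue.length > 0) {
    const { key, depth } = queue.shift()!;
    const parentId = pages.get(key)!.id;
    for (const [childKey, child] of pages) {
      if (child.parentPageId === parentId && !level.has(childKey)) {
        level.set(childKey, depth + 1);
        queue.push({ key: childKey, depth: depth + 1 });
      }
    }
  }

  // Bucket keys by level; pages whose parent was never reached fall back
  // to level 0, mirroring the `pageLevel.get(filePath) || 0` in the diff.
  const buckets = new Map<number, string[]>();
  for (const [key] of pages) {
    const depth = level.get(key) ?? 0;
    if (!buckets.has(depth)) buckets.set(depth, []);
    buckets.get(depth)!.push(key);
  }
  return buckets;
}
```

Children are found by a linear scan per dequeued page, so the leveling is O(n²) in page count, matching the nested loops in the diff; that is acceptable for import-sized batches.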
@@ -35,7 +35,7 @@ interface DrawioPair {
 @Injectable()
 export class ImportAttachmentService {
   private readonly logger = new Logger(ImportAttachmentService.name);
-  private readonly CONCURRENT_UPLOADS = 1;
+  private readonly CONCURRENT_UPLOADS = 3;
   private readonly MAX_RETRIES = 2;
   private readonly RETRY_DELAY = 2000;

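This one-line change allows three attachment uploads in flight instead of one, with the existing retry knobs unchanged. The service internals are not part of this diff, so the following is only an illustrative sketch of bounded concurrency plus retry using those constants, not the actual implementation:

```typescript
const CONCURRENT_UPLOADS = 3;
const MAX_RETRIES = 2;
const RETRY_DELAY = 2000; // ms

const sleep = (ms: number) => new Promise((r) => setTimeout(r, ms));

// Retry a failing async operation up to MAX_RETRIES times, pausing
// RETRY_DELAY between attempts (3 attempts total with these values).
async function withRetry<T>(fn: () => Promise<T>): Promise<T> {
  let lastError: unknown;
  for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
    try {
      return await fn();
    } catch (err) {
      lastError = err;
      if (attempt < MAX_RETRIES) await sleep(RETRY_DELAY);
    }
  }
  throw lastError;
}

// Process tasks in chunks of CONCURRENT_UPLOADS so at most that many
// uploads run at once.
async function uploadAll(tasks: Array<() => Promise<void>>): Promise<void> {
  for (let i = 0; i < tasks.length; i += CONCURRENT_UPLOADS) {
    const chunk = tasks.slice(i, i + CONCURRENT_UPLOADS);
    await Promise.all(chunk.map((task) => withRetry(task)));
  }
}
```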
@@ -222,17 +222,40 @@ export function notionFormatter($: CheerioAPI, $root: Cheerio<any>) {
 }

 export function unwrapFromParagraph($: CheerioAPI, $node: Cheerio<any>) {
-  // find the nearest <p> or <a> ancestor
-  let $wrapper = $node.closest('p, a');
+  // Keep track of processed wrappers to avoid infinite loops
+  const processedWrappers = new Set<any>();
+
+  let $wrapper = $node.closest('p, a');
   while ($wrapper.length) {
-    // if the wrapper has only our node inside, replace it entirely
-    if ($wrapper.contents().length === 1) {
+    const wrapperElement = $wrapper.get(0);
+
+    // If we've already processed this wrapper, break to avoid infinite loop
+    if (processedWrappers.has(wrapperElement)) {
+      break;
+    }
+
+    processedWrappers.add(wrapperElement);
+
+    // Check if the wrapper contains only whitespace and our target node
+    const hasOnlyTargetNode =
+      $wrapper.contents().filter((_, el) => {
+        const $el = $(el);
+        // Skip whitespace-only text nodes. NodeType 3 = text node
+        if (el.nodeType === 3 && !$el.text().trim()) {
+          return false;
+        }
+        // Return true if this is not our target node
+        return !$el.is($node) && !$node.is($el);
+      }).length === 0;
+
+    if (hasOnlyTargetNode) {
+      // Replace the wrapper entirely with our node
       $wrapper.replaceWith($node);
     } else {
-      // otherwise just move the node to before the wrapper
+      // Move the node to before the wrapper, preserving other content
       $wrapper.before($node);
     }

     // look again for any new wrapper around $node
     $wrapper = $node.closest('p, a');
   }
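The rewritten unwrapFromParagraph tracks wrappers it has already visited, so a wrapper that cannot be removed no longer loops forever, and it now treats whitespace-only text nodes around the target as ignorable instead of requiring the wrapper to have exactly one child. A usage sketch, assuming the function is exported from this formatter module (the import path is hypothetical):

```typescript
import * as cheerio from 'cheerio';
import { unwrapFromParagraph } from './import-formatter'; // hypothetical path

const $ = cheerio.load('<p>  <img src="diagram.png">  </p>');
unwrapFromParagraph($, $('img'));

// The <p> contained only whitespace plus the image, so the whole
// paragraph is replaced by the image itself:
console.log($('body').html()); // -> <img src="diagram.png">
```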
@@ -64,3 +64,9 @@ export async function collectMarkdownAndHtmlFiles(
   await walk(dir);
   return results;
 }
+
+export function stripNotionID(fileName: string): string {
+  // Handle optional separator (space or dash) + 32 alphanumeric chars at end
+  const notionIdPattern = /[ -]?[a-z0-9]{32}$/i;
+  return fileName.replace(notionIdPattern, '').trim();
+}
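stripNotionID trims the 32-character ID that Notion exports append to every file name, which is why the import above now uses it when deriving page names. A behavior sketch (import path hypothetical):

```typescript
import { stripNotionID } from './import.utils';

stripNotionID('Meeting Notes 0123456789abcdef0123456789abcdef'); // "Meeting Notes"
stripNotionID('roadmap-a1b2c3d4e5f60718293a4b5c6d7e8f90');       // "roadmap"
stripNotionID('Plain Page'); // "Plain Page" (no trailing ID, unchanged)
```

Note the pattern accepts any 32 trailing alphanumerics, not just hex, so a bare 32-character name would also be stripped; for import file names that trade-off is harmless.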