feat: internal page links and mentions (#604)

* Work on mentions

* fix: properly parse page slug

* fix editor suggestion bugs

* mentions must be preceded by whitespace

* add icon to page mention rendering

* feat: backlinks - WIP

* UI - WIP

* permissions check
* use FTS for page suggestions

* cleanup

* WIP

* page title fallback

* feat: handle internal link paste

* link styling

* WIP

* Switch back to the LIKE operator for search suggestions

* WIP
* scope to workspaceId
* still create links for pages that are not found

* select necessary columns

* cleanups
Author: Philip Okugbe
Date: 2025-02-14 15:36:44 +00:00
Committed by: GitHub
Parent: 0ef6b1978a
Commit: e209aaa272
46 changed files with 1679 additions and 101 deletions
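
For orientation before the file diffs: in the editor document, a page mention is stored as a ProseMirror node roughly like the sketch below. The attribute names (entityType, entityId, slugId, label) are taken from the code in this commit; the concrete values are invented, and the node may carry additional attributes not shown here.

const pageMentionNode = {
  type: 'mention',
  attrs: {
    entityType: 'page', // only mentions with entityType 'page' become links/backlinks
    entityId: '7c9e6679-7425-40de-944b-e07fc1f90ae7', // id of the referenced page (hypothetical)
    slugId: 'xK3j9aB2', // short slug id used in page URLs (hypothetical)
    label: 'Release Notes', // display text, used as a fallback title (hypothetical)
  },
};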

View File

@@ -76,7 +76,11 @@ export class ExportController {
return;
}
const rawContent = await this.exportService.exportPage(dto.format, page);
const rawContent = await this.exportService.exportPage(
dto.format,
page,
true,
);
res.headers({
'Content-Type': getMimeType(fileExt),

View File

@@ -4,7 +4,7 @@ import {
Logger,
NotFoundException,
} from '@nestjs/common';
import { jsonToHtml } from '../../collaboration/collaboration.util';
import { jsonToHtml, jsonToNode } from '../../collaboration/collaboration.util';
import { turndown } from './turndown-utils';
import { ExportFormat } from './dto/export-dto';
import { Page } from '@docmost/db/types/entity.types';
@@ -24,6 +24,11 @@ import {
updateAttachmentUrls,
} from './utils';
import { PageRepo } from '@docmost/db/repos/page/page.repo';
import { Node } from '@tiptap/pm/model';
import { EditorState } from '@tiptap/pm/state';
// eslint-disable-next-line @typescript-eslint/no-require-imports
import slugify = require('@sindresorhus/slugify');
import { EnvironmentService } from '../environment/environment.service';
@Injectable()
export class ExportService {
@@ -33,16 +38,27 @@ export class ExportService {
private readonly pageRepo: PageRepo,
@InjectKysely() private readonly db: KyselyDB,
private readonly storageService: StorageService,
private readonly environmentService: EnvironmentService,
) {}
async exportPage(format: string, page: Page) {
async exportPage(format: string, page: Page, singlePage?: boolean) {
const titleNode = {
type: 'heading',
attrs: { level: 1 },
content: [{ type: 'text', text: getPageTitle(page.title) }],
};
const prosemirrorJson: any = getProsemirrorContent(page.content);
let prosemirrorJson: any;
if (singlePage) {
prosemirrorJson = await this.turnPageMentionsToLinks(
getProsemirrorContent(page.content),
page.workspaceId,
);
} else {
// mentions are already turned into links during the zip export process
prosemirrorJson = getProsemirrorContent(page.content);
}
if (page.title) {
prosemirrorJson.content.unshift(titleNode);
@@ -115,7 +131,8 @@
'pages.title',
'pages.content',
'pages.parentPageId',
'pages.spaceId'
'pages.spaceId',
'pages.workspaceId',
])
.where('spaceId', '=', spaceId)
.execute();
@@ -160,7 +177,10 @@
for (const page of children) {
const childPages = tree[page.id] || [];
const prosemirrorJson = getProsemirrorContent(page.content);
const prosemirrorJson = await this.turnPageMentionsToLinks(
getProsemirrorContent(page.content),
page.workspaceId,
);
const currentPagePath = slugIdToPath[page.slugId];
@@ -219,4 +239,107 @@
);
}
}
async turnPageMentionsToLinks(prosemirrorJson: any, workspaceId: string) {
const doc = jsonToNode(prosemirrorJson);
const pageMentionIds = [];
doc.descendants((node: Node) => {
if (node.type.name === 'mention' && node.attrs.entityType === 'page') {
if (node.attrs.entityId) {
pageMentionIds.push(node.attrs.entityId);
}
}
});
if (pageMentionIds.length < 1) {
return prosemirrorJson;
}
const pages = await this.db
.selectFrom('pages')
.select([
'id',
'slugId',
'title',
'creatorId',
'spaceId',
'workspaceId',
])
.select((eb) => this.pageRepo.withSpace(eb))
.where('id', 'in', pageMentionIds)
.where('workspaceId', '=', workspaceId)
.execute();
const pageMap = new Map(pages.map((page) => [page.id, page]));
let editorState = EditorState.create({
doc: doc,
});
const transaction = editorState.tr;
let offset = 0;
/**
* Helper function to replace a mention node with a text node carrying a link mark.
*/
const replaceMentionWithLink = (
node: Node,
pos: number,
title: string,
slugId: string,
spaceSlug: string,
) => {
const linkTitle = title || 'untitled';
const truncatedTitle = linkTitle?.substring(0, 70);
const pageSlug = `${slugify(truncatedTitle)}-${slugId}`;
// Create the link URL
const link = `${this.environmentService.getAppUrl()}/s/${spaceSlug}/p/${pageSlug}`;
// Create a link mark and a text node with that mark
const linkMark = editorState.schema.marks.link.create({ href: link });
const linkTextNode = editorState.schema.text(linkTitle, [linkMark]);
// Calculate positions (adjusted by the current offset)
const from = pos + offset;
const to = pos + offset + node.nodeSize;
// Replace the node in the transaction and update the offset
transaction.replaceWith(from, to, linkTextNode);
offset += linkTextNode.nodeSize - node.nodeSize;
};
// find and convert page mentions to links
editorState.doc.descendants((node: Node, pos: number) => {
// Check if the node is a page mention
if (node.type.name === 'mention' && node.attrs.entityType === 'page') {
const { entityId: pageId, slugId, label } = node.attrs;
const page = pageMap.get(pageId);
if (page) {
replaceMentionWithLink(
node,
pos,
page.title,
page.slugId,
page.space.slug,
);
} else {
// if page is not found, default to the node label and slugId
replaceMentionWithLink(node, pos, label, slugId, 'undefined');
}
}
});
if (transaction.docChanged) {
editorState = editorState.apply(transaction);
}
const updatedDoc = editorState.doc;
return updatedDoc.toJSON();
}
}
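
As a concrete illustration of the replacement performed by turnPageMentionsToLinks (every value below is invented): a mention of a page titled 'Release Notes' with slugId 'xK3j9aB2', living in a space whose slug is 'engineering', becomes a plain text node carrying a link mark whose href is assembled like this:

const title = 'Release Notes'; // page.title
const slugId = 'xK3j9aB2'; // page.slugId
const spaceSlug = 'engineering'; // page.space.slug
const pageSlug = `${slugify(title.substring(0, 70))}-${slugId}`; // 'release-notes-xK3j9aB2'
// assuming environmentService.getAppUrl() returns 'https://docs.example.com':
const href = `https://docs.example.com/s/${spaceSlug}/p/${pageSlug}`;
// -> 'https://docs.example.com/s/engineering/p/release-notes-xK3j9aB2'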

View File

@@ -7,6 +7,9 @@ import { Page } from '@docmost/db/types/entity.types';
export type PageExportTree = Record<string, Page[]>;
export const INTERNAL_LINK_REGEX =
/^(https?:\/\/)?([^\/]+)?(\/s\/([^\/]+)\/)?p\/([a-zA-Z0-9-]+)\/?$/;
export function getExportExtension(format: string) {
if (format === ExportFormat.HTML) {
return '.html';
@@ -83,13 +86,11 @@ export function replaceInternalLinks(
currentPagePath: string,
) {
const doc = jsonToNode(prosemirrorJson);
const internalLinkRegex =
/^(https?:\/\/)?([^\/]+)?(\/s\/([^\/]+)\/)?p\/([a-zA-Z0-9-]+)\/?$/;
doc.descendants((node: Node) => {
for (const mark of node.marks) {
if (mark.type.name === 'link' && mark.attrs.href) {
const match = mark.attrs.href.match(internalLinkRegex);
const match = mark.attrs.href.match(INTERNAL_LINK_REGEX);
if (match) {
const markLink = mark.attrs.href;
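
For reference, a rough sketch of what INTERNAL_LINK_REGEX captures for a link of the form produced on export (the URL and variable names are invented):

const href = 'https://docs.example.com/s/engineering/p/release-notes-xK3j9aB2';
const match = href.match(INTERNAL_LINK_REGEX);
if (match) {
  const spaceSlug = match[4]; // 'engineering'
  const pageSlugWithId = match[5]; // 'release-notes-xK3j9aB2'
}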

View File

@@ -1,10 +1,16 @@
export enum QueueName {
EMAIL_QUEUE = '{email-queue}',
ATTACHEMENT_QUEUE = '{attachment-queue}',
ATTACHMENT_QUEUE = '{attachment-queue}',
GENERAL_QUEUE = '{general-queue}',
}
export enum QueueJob {
SEND_EMAIL = 'send-email',
DELETE_SPACE_ATTACHMENTS = 'delete-space-attachments',
DELETE_PAGE_ATTACHMENTS = 'delete-page-attachments',
PAGE_CONTENT_UPDATE = 'page-content-update',
PAGE_BACKLINKS = 'page-backlinks',
}

View File

@@ -0,0 +1,8 @@
import { MentionNode } from "../../../common/helpers/prosemirror/utils";
export interface IPageBacklinkJob {
pageId: string;
workspaceId: string;
mentions: MentionNode[];
}
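
The producer side of this job is not included in the diff excerpts shown here. Assuming the usual @nestjs/bullmq pattern, enqueueing a backlinks refresh with this payload would look roughly like the sketch below (the class and method names are hypothetical):

import { Injectable } from '@nestjs/common';
import { InjectQueue } from '@nestjs/bullmq';
import { Queue } from 'bullmq';
import { QueueJob, QueueName } from '../constants';
import { IPageBacklinkJob } from '../constants/queue.interface';

@Injectable()
export class PageBacklinkProducer {
  constructor(
    @InjectQueue(QueueName.GENERAL_QUEUE) private readonly generalQueue: Queue,
  ) {}

  async queueBacklinks(payload: IPageBacklinkJob): Promise<void> {
    // picked up by the PAGE_BACKLINKS case in the general-queue processor below
    await this.generalQueue.add(QueueJob.PAGE_BACKLINKS, payload);
  }
}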

View File

@@ -0,0 +1,129 @@
import { Logger, OnModuleDestroy } from '@nestjs/common';
import { OnWorkerEvent, Processor, WorkerHost } from '@nestjs/bullmq';
import { Job } from 'bullmq';
import { QueueJob, QueueName } from '../constants';
import { IPageBacklinkJob } from '../constants/queue.interface';
import { InjectKysely } from 'nestjs-kysely';
import { KyselyDB } from '@docmost/db/types/kysely.types';
import { BacklinkRepo } from '@docmost/db/repos/backlink/backlink.repo';
import { executeTx } from '@docmost/db/utils';
@Processor(QueueName.GENERAL_QUEUE)
export class BacklinksProcessor extends WorkerHost implements OnModuleDestroy {
private readonly logger = new Logger(BacklinksProcessor.name);
constructor(
@InjectKysely() private readonly db: KyselyDB,
private readonly backlinkRepo: BacklinkRepo,
) {
super();
}
async process(job: Job<IPageBacklinkJob, void>): Promise<void> {
try {
const { pageId, mentions, workspaceId } = job.data;
switch (job.name) {
case QueueJob.PAGE_BACKLINKS:
{
await executeTx(this.db, async (trx) => {
const existingBacklinks = await trx
.selectFrom('backlinks')
.select('targetPageId')
.where('sourcePageId', '=', pageId)
.execute();
if (existingBacklinks.length === 0 && mentions.length === 0) {
return;
}
const existingTargetPageIds = existingBacklinks.map(
(backlink) => backlink.targetPageId,
);
const targetPageIds = mentions
.filter((mention) => mention.entityId !== pageId)
.map((mention) => mention.entityId);
// make sure target pages belong to the same workspace
let validTargetPages = [];
if (targetPageIds.length > 0) {
validTargetPages = await trx
.selectFrom('pages')
.select('id')
.where('id', 'in', targetPageIds)
.where('workspaceId', '=', workspaceId)
.execute();
}
const validTargetPageIds = validTargetPages.map(
(page) => page.id,
);
// new backlinks
const backlinksToAdd = validTargetPageIds.filter(
(id) => !existingTargetPageIds.includes(id),
);
// stale backlinks
const backlinksToRemove = existingTargetPageIds.filter(
(existingId) => !validTargetPageIds.includes(existingId),
);
// add new backlinks
if (backlinksToAdd.length > 0) {
const newBacklinks = backlinksToAdd.map((targetPageId) => ({
sourcePageId: pageId,
targetPageId: targetPageId,
workspaceId: workspaceId,
}));
await this.backlinkRepo.insertBacklink(newBacklinks, trx);
this.logger.debug(
`Added ${newBacklinks.length} new backlinks to ${pageId}`,
);
}
// remove stale backlinks
if (backlinksToRemove.length > 0) {
await this.db
.deleteFrom('backlinks')
.where('sourcePageId', '=', pageId)
.where('targetPageId', 'in', backlinksToRemove)
.execute();
this.logger.debug(
`Removed ${backlinksToRemove.length} outdated backlinks from ${pageId}.`,
);
}
});
}
break;
}
} catch (err) {
throw err;
}
}
@OnWorkerEvent('active')
onActive(job: Job) {
this.logger.debug(`Processing ${job.name} job`);
}
@OnWorkerEvent('failed')
onError(job: Job) {
this.logger.error(
`Error processing ${job.name} job. Reason: ${job.failedReason}`,
);
}
@OnWorkerEvent('completed')
onCompleted(job: Job) {
this.logger.debug(`Completed ${job.name} job`);
}
async onModuleDestroy(): Promise<void> {
if (this.worker) {
await this.worker.close();
}
}
}
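
To make the reconciliation above concrete, a small worked example with hypothetical page ids: if the source page currently has stored backlinks to pages A and B, and its latest content mentions B and C (both in the same workspace), the processor inserts a backlink to C and deletes the stale one to A:

const existingTargetPageIds = ['pageA', 'pageB']; // already stored for this sourcePageId
const validTargetPageIds = ['pageB', 'pageC']; // derived from the current mentions

const backlinksToAdd = validTargetPageIds.filter(
  (id) => !existingTargetPageIds.includes(id),
); // ['pageC']
const backlinksToRemove = existingTargetPageIds.filter(
  (id) => !validTargetPageIds.includes(id),
); // ['pageA']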

View File

@@ -3,6 +3,7 @@ import { BullModule } from '@nestjs/bullmq';
import { EnvironmentService } from '../environment/environment.service';
import { createRetryStrategy, parseRedisUrl } from '../../common/helpers';
import { QueueName } from './constants';
import { BacklinksProcessor } from "./processors/backlinks.processor";
@Global()
@Module({
@@ -33,9 +34,13 @@ import { QueueName } from './constants';
name: QueueName.EMAIL_QUEUE,
}),
BullModule.registerQueue({
name: QueueName.ATTACHEMENT_QUEUE,
name: QueueName.ATTACHMENT_QUEUE,
}),
BullModule.registerQueue({
name: QueueName.GENERAL_QUEUE,
}),
],
exports: [BullModule],
providers: [BacklinksProcessor]
})
export class QueueModule {}