mirror of
https://github.com/docmost/docmost.git
synced 2025-11-24 15:11:08 +10:00
Merge branch 'main' into sso-group-sync
This commit is contained in:
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "server",
|
||||
"version": "0.21.0",
|
||||
"version": "0.22.2",
|
||||
"description": "",
|
||||
"author": "",
|
||||
"private": true,
|
||||
@ -50,7 +50,7 @@
|
||||
"@nestjs/schedule": "^6.0.0",
|
||||
"@nestjs/terminus": "^11.0.0",
|
||||
"@nestjs/websockets": "^11.1.3",
|
||||
"@node-saml/passport-saml": "^5.0.1",
|
||||
"@node-saml/passport-saml": "^5.1.0",
|
||||
"@react-email/components": "0.0.28",
|
||||
"@react-email/render": "1.0.2",
|
||||
"@socket.io/redis-adapter": "^8.3.0",
|
||||
@ -71,6 +71,8 @@
|
||||
"nestjs-kysely": "^1.2.0",
|
||||
"nodemailer": "^7.0.3",
|
||||
"openid-client": "^5.7.1",
|
||||
"otpauth": "^9.4.0",
|
||||
"p-limit": "^6.2.0",
|
||||
"passport-google-oauth20": "^2.0.0",
|
||||
"passport-jwt": "^4.0.1",
|
||||
"pg": "^8.16.0",
|
||||
|
||||
@ -10,8 +10,6 @@ import { Typography } from '@tiptap/extension-typography';
|
||||
import { TextStyle } from '@tiptap/extension-text-style';
|
||||
import { Color } from '@tiptap/extension-color';
|
||||
import { Youtube } from '@tiptap/extension-youtube';
|
||||
import Table from '@tiptap/extension-table';
|
||||
import TableHeader from '@tiptap/extension-table-header';
|
||||
import {
|
||||
Callout,
|
||||
Comment,
|
||||
@ -22,8 +20,10 @@ import {
|
||||
LinkExtension,
|
||||
MathBlock,
|
||||
MathInline,
|
||||
TableHeader,
|
||||
TableCell,
|
||||
TableRow,
|
||||
CustomTable,
|
||||
TiptapImage,
|
||||
TiptapVideo,
|
||||
TrailingNode,
|
||||
@ -31,7 +31,8 @@ import {
|
||||
Drawio,
|
||||
Excalidraw,
|
||||
Embed,
|
||||
Mention
|
||||
Mention,
|
||||
Subpages,
|
||||
} from '@docmost/editor-ext';
|
||||
import { generateText, getSchema, JSONContent } from '@tiptap/core';
|
||||
import { generateHTML } from '../common/helpers/prosemirror/html';
|
||||
@ -46,9 +47,11 @@ export const tiptapExtensions = [
|
||||
codeBlock: false,
|
||||
}),
|
||||
Comment,
|
||||
TextAlign.configure({ types: ["heading", "paragraph"] }),
|
||||
TextAlign.configure({ types: ['heading', 'paragraph'] }),
|
||||
TaskList,
|
||||
TaskItem,
|
||||
TaskItem.configure({
|
||||
nested: true,
|
||||
}),
|
||||
Underline,
|
||||
LinkExtension,
|
||||
Superscript,
|
||||
@ -63,10 +66,10 @@ export const tiptapExtensions = [
|
||||
Details,
|
||||
DetailsContent,
|
||||
DetailsSummary,
|
||||
Table,
|
||||
TableHeader,
|
||||
TableRow,
|
||||
CustomTable,
|
||||
TableCell,
|
||||
TableRow,
|
||||
TableHeader,
|
||||
Youtube,
|
||||
TiptapImage,
|
||||
TiptapVideo,
|
||||
@ -76,7 +79,8 @@ export const tiptapExtensions = [
|
||||
Drawio,
|
||||
Excalidraw,
|
||||
Embed,
|
||||
Mention
|
||||
Mention,
|
||||
Subpages,
|
||||
] as any;
|
||||
|
||||
export function jsonToHtml(tiptapJson: any) {
|
||||
|
||||
@ -46,6 +46,10 @@ export class AuthenticationExtension implements Extension {
|
||||
throw new UnauthorizedException();
|
||||
}
|
||||
|
||||
if (user.deactivatedAt || user.deletedAt) {
|
||||
throw new UnauthorizedException();
|
||||
}
|
||||
|
||||
const page = await this.pageRepo.findById(pageId);
|
||||
if (!page) {
|
||||
this.logger.warn(`Page not found: ${pageId}`);
|
||||
|
||||
@ -1,6 +1,7 @@
|
||||
import * as path from 'path';
|
||||
import * as bcrypt from 'bcrypt';
|
||||
import { sanitize } from 'sanitize-filename-ts';
|
||||
import { FastifyRequest } from 'fastify';
|
||||
|
||||
export const envPath = path.resolve(process.cwd(), '..', '..', '.env');
|
||||
|
||||
@ -74,3 +75,15 @@ export function sanitizeFileName(fileName: string): string {
|
||||
const sanitizedFilename = sanitize(fileName).replace(/ /g, '_');
|
||||
return sanitizedFilename.slice(0, 255);
|
||||
}
|
||||
|
||||
export function removeAccent(str: string): string {
|
||||
if (!str) return str;
|
||||
return str.normalize('NFD').replace(/[\u0300-\u036f]/g, '');
|
||||
}
|
||||
|
||||
export function extractBearerTokenFromHeader(
|
||||
request: FastifyRequest,
|
||||
): string | undefined {
|
||||
const [type, token] = request.headers.authorization?.split(' ') ?? [];
|
||||
return type === 'Bearer' ? token : undefined;
|
||||
}
|
||||
|
||||
@ -12,10 +12,14 @@ export class InternalLogFilter extends ConsoleLogger {
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
this.allowedLogLevels =
|
||||
process.env.NODE_ENV === 'production'
|
||||
? ['log', 'error', 'fatal']
|
||||
: ['log', 'debug', 'verbose', 'warn', 'error', 'fatal'];
|
||||
const isProduction = process.env.NODE_ENV === 'production';
|
||||
const isDebugMode = process.env.DEBUG_MODE === 'true';
|
||||
|
||||
if (isProduction && !isDebugMode) {
|
||||
this.allowedLogLevels = ['log', 'error', 'fatal'];
|
||||
} else {
|
||||
this.allowedLogLevels = ['log', 'debug', 'verbose', 'warn', 'error', 'fatal'];
|
||||
}
|
||||
}
|
||||
|
||||
private isLogLevelAllowed(level: string): boolean {
|
||||
|
||||
@ -50,6 +50,7 @@ import { validate as isValidUUID } from 'uuid';
|
||||
import { EnvironmentService } from '../../integrations/environment/environment.service';
|
||||
import { TokenService } from '../auth/services/token.service';
|
||||
import { JwtAttachmentPayload, JwtType } from '../auth/dto/jwt-payload';
|
||||
import * as path from 'path';
|
||||
|
||||
@Controller()
|
||||
export class AttachmentController {
|
||||
@ -356,6 +357,11 @@ export class AttachmentController {
|
||||
throw new BadRequestException('Invalid image attachment type');
|
||||
}
|
||||
|
||||
const filenameWithoutExt = path.basename(fileName, path.extname(fileName));
|
||||
if (!isValidUUID(filenameWithoutExt)) {
|
||||
throw new BadRequestException('Invalid file id');
|
||||
}
|
||||
|
||||
const filePath = `${getAttachmentFolderPath(attachmentType, workspace.id)}/${fileName}`;
|
||||
|
||||
try {
|
||||
|
||||
@ -12,7 +12,7 @@ export class AttachmentProcessor extends WorkerHost implements OnModuleDestroy {
|
||||
super();
|
||||
}
|
||||
|
||||
async process(job: Job<Space, void>): Promise<void> {
|
||||
async process(job: Job<any, void>): Promise<void> {
|
||||
try {
|
||||
if (job.name === QueueJob.DELETE_SPACE_ATTACHMENTS) {
|
||||
await this.attachmentService.handleDeleteSpaceAttachments(job.data.id);
|
||||
@ -20,6 +20,11 @@ export class AttachmentProcessor extends WorkerHost implements OnModuleDestroy {
|
||||
if (job.name === QueueJob.DELETE_USER_AVATARS) {
|
||||
await this.attachmentService.handleDeleteUserAvatars(job.data.id);
|
||||
}
|
||||
if (job.name === QueueJob.DELETE_PAGE_ATTACHMENTS) {
|
||||
await this.attachmentService.handleDeletePageAttachments(
|
||||
job.data.pageId,
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
|
||||
@ -321,4 +321,50 @@ export class AttachmentService {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
async handleDeletePageAttachments(pageId: string) {
|
||||
try {
|
||||
// Fetch attachments for this page from database
|
||||
const attachments = await this.db
|
||||
.selectFrom('attachments')
|
||||
.select(['id', 'filePath'])
|
||||
.where('pageId', '=', pageId)
|
||||
.execute();
|
||||
|
||||
if (!attachments || attachments.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const failedDeletions = [];
|
||||
|
||||
await Promise.all(
|
||||
attachments.map(async (attachment) => {
|
||||
try {
|
||||
// Delete from storage
|
||||
await this.storageService.delete(attachment.filePath);
|
||||
// Delete from database
|
||||
await this.attachmentRepo.deleteAttachmentById(attachment.id);
|
||||
} catch (err) {
|
||||
failedDeletions.push(attachment.id);
|
||||
this.logger.error(
|
||||
`Failed to delete attachment ${attachment.id} for page ${pageId}:`,
|
||||
err,
|
||||
);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
if (failedDeletions.length > 0) {
|
||||
this.logger.warn(
|
||||
`Failed to delete ${failedDeletions.length} attachments for page ${pageId}`,
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.error(
|
||||
`Error in handleDeletePageAttachments for page ${pageId}:`,
|
||||
err,
|
||||
);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -6,6 +6,7 @@ import {
|
||||
Post,
|
||||
Res,
|
||||
UseGuards,
|
||||
Logger,
|
||||
} from '@nestjs/common';
|
||||
import { LoginDto } from './dto/login.dto';
|
||||
import { AuthService } from './services/auth.service';
|
||||
@ -22,12 +23,16 @@ import { PasswordResetDto } from './dto/password-reset.dto';
|
||||
import { VerifyUserTokenDto } from './dto/verify-user-token.dto';
|
||||
import { FastifyReply } from 'fastify';
|
||||
import { validateSsoEnforcement } from './auth.util';
|
||||
import { ModuleRef } from '@nestjs/core';
|
||||
|
||||
@Controller('auth')
|
||||
export class AuthController {
|
||||
private readonly logger = new Logger(AuthController.name);
|
||||
|
||||
constructor(
|
||||
private authService: AuthService,
|
||||
private environmentService: EnvironmentService,
|
||||
private moduleRef: ModuleRef,
|
||||
) {}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@ -39,6 +44,45 @@ export class AuthController {
|
||||
) {
|
||||
validateSsoEnforcement(workspace);
|
||||
|
||||
let MfaModule: any;
|
||||
let isMfaModuleReady = false;
|
||||
try {
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
MfaModule = require('./../../ee/mfa/services/mfa.service');
|
||||
isMfaModuleReady = true;
|
||||
} catch (err) {
|
||||
this.logger.debug(
|
||||
'MFA module requested but EE module not bundled in this build',
|
||||
);
|
||||
isMfaModuleReady = false;
|
||||
}
|
||||
if (isMfaModuleReady) {
|
||||
const mfaService = this.moduleRef.get(MfaModule.MfaService, {
|
||||
strict: false,
|
||||
});
|
||||
|
||||
const mfaResult = await mfaService.checkMfaRequirements(
|
||||
loginInput,
|
||||
workspace,
|
||||
res,
|
||||
);
|
||||
|
||||
if (mfaResult) {
|
||||
// If user has MFA enabled OR workspace enforces MFA, require MFA verification
|
||||
if (mfaResult.userHasMfa || mfaResult.requiresMfaSetup) {
|
||||
return {
|
||||
userHasMfa: mfaResult.userHasMfa,
|
||||
requiresMfaSetup: mfaResult.requiresMfaSetup,
|
||||
isMfaEnforced: mfaResult.isMfaEnforced,
|
||||
};
|
||||
} else if (mfaResult.authToken) {
|
||||
// User doesn't have MFA and workspace doesn't require it
|
||||
this.setAuthCookie(res, mfaResult.authToken);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const authToken = await this.authService.login(loginInput, workspace.id);
|
||||
this.setAuthCookie(res, authToken);
|
||||
}
|
||||
@ -85,11 +129,22 @@ export class AuthController {
|
||||
@Body() passwordResetDto: PasswordResetDto,
|
||||
@AuthWorkspace() workspace: Workspace,
|
||||
) {
|
||||
const authToken = await this.authService.passwordReset(
|
||||
const result = await this.authService.passwordReset(
|
||||
passwordResetDto,
|
||||
workspace.id,
|
||||
workspace,
|
||||
);
|
||||
this.setAuthCookie(res, authToken);
|
||||
|
||||
if (result.requiresLogin) {
|
||||
return {
|
||||
requiresLogin: true,
|
||||
};
|
||||
}
|
||||
|
||||
// Set auth cookie if no MFA is required
|
||||
this.setAuthCookie(res, result.authToken);
|
||||
return {
|
||||
requiresLogin: false,
|
||||
};
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@ -108,7 +163,7 @@ export class AuthController {
|
||||
@AuthUser() user: User,
|
||||
@AuthWorkspace() workspace: Workspace,
|
||||
) {
|
||||
return this.authService.getCollabToken(user.id, workspace.id);
|
||||
return this.authService.getCollabToken(user, workspace.id);
|
||||
}
|
||||
|
||||
@UseGuards(JwtAuthGuard)
|
||||
|
||||
@ -3,6 +3,7 @@ export enum JwtType {
|
||||
COLLAB = 'collab',
|
||||
EXCHANGE = 'exchange',
|
||||
ATTACHMENT = 'attachment',
|
||||
MFA_TOKEN = 'mfa_token',
|
||||
}
|
||||
export type JwtPayload = {
|
||||
sub: string;
|
||||
@ -30,3 +31,8 @@ export type JwtAttachmentPayload = {
|
||||
type: 'attachment';
|
||||
};
|
||||
|
||||
export interface JwtMfaTokenPayload {
|
||||
sub: string;
|
||||
workspaceId: string;
|
||||
type: 'mfa_token';
|
||||
}
|
||||
|
||||
@ -22,7 +22,7 @@ import { ForgotPasswordDto } from '../dto/forgot-password.dto';
|
||||
import ForgotPasswordEmail from '@docmost/transactional/emails/forgot-password-email';
|
||||
import { UserTokenRepo } from '@docmost/db/repos/user-token/user-token.repo';
|
||||
import { PasswordResetDto } from '../dto/password-reset.dto';
|
||||
import { UserToken, Workspace } from '@docmost/db/types/entity.types';
|
||||
import { User, UserToken, Workspace } from '@docmost/db/types/entity.types';
|
||||
import { UserTokenType } from '../auth.constants';
|
||||
import { KyselyDB } from '@docmost/db/types/kysely.types';
|
||||
import { InjectKysely } from 'nestjs-kysely';
|
||||
@ -47,7 +47,7 @@ export class AuthService {
|
||||
includePassword: true,
|
||||
});
|
||||
|
||||
const errorMessage = 'email or password does not match';
|
||||
const errorMessage = 'Email or password does not match';
|
||||
if (!user || user?.deletedAt) {
|
||||
throw new UnauthorizedException(errorMessage);
|
||||
}
|
||||
@ -156,10 +156,13 @@ export class AuthService {
|
||||
});
|
||||
}
|
||||
|
||||
async passwordReset(passwordResetDto: PasswordResetDto, workspaceId: string) {
|
||||
async passwordReset(
|
||||
passwordResetDto: PasswordResetDto,
|
||||
workspace: Workspace,
|
||||
) {
|
||||
const userToken = await this.userTokenRepo.findById(
|
||||
passwordResetDto.token,
|
||||
workspaceId,
|
||||
workspace.id,
|
||||
);
|
||||
|
||||
if (
|
||||
@ -170,7 +173,9 @@ export class AuthService {
|
||||
throw new BadRequestException('Invalid or expired token');
|
||||
}
|
||||
|
||||
const user = await this.userRepo.findById(userToken.userId, workspaceId);
|
||||
const user = await this.userRepo.findById(userToken.userId, workspace.id, {
|
||||
includeUserMfa: true,
|
||||
});
|
||||
if (!user || user.deletedAt) {
|
||||
throw new NotFoundException('User not found');
|
||||
}
|
||||
@ -183,7 +188,7 @@ export class AuthService {
|
||||
password: newPasswordHash,
|
||||
},
|
||||
user.id,
|
||||
workspaceId,
|
||||
workspace.id,
|
||||
trx,
|
||||
);
|
||||
|
||||
@ -201,7 +206,18 @@ export class AuthService {
|
||||
template: emailTemplate,
|
||||
});
|
||||
|
||||
return this.tokenService.generateAccessToken(user);
|
||||
// Check if user has MFA enabled or workspace enforces MFA
|
||||
const userHasMfa = user?.['mfa']?.isEnabled || false;
|
||||
const workspaceEnforcesMfa = workspace.enforceMfa || false;
|
||||
|
||||
if (userHasMfa || workspaceEnforcesMfa) {
|
||||
return {
|
||||
requiresLogin: true,
|
||||
};
|
||||
}
|
||||
|
||||
const authToken = await this.tokenService.generateAccessToken(user);
|
||||
return { authToken };
|
||||
}
|
||||
|
||||
async verifyUserToken(
|
||||
@ -222,9 +238,9 @@ export class AuthService {
|
||||
}
|
||||
}
|
||||
|
||||
async getCollabToken(userId: string, workspaceId: string) {
|
||||
async getCollabToken(user: User, workspaceId: string) {
|
||||
const token = await this.tokenService.generateCollabToken(
|
||||
userId,
|
||||
user,
|
||||
workspaceId,
|
||||
);
|
||||
return { token };
|
||||
|
||||
@ -9,6 +9,7 @@ import {
|
||||
JwtAttachmentPayload,
|
||||
JwtCollabPayload,
|
||||
JwtExchangePayload,
|
||||
JwtMfaTokenPayload,
|
||||
JwtPayload,
|
||||
JwtType,
|
||||
} from '../dto/jwt-payload';
|
||||
@ -22,7 +23,7 @@ export class TokenService {
|
||||
) {}
|
||||
|
||||
async generateAccessToken(user: User): Promise<string> {
|
||||
if (user.deletedAt) {
|
||||
if (user.deactivatedAt || user.deletedAt) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
@ -35,12 +36,13 @@ export class TokenService {
|
||||
return this.jwtService.sign(payload);
|
||||
}
|
||||
|
||||
async generateCollabToken(
|
||||
userId: string,
|
||||
workspaceId: string,
|
||||
): Promise<string> {
|
||||
async generateCollabToken(user: User, workspaceId: string): Promise<string> {
|
||||
if (user.deactivatedAt || user.deletedAt) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
const payload: JwtCollabPayload = {
|
||||
sub: userId,
|
||||
sub: user.id,
|
||||
workspaceId,
|
||||
type: JwtType.COLLAB,
|
||||
};
|
||||
@ -75,6 +77,22 @@ export class TokenService {
|
||||
return this.jwtService.sign(payload, { expiresIn: '1h' });
|
||||
}
|
||||
|
||||
async generateMfaToken(
|
||||
user: User,
|
||||
workspaceId: string,
|
||||
): Promise<string> {
|
||||
if (user.deactivatedAt || user.deletedAt) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
const payload: JwtMfaTokenPayload = {
|
||||
sub: user.id,
|
||||
workspaceId,
|
||||
type: JwtType.MFA_TOKEN,
|
||||
};
|
||||
return this.jwtService.sign(payload, { expiresIn: '5m' });
|
||||
}
|
||||
|
||||
async verifyJwt(token: string, tokenType: string) {
|
||||
const payload = await this.jwtService.verifyAsync(token, {
|
||||
secret: this.environmentService.getAppSecret(),
|
||||
|
||||
@ -6,6 +6,7 @@ import { JwtPayload, JwtType } from '../dto/jwt-payload';
|
||||
import { WorkspaceRepo } from '@docmost/db/repos/workspace/workspace.repo';
|
||||
import { UserRepo } from '@docmost/db/repos/user/user.repo';
|
||||
import { FastifyRequest } from 'fastify';
|
||||
import { extractBearerTokenFromHeader } from '../../../common/helpers';
|
||||
|
||||
@Injectable()
|
||||
export class JwtStrategy extends PassportStrategy(Strategy, 'jwt') {
|
||||
@ -18,7 +19,7 @@ export class JwtStrategy extends PassportStrategy(Strategy, 'jwt') {
|
||||
) {
|
||||
super({
|
||||
jwtFromRequest: (req: FastifyRequest) => {
|
||||
return req.cookies?.authToken || this.extractTokenFromHeader(req);
|
||||
return req.cookies?.authToken || extractBearerTokenFromHeader(req);
|
||||
},
|
||||
ignoreExpiration: false,
|
||||
secretOrKey: environmentService.getAppSecret(),
|
||||
@ -42,15 +43,10 @@ export class JwtStrategy extends PassportStrategy(Strategy, 'jwt') {
|
||||
}
|
||||
const user = await this.userRepo.findById(payload.sub, payload.workspaceId);
|
||||
|
||||
if (!user || user.deletedAt) {
|
||||
if (!user || user.deactivatedAt || user.deletedAt) {
|
||||
throw new UnauthorizedException();
|
||||
}
|
||||
|
||||
return { user, workspace };
|
||||
}
|
||||
|
||||
private extractTokenFromHeader(request: FastifyRequest): string | undefined {
|
||||
const [type, token] = request.headers.authorization?.split(' ') ?? [];
|
||||
return type === 'Bearer' ? token : undefined;
|
||||
}
|
||||
}
|
||||
|
||||
@ -43,7 +43,7 @@ export class CommentController {
|
||||
@AuthWorkspace() workspace: Workspace,
|
||||
) {
|
||||
const page = await this.pageRepo.findById(createCommentDto.pageId);
|
||||
if (!page) {
|
||||
if (!page || page.deletedAt) {
|
||||
throw new NotFoundException('Page not found');
|
||||
}
|
||||
|
||||
@ -53,9 +53,11 @@ export class CommentController {
|
||||
}
|
||||
|
||||
return this.commentService.create(
|
||||
user.id,
|
||||
page.id,
|
||||
workspace.id,
|
||||
{
|
||||
userId: user.id,
|
||||
page,
|
||||
workspaceId: workspace.id,
|
||||
},
|
||||
createCommentDto,
|
||||
);
|
||||
}
|
||||
@ -67,7 +69,6 @@ export class CommentController {
|
||||
@Body()
|
||||
pagination: PaginationOptions,
|
||||
@AuthUser() user: User,
|
||||
// @AuthWorkspace() workspace: Workspace,
|
||||
) {
|
||||
const page = await this.pageRepo.findById(input.pageId);
|
||||
if (!page) {
|
||||
@ -89,12 +90,10 @@ export class CommentController {
|
||||
throw new NotFoundException('Comment not found');
|
||||
}
|
||||
|
||||
const page = await this.pageRepo.findById(comment.pageId);
|
||||
if (!page) {
|
||||
throw new NotFoundException('Page not found');
|
||||
}
|
||||
|
||||
const ability = await this.spaceAbility.createForUser(user, page.spaceId);
|
||||
const ability = await this.spaceAbility.createForUser(
|
||||
user,
|
||||
comment.spaceId,
|
||||
);
|
||||
if (ability.cannot(SpaceCaslAction.Read, SpaceCaslSubject.Page)) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
@ -103,19 +102,76 @@ export class CommentController {
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('update')
|
||||
update(@Body() updateCommentDto: UpdateCommentDto, @AuthUser() user: User) {
|
||||
//TODO: only comment creators can update their comments
|
||||
return this.commentService.update(
|
||||
updateCommentDto.commentId,
|
||||
updateCommentDto,
|
||||
async update(@Body() dto: UpdateCommentDto, @AuthUser() user: User) {
|
||||
const comment = await this.commentRepo.findById(dto.commentId);
|
||||
if (!comment) {
|
||||
throw new NotFoundException('Comment not found');
|
||||
}
|
||||
|
||||
const ability = await this.spaceAbility.createForUser(
|
||||
user,
|
||||
comment.spaceId,
|
||||
);
|
||||
|
||||
// must be a space member with edit permission
|
||||
if (ability.cannot(SpaceCaslAction.Edit, SpaceCaslSubject.Page)) {
|
||||
throw new ForbiddenException(
|
||||
'You must have space edit permission to edit comments',
|
||||
);
|
||||
}
|
||||
|
||||
return this.commentService.update(comment, dto, user);
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('delete')
|
||||
remove(@Body() input: CommentIdDto, @AuthUser() user: User) {
|
||||
// TODO: only comment creators and admins can delete their comments
|
||||
return this.commentService.remove(input.commentId, user);
|
||||
async delete(@Body() input: CommentIdDto, @AuthUser() user: User) {
|
||||
const comment = await this.commentRepo.findById(input.commentId);
|
||||
if (!comment) {
|
||||
throw new NotFoundException('Comment not found');
|
||||
}
|
||||
|
||||
const ability = await this.spaceAbility.createForUser(
|
||||
user,
|
||||
comment.spaceId,
|
||||
);
|
||||
|
||||
// must be a space member with edit permission
|
||||
if (ability.cannot(SpaceCaslAction.Edit, SpaceCaslSubject.Page)) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
// Check if user is the comment owner
|
||||
const isOwner = comment.creatorId === user.id;
|
||||
|
||||
if (isOwner) {
|
||||
/*
|
||||
// Check if comment has children from other users
|
||||
const hasChildrenFromOthers =
|
||||
await this.commentRepo.hasChildrenFromOtherUsers(comment.id, user.id);
|
||||
|
||||
// Owner can delete if no children from other users
|
||||
if (!hasChildrenFromOthers) {
|
||||
await this.commentRepo.deleteComment(comment.id);
|
||||
return;
|
||||
}
|
||||
|
||||
// If has children from others, only space admin can delete
|
||||
if (ability.cannot(SpaceCaslAction.Manage, SpaceCaslSubject.Settings)) {
|
||||
throw new ForbiddenException(
|
||||
'Only space admins can delete comments with replies from other users',
|
||||
);
|
||||
}*/
|
||||
await this.commentRepo.deleteComment(comment.id);
|
||||
return;
|
||||
}
|
||||
|
||||
// Space admin can delete any comment
|
||||
if (ability.cannot(SpaceCaslAction.Manage, SpaceCaslSubject.Settings)) {
|
||||
throw new ForbiddenException(
|
||||
'You can only delete your own comments or must be a space admin',
|
||||
);
|
||||
}
|
||||
await this.commentRepo.deleteComment(comment.id);
|
||||
}
|
||||
}
|
||||
|
||||
@ -7,21 +7,24 @@ import {
|
||||
import { CreateCommentDto } from './dto/create-comment.dto';
|
||||
import { UpdateCommentDto } from './dto/update-comment.dto';
|
||||
import { CommentRepo } from '@docmost/db/repos/comment/comment.repo';
|
||||
import { Comment, User } from '@docmost/db/types/entity.types';
|
||||
import { Comment, Page, User } from '@docmost/db/types/entity.types';
|
||||
import { PaginationOptions } from '@docmost/db/pagination/pagination-options';
|
||||
import { PaginationResult } from '@docmost/db/pagination/pagination';
|
||||
import { PageRepo } from '@docmost/db/repos/page/page.repo';
|
||||
import { SpaceMemberRepo } from '@docmost/db/repos/space/space-member.repo';
|
||||
|
||||
@Injectable()
|
||||
export class CommentService {
|
||||
constructor(
|
||||
private commentRepo: CommentRepo,
|
||||
private pageRepo: PageRepo,
|
||||
private spaceMemberRepo: SpaceMemberRepo,
|
||||
) {}
|
||||
|
||||
async findById(commentId: string) {
|
||||
const comment = await this.commentRepo.findById(commentId, {
|
||||
includeCreator: true,
|
||||
includeResolvedBy: true,
|
||||
});
|
||||
if (!comment) {
|
||||
throw new NotFoundException('Comment not found');
|
||||
@ -30,11 +33,10 @@ export class CommentService {
|
||||
}
|
||||
|
||||
async create(
|
||||
userId: string,
|
||||
pageId: string,
|
||||
workspaceId: string,
|
||||
opts: { userId: string; page: Page; workspaceId: string },
|
||||
createCommentDto: CreateCommentDto,
|
||||
) {
|
||||
const { userId, page, workspaceId } = opts;
|
||||
const commentContent = JSON.parse(createCommentDto.content);
|
||||
|
||||
if (createCommentDto.parentCommentId) {
|
||||
@ -42,7 +44,7 @@ export class CommentService {
|
||||
createCommentDto.parentCommentId,
|
||||
);
|
||||
|
||||
if (!parentComment || parentComment.pageId !== pageId) {
|
||||
if (!parentComment || parentComment.pageId !== page.id) {
|
||||
throw new BadRequestException('Parent comment not found');
|
||||
}
|
||||
|
||||
@ -51,17 +53,16 @@ export class CommentService {
|
||||
}
|
||||
}
|
||||
|
||||
const createdComment = await this.commentRepo.insertComment({
|
||||
pageId: pageId,
|
||||
return await this.commentRepo.insertComment({
|
||||
pageId: page.id,
|
||||
content: commentContent,
|
||||
selection: createCommentDto?.selection?.substring(0, 250),
|
||||
type: 'inline',
|
||||
parentCommentId: createCommentDto?.parentCommentId,
|
||||
creatorId: userId,
|
||||
workspaceId: workspaceId,
|
||||
spaceId: page.spaceId,
|
||||
});
|
||||
|
||||
return createdComment;
|
||||
}
|
||||
|
||||
async findByPageId(
|
||||
@ -74,26 +75,16 @@ export class CommentService {
|
||||
throw new BadRequestException('Page not found');
|
||||
}
|
||||
|
||||
const pageComments = await this.commentRepo.findPageComments(
|
||||
pageId,
|
||||
pagination,
|
||||
);
|
||||
|
||||
return pageComments;
|
||||
return await this.commentRepo.findPageComments(pageId, pagination);
|
||||
}
|
||||
|
||||
async update(
|
||||
commentId: string,
|
||||
comment: Comment,
|
||||
updateCommentDto: UpdateCommentDto,
|
||||
authUser: User,
|
||||
): Promise<Comment> {
|
||||
const commentContent = JSON.parse(updateCommentDto.content);
|
||||
|
||||
const comment = await this.commentRepo.findById(commentId);
|
||||
if (!comment) {
|
||||
throw new NotFoundException('Comment not found');
|
||||
}
|
||||
|
||||
if (comment.creatorId !== authUser.id) {
|
||||
throw new ForbiddenException('You can only edit your own comments');
|
||||
}
|
||||
@ -104,26 +95,14 @@ export class CommentService {
|
||||
{
|
||||
content: commentContent,
|
||||
editedAt: editedAt,
|
||||
updatedAt: editedAt,
|
||||
},
|
||||
commentId,
|
||||
comment.id,
|
||||
);
|
||||
comment.content = commentContent;
|
||||
comment.editedAt = editedAt;
|
||||
comment.updatedAt = editedAt;
|
||||
|
||||
return comment;
|
||||
}
|
||||
|
||||
async remove(commentId: string, authUser: User): Promise<void> {
|
||||
const comment = await this.commentRepo.findById(commentId);
|
||||
|
||||
if (!comment) {
|
||||
throw new NotFoundException('Comment not found');
|
||||
}
|
||||
|
||||
if (comment.creatorId !== authUser.id) {
|
||||
throw new ForbiddenException('You can only delete your own comments');
|
||||
}
|
||||
|
||||
await this.commentRepo.deleteComment(commentId);
|
||||
}
|
||||
}
|
||||
|
||||
7
apps/server/src/core/page/dto/deleted-page.dto.ts
Normal file
7
apps/server/src/core/page/dto/deleted-page.dto.ts
Normal file
@ -0,0 +1,7 @@
|
||||
import { IsNotEmpty, IsString } from 'class-validator';
|
||||
|
||||
export class DeletedPageDto {
|
||||
@IsNotEmpty()
|
||||
@IsString()
|
||||
spaceId: string;
|
||||
}
|
||||
@ -1,13 +1,13 @@
|
||||
import { IsString, IsNotEmpty } from 'class-validator';
|
||||
import { IsString, IsNotEmpty, IsOptional } from 'class-validator';
|
||||
|
||||
export class CopyPageToSpaceDto {
|
||||
export class DuplicatePageDto {
|
||||
@IsNotEmpty()
|
||||
@IsString()
|
||||
pageId: string;
|
||||
|
||||
@IsNotEmpty()
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
spaceId: string;
|
||||
spaceId?: string;
|
||||
}
|
||||
|
||||
export type CopyPageMapEntry = {
|
||||
@ -31,3 +31,9 @@ export class PageInfoDto extends PageIdDto {
|
||||
@IsBoolean()
|
||||
includeContent: boolean;
|
||||
}
|
||||
|
||||
export class DeletePageDto extends PageIdDto {
|
||||
@IsOptional()
|
||||
@IsBoolean()
|
||||
permanentlyDelete?: boolean;
|
||||
}
|
||||
|
||||
@ -1,7 +1,11 @@
|
||||
import { IsOptional, IsString } from 'class-validator';
|
||||
import { IsOptional, IsString, IsUUID } from 'class-validator';
|
||||
import { SpaceIdDto } from './page.dto';
|
||||
|
||||
export class SidebarPageDto extends SpaceIdDto {
|
||||
export class SidebarPageDto {
|
||||
@IsOptional()
|
||||
@IsUUID()
|
||||
spaceId: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
pageId: string;
|
||||
|
||||
@ -13,7 +13,12 @@ import { PageService } from './services/page.service';
|
||||
import { CreatePageDto } from './dto/create-page.dto';
|
||||
import { UpdatePageDto } from './dto/update-page.dto';
|
||||
import { MovePageDto, MovePageToSpaceDto } from './dto/move-page.dto';
|
||||
import { PageHistoryIdDto, PageIdDto, PageInfoDto } from './dto/page.dto';
|
||||
import {
|
||||
PageHistoryIdDto,
|
||||
PageIdDto,
|
||||
PageInfoDto,
|
||||
DeletePageDto,
|
||||
} from './dto/page.dto';
|
||||
import { PageHistoryService } from './services/page-history.service';
|
||||
import { AuthUser } from '../../common/decorators/auth-user.decorator';
|
||||
import { AuthWorkspace } from '../../common/decorators/auth-workspace.decorator';
|
||||
@ -28,7 +33,8 @@ import {
|
||||
import SpaceAbilityFactory from '../casl/abilities/space-ability.factory';
|
||||
import { PageRepo } from '@docmost/db/repos/page/page.repo';
|
||||
import { RecentPageDto } from './dto/recent-page.dto';
|
||||
import { CopyPageToSpaceDto } from './dto/copy-page.dto';
|
||||
import { DuplicatePageDto } from './dto/duplicate-page.dto';
|
||||
import { DeletedPageDto } from './dto/deleted-page.dto';
|
||||
|
||||
@UseGuards(JwtAuthGuard)
|
||||
@Controller('pages')
|
||||
@ -100,7 +106,35 @@ export class PageController {
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('delete')
|
||||
async delete(@Body() pageIdDto: PageIdDto, @AuthUser() user: User) {
|
||||
async delete(@Body() deletePageDto: DeletePageDto, @AuthUser() user: User) {
|
||||
const page = await this.pageRepo.findById(deletePageDto.pageId);
|
||||
|
||||
if (!page) {
|
||||
throw new NotFoundException('Page not found');
|
||||
}
|
||||
|
||||
const ability = await this.spaceAbility.createForUser(user, page.spaceId);
|
||||
|
||||
if (deletePageDto.permanentlyDelete) {
|
||||
// Permanent deletion requires space admin permissions
|
||||
if (ability.cannot(SpaceCaslAction.Manage, SpaceCaslSubject.Settings)) {
|
||||
throw new ForbiddenException(
|
||||
'Only space admins can permanently delete pages',
|
||||
);
|
||||
}
|
||||
await this.pageService.forceDelete(deletePageDto.pageId);
|
||||
} else {
|
||||
// Soft delete requires page manage permissions
|
||||
if (ability.cannot(SpaceCaslAction.Manage, SpaceCaslSubject.Page)) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
await this.pageService.remove(deletePageDto.pageId, user.id);
|
||||
}
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('restore')
|
||||
async restore(@Body() pageIdDto: PageIdDto, @AuthUser() user: User) {
|
||||
const page = await this.pageRepo.findById(pageIdDto.pageId);
|
||||
|
||||
if (!page) {
|
||||
@ -111,13 +145,14 @@ export class PageController {
|
||||
if (ability.cannot(SpaceCaslAction.Manage, SpaceCaslSubject.Page)) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
await this.pageService.forceDelete(pageIdDto.pageId);
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('restore')
|
||||
async restore(@Body() pageIdDto: PageIdDto) {
|
||||
// await this.pageService.restore(deletePageDto.id);
|
||||
await this.pageRepo.restorePage(pageIdDto.pageId);
|
||||
|
||||
// Return the restored page data with hasChildren info
|
||||
const restoredPage = await this.pageRepo.findById(pageIdDto.pageId, {
|
||||
includeHasChildren: true,
|
||||
});
|
||||
return restoredPage;
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@ -146,6 +181,30 @@ export class PageController {
|
||||
return this.pageService.getRecentPages(user.id, pagination);
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('trash')
|
||||
async getDeletedPages(
|
||||
@Body() deletedPageDto: DeletedPageDto,
|
||||
@Body() pagination: PaginationOptions,
|
||||
@AuthUser() user: User,
|
||||
) {
|
||||
if (deletedPageDto.spaceId) {
|
||||
const ability = await this.spaceAbility.createForUser(
|
||||
user,
|
||||
deletedPageDto.spaceId,
|
||||
);
|
||||
|
||||
if (ability.cannot(SpaceCaslAction.Manage, SpaceCaslSubject.Page)) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
return this.pageService.getDeletedSpacePages(
|
||||
deletedPageDto.spaceId,
|
||||
pagination,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: scope to workspaces
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('/history')
|
||||
@ -155,6 +214,10 @@ export class PageController {
|
||||
@AuthUser() user: User,
|
||||
) {
|
||||
const page = await this.pageRepo.findById(dto.pageId);
|
||||
if (!page) {
|
||||
throw new NotFoundException('Page not found');
|
||||
}
|
||||
|
||||
const ability = await this.spaceAbility.createForUser(user, page.spaceId);
|
||||
if (ability.cannot(SpaceCaslAction.Read, SpaceCaslSubject.Page)) {
|
||||
throw new ForbiddenException();
|
||||
@ -191,21 +254,28 @@ export class PageController {
|
||||
@Body() pagination: PaginationOptions,
|
||||
@AuthUser() user: User,
|
||||
) {
|
||||
const ability = await this.spaceAbility.createForUser(user, dto.spaceId);
|
||||
if (!dto.spaceId && !dto.pageId) {
|
||||
throw new BadRequestException(
|
||||
'Either spaceId or pageId must be provided',
|
||||
);
|
||||
}
|
||||
let spaceId = dto.spaceId;
|
||||
|
||||
if (dto.pageId) {
|
||||
const page = await this.pageRepo.findById(dto.pageId);
|
||||
if (!page) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
spaceId = page.spaceId;
|
||||
}
|
||||
|
||||
const ability = await this.spaceAbility.createForUser(user, spaceId);
|
||||
if (ability.cannot(SpaceCaslAction.Read, SpaceCaslSubject.Page)) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
let pageId = null;
|
||||
if (dto.pageId) {
|
||||
const page = await this.pageRepo.findById(dto.pageId);
|
||||
if (page.spaceId !== dto.spaceId) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
pageId = page.id;
|
||||
}
|
||||
|
||||
return this.pageService.getSidebarPages(dto.spaceId, pagination, pageId);
|
||||
return this.pageService.getSidebarPages(spaceId, pagination, dto.pageId);
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@ -239,33 +309,41 @@ export class PageController {
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('copy-to-space')
|
||||
async copyPageToSpace(
|
||||
@Body() dto: CopyPageToSpaceDto,
|
||||
@AuthUser() user: User,
|
||||
) {
|
||||
@Post('duplicate')
|
||||
async duplicatePage(@Body() dto: DuplicatePageDto, @AuthUser() user: User) {
|
||||
const copiedPage = await this.pageRepo.findById(dto.pageId);
|
||||
if (!copiedPage) {
|
||||
throw new NotFoundException('Page to copy not found');
|
||||
}
|
||||
if (copiedPage.spaceId === dto.spaceId) {
|
||||
throw new BadRequestException('Page is already in this space');
|
||||
|
||||
// If spaceId is provided, it's a copy to different space
|
||||
if (dto.spaceId) {
|
||||
const abilities = await Promise.all([
|
||||
this.spaceAbility.createForUser(user, copiedPage.spaceId),
|
||||
this.spaceAbility.createForUser(user, dto.spaceId),
|
||||
]);
|
||||
|
||||
if (
|
||||
abilities.some((ability) =>
|
||||
ability.cannot(SpaceCaslAction.Edit, SpaceCaslSubject.Page),
|
||||
)
|
||||
) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
return this.pageService.duplicatePage(copiedPage, dto.spaceId, user);
|
||||
} else {
|
||||
// If no spaceId, it's a duplicate in same space
|
||||
const ability = await this.spaceAbility.createForUser(
|
||||
user,
|
||||
copiedPage.spaceId,
|
||||
);
|
||||
if (ability.cannot(SpaceCaslAction.Edit, SpaceCaslSubject.Page)) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
return this.pageService.duplicatePage(copiedPage, undefined, user);
|
||||
}
|
||||
|
||||
const abilities = await Promise.all([
|
||||
this.spaceAbility.createForUser(user, copiedPage.spaceId),
|
||||
this.spaceAbility.createForUser(user, dto.spaceId),
|
||||
]);
|
||||
|
||||
if (
|
||||
abilities.some((ability) =>
|
||||
ability.cannot(SpaceCaslAction.Edit, SpaceCaslSubject.Page),
|
||||
)
|
||||
) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
return this.pageService.copyPageToSpace(copiedPage, dto.spaceId, user);
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
|
||||
@ -2,11 +2,12 @@ import { Module } from '@nestjs/common';
|
||||
import { PageService } from './services/page.service';
|
||||
import { PageController } from './page.controller';
|
||||
import { PageHistoryService } from './services/page-history.service';
|
||||
import { TrashCleanupService } from './services/trash-cleanup.service';
|
||||
import { StorageModule } from '../../integrations/storage/storage.module';
|
||||
|
||||
@Module({
|
||||
controllers: [PageController],
|
||||
providers: [PageService, PageHistoryService],
|
||||
providers: [PageService, PageHistoryService, TrashCleanupService],
|
||||
exports: [PageService, PageHistoryService],
|
||||
imports: [StorageModule]
|
||||
})
|
||||
|
||||
@ -17,8 +17,6 @@ import { InjectKysely } from 'nestjs-kysely';
|
||||
import { KyselyDB } from '@docmost/db/types/kysely.types';
|
||||
import { generateJitteredKeyBetween } from 'fractional-indexing-jittered';
|
||||
import { MovePageDto } from '../dto/move-page.dto';
|
||||
import { ExpressionBuilder } from 'kysely';
|
||||
import { DB } from '@docmost/db/types/db';
|
||||
import { generateSlugId } from '../../../common/helpers';
|
||||
import { executeTx } from '@docmost/db/utils';
|
||||
import { AttachmentRepo } from '@docmost/db/repos/attachment/attachment.repo';
|
||||
@ -31,9 +29,15 @@ import {
|
||||
removeMarkTypeFromDoc,
|
||||
} from '../../../common/helpers/prosemirror/utils';
|
||||
import { jsonToNode, jsonToText } from 'src/collaboration/collaboration.util';
|
||||
import { CopyPageMapEntry, ICopyPageAttachment } from '../dto/copy-page.dto';
|
||||
import {
|
||||
CopyPageMapEntry,
|
||||
ICopyPageAttachment,
|
||||
} from '../dto/duplicate-page.dto';
|
||||
import { Node as PMNode } from '@tiptap/pm/model';
|
||||
import { StorageService } from '../../../integrations/storage/storage.service';
|
||||
import { InjectQueue } from '@nestjs/bullmq';
|
||||
import { Queue } from 'bullmq';
|
||||
import { QueueJob, QueueName } from '../../../integrations/queue/constants';
|
||||
|
||||
@Injectable()
|
||||
export class PageService {
|
||||
@ -44,6 +48,7 @@ export class PageService {
|
||||
private attachmentRepo: AttachmentRepo,
|
||||
@InjectKysely() private readonly db: KyselyDB,
|
||||
private readonly storageService: StorageService,
|
||||
@InjectQueue(QueueName.ATTACHMENT_QUEUE) private attachmentQueue: Queue,
|
||||
) {}
|
||||
|
||||
async findById(
|
||||
@ -104,7 +109,8 @@ export class PageService {
|
||||
.selectFrom('pages')
|
||||
.select(['position'])
|
||||
.where('spaceId', '=', spaceId)
|
||||
.orderBy('position', 'desc')
|
||||
.where('deletedAt', 'is', null)
|
||||
.orderBy('position', (ob) => ob.collate('C').desc())
|
||||
.limit(1);
|
||||
|
||||
if (parentPageId) {
|
||||
@ -166,23 +172,6 @@ export class PageService {
|
||||
});
|
||||
}
|
||||
|
||||
withHasChildren(eb: ExpressionBuilder<DB, 'pages'>) {
|
||||
return eb
|
||||
.selectFrom('pages as child')
|
||||
.select((eb) =>
|
||||
eb
|
||||
.case()
|
||||
.when(eb.fn.countAll(), '>', 0)
|
||||
.then(true)
|
||||
.else(false)
|
||||
.end()
|
||||
.as('count'),
|
||||
)
|
||||
.whereRef('child.parentPageId', '=', 'pages.id')
|
||||
.limit(1)
|
||||
.as('hasChildren');
|
||||
}
|
||||
|
||||
async getSidebarPages(
|
||||
spaceId: string,
|
||||
pagination: PaginationOptions,
|
||||
@ -199,9 +188,11 @@ export class PageService {
|
||||
'parentPageId',
|
||||
'spaceId',
|
||||
'creatorId',
|
||||
'deletedAt',
|
||||
])
|
||||
.select((eb) => this.withHasChildren(eb))
|
||||
.orderBy('position', 'asc')
|
||||
.select((eb) => this.pageRepo.withHasChildren(eb))
|
||||
.orderBy('position', (ob) => ob.collate('C').asc())
|
||||
.where('deletedAt', 'is', null)
|
||||
.where('spaceId', '=', spaceId);
|
||||
|
||||
if (pageId) {
|
||||
@ -258,11 +249,24 @@ export class PageService {
|
||||
});
|
||||
}
|
||||
|
||||
async copyPageToSpace(rootPage: Page, spaceId: string, authUser: User) {
|
||||
//TODO:
|
||||
// i. maintain internal links within copied pages
|
||||
async duplicatePage(
|
||||
rootPage: Page,
|
||||
targetSpaceId: string | undefined,
|
||||
authUser: User,
|
||||
) {
|
||||
const spaceId = targetSpaceId || rootPage.spaceId;
|
||||
const isDuplicateInSameSpace =
|
||||
!targetSpaceId || targetSpaceId === rootPage.spaceId;
|
||||
|
||||
const nextPosition = await this.nextPagePosition(spaceId);
|
||||
let nextPosition: string;
|
||||
|
||||
if (isDuplicateInSameSpace) {
|
||||
// For duplicate in same space, position right after the original page
|
||||
nextPosition = generateJitteredKeyBetween(rootPage.position, null);
|
||||
} else {
|
||||
// For copy to different space, position at the end
|
||||
nextPosition = await this.nextPagePosition(spaceId);
|
||||
}
|
||||
|
||||
const pages = await this.pageRepo.getPageAndDescendants(rootPage.id, {
|
||||
includeContent: true,
|
||||
@ -326,12 +330,38 @@ export class PageService {
|
||||
});
|
||||
}
|
||||
|
||||
// Update internal page links in mention nodes
|
||||
prosemirrorDoc.descendants((node: PMNode) => {
|
||||
if (
|
||||
node.type.name === 'mention' &&
|
||||
node.attrs.entityType === 'page'
|
||||
) {
|
||||
const referencedPageId = node.attrs.entityId;
|
||||
|
||||
// Check if the referenced page is within the pages being copied
|
||||
if (referencedPageId && pageMap.has(referencedPageId)) {
|
||||
const mappedPage = pageMap.get(referencedPageId);
|
||||
//@ts-ignore
|
||||
node.attrs.entityId = mappedPage.newPageId;
|
||||
//@ts-ignore
|
||||
node.attrs.slugId = mappedPage.newSlugId;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
const prosemirrorJson = prosemirrorDoc.toJSON();
|
||||
|
||||
// Add "Copy of " prefix to the root page title only for duplicates in same space
|
||||
let title = page.title;
|
||||
if (isDuplicateInSameSpace && page.id === rootPage.id) {
|
||||
const originalTitle = page.title || 'Untitled';
|
||||
title = `Copy of ${originalTitle}`;
|
||||
}
|
||||
|
||||
return {
|
||||
id: pageFromMap.newPageId,
|
||||
slugId: pageFromMap.newSlugId,
|
||||
title: page.title,
|
||||
title: title,
|
||||
icon: page.icon,
|
||||
content: prosemirrorJson,
|
||||
textContent: jsonToText(prosemirrorJson),
|
||||
@ -377,33 +407,50 @@ export class PageService {
|
||||
attachment.id,
|
||||
newAttachmentId,
|
||||
);
|
||||
await this.storageService.copy(attachment.filePath, newPathFile);
|
||||
await this.db
|
||||
.insertInto('attachments')
|
||||
.values({
|
||||
id: newAttachmentId,
|
||||
type: attachment.type,
|
||||
filePath: newPathFile,
|
||||
fileName: attachment.fileName,
|
||||
fileSize: attachment.fileSize,
|
||||
mimeType: attachment.mimeType,
|
||||
fileExt: attachment.fileExt,
|
||||
creatorId: attachment.creatorId,
|
||||
workspaceId: attachment.workspaceId,
|
||||
pageId: newPageId,
|
||||
spaceId: spaceId,
|
||||
})
|
||||
.execute();
|
||||
|
||||
try {
|
||||
await this.storageService.copy(attachment.filePath, newPathFile);
|
||||
|
||||
await this.db
|
||||
.insertInto('attachments')
|
||||
.values({
|
||||
id: newAttachmentId,
|
||||
type: attachment.type,
|
||||
filePath: newPathFile,
|
||||
fileName: attachment.fileName,
|
||||
fileSize: attachment.fileSize,
|
||||
mimeType: attachment.mimeType,
|
||||
fileExt: attachment.fileExt,
|
||||
creatorId: attachment.creatorId,
|
||||
workspaceId: attachment.workspaceId,
|
||||
pageId: newPageId,
|
||||
spaceId: spaceId,
|
||||
})
|
||||
.execute();
|
||||
} catch (err) {
|
||||
this.logger.error(
|
||||
`Duplicate page: failed to copy attachment ${attachment.id}`,
|
||||
err,
|
||||
);
|
||||
// Continue with other attachments even if one fails
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.log(err);
|
||||
this.logger.error(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const newPageId = pageMap.get(rootPage.id).newPageId;
|
||||
return await this.pageRepo.findById(newPageId, {
|
||||
const duplicatedPage = await this.pageRepo.findById(newPageId, {
|
||||
includeSpace: true,
|
||||
});
|
||||
|
||||
const hasChildren = pages.length > 1;
|
||||
|
||||
return {
|
||||
...duplicatedPage,
|
||||
hasChildren,
|
||||
};
|
||||
}
|
||||
|
||||
async movePage(dto: MovePageDto, movedPage: Page) {
|
||||
@ -450,9 +497,11 @@ export class PageService {
|
||||
'position',
|
||||
'parentPageId',
|
||||
'spaceId',
|
||||
'deletedAt',
|
||||
])
|
||||
.select((eb) => this.withHasChildren(eb))
|
||||
.select((eb) => this.pageRepo.withHasChildren(eb))
|
||||
.where('id', '=', childPageId)
|
||||
.where('deletedAt', 'is', null)
|
||||
.unionAll((exp) =>
|
||||
exp
|
||||
.selectFrom('pages as p')
|
||||
@ -464,6 +513,7 @@ export class PageService {
|
||||
'p.position',
|
||||
'p.parentPageId',
|
||||
'p.spaceId',
|
||||
'p.deletedAt',
|
||||
])
|
||||
.select(
|
||||
exp
|
||||
@ -478,11 +528,13 @@ export class PageService {
|
||||
.as('count'),
|
||||
)
|
||||
.whereRef('child.parentPageId', '=', 'id')
|
||||
.where('child.deletedAt', 'is', null)
|
||||
.limit(1)
|
||||
.as('hasChildren'),
|
||||
)
|
||||
//.select((eb) => this.withHasChildren(eb))
|
||||
.innerJoin('page_ancestors as pa', 'pa.parentPageId', 'p.id'),
|
||||
.innerJoin('page_ancestors as pa', 'pa.parentPageId', 'p.id')
|
||||
.where('p.deletedAt', 'is', null),
|
||||
),
|
||||
)
|
||||
.selectFrom('page_ancestors')
|
||||
@ -506,98 +558,58 @@ export class PageService {
|
||||
return await this.pageRepo.getRecentPages(userId, pagination);
|
||||
}
|
||||
|
||||
async getDeletedSpacePages(
|
||||
spaceId: string,
|
||||
pagination: PaginationOptions,
|
||||
): Promise<PaginationResult<Page>> {
|
||||
return await this.pageRepo.getDeletedPagesInSpace(spaceId, pagination);
|
||||
}
|
||||
|
||||
async forceDelete(pageId: string): Promise<void> {
|
||||
await this.pageRepo.deletePage(pageId);
|
||||
// Get all descendant IDs (including the page itself) using recursive CTE
|
||||
const descendants = await this.db
|
||||
.withRecursive('page_descendants', (db) =>
|
||||
db
|
||||
.selectFrom('pages')
|
||||
.select(['id'])
|
||||
.where('id', '=', pageId)
|
||||
.unionAll((exp) =>
|
||||
exp
|
||||
.selectFrom('pages as p')
|
||||
.select(['p.id'])
|
||||
.innerJoin('page_descendants as pd', 'pd.id', 'p.parentPageId'),
|
||||
),
|
||||
)
|
||||
.selectFrom('page_descendants')
|
||||
.selectAll()
|
||||
.execute();
|
||||
|
||||
const pageIds = descendants.map((d) => d.id);
|
||||
|
||||
// Queue attachment deletion for all pages with unique job IDs to prevent duplicates
|
||||
for (const id of pageIds) {
|
||||
await this.attachmentQueue.add(
|
||||
QueueJob.DELETE_PAGE_ATTACHMENTS,
|
||||
{
|
||||
pageId: id,
|
||||
},
|
||||
{
|
||||
jobId: `delete-page-attachments-${id}`,
|
||||
attempts: 3,
|
||||
backoff: {
|
||||
type: 'exponential',
|
||||
delay: 5000,
|
||||
},
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
if (pageIds.length > 0) {
|
||||
await this.db.deleteFrom('pages').where('id', 'in', pageIds).execute();
|
||||
}
|
||||
}
|
||||
|
||||
async remove(pageId: string, userId: string): Promise<void> {
|
||||
await this.pageRepo.removePage(pageId, userId);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
// TODO: page deletion and restoration
|
||||
async delete(pageId: string): Promise<void> {
|
||||
await this.dataSource.transaction(async (manager: EntityManager) => {
|
||||
const page = await manager
|
||||
.createQueryBuilder(Page, 'page')
|
||||
.where('page.id = :pageId', { pageId })
|
||||
.select(['page.id', 'page.workspaceId'])
|
||||
.getOne();
|
||||
|
||||
if (!page) {
|
||||
throw new NotFoundException(`Page not found`);
|
||||
}
|
||||
await this.softDeleteChildrenRecursive(page.id, manager);
|
||||
await this.pageOrderingService.removePageFromHierarchy(page, manager);
|
||||
|
||||
await manager.softDelete(Page, pageId);
|
||||
});
|
||||
}
|
||||
|
||||
private async softDeleteChildrenRecursive(
|
||||
parentId: string,
|
||||
manager: EntityManager,
|
||||
): Promise<void> {
|
||||
const childrenPage = await manager
|
||||
.createQueryBuilder(Page, 'page')
|
||||
.where('page.parentPageId = :parentId', { parentId })
|
||||
.select(['page.id', 'page.title', 'page.parentPageId'])
|
||||
.getMany();
|
||||
|
||||
for (const child of childrenPage) {
|
||||
await this.softDeleteChildrenRecursive(child.id, manager);
|
||||
await manager.softDelete(Page, child.id);
|
||||
}
|
||||
}
|
||||
|
||||
async restore(pageId: string): Promise<void> {
|
||||
await this.dataSource.transaction(async (manager: EntityManager) => {
|
||||
const isDeleted = await manager
|
||||
.createQueryBuilder(Page, 'page')
|
||||
.where('page.id = :pageId', { pageId })
|
||||
.withDeleted()
|
||||
.getCount();
|
||||
|
||||
if (!isDeleted) {
|
||||
return;
|
||||
}
|
||||
|
||||
await manager.recover(Page, { id: pageId });
|
||||
|
||||
await this.restoreChildrenRecursive(pageId, manager);
|
||||
|
||||
// Fetch the page details to find out its parent and workspace
|
||||
const restoredPage = await manager
|
||||
.createQueryBuilder(Page, 'page')
|
||||
.where('page.id = :pageId', { pageId })
|
||||
.select(['page.id', 'page.title', 'page.spaceId', 'page.parentPageId'])
|
||||
.getOne();
|
||||
|
||||
if (!restoredPage) {
|
||||
throw new NotFoundException(`Restored page not found.`);
|
||||
}
|
||||
|
||||
// add page back to its hierarchy
|
||||
await this.pageOrderingService.addPageToOrder(
|
||||
restoredPage.spaceId,
|
||||
pageId,
|
||||
restoredPage.parentPageId,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
private async restoreChildrenRecursive(
|
||||
parentId: string,
|
||||
manager: EntityManager,
|
||||
): Promise<void> {
|
||||
const childrenPage = await manager
|
||||
.createQueryBuilder(Page, 'page')
|
||||
.setLock('pessimistic_write')
|
||||
.where('page.parentPageId = :parentId', { parentId })
|
||||
.select(['page.id', 'page.title', 'page.parentPageId'])
|
||||
.withDeleted()
|
||||
.getMany();
|
||||
|
||||
for (const child of childrenPage) {
|
||||
await this.restoreChildrenRecursive(child.id, manager);
|
||||
await manager.recover(Page, { id: child.id });
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
116
apps/server/src/core/page/services/trash-cleanup.service.ts
Normal file
116
apps/server/src/core/page/services/trash-cleanup.service.ts
Normal file
@ -0,0 +1,116 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { Interval } from '@nestjs/schedule';
|
||||
import { InjectKysely } from 'nestjs-kysely';
|
||||
import { KyselyDB } from '@docmost/db/types/kysely.types';
|
||||
import { InjectQueue } from '@nestjs/bullmq';
|
||||
import { Queue } from 'bullmq';
|
||||
import { QueueJob, QueueName } from '../../../integrations/queue/constants';
|
||||
|
||||
@Injectable()
|
||||
export class TrashCleanupService {
|
||||
private readonly logger = new Logger(TrashCleanupService.name);
|
||||
private readonly RETENTION_DAYS = 30;
|
||||
|
||||
constructor(
|
||||
@InjectKysely() private readonly db: KyselyDB,
|
||||
@InjectQueue(QueueName.ATTACHMENT_QUEUE) private attachmentQueue: Queue,
|
||||
) {}
|
||||
|
||||
@Interval('trash-cleanup', 24 * 60 * 60 * 1000) // every 24 hours
|
||||
async cleanupOldTrash() {
|
||||
try {
|
||||
this.logger.debug('Starting trash cleanup job');
|
||||
|
||||
const retentionDate = new Date();
|
||||
retentionDate.setDate(retentionDate.getDate() - this.RETENTION_DAYS);
|
||||
|
||||
// Get all pages that were deleted more than 30 days ago
|
||||
const oldDeletedPages = await this.db
|
||||
.selectFrom('pages')
|
||||
.select(['id', 'spaceId', 'workspaceId'])
|
||||
.where('deletedAt', '<', retentionDate)
|
||||
.execute();
|
||||
|
||||
if (oldDeletedPages.length === 0) {
|
||||
this.logger.debug('No old trash items to clean up');
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.debug(`Found ${oldDeletedPages.length} pages to clean up`);
|
||||
|
||||
// Process each page
|
||||
for (const page of oldDeletedPages) {
|
||||
try {
|
||||
await this.cleanupPage(page.id);
|
||||
} catch (error) {
|
||||
this.logger.error(
|
||||
`Failed to cleanup page ${page.id}: ${error instanceof Error ? error.message : 'Unknown error'}`,
|
||||
error instanceof Error ? error.stack : undefined,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
this.logger.debug('Trash cleanup job completed');
|
||||
} catch (error) {
|
||||
this.logger.error(
|
||||
'Trash cleanup job failed',
|
||||
error instanceof Error ? error.stack : undefined,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
private async cleanupPage(pageId: string) {
|
||||
// Get all descendants using recursive CTE (including the page itself)
|
||||
const descendants = await this.db
|
||||
.withRecursive('page_descendants', (db) =>
|
||||
db
|
||||
.selectFrom('pages')
|
||||
.select(['id'])
|
||||
.where('id', '=', pageId)
|
||||
.unionAll((exp) =>
|
||||
exp
|
||||
.selectFrom('pages as p')
|
||||
.select(['p.id'])
|
||||
.innerJoin('page_descendants as pd', 'pd.id', 'p.parentPageId'),
|
||||
),
|
||||
)
|
||||
.selectFrom('page_descendants')
|
||||
.selectAll()
|
||||
.execute();
|
||||
|
||||
const pageIds = descendants.map((d) => d.id);
|
||||
|
||||
this.logger.debug(
|
||||
`Cleaning up page ${pageId} with ${pageIds.length - 1} descendants`,
|
||||
);
|
||||
|
||||
// Queue attachment deletion for all pages with unique job IDs to prevent duplicates
|
||||
for (const id of pageIds) {
|
||||
await this.attachmentQueue.add(
|
||||
QueueJob.DELETE_PAGE_ATTACHMENTS,
|
||||
{
|
||||
pageId: id,
|
||||
},
|
||||
{
|
||||
jobId: `delete-page-attachments-${id}`,
|
||||
attempts: 3,
|
||||
backoff: {
|
||||
type: 'exponential',
|
||||
delay: 5000,
|
||||
},
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
if (pageIds.length > 0) {
|
||||
await this.db.deleteFrom('pages').where('id', 'in', pageIds).execute();
|
||||
}
|
||||
} catch (error) {
|
||||
// Log but don't throw - pages might have been deleted by another node
|
||||
this.logger.warn(
|
||||
`Error deleting pages, they may have been already deleted: ${error instanceof Error ? error.message : 'Unknown error'}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -44,15 +44,22 @@ export class SearchService {
|
||||
'creatorId',
|
||||
'createdAt',
|
||||
'updatedAt',
|
||||
sql<number>`ts_rank(tsv, to_tsquery(${searchQuery}))`.as('rank'),
|
||||
sql<string>`ts_headline('english', text_content, to_tsquery(${searchQuery}),'MinWords=9, MaxWords=10, MaxFragments=3')`.as(
|
||||
sql<number>`ts_rank(tsv, to_tsquery('english', f_unaccent(${searchQuery})))`.as(
|
||||
'rank',
|
||||
),
|
||||
sql<string>`ts_headline('english', text_content, to_tsquery('english', f_unaccent(${searchQuery})),'MinWords=9, MaxWords=10, MaxFragments=3')`.as(
|
||||
'highlight',
|
||||
),
|
||||
])
|
||||
.where('tsv', '@@', sql<string>`to_tsquery(${searchQuery})`)
|
||||
.where(
|
||||
'tsv',
|
||||
'@@',
|
||||
sql<string>`to_tsquery('english', f_unaccent(${searchQuery}))`,
|
||||
)
|
||||
.$if(Boolean(searchParams.creatorId), (qb) =>
|
||||
qb.where('creatorId', '=', searchParams.creatorId),
|
||||
)
|
||||
.where('deletedAt', 'is', null)
|
||||
.orderBy('rank', 'desc')
|
||||
.limit(searchParams.limit | 20)
|
||||
.offset(searchParams.offset || 0);
|
||||
@ -138,21 +145,37 @@ export class SearchService {
    const query = suggestion.query.toLowerCase().trim();

    if (suggestion.includeUsers) {
      users = await this.db
      const userQuery = this.db
        .selectFrom('users')
        .select(['id', 'name', 'avatarUrl'])
        .where((eb) => eb(sql`LOWER(users.name)`, 'like', `%${query}%`))
        .select(['id', 'name', 'email', 'avatarUrl'])
        .where('workspaceId', '=', workspaceId)
        .where('deletedAt', 'is', null)
        .limit(limit)
        .execute();
        .where((eb) =>
          eb.or([
            eb(
              sql`LOWER(f_unaccent(users.name))`,
              'like',
              sql`LOWER(f_unaccent(${`%${query}%`}))`,
            ),
            eb(sql`users.email`, 'ilike', sql`f_unaccent(${`%${query}%`})`),
          ]),
        )
        .limit(limit);

      users = await userQuery.execute();
    }

    if (suggestion.includeGroups) {
      groups = await this.db
        .selectFrom('groups')
        .select(['id', 'name', 'description'])
        .where((eb) => eb(sql`LOWER(groups.name)`, 'like', `%${query}%`))
        .where((eb) =>
          eb(
            sql`LOWER(f_unaccent(groups.name))`,
            'like',
            sql`LOWER(f_unaccent(${`%${query}%`}))`,
          ),
        )
        .where('workspaceId', '=', workspaceId)
        .limit(limit)
        .execute();
@ -162,7 +185,14 @@ export class SearchService {
    let pageSearch = this.db
      .selectFrom('pages')
      .select(['id', 'slugId', 'title', 'icon', 'spaceId'])
      .where((eb) => eb(sql`LOWER(pages.title)`, 'like', `%${query}%`))
      .where((eb) =>
        eb(
          sql`LOWER(f_unaccent(pages.title))`,
          'like',
          sql`LOWER(f_unaccent(${`%${query}%`}))`,
        ),
      )
      .where('deletedAt', 'is', null)
      .where('workspaceId', '=', workspaceId)
      .limit(limit);

@ -108,12 +108,12 @@ export class ShareService {
      includeCreator: true,
    });

    page.content = await this.updatePublicAttachments(page);

    if (!page) {
    if (!page || page.deletedAt) {
      throw new NotFoundException('Shared page not found');
    }

    page.content = await this.updatePublicAttachments(page);

    return { page, share };
  }

@ -132,6 +132,7 @@ export class ShareService {
            sql`0`.as('level'),
          ])
          .where(isValidUUID(pageId) ? 'id' : 'slugId', '=', pageId)
          .where('deletedAt', 'is', null)
          .unionAll((union) =>
            union
              .selectFrom('pages as p')
@ -144,7 +145,8 @@ export class ShareService {
                // Increase the level by 1 for each ancestor.
                sql`ph.level + 1`.as('level'),
              ])
              .innerJoin('page_hierarchy as ph', 'ph.parentPageId', 'p.id'),
              .innerJoin('page_hierarchy as ph', 'ph.parentPageId', 'p.id')
              .where('p.deletedAt', 'is', null),
          ),
      )
      .selectFrom('page_hierarchy')

@ -29,7 +29,8 @@ import WorkspaceAbilityFactory from '../../casl/abilities/workspace-ability.fact
import {
  WorkspaceCaslAction,
  WorkspaceCaslSubject,
} from '../../casl/interfaces/workspace-ability.type';import { FastifyReply } from 'fastify';
} from '../../casl/interfaces/workspace-ability.type';
import { FastifyReply } from 'fastify';
import { EnvironmentService } from '../../../integrations/environment/environment.service';
import { CheckHostnameDto } from '../dto/check-hostname.dto';
import { RemoveWorkspaceUserDto } from '../dto/remove-workspace-user.dto';
@ -257,17 +258,27 @@ export class WorkspaceController {
    @AuthWorkspace() workspace: Workspace,
    @Res({ passthrough: true }) res: FastifyReply,
  ) {
    const authToken = await this.workspaceInvitationService.acceptInvitation(
    const result = await this.workspaceInvitationService.acceptInvitation(
      acceptInviteDto,
      workspace,
    );

    res.setCookie('authToken', authToken, {
    if (result.requiresLogin) {
      return {
        requiresLogin: true,
      };
    }

    res.setCookie('authToken', result.authToken, {
      httpOnly: true,
      path: '/',
      expires: this.environmentService.getCookieExpiresIn(),
      secure: this.environmentService.isHttps(),
    });

    return {
      requiresLogin: false,
    };
  }

  @Public()
@ -14,4 +14,8 @@ export class UpdateWorkspaceDto extends PartialType(CreateWorkspaceDto) {
  @IsOptional()
  @IsBoolean()
  enforceSso: boolean;

  @IsOptional()
  @IsBoolean()
  enforceMfa: boolean;
}
@ -8,6 +8,7 @@ import { AcceptInviteDto, InviteUserDto } from '../dto/invitation.dto';
import { UserRepo } from '@docmost/db/repos/user/user.repo';
import { InjectKysely } from 'nestjs-kysely';
import { KyselyDB } from '@docmost/db/types/kysely.types';
import { sql } from 'kysely';
import { executeTx } from '@docmost/db/utils';
import {
  Group,
@ -55,7 +56,11 @@ export class WorkspaceInvitationService {

    if (pagination.query) {
      query = query.where((eb) =>
        eb('email', 'ilike', `%${pagination.query}%`),
        eb(
          sql`email`,
          'ilike',
          sql`f_unaccent(${'%' + pagination.query + '%'})`,
        ),
      );
    }

@ -177,7 +182,14 @@ export class WorkspaceInvitationService {
    }
  }

  async acceptInvitation(dto: AcceptInviteDto, workspace: Workspace) {
  async acceptInvitation(
    dto: AcceptInviteDto,
    workspace: Workspace,
  ): Promise<{
    authToken?: string;
    requiresLogin?: boolean;
    message?: string;
  }> {
    const invitation = await this.db
      .selectFrom('workspaceInvitations')
      .selectAll()
@ -289,7 +301,14 @@ export class WorkspaceInvitationService {
      });
    }

    return this.tokenService.generateAccessToken(newUser);
    if (workspace.enforceMfa) {
      return {
        requiresLogin: true,
      };
    }

    const authToken = await this.tokenService.generateAccessToken(newUser);
    return { authToken };
  }

  async resendInvitation(
apps/server/src/database/migrations/20250715T070817-mfa.ts (new file, 39 lines)
@ -0,0 +1,39 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await db.schema
    .createTable('user_mfa')
    .addColumn('id', 'uuid', (col) =>
      col.primaryKey().defaultTo(sql`gen_uuid_v7()`),
    )
    .addColumn('user_id', 'uuid', (col) =>
      col.references('users.id').onDelete('cascade').notNull(),
    )
    .addColumn('method', 'varchar', (col) => col.notNull().defaultTo('totp'))
    .addColumn('secret', 'text', (col) => col)
    .addColumn('is_enabled', 'boolean', (col) => col.defaultTo(false))
    .addColumn('backup_codes', sql`text[]`, (col) => col)
    .addColumn('workspace_id', 'uuid', (col) =>
      col.references('workspaces.id').onDelete('cascade').notNull(),
    )
    .addColumn('created_at', 'timestamptz', (col) =>
      col.notNull().defaultTo(sql`now()`),
    )
    .addColumn('updated_at', 'timestamptz', (col) =>
      col.notNull().defaultTo(sql`now()`),
    )
    .addUniqueConstraint('user_mfa_user_id_unique', ['user_id'])
    .execute();

  // Add MFA policy columns to workspaces
  await db.schema
    .alterTable('workspaces')
    .addColumn('enforce_mfa', 'boolean', (col) => col.defaultTo(false))
    .execute();
}

export async function down(db: Kysely<any>): Promise<void> {
  await db.schema.alterTable('workspaces').dropColumn('enforce_mfa').execute();

  await db.schema.dropTable('user_mfa').execute();
}
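The secret column of the new user_mfa table is what a TOTP check would validate against. A minimal sketch of such a check using the otpauth package (an assumption for illustration; the verification logic itself is not part of this migration):

import * as OTPAuth from 'otpauth';

// Returns true when the 6-digit code matches the stored base32 secret,
// allowing one 30-second step of clock drift in either direction.
function isValidTotpCode(storedSecret: string, code: string): boolean {
  const totp = new OTPAuth.TOTP({
    issuer: 'Docmost',
    algorithm: 'SHA1',
    digits: 6,
    period: 30,
    secret: OTPAuth.Secret.fromBase32(storedSecret),
  });
  return totp.validate({ token: code, window: 1 }) !== null;
}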
@ -0,0 +1,61 @@
import { type Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  // Add last_edited_by_id column to comments table
  await db.schema
    .alterTable('comments')
    .addColumn('last_edited_by_id', 'uuid', (col) =>
      col.references('users.id').onDelete('set null'),
    )
    .execute();

  // Add resolved_by_id column to comments table
  await db.schema
    .alterTable('comments')
    .addColumn('resolved_by_id', 'uuid', (col) =>
      col.references('users.id').onDelete('set null'),
    )
    .execute();

  // Add updated_at timestamp column to comments table
  await db.schema
    .alterTable('comments')
    .addColumn('updated_at', 'timestamptz', (col) =>
      col.notNull().defaultTo(sql`now()`),
    )
    .execute();

  // Add space_id column to comments table
  await db.schema
    .alterTable('comments')
    .addColumn('space_id', 'uuid', (col) =>
      col.references('spaces.id').onDelete('cascade'),
    )
    .execute();

  // Backfill space_id from the related pages
  await db
    .updateTable('comments as c')
    .set((eb) => ({
      space_id: eb.ref('p.space_id'),
    }))
    .from('pages as p')
    .whereRef('c.page_id', '=', 'p.id')
    .execute();

  // Make space_id NOT NULL after populating data
  await db.schema
    .alterTable('comments')
    .alterColumn('space_id', (col) => col.setNotNull())
    .execute();
}

export async function down(db: Kysely<any>): Promise<void> {
  await db.schema
    .alterTable('comments')
    .dropColumn('last_edited_by_id')
    .execute();
  await db.schema.alterTable('comments').dropColumn('resolved_by_id').execute();
  await db.schema.alterTable('comments').dropColumn('updated_at').execute();
  await db.schema.alterTable('comments').dropColumn('space_id').execute();
}
@ -0,0 +1,50 @@
import { type Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  // Create unaccent extension
  await sql`CREATE EXTENSION IF NOT EXISTS unaccent`.execute(db);

  // Create pg_trgm extension
  await sql`CREATE EXTENSION IF NOT EXISTS pg_trgm`.execute(db);

  // Create IMMUTABLE wrapper function for unaccent
  // This allows us to create indexes on unaccented columns for better performance
  // https://stackoverflow.com/a/11007216/8299075
  await sql`
    CREATE OR REPLACE FUNCTION f_unaccent(text) RETURNS text
    AS $$
      SELECT unaccent('unaccent', $1);
    $$ LANGUAGE sql IMMUTABLE PARALLEL SAFE STRICT;
  `.execute(db);

  // Update the pages tsvector trigger to use the immutable function
  await sql`
    CREATE OR REPLACE FUNCTION pages_tsvector_trigger() RETURNS trigger AS $$
    begin
      new.tsv :=
        setweight(to_tsvector('english', f_unaccent(coalesce(new.title, ''))), 'A') ||
        setweight(to_tsvector('english', f_unaccent(substring(coalesce(new.text_content, ''), 1, 1000000))), 'B');
      return new;
    end;
    $$ LANGUAGE plpgsql;
  `.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`
    CREATE OR REPLACE FUNCTION pages_tsvector_trigger() RETURNS trigger AS $$
    begin
      new.tsv :=
        setweight(to_tsvector('english', coalesce(new.title, '')), 'A') ||
        setweight(to_tsvector('english', coalesce(new.text_content, '')), 'B');
      return new;
    end;
    $$ LANGUAGE plpgsql;
  `.execute(db);

  await sql`DROP FUNCTION IF EXISTS f_unaccent(text)`.execute(db);

  await sql`DROP EXTENSION IF EXISTS pg_trgm`.execute(db);

  await sql`DROP EXTENSION IF EXISTS unaccent`.execute(db);
}
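Because the wrapper is declared IMMUTABLE, Postgres will accept it inside an expression index, which is what makes the accent-insensitive ILIKE filters added throughout the repos cheap. A sketch of how such an index could be paired with the pg_trgm extension created above (the index itself is an assumption; this migration does not create one):

import { sql } from 'kysely';
import type { Kysely } from 'kysely';

// Hypothetical follow-up migration: back f_unaccent(title) lookups with a trigram index.
export async function addTitleTrgmIndex(db: Kysely<any>): Promise<void> {
  await sql`
    CREATE INDEX IF NOT EXISTS pages_title_unaccent_trgm_idx
      ON pages USING gin (f_unaccent(title) gin_trgm_ops)
  `.execute(db);
}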
@ -20,12 +20,13 @@ export class CommentRepo {
  // todo, add workspaceId
  async findById(
    commentId: string,
    opts?: { includeCreator: boolean },
    opts?: { includeCreator: boolean; includeResolvedBy: boolean },
  ): Promise<Comment> {
    return await this.db
      .selectFrom('comments')
      .selectAll('comments')
      .$if(opts?.includeCreator, (qb) => qb.select(this.withCreator))
      .$if(opts?.includeResolvedBy, (qb) => qb.select(this.withResolvedBy))
      .where('id', '=', commentId)
      .executeTakeFirst();
  }
@ -35,6 +36,7 @@ export class CommentRepo {
      .selectFrom('comments')
      .selectAll('comments')
      .select((eb) => this.withCreator(eb))
      .select((eb) => this.withResolvedBy(eb))
      .where('pageId', '=', pageId)
      .orderBy('createdAt', 'asc');

@ -80,7 +82,37 @@ export class CommentRepo {
    ).as('creator');
  }

  withResolvedBy(eb: ExpressionBuilder<DB, 'comments'>) {
    return jsonObjectFrom(
      eb
        .selectFrom('users')
        .select(['users.id', 'users.name', 'users.avatarUrl'])
        .whereRef('users.id', '=', 'comments.resolvedById'),
    ).as('resolvedBy');
  }

  async deleteComment(commentId: string): Promise<void> {
    await this.db.deleteFrom('comments').where('id', '=', commentId).execute();
  }

  async hasChildren(commentId: string): Promise<boolean> {
    const result = await this.db
      .selectFrom('comments')
      .select((eb) => eb.fn.count('id').as('count'))
      .where('parentCommentId', '=', commentId)
      .executeTakeFirst();

    return Number(result?.count) > 0;
  }

  async hasChildrenFromOtherUsers(commentId: string, userId: string): Promise<boolean> {
    const result = await this.db
      .selectFrom('comments')
      .select((eb) => eb.fn.count('id').as('count'))
      .where('parentCommentId', '=', commentId)
      .where('creatorId', '!=', userId)
      .executeTakeFirst();

    return Number(result?.count) > 0;
  }
}

@ -6,6 +6,7 @@ import {
import { InjectKysely } from 'nestjs-kysely';
import { KyselyDB, KyselyTransaction } from '@docmost/db/types/kysely.types';
import { dbOrTx, executeTx } from '@docmost/db/utils';
import { sql } from 'kysely';
import { GroupUser, InsertableGroupUser } from '@docmost/db/types/entity.types';
import { PaginationOptions } from '../../pagination/pagination-options';
import { executeWithPagination } from '@docmost/db/pagination/pagination';
@ -56,7 +57,7 @@ export class GroupUserRepo {

    if (pagination.query) {
      query = query.where((eb) =>
        eb('users.name', 'ilike', `%${pagination.query}%`),
        eb(sql`f_unaccent(users.name)`, 'ilike', sql`f_unaccent(${'%' + pagination.query + '%'})`),
      );
    }

@ -114,10 +114,10 @@ export class GroupRepo {

    if (pagination.query) {
      query = query.where((eb) =>
        eb('name', 'ilike', `%${pagination.query}%`).or(
          'description',
        eb(sql`f_unaccent(name)`, 'ilike', sql`f_unaccent(${'%' + pagination.query + '%'})`).or(
          sql`f_unaccent(description)`,
          'ilike',
          `%${pagination.query}%`,
          sql`f_unaccent(${'%' + pagination.query + '%'})`,
        ),
      );
    }

@ -1,7 +1,7 @@
import { Injectable } from '@nestjs/common';
import { InjectKysely } from 'nestjs-kysely';
import { KyselyDB, KyselyTransaction } from '../../types/kysely.types';
import { dbOrTx } from '../../utils';
import { dbOrTx, executeTx } from '../../utils';
import {
  InsertablePage,
  Page,
@ -22,6 +22,24 @@ export class PageRepo {
    private spaceMemberRepo: SpaceMemberRepo,
  ) {}

  withHasChildren(eb: ExpressionBuilder<DB, 'pages'>) {
    return eb
      .selectFrom('pages as child')
      .select((eb) =>
        eb
          .case()
          .when(eb.fn.countAll(), '>', 0)
          .then(true)
          .else(false)
          .end()
          .as('count'),
      )
      .whereRef('child.parentPageId', '=', 'pages.id')
      .where('child.deletedAt', 'is', null)
      .limit(1)
      .as('hasChildren');
  }

  private baseFields: Array<keyof Page> = [
    'id',
    'slugId',
@ -50,6 +68,7 @@ export class PageRepo {
      includeCreator?: boolean;
      includeLastUpdatedBy?: boolean;
      includeContributors?: boolean;
      includeHasChildren?: boolean;
      withLock?: boolean;
      trx?: KyselyTransaction;
    },
@ -60,7 +79,10 @@ export class PageRepo {
      .selectFrom('pages')
      .select(this.baseFields)
      .$if(opts?.includeContent, (qb) => qb.select('content'))
      .$if(opts?.includeYdoc, (qb) => qb.select('ydoc'));
      .$if(opts?.includeYdoc, (qb) => qb.select('ydoc'))
      .$if(opts?.includeHasChildren, (qb) =>
        qb.select((eb) => this.withHasChildren(eb)),
      );

    if (opts?.includeCreator) {
      query = query.select((eb) => this.withCreator(eb));
@ -139,12 +161,113 @@ export class PageRepo {
    await query.execute();
  }

  async removePage(pageId: string, deletedById: string): Promise<void> {
    const currentDate = new Date();

    const descendants = await this.db
      .withRecursive('page_descendants', (db) =>
        db
          .selectFrom('pages')
          .select(['id'])
          .where('id', '=', pageId)
          .unionAll((exp) =>
            exp
              .selectFrom('pages as p')
              .select(['p.id'])
              .innerJoin('page_descendants as pd', 'pd.id', 'p.parentPageId'),
          ),
      )
      .selectFrom('page_descendants')
      .selectAll()
      .execute();

    const pageIds = descendants.map((d) => d.id);

    if (pageIds.length > 0) {
      await executeTx(this.db, async (trx) => {
        await trx
          .updateTable('pages')
          .set({
            deletedById: deletedById,
            deletedAt: currentDate,
          })
          .where('id', 'in', pageIds)
          .execute();

        await trx.deleteFrom('shares').where('pageId', 'in', pageIds).execute();
      });
    }
  }

  async restorePage(pageId: string): Promise<void> {
    // First, check if the page being restored has a deleted parent
    const pageToRestore = await this.db
      .selectFrom('pages')
      .select(['id', 'parentPageId'])
      .where('id', '=', pageId)
      .executeTakeFirst();

    if (!pageToRestore) {
      return;
    }

    // Check if the parent is also deleted
    let shouldDetachFromParent = false;
    if (pageToRestore.parentPageId) {
      const parent = await this.db
        .selectFrom('pages')
        .select(['id', 'deletedAt'])
        .where('id', '=', pageToRestore.parentPageId)
        .executeTakeFirst();

      // If parent is deleted, we should detach this page from it
      shouldDetachFromParent = parent?.deletedAt !== null;
    }

    // Find all descendants to restore
    const pages = await this.db
      .withRecursive('page_descendants', (db) =>
        db
          .selectFrom('pages')
          .select(['id'])
          .where('id', '=', pageId)
          .unionAll((exp) =>
            exp
              .selectFrom('pages as p')
              .select(['p.id'])
              .innerJoin('page_descendants as pd', 'pd.id', 'p.parentPageId'),
          ),
      )
      .selectFrom('page_descendants')
      .selectAll()
      .execute();

    const pageIds = pages.map((p) => p.id);

    // Restore all pages, but only detach the root page if its parent is deleted
    await this.db
      .updateTable('pages')
      .set({ deletedById: null, deletedAt: null })
      .where('id', 'in', pageIds)
      .execute();

    // If we need to detach the restored page from its deleted parent
    if (shouldDetachFromParent) {
      await this.db
        .updateTable('pages')
        .set({ parentPageId: null })
        .where('id', '=', pageId)
        .execute();
    }
  }

  async getRecentPagesInSpace(spaceId: string, pagination: PaginationOptions) {
    const query = this.db
      .selectFrom('pages')
      .select(this.baseFields)
      .select((eb) => this.withSpace(eb))
      .where('spaceId', '=', spaceId)
      .where('deletedAt', 'is', null)
      .orderBy('updatedAt', 'desc');

    const result = executeWithPagination(query, {
@ -163,6 +286,7 @@ export class PageRepo {
      .select(this.baseFields)
      .select((eb) => this.withSpace(eb))
      .where('spaceId', 'in', userSpaceIds)
      .where('deletedAt', 'is', null)
      .orderBy('updatedAt', 'desc');

    const hasEmptyIds = userSpaceIds.length === 0;
@ -175,6 +299,41 @@ export class PageRepo {
    return result;
  }

  async getDeletedPagesInSpace(spaceId: string, pagination: PaginationOptions) {
    const query = this.db
      .selectFrom('pages')
      .select(this.baseFields)
      .select('content')
      .select((eb) => this.withSpace(eb))
      .select((eb) => this.withDeletedBy(eb))
      .where('spaceId', '=', spaceId)
      .where('deletedAt', 'is not', null)
      // Only include pages that are either root pages (no parent) or whose parent is not deleted
      // This prevents showing orphaned pages when their parent has been soft-deleted
      .where((eb) =>
        eb.or([
          eb('parentPageId', 'is', null),
          eb.not(
            eb.exists(
              eb
                .selectFrom('pages as parent')
                .select('parent.id')
                .where('parent.id', '=', eb.ref('pages.parentPageId'))
                .where('parent.deletedAt', 'is not', null),
            ),
          ),
        ]),
      )
      .orderBy('deletedAt', 'desc');

    const result = executeWithPagination(query, {
      page: pagination.page,
      perPage: pagination.limit,
    });

    return result;
  }

  withSpace(eb: ExpressionBuilder<DB, 'pages'>) {
    return jsonObjectFrom(
      eb
@ -202,6 +361,15 @@ export class PageRepo {
    ).as('lastUpdatedBy');
  }

  withDeletedBy(eb: ExpressionBuilder<DB, 'pages'>) {
    return jsonObjectFrom(
      eb
        .selectFrom('users')
        .select(['users.id', 'users.name', 'users.avatarUrl'])
        .whereRef('users.id', '=', 'pages.deletedById'),
    ).as('deletedBy');
  }

  withContributors(eb: ExpressionBuilder<DB, 'pages'>) {
    return jsonArrayFrom(
      eb
@ -231,6 +399,7 @@ export class PageRepo {
          ])
          .$if(opts?.includeContent, (qb) => qb.select('content'))
          .where('id', '=', parentPageId)
          .where('deletedAt', 'is', null)
          .unionAll((exp) =>
            exp
              .selectFrom('pages as p')
@ -245,7 +414,8 @@ export class PageRepo {
                'p.workspaceId',
              ])
              .$if(opts?.includeContent, (qb) => qb.select('p.content'))
              .innerJoin('page_hierarchy as ph', 'p.parentPageId', 'ph.id'),
              .innerJoin('page_hierarchy as ph', 'p.parentPageId', 'ph.id')
              .where('p.deletedAt', 'is', null),
          ),
      )
      .selectFrom('page_hierarchy')

@ -2,6 +2,7 @@ import { BadRequestException, Injectable } from '@nestjs/common';
import { InjectKysely } from 'nestjs-kysely';
import { KyselyDB, KyselyTransaction } from '@docmost/db/types/kysely.types';
import { dbOrTx } from '@docmost/db/utils';
import { sql } from 'kysely';
import {
  InsertableSpaceMember,
  SpaceMember,
@ -119,9 +120,21 @@ export class SpaceMemberRepo {

    if (pagination.query) {
      query = query.where((eb) =>
        eb('users.name', 'ilike', `%${pagination.query}%`)
          .or('users.email', 'ilike', `%${pagination.query}%`)
          .or('groups.name', 'ilike', `%${pagination.query}%`),
        eb(
          sql`f_unaccent(users.name)`,
          'ilike',
          sql`f_unaccent(${'%' + pagination.query + '%'})`,
        )
          .or(
            sql`users.email`,
            'ilike',
            sql`f_unaccent(${'%' + pagination.query + '%'})`,
          )
          .or(
            sql`f_unaccent(groups.name)`,
            'ilike',
            sql`f_unaccent(${'%' + pagination.query + '%'})`,
          ),
      );
    }

@ -228,10 +241,14 @@ export class SpaceMemberRepo {

    if (pagination.query) {
      query = query.where((eb) =>
        eb('name', 'ilike', `%${pagination.query}%`).or(
          'description',
        eb(
          sql`f_unaccent(name)`,
          'ilike',
          `%${pagination.query}%`,
          sql`f_unaccent(${'%' + pagination.query + '%'})`,
        ).or(
          sql`f_unaccent(description)`,
          'ilike',
          sql`f_unaccent(${'%' + pagination.query + '%'})`,
        ),
      );
    }

@ -110,10 +110,10 @@ export class SpaceRepo {

    if (pagination.query) {
      query = query.where((eb) =>
        eb('name', 'ilike', `%${pagination.query}%`).or(
          'description',
        eb(sql`f_unaccent(name)`, 'ilike', sql`f_unaccent(${'%' + pagination.query + '%'})`).or(
          sql`f_unaccent(description)`,
          'ilike',
          `%${pagination.query}%`,
          sql`f_unaccent(${'%' + pagination.query + '%'})`,
        ),
      );
    }

@ -1,7 +1,7 @@
import { Injectable } from '@nestjs/common';
import { InjectKysely } from 'nestjs-kysely';
import { KyselyDB, KyselyTransaction } from '@docmost/db/types/kysely.types';
import { Users } from '@docmost/db/types/db';
import { DB, Users } from '@docmost/db/types/db';
import { hashPassword } from '../../../common/helpers';
import { dbOrTx } from '@docmost/db/utils';
import {
@ -11,7 +11,8 @@ import {
} from '@docmost/db/types/entity.types';
import { PaginationOptions } from '../../pagination/pagination-options';
import { executeWithPagination } from '@docmost/db/pagination/pagination';
import { sql } from 'kysely';
import { ExpressionBuilder, sql } from 'kysely';
import { jsonObjectFrom } from 'kysely/helpers/postgres';

@Injectable()
export class UserRepo {
@ -40,6 +41,7 @@ export class UserRepo {
    workspaceId: string,
    opts?: {
      includePassword?: boolean;
      includeUserMfa?: boolean;
      trx?: KyselyTransaction;
    },
  ): Promise<User> {
@ -48,6 +50,7 @@ export class UserRepo {
      .selectFrom('users')
      .select(this.baseFields)
      .$if(opts?.includePassword, (qb) => qb.select('password'))
      .$if(opts?.includeUserMfa, (qb) => qb.select(this.withUserMfa))
      .where('id', '=', userId)
      .where('workspaceId', '=', workspaceId)
      .executeTakeFirst();
@ -58,6 +61,7 @@ export class UserRepo {
    workspaceId: string,
    opts?: {
      includePassword?: boolean;
      includeUserMfa?: boolean;
      trx?: KyselyTransaction;
    },
  ): Promise<User> {
@ -66,6 +70,7 @@ export class UserRepo {
      .selectFrom('users')
      .select(this.baseFields)
      .$if(opts?.includePassword, (qb) => qb.select('password'))
      .$if(opts?.includeUserMfa, (qb) => qb.select(this.withUserMfa))
      .where(sql`LOWER(email)`, '=', sql`LOWER(${email})`)
      .where('workspaceId', '=', workspaceId)
      .executeTakeFirst();
@ -144,10 +149,14 @@ export class UserRepo {

    if (pagination.query) {
      query = query.where((eb) =>
        eb('users.name', 'ilike', `%${pagination.query}%`).or(
          'users.email',
        eb(
          sql`f_unaccent(users.name)`,
          'ilike',
          `%${pagination.query}%`,
          sql`f_unaccent(${'%' + pagination.query + '%'})`,
        ).or(
          sql`users.email`,
          'ilike',
          sql`f_unaccent(${'%' + pagination.query + '%'})`,
        ),
      );
    }
@ -177,4 +186,18 @@ export class UserRepo {
      .returning(this.baseFields)
      .executeTakeFirst();
  }

  withUserMfa(eb: ExpressionBuilder<DB, 'users'>) {
    return jsonObjectFrom(
      eb
        .selectFrom('userMfa')
        .select([
          'userMfa.id',
          'userMfa.method',
          'userMfa.isEnabled',
          'userMfa.createdAt',
        ])
        .whereRef('userMfa.userId', '=', 'users.id'),
    ).as('mfa');
  }
}

@ -32,6 +32,7 @@ export class WorkspaceRepo {
    'trialEndAt',
    'enforceSso',
    'plan',
    'enforceMfa',
  ];
  constructor(@InjectKysely() private readonly db: KyselyDB) {}

apps/server/src/database/types/db.d.ts (vendored, 17 lines changed)
@ -120,12 +120,15 @@ export interface Comments {
  deletedAt: Timestamp | null;
  editedAt: Timestamp | null;
  id: Generated<string>;
  lastEditedById: string | null;
  pageId: string;
  parentCommentId: string | null;
  resolvedAt: Timestamp | null;
  resolvedById: string | null;
  selection: string | null;
  spaceId: string;
  type: string | null;
  updatedAt: Generated<Timestamp>;
  workspaceId: string;
}

@ -249,6 +252,18 @@ export interface Spaces {
  workspaceId: string;
}

export interface UserMfa {
  backupCodes: string[] | null;
  createdAt: Generated<Timestamp>;
  id: Generated<string>;
  isEnabled: Generated<boolean | null>;
  method: Generated<string>;
  secret: string | null;
  updatedAt: Generated<Timestamp>;
  userId: string;
  workspaceId: string;
}

export interface Users {
  avatarUrl: string | null;
  createdAt: Generated<Timestamp>;
@ -302,6 +317,7 @@ export interface Workspaces {
  deletedAt: Timestamp | null;
  description: string | null;
  emailDomains: Generated<string[] | null>;
  enforceMfa: Generated<boolean | null>;
  enforceSso: Generated<boolean>;
  hostname: string | null;
  id: Generated<string>;
@ -331,6 +347,7 @@ export interface DB {
  shares: Shares;
  spaceMembers: SpaceMembers;
  spaces: Spaces;
  userMfa: UserMfa;
  users: Users;
  userTokens: UserTokens;
  workspaceInvitations: WorkspaceInvitations;

@ -18,6 +18,7 @@ import {
  AuthAccounts,
  Shares,
  FileTasks,
  UserMfa as _UserMFA,
} from './db';

// Workspace
@ -113,3 +114,8 @@ export type UpdatableShare = Updateable<Omit<Shares, 'id'>>;
export type FileTask = Selectable<FileTasks>;
export type InsertableFileTask = Insertable<FileTasks>;
export type UpdatableFileTask = Updateable<Omit<FileTasks, 'id'>>;

// UserMFA
export type UserMFA = Selectable<_UserMFA>;
export type InsertableUserMFA = Insertable<_UserMFA>;
export type UpdatableUserMFA = Updateable<Omit<_UserMFA, 'id'>>;

Submodule apps/server/src/ee updated: 8f0fbf6964...27c23e94fc
@ -23,6 +23,10 @@ export class ExportPageDto {
  @IsOptional()
  @IsBoolean()
  includeChildren?: boolean;

  @IsOptional()
  @IsBoolean()
  includeAttachments?: boolean;
}

export class ExportSpaceDto {

@ -46,7 +46,7 @@ export class ExportController {
      includeContent: true,
    });

    if (!page) {
    if (!page || page.deletedAt) {
      throw new NotFoundException('Page not found');
    }

@ -55,40 +55,22 @@ export class ExportController {
      throw new ForbiddenException();
    }

    const fileExt = getExportExtension(dto.format);
    const fileName = sanitize(page.title || 'untitled') + fileExt;

    if (dto.includeChildren) {
      const zipFileBuffer = await this.exportService.exportPageWithChildren(
        dto.pageId,
        dto.format,
      );

      const newName = path.parse(fileName).name + '.zip';

      res.headers({
        'Content-Type': 'application/zip',
        'Content-Disposition':
          'attachment; filename="' + encodeURIComponent(newName) + '"',
      });

      res.send(zipFileBuffer);
      return;
    }

    const rawContent = await this.exportService.exportPage(
    const zipFileBuffer = await this.exportService.exportPages(
      dto.pageId,
      dto.format,
      page,
      true,
      dto.includeAttachments,
      dto.includeChildren,
    );

    const fileName = sanitize(page.title || 'untitled') + '.zip';

    res.headers({
      'Content-Type': getMimeType(fileExt),
      'Content-Type': 'application/zip',
      'Content-Disposition':
        'attachment; filename="' + encodeURIComponent(fileName) + '"',
    });

    res.send(rawContent);
    res.send(zipFileBuffer);
  }

  @UseGuards(JwtAuthGuard)

@ -89,10 +89,28 @@ export class ExportService {
      return;
    }

  async exportPageWithChildren(pageId: string, format: string) {
    const pages = await this.pageRepo.getPageAndDescendants(pageId, {
      includeContent: true,
    });
  async exportPages(
    pageId: string,
    format: string,
    includeAttachments: boolean,
    includeChildren: boolean,
  ) {
    let pages: Page[];

    if (includeChildren) {
      //@ts-ignore
      pages = await this.pageRepo.getPageAndDescendants(pageId, {
        includeContent: true,
      });
    } else {
      // Only fetch the single page when includeChildren is false
      const page = await this.pageRepo.findById(pageId, {
        includeContent: true,
      });
      if (page) {
        pages = [page];
      }
    }

    if (!pages || pages.length === 0) {
      throw new BadRequestException('No pages to export');
@ -105,7 +123,7 @@ export class ExportService {
    const tree = buildTree(pages as Page[]);

    const zip = new JSZip();
    await this.zipPages(tree, format, zip);
    await this.zipPages(tree, format, zip, includeAttachments);

    const zipFile = zip.generateNodeStream({
      type: 'nodebuffer',
@ -168,7 +186,7 @@ export class ExportService {
    tree: PageExportTree,
    format: string,
    zip: JSZip,
    includeAttachments = true,
    includeAttachments: boolean,
  ): Promise<void> {
    const slugIdToPath: Record<string, string> = {};

@ -200,7 +218,8 @@ export class ExportService {

      if (includeAttachments) {
        await this.zipAttachments(updatedJsonContent, page.spaceId, folder);
        updatedJsonContent = updateAttachmentUrlsToLocalPaths(updatedJsonContent);
        updatedJsonContent =
          updateAttachmentUrlsToLocalPaths(updatedJsonContent);
      }

      const pageTitle = getPageTitle(page.title);

@ -69,8 +69,17 @@ function taskList(turndownService: TurndownService) {
        'input[type="checkbox"]',
      ) as HTMLInputElement;
      const isChecked = checkbox.checked;

      return `- ${isChecked ? '[x]' : '[ ]'} ${content.trim()} \n`;

      // Process content like regular list items
      content = content
        .replace(/^\n+/, '') // remove leading newlines
        .replace(/\n+$/, '\n') // replace trailing newlines with just a single one
        .replace(/\n/gm, '\n  '); // indent nested content with 2 spaces

      // Create the checkbox prefix
      const prefix = `- ${isChecked ? '[x]' : '[ ]'} `;

      return prefix + content + (node.nextSibling && !/\n$/.test(content) ? '\n' : '');
    },
  });
}

@ -14,10 +14,14 @@ import { AttachmentType } from '../../../core/attachment/attachment.constants';
import { unwrapFromParagraph } from '../utils/import-formatter';
import { resolveRelativeAttachmentPath } from '../utils/import.utils';
import { load } from 'cheerio';
import pLimit from 'p-limit';

@Injectable()
export class ImportAttachmentService {
  private readonly logger = new Logger(ImportAttachmentService.name);
  private readonly CONCURRENT_UPLOADS = 3;
  private readonly MAX_RETRIES = 2;
  private readonly RETRY_DELAY = 2000;

  constructor(
    private readonly storageService: StorageService,
@ -41,7 +45,14 @@ export class ImportAttachmentService {
      attachmentCandidates,
    } = opts;

    const attachmentTasks: Promise<void>[] = [];
    const attachmentTasks: (() => Promise<void>)[] = [];
    const limit = pLimit(this.CONCURRENT_UPLOADS);
    const uploadStats = {
      total: 0,
      completed: 0,
      failed: 0,
      failedFiles: [] as string[],
    };

    /**
     * Cache keyed by the *relative* path that appears in the HTML.
@ -74,30 +85,16 @@ export class ImportAttachmentService {

      const apiFilePath = `/api/files/${attachmentId}/${fileNameWithExt}`;

      attachmentTasks.push(
        (async () => {
          const fileStream = createReadStream(abs);
          await this.storageService.uploadStream(storageFilePath, fileStream);
          const stat = await fs.stat(abs);

          await this.db
            .insertInto('attachments')
            .values({
              id: attachmentId,
              filePath: storageFilePath,
              fileName: fileNameWithExt,
              fileSize: stat.size,
              mimeType: getMimeType(fileNameWithExt),
              type: 'file',
              fileExt: ext,
              creatorId: fileTask.creatorId,
              workspaceId: fileTask.workspaceId,
              pageId,
              spaceId: fileTask.spaceId,
            })
            .execute();
        })(),
      );
      attachmentTasks.push(() => this.uploadWithRetry({
        abs,
        storageFilePath,
        attachmentId,
        fileNameWithExt,
        ext,
        pageId,
        fileTask,
        uploadStats,
      }));

      return {
        attachmentId,
@ -292,12 +289,113 @@ export class ImportAttachmentService {
    }

    // wait for all uploads & DB inserts
    try {
      await Promise.all(attachmentTasks);
    } catch (err) {
      this.logger.log('Import attachment upload error', err);
    uploadStats.total = attachmentTasks.length;

    if (uploadStats.total > 0) {
      this.logger.debug(`Starting upload of ${uploadStats.total} attachments...`);

      try {
        await Promise.all(
          attachmentTasks.map(task => limit(task))
        );
      } catch (err) {
        this.logger.error('Import attachment upload error', err);
      }

      this.logger.debug(
        `Upload completed: ${uploadStats.completed}/${uploadStats.total} successful, ${uploadStats.failed} failed`
      );

      if (uploadStats.failed > 0) {
        this.logger.warn(
          `Failed to upload ${uploadStats.failed} files:`,
          uploadStats.failedFiles
        );
      }
    }

    return $.root().html() || '';
  }

  private async uploadWithRetry(opts: {
    abs: string;
    storageFilePath: string;
    attachmentId: string;
    fileNameWithExt: string;
    ext: string;
    pageId: string;
    fileTask: FileTask;
    uploadStats: {
      total: number;
      completed: number;
      failed: number;
      failedFiles: string[];
    };
  }): Promise<void> {
    const {
      abs,
      storageFilePath,
      attachmentId,
      fileNameWithExt,
      ext,
      pageId,
      fileTask,
      uploadStats,
    } = opts;

    let lastError: Error;

    for (let attempt = 1; attempt <= this.MAX_RETRIES; attempt++) {
      try {
        const fileStream = createReadStream(abs);
        await this.storageService.uploadStream(storageFilePath, fileStream);
        const stat = await fs.stat(abs);

        await this.db
          .insertInto('attachments')
          .values({
            id: attachmentId,
            filePath: storageFilePath,
            fileName: fileNameWithExt,
            fileSize: stat.size,
            mimeType: getMimeType(fileNameWithExt),
            type: 'file',
            fileExt: ext,
            creatorId: fileTask.creatorId,
            workspaceId: fileTask.workspaceId,
            pageId,
            spaceId: fileTask.spaceId,
          })
          .execute();

        uploadStats.completed++;

        if (uploadStats.completed % 10 === 0) {
          this.logger.debug(
            `Upload progress: ${uploadStats.completed}/${uploadStats.total}`
          );
        }

        return;
      } catch (error) {
        lastError = error as Error;
        this.logger.warn(
          `Upload attempt ${attempt}/${this.MAX_RETRIES} failed for ${fileNameWithExt}: ${error instanceof Error ? error.message : String(error)}`
        );

        if (attempt < this.MAX_RETRIES) {
          await new Promise(resolve =>
            setTimeout(resolve, this.RETRY_DELAY * attempt)
          );
        }
      }
    }

    uploadStats.failed++;
    uploadStats.failedFiles.push(fileNameWithExt);
    this.logger.error(
      `Failed to upload ${fileNameWithExt} after ${this.MAX_RETRIES} attempts:`,
      lastError
    );
  }
}

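The switch from eagerly-started promises to an array of thunks is what lets p-limit do its job: a task only starts when the limiter invokes it, so at most CONCURRENT_UPLOADS uploads run at once. The pattern in isolation, assuming the p-limit dependency added in this change (uploadFile here is a hypothetical placeholder):

import pLimit from 'p-limit';

// Hypothetical uploader used only to show the shape of the pattern.
async function uploadFile(name: string): Promise<void> {
  /* ... */
}

async function uploadAll(fileNames: string[]): Promise<void> {
  const limit = pLimit(3); // at most 3 uploads in flight at once
  // Thunks, not promises: nothing starts until the limiter calls the function.
  const tasks = fileNames.map((name) => () => uploadFile(name));
  await Promise.all(tasks.map((task) => limit(task)));
}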
@ -178,7 +178,7 @@ export class ImportService {
      .selectFrom('pages')
      .select(['id', 'position'])
      .where('spaceId', '=', spaceId)
      .orderBy('position', 'desc')
      .orderBy('position', (ob) => ob.collate('C').desc())
      .limit(1)
      .where('parentPageId', 'is', null)
      .executeTakeFirst();

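Ordering position with collate 'C' matters because page positions appear to be fractional-index strings, which must compare byte-wise; a locale-aware collation can reorder mixed-case keys and return the wrong "last" position. A tiny illustration of the difference in plain TypeScript:

const positions = ['a0', 'Zz', 'a1'];

// Byte-wise order ("C" collation), which fractional-index keys expect:
[...positions].sort(); // ['Zz', 'a0', 'a1']

// A locale-aware comparison can interleave differently:
[...positions].sort((a, b) => a.localeCompare(b, 'en-US')); // ['a0', 'a1', 'Zz']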
@ -40,8 +40,11 @@ export class LocalDriver implements StorageDriver {

  async copy(fromFilePath: string, toFilePath: string): Promise<void> {
    try {
      const fromFullPath = this._fullPath(fromFilePath);
      const toFullPath = this._fullPath(toFilePath);

      if (await this.exists(fromFilePath)) {
        await fs.copy(fromFilePath, toFilePath);
        await fs.copy(fromFullPath, toFullPath);
      }
    } catch (err) {
      throw new Error(`Failed to copy file: ${(err as Error).message}`);
