Mirror of https://github.com/documenso/documenso.git
feat: universal upload
Implements a universal upload layer that supports multiple storage backends, starting with `database` and `s3`. Clients can put and retrieve files from either the client or the server through a blend of client and server actions.
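For orientation, a minimal sketch of how the new helpers are meant to be called. The import specifiers and the wrapper function are illustrative assumptions; only `putFile`, `getFile`, and the `NEXT_PUBLIC_UPLOAD_TRANSPORT` switch come from this commit.

// Hypothetical caller; the import paths assume the helpers are consumed from the lib package.
import { getFile } from '@documenso/lib/universal/upload/get-file';
import { putFile } from '@documenso/lib/universal/upload/put-file';

const roundTrip = async (file: { name: string; type: string; arrayBuffer: () => Promise<ArrayBuffer> }) => {
  // Store the file. The transport is chosen via NEXT_PUBLIC_UPLOAD_TRANSPORT:
  // 's3' uploads through a presigned URL, anything else stores base64 data in the database.
  const documentData = await putFile(file);

  // Read the file back as a Uint8Array, regardless of which backend holds it.
  return await getFile({ type: documentData.type, data: documentData.data });
};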
packages/lib/constants/time.ts (new file)
@@ -0,0 +1,5 @@
export const ONE_SECOND = 1000;
export const ONE_MINUTE = ONE_SECOND * 60;
export const ONE_HOUR = ONE_MINUTE * 60;
export const ONE_DAY = ONE_HOUR * 24;
export const ONE_WEEK = ONE_DAY * 7;
@@ -12,12 +12,15 @@
   ],
   "scripts": {},
   "dependencies": {
-    "@aws-sdk/s3-request-presigner": "^3.405.0",
-    "@aws-sdk/client-s3": "^3.405.0",
+    "@aws-sdk/client-s3": "^3.410.0",
+    "@aws-sdk/s3-request-presigner": "^3.410.0",
+    "@aws-sdk/signature-v4-crt": "^3.410.0",
     "@documenso/email": "*",
     "@documenso/prisma": "*",
     "@next-auth/prisma-adapter": "1.0.7",
     "@pdf-lib/fontkit": "^1.1.1",
+    "@scure/base": "^1.1.3",
+    "@sindresorhus/slugify": "^2.2.1",
     "@upstash/redis": "^1.20.6",
     "bcrypt": "^5.1.0",
     "luxon": "^3.4.0",
@@ -0,0 +1,19 @@
'use server';

import { prisma } from '@documenso/prisma';
import { DocumentDataType } from '@documenso/prisma/client';

export type CreateDocumentDataOptions = {
  type: DocumentDataType;
  data: string;
};

export const createDocumentData = async ({ type, data }: CreateDocumentDataOptions) => {
  return await prisma.documentData.create({
    data: {
      type,
      data,
      initialData: data,
    },
  });
};
@@ -1,10 +1,19 @@
 'use server';

+import { prisma } from '@documenso/prisma';

 export type CreateDocumentOptions = {
   title: string;
   userId: number;
-  fileName: string;
+  documentDataId: string;
 };

-export const createDocument = () => {
-  //
+export const createDocument = async ({ userId, title, documentDataId }: CreateDocumentOptions) => {
+  return await prisma.document.create({
+    data: {
+      title,
+      documentDataId,
+      userId,
+    },
+  });
 };
@@ -1,10 +1,13 @@
 'use server';

+import path from 'node:path';
 import { PDFDocument } from 'pdf-lib';

 import { prisma } from '@documenso/prisma';
 import { DocumentStatus, SigningStatus } from '@documenso/prisma/client';

+import { getFile } from '../../universal/upload/get-file';
+import { putFile } from '../../universal/upload/put-file';
 import { insertFieldInPDF } from '../pdf/insert-field-in-pdf';

 export type SealDocumentOptions = {
@@ -23,7 +26,9 @@ export const sealDocument = async ({ documentId }: SealDocumentOptions) => {
     },
   });

-  if (!document.documentData) {
+  const { documentData } = document;
+
+  if (!documentData) {
     throw new Error(`Document ${document.id} has no document data`);
   }

@@ -55,7 +60,7 @@ export const sealDocument = async ({ documentId }: SealDocumentOptions) => {
   }

   // !: Need to write the fields onto the document as a hard copy
-  const { data: pdfData } = document.documentData;
+  const pdfData = await getFile(documentData);

   const doc = await PDFDocument.load(pdfData);

@@ -65,17 +70,20 @@

   const pdfBytes = await doc.save();

-  await prisma.document.update({
+  const { name, ext } = path.parse(document.title);
+
+  const { data: newData } = await putFile({
+    name: `${name}_signed${ext}`,
+    type: 'application/pdf',
+    arrayBuffer: async () => Promise.resolve(Buffer.from(pdfBytes)),
+  });
+
+  await prisma.documentData.update({
     where: {
-      id: document.id,
-      status: DocumentStatus.COMPLETED,
+      id: documentData.id,
     },
     data: {
-      documentData: {
-        update: {
-          data: Buffer.from(pdfBytes).toString('base64'),
-        },
-      },
+      data: newData,
     },
   });
 };
@@ -1,8 +1,8 @@
-import { nanoid } from 'nanoid';
-
 import { prisma } from '@documenso/prisma';
 import { SendStatus, SigningStatus } from '@documenso/prisma/client';

+import { nanoid } from '../../universal/id';
+
 export interface SetRecipientsForDocumentOptions {
   userId: number;
   documentId: number;
@@ -1,5 +1,8 @@
 {
   "extends": "@documenso/tsconfig/react-library.json",
+  "compilerOptions": {
+    "types": ["@documenso/tsconfig/process-env.d.ts"]
+  },
   "include": ["**/*.ts", "**/*.tsx", "**/*.d.ts"],
   "exclude": ["dist", "build", "node_modules"]
 }
packages/lib/universal/id.ts (new file)
@@ -0,0 +1,5 @@
import { customAlphabet } from 'nanoid';

export const alphaid = customAlphabet('0123456789abcdefghijklmnopqrstuvwxyz', 10);

export { nanoid } from 'nanoid';
packages/lib/universal/upload/delete-file.ts (new file)
@@ -0,0 +1,22 @@
import { match } from 'ts-pattern';

import { DocumentDataType } from '@documenso/prisma/client';

import { deleteS3File } from './server-actions';

export type DeleteFileOptions = {
  type: DocumentDataType;
  data: string;
};

export const deleteFile = async ({ type, data }: DeleteFileOptions) => {
  return await match(type)
    .with(DocumentDataType.S3_PATH, async () => deleteFileFromS3(data))
    .otherwise(() => {
      return;
    });
};

const deleteFileFromS3 = async (key: string) => {
  await deleteS3File(key);
};
packages/lib/universal/upload/get-file.ts (new file)
@@ -0,0 +1,45 @@
import { base64 } from '@scure/base';
import { match } from 'ts-pattern';

import { DocumentDataType } from '@documenso/prisma/client';

import { getPresignGetUrl } from './server-actions';

export type GetFileOptions = {
  type: DocumentDataType;
  data: string;
};

export const getFile = async ({ type, data }: GetFileOptions) => {
  return await match(type)
    .with(DocumentDataType.BYTES, () => getFileFromBytes(data))
    .with(DocumentDataType.BYTES_64, () => getFileFromBytes64(data))
    .with(DocumentDataType.S3_PATH, async () => getFileFromS3(data))
    .exhaustive();
};

const getFileFromBytes = (data: string) => {
  const encoder = new TextEncoder();

  const binaryData = encoder.encode(data);

  return binaryData;
};

const getFileFromBytes64 = (data: string) => {
  const binaryData = base64.decode(data);

  return binaryData;
};

const getFileFromS3 = async (key: string) => {
  const { url } = await getPresignGetUrl(key);

  const buffer = await fetch(url, {
    method: 'GET',
  }).then(async (res) => res.arrayBuffer());

  const binaryData = new Uint8Array(buffer);

  return binaryData;
};
packages/lib/universal/upload/put-file.ts (new file)
@@ -0,0 +1,53 @@
import { base64 } from '@scure/base';
import { match } from 'ts-pattern';

import { DocumentDataType } from '@documenso/prisma/client';

import { createDocumentData } from '../../server-only/document-data/create-document-data';
import { getPresignPostUrl } from './server-actions';

type File = {
  name: string;
  type: string;
  arrayBuffer: () => Promise<ArrayBuffer>;
};

export const putFile = async (file: File) => {
  const { type, data } = await match(process.env.NEXT_PUBLIC_UPLOAD_TRANSPORT)
    .with('s3', async () => putFileInS3(file))
    .otherwise(async () => putFileInDatabase(file));

  return await createDocumentData({ type, data });
};

const putFileInDatabase = async (file: File) => {
  const contents = await file.arrayBuffer();

  const binaryData = new Uint8Array(contents);

  const asciiData = base64.encode(binaryData);

  return {
    type: DocumentDataType.BYTES_64,
    data: asciiData,
  };
};

const putFileInS3 = async (file: File) => {
  const { url, key } = await getPresignPostUrl(file.name, file.type);

  const body = await file.arrayBuffer();

  await fetch(url, {
    method: 'PUT',
    headers: {
      'Content-Type': 'application/octet-stream',
    },
    body,
  });

  return {
    type: DocumentDataType.S3_PATH,
    data: key,
  };
};
packages/lib/universal/upload/server-actions.ts (new file)
@@ -0,0 +1,104 @@
'use server';

import {
  DeleteObjectCommand,
  GetObjectCommand,
  PutObjectCommand,
  S3Client,
} from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
import slugify from '@sindresorhus/slugify';
import path from 'node:path';

import { ONE_HOUR, ONE_SECOND } from '../../constants/time';
import { getServerComponentSession } from '../../next-auth/get-server-session';
import { alphaid } from '../id';

export const getPresignPostUrl = async (fileName: string, contentType: string) => {
  const client = getS3Client();

  const user = await getServerComponentSession();

  // Get the basename and extension for the file
  const { name, ext } = path.parse(fileName);

  let key = `${alphaid(12)}/${slugify(name)}${ext}`;

  if (user) {
    key = `${user.id}/${key}`;
  }

  const putObjectCommand = new PutObjectCommand({
    Bucket: process.env.NEXT_PRIVATE_UPLOAD_BUCKET,
    Key: key,
    ContentType: contentType,
  });

  const url = await getSignedUrl(client, putObjectCommand, {
    expiresIn: ONE_HOUR / ONE_SECOND,
  });

  return { key, url };
};

export const getAbsolutePresignPostUrl = async (key: string) => {
  const client = getS3Client();

  const putObjectCommand = new PutObjectCommand({
    Bucket: process.env.NEXT_PRIVATE_UPLOAD_BUCKET,
    Key: key,
  });

  const url = await getSignedUrl(client, putObjectCommand, {
    expiresIn: ONE_HOUR / ONE_SECOND,
  });

  return { key, url };
};

export const getPresignGetUrl = async (key: string) => {
  const client = getS3Client();

  const getObjectCommand = new GetObjectCommand({
    Bucket: process.env.NEXT_PRIVATE_UPLOAD_BUCKET,
    Key: key,
  });

  const url = await getSignedUrl(client, getObjectCommand, {
    expiresIn: ONE_HOUR / ONE_SECOND,
  });

  return { key, url };
};

export const deleteS3File = async (key: string) => {
  const client = getS3Client();

  await client.send(
    new DeleteObjectCommand({
      Bucket: process.env.NEXT_PRIVATE_UPLOAD_BUCKET,
      Key: key,
    }),
  );
};

const getS3Client = () => {
  if (process.env.NEXT_PUBLIC_UPLOAD_TRANSPORT !== 's3') {
    throw new Error('Invalid upload transport');
  }

  const hasCredentials =
    process.env.NEXT_PRIVATE_UPLOAD_ACCESS_KEY_ID &&
    process.env.NEXT_PRIVATE_UPLOAD_SECRET_ACCESS_KEY;

  return new S3Client({
    endpoint: process.env.NEXT_PRIVATE_UPLOAD_ENDPOINT || undefined,
    region: process.env.NEXT_PRIVATE_UPLOAD_REGION || 'us-east-1',
    credentials: hasCredentials
      ? {
          accessKeyId: String(process.env.NEXT_PRIVATE_UPLOAD_ACCESS_KEY_ID),
          secretAccessKey: String(process.env.NEXT_PRIVATE_UPLOAD_SECRET_ACCESS_KEY),
        }
      : undefined,
  });
};
packages/lib/universal/upload/update-file.ts (new file)
@@ -0,0 +1,54 @@
import { base64 } from '@scure/base';
import { match } from 'ts-pattern';

import { DocumentDataType } from '@documenso/prisma/client';

import { getAbsolutePresignPostUrl } from './server-actions';

export type UpdateFileOptions = {
  type: DocumentDataType;
  oldData: string;
  newData: string;
};

export const updateFile = async ({ type, oldData, newData }: UpdateFileOptions) => {
  return await match(type)
    .with(DocumentDataType.BYTES, () => updateFileWithBytes(newData))
    .with(DocumentDataType.BYTES_64, () => updateFileWithBytes64(newData))
    .with(DocumentDataType.S3_PATH, async () => updateFileWithS3(oldData, newData))
    .exhaustive();
};

const updateFileWithBytes = (data: string) => {
  return {
    type: DocumentDataType.BYTES,
    data,
  };
};

const updateFileWithBytes64 = (data: string) => {
  const encoder = new TextEncoder();

  const binaryData = encoder.encode(data);

  const asciiData = base64.encode(binaryData);

  return {
    type: DocumentDataType.BYTES_64,
    data: asciiData,
  };
};

const updateFileWithS3 = async (key: string, data: string) => {
  const { url } = await getAbsolutePresignPostUrl(key);

  await fetch(url, {
    method: 'PUT',
    body: data,
  });

  return {
    type: DocumentDataType.S3_PATH,
    data: key,
  };
};