Mirror of https://github.com/documenso/documenso.git (synced 2025-11-18 02:32:00 +10:00)

Commit: chore: merged feat/refresh
.devcontainer/devcontainer.json
@@ -1,20 +1,32 @@
 {
   "name": "Documenso",
   "image": "mcr.microsoft.com/devcontainers/base:bullseye",
   "features": {
     "ghcr.io/devcontainers/features/docker-in-docker:2": {
       "version": "latest",
       "enableNonRootDocker": "true",
       "moby": "true"
     },
     "ghcr.io/devcontainers/features/node:1": {}
   },
   "onCreateCommand": "./.devcontainer/on-create.sh",
-  "forwardPorts": [
-    3000,
-    54320,
-    9000,
-    2500,
-    1100
-  ]
+  "forwardPorts": [3000, 54320, 9000, 2500, 1100],
+  "customizations": {
+    "vscode": {
+      "extensions": [
+        "aaron-bond.better-comments",
+        "bradlc.vscode-tailwindcss",
+        "dbaeumer.vscode-eslint",
+        "esbenp.prettier-vscode",
+        "mikestead.dotenv",
+        "unifiedjs.vscode-mdx",
+        "GitHub.copilot-chat",
+        "GitHub.copilot-labs",
+        "GitHub.copilot",
+        "GitHub.vscode-pull-request-github",
+        "Prisma.prisma",
+        "VisualStudioExptTeam.vscodeintellicode",
+      ]
+    }
+  }
 }
.env.example (18 lines changed)
@@ -7,8 +7,8 @@ NEXT_PRIVATE_GOOGLE_CLIENT_ID=""
 NEXT_PRIVATE_GOOGLE_CLIENT_SECRET=""

 # [[APP]]
-NEXT_PUBLIC_SITE_URL="http://localhost:3000"
-NEXT_PUBLIC_APP_URL="http://localhost:3000"
+NEXT_PUBLIC_WEBAPP_URL="http://localhost:3000"
+NEXT_PUBLIC_MARKETING_URL="http://localhost:3001"

 # [[DATABASE]]
 NEXT_PRIVATE_DATABASE_URL="postgres://documenso:password@127.0.0.1:54320/documenso"
@@ -20,6 +20,20 @@ E2E_TEST_USERNAME=""
 E2E_TEST_USER_EMAIL=""
 E2E_TEST_USER_PASSWORD=""

+# [[STORAGE]]
+# OPTIONAL: Defines the storage transport to use. Available options: database (default) | s3
+NEXT_PUBLIC_UPLOAD_TRANSPORT="database"
+# OPTIONAL: Defines the endpoint to use for the S3 storage transport. Relevant when using third-party S3-compatible providers.
+NEXT_PRIVATE_UPLOAD_ENDPOINT=
+# OPTIONAL: Defines the region to use for the S3 storage transport. Defaults to us-east-1.
+NEXT_PRIVATE_UPLOAD_REGION=
+# REQUIRED: Defines the bucket to use for the S3 storage transport.
+NEXT_PRIVATE_UPLOAD_BUCKET=
+# OPTIONAL: Defines the access key ID to use for the S3 storage transport.
+NEXT_PRIVATE_UPLOAD_ACCESS_KEY_ID=
+# OPTIONAL: Defines the secret access key to use for the S3 storage transport.
+NEXT_PRIVATE_UPLOAD_SECRET_ACCESS_KEY=
+
 # [[SMTP]]
 # OPTIONAL: Defines the transport to use for sending emails. Available options: smtp-auth (default) | smtp-api | mailchannels
 NEXT_PRIVATE_SMTP_TRANSPORT="smtp-auth"
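As an aside for anyone wiring up the new storage variables above, here is a minimal TypeScript sketch of how an S3 transport could be constructed from them. It is an illustration only, not Documenso's actual upload code: the `createS3Client` helper name is hypothetical, and it assumes the `@aws-sdk/client-s3` package is available.

```ts
// Hypothetical helper: builds an S3 client from the new upload variables.
// Illustration only; Documenso's real implementation may differ.
import { S3Client } from '@aws-sdk/client-s3';

export function createS3Client(): S3Client | null {
  // With the default "database" transport no S3 client is needed.
  if (process.env.NEXT_PUBLIC_UPLOAD_TRANSPORT !== 's3') {
    return null;
  }

  return new S3Client({
    // Only set for third-party S3-compatible providers (e.g. MinIO).
    endpoint: process.env.NEXT_PRIVATE_UPLOAD_ENDPOINT || undefined,
    region: process.env.NEXT_PRIVATE_UPLOAD_REGION || 'us-east-1',
    credentials: {
      accessKeyId: process.env.NEXT_PRIVATE_UPLOAD_ACCESS_KEY_ID ?? '',
      secretAccessKey: process.env.NEXT_PRIVATE_UPLOAD_SECRET_ACCESS_KEY ?? '',
    },
  });
}
```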
@@ -5,3 +5,4 @@
 # Statically hosted javascript files
 apps/*/public/*.js
 apps/*/public/*.cjs
+scripts/
.github/workflows/ci.yml (vendored, 6 lines changed)
@@ -22,12 +22,18 @@ jobs:
         uses: actions/checkout@v3
         with:
           fetch-depth: 2

       - name: Install Node.js
         uses: actions/setup-node@v3
         with:
           node-version: 18
           cache: npm

       - name: Install dependencies
         run: npm ci

+      - name: Copy env
+        run: cp .env.example .env
+
       - name: Build
         run: npm run build
.github/workflows/codeql-analysis.yml (vendored, 7 lines changed)
@@ -32,7 +32,10 @@ jobs:

       - name: Install Dependencies
         run: npm ci

+      - name: Copy env
+        run: cp .env.example .env
+
       - name: Build Documenso
         run: npm run build

@@ -42,4 +45,4 @@ jobs:
           languages: ${{ matrix.language }}

       - name: Perform CodeQL Analysis
         uses: github/codeql-action/analyze@v2
README.md (118 lines changed)
@@ -1,13 +1,11 @@
 <p align="center" style="margin-top: 120px">
-  <a href="https://github.com/documenso/documenso.com">
+  <a href="https://github.com/documenso/documenso">
     <img width="250px" src="https://github.com/documenso/documenso/assets/1309312/cd7823ec-4baa-40b9-be78-4acb3b1c73cb" alt="Documenso Logo">
   </a>

-  <h3 align="center">Open Source Signing Infrastructure</h3>
-
   <p align="center">
-    The DocuSign Open Source Alternative.
-    <br />
+    The Open Source DocuSign Alternative.
+    <br>
     <a href="https://documenso.com"><strong>Learn more »</strong></a>
     <br />
     <br />
@@ -22,12 +20,16 @@
 </p>

 <p align="center">
   <a href="https://documen.so/discord"><img src="https://img.shields.io/badge/Discord-documen.so/discord-%235865F2" alt="Join Documenso on Discord"></a>
   <a href="https://github.com/documenso/documenso/stargazers"><img src="https://img.shields.io/github/stars/documenso/documenso" alt="Github Stars"></a>
   <a href="https://github.com/documenso/documenso/blob/main/LICENSE"><img src="https://img.shields.io/badge/license-AGPLv3-purple" alt="License"></a>
   <a href="https://github.com/documenso/documenso/pulse"><img src="https://img.shields.io/github/commit-activity/m/documenso/documenso" alt="Commits-per-month"></a>
 </p>

+> **🚧 We're currently working on a large scale refactor which can be found on the [feat/refresh](https://github.com/documenso/documenso/tree/feat/refresh) branch.**
+>
+> **[Read more on why 👀](https://documenso.com/blog/why-were-doing-a-rewrite)**
+
 # Documenso 0.9 - Developer Preview

 <div>
@@ -63,18 +65,28 @@ Signing documents digitally is fast, easy and should be best practice for every

 ## Community and Next Steps 🎯

-The current project goal is to <b>[release a production ready version](https://github.com/documenso/documenso/milestone/1)</b> for self-hosting as soon as possible. If you want to help making that happen you can:
+We're currently working on a redesign of the application including a revamp of the codebase so Documenso can be more intuitive to use and robust to develop upon.

 - Check out the first source code release in this repository and test it
 - Tell us what you think in the current [Discussions](https://github.com/documenso/documenso/discussions)
-- Join the [Slack Channel](https://documen.so/slack) for any questions and getting to know to other community members
+- Join the [Discord server](https://documen.so/discord) for any questions and getting to know to other community members
 - ⭐ the repository to help us raise awareness
 - Spread the word on Twitter, that Documenso is working towards a more open signing tool
 - Fix or create [issues](https://github.com/documenso/documenso/issues), that are needed for the first production release

 ## Contributing

-- To contribute please see our [contribution guide](https://github.com/documenso/documenso/blob/main/CONTRIBUTING.md).
+- To contribute, please see our [contribution guide](https://github.com/documenso/documenso/blob/main/CONTRIBUTING.md).

+## Contact us
+
+Contact us if you are interested in our Enterprise plan for large organizations that need extra flexibility and control.
+
+<a href="https://cal.com/timurercan/enterprise-customers?utm_source=banner&utm_campaign=oss"><img alt="Book us with Cal.com" src="https://cal.com/book-with-cal-dark.svg" /></a>
+
+## Activity
+
+
+
 # Tech

@@ -89,10 +101,6 @@ Documenso is built using awesome open source tech including:
 - [Node SignPDF (Digital Signature)](https://github.com/vbuch/node-signpdf)
 - [React-PDF for viewing PDFs](https://github.com/wojtekmaj/react-pdf)
 - [PDF-Lib for PDF manipulation](https://github.com/Hopding/pdf-lib)
-- [Zod for schema declaration and validation](https://zod.dev/)
-- [Lucide React for icons in React app](https://lucide.dev/)
-- [Framer Motion for motion library](https://www.framer.com/motion/)
-- [Radix UI for component library](https://www.radix-ui.com/)
 - Check out `/package.json` and `/apps/web/package.json` for more
 - Support for [opensignpdf (requires Java on server)](https://github.com/open-pdf-sign) is currently planned.

@@ -135,37 +143,47 @@ Your database will also be available on port `54320`. You can connect to it usin

 ## Developer Setup

+### Manual Setup
+
 Follow these steps to setup documenso on you local machine:

 - [Clone the repository](https://help.github.com/articles/cloning-a-repository/) it to your local device.
   ```sh
   git clone https://github.com/documenso/documenso
   ```
-- Run <code>npm i</code> in root directory
-- Rename <code>.env.example</code> to <code>.env</code>
+- Run `npm i` in root directory
+- Rename `.env.example` to `.env`
 - Set DATABASE_URL value in .env file
   - You can use the provided test database url (may be wiped at any point)
   - Or setup a local postgres sql instance (recommended)
-- Create the database scheme by running <code>db-migrate:dev</code>
+- Create the database scheme by running `db-migrate:dev`
 - Setup your mail provider
-  - Set <code>SENDGRID_API_KEY</code> value in .env file
+  - Set `SENDGRID_API_KEY` value in .env file
   - You need a SendGrid account, which you can create [here](https://signup.sendgrid.com/).
-  - Documenso uses [Nodemailer](https://nodemailer.com/about/) so you can easily use your own SMTP server by setting the <code>SMTP\_\* variables</code> in your .env
-- Run <code>npm run dev</code> root directory to start
+  - Documenso uses [Nodemailer](https://nodemailer.com/about/) so you can easily use your own SMTP server by setting the `SMTP
+    \_
+    * variables` in your .env
+- Run `npm run dev` root directory to start
 - Register a new user at http://localhost:3000/signup

 ---

-- Optional: Seed the database using <code>npm run db-seed</code> to create a test user and document
-- Optional: Upload and sign <code>apps/web/resources/example.pdf</code> manually to test your setup
+- Optional: Seed the database using `npm run db-seed` to create a test user and document
+- Optional: Upload and sign `apps/web/resources/example.pdf` manually to test your setup

 - Optional: Create your own signing certificate
   - A demo certificate is provided in `/app/web/resources/certificate.p12`
   - To generate your own using these steps and a Linux Terminal or Windows Subsystem for Linux (WSL) see **[Create your own signing certificate](#creating-your-own-signing-certificate)**.

+### Run in Gitpod
+
+- Click below to launch a ready-to-use Gitpod workspace in your browser.
+
+[![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/documenso/documenso)
+
 ## Updating

-- If you pull the newest version from main, using <code>git pull</code>, it may be necessary to regenerate your database client
+- If you pull the newest version from main, using `git pull`, it may be necessary to regenerate your database client
 - You can do this by running the generate command in `/packages/prisma`:
   ```sh
   npx prisma generate
@@ -176,16 +194,22 @@ Follow these steps to setup documenso on you local machine:

 For the digital signature of your documents you need a signing certificate in .p12 format (public and private key). You can buy one (not recommended for dev) or use the steps to create a self-signed one:

-1. Generate a private key using the OpenSSL command. You can run the following command to generate a 2048-bit RSA key:\
-   <code>openssl genrsa -out private.key 2048</code>
-2. Generate a self-signed certificate using the private key. You can run the following command to generate a self-signed certificate:\
-   <code>openssl req -new -x509 -key private.key -out certificate.crt -days 365</code> \
+1. Generate a private key using the OpenSSL command. You can run the following command to generate a 2048-bit RSA key:
+
+   `openssl genrsa -out private.key 2048`
+
+2. Generate a self-signed certificate using the private key. You can run the following command to generate a self-signed certificate:
+
+   `openssl req -new -x509 -key private.key -out certificate.crt -days 365`
+
    This will prompt you to enter some information, such as the Common Name (CN) for the certificate. Make sure you enter the correct information. The -days parameter sets the number of days for which the certificate is valid.
-3. Combine the private key and the self-signed certificate to create the p12 certificate. You can run the following command to do this: \
-   <code>openssl pkcs12 -export -out certificate.p12 -inkey private.key -in certificate.crt</code>
+3. Combine the private key and the self-signed certificate to create the p12 certificate. You can run the following command to do this:
+
+   `openssl pkcs12 -export -out certificate.p12 -inkey private.key -in certificate.crt`
+
 4. You will be prompted to enter a password for the p12 file. Choose a strong password and remember it, as you will need it to use the certificate (**can be empty for dev certificates**)
-5. Place the certificate <code>/apps/web/resources/certificate.p12</code>
+5. Place the certificate `/apps/web/resources/certificate.p12`

 # Docker

@@ -193,16 +217,42 @@ For the digital signature of your documents you need a signing certificate in .p

 Want to create a production ready docker image? Follow these steps:

-- Run `./docker/build.sh` in the root directory.
-- Publish the image to your docker registry of choice.
-
-# Deploying - Coming Soon™
-
-- Docker support
-- One-Click-Deploy on Render.com Deploy
+- cd into `docker` directory
+- Make `build.sh` executable by running `chmod +x build.sh`
+- Run `./build.sh` to start building the docker image.
+- Publish the image to your docker registry of choice (or) If you prefer running the image from local, run the below command
+
+```
+docker run -d --restart=unless-stopped -p 3000:3000 -v documenso:/app/data --name documenso documenso:latest
+```
+
+Command Breakdown:
+
+- `-d` - Let's you run the container in background
+- `-p` - Passes down which ports to use. First half is the host port, Second half is the app port. You can change the first half anything you want and reverse proxy to that port.
+- `-v` - Volume let's you persist the data
+- `--name` - Name of the container
+- `documenso:latest` - Image you have built
+
+# Deployment
+
+We support a variety of deployment methods, and are actively working on adding more. Stay tuned for updates!
+
+## Railway
+
+[![Deploy on Railway](https://railway.app/button.svg)](https://railway.app/template/DjrRRX)
+
+## Render
+
+[![Deploy to Render](https://render.com/images/deploy-to-render-button.svg)](https://render.com/deploy?repo=https://github.com/documenso/documenso)
+
 # Troubleshooting

+## I'm not receiving any emails when using the developer quickstart
+
+When using the developer quickstart an [Inbucket](https://inbucket.org/) server will be spun up in a docker container that will store all outgoing email locally for you to view.
+
+The Web UI can be found at http://localhost:9000 while the SMTP port will be on localhost:2500.
+
 ## Support IPv6

 In case you are deploying to a cluster that uses only IPv6. You can use a custom command to pass a parameter to the NextJS start command
@@ -1,98 +1,98 @@
 ---
 title: 'Building Documenso — Part 1: Certificates'
-description: In today's fast-paced world, productivity and efficiency are crucial for success, both in personal and professional endeavors. We all strive to make the most of our time and energy to achieve our goals effectively. However, it's not always easy to stay on track and maintain peak performance. In this blog post, we'll explore 10 valuable tips to help you boost productivity and efficiency in your daily life.
+description: This is the first part of the new Building Documenso series, where I describe the challenges and design choices that we make while building the world’s most open signing platform.
 authorName: 'Timur Ercan'
 authorImage: '/blog/blog-author-timur.jpeg'
 authorRole: 'Co-Founder'
 date: 2023-06-23
 tags:
   - Open Source
   - Document Signature
   - Certificates
   - Signing
 ---

 <figure>
   <MdxNextImage
     src="/blog/blog-banner-building-documenso.webp"
     width="1200"
     height="675"
     alt="Building Documenso blog banner"
   />

   <figcaption className="text-center">
     What actually is a signature?
   </figcaption>
 </figure>

 > Disclaimer: I’m not a lawyer and this isn’t legal advice. We plan to publish a much more specific framework on the topic of signature validity.

 This is the first installment of the new Building Documenso series, where I describe the challenges and design choices that we make while building the world’s most open signing platform.

 As you may have heard, we launched the community-reviewed <a href="https://github.com/documenso/documenso" target="_blank">version 0.9 of Documenso on GitHub</a> recently and it’s now available through the early adopter’s plan. One of the most fundamental choices we had to make on this first release, was the choice of certificate. While it’s interesting to know what we opted for, this shall also serve as a guide for everyone facing the same choice for self-hosting Documenso.

 > Question: Why do I need a document signing certificate to self-host?
 >
 > Short Answer: Inserting the images of a signature into the document is only part of the signing process.

 To have an actual digitally signed document you need a document signing certificate that is used to create the digital signature that is inserted into the document, alongside the visible one¹.

 When hosting a signature service yourself, as we do, there are four main choices for handling the certificate: Not using a certificate, creating your own, buying a trusted certificate, and becoming and trusted service provider to issue your own trusted certificate.

 ## 1\. No Certificate

 A lot of signing services actually don’t employ actual digital signatures besides the inserted image. The only insert and image of the signatures into the document you sign. This can be done and is legally acceptable in many cases. This option isn’t directly supported by Documenso without changing the code.

 ## 2\. Create your own

 Since the cryptography behind certificates is freely available as open source you could generate your own using OpenSSL for example. Since it’s hardly more work than option 1 (using Documenso at least), this would be my minimum effort recommendation. Having a self-created (“self-signed”) certificate doesn’t add much in terms of regulation but it guarantees the document’s integrity, meaning no changes have been made after signing². What this doesn’t give you, is the famous green checkmark in Adobe Acrobat. Why? Because you aren’t on the list of providers Adobe “trusts”.³

 ## 3\. Buy a “trusted” certificate.

 There are Certificate Authorities (CAs) that can sell you a certificate⁴. The service they provide is, that they validate your name (personal certificates) or your organization’s name (corporate certificate) before creating your certificate for you, just like you did in option 2. The difference is, that they are listed on the previously mentioned trust lists (e.g. Adobe’s) and thus the resulting signatures get a nice, green checkmark in Adobe Reader⁵

 ## 4\. Becoming a Trusted Certificate Authority (CA) yourself and create your own certificate

 This option is an incredibly complex endeavour, requiring a lot of effort and skill. It can be done, as there are multiple CAs around the world. Is it worth the effort? That depends a lot on what you’re trying to accomplish.

 <center>. . .</center>

 ## What we did

 Having briefly introduced the options, here is what we did: Since we aim to raise the bar on digital signature proliferation and trust, we opted to buy an “Advanced Personal Certificates for Companies/Organisations” from WiseKey. Thus, documents signed with Documenso’s hosted version look like this:

 <figure>
   <MdxNextImage
     src="/blog/blog-fig-building-documenso.webp"
     width="1262"
     height="481"
     alt="Figure 1"
   />

   <figcaption className="text-center">The famous green checkmark: Signed by hosted Documenso</figcaption>
 </figure>

 There weren’t any deeper reasons we choose WiseKey, other than they offered what we needed and there wasn’t any reason to look much further. While I didn’t map the entire certificate market offering (yet), I’m pretty sure something similar could be found elsewhere. While we opted for option 3, choosing option 2 might be perfectly reasonable considering your use case.⁶

 > While this is our setup, for now, we have a bigger plan for this topic. While globally trusted SSL Certificates have been available for free, courtesy of Let’s Encrypt, for a while now, there is no such thing as document signing. And there should be. Not having free and trusted infrastructure for signing is blocking a completely new generation of signing products from being created. This is why we’ll start working on option 4 when the time is right.

 Do you have questions or thoughts about this? As always, let me know in the comments, on <a href="http://twitter.com/eltimuro" target="_blank">twitter.com/eltimuro</a>
 or directly: <a href="https://documen.so/timur" target="_blank">documen.so/timur</a>

-Join the self-hoster community here: <a href="https://documenso.slack.com/" target="_blank">https://documenso.slack.com/</a>
+Join the self-hoster community here: <a href="https://documen.so/discord" target="_blank">https://documen.so/discord</a>

 Best from Hamburg

 Timur

 \[1\] There are different approaches to signing a document. For the sake of simplicity, here we talk about a document with X inserted signature images, that is afterward signed once the by signing service, i.e. Documenso. If each visual signature should have its own digital one (e.g. QES — eIDAS Level 3), the case is a bit more complex.

 \[2\] Of course, the signing service provider technically can change and resign the document, especially in the case mentioned in \[1\]. This can be countered by requiring actual digital signatures from each signer, that are bound to their identity/ account. Creating a completely trustless system in the context however is extremely hard to do and not the most pressing business need for the industry at this point, in my opinion. Though, this would be nice.

 \[3\] Adobe, like the EU, has a list of organizations they trust. The Adobe green checkmark is powered by the Adobe trust list, if you want to be trusted by EU standards here: <a href="https://ec.europa.eu/digital-building-blocks/DSS/webapp-demo/validation" target="_blank">https://ec.europa.eu/digital-building-blocks/DSS/webapp-demo/validation</a>, you need to be on the EU trust list. Getting on each list is possible, though the latter is much more work.

 \[4\] Technically, they sign your certificate creation request (created by you), containing your info with their certificate (which is trusted), making your certificate trusted. This way, everything you sign with your certificate is seen as trusted. They created their certificate just like you, the difference is they are on the lists, mentioned in \[3\]

 \[5\] Why does Adobe get to say, what is trusted? They simply happen to have the most used pdf viewer. And since everyone checks there, whom they consider trusted carries weight. If it should be like this, is a different matter.

 \[6\] Self-Signed signatures, even purely visual signatures, are fully legally binding. Why you use changes mainly your confidence in the signature and the burden of proof. Also, some industries require a certain level of signatures e.g. retail loans (QES/ eIDAS Level 3 in the EU).
apps/marketing/content/blog/why-were-doing-a-rewrite.mdx (new file, 113 lines)
@@ -0,0 +1,113 @@
+---
+title: Why we're doing a rewrite
+description: As we move beyond MVP and onto creating the open signing infrastructure we all deserve we need to take a quick pit-stop.
+authorName: 'Lucas Smith'
+authorImage: '/blog/blog-author-lucas.png'
+authorRole: 'Co-Founder'
+date: 2023-08-05
+tags:
+  - Community
+  - Development
+---
+
+<figure>
+  <MdxNextImage
+    src="/blog/blog-banner-rewrite.png"
+    width="1260"
+    height="630"
+    alt="Next generation documenso"
+  />
+
+  <figcaption className="text-center">
+    The next generation of Documenso and signing infrastructure.
+  </figcaption>
+</figure>
+
+> TLDR; We're rewriting Documenso to move on from our MVP foundations and create an even better base for the project. This rewrite will provide us the opportunity to fix a few things within the project while enabling a faster development process moving forward.
+
+# Introduction
+
+At Documenso, we're building the next generation of signing infrastructure with a focus on making it inclusive and accessible for all. To do this we need to ensure that the software we write is also inclusive and accessible and for this reason we’ve decided to take a step back and perform a _quick_ rewrite.
+
+Although we've achieved validated MVP status and gained paying customers, we're still quite far from our goal of creating a trusted, open signing experience. To move closer to that future, we need to step back and focus on the project's foundations to ensure we can solve all the items we set out to on our current homepage.
+
+Fortunately, this wasn't a case of someone joining the team and proposing a rewrite due to a lack of understanding of the codebase and context surrounding it. Prior to joining Documenso as a co-founder, I had spent an extensive amount of time within the Documenso codebase and had a fairly intimate understanding of what was happening for the most part. This knowledge allowed me to make the fair and simultaneously hard call to take a quick pause so we can rebuild our current foundations to enable accessibility and a faster delivery time in the future.
+
+# The Reasoning: TypeScript
+
+Our primary reason for the rewrite is to better leverage the tools and technologies we've already chosen, namely TypeScript. While Documenso currently uses TypeScript, it's not fully taking advantage of its safety features, such as generics and type guards.
+
+The codebase currently has several instances of `any` types, which is expected when working in an unknown domain where object models aren't fully understood before exploration and experimentation. These `any`s initially sped up development, but have since become a hindrance due to the lack of type information, combined with prop drilling. As a result, it's necessary to go through a lot of context to understand the root of any given issue.
+
+The rewrite is using TypeScript to its full potential, ensuring that every interaction is strongly typed, both through general TypeScript tooling and the introduction of [Zod](https://github.com/colinhacks/zod), a validation library with excellent TypeScript support. With these choices, we can ensure that the codebase is robust to various inputs and states, as most issues will be caught during compile time and flagged within a developer's IDE.
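To make the kind of typing described above concrete, here is a minimal Zod sketch; the schema and field names are invented for illustration and are not taken from the Documenso codebase.

```ts
import { z } from 'zod';

// Hypothetical schema for illustration only; not Documenso's actual model.
const ZCreateDocumentInput = z.object({
  title: z.string().min(1),
  recipients: z.array(z.string().email()).min(1),
});

// The TypeScript type is derived from the schema, so it can never drift from it.
type TCreateDocumentInput = z.infer<typeof ZCreateDocumentInput>;

function parseCreateDocumentInput(input: unknown): TCreateDocumentInput {
  // Throws if the input does not match the declared shape (use .safeParse to branch instead).
  return ZCreateDocumentInput.parse(input);
}
```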
+
+# The Reasoning: Stronger API contracts
+
+In line with our pattern of creating strongly typed contracts, we've decided to use [tRPC](https://github.com/trpc/trpc) for our internal API. This enables us to share types between our frontend and backend and establish a solid contract for interactions between the two. This is in contrast to the currently untyped API endpoints in Documenso, which are accessed using the `fetch` API that is itself untyped.
+
+Using tRPC drastically reduces the chance of failures resulting from mundane things like argument or response shape changes during updates and upgrades. We made this decision easily because tRPC is a mature technology with no signs of losing momentum any time soon.
+
+Additionally, many of our open-source friends have made the same choice for similar reasons.
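As an illustration of the shared contract the post describes, here is a minimal tRPC v10-style sketch; the router and procedure are hypothetical rather than Documenso's real API.

```ts
import { initTRPC } from '@trpc/server';
import { z } from 'zod';

const t = initTRPC.create();

// Hypothetical router for illustration only.
export const appRouter = t.router({
  getDocument: t.procedure
    .input(z.object({ id: z.number() }))
    .query(({ input }) => {
      // In a real app this would load the document from the database.
      return { id: input.id, title: 'Example.pdf' };
    }),
});

// The client imports only this type, so the frontend and backend
// always agree on argument and response shapes at compile time.
export type AppRouter = typeof appRouter;
```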
+
+# The Reasoning: Choosing exciting technologies
+
+Although we already work with what I consider to be a fun stack that includes Next.js, Prisma, Tailwind, and more, it's no secret that contributors enjoy working with new technologies that benefit them in their own careers and projects.
+
+To take advantage of this, we have decided to use Next.js 13 and React's new server component and actions architecture. Server components are currently popular among developers, with many loving and hating them at the same time.
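For readers unfamiliar with the architecture being adopted here, this is a minimal, generic sketch of a Next.js 13 server action used from a server component (with `experimental.serverActions` enabled); the page and function are invented for illustration and do not come from the Documenso codebase.

```tsx
// app/rename/page.tsx - a hypothetical server component with an inline server action.
export default function RenameDocumentPage() {
  // 'use server' turns this function into a server action: the form posts back
  // to the server and the function runs there, never in the browser bundle.
  async function renameDocument(formData: FormData) {
    'use server';

    const title = formData.get('title');
    // ...persist the new title, e.g. with Prisma.
  }

  return (
    <form action={renameDocument}>
      <input name="title" />
      <button type="submit">Rename</button>
    </form>
  );
}
```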
+
+I have personally worked with server components and actions since they were first released in October 2022 and have dealt with most of the hiccups and limitations along the way. Now, in July 2023, I believe they are in a much more stable place and are ready to be adopted, with their benefits being recognised by many.
+
+By choosing to use server components and actions, we hope to encourage the community to participate more than they otherwise might. However, we are only choosing this because it has become more mature and stable. We will not choose things that are less likely to become the de-facto solution in the future, as we do not wish to inherit a pile of tech debt later on.
+
+# The Reasoning: Allowing concurrent work
+
+Another compelling reason for the rewrite was to effectively modularise code so we can work on features concurrently and without issue. This means extracting as much as possible out of components, API handlers and more and into a set of methods and functions that attempt to focus on just one thing.
+
+In performing this work we should be able to easily make refactors and other changes to various parts of the code without stepping on each others feet, this also grants us the ability to upgrade or deprecate items as required by sticking to the contract of the previous method.
+
+Additionally, this makes testing a much easier task as we can focus more on units of work rather than extensive end to end testing although we aim to have both, just not straight away.
+
+# The Reasoning: Licensing of work
+
+Another major reasoning for the rewrite is to ensure that all work performed on the project by both our internal team and external contributors is licensed in a way that benefits the project long-term. Prior to the rewrite contributors would create pull requests that would be merged in without any further process outside of the common code-review and testing cycles.
+
+This was fine for the most part since we were simply working on the MVP but now as we move towards an infrastructure focus we intend on taking on enterprise clients who will have a need for a non-GPLv3 license since interpretations of it can be quite harmful to private hosting, to facilitate this we will require contributors to sign a contributor license agreement (CLA) prior to their changes being merged which will assign a perpetual license for us to use their code and relicense it as required such as for the use-case above.
+
+While some might cringe at the idea of signing a CLA, we want to offer a compelling enterprise offering through means of dual-licensing. Great enterprise adoption is one of the cornerstones of our strategy and will be key to funding community and product development long-term.
+
+_Do note that the above does not mean that we will ever go closed-source, it’s a point in our investor agreements that [https://github.com/documenso/documenso](https://github.com/documenso/documenso) will always remain available and open-source._
+
+# Goals and Non-Goals
+
+Rewriting an application is a monumental task that I have taken on and rejected many times in my career. As I get older, I become more hesitant to perform these rewrites because I understand that systems carry a lot of context and history. This makes them better suited for piecemeal refactoring instead, which avoids learning the lessons of the past all over again during the launch of the rewrite.
+
+To ensure that we aren't just jumping off the deep end, I have set out a list of goals and non-goals to keep this rewrite lean and affordable.
+
+### Goals
+
+- Provide a clean design and interface for the newly rewritten application that creates a sense of trust and security at first glance.
+- Create a stable foundation and architecture that will allow for growth into our future roadmap items (teams, automation, workflows, etc.).
+- Create a robust system that requires minimal context through strong contracts and typing.
+
+### Non-Goals
+
+- Change the database schema (we don't want to make migration harder than it needs to be, thus all changes must be additive).
+- Add too many features that weren't in the system prior to the rewrite.
+- Remove any features that were in the older version of Documenso, such as free signatures (signatures that have no corresponding field).
+
+# Rollout Plan
+
+Thanks to the constraints listed above our rollout will hopefully be fairly painless, still to be safe we plan on doing the following.
+
+1. In the current [testing environment](https://test.documenso.com), create and sign a number of documents leaving many in varying states of completion.
+2. Deploy the rewrite to the testing environment and verify that all existing documents and information is retrievable and modifiable without any issue.
+3. Create another set of documents using the new rewrite and verify that all interactions between authoring and signing work as expected.
+4. Repeat this until we reach a general confidence level (expectation of two weeks).
+
+Once we’ve reached the desired confidence level with our testing environment we will look to deploy the rewrite to the production environment ensuring that we’ve performed all the required backups in the event of a catastrophic failure.
+
+# Want to help out?
+
+We’re currently working on the **[feat/refresh](https://github.com/documenso/documenso/tree/feat/refresh)** branch on GitHub, we aim to have a CLA available to sign in the coming days so we can start accepting external contributions asap. While we’re nearing the end-stage of the rewrite we will be throwing up a couple of bounties shortly for things like [Husky](https://github.com/typicode/husky) and [Changesets](https://github.com/changesets/changesets).
+
+Keep an eye on our [GitHub issues](https://github.com/documenso/documenso/issues) to stay up to date!
@@ -8,9 +8,50 @@ const { parsed: env } = require('dotenv').config({

 /** @type {import('next').NextConfig} */
 const config = {
+  experimental: {
+    serverActions: true,
+  },
   reactStrictMode: true,
   transpilePackages: ['@documenso/lib', '@documenso/prisma', '@documenso/trpc', '@documenso/ui'],
-  env,
+  modularizeImports: {
+    'lucide-react': {
+      transform: 'lucide-react/dist/esm/icons/{{ kebabCase member }}',
+    },
+  },
+  async headers() {
+    return [
+      {
+        source: '/:path*',
+        headers: [
+          {
+            key: 'x-dns-prefetch-control',
+            value: 'on',
+          },
+          {
+            key: 'strict-transport-security',
+            value: 'max-age=31536000; includeSubDomains; preload',
+          },
+          {
+            key: 'x-frame-options',
+            value: 'SAMEORIGIN',
+          },
+          {
+            key: 'x-content-type-options',
+            value: 'nosniff',
+          },
+          {
+            key: 'referrer-policy',
+            value: 'strict-origin-when-cross-origin',
+          },
+          {
+            key: 'permissions-policy',
+            value:
+              'accelerometer=(), camera=(), geolocation=(), gyroscope=(), magnetometer=(), microphone=(), payment=(), usb=()',
+          },
+        ],
+      },
+    ];
+  },
 };

 module.exports = withContentlayer(config);
apps/marketing/process-env.d.ts (vendored, 3 lines changed)
@@ -1,6 +1,7 @@
 declare namespace NodeJS {
   export interface ProcessEnv {
-    NEXT_PUBLIC_SITE_URL?: string;
+    NEXT_PUBLIC_WEBAPP_URL?: string;
+    NEXT_PUBLIC_MARKETING_URL?: string;

     NEXT_PRIVATE_DATABASE_URL: string;

@@ -19,6 +19,7 @@ export const generateMetadata = ({ params }: { params: { post: string } }) => {

   return {
     title: `Documenso - ${blogPost.title}`,
+    description: blogPost.description,
   };
 };

@@ -161,7 +161,7 @@ export default async function ClaimedPlanPage({ searchParams = {} }: ClaimedPlan
       </p>

       <Link
-        href={`${process.env.NEXT_PUBLIC_APP_URL}/login`}
+        href={`${process.env.NEXT_PUBLIC_WEBAPP_URL}/login`}
         target="_blank"
         className="mt-4 block"
       >
@@ -21,12 +21,12 @@ export const metadata = {
     description:
       'Join Documenso, the open signing infrastructure, and get a 10x better signing experience. Pricing starts at $30/mo. forever! Sign in now and enjoy a faster, smarter, and more beautiful document signing process. Integrates with your favorite tools, customizable, and expandable. Support our mission and become a part of our open-source community.',
     type: 'website',
-    images: [`${process.env.NEXT_PUBLIC_SITE_URL}/opengraph-image.jpg`],
+    images: [`${process.env.NEXT_PUBLIC_MARKETING_URL}/opengraph-image.jpg`],
   },
   twitter: {
     site: '@documenso',
     card: 'summary_large_image',
-    images: [`${process.env.NEXT_PUBLIC_SITE_URL}/opengraph-image.jpg`],
+    images: [`${process.env.NEXT_PUBLIC_MARKETING_URL}/opengraph-image.jpg`],
     description:
       'Join Documenso, the open signing infrastructure, and get a 10x better signing experience. Pricing starts at $30/mo. forever! Sign in now and enjoy a faster, smarter, and more beautiful document signing process. Integrates with your favorite tools, customizable, and expandable. Support our mission and become a part of our open-source community.',
   },
@@ -43,7 +43,7 @@ export default async function handler(

   if (user && user.Subscription.length > 0) {
     return res.status(200).json({
-      redirectUrl: `${process.env.NEXT_PUBLIC_APP_URL}/login`,
+      redirectUrl: `${process.env.NEXT_PUBLIC_WEBAPP_URL}/login`,
     });
   }

@@ -103,8 +103,8 @@ export default async function handler(
     mode: 'subscription',
     metadata,
     allow_promotion_codes: true,
-    success_url: `${process.env.NEXT_PUBLIC_SITE_URL}/claimed?sessionId={CHECKOUT_SESSION_ID}`,
-    cancel_url: `${process.env.NEXT_PUBLIC_SITE_URL}/pricing?email=${encodeURIComponent(
+    success_url: `${process.env.NEXT_PUBLIC_MARKETING_URL}/claimed?sessionId={CHECKOUT_SESSION_ID}`,
+    cancel_url: `${process.env.NEXT_PUBLIC_MARKETING_URL}/pricing?email=${encodeURIComponent(
       email,
     )}&name=${encodeURIComponent(name)}&planId=${planId}&cancelled=true`,
   });
@@ -8,8 +8,11 @@ import { insertImageInPDF } from '@documenso/lib/server-only/pdf/insert-image-in
 import { insertTextInPDF } from '@documenso/lib/server-only/pdf/insert-text-in-pdf';
 import { redis } from '@documenso/lib/server-only/redis';
 import { Stripe, stripe } from '@documenso/lib/server-only/stripe';
+import { getFile } from '@documenso/lib/universal/upload/get-file';
+import { updateFile } from '@documenso/lib/universal/upload/update-file';
 import { prisma } from '@documenso/prisma';
 import {
+  DocumentDataType,
   DocumentStatus,
   FieldType,
   ReadStatus,
@@ -85,16 +88,34 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
 
   const now = new Date();
 
+  const bytes64 = readFileSync('./public/documenso-supporter-pledge.pdf').toString('base64');
+
+  const { id: documentDataId } = await prisma.documentData.create({
+    data: {
+      type: DocumentDataType.BYTES_64,
+      data: bytes64,
+      initialData: bytes64,
+    },
+  });
+
   const document = await prisma.document.create({
     data: {
       title: 'Documenso Supporter Pledge.pdf',
       status: DocumentStatus.COMPLETED,
       userId: user.id,
-      document: readFileSync('./public/documenso-supporter-pledge.pdf').toString('base64'),
-      created: now,
+      documentDataId,
     },
+    include: {
+      documentData: true,
+    },
   });
 
+  const { documentData } = document;
+
+  if (!documentData) {
+    throw new Error(`Document ${document.id} has no document data`);
+  }
+
   const recipient = await prisma.recipient.create({
     data: {
       name: user.name ?? '',
@@ -121,17 +142,21 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     },
   });
 
+  let pdfData = await getFile(documentData).then((data) =>
+    Buffer.from(data).toString('base64'),
+  );
+
   if (signatureDataUrl) {
-    document.document = await insertImageInPDF(
-      document.document,
+    pdfData = await insertImageInPDF(
+      pdfData,
       signatureDataUrl,
       Number(field.positionX),
       Number(field.positionY),
       field.page,
     );
   } else {
-    document.document = await insertTextInPDF(
-      document.document,
+    pdfData = await insertTextInPDF(
+      pdfData,
       signatureText ?? '',
       Number(field.positionX),
       Number(field.positionY),
@@ -139,6 +164,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     );
   }
 
+  const { data: newData } = await updateFile({
+    type: documentData.type,
+    oldData: documentData.initialData,
+    newData: Buffer.from(pdfData, 'base64').toString('binary'),
+  });
+
   await Promise.all([
     prisma.signature.create({
       data: {
@@ -148,12 +179,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
         typedSignature: signatureDataUrl ? '' : signatureText,
       },
     }),
-    prisma.document.update({
+    prisma.documentData.update({
       where: {
-        id: document.id,
+        id: documentData.id,
       },
       data: {
-        document: document.document,
+        data: newData,
       },
     }),
   ]);
@@ -1,5 +1,6 @@
 /* eslint-disable @typescript-eslint/no-var-requires */
 const path = require('path');
+const { version } = require('./package.json');
 
 const { parsed: env } = require('dotenv').config({
   path: path.join(__dirname, '../../.env.local'),
@@ -9,6 +10,7 @@ const { parsed: env } = require('dotenv').config({
 const config = {
   experimental: {
     serverActions: true,
+    serverActionsBodySizeLimit: '50mb',
   },
   reactStrictMode: true,
   transpilePackages: [
@@ -18,7 +20,9 @@ const config = {
     '@documenso/ui',
     '@documenso/email',
   ],
-  env,
+  env: {
+    APP_VERSION: version,
+  },
   modularizeImports: {
     'lucide-react': {
       transform: 'lucide-react/dist/esm/icons/{{ kebabCase member }}',
@@ -24,7 +24,6 @@
     "lucide-react": "^0.214.0",
     "luxon": "^3.4.0",
     "micro": "^10.0.1",
-    "nanoid": "^4.0.2",
     "next": "13.4.12",
     "next-auth": "4.22.3",
     "next-plausible": "^3.10.1",
apps/web/process-env.d.ts (vendored, 3 changed lines)
@@ -1,6 +1,7 @@
 declare namespace NodeJS {
   export interface ProcessEnv {
-    NEXT_PUBLIC_SITE_URL?: string;
+    NEXT_PUBLIC_WEBAPP_URL?: string;
+    NEXT_PUBLIC_MARKETING_URL?: string;
 
     NEXT_PRIVATE_DATABASE_URL: string;
 
@@ -1,34 +0,0 @@
-import { useMutation } from '@tanstack/react-query';
-
-import { TCreateDocumentRequestSchema, ZCreateDocumentResponseSchema } from './types';
-
-export const useCreateDocument = () => {
-  return useMutation(async ({ file }: TCreateDocumentRequestSchema) => {
-    const formData = new FormData();
-
-    formData.set('file', file);
-
-    const response = await fetch('/api/document/create', {
-      method: 'POST',
-      body: formData,
-    });
-
-    const body = await response.json();
-
-    if (response.status !== 200) {
-      throw new Error('Failed to create document');
-    }
-
-    const safeBody = ZCreateDocumentResponseSchema.safeParse(body);
-
-    if (!safeBody.success) {
-      throw new Error('Failed to create document');
-    }
-
-    if ('error' in safeBody.data) {
-      throw new Error(safeBody.data.error);
-    }
-
-    return safeBody.data;
-  });
-};
@@ -1,19 +0,0 @@
-import { z } from 'zod';
-
-export const ZCreateDocumentRequestSchema = z.object({
-  file: z.instanceof(File),
-});
-
-export type TCreateDocumentRequestSchema = z.infer<typeof ZCreateDocumentRequestSchema>;
-
-export const ZCreateDocumentResponseSchema = z
-  .object({
-    id: z.number(),
-  })
-  .or(
-    z.object({
-      error: z.string(),
-    }),
-  );
-
-export type TCreateDocumentResponseSchema = z.infer<typeof ZCreateDocumentResponseSchema>;
apps/web/src/app/(dashboard)/admin/layout.tsx (new file, 30 lines)
@@ -0,0 +1,30 @@
+import React from 'react';
+
+import { redirect } from 'next/navigation';
+
+import { getRequiredServerComponentSession } from '@documenso/lib/next-auth/get-server-session';
+import { isAdmin } from '@documenso/lib/next-auth/guards/is-admin';
+
+import { AdminNav } from './nav';
+
+export type AdminSectionLayoutProps = {
+  children: React.ReactNode;
+};
+
+export default async function AdminSectionLayout({ children }: AdminSectionLayoutProps) {
+  const user = await getRequiredServerComponentSession();
+
+  if (!isAdmin(user)) {
+    redirect('/documents');
+  }
+
+  return (
+    <div className="mx-auto mt-16 w-full max-w-screen-xl px-4 md:px-8">
+      <div className="grid grid-cols-12 gap-x-8 md:mt-8">
+        <AdminNav className="col-span-12 md:col-span-3 md:flex" />
+
+        <div className="col-span-12 mt-12 md:col-span-9 md:mt-0">{children}</div>
+      </div>
+    </div>
+  );
+}
apps/web/src/app/(dashboard)/admin/nav.tsx (new file, 47 lines)
@@ -0,0 +1,47 @@
+'use client';
+
+import { HTMLAttributes } from 'react';
+
+import Link from 'next/link';
+import { usePathname } from 'next/navigation';
+
+import { BarChart3, User2 } from 'lucide-react';
+
+import { cn } from '@documenso/ui/lib/utils';
+import { Button } from '@documenso/ui/primitives/button';
+
+export type AdminNavProps = HTMLAttributes<HTMLDivElement>;
+
+export const AdminNav = ({ className, ...props }: AdminNavProps) => {
+  const pathname = usePathname();
+
+  return (
+    <div className={cn('flex gap-x-2.5 gap-y-2 md:flex-col', className)} {...props}>
+      <Button
+        variant="ghost"
+        className={cn(
+          'justify-start md:w-full',
+          pathname?.startsWith('/admin/stats') && 'bg-secondary',
+        )}
+        asChild
+      >
+        <Link href="/admin/stats">
+          <BarChart3 className="mr-2 h-5 w-5" />
+          Stats
+        </Link>
+      </Button>
+
+      <Button
+        variant="ghost"
+        className={cn(
+          'justify-start md:w-full',
+          pathname?.startsWith('/admin/users') && 'bg-secondary',
+        )}
+        disabled
+      >
+        <User2 className="mr-2 h-5 w-5" />
+        Users (Coming Soon)
+      </Button>
+    </div>
+  );
+};
apps/web/src/app/(dashboard)/admin/page.tsx (new file, 5 lines)
@@ -0,0 +1,5 @@
+import { redirect } from 'next/navigation';
+
+export default function Admin() {
+  redirect('/admin/stats');
+}
apps/web/src/app/(dashboard)/admin/stats/page.tsx (new file, 75 lines)
@@ -0,0 +1,75 @@
+import {
+  File,
+  FileCheck,
+  FileClock,
+  FileEdit,
+  Mail,
+  MailOpen,
+  PenTool,
+  User as UserIcon,
+  UserPlus2,
+  UserSquare2,
+} from 'lucide-react';
+
+import { getDocumentStats } from '@documenso/lib/server-only/admin/get-documents-stats';
+import { getRecipientsStats } from '@documenso/lib/server-only/admin/get-recipients-stats';
+import {
+  getUsersCount,
+  getUsersWithSubscriptionsCount,
+} from '@documenso/lib/server-only/admin/get-users-stats';
+
+import { CardMetric } from '~/components/(dashboard)/metric-card/metric-card';
+
+export default async function AdminStatsPage() {
+  const [usersCount, usersWithSubscriptionsCount, docStats, recipientStats] = await Promise.all([
+    getUsersCount(),
+    getUsersWithSubscriptionsCount(),
+    getDocumentStats(),
+    getRecipientsStats(),
+  ]);
+
+  return (
+    <div>
+      <h2 className="text-4xl font-semibold">Instance Stats</h2>
+
+      <div className="mt-8 grid flex-1 grid-cols-1 gap-4 md:grid-cols-4">
+        <CardMetric icon={UserIcon} title="Total Users" value={usersCount} />
+        <CardMetric icon={File} title="Total Documents" value={docStats.ALL} />
+        <CardMetric
+          icon={UserPlus2}
+          title="Active Subscriptions"
+          value={usersWithSubscriptionsCount}
+        />
+        <CardMetric icon={UserPlus2} title="App Version" value={`v${process.env.APP_VERSION}`} />
+      </div>
+
+      <div className="mt-16 grid grid-cols-1 gap-8 md:grid-cols-2">
+        <div>
+          <h3 className="text-3xl font-semibold">Document metrics</h3>
+
+          <div className="mt-8 grid flex-1 grid-cols-2 gap-4">
+            <CardMetric icon={File} title="Total Documents" value={docStats.ALL} />
+            <CardMetric icon={FileEdit} title="Drafted Documents" value={docStats.DRAFT} />
+            <CardMetric icon={FileClock} title="Pending Documents" value={docStats.PENDING} />
+            <CardMetric icon={FileCheck} title="Completed Documents" value={docStats.COMPLETED} />
+          </div>
+        </div>
+
+        <div>
+          <h3 className="text-3xl font-semibold">Recipients metrics</h3>
+
+          <div className="mt-8 grid flex-1 grid-cols-2 gap-4">
+            <CardMetric
+              icon={UserSquare2}
+              title="Total Recipients"
+              value={recipientStats.TOTAL_RECIPIENTS}
+            />
+            <CardMetric icon={Mail} title="Documents Received" value={recipientStats.SENT} />
+            <CardMetric icon={MailOpen} title="Documents Viewed" value={recipientStats.OPENED} />
+            <CardMetric icon={PenTool} title="Signatures Collected" value={recipientStats.SIGNED} />
+          </div>
+        </div>
+      </div>
+    </div>
+  );
+}
@@ -4,7 +4,8 @@ import { useState } from 'react';
 
 import { useRouter } from 'next/navigation';
 
-import { Document, Field, Recipient, User } from '@documenso/prisma/client';
+import { Field, Recipient, User } from '@documenso/prisma/client';
+import { DocumentWithData } from '@documenso/prisma/types/document-with-data';
 import { cn } from '@documenso/ui/lib/utils';
 import { Card, CardContent } from '@documenso/ui/primitives/card';
 import { AddFieldsFormPartial } from '@documenso/ui/primitives/document-flow/add-fields';
@@ -28,9 +29,10 @@ import { completeDocument } from '~/components/forms/edit-document/add-subject.a
 export type EditDocumentFormProps = {
   className?: string;
   user: User;
-  document: Document;
+  document: DocumentWithData;
   recipients: Recipient[];
   fields: Field[];
+  dataUrl: string;
 };
 
 type EditDocumentStep = 'signers' | 'fields' | 'subject';
@@ -41,14 +43,13 @@ export const EditDocumentForm = ({
   recipients,
   fields,
   user: _user,
+  dataUrl,
 }: EditDocumentFormProps) => {
   const { toast } = useToast();
   const router = useRouter();
 
   const [step, setStep] = useState<EditDocumentStep>('signers');
 
-  const documentUrl = `data:application/pdf;base64,${document.document}`;
-
   const documentFlow: Record<EditDocumentStep, DocumentFlowStep> = {
     signers: {
       title: 'Add Signers',
@@ -151,11 +152,11 @@ export const EditDocumentForm = ({
   return (
     <div className={cn('grid w-full grid-cols-12 gap-8', className)}>
       <Card
-        className="col-span-12 rounded-xl before:rounded-xl lg:col-span-6 xl:col-span-7"
+        className="relative col-span-12 rounded-xl before:rounded-xl lg:col-span-6 xl:col-span-7"
         gradient
       >
         <CardContent className="p-2">
-          <LazyPDFViewer document={documentUrl} />
+          <LazyPDFViewer document={dataUrl} />
         </CardContent>
       </Card>
 
@@ -1,20 +0,0 @@
-'use client';
-
-import { Card, CardContent } from '@documenso/ui/primitives/card';
-import { LazyPDFViewer } from '@documenso/ui/primitives/lazy-pdf-viewer';
-import { PDFViewerProps } from '@documenso/ui/primitives/pdf-viewer';
-
-export type LoadablePDFCard = PDFViewerProps & {
-  className?: string;
-  pdfClassName?: string;
-};
-
-export const LoadablePDFCard = ({ className, pdfClassName, ...props }: LoadablePDFCard) => {
-  return (
-    <Card className={className} gradient {...props}>
-      <CardContent className="p-2">
-        <LazyPDFViewer className={pdfClassName} {...props} />
-      </CardContent>
-    </Card>
-  );
-};
@@ -7,6 +7,7 @@ import { getRequiredServerComponentSession } from '@documenso/lib/next-auth/get-
 import { getDocumentById } from '@documenso/lib/server-only/document/get-document-by-id';
 import { getFieldsForDocument } from '@documenso/lib/server-only/field/get-fields-for-document';
 import { getRecipientsForDocument } from '@documenso/lib/server-only/recipient/get-recipients-for-document';
+import { getFile } from '@documenso/lib/universal/upload/get-file';
 import { DocumentStatus as InternalDocumentStatus } from '@documenso/prisma/client';
 import { LazyPDFViewer } from '@documenso/ui/primitives/lazy-pdf-viewer';
 
@@ -36,10 +37,16 @@ export default async function DocumentPage({ params }: DocumentPageProps) {
     userId: session.id,
   }).catch(() => null);
 
-  if (!document) {
+  if (!document || !document.documentData) {
     redirect('/documents');
   }
 
+  const { documentData } = document;
+
+  const documentDataUrl = await getFile(documentData)
+    .then((buffer) => Buffer.from(buffer).toString('base64'))
+    .then((data) => `data:application/pdf;base64,${data}`);
+
   const [recipients, fields] = await Promise.all([
     await getRecipientsForDocument({
       documentId,
@@ -86,12 +93,13 @@ export default async function DocumentPage({ params }: DocumentPageProps) {
             user={session}
             recipients={recipients}
             fields={fields}
+            dataUrl={documentDataUrl}
           />
         )}
 
         {document.status === InternalDocumentStatus.COMPLETED && (
           <div className="mx-auto mt-12 max-w-2xl">
-            <LazyPDFViewer document={`data:application/pdf;base64,${document.document}`} />
+            <LazyPDFViewer document={documentDataUrl} />
           </div>
         )}
       </div>
@@ -15,7 +15,10 @@ import {
 } from 'lucide-react';
 import { useSession } from 'next-auth/react';
 
+import { getFile } from '@documenso/lib/universal/upload/get-file';
 import { Document, DocumentStatus, Recipient, User } from '@documenso/prisma/client';
+import { DocumentWithData } from '@documenso/prisma/types/document-with-data';
+import { trpc } from '@documenso/trpc/client';
 import {
   DropdownMenu,
   DropdownMenuContent,
@@ -47,17 +50,26 @@ export const DataTableActionDropdown = ({ row }: DataTableActionDropdownProps) =
   const isComplete = row.status === DocumentStatus.COMPLETED;
   // const isSigned = recipient?.signingStatus === SigningStatus.SIGNED;
 
-  const onDownloadClick = () => {
-    let decodedDocument = row.document;
+  const onDownloadClick = async () => {
+    let document: DocumentWithData | null = null;
 
-    try {
-      decodedDocument = atob(decodedDocument);
-    } catch (err) {
-      // We're just going to ignore this error and try to download the document
-      console.error(err);
+    if (!recipient) {
+      document = await trpc.document.getDocumentById.query({
+        id: row.id,
+      });
+    } else {
+      document = await trpc.document.getDocumentByToken.query({
+        token: recipient.token,
+      });
     }
 
-    const documentBytes = Uint8Array.from(decodedDocument.split('').map((c) => c.charCodeAt(0)));
+    const documentData = document?.documentData;
 
+    if (!documentData) {
+      return;
+    }
+
+    const documentBytes = await getFile(documentData);
+
     const blob = new Blob([documentBytes], {
       type: 'application/pdf',
@@ -53,8 +53,8 @@ export const DocumentsDataTable = ({ results }: DocumentsDataTableProps) => {
       columns={[
         {
           header: 'Created',
-          accessorKey: 'created',
-          cell: ({ row }) => <LocaleDate date={row.getValue('created')} />,
+          accessorKey: 'createdAt',
+          cell: ({ row }) => <LocaleDate date={row.original.createdAt} />,
         },
         {
           header: 'Title',
@@ -39,7 +39,7 @@ export default async function DocumentsPage({ searchParams = {} }: DocumentsPage
     userId: user.id,
     status,
     orderBy: {
-      column: 'created',
+      column: 'createdAt',
       direction: 'desc',
     },
     page,
@@ -1,29 +1,45 @@
 'use client';
 
+import { useState } from 'react';
+
 import { useRouter } from 'next/navigation';
 
 import { Loader } from 'lucide-react';
 
+import { createDocumentData } from '@documenso/lib/server-only/document-data/create-document-data';
+import { putFile } from '@documenso/lib/universal/upload/put-file';
+import { trpc } from '@documenso/trpc/react';
 import { cn } from '@documenso/ui/lib/utils';
 import { DocumentDropzone } from '@documenso/ui/primitives/document-dropzone';
 import { useToast } from '@documenso/ui/primitives/use-toast';
 
-import { useCreateDocument } from '~/api/document/create/fetcher';
-
 export type UploadDocumentProps = {
   className?: string;
 };
 
 export const UploadDocument = ({ className }: UploadDocumentProps) => {
-  const { toast } = useToast();
   const router = useRouter();
 
-  const { isLoading, mutateAsync: createDocument } = useCreateDocument();
+  const { toast } = useToast();
+
+  const [isLoading, setIsLoading] = useState(false);
+
+  const { mutateAsync: createDocument } = trpc.document.createDocument.useMutation();
 
   const onFileDrop = async (file: File) => {
     try {
+      setIsLoading(true);
+
+      const { type, data } = await putFile(file);
+
+      const { id: documentDataId } = await createDocumentData({
+        type,
+        data,
+      });
+
       const { id } = await createDocument({
-        file: file,
+        title: file.name,
+        documentDataId,
       });
 
       toast({
@@ -41,6 +57,8 @@ export const UploadDocument = ({ className }: UploadDocumentProps) => {
         description: 'An error occurred while uploading your document.',
         variant: 'destructive',
       });
+    } finally {
+      setIsLoading(false);
     }
   };
 
@@ -21,19 +21,21 @@ export default async function BillingSettingsPage() {
     redirect('/settings/profile');
   }
 
-  let subscription = await getSubscriptionByUserId({ userId: user.id });
+  const subscription = await getSubscriptionByUserId({ userId: user.id }).then(async (sub) => {
+    if (sub) {
+      return sub;
+    }
 
   // If we don't have a customer record, create one as well as an empty subscription.
-  if (!subscription?.customerId) {
-    subscription = await createCustomer({ user });
-  }
+    return createCustomer({ user });
+  });
 
   let billingPortalUrl = '';
 
-  if (subscription?.customerId) {
+  if (subscription.customerId) {
     billingPortalUrl = await getPortalSession({
       customerId: subscription.customerId,
-      returnUrl: `${process.env.NEXT_PUBLIC_SITE_URL}/settings/billing`,
+      returnUrl: `${process.env.NEXT_PUBLIC_WEBAPP_URL}/settings/billing`,
     });
   }
 
@@ -1,55 +1,64 @@
 'use client';
 
-import { HTMLAttributes } from 'react';
+import { HTMLAttributes, useState } from 'react';
 
 import { Download } from 'lucide-react';
 
+import { getFile } from '@documenso/lib/universal/upload/get-file';
+import { DocumentData } from '@documenso/prisma/client';
 import { Button } from '@documenso/ui/primitives/button';
+import { useToast } from '@documenso/ui/primitives/use-toast';
 
 export type DownloadButtonProps = HTMLAttributes<HTMLButtonElement> & {
   disabled?: boolean;
   fileName?: string;
-  document?: string;
+  documentData?: DocumentData;
 };
 
 export const DownloadButton = ({
   className,
   fileName,
-  document,
+  documentData,
   disabled,
   ...props
 }: DownloadButtonProps) => {
-  /**
-   * Convert the document from base64 to a blob and download it.
-   */
-  const onDownloadClick = () => {
-    if (!document) {
-      return;
-    }
+  const { toast } = useToast();
 
-    let decodedDocument = document;
+  const [isLoading, setIsLoading] = useState(false);
 
+  const onDownloadClick = async () => {
     try {
-      decodedDocument = atob(document);
+      setIsLoading(true);
+
+      if (!documentData) {
+        return;
+      }
+
+      const bytes = await getFile(documentData);
+
+      const blob = new Blob([bytes], {
+        type: 'application/pdf',
+      });
+
+      const link = window.document.createElement('a');
+
+      link.href = window.URL.createObjectURL(blob);
+      link.download = fileName || 'document.pdf';
+
+      link.click();
+
+      window.URL.revokeObjectURL(link.href);
     } catch (err) {
-      // We're just going to ignore this error and try to download the document
       console.error(err);
+
+      toast({
+        title: 'Error',
+        description: 'An error occurred while downloading your document.',
+        variant: 'destructive',
+      });
+    } finally {
+      setIsLoading(false);
     }
-
-    const documentBytes = Uint8Array.from(decodedDocument.split('').map((c) => c.charCodeAt(0)));
-
-    const blob = new Blob([documentBytes], {
-      type: 'application/pdf',
-    });
-
-    const link = window.document.createElement('a');
-
-    link.href = window.URL.createObjectURL(blob);
-    link.download = fileName || 'document.pdf';
-
-    link.click();
-
-    window.URL.revokeObjectURL(link.href);
   };
 
   return (
@@ -57,8 +66,9 @@ export const DownloadButton = ({
       type="button"
       variant="outline"
       className={className}
-      disabled={disabled || !document}
+      disabled={disabled || !documentData}
       onClick={onDownloadClick}
+      loading={isLoading}
       {...props}
     >
       <Download className="mr-2 h-5 w-5" />
@@ -30,15 +30,21 @@ export default async function CompletedSigningPage({
     token,
   }).catch(() => null);
 
-  if (!document) {
+  if (!document || !document.documentData) {
     return notFound();
   }
 
+  const { documentData } = document;
+
   const [fields, recipient] = await Promise.all([
     getFieldsForToken({ token }),
-    getRecipientByToken({ token }),
+    getRecipientByToken({ token }).catch(() => null),
   ]);
 
+  if (!recipient) {
+    return notFound();
+  }
+
   const recipientName =
     recipient.name ||
     fields.find((field) => field.type === FieldType.NAME)?.customText ||
@@ -91,7 +97,7 @@ export default async function CompletedSigningPage({
           <DownloadButton
             className="flex-1"
             fileName={document.title}
-            document={document.status === DocumentStatus.COMPLETED ? document.document : undefined}
+            documentData={documentData}
             disabled={document.status !== DocumentStatus.COMPLETED}
           />
         </div>
@@ -8,6 +8,7 @@ import { getDocumentAndSenderByToken } from '@documenso/lib/server-only/document
 import { viewedDocument } from '@documenso/lib/server-only/document/viewed-document';
 import { getFieldsForToken } from '@documenso/lib/server-only/field/get-fields-for-token';
 import { getRecipientByToken } from '@documenso/lib/server-only/recipient/get-recipient-by-token';
+import { getFile } from '@documenso/lib/universal/upload/get-file';
 import { FieldType } from '@documenso/prisma/client';
 import { Card, CardContent } from '@documenso/ui/primitives/card';
 import { ElementVisible } from '@documenso/ui/primitives/element-visible';
@@ -36,17 +37,21 @@ export default async function SigningPage({ params: { token } }: SigningPageProp
       token,
     }).catch(() => null),
     getFieldsForToken({ token }),
-    getRecipientByToken({ token }),
+    getRecipientByToken({ token }).catch(() => null),
     viewedDocument({ token }),
   ]);
 
-  if (!document) {
+  if (!document || !document.documentData || !recipient) {
     return notFound();
   }
 
-  const user = await getServerComponentSession();
+  const { documentData } = document;
 
-  const documentUrl = `data:application/pdf;base64,${document.document}`;
+  const documentDataUrl = await getFile(documentData)
+    .then((buffer) => Buffer.from(buffer).toString('base64'))
+    .then((data) => `data:application/pdf;base64,${data}`);
+
+  const user = await getServerComponentSession();
 
   return (
     <SigningProvider email={recipient.email} fullName={recipient.name} signature={user?.signature}>
@@ -67,7 +72,7 @@ export default async function SigningPage({ params: { token } }: SigningPageProp
             gradient
           >
             <CardContent className="p-2">
-              <LazyPDFViewer document={documentUrl} />
+              <LazyPDFViewer document={documentDataUrl} />
             </CardContent>
           </Card>
 
@@ -33,12 +33,12 @@ export const metadata = {
     description:
       'Join Documenso, the open signing infrastructure, and get a 10x better signing experience. Pricing starts at $30/mo. forever! Sign in now and enjoy a faster, smarter, and more beautiful document signing process. Integrates with your favorite tools, customizable, and expandable. Support our mission and become a part of our open-source community.',
     type: 'website',
-    images: [`${process.env.NEXT_PUBLIC_SITE_URL}/opengraph-image.jpg`],
+    images: [`${process.env.NEXT_PUBLIC_WEBAPP_URL}/opengraph-image.jpg`],
   },
   twitter: {
     site: '@documenso',
     card: 'summary_large_image',
-    images: [`${process.env.NEXT_PUBLIC_SITE_URL}/opengraph-image.jpg`],
+    images: [`${process.env.NEXT_PUBLIC_WEBAPP_URL}/opengraph-image.jpg`],
     description:
       'Join Documenso, the open signing infrastructure, and get a 10x better signing experience. Pricing starts at $30/mo. forever! Sign in now and enjoy a faster, smarter, and more beautiful document signing process. Integrates with your favorite tools, customizable, and expandable. Support our mission and become a part of our open-source community.',
   },
@@ -15,7 +15,7 @@ export type StackAvatarProps = {
   type: 'unsigned' | 'waiting' | 'opened' | 'completed';
 };
 
-export const StackAvatar = ({ first, zIndex, fallbackText, type }: StackAvatarProps) => {
+export const StackAvatar = ({ first, zIndex, fallbackText = '', type }: StackAvatarProps) => {
   let classes = '';
   let zIndexClass = '';
   const firstClass = first ? '' : '-ml-3';
@@ -48,7 +48,7 @@ export const StackAvatar = ({ first, zIndex, fallbackText, type }: StackAvatarPr
         ${firstClass}
         dark:border-border h-10 w-10 border-2 border-solid border-white`}
     >
-      <AvatarFallback className={classes}>{fallbackText ?? 'UK'}</AvatarFallback>
+      <AvatarFallback className={classes}>{fallbackText}</AvatarFallback>
     </Avatar>
   );
 };
@@ -1,5 +1,5 @@
-import { initials } from '@documenso/lib/client-only/recipient-initials';
 import { getRecipientType } from '@documenso/lib/client-only/recipient-type';
+import { recipientAbbreviation } from '@documenso/lib/utils/recipient-formatter';
 import { Recipient } from '@documenso/prisma/client';
 import {
   Tooltip,
@@ -56,7 +56,7 @@ export const StackAvatarsWithTooltip = ({
             first={true}
             key={recipient.id}
             type={getRecipientType(recipient)}
-            fallbackText={initials(recipient.name)}
+            fallbackText={recipientAbbreviation(recipient)}
           />
           <span className="text-sm text-gray-500">{recipient.email}</span>
         </div>
@@ -73,7 +73,7 @@ export const StackAvatarsWithTooltip = ({
             first={true}
             key={recipient.id}
             type={getRecipientType(recipient)}
-            fallbackText={initials(recipient.name)}
+            fallbackText={recipientAbbreviation(recipient)}
           />
           <span className="text-sm text-gray-500">{recipient.email}</span>
         </div>
@@ -90,7 +90,7 @@ export const StackAvatarsWithTooltip = ({
             first={true}
             key={recipient.id}
             type={getRecipientType(recipient)}
-            fallbackText={initials(recipient.name)}
+            fallbackText={recipientAbbreviation(recipient)}
           />
           <span className="text-sm text-gray-500">{recipient.email}</span>
         </div>
@@ -107,7 +107,7 @@ export const StackAvatarsWithTooltip = ({
             first={true}
             key={recipient.id}
             type={getRecipientType(recipient)}
-            fallbackText={initials(recipient.name)}
+            fallbackText={recipientAbbreviation(recipient)}
           />
           <span className="text-sm text-gray-500">{recipient.email}</span>
         </div>
@@ -1,7 +1,7 @@
 import React from 'react';
 
-import { initials } from '@documenso/lib/client-only/recipient-initials';
 import { getRecipientType } from '@documenso/lib/client-only/recipient-type';
+import { recipientAbbreviation } from '@documenso/lib/utils/recipient-formatter';
 import { Recipient } from '@documenso/prisma/client';
 
 import { StackAvatar } from './stack-avatar';
@@ -26,7 +26,7 @@ export function StackAvatars({ recipients }: { recipients: Recipient[] }) {
         first={first}
         zIndex={String(zIndex - index * 10)}
         type={lastItemText && index === 4 ? 'unsigned' : getRecipientType(recipient)}
-        fallbackText={lastItemText ? lastItemText : initials(recipient.name)}
+        fallbackText={lastItemText ? lastItemText : recipientAbbreviation(recipient)}
       />
     );
   });
@@ -1,6 +1,6 @@
 'use client';
 
-import { HTMLAttributes } from 'react';
+import { HTMLAttributes, useEffect, useState } from 'react';
 
 import Link from 'next/link';
 
@@ -17,10 +17,23 @@ export type HeaderProps = HTMLAttributes<HTMLDivElement> & {
 };
 
 export const Header = ({ className, user, ...props }: HeaderProps) => {
+  const [scrollY, setScrollY] = useState(0);
+
+  useEffect(() => {
+    const onScroll = () => {
+      setScrollY(window.scrollY);
+    };
+
+    window.addEventListener('scroll', onScroll);
+
+    return () => window.removeEventListener('scroll', onScroll);
+  }, []);
+
   return (
     <header
       className={cn(
-        'supports-backdrop-blur:bg-background/60 bg-background/95 sticky top-0 z-50 flex h-16 w-full items-center border-b backdrop-blur',
+        'supports-backdrop-blur:bg-background/60 bg-background/95 sticky top-0 z-50 flex h-16 w-full items-center border-b border-b-transparent backdrop-blur duration-200',
+        scrollY > 5 && 'border-b-border',
         className,
       )}
       {...props}
@@ -11,10 +11,13 @@ import {
   Monitor,
   Moon,
   Sun,
+  UserCog,
 } from 'lucide-react';
 import { signOut } from 'next-auth/react';
 import { useTheme } from 'next-themes';
 
+import { isAdmin } from '@documenso/lib/next-auth/guards/is-admin';
+import { recipientInitials } from '@documenso/lib/utils/recipient-formatter';
 import { User } from '@documenso/prisma/client';
 import { Avatar, AvatarFallback } from '@documenso/ui/primitives/avatar';
 import { Button } from '@documenso/ui/primitives/button';
@@ -35,24 +38,21 @@ export type ProfileDropdownProps = {
 
 export const ProfileDropdown = ({ user }: ProfileDropdownProps) => {
   const { theme, setTheme } = useTheme();
 
   const { getFlag } = useFeatureFlags();
+  const isUserAdmin = isAdmin(user);
 
   const isBillingEnabled = getFlag('app_billing');
 
-  const initials =
-    user.name
-      ?.split(' ')
-      .map((name: string) => name.slice(0, 1).toUpperCase())
-      .slice(0, 2)
-      .join('') ?? 'UK';
+  const avatarFallback = user.name
+    ? recipientInitials(user.name)
+    : user.email.slice(0, 1).toUpperCase();
 
   return (
     <DropdownMenu>
       <DropdownMenuTrigger asChild>
         <Button variant="ghost" className="relative h-10 w-10 rounded-full">
           <Avatar className="h-10 w-10">
-            <AvatarFallback>{initials}</AvatarFallback>
+            <AvatarFallback>{avatarFallback}</AvatarFallback>
           </Avatar>
         </Button>
       </DropdownMenuTrigger>
@@ -60,6 +60,19 @@ export const ProfileDropdown = ({ user }: ProfileDropdownProps) => {
       <DropdownMenuContent className="w-56" align="end" forceMount>
         <DropdownMenuLabel>Account</DropdownMenuLabel>
 
+        {isUserAdmin && (
+          <>
+            <DropdownMenuItem asChild>
+              <Link href="/admin" className="cursor-pointer">
+                <UserCog className="mr-2 h-4 w-4" />
+                Admin
+              </Link>
+            </DropdownMenuItem>
+
+            <DropdownMenuSeparator />
+          </>
+        )}
+
         <DropdownMenuItem asChild>
           <Link href="/settings/profile" className="cursor-pointer">
             <LucideUser className="mr-2 h-4 w-4" />
@@ -18,10 +18,10 @@ export const CardMetric = ({ icon: Icon, title, value, className }: CardMetricPr
       )}
     >
       <div className="px-4 pb-6 pt-4 sm:px-4 sm:pb-8 sm:pt-4">
-        <div className="flex items-start">
-          {Icon && <Icon className="mr-2 h-4 w-4 text-slate-500" />}
+        <div className="flex items-center">
+          {Icon && <Icon className="text-muted-foreground mr-2 h-4 w-4" />}
 
-          <h3 className="flex items-end text-sm font-medium text-slate-500">{title}</h3>
+          <h3 className="text-primary-forground flex items-end text-sm font-medium">{title}</h3>
         </div>
 
         <p className="text-foreground mt-6 text-4xl font-semibold leading-8 md:mt-8">
@@ -21,7 +21,7 @@ export const getFlag = async (
     return LOCAL_FEATURE_FLAGS[flag] ?? true;
   }
 
-  const url = new URL(`${process.env.NEXT_PUBLIC_SITE_URL}/api/feature-flag/get`);
+  const url = new URL(`${process.env.NEXT_PUBLIC_WEBAPP_URL}/api/feature-flag/get`);
   url.searchParams.set('flag', flag);
 
   const response = await fetch(url, {
@@ -54,7 +54,7 @@ export const getAllFlags = async (
     return LOCAL_FEATURE_FLAGS;
   }
 
-  const url = new URL(`${process.env.NEXT_PUBLIC_SITE_URL}/api/feature-flag/all`);
+  const url = new URL(`${process.env.NEXT_PUBLIC_WEBAPP_URL}/api/feature-flag/all`);
 
   return fetch(url, {
     headers: {
@@ -43,7 +43,7 @@ export default async function handler(
 
   if (user && user.Subscription.length > 0) {
     return res.status(200).json({
-      redirectUrl: `${process.env.NEXT_PUBLIC_APP_URL}/login`,
+      redirectUrl: `${process.env.NEXT_PUBLIC_WEBAPP_URL}/login`,
     });
   }
 
@@ -103,8 +103,8 @@ export default async function handler(
     mode: 'subscription',
     metadata,
     allow_promotion_codes: true,
-    success_url: `${process.env.NEXT_PUBLIC_SITE_URL}/claimed?sessionId={CHECKOUT_SESSION_ID}`,
-    cancel_url: `${process.env.NEXT_PUBLIC_SITE_URL}/pricing?email=${encodeURIComponent(
+    success_url: `${process.env.NEXT_PUBLIC_MARKETING_URL}/claimed?sessionId={CHECKOUT_SESSION_ID}`,
+    cancel_url: `${process.env.NEXT_PUBLIC_MARKETING_URL}/pricing?email=${encodeURIComponent(
       email,
     )}&name=${encodeURIComponent(name)}&planId=${planId}&cancelled=true`,
   });
@@ -1,88 +0,0 @@
-import { NextApiRequest, NextApiResponse } from 'next';
-
-import formidable, { type File } from 'formidable';
-import { readFileSync } from 'fs';
-
-import { getServerSession } from '@documenso/lib/next-auth/get-server-session';
-import { prisma } from '@documenso/prisma';
-import { DocumentStatus } from '@documenso/prisma/client';
-
-import {
-  TCreateDocumentRequestSchema,
-  TCreateDocumentResponseSchema,
-} from '~/api/document/create/types';
-
-export const config = {
-  api: {
-    bodyParser: false,
-  },
-};
-
-export type TFormidableCreateDocumentRequestSchema = {
-  file: File;
-};
-
-export default async function handler(
-  req: NextApiRequest,
-  res: NextApiResponse<TCreateDocumentResponseSchema>,
-) {
-  const user = await getServerSession({ req, res });
-
-  if (!user) {
-    return res.status(401).json({
-      error: 'Unauthorized',
-    });
-  }
-
-  try {
-    const form = formidable();
-
-    const { file } = await new Promise<TFormidableCreateDocumentRequestSchema>(
-      (resolve, reject) => {
-        form.parse(req, (err, fields, files) => {
-          if (err) {
-            reject(err);
-          }
-
-          // We had intended to do this with Zod but we can only validate it
-          // as a persistent file which does not include the properties that we
-          // need.
-          // eslint-disable-next-line @typescript-eslint/consistent-type-assertions, @typescript-eslint/no-explicit-any
-          resolve({ ...fields, ...files } as any);
-        });
-      },
-    );
-
-    const fileBuffer = readFileSync(file.filepath);
-
-    const document = await prisma.document.create({
-      data: {
-        title: file.originalFilename ?? file.newFilename,
-        status: DocumentStatus.DRAFT,
-        userId: user.id,
-        document: fileBuffer.toString('base64'),
-        created: new Date(),
-      },
-    });
-
-    return res.status(200).json({
-      id: document.id,
-    });
-  } catch (err) {
-    console.error(err);
-
-    return res.status(500).json({
-      error: 'Internal server error',
-    });
-  }
-}
-
-/**
- * This is a hack to ensure that the types are correct.
- */
-type FormidableSatisfiesCreateDocument =
-  keyof TCreateDocumentRequestSchema extends keyof TFormidableCreateDocumentRequestSchema
-    ? true
-    : never;
-
-true satisfies FormidableSatisfiesCreateDocument;
@@ -1,9 +1,9 @@
 import { NextRequest, NextResponse } from 'next/server';
 
-import { nanoid } from 'nanoid';
 import { JWT, getToken } from 'next-auth/jwt';
 
 import { LOCAL_FEATURE_FLAGS, extractPostHogConfig } from '@documenso/lib/constants/feature-flags';
+import { nanoid } from '@documenso/lib/universal/id';
 
 import PostHogServerClient from '~/helpers/get-post-hog-server-client';
 
@@ -10,6 +10,7 @@ import { redis } from '@documenso/lib/server-only/redis';
 import { Stripe, stripe } from '@documenso/lib/server-only/stripe';
 import { prisma } from '@documenso/prisma';
 import {
+  DocumentDataType,
   DocumentStatus,
   FieldType,
   ReadStatus,
@@ -85,16 +86,34 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
 
   const now = new Date();
 
+  const bytes64 = readFileSync('./public/documenso-supporter-pledge.pdf').toString('base64');
+
+  const { id: documentDataId } = await prisma.documentData.create({
+    data: {
+      type: DocumentDataType.BYTES_64,
+      data: bytes64,
+      initialData: bytes64,
+    },
+  });
+
   const document = await prisma.document.create({
     data: {
       title: 'Documenso Supporter Pledge.pdf',
       status: DocumentStatus.COMPLETED,
       userId: user.id,
-      document: readFileSync('./public/documenso-supporter-pledge.pdf').toString('base64'),
-      created: now,
+      documentDataId,
     },
+    include: {
+      documentData: true,
+    },
   });
 
+  const { documentData } = document;
+
+  if (!documentData) {
+    throw new Error(`Document ${document.id} has no document data`);
+  }
+
   const recipient = await prisma.recipient.create({
     data: {
       name: user.name ?? '',
@ -122,16 +141,16 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
|
|||||||
});
|
});
|
||||||
|
|
||||||
if (signatureDataUrl) {
|
if (signatureDataUrl) {
|
||||||
document.document = await insertImageInPDF(
|
documentData.data = await insertImageInPDF(
|
||||||
document.document,
|
documentData.data,
|
||||||
signatureDataUrl,
|
signatureDataUrl,
|
||||||
field.positionX.toNumber(),
|
field.positionX.toNumber(),
|
||||||
field.positionY.toNumber(),
|
field.positionY.toNumber(),
|
||||||
field.page,
|
field.page,
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
document.document = await insertTextInPDF(
|
documentData.data = await insertTextInPDF(
|
||||||
document.document,
|
documentData.data,
|
||||||
signatureText ?? '',
|
signatureText ?? '',
|
||||||
field.positionX.toNumber(),
|
field.positionX.toNumber(),
|
||||||
field.positionY.toNumber(),
|
field.positionY.toNumber(),
|
||||||
@ -153,7 +172,11 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
|
|||||||
id: document.id,
|
id: document.id,
|
||||||
},
|
},
|
||||||
data: {
|
data: {
|
||||||
document: document.document,
|
documentData: {
|
||||||
|
update: {
|
||||||
|
data: documentData.data,
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
]);
|
]);
|
||||||
|
|||||||
BIN assets/example.pdf (new file): binary file not shown.
1844 package-lock.json (generated): file diff suppressed because it is too large.
@ -16,13 +16,12 @@
|
|||||||
"worker:test": "tsup worker/index.ts --format esm"
|
"worker:test": "tsup worker/index.ts --format esm"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@documenso/tsconfig": "*",
|
|
||||||
"@documenso/tailwind-config": "*",
|
|
||||||
"@documenso/ui": "*",
|
|
||||||
"@react-email/components": "^0.0.7",
|
"@react-email/components": "^0.0.7",
|
||||||
"nodemailer": "^6.9.3"
|
"nodemailer": "^6.9.3"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
"@documenso/tsconfig": "*",
|
||||||
|
"@documenso/tailwind-config": "*",
|
||||||
"@types/nodemailer": "^6.4.8",
|
"@types/nodemailer": "^6.4.8",
|
||||||
"tsup": "^7.1.0"
|
"tsup": "^7.1.0"
|
||||||
}
|
}
|
||||||
|
|||||||
@ -4,8 +4,5 @@ const path = require('path');
|
|||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
...baseConfig,
|
...baseConfig,
|
||||||
content: [
|
content: [`templates/**/*.{ts,tsx}`],
|
||||||
`templates/**/*.{ts,tsx}`,
|
|
||||||
`${path.join(require.resolve('@documenso/ui'), '..')}/**/*.{ts,tsx}`,
|
|
||||||
],
|
|
||||||
};
|
};
|
||||||
|
|||||||
@ -1,6 +0,0 @@
|
|||||||
export const initials = (text: string) =>
|
|
||||||
text
|
|
||||||
?.split(' ')
|
|
||||||
.map((name: string) => name.slice(0, 1).toUpperCase())
|
|
||||||
.slice(0, 2)
|
|
||||||
.join('') ?? 'UK';
|
|
||||||
5 packages/lib/constants/time.ts (new file)
@ -0,0 +1,5 @@
export const ONE_SECOND = 1000;
export const ONE_MINUTE = ONE_SECOND * 60;
export const ONE_HOUR = ONE_MINUTE * 60;
export const ONE_DAY = ONE_HOUR * 24;
export const ONE_WEEK = ONE_DAY * 7;
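As a side note, these constants are expressed in milliseconds, so code that needs seconds (for example the presigned-URL expiry in server-actions.ts further down in this diff) divides by ONE_SECOND. A minimal illustration, not part of the diff; the '@documenso/lib' alias path is assumed:

import { ONE_HOUR, ONE_SECOND } from '@documenso/lib/constants/time';

// ONE_HOUR is 3_600_000 ms; dividing by ONE_SECOND yields 3600 seconds,
// which is the unit the AWS SDK expects for `expiresIn`.
const expiresInSeconds = ONE_HOUR / ONE_SECOND; // 3600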
5 packages/lib/next-auth/guards/is-admin.ts (new file)
@ -0,0 +1,5 @@
import { Role, User } from '@documenso/prisma/client';

const isAdmin = (user: User) => user.roles.includes(Role.ADMIN);

export { isAdmin };
@ -12,10 +12,15 @@
|
|||||||
],
|
],
|
||||||
"scripts": {},
|
"scripts": {},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"@aws-sdk/client-s3": "^3.410.0",
|
||||||
|
"@aws-sdk/s3-request-presigner": "^3.410.0",
|
||||||
|
"@aws-sdk/signature-v4-crt": "^3.410.0",
|
||||||
"@documenso/email": "*",
|
"@documenso/email": "*",
|
||||||
"@documenso/prisma": "*",
|
"@documenso/prisma": "*",
|
||||||
"@next-auth/prisma-adapter": "1.0.7",
|
"@next-auth/prisma-adapter": "1.0.7",
|
||||||
"@pdf-lib/fontkit": "^1.1.1",
|
"@pdf-lib/fontkit": "^1.1.1",
|
||||||
|
"@scure/base": "^1.1.3",
|
||||||
|
"@sindresorhus/slugify": "^2.2.1",
|
||||||
"@upstash/redis": "^1.20.6",
|
"@upstash/redis": "^1.20.6",
|
||||||
"bcrypt": "^5.1.0",
|
"bcrypt": "^5.1.0",
|
||||||
"luxon": "^3.4.0",
|
"luxon": "^3.4.0",
|
||||||
|
|||||||
26
packages/lib/server-only/admin/get-documents-stats.ts
Normal file
26
packages/lib/server-only/admin/get-documents-stats.ts
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
import { prisma } from '@documenso/prisma';
|
||||||
|
import { ExtendedDocumentStatus } from '@documenso/prisma/types/extended-document-status';
|
||||||
|
|
||||||
|
export const getDocumentStats = async () => {
|
||||||
|
const counts = await prisma.document.groupBy({
|
||||||
|
by: ['status'],
|
||||||
|
_count: {
|
||||||
|
_all: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const stats: Record<Exclude<ExtendedDocumentStatus, 'INBOX'>, number> = {
|
||||||
|
[ExtendedDocumentStatus.DRAFT]: 0,
|
||||||
|
[ExtendedDocumentStatus.PENDING]: 0,
|
||||||
|
[ExtendedDocumentStatus.COMPLETED]: 0,
|
||||||
|
[ExtendedDocumentStatus.ALL]: 0,
|
||||||
|
};
|
||||||
|
|
||||||
|
counts.forEach((stat) => {
|
||||||
|
stats[stat.status] = stat._count._all;
|
||||||
|
|
||||||
|
stats.ALL += stat._count._all;
|
||||||
|
});
|
||||||
|
|
||||||
|
return stats;
|
||||||
|
};
|
||||||
29
packages/lib/server-only/admin/get-recipients-stats.ts
Normal file
29
packages/lib/server-only/admin/get-recipients-stats.ts
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
import { prisma } from '@documenso/prisma';
|
||||||
|
import { ReadStatus, SendStatus, SigningStatus } from '@documenso/prisma/client';
|
||||||
|
|
||||||
|
export const getRecipientsStats = async () => {
|
||||||
|
const results = await prisma.recipient.groupBy({
|
||||||
|
by: ['readStatus', 'signingStatus', 'sendStatus'],
|
||||||
|
_count: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
const stats = {
|
||||||
|
TOTAL_RECIPIENTS: 0,
|
||||||
|
[ReadStatus.OPENED]: 0,
|
||||||
|
[ReadStatus.NOT_OPENED]: 0,
|
||||||
|
[SigningStatus.SIGNED]: 0,
|
||||||
|
[SigningStatus.NOT_SIGNED]: 0,
|
||||||
|
[SendStatus.SENT]: 0,
|
||||||
|
[SendStatus.NOT_SENT]: 0,
|
||||||
|
};
|
||||||
|
|
||||||
|
results.forEach((result) => {
|
||||||
|
const { readStatus, signingStatus, sendStatus, _count } = result;
|
||||||
|
stats[readStatus] += _count;
|
||||||
|
stats[signingStatus] += _count;
|
||||||
|
stats[sendStatus] += _count;
|
||||||
|
stats.TOTAL_RECIPIENTS += _count;
|
||||||
|
});
|
||||||
|
|
||||||
|
return stats;
|
||||||
|
};
|
||||||
18
packages/lib/server-only/admin/get-users-stats.ts
Normal file
18
packages/lib/server-only/admin/get-users-stats.ts
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
import { prisma } from '@documenso/prisma';
|
||||||
|
import { SubscriptionStatus } from '@documenso/prisma/client';
|
||||||
|
|
||||||
|
export const getUsersCount = async () => {
|
||||||
|
return await prisma.user.count();
|
||||||
|
};
|
||||||
|
|
||||||
|
export const getUsersWithSubscriptionsCount = async () => {
|
||||||
|
return await prisma.user.count({
|
||||||
|
where: {
|
||||||
|
Subscription: {
|
||||||
|
some: {
|
||||||
|
status: SubscriptionStatus.ACTIVE,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
};
|
||||||
@ -0,0 +1,19 @@
|
|||||||
|
'use server';
|
||||||
|
|
||||||
|
import { prisma } from '@documenso/prisma';
|
||||||
|
import { DocumentDataType } from '@documenso/prisma/client';
|
||||||
|
|
||||||
|
export type CreateDocumentDataOptions = {
|
||||||
|
type: DocumentDataType;
|
||||||
|
data: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const createDocumentData = async ({ type, data }: CreateDocumentDataOptions) => {
|
||||||
|
return await prisma.documentData.create({
|
||||||
|
data: {
|
||||||
|
type,
|
||||||
|
data,
|
||||||
|
initialData: data,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
};
|
||||||
19
packages/lib/server-only/document/create-document.ts
Normal file
19
packages/lib/server-only/document/create-document.ts
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
'use server';
|
||||||
|
|
||||||
|
import { prisma } from '@documenso/prisma';
|
||||||
|
|
||||||
|
export type CreateDocumentOptions = {
|
||||||
|
title: string;
|
||||||
|
userId: number;
|
||||||
|
documentDataId: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const createDocument = async ({ userId, title, documentDataId }: CreateDocumentOptions) => {
|
||||||
|
return await prisma.document.create({
|
||||||
|
data: {
|
||||||
|
title,
|
||||||
|
documentDataId,
|
||||||
|
userId,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
};
|
||||||
@ -32,7 +32,7 @@ export const findDocuments = async ({
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
const orderByColumn = orderBy?.column ?? 'created';
|
const orderByColumn = orderBy?.column ?? 'createdAt';
|
||||||
const orderByDirection = orderBy?.direction ?? 'desc';
|
const orderByDirection = orderBy?.direction ?? 'desc';
|
||||||
|
|
||||||
const termFilters = !term
|
const termFilters = !term
|
||||||
|
|||||||
@ -11,5 +11,8 @@ export const getDocumentById = async ({ id, userId }: GetDocumentByIdOptions) =>
|
|||||||
id,
|
id,
|
||||||
userId,
|
userId,
|
||||||
},
|
},
|
||||||
|
include: {
|
||||||
|
documentData: true,
|
||||||
|
},
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|||||||
@ -17,6 +17,7 @@ export const getDocumentAndSenderByToken = async ({
|
|||||||
},
|
},
|
||||||
include: {
|
include: {
|
||||||
User: true,
|
User: true,
|
||||||
|
documentData: true,
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@ -1,10 +1,13 @@
|
|||||||
'use server';
|
'use server';
|
||||||
|
|
||||||
|
import path from 'node:path';
|
||||||
import { PDFDocument } from 'pdf-lib';
|
import { PDFDocument } from 'pdf-lib';
|
||||||
|
|
||||||
import { prisma } from '@documenso/prisma';
|
import { prisma } from '@documenso/prisma';
|
||||||
import { DocumentStatus, SigningStatus } from '@documenso/prisma/client';
|
import { DocumentStatus, SigningStatus } from '@documenso/prisma/client';
|
||||||
|
|
||||||
|
import { getFile } from '../../universal/upload/get-file';
|
||||||
|
import { putFile } from '../../universal/upload/put-file';
|
||||||
import { insertFieldInPDF } from '../pdf/insert-field-in-pdf';
|
import { insertFieldInPDF } from '../pdf/insert-field-in-pdf';
|
||||||
|
|
||||||
export type SealDocumentOptions = {
|
export type SealDocumentOptions = {
|
||||||
@ -18,8 +21,17 @@ export const sealDocument = async ({ documentId }: SealDocumentOptions) => {
|
|||||||
where: {
|
where: {
|
||||||
id: documentId,
|
id: documentId,
|
||||||
},
|
},
|
||||||
|
include: {
|
||||||
|
documentData: true,
|
||||||
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
const { documentData } = document;
|
||||||
|
|
||||||
|
if (!documentData) {
|
||||||
|
throw new Error(`Document ${document.id} has no document data`);
|
||||||
|
}
|
||||||
|
|
||||||
if (document.status !== DocumentStatus.COMPLETED) {
|
if (document.status !== DocumentStatus.COMPLETED) {
|
||||||
throw new Error(`Document ${document.id} has not been completed`);
|
throw new Error(`Document ${document.id} has not been completed`);
|
||||||
}
|
}
|
||||||
@ -48,7 +60,7 @@ export const sealDocument = async ({ documentId }: SealDocumentOptions) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// !: Need to write the fields onto the document as a hard copy
|
// !: Need to write the fields onto the document as a hard copy
|
||||||
const { document: pdfData } = document;
|
const pdfData = await getFile(documentData);
|
||||||
|
|
||||||
const doc = await PDFDocument.load(pdfData);
|
const doc = await PDFDocument.load(pdfData);
|
||||||
|
|
||||||
@ -58,13 +70,20 @@ export const sealDocument = async ({ documentId }: SealDocumentOptions) => {
|
|||||||
|
|
||||||
const pdfBytes = await doc.save();
|
const pdfBytes = await doc.save();
|
||||||
|
|
||||||
await prisma.document.update({
|
const { name, ext } = path.parse(document.title);
|
||||||
|
|
||||||
|
const { data: newData } = await putFile({
|
||||||
|
name: `${name}_signed${ext}`,
|
||||||
|
type: 'application/pdf',
|
||||||
|
arrayBuffer: async () => Promise.resolve(Buffer.from(pdfBytes)),
|
||||||
|
});
|
||||||
|
|
||||||
|
await prisma.documentData.update({
|
||||||
where: {
|
where: {
|
||||||
id: document.id,
|
id: documentData.id,
|
||||||
status: DocumentStatus.COMPLETED,
|
|
||||||
},
|
},
|
||||||
data: {
|
data: {
|
||||||
document: Buffer.from(pdfBytes).toString('base64'),
|
data: newData,
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|||||||
@ -48,8 +48,8 @@ export const sendDocument = async ({ documentId, userId }: SendDocumentOptions)
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const assetBaseUrl = process.env.NEXT_PUBLIC_SITE_URL || 'http://localhost:3000';
|
const assetBaseUrl = process.env.NEXT_PUBLIC_WEBAPP_URL || 'http://localhost:3000';
|
||||||
const signDocumentLink = `${process.env.NEXT_PUBLIC_SITE_URL}/sign/${recipient.token}`;
|
const signDocumentLink = `${process.env.NEXT_PUBLIC_WEBAPP_URL}/sign/${recipient.token}`;
|
||||||
|
|
||||||
const template = createElement(DocumentInviteEmailTemplate, {
|
const template = createElement(DocumentInviteEmailTemplate, {
|
||||||
documentName: document.title,
|
documentName: document.title,
|
||||||
|
|||||||
@ -1,8 +1,8 @@
|
|||||||
import { nanoid } from 'nanoid';
|
|
||||||
|
|
||||||
import { prisma } from '@documenso/prisma';
|
import { prisma } from '@documenso/prisma';
|
||||||
import { SendStatus, SigningStatus } from '@documenso/prisma/client';
|
import { SendStatus, SigningStatus } from '@documenso/prisma/client';
|
||||||
|
|
||||||
|
import { nanoid } from '../../universal/id';
|
||||||
|
|
||||||
export interface SetRecipientsForDocumentOptions {
|
export interface SetRecipientsForDocumentOptions {
|
||||||
userId: number;
|
userId: number;
|
||||||
documentId: number;
|
documentId: number;
|
||||||
|
|||||||
@ -1,5 +1,8 @@
|
|||||||
{
|
{
|
||||||
"extends": "@documenso/tsconfig/react-library.json",
|
"extends": "@documenso/tsconfig/react-library.json",
|
||||||
|
"compilerOptions": {
|
||||||
|
"types": ["@documenso/tsconfig/process-env.d.ts"]
|
||||||
|
},
|
||||||
"include": ["**/*.ts", "**/*.tsx", "**/*.d.ts"],
|
"include": ["**/*.ts", "**/*.tsx", "**/*.d.ts"],
|
||||||
"exclude": ["dist", "build", "node_modules"]
|
"exclude": ["dist", "build", "node_modules"]
|
||||||
}
|
}
|
||||||
|
|||||||
@ -8,8 +8,8 @@ export const getBaseUrl = () => {
|
|||||||
return `https://${process.env.VERCEL_URL}`;
|
return `https://${process.env.VERCEL_URL}`;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (process.env.NEXT_PUBLIC_SITE_URL) {
|
if (process.env.NEXT_PUBLIC_WEBAPP_URL) {
|
||||||
return `https://${process.env.NEXT_PUBLIC_SITE_URL}`;
|
return process.env.NEXT_PUBLIC_WEBAPP_URL;
|
||||||
}
|
}
|
||||||
|
|
||||||
return `http://localhost:${process.env.PORT ?? 3000}`;
|
return `http://localhost:${process.env.PORT ?? 3000}`;
|
||||||
|
|||||||
5 packages/lib/universal/id.ts (new file)
@ -0,0 +1,5 @@
import { customAlphabet } from 'nanoid';

export const alphaid = customAlphabet('0123456789abcdefghijklmnopqrstuvwxyz', 10);

export { nanoid } from 'nanoid';
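For orientation, alphaid is a nanoid generator restricted to lowercase alphanumerics with a default length of 10, and callers can pass a different length, as server-actions.ts below does with alphaid(12). A small usage sketch; the example outputs are made up:

import { alphaid, nanoid } from '@documenso/lib/universal/id';

const shortId = alphaid();    // 10 chars drawn from [0-9a-z], e.g. 'k3v9x2m1qa' (illustrative)
const longerId = alphaid(12); // length override, e.g. 'p8d2r7w1z4qt' (illustrative)
const defaultId = nanoid();   // standard nanoid: 21 URL-safe characters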
22 packages/lib/universal/upload/delete-file.ts (new file)
@ -0,0 +1,22 @@
import { match } from 'ts-pattern';

import { DocumentDataType } from '@documenso/prisma/client';

import { deleteS3File } from './server-actions';

export type DeleteFileOptions = {
  type: DocumentDataType;
  data: string;
};

export const deleteFile = async ({ type, data }: DeleteFileOptions) => {
  return await match(type)
    .with(DocumentDataType.S3_PATH, async () => deleteFileFromS3(data))
    .otherwise(() => {
      return;
    });
};

const deleteFileFromS3 = async (key: string) => {
  await deleteS3File(key);
};
51 packages/lib/universal/upload/get-file.ts (new file)
@ -0,0 +1,51 @@
import { base64 } from '@scure/base';
import { match } from 'ts-pattern';

import { DocumentDataType } from '@documenso/prisma/client';

import { getPresignGetUrl } from './server-actions';

export type GetFileOptions = {
  type: DocumentDataType;
  data: string;
};

export const getFile = async ({ type, data }: GetFileOptions) => {
  return await match(type)
    .with(DocumentDataType.BYTES, () => getFileFromBytes(data))
    .with(DocumentDataType.BYTES_64, () => getFileFromBytes64(data))
    .with(DocumentDataType.S3_PATH, async () => getFileFromS3(data))
    .exhaustive();
};

const getFileFromBytes = (data: string) => {
  const encoder = new TextEncoder();

  const binaryData = encoder.encode(data);

  return binaryData;
};

const getFileFromBytes64 = (data: string) => {
  const binaryData = base64.decode(data);

  return binaryData;
};

const getFileFromS3 = async (key: string) => {
  const { url } = await getPresignGetUrl(key);

  const response = await fetch(url, {
    method: 'GET',
  });

  if (!response.ok) {
    throw new Error(`Failed to get file "${key}", failed with status code ${response.status}`);
  }

  const buffer = await response.arrayBuffer();

  const binaryData = new Uint8Array(buffer);

  return binaryData;
};
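To make the read path concrete: getFile accepts any DocumentData-shaped object ({ type, data }) and always resolves to a Uint8Array regardless of transport, which is what lets seal-document.ts further down load it straight into pdf-lib. A hedged sketch only; the wrapper name, the '@documenso/lib' import path, and the variable names are assumptions, not part of this diff:

import { PDFDocument } from 'pdf-lib';

import { getFile } from '@documenso/lib/universal/upload/get-file';
import { prisma } from '@documenso/prisma';

const loadDocumentPdf = async (documentId: number) => {
  // `documentData` carries { type, data }, which is all getFile needs.
  const { documentData } = await prisma.document.findFirstOrThrow({
    where: { id: documentId },
    include: { documentData: true },
  });

  if (!documentData) {
    throw new Error(`Document ${documentId} has no document data`);
  }

  // Decoded bytes for BYTES/BYTES_64, or fetched via a presigned URL for S3_PATH.
  const pdfBytes = await getFile(documentData);

  return await PDFDocument.load(pdfBytes);
};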
59 packages/lib/universal/upload/put-file.ts (new file)
@ -0,0 +1,59 @@
import { base64 } from '@scure/base';
import { match } from 'ts-pattern';

import { DocumentDataType } from '@documenso/prisma/client';

import { createDocumentData } from '../../server-only/document-data/create-document-data';
import { getPresignPostUrl } from './server-actions';

type File = {
  name: string;
  type: string;
  arrayBuffer: () => Promise<ArrayBuffer>;
};

export const putFile = async (file: File) => {
  const { type, data } = await match(process.env.NEXT_PUBLIC_UPLOAD_TRANSPORT)
    .with('s3', async () => putFileInS3(file))
    .otherwise(async () => putFileInDatabase(file));

  return await createDocumentData({ type, data });
};

const putFileInDatabase = async (file: File) => {
  const contents = await file.arrayBuffer();

  const binaryData = new Uint8Array(contents);

  const asciiData = base64.encode(binaryData);

  return {
    type: DocumentDataType.BYTES_64,
    data: asciiData,
  };
};

const putFileInS3 = async (file: File) => {
  const { url, key } = await getPresignPostUrl(file.name, file.type);

  const body = await file.arrayBuffer();

  const reponse = await fetch(url, {
    method: 'PUT',
    headers: {
      'Content-Type': 'application/octet-stream',
    },
    body,
  });

  if (!reponse.ok) {
    throw new Error(
      `Failed to upload file "${file.name}", failed with status code ${reponse.status}`,
    );
  }

  return {
    type: DocumentDataType.S3_PATH,
    data: key,
  };
};
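And the corresponding write path: putFile resolves the transport from NEXT_PUBLIC_UPLOAD_TRANSPORT, stores the payload, and persists a DocumentData row whose id the new Document schema references. A rough sketch of a caller, assuming the createDocument helper added earlier in this diff; the wrapper name, its arguments, and the '@documenso/lib' import paths are illustrative:

import { createDocument } from '@documenso/lib/server-only/document/create-document';
import { putFile } from '@documenso/lib/universal/upload/put-file';

// `file` only needs { name, type, arrayBuffer }, so a browser File satisfies this shape.
export const uploadAndCreateDocument = async (file: File, userId: number) => {
  // Base64 into the DocumentData table, or a presigned PUT to S3, depending on transport.
  const { id: documentDataId } = await putFile(file);

  // The Document row now stores only the reference, not the PDF itself.
  return await createDocument({ title: file.name, userId, documentDataId });
};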
104 packages/lib/universal/upload/server-actions.ts (new file)
@ -0,0 +1,104 @@
'use server';

import {
  DeleteObjectCommand,
  GetObjectCommand,
  PutObjectCommand,
  S3Client,
} from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
import slugify from '@sindresorhus/slugify';
import path from 'node:path';

import { ONE_HOUR, ONE_SECOND } from '../../constants/time';
import { getServerComponentSession } from '../../next-auth/get-server-session';
import { alphaid } from '../id';

export const getPresignPostUrl = async (fileName: string, contentType: string) => {
  const client = getS3Client();

  const user = await getServerComponentSession();

  // Get the basename and extension for the file
  const { name, ext } = path.parse(fileName);

  let key = `${alphaid(12)}/${slugify(name)}${ext}`;

  if (user) {
    key = `${user.id}/${key}`;
  }

  const putObjectCommand = new PutObjectCommand({
    Bucket: process.env.NEXT_PRIVATE_UPLOAD_BUCKET,
    Key: key,
    ContentType: contentType,
  });

  const url = await getSignedUrl(client, putObjectCommand, {
    expiresIn: ONE_HOUR / ONE_SECOND,
  });

  return { key, url };
};

export const getAbsolutePresignPostUrl = async (key: string) => {
  const client = getS3Client();

  const putObjectCommand = new PutObjectCommand({
    Bucket: process.env.NEXT_PRIVATE_UPLOAD_BUCKET,
    Key: key,
  });

  const url = await getSignedUrl(client, putObjectCommand, {
    expiresIn: ONE_HOUR / ONE_SECOND,
  });

  return { key, url };
};

export const getPresignGetUrl = async (key: string) => {
  const client = getS3Client();

  const getObjectCommand = new GetObjectCommand({
    Bucket: process.env.NEXT_PRIVATE_UPLOAD_BUCKET,
    Key: key,
  });

  const url = await getSignedUrl(client, getObjectCommand, {
    expiresIn: ONE_HOUR / ONE_SECOND,
  });

  return { key, url };
};

export const deleteS3File = async (key: string) => {
  const client = getS3Client();

  await client.send(
    new DeleteObjectCommand({
      Bucket: process.env.NEXT_PRIVATE_UPLOAD_BUCKET,
      Key: key,
    }),
  );
};

const getS3Client = () => {
  if (process.env.NEXT_PUBLIC_UPLOAD_TRANSPORT !== 's3') {
    throw new Error('Invalid upload transport');
  }

  const hasCredentials =
    process.env.NEXT_PRIVATE_UPLOAD_ACCESS_KEY_ID &&
    process.env.NEXT_PRIVATE_UPLOAD_SECRET_ACCESS_KEY;

  return new S3Client({
    endpoint: process.env.NEXT_PRIVATE_UPLOAD_ENDPOINT || undefined,
    region: process.env.NEXT_PRIVATE_UPLOAD_REGION || 'us-east-1',
    credentials: hasCredentials
      ? {
          accessKeyId: String(process.env.NEXT_PRIVATE_UPLOAD_ACCESS_KEY_ID),
          secretAccessKey: String(process.env.NEXT_PRIVATE_UPLOAD_SECRET_ACCESS_KEY),
        }
      : undefined,
  });
};
58
packages/lib/universal/upload/update-file.ts
Normal file
58
packages/lib/universal/upload/update-file.ts
Normal file
@ -0,0 +1,58 @@
|
|||||||
|
import { base64 } from '@scure/base';
|
||||||
|
import { match } from 'ts-pattern';
|
||||||
|
|
||||||
|
import { DocumentDataType } from '@documenso/prisma/client';
|
||||||
|
|
||||||
|
import { getAbsolutePresignPostUrl } from './server-actions';
|
||||||
|
|
||||||
|
export type UpdateFileOptions = {
|
||||||
|
type: DocumentDataType;
|
||||||
|
oldData: string;
|
||||||
|
newData: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const updateFile = async ({ type, oldData, newData }: UpdateFileOptions) => {
|
||||||
|
return await match(type)
|
||||||
|
.with(DocumentDataType.BYTES, () => updateFileWithBytes(newData))
|
||||||
|
.with(DocumentDataType.BYTES_64, () => updateFileWithBytes64(newData))
|
||||||
|
.with(DocumentDataType.S3_PATH, async () => updateFileWithS3(oldData, newData))
|
||||||
|
.exhaustive();
|
||||||
|
};
|
||||||
|
|
||||||
|
const updateFileWithBytes = (data: string) => {
|
||||||
|
return {
|
||||||
|
type: DocumentDataType.BYTES,
|
||||||
|
data,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
const updateFileWithBytes64 = (data: string) => {
|
||||||
|
const encoder = new TextEncoder();
|
||||||
|
|
||||||
|
const binaryData = encoder.encode(data);
|
||||||
|
|
||||||
|
const asciiData = base64.encode(binaryData);
|
||||||
|
|
||||||
|
return {
|
||||||
|
type: DocumentDataType.BYTES_64,
|
||||||
|
data: asciiData,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
const updateFileWithS3 = async (key: string, data: string) => {
|
||||||
|
const { url } = await getAbsolutePresignPostUrl(key);
|
||||||
|
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'PUT',
|
||||||
|
body: data,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`Failed to update file "${key}", failed with status code ${response.status}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
type: DocumentDataType.S3_PATH,
|
||||||
|
data: key,
|
||||||
|
};
|
||||||
|
};
|
||||||
12 packages/lib/utils/recipient-formatter.ts (new file)
@ -0,0 +1,12 @@
import { Recipient } from '@documenso/prisma/client';

export const recipientInitials = (text: string) =>
  text
    .split(' ')
    .map((name: string) => name.slice(0, 1).toUpperCase())
    .slice(0, 2)
    .join('');

export const recipientAbbreviation = (recipient: Recipient) => {
  return recipientInitials(recipient.name) || recipient.email.slice(0, 1).toUpperCase();
};
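A quick worked example of the two helpers; the values and the '@documenso/lib' import path are illustrative:

import { Recipient } from '@documenso/prisma/client';

import { recipientAbbreviation, recipientInitials } from '@documenso/lib/utils/recipient-formatter';

recipientInitials('Timur Ercan'); // 'TE': first letter of the first two words
recipientInitials('');            // '': which is why the abbreviation falls back below

// Falls back to the first letter of the email when the name yields no initials.
recipientAbbreviation({ name: '', email: 'hi@documenso.com' } as Recipient); // 'H'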
52 packages/prisma/helper.ts (new file)
@ -0,0 +1,52 @@
/// <reference types="@documenso/tsconfig/process-env.d.ts" />

export const getDatabaseUrl = () => {
  if (process.env.NEXT_PRIVATE_DATABASE_URL) {
    return process.env.NEXT_PRIVATE_DATABASE_URL;
  }

  if (process.env.POSTGRES_URL) {
    process.env.NEXT_PRIVATE_DATABASE_URL = process.env.POSTGRES_URL;
    process.env.NEXT_PRIVATE_DIRECT_DATABASE_URL = process.env.POSTGRES_URL;
  }

  if (process.env.DATABASE_URL) {
    process.env.NEXT_PRIVATE_DATABASE_URL = process.env.DATABASE_URL;
    process.env.NEXT_PRIVATE_DIRECT_DATABASE_URL = process.env.DATABASE_URL;
  }

  if (process.env.POSTGRES_PRISMA_URL) {
    process.env.NEXT_PRIVATE_DATABASE_URL = process.env.POSTGRES_PRISMA_URL;
  }

  if (process.env.POSTGRES_URL_NON_POOLING) {
    process.env.NEXT_PRIVATE_DIRECT_DATABASE_URL = process.env.POSTGRES_URL_NON_POOLING;
  }

  // We change the protocol from `postgres:` to `https:` so we can construct an easily
  // modifiable URL.
  const url = new URL(process.env.NEXT_PRIVATE_DATABASE_URL.replace('postgres://', 'https://'));

  // If we're using a connection pool, we need to let Prisma know that
  // we're using PgBouncer.
  if (process.env.NEXT_PRIVATE_DATABASE_URL !== process.env.NEXT_PRIVATE_DIRECT_DATABASE_URL) {
    url.searchParams.set('pgbouncer', 'true');

    process.env.NEXT_PRIVATE_DATABASE_URL = url.toString().replace('https://', 'postgres://');
  }

  // Support for neon.tech (Neon Database)
  if (url.hostname.endsWith('neon.tech')) {
    const [projectId, ...rest] = url.hostname.split('.');

    if (!projectId.endsWith('-pooler')) {
      url.hostname = `${projectId}-pooler.${rest.join('.')}`;
    }

    url.searchParams.set('pgbouncer', 'true');

    process.env.NEXT_PRIVATE_DATABASE_URL = url.toString().replace('https://', 'postgres://');
  }

  return process.env.NEXT_PRIVATE_DATABASE_URL;
};
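To illustrate what getDatabaseUrl actually rewrites, here is an assumed before/after for a Neon connection string injected by a hosting integration; the hostnames and credentials are made up, and the '@documenso/prisma/helper' import path is an assumption:

// Assumed input (e.g. provided by a Vercel Postgres/Neon integration):
//   POSTGRES_URL = postgres://user:pass@ep-example-123456.us-east-2.aws.neon.tech/documenso
//
// Result of getDatabaseUrl(): the pooled URL is routed through Neon's PgBouncer host
// and flagged for Prisma:
//   postgres://user:pass@ep-example-123456-pooler.us-east-2.aws.neon.tech/documenso?pgbouncer=true
import { getDatabaseUrl } from '@documenso/prisma/helper';

const datasourceUrl = getDatabaseUrl();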
@ -1,5 +1,7 @@
|
|||||||
import { PrismaClient } from '@prisma/client';
|
import { PrismaClient } from '@prisma/client';
|
||||||
|
|
||||||
|
import { getDatabaseUrl } from './helper';
|
||||||
|
|
||||||
declare global {
|
declare global {
|
||||||
// We need `var` to declare a global variable in TypeScript
|
// We need `var` to declare a global variable in TypeScript
|
||||||
// eslint-disable-next-line no-var
|
// eslint-disable-next-line no-var
|
||||||
@ -7,9 +9,13 @@ declare global {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (!globalThis.prisma) {
|
if (!globalThis.prisma) {
|
||||||
globalThis.prisma = new PrismaClient();
|
globalThis.prisma = new PrismaClient({ datasourceUrl: getDatabaseUrl() });
|
||||||
}
|
}
|
||||||
|
|
||||||
export const prisma = globalThis.prisma || new PrismaClient();
|
export const prisma =
|
||||||
|
globalThis.prisma ||
|
||||||
|
new PrismaClient({
|
||||||
|
datasourceUrl: getDatabaseUrl(),
|
||||||
|
});
|
||||||
|
|
||||||
export const getPrismaClient = () => prisma;
|
export const getPrismaClient = () => prisma;
|
||||||
|
|||||||
@ -0,0 +1,19 @@
-- CreateEnum
CREATE TYPE "DocumentDataType" AS ENUM ('S3_PATH', 'BYTES', 'BYTES_64');

-- CreateTable
CREATE TABLE "DocumentData" (
    "id" TEXT NOT NULL,
    "type" "DocumentDataType" NOT NULL,
    "data" TEXT NOT NULL,
    "initialData" TEXT NOT NULL,
    "documentId" INTEGER NOT NULL,

    CONSTRAINT "DocumentData_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "DocumentData_documentId_key" ON "DocumentData"("documentId");

-- AddForeignKey
ALTER TABLE "DocumentData" ADD CONSTRAINT "DocumentData_documentId_fkey" FOREIGN KEY ("documentId") REFERENCES "Document"("id") ON DELETE CASCADE ON UPDATE CASCADE;
@ -0,0 +1,14 @@
INSERT INTO
  "DocumentData" ("id", "type", "data", "initialData", "documentId") (
    SELECT
      CAST(gen_random_uuid() AS TEXT),
      'BYTES_64',
      d."document",
      d."document",
      d."id"
    FROM
      "Document" d
    WHERE
      d."id" IS NOT NULL
      AND d."document" IS NOT NULL
  );
@ -0,0 +1,5 @@
-- CreateEnum
CREATE TYPE "Role" AS ENUM ('ADMIN', 'USER');

-- AlterTable
ALTER TABLE "User" ADD COLUMN "roles" "Role"[] DEFAULT ARRAY['USER']::"Role"[];
@ -0,0 +1,19 @@
-- AlterTable
ALTER TABLE "Document" ADD COLUMN "createdAt" TIMESTAMP(3);

-- AlterTable
ALTER TABLE "Document" ADD COLUMN "updatedAt" TIMESTAMP(3);

-- DefaultValues
UPDATE "Document"
SET
  "createdAt" = COALESCE("created"::TIMESTAMP, NOW()),
  "updatedAt" = COALESCE("created"::TIMESTAMP, NOW());

-- AlterColumn
ALTER TABLE "Document" ALTER COLUMN "createdAt" SET DEFAULT NOW();
ALTER TABLE "Document" ALTER COLUMN "createdAt" SET NOT NULL;

-- AlterColumn
ALTER TABLE "Document" ALTER COLUMN "updatedAt" SET DEFAULT NOW();
ALTER TABLE "Document" ALTER COLUMN "updatedAt" SET NOT NULL;
@ -0,0 +1,8 @@
/*
  Warnings:

  - You are about to drop the column `document` on the `Document` table. All the data in the column will be lost.

*/
-- AlterTable
ALTER TABLE "Document" DROP COLUMN "document";
@ -0,0 +1,23 @@
-- DropForeignKey
ALTER TABLE "DocumentData" DROP CONSTRAINT "DocumentData_documentId_fkey";

-- DropIndex
DROP INDEX "DocumentData_documentId_key";

-- AlterTable
ALTER TABLE "Document" ADD COLUMN "documentDataId" TEXT;

-- Reverse relation foreign key ids
UPDATE "Document" SET "documentDataId" = "DocumentData"."id" FROM "DocumentData" WHERE "Document"."id" = "DocumentData"."documentId";

-- AlterColumn
ALTER TABLE "Document" ALTER COLUMN "documentDataId" SET NOT NULL;

-- AlterTable
ALTER TABLE "DocumentData" DROP COLUMN "documentId";

-- CreateIndex
CREATE UNIQUE INDEX "Document_documentDataId_key" ON "Document"("documentDataId");

-- AddForeignKey
ALTER TABLE "Document" ADD CONSTRAINT "Document_documentDataId_fkey" FOREIGN KEY ("documentDataId") REFERENCES "DocumentData"("id") ON DELETE CASCADE ON UPDATE CASCADE;
@ -0,0 +1,8 @@
/*
  Warnings:

  - You are about to drop the column `created` on the `Document` table. All the data in the column will be lost.

*/
-- AlterTable
ALTER TABLE "Document" DROP COLUMN "created";
@ -9,10 +9,18 @@
|
|||||||
"format": "prisma format",
|
"format": "prisma format",
|
||||||
"prisma:generate": "prisma generate",
|
"prisma:generate": "prisma generate",
|
||||||
"prisma:migrate-dev": "prisma migrate dev",
|
"prisma:migrate-dev": "prisma migrate dev",
|
||||||
"prisma:migrate-deploy": "prisma migrate deploy"
|
"prisma:migrate-deploy": "prisma migrate deploy",
|
||||||
|
"prisma:seed": "prisma db seed"
|
||||||
|
},
|
||||||
|
"prisma": {
|
||||||
|
"seed": "ts-node --transpileOnly --skipProject ./seed-database.ts"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@prisma/client": "5.0.0",
|
"@prisma/client": "5.3.1",
|
||||||
"prisma": "5.0.0"
|
"prisma": "5.3.1"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"ts-node": "^10.9.1",
|
||||||
|
"typescript": "^5.1.6"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -13,6 +13,11 @@ enum IdentityProvider {
|
|||||||
GOOGLE
|
GOOGLE
|
||||||
}
|
}
|
||||||
|
|
||||||
|
enum Role {
|
||||||
|
ADMIN
|
||||||
|
USER
|
||||||
|
}
|
||||||
|
|
||||||
model User {
|
model User {
|
||||||
id Int @id @default(autoincrement())
|
id Int @id @default(autoincrement())
|
||||||
name String?
|
name String?
|
||||||
@ -21,6 +26,7 @@ model User {
|
|||||||
password String?
|
password String?
|
||||||
source String?
|
source String?
|
||||||
signature String?
|
signature String?
|
||||||
|
roles Role[] @default([USER])
|
||||||
identityProvider IdentityProvider @default(DOCUMENSO)
|
identityProvider IdentityProvider @default(DOCUMENSO)
|
||||||
accounts Account[]
|
accounts Account[]
|
||||||
sessions Session[]
|
sessions Session[]
|
||||||
@ -85,15 +91,33 @@ enum DocumentStatus {
|
|||||||
}
|
}
|
||||||
|
|
||||||
model Document {
|
model Document {
|
||||||
id Int @id @default(autoincrement())
|
id Int @id @default(autoincrement())
|
||||||
created DateTime @default(now())
|
userId Int
|
||||||
userId Int
|
User User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||||
User User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
title String
|
||||||
title String
|
status DocumentStatus @default(DRAFT)
|
||||||
status DocumentStatus @default(DRAFT)
|
Recipient Recipient[]
|
||||||
document String
|
Field Field[]
|
||||||
Recipient Recipient[]
|
documentDataId String
|
||||||
Field Field[]
|
documentData DocumentData @relation(fields: [documentDataId], references: [id], onDelete: Cascade)
|
||||||
|
createdAt DateTime @default(now())
|
||||||
|
updatedAt DateTime @default(now()) @updatedAt
|
||||||
|
|
||||||
|
@@unique([documentDataId])
|
||||||
|
}
|
||||||
|
|
||||||
|
enum DocumentDataType {
|
||||||
|
S3_PATH
|
||||||
|
BYTES
|
||||||
|
BYTES_64
|
||||||
|
}
|
||||||
|
|
||||||
|
model DocumentData {
|
||||||
|
id String @id @default(cuid())
|
||||||
|
type DocumentDataType
|
||||||
|
data String
|
||||||
|
initialData String
|
||||||
|
Document Document?
|
||||||
}
|
}
|
||||||
|
|
||||||
enum ReadStatus {
|
enum ReadStatus {
|
||||||
|
|||||||
82
packages/prisma/seed-database.ts
Normal file
82
packages/prisma/seed-database.ts
Normal file
@ -0,0 +1,82 @@
|
|||||||
|
import { DocumentDataType, Role } from '@prisma/client';
|
||||||
|
import fs from 'node:fs';
|
||||||
|
import path from 'node:path';
|
||||||
|
|
||||||
|
import { hashSync } from '@documenso/lib/server-only/auth/hash';
|
||||||
|
|
||||||
|
import { prisma } from './index';
|
||||||
|
|
||||||
|
const seedDatabase = async () => {
|
||||||
|
const examplePdf = fs
|
||||||
|
.readFileSync(path.join(__dirname, '../../assets/example.pdf'))
|
||||||
|
.toString('base64');
|
||||||
|
|
||||||
|
const exampleUser = await prisma.user.upsert({
|
||||||
|
where: {
|
||||||
|
email: 'example@documenso.com',
|
||||||
|
},
|
||||||
|
create: {
|
||||||
|
name: 'Example User',
|
||||||
|
email: 'example@documenso.com',
|
||||||
|
password: hashSync('password'),
|
||||||
|
roles: [Role.USER],
|
||||||
|
},
|
||||||
|
update: {},
|
||||||
|
});
|
||||||
|
|
||||||
|
const adminUser = await prisma.user.upsert({
|
||||||
|
where: {
|
||||||
|
email: 'admin@documenso.com',
|
||||||
|
},
|
||||||
|
create: {
|
||||||
|
name: 'Admin User',
|
||||||
|
email: 'admin@documenso.com',
|
||||||
|
password: hashSync('password'),
|
||||||
|
roles: [Role.USER, Role.ADMIN],
|
||||||
|
},
|
||||||
|
update: {},
|
||||||
|
});
|
||||||
|
|
||||||
|
const examplePdfData = await prisma.documentData.upsert({
|
||||||
|
where: {
|
||||||
|
id: 'clmn0kv5k0000pe04vcqg5zla',
|
||||||
|
},
|
||||||
|
create: {
|
||||||
|
id: 'clmn0kv5k0000pe04vcqg5zla',
|
||||||
|
type: DocumentDataType.BYTES_64,
|
||||||
|
data: examplePdf,
|
||||||
|
initialData: examplePdf,
|
||||||
|
},
|
||||||
|
update: {},
|
||||||
|
});
|
||||||
|
|
||||||
|
await prisma.document.upsert({
|
||||||
|
where: {
|
||||||
|
id: 1,
|
||||||
|
},
|
||||||
|
create: {
|
||||||
|
id: 1,
|
||||||
|
title: 'Example Document',
|
||||||
|
documentDataId: examplePdfData.id,
|
||||||
|
userId: exampleUser.id,
|
||||||
|
Recipient: {
|
||||||
|
create: {
|
||||||
|
name: String(adminUser.name),
|
||||||
|
email: adminUser.email,
|
||||||
|
token: Math.random().toString(36).slice(2, 9),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
update: {},
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
seedDatabase()
|
||||||
|
.then(() => {
|
||||||
|
console.log('Database seeded');
|
||||||
|
process.exit(0);
|
||||||
|
})
|
||||||
|
.catch((error) => {
|
||||||
|
console.error(error);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
5
packages/prisma/types/document-with-data.ts
Normal file
5
packages/prisma/types/document-with-data.ts
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
import { Document, DocumentData } from '@documenso/prisma/client';
|
||||||
|
|
||||||
|
export type DocumentWithData = Document & {
|
||||||
|
documentData?: DocumentData | null;
|
||||||
|
};
|
||||||
@ -1,17 +1,81 @@
|
|||||||
import { TRPCError } from '@trpc/server';
|
import { TRPCError } from '@trpc/server';
|
||||||
|
|
||||||
|
import { createDocument } from '@documenso/lib/server-only/document/create-document';
|
||||||
|
import { getDocumentById } from '@documenso/lib/server-only/document/get-document-by-id';
|
||||||
|
import { getDocumentAndSenderByToken } from '@documenso/lib/server-only/document/get-document-by-token';
|
||||||
import { sendDocument } from '@documenso/lib/server-only/document/send-document';
|
import { sendDocument } from '@documenso/lib/server-only/document/send-document';
|
||||||
import { setFieldsForDocument } from '@documenso/lib/server-only/field/set-fields-for-document';
|
import { setFieldsForDocument } from '@documenso/lib/server-only/field/set-fields-for-document';
|
||||||
import { setRecipientsForDocument } from '@documenso/lib/server-only/recipient/set-recipients-for-document';
|
import { setRecipientsForDocument } from '@documenso/lib/server-only/recipient/set-recipients-for-document';
|
||||||
|
|
||||||
import { authenticatedProcedure, router } from '../trpc';
|
import { authenticatedProcedure, procedure, router } from '../trpc';
|
||||||
import {
|
import {
|
||||||
|
ZCreateDocumentMutationSchema,
|
||||||
|
ZGetDocumentByIdQuerySchema,
|
||||||
|
ZGetDocumentByTokenQuerySchema,
|
||||||
ZSendDocumentMutationSchema,
|
ZSendDocumentMutationSchema,
|
||||||
ZSetFieldsForDocumentMutationSchema,
|
ZSetFieldsForDocumentMutationSchema,
|
||||||
ZSetRecipientsForDocumentMutationSchema,
|
ZSetRecipientsForDocumentMutationSchema,
|
||||||
} from './schema';
|
} from './schema';
|
||||||
|
|
||||||
export const documentRouter = router({
|
export const documentRouter = router({
|
||||||
|
getDocumentById: authenticatedProcedure
|
||||||
|
.input(ZGetDocumentByIdQuerySchema)
|
||||||
|
.query(async ({ input, ctx }) => {
|
||||||
|
try {
|
||||||
|
const { id } = input;
|
||||||
|
|
||||||
|
return await getDocumentById({
|
||||||
|
id,
|
||||||
|
userId: ctx.user.id,
|
||||||
|
});
|
||||||
|
} catch (err) {
|
||||||
|
console.error(err);
|
||||||
|
|
||||||
|
throw new TRPCError({
|
||||||
|
code: 'BAD_REQUEST',
|
||||||
|
message: 'We were unable to find this document. Please try again later.',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
|
||||||
|
getDocumentByToken: procedure.input(ZGetDocumentByTokenQuerySchema).query(async ({ input }) => {
|
||||||
|
try {
|
||||||
|
const { token } = input;
|
||||||
|
|
||||||
|
return await getDocumentAndSenderByToken({
|
||||||
|
token,
|
||||||
|
});
|
||||||
|
} catch (err) {
|
||||||
|
console.error(err);
|
||||||
|
|
||||||
|
throw new TRPCError({
|
||||||
|
code: 'BAD_REQUEST',
|
||||||
|
message: 'We were unable to find this document. Please try again later.',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
|
||||||
|
createDocument: authenticatedProcedure
|
||||||
|
.input(ZCreateDocumentMutationSchema)
|
||||||
|
.mutation(async ({ input, ctx }) => {
|
||||||
|
try {
|
||||||
|
const { title, documentDataId } = input;
|
||||||
|
|
||||||
|
return await createDocument({
|
||||||
|
userId: ctx.user.id,
|
||||||
|
title,
|
||||||
|
documentDataId,
|
||||||
|
});
|
||||||
|
} catch (err) {
|
||||||
|
console.error(err);
|
||||||
|
|
||||||
|
throw new TRPCError({
|
||||||
|
code: 'BAD_REQUEST',
|
||||||
|
message: 'We were unable to create this document. Please try again later.',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
|
||||||
setRecipientsForDocument: authenticatedProcedure
|
setRecipientsForDocument: authenticatedProcedure
|
||||||
.input(ZSetRecipientsForDocumentMutationSchema)
|
.input(ZSetRecipientsForDocumentMutationSchema)
|
||||||
.mutation(async ({ input, ctx }) => {
|
.mutation(async ({ input, ctx }) => {
|
||||||
|
|||||||
@ -2,6 +2,25 @@ import { z } from 'zod';
|
|||||||
|
|
||||||
import { FieldType } from '@documenso/prisma/client';
|
import { FieldType } from '@documenso/prisma/client';
|
||||||
|
|
||||||
|
export const ZGetDocumentByIdQuerySchema = z.object({
|
||||||
|
id: z.number().min(1),
|
||||||
|
});
|
||||||
|
|
||||||
|
export type TGetDocumentByIdQuerySchema = z.infer<typeof ZGetDocumentByIdQuerySchema>;
|
||||||
|
|
||||||
|
export const ZGetDocumentByTokenQuerySchema = z.object({
|
||||||
|
token: z.string().min(1),
|
||||||
|
});
|
||||||
|
|
||||||
|
export type TGetDocumentByTokenQuerySchema = z.infer<typeof ZGetDocumentByTokenQuerySchema>;
|
||||||
|
|
||||||
|
export const ZCreateDocumentMutationSchema = z.object({
|
||||||
|
title: z.string().min(1),
|
||||||
|
documentDataId: z.string().min(1),
|
||||||
|
});
|
||||||
|
|
||||||
|
export type TCreateDocumentMutationSchema = z.infer<typeof ZCreateDocumentMutationSchema>;
|
||||||
|
|
||||||
export const ZSetRecipientsForDocumentMutationSchema = z.object({
|
export const ZSetRecipientsForDocumentMutationSchema = z.object({
|
||||||
documentId: z.number(),
|
documentId: z.number(),
|
||||||
recipients: z.array(
|
recipients: z.array(
|
||||||
|
|||||||
24
packages/tsconfig/process-env.d.ts
vendored
24
packages/tsconfig/process-env.d.ts
vendored
@ -1,6 +1,7 @@
|
|||||||
declare namespace NodeJS {
|
declare namespace NodeJS {
|
||||||
export interface ProcessEnv {
|
export interface ProcessEnv {
|
||||||
NEXT_PUBLIC_SITE_URL?: string;
|
NEXT_PUBLIC_WEBAPP_URL?: string;
|
||||||
|
NEXT_PUBLIC_MARKETING_URL?: string;
|
||||||
|
|
||||||
NEXT_PRIVATE_GOOGLE_CLIENT_ID?: string;
|
NEXT_PRIVATE_GOOGLE_CLIENT_ID?: string;
|
||||||
NEXT_PRIVATE_GOOGLE_CLIENT_SECRET?: string;
|
NEXT_PRIVATE_GOOGLE_CLIENT_SECRET?: string;
|
||||||
@ -13,6 +14,13 @@ declare namespace NodeJS {
|
|||||||
NEXT_PRIVATE_STRIPE_API_KEY: string;
|
NEXT_PRIVATE_STRIPE_API_KEY: string;
|
||||||
NEXT_PRIVATE_STRIPE_WEBHOOK_SECRET: string;
|
NEXT_PRIVATE_STRIPE_WEBHOOK_SECRET: string;
|
||||||
|
|
||||||
|
NEXT_PUBLIC_UPLOAD_TRANSPORT?: 'database' | 's3';
|
||||||
|
NEXT_PRIVATE_UPLOAD_ENDPOINT?: string;
|
||||||
|
NEXT_PRIVATE_UPLOAD_REGION?: string;
|
||||||
|
NEXT_PRIVATE_UPLOAD_BUCKET?: string;
|
||||||
|
NEXT_PRIVATE_UPLOAD_ACCESS_KEY_ID?: string;
|
||||||
|
NEXT_PRIVATE_UPLOAD_SECRET_ACCESS_KEY?: string;
|
||||||
|
|
||||||
NEXT_PRIVATE_SMTP_TRANSPORT?: 'mailchannels' | 'smtp-auth' | 'smtp-api';
|
NEXT_PRIVATE_SMTP_TRANSPORT?: 'mailchannels' | 'smtp-auth' | 'smtp-api';
|
||||||
|
|
||||||
NEXT_PRIVATE_MAILCHANNELS_API_KEY?: string;
|
NEXT_PRIVATE_MAILCHANNELS_API_KEY?: string;
|
||||||
@ -33,5 +41,19 @@ declare namespace NodeJS {
|
|||||||
|
|
||||||
NEXT_PRIVATE_SMTP_FROM_NAME?: string;
|
NEXT_PRIVATE_SMTP_FROM_NAME?: string;
|
||||||
NEXT_PRIVATE_SMTP_FROM_ADDRESS?: string;
|
NEXT_PRIVATE_SMTP_FROM_ADDRESS?: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Vercel environment variables
|
||||||
|
*/
|
||||||
|
VERCEL?: string;
|
||||||
|
VERCEL_ENV?: 'production' | 'development' | 'preview';
|
||||||
|
VERCEL_URL?: string;
|
||||||
|
|
||||||
|
DEPLOYMENT_TARGET?: 'webapp' | 'marketing';
|
||||||
|
|
||||||
|
POSTGRES_URL?: string;
|
||||||
|
DATABASE_URL?: string;
|
||||||
|
POSTGRES_PRISMA_URL?: string;
|
||||||
|
POSTGRES_URL_NON_POOLING?: string;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -15,6 +15,7 @@
|
|||||||
"lint": "eslint \"**/*.ts*\""
|
"lint": "eslint \"**/*.ts*\""
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
"@documenso/tailwind-config": "*",
|
||||||
"@documenso/tsconfig": "*",
|
"@documenso/tsconfig": "*",
|
||||||
"@types/react": "18.2.18",
|
"@types/react": "18.2.18",
|
||||||
"@types/react-dom": "18.2.7",
|
"@types/react-dom": "18.2.7",
|
||||||
@ -22,6 +23,7 @@
|
|||||||
"typescript": "^5.1.6"
|
"typescript": "^5.1.6"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"@documenso/lib": "*",
|
||||||
"@radix-ui/react-accordion": "^1.1.1",
|
"@radix-ui/react-accordion": "^1.1.1",
|
||||||
"@radix-ui/react-alert-dialog": "^1.0.3",
|
"@radix-ui/react-alert-dialog": "^1.0.3",
|
||||||
"@radix-ui/react-aspect-ratio": "^1.0.2",
|
"@radix-ui/react-aspect-ratio": "^1.0.2",
|
||||||
@ -51,7 +53,6 @@
|
|||||||
"class-variance-authority": "^0.6.0",
|
"class-variance-authority": "^0.6.0",
|
||||||
"clsx": "^1.2.1",
|
"clsx": "^1.2.1",
|
||||||
"cmdk": "^0.2.0",
|
"cmdk": "^0.2.0",
|
||||||
"date-fns": "^2.30.0",
|
|
||||||
"framer-motion": "^10.12.8",
|
"framer-motion": "^10.12.8",
|
||||||
"lucide-react": "^0.214.0",
|
"lucide-react": "^0.214.0",
|
||||||
"next": "13.4.12",
|
"next": "13.4.12",
|
||||||
@ -61,4 +62,4 @@
|
|||||||
"tailwind-merge": "^1.12.0",
|
"tailwind-merge": "^1.12.0",
|
||||||
"tailwindcss-animate": "^1.0.5"
|
"tailwindcss-animate": "^1.0.5"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -5,12 +5,12 @@ import { useCallback, useEffect, useRef, useState } from 'react';
|
|||||||
import { Caveat } from 'next/font/google';
|
import { Caveat } from 'next/font/google';
|
||||||
|
|
||||||
import { Check, ChevronsUpDown, Info } from 'lucide-react';
|
import { Check, ChevronsUpDown, Info } from 'lucide-react';
|
||||||
import { nanoid } from 'nanoid';
|
|
||||||
import { useFieldArray, useForm } from 'react-hook-form';
|
import { useFieldArray, useForm } from 'react-hook-form';
|
||||||
|
|
||||||
import { getBoundingClientRect } from '@documenso/lib/client-only/get-bounding-client-rect';
|
import { getBoundingClientRect } from '@documenso/lib/client-only/get-bounding-client-rect';
|
||||||
import { useDocumentElement } from '@documenso/lib/client-only/hooks/use-document-element';
|
import { useDocumentElement } from '@documenso/lib/client-only/hooks/use-document-element';
|
||||||
import { PDF_VIEWER_PAGE_SELECTOR } from '@documenso/lib/constants/pdf-viewer';
|
import { PDF_VIEWER_PAGE_SELECTOR } from '@documenso/lib/constants/pdf-viewer';
|
||||||
|
import { nanoid } from '@documenso/lib/universal/id';
|
||||||
import { Field, FieldType, Recipient, SendStatus } from '@documenso/prisma/client';
|
import { Field, FieldType, Recipient, SendStatus } from '@documenso/prisma/client';
|
||||||
import { cn } from '@documenso/ui/lib/utils';
|
import { cn } from '@documenso/ui/lib/utils';
|
||||||
import { Button } from '@documenso/ui/primitives/button';
|
import { Button } from '@documenso/ui/primitives/button';
|
||||||
|
|||||||
@ -5,9 +5,9 @@ import React, { useId } from 'react';
|
|||||||
import { zodResolver } from '@hookform/resolvers/zod';
|
import { zodResolver } from '@hookform/resolvers/zod';
|
||||||
import { AnimatePresence, motion } from 'framer-motion';
|
import { AnimatePresence, motion } from 'framer-motion';
|
||||||
import { Plus, Trash } from 'lucide-react';
|
import { Plus, Trash } from 'lucide-react';
|
||||||
import { nanoid } from 'nanoid';
|
|
||||||
import { Controller, useFieldArray, useForm } from 'react-hook-form';
|
import { Controller, useFieldArray, useForm } from 'react-hook-form';
|
||||||
|
|
||||||
|
import { nanoid } from '@documenso/lib/universal/id';
|
||||||
import { Field, Recipient, SendStatus } from '@documenso/prisma/client';
|
import { Field, Recipient, SendStatus } from '@documenso/prisma/client';
|
||||||
import { Button } from '@documenso/ui/primitives/button';
|
import { Button } from '@documenso/ui/primitives/button';
|
||||||
import { FormErrorMessage } from '@documenso/ui/primitives/form/form-error-message';
|
import { FormErrorMessage } from '@documenso/ui/primitives/form/form-error-message';
|
||||||
|
|||||||
45
scripts/remap-vercel-env.cjs
Normal file
45
scripts/remap-vercel-env.cjs
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
/** @typedef {import('@documenso/tsconfig/process-env')} */
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remap Vercel environment variables to our defined Next.js environment variables.
|
||||||
|
*
|
||||||
|
* @deprecated This is no longer needed because we can't inject runtime environment variables via next.config.js
|
||||||
|
*
|
||||||
|
* @returns {void}
|
||||||
|
*/
|
||||||
|
const remapVercelEnv = () => {
|
||||||
|
if (!process.env.VERCEL || !process.env.DEPLOYMENT_TARGET) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.env.POSTGRES_URL) {
|
||||||
|
process.env.NEXT_PRIVATE_DATABASE_URL = process.env.POSTGRES_URL;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.env.POSTGRES_URL_NON_POOLING) {
|
||||||
|
process.env.NEXT_PRIVATE_DIRECT_DATABASE_URL = process.env.POSTGRES_URL_NON_POOLING;
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we're using a connection pool, we need to let Prisma know that
|
||||||
|
// we're using PgBouncer.
|
||||||
|
if (process.env.NEXT_PRIVATE_DATABASE_URL !== process.env.NEXT_PRIVATE_DIRECT_DATABASE_URL) {
|
||||||
|
const url = new URL(process.env.NEXT_PRIVATE_DATABASE_URL);
|
||||||
|
|
||||||
|
url.searchParams.set('pgbouncer', 'true');
|
||||||
|
|
||||||
|
process.env.NEXT_PRIVATE_DATABASE_URL = url.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.env.VERCEL_ENV !== 'production' && process.env.DEPLOYMENT_TARGET === 'webapp') {
|
||||||
|
process.env.NEXTAUTH_URL = `https://${process.env.VERCEL_URL}`;
|
||||||
|
process.env.NEXT_PUBLIC_WEBAPP_URL = `https://${process.env.VERCEL_URL}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.env.VERCEL_ENV !== 'production' && process.env.DEPLOYMENT_TARGET === 'marketing') {
|
||||||
|
process.env.NEXT_PUBLIC_MARKETING_URL = `https://${process.env.VERCEL_URL}`;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
remapVercelEnv,
|
||||||
|
};
|
||||||
107
scripts/vercel.sh
Executable file
107
scripts/vercel.sh
Executable file
@ -0,0 +1,107 @@
#!/usr/bin/env bash

# Exit on error.
set -eo pipefail

# Get the directory of this script, regardless of where it is called from.
SCRIPT_DIR="$(readlink -f "$(dirname "$0")")"

function log() {
  echo "[VercelBuild]: $1"
}

function build_webapp() {
  log "Building webapp for $VERCEL_ENV"

  remap_database_integration

  npm run prisma:generate --workspace=@documenso/prisma
  npm run prisma:migrate-deploy --workspace=@documenso/prisma

  if [[ "$VERCEL_ENV" != "production" ]]; then
    log "Seeding database for $VERCEL_ENV"

    npm run prisma:seed --workspace=@documenso/prisma
  fi

  npm run build -- --filter @documenso/web
}

function remap_webapp_env() {
  if [[ "$VERCEL_ENV" != "production" ]]; then
    log "Remapping webapp environment variables for $VERCEL_ENV"

    export NEXTAUTH_URL="https://$VERCEL_URL"
    export NEXT_PUBLIC_WEBAPP_URL="https://$VERCEL_URL"
  fi
}

function build_marketing() {
  log "Building marketing for $VERCEL_ENV"

  remap_database_integration

  npm run prisma:generate --workspace=@documenso/prisma
  npm run build -- --filter @documenso/marketing
}

function remap_marketing_env() {
  if [[ "$VERCEL_ENV" != "production" ]]; then
    log "Remapping marketing environment variables for $VERCEL_ENV"

    export NEXT_PUBLIC_MARKETING_URL="https://$VERCEL_URL"
  fi
}

function remap_database_integration() {
  log "Remapping Supabase integration for $VERCEL_ENV"

  if [[ ! -z "$POSTGRES_URL" ]]; then
    export NEXT_PRIVATE_DATABASE_URL="$POSTGRES_URL"
    export NEXT_PRIVATE_DIRECT_DATABASE_URL="$POSTGRES_URL"
  fi

  if [[ ! -z "$DATABASE_URL" ]]; then
    export NEXT_PRIVATE_DATABASE_URL="$DATABASE_URL"
    export NEXT_PRIVATE_DIRECT_DATABASE_URL="$DATABASE_URL"
  fi

  if [[ ! -z "$POSTGRES_URL_NON_POOLING" ]]; then
    export NEXT_PRIVATE_DATABASE_URL="$POSTGRES_URL?pgbouncer=true"
    export NEXT_PRIVATE_DIRECT_DATABASE_URL="$POSTGRES_URL_NON_POOLING"
  fi

  if [[ "$NEXT_PRIVATE_DATABASE_URL" == *"neon.tech"* ]]; then
    log "Remapping for Neon integration"

    PROJECT_ID="$(echo "$PGHOST" | cut -d'.' -f1)"
    PGBOUNCER_HOST="$(echo "$PGHOST" | sed "s/${PROJECT_ID}/${PROJECT_ID}-pooler/")"

    export NEXT_PRIVATE_DATABASE_URL="postgres://${PGUSER}:${PGPASSWORD}@${PGBOUNCER_HOST}/${PGDATABASE}?pgbouncer=true"
  fi
}

# Navigate to the root of the project.
cd "$SCRIPT_DIR/.."

# Check if the script is running on Vercel.
if [[ -z "$VERCEL" ]]; then
  log "ERROR - This script must be run as part of the Vercel build process."
  exit 1
fi

case "$DEPLOYMENT_TARGET" in
  "webapp")
    build_webapp
    ;;
  "marketing")
    build_marketing
    ;;
  *)
    log "ERROR - Missing or invalid DEPLOYMENT_TARGET environment variable."
    log "ERROR - DEPLOYMENT_TARGET must be either 'webapp' or 'marketing'."
    exit 1
    ;;
esac
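How this script is wired up is an assumption based on the guards above rather than something stated in the diff: each Vercel project would use ./scripts/vercel.sh as its build command and define DEPLOYMENT_TARGET as either webapp or marketing. The script refuses to run when VERCEL is unset, remaps whichever Postgres integration variables are present (POSTGRES_URL / POSTGRES_URL_NON_POOLING, DATABASE_URL, or a neon.tech host rewritten to its -pooler endpoint) onto NEXT_PRIVATE_DATABASE_URL and NEXT_PRIVATE_DIRECT_DATABASE_URL, runs Prisma generate and migrate-deploy (plus seeding on non-production deployments), and finally builds the matching workspace through Turborepo.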
24
turbo.json
@ -13,22 +13,29 @@
  },
  "globalDependencies": ["**/.env.*local"],
  "globalEnv": [
    "APP_VERSION",
    "E2E_TEST_USERNAME",
    "E2E_TEST_USER_EMAIL",
    "E2E_TEST_USER_PASSWORD",
    "NEXTAUTH_URL",
    "NEXTAUTH_SECRET",
    "NEXT_PUBLIC_APP_URL",
    "NEXT_PUBLIC_WEBAPP_URL",
    "NEXT_PUBLIC_SITE_URL",
    "NEXT_PUBLIC_MARKETING_URL",
    "NEXT_PUBLIC_POSTHOG_KEY",
    "NEXT_PUBLIC_POSTHOG_HOST",
    "NEXT_PUBLIC_FEATURE_BILLING_ENABLED",
    "NEXT_PUBLIC_STRIPE_COMMUNITY_PLAN_YEARLY_PRICE_ID",
    "NEXT_PUBLIC_STRIPE_COMMUNITY_PLAN_MONTHLY_PRICE_ID",
    "NEXT_PRIVATE_DATABASE_URL",
    "NEXT_PRIVATE_NEXT_AUTH_SECRET",
    "NEXT_PRIVATE_DIRECT_DATABASE_URL",
    "NEXT_PRIVATE_GOOGLE_CLIENT_ID",
    "NEXT_PRIVATE_GOOGLE_CLIENT_SECRET",
    "NEXT_PUBLIC_UPLOAD_TRANSPORT",
    "NEXT_PRIVATE_UPLOAD_ENDPOINT",
    "NEXT_PRIVATE_UPLOAD_REGION",
    "NEXT_PRIVATE_UPLOAD_BUCKET",
    "NEXT_PRIVATE_UPLOAD_ACCESS_KEY_ID",
    "NEXT_PRIVATE_UPLOAD_SECRET_ACCESS_KEY",
    "NEXT_PRIVATE_SMTP_TRANSPORT",
    "NEXT_PRIVATE_MAILCHANNELS_API_KEY",
    "NEXT_PRIVATE_MAILCHANNELS_ENDPOINT",
@ -44,6 +51,15 @@
    "NEXT_PRIVATE_SMTP_SECURE",
    "NEXT_PRIVATE_SMTP_FROM_NAME",
    "NEXT_PRIVATE_SMTP_FROM_ADDRESS",
    "NEXT_PRIVATE_STRIPE_API_KEY"
    "NEXT_PRIVATE_STRIPE_API_KEY",

    "VERCEL",
    "VERCEL_ENV",
    "VERCEL_URL",
    "DEPLOYMENT_TARGET",
    "POSTGRES_URL",
    "DATABASE_URL",
    "POSTGRES_PRISMA_URL",
    "POSTGRES_URL_NON_POOLING"
  ]
}
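A note on the globalEnv additions: Turborepo includes every variable listed in globalEnv in its global cache hash, so declaring the new Vercel, Postgres, and upload variables means a change to any of them invalidates cached builds rather than reusing stale output; depending on the Turborepo version and env mode, declaring them is also what makes them visible to tasks at all.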