Merge pull request #290 from nsylke/nsylke-patch-5

feat: create robots.txt and sitemap.xml
Authored by Lucas Smith on 2023-08-23 13:56:46 +10:00, committed by GitHub
2 changed files with 55 additions and 0 deletions

robots.ts (new file; the Next.js Metadata API requires this filename for a robots() route)

@@ -0,0 +1,14 @@
import { MetadataRoute } from 'next';

import { getBaseUrl } from '@documenso/lib/universal/get-base-url';

export default function robots(): MetadataRoute.Robots {
  return {
    rules: {
      userAgent: '*',
      allow: '/*',
      disallow: ['/_next/*'],
    },
    sitemap: `${getBaseUrl()}/sitemap.xml`,
  };
}
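Served through the Next.js Metadata API, this route renders as plain text at /robots.txt. As a sketch of the generated output, assuming getBaseUrl() resolves to https://documenso.com (the real value depends on the deployment environment):

User-Agent: *
Allow: /*
Disallow: /_next/*

Sitemap: https://documenso.com/sitemap.xml

Crawlers may index everything except Next.js build assets under /_next/, and are pointed at the sitemap route added below.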

sitemap.ts (new file; the Next.js Metadata API requires this filename for a sitemap() route)

@@ -0,0 +1,41 @@
import { MetadataRoute } from 'next';

import { allBlogPosts, allGenericPages } from 'contentlayer/generated';

import { getBaseUrl } from '@documenso/lib/universal/get-base-url';

export default function sitemap(): MetadataRoute.Sitemap {
  const baseUrl = getBaseUrl();
  const lastModified = new Date();

  return [
    {
      url: baseUrl,
      lastModified,
    },
    ...allGenericPages.map((doc) => ({
      url: `${baseUrl}/${doc._raw.flattenedPath}`,
      lastModified,
    })),
    {
      url: `${baseUrl}/blog`,
      lastModified,
    },
    ...allBlogPosts.map((doc) => ({
      url: `${baseUrl}/${doc._raw.flattenedPath}`,
      lastModified,
    })),
    {
      url: `${baseUrl}/open`,
      lastModified,
    },
    {
      url: `${baseUrl}/oss-friends`,
      lastModified,
    },
    {
      url: `${baseUrl}/pricing`,
      lastModified,
    },
  ];
}
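Next.js serializes this array into standard sitemap XML served at /sitemap.xml. A rough sketch of the output, again assuming https://documenso.com as the base URL (the timestamp shown is illustrative; lastModified is new Date(), so each entry carries the time the route was generated):

<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
  <url>
    <loc>https://documenso.com</loc>
    <lastmod>2023-08-23T03:56:46.000Z</lastmod>
  </url>
  <!-- ...one <url> entry per page listed above... -->
</urlset>

One consequence of reusing a single new Date() for every entry: the sitemap reports generation time rather than each page's actual last change, which is simple but gives crawlers no real freshness signal per page.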