Skip to content

Commit f6d3fa8

Browse files
Authored commit: Merge pull request #137 from techulus/develop
Add sitemap to robots.txt
2 parents 85031ef + f0f6e07 commit f6d3fa8

1 file changed

Lines changed: 8 additions & 3 deletions

File tree

apps/page/pages/api/robots.ts

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,12 @@
11
import type { NextApiRequest, NextApiResponse } from "next";
22
import { fetchRenderData, translateHostToPageIdentifier } from "../../lib/data";
33

4-
const ALLOW = `User-agent: *
4+
const getAllowRobots = (hostname: string) => `User-agent: *
55
Allow: /
66
Disallow: /api/
7-
Disallow: /_next/`;
7+
Disallow: /_next/
8+
9+
Sitemap: https://${hostname}/sitemap.xml`;
810

911
const DISALLOW = `User-agent: *
1012
Disallow: /`;
@@ -16,6 +18,7 @@ async function handler(
1618
const hostname = String(req?.headers?.host);
1719

1820
const { domain, page: url_slug } = translateHostToPageIdentifier(hostname);
21+
const pageUrl = domain ?? `${url_slug}.changes.page`;
1922

2023
res.setHeader("Content-Type", "text/plain; charset=UTF-8");
2124

@@ -27,7 +30,9 @@ async function handler(
2730
if (!page) throw new Error("Page not found");
2831
if (!settings) throw new Error("Settings not found");
2932

30-
res.status(200).send(settings?.hide_search_engine ? DISALLOW : ALLOW);
33+
res
34+
.status(200)
35+
.send(settings?.hide_search_engine ? DISALLOW : getAllowRobots(pageUrl));
3136
} catch (e: unknown) {
3237
console.log("robots.txt [Error]", e);
3338
res.status(200).send(DISALLOW);

0 commit comments

Comments (0)