- {#if p.repository}
+ {#if sanitizeUrl(p.repository)}
   <a href={sanitizeUrl(p.repository)}>repo</a>
{/if}
- {#if p.homepage}
+ {#if sanitizeUrl(p.homepage)}
   <a href={sanitizeUrl(p.homepage)}>homepage</a>
diff --git a/src/routes/robots.txt/+server.ts b/src/routes/robots.txt/+server.ts
new file mode 100644
index 0000000..8d82476
--- /dev/null
+++ b/src/routes/robots.txt/+server.ts
@@ -0,0 +1,19 @@
+import type { RequestHandler } from "@sveltejs/kit";
+
+export const GET: RequestHandler = async ({ url }) => {
+ const origin = url.origin;
+ const lines = [
+ "# allow crawling everything by default",
+ "User-agent: *",
+ "Disallow:",
+ "",
+ `Sitemap: ${origin}/sitemap.xml`
+ ];
+ const body = lines.join("\n") + "\n";
+ return new Response(body, {
+ headers: {
+ "Content-Type": "text/plain; charset=utf-8",
+ "Cache-Control": "public, max-age=86400"
+ }
+ });
+};
diff --git a/src/routes/sitemap.xml/+server.ts b/src/routes/sitemap.xml/+server.ts
new file mode 100644
index 0000000..e96cbf8
--- /dev/null
+++ b/src/routes/sitemap.xml/+server.ts
@@ -0,0 +1,49 @@
+import type { RequestHandler } from "@sveltejs/kit";
+
+// Compute a stable lastmod at module load (approximates deploy time)
+const BUILD_LASTMOD = new Date().toISOString();
+
+const pages = [
+ {
+ path: "/",
+ changefreq: "yearly",
+ priority: 1.0
+ },
+ {
+ path: "/licenses",
+ changefreq: "yearly",
+ priority: 0.6
+ }
+] as const;
+
+function xmlEscape(s: string): string {
+  return s
+    .replace(/&/g, "&amp;")
+    .replace(/</g, "&lt;")
+    .replace(/>/g, "&gt;")
+    .replace(/"/g, "&quot;")
+    .replace(/'/g, "&apos;");
+}
+
+export const GET: RequestHandler = async ({ url }) => {
+ const origin = url.origin;
+ const lastmod = BUILD_LASTMOD;
+  const body =
+    `<?xml version="1.0" encoding="UTF-8"?>\n` +
+    `<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">` +
+    pages
+      .map((p) => {
+        const loc = xmlEscape(`${origin}${p.path}`);
+        return `\n  <url>\n    <loc>${loc}</loc>\n    <lastmod>${lastmod}</lastmod>\n    <changefreq>${p.changefreq}</changefreq>\n    <priority>${p.priority.toFixed(1)}</priority>\n  </url>`;
+      })
+      .join("") +
+    `\n</urlset>\n`;
+
+ return new Response(body, {
+ headers: {
+ "Content-Type": "application/xml; charset=utf-8",
+ // Encourage caching for a day; adjust if content changes more frequently
+ "Cache-Control": "public, max-age=86400"
+ }
+ });
+};
diff --git a/static/robots.txt b/static/robots.txt
deleted file mode 100644
index b6dd667..0000000
--- a/static/robots.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-# allow crawling everything by default
-User-agent: *
-Disallow: