diff --git a/.changeset/long-days-fetch.md b/.changeset/long-days-fetch.md
new file mode 100644
index 0000000..a53988c
--- /dev/null
+++ b/.changeset/long-days-fetch.md
@@ -0,0 +1,5 @@
+---
+"create-better-t-stack": minor
+---
+
+Added GitHub SEO add-on (generates robots.txt and sitemap scaffolding for the client package)
diff --git a/apps/cli/src/helpers/addons-setup.ts b/apps/cli/src/helpers/addons-setup.ts
index 39eadac..300c39c 100644
--- a/apps/cli/src/helpers/addons-setup.ts
+++ b/apps/cli/src/helpers/addons-setup.ts
@@ -14,11 +14,12 @@ export async function setupAddons(projectDir: string, addons: ProjectAddons[]) {
}
if (addons.includes("SEO")) {
- log.info(
- pc.yellow(
- "SEO feature is still a work-in-progress and will be available in a future update.",
- ),
- );
+ // log.info(
+ // pc.yellow(
+ // "SEO feature is still a work-in-progress and will be available in a future update.",
+ // ),
+ // );
+ await setupSEO(projectDir);
}
}
@@ -180,3 +181,105 @@ jobs:
deployWorkflowContent,
);
}
+
+async function setupSEO(projectDir: string) {
+ const robotsContent = `# Instructions: Customize this file to control how search engines crawl your site
+# Learn more: https://developers.google.com/search/docs/advanced/robots/create-robots-txt
+
+# Allow all crawlers (default)
+User-agent: *
+Allow: /
+
+# Disallow crawling of specific directories (uncomment and customize as needed)
+# Disallow: /admin/
+# Disallow: /private/
+
+# Specify the location of your sitemap
+Sitemap: https://yourdomain.com/sitemap.xml
+`;
+
+ await fs.writeFile(
+ path.join(projectDir, "packages", "client", "robots.txt"),
+ robotsContent,
+ );
+
+ const sitemapContent = `
+