From aaec2733d88d4f82e18acbd8eebc1a0e2f8e2cdf Mon Sep 17 00:00:00 2001
From: Daniel Lockyer
Date: Wed, 20 Mar 2024 14:29:34 +0100
Subject: [PATCH] Added comments count endpoint to robots.txt disallow list

fix https://linear.app/tryghost/issue/ENG-771/add-comments-count-endpoint-to-robotstxt-ignorelist

- we've seen web scrapers hitting this endpoint a lot, but it provides minimal SEO value
- adding it to robots.txt should encourage web scrapers to ignore it, so we should see less traffic as a result
---
 ghost/core/core/frontend/public/robots.txt          | 1 +
 ghost/core/test/e2e-frontend/default_routes.test.js | 1 +
 2 files changed, 2 insertions(+)

diff --git a/ghost/core/core/frontend/public/robots.txt b/ghost/core/core/frontend/public/robots.txt
index 8ad5b3f97bcf..fe721db892a6 100644
--- a/ghost/core/core/frontend/public/robots.txt
+++ b/ghost/core/core/frontend/public/robots.txt
@@ -2,5 +2,6 @@ User-agent: *
 Sitemap: {{blog-url}}/sitemap.xml
 Disallow: /ghost/
 Disallow: /email/
+Disallow: /members/api/comments/counts/
 Disallow: /r/
 Disallow: /webmentions/receive/
diff --git a/ghost/core/test/e2e-frontend/default_routes.test.js b/ghost/core/test/e2e-frontend/default_routes.test.js
index 567cf1349460..d8bfdc161b11 100644
--- a/ghost/core/test/e2e-frontend/default_routes.test.js
+++ b/ghost/core/test/e2e-frontend/default_routes.test.js
@@ -323,6 +323,7 @@ describe('Default Frontend routing', function () {
                 'User-agent: *\n' +
                 'Sitemap: http://127.0.0.1:2369/sitemap.xml\nDisallow: /ghost/\n' +
                 'Disallow: /email/\n' +
+                'Disallow: /members/api/comments/counts/\n' +
                 'Disallow: /r/\n' +
                 'Disallow: /webmentions/receive/\n'
             );