diff --git a/docs/content/3.api/1.config.md b/docs/content/3.api/1.config.md
index 058f11da..32232adc 100644
--- a/docs/content/3.api/1.config.md
+++ b/docs/content/3.api/1.config.md
@@ -80,12 +80,6 @@ The value to use when the page is indexable.
 
 The value to use when the page is not indexable.
 
-## `disallowNonIndexableRoutes: boolean`{lang="ts"}
-
-- Default: `'false'`{lang="ts"}
-
-Should route rules which disallow indexing be added to the `/robots.txt` file.
-
 ## `mergeWithRobotsTxtPath: boolean | string`{lang="ts"}
 
 - Default: `true`{lang="ts"}
@@ -167,3 +161,11 @@ export default defineNuxtConfig({
   }
 })
 ```
+
+## `disallowNonIndexableRoutes: boolean`{lang="ts"}
+
+**⚠️ Deprecated**: Explicitly add paths to your robots.txt with the `allow` and `disallow` options instead.
+
+- Default: `false`{lang="ts"}
+
+Should route rules which disallow indexing be added to the `/robots.txt` file.
diff --git a/src/module.ts b/src/module.ts
index f5d8f32d..7b754ef5 100644
--- a/src/module.ts
+++ b/src/module.ts
@@ -93,9 +93,11 @@ export interface ModuleOptions {
    */
   robotsDisabledValue: string
   /**
+   * @deprecated Explicitly add paths to your robots.txt with the `allow` and `disallow` options.
+   *
    * Should route rules which disallow indexing be added to the `/robots.txt` file.
    *
-   * @default true
+   * @default false
    */
   disallowNonIndexableRoutes: boolean
   /**
@@ -191,7 +193,7 @@ export default defineNuxtModule({
     cacheControl: 'max-age=14400, must-revalidate',
     robotsEnabledValue: 'index, follow, max-image-preview:large, max-snippet:-1, max-video-preview:-1',
     robotsDisabledValue: 'noindex, nofollow',
-    disallowNonIndexableRoutes: true,
+    disallowNonIndexableRoutes: false,
     robotsTxt: true,
   },
   async setup(config, nuxt) {
diff --git a/src/runtime/server/composables/getPathRobotConfig.ts b/src/runtime/server/composables/getPathRobotConfig.ts
index 08a691c7..d4fa27cf 100644
--- a/src/runtime/server/composables/getPathRobotConfig.ts
+++ b/src/runtime/server/composables/getPathRobotConfig.ts
@@ -82,7 +82,7 @@ export function getPathRobotConfig(e: H3Event, options?: { userAgent?: string, s
   // 3. nitro route rules
   nitroApp._robotsRuleMactcher = nitroApp._robotsRuleMactcher || createNitroRouteRuleMatcher()
   const routeRules = normaliseRobotsRouteRule(nitroApp._robotsRuleMactcher(path))
-  if (routeRules && (routeRules.allow || routeRules.rule)) {
+  if (routeRules && (typeof routeRules.allow !== 'undefined' || typeof routeRules.rule !== 'undefined')) {
     return {
       indexable: routeRules.allow,
       rule: routeRules.rule || (routeRules.allow ? robotsEnabledValue : robotsDisabledValue),
diff --git a/test/default.test.ts b/test/default.test.ts
index fa6b7f13..e7fb067f 100644
--- a/test/default.test.ts
+++ b/test/default.test.ts
@@ -24,8 +24,6 @@ describe('default', () => {
       Disallow: /users/*/hidden
       Disallow: /?a=
       Disallow: /visible?*a=
-      Disallow: /*/account
-      Disallow: /sub/*
 
       Sitemap: https://nuxtseo.com/sitemap.xml
       # END nuxt-robots"
diff --git a/test/groups.test.ts b/test/groups.test.ts
index e075dd0b..2d0c9c73 100644
--- a/test/groups.test.ts
+++ b/test/groups.test.ts
@@ -56,8 +56,6 @@ describe('stack', () => {
       Disallow: /users/*/hidden
       Disallow: /?a=
       Disallow: /visible?*a=
-      Disallow: /*/account
-      Disallow: /sub/*
 
       Sitemap: https://nuxtseo.com/sitemap.xml
       # END nuxt-robots"
diff --git a/test/mergeWithRobotsTxtPath.test.ts b/test/mergeWithRobotsTxtPath.test.ts
index cc55d1a7..b263522f 100644
--- a/test/mergeWithRobotsTxtPath.test.ts
+++ b/test/mergeWithRobotsTxtPath.test.ts
@@ -25,8 +25,6 @@ describe('mergeWithRobotsTxtPath', () => {
       Allow: /secret/exception
       Disallow: /secret
       Disallow: /admin
-      Disallow: /*/account
-      Disallow: /sub/*
 
       Sitemap: https://nuxtseo.com/sitemap.xml
       # END nuxt-robots"
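
Migration note: with `disallowNonIndexableRoutes` deprecated and now defaulting to `false`, route rules that block indexing no longer emit `Disallow` entries into `/robots.txt`, which is why the test snapshots above drop `/*/account` and `/sub/*`. The widened `typeof ... !== 'undefined'` check in `getPathRobotConfig` also keeps `allow: false` route rules honored, where the old truthiness test let them fall through. Below is a minimal sketch of the explicit replacement config, assuming the module is registered as `@nuxtjs/robots` and reusing the removed snapshot paths purely as illustrative values:

```ts
// nuxt.config.ts — migration sketch, not part of this diff.
// Paths are illustrative, taken from the snapshot lines the tests no longer expect.
export default defineNuxtConfig({
  modules: ['@nuxtjs/robots'],
  robots: {
    // Previously derived automatically from non-indexable route rules while
    // `disallowNonIndexableRoutes: true` was the default; now listed explicitly.
    disallow: ['/*/account', '/sub/*'],
  },
})
```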