diff --git a/docs/content/2.guides/2.route-rules.md b/docs/content/2.guides/2.route-rules.md
index 8d73eee8..7ef1df13 100644
--- a/docs/content/2.guides/2.route-rules.md
+++ b/docs/content/2.guides/2.route-rules.md
@@ -12,7 +12,7 @@ You can provide the following rules:
The rules are applied using the following logic:
- `X-Robots-Tag` header - SSR only,
- `<meta name="robots">`{lang="html"} - When using the `defineRobotMeta` or `RobotMeta` composable or component
+- `<meta name="robots">`{lang="html"}
- `/robots.txt` disallow entry - When [disallowNonIndexableRoutes](/docs/robots/api/config#robotsdisabledvalue) is enabled
## Inline Route Rules
diff --git a/src/module.ts b/src/module.ts
index 719b1a72..9ce060a3 100644
--- a/src/module.ts
+++ b/src/module.ts
@@ -1,7 +1,6 @@
-import type { Arrayable, AutoI18nConfig, Robots3Rules, RobotsGroupInput, RobotsGroupResolved } from './runtime/types'
+import type { Arrayable, AutoI18nConfig, RobotsGroupInput, RobotsGroupResolved } from './runtime/types'
import fsp from 'node:fs/promises'
import {
- addComponent,
addImports,
addPlugin,
addServerHandler,
@@ -80,10 +79,6 @@ export interface ModuleOptions {
* ]
*/
groups: RobotsGroupInput[]
- /**
- * @deprecated backwards compatibility with Nuxt Robots v3
- */
- rules?: Robots3Rules | Robots3Rules[]
/**
* The value to use when the site is indexable.
*
@@ -205,10 +200,7 @@ export default defineNuxtModule({
if (config.enabled === false) {
logger.debug('The module is disabled, skipping setup.')
// need to mock the composables to allow module still to work when disabled
- ;['defineRobotMeta', 'useRobotsRule']
- .forEach((name) => {
- addImports({ name, from: resolve(`./runtime/app/composables/mock`) })
- })
+ addImports({ name: 'useRobotsRule', from: resolve(`./runtime/app/composables/mock`) })
nuxt.options.nitro = nuxt.options.nitro || {}
nuxt.options.nitro.imports = nuxt.options.nitro.imports || {}
nuxt.options.nitro.imports.presets = nuxt.options.nitro.imports.presets || []
@@ -227,43 +219,6 @@ export default defineNuxtModule({
config.robotsTxt = false
}
- // TODO remove with v5
- if (config.rules) {
- // warn v3 usage and convert to v4
- logger.warn('The `rules` option is deprecated, please use the `groups` option instead.')
- if (!config.groups?.length) {
- const group: RobotsGroupInput = {}
- const keyMap: Robots3Rules = {
- UserAgent: 'userAgent',
- Disallow: 'disallow',
- Allow: 'allow',
- } as const
- const rules = asArray(config.rules)
- for (const k in rules) {
- // need to map all keys within the rules
- const rule = rules[k]
- for (const k2 in rule) {
- const key = (keyMap[k2 as keyof Robots3Rules] || k2) as (keyof RobotsGroupInput | 'Sitemap')
- if (key === 'Sitemap') {
- config.sitemap = asArray(config.sitemap)
- config.sitemap.push(rule[k2])
- }
- else if (keyMap[k2 as keyof Robots3Rules]) {
- if (group[key]) {
- // @ts-expect-error untyped
- group[key] = asArray(group[key])
- group[key].push(rule[k2])
- }
- else {
- group[key] = rule[k2]
- }
- }
- }
- }
- config.groups.push(group)
- }
- }
-
const resolvedAutoI18n = typeof config.autoI18n === 'boolean' ? false : (config.autoI18n || await resolveI18nConfig())
if (config.blockNonSeoBots) {
@@ -471,8 +426,6 @@ export default defineNuxtModule({
// @ts-expect-error untyped
cacheControl: config.cacheControl,
}
- // TODO deprecated, backwards compatiblity
- nuxt.options.runtimeConfig['nuxt-simple-robots'] = nuxt.options.runtimeConfig['nuxt-robots']
})
extendTypes('nuxt-robots', ({ typesPath }) => {
@@ -486,20 +439,12 @@ declare module 'nitropack' {
_robotsRuleMactcher: (url: string) => string
}
interface NitroRouteRules {
- /**
- * @deprecated Use \`robots: \` instead.
- */
- index?: boolean
robots?: boolean | string | {
indexable: boolean
rule: string
}
}
interface NitroRouteConfig {
- /**
- * @deprecated Use \`robots: \` instead.
- */
- index?: boolean
robots?: boolean | string | {
indexable: boolean
rule: string
@@ -531,24 +476,11 @@ declare module 'h3' {
logger.info('Firebase does not support dynamic robots.txt files. Prerendering /robots.txt.')
}
- // defineRobotMeta is a server-only composable
- nuxt.options.optimization.treeShake.composables.client['nuxt-robots'] = ['defineRobotMeta']
-
- addImports({
- name: 'defineRobotMeta',
- from: resolve('./runtime/app/composables/defineRobotMeta'),
- })
-
addImports({
name: 'useRobotsRule',
from: resolve('./runtime/app/composables/useRobotsRule'),
})
- addComponent({
- name: 'RobotMeta',
- filePath: resolve('./runtime/app/components/RobotMeta'),
- })
-
if (config.robotsTxt) {
// add robots.txt server handler
addServerHandler({
diff --git a/src/runtime/app/components/RobotMeta.ts b/src/runtime/app/components/RobotMeta.ts
deleted file mode 100644
index 84cd0b27..00000000
--- a/src/runtime/app/components/RobotMeta.ts
+++ /dev/null
@@ -1,13 +0,0 @@
-import { defineComponent } from 'vue'
-import { defineRobotMeta } from '../composables/defineRobotMeta'
-
-/**
- * @deprecated The robots meta tag is now enabled by default.
- */
-export default defineComponent({
- name: 'RobotMeta',
- setup() {
- defineRobotMeta(true)
- return () => null
- },
-})
diff --git a/src/runtime/app/composables/defineRobotMeta.ts b/src/runtime/app/composables/defineRobotMeta.ts
deleted file mode 100644
index 30de363f..00000000
--- a/src/runtime/app/composables/defineRobotMeta.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-import { consola } from 'consola'
-import { basename, dirname, join } from 'pathe'
-import { getCurrentInstance } from 'vue'
-
-/**
- * @deprecated The robots meta tag is now enabled by default.
- */
-export function defineRobotMeta(component?: boolean) {
- // handle deprecation
- const vm = getCurrentInstance()
- if (vm) {
- const src = (component ? vm?.parent?.type?.__file : vm?.type?.__file) || ''
- const filePath = join(dirname(src).split('/').pop() || '', basename(src))
- consola.warn(`[Nuxt Robots] The \`<meta name="robots">\` tag is now enabled by default. \`${component ? '<RobotMeta />' : 'defineRobotMeta()'}\` is deprecated, you should remove it from \`${filePath}\`.`)
- }
-}
diff --git a/src/runtime/app/composables/mock.ts b/src/runtime/app/composables/mock.ts
index 024731dc..53dabeb5 100644
--- a/src/runtime/app/composables/mock.ts
+++ b/src/runtime/app/composables/mock.ts
@@ -1,9 +1,6 @@
import type { MaybeRef } from 'vue'
import { ref } from 'vue'
-// eslint-disable-next-line unused-imports/no-unused-vars
-export function defineRobotMeta(component?: boolean) {}
-
// eslint-disable-next-line unused-imports/no-unused-vars
export function useRobotsRule(rule?: MaybeRef) {
return ref('')
diff --git a/src/runtime/server/routes/__robots__/nuxt-content.ts b/src/runtime/server/routes/__robots__/nuxt-content.ts
index 29201819..706fe6ed 100644
--- a/src/runtime/server/routes/__robots__/nuxt-content.ts
+++ b/src/runtime/server/routes/__robots__/nuxt-content.ts
@@ -17,7 +17,7 @@ export default defineEventHandler(async (e) => {
if (c._draft || c._extension !== 'md' || c._partial)
return false
if (c.path) {
- if (String(c.robots) === 'false' || String(c.indexable) === 'false' || String(c.index) === 'false')
+ if (String(c.robots) === 'false')
return c.path
}
return false
diff --git a/src/runtime/types.ts b/src/runtime/types.ts
index 77f23ce7..b729885d 100644
--- a/src/runtime/types.ts
+++ b/src/runtime/types.ts
@@ -10,19 +10,6 @@ export interface ParsedRobotsTxt {
export type RobotsGroupInput = GoogleInput | YandexInput
-export interface Robots3Rules {
- UserAgent?: string
- BlankLine?: true
- Comment?: string
- Disallow?: string
- Allow?: string
- Host?: string
- Sitemap?: string
- // yandex only
- CleanParam?: string
- CrawlDelay?: string
-}
-
// google is the base input
export interface GoogleInput {
comment?: Arrayable
diff --git a/src/runtime/util.ts b/src/runtime/util.ts
index 9a7bac3c..6821d7f1 100644
--- a/src/runtime/util.ts
+++ b/src/runtime/util.ts
@@ -287,8 +287,6 @@ export function normaliseRobotsRouteRule(config: NitroRouteConfig) {
allow = config.robots
else if (typeof config.robots === 'object' && typeof config.robots.indexable !== 'undefined')
allow = config.robots.indexable
- else if (typeof config.index !== 'undefined')
- allow = config.index
// parse rule
let rule: string | undefined
if (typeof config.robots === 'object' && typeof config.robots.rule !== 'undefined')
diff --git a/test/fixtures/basic/pages/hidden-route-rules.vue b/test/fixtures/basic/pages/hidden-route-rules.vue
index 00e1005e..bc429ea9 100644
--- a/test/fixtures/basic/pages/hidden-route-rules.vue
+++ b/test/fixtures/basic/pages/hidden-route-rules.vue
@@ -1,9 +1,3 @@
-
-
hello world
diff --git a/test/manualNoIndexing.test.ts b/test/manualNoIndexing.test.ts
index e10cd438..8056ad7a 100644
--- a/test/manualNoIndexing.test.ts
+++ b/test/manualNoIndexing.test.ts
@@ -25,7 +25,6 @@ describe('manualNoIndexing', () => {
it('basic', async () => {
const robotsTxt = await $fetch('/robots.txt')
// the site.url should be appended
- // site.indexable should be honoured
expect(robotsTxt).toMatchInlineSnapshot(`
"# START nuxt-robots (indexing disabled)
User-agent: *
diff --git a/test/routeRules.test.ts b/test/routeRules.test.ts
index 28ea6277..eb6c699d 100644
--- a/test/routeRules.test.ts
+++ b/test/routeRules.test.ts
@@ -14,7 +14,7 @@ await setup({
},
routeRules: {
'/index-rule/*': {
- index: false,
+ robots: false,
},
'/robots-rule/*': {
robots: 'noindex',
@@ -28,7 +28,7 @@ await setup({
robots: 'index, follow',
},
'/excluded/*': {
- index: false,
+ robots: false,
},
},
},
diff --git a/test/routeRulesTrailingSlash.test.ts b/test/routeRulesTrailingSlash.test.ts
index 48b83c24..1651c6bc 100644
--- a/test/routeRulesTrailingSlash.test.ts
+++ b/test/routeRulesTrailingSlash.test.ts
@@ -12,10 +12,10 @@ await setup({
},
routeRules: {
'/hidden-route-rules': {
- index: false,
+ robots: false,
},
'/hidden-route-rules/': {
- index: false,
+ robots: false,
},
},
},
diff --git a/test/siteConfigLegacy.test.ts b/test/siteConfigLegacy.test.ts
index f9f7015d..d0d6ad9e 100644
--- a/test/siteConfigLegacy.test.ts
+++ b/test/siteConfigLegacy.test.ts
@@ -24,7 +24,6 @@ await setup({
describe('siteConfig', () => {
it('basic', async () => {
const robotsTxt = await $fetch('/robots.txt')
- // site.indexable should be honoured
expect(robotsTxt.includes('(indexable)')).toBe(true)
})
})