Skip to content

Commit

Permalink
fix!: drop all deprecations (#167)
Browse files — browse the repository at this point in the history
  • Loading branch information
harlan-zw authored Nov 24, 2024
1 parent aa82756 commit f499cb4
Show file tree
Hide file tree
Showing 13 changed files with 8 additions and 131 deletions.
2 changes: 1 addition & 1 deletion docs/content/2.guides/2.route-rules.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ You can provide the following rules:

The rules are applied using the following logic:
- `X-Robots-Tag` header - SSR only,
- `<meta name="robots">`{lang="html"} - When using the `defineRobotMeta` or `RobotMeta` composable or component
- `<meta name="robots">`{lang="html"}
- `/robots.txt` disallow entry - When [disallowNonIndexableRoutes](/docs/robots/api/config#robotsdisabledvalue) is enabled

## Inline Route Rules
Expand Down
72 changes: 2 additions & 70 deletions src/module.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import type { Arrayable, AutoI18nConfig, Robots3Rules, RobotsGroupInput, RobotsGroupResolved } from './runtime/types'
import type { Arrayable, AutoI18nConfig, RobotsGroupInput, RobotsGroupResolved } from './runtime/types'
import fsp from 'node:fs/promises'
import {
addComponent,
addImports,
addPlugin,
addServerHandler,
Expand Down Expand Up @@ -80,10 +79,6 @@ export interface ModuleOptions {
* ]
*/
groups: RobotsGroupInput[]
/**
* @deprecated backwards compatibility with Nuxt Robots v3
*/
rules?: Robots3Rules | Robots3Rules[]
/**
* The value to use when the site is indexable.
*
Expand Down Expand Up @@ -205,10 +200,7 @@ export default defineNuxtModule<ModuleOptions>({
if (config.enabled === false) {
logger.debug('The module is disabled, skipping setup.')
// need to mock the composables to allow module still to work when disabled
;['defineRobotMeta', 'useRobotsRule']
.forEach((name) => {
addImports({ name, from: resolve(`./runtime/app/composables/mock`) })
})
addImports({ name: 'useRobotsRule', from: resolve(`./runtime/app/composables/mock`) })
nuxt.options.nitro = nuxt.options.nitro || {}
nuxt.options.nitro.imports = nuxt.options.nitro.imports || {}
nuxt.options.nitro.imports.presets = nuxt.options.nitro.imports.presets || []
Expand All @@ -227,43 +219,6 @@ export default defineNuxtModule<ModuleOptions>({
config.robotsTxt = false
}

// TODO remove with v5
if (config.rules) {
// warn v3 usage and convert to v4
logger.warn('The `rules` option is deprecated, please use the `groups` option instead.')
if (!config.groups?.length) {
const group: RobotsGroupInput = {}
const keyMap: Robots3Rules = {
UserAgent: 'userAgent',
Disallow: 'disallow',
Allow: 'allow',
} as const
const rules = asArray(config.rules)
for (const k in rules) {
// need to map all keys within the rules
const rule = rules[k]
for (const k2 in rule) {
const key = (keyMap[k2 as keyof Robots3Rules] || k2) as (keyof RobotsGroupInput | 'Sitemap')
if (key === 'Sitemap') {
config.sitemap = asArray(config.sitemap)
config.sitemap.push(rule[k2])
}
else if (keyMap[k2 as keyof Robots3Rules]) {
if (group[key]) {
// @ts-expect-error untyped
group[key] = asArray(group[key])
group[key].push(rule[k2])
}
else {
group[key] = rule[k2]
}
}
}
}
config.groups.push(group)
}
}

const resolvedAutoI18n = typeof config.autoI18n === 'boolean' ? false : (config.autoI18n || await resolveI18nConfig())

if (config.blockNonSeoBots) {
Expand Down Expand Up @@ -471,8 +426,6 @@ export default defineNuxtModule<ModuleOptions>({
// @ts-expect-error untyped
cacheControl: config.cacheControl,
}
// TODO deprecated, backwards compatiblity
nuxt.options.runtimeConfig['nuxt-simple-robots'] = nuxt.options.runtimeConfig['nuxt-robots']
})

extendTypes('nuxt-robots', ({ typesPath }) => {
Expand All @@ -486,20 +439,12 @@ declare module 'nitropack' {
_robotsRuleMactcher: (url: string) => string
}
interface NitroRouteRules {
/**
* @deprecated Use \`robots: <boolean>\` instead.
*/
index?: boolean
robots?: boolean | string | {
indexable: boolean
rule: string
}
}
interface NitroRouteConfig {
/**
* @deprecated Use \`robots: <boolean>\` instead.
*/
index?: boolean
robots?: boolean | string | {
indexable: boolean
rule: string
Expand Down Expand Up @@ -531,24 +476,11 @@ declare module 'h3' {
logger.info('Firebase does not support dynamic robots.txt files. Prerendering /robots.txt.')
}

// defineRobotMeta is a server-only composable
nuxt.options.optimization.treeShake.composables.client['nuxt-robots'] = ['defineRobotMeta']

addImports({
name: 'defineRobotMeta',
from: resolve('./runtime/app/composables/defineRobotMeta'),
})

addImports({
name: 'useRobotsRule',
from: resolve('./runtime/app/composables/useRobotsRule'),
})

addComponent({
name: 'RobotMeta',
filePath: resolve('./runtime/app/components/RobotMeta'),
})

if (config.robotsTxt) {
// add robots.txt server handler
addServerHandler({
Expand Down
13 changes: 0 additions & 13 deletions src/runtime/app/components/RobotMeta.ts

This file was deleted.

16 changes: 0 additions & 16 deletions src/runtime/app/composables/defineRobotMeta.ts

This file was deleted.

3 changes: 0 additions & 3 deletions src/runtime/app/composables/mock.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
import type { MaybeRef } from 'vue'
import { ref } from 'vue'

// eslint-disable-next-line unused-imports/no-unused-vars
export function defineRobotMeta(component?: boolean) {}

// eslint-disable-next-line unused-imports/no-unused-vars
export function useRobotsRule(rule?: MaybeRef<boolean | string>) {
return ref('')
Expand Down
2 changes: 1 addition & 1 deletion src/runtime/server/routes/__robots__/nuxt-content.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ export default defineEventHandler(async (e) => {
if (c._draft || c._extension !== 'md' || c._partial)
return false
if (c.path) {
if (String(c.robots) === 'false' || String(c.indexable) === 'false' || String(c.index) === 'false')
if (String(c.robots) === 'false')
return c.path
}
return false
Expand Down
13 changes: 0 additions & 13 deletions src/runtime/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,19 +10,6 @@ export interface ParsedRobotsTxt {

export type RobotsGroupInput = GoogleInput | YandexInput

export interface Robots3Rules {
UserAgent?: string
BlankLine?: true
Comment?: string
Disallow?: string
Allow?: string
Host?: string
Sitemap?: string
// yandex only
CleanParam?: string
CrawlDelay?: string
}

// google is the base input
export interface GoogleInput {
comment?: Arrayable<string>
Expand Down
2 changes: 0 additions & 2 deletions src/runtime/util.ts
Original file line number Diff line number Diff line change
Expand Up @@ -287,8 +287,6 @@ export function normaliseRobotsRouteRule(config: NitroRouteConfig) {
allow = config.robots
else if (typeof config.robots === 'object' && typeof config.robots.indexable !== 'undefined')
allow = config.robots.indexable
else if (typeof config.index !== 'undefined')
allow = config.index
// parse rule
let rule: string | undefined
if (typeof config.robots === 'object' && typeof config.robots.rule !== 'undefined')
Expand Down
6 changes: 0 additions & 6 deletions test/fixtures/basic/pages/hidden-route-rules.vue
Original file line number Diff line number Diff line change
@@ -1,9 +1,3 @@
<script lang="ts" setup>
import { defineRobotMeta } from '#imports'
defineRobotMeta()
</script>

<template>
<div>hello world</div>
</template>
1 change: 0 additions & 1 deletion test/manualNoIndexing.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@ describe('manualNoIndexing', () => {
it('basic', async () => {
const robotsTxt = await $fetch('/robots.txt')
// the site.url should be appended
// site.indexable should be honoured
expect(robotsTxt).toMatchInlineSnapshot(`
"# START nuxt-robots (indexing disabled)
User-agent: *
Expand Down
4 changes: 2 additions & 2 deletions test/routeRules.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ await setup({
},
routeRules: {
'/index-rule/*': {
index: false,
robots: false,
},
'/robots-rule/*': {
robots: 'noindex',
Expand All @@ -28,7 +28,7 @@ await setup({
robots: 'index, follow',
},
'/excluded/*': {
index: false,
robots: false,
},
},
},
Expand Down
4 changes: 2 additions & 2 deletions test/routeRulesTrailingSlash.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,10 @@ await setup({
},
routeRules: {
'/hidden-route-rules': {
index: false,
robots: false,
},
'/hidden-route-rules/': {
index: false,
robots: false,
},
},
},
Expand Down
1 change: 0 additions & 1 deletion test/siteConfigLegacy.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@ await setup({
describe('siteConfig', () => {
it('basic', async () => {
const robotsTxt = await $fetch('/robots.txt')
// site.indexable should be honoured
expect(robotsTxt.includes('(indexable)')).toBe(true)
})
})

0 comments on commit f499cb4

Please sign in to comment.