// Barazo default frontend — barazo.forum
1/**
2 * Tests for robots.txt configuration.
3 * @see specs/prd-web.md Section 5 (robots.txt)
4 */
5
6import { describe, it, expect } from 'vitest'
7import robots from './robots'
8
9describe('robots.txt', () => {
10 function getRules() {
11 const result = robots()
12 return Array.isArray(result.rules) ? result.rules : [result.rules]
13 }
14
15 it('allows general crawlers on public pages', () => {
16 const rules = getRules()
17 expect(rules[0]).toMatchObject({
18 userAgent: '*',
19 allow: '/',
20 })
21 })
22
23 it('disallows admin, auth, API, and non-public pages', () => {
24 const rules = getRules()
25 expect(rules[0]!.disallow).toEqual(
26 expect.arrayContaining([
27 '/admin/',
28 '/auth/',
29 '/api/',
30 '/search',
31 '/settings',
32 '/notifications',
33 ])
34 )
35 })
36
37 it('blocks SEO bots', () => {
38 const rules = getRules()
39 expect(rules[1]!.userAgent).toEqual(
40 expect.arrayContaining(['SemrushBot', 'AhrefsBot', 'MJ12bot'])
41 )
42 expect(rules[1]!.disallow).toBe('/')
43 })
44
45 it('blocks AI crawlers', () => {
46 const rules = getRules()
47 expect(rules[2]!.userAgent).toEqual(
48 expect.arrayContaining(['GPTBot', 'ClaudeBot', 'CCBot', 'Google-Extended'])
49 )
50 expect(rules[2]!.disallow).toBe('/')
51 })
52
53 it('includes sitemap directive', () => {
54 const result = robots()
55 expect(result.sitemap).toMatch(/\/sitemap\.xml$/)
56 })
57})