Weighs the soul of incoming HTTP requests to stop AI crawlers. The package.json below configures the project's Docusaurus-based documentation site.
{
  "name": "docs",
  "version": "0.0.0",
  "private": true,
  "scripts": {
    "docusaurus": "docusaurus",
    "start": "docusaurus start --host 0.0.0.0",
    "build": "docusaurus build",
    "swizzle": "docusaurus swizzle",
    "deploy": "echo 'use CI' && exit 1",
    "clear": "docusaurus clear",
    "serve": "docusaurus serve",
    "write-translations": "docusaurus write-translations",
    "write-heading-ids": "docusaurus write-heading-ids",
    "typecheck": "tsc"
  },
  "dependencies": {
    "@docusaurus/core": "^3.8.1",
    "@docusaurus/preset-classic": "^3.8.1",
    "@docusaurus/theme-mermaid": "^3.8.1",
    "@mdx-js/react": "^3.0.0",
    "clsx": "^2.0.0",
    "prism-react-renderer": "^2.3.0",
    "react": "^19.0.0",
    "react-dom": "^19.0.0"
  },
  "devDependencies": {
    "@docusaurus/module-type-aliases": "^3.8.1",
    "@docusaurus/tsconfig": "^3.8.1",
    "@docusaurus/types": "^3.8.1",
    "typescript": "~5.6.2"
  },
  "browserslist": {
    "production": [
      ">0.5%",
      "not dead",
      "not op_mini all"
    ],
    "development": [
      "last 3 chrome version",
      "last 3 firefox version",
      "last 5 safari version"
    ]
  },
  "engines": {
    "node": ">=18.0"
  }
}
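
With Node 18 or newer installed (per the engines field), a typical local workflow driven by the scripts above might look like this; the exact package manager is an assumption, but npm works with any package.json:

    npm install                      # fetch the Docusaurus dependencies
    npm run start                    # dev server, bound to 0.0.0.0 for container/VM access
    npm run typecheck                # run tsc against the site's TypeScript
    npm run build && npm run serve   # production build, then preview it locally

Note that the deploy script deliberately prints 'use CI' and exits nonzero, so publishing is expected to happen through the project's CI pipeline rather than from a developer machine.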