+5
deno.lock
+5
deno.lock
···
1
1
{
2
2
"version": "5",
3
3
"specifiers": {
4
+
"npm:@darkvisitors/sdk@^1.6.0": "1.6.0",
4
5
"npm:@jsr/std__toml@1.0.11": "1.0.11",
5
6
"npm:@neodrag/svelte@^2.3.3": "2.3.3_svelte@5.46.0__acorn@8.15.0",
6
7
"npm:@rowanmanning/feed-parser@^2.1.1": "2.1.1",
···
131
132
},
132
133
"@badrap/valita@0.4.6": {
133
134
"integrity": "sha512-4kdqcjyxo/8RQ8ayjms47HCWZIF5981oE5nIenbfThKDxWXtEHKipAOWlflpPJzZx9y/JWYQkp18Awr7VuepFg=="
135
+
},
136
+
"@darkvisitors/sdk@1.6.0": {
137
+
"integrity": "sha512-KfAO7Dzg/EGZGNRUVpjK8dBzOe9xQI2bXF9aq8JcEk8BiOIAn+e4Vf+ceVMhOBB8PkuLvRBnJwfADAYXNL0iFg=="
134
138
},
135
139
"@esbuild/aix-ppc64@0.27.2": {
136
140
"integrity": "sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==",
···
2327
2331
"eunomia": {
2328
2332
"packageJson": {
2329
2333
"dependencies": [
2334
+
"npm:@darkvisitors/sdk@^1.6.0",
2330
2335
"npm:@jsr/std__toml@1.0.11",
2331
2336
"npm:@neodrag/svelte@^2.3.3",
2332
2337
"npm:@rowanmanning/feed-parser@^2.1.1",
+1
eunomia/package.json
+1
eunomia/package.json
+4
eunomia/src/lib/darkvisitors.ts
+4
eunomia/src/lib/darkvisitors.ts
+37
-47
eunomia/src/lib/robots.ts
+37
-47
eunomia/src/lib/robots.ts
···
1
-
import { env } from '$env/dynamic/private'
2
-
import { get, writable } from 'svelte/store'
3
-
import { type Robot } from 'robots-parser'
4
-
import robotsParser from 'robots-parser'
5
-
import { PUBLIC_BASE_URL } from '$env/static/public'
1
+
import { env } from '$env/dynamic/private';
2
+
import { get, writable } from 'svelte/store';
3
+
import { type Robot } from 'robots-parser';
4
+
import robotsParser from 'robots-parser';
5
+
import { PUBLIC_BASE_URL } from '$env/static/public';
6
+
import { darkVisitors } from './darkvisitors';
7
+
import { AgentType } from '@darkvisitors/sdk';
6
8
7
-
// Module-level caches shared by getRobotsTxt()/testUa():
// - cachedParsedRobots: parsed robots.txt (null until first fetch/parse)
// - cachedRobots: raw robots.txt body ('' until first fetch)
// - lastFetched: epoch ms of the last successful fetch, used for TTL checks
const cachedParsedRobots = writable<Robot | null>(null);
const cachedRobots = writable<string>('');
const lastFetched = writable<number>(Date.now());
10
12
11
13
const fetchRobotsTxt = async () => {
12
-
const robotsTxtResp = await fetch(
13
-
"https://api.darkvisitors.com/robots-txts",
14
-
{
15
-
method: "POST",
16
-
headers: {
17
-
"Authorization": `Bearer ${env.DARK_VISITORS_TOKEN}`,
18
-
"Content-Type": "application/json"
19
-
},
20
-
body: JSON.stringify({
21
-
agent_types: [
22
-
"AI Assistant",
23
-
"AI Data Scraper",
24
-
"AI Search Crawler",
25
-
"Undocumented AI Agent",
26
-
],
27
-
disallow: "/"
28
-
})
29
-
}
30
-
)
31
-
const robotsTxt = await robotsTxtResp.text()
32
-
lastFetched.set(Date.now())
33
-
return robotsTxt
34
-
}
14
+
const robotsTxt = await darkVisitors.generateRobotsTxt([
15
+
AgentType.AIAgent,
16
+
AgentType.AIAssistant,
17
+
AgentType.AIDataScraper,
18
+
AgentType.AISearchCrawler,
19
+
AgentType.UndocumentedAIAgent,
20
+
AgentType.SEOCrawler
21
+
]);
22
+
lastFetched.set(Date.now());
23
+
return robotsTxt;
24
+
};
35
25
36
26
export const getRobotsTxt = async () => {
37
-
let robotsTxt = get(cachedRobots)
38
-
if (robotsTxt.length === 0 || Date.now() - get(lastFetched) > 1000 * 60 * 60 * 24) {
39
-
robotsTxt = await fetchRobotsTxt()
40
-
cachedRobots.set(robotsTxt)
41
-
cachedParsedRobots.set(robotsParser(`${PUBLIC_BASE_URL}/robots.txt`, robotsTxt))
42
-
}
43
-
return robotsTxt
44
-
}
27
+
let robotsTxt = get(cachedRobots);
28
+
if (robotsTxt.length === 0 || Date.now() - get(lastFetched) > 1000 * 60 * 60 * 24) {
29
+
robotsTxt = await fetchRobotsTxt();
30
+
cachedRobots.set(robotsTxt);
31
+
cachedParsedRobots.set(robotsParser(`${PUBLIC_BASE_URL}/robots.txt`, robotsTxt));
32
+
}
33
+
return robotsTxt;
34
+
};
45
35
46
36
export const testUa = async (url: string, ua: string) => {
47
-
if (ua.length === 0) return false
48
-
let parsedRobots = get(cachedParsedRobots)
49
-
if (parsedRobots === null) {
50
-
parsedRobots = robotsParser(`${PUBLIC_BASE_URL}/robots.txt`, await getRobotsTxt())
51
-
cachedParsedRobots.set(parsedRobots)
52
-
}
53
-
return parsedRobots.isAllowed(url, ua)
54
-
}
37
+
if (ua.length === 0) return false;
38
+
let parsedRobots = get(cachedParsedRobots);
39
+
if (parsedRobots === null) {
40
+
parsedRobots = robotsParser(`${PUBLIC_BASE_URL}/robots.txt`, await getRobotsTxt());
41
+
cachedParsedRobots.set(parsedRobots);
42
+
}
43
+
return parsedRobots.isAllowed(url, ua);
44
+
};
+11
-21
eunomia/src/lib/visits.ts
+11
-21
eunomia/src/lib/visits.ts
···
1
1
import { env } from '$env/dynamic/private';
2
2
import { scopeCookies } from '$lib';
3
+
import { DarkVisitors } from '@darkvisitors/sdk';
3
4
import type { Cookies } from '@sveltejs/kit';
4
5
import { nanoid } from 'nanoid';
5
6
import { get, writable } from 'svelte/store';
7
+
import { darkVisitors } from './darkvisitors';
6
8
7
9
// Path of the persisted visit-counter file inside the configured data dir.
const visitCountFile = `${env.WEBSITE_DATA_DIR}/visitcount`;
8
10
const readVisitCount = async () => {
···
115
117
};
116
118
117
119
export const notifyDarkVisitors = (url: URL, request: Request) => {
118
-
fetch('https://api.darkvisitors.com/visits', {
119
-
method: 'POST',
120
-
headers: {
121
-
authorization: `Bearer ${env.DARK_VISITORS_TOKEN}`,
122
-
'content-type': 'application/json'
123
-
},
124
-
body: JSON.stringify({
125
-
request_path: url.pathname,
126
-
request_method: request.method,
127
-
request_headers: request.headers
128
-
})
129
-
})
130
-
.catch((why) => {
131
-
console.log('failed sending dark visitors analytics:', why);
132
-
return null;
133
-
})
134
-
.then((resp) => {
135
-
if (resp !== null && resp.status !== 401 && resp.status !== 400) {
136
-
const host = `(${request.headers.get('host')}|${request.headers.get('x-real-ip')}|${request.headers.get('user-agent')})`;
137
-
console.log(`sent visitor analytic to dark visitors: ${resp.statusText}; ${host}`);
138
-
}
120
+
const headers = Object.fromEntries(request.headers.entries());
121
+
try {
122
+
darkVisitors.trackVisit({
123
+
path: url.pathname,
124
+
method: request.method,
125
+
headers: headers
139
126
});
127
+
} catch (error) {
128
+
console.error('failed to notify dark visitors:', error);
129
+
}
140
130
};