+6
hosting-service/src/index.ts
+6
hosting-service/src/index.ts
···
4
4
import { logger } from './lib/observability';
5
5
import { mkdirSync, existsSync } from 'fs';
6
6
import { backfillCache } from './lib/backfill';
7
+
import { startDomainCacheCleanup, stopDomainCacheCleanup } from './lib/db';
7
8
8
9
const PORT = process.env.PORT ? parseInt(process.env.PORT) : 3001;
9
10
const CACHE_DIR = process.env.CACHE_DIR || './cache/sites';
···
18
19
mkdirSync(CACHE_DIR, { recursive: true });
19
20
console.log('Created cache directory:', CACHE_DIR);
20
21
}
22
+
23
+
// Start domain cache cleanup
24
+
startDomainCacheCleanup();
21
25
22
26
// Start firehose worker with observability logger
23
27
const firehose = new FirehoseWorker((msg, data) => {
···
67
71
// Graceful shutdown on Ctrl-C: stop the firehose worker and the domain-cache
// sweep timer, close the HTTP server, then exit.
// NOTE(review): process.exit(0) runs immediately after server.close(), so
// in-flight responses are not drained — confirm this is intended.
process.on('SIGINT', async () => {
  console.log('\n🛑 Shutting down...');
  firehose.stop();
  stopDomainCacheCleanup();
  server.close();
  process.exit(0);
});
···
74
79
// Graceful shutdown on SIGTERM (e.g. container stop): mirrors the SIGINT path —
// stop background workers, close the HTTP server, then exit.
// NOTE(review): process.exit(0) runs immediately after server.close(), so
// in-flight responses are not drained — confirm this is intended.
process.on('SIGTERM', async () => {
  console.log('\n🛑 Shutting down...');
  firehose.stop();
  stopDomainCacheCleanup();
  server.close();
  process.exit(0);
});
+177
hosting-service/src/lib/cache.ts
+177
hosting-service/src/lib/cache.ts
···
1
+
// In-memory LRU cache for file contents and metadata
2
+
3
+
interface CacheEntry<T> {
4
+
value: T;
5
+
size: number;
6
+
timestamp: number;
7
+
}
8
+
9
+
interface CacheStats {
10
+
hits: number;
11
+
misses: number;
12
+
evictions: number;
13
+
currentSize: number;
14
+
currentCount: number;
15
+
}
16
+
17
+
export class LRUCache<T> {
18
+
private cache: Map<string, CacheEntry<T>>;
19
+
private maxSize: number;
20
+
private maxCount: number;
21
+
private currentSize: number;
22
+
private stats: CacheStats;
23
+
24
+
constructor(maxSize: number, maxCount: number) {
25
+
this.cache = new Map();
26
+
this.maxSize = maxSize;
27
+
this.maxCount = maxCount;
28
+
this.currentSize = 0;
29
+
this.stats = {
30
+
hits: 0,
31
+
misses: 0,
32
+
evictions: 0,
33
+
currentSize: 0,
34
+
currentCount: 0,
35
+
};
36
+
}
37
+
38
+
get(key: string): T | null {
39
+
const entry = this.cache.get(key);
40
+
if (!entry) {
41
+
this.stats.misses++;
42
+
return null;
43
+
}
44
+
45
+
// Move to end (most recently used)
46
+
this.cache.delete(key);
47
+
this.cache.set(key, entry);
48
+
49
+
this.stats.hits++;
50
+
return entry.value;
51
+
}
52
+
53
+
set(key: string, value: T, size: number): void {
54
+
// Remove existing entry if present
55
+
if (this.cache.has(key)) {
56
+
const existing = this.cache.get(key)!;
57
+
this.currentSize -= existing.size;
58
+
this.cache.delete(key);
59
+
}
60
+
61
+
// Evict entries if needed
62
+
while (
63
+
(this.cache.size >= this.maxCount || this.currentSize + size > this.maxSize) &&
64
+
this.cache.size > 0
65
+
) {
66
+
const firstKey = this.cache.keys().next().value;
67
+
if (!firstKey) break; // Should never happen, but satisfy TypeScript
68
+
const firstEntry = this.cache.get(firstKey);
69
+
if (!firstEntry) break; // Should never happen, but satisfy TypeScript
70
+
this.cache.delete(firstKey);
71
+
this.currentSize -= firstEntry.size;
72
+
this.stats.evictions++;
73
+
}
74
+
75
+
// Add new entry
76
+
this.cache.set(key, {
77
+
value,
78
+
size,
79
+
timestamp: Date.now(),
80
+
});
81
+
this.currentSize += size;
82
+
83
+
// Update stats
84
+
this.stats.currentSize = this.currentSize;
85
+
this.stats.currentCount = this.cache.size;
86
+
}
87
+
88
+
delete(key: string): boolean {
89
+
const entry = this.cache.get(key);
90
+
if (!entry) return false;
91
+
92
+
this.cache.delete(key);
93
+
this.currentSize -= entry.size;
94
+
this.stats.currentSize = this.currentSize;
95
+
this.stats.currentCount = this.cache.size;
96
+
return true;
97
+
}
98
+
99
+
// Invalidate all entries for a specific site
100
+
invalidateSite(did: string, rkey: string): number {
101
+
const prefix = `${did}:${rkey}:`;
102
+
let count = 0;
103
+
104
+
for (const key of Array.from(this.cache.keys())) {
105
+
if (key.startsWith(prefix)) {
106
+
this.delete(key);
107
+
count++;
108
+
}
109
+
}
110
+
111
+
return count;
112
+
}
113
+
114
+
// Get cache size
115
+
size(): number {
116
+
return this.cache.size;
117
+
}
118
+
119
+
clear(): void {
120
+
this.cache.clear();
121
+
this.currentSize = 0;
122
+
this.stats.currentSize = 0;
123
+
this.stats.currentCount = 0;
124
+
}
125
+
126
+
getStats(): CacheStats {
127
+
return { ...this.stats };
128
+
}
129
+
130
+
// Get cache hit rate
131
+
getHitRate(): number {
132
+
const total = this.stats.hits + this.stats.misses;
133
+
return total === 0 ? 0 : (this.stats.hits / total) * 100;
134
+
}
135
+
}
136
+
137
+
// File metadata cache entry
138
+
export interface FileMetadata {
139
+
encoding?: 'gzip';
140
+
mimeType: string;
141
+
}
142
+
143
+
// Global cache instances
144
+
const FILE_CACHE_SIZE = 100 * 1024 * 1024; // 100MB
145
+
const FILE_CACHE_COUNT = 500;
146
+
const METADATA_CACHE_COUNT = 2000;
147
+
148
+
export const fileCache = new LRUCache<Buffer>(FILE_CACHE_SIZE, FILE_CACHE_COUNT);
149
+
export const metadataCache = new LRUCache<FileMetadata>(1024 * 1024, METADATA_CACHE_COUNT); // 1MB for metadata
150
+
export const rewrittenHtmlCache = new LRUCache<Buffer>(50 * 1024 * 1024, 200); // 50MB for rewritten HTML
151
+
152
+
// Helper to generate cache keys
153
+
export function getCacheKey(did: string, rkey: string, filePath: string, suffix?: string): string {
154
+
const base = `${did}:${rkey}:${filePath}`;
155
+
return suffix ? `${base}:${suffix}` : base;
156
+
}
157
+
158
+
// Invalidate all caches for a site
159
+
export function invalidateSiteCache(did: string, rkey: string): void {
160
+
const fileCount = fileCache.invalidateSite(did, rkey);
161
+
const metaCount = metadataCache.invalidateSite(did, rkey);
162
+
const htmlCount = rewrittenHtmlCache.invalidateSite(did, rkey);
163
+
164
+
console.log(`[Cache] Invalidated site ${did}:${rkey} - ${fileCount} files, ${metaCount} metadata, ${htmlCount} HTML`);
165
+
}
166
+
167
+
// Get overall cache statistics
168
+
export function getCacheStats() {
169
+
return {
170
+
files: fileCache.getStats(),
171
+
fileHitRate: fileCache.getHitRate(),
172
+
metadata: metadataCache.getStats(),
173
+
metadataHitRate: metadataCache.getHitRate(),
174
+
rewrittenHtml: rewrittenHtmlCache.getStats(),
175
+
rewrittenHtmlHitRate: rewrittenHtmlCache.getHitRate(),
176
+
};
177
+
}
+69
hosting-service/src/lib/db.ts
+69
hosting-service/src/lib/db.ts
···
9
9
}
10
10
);
11
11
12
+
// Domain lookup cache with TTL
13
+
const DOMAIN_CACHE_TTL = 5 * 60 * 1000; // 5 minutes
14
+
15
+
interface CachedDomain<T> {
16
+
value: T;
17
+
timestamp: number;
18
+
}
19
+
20
+
const domainCache = new Map<string, CachedDomain<DomainLookup | null>>();
21
+
const customDomainCache = new Map<string, CachedDomain<CustomDomainLookup | null>>();
22
+
23
+
let cleanupInterval: NodeJS.Timeout | null = null;
24
+
25
+
export function startDomainCacheCleanup() {
26
+
if (cleanupInterval) return;
27
+
28
+
cleanupInterval = setInterval(() => {
29
+
const now = Date.now();
30
+
31
+
for (const [key, entry] of domainCache.entries()) {
32
+
if (now - entry.timestamp > DOMAIN_CACHE_TTL) {
33
+
domainCache.delete(key);
34
+
}
35
+
}
36
+
37
+
for (const [key, entry] of customDomainCache.entries()) {
38
+
if (now - entry.timestamp > DOMAIN_CACHE_TTL) {
39
+
customDomainCache.delete(key);
40
+
}
41
+
}
42
+
}, 30 * 60 * 1000); // Run every 30 minutes
43
+
}
44
+
45
+
export function stopDomainCacheCleanup() {
46
+
if (cleanupInterval) {
47
+
clearInterval(cleanupInterval);
48
+
cleanupInterval = null;
49
+
}
50
+
}
51
+
12
52
export interface DomainLookup {
13
53
did: string;
14
54
rkey: string | null;
···
27
67
export async function getWispDomain(domain: string): Promise<DomainLookup | null> {
28
68
const key = domain.toLowerCase();
29
69
70
+
// Check cache first
71
+
const cached = domainCache.get(key);
72
+
if (cached && Date.now() - cached.timestamp < DOMAIN_CACHE_TTL) {
73
+
return cached.value;
74
+
}
75
+
30
76
// Query database
31
77
const result = await sql<DomainLookup[]>`
32
78
SELECT did, rkey FROM domains WHERE domain = ${key} LIMIT 1
33
79
`;
34
80
const data = result[0] || null;
35
81
82
+
// Cache the result
83
+
domainCache.set(key, { value: data, timestamp: Date.now() });
84
+
36
85
return data;
37
86
}
38
87
39
88
export async function getCustomDomain(domain: string): Promise<CustomDomainLookup | null> {
40
89
const key = domain.toLowerCase();
41
90
91
+
// Check cache first
92
+
const cached = customDomainCache.get(key);
93
+
if (cached && Date.now() - cached.timestamp < DOMAIN_CACHE_TTL) {
94
+
return cached.value;
95
+
}
96
+
42
97
// Query database
43
98
const result = await sql<CustomDomainLookup[]>`
44
99
SELECT id, domain, did, rkey, verified FROM custom_domains
···
46
101
`;
47
102
const data = result[0] || null;
48
103
104
+
// Cache the result
105
+
customDomainCache.set(key, { value: data, timestamp: Date.now() });
106
+
49
107
return data;
50
108
}
51
109
52
110
export async function getCustomDomainByHash(hash: string): Promise<CustomDomainLookup | null> {
111
+
const key = `hash:${hash}`;
112
+
113
+
// Check cache first
114
+
const cached = customDomainCache.get(key);
115
+
if (cached && Date.now() - cached.timestamp < DOMAIN_CACHE_TTL) {
116
+
return cached.value;
117
+
}
118
+
53
119
// Query database
54
120
const result = await sql<CustomDomainLookup[]>`
55
121
SELECT id, domain, did, rkey, verified FROM custom_domains
56
122
WHERE id = ${hash} AND verified = true LIMIT 1
57
123
`;
58
124
const data = result[0] || null;
125
+
126
+
// Cache the result
127
+
customDomainCache.set(key, { value: data, timestamp: Date.now() });
59
128
60
129
return data;
61
130
}
+8
-1
hosting-service/src/lib/firehose.ts
+8
-1
hosting-service/src/lib/firehose.ts
···
10
10
import { isRecord, validateRecord } from '../lexicon/types/place/wisp/fs'
11
11
import { Firehose } from '@atproto/sync'
12
12
import { IdResolver } from '@atproto/identity'
13
+
import { invalidateSiteCache } from './cache'
13
14
14
15
const CACHE_DIR = './cache/sites'
15
16
···
182
183
return
183
184
}
184
185
186
+
// Invalidate in-memory caches before updating
187
+
invalidateSiteCache(did, site)
188
+
185
189
// Cache the record with verified CID (uses atomic swap internally)
186
190
// All instances cache locally for edge serving
187
191
await downloadAndCacheSite(
···
257
261
})
258
262
}
259
263
260
-
// Delete cache
264
+
// Invalidate in-memory caches
265
+
invalidateSiteCache(did, site)
266
+
267
+
// Delete disk cache
261
268
this.deleteCache(did, site)
262
269
263
270
this.log('Successfully processed delete', { did, site })
+185
-126
hosting-service/src/server.ts
+185
-126
hosting-service/src/server.ts
···
2
2
import { getWispDomain, getCustomDomain, getCustomDomainByHash } from './lib/db';
3
3
import { resolveDid, getPdsForDid, fetchSiteRecord, downloadAndCacheSite, getCachedFilePath, isCached, sanitizePath, shouldCompressMimeType } from './lib/utils';
4
4
import { rewriteHtmlPaths, isHtmlContent } from './lib/html-rewriter';
5
-
import { existsSync, readFileSync } from 'fs';
5
+
import { existsSync } from 'fs';
6
+
import { readFile, access } from 'fs/promises';
6
7
import { lookup } from 'mime-types';
7
8
import { logger, observabilityMiddleware, observabilityErrorHandler, logCollector, errorTracker, metricsCollector } from './lib/observability';
9
+
import { fileCache, metadataCache, rewrittenHtmlCache, getCacheKey, type FileMetadata } from './lib/cache';
8
10
9
11
const BASE_HOST = process.env.BASE_HOST || 'wisp.place';
10
12
···
21
23
return validRkeyPattern.test(rkey);
22
24
}
23
25
26
+
/**
27
+
* Async file existence check
28
+
*/
29
+
async function fileExists(path: string): Promise<boolean> {
30
+
try {
31
+
await access(path);
32
+
return true;
33
+
} catch {
34
+
return false;
35
+
}
36
+
}
37
+
24
38
// Helper to serve files from cache
25
39
async function serveFromCache(did: string, rkey: string, filePath: string) {
26
40
// Default to index.html if path is empty or ends with /
···
29
43
requestPath += 'index.html';
30
44
}
31
45
46
+
const cacheKey = getCacheKey(did, rkey, requestPath);
32
47
const cachedFile = getCachedFilePath(did, rkey, requestPath);
33
48
34
-
if (existsSync(cachedFile)) {
35
-
const content = readFileSync(cachedFile);
49
+
// Check in-memory cache first
50
+
let content = fileCache.get(cacheKey);
51
+
let meta = metadataCache.get(cacheKey);
52
+
53
+
if (!content && await fileExists(cachedFile)) {
54
+
// Read from disk and cache
55
+
content = await readFile(cachedFile);
56
+
fileCache.set(cacheKey, content, content.length);
57
+
36
58
const metaFile = `${cachedFile}.meta`;
59
+
if (await fileExists(metaFile)) {
60
+
const metaJson = await readFile(metaFile, 'utf-8');
61
+
meta = JSON.parse(metaJson);
62
+
metadataCache.set(cacheKey, meta!, JSON.stringify(meta).length);
63
+
}
64
+
}
37
65
38
-
console.log(`[DEBUG SERVE] ${requestPath}: file size=${content.length} bytes, metaFile exists=${existsSync(metaFile)}`);
66
+
if (content) {
67
+
// Build headers with caching
68
+
const headers: Record<string, string> = {};
39
69
40
-
// Check if file has compression metadata
41
-
if (existsSync(metaFile)) {
42
-
const meta = JSON.parse(readFileSync(metaFile, 'utf-8'));
43
-
console.log(`[DEBUG SERVE] ${requestPath}: meta=${JSON.stringify(meta)}`);
44
-
45
-
// Check actual content for gzip magic bytes
46
-
if (content.length >= 2) {
47
-
const hasGzipMagic = content[0] === 0x1f && content[1] === 0x8b;
48
-
console.log(`[DEBUG SERVE] ${requestPath}: has gzip magic bytes=${hasGzipMagic}`);
49
-
}
50
-
51
-
if (meta.encoding === 'gzip' && meta.mimeType) {
52
-
// Use shared function to determine if this should be served compressed
53
-
const shouldServeCompressed = shouldCompressMimeType(meta.mimeType);
54
-
55
-
if (!shouldServeCompressed) {
56
-
// This shouldn't happen if caching is working correctly, but handle it gracefully
57
-
console.log(`[DEBUG SERVE] ${requestPath}: decompressing file that shouldn't be compressed (${meta.mimeType})`);
58
-
const { gunzipSync } = await import('zlib');
59
-
const decompressed = gunzipSync(content);
60
-
console.log(`[DEBUG SERVE] ${requestPath}: decompressed from ${content.length} to ${decompressed.length} bytes`);
61
-
return new Response(decompressed, {
62
-
headers: {
63
-
'Content-Type': meta.mimeType,
64
-
},
65
-
});
66
-
}
67
-
68
-
// Serve gzipped content with proper headers (for HTML, CSS, JS, etc.)
69
-
console.log(`[DEBUG SERVE] ${requestPath}: serving as gzipped with Content-Encoding header`);
70
-
return new Response(content, {
71
-
headers: {
72
-
'Content-Type': meta.mimeType,
73
-
'Content-Encoding': 'gzip',
74
-
},
75
-
});
70
+
if (meta && meta.encoding === 'gzip' && meta.mimeType) {
71
+
const shouldServeCompressed = shouldCompressMimeType(meta.mimeType);
72
+
73
+
if (!shouldServeCompressed) {
74
+
const { gunzipSync } = await import('zlib');
75
+
const decompressed = gunzipSync(content);
76
+
headers['Content-Type'] = meta.mimeType;
77
+
headers['Cache-Control'] = 'public, max-age=31536000, immutable';
78
+
return new Response(decompressed, { headers });
76
79
}
80
+
81
+
headers['Content-Type'] = meta.mimeType;
82
+
headers['Content-Encoding'] = 'gzip';
83
+
headers['Cache-Control'] = meta.mimeType.startsWith('text/html')
84
+
? 'public, max-age=300'
85
+
: 'public, max-age=31536000, immutable';
86
+
return new Response(content, { headers });
77
87
}
78
88
79
-
// Serve non-compressed files normally
89
+
// Non-compressed files
80
90
const mimeType = lookup(cachedFile) || 'application/octet-stream';
81
-
return new Response(content, {
82
-
headers: {
83
-
'Content-Type': mimeType,
84
-
},
85
-
});
91
+
headers['Content-Type'] = mimeType;
92
+
headers['Cache-Control'] = mimeType.startsWith('text/html')
93
+
? 'public, max-age=300'
94
+
: 'public, max-age=31536000, immutable';
95
+
return new Response(content, { headers });
86
96
}
87
97
88
98
// Try index.html for directory-like paths
89
99
if (!requestPath.includes('.')) {
90
-
const indexFile = getCachedFilePath(did, rkey, `${requestPath}/index.html`);
91
-
if (existsSync(indexFile)) {
92
-
const content = readFileSync(indexFile);
93
-
const metaFile = `${indexFile}.meta`;
100
+
const indexPath = `${requestPath}/index.html`;
101
+
const indexCacheKey = getCacheKey(did, rkey, indexPath);
102
+
const indexFile = getCachedFilePath(did, rkey, indexPath);
94
103
95
-
// Check if file has compression metadata
96
-
if (existsSync(metaFile)) {
97
-
const meta = JSON.parse(readFileSync(metaFile, 'utf-8'));
98
-
if (meta.encoding === 'gzip' && meta.mimeType) {
99
-
return new Response(content, {
100
-
headers: {
101
-
'Content-Type': meta.mimeType,
102
-
'Content-Encoding': 'gzip',
103
-
},
104
-
});
105
-
}
104
+
let indexContent = fileCache.get(indexCacheKey);
105
+
let indexMeta = metadataCache.get(indexCacheKey);
106
+
107
+
if (!indexContent && await fileExists(indexFile)) {
108
+
indexContent = await readFile(indexFile);
109
+
fileCache.set(indexCacheKey, indexContent, indexContent.length);
110
+
111
+
const indexMetaFile = `${indexFile}.meta`;
112
+
if (await fileExists(indexMetaFile)) {
113
+
const metaJson = await readFile(indexMetaFile, 'utf-8');
114
+
indexMeta = JSON.parse(metaJson);
115
+
metadataCache.set(indexCacheKey, indexMeta!, JSON.stringify(indexMeta).length);
116
+
}
117
+
}
118
+
119
+
if (indexContent) {
120
+
const headers: Record<string, string> = {
121
+
'Content-Type': 'text/html; charset=utf-8',
122
+
'Cache-Control': 'public, max-age=300',
123
+
};
124
+
125
+
if (indexMeta && indexMeta.encoding === 'gzip') {
126
+
headers['Content-Encoding'] = 'gzip';
106
127
}
107
128
108
-
return new Response(content, {
109
-
headers: {
110
-
'Content-Type': 'text/html; charset=utf-8',
111
-
},
112
-
});
129
+
return new Response(indexContent, { headers });
113
130
}
114
131
}
115
132
···
129
146
requestPath += 'index.html';
130
147
}
131
148
149
+
const cacheKey = getCacheKey(did, rkey, requestPath);
132
150
const cachedFile = getCachedFilePath(did, rkey, requestPath);
133
151
134
-
if (existsSync(cachedFile)) {
152
+
// Check for rewritten HTML in cache first (if it's HTML)
153
+
const mimeTypeGuess = lookup(requestPath) || 'application/octet-stream';
154
+
if (isHtmlContent(requestPath, mimeTypeGuess)) {
155
+
const rewrittenKey = getCacheKey(did, rkey, requestPath, `rewritten:${basePath}`);
156
+
const rewrittenContent = rewrittenHtmlCache.get(rewrittenKey);
157
+
if (rewrittenContent) {
158
+
return new Response(rewrittenContent, {
159
+
headers: {
160
+
'Content-Type': 'text/html; charset=utf-8',
161
+
'Content-Encoding': 'gzip',
162
+
'Cache-Control': 'public, max-age=300',
163
+
},
164
+
});
165
+
}
166
+
}
167
+
168
+
// Check in-memory file cache
169
+
let content = fileCache.get(cacheKey);
170
+
let meta = metadataCache.get(cacheKey);
171
+
172
+
if (!content && await fileExists(cachedFile)) {
173
+
// Read from disk and cache
174
+
content = await readFile(cachedFile);
175
+
fileCache.set(cacheKey, content, content.length);
176
+
135
177
const metaFile = `${cachedFile}.meta`;
136
-
let mimeType = lookup(cachedFile) || 'application/octet-stream';
137
-
let isGzipped = false;
178
+
if (await fileExists(metaFile)) {
179
+
const metaJson = await readFile(metaFile, 'utf-8');
180
+
meta = JSON.parse(metaJson);
181
+
metadataCache.set(cacheKey, meta!, JSON.stringify(meta).length);
182
+
}
183
+
}
138
184
139
-
// Check if file has compression metadata
140
-
if (existsSync(metaFile)) {
141
-
const meta = JSON.parse(readFileSync(metaFile, 'utf-8'));
142
-
if (meta.encoding === 'gzip' && meta.mimeType) {
143
-
mimeType = meta.mimeType;
144
-
isGzipped = true;
145
-
}
146
-
}
185
+
if (content) {
186
+
const mimeType = meta?.mimeType || lookup(cachedFile) || 'application/octet-stream';
187
+
const isGzipped = meta?.encoding === 'gzip';
147
188
148
189
// Check if this is HTML content that needs rewriting
149
-
// We decompress, rewrite paths, then recompress for efficient delivery
150
190
if (isHtmlContent(requestPath, mimeType)) {
151
-
let content: string;
191
+
let htmlContent: string;
152
192
if (isGzipped) {
153
193
const { gunzipSync } = await import('zlib');
154
-
const compressed = readFileSync(cachedFile);
155
-
content = gunzipSync(compressed).toString('utf-8');
194
+
htmlContent = gunzipSync(content).toString('utf-8');
156
195
} else {
157
-
content = readFileSync(cachedFile, 'utf-8');
196
+
htmlContent = content.toString('utf-8');
158
197
}
159
-
const rewritten = rewriteHtmlPaths(content, basePath, requestPath);
160
-
161
-
// Recompress the HTML for efficient delivery
198
+
const rewritten = rewriteHtmlPaths(htmlContent, basePath, requestPath);
199
+
200
+
// Recompress and cache the rewritten HTML
162
201
const { gzipSync } = await import('zlib');
163
202
const recompressed = gzipSync(Buffer.from(rewritten, 'utf-8'));
164
-
203
+
204
+
const rewrittenKey = getCacheKey(did, rkey, requestPath, `rewritten:${basePath}`);
205
+
rewrittenHtmlCache.set(rewrittenKey, recompressed, recompressed.length);
206
+
165
207
return new Response(recompressed, {
166
208
headers: {
167
209
'Content-Type': 'text/html; charset=utf-8',
168
210
'Content-Encoding': 'gzip',
211
+
'Cache-Control': 'public, max-age=300',
169
212
},
170
213
});
171
214
}
172
215
173
-
// Non-HTML files: serve gzipped content as-is with proper headers
174
-
const content = readFileSync(cachedFile);
216
+
// Non-HTML files: serve as-is
217
+
const headers: Record<string, string> = {
218
+
'Content-Type': mimeType,
219
+
'Cache-Control': 'public, max-age=31536000, immutable',
220
+
};
221
+
175
222
if (isGzipped) {
176
-
// Use shared function to determine if this should be served compressed
177
223
const shouldServeCompressed = shouldCompressMimeType(mimeType);
178
-
179
224
if (!shouldServeCompressed) {
180
-
// This shouldn't happen if caching is working correctly, but handle it gracefully
181
225
const { gunzipSync } = await import('zlib');
182
226
const decompressed = gunzipSync(content);
183
-
return new Response(decompressed, {
184
-
headers: {
185
-
'Content-Type': mimeType,
186
-
},
187
-
});
227
+
return new Response(decompressed, { headers });
188
228
}
189
-
190
-
return new Response(content, {
229
+
headers['Content-Encoding'] = 'gzip';
230
+
}
231
+
232
+
return new Response(content, { headers });
233
+
}
234
+
235
+
// Try index.html for directory-like paths
236
+
if (!requestPath.includes('.')) {
237
+
const indexPath = `${requestPath}/index.html`;
238
+
const indexCacheKey = getCacheKey(did, rkey, indexPath);
239
+
const indexFile = getCachedFilePath(did, rkey, indexPath);
240
+
241
+
// Check for rewritten index.html in cache
242
+
const rewrittenKey = getCacheKey(did, rkey, indexPath, `rewritten:${basePath}`);
243
+
const rewrittenContent = rewrittenHtmlCache.get(rewrittenKey);
244
+
if (rewrittenContent) {
245
+
return new Response(rewrittenContent, {
191
246
headers: {
192
-
'Content-Type': mimeType,
247
+
'Content-Type': 'text/html; charset=utf-8',
193
248
'Content-Encoding': 'gzip',
249
+
'Cache-Control': 'public, max-age=300',
194
250
},
195
251
});
196
252
}
197
-
return new Response(content, {
198
-
headers: {
199
-
'Content-Type': mimeType,
200
-
},
201
-
});
202
-
}
203
253
204
-
// Try index.html for directory-like paths
205
-
if (!requestPath.includes('.')) {
206
-
const indexFile = getCachedFilePath(did, rkey, `${requestPath}/index.html`);
207
-
if (existsSync(indexFile)) {
208
-
const metaFile = `${indexFile}.meta`;
209
-
let isGzipped = false;
254
+
let indexContent = fileCache.get(indexCacheKey);
255
+
let indexMeta = metadataCache.get(indexCacheKey);
210
256
211
-
if (existsSync(metaFile)) {
212
-
const meta = JSON.parse(readFileSync(metaFile, 'utf-8'));
213
-
if (meta.encoding === 'gzip') {
214
-
isGzipped = true;
215
-
}
257
+
if (!indexContent && await fileExists(indexFile)) {
258
+
indexContent = await readFile(indexFile);
259
+
fileCache.set(indexCacheKey, indexContent, indexContent.length);
260
+
261
+
const indexMetaFile = `${indexFile}.meta`;
262
+
if (await fileExists(indexMetaFile)) {
263
+
const metaJson = await readFile(indexMetaFile, 'utf-8');
264
+
indexMeta = JSON.parse(metaJson);
265
+
metadataCache.set(indexCacheKey, indexMeta!, JSON.stringify(indexMeta).length);
216
266
}
267
+
}
217
268
218
-
// HTML needs path rewriting, decompress, rewrite, then recompress
219
-
let content: string;
269
+
if (indexContent) {
270
+
const isGzipped = indexMeta?.encoding === 'gzip';
271
+
272
+
let htmlContent: string;
220
273
if (isGzipped) {
221
274
const { gunzipSync } = await import('zlib');
222
-
const compressed = readFileSync(indexFile);
223
-
content = gunzipSync(compressed).toString('utf-8');
275
+
htmlContent = gunzipSync(indexContent).toString('utf-8');
224
276
} else {
225
-
content = readFileSync(indexFile, 'utf-8');
277
+
htmlContent = indexContent.toString('utf-8');
226
278
}
227
-
const indexPath = `${requestPath}/index.html`;
228
-
const rewritten = rewriteHtmlPaths(content, basePath, indexPath);
229
-
230
-
// Recompress the HTML for efficient delivery
279
+
const rewritten = rewriteHtmlPaths(htmlContent, basePath, indexPath);
280
+
231
281
const { gzipSync } = await import('zlib');
232
282
const recompressed = gzipSync(Buffer.from(rewritten, 'utf-8'));
233
-
283
+
284
+
rewrittenHtmlCache.set(rewrittenKey, recompressed, recompressed.length);
285
+
234
286
return new Response(recompressed, {
235
287
headers: {
236
288
'Content-Type': 'text/html; charset=utf-8',
237
289
'Content-Encoding': 'gzip',
290
+
'Cache-Control': 'public, max-age=300',
238
291
},
239
292
});
240
293
}
···
442
495
const timeWindow = query.timeWindow ? parseInt(query.timeWindow as string) : 3600000;
443
496
const stats = metricsCollector.getStats('hosting-service', timeWindow);
444
497
return c.json({ stats, timeWindow });
498
+
});
499
+
500
+
app.get('/__internal__/observability/cache', async (c) => {
501
+
const { getCacheStats } = await import('./lib/cache');
502
+
const stats = getCacheStats();
503
+
return c.json({ cache: stats });
445
504
});
446
505
447
506
export default app;