+8
-2
hosting-service/src/lib/db.ts
+8
-2
hosting-service/src/lib/db.ts
···
46
47
export async function upsertSite(did: string, rkey: string, displayName?: string) {
48
try {
49
await sql`
50
INSERT INTO sites (did, rkey, display_name, created_at, updated_at)
51
-
VALUES (${did}, ${rkey}, ${displayName || null}, EXTRACT(EPOCH FROM NOW()), EXTRACT(EPOCH FROM NOW()))
52
ON CONFLICT (did, rkey)
53
DO UPDATE SET
54
-
display_name = COALESCE(EXCLUDED.display_name, sites.display_name),
55
updated_at = EXTRACT(EPOCH FROM NOW())
56
`;
57
} catch (err) {
···
46
47
export async function upsertSite(did: string, rkey: string, displayName?: string) {
48
try {
49
+
// Only set display_name if provided (not undefined/null/empty)
50
+
const cleanDisplayName = displayName && displayName.trim() ? displayName.trim() : null;
51
+
52
await sql`
53
INSERT INTO sites (did, rkey, display_name, created_at, updated_at)
54
+
VALUES (${did}, ${rkey}, ${cleanDisplayName}, EXTRACT(EPOCH FROM NOW()), EXTRACT(EPOCH FROM NOW()))
55
ON CONFLICT (did, rkey)
56
DO UPDATE SET
57
+
display_name = CASE
58
+
WHEN EXCLUDED.display_name IS NOT NULL THEN EXCLUDED.display_name
59
+
ELSE sites.display_name
60
+
END,
61
updated_at = EXTRACT(EPOCH FROM NOW())
62
`;
63
} catch (err) {
+12
hosting-service/src/lib/utils.ts
+12
hosting-service/src/lib/utils.ts
···
102
103
export async function downloadAndCacheSite(did: string, rkey: string, record: WispFsRecord, pdsEndpoint: string): Promise<void> {
104
console.log('Caching site', did, rkey);
105
+
106
+
// Validate record structure
107
+
if (!record.root) {
108
+
console.error('Record missing root directory:', JSON.stringify(record, null, 2));
109
+
throw new Error('Invalid record structure: missing root directory');
110
+
}
111
+
112
+
if (!record.root.entries || !Array.isArray(record.root.entries)) {
113
+
console.error('Record root missing entries array:', JSON.stringify(record.root, null, 2));
114
+
throw new Error('Invalid record structure: root missing entries array');
115
+
}
116
+
117
await cacheFiles(did, rkey, record.root.entries, pdsEndpoint, '');
118
}
119
+7
-1
src/index.ts
+7
-1
src/index.ts
+8
-2
src/lib/db.ts
+8
-2
src/lib/db.ts
···
391
392
export const upsertSite = async (did: string, rkey: string, displayName?: string) => {
393
try {
394
await db`
395
INSERT INTO sites (did, rkey, display_name, created_at, updated_at)
396
-
VALUES (${did}, ${rkey}, ${displayName || null}, EXTRACT(EPOCH FROM NOW()), EXTRACT(EPOCH FROM NOW()))
397
ON CONFLICT (did, rkey)
398
DO UPDATE SET
399
-
display_name = COALESCE(EXCLUDED.display_name, sites.display_name),
400
updated_at = EXTRACT(EPOCH FROM NOW())
401
`;
402
return { success: true };
···
391
392
export const upsertSite = async (did: string, rkey: string, displayName?: string) => {
393
try {
394
+
// Only set display_name if provided (not undefined/null/empty)
395
+
const cleanDisplayName = displayName && displayName.trim() ? displayName.trim() : null;
396
+
397
await db`
398
INSERT INTO sites (did, rkey, display_name, created_at, updated_at)
399
+
VALUES (${did}, ${rkey}, ${cleanDisplayName}, EXTRACT(EPOCH FROM NOW()), EXTRACT(EPOCH FROM NOW()))
400
ON CONFLICT (did, rkey)
401
DO UPDATE SET
402
+
display_name = CASE
403
+
WHEN EXCLUDED.display_name IS NOT NULL THEN EXCLUDED.display_name
404
+
ELSE sites.display_name
405
+
END,
406
updated_at = EXTRACT(EPOCH FROM NOW())
407
`;
408
return { success: true };
+55
-31
src/lib/wisp-utils.ts
+55
-31
src/lib/wisp-utils.ts
···
22
* Process uploaded files into a directory structure
23
*/
24
export function processUploadedFiles(files: UploadedFile[]): ProcessedDirectory {
25
-
console.log(`🏗️ Processing ${files.length} uploaded files`);
26
const entries: Entry[] = [];
27
let fileCount = 0;
28
···
33
// Remove any base folder name from the path
34
const normalizedPath = file.name.replace(/^[^\/]*\//, '');
35
const parts = normalizedPath.split('/');
36
-
37
-
console.log(`📄 Processing file: ${file.name} -> normalized: ${normalizedPath}`);
38
39
if (parts.length === 1) {
40
// Root level file
41
-
console.log(`📁 Root level file: ${parts[0]}`);
42
entries.push({
43
name: parts[0],
44
node: {
···
51
} else {
52
// File in subdirectory
53
const dirPath = parts.slice(0, -1).join('/');
54
-
console.log(`📂 Subdirectory file: ${dirPath}/${parts[parts.length - 1]}`);
55
if (!directoryMap.has(dirPath)) {
56
directoryMap.set(dirPath, []);
57
-
console.log(`➕ Created directory: ${dirPath}`);
58
}
59
directoryMap.get(dirPath)!.push({
60
...file,
···
64
}
65
66
// Process subdirectories
67
-
console.log(`📂 Processing ${directoryMap.size} subdirectories`);
68
for (const [dirPath, dirFiles] of directoryMap) {
69
-
console.log(`📁 Processing directory: ${dirPath} with ${dirFiles.length} files`);
70
const dirEntries: Entry[] = [];
71
72
for (const file of dirFiles) {
73
const fileName = file.name.split('/').pop()!;
74
-
console.log(` 📄 Adding file to directory: ${fileName}`);
75
dirEntries.push({
76
name: fileName,
77
node: {
···
86
// Build nested directory structure
87
const pathParts = dirPath.split('/');
88
let currentEntries = entries;
89
-
90
-
console.log(`🏗️ Building nested structure for path: ${pathParts.join('/')}`);
91
92
for (let i = 0; i < pathParts.length; i++) {
93
const part = pathParts[i];
···
107
node: newDir
108
};
109
currentEntries.push(existingEntry);
110
-
console.log(` ➕ Created directory entry: ${part}`);
111
} else if ('entries' in existingEntry.node && isLast) {
112
(existingEntry.node as any).entries.push(...dirEntries);
113
-
console.log(` 📝 Added files to existing directory: ${part}`);
114
}
115
116
if (existingEntry && 'entries' in existingEntry.node) {
···
119
}
120
}
121
122
-
console.log(`✅ Directory structure completed with ${fileCount} total files`);
123
-
124
const result = {
125
directory: {
126
$type: 'place.wisp.fs#directory' as const,
···
130
fileCount
131
};
132
133
-
console.log('📋 Final directory structure:', JSON.stringify(result, null, 2));
134
return result;
135
}
136
···
142
root: Directory,
143
fileCount: number
144
): Record {
145
-
const manifest: Record = {
146
$type: 'place.wisp.fs' as const,
147
site: siteName,
148
root,
149
fileCount,
150
createdAt: new Date().toISOString()
151
};
152
-
153
-
console.log(`📋 Created manifest for site "${siteName}" with ${fileCount} files`);
154
-
console.log('📄 Manifest structure:', JSON.stringify(manifest, null, 2));
155
-
156
-
return manifest;
157
}
158
159
/**
160
* Update file blobs in directory structure after upload
161
*/
162
export function updateFileBlobs(
163
directory: Directory,
164
uploadResults: FileUploadResult[],
165
-
filePaths: string[]
166
): Directory {
167
-
console.log(`🔄 Updating file blobs: ${uploadResults.length} results for ${filePaths.length} paths`);
168
169
const updatedEntries = directory.entries.map(entry => {
170
if ('type' in entry.node && entry.node.type === 'file') {
171
-
const fileIndex = filePaths.findIndex(path => path.endsWith(entry.name));
172
if (fileIndex !== -1 && uploadResults[fileIndex]) {
173
-
console.log(` 🔗 Updating blob for file: ${entry.name} -> ${uploadResults[fileIndex].hash}`);
174
return {
175
...entry,
176
node: {
177
$type: 'place.wisp.fs#file' as const,
178
type: 'file' as const,
179
-
blob: uploadResults[fileIndex].blobRef
180
}
181
};
182
} else {
183
-
console.warn(` ⚠️ Could not find upload result for file: ${entry.name}`);
184
}
185
} else if ('type' in entry.node && entry.node.type === 'directory') {
186
-
console.log(` 📂 Recursively updating directory: ${entry.name}`);
187
return {
188
...entry,
189
-
node: updateFileBlobs(entry.node as Directory, uploadResults, filePaths)
190
};
191
}
192
return entry;
193
}) as Entry[];
194
195
const result = {
196
$type: 'place.wisp.fs#directory' as const,
197
type: 'directory' as const,
198
entries: updatedEntries
199
};
200
201
-
console.log('✅ File blobs updated');
202
return result;
203
}
···
22
* Process uploaded files into a directory structure
23
*/
24
export function processUploadedFiles(files: UploadedFile[]): ProcessedDirectory {
25
const entries: Entry[] = [];
26
let fileCount = 0;
27
···
32
// Remove any base folder name from the path
33
const normalizedPath = file.name.replace(/^[^\/]*\//, '');
34
const parts = normalizedPath.split('/');
35
36
if (parts.length === 1) {
37
// Root level file
38
entries.push({
39
name: parts[0],
40
node: {
···
47
} else {
48
// File in subdirectory
49
const dirPath = parts.slice(0, -1).join('/');
50
if (!directoryMap.has(dirPath)) {
51
directoryMap.set(dirPath, []);
52
}
53
directoryMap.get(dirPath)!.push({
54
...file,
···
58
}
59
60
// Process subdirectories
61
for (const [dirPath, dirFiles] of directoryMap) {
62
const dirEntries: Entry[] = [];
63
64
for (const file of dirFiles) {
65
const fileName = file.name.split('/').pop()!;
66
dirEntries.push({
67
name: fileName,
68
node: {
···
77
// Build nested directory structure
78
const pathParts = dirPath.split('/');
79
let currentEntries = entries;
80
81
for (let i = 0; i < pathParts.length; i++) {
82
const part = pathParts[i];
···
96
node: newDir
97
};
98
currentEntries.push(existingEntry);
99
} else if ('entries' in existingEntry.node && isLast) {
100
(existingEntry.node as any).entries.push(...dirEntries);
101
}
102
103
if (existingEntry && 'entries' in existingEntry.node) {
···
106
}
107
}
108
109
const result = {
110
directory: {
111
$type: 'place.wisp.fs#directory' as const,
···
115
fileCount
116
};
117
118
return result;
119
}
120
···
126
root: Directory,
127
fileCount: number
128
): Record {
129
+
return {
130
$type: 'place.wisp.fs' as const,
131
site: siteName,
132
root,
133
fileCount,
134
createdAt: new Date().toISOString()
135
};
136
}
137
138
/**
139
* Update file blobs in directory structure after upload
140
+
* Uses path-based matching to correctly match files in nested directories
141
*/
142
export function updateFileBlobs(
143
directory: Directory,
144
uploadResults: FileUploadResult[],
145
+
filePaths: string[],
146
+
currentPath: string = ''
147
): Directory {
148
+
const mimeTypeMismatches: string[] = [];
149
150
const updatedEntries = directory.entries.map(entry => {
151
if ('type' in entry.node && entry.node.type === 'file') {
152
+
// Build the full path for this file
153
+
const fullPath = currentPath ? `${currentPath}/${entry.name}` : entry.name;
154
+
155
+
// Find exact match in filePaths (need to handle normalized paths)
156
+
const fileIndex = filePaths.findIndex((path) => {
157
+
// Normalize both paths by removing leading base folder
158
+
const normalizedUploadPath = path.replace(/^[^\/]*\//, '');
159
+
const normalizedEntryPath = fullPath;
160
+
return normalizedUploadPath === normalizedEntryPath || path === fullPath;
161
+
});
162
+
163
if (fileIndex !== -1 && uploadResults[fileIndex]) {
164
+
const blobRef = uploadResults[fileIndex].blobRef;
165
+
const uploadedPath = filePaths[fileIndex];
166
+
167
+
// Check if MIME types make sense for this file extension
168
+
const expectedMime = getExpectedMimeType(entry.name);
169
+
if (expectedMime && blobRef.mimeType !== expectedMime && !blobRef.mimeType.startsWith(expectedMime)) {
170
+
mimeTypeMismatches.push(`${fullPath}: expected ${expectedMime}, got ${blobRef.mimeType} (from upload: ${uploadedPath})`);
171
+
}
172
+
173
return {
174
...entry,
175
node: {
176
$type: 'place.wisp.fs#file' as const,
177
type: 'file' as const,
178
+
blob: blobRef
179
}
180
};
181
} else {
182
+
console.error(`❌ BLOB MATCHING ERROR: Could not find blob for file: ${fullPath}`);
183
+
console.error(` Available paths:`, filePaths.slice(0, 10), filePaths.length > 10 ? `... and ${filePaths.length - 10} more` : '');
184
}
185
} else if ('type' in entry.node && entry.node.type === 'directory') {
186
+
const dirPath = currentPath ? `${currentPath}/${entry.name}` : entry.name;
187
return {
188
...entry,
189
+
node: updateFileBlobs(entry.node as Directory, uploadResults, filePaths, dirPath)
190
};
191
}
192
return entry;
193
}) as Entry[];
194
195
+
if (mimeTypeMismatches.length > 0) {
196
+
console.error('\n⚠️ MIME TYPE MISMATCHES DETECTED IN MANIFEST:');
197
+
mimeTypeMismatches.forEach(m => console.error(` ${m}`));
198
+
console.error('');
199
+
}
200
+
201
const result = {
202
$type: 'place.wisp.fs#directory' as const,
203
type: 'directory' as const,
204
entries: updatedEntries
205
};
206
207
return result;
208
}
209
+
210
+
function getExpectedMimeType(filename: string): string | null {
211
+
const ext = filename.toLowerCase().split('.').pop();
212
+
const mimeMap: Record<string, string> = {
213
+
'html': 'text/html',
214
+
'htm': 'text/html',
215
+
'css': 'text/css',
216
+
'js': 'text/javascript',
217
+
'mjs': 'text/javascript',
218
+
'json': 'application/json',
219
+
'jpg': 'image/jpeg',
220
+
'jpeg': 'image/jpeg',
221
+
'png': 'image/png',
222
+
'gif': 'image/gif',
223
+
'webp': 'image/webp',
224
+
'svg': 'image/svg+xml',
225
+
};
226
+
return ext ? (mimeMap[ext] || null) : null;
227
+
}
+78
-21
src/routes/auth.ts
+78
-21
src/routes/auth.ts
···
2
import { NodeOAuthClient } from '@atproto/oauth-client-node'
3
import { getSitesByDid, getDomainByDid } from '../lib/db'
4
import { syncSitesFromPDS } from '../lib/sync-sites'
5
6
export const authRoutes = (client: NodeOAuthClient) => new Elysia()
7
.post('/api/auth/signin', async (c) => {
···
16
}
17
})
18
.get('/api/auth/callback', async (c) => {
19
-
const params = new URLSearchParams(c.query)
20
-
const { session } = await client.callback(params)
21
-
if (!session) return { error: 'Authentication failed' }
22
23
-
const cookieSession = c.cookie
24
-
cookieSession.did.value = session.did
25
26
-
// Sync sites from PDS to database cache
27
-
console.log('[Auth] Syncing sites from PDS for', session.did)
28
try {
29
-
const syncResult = await syncSitesFromPDS(session.did, session)
30
-
console.log(`[Auth] Sync complete: ${syncResult.synced} sites synced`)
31
-
if (syncResult.errors.length > 0) {
32
-
console.warn('[Auth] Sync errors:', syncResult.errors)
33
}
34
} catch (err) {
35
-
console.error('[Auth] Failed to sync sites:', err)
36
-
// Don't fail auth if sync fails, just log it
37
}
38
39
-
// Check if user has any sites or domain
40
-
const sites = await getSitesByDid(session.did)
41
-
const domain = await getDomainByDid(session.did)
42
43
-
// If no sites and no domain, redirect to onboarding
44
-
if (sites.length === 0 && !domain) {
45
-
return c.redirect('/onboarding')
46
}
47
-
48
-
return c.redirect('/editor')
49
})
···
2
import { NodeOAuthClient } from '@atproto/oauth-client-node'
3
import { getSitesByDid, getDomainByDid } from '../lib/db'
4
import { syncSitesFromPDS } from '../lib/sync-sites'
5
+
import { authenticateRequest } from '../lib/wisp-auth'
6
7
export const authRoutes = (client: NodeOAuthClient) => new Elysia()
8
.post('/api/auth/signin', async (c) => {
···
17
}
18
})
19
.get('/api/auth/callback', async (c) => {
  try {
    const params = new URLSearchParams(c.query)

    // client.callback() validates the OAuth state parameter internally and
    // throws on a mismatch, which is the CSRF protection for this flow.
    const { session } = await client.callback(params)

    if (!session) {
      console.error('[Auth] OAuth callback failed: no session returned')
      return c.redirect('/?error=auth_failed')
    }

    // Persist the authenticated DID in the session cookie.
    const cookieSession = c.cookie
    cookieSession.did.value = session.did

    // Best-effort sync of the user's sites from their PDS into our cache;
    // a sync failure is logged but never fails the login itself.
    console.log('[Auth] Syncing sites from PDS for', session.did)
    try {
      const syncResult = await syncSitesFromPDS(session.did, session)
      console.log(`[Auth] Sync complete: ${syncResult.synced} sites synced`)
      if (syncResult.errors.length > 0) {
        console.warn('[Auth] Sync errors:', syncResult.errors)
      }
    } catch (err) {
      console.error('[Auth] Failed to sync sites:', err)
    }

    // First-time users (no sites and no custom domain) go to onboarding;
    // everyone else lands in the editor.
    const sites = await getSitesByDid(session.did)
    const domain = await getDomainByDid(session.did)

    if (sites.length === 0 && !domain) {
      return c.redirect('/onboarding')
    }

    return c.redirect('/editor')
  } catch (err) {
    // Covers state-validation failures and any other OAuth error.
    console.error('[Auth] OAuth callback error:', err)
    return c.redirect('/?error=auth_failed')
  }
})
.post('/api/auth/logout', async (c) => {
  try {
    const cookieSession = c.cookie
    const did = cookieSession.did?.value

    // Clear the session cookie regardless of whether revocation succeeds.
    cookieSession.did.value = ''
    cookieSession.did.maxAge = 0

    // Revoke the server-side OAuth session when we know who is logging out;
    // a revoke failure is logged but does not block the logout.
    if (did && typeof did === 'string') {
      try {
        await client.revoke(did)
        console.log('[Auth] Revoked OAuth session for', did)
      } catch (err) {
        console.error('[Auth] Failed to revoke session:', err)
      }
    }

    return { success: true }
  } catch (err) {
    console.error('[Auth] Logout error:', err)
    return { error: 'Logout failed' }
  }
})
.get('/api/auth/status', async (c) => {
  try {
    const auth = await authenticateRequest(client, c.cookie)

    if (!auth) {
      return { authenticated: false }
    }

    return {
      authenticated: true,
      did: auth.did
    }
  } catch (err) {
    console.error('[Auth] Status check error:', err)
    return { authenticated: false }
  }
})
+97
-103
src/routes/wisp.ts
+97
-103
src/routes/wisp.ts
···
51
files: File | File[]
52
};
53
54
-
console.log('🚀 Starting upload process', { siteName, fileCount: Array.isArray(files) ? files.length : 1 });
55
-
56
try {
57
if (!siteName) {
58
-
console.error('❌ Site name is required');
59
throw new Error('Site name is required')
60
}
61
62
if (!isValidSiteName(siteName)) {
63
-
console.error('❌ Invalid site name format');
64
throw new Error('Invalid site name: must be 1-512 characters and contain only alphanumeric, dots, dashes, underscores, tildes, and colons')
65
}
66
67
-
console.log('✅ Initial validation passed');
68
-
69
// Check if files were provided
70
const hasFiles = files && (Array.isArray(files) ? files.length > 0 : !!files);
71
72
if (!hasFiles) {
73
-
console.log('📝 Creating empty site (no files provided)');
74
-
75
// Create agent with OAuth session
76
-
console.log('🔐 Creating agent with OAuth session');
77
const agent = new Agent((url, init) => auth.session.fetchHandler(url, init))
78
-
console.log('✅ Agent created successfully');
79
80
// Create empty manifest
81
const emptyManifest = {
···
92
// Use site name as rkey
93
const rkey = siteName;
94
95
-
// Create the record with explicit rkey
96
-
console.log(`📝 Creating empty site record in repo with rkey: ${rkey}`);
97
const record = await agent.com.atproto.repo.putRecord({
98
repo: auth.did,
99
collection: 'place.wisp.fs',
···
101
record: emptyManifest
102
});
103
104
-
console.log('✅ Empty site record created successfully:', {
105
-
uri: record.data.uri,
106
-
cid: record.data.cid
107
-
});
108
-
109
-
// Store site in database cache
110
-
console.log('💾 Storing site in database cache');
111
await upsertSite(auth.did, rkey, siteName);
112
-
console.log('✅ Site stored in database');
113
114
return {
115
success: true,
···
121
}
122
123
// Create agent with OAuth session
124
-
console.log('🔐 Creating agent with OAuth session');
125
const agent = new Agent((url, init) => auth.session.fetchHandler(url, init))
126
-
console.log('✅ Agent created successfully');
127
128
// Convert File objects to UploadedFile format
129
// Elysia gives us File objects directly, handle both single file and array
130
const fileArray = Array.isArray(files) ? files : [files];
131
-
console.log(`📁 Processing ${fileArray.length} files`);
132
const uploadedFiles: UploadedFile[] = [];
133
134
// Define allowed file extensions for static site hosting
···
161
for (let i = 0; i < fileArray.length; i++) {
162
const file = fileArray[i];
163
const fileExtension = '.' + file.name.split('.').pop()?.toLowerCase();
164
-
165
-
console.log(`📄 Processing file ${i + 1}/${fileArray.length}: ${file.name} (${file.size} bytes, ${file.type})`);
166
-
167
// Skip excluded files
168
if (excludedFiles.has(fileExtension)) {
169
-
console.log(`⏭️ Skipping excluded file: ${file.name}`);
170
continue;
171
}
172
-
173
// Skip files that aren't in allowed extensions
174
if (!allowedExtensions.has(fileExtension)) {
175
-
console.log(`⏭️ Skipping non-web file: ${file.name} (${fileExtension})`);
176
continue;
177
}
178
-
179
// Skip files that are too large (limit to 100MB per file)
180
const maxSize = 100 * 1024 * 1024; // 100MB
181
if (file.size > maxSize) {
182
-
console.log(`⏭️ Skipping large file: ${file.name} (${(file.size / 1024 / 1024).toFixed(2)}MB > 100MB limit)`);
183
continue;
184
}
185
-
186
-
console.log(`✅ Including file: ${file.name}`);
187
const arrayBuffer = await file.arrayBuffer();
188
uploadedFiles.push({
189
name: file.name,
···
196
// Check total size limit (300MB)
197
const totalSize = uploadedFiles.reduce((sum, file) => sum + file.size, 0);
198
const maxTotalSize = 300 * 1024 * 1024; // 300MB
199
-
200
-
console.log(`📊 Filtered to ${uploadedFiles.length} files from ${fileArray.length} total files`);
201
-
console.log(`📦 Total size: ${(totalSize / 1024 / 1024).toFixed(2)}MB (limit: 300MB)`);
202
203
if (totalSize > maxTotalSize) {
204
throw new Error(`Total upload size ${(totalSize / 1024 / 1024).toFixed(2)}MB exceeds 300MB limit`);
205
}
206
207
if (uploadedFiles.length === 0) {
208
-
console.log('⚠️ No valid web files found, creating empty site instead');
209
210
// Create empty manifest
211
const emptyManifest = {
···
222
// Use site name as rkey
223
const rkey = siteName;
224
225
-
// Create the record with explicit rkey
226
-
console.log(`📝 Creating empty site record in repo with rkey: ${rkey}`);
227
const record = await agent.com.atproto.repo.putRecord({
228
repo: auth.did,
229
collection: 'place.wisp.fs',
···
231
record: emptyManifest
232
});
233
234
-
console.log('✅ Empty site record created successfully:', {
235
-
uri: record.data.uri,
236
-
cid: record.data.cid
237
-
});
238
-
239
-
// Store site in database cache
240
-
console.log('💾 Storing site in database cache');
241
await upsertSite(auth.did, rkey, siteName);
242
-
console.log('✅ Site stored in database');
243
244
return {
245
success: true,
···
251
};
252
}
253
254
-
console.log('✅ File conversion completed');
255
-
256
// Process files into directory structure
257
-
console.log('🏗️ Building directory structure');
258
const { directory, fileCount } = processUploadedFiles(uploadedFiles);
259
-
console.log(`✅ Directory structure created with ${fileCount} files`);
260
261
-
// Upload files as blobs
262
-
const uploadResults: FileUploadResult[] = [];
263
-
const filePaths: string[] = [];
264
265
-
console.log('⬆️ Starting blob upload process');
266
-
for (let i = 0; i < uploadedFiles.length; i++) {
267
-
const file = uploadedFiles[i];
268
-
console.log(`📤 Uploading blob ${i + 1}/${uploadedFiles.length}: ${file.name}`);
269
-
270
try {
271
-
console.log(`🔍 Upload details:`, {
272
-
fileName: file.name,
273
-
fileSize: file.size,
274
-
mimeType: file.mimeType,
275
-
contentLength: file.content.length
276
-
});
277
-
278
const uploadResult = await agent.com.atproto.repo.uploadBlob(
279
file.content,
280
{
···
282
}
283
);
284
285
-
console.log(`✅ Upload successful for ${file.name}:`, {
286
-
hash: uploadResult.data.blob.ref.toString(),
287
-
mimeType: uploadResult.data.blob.mimeType,
288
-
size: uploadResult.data.blob.size
289
-
});
290
291
-
uploadResults.push({
292
-
hash: uploadResult.data.blob.ref.toString(),
293
-
blobRef: uploadResult.data.blob
294
-
});
295
296
-
filePaths.push(file.name);
297
} catch (uploadError) {
298
-
console.error(`❌ Upload failed for file ${file.name}:`, uploadError);
299
-
console.error('Upload error details:', {
300
-
fileName: file.name,
301
-
fileSize: file.size,
302
-
mimeType: file.mimeType,
303
-
error: uploadError
304
-
});
305
throw uploadError;
306
}
307
}
308
309
-
console.log('✅ All blobs uploaded successfully');
310
311
// Update directory with file blobs
312
-
console.log('🔄 Updating file blobs in directory structure');
313
const updatedDirectory = updateFileBlobs(directory, uploadResults, filePaths);
314
-
console.log('✅ File blobs updated');
315
316
// Create manifest
317
-
console.log('📋 Creating manifest');
318
const manifest = createManifest(siteName, updatedDirectory, fileCount);
319
-
console.log('✅ Manifest created');
320
321
// Use site name as rkey
322
const rkey = siteName;
323
324
-
// Create the record with explicit rkey
325
-
console.log(`📝 Creating record in repo with rkey: ${rkey}`);
326
-
const record = await agent.com.atproto.repo.putRecord({
327
-
repo: auth.did,
328
-
collection: 'place.wisp.fs',
329
-
rkey: rkey,
330
-
record: manifest
331
-
});
332
333
-
console.log('✅ Record created successfully:', {
334
-
uri: record.data.uri,
335
-
cid: record.data.cid
336
-
});
337
338
// Store site in database cache
339
-
console.log('💾 Storing site in database cache');
340
await upsertSite(auth.did, rkey, siteName);
341
-
console.log('✅ Site stored in database');
342
343
const result = {
344
success: true,
···
348
siteName
349
};
350
351
-
console.log('🎉 Upload process completed successfully');
352
return result;
353
} catch (error) {
354
console.error('❌ Upload error:', error);
···
51
files: File | File[]
52
};
53
54
try {
55
if (!siteName) {
56
throw new Error('Site name is required')
57
}
58
59
if (!isValidSiteName(siteName)) {
60
throw new Error('Invalid site name: must be 1-512 characters and contain only alphanumeric, dots, dashes, underscores, tildes, and colons')
61
}
62
63
// Check if files were provided
64
const hasFiles = files && (Array.isArray(files) ? files.length > 0 : !!files);
65
66
if (!hasFiles) {
67
// Create agent with OAuth session
68
const agent = new Agent((url, init) => auth.session.fetchHandler(url, init))
69
70
// Create empty manifest
71
const emptyManifest = {
···
82
// Use site name as rkey
83
const rkey = siteName;
84
85
const record = await agent.com.atproto.repo.putRecord({
86
repo: auth.did,
87
collection: 'place.wisp.fs',
···
89
record: emptyManifest
90
});
91
92
await upsertSite(auth.did, rkey, siteName);
93
94
return {
95
success: true,
···
101
}
102
103
// Create agent with OAuth session
104
const agent = new Agent((url, init) => auth.session.fetchHandler(url, init))
105
106
// Convert File objects to UploadedFile format
107
// Elysia gives us File objects directly, handle both single file and array
108
const fileArray = Array.isArray(files) ? files : [files];
109
const uploadedFiles: UploadedFile[] = [];
110
111
// Define allowed file extensions for static site hosting
···
138
for (let i = 0; i < fileArray.length; i++) {
139
const file = fileArray[i];
140
const fileExtension = '.' + file.name.split('.').pop()?.toLowerCase();
141
+
142
// Skip excluded files
143
if (excludedFiles.has(fileExtension)) {
144
continue;
145
}
146
+
147
// Skip files that aren't in allowed extensions
148
if (!allowedExtensions.has(fileExtension)) {
149
continue;
150
}
151
+
152
// Skip files that are too large (limit to 100MB per file)
153
const maxSize = 100 * 1024 * 1024; // 100MB
154
if (file.size > maxSize) {
155
continue;
156
}
157
+
158
const arrayBuffer = await file.arrayBuffer();
159
uploadedFiles.push({
160
name: file.name,
···
167
// Check total size limit (300MB)
168
const totalSize = uploadedFiles.reduce((sum, file) => sum + file.size, 0);
169
const maxTotalSize = 300 * 1024 * 1024; // 300MB
170
171
if (totalSize > maxTotalSize) {
172
throw new Error(`Total upload size ${(totalSize / 1024 / 1024).toFixed(2)}MB exceeds 300MB limit`);
173
}
174
175
if (uploadedFiles.length === 0) {
176
177
// Create empty manifest
178
const emptyManifest = {
···
189
// Use site name as rkey
190
const rkey = siteName;
191
192
const record = await agent.com.atproto.repo.putRecord({
193
repo: auth.did,
194
collection: 'place.wisp.fs',
···
196
record: emptyManifest
197
});
198
199
await upsertSite(auth.did, rkey, siteName);
200
201
return {
202
success: true,
···
208
};
209
}
210
211
// Process files into directory structure
212
const { directory, fileCount } = processUploadedFiles(uploadedFiles);
213
214
+
// Upload files as blobs in parallel
215
+
const mimeTypeMismatches: Array<{file: string, sent: string, returned: string}> = [];
216
217
+
const uploadPromises = uploadedFiles.map(async (file, i) => {
218
try {
219
const uploadResult = await agent.com.atproto.repo.uploadBlob(
220
file.content,
221
{
···
223
}
224
);
225
226
+
const sentMimeType = file.mimeType;
227
+
const returnedBlobRef = uploadResult.data.blob;
228
229
+
// Track MIME type mismatches for summary
230
+
if (sentMimeType !== returnedBlobRef.mimeType) {
231
+
mimeTypeMismatches.push({
232
+
file: file.name,
233
+
sent: sentMimeType,
234
+
returned: returnedBlobRef.mimeType
235
+
});
236
+
}
237
238
+
// Use the blob ref exactly as returned from PDS
239
+
return {
240
+
result: {
241
+
hash: returnedBlobRef.ref.$link || returnedBlobRef.ref.toString(),
242
+
blobRef: returnedBlobRef
243
+
},
244
+
filePath: file.name,
245
+
sentMimeType,
246
+
returnedMimeType: returnedBlobRef.mimeType
247
+
};
248
} catch (uploadError) {
249
+
console.error(`❌ Upload failed for ${file.name}:`, uploadError);
250
throw uploadError;
251
}
252
+
});
253
+
254
+
// Wait for all uploads to complete
255
+
const uploadedBlobs = await Promise.all(uploadPromises);
256
+
257
+
// Show MIME type mismatch summary
258
+
if (mimeTypeMismatches.length > 0) {
259
+
console.warn(`\n⚠️ PDS changed MIME types for ${mimeTypeMismatches.length} files:`);
260
+
mimeTypeMismatches.slice(0, 20).forEach(m => {
261
+
console.warn(` ${m.file}: ${m.sent} → ${m.returned}`);
262
+
});
263
+
if (mimeTypeMismatches.length > 20) {
264
+
console.warn(` ... and ${mimeTypeMismatches.length - 20} more`);
265
+
}
266
+
console.warn('');
267
}
268
269
+
// CRITICAL: Find files uploaded as application/octet-stream
270
+
const octetStreamFiles = uploadedBlobs.filter(b => b.returnedMimeType === 'application/octet-stream');
271
+
if (octetStreamFiles.length > 0) {
272
+
console.error(`\n🚨 FILES UPLOADED AS application/octet-stream (${octetStreamFiles.length}):`);
273
+
octetStreamFiles.forEach(f => {
274
+
console.error(` ${f.filePath}: sent=${f.sentMimeType}, returned=${f.returnedMimeType}`);
275
+
});
276
+
console.error('');
277
+
}
278
+
279
+
// Extract results and file paths in correct order
280
+
const uploadResults: FileUploadResult[] = uploadedBlobs.map(blob => blob.result);
281
+
const filePaths: string[] = uploadedBlobs.map(blob => blob.filePath);
282
283
// Update directory with file blobs
284
const updatedDirectory = updateFileBlobs(directory, uploadResults, filePaths);
285
286
// Create manifest
287
const manifest = createManifest(siteName, updatedDirectory, fileCount);
288
289
// Use site name as rkey
290
const rkey = siteName;
291
292
+
let record;
293
+
try {
294
+
record = await agent.com.atproto.repo.putRecord({
295
+
repo: auth.did,
296
+
collection: 'place.wisp.fs',
297
+
rkey: rkey,
298
+
record: manifest
299
+
});
300
+
} catch (putRecordError: any) {
301
+
console.error('\n❌ Failed to create record on PDS');
302
+
console.error('Error:', putRecordError.message);
303
304
+
// Try to identify which file has the MIME type mismatch
305
+
if (putRecordError.message?.includes('Mimetype') || putRecordError.message?.includes('mimeType')) {
306
+
console.error('\n🔍 Analyzing manifest for MIME type issues...');
307
+
308
+
// Recursively check all blobs in manifest
309
+
const checkBlobs = (node: any, path: string = '') => {
310
+
if (node.type === 'file' && node.blob) {
311
+
const mimeType = node.blob.mimeType;
312
+
console.error(` File: ${path} - MIME: ${mimeType}`);
313
+
} else if (node.type === 'directory' && node.entries) {
314
+
for (const entry of node.entries) {
315
+
const entryPath = path ? `${path}/${entry.name}` : entry.name;
316
+
checkBlobs(entry.node, entryPath);
317
+
}
318
+
}
319
+
};
320
+
321
+
checkBlobs(manifest.root, '');
322
+
323
+
console.error('\n📊 Blob upload summary:');
324
+
uploadedBlobs.slice(0, 20).forEach((b, i) => {
325
+
console.error(` [${i}] ${b.filePath}: sent=${b.sentMimeType}, returned=${b.returnedMimeType}`);
326
+
});
327
+
if (uploadedBlobs.length > 20) {
328
+
console.error(` ... and ${uploadedBlobs.length - 20} more`);
329
+
}
330
+
}
331
+
332
+
throw putRecordError;
333
+
}
334
335
// Store site in database cache
336
await upsertSite(auth.did, rkey, siteName);
337
338
const result = {
339
success: true,
···
343
siteName
344
};
345
346
return result;
347
} catch (error) {
348
console.error('❌ Upload error:', error);