Monorepo for wisp.place, a static site hosting service built on top of the AT Protocol.

Clean up debug logging and harden the upload pipeline: normalize display_name handling in site upserts, validate cached record structure, tighten the CORS policy, add logout and session-status auth routes, parallelize blob uploads, match blobs to files by full path in nested directories, and surface MIME type mismatches reported by the PDS.

Changed files
+265 -160
+8 -2
hosting-service/src/lib/db.ts
···
 
 export async function upsertSite(did: string, rkey: string, displayName?: string) {
   try {
+    // Only set display_name if provided (not undefined/null/empty)
+    const cleanDisplayName = displayName && displayName.trim() ? displayName.trim() : null;
+
     await sql`
       INSERT INTO sites (did, rkey, display_name, created_at, updated_at)
-      VALUES (${did}, ${rkey}, ${displayName || null}, EXTRACT(EPOCH FROM NOW()), EXTRACT(EPOCH FROM NOW()))
+      VALUES (${did}, ${rkey}, ${cleanDisplayName}, EXTRACT(EPOCH FROM NOW()), EXTRACT(EPOCH FROM NOW()))
       ON CONFLICT (did, rkey)
       DO UPDATE SET
-        display_name = COALESCE(EXCLUDED.display_name, sites.display_name),
+        display_name = CASE
+          WHEN EXCLUDED.display_name IS NOT NULL THEN EXCLUDED.display_name
+          ELSE sites.display_name
+        END,
         updated_at = EXTRACT(EPOCH FROM NOW())
     `;
   } catch (err) {
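The same normalization lands in both copies of upsertSite (hosting-service/src/lib/db.ts and src/lib/db.ts). A minimal usage sketch of the intended semantics, assuming top-level await and an illustrative import path:

// Sketch only: the import path and DID are placeholders for illustration.
import { upsertSite } from './lib/db';

const did = 'did:plc:example';
await upsertSite(did, 'blog', 'My Blog'); // display_name -> 'My Blog'
await upsertSite(did, 'blog');            // undefined -> NULL, existing 'My Blog' is kept
await upsertSite(did, 'blog', '   ');     // whitespace trims to NULL, existing 'My Blog' is kept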
+12
hosting-service/src/lib/utils.ts
···
 
 export async function downloadAndCacheSite(did: string, rkey: string, record: WispFsRecord, pdsEndpoint: string): Promise<void> {
   console.log('Caching site', did, rkey);
+
+  // Validate record structure
+  if (!record.root) {
+    console.error('Record missing root directory:', JSON.stringify(record, null, 2));
+    throw new Error('Invalid record structure: missing root directory');
+  }
+
+  if (!record.root.entries || !Array.isArray(record.root.entries)) {
+    console.error('Record root missing entries array:', JSON.stringify(record.root, null, 2));
+    throw new Error('Invalid record structure: root missing entries array');
+  }
+
   await cacheFiles(did, rkey, record.root.entries, pdsEndpoint, '');
 }
 
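For reference, a sketch of the record shape these guards assume. The field names are taken from the manifest built in src/lib/wisp-utils.ts in this same PR; the real WispFsRecord type in the hosting service may carry more fields.

// Sketch of the validated shape; not the authoritative lexicon definition.
interface EntrySketch {
  name: string;
  node: { type: 'file'; blob: unknown } | { type: 'directory'; entries: EntrySketch[] };
}

interface WispFsRecordSketch {
  $type: 'place.wisp.fs';
  site: string;
  root: { type: 'directory'; entries: EntrySketch[] };
  fileCount: number;
  createdAt: string;
}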
+7 -1
src/index.ts
···
     })
   }
 })
-  .use(cors())
+  .use(cors({
+    origin: config.domain,
+    credentials: true,
+    methods: ['GET', 'POST', 'DELETE', 'OPTIONS'],
+    allowedHeaders: ['Content-Type', 'Authorization'],
+    maxAge: 86400 // 24 hours
+  }))
   .listen(8000)
 
 console.log(
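With origin locked to config.domain and credentials enabled, browser callers have to opt in to sending the session cookie. A hedged example; the domain is illustrative and /api/auth/status is the route added later in this PR:

// Browser-side call that works under the tightened CORS policy.
const res = await fetch('https://wisp.place/api/auth/status', {
  method: 'GET',
  credentials: 'include', // required so the did session cookie is sent
  headers: { 'Content-Type': 'application/json' },
});
console.log(await res.json()); // { authenticated: boolean, did?: string }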
+8 -2
src/lib/db.ts
···
 
 export const upsertSite = async (did: string, rkey: string, displayName?: string) => {
   try {
+    // Only set display_name if provided (not undefined/null/empty)
+    const cleanDisplayName = displayName && displayName.trim() ? displayName.trim() : null;
+
     await db`
       INSERT INTO sites (did, rkey, display_name, created_at, updated_at)
-      VALUES (${did}, ${rkey}, ${displayName || null}, EXTRACT(EPOCH FROM NOW()), EXTRACT(EPOCH FROM NOW()))
+      VALUES (${did}, ${rkey}, ${cleanDisplayName}, EXTRACT(EPOCH FROM NOW()), EXTRACT(EPOCH FROM NOW()))
       ON CONFLICT (did, rkey)
       DO UPDATE SET
-        display_name = COALESCE(EXCLUDED.display_name, sites.display_name),
+        display_name = CASE
+          WHEN EXCLUDED.display_name IS NOT NULL THEN EXCLUDED.display_name
+          ELSE sites.display_name
+        END,
         updated_at = EXTRACT(EPOCH FROM NOW())
     `;
     return { success: true };
+55 -31
src/lib/wisp-utils.ts
···
  * Process uploaded files into a directory structure
  */
 export function processUploadedFiles(files: UploadedFile[]): ProcessedDirectory {
-  console.log(`🏗️ Processing ${files.length} uploaded files`);
   const entries: Entry[] = [];
   let fileCount = 0;
 
···
     // Remove any base folder name from the path
     const normalizedPath = file.name.replace(/^[^\/]*\//, '');
     const parts = normalizedPath.split('/');
-
-    console.log(`📄 Processing file: ${file.name} -> normalized: ${normalizedPath}`);
 
     if (parts.length === 1) {
       // Root level file
-      console.log(`📁 Root level file: ${parts[0]}`);
       entries.push({
         name: parts[0],
         node: {
···
     } else {
       // File in subdirectory
       const dirPath = parts.slice(0, -1).join('/');
-      console.log(`📂 Subdirectory file: ${dirPath}/${parts[parts.length - 1]}`);
       if (!directoryMap.has(dirPath)) {
         directoryMap.set(dirPath, []);
-        console.log(`➕ Created directory: ${dirPath}`);
       }
       directoryMap.get(dirPath)!.push({
         ...file,
···
   }
 
   // Process subdirectories
-  console.log(`📂 Processing ${directoryMap.size} subdirectories`);
   for (const [dirPath, dirFiles] of directoryMap) {
-    console.log(`📁 Processing directory: ${dirPath} with ${dirFiles.length} files`);
     const dirEntries: Entry[] = [];
 
     for (const file of dirFiles) {
       const fileName = file.name.split('/').pop()!;
-      console.log(`  📄 Adding file to directory: ${fileName}`);
       dirEntries.push({
         name: fileName,
         node: {
···
     // Build nested directory structure
     const pathParts = dirPath.split('/');
     let currentEntries = entries;
-
-    console.log(`🏗️ Building nested structure for path: ${pathParts.join('/')}`);
 
     for (let i = 0; i < pathParts.length; i++) {
       const part = pathParts[i];
···
           node: newDir
         };
         currentEntries.push(existingEntry);
-        console.log(`  ➕ Created directory entry: ${part}`);
       } else if ('entries' in existingEntry.node && isLast) {
         (existingEntry.node as any).entries.push(...dirEntries);
-        console.log(`  📁 Added files to existing directory: ${part}`);
       }
 
       if (existingEntry && 'entries' in existingEntry.node) {
···
     }
   }
 
-  console.log(`✅ Directory structure completed with ${fileCount} total files`);
-
   const result = {
     directory: {
       $type: 'place.wisp.fs#directory' as const,
···
     fileCount
   };
 
-  console.log('📋 Final directory structure:', JSON.stringify(result, null, 2));
   return result;
 }
 
···
   root: Directory,
   fileCount: number
 ): Record {
-  const manifest: Record = {
+  return {
     $type: 'place.wisp.fs' as const,
     site: siteName,
     root,
     fileCount,
     createdAt: new Date().toISOString()
   };
-
-  console.log(`📋 Created manifest for site "${siteName}" with ${fileCount} files`);
-  console.log('📄 Manifest structure:', JSON.stringify(manifest, null, 2));
-
-  return manifest;
 }
 
 /**
  * Update file blobs in directory structure after upload
+ * Uses path-based matching to correctly match files in nested directories
  */
 export function updateFileBlobs(
   directory: Directory,
   uploadResults: FileUploadResult[],
-  filePaths: string[]
+  filePaths: string[],
+  currentPath: string = ''
 ): Directory {
-  console.log(`🔄 Updating file blobs: ${uploadResults.length} results for ${filePaths.length} paths`);
+  const mimeTypeMismatches: string[] = [];
 
   const updatedEntries = directory.entries.map(entry => {
     if ('type' in entry.node && entry.node.type === 'file') {
-      const fileIndex = filePaths.findIndex(path => path.endsWith(entry.name));
+      // Build the full path for this file
+      const fullPath = currentPath ? `${currentPath}/${entry.name}` : entry.name;
+
+      // Find exact match in filePaths (need to handle normalized paths)
+      const fileIndex = filePaths.findIndex((path) => {
+        // Normalize both paths by removing leading base folder
+        const normalizedUploadPath = path.replace(/^[^\/]*\//, '');
+        const normalizedEntryPath = fullPath;
+        return normalizedUploadPath === normalizedEntryPath || path === fullPath;
+      });
+
       if (fileIndex !== -1 && uploadResults[fileIndex]) {
-        console.log(`  🔗 Updating blob for file: ${entry.name} -> ${uploadResults[fileIndex].hash}`);
+        const blobRef = uploadResults[fileIndex].blobRef;
+        const uploadedPath = filePaths[fileIndex];
+
+        // Check if MIME types make sense for this file extension
+        const expectedMime = getExpectedMimeType(entry.name);
+        if (expectedMime && blobRef.mimeType !== expectedMime && !blobRef.mimeType.startsWith(expectedMime)) {
+          mimeTypeMismatches.push(`${fullPath}: expected ${expectedMime}, got ${blobRef.mimeType} (from upload: ${uploadedPath})`);
+        }
+
         return {
           ...entry,
           node: {
             $type: 'place.wisp.fs#file' as const,
             type: 'file' as const,
-            blob: uploadResults[fileIndex].blobRef
+            blob: blobRef
           }
         };
       } else {
-        console.warn(`  ⚠️ Could not find upload result for file: ${entry.name}`);
+        console.error(`❌ BLOB MATCHING ERROR: Could not find blob for file: ${fullPath}`);
+        console.error(`   Available paths:`, filePaths.slice(0, 10), filePaths.length > 10 ? `... and ${filePaths.length - 10} more` : '');
       }
     } else if ('type' in entry.node && entry.node.type === 'directory') {
-      console.log(`  📂 Recursively updating directory: ${entry.name}`);
+      const dirPath = currentPath ? `${currentPath}/${entry.name}` : entry.name;
       return {
         ...entry,
-        node: updateFileBlobs(entry.node as Directory, uploadResults, filePaths)
+        node: updateFileBlobs(entry.node as Directory, uploadResults, filePaths, dirPath)
       };
     }
     return entry;
   }) as Entry[];
 
+  if (mimeTypeMismatches.length > 0) {
+    console.error('\n⚠️ MIME TYPE MISMATCHES DETECTED IN MANIFEST:');
+    mimeTypeMismatches.forEach(m => console.error(`  ${m}`));
+    console.error('');
+  }
+
   const result = {
     $type: 'place.wisp.fs#directory' as const,
     type: 'directory' as const,
     entries: updatedEntries
   };
 
-  console.log('✅ File blobs updated');
   return result;
 }
+
+function getExpectedMimeType(filename: string): string | null {
+  const ext = filename.toLowerCase().split('.').pop();
+  const mimeMap: Record<string, string> = {
+    'html': 'text/html',
+    'htm': 'text/html',
+    'css': 'text/css',
+    'js': 'text/javascript',
+    'mjs': 'text/javascript',
+    'json': 'application/json',
+    'jpg': 'image/jpeg',
+    'jpeg': 'image/jpeg',
+    'png': 'image/png',
+    'gif': 'image/gif',
+    'webp': 'image/webp',
+    'svg': 'image/svg+xml',
+  };
+  return ext ? (mimeMap[ext] || null) : null;
+}
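The switch from suffix matching to exact, normalized path matching is what fixes collisions between same-named files in different directories. A small self-contained illustration of the two behaviors:

// Why suffix matching was wrong: every index.html matched the first path ending in "index.html".
const filePaths = ['site/index.html', 'site/blog/index.html'];

// Old behavior: only entry.name was compared, so both files resolve to index 0.
const oldMatch = (entryName: string) => filePaths.findIndex(p => p.endsWith(entryName));
console.log(oldMatch('index.html')); // 0, even for the file under blog/

// New behavior: strip the base folder from the upload path and compare full paths.
const newMatch = (fullPath: string) =>
  filePaths.findIndex(p => p.replace(/^[^\/]*\//, '') === fullPath || p === fullPath);
console.log(newMatch('index.html'));      // 0
console.log(newMatch('blog/index.html')); // 1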
+78 -21
src/routes/auth.ts
···
 import { NodeOAuthClient } from '@atproto/oauth-client-node'
 import { getSitesByDid, getDomainByDid } from '../lib/db'
 import { syncSitesFromPDS } from '../lib/sync-sites'
+import { authenticateRequest } from '../lib/wisp-auth'
 
 export const authRoutes = (client: NodeOAuthClient) => new Elysia()
   .post('/api/auth/signin', async (c) => {
···
     }
   })
   .get('/api/auth/callback', async (c) => {
-    const params = new URLSearchParams(c.query)
-    const { session } = await client.callback(params)
-    if (!session) return { error: 'Authentication failed' }
+    try {
+      const params = new URLSearchParams(c.query)
+
+      // client.callback() validates the state parameter internally
+      // It will throw an error if state validation fails (CSRF protection)
+      const { session } = await client.callback(params)
+
+      if (!session) {
+        console.error('[Auth] OAuth callback failed: no session returned')
+        return c.redirect('/?error=auth_failed')
+      }
 
-    const cookieSession = c.cookie
-    cookieSession.did.value = session.did
+      const cookieSession = c.cookie
+      cookieSession.did.value = session.did
 
-    // Sync sites from PDS to database cache
-    console.log('[Auth] Syncing sites from PDS for', session.did)
+      // Sync sites from PDS to database cache
+      console.log('[Auth] Syncing sites from PDS for', session.did)
+      try {
+        const syncResult = await syncSitesFromPDS(session.did, session)
+        console.log(`[Auth] Sync complete: ${syncResult.synced} sites synced`)
+        if (syncResult.errors.length > 0) {
+          console.warn('[Auth] Sync errors:', syncResult.errors)
+        }
+      } catch (err) {
+        console.error('[Auth] Failed to sync sites:', err)
+        // Don't fail auth if sync fails, just log it
+      }
+
+      // Check if user has any sites or domain
+      const sites = await getSitesByDid(session.did)
+      const domain = await getDomainByDid(session.did)
+
+      // If no sites and no domain, redirect to onboarding
+      if (sites.length === 0 && !domain) {
+        return c.redirect('/onboarding')
+      }
+
+      return c.redirect('/editor')
+    } catch (err) {
+      // This catches state validation failures and other OAuth errors
+      console.error('[Auth] OAuth callback error:', err)
+      return c.redirect('/?error=auth_failed')
+    }
+  })
+  .post('/api/auth/logout', async (c) => {
     try {
-      const syncResult = await syncSitesFromPDS(session.did, session)
-      console.log(`[Auth] Sync complete: ${syncResult.synced} sites synced`)
-      if (syncResult.errors.length > 0) {
-        console.warn('[Auth] Sync errors:', syncResult.errors)
+      const cookieSession = c.cookie
+      const did = cookieSession.did?.value
+
+      // Clear the session cookie
+      cookieSession.did.value = ''
+      cookieSession.did.maxAge = 0
+
+      // If we have a DID, try to revoke the OAuth session
+      if (did && typeof did === 'string') {
+        try {
+          await client.revoke(did)
+          console.log('[Auth] Revoked OAuth session for', did)
+        } catch (err) {
+          console.error('[Auth] Failed to revoke session:', err)
+          // Continue with logout even if revoke fails
+        }
       }
+
+      return { success: true }
     } catch (err) {
-      console.error('[Auth] Failed to sync sites:', err)
-      // Don't fail auth if sync fails, just log it
+      console.error('[Auth] Logout error:', err)
+      return { error: 'Logout failed' }
     }
+  })
+  .get('/api/auth/status', async (c) => {
+    try {
+      const auth = await authenticateRequest(client, c.cookie)
 
-    // Check if user has any sites or domain
-    const sites = await getSitesByDid(session.did)
-    const domain = await getDomainByDid(session.did)
+      if (!auth) {
+        return { authenticated: false }
+      }
 
-    // If no sites and no domain, redirect to onboarding
-    if (sites.length === 0 && !domain) {
-      return c.redirect('/onboarding')
+      return {
+        authenticated: true,
+        did: auth.did
+      }
+    } catch (err) {
+      console.error('[Auth] Status check error:', err)
+      return { authenticated: false }
     }
-
-    return c.redirect('/editor')
   })
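A hedged sketch of how a frontend might use the two new endpoints; the routes come from this diff, and the response shapes match what the handlers above return:

// Client-side usage sketch for the new auth endpoints (browser fetch).
async function signOutIfSignedIn(): Promise<void> {
  const status = await fetch('/api/auth/status', { credentials: 'include' })
    .then(r => r.json() as Promise<{ authenticated: boolean; did?: string }>);

  if (status.authenticated) {
    // Clears the did cookie and best-effort revokes the OAuth session server-side.
    await fetch('/api/auth/logout', { method: 'POST', credentials: 'include' });
  }
}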
+97 -103
src/routes/wisp.ts
···
     files: File | File[]
   };
 
-  console.log('🚀 Starting upload process', { siteName, fileCount: Array.isArray(files) ? files.length : 1 });
-
   try {
     if (!siteName) {
-      console.error('❌ Site name is required');
       throw new Error('Site name is required')
     }
 
     if (!isValidSiteName(siteName)) {
-      console.error('❌ Invalid site name format');
       throw new Error('Invalid site name: must be 1-512 characters and contain only alphanumeric, dots, dashes, underscores, tildes, and colons')
     }
 
-    console.log('✅ Initial validation passed');
-
     // Check if files were provided
     const hasFiles = files && (Array.isArray(files) ? files.length > 0 : !!files);
 
     if (!hasFiles) {
-      console.log('📝 Creating empty site (no files provided)');
-
       // Create agent with OAuth session
-      console.log('🔐 Creating agent with OAuth session');
       const agent = new Agent((url, init) => auth.session.fetchHandler(url, init))
-      console.log('✅ Agent created successfully');
 
       // Create empty manifest
       const emptyManifest = {
···
       // Use site name as rkey
       const rkey = siteName;
 
-      // Create the record with explicit rkey
-      console.log(`📝 Creating empty site record in repo with rkey: ${rkey}`);
       const record = await agent.com.atproto.repo.putRecord({
         repo: auth.did,
         collection: 'place.wisp.fs',
···
         record: emptyManifest
       });
 
-      console.log('✅ Empty site record created successfully:', {
-        uri: record.data.uri,
-        cid: record.data.cid
-      });
-
-      // Store site in database cache
-      console.log('💾 Storing site in database cache');
       await upsertSite(auth.did, rkey, siteName);
-      console.log('✅ Site stored in database');
 
       return {
         success: true,
···
     }
 
     // Create agent with OAuth session
-    console.log('🔐 Creating agent with OAuth session');
     const agent = new Agent((url, init) => auth.session.fetchHandler(url, init))
-    console.log('✅ Agent created successfully');
 
     // Convert File objects to UploadedFile format
     // Elysia gives us File objects directly, handle both single file and array
     const fileArray = Array.isArray(files) ? files : [files];
-    console.log(`📁 Processing ${fileArray.length} files`);
     const uploadedFiles: UploadedFile[] = [];
 
     // Define allowed file extensions for static site hosting
···
     for (let i = 0; i < fileArray.length; i++) {
       const file = fileArray[i];
       const fileExtension = '.' + file.name.split('.').pop()?.toLowerCase();
-
-      console.log(`📄 Processing file ${i + 1}/${fileArray.length}: ${file.name} (${file.size} bytes, ${file.type})`);
-
+
       // Skip excluded files
       if (excludedFiles.has(fileExtension)) {
-        console.log(`⏭️ Skipping excluded file: ${file.name}`);
         continue;
       }
-
+
       // Skip files that aren't in allowed extensions
       if (!allowedExtensions.has(fileExtension)) {
-        console.log(`⏭️ Skipping non-web file: ${file.name} (${fileExtension})`);
         continue;
       }
-
+
       // Skip files that are too large (limit to 100MB per file)
       const maxSize = 100 * 1024 * 1024; // 100MB
       if (file.size > maxSize) {
-        console.log(`⏭️ Skipping large file: ${file.name} (${(file.size / 1024 / 1024).toFixed(2)}MB > 100MB limit)`);
         continue;
       }
-
-      console.log(`✅ Including file: ${file.name}`);
+
       const arrayBuffer = await file.arrayBuffer();
       uploadedFiles.push({
         name: file.name,
···
     // Check total size limit (300MB)
     const totalSize = uploadedFiles.reduce((sum, file) => sum + file.size, 0);
     const maxTotalSize = 300 * 1024 * 1024; // 300MB
-
-    console.log(`📊 Filtered to ${uploadedFiles.length} files from ${fileArray.length} total files`);
-    console.log(`📦 Total size: ${(totalSize / 1024 / 1024).toFixed(2)}MB (limit: 300MB)`);
 
     if (totalSize > maxTotalSize) {
       throw new Error(`Total upload size ${(totalSize / 1024 / 1024).toFixed(2)}MB exceeds 300MB limit`);
     }
 
     if (uploadedFiles.length === 0) {
-      console.log('⚠️ No valid web files found, creating empty site instead');
 
       // Create empty manifest
       const emptyManifest = {
···
       // Use site name as rkey
       const rkey = siteName;
 
-      // Create the record with explicit rkey
-      console.log(`📝 Creating empty site record in repo with rkey: ${rkey}`);
       const record = await agent.com.atproto.repo.putRecord({
         repo: auth.did,
         collection: 'place.wisp.fs',
···
         record: emptyManifest
       });
 
-      console.log('✅ Empty site record created successfully:', {
-        uri: record.data.uri,
-        cid: record.data.cid
-      });
-
-      // Store site in database cache
-      console.log('💾 Storing site in database cache');
       await upsertSite(auth.did, rkey, siteName);
-      console.log('✅ Site stored in database');
 
       return {
         success: true,
···
       };
     }
 
-    console.log('✅ File conversion completed');
-
     // Process files into directory structure
-    console.log('🏗️ Building directory structure');
     const { directory, fileCount } = processUploadedFiles(uploadedFiles);
-    console.log(`✅ Directory structure created with ${fileCount} files`);
 
-    // Upload files as blobs
-    const uploadResults: FileUploadResult[] = [];
-    const filePaths: string[] = [];
+    // Upload files as blobs in parallel
+    const mimeTypeMismatches: Array<{file: string, sent: string, returned: string}> = [];
 
-    console.log('⬆️ Starting blob upload process');
-    for (let i = 0; i < uploadedFiles.length; i++) {
-      const file = uploadedFiles[i];
-      console.log(`📤 Uploading blob ${i + 1}/${uploadedFiles.length}: ${file.name}`);
-
+    const uploadPromises = uploadedFiles.map(async (file, i) => {
       try {
-        console.log(`🔍 Upload details:`, {
-          fileName: file.name,
-          fileSize: file.size,
-          mimeType: file.mimeType,
-          contentLength: file.content.length
-        });
-
         const uploadResult = await agent.com.atproto.repo.uploadBlob(
           file.content,
           {
···
           }
         );
 
-        console.log(`✅ Upload successful for ${file.name}:`, {
-          hash: uploadResult.data.blob.ref.toString(),
-          mimeType: uploadResult.data.blob.mimeType,
-          size: uploadResult.data.blob.size
-        });
+        const sentMimeType = file.mimeType;
+        const returnedBlobRef = uploadResult.data.blob;
 
-        uploadResults.push({
-          hash: uploadResult.data.blob.ref.toString(),
-          blobRef: uploadResult.data.blob
-        });
+        // Track MIME type mismatches for summary
+        if (sentMimeType !== returnedBlobRef.mimeType) {
+          mimeTypeMismatches.push({
+            file: file.name,
+            sent: sentMimeType,
+            returned: returnedBlobRef.mimeType
+          });
+        }
 
-        filePaths.push(file.name);
+        // Use the blob ref exactly as returned from PDS
+        return {
+          result: {
+            hash: returnedBlobRef.ref.$link || returnedBlobRef.ref.toString(),
+            blobRef: returnedBlobRef
+          },
+          filePath: file.name,
+          sentMimeType,
+          returnedMimeType: returnedBlobRef.mimeType
+        };
       } catch (uploadError) {
-        console.error(`❌ Upload failed for file ${file.name}:`, uploadError);
-        console.error('Upload error details:', {
-          fileName: file.name,
-          fileSize: file.size,
-          mimeType: file.mimeType,
-          error: uploadError
-        });
+        console.error(`❌ Upload failed for ${file.name}:`, uploadError);
         throw uploadError;
       }
+    });
+
+    // Wait for all uploads to complete
+    const uploadedBlobs = await Promise.all(uploadPromises);
+
+    // Show MIME type mismatch summary
+    if (mimeTypeMismatches.length > 0) {
+      console.warn(`\n⚠️ PDS changed MIME types for ${mimeTypeMismatches.length} files:`);
+      mimeTypeMismatches.slice(0, 20).forEach(m => {
+        console.warn(`  ${m.file}: ${m.sent} → ${m.returned}`);
+      });
+      if (mimeTypeMismatches.length > 20) {
+        console.warn(`  ... and ${mimeTypeMismatches.length - 20} more`);
+      }
+      console.warn('');
     }
 
-    console.log('✅ All blobs uploaded successfully');
+    // CRITICAL: Find files uploaded as application/octet-stream
+    const octetStreamFiles = uploadedBlobs.filter(b => b.returnedMimeType === 'application/octet-stream');
+    if (octetStreamFiles.length > 0) {
+      console.error(`\n🚨 FILES UPLOADED AS application/octet-stream (${octetStreamFiles.length}):`);
+      octetStreamFiles.forEach(f => {
+        console.error(`  ${f.filePath}: sent=${f.sentMimeType}, returned=${f.returnedMimeType}`);
+      });
+      console.error('');
+    }
+
+    // Extract results and file paths in correct order
+    const uploadResults: FileUploadResult[] = uploadedBlobs.map(blob => blob.result);
+    const filePaths: string[] = uploadedBlobs.map(blob => blob.filePath);
 
     // Update directory with file blobs
-    console.log('🔄 Updating file blobs in directory structure');
     const updatedDirectory = updateFileBlobs(directory, uploadResults, filePaths);
-    console.log('✅ File blobs updated');
 
     // Create manifest
-    console.log('📋 Creating manifest');
     const manifest = createManifest(siteName, updatedDirectory, fileCount);
-    console.log('✅ Manifest created');
 
     // Use site name as rkey
     const rkey = siteName;
 
-    // Create the record with explicit rkey
-    console.log(`📝 Creating record in repo with rkey: ${rkey}`);
-    const record = await agent.com.atproto.repo.putRecord({
-      repo: auth.did,
-      collection: 'place.wisp.fs',
-      rkey: rkey,
-      record: manifest
-    });
+    let record;
+    try {
+      record = await agent.com.atproto.repo.putRecord({
+        repo: auth.did,
+        collection: 'place.wisp.fs',
+        rkey: rkey,
+        record: manifest
+      });
+    } catch (putRecordError: any) {
+      console.error('\n❌ Failed to create record on PDS');
+      console.error('Error:', putRecordError.message);
 
-    console.log('✅ Record created successfully:', {
-      uri: record.data.uri,
-      cid: record.data.cid
-    });
+      // Try to identify which file has the MIME type mismatch
+      if (putRecordError.message?.includes('Mimetype') || putRecordError.message?.includes('mimeType')) {
+        console.error('\n🔍 Analyzing manifest for MIME type issues...');
+
+        // Recursively check all blobs in manifest
+        const checkBlobs = (node: any, path: string = '') => {
+          if (node.type === 'file' && node.blob) {
+            const mimeType = node.blob.mimeType;
+            console.error(`  File: ${path} - MIME: ${mimeType}`);
+          } else if (node.type === 'directory' && node.entries) {
+            for (const entry of node.entries) {
+              const entryPath = path ? `${path}/${entry.name}` : entry.name;
+              checkBlobs(entry.node, entryPath);
+            }
+          }
+        };
+
+        checkBlobs(manifest.root, '');
+
+        console.error('\n📊 Blob upload summary:');
+        uploadedBlobs.slice(0, 20).forEach((b, i) => {
+          console.error(`  [${i}] ${b.filePath}: sent=${b.sentMimeType}, returned=${b.returnedMimeType}`);
+        });
+        if (uploadedBlobs.length > 20) {
+          console.error(`  ... and ${uploadedBlobs.length - 20} more`);
+        }
+      }
+
+      throw putRecordError;
+    }
 
     // Store site in database cache
-    console.log('💾 Storing site in database cache');
     await upsertSite(auth.did, rkey, siteName);
-    console.log('✅ Site stored in database');
 
     const result = {
       success: true,
···
       siteName
     };
 
-    console.log('🎉 Upload process completed successfully');
     return result;
   } catch (error) {
     console.error('❌ Upload error:', error);
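The refactor leans on Promise.all preserving input order, so uploadResults[i] and filePaths[i] stay aligned even though uploads now finish in arbitrary order. A standalone illustration of that guarantee, assuming top-level await:

// Self-contained sketch: Promise.all resolves in input order regardless of completion order.
const names = ['index.html', 'styles.css', 'app.js'];

const tasks = names.map(async (name) => {
  // Simulate uploads that finish out of order.
  await new Promise(resolve => setTimeout(resolve, Math.random() * 100));
  return { filePath: name };
});

const done = await Promise.all(tasks);
console.log(done.map(d => d.filePath)); // always ['index.html', 'styles.css', 'app.js']

One trade-off worth noting: Promise.all starts every upload at once with no concurrency cap, so very large sites will open many simultaneous requests to the PDS; a bounded worker pool could be layered on later if that becomes a problem.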