hosting-service/src/lib/utils.ts  (+10 -4)
···
   */
  export function shouldCompressMimeType(mimeType: string | undefined): boolean {
    if (!mimeType) return false;
-
+
    const mime = mimeType.toLowerCase();
-
-   // Text-based web assets that benefit from compression
+
+   // Text-based web assets and uncompressed audio that benefit from compression
    const compressibleTypes = [
      'text/html',
      'text/css',
···
      'application/json',
      'text/plain',
      'image/svg+xml',
+     // Uncompressed audio formats
+     'audio/wav',
+     'audio/wave',
+     'audio/x-wav',
+     'audio/aiff',
+     'audio/x-aiff',
    ];
-
+
    if (compressibleTypes.some(type => mime === type || mime.startsWith(type))) {
      return true;
    }
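
Note: with the audio types added above, the hosting side now treats uncompressed audio like other compressible assets. A quick illustration of the intended behaviour (a sketch; the import path is illustrative, and types outside the list are assumed to keep falling through to false):

    import { shouldCompressMimeType } from './lib/utils'; // path illustrative

    shouldCompressMimeType('audio/wav');    // true  - newly compressible
    shouldCompressMimeType('audio/x-aiff'); // true  - newly compressible
    shouldCompressMimeType('text/html');    // true  - unchanged
    shouldCompressMimeType(undefined);      // false - unchanged
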

hosting-service/src/server.ts  (+43 -12)
···
      const shouldServeCompressed = shouldCompressMimeType(meta.mimeType);

      if (!shouldServeCompressed) {
-       const { gunzipSync } = await import('zlib');
-       const decompressed = gunzipSync(content);
-       headers['Content-Type'] = meta.mimeType;
-       headers['Cache-Control'] = 'public, max-age=31536000, immutable';
-       return new Response(decompressed, { headers });
+       // Verify content is actually gzipped before attempting decompression
+       const isGzipped = content.length >= 2 && content[0] === 0x1f && content[1] === 0x8b;
+       if (isGzipped) {
+         const { gunzipSync } = await import('zlib');
+         const decompressed = gunzipSync(content);
+         headers['Content-Type'] = meta.mimeType;
+         headers['Cache-Control'] = 'public, max-age=31536000, immutable';
+         return new Response(decompressed, { headers });
+       } else {
+         // Meta says gzipped but content isn't - serve as-is
+         console.warn(`File ${filePath} has gzip encoding in meta but content lacks gzip magic bytes`);
+         headers['Content-Type'] = meta.mimeType;
+         headers['Cache-Control'] = 'public, max-age=31536000, immutable';
+         return new Response(content, { headers });
+       }
      }

      headers['Content-Type'] = meta.mimeType;
···
      if (isHtmlContent(requestPath, mimeType)) {
        let htmlContent: string;
        if (isGzipped) {
-         const { gunzipSync } = await import('zlib');
-         htmlContent = gunzipSync(content).toString('utf-8');
+         // Verify content is actually gzipped
+         const hasGzipMagic = content.length >= 2 && content[0] === 0x1f && content[1] === 0x8b;
+         if (hasGzipMagic) {
+           const { gunzipSync } = await import('zlib');
+           htmlContent = gunzipSync(content).toString('utf-8');
+         } else {
+           console.warn(`File ${requestPath} marked as gzipped but lacks magic bytes, serving as-is`);
+           htmlContent = content.toString('utf-8');
+         }
        } else {
          htmlContent = content.toString('utf-8');
        }
···
      if (isGzipped) {
        const shouldServeCompressed = shouldCompressMimeType(mimeType);
        if (!shouldServeCompressed) {
-         const { gunzipSync } = await import('zlib');
-         const decompressed = gunzipSync(content);
-         return new Response(decompressed, { headers });
+         // Verify content is actually gzipped
+         const hasGzipMagic = content.length >= 2 && content[0] === 0x1f && content[1] === 0x8b;
+         if (hasGzipMagic) {
+           const { gunzipSync } = await import('zlib');
+           const decompressed = gunzipSync(content);
+           return new Response(decompressed, { headers });
+         } else {
+           console.warn(`File ${requestPath} marked as gzipped but lacks magic bytes, serving as-is`);
+           return new Response(content, { headers });
+         }
        }
        headers['Content-Encoding'] = 'gzip';
      }
···

      let htmlContent: string;
      if (isGzipped) {
-       const { gunzipSync } = await import('zlib');
-       htmlContent = gunzipSync(indexContent).toString('utf-8');
+       // Verify content is actually gzipped
+       const hasGzipMagic = indexContent.length >= 2 && indexContent[0] === 0x1f && indexContent[1] === 0x8b;
+       if (hasGzipMagic) {
+         const { gunzipSync } = await import('zlib');
+         htmlContent = gunzipSync(indexContent).toString('utf-8');
+       } else {
+         console.warn(`Index file marked as gzipped but lacks magic bytes, serving as-is`);
+         htmlContent = indexContent.toString('utf-8');
+       }
      } else {
        htmlContent = indexContent.toString('utf-8');
      }
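
Note: the two-byte gzip signature check (0x1f 0x8b, per RFC 1952) now appears in four places in server.ts. A small shared helper could centralize it; this is a sketch rather than part of the diff, and the isGzipBuffer name is hypothetical:

    // Hypothetical helper: true when the buffer starts with the gzip magic bytes.
    function isGzipBuffer(buf: Buffer): boolean {
      return buf.length >= 2 && buf[0] === 0x1f && buf[1] === 0x8b;
    }

    // Each call site above would then reduce to:
    //   if (isGzipBuffer(content)) { /* gunzip and serve */ } else { /* warn and serve as-is */ }
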

public/editor/tabs/UploadTab.tsx  (+162 -8)
···
    Globe,
    Upload,
    AlertCircle,
-   Loader2
+   Loader2,
+   ChevronDown,
+   ChevronUp,
+   CheckCircle2,
+   XCircle,
+   RefreshCw
  } from 'lucide-react'
  import type { SiteWithDomains } from '../hooks/useSiteData'
+
+ type FileStatus = 'pending' | 'checking' | 'uploading' | 'uploaded' | 'reused' | 'failed'
+
+ interface FileProgress {
+   name: string
+   status: FileStatus
+   error?: string
+ }

  interface UploadTabProps {
    sites: SiteWithDomains[]
···
    const [isUploading, setIsUploading] = useState(false)
    const [uploadProgress, setUploadProgress] = useState('')
    const [skippedFiles, setSkippedFiles] = useState<Array<{ name: string; reason: string }>>([])
+   const [failedFiles, setFailedFiles] = useState<Array<{ name: string; index: number; error: string; size: number }>>([])
    const [uploadedCount, setUploadedCount] = useState(0)
+   const [fileProgressList, setFileProgressList] = useState<FileProgress[]>([])
+   const [showFileProgress, setShowFileProgress] = useState(false)

    // Keep SSE connection alive across tab switches
    const eventSourceRef = useRef<EventSource | null>(null)
···
      const progressData = JSON.parse(event.data)
      const { progress, status } = progressData

+     // Update file progress list if we have current file info
+     if (progress.currentFile && progress.currentFileStatus) {
+       setFileProgressList(prev => {
+         const existing = prev.find(f => f.name === progress.currentFile)
+         if (existing) {
+           // Update existing file status
+           return prev.map(f =>
+             f.name === progress.currentFile
+               ? { ...f, status: progress.currentFileStatus as FileStatus }
+               : f
+           )
+         } else {
+           // Add new file
+           return [...prev, {
+             name: progress.currentFile,
+             status: progress.currentFileStatus as FileStatus
+           }]
+         }
+       })
+     }
+
      // Update progress message based on phase
      let message = 'Processing...'
      if (progress.phase === 'validating') {
···
      eventSourceRef.current = null
      currentJobIdRef.current = null

-     setUploadProgress('Upload complete!')
+     const hasIssues = (result.skippedFiles && result.skippedFiles.length > 0) ||
+       (result.failedFiles && result.failedFiles.length > 0)
+
+     // Update file progress list with failed files
+     if (result.failedFiles && result.failedFiles.length > 0) {
+       setFileProgressList(prev => {
+         const updated = [...prev]
+         result.failedFiles.forEach((failedFile: any) => {
+           const existing = updated.find(f => f.name === failedFile.name)
+           if (existing) {
+             existing.status = 'failed'
+             existing.error = failedFile.error
+           } else {
+             updated.push({
+               name: failedFile.name,
+               status: 'failed',
+               error: failedFile.error
+             })
+           }
+         })
+         return updated
+       })
+     }
+
+     setUploadProgress(hasIssues ? 'Upload completed with issues' : 'Upload complete!')
      setSkippedFiles(result.skippedFiles || [])
+     setFailedFiles(result.failedFiles || [])
      setUploadedCount(result.uploadedCount || result.fileCount || 0)
      setSelectedSiteRkey('')
      setNewSiteName('')
···
      // Refresh sites list
      onUploadComplete()

-     // Reset form
-     const resetDelay = result.skippedFiles && result.skippedFiles.length > 0 ? 4000 : 1500
+     // Reset form (wait longer if there are issues to show)
+     const resetDelay = hasIssues ? 6000 : 1500
      setTimeout(() => {
        setUploadProgress('')
        setSkippedFiles([])
+       setFailedFiles([])
        setUploadedCount(0)
+       setFileProgressList([])
        setIsUploading(false)
      }, resetDelay)
    })
···
          </div>
        </div>

-       {skippedFiles.length > 0 && (
-         <div className="p-4 bg-yellow-500/10 border border-yellow-500/20 rounded-lg">
-           <div className="flex items-start gap-2 text-yellow-600 dark:text-yellow-400 mb-2">
+       {fileProgressList.length > 0 && (
+         <div className="border rounded-lg overflow-hidden">
+           <button
+             onClick={() => setShowFileProgress(!showFileProgress)}
+             className="w-full p-3 bg-muted/50 hover:bg-muted transition-colors flex items-center justify-between text-sm font-medium"
+           >
+             <span>
+               Processing files ({fileProgressList.filter(f => f.status === 'uploaded' || f.status === 'reused').length}/{fileProgressList.length})
+             </span>
+             {showFileProgress ? (
+               <ChevronUp className="w-4 h-4" />
+             ) : (
+               <ChevronDown className="w-4 h-4" />
+             )}
+           </button>
+           {showFileProgress && (
+             <div className="max-h-64 overflow-y-auto p-3 space-y-1 bg-background">
+               {fileProgressList.map((file, idx) => (
+                 <div
+                   key={idx}
+                   className="flex items-start gap-2 text-xs p-2 rounded hover:bg-muted/50 transition-colors"
+                 >
+                   {file.status === 'checking' && (
+                     <Loader2 className="w-3 h-3 mt-0.5 animate-spin text-blue-500 shrink-0" />
+                   )}
+                   {file.status === 'uploading' && (
+                     <Loader2 className="w-3 h-3 mt-0.5 animate-spin text-purple-500 shrink-0" />
+                   )}
+                   {file.status === 'uploaded' && (
+                     <CheckCircle2 className="w-3 h-3 mt-0.5 text-green-500 shrink-0" />
+                   )}
+                   {file.status === 'reused' && (
+                     <RefreshCw className="w-3 h-3 mt-0.5 text-cyan-500 shrink-0" />
+                   )}
+                   {file.status === 'failed' && (
+                     <XCircle className="w-3 h-3 mt-0.5 text-red-500 shrink-0" />
+                   )}
+                   <div className="flex-1 min-w-0">
+                     <div className="font-mono truncate">{file.name}</div>
+                     {file.error && (
+                       <div className="text-red-500 mt-0.5">
+                         {file.error}
+                       </div>
+                     )}
+                     {file.status === 'checking' && (
+                       <div className="text-muted-foreground">Checking for changes...</div>
+                     )}
+                     {file.status === 'uploading' && (
+                       <div className="text-muted-foreground">Uploading to PDS...</div>
+                     )}
+                     {file.status === 'reused' && (
+                       <div className="text-muted-foreground">Reused (unchanged)</div>
+                     )}
+                   </div>
+                 </div>
+               ))}
+             </div>
+           )}
+         </div>
+       )}
+
+       {failedFiles.length > 0 && (
+         <div className="p-4 bg-red-500/10 border border-red-500/20 rounded-lg">
+           <div className="flex items-start gap-2 text-red-600 dark:text-red-400 mb-2">
              <AlertCircle className="w-4 h-4 mt-0.5 shrink-0" />
              <div className="flex-1">
                <span className="font-medium">
-                 {skippedFiles.length} file{skippedFiles.length > 1 ? 's' : ''} skipped
+                 {failedFiles.length} file{failedFiles.length > 1 ? 's' : ''} failed to upload
                </span>
                {uploadedCount > 0 && (
                  <span className="text-sm ml-2">
                    ({uploadedCount} uploaded successfully)
                  </span>
                )}
+             </div>
+           </div>
+           <div className="ml-6 space-y-1 max-h-40 overflow-y-auto">
+             {failedFiles.slice(0, 10).map((file, idx) => (
+               <div key={idx} className="text-xs">
+                 <div className="font-mono font-semibold">{file.name}</div>
+                 <div className="text-muted-foreground ml-2">
+                   Error: {file.error}
+                   {file.size > 0 && ` (${(file.size / 1024).toFixed(1)} KB)`}
+                 </div>
+               </div>
+             ))}
+             {failedFiles.length > 10 && (
+               <div className="text-xs text-muted-foreground">
+                 ...and {failedFiles.length - 10} more
+               </div>
+             )}
+           </div>
+         </div>
+       )}
+
+       {skippedFiles.length > 0 && (
+         <div className="p-4 bg-yellow-500/10 border border-yellow-500/20 rounded-lg">
+           <div className="flex items-start gap-2 text-yellow-600 dark:text-yellow-400 mb-2">
+             <AlertCircle className="w-4 h-4 mt-0.5 shrink-0" />
+             <div className="flex-1">
+               <span className="font-medium">
+                 {skippedFiles.length} file{skippedFiles.length > 1 ? 's' : ''} skipped
+               </span>
              </div>
            </div>
            <div className="ml-6 space-y-1 max-h-32 overflow-y-auto">
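
Note: for reference, the SSE payloads this tab consumes look roughly like the sketch below. It is inferred from the fields read above and from the progress/result types in src/lib/upload-jobs.ts (next section); the exact status values and whether result rides on the same event stream are assumptions, not part of the diff:

    // Sketch of a progress event as seen by the onmessage handler (not an authoritative schema).
    interface UploadProgressEvent {
      status: string                      // e.g. 'processing' | 'completed' (assumed values)
      progress: {
        phase: 'validating' | 'compressing' | 'uploading' | 'creating_manifest' | 'finalizing' | 'done'
        currentFile?: string
        currentFileStatus?: 'checking' | 'uploading' | 'uploaded' | 'reused' | 'failed'
        filesProcessed?: number
        filesUploaded?: number
        filesReused?: number
      }
      result?: {                          // present once the job finishes
        fileCount?: number
        uploadedCount?: number
        skippedFiles?: Array<{ name: string; reason: string }>
        failedFiles?: Array<{ name: string; index: number; error: string; size: number }>
        hasFailures?: boolean
      }
    }
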

src/lib/upload-jobs.ts  (+3)
···
    filesUploaded: number;
    filesReused: number;
    currentFile?: string;
+   currentFileStatus?: 'checking' | 'uploading' | 'uploaded' | 'reused' | 'failed';
    phase: 'validating' | 'compressing' | 'uploading' | 'creating_manifest' | 'finalizing' | 'done';
  }

···
    fileCount?: number;
    siteName?: string;
    skippedFiles?: Array<{ name: string; reason: string }>;
+   failedFiles?: Array<{ name: string; index: number; error: string; size: number }>;
    uploadedCount?: number;
+   hasFailures?: boolean;
  };
  error?: string;
  createdAt: number;

src/lib/wisp-utils.ts  (+9 -2)
···
    mimeType: string;
    size: number;
    compressed?: boolean;
+   base64Encoded?: boolean;
    originalMimeType?: string;
  }

···
   * Determine if a file should be gzip compressed based on its MIME type
   */
  export function shouldCompressFile(mimeType: string): boolean {
-   // Compress text-based files
+   // Compress text-based files and uncompressed audio formats
    const compressibleTypes = [
      'text/html',
      'text/css',
···
      'text/xml',
      'application/xml',
      'text/plain',
-     'application/x-javascript'
+     'application/x-javascript',
+     // Uncompressed audio formats (WAV, AIFF, etc.)
+     'audio/wav',
+     'audio/wave',
+     'audio/x-wav',
+     'audio/aiff',
+     'audio/x-aiff'
    ];

    // Check if mime type starts with any compressible type

src/routes/wisp.ts  (+172 -32)
···

  for (let i = 0; i < fileArray.length; i++) {
    const file = fileArray[i];
+
+   // Skip undefined/null files
+   if (!file || !file.name) {
+     console.log(`Skipping undefined file at index ${i}`);
+     skippedFiles.push({
+       name: `[undefined file at index ${i}]`,
+       reason: 'Invalid file object'
+     });
+     continue;
+   }
+
    console.log(`Processing file ${i + 1}/${fileArray.length}:`, file.name, file.size, 'bytes');
    updateJobProgress(jobId, {
      filesProcessed: i + 1,
···
    const originalContent = Buffer.from(arrayBuffer);
    const originalMimeType = file.type || 'application/octet-stream';

-   // Compress and base64 encode ALL files
-   const compressedContent = compressFile(originalContent);
-   const base64Content = Buffer.from(compressedContent.toString('base64'), 'binary');
-   const compressionRatio = (compressedContent.length / originalContent.length * 100).toFixed(1);
-   console.log(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${base64Content.length} bytes`);
-   logger.info(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${base64Content.length} bytes`);
+   // Determine if file should be compressed
+   const shouldCompress = shouldCompressFile(originalMimeType);
+
+   // Text files (HTML/CSS/JS) need base64 encoding to prevent PDS content sniffing
+   // Audio files just need compression without base64
+   const needsBase64 = originalMimeType.startsWith('text/') ||
+     originalMimeType.includes('html') ||
+     originalMimeType.includes('javascript') ||
+     originalMimeType.includes('css') ||
+     originalMimeType.includes('json') ||
+     originalMimeType.includes('xml') ||
+     originalMimeType.includes('svg');
+
+   let finalContent: Buffer;
+   let compressed = false;
+   let base64Encoded = false;
+
+   if (shouldCompress) {
+     const compressedContent = compressFile(originalContent);
+     compressed = true;
+
+     if (needsBase64) {
+       // Text files: compress AND base64 encode
+       finalContent = Buffer.from(compressedContent.toString('base64'), 'binary');
+       base64Encoded = true;
+       const compressionRatio = (compressedContent.length / originalContent.length * 100).toFixed(1);
+       console.log(`Compressing+base64 ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${finalContent.length} bytes`);
+       logger.info(`Compressing+base64 ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${finalContent.length} bytes`);
+     } else {
+       // Audio files: just compress, no base64
+       finalContent = compressedContent;
+       const compressionRatio = (compressedContent.length / originalContent.length * 100).toFixed(1);
+       console.log(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%)`);
+       logger.info(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%)`);
+     }
+   } else {
+     // Binary files: upload directly
+     finalContent = originalContent;
+     console.log(`Uploading ${file.name} directly: ${originalContent.length} bytes (no compression)`);
+     logger.info(`Uploading ${file.name} directly: ${originalContent.length} bytes (binary)`);
+   }

    uploadedFiles.push({
      name: file.name,
-     content: base64Content,
+     content: finalContent,
      mimeType: originalMimeType,
-     size: base64Content.length,
-     compressed: true,
+     size: finalContent.length,
+     compressed,
+     base64Encoded,
      originalMimeType
    });
  }
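
Note: the loop above makes a three-way choice per file: text-like assets are gzipped and base64-encoded (to avoid PDS content sniffing, per the comment), uncompressed audio is gzipped only, and everything else is uploaded as-is. Condensed into one function it would look roughly like this; encodeForUpload is a hypothetical name and the import paths are illustrative:

    import { compressFile, shouldCompressFile } from '../lib/wisp-utils'; // paths illustrative

    // Sketch of the per-file decision made in the loop above.
    function encodeForUpload(content: Buffer, mimeType: string) {
      const textLike =
        mimeType.startsWith('text/') || mimeType.includes('html') || mimeType.includes('javascript') ||
        mimeType.includes('css') || mimeType.includes('json') || mimeType.includes('xml') || mimeType.includes('svg');

      if (!shouldCompressFile(mimeType)) {
        // Already-compressed binaries (images, mp3, ...): upload the original bytes.
        return { content, compressed: false, base64Encoded: false };
      }
      const gzipped = compressFile(content);
      if (textLike) {
        // Text assets: gzip, then base64.
        return { content: Buffer.from(gzipped.toString('base64')), compressed: true, base64Encoded: true };
      }
      // Uncompressed audio (WAV/AIFF): gzip only.
      return { content: gzipped, compressed: true, base64Encoded: false };
    }
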
···
  console.log('Starting blob upload/reuse phase...');
  updateJobProgress(jobId, { phase: 'uploading' });

- // Helper function to upload blob with exponential backoff retry
+ // Helper function to upload blob with exponential backoff retry and timeout
  const uploadBlobWithRetry = async (
    agent: Agent,
    content: Buffer,
    mimeType: string,
    fileName: string,
-   maxRetries = 3
+   maxRetries = 5
  ) => {
    for (let attempt = 0; attempt < maxRetries; attempt++) {
      try {
-       return await agent.com.atproto.repo.uploadBlob(content, { encoding: mimeType });
+       console.log(`[File Upload] Starting upload attempt ${attempt + 1}/${maxRetries} for ${fileName} (${content.length} bytes, ${mimeType})`);
+
+       // Add timeout wrapper to prevent hanging requests
+       const uploadPromise = agent.com.atproto.repo.uploadBlob(content, { encoding: mimeType });
+       const timeoutMs = 300000; // 5 minute timeout per upload
+
+       const timeoutPromise = new Promise((_, reject) => {
+         setTimeout(() => reject(new Error('Upload timeout')), timeoutMs);
+       });
+
+       const result = await Promise.race([uploadPromise, timeoutPromise]) as any;
+       console.log(`[File Upload] ✅ Successfully uploaded ${fileName} on attempt ${attempt + 1}`);
+       return result;
      } catch (error: any) {
        const isDPoPNonceError =
          error?.message?.toLowerCase().includes('nonce') ||
          error?.message?.toLowerCase().includes('dpop') ||
          error?.status === 409;

-       if (isDPoPNonceError && attempt < maxRetries - 1) {
-         const backoffMs = 100 * Math.pow(2, attempt); // 100ms, 200ms, 400ms
-         logger.info(`[File Upload] DPoP nonce conflict for ${fileName}, retrying in ${backoffMs}ms (attempt ${attempt + 1}/${maxRetries})`);
+       const isTimeout = error?.message === 'Upload timeout';
+       const isRateLimited = error?.status === 429 || error?.message?.toLowerCase().includes('rate');
+
+       // Retry on DPoP nonce conflicts, timeouts, or rate limits
+       if ((isDPoPNonceError || isTimeout || isRateLimited) && attempt < maxRetries - 1) {
+         let backoffMs: number;
+         if (isRateLimited) {
+           backoffMs = 2000 * Math.pow(2, attempt); // 2s, 4s, 8s, 16s for rate limits
+         } else if (isTimeout) {
+           backoffMs = 1000 * Math.pow(2, attempt); // 1s, 2s, 4s, 8s for timeouts
+         } else {
+           backoffMs = 100 * Math.pow(2, attempt); // 100ms, 200ms, 400ms for DPoP
+         }
+
+         const reason = isDPoPNonceError ? 'DPoP nonce conflict' : isTimeout ? 'timeout' : 'rate limit';
+         logger.info(`[File Upload] ${reason} for ${fileName}, retrying in ${backoffMs}ms (attempt ${attempt + 1}/${maxRetries})`);
+         console.log(`[File Upload] ${reason} for ${fileName}, retrying in ${backoffMs}ms`);
          await new Promise(resolve => setTimeout(resolve, backoffMs));
          continue;
        }
+
+       // Log detailed error information before throwing
+       logger.error(`[File Upload] ❌ Upload failed for ${fileName} (size: ${content.length} bytes, mimeType: ${mimeType}, attempt: ${attempt + 1}/${maxRetries})`, {
+         error: error?.error || error?.message || 'Unknown error',
+         status: error?.status,
+         headers: error?.headers,
+         success: error?.success
+       });
+       console.error(`[File Upload] ❌ Upload failed for ${fileName}:`, {
+         error: error?.error || error?.message || 'Unknown error',
+         status: error?.status,
+         size: content.length,
+         mimeType,
+         attempt: attempt + 1
+       });
        throw error;
      }
    }
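
Note: the retry helper above caps each attempt with a 5-minute Promise.race timeout, but the bare setTimeout keeps its timer running even after the upload settles. A small variant that clears the timer would look like this (a sketch; withTimeout is not part of the diff):

    // Hypothetical wrapper: same Promise.race idea, with the timer cleared once the promise settles.
    async function withTimeout<T>(promise: Promise<T>, timeoutMs: number, label: string): Promise<T> {
      let timer: ReturnType<typeof setTimeout> | undefined;
      const timeout = new Promise<never>((_, reject) => {
        timer = setTimeout(() => reject(new Error(`${label} timeout`)), timeoutMs);
      });
      try {
        return await Promise.race([promise, timeout]);
      } finally {
        if (timer !== undefined) clearTimeout(timer);
      }
    }

    // Usage inside the try block above (illustrative):
    //   const result = await withTimeout(agent.com.atproto.repo.uploadBlob(content, { encoding: mimeType }), 300000, fileName);
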
···
  };

  // Use sliding window concurrency for maximum throughput
- const CONCURRENCY_LIMIT = 50; // Maximum concurrent uploads with retry logic
+ const CONCURRENCY_LIMIT = 20; // Maximum concurrent uploads
  const uploadedBlobs: Array<{
    result: FileUploadResult;
    filePath: string;
···
    returnedMimeType: string;
    reused: boolean;
  }> = [];
+ const failedFiles: Array<{
+   name: string;
+   index: number;
+   error: string;
+   size: number;
+ }> = [];

  // Process file with sliding window concurrency
  const processFile = async (file: UploadedFile, index: number) => {
···

    if (existingBlob && existingBlob.cid === fileCID) {
      logger.info(`[File Upload] ♻️ Reused: ${file.name} (unchanged, CID: ${fileCID})`);
-     updateJobProgress(jobId, { filesReused: (getUploadJob(jobId)?.progress.filesReused || 0) + 1 });
+     updateJobProgress(jobId, {
+       filesReused: (getUploadJob(jobId)?.progress.filesReused || 0) + 1
+     });

      return {
        result: {
···
          ...(file.compressed && {
            encoding: 'gzip' as const,
            mimeType: file.originalMimeType || file.mimeType,
-           base64: true
+           base64: file.base64Encoded || false
          })
        },
        filePath: file.name,
···
      );

      const returnedBlobRef = uploadResult.data.blob;
-     updateJobProgress(jobId, { filesUploaded: (getUploadJob(jobId)?.progress.filesUploaded || 0) + 1 });
+     updateJobProgress(jobId, {
+       filesUploaded: (getUploadJob(jobId)?.progress.filesUploaded || 0) + 1
+     });
      logger.info(`[File Upload] ✅ Uploaded: ${file.name} (CID: ${fileCID})`);

      return {
···
        ...(file.compressed && {
          encoding: 'gzip' as const,
          mimeType: file.originalMimeType || file.mimeType,
-         base64: true
+         base64: file.base64Encoded || false
        })
      },
      filePath: file.name,
···
      reused: false
    };
  } catch (uploadError) {
-   logger.error('Upload failed for file', uploadError);
-   throw uploadError;
+   const fileName = file?.name || 'unknown';
+   const fileSize = file?.size || 0;
+   const errorMessage = uploadError instanceof Error ? uploadError.message : 'Unknown error';
+   const errorDetails = {
+     fileName,
+     fileSize,
+     index,
+     error: errorMessage,
+     stack: uploadError instanceof Error ? uploadError.stack : undefined
+   };
+   logger.error(`Upload failed for file: ${fileName} (${fileSize} bytes) at index ${index}`, errorDetails);
+   console.error(`Upload failed for file: ${fileName} (${fileSize} bytes) at index ${index}`, errorDetails);
+
+   // Track failed file but don't throw - continue with other files
+   failedFiles.push({
+     name: fileName,
+     index,
+     error: errorMessage,
+     size: fileSize
+   });
+
+   return null; // Return null to indicate failure
  }
  };

···
  const processWithConcurrency = async () => {
    const results: any[] = [];
    let fileIndex = 0;
-   const executing = new Set<Promise<void>>();
+   const executing = new Map<Promise<void>, { index: number; name: string }>();

    for (const file of validUploadedFiles) {
      const currentIndex = fileIndex++;
···
      const promise = processFile(file, currentIndex)
        .then(result => {
          results[currentIndex] = result;
+         console.log(`[Concurrency] File ${currentIndex} (${file.name}) completed successfully`);
        })
        .catch(error => {
-         logger.error(`Failed to process file at index ${currentIndex}`, error);
-         throw error; // Re-throw to fail the entire upload
+         // This shouldn't happen since processFile catches errors, but just in case
+         logger.error(`Unexpected error processing file at index ${currentIndex}`, error);
+         console.error(`[Concurrency] File ${currentIndex} (${file.name}) had unexpected error:`, error);
+         results[currentIndex] = null;
        })
        .finally(() => {
          executing.delete(promise);
+         const remaining = Array.from(executing.values()).map(f => `${f.index}:${f.name}`);
+         console.log(`[Concurrency] File ${currentIndex} (${file.name}) removed. Remaining ${executing.size}: [${remaining.join(', ')}]`);
        });

-     executing.add(promise);
+     executing.set(promise, { index: currentIndex, name: file.name });
+     const current = Array.from(executing.values()).map(f => `${f.index}:${f.name}`);
+     console.log(`[Concurrency] Added file ${currentIndex} (${file.name}). Total ${executing.size}: [${current.join(', ')}]`);

      if (executing.size >= CONCURRENCY_LIMIT) {
-       await Promise.race(executing);
+       console.log(`[Concurrency] Hit limit (${CONCURRENCY_LIMIT}), waiting for one to complete...`);
+       await Promise.race(executing.keys());
+       console.log(`[Concurrency] One completed, continuing. Remaining: ${executing.size}`);
      }
    }

    // Wait for remaining uploads
-   await Promise.all(executing);
-   return results.filter(r => r !== undefined); // Filter out any undefined entries
+   const remaining = Array.from(executing.values()).map(f => `${f.index}:${f.name}`);
+   console.log(`[Concurrency] Waiting for ${executing.size} remaining uploads: [${remaining.join(', ')}]`);
+   await Promise.all(executing.keys());
+   console.log(`[Concurrency] All uploads complete!`);
+   return results.filter(r => r !== undefined && r !== null); // Filter out null (failed) and undefined entries
  };

  const allResults = await processWithConcurrency();
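
Note: the Map in processWithConcurrency mainly exists so the logs can name which uploads are still in flight. Stripped of the logging, the sliding-window pattern it implements can be written generically like this (a sketch; runWithConcurrency is not part of the codebase):

    // Generic sliding-window runner: keeps at most `limit` tasks in flight and
    // starts the next one as soon as any in-flight task settles.
    async function runWithConcurrency<T, R>(
      items: T[],
      limit: number,
      worker: (item: T, index: number) => Promise<R>
    ): Promise<(R | null)[]> {
      const results: (R | null)[] = new Array(items.length).fill(null);
      const inFlight = new Set<Promise<void>>();

      for (let i = 0; i < items.length; i++) {
        const p = worker(items[i], i)
          .then(r => { results[i] = r; })
          .catch(() => { results[i] = null; })   // failures are recorded, not fatal
          .finally(() => { inFlight.delete(p); });
        inFlight.add(p);
        if (inFlight.size >= limit) {
          await Promise.race(inFlight);          // free up one slot before continuing
        }
      }
      await Promise.all(inFlight);
      return results;
    }
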
···

  const currentReused = uploadedBlobs.filter(b => b.reused).length;
  const currentUploaded = uploadedBlobs.filter(b => !b.reused).length;
- logger.info(`[File Upload] Upload complete - ${uploadedBlobs.length}/${validUploadedFiles.length} files (${currentUploaded} uploaded, ${currentReused} reused)`);
+ const successfulCount = uploadedBlobs.length;
+ const failedCount = failedFiles.length;
+
+ logger.info(`[File Upload] Upload complete - ${successfulCount}/${validUploadedFiles.length} files succeeded (${currentUploaded} uploaded, ${currentReused} reused), ${failedCount} failed`);
+
+ if (failedCount > 0) {
+   logger.warn(`[File Upload] ⚠️ Failed files:`, failedFiles);
+   console.warn(`[File Upload] ⚠️ ${failedCount} files failed to upload:`, failedFiles.map(f => f.name).join(', '));
+ }

  const reusedCount = uploadedBlobs.filter(b => b.reused).length;
  const uploadedCount = uploadedBlobs.filter(b => !b.reused).length;
- logger.info(`[File Upload] Upload phase complete! Total: ${uploadedBlobs.length} files (${uploadedCount} uploaded, ${reusedCount} reused)`);
+ logger.info(`[File Upload] Upload phase complete! Total: ${successfulCount} files (${uploadedCount} uploaded, ${reusedCount} reused)`);

  const uploadResults: FileUploadResult[] = uploadedBlobs.map(blob => blob.result);
  const filePaths: string[] = uploadedBlobs.map(blob => blob.filePath);
···
    fileCount,
    siteName,
    skippedFiles,
-   uploadedCount: validUploadedFiles.length
+   failedFiles,
+   uploadedCount: validUploadedFiles.length - failedFiles.length,
+   hasFailures: failedFiles.length > 0
  });

  console.log('=== UPLOAD FILES COMPLETE ===');