+530
-529
public/editor/tabs/UploadTab.tsx
+530
-529
public/editor/tabs/UploadTab.tsx
···
1
1
import { useState, useEffect, useRef } from 'react'
2
2
import {
3
-
Card,
4
-
CardContent,
5
-
CardDescription,
6
-
CardHeader,
7
-
CardTitle
3
+
Card,
4
+
CardContent,
5
+
CardDescription,
6
+
CardHeader,
7
+
CardTitle
8
8
} from '@public/components/ui/card'
9
9
import { Button } from '@public/components/ui/button'
10
10
import { Input } from '@public/components/ui/input'
···
12
12
import { RadioGroup, RadioGroupItem } from '@public/components/ui/radio-group'
13
13
import { Badge } from '@public/components/ui/badge'
14
14
import {
15
-
Globe,
16
-
Upload,
17
-
AlertCircle,
18
-
Loader2,
19
-
ChevronDown,
20
-
ChevronUp,
21
-
CheckCircle2,
22
-
XCircle,
23
-
RefreshCw
15
+
Globe,
16
+
Upload,
17
+
AlertCircle,
18
+
Loader2,
19
+
ChevronDown,
20
+
ChevronUp,
21
+
CheckCircle2,
22
+
XCircle,
23
+
RefreshCw
24
24
} from 'lucide-react'
25
25
import type { SiteWithDomains } from '../hooks/useSiteData'
26
26
27
27
// Lifecycle states for a single file as it moves through the upload pipeline.
type FileStatus = 'pending' | 'checking' | 'uploading' | 'uploaded' | 'reused' | 'failed'
28
28
29
29
// One entry in the per-file upload progress list rendered under the progress bar.
interface FileProgress {
  name: string        // file name as reported by the upload-progress SSE stream
  status: FileStatus  // current pipeline state for this file
  error?: string      // failure detail, set only when status === 'failed'
}
34
34
35
35
// Props for the UploadTab editor panel.
interface UploadTabProps {
  sites: SiteWithDomains[]               // existing sites the user may update
  sitesLoading: boolean                  // true while the sites list is being fetched
  onUploadComplete: () => Promise<void>  // called after an upload finishes so the caller can refresh site data
}
40
40
41
41
export function UploadTab({
42
-
sites,
43
-
sitesLoading,
44
-
onUploadComplete
42
+
sites,
43
+
sitesLoading,
44
+
onUploadComplete
45
45
}: UploadTabProps) {
46
-
  // Upload state
  const [siteMode, setSiteMode] = useState<'existing' | 'new'>('existing')
  const [selectedSiteRkey, setSelectedSiteRkey] = useState<string>('')
  const [newSiteName, setNewSiteName] = useState('')
  const [selectedFiles, setSelectedFiles] = useState<FileList | null>(null)
  const [isUploading, setIsUploading] = useState(false)
  // Human-readable status line; non-empty while an upload is in flight or settling
  const [uploadProgress, setUploadProgress] = useState('')
  const [skippedFiles, setSkippedFiles] = useState<Array<{ name: string; reason: string }>>([])
  const [failedFiles, setFailedFiles] = useState<Array<{ name: string; index: number; error: string; size: number }>>([])
  const [uploadedCount, setUploadedCount] = useState(0)
  // Per-file states fed by the SSE 'progress' events
  const [fileProgressList, setFileProgressList] = useState<FileProgress[]>([])
  const [showFileProgress, setShowFileProgress] = useState(false)

  // Keep SSE connection alive across tab switches
  const eventSourceRef = useRef<EventSource | null>(null)
  const currentJobIdRef = useRef<string | null>(null)

  // Auto-switch to 'new' mode if no sites exist
  useEffect(() => {
    if (!sitesLoading && sites.length === 0 && siteMode === 'existing') {
      setSiteMode('new')
    }
  }, [sites, sitesLoading, siteMode])

  // Cleanup SSE connection on unmount
  useEffect(() => {
    return () => {
      // Don't close the connection on unmount (tab switch)
      // It will be reused when the component remounts
    }
  }, [])
77
77
78
-
const handleFileSelect = (e: React.ChangeEvent<HTMLInputElement>) => {
79
-
if (e.target.files && e.target.files.length > 0) {
80
-
setSelectedFiles(e.target.files)
81
-
}
82
-
}
78
+
const handleFileSelect = (e: React.ChangeEvent<HTMLInputElement>) => {
79
+
if (e.target.files && e.target.files.length > 0) {
80
+
setSelectedFiles(e.target.files)
81
+
}
82
+
}
83
83
84
-
const setupSSE = (jobId: string) => {
85
-
// Close existing connection if any
86
-
if (eventSourceRef.current) {
87
-
eventSourceRef.current.close()
88
-
}
84
+
const setupSSE = (jobId: string) => {
85
+
// Close existing connection if any
86
+
if (eventSourceRef.current) {
87
+
eventSourceRef.current.close()
88
+
}
89
89
90
-
currentJobIdRef.current = jobId
91
-
const eventSource = new EventSource(`/wisp/upload-progress/${jobId}`)
92
-
eventSourceRef.current = eventSource
90
+
currentJobIdRef.current = jobId
91
+
const eventSource = new EventSource(`/wisp/upload-progress/${jobId}`)
92
+
eventSourceRef.current = eventSource
93
93
94
-
eventSource.addEventListener('progress', (event) => {
95
-
const progressData = JSON.parse(event.data)
96
-
const { progress, status } = progressData
94
+
eventSource.addEventListener('progress', (event) => {
95
+
const progressData = JSON.parse(event.data)
96
+
const { progress, status } = progressData
97
97
98
-
// Update file progress list if we have current file info
99
-
if (progress.currentFile && progress.currentFileStatus) {
100
-
setFileProgressList(prev => {
101
-
const existing = prev.find(f => f.name === progress.currentFile)
102
-
if (existing) {
103
-
// Update existing file status
104
-
return prev.map(f =>
105
-
f.name === progress.currentFile
106
-
? { ...f, status: progress.currentFileStatus as FileStatus }
107
-
: f
108
-
)
109
-
} else {
110
-
// Add new file
111
-
return [...prev, {
112
-
name: progress.currentFile,
113
-
status: progress.currentFileStatus as FileStatus
114
-
}]
115
-
}
116
-
})
117
-
}
98
+
// Update file progress list if we have current file info
99
+
if (progress.currentFile && progress.currentFileStatus) {
100
+
setFileProgressList(prev => {
101
+
const existingIndex = prev.findIndex(f => f.name === progress.currentFile)
102
+
if (existingIndex !== -1) {
103
+
// Update existing file status - create new array with single update
104
+
const updated = [...prev]
105
+
updated[existingIndex] = { ...updated[existingIndex], status: progress.currentFileStatus as FileStatus }
106
+
return updated
107
+
} else {
108
+
// Add new file
109
+
return [...prev, {
110
+
name: progress.currentFile,
111
+
status: progress.currentFileStatus as FileStatus
112
+
}]
113
+
}
114
+
})
115
+
}
118
116
119
-
// Update progress message based on phase
120
-
let message = 'Processing...'
121
-
if (progress.phase === 'validating') {
122
-
message = 'Validating files...'
123
-
} else if (progress.phase === 'compressing') {
124
-
const current = progress.filesProcessed || 0
125
-
const total = progress.totalFiles || 0
126
-
message = `Compressing files (${current}/${total})...`
127
-
if (progress.currentFile) {
128
-
message += ` - ${progress.currentFile}`
129
-
}
130
-
} else if (progress.phase === 'uploading') {
131
-
const uploaded = progress.filesUploaded || 0
132
-
const reused = progress.filesReused || 0
133
-
const total = progress.totalFiles || 0
134
-
message = `Uploading to PDS (${uploaded + reused}/${total})...`
135
-
} else if (progress.phase === 'creating_manifest') {
136
-
message = 'Creating manifest...'
137
-
} else if (progress.phase === 'finalizing') {
138
-
message = 'Finalizing upload...'
139
-
}
117
+
// Update progress message based on phase
118
+
let message = 'Processing...'
119
+
if (progress.phase === 'validating') {
120
+
message = 'Validating files...'
121
+
} else if (progress.phase === 'compressing') {
122
+
const current = progress.filesProcessed || 0
123
+
const total = progress.totalFiles || 0
124
+
message = `Compressing files (${current}/${total})...`
125
+
if (progress.currentFile) {
126
+
message += ` - ${progress.currentFile}`
127
+
}
128
+
} else if (progress.phase === 'uploading') {
129
+
const uploaded = progress.filesUploaded || 0
130
+
const reused = progress.filesReused || 0
131
+
const total = progress.totalFiles || 0
132
+
message = `Uploading to PDS (${uploaded + reused}/${total})...`
133
+
} else if (progress.phase === 'creating_manifest') {
134
+
message = 'Creating manifest...'
135
+
} else if (progress.phase === 'finalizing') {
136
+
message = 'Finalizing upload...'
137
+
}
140
138
141
-
setUploadProgress(message)
142
-
})
139
+
setUploadProgress(message)
140
+
})
143
141
144
-
eventSource.addEventListener('done', (event) => {
145
-
const result = JSON.parse(event.data)
146
-
eventSource.close()
147
-
eventSourceRef.current = null
148
-
currentJobIdRef.current = null
142
+
eventSource.addEventListener('done', (event) => {
143
+
const result = JSON.parse(event.data)
144
+
eventSource.close()
145
+
eventSourceRef.current = null
146
+
currentJobIdRef.current = null
149
147
150
-
const hasIssues = (result.skippedFiles && result.skippedFiles.length > 0) ||
151
-
(result.failedFiles && result.failedFiles.length > 0)
148
+
const hasIssues = (result.skippedFiles && result.skippedFiles.length > 0) ||
149
+
(result.failedFiles && result.failedFiles.length > 0)
152
150
153
-
// Update file progress list with failed files
154
-
if (result.failedFiles && result.failedFiles.length > 0) {
155
-
setFileProgressList(prev => {
156
-
const updated = [...prev]
157
-
result.failedFiles.forEach((failedFile: any) => {
158
-
const existing = updated.find(f => f.name === failedFile.name)
159
-
if (existing) {
160
-
existing.status = 'failed'
161
-
existing.error = failedFile.error
162
-
} else {
163
-
updated.push({
164
-
name: failedFile.name,
165
-
status: 'failed',
166
-
error: failedFile.error
167
-
})
168
-
}
169
-
})
170
-
return updated
171
-
})
172
-
}
151
+
// Update file progress list with failed files
152
+
if (result.failedFiles && result.failedFiles.length > 0) {
153
+
setFileProgressList(prev => {
154
+
const updated = [...prev]
155
+
result.failedFiles.forEach((failedFile: any) => {
156
+
const existingIndex = updated.findIndex(f => f.name === failedFile.name)
157
+
if (existingIndex !== -1) {
158
+
updated[existingIndex] = {
159
+
...updated[existingIndex],
160
+
status: 'failed',
161
+
error: failedFile.error
162
+
}
163
+
} else {
164
+
updated.push({
165
+
name: failedFile.name,
166
+
status: 'failed',
167
+
error: failedFile.error
168
+
})
169
+
}
170
+
})
171
+
return updated
172
+
})
173
+
}
173
174
174
-
setUploadProgress(hasIssues ? 'Upload completed with issues' : 'Upload complete!')
175
-
setSkippedFiles(result.skippedFiles || [])
176
-
setFailedFiles(result.failedFiles || [])
177
-
setUploadedCount(result.uploadedCount || result.fileCount || 0)
178
-
setSelectedSiteRkey('')
179
-
setNewSiteName('')
180
-
setSelectedFiles(null)
175
+
setUploadProgress(hasIssues ? 'Upload completed with issues' : 'Upload complete!')
176
+
setSkippedFiles(result.skippedFiles || [])
177
+
setFailedFiles(result.failedFiles || [])
178
+
setUploadedCount(result.uploadedCount || result.fileCount || 0)
179
+
setSelectedSiteRkey('')
180
+
setNewSiteName('')
181
+
setSelectedFiles(null)
181
182
182
-
// Refresh sites list
183
-
onUploadComplete()
183
+
// Refresh sites list
184
+
onUploadComplete()
184
185
185
-
// Reset form (wait longer if there are issues to show)
186
-
const resetDelay = hasIssues ? 6000 : 1500
187
-
setTimeout(() => {
188
-
setUploadProgress('')
189
-
setSkippedFiles([])
190
-
setFailedFiles([])
191
-
setUploadedCount(0)
192
-
setFileProgressList([])
193
-
setIsUploading(false)
194
-
}, resetDelay)
195
-
})
186
+
// Reset form (wait longer if there are issues to show)
187
+
const resetDelay = hasIssues ? 6000 : 1500
188
+
setTimeout(() => {
189
+
setUploadProgress('')
190
+
setSkippedFiles([])
191
+
setFailedFiles([])
192
+
setUploadedCount(0)
193
+
setFileProgressList([])
194
+
setIsUploading(false)
195
+
}, resetDelay)
196
+
})
196
197
197
-
eventSource.addEventListener('error', (event) => {
198
-
const errorData = JSON.parse((event as any).data || '{}')
199
-
eventSource.close()
200
-
eventSourceRef.current = null
201
-
currentJobIdRef.current = null
198
+
eventSource.addEventListener('error', (event) => {
199
+
const errorData = JSON.parse((event as any).data || '{}')
200
+
eventSource.close()
201
+
eventSourceRef.current = null
202
+
currentJobIdRef.current = null
202
203
203
-
console.error('Upload error:', errorData)
204
-
alert(
205
-
`Upload failed: ${errorData.error || 'Unknown error'}`
206
-
)
207
-
setIsUploading(false)
208
-
setUploadProgress('')
209
-
})
204
+
console.error('Upload error:', errorData)
205
+
alert(
206
+
`Upload failed: ${errorData.error || 'Unknown error'}`
207
+
)
208
+
setIsUploading(false)
209
+
setUploadProgress('')
210
+
})
210
211
211
-
eventSource.onerror = () => {
212
-
eventSource.close()
213
-
eventSourceRef.current = null
214
-
currentJobIdRef.current = null
212
+
eventSource.onerror = () => {
213
+
eventSource.close()
214
+
eventSourceRef.current = null
215
+
currentJobIdRef.current = null
215
216
216
-
console.error('SSE connection error')
217
-
alert('Lost connection to upload progress. The upload may still be processing.')
218
-
setIsUploading(false)
219
-
setUploadProgress('')
220
-
}
221
-
}
217
+
console.error('SSE connection error')
218
+
alert('Lost connection to upload progress. The upload may still be processing.')
219
+
setIsUploading(false)
220
+
setUploadProgress('')
221
+
}
222
+
}
222
223
223
-
const handleUpload = async () => {
224
-
const siteName = siteMode === 'existing' ? selectedSiteRkey : newSiteName
224
+
const handleUpload = async () => {
225
+
const siteName = siteMode === 'existing' ? selectedSiteRkey : newSiteName
225
226
226
-
if (!siteName) {
227
-
alert(siteMode === 'existing' ? 'Please select a site' : 'Please enter a site name')
228
-
return
229
-
}
227
+
if (!siteName) {
228
+
alert(siteMode === 'existing' ? 'Please select a site' : 'Please enter a site name')
229
+
return
230
+
}
230
231
231
-
setIsUploading(true)
232
-
setUploadProgress('Preparing files...')
232
+
setIsUploading(true)
233
+
setUploadProgress('Preparing files...')
233
234
234
-
try {
235
-
const formData = new FormData()
236
-
formData.append('siteName', siteName)
235
+
try {
236
+
const formData = new FormData()
237
+
formData.append('siteName', siteName)
237
238
238
-
if (selectedFiles) {
239
-
for (let i = 0; i < selectedFiles.length; i++) {
240
-
formData.append('files', selectedFiles[i])
241
-
}
242
-
}
239
+
if (selectedFiles) {
240
+
for (let i = 0; i < selectedFiles.length; i++) {
241
+
formData.append('files', selectedFiles[i])
242
+
}
243
+
}
243
244
244
-
// If no files, handle synchronously (old behavior)
245
-
if (!selectedFiles || selectedFiles.length === 0) {
246
-
setUploadProgress('Creating empty site...')
247
-
const response = await fetch('/wisp/upload-files', {
248
-
method: 'POST',
249
-
body: formData
250
-
})
245
+
// If no files, handle synchronously (old behavior)
246
+
if (!selectedFiles || selectedFiles.length === 0) {
247
+
setUploadProgress('Creating empty site...')
248
+
const response = await fetch('/wisp/upload-files', {
249
+
method: 'POST',
250
+
body: formData
251
+
})
251
252
252
-
const data = await response.json()
253
-
if (data.success) {
254
-
setUploadProgress('Site created!')
255
-
setSelectedSiteRkey('')
256
-
setNewSiteName('')
257
-
setSelectedFiles(null)
253
+
const data = await response.json()
254
+
if (data.success) {
255
+
setUploadProgress('Site created!')
256
+
setSelectedSiteRkey('')
257
+
setNewSiteName('')
258
+
setSelectedFiles(null)
258
259
259
-
await onUploadComplete()
260
+
await onUploadComplete()
260
261
261
-
setTimeout(() => {
262
-
setUploadProgress('')
263
-
setIsUploading(false)
264
-
}, 1500)
265
-
} else {
266
-
throw new Error(data.error || 'Upload failed')
267
-
}
268
-
return
269
-
}
262
+
setTimeout(() => {
263
+
setUploadProgress('')
264
+
setIsUploading(false)
265
+
}, 1500)
266
+
} else {
267
+
throw new Error(data.error || 'Upload failed')
268
+
}
269
+
return
270
+
}
270
271
271
-
// For file uploads, use SSE for progress
272
-
setUploadProgress('Starting upload...')
273
-
const response = await fetch('/wisp/upload-files', {
274
-
method: 'POST',
275
-
body: formData
276
-
})
272
+
// For file uploads, use SSE for progress
273
+
setUploadProgress('Starting upload...')
274
+
const response = await fetch('/wisp/upload-files', {
275
+
method: 'POST',
276
+
body: formData
277
+
})
277
278
278
-
const data = await response.json()
279
-
if (!data.success || !data.jobId) {
280
-
throw new Error(data.error || 'Failed to start upload')
281
-
}
279
+
const data = await response.json()
280
+
if (!data.success || !data.jobId) {
281
+
throw new Error(data.error || 'Failed to start upload')
282
+
}
282
283
283
-
const jobId = data.jobId
284
-
setUploadProgress('Connecting to progress stream...')
284
+
const jobId = data.jobId
285
+
setUploadProgress('Connecting to progress stream...')
285
286
286
-
// Setup SSE connection (persists across tab switches via ref)
287
-
setupSSE(jobId)
287
+
// Setup SSE connection (persists across tab switches via ref)
288
+
setupSSE(jobId)
288
289
289
-
} catch (err) {
290
-
console.error('Upload error:', err)
291
-
alert(
292
-
`Upload failed: ${err instanceof Error ? err.message : 'Unknown error'}`
293
-
)
294
-
setIsUploading(false)
295
-
setUploadProgress('')
296
-
}
297
-
}
290
+
} catch (err) {
291
+
console.error('Upload error:', err)
292
+
alert(
293
+
`Upload failed: ${err instanceof Error ? err.message : 'Unknown error'}`
294
+
)
295
+
setIsUploading(false)
296
+
setUploadProgress('')
297
+
}
298
+
}
298
299
299
-
return (
300
-
<div className="space-y-4 min-h-[400px]">
301
-
<Card>
302
-
<CardHeader>
303
-
<CardTitle>Upload Site</CardTitle>
304
-
<CardDescription>
305
-
Deploy a new site from a folder or Git repository
306
-
</CardDescription>
307
-
</CardHeader>
308
-
<CardContent className="space-y-6">
309
-
<div className="space-y-4">
310
-
<div className="p-4 bg-muted/50 rounded-lg">
311
-
<RadioGroup
312
-
value={siteMode}
313
-
onValueChange={(value) => setSiteMode(value as 'existing' | 'new')}
314
-
disabled={isUploading}
315
-
>
316
-
<div className="flex items-center space-x-2">
317
-
<RadioGroupItem value="existing" id="existing" />
318
-
<Label htmlFor="existing" className="cursor-pointer">
319
-
Update existing site
320
-
</Label>
321
-
</div>
322
-
<div className="flex items-center space-x-2">
323
-
<RadioGroupItem value="new" id="new" />
324
-
<Label htmlFor="new" className="cursor-pointer">
325
-
Create new site
326
-
</Label>
327
-
</div>
328
-
</RadioGroup>
329
-
</div>
300
+
return (
301
+
<div className="space-y-4 min-h-[400px]">
302
+
<Card>
303
+
<CardHeader>
304
+
<CardTitle>Upload Site</CardTitle>
305
+
<CardDescription>
306
+
Deploy a new site from a folder or Git repository
307
+
</CardDescription>
308
+
</CardHeader>
309
+
<CardContent className="space-y-6">
310
+
<div className="space-y-4">
311
+
<div className="p-4 bg-muted/50 rounded-lg">
312
+
<RadioGroup
313
+
value={siteMode}
314
+
onValueChange={(value) => setSiteMode(value as 'existing' | 'new')}
315
+
disabled={isUploading}
316
+
>
317
+
<div className="flex items-center space-x-2">
318
+
<RadioGroupItem value="existing" id="existing" />
319
+
<Label htmlFor="existing" className="cursor-pointer">
320
+
Update existing site
321
+
</Label>
322
+
</div>
323
+
<div className="flex items-center space-x-2">
324
+
<RadioGroupItem value="new" id="new" />
325
+
<Label htmlFor="new" className="cursor-pointer">
326
+
Create new site
327
+
</Label>
328
+
</div>
329
+
</RadioGroup>
330
+
</div>
330
331
331
-
{siteMode === 'existing' ? (
332
-
<div className="space-y-2">
333
-
<Label htmlFor="site-select">Select Site</Label>
334
-
{sitesLoading ? (
335
-
<div className="flex items-center justify-center py-4">
336
-
<Loader2 className="w-5 h-5 animate-spin text-muted-foreground" />
337
-
</div>
338
-
) : sites.length === 0 ? (
339
-
<div className="p-4 border border-dashed rounded-lg text-center text-sm text-muted-foreground">
340
-
No sites available. Create a new site instead.
341
-
</div>
342
-
) : (
343
-
<select
344
-
id="site-select"
345
-
className="flex h-10 w-full rounded-md border border-input bg-background px-3 py-2 text-sm ring-offset-background file:border-0 file:bg-transparent file:text-sm file:font-medium placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50"
346
-
value={selectedSiteRkey}
347
-
onChange={(e) => setSelectedSiteRkey(e.target.value)}
348
-
disabled={isUploading}
349
-
>
350
-
<option value="">Select a site...</option>
351
-
{sites.map((site) => (
352
-
<option key={site.rkey} value={site.rkey}>
353
-
{site.display_name || site.rkey}
354
-
</option>
355
-
))}
356
-
</select>
357
-
)}
358
-
</div>
359
-
) : (
360
-
<div className="space-y-2">
361
-
<Label htmlFor="new-site-name">New Site Name</Label>
362
-
<Input
363
-
id="new-site-name"
364
-
placeholder="my-awesome-site"
365
-
value={newSiteName}
366
-
onChange={(e) => setNewSiteName(e.target.value)}
367
-
disabled={isUploading}
368
-
/>
369
-
</div>
370
-
)}
332
+
{siteMode === 'existing' ? (
333
+
<div className="space-y-2">
334
+
<Label htmlFor="site-select">Select Site</Label>
335
+
{sitesLoading ? (
336
+
<div className="flex items-center justify-center py-4">
337
+
<Loader2 className="w-5 h-5 animate-spin text-muted-foreground" />
338
+
</div>
339
+
) : sites.length === 0 ? (
340
+
<div className="p-4 border border-dashed rounded-lg text-center text-sm text-muted-foreground">
341
+
No sites available. Create a new site instead.
342
+
</div>
343
+
) : (
344
+
<select
345
+
id="site-select"
346
+
className="flex h-10 w-full rounded-md border border-input bg-background px-3 py-2 text-sm ring-offset-background file:border-0 file:bg-transparent file:text-sm file:font-medium placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50"
347
+
value={selectedSiteRkey}
348
+
onChange={(e) => setSelectedSiteRkey(e.target.value)}
349
+
disabled={isUploading}
350
+
>
351
+
<option value="">Select a site...</option>
352
+
{sites.map((site) => (
353
+
<option key={site.rkey} value={site.rkey}>
354
+
{site.display_name || site.rkey}
355
+
</option>
356
+
))}
357
+
</select>
358
+
)}
359
+
</div>
360
+
) : (
361
+
<div className="space-y-2">
362
+
<Label htmlFor="new-site-name">New Site Name</Label>
363
+
<Input
364
+
id="new-site-name"
365
+
placeholder="my-awesome-site"
366
+
value={newSiteName}
367
+
onChange={(e) => setNewSiteName(e.target.value)}
368
+
disabled={isUploading}
369
+
/>
370
+
</div>
371
+
)}
371
372
372
-
<p className="text-xs text-muted-foreground">
373
-
File limits: 100MB per file, 300MB total
374
-
</p>
375
-
</div>
373
+
<p className="text-xs text-muted-foreground">
374
+
File limits: 100MB per file, 300MB total
375
+
</p>
376
+
</div>
376
377
377
-
<div className="grid md:grid-cols-2 gap-4">
378
-
<Card className="border-2 border-dashed hover:border-accent transition-colors cursor-pointer">
379
-
<CardContent className="flex flex-col items-center justify-center p-8 text-center">
380
-
<Upload className="w-12 h-12 text-muted-foreground mb-4" />
381
-
<h3 className="font-semibold mb-2">
382
-
Upload Folder
383
-
</h3>
384
-
<p className="text-sm text-muted-foreground mb-4">
385
-
Drag and drop or click to upload your
386
-
static site files
387
-
</p>
388
-
<input
389
-
type="file"
390
-
id="file-upload"
391
-
multiple
392
-
onChange={handleFileSelect}
393
-
className="hidden"
394
-
{...(({ webkitdirectory: '', directory: '' } as any))}
395
-
disabled={isUploading}
396
-
/>
397
-
<label htmlFor="file-upload">
398
-
<Button
399
-
variant="outline"
400
-
type="button"
401
-
onClick={() =>
402
-
document
403
-
.getElementById('file-upload')
404
-
?.click()
405
-
}
406
-
disabled={isUploading}
407
-
>
408
-
Choose Folder
409
-
</Button>
410
-
</label>
411
-
{selectedFiles && selectedFiles.length > 0 && (
412
-
<p className="text-sm text-muted-foreground mt-3">
413
-
{selectedFiles.length} files selected
414
-
</p>
415
-
)}
416
-
</CardContent>
417
-
</Card>
378
+
<div className="grid md:grid-cols-2 gap-4">
379
+
<Card className="border-2 border-dashed hover:border-accent transition-colors cursor-pointer">
380
+
<CardContent className="flex flex-col items-center justify-center p-8 text-center">
381
+
<Upload className="w-12 h-12 text-muted-foreground mb-4" />
382
+
<h3 className="font-semibold mb-2">
383
+
Upload Folder
384
+
</h3>
385
+
<p className="text-sm text-muted-foreground mb-4">
386
+
Drag and drop or click to upload your
387
+
static site files
388
+
</p>
389
+
<input
390
+
type="file"
391
+
id="file-upload"
392
+
multiple
393
+
onChange={handleFileSelect}
394
+
className="hidden"
395
+
{...(({ webkitdirectory: '', directory: '' } as any))}
396
+
disabled={isUploading}
397
+
/>
398
+
<label htmlFor="file-upload">
399
+
<Button
400
+
variant="outline"
401
+
type="button"
402
+
onClick={() =>
403
+
document
404
+
.getElementById('file-upload')
405
+
?.click()
406
+
}
407
+
disabled={isUploading}
408
+
>
409
+
Choose Folder
410
+
</Button>
411
+
</label>
412
+
{selectedFiles && selectedFiles.length > 0 && (
413
+
<p className="text-sm text-muted-foreground mt-3">
414
+
{selectedFiles.length} files selected
415
+
</p>
416
+
)}
417
+
</CardContent>
418
+
</Card>
418
419
419
-
<Card className="border-2 border-dashed opacity-50">
420
-
<CardContent className="flex flex-col items-center justify-center p-8 text-center">
421
-
<Globe className="w-12 h-12 text-muted-foreground mb-4" />
422
-
<h3 className="font-semibold mb-2">
423
-
Connect Git Repository
424
-
</h3>
425
-
<p className="text-sm text-muted-foreground mb-4">
426
-
Link your GitHub, GitLab, or any Git
427
-
repository
428
-
</p>
429
-
<Badge variant="secondary">Coming soon!</Badge>
430
-
</CardContent>
431
-
</Card>
432
-
</div>
420
+
<Card className="border-2 border-dashed opacity-50">
421
+
<CardContent className="flex flex-col items-center justify-center p-8 text-center">
422
+
<Globe className="w-12 h-12 text-muted-foreground mb-4" />
423
+
<h3 className="font-semibold mb-2">
424
+
Connect Git Repository
425
+
</h3>
426
+
<p className="text-sm text-muted-foreground mb-4">
427
+
Link your GitHub, GitLab, or any Git
428
+
repository
429
+
</p>
430
+
<Badge variant="secondary">Coming soon!</Badge>
431
+
</CardContent>
432
+
</Card>
433
+
</div>
433
434
434
-
{uploadProgress && (
435
-
<div className="space-y-3">
436
-
<div className="p-4 bg-muted rounded-lg">
437
-
<div className="flex items-center gap-2">
438
-
<Loader2 className="w-4 h-4 animate-spin" />
439
-
<span className="text-sm">{uploadProgress}</span>
440
-
</div>
441
-
</div>
435
+
{uploadProgress && (
436
+
<div className="space-y-3">
437
+
<div className="p-4 bg-muted rounded-lg">
438
+
<div className="flex items-center gap-2">
439
+
<Loader2 className="w-4 h-4 animate-spin" />
440
+
<span className="text-sm">{uploadProgress}</span>
441
+
</div>
442
+
</div>
442
443
443
-
{fileProgressList.length > 0 && (
444
-
<div className="border rounded-lg overflow-hidden">
445
-
<button
446
-
onClick={() => setShowFileProgress(!showFileProgress)}
447
-
className="w-full p-3 bg-muted/50 hover:bg-muted transition-colors flex items-center justify-between text-sm font-medium"
448
-
>
449
-
<span>
450
-
Processing files ({fileProgressList.filter(f => f.status === 'uploaded' || f.status === 'reused').length}/{fileProgressList.length})
451
-
</span>
452
-
{showFileProgress ? (
453
-
<ChevronUp className="w-4 h-4" />
454
-
) : (
455
-
<ChevronDown className="w-4 h-4" />
456
-
)}
457
-
</button>
458
-
{showFileProgress && (
459
-
<div className="max-h-64 overflow-y-auto p-3 space-y-1 bg-background">
460
-
{fileProgressList.map((file, idx) => (
461
-
<div
462
-
key={idx}
463
-
className="flex items-start gap-2 text-xs p-2 rounded hover:bg-muted/50 transition-colors"
464
-
>
465
-
{file.status === 'checking' && (
466
-
<Loader2 className="w-3 h-3 mt-0.5 animate-spin text-blue-500 shrink-0" />
467
-
)}
468
-
{file.status === 'uploading' && (
469
-
<Loader2 className="w-3 h-3 mt-0.5 animate-spin text-purple-500 shrink-0" />
470
-
)}
471
-
{file.status === 'uploaded' && (
472
-
<CheckCircle2 className="w-3 h-3 mt-0.5 text-green-500 shrink-0" />
473
-
)}
474
-
{file.status === 'reused' && (
475
-
<RefreshCw className="w-3 h-3 mt-0.5 text-cyan-500 shrink-0" />
476
-
)}
477
-
{file.status === 'failed' && (
478
-
<XCircle className="w-3 h-3 mt-0.5 text-red-500 shrink-0" />
479
-
)}
480
-
<div className="flex-1 min-w-0">
481
-
<div className="font-mono truncate">{file.name}</div>
482
-
{file.error && (
483
-
<div className="text-red-500 mt-0.5">
484
-
{file.error}
485
-
</div>
486
-
)}
487
-
{file.status === 'checking' && (
488
-
<div className="text-muted-foreground">Checking for changes...</div>
489
-
)}
490
-
{file.status === 'uploading' && (
491
-
<div className="text-muted-foreground">Uploading to PDS...</div>
492
-
)}
493
-
{file.status === 'reused' && (
494
-
<div className="text-muted-foreground">Reused (unchanged)</div>
495
-
)}
496
-
</div>
497
-
</div>
498
-
))}
499
-
</div>
500
-
)}
501
-
</div>
502
-
)}
444
+
{fileProgressList.length > 0 && (
445
+
<div className="border rounded-lg overflow-hidden">
446
+
<button
447
+
onClick={() => setShowFileProgress(!showFileProgress)}
448
+
className="w-full p-3 bg-muted/50 hover:bg-muted transition-colors flex items-center justify-between text-sm font-medium"
449
+
>
450
+
<span>
451
+
Processing files ({fileProgressList.filter(f => f.status === 'uploaded' || f.status === 'reused').length}/{fileProgressList.length})
452
+
</span>
453
+
{showFileProgress ? (
454
+
<ChevronUp className="w-4 h-4" />
455
+
) : (
456
+
<ChevronDown className="w-4 h-4" />
457
+
)}
458
+
</button>
459
+
{showFileProgress && (
460
+
<div className="max-h-64 overflow-y-auto p-3 space-y-1 bg-background">
461
+
{fileProgressList.map((file, idx) => (
462
+
<div
463
+
key={idx}
464
+
className="flex items-start gap-2 text-xs p-2 rounded hover:bg-muted/50 transition-colors"
465
+
>
466
+
{file.status === 'checking' && (
467
+
<Loader2 className="w-3 h-3 mt-0.5 animate-spin text-blue-500 shrink-0" />
468
+
)}
469
+
{file.status === 'uploading' && (
470
+
<Loader2 className="w-3 h-3 mt-0.5 animate-spin text-purple-500 shrink-0" />
471
+
)}
472
+
{file.status === 'uploaded' && (
473
+
<CheckCircle2 className="w-3 h-3 mt-0.5 text-green-500 shrink-0" />
474
+
)}
475
+
{file.status === 'reused' && (
476
+
<RefreshCw className="w-3 h-3 mt-0.5 text-cyan-500 shrink-0" />
477
+
)}
478
+
{file.status === 'failed' && (
479
+
<XCircle className="w-3 h-3 mt-0.5 text-red-500 shrink-0" />
480
+
)}
481
+
<div className="flex-1 min-w-0">
482
+
<div className="font-mono truncate">{file.name}</div>
483
+
{file.error && (
484
+
<div className="text-red-500 mt-0.5">
485
+
{file.error}
486
+
</div>
487
+
)}
488
+
{file.status === 'checking' && (
489
+
<div className="text-muted-foreground">Checking for changes...</div>
490
+
)}
491
+
{file.status === 'uploading' && (
492
+
<div className="text-muted-foreground">Uploading to PDS...</div>
493
+
)}
494
+
{file.status === 'reused' && (
495
+
<div className="text-muted-foreground">Reused (unchanged)</div>
496
+
)}
497
+
</div>
498
+
</div>
499
+
))}
500
+
</div>
501
+
)}
502
+
</div>
503
+
)}
503
504
504
-
{failedFiles.length > 0 && (
505
-
<div className="p-4 bg-red-500/10 border border-red-500/20 rounded-lg">
506
-
<div className="flex items-start gap-2 text-red-600 dark:text-red-400 mb-2">
507
-
<AlertCircle className="w-4 h-4 mt-0.5 shrink-0" />
508
-
<div className="flex-1">
509
-
<span className="font-medium">
510
-
{failedFiles.length} file{failedFiles.length > 1 ? 's' : ''} failed to upload
511
-
</span>
512
-
{uploadedCount > 0 && (
513
-
<span className="text-sm ml-2">
514
-
({uploadedCount} uploaded successfully)
515
-
</span>
516
-
)}
517
-
</div>
518
-
</div>
519
-
<div className="ml-6 space-y-1 max-h-40 overflow-y-auto">
520
-
{failedFiles.slice(0, 10).map((file, idx) => (
521
-
<div key={idx} className="text-xs">
522
-
<div className="font-mono font-semibold">{file.name}</div>
523
-
<div className="text-muted-foreground ml-2">
524
-
Error: {file.error}
525
-
{file.size > 0 && ` (${(file.size / 1024).toFixed(1)} KB)`}
526
-
</div>
527
-
</div>
528
-
))}
529
-
{failedFiles.length > 10 && (
530
-
<div className="text-xs text-muted-foreground">
531
-
...and {failedFiles.length - 10} more
532
-
</div>
533
-
)}
534
-
</div>
535
-
</div>
536
-
)}
505
+
{failedFiles.length > 0 && (
506
+
<div className="p-4 bg-red-500/10 border border-red-500/20 rounded-lg">
507
+
<div className="flex items-start gap-2 text-red-600 dark:text-red-400 mb-2">
508
+
<AlertCircle className="w-4 h-4 mt-0.5 shrink-0" />
509
+
<div className="flex-1">
510
+
<span className="font-medium">
511
+
{failedFiles.length} file{failedFiles.length > 1 ? 's' : ''} failed to upload
512
+
</span>
513
+
{uploadedCount > 0 && (
514
+
<span className="text-sm ml-2">
515
+
({uploadedCount} uploaded successfully)
516
+
</span>
517
+
)}
518
+
</div>
519
+
</div>
520
+
<div className="ml-6 space-y-1 max-h-40 overflow-y-auto">
521
+
{failedFiles.slice(0, 10).map((file, idx) => (
522
+
<div key={idx} className="text-xs">
523
+
<div className="font-mono font-semibold">{file.name}</div>
524
+
<div className="text-muted-foreground ml-2">
525
+
Error: {file.error}
526
+
{file.size > 0 && ` (${(file.size / 1024).toFixed(1)} KB)`}
527
+
</div>
528
+
</div>
529
+
))}
530
+
{failedFiles.length > 10 && (
531
+
<div className="text-xs text-muted-foreground">
532
+
...and {failedFiles.length - 10} more
533
+
</div>
534
+
)}
535
+
</div>
536
+
</div>
537
+
)}
537
538
538
-
{skippedFiles.length > 0 && (
539
-
<div className="p-4 bg-yellow-500/10 border border-yellow-500/20 rounded-lg">
540
-
<div className="flex items-start gap-2 text-yellow-600 dark:text-yellow-400 mb-2">
541
-
<AlertCircle className="w-4 h-4 mt-0.5 shrink-0" />
542
-
<div className="flex-1">
543
-
<span className="font-medium">
544
-
{skippedFiles.length} file{skippedFiles.length > 1 ? 's' : ''} skipped
545
-
</span>
546
-
</div>
547
-
</div>
548
-
<div className="ml-6 space-y-1 max-h-32 overflow-y-auto">
549
-
{skippedFiles.slice(0, 5).map((file, idx) => (
550
-
<div key={idx} className="text-xs">
551
-
<span className="font-mono">{file.name}</span>
552
-
<span className="text-muted-foreground"> - {file.reason}</span>
553
-
</div>
554
-
))}
555
-
{skippedFiles.length > 5 && (
556
-
<div className="text-xs text-muted-foreground">
557
-
...and {skippedFiles.length - 5} more
558
-
</div>
559
-
)}
560
-
</div>
561
-
</div>
562
-
)}
563
-
</div>
564
-
)}
539
+
{skippedFiles.length > 0 && (
540
+
<div className="p-4 bg-yellow-500/10 border border-yellow-500/20 rounded-lg">
541
+
<div className="flex items-start gap-2 text-yellow-600 dark:text-yellow-400 mb-2">
542
+
<AlertCircle className="w-4 h-4 mt-0.5 shrink-0" />
543
+
<div className="flex-1">
544
+
<span className="font-medium">
545
+
{skippedFiles.length} file{skippedFiles.length > 1 ? 's' : ''} skipped
546
+
</span>
547
+
</div>
548
+
</div>
549
+
<div className="ml-6 space-y-1 max-h-32 overflow-y-auto">
550
+
{skippedFiles.slice(0, 5).map((file, idx) => (
551
+
<div key={idx} className="text-xs">
552
+
<span className="font-mono">{file.name}</span>
553
+
<span className="text-muted-foreground"> - {file.reason}</span>
554
+
</div>
555
+
))}
556
+
{skippedFiles.length > 5 && (
557
+
<div className="text-xs text-muted-foreground">
558
+
...and {skippedFiles.length - 5} more
559
+
</div>
560
+
)}
561
+
</div>
562
+
</div>
563
+
)}
564
+
</div>
565
+
)}
565
566
566
-
<Button
567
-
onClick={handleUpload}
568
-
className="w-full"
569
-
disabled={
570
-
(siteMode === 'existing' ? !selectedSiteRkey : !newSiteName) ||
571
-
isUploading ||
572
-
(siteMode === 'existing' && (!selectedFiles || selectedFiles.length === 0))
573
-
}
574
-
>
575
-
{isUploading ? (
576
-
<>
577
-
<Loader2 className="w-4 h-4 mr-2 animate-spin" />
578
-
Uploading...
579
-
</>
580
-
) : (
581
-
<>
582
-
{siteMode === 'existing' ? (
583
-
'Update Site'
584
-
) : (
585
-
selectedFiles && selectedFiles.length > 0
586
-
? 'Upload & Deploy'
587
-
: 'Create Empty Site'
588
-
)}
589
-
</>
590
-
)}
591
-
</Button>
592
-
</CardContent>
593
-
</Card>
594
-
</div>
595
-
)
567
+
<Button
568
+
onClick={handleUpload}
569
+
className="w-full"
570
+
disabled={
571
+
(siteMode === 'existing' ? !selectedSiteRkey : !newSiteName) ||
572
+
isUploading ||
573
+
(siteMode === 'existing' && (!selectedFiles || selectedFiles.length === 0))
574
+
}
575
+
>
576
+
{isUploading ? (
577
+
<>
578
+
<Loader2 className="w-4 h-4 mr-2 animate-spin" />
579
+
Uploading...
580
+
</>
581
+
) : (
582
+
<>
583
+
{siteMode === 'existing' ? (
584
+
'Update Site'
585
+
) : (
586
+
selectedFiles && selectedFiles.length > 0
587
+
? 'Upload & Deploy'
588
+
: 'Create Empty Site'
589
+
)}
590
+
</>
591
+
)}
592
+
</Button>
593
+
</CardContent>
594
+
</Card>
595
+
</div>
596
+
)
596
597
}
+777
-791
src/routes/wisp.ts
+777
-791
src/routes/wisp.ts
···
4
4
import { Agent } from '@atproto/api'
5
5
import { TID } from '@atproto/common-web'
6
6
import {
7
-
type UploadedFile,
8
-
type FileUploadResult,
9
-
processUploadedFiles,
10
-
createManifest,
11
-
updateFileBlobs,
12
-
shouldCompressFile,
13
-
compressFile,
14
-
computeCID,
15
-
extractBlobMap,
16
-
extractSubfsUris,
17
-
findLargeDirectories,
18
-
replaceDirectoryWithSubfs,
19
-
estimateDirectorySize
7
+
type UploadedFile,
8
+
type FileUploadResult,
9
+
processUploadedFiles,
10
+
createManifest,
11
+
updateFileBlobs,
12
+
shouldCompressFile,
13
+
compressFile,
14
+
computeCID,
15
+
extractBlobMap,
16
+
extractSubfsUris,
17
+
findLargeDirectories,
18
+
replaceDirectoryWithSubfs,
19
+
estimateDirectorySize
20
20
} from '../lib/wisp-utils'
21
21
import { upsertSite } from '../lib/db'
22
22
import { logger } from '../lib/observability'
···
24
24
import { validateRecord as validateSubfsRecord } from '../lexicons/types/place/wisp/subfs'
25
25
import { MAX_SITE_SIZE, MAX_FILE_SIZE, MAX_FILE_COUNT } from '../lib/constants'
26
26
import {
27
-
createUploadJob,
28
-
getUploadJob,
29
-
updateJobProgress,
30
-
completeUploadJob,
31
-
failUploadJob,
32
-
addJobListener
27
+
createUploadJob,
28
+
getUploadJob,
29
+
updateJobProgress,
30
+
completeUploadJob,
31
+
failUploadJob,
32
+
addJobListener
33
33
} from '../lib/upload-jobs'
34
34
35
35
function isValidSiteName(siteName: string): boolean {
36
-
if (!siteName || typeof siteName !== 'string') return false;
36
+
if (!siteName || typeof siteName !== 'string') return false;
37
37
38
-
// Length check (AT Protocol rkey limit)
39
-
if (siteName.length < 1 || siteName.length > 512) return false;
38
+
// Length check (AT Protocol rkey limit)
39
+
if (siteName.length < 1 || siteName.length > 512) return false;
40
40
41
-
// Check for path traversal
42
-
if (siteName === '.' || siteName === '..') return false;
43
-
if (siteName.includes('/') || siteName.includes('\\')) return false;
44
-
if (siteName.includes('\0')) return false;
41
+
// Check for path traversal
42
+
if (siteName === '.' || siteName === '..') return false;
43
+
if (siteName.includes('/') || siteName.includes('\\')) return false;
44
+
if (siteName.includes('\0')) return false;
45
45
46
-
// AT Protocol rkey format: alphanumeric, dots, dashes, underscores, tildes, colons
47
-
// Based on NSID format rules
48
-
const validRkeyPattern = /^[a-zA-Z0-9._~:-]+$/;
49
-
if (!validRkeyPattern.test(siteName)) return false;
46
+
// AT Protocol rkey format: alphanumeric, dots, dashes, underscores, tildes, colons
47
+
// Based on NSID format rules
48
+
const validRkeyPattern = /^[a-zA-Z0-9._~:-]+$/;
49
+
if (!validRkeyPattern.test(siteName)) return false;
50
50
51
-
return true;
51
+
return true;
52
52
}
53
53
54
54
async function processUploadInBackground(
55
-
jobId: string,
56
-
agent: Agent,
57
-
did: string,
58
-
siteName: string,
59
-
fileArray: File[]
55
+
jobId: string,
56
+
agent: Agent,
57
+
did: string,
58
+
siteName: string,
59
+
fileArray: File[]
60
60
): Promise<void> {
61
-
try {
62
-
// Try to fetch existing record to enable incremental updates
63
-
let existingBlobMap = new Map<string, { blobRef: any; cid: string }>();
64
-
let oldSubfsUris: Array<{ uri: string; path: string }> = [];
65
-
console.log('Attempting to fetch existing record...');
66
-
updateJobProgress(jobId, { phase: 'validating' });
61
+
try {
62
+
// Try to fetch existing record to enable incremental updates
63
+
let existingBlobMap = new Map<string, { blobRef: any; cid: string }>();
64
+
let oldSubfsUris: Array<{ uri: string; path: string }> = [];
65
+
console.log('Attempting to fetch existing record...');
66
+
updateJobProgress(jobId, { phase: 'validating' });
67
67
68
-
try {
69
-
const rkey = siteName;
70
-
const existingRecord = await agent.com.atproto.repo.getRecord({
71
-
repo: did,
72
-
collection: 'place.wisp.fs',
73
-
rkey: rkey
74
-
});
75
-
console.log('Existing record found!');
68
+
try {
69
+
const rkey = siteName;
70
+
const existingRecord = await agent.com.atproto.repo.getRecord({
71
+
repo: did,
72
+
collection: 'place.wisp.fs',
73
+
rkey: rkey
74
+
});
75
+
console.log('Existing record found!');
76
76
77
-
if (existingRecord.data.value && typeof existingRecord.data.value === 'object' && 'root' in existingRecord.data.value) {
78
-
const manifest = existingRecord.data.value as any;
77
+
if (existingRecord.data.value && typeof existingRecord.data.value === 'object' && 'root' in existingRecord.data.value) {
78
+
const manifest = existingRecord.data.value as any;
79
79
80
-
// Extract blob map from main record
81
-
existingBlobMap = extractBlobMap(manifest.root);
82
-
console.log(`Found existing manifest with ${existingBlobMap.size} files in main record`);
80
+
// Extract blob map from main record
81
+
existingBlobMap = extractBlobMap(manifest.root);
82
+
console.log(`Found existing manifest with ${existingBlobMap.size} files in main record`);
83
83
84
-
// Extract subfs URIs with their mount paths from main record
85
-
const subfsUris = extractSubfsUris(manifest.root);
86
-
oldSubfsUris = subfsUris; // Save for cleanup later
84
+
// Extract subfs URIs with their mount paths from main record
85
+
const subfsUris = extractSubfsUris(manifest.root);
86
+
oldSubfsUris = subfsUris; // Save for cleanup later
87
87
88
-
if (subfsUris.length > 0) {
89
-
console.log(`Found ${subfsUris.length} subfs records, fetching in parallel...`);
90
-
logger.info(`Fetching ${subfsUris.length} subfs records for blob reuse`);
88
+
if (subfsUris.length > 0) {
89
+
console.log(`Found ${subfsUris.length} subfs records, fetching in parallel...`);
90
+
logger.info(`Fetching ${subfsUris.length} subfs records for blob reuse`);
91
91
92
-
// Fetch all subfs records in parallel
93
-
const subfsRecords = await Promise.all(
94
-
subfsUris.map(async ({ uri, path }) => {
95
-
try {
96
-
// Parse URI: at://did/collection/rkey
97
-
const parts = uri.replace('at://', '').split('/');
98
-
const subDid = parts[0];
99
-
const collection = parts[1];
100
-
const subRkey = parts[2];
92
+
// Fetch all subfs records in parallel
93
+
const subfsRecords = await Promise.all(
94
+
subfsUris.map(async ({ uri, path }) => {
95
+
try {
96
+
// Parse URI: at://did/collection/rkey
97
+
const parts = uri.replace('at://', '').split('/');
98
+
const subDid = parts[0];
99
+
const collection = parts[1];
100
+
const subRkey = parts[2];
101
101
102
-
const record = await agent.com.atproto.repo.getRecord({
103
-
repo: subDid,
104
-
collection: collection,
105
-
rkey: subRkey
106
-
});
102
+
const record = await agent.com.atproto.repo.getRecord({
103
+
repo: subDid,
104
+
collection: collection,
105
+
rkey: subRkey
106
+
});
107
107
108
-
return { record: record.data.value as any, mountPath: path };
109
-
} catch (err: any) {
110
-
logger.warn(`Failed to fetch subfs record ${uri}: ${err?.message}`, err);
111
-
return null;
112
-
}
113
-
})
114
-
);
108
+
return { record: record.data.value as any, mountPath: path };
109
+
} catch (err: any) {
110
+
logger.warn(`Failed to fetch subfs record ${uri}: ${err?.message}`, err);
111
+
return null;
112
+
}
113
+
})
114
+
);
115
115
116
-
// Merge blob maps from all subfs records
117
-
let totalSubfsBlobs = 0;
118
-
for (const subfsData of subfsRecords) {
119
-
if (subfsData && subfsData.record && 'root' in subfsData.record) {
120
-
// Extract blobs with the correct mount path prefix
121
-
const subfsMap = extractBlobMap(subfsData.record.root, subfsData.mountPath);
122
-
subfsMap.forEach((value, key) => {
123
-
existingBlobMap.set(key, value);
124
-
totalSubfsBlobs++;
125
-
});
126
-
}
127
-
}
116
+
// Merge blob maps from all subfs records
117
+
let totalSubfsBlobs = 0;
118
+
for (const subfsData of subfsRecords) {
119
+
if (subfsData && subfsData.record && 'root' in subfsData.record) {
120
+
// Extract blobs with the correct mount path prefix
121
+
const subfsMap = extractBlobMap(subfsData.record.root, subfsData.mountPath);
122
+
subfsMap.forEach((value, key) => {
123
+
existingBlobMap.set(key, value);
124
+
totalSubfsBlobs++;
125
+
});
126
+
}
127
+
}
128
128
129
-
console.log(`Merged ${totalSubfsBlobs} files from ${subfsUris.length} subfs records`);
130
-
logger.info(`Total blob map: ${existingBlobMap.size} files (main + subfs)`);
131
-
}
129
+
console.log(`Merged ${totalSubfsBlobs} files from ${subfsUris.length} subfs records`);
130
+
logger.info(`Total blob map: ${existingBlobMap.size} files (main + subfs)`);
131
+
}
132
132
133
-
console.log(`Total existing blobs for reuse: ${existingBlobMap.size} files`);
134
-
logger.info(`Found existing manifest with ${existingBlobMap.size} files for incremental update`);
135
-
}
136
-
} catch (error: any) {
137
-
console.log('No existing record found or error:', error?.message || error);
138
-
if (error?.status !== 400 && error?.error !== 'RecordNotFound') {
139
-
logger.warn('Failed to fetch existing record, proceeding with full upload', error);
140
-
}
141
-
}
133
+
console.log(`Total existing blobs for reuse: ${existingBlobMap.size} files`);
134
+
logger.info(`Found existing manifest with ${existingBlobMap.size} files for incremental update`);
135
+
}
136
+
} catch (error: any) {
137
+
console.log('No existing record found or error:', error?.message || error);
138
+
if (error?.status !== 400 && error?.error !== 'RecordNotFound') {
139
+
logger.warn('Failed to fetch existing record, proceeding with full upload', error);
140
+
}
141
+
}
142
142
143
-
// Convert File objects to UploadedFile format
144
-
const uploadedFiles: UploadedFile[] = [];
145
-
const skippedFiles: Array<{ name: string; reason: string }> = [];
143
+
// Convert File objects to UploadedFile format
144
+
const uploadedFiles: UploadedFile[] = [];
145
+
const skippedFiles: Array<{ name: string; reason: string }> = [];
146
146
147
-
console.log('Processing files, count:', fileArray.length);
148
-
updateJobProgress(jobId, { phase: 'compressing' });
147
+
console.log('Processing files, count:', fileArray.length);
148
+
updateJobProgress(jobId, { phase: 'compressing' });
149
149
150
-
for (let i = 0; i < fileArray.length; i++) {
151
-
const file = fileArray[i];
150
+
for (let i = 0; i < fileArray.length; i++) {
151
+
const file = fileArray[i];
152
152
153
-
// Skip undefined/null files
154
-
if (!file || !file.name) {
155
-
console.log(`Skipping undefined file at index ${i}`);
156
-
skippedFiles.push({
157
-
name: `[undefined file at index ${i}]`,
158
-
reason: 'Invalid file object'
159
-
});
160
-
continue;
161
-
}
153
+
// Skip undefined/null files
154
+
if (!file || !file.name) {
155
+
console.log(`Skipping undefined file at index ${i}`);
156
+
skippedFiles.push({
157
+
name: `[undefined file at index ${i}]`,
158
+
reason: 'Invalid file object'
159
+
});
160
+
continue;
161
+
}
162
162
163
-
console.log(`Processing file ${i + 1}/${fileArray.length}:`, file.name, file.size, 'bytes');
164
-
updateJobProgress(jobId, {
165
-
filesProcessed: i + 1,
166
-
currentFile: file.name
167
-
});
163
+
console.log(`Processing file ${i + 1}/${fileArray.length}:`, file.name, file.size, 'bytes');
164
+
updateJobProgress(jobId, {
165
+
filesProcessed: i + 1,
166
+
currentFile: file.name
167
+
});
168
168
169
-
// Skip .git directory files
170
-
const normalizedPath = file.name.replace(/^[^\/]*\//, '');
171
-
if (normalizedPath.startsWith('.git/') || normalizedPath === '.git') {
172
-
console.log(`Skipping .git file: ${file.name}`);
173
-
skippedFiles.push({
174
-
name: file.name,
175
-
reason: '.git directory excluded'
176
-
});
177
-
continue;
178
-
}
169
+
// Skip .git directory files
170
+
const normalizedPath = file.name.replace(/^[^\/]*\//, '');
171
+
if (normalizedPath.startsWith('.git/') || normalizedPath === '.git') {
172
+
console.log(`Skipping .git file: ${file.name}`);
173
+
skippedFiles.push({
174
+
name: file.name,
175
+
reason: '.git directory excluded'
176
+
});
177
+
continue;
178
+
}
179
179
180
-
// Skip files that are too large
181
-
const maxSize = MAX_FILE_SIZE;
182
-
if (file.size > maxSize) {
183
-
skippedFiles.push({
184
-
name: file.name,
185
-
reason: `file too large (${(file.size / 1024 / 1024).toFixed(2)}MB, max 100MB)`
186
-
});
187
-
continue;
188
-
}
180
+
// Skip files that are too large
181
+
const maxSize = MAX_FILE_SIZE;
182
+
if (file.size > maxSize) {
183
+
skippedFiles.push({
184
+
name: file.name,
185
+
reason: `file too large (${(file.size / 1024 / 1024).toFixed(2)}MB, max 100MB)`
186
+
});
187
+
continue;
188
+
}
189
189
190
-
const arrayBuffer = await file.arrayBuffer();
191
-
const originalContent = Buffer.from(arrayBuffer);
192
-
const originalMimeType = file.type || 'application/octet-stream';
190
+
const arrayBuffer = await file.arrayBuffer();
191
+
const originalContent = Buffer.from(arrayBuffer);
192
+
const originalMimeType = file.type || 'application/octet-stream';
193
193
194
-
// Determine if file should be compressed
195
-
const shouldCompress = shouldCompressFile(originalMimeType);
194
+
// Determine if file should be compressed
195
+
const shouldCompress = shouldCompressFile(originalMimeType);
196
196
197
-
// Text files (HTML/CSS/JS) need base64 encoding to prevent PDS content sniffing
198
-
// Audio files just need compression without base64
199
-
const needsBase64 = originalMimeType.startsWith('text/') ||
200
-
originalMimeType.includes('html') ||
201
-
originalMimeType.includes('javascript') ||
202
-
originalMimeType.includes('css') ||
203
-
originalMimeType.includes('json') ||
204
-
originalMimeType.includes('xml') ||
205
-
originalMimeType.includes('svg');
197
+
// Text files (HTML/CSS/JS) need base64 encoding to prevent PDS content sniffing
198
+
// Audio files just need compression without base64
199
+
const needsBase64 =
200
+
originalMimeType.startsWith('text/') ||
201
+
originalMimeType.startsWith('application/json') ||
202
+
originalMimeType.startsWith('application/xml') ||
203
+
originalMimeType === 'image/svg+xml';
206
204
207
-
let finalContent: Buffer;
208
-
let compressed = false;
209
-
let base64Encoded = false;
205
+
let finalContent: Buffer;
206
+
let compressed = false;
207
+
let base64Encoded = false;
210
208
211
-
if (shouldCompress) {
212
-
const compressedContent = compressFile(originalContent);
213
-
compressed = true;
209
+
if (shouldCompress) {
210
+
const compressedContent = compressFile(originalContent);
211
+
compressed = true;
214
212
215
-
if (needsBase64) {
216
-
// Text files: compress AND base64 encode
217
-
finalContent = Buffer.from(compressedContent.toString('base64'), 'binary');
218
-
base64Encoded = true;
219
-
const compressionRatio = (compressedContent.length / originalContent.length * 100).toFixed(1);
220
-
console.log(`Compressing+base64 ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${finalContent.length} bytes`);
221
-
logger.info(`Compressing+base64 ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${finalContent.length} bytes`);
222
-
} else {
223
-
// Audio files: just compress, no base64
224
-
finalContent = compressedContent;
225
-
const compressionRatio = (compressedContent.length / originalContent.length * 100).toFixed(1);
226
-
console.log(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%)`);
227
-
logger.info(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%)`);
228
-
}
229
-
} else {
230
-
// Binary files: upload directly
231
-
finalContent = originalContent;
232
-
console.log(`Uploading ${file.name} directly: ${originalContent.length} bytes (no compression)`);
233
-
logger.info(`Uploading ${file.name} directly: ${originalContent.length} bytes (binary)`);
234
-
}
213
+
if (needsBase64) {
214
+
// Text files: compress AND base64 encode
215
+
finalContent = Buffer.from(compressedContent.toString('base64'), 'binary');
216
+
base64Encoded = true;
217
+
const compressionRatio = (compressedContent.length / originalContent.length * 100).toFixed(1);
218
+
console.log(`Compressing+base64 ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${finalContent.length} bytes`);
219
+
logger.info(`Compressing+base64 ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${finalContent.length} bytes`);
220
+
} else {
221
+
// Audio files: just compress, no base64
222
+
finalContent = compressedContent;
223
+
const compressionRatio = (compressedContent.length / originalContent.length * 100).toFixed(1);
224
+
console.log(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%)`);
225
+
logger.info(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%)`);
226
+
}
227
+
} else {
228
+
// Binary files: upload directly
229
+
finalContent = originalContent;
230
+
console.log(`Uploading ${file.name} directly: ${originalContent.length} bytes (no compression)`);
231
+
logger.info(`Uploading ${file.name} directly: ${originalContent.length} bytes (binary)`);
232
+
}
235
233
236
-
uploadedFiles.push({
237
-
name: file.name,
238
-
content: finalContent,
239
-
mimeType: originalMimeType,
240
-
size: finalContent.length,
241
-
compressed,
242
-
base64Encoded,
243
-
originalMimeType
244
-
});
245
-
}
234
+
uploadedFiles.push({
235
+
name: file.name,
236
+
content: finalContent,
237
+
mimeType: originalMimeType,
238
+
size: finalContent.length,
239
+
compressed,
240
+
base64Encoded,
241
+
originalMimeType
242
+
});
243
+
}
246
244
247
-
// Update total file count after filtering (important for progress tracking)
248
-
updateJobProgress(jobId, {
249
-
totalFiles: uploadedFiles.length
250
-
});
245
+
// Update total file count after filtering (important for progress tracking)
246
+
updateJobProgress(jobId, {
247
+
totalFiles: uploadedFiles.length
248
+
});
251
249
252
-
// Check total size limit
253
-
const totalSize = uploadedFiles.reduce((sum, file) => sum + file.size, 0);
254
-
const maxTotalSize = MAX_SITE_SIZE;
250
+
// Check total size limit
251
+
const totalSize = uploadedFiles.reduce((sum, file) => sum + file.size, 0);
252
+
const maxTotalSize = MAX_SITE_SIZE;
255
253
256
-
if (totalSize > maxTotalSize) {
257
-
throw new Error(`Total upload size ${(totalSize / 1024 / 1024).toFixed(2)}MB exceeds 300MB limit`);
258
-
}
254
+
if (totalSize > maxTotalSize) {
255
+
throw new Error(`Total upload size ${(totalSize / 1024 / 1024).toFixed(2)}MB exceeds 300MB limit`);
256
+
}
259
257
260
-
// Check file count limit
261
-
if (uploadedFiles.length > MAX_FILE_COUNT) {
262
-
throw new Error(`File count ${uploadedFiles.length} exceeds ${MAX_FILE_COUNT} files limit`);
263
-
}
258
+
// Check file count limit
259
+
if (uploadedFiles.length > MAX_FILE_COUNT) {
260
+
throw new Error(`File count ${uploadedFiles.length} exceeds ${MAX_FILE_COUNT} files limit`);
261
+
}
264
262
265
-
console.log(`After filtering: ${uploadedFiles.length} files to process (${skippedFiles.length} skipped)`);
263
+
console.log(`After filtering: ${uploadedFiles.length} files to process (${skippedFiles.length} skipped)`);
266
264
267
-
if (uploadedFiles.length === 0) {
268
-
// Create empty manifest
269
-
const emptyManifest = {
270
-
$type: 'place.wisp.fs',
271
-
site: siteName,
272
-
root: {
273
-
type: 'directory',
274
-
entries: []
275
-
},
276
-
fileCount: 0,
277
-
createdAt: new Date().toISOString()
278
-
};
265
+
if (uploadedFiles.length === 0) {
266
+
// Create empty manifest
267
+
const emptyManifest = {
268
+
$type: 'place.wisp.fs',
269
+
site: siteName,
270
+
root: {
271
+
type: 'directory',
272
+
entries: []
273
+
},
274
+
fileCount: 0,
275
+
createdAt: new Date().toISOString()
276
+
};
279
277
280
-
const validationResult = validateRecord(emptyManifest);
281
-
if (!validationResult.success) {
282
-
throw new Error(`Invalid manifest: ${validationResult.error?.message || 'Validation failed'}`);
283
-
}
278
+
const validationResult = validateRecord(emptyManifest);
279
+
if (!validationResult.success) {
280
+
throw new Error(`Invalid manifest: ${validationResult.error?.message || 'Validation failed'}`);
281
+
}
284
282
285
-
const rkey = siteName;
286
-
updateJobProgress(jobId, { phase: 'finalizing' });
283
+
const rkey = siteName;
284
+
updateJobProgress(jobId, { phase: 'finalizing' });
287
285
288
-
const record = await agent.com.atproto.repo.putRecord({
289
-
repo: did,
290
-
collection: 'place.wisp.fs',
291
-
rkey: rkey,
292
-
record: emptyManifest
293
-
});
286
+
const record = await agent.com.atproto.repo.putRecord({
287
+
repo: did,
288
+
collection: 'place.wisp.fs',
289
+
rkey: rkey,
290
+
record: emptyManifest
291
+
});
294
292
295
-
await upsertSite(did, rkey, siteName);
293
+
await upsertSite(did, rkey, siteName);
296
294
297
-
completeUploadJob(jobId, {
298
-
success: true,
299
-
uri: record.data.uri,
300
-
cid: record.data.cid,
301
-
fileCount: 0,
302
-
siteName,
303
-
skippedFiles
304
-
});
305
-
return;
306
-
}
295
+
completeUploadJob(jobId, {
296
+
success: true,
297
+
uri: record.data.uri,
298
+
cid: record.data.cid,
299
+
fileCount: 0,
300
+
siteName,
301
+
skippedFiles
302
+
});
303
+
return;
304
+
}
307
305
308
-
// Process files into directory structure
309
-
console.log('Processing uploaded files into directory structure...');
310
-
const validUploadedFiles = uploadedFiles.filter((f, i) => {
311
-
if (!f || !f.name || !f.content) {
312
-
console.error(`Filtering out invalid file at index ${i}`);
313
-
return false;
314
-
}
315
-
return true;
316
-
});
306
+
// Process files into directory structure
307
+
console.log('Processing uploaded files into directory structure...');
308
+
const validUploadedFiles = uploadedFiles.filter((f, i) => {
309
+
if (!f || !f.name || !f.content) {
310
+
console.error(`Filtering out invalid file at index ${i}`);
311
+
return false;
312
+
}
313
+
return true;
314
+
});
317
315
318
-
const { directory, fileCount } = processUploadedFiles(validUploadedFiles);
319
-
console.log('Directory structure created, file count:', fileCount);
316
+
const { directory, fileCount } = processUploadedFiles(validUploadedFiles);
317
+
console.log('Directory structure created, file count:', fileCount);
320
318
321
-
// Upload files as blobs with retry logic for DPoP nonce conflicts
322
-
console.log('Starting blob upload/reuse phase...');
323
-
updateJobProgress(jobId, { phase: 'uploading' });
319
+
// Upload files as blobs with retry logic for DPoP nonce conflicts
320
+
console.log('Starting blob upload/reuse phase...');
321
+
updateJobProgress(jobId, { phase: 'uploading' });
324
322
325
-
// Helper function to upload blob with exponential backoff retry and timeout
326
-
const uploadBlobWithRetry = async (
327
-
agent: Agent,
328
-
content: Buffer,
329
-
mimeType: string,
330
-
fileName: string,
331
-
maxRetries = 5
332
-
) => {
333
-
for (let attempt = 0; attempt < maxRetries; attempt++) {
334
-
try {
335
-
console.log(`[File Upload] Starting upload attempt ${attempt + 1}/${maxRetries} for ${fileName} (${content.length} bytes, ${mimeType})`);
323
+
// Helper function to upload blob with exponential backoff retry and timeout
324
+
const uploadBlobWithRetry = async (
325
+
agent: Agent,
326
+
content: Buffer,
327
+
mimeType: string,
328
+
fileName: string,
329
+
maxRetries = 5
330
+
) => {
331
+
for (let attempt = 0; attempt < maxRetries; attempt++) {
332
+
const controller = new AbortController();
333
+
const timeoutMs = 300000; // 5 minute timeout per upload
334
+
const timeoutId = setTimeout(() => controller.abort(), timeoutMs);
336
335
337
-
// Add timeout wrapper to prevent hanging requests
338
-
const uploadPromise = agent.com.atproto.repo.uploadBlob(content, { encoding: mimeType });
339
-
const timeoutMs = 300000; // 5 minute timeout per upload
336
+
try {
337
+
console.log(`[File Upload] Starting upload attempt ${attempt + 1}/${maxRetries} for ${fileName} (${content.length} bytes, ${mimeType})`);
340
338
341
-
const timeoutPromise = new Promise((_, reject) => {
342
-
setTimeout(() => reject(new Error('Upload timeout')), timeoutMs);
343
-
});
339
+
const result = await agent.com.atproto.repo.uploadBlob(content, { encoding: mimeType });
340
+
clearTimeout(timeoutId);
341
+
console.log(`[File Upload] ✅ Successfully uploaded ${fileName} on attempt ${attempt + 1}`);
342
+
return result;
343
+
} catch (error: any) {
344
+
clearTimeout(timeoutId);
344
345
345
-
const result = await Promise.race([uploadPromise, timeoutPromise]) as any;
346
-
console.log(`[File Upload] ✅ Successfully uploaded ${fileName} on attempt ${attempt + 1}`);
347
-
return result;
348
-
} catch (error: any) {
349
-
const isDPoPNonceError =
350
-
error?.message?.toLowerCase().includes('nonce') ||
351
-
error?.message?.toLowerCase().includes('dpop') ||
352
-
error?.status === 409;
346
+
const isDPoPNonceError =
347
+
error?.message?.toLowerCase().includes('nonce') ||
348
+
error?.message?.toLowerCase().includes('dpop') ||
349
+
error?.status === 409;
353
350
354
-
const isTimeout = error?.message === 'Upload timeout';
355
-
const isRateLimited = error?.status === 429 || error?.message?.toLowerCase().includes('rate');
351
+
const isTimeout = error?.name === 'AbortError' || error?.message === 'Upload timeout';
352
+
const isRateLimited = error?.status === 429 || error?.message?.toLowerCase().includes('rate');
356
353
357
-
// Retry on DPoP nonce conflicts, timeouts, or rate limits
358
-
if ((isDPoPNonceError || isTimeout || isRateLimited) && attempt < maxRetries - 1) {
359
-
let backoffMs: number;
360
-
if (isRateLimited) {
361
-
backoffMs = 2000 * Math.pow(2, attempt); // 2s, 4s, 8s, 16s for rate limits
362
-
} else if (isTimeout) {
363
-
backoffMs = 1000 * Math.pow(2, attempt); // 1s, 2s, 4s, 8s for timeouts
364
-
} else {
365
-
backoffMs = 100 * Math.pow(2, attempt); // 100ms, 200ms, 400ms for DPoP
366
-
}
354
+
// Retry on DPoP nonce conflicts, timeouts, or rate limits
355
+
if ((isDPoPNonceError || isTimeout || isRateLimited) && attempt < maxRetries - 1) {
356
+
let backoffMs: number;
357
+
if (isRateLimited) {
358
+
backoffMs = 2000 * Math.pow(2, attempt); // 2s, 4s, 8s, 16s for rate limits
359
+
} else if (isTimeout) {
360
+
backoffMs = 1000 * Math.pow(2, attempt); // 1s, 2s, 4s, 8s for timeouts
361
+
} else {
362
+
backoffMs = 100 * Math.pow(2, attempt); // 100ms, 200ms, 400ms for DPoP
363
+
}
367
364
368
-
const reason = isDPoPNonceError ? 'DPoP nonce conflict' : isTimeout ? 'timeout' : 'rate limit';
369
-
logger.info(`[File Upload] 🔄 ${reason} for ${fileName}, retrying in ${backoffMs}ms (attempt ${attempt + 1}/${maxRetries})`);
370
-
console.log(`[File Upload] 🔄 ${reason} for ${fileName}, retrying in ${backoffMs}ms`);
371
-
await new Promise(resolve => setTimeout(resolve, backoffMs));
372
-
continue;
373
-
}
365
+
const reason = isDPoPNonceError ? 'DPoP nonce conflict' : isTimeout ? 'timeout' : 'rate limit';
366
+
logger.info(`[File Upload] 🔄 ${reason} for ${fileName}, retrying in ${backoffMs}ms (attempt ${attempt + 1}/${maxRetries})`);
367
+
console.log(`[File Upload] 🔄 ${reason} for ${fileName}, retrying in ${backoffMs}ms`);
368
+
await new Promise(resolve => setTimeout(resolve, backoffMs));
369
+
continue;
370
+
}
374
371
375
-
// Log detailed error information before throwing
376
-
logger.error(`[File Upload] ❌ Upload failed for ${fileName} (size: ${content.length} bytes, mimeType: ${mimeType}, attempt: ${attempt + 1}/${maxRetries})`, {
377
-
error: error?.error || error?.message || 'Unknown error',
378
-
status: error?.status,
379
-
headers: error?.headers,
380
-
success: error?.success
381
-
});
382
-
console.error(`[File Upload] ❌ Upload failed for ${fileName}:`, {
383
-
error: error?.error || error?.message || 'Unknown error',
384
-
status: error?.status,
385
-
size: content.length,
386
-
mimeType,
387
-
attempt: attempt + 1
388
-
});
389
-
throw error;
390
-
}
391
-
}
392
-
throw new Error(`Failed to upload ${fileName} after ${maxRetries} attempts`);
393
-
};
372
+
// Log detailed error information before throwing
373
+
logger.error(`[File Upload] ❌ Upload failed for ${fileName} (size: ${content.length} bytes, mimeType: ${mimeType}, attempt: ${attempt + 1}/${maxRetries})`, {
374
+
error: error?.error || error?.message || 'Unknown error',
375
+
status: error?.status,
376
+
headers: error?.headers,
377
+
success: error?.success
378
+
});
379
+
console.error(`[File Upload] ❌ Upload failed for ${fileName}:`, {
380
+
error: error?.error || error?.message || 'Unknown error',
381
+
status: error?.status,
382
+
size: content.length,
383
+
mimeType,
384
+
attempt: attempt + 1
385
+
});
386
+
throw error;
387
+
}
388
+
}
389
+
throw new Error(`Failed to upload ${fileName} after ${maxRetries} attempts`);
390
+
};
394
391
395
-
// Use sliding window concurrency for maximum throughput
396
-
const CONCURRENCY_LIMIT = 20; // Maximum concurrent uploads
397
-
const uploadedBlobs: Array<{
398
-
result: FileUploadResult;
399
-
filePath: string;
400
-
sentMimeType: string;
401
-
returnedMimeType: string;
402
-
reused: boolean;
403
-
}> = [];
404
-
const failedFiles: Array<{
405
-
name: string;
406
-
index: number;
407
-
error: string;
408
-
size: number;
409
-
}> = [];
392
+
// Use sliding window concurrency for maximum throughput
393
+
const CONCURRENCY_LIMIT = 20; // Maximum concurrent uploads
394
+
const uploadedBlobs: Array<{
395
+
result: FileUploadResult;
396
+
filePath: string;
397
+
sentMimeType: string;
398
+
returnedMimeType: string;
399
+
reused: boolean;
400
+
}> = [];
401
+
const failedFiles: Array<{
402
+
name: string;
403
+
index: number;
404
+
error: string;
405
+
size: number;
406
+
}> = [];
410
407
411
-
// Process file with sliding window concurrency
412
-
const processFile = async (file: UploadedFile, index: number) => {
413
-
try {
414
-
if (!file || !file.name) {
415
-
throw new Error(`Undefined file at index ${index}`);
416
-
}
408
+
// Process file with sliding window concurrency
409
+
const processFile = async (file: UploadedFile, index: number) => {
410
+
try {
411
+
if (!file || !file.name) {
412
+
throw new Error(`Undefined file at index ${index}`);
413
+
}
417
414
418
-
const fileCID = computeCID(file.content);
419
-
const normalizedPath = file.name.replace(/^[^\/]*\//, '');
420
-
const existingBlob = existingBlobMap.get(normalizedPath) || existingBlobMap.get(file.name);
415
+
const fileCID = computeCID(file.content);
416
+
const normalizedPath = file.name.replace(/^[^\/]*\//, '');
417
+
const existingBlob = existingBlobMap.get(normalizedPath) || existingBlobMap.get(file.name);
421
418
422
-
if (existingBlob && existingBlob.cid === fileCID) {
423
-
logger.info(`[File Upload] ♻️ Reused: ${file.name} (unchanged, CID: ${fileCID})`);
424
-
updateJobProgress(jobId, {
425
-
filesReused: (getUploadJob(jobId)?.progress.filesReused || 0) + 1
426
-
});
419
+
if (existingBlob && existingBlob.cid === fileCID) {
420
+
logger.info(`[File Upload] ♻️ Reused: ${file.name} (unchanged, CID: ${fileCID})`);
421
+
updateJobProgress(jobId, {
422
+
filesReused: (getUploadJob(jobId)?.progress.filesReused || 0) + 1
423
+
});
427
424
428
-
return {
429
-
result: {
430
-
hash: existingBlob.cid,
431
-
blobRef: existingBlob.blobRef,
432
-
...(file.compressed && {
433
-
encoding: 'gzip' as const,
434
-
mimeType: file.originalMimeType || file.mimeType,
435
-
base64: file.base64Encoded || false
436
-
})
437
-
},
438
-
filePath: file.name,
439
-
sentMimeType: file.mimeType,
440
-
returnedMimeType: existingBlob.blobRef.mimeType,
441
-
reused: true
442
-
};
443
-
}
425
+
return {
426
+
result: {
427
+
hash: existingBlob.cid,
428
+
blobRef: existingBlob.blobRef,
429
+
...(file.compressed && {
430
+
encoding: 'gzip' as const,
431
+
mimeType: file.originalMimeType || file.mimeType,
432
+
base64: file.base64Encoded || false
433
+
})
434
+
},
435
+
filePath: file.name,
436
+
sentMimeType: file.mimeType,
437
+
returnedMimeType: existingBlob.blobRef.mimeType,
438
+
reused: true
439
+
};
440
+
}
444
441
445
-
const uploadMimeType = file.compressed || file.mimeType.startsWith('text/html')
446
-
? 'application/octet-stream'
447
-
: file.mimeType;
442
+
const uploadMimeType = file.compressed || file.mimeType.startsWith('text/html')
443
+
? 'application/octet-stream'
444
+
: file.mimeType;
448
445
449
-
const compressionInfo = file.compressed ? ' (gzipped)' : '';
450
-
const fileSizeMB = (file.size / 1024 / 1024).toFixed(2);
451
-
logger.info(`[File Upload] ⬆️ Uploading: ${file.name} (${fileSizeMB}MB${compressionInfo})`);
446
+
const compressionInfo = file.compressed ? ' (gzipped)' : '';
447
+
const fileSizeMB = (file.size / 1024 / 1024).toFixed(2);
448
+
logger.info(`[File Upload] ⬆️ Uploading: ${file.name} (${fileSizeMB}MB${compressionInfo})`);
452
449
453
-
const uploadResult = await uploadBlobWithRetry(
454
-
agent,
455
-
file.content,
456
-
uploadMimeType,
457
-
file.name
458
-
);
450
+
const uploadResult = await uploadBlobWithRetry(
451
+
agent,
452
+
file.content,
453
+
uploadMimeType,
454
+
file.name
455
+
);
459
456
460
-
const returnedBlobRef = uploadResult.data.blob;
461
-
updateJobProgress(jobId, {
462
-
filesUploaded: (getUploadJob(jobId)?.progress.filesUploaded || 0) + 1
463
-
});
464
-
logger.info(`[File Upload] ✅ Uploaded: ${file.name} (CID: ${fileCID})`);
457
+
const returnedBlobRef = uploadResult.data.blob;
458
+
updateJobProgress(jobId, {
459
+
filesUploaded: (getUploadJob(jobId)?.progress.filesUploaded || 0) + 1
460
+
});
461
+
logger.info(`[File Upload] ✅ Uploaded: ${file.name} (CID: ${fileCID})`);
465
462
466
-
return {
467
-
result: {
468
-
hash: returnedBlobRef.ref.toString(),
469
-
blobRef: returnedBlobRef,
470
-
...(file.compressed && {
471
-
encoding: 'gzip' as const,
472
-
mimeType: file.originalMimeType || file.mimeType,
473
-
base64: file.base64Encoded || false
474
-
})
475
-
},
476
-
filePath: file.name,
477
-
sentMimeType: file.mimeType,
478
-
returnedMimeType: returnedBlobRef.mimeType,
479
-
reused: false
480
-
};
481
-
} catch (uploadError) {
482
-
const fileName = file?.name || 'unknown';
483
-
const fileSize = file?.size || 0;
484
-
const errorMessage = uploadError instanceof Error ? uploadError.message : 'Unknown error';
485
-
const errorDetails = {
486
-
fileName,
487
-
fileSize,
488
-
index,
489
-
error: errorMessage,
490
-
stack: uploadError instanceof Error ? uploadError.stack : undefined
491
-
};
492
-
logger.error(`Upload failed for file: ${fileName} (${fileSize} bytes) at index ${index}`, errorDetails);
493
-
console.error(`Upload failed for file: ${fileName} (${fileSize} bytes) at index ${index}`, errorDetails);
463
+
return {
464
+
result: {
465
+
hash: returnedBlobRef.ref.toString(),
466
+
blobRef: returnedBlobRef,
467
+
...(file.compressed && {
468
+
encoding: 'gzip' as const,
469
+
mimeType: file.originalMimeType || file.mimeType,
470
+
base64: file.base64Encoded || false
471
+
})
472
+
},
473
+
filePath: file.name,
474
+
sentMimeType: file.mimeType,
475
+
returnedMimeType: returnedBlobRef.mimeType,
476
+
reused: false
477
+
};
478
+
} catch (uploadError) {
479
+
const fileName = file?.name || 'unknown';
480
+
const fileSize = file?.size || 0;
481
+
const errorMessage = uploadError instanceof Error ? uploadError.message : 'Unknown error';
482
+
const errorDetails = {
483
+
fileName,
484
+
fileSize,
485
+
index,
486
+
error: errorMessage,
487
+
stack: uploadError instanceof Error ? uploadError.stack : undefined
488
+
};
489
+
logger.error(`Upload failed for file: ${fileName} (${fileSize} bytes) at index ${index}`, errorDetails);
490
+
console.error(`Upload failed for file: ${fileName} (${fileSize} bytes) at index ${index}`, errorDetails);
494
491
495
-
// Track failed file but don't throw - continue with other files
496
-
failedFiles.push({
497
-
name: fileName,
498
-
index,
499
-
error: errorMessage,
500
-
size: fileSize
501
-
});
492
+
// Track failed file but don't throw - continue with other files
493
+
failedFiles.push({
494
+
name: fileName,
495
+
index,
496
+
error: errorMessage,
497
+
size: fileSize
498
+
});
502
499
503
-
return null; // Return null to indicate failure
504
-
}
505
-
};
500
+
return null; // Return null to indicate failure
501
+
}
502
+
};
506
503
507
-
// Sliding window concurrency control
508
-
const processWithConcurrency = async () => {
509
-
const results: any[] = [];
510
-
let fileIndex = 0;
511
-
const executing = new Map<Promise<void>, { index: number; name: string }>();
504
+
// Sliding window concurrency control
505
+
const processWithConcurrency = async () => {
506
+
const results: any[] = [];
507
+
let fileIndex = 0;
508
+
const executing = new Map<Promise<void>, { index: number; name: string }>();
512
509
513
-
for (const file of validUploadedFiles) {
514
-
const currentIndex = fileIndex++;
510
+
for (const file of validUploadedFiles) {
511
+
const currentIndex = fileIndex++;
515
512
516
-
const promise = processFile(file, currentIndex)
517
-
.then(result => {
518
-
results[currentIndex] = result;
519
-
console.log(`[Concurrency] File ${currentIndex} (${file.name}) completed successfully`);
520
-
})
521
-
.catch(error => {
522
-
// This shouldn't happen since processFile catches errors, but just in case
523
-
logger.error(`Unexpected error processing file at index ${currentIndex}`, error);
524
-
console.error(`[Concurrency] File ${currentIndex} (${file.name}) had unexpected error:`, error);
525
-
results[currentIndex] = null;
526
-
})
527
-
.finally(() => {
528
-
executing.delete(promise);
529
-
const remaining = Array.from(executing.values()).map(f => `${f.index}:${f.name}`);
530
-
console.log(`[Concurrency] File ${currentIndex} (${file.name}) removed. Remaining ${executing.size}: [${remaining.join(', ')}]`);
531
-
});
513
+
const promise = processFile(file, currentIndex)
514
+
.then(result => {
515
+
results[currentIndex] = result;
516
+
})
517
+
.catch(error => {
518
+
// This shouldn't happen since processFile catches errors, but just in case
519
+
logger.error(`Unexpected error processing file at index ${currentIndex}`, error);
520
+
results[currentIndex] = null;
521
+
})
522
+
.finally(() => {
523
+
executing.delete(promise);
524
+
});
532
525
533
-
executing.set(promise, { index: currentIndex, name: file.name });
534
-
const current = Array.from(executing.values()).map(f => `${f.index}:${f.name}`);
535
-
console.log(`[Concurrency] Added file ${currentIndex} (${file.name}). Total ${executing.size}: [${current.join(', ')}]`);
526
+
executing.set(promise, { index: currentIndex, name: file.name });
536
527
537
-
if (executing.size >= CONCURRENCY_LIMIT) {
538
-
console.log(`[Concurrency] Hit limit (${CONCURRENCY_LIMIT}), waiting for one to complete...`);
539
-
await Promise.race(executing.keys());
540
-
console.log(`[Concurrency] One completed, continuing. Remaining: ${executing.size}`);
541
-
}
542
-
}
528
+
if (executing.size >= CONCURRENCY_LIMIT) {
529
+
await Promise.race(executing.keys());
530
+
}
531
+
}
543
532
544
-
// Wait for remaining uploads
545
-
const remaining = Array.from(executing.values()).map(f => `${f.index}:${f.name}`);
546
-
console.log(`[Concurrency] Waiting for ${executing.size} remaining uploads: [${remaining.join(', ')}]`);
547
-
await Promise.all(executing.keys());
548
-
console.log(`[Concurrency] All uploads complete!`);
549
-
return results.filter(r => r !== undefined && r !== null); // Filter out null (failed) and undefined entries
550
-
};
533
+
// Wait for remaining uploads
534
+
await Promise.all(executing.keys());
535
+
return results.filter(r => r !== undefined && r !== null); // Filter out null (failed) and undefined entries
536
+
};
551
537
552
-
const allResults = await processWithConcurrency();
553
-
uploadedBlobs.push(...allResults);
538
+
const allResults = await processWithConcurrency();
539
+
uploadedBlobs.push(...allResults);
554
540
555
-
const currentReused = uploadedBlobs.filter(b => b.reused).length;
556
-
const currentUploaded = uploadedBlobs.filter(b => !b.reused).length;
557
-
const successfulCount = uploadedBlobs.length;
558
-
const failedCount = failedFiles.length;
541
+
const currentReused = uploadedBlobs.filter(b => b.reused).length;
542
+
const currentUploaded = uploadedBlobs.filter(b => !b.reused).length;
543
+
const successfulCount = uploadedBlobs.length;
544
+
const failedCount = failedFiles.length;
559
545
560
-
logger.info(`[File Upload] 🎉 Upload complete → ${successfulCount}/${validUploadedFiles.length} files succeeded (${currentUploaded} uploaded, ${currentReused} reused), ${failedCount} failed`);
546
+
logger.info(`[File Upload] 🎉 Upload complete → ${successfulCount}/${validUploadedFiles.length} files succeeded (${currentUploaded} uploaded, ${currentReused} reused), ${failedCount} failed`);
561
547
562
-
if (failedCount > 0) {
563
-
logger.warn(`[File Upload] ⚠️ Failed files:`, failedFiles);
564
-
console.warn(`[File Upload] ⚠️ ${failedCount} files failed to upload:`, failedFiles.map(f => f.name).join(', '));
565
-
}
548
+
if (failedCount > 0) {
549
+
logger.warn(`[File Upload] ⚠️ Failed files:`, failedFiles);
550
+
console.warn(`[File Upload] ⚠️ ${failedCount} files failed to upload:`, failedFiles.map(f => f.name).join(', '));
551
+
}
566
552
567
-
const reusedCount = uploadedBlobs.filter(b => b.reused).length;
568
-
const uploadedCount = uploadedBlobs.filter(b => !b.reused).length;
569
-
logger.info(`[File Upload] 🎉 Upload phase complete! Total: ${successfulCount} files (${uploadedCount} uploaded, ${reusedCount} reused)`);
553
+
const reusedCount = uploadedBlobs.filter(b => b.reused).length;
554
+
const uploadedCount = uploadedBlobs.filter(b => !b.reused).length;
555
+
logger.info(`[File Upload] 🎉 Upload phase complete! Total: ${successfulCount} files (${uploadedCount} uploaded, ${reusedCount} reused)`);
570
556
571
-
const uploadResults: FileUploadResult[] = uploadedBlobs.map(blob => blob.result);
572
-
const filePaths: string[] = uploadedBlobs.map(blob => blob.filePath);
557
+
const uploadResults: FileUploadResult[] = uploadedBlobs.map(blob => blob.result);
558
+
const filePaths: string[] = uploadedBlobs.map(blob => blob.filePath);
573
559
574
-
// Update directory with file blobs
575
-
console.log('Updating directory with blob references...');
576
-
updateJobProgress(jobId, { phase: 'creating_manifest' });
577
-
const updatedDirectory = updateFileBlobs(directory, uploadResults, filePaths);
560
+
// Update directory with file blobs
561
+
console.log('Updating directory with blob references...');
562
+
updateJobProgress(jobId, { phase: 'creating_manifest' });
563
+
const updatedDirectory = updateFileBlobs(directory, uploadResults, filePaths);
578
564
579
-
// Check if we need to split into subfs records
580
-
// Split proactively if we have lots of files to avoid hitting manifest size limits
581
-
const MAX_MANIFEST_SIZE = 140 * 1024; // 140KB to be safe (PDS limit is 150KB)
582
-
const FILE_COUNT_THRESHOLD = 250; // Start splitting early
583
-
const subfsRecords: Array<{ uri: string; path: string }> = [];
584
-
let workingDirectory = updatedDirectory;
585
-
let currentFileCount = fileCount;
565
+
// Check if we need to split into subfs records
566
+
// Split proactively if we have lots of files to avoid hitting manifest size limits
567
+
const MAX_MANIFEST_SIZE = 140 * 1024; // 140KB to be safe (PDS limit is 150KB)
568
+
const FILE_COUNT_THRESHOLD = 250; // Start splitting early
569
+
const subfsRecords: Array<{ uri: string; path: string }> = [];
570
+
let workingDirectory = updatedDirectory;
571
+
let currentFileCount = fileCount;
586
572
587
-
// Create initial manifest to check size
588
-
let manifest = createManifest(siteName, workingDirectory, fileCount);
589
-
let manifestSize = JSON.stringify(manifest).length;
573
+
// Create initial manifest to check size
574
+
let manifest = createManifest(siteName, workingDirectory, fileCount);
575
+
let manifestSize = JSON.stringify(manifest).length;
590
576
591
-
// Split if we have lots of files OR if manifest is already too large
592
-
if (fileCount >= FILE_COUNT_THRESHOLD || manifestSize > MAX_MANIFEST_SIZE) {
593
-
console.log(`⚠️ Large site detected (${fileCount} files, ${(manifestSize / 1024).toFixed(1)}KB), splitting into subfs records...`);
594
-
logger.info(`Large site with ${fileCount} files, splitting into subfs records`);
577
+
// Split if we have lots of files OR if manifest is already too large
578
+
if (fileCount >= FILE_COUNT_THRESHOLD || manifestSize > MAX_MANIFEST_SIZE) {
579
+
console.log(`⚠️ Large site detected (${fileCount} files, ${(manifestSize / 1024).toFixed(1)}KB), splitting into subfs records...`);
580
+
logger.info(`Large site with ${fileCount} files, splitting into subfs records`);
595
581
596
-
// Keep splitting until manifest fits under limit
597
-
let attempts = 0;
598
-
const MAX_ATTEMPTS = 100; // Allow many splits for very large sites
582
+
// Keep splitting until manifest fits under limit
583
+
let attempts = 0;
584
+
const MAX_ATTEMPTS = 100; // Allow many splits for very large sites
599
585
600
-
while (manifestSize > MAX_MANIFEST_SIZE && attempts < MAX_ATTEMPTS) {
601
-
attempts++;
586
+
while (manifestSize > MAX_MANIFEST_SIZE && attempts < MAX_ATTEMPTS) {
587
+
attempts++;
602
588
603
-
// Find all directories sorted by size (largest first)
604
-
const directories = findLargeDirectories(workingDirectory);
605
-
directories.sort((a, b) => b.size - a.size);
589
+
// Find all directories sorted by size (largest first)
590
+
const directories = findLargeDirectories(workingDirectory);
591
+
directories.sort((a, b) => b.size - a.size);
606
592
607
-
if (directories.length === 0) {
608
-
// No more directories to split - this should be very rare
609
-
throw new Error(
610
-
`Cannot split manifest further - no subdirectories available. ` +
611
-
`Current size: ${(manifestSize / 1024).toFixed(1)}KB. ` +
612
-
`Try organizing files into subdirectories.`
613
-
);
614
-
}
593
+
if (directories.length === 0) {
594
+
// No more directories to split - this should be very rare
595
+
throw new Error(
596
+
`Cannot split manifest further - no subdirectories available. ` +
597
+
`Current size: ${(manifestSize / 1024).toFixed(1)}KB. ` +
598
+
`Try organizing files into subdirectories.`
599
+
);
600
+
}
615
601
616
-
// Pick the largest directory
617
-
const largestDir = directories[0];
618
-
console.log(` Split #${attempts}: ${largestDir.path} (${largestDir.fileCount} files, ${(largestDir.size / 1024).toFixed(1)}KB)`);
602
+
// Pick the largest directory
603
+
const largestDir = directories[0];
604
+
console.log(` Split #${attempts}: ${largestDir.path} (${largestDir.fileCount} files, ${(largestDir.size / 1024).toFixed(1)}KB)`);
619
605
620
-
// Create a subfs record for this directory
621
-
const subfsRkey = TID.nextStr();
622
-
const subfsManifest = {
623
-
$type: 'place.wisp.subfs' as const,
624
-
root: largestDir.directory,
625
-
fileCount: largestDir.fileCount,
626
-
createdAt: new Date().toISOString()
627
-
};
606
+
// Create a subfs record for this directory
607
+
const subfsRkey = TID.nextStr();
608
+
const subfsManifest = {
609
+
$type: 'place.wisp.subfs' as const,
610
+
root: largestDir.directory,
611
+
fileCount: largestDir.fileCount,
612
+
createdAt: new Date().toISOString()
613
+
};
628
614
629
-
// Validate subfs record
630
-
const subfsValidation = validateSubfsRecord(subfsManifest);
631
-
if (!subfsValidation.success) {
632
-
throw new Error(`Invalid subfs manifest: ${subfsValidation.error?.message || 'Validation failed'}`);
633
-
}
615
+
// Validate subfs record
616
+
const subfsValidation = validateSubfsRecord(subfsManifest);
617
+
if (!subfsValidation.success) {
618
+
throw new Error(`Invalid subfs manifest: ${subfsValidation.error?.message || 'Validation failed'}`);
619
+
}
634
620
635
-
// Upload subfs record to PDS
636
-
const subfsRecord = await agent.com.atproto.repo.putRecord({
637
-
repo: did,
638
-
collection: 'place.wisp.subfs',
639
-
rkey: subfsRkey,
640
-
record: subfsManifest
641
-
});
621
+
// Upload subfs record to PDS
622
+
const subfsRecord = await agent.com.atproto.repo.putRecord({
623
+
repo: did,
624
+
collection: 'place.wisp.subfs',
625
+
rkey: subfsRkey,
626
+
record: subfsManifest
627
+
});
642
628
643
-
const subfsUri = subfsRecord.data.uri;
644
-
subfsRecords.push({ uri: subfsUri, path: largestDir.path });
645
-
console.log(` ✅ Created subfs: ${subfsUri}`);
646
-
logger.info(`Created subfs record for ${largestDir.path}: ${subfsUri}`);
629
+
const subfsUri = subfsRecord.data.uri;
630
+
subfsRecords.push({ uri: subfsUri, path: largestDir.path });
631
+
console.log(` ✅ Created subfs: ${subfsUri}`);
632
+
logger.info(`Created subfs record for ${largestDir.path}: ${subfsUri}`);
647
633
648
-
// Replace directory with subfs node in the main tree
649
-
workingDirectory = replaceDirectoryWithSubfs(workingDirectory, largestDir.path, subfsUri);
634
+
// Replace directory with subfs node in the main tree
635
+
workingDirectory = replaceDirectoryWithSubfs(workingDirectory, largestDir.path, subfsUri);
650
636
651
-
// Recreate manifest and check new size
652
-
currentFileCount -= largestDir.fileCount;
653
-
manifest = createManifest(siteName, workingDirectory, fileCount);
654
-
manifestSize = JSON.stringify(manifest).length;
655
-
const newSizeKB = (manifestSize / 1024).toFixed(1);
656
-
console.log(` → Manifest now ${newSizeKB}KB with ${currentFileCount} files (${subfsRecords.length} subfs total)`);
637
+
// Recreate manifest and check new size
638
+
currentFileCount -= largestDir.fileCount;
639
+
manifest = createManifest(siteName, workingDirectory, fileCount);
640
+
manifestSize = JSON.stringify(manifest).length;
641
+
const newSizeKB = (manifestSize / 1024).toFixed(1);
642
+
console.log(` → Manifest now ${newSizeKB}KB with ${currentFileCount} files (${subfsRecords.length} subfs total)`);
657
643
658
-
// Check if we're under the limit now
659
-
if (manifestSize <= MAX_MANIFEST_SIZE) {
660
-
console.log(` ✅ Manifest fits! (${newSizeKB}KB < 140KB)`);
661
-
break;
662
-
}
663
-
}
644
+
// Check if we're under the limit now
645
+
if (manifestSize <= MAX_MANIFEST_SIZE) {
646
+
console.log(` ✅ Manifest fits! (${newSizeKB}KB < 140KB)`);
647
+
break;
648
+
}
649
+
}
664
650
665
-
if (manifestSize > MAX_MANIFEST_SIZE) {
666
-
throw new Error(
667
-
`Failed to fit manifest after splitting ${attempts} directories. ` +
668
-
`Current size: ${(manifestSize / 1024).toFixed(1)}KB. ` +
669
-
`This should never happen - please report this issue.`
670
-
);
671
-
}
651
+
if (manifestSize > MAX_MANIFEST_SIZE) {
652
+
throw new Error(
653
+
`Failed to fit manifest after splitting ${attempts} directories. ` +
654
+
`Current size: ${(manifestSize / 1024).toFixed(1)}KB. ` +
655
+
`This should never happen - please report this issue.`
656
+
);
657
+
}
672
658
673
-
console.log(`✅ Split complete: ${subfsRecords.length} subfs records, ${currentFileCount} files in main, ${(manifestSize / 1024).toFixed(1)}KB manifest`);
674
-
logger.info(`Split into ${subfsRecords.length} subfs records, ${currentFileCount} files remaining in main tree`);
675
-
} else {
676
-
const manifestSizeKB = (manifestSize / 1024).toFixed(1);
677
-
console.log(`Manifest created (${fileCount} files, ${manifestSizeKB}KB JSON) - no splitting needed`);
678
-
}
659
+
console.log(`✅ Split complete: ${subfsRecords.length} subfs records, ${currentFileCount} files in main, ${(manifestSize / 1024).toFixed(1)}KB manifest`);
660
+
logger.info(`Split into ${subfsRecords.length} subfs records, ${currentFileCount} files remaining in main tree`);
661
+
} else {
662
+
const manifestSizeKB = (manifestSize / 1024).toFixed(1);
663
+
console.log(`Manifest created (${fileCount} files, ${manifestSizeKB}KB JSON) - no splitting needed`);
664
+
}
679
665
680
-
const rkey = siteName;
681
-
updateJobProgress(jobId, { phase: 'finalizing' });
666
+
const rkey = siteName;
667
+
updateJobProgress(jobId, { phase: 'finalizing' });
682
668
683
-
console.log('Putting record to PDS with rkey:', rkey);
684
-
const record = await agent.com.atproto.repo.putRecord({
685
-
repo: did,
686
-
collection: 'place.wisp.fs',
687
-
rkey: rkey,
688
-
record: manifest
689
-
});
690
-
console.log('Record successfully created on PDS:', record.data.uri);
669
+
console.log('Putting record to PDS with rkey:', rkey);
670
+
const record = await agent.com.atproto.repo.putRecord({
671
+
repo: did,
672
+
collection: 'place.wisp.fs',
673
+
rkey: rkey,
674
+
record: manifest
675
+
});
676
+
console.log('Record successfully created on PDS:', record.data.uri);
691
677
692
-
// Store site in database cache
693
-
await upsertSite(did, rkey, siteName);
678
+
// Store site in database cache
679
+
await upsertSite(did, rkey, siteName);
694
680
695
-
// Clean up old subfs records if we had any
696
-
if (oldSubfsUris.length > 0) {
697
-
console.log(`Cleaning up ${oldSubfsUris.length} old subfs records...`);
698
-
logger.info(`Cleaning up ${oldSubfsUris.length} old subfs records`);
681
+
// Clean up old subfs records if we had any
682
+
if (oldSubfsUris.length > 0) {
683
+
console.log(`Cleaning up ${oldSubfsUris.length} old subfs records...`);
684
+
logger.info(`Cleaning up ${oldSubfsUris.length} old subfs records`);
699
685
700
-
// Delete old subfs records in parallel (don't wait for completion)
701
-
Promise.all(
702
-
oldSubfsUris.map(async ({ uri }) => {
703
-
try {
704
-
// Parse URI: at://did/collection/rkey
705
-
const parts = uri.replace('at://', '').split('/');
706
-
const subRkey = parts[2];
686
+
// Delete old subfs records in parallel (don't wait for completion)
687
+
Promise.all(
688
+
oldSubfsUris.map(async ({ uri }) => {
689
+
try {
690
+
// Parse URI: at://did/collection/rkey
691
+
const parts = uri.replace('at://', '').split('/');
692
+
const subRkey = parts[2];
707
693
708
-
await agent.com.atproto.repo.deleteRecord({
709
-
repo: did,
710
-
collection: 'place.wisp.subfs',
711
-
rkey: subRkey
712
-
});
694
+
await agent.com.atproto.repo.deleteRecord({
695
+
repo: did,
696
+
collection: 'place.wisp.subfs',
697
+
rkey: subRkey
698
+
});
713
699
714
-
console.log(` 🗑️ Deleted old subfs: ${uri}`);
715
-
logger.info(`Deleted old subfs record: ${uri}`);
716
-
} catch (err: any) {
717
-
// Don't fail the whole upload if cleanup fails
718
-
console.warn(`Failed to delete old subfs ${uri}:`, err?.message);
719
-
logger.warn(`Failed to delete old subfs ${uri}`, err);
720
-
}
721
-
})
722
-
).catch(err => {
723
-
// Log but don't fail if cleanup fails
724
-
logger.warn('Some subfs cleanup operations failed', err);
725
-
});
726
-
}
700
+
console.log(` 🗑️ Deleted old subfs: ${uri}`);
701
+
logger.info(`Deleted old subfs record: ${uri}`);
702
+
} catch (err: any) {
703
+
// Don't fail the whole upload if cleanup fails
704
+
console.warn(`Failed to delete old subfs ${uri}:`, err?.message);
705
+
logger.warn(`Failed to delete old subfs ${uri}`, err);
706
+
}
707
+
})
708
+
).catch(err => {
709
+
// Log but don't fail if cleanup fails
710
+
logger.warn('Some subfs cleanup operations failed', err);
711
+
});
712
+
}
727
713
728
-
completeUploadJob(jobId, {
729
-
success: true,
730
-
uri: record.data.uri,
731
-
cid: record.data.cid,
732
-
fileCount,
733
-
siteName,
734
-
skippedFiles,
735
-
failedFiles,
736
-
uploadedCount: validUploadedFiles.length - failedFiles.length,
737
-
hasFailures: failedFiles.length > 0
738
-
});
714
+
completeUploadJob(jobId, {
715
+
success: true,
716
+
uri: record.data.uri,
717
+
cid: record.data.cid,
718
+
fileCount,
719
+
siteName,
720
+
skippedFiles,
721
+
failedFiles,
722
+
uploadedCount: validUploadedFiles.length - failedFiles.length,
723
+
hasFailures: failedFiles.length > 0
724
+
});
739
725
740
-
console.log('=== UPLOAD FILES COMPLETE ===');
741
-
} catch (error) {
742
-
console.error('=== UPLOAD ERROR ===');
743
-
console.error('Error details:', error);
744
-
logger.error('Upload error', error);
745
-
failUploadJob(jobId, error instanceof Error ? error.message : 'Unknown error');
746
-
}
726
+
console.log('=== UPLOAD FILES COMPLETE ===');
727
+
} catch (error) {
728
+
console.error('=== UPLOAD ERROR ===');
729
+
console.error('Error details:', error);
730
+
logger.error('Upload error', error);
731
+
failUploadJob(jobId, error instanceof Error ? error.message : 'Unknown error');
732
+
}
747
733
}
748
734
749
735
export const wispRoutes = (client: NodeOAuthClient, cookieSecret: string) =>
750
-
new Elysia({
751
-
prefix: '/wisp',
752
-
cookie: {
753
-
secrets: cookieSecret,
754
-
sign: ['did']
755
-
}
756
-
})
757
-
.derive(async ({ cookie }) => {
758
-
const auth = await requireAuth(client, cookie)
759
-
return { auth }
760
-
})
761
-
.get(
762
-
'/upload-progress/:jobId',
763
-
async ({ params: { jobId }, auth, set }) => {
764
-
const job = getUploadJob(jobId);
736
+
new Elysia({
737
+
prefix: '/wisp',
738
+
cookie: {
739
+
secrets: cookieSecret,
740
+
sign: ['did']
741
+
}
742
+
})
743
+
.derive(async ({ cookie }) => {
744
+
const auth = await requireAuth(client, cookie)
745
+
return { auth }
746
+
})
747
+
.get(
748
+
'/upload-progress/:jobId',
749
+
async ({ params: { jobId }, auth, set }) => {
750
+
const job = getUploadJob(jobId);
765
751
766
-
if (!job) {
767
-
set.status = 404;
768
-
return { error: 'Job not found' };
769
-
}
752
+
if (!job) {
753
+
set.status = 404;
754
+
return { error: 'Job not found' };
755
+
}
770
756
771
-
// Verify job belongs to authenticated user
772
-
if (job.did !== auth.did) {
773
-
set.status = 403;
774
-
return { error: 'Unauthorized' };
775
-
}
757
+
// Verify job belongs to authenticated user
758
+
if (job.did !== auth.did) {
759
+
set.status = 403;
760
+
return { error: 'Unauthorized' };
761
+
}
776
762
777
-
// Set up SSE headers
778
-
set.headers = {
779
-
'Content-Type': 'text/event-stream',
780
-
'Cache-Control': 'no-cache',
781
-
'Connection': 'keep-alive'
782
-
};
763
+
// Set up SSE headers
764
+
set.headers = {
765
+
'Content-Type': 'text/event-stream',
766
+
'Cache-Control': 'no-cache',
767
+
'Connection': 'keep-alive'
768
+
};
783
769
784
-
const stream = new ReadableStream({
785
-
start(controller) {
786
-
const encoder = new TextEncoder();
770
+
const stream = new ReadableStream({
771
+
start(controller) {
772
+
const encoder = new TextEncoder();
787
773
788
-
// Send initial state
789
-
const sendEvent = (event: string, data: any) => {
790
-
try {
791
-
const message = `event: ${event}\ndata: ${JSON.stringify(data)}\n\n`;
792
-
controller.enqueue(encoder.encode(message));
793
-
} catch (err) {
794
-
// Controller closed, ignore
795
-
}
796
-
};
774
+
// Send initial state
775
+
const sendEvent = (event: string, data: any) => {
776
+
try {
777
+
const message = `event: ${event}\ndata: ${JSON.stringify(data)}\n\n`;
778
+
controller.enqueue(encoder.encode(message));
779
+
} catch (err) {
780
+
// Controller closed, ignore
781
+
}
782
+
};
797
783
798
-
// Send keepalive comment every 15 seconds to prevent timeout
799
-
const keepaliveInterval = setInterval(() => {
800
-
try {
801
-
controller.enqueue(encoder.encode(': keepalive\n\n'));
802
-
} catch (err) {
803
-
// Controller closed, stop sending keepalives
804
-
clearInterval(keepaliveInterval);
805
-
}
806
-
}, 15000);
784
+
// Send keepalive comment every 15 seconds to prevent timeout
785
+
const keepaliveInterval = setInterval(() => {
786
+
try {
787
+
controller.enqueue(encoder.encode(': keepalive\n\n'));
788
+
} catch (err) {
789
+
// Controller closed, stop sending keepalives
790
+
clearInterval(keepaliveInterval);
791
+
}
792
+
}, 15000);
807
793
808
-
// Send current job state immediately
809
-
sendEvent('progress', {
810
-
status: job.status,
811
-
progress: job.progress,
812
-
result: job.result,
813
-
error: job.error
814
-
});
794
+
// Send current job state immediately
795
+
sendEvent('progress', {
796
+
status: job.status,
797
+
progress: job.progress,
798
+
result: job.result,
799
+
error: job.error
800
+
});
815
801
816
-
// If job is already completed or failed, close the stream
817
-
if (job.status === 'completed' || job.status === 'failed') {
818
-
clearInterval(keepaliveInterval);
819
-
controller.close();
820
-
return;
821
-
}
802
+
// If job is already completed or failed, close the stream
803
+
if (job.status === 'completed' || job.status === 'failed') {
804
+
clearInterval(keepaliveInterval);
805
+
controller.close();
806
+
return;
807
+
}
822
808
823
-
// Listen for updates
824
-
const cleanup = addJobListener(jobId, (event, data) => {
825
-
sendEvent(event, data);
809
+
// Listen for updates
810
+
const cleanup = addJobListener(jobId, (event, data) => {
811
+
sendEvent(event, data);
826
812
827
-
// Close stream after done or error event
828
-
if (event === 'done' || event === 'error') {
829
-
clearInterval(keepaliveInterval);
830
-
setTimeout(() => {
831
-
try {
832
-
controller.close();
833
-
} catch (err) {
834
-
// Already closed
835
-
}
836
-
}, 100);
837
-
}
838
-
});
813
+
// Close stream after done or error event
814
+
if (event === 'done' || event === 'error') {
815
+
clearInterval(keepaliveInterval);
816
+
setTimeout(() => {
817
+
try {
818
+
controller.close();
819
+
} catch (err) {
820
+
// Already closed
821
+
}
822
+
}, 100);
823
+
}
824
+
});
839
825
840
-
// Cleanup on disconnect
841
-
return () => {
842
-
clearInterval(keepaliveInterval);
843
-
cleanup();
844
-
};
845
-
}
846
-
});
826
+
// Cleanup on disconnect
827
+
return () => {
828
+
clearInterval(keepaliveInterval);
829
+
cleanup();
830
+
};
831
+
}
832
+
});
847
833
848
-
return new Response(stream);
849
-
}
850
-
)
851
-
.post(
852
-
'/upload-files',
853
-
async ({ body, auth }) => {
854
-
const { siteName, files } = body as {
855
-
siteName: string;
856
-
files: File | File[]
857
-
};
834
+
return new Response(stream);
835
+
}
836
+
)
837
+
.post(
838
+
'/upload-files',
839
+
async ({ body, auth }) => {
840
+
const { siteName, files } = body as {
841
+
siteName: string;
842
+
files: File | File[]
843
+
};
858
844
859
-
console.log('=== UPLOAD FILES START ===');
860
-
console.log('Site name:', siteName);
861
-
console.log('Files received:', Array.isArray(files) ? files.length : 'single file');
845
+
console.log('=== UPLOAD FILES START ===');
846
+
console.log('Site name:', siteName);
847
+
console.log('Files received:', Array.isArray(files) ? files.length : 'single file');
862
848
863
-
try {
864
-
if (!siteName) {
865
-
throw new Error('Site name is required')
866
-
}
849
+
try {
850
+
if (!siteName) {
851
+
throw new Error('Site name is required')
852
+
}
867
853
868
-
if (!isValidSiteName(siteName)) {
869
-
throw new Error('Invalid site name: must be 1-512 characters and contain only alphanumeric, dots, dashes, underscores, tildes, and colons')
870
-
}
854
+
if (!isValidSiteName(siteName)) {
855
+
throw new Error('Invalid site name: must be 1-512 characters and contain only alphanumeric, dots, dashes, underscores, tildes, and colons')
856
+
}
871
857
872
-
// Check if files were provided
873
-
const hasFiles = files && (Array.isArray(files) ? files.length > 0 : !!files);
858
+
// Check if files were provided
859
+
const hasFiles = files && (Array.isArray(files) ? files.length > 0 : !!files);
874
860
875
-
if (!hasFiles) {
876
-
// Handle empty upload synchronously (fast operation)
877
-
const agent = new Agent((url, init) => auth.session.fetchHandler(url, init))
861
+
if (!hasFiles) {
862
+
// Handle empty upload synchronously (fast operation)
863
+
const agent = new Agent((url, init) => auth.session.fetchHandler(url, init))
878
864
879
-
const emptyManifest = {
880
-
$type: 'place.wisp.fs',
881
-
site: siteName,
882
-
root: {
883
-
type: 'directory',
884
-
entries: []
885
-
},
886
-
fileCount: 0,
887
-
createdAt: new Date().toISOString()
888
-
};
865
+
const emptyManifest = {
866
+
$type: 'place.wisp.fs',
867
+
site: siteName,
868
+
root: {
869
+
type: 'directory',
870
+
entries: []
871
+
},
872
+
fileCount: 0,
873
+
createdAt: new Date().toISOString()
874
+
};
889
875
890
-
const validationResult = validateRecord(emptyManifest);
891
-
if (!validationResult.success) {
892
-
throw new Error(`Invalid manifest: ${validationResult.error?.message || 'Validation failed'}`);
893
-
}
876
+
const validationResult = validateRecord(emptyManifest);
877
+
if (!validationResult.success) {
878
+
throw new Error(`Invalid manifest: ${validationResult.error?.message || 'Validation failed'}`);
879
+
}
894
880
895
-
const rkey = siteName;
881
+
const rkey = siteName;
896
882
897
-
const record = await agent.com.atproto.repo.putRecord({
898
-
repo: auth.did,
899
-
collection: 'place.wisp.fs',
900
-
rkey: rkey,
901
-
record: emptyManifest
902
-
});
883
+
const record = await agent.com.atproto.repo.putRecord({
884
+
repo: auth.did,
885
+
collection: 'place.wisp.fs',
886
+
rkey: rkey,
887
+
record: emptyManifest
888
+
});
903
889
904
-
await upsertSite(auth.did, rkey, siteName);
890
+
await upsertSite(auth.did, rkey, siteName);
905
891
906
-
return {
907
-
success: true,
908
-
uri: record.data.uri,
909
-
cid: record.data.cid,
910
-
fileCount: 0,
911
-
siteName
912
-
};
913
-
}
892
+
return {
893
+
success: true,
894
+
uri: record.data.uri,
895
+
cid: record.data.cid,
896
+
fileCount: 0,
897
+
siteName
898
+
};
899
+
}
914
900
915
-
// For file uploads, create a job and process in background
916
-
const fileArray = Array.isArray(files) ? files : [files];
917
-
const jobId = createUploadJob(auth.did, siteName, fileArray.length);
901
+
// For file uploads, create a job and process in background
902
+
const fileArray = Array.isArray(files) ? files : [files];
903
+
const jobId = createUploadJob(auth.did, siteName, fileArray.length);
918
904
919
-
// Create agent with OAuth session
920
-
const agent = new Agent((url, init) => auth.session.fetchHandler(url, init))
921
-
console.log('Agent created for DID:', auth.did);
922
-
console.log('Created upload job:', jobId);
905
+
// Create agent with OAuth session
906
+
const agent = new Agent((url, init) => auth.session.fetchHandler(url, init))
907
+
console.log('Agent created for DID:', auth.did);
908
+
console.log('Created upload job:', jobId);
923
909
924
-
// Start background processing (don't await)
925
-
processUploadInBackground(jobId, agent, auth.did, siteName, fileArray).catch(err => {
926
-
console.error('Background upload process failed:', err);
927
-
logger.error('Background upload process failed', err);
928
-
});
910
+
// Start background processing (don't await)
911
+
processUploadInBackground(jobId, agent, auth.did, siteName, fileArray).catch(err => {
912
+
console.error('Background upload process failed:', err);
913
+
logger.error('Background upload process failed', err);
914
+
});
929
915
930
-
// Return immediately with job ID
931
-
return {
932
-
success: true,
933
-
jobId,
934
-
message: 'Upload started. Connect to /wisp/upload-progress/' + jobId + ' for progress updates.'
935
-
};
936
-
} catch (error) {
937
-
console.error('=== UPLOAD ERROR ===');
938
-
console.error('Error details:', error);
939
-
logger.error('Upload error', error);
940
-
throw new Error(`Failed to upload files: ${error instanceof Error ? error.message : 'Unknown error'}`);
941
-
}
942
-
}
943
-
)
916
+
// Return immediately with job ID
917
+
return {
918
+
success: true,
919
+
jobId,
920
+
message: 'Upload started. Connect to /wisp/upload-progress/' + jobId + ' for progress updates.'
921
+
};
922
+
} catch (error) {
923
+
console.error('=== UPLOAD ERROR ===');
924
+
console.error('Error details:', error);
925
+
logger.error('Upload error', error);
926
+
throw new Error(`Failed to upload files: ${error instanceof Error ? error.message : 'Unknown error'}`);
927
+
}
928
+
}
929
+
)