The home site for me, now on its 3rd or 4th iteration.
#!/usr/bin/env bun

import fs from "fs";
import { glob } from "glob";
import cliProgress from "cli-progress";
import sharp from "sharp";

const UPLOAD_URL = "https://l4.dunkirk.sh/upload";
const AUTH_TOKEN = "crumpets";
const contentDir = process.argv[2] || "content";
const CONCURRENCY = 15; // Number of parallel uploads
const MAX_DIMENSION = 1920; // Max dimension for either width or height

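// Usage sketch (the filename is an assumption; use whatever this file is saved as):
//   bun rehost-images.ts [contentDir]
// Scans markdown under contentDir for images hosted on the hel1 CDN, shrinks
// anything larger than MAX_DIMENSION, re-uploads it to UPLOAD_URL, and rewrites
// the markdown links in place.
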
interface ImageMatch {
  filePath: string;
  originalUrl: string;
  line: number;
}

interface UploadResult {
  url: string;
  newUrl: string | null;
  error?: string;
}

async function uploadImage(
  url: string,
  progressBar?: cliProgress.SingleBar,
): Promise<{ newUrl: string | null; error?: string }> {
  try {
    const response = await fetch(url, {
      signal: AbortSignal.timeout(30000), // 30 second timeout
    });

    if (!response.ok) {
      progressBar?.increment();
      return {
        newUrl: null,
        error: `Download failed: ${response.status} ${response.statusText}`,
      };
    }

    const blob = await response.blob();
    let buffer = Buffer.from(await blob.arrayBuffer());

    // Get file extension from URL or content type
    const urlExt = url.split(".").pop()?.split("?")[0];
    const contentType = response.headers.get("content-type") || "";
    let ext = urlExt || "jpg";

    if (contentType.includes("png")) ext = "png";
    else if (contentType.includes("jpeg") || contentType.includes("jpg"))
      ext = "jpg";
    else if (contentType.includes("gif")) ext = "gif";
    else if (contentType.includes("webp")) ext = "webp";

    // Resize image if it's too large
    try {
      const image = sharp(buffer);
      const metadata = await image.metadata();

      if (metadata.width && metadata.height) {
        const maxDimension = Math.max(metadata.width, metadata.height);

        if (maxDimension > MAX_DIMENSION) {
          // Resize so the longest side is MAX_DIMENSION
          const isLandscape = metadata.width > metadata.height;
          buffer = await image
            .resize(
              isLandscape ? MAX_DIMENSION : undefined,
              isLandscape ? undefined : MAX_DIMENSION,
              {
                fit: "inside",
                withoutEnlargement: true,
              },
            )
            .toBuffer();
        }
      }
    } catch (resizeError) {
      // If resize fails, continue with original buffer
      console.error(`\nWarning: Failed to resize ${url}:`, resizeError);
    }

    const filename = `image_${Date.now()}_${Math.random().toString(36).slice(2)}.${ext}`;

    // Create form data
    const formData = new FormData();
    const file = new File([buffer], filename, { type: contentType });
    formData.append("file", file);

    const uploadResponse = await fetch(UPLOAD_URL, {
      method: "POST",
      headers: {
        Authorization: `Bearer ${AUTH_TOKEN}`,
      },
      body: formData,
      signal: AbortSignal.timeout(30000),
    });

    if (!uploadResponse.ok) {
      const errorText = await uploadResponse.text();
      progressBar?.increment();
      return {
        newUrl: null,
        error: `Upload failed: ${uploadResponse.status} ${uploadResponse.statusText} - ${errorText}`,
      };
    }

    const result = await uploadResponse.json();
    progressBar?.increment();
    return { newUrl: result.url };
  } catch (error) {
    progressBar?.increment();
    return {
      newUrl: null,
      error: `Exception: ${error instanceof Error ? error.message : String(error)}`,
    };
  }
}

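// Process `items` in sequential batches of `batchSize`, so at most `batchSize`
// uploads are in flight at any one time.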
async function processInBatches<T, R>(
  items: T[],
  batchSize: number,
  processor: (item: T) => Promise<R>,
): Promise<R[]> {
  const results: R[] = [];

  for (let i = 0; i < items.length; i += batchSize) {
    const batch = items.slice(i, i + batchSize);
    const batchResults = await Promise.all(batch.map(processor));
    results.push(...batchResults);
  }

  return results;
}

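// Scan a single markdown file and collect every hel1 CDN image reference,
// covering both the single-image and multi-image syntaxes.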
function findImages(filePath: string): ImageMatch[] {
  const content = fs.readFileSync(filePath, "utf8");
  const lines = content.split("\n");
  const images: ImageMatch[] = [];

  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];

    // Find all image URLs in standard markdown: ![alt](url) or ![alt](url){attrs}
    const singleImageRegex = /!\[([^\]]*)\]\(([^)]+)\)(?:\{[^}]+\})?/g;
    let match;

    while ((match = singleImageRegex.exec(line)) !== null) {
      const url = match[2];
      // Only process hel1 cdn URLs, skip gifs
      if (
        url.includes("hc-cdn.hel1.your-objectstorage.com") &&
        !url.toLowerCase().endsWith(".gif")
      ) {
        images.push({
          filePath,
          originalUrl: url,
          line: i + 1,
        });
      }
    }

    // Find all image URLs in multi-image format: !![alt1](url1)![alt2](url2){attrs}
    const multiImageRegex = /!!(\[([^\]]*)\]\(([^)]+)\))+(?:\{[^}]+\})?/g;
    while ((match = multiImageRegex.exec(line)) !== null) {
      const urlMatches = [...match[0].matchAll(/\[([^\]]*)\]\(([^)]+)\)/g)];
      for (const urlMatch of urlMatches) {
        const url = urlMatch[2];
        // Only process hel1 cdn URLs, skip gifs
        if (
          url.includes("hc-cdn.hel1.your-objectstorage.com") &&
          !url.toLowerCase().endsWith(".gif")
        ) {
          images.push({
            filePath,
            originalUrl: url,
            line: i + 1,
          });
        }
      }
    }
  }

  return images;
}

function replaceImageUrl(
  filePath: string,
  oldUrl: string,
  newUrl: string,
): void {
  let content = fs.readFileSync(filePath, "utf8");

  // Replace all occurrences of the old URL with the new one
  // This handles both single and multi-image formats
  content = content.replaceAll(oldUrl, newUrl);

  fs.writeFileSync(filePath, content);
}

async function main() {
  const files = glob.sync(`${contentDir}/**/*.md`);
  const allImages: ImageMatch[] = [];

  // Find all images across all files
  for (const file of files) {
    const images = findImages(file);
    allImages.push(...images);
  }

  if (allImages.length === 0) {
    console.log("No external images found to rehost.");
    return;
  }

  const uniqueUrls = [...new Set(allImages.map((img) => img.originalUrl))];
  console.log(
    `Found ${uniqueUrls.length} unique images to rehost (${allImages.length} total references)\n`,
  );

  // Create progress bar
  const progressBar = new cliProgress.SingleBar({
    format:
      "Uploading |{bar}| {percentage}% | {value}/{total} images | ETA: {eta}s",
    barCompleteChar: "\u2588",
    barIncompleteChar: "\u2591",
    hideCursor: true,
  });

  progressBar.start(uniqueUrls.length, 0);

  // Process URLs in parallel with concurrency limit
  const urlMap = new Map<string, string>();
  const failedUploads: { url: string; error: string }[] = [];

  const results = await processInBatches(
    uniqueUrls,
    CONCURRENCY,
    async (url) => {
      const result = await uploadImage(url, progressBar);
      return { url, ...result };
    },
  );

  progressBar.stop();

  // Build URL map and collect errors
  for (const { url, newUrl, error } of results) {
    if (newUrl) {
      urlMap.set(url, newUrl);
    } else if (error) {
      failedUploads.push({ url, error });
    }
  }

  const successCount = urlMap.size;
  const failCount = uniqueUrls.length - successCount;

  if (failCount > 0) {
    console.log(
      `\n⚠️ Failed to upload ${failCount} image${failCount === 1 ? "" : "s"}`,
    );

    // Group errors by type
    const errorGroups = new Map<string, string[]>();
    for (const { url, error } of failedUploads) {
      const errorType = error.split(":")[0];
      if (!errorGroups.has(errorType)) {
        errorGroups.set(errorType, []);
      }
      errorGroups.get(errorType)!.push(url);
    }

    console.log("\nError summary:");
    for (const [errorType, urls] of errorGroups.entries()) {
      console.log(
        ` ${errorType}: ${urls.length} image${urls.length === 1 ? "" : "s"}`,
      );
      if (urls.length <= 3) {
        urls.forEach((url) => console.log(` - ${url}`));
      } else {
        urls.slice(0, 2).forEach((url) => console.log(` - ${url}`));
        console.log(` ... and ${urls.length - 2} more`);
      }
    }
  }

  if (urlMap.size === 0) {
    console.log("\n❌ No images were successfully uploaded.");
    return;
  }

  // Replace URLs in files
  console.log(
    `\n✓ Successfully uploaded ${successCount} image${successCount === 1 ? "" : "s"}`,
  );
  console.log("\nUpdating markdown files...");

  const updatedFiles = new Set<string>();

  for (const [oldUrl, newUrl] of urlMap.entries()) {
    const affectedImages = allImages.filter(
      (img) => img.originalUrl === oldUrl,
    );
    for (const img of affectedImages) {
      replaceImageUrl(img.filePath, oldUrl, newUrl);
      updatedFiles.add(img.filePath);
    }
  }

  console.log(
    `✓ Updated ${updatedFiles.size} file${updatedFiles.size === 1 ? "" : "s"}\n`,
  );
}

main();