Monorepo for wisp.place, a static site hosting service built on top of the AT Protocol.

Check the manifest and calculate CIDs, then compare to decide whether blobs need to be re-uploaded

Authored and committed by nekomimi.pet (b3f9896c, 19aba496)
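In outline, the commit makes uploads incremental: before pushing a blob, the route fetches the site's previous place.wisp.fs record, computes the CID of each locally prepared payload, and skips the upload whenever the old manifest already points at an identical blob. A minimal sketch of that check, using the computeCID and extractBlobMap helpers this commit adds to src/lib/wisp-utils.ts (the wrapper function itself is illustrative, not part of the diff):

import { computeCID, extractBlobMap } from '../lib/wisp-utils';
import type { Directory } from '../lexicons/types/place/wisp/fs';

// True when the previous manifest already holds a blob whose CID matches the
// payload we are about to upload (payload = gzipped, base64-encoded bytes).
function blobCanBeReused(previousRoot: Directory, path: string, payload: Buffer): boolean {
  const existing = extractBlobMap(previousRoot).get(path); // path -> { blobRef, cid }
  return existing !== undefined && existing.cid === computeCID(payload);
}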

+18 -1
bun.lock
··· 25 "elysia": "latest", 26 "iron-session": "^8.0.4", 27 "lucide-react": "^0.546.0", 28 "react": "^19.2.0", 29 "react-dom": "^19.2.0", 30 "react-shiki": "^0.9.0", ··· 641 642 "ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], 643 644 - "multiformats": ["multiformats@9.9.0", "", {}, "sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg=="], 645 646 "negotiator": ["negotiator@0.6.3", "", {}, "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="], 647 ··· 857 858 "zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="], 859 860 "@radix-ui/react-collection/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="], 861 862 "@radix-ui/react-dialog/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="], ··· 882 "send/encodeurl": ["encodeurl@1.0.2", "", {}, "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w=="], 883 884 "send/ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], 885 886 "@tokenizer/inflate/debug/ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], 887
··· 25 "elysia": "latest", 26 "iron-session": "^8.0.4", 27 "lucide-react": "^0.546.0", 28 + "multiformats": "^13.4.1", 29 "react": "^19.2.0", 30 "react-dom": "^19.2.0", 31 "react-shiki": "^0.9.0", ··· 642 643 "ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], 644 645 + "multiformats": ["multiformats@13.4.1", "", {}, "sha512-VqO6OSvLrFVAYYjgsr8tyv62/rCQhPgsZUXLTqoFLSgdkgiUYKYeArbt1uWLlEpkjxQe+P0+sHlbPEte1Bi06Q=="], 646 647 "negotiator": ["negotiator@0.6.3", "", {}, "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="], 648 ··· 858 859 "zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="], 860 861 + "@atproto/api/multiformats": ["multiformats@9.9.0", "", {}, "sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg=="], 862 + 863 + "@atproto/common/multiformats": ["multiformats@9.9.0", "", {}, "sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg=="], 864 + 865 + "@atproto/common-web/multiformats": ["multiformats@9.9.0", "", {}, "sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg=="], 866 + 867 + "@atproto/jwk/multiformats": ["multiformats@9.9.0", "", {}, "sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg=="], 868 + 869 + "@atproto/lexicon/multiformats": ["multiformats@9.9.0", "", {}, "sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg=="], 870 + 871 + "@atproto/oauth-client/multiformats": ["multiformats@9.9.0", "", {}, "sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg=="], 872 + 873 + "@ipld/dag-cbor/multiformats": ["multiformats@9.9.0", "", {}, "sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg=="], 874 + 875 "@radix-ui/react-collection/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="], 876 877 "@radix-ui/react-dialog/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="], ··· 897 "send/encodeurl": ["encodeurl@1.0.2", "", {}, "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w=="], 898 899 "send/ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], 900 + 901 + "uint8arrays/multiformats": ["multiformats@9.9.0", "", {}, "sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg=="], 902 903 "@tokenizer/inflate/debug/ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], 904
+1
package.json
··· 29 "elysia": "latest", 30 "iron-session": "^8.0.4", 31 "lucide-react": "^0.546.0", 32 "react": "^19.2.0", 33 "react-dom": "^19.2.0", 34 "react-shiki": "^0.9.0",
··· 29 "elysia": "latest", 30 "iron-session": "^8.0.4", 31 "lucide-react": "^0.546.0", 32 + "multiformats": "^13.4.1", 33 "react": "^19.2.0", 34 "react-dom": "^19.2.0", 35 "react-shiki": "^0.9.0",
+2 -2
public/editor/editor.tsx
···
  <div className="p-4 bg-muted/30 rounded-lg border-l-4 border-yellow-500/50">
    <div className="flex items-start gap-2">
-     <AlertCircle className="w-4 h-4 text-yellow-600 dark:text-yellow-400 mt-0.5 flex-shrink-0" />
+     <AlertCircle className="w-4 h-4 text-yellow-600 dark:text-yellow-400 mt-0.5 shrink-0" />
      <div className="flex-1 space-y-1">
        <p className="text-xs font-semibold text-yellow-600 dark:text-yellow-400">
          Note about sites.wisp.place URLs
···
  {skippedFiles.length > 0 && (
    <div className="p-4 bg-yellow-500/10 border border-yellow-500/20 rounded-lg">
      <div className="flex items-start gap-2 text-yellow-600 dark:text-yellow-400 mb-2">
-       <AlertCircle className="w-4 h-4 mt-0.5 flex-shrink-0" />
+       <AlertCircle className="w-4 h-4 mt-0.5 shrink-0" />
        <div className="flex-1">
          <span className="font-medium">
            {skippedFiles.length} file{skippedFiles.length > 1 ? 's' : ''} skipped
-7
src/lib/db.ts
···
const stateStore = {
  async set(key: string, data: any) {
-   console.debug('[stateStore] set', key)
    const expiresAt = Math.floor(Date.now() / 1000) + STATE_TIMEOUT;
    await db`
      INSERT INTO oauth_states (key, data, created_at, expires_at)
···
    `;
  },
  async get(key: string) {
-   console.debug('[stateStore] get', key)
    const now = Math.floor(Date.now() / 1000);
    const result = await db`
      SELECT data, expires_at
···
    // Check if expired
    const expiresAt = Number(result[0].expires_at);
    if (expiresAt && now > expiresAt) {
-     console.debug('[stateStore] State expired, deleting', key);
      await db`DELETE FROM oauth_states WHERE key = ${key}`;
      return undefined;
    }
···
    return JSON.parse(result[0].data);
  },
  async del(key: string) {
-   console.debug('[stateStore] del', key)
    await db`DELETE FROM oauth_states WHERE key = ${key}`;
  }
};

const sessionStore = {
  async set(sub: string, data: any) {
-   console.debug('[sessionStore] set', sub)
    const expiresAt = Math.floor(Date.now() / 1000) + SESSION_TIMEOUT;
    await db`
      INSERT INTO oauth_sessions (sub, data, updated_at, expires_at)
···
    `;
  },
  async get(sub: string) {
-   console.debug('[sessionStore] get', sub)
    const now = Math.floor(Date.now() / 1000);
    const result = await db`
      SELECT data, expires_at
···
    return JSON.parse(result[0].data);
  },
  async del(sub: string) {
-   console.debug('[sessionStore] del', sub)
    await db`DELETE FROM oauth_sessions WHERE sub = ${sub}`;
  }
};
-1
src/lib/oauth-client.ts
···
    `;
  },
  async get(sub: string) {
-   console.debug('[sessionStore] get', sub)
    const now = Math.floor(Date.now() / 1000);
    const result = await db`
      SELECT data, expires_at
+360
src/lib/wisp-utils.test.ts
···
  processUploadedFiles,
  createManifest,
  updateFileBlobs,
+ computeCID,
+ extractBlobMap,
  type UploadedFile,
  type FileUploadResult,
} from './wisp-utils'
···
    }
  })
})
+
+ describe('computeCID', () => {
+   test('should compute CID for gzipped+base64 encoded content', () => {
+     // This simulates the actual flow: gzip -> base64 -> compute CID
+     const originalContent = Buffer.from('Hello, World!')
+     const gzipped = compressFile(originalContent)
+     const base64Content = Buffer.from(gzipped.toString('base64'), 'binary')
+
+     const cid = computeCID(base64Content)
+
+     // CID should be a valid CIDv1 string starting with 'bafkrei'
+     expect(cid).toMatch(/^bafkrei[a-z0-9]+$/)
+     expect(cid.length).toBeGreaterThan(10)
+   })
+
+   test('should compute deterministic CIDs for identical content', () => {
+     const content = Buffer.from('Test content for CID calculation')
+     const gzipped = compressFile(content)
+     const base64Content = Buffer.from(gzipped.toString('base64'), 'binary')
+
+     const cid1 = computeCID(base64Content)
+     const cid2 = computeCID(base64Content)
+
+     expect(cid1).toBe(cid2)
+   })
+
+   test('should compute different CIDs for different content', () => {
+     const content1 = Buffer.from('Content A')
+     const content2 = Buffer.from('Content B')
+
+     const gzipped1 = compressFile(content1)
+     const gzipped2 = compressFile(content2)
+
+     const base64Content1 = Buffer.from(gzipped1.toString('base64'), 'binary')
+     const base64Content2 = Buffer.from(gzipped2.toString('base64'), 'binary')
+
+     const cid1 = computeCID(base64Content1)
+     const cid2 = computeCID(base64Content2)
+
+     expect(cid1).not.toBe(cid2)
+   })
+
+   test('should handle empty content', () => {
+     const emptyContent = Buffer.from('')
+     const gzipped = compressFile(emptyContent)
+     const base64Content = Buffer.from(gzipped.toString('base64'), 'binary')
+
+     const cid = computeCID(base64Content)
+
+     expect(cid).toMatch(/^bafkrei[a-z0-9]+$/)
+   })
+
+   test('should compute same CID as PDS for base64-encoded content', () => {
+     // Test that binary encoding produces correct bytes for CID calculation
+     const testContent = Buffer.from('<!DOCTYPE html><html><body>Hello</body></html>')
+     const gzipped = compressFile(testContent)
+     const base64Content = Buffer.from(gzipped.toString('base64'), 'binary')
+
+     // Compute CID twice to ensure consistency
+     const cid1 = computeCID(base64Content)
+     const cid2 = computeCID(base64Content)
+
+     expect(cid1).toBe(cid2)
+     expect(cid1).toMatch(/^bafkrei/)
+   })
+
+   test('should use binary encoding for base64 strings', () => {
+     // This test verifies we're using the correct encoding method
+     // For base64 strings, 'binary' encoding ensures each character becomes exactly one byte
+     const content = Buffer.from('Test content')
+     const gzipped = compressFile(content)
+     const base64String = gzipped.toString('base64')
+
+     // Using binary encoding (what we use in production)
+     const base64Content = Buffer.from(base64String, 'binary')
+
+     // Verify the length matches the base64 string length
+     expect(base64Content.length).toBe(base64String.length)
+
+     // Verify CID is computed correctly
+     const cid = computeCID(base64Content)
+     expect(cid).toMatch(/^bafkrei/)
+   })
+ })
+
+ describe('extractBlobMap', () => {
+   test('should extract blob map from flat directory structure', () => {
+     const mockCid = CID.parse(TEST_CID_STRING)
+     const mockBlob = new BlobRef(mockCid, 'text/html', 100)
+
+     const directory: Directory = {
+       $type: 'place.wisp.fs#directory',
+       type: 'directory',
+       entries: [
+         {
+           name: 'index.html',
+           node: {
+             $type: 'place.wisp.fs#file',
+             type: 'file',
+             blob: mockBlob,
+           },
+         },
+       ],
+     }
+
+     const blobMap = extractBlobMap(directory)
+
+     expect(blobMap.size).toBe(1)
+     expect(blobMap.has('index.html')).toBe(true)
+
+     const entry = blobMap.get('index.html')
+     expect(entry?.cid).toBe(TEST_CID_STRING)
+     expect(entry?.blobRef).toBe(mockBlob)
+   })
+
+   test('should extract blob map from nested directory structure', () => {
+     const mockCid1 = CID.parse(TEST_CID_STRING)
+     const mockCid2 = CID.parse('bafkreiabaduc3573q6snt2xgxzpglwuaojkzflocncrh2vj5j3jykdpqhi')
+
+     const mockBlob1 = new BlobRef(mockCid1, 'text/html', 100)
+     const mockBlob2 = new BlobRef(mockCid2, 'text/css', 50)
+
+     const directory: Directory = {
+       $type: 'place.wisp.fs#directory',
+       type: 'directory',
+       entries: [
+         {
+           name: 'index.html',
+           node: {
+             $type: 'place.wisp.fs#file',
+             type: 'file',
+             blob: mockBlob1,
+           },
+         },
+         {
+           name: 'assets',
+           node: {
+             $type: 'place.wisp.fs#directory',
+             type: 'directory',
+             entries: [
+               {
+                 name: 'styles.css',
+                 node: {
+                   $type: 'place.wisp.fs#file',
+                   type: 'file',
+                   blob: mockBlob2,
+                 },
+               },
+             ],
+           },
+         },
+       ],
+     }
+
+     const blobMap = extractBlobMap(directory)
+
+     expect(blobMap.size).toBe(2)
+     expect(blobMap.has('index.html')).toBe(true)
+     expect(blobMap.has('assets/styles.css')).toBe(true)
+
+     expect(blobMap.get('index.html')?.cid).toBe(TEST_CID_STRING)
+     expect(blobMap.get('assets/styles.css')?.cid).toBe('bafkreiabaduc3573q6snt2xgxzpglwuaojkzflocncrh2vj5j3jykdpqhi')
+   })
+
+   test('should handle deeply nested directory structures', () => {
+     const mockCid = CID.parse(TEST_CID_STRING)
+     const mockBlob = new BlobRef(mockCid, 'text/javascript', 200)
+
+     const directory: Directory = {
+       $type: 'place.wisp.fs#directory',
+       type: 'directory',
+       entries: [
+         {
+           name: 'src',
+           node: {
+             $type: 'place.wisp.fs#directory',
+             type: 'directory',
+             entries: [
+               {
+                 name: 'lib',
+                 node: {
+                   $type: 'place.wisp.fs#directory',
+                   type: 'directory',
+                   entries: [
+                     {
+                       name: 'utils.js',
+                       node: {
+                         $type: 'place.wisp.fs#file',
+                         type: 'file',
+                         blob: mockBlob,
+                       },
+                     },
+                   ],
+                 },
+               },
+             ],
+           },
+         },
+       ],
+     }
+
+     const blobMap = extractBlobMap(directory)
+
+     expect(blobMap.size).toBe(1)
+     expect(blobMap.has('src/lib/utils.js')).toBe(true)
+     expect(blobMap.get('src/lib/utils.js')?.cid).toBe(TEST_CID_STRING)
+   })
+
+   test('should handle empty directory', () => {
+     const directory: Directory = {
+       $type: 'place.wisp.fs#directory',
+       type: 'directory',
+       entries: [],
+     }
+
+     const blobMap = extractBlobMap(directory)
+
+     expect(blobMap.size).toBe(0)
+   })
+
+   test('should correctly extract CID from BlobRef instances (not plain objects)', () => {
+     // This test verifies the fix: AT Protocol SDK returns BlobRef instances,
+     // not plain objects with $type and $link properties
+     const mockCid = CID.parse(TEST_CID_STRING)
+     const mockBlob = new BlobRef(mockCid, 'application/octet-stream', 500)
+
+     const directory: Directory = {
+       $type: 'place.wisp.fs#directory',
+       type: 'directory',
+       entries: [
+         {
+           name: 'test.bin',
+           node: {
+             $type: 'place.wisp.fs#file',
+             type: 'file',
+             blob: mockBlob,
+           },
+         },
+       ],
+     }
+
+     const blobMap = extractBlobMap(directory)
+
+     // The fix: we call .toString() on the CID instance instead of accessing $link
+     expect(blobMap.get('test.bin')?.cid).toBe(TEST_CID_STRING)
+     expect(blobMap.get('test.bin')?.blobRef.ref.toString()).toBe(TEST_CID_STRING)
+   })
+
+   test('should handle multiple files in same directory', () => {
+     const mockCid1 = CID.parse(TEST_CID_STRING)
+     const mockCid2 = CID.parse('bafkreiabaduc3573q6snt2xgxzpglwuaojkzflocncrh2vj5j3jykdpqhi')
+     const mockCid3 = CID.parse('bafkreieb3ixgchss44kw7xiavnkns47emdfsqbhcdfluo3p6n3o53fl3vq')
+
+     const mockBlob1 = new BlobRef(mockCid1, 'image/png', 1000)
+     const mockBlob2 = new BlobRef(mockCid2, 'image/png', 2000)
+     const mockBlob3 = new BlobRef(mockCid3, 'image/png', 3000)
+
+     const directory: Directory = {
+       $type: 'place.wisp.fs#directory',
+       type: 'directory',
+       entries: [
+         {
+           name: 'images',
+           node: {
+             $type: 'place.wisp.fs#directory',
+             type: 'directory',
+             entries: [
+               {
+                 name: 'logo.png',
+                 node: {
+                   $type: 'place.wisp.fs#file',
+                   type: 'file',
+                   blob: mockBlob1,
+                 },
+               },
+               {
+                 name: 'banner.png',
+                 node: {
+                   $type: 'place.wisp.fs#file',
+                   type: 'file',
+                   blob: mockBlob2,
+                 },
+               },
+               {
+                 name: 'icon.png',
+                 node: {
+                   $type: 'place.wisp.fs#file',
+                   type: 'file',
+                   blob: mockBlob3,
+                 },
+               },
+             ],
+           },
+         },
+       ],
+     }
+
+     const blobMap = extractBlobMap(directory)
+
+     expect(blobMap.size).toBe(3)
+     expect(blobMap.has('images/logo.png')).toBe(true)
+     expect(blobMap.has('images/banner.png')).toBe(true)
+     expect(blobMap.has('images/icon.png')).toBe(true)
+   })
+
+   test('should handle mixed directory and file structure', () => {
+     const mockCid1 = CID.parse(TEST_CID_STRING)
+     const mockCid2 = CID.parse('bafkreiabaduc3573q6snt2xgxzpglwuaojkzflocncrh2vj5j3jykdpqhi')
+     const mockCid3 = CID.parse('bafkreieb3ixgchss44kw7xiavnkns47emdfsqbhcdfluo3p6n3o53fl3vq')
+
+     const directory: Directory = {
+       $type: 'place.wisp.fs#directory',
+       type: 'directory',
+       entries: [
+         {
+           name: 'index.html',
+           node: {
+             $type: 'place.wisp.fs#file',
+             type: 'file',
+             blob: new BlobRef(mockCid1, 'text/html', 100),
+           },
+         },
+         {
+           name: 'assets',
+           node: {
+             $type: 'place.wisp.fs#directory',
+             type: 'directory',
+             entries: [
+               {
+                 name: 'styles.css',
+                 node: {
+                   $type: 'place.wisp.fs#file',
+                   type: 'file',
+                   blob: new BlobRef(mockCid2, 'text/css', 50),
+                 },
+               },
+             ],
+           },
+         },
+         {
+           name: 'README.md',
+           node: {
+             $type: 'place.wisp.fs#file',
+             type: 'file',
+             blob: new BlobRef(mockCid3, 'text/markdown', 200),
+           },
+         },
+       ],
+     }
+
+     const blobMap = extractBlobMap(directory)
+
+     expect(blobMap.size).toBe(3)
+     expect(blobMap.has('index.html')).toBe(true)
+     expect(blobMap.has('assets/styles.css')).toBe(true)
+     expect(blobMap.has('README.md')).toBe(true)
+   })
+ })
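One detail worth calling out from the 'BlobRef instances (not plain objects)' case: in record JSON a blob is represented as { $type: 'blob', ref: { $link: '...' }, mimeType, size }, but the AT Protocol SDK hydrates that into a BlobRef whose ref is a CID instance, so the string form has to come from ref.toString() rather than a $link property. A minimal sketch (assuming BlobRef is imported from @atproto/api, which re-exports it from @atproto/lexicon; the CID literal is one of the fixtures above):

import { BlobRef } from '@atproto/api';
import { CID } from 'multiformats/cid';

const cid = CID.parse('bafkreiabaduc3573q6snt2xgxzpglwuaojkzflocncrh2vj5j3jykdpqhi');
const blob = new BlobRef(cid, 'text/html', 100);

console.log(blob.ref.toString());     // the CID string extractBlobMap records
console.log((blob as any).ref.$link); // undefined: ref is a CID instance, not { $link: ... }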
+65 -2
src/lib/wisp-utils.ts
···
  import type { Record, Directory, File, Entry } from "../lexicons/types/place/wisp/fs";
  import { validateRecord } from "../lexicons/types/place/wisp/fs";
  import { gzipSync } from 'zlib';
+ import { CID } from 'multiformats/cid';
+ import { sha256 } from 'multiformats/hashes/sha2';
+ import * as raw from 'multiformats/codecs/raw';
+ import { createHash } from 'crypto';
+ import * as mf from 'multiformats';

  export interface UploadedFile {
    name: string;
···
  }

  /**
-  * Compress a file using gzip
+  * Compress a file using gzip with deterministic output
+  * Sets mtime to 0 to ensure identical content produces identical compressed output
   */
  export function compressFile(content: Buffer): Buffer {
-   return gzipSync(content, { level: 9 });
+   return gzipSync(content, {
+     level: 9,
+     mtime: 0 // Fixed timestamp for deterministic compression
+   });
  }

  /**
···
    const directoryMap = new Map<string, UploadedFile[]>();

    for (const file of files) {
+     // Skip undefined/null files (defensive)
+     if (!file || !file.name) {
+       console.error('Skipping undefined or invalid file in processUploadedFiles');
+       continue;
+     }
+
      // Remove any base folder name from the path
      const normalizedPath = file.name.replace(/^[^\/]*\//, '');
      const parts = normalizedPath.split('/');
···
    return result;
  }
+
+ /**
+  * Compute CID (Content Identifier) for blob content
+  * Uses the same algorithm as AT Protocol: CIDv1 with raw codec and SHA-256
+  * Based on @atproto/common/src/ipld.ts sha256RawToCid implementation
+  */
+ export function computeCID(content: Buffer): string {
+   // Use node crypto to compute sha256 hash (same as AT Protocol)
+   const hash = createHash('sha256').update(content).digest();
+   // Create digest object from hash bytes
+   const digest = mf.digest.create(sha256.code, hash);
+   // Create CIDv1 with raw codec
+   const cid = CID.createV1(raw.code, digest);
+   return cid.toString();
+ }
+
+ /**
+  * Extract blob information from a directory tree
+  * Returns a map of file paths to their blob refs and CIDs
+  */
+ export function extractBlobMap(
+   directory: Directory,
+   currentPath: string = ''
+ ): Map<string, { blobRef: BlobRef; cid: string }> {
+   const blobMap = new Map<string, { blobRef: BlobRef; cid: string }>();
+
+   for (const entry of directory.entries) {
+     const fullPath = currentPath ? `${currentPath}/${entry.name}` : entry.name;
+
+     if ('type' in entry.node && entry.node.type === 'file') {
+       const fileNode = entry.node as File;
+       // AT Protocol SDK returns BlobRef class instances, not plain objects
+       // The ref is a CID instance that can be converted to string
+       if (fileNode.blob && fileNode.blob.ref) {
+         const cidString = fileNode.blob.ref.toString();
+         blobMap.set(fullPath, {
+           blobRef: fileNode.blob,
+           cid: cidString
+         });
+       }
+     } else if ('type' in entry.node && entry.node.type === 'directory') {
+       const subMap = extractBlobMap(entry.node as Directory, fullPath);
+       subMap.forEach((value, key) => blobMap.set(key, value));
+     }
+   }
+
+   return blobMap;
+ }
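The mtime: 0 option is what makes the CID comparison meaningful: gzip embeds a timestamp in its header, so without a fixed mtime the same input compressed at two different moments yields different bytes, different CIDs, and every blob would look changed. A quick demonstration of the now-deterministic gzip -> base64 -> CID pipeline (illustrative usage, mirroring what the upload route does):

import { compressFile, computeCID } from './wisp-utils';

const page = Buffer.from('<!DOCTYPE html><html><body>Hello</body></html>');

// Two independent compressions of the same content now produce identical bytes...
const a = computeCID(Buffer.from(compressFile(page).toString('base64'), 'binary'));
const b = computeCID(Buffer.from(compressFile(page).toString('base64'), 'binary'));

// ...and therefore identical CIDs, so the route can safely reuse the existing blob.
console.assert(a === b);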
+130 -10
src/routes/wisp.ts
···
  createManifest,
  updateFileBlobs,
  shouldCompressFile,
- compressFile
+ compressFile,
+ computeCID,
+ extractBlobMap
} from '../lib/wisp-utils'
import { upsertSite } from '../lib/db'
import { logger } from '../lib/observability'
···
  siteName: string;
  files: File | File[]
};

+ console.log('=== UPLOAD FILES START ===');
+ console.log('Site name:', siteName);
+ console.log('Files received:', Array.isArray(files) ? files.length : 'single file');

try {
  if (!siteName) {
···
  // Create agent with OAuth session
  const agent = new Agent((url, init) => auth.session.fetchHandler(url, init))
+ console.log('Agent created for DID:', auth.did);
+
+ // Try to fetch existing record to enable incremental updates
+ let existingBlobMap = new Map<string, { blobRef: any; cid: string }>();
+ console.log('Attempting to fetch existing record...');
+ try {
+   const rkey = siteName;
+   const existingRecord = await agent.com.atproto.repo.getRecord({
+     repo: auth.did,
+     collection: 'place.wisp.fs',
+     rkey: rkey
+   });
+   console.log('Existing record found!');
+
+   if (existingRecord.data.value && typeof existingRecord.data.value === 'object' && 'root' in existingRecord.data.value) {
+     const manifest = existingRecord.data.value as any;
+     existingBlobMap = extractBlobMap(manifest.root);
+     console.log(`Found existing manifest with ${existingBlobMap.size} files for incremental update`);
+     logger.info(`Found existing manifest with ${existingBlobMap.size} files for incremental update`);
+   }
+ } catch (error: any) {
+   console.log('No existing record found or error:', error?.message || error);
+   // Record doesn't exist yet, this is a new site
+   if (error?.status !== 400 && error?.error !== 'RecordNotFound') {
+     logger.warn('Failed to fetch existing record, proceeding with full upload', error);
+   }
+ }

  // Convert File objects to UploadedFile format
  // Elysia gives us File objects directly, handle both single file and array
···
  const uploadedFiles: UploadedFile[] = [];
  const skippedFiles: Array<{ name: string; reason: string }> = [];

-
+ console.log('Processing files, count:', fileArray.length);

  for (let i = 0; i < fileArray.length; i++) {
    const file = fileArray[i];
+   console.log(`Processing file ${i + 1}/${fileArray.length}:`, file.name, file.size, 'bytes');

    // Skip files that are too large (limit to 100MB per file)
    const maxSize = MAX_FILE_SIZE; // 100MB
···
    // Compress and base64 encode ALL files
    const compressedContent = compressFile(originalContent);
    // Base64 encode the gzipped content to prevent PDS content sniffing
-   const base64Content = Buffer.from(compressedContent.toString('base64'), 'utf-8');
+   // Convert base64 string to bytes using binary encoding (each char becomes exactly one byte)
+   // This is what PDS receives and computes CID on
+   const base64Content = Buffer.from(compressedContent.toString('base64'), 'binary');
    const compressionRatio = (compressedContent.length / originalContent.length * 100).toFixed(1);
+   console.log(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${base64Content.length} bytes`);
    logger.info(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${base64Content.length} bytes`);

    uploadedFiles.push({
      name: file.name,
-     content: base64Content,
+     content: base64Content, // This is the gzipped+base64 content that will be uploaded and CID-computed
      mimeType: originalMimeType,
      size: base64Content.length,
      compressed: true,
···
  }

  // Process files into directory structure
- const { directory, fileCount } = processUploadedFiles(uploadedFiles);
+ console.log('Processing uploaded files into directory structure...');
+ console.log('uploadedFiles array length:', uploadedFiles.length);
+ console.log('uploadedFiles contents:', uploadedFiles.map((f, i) => `${i}: ${f?.name || 'UNDEFINED'}`));
+
+ // Filter out any undefined/null/invalid entries (defensive)
+ const validUploadedFiles = uploadedFiles.filter((f, i) => {
+   if (!f) {
+     console.error(`Filtering out undefined/null file at index ${i}`);
+     return false;
+   }
+   if (!f.name) {
+     console.error(`Filtering out file with no name at index ${i}:`, f);
+     return false;
+   }
+   if (!f.content) {
+     console.error(`Filtering out file with no content at index ${i}:`, f.name);
+     return false;
+   }
+   return true;
+ });
+ if (validUploadedFiles.length !== uploadedFiles.length) {
+   console.warn(`Filtered out ${uploadedFiles.length - validUploadedFiles.length} invalid files`);
+ }
+ console.log('validUploadedFiles length:', validUploadedFiles.length);
+
+ const { directory, fileCount } = processUploadedFiles(validUploadedFiles);
+ console.log('Directory structure created, file count:', fileCount);

- // Upload files as blobs in parallel
+ // Upload files as blobs in parallel (or reuse existing blobs with matching CIDs)
+ console.log('Starting blob upload/reuse phase...');
  // For compressed files, we upload as octet-stream and store the original MIME type in metadata
  // For text/html files, we also use octet-stream as a workaround for PDS image pipeline issues
- const uploadPromises = uploadedFiles.map(async (file, i) => {
+ const uploadPromises = validUploadedFiles.map(async (file, i) => {
    try {
+     // Skip undefined files (shouldn't happen after filter, but defensive)
+     if (!file || !file.name) {
+       console.error(`ERROR: Undefined file at index ${i} in validUploadedFiles!`);
+       throw new Error(`Undefined file at index ${i}`);
+     }
+
+     // Compute CID for this file to check if it already exists
+     // Note: file.content is already gzipped+base64 encoded
+     const fileCID = computeCID(file.content);
+
+     // Normalize the file path for comparison (remove base folder prefix like "cobblemon/")
+     const normalizedPath = file.name.replace(/^[^\/]*\//, '');
+
+     // Check if we have an existing blob with the same CID
+     // Try both the normalized path and the full path
+     const existingBlob = existingBlobMap.get(normalizedPath) || existingBlobMap.get(file.name);
+
+     if (existingBlob && existingBlob.cid === fileCID) {
+       // Reuse existing blob - no need to upload
+       logger.info(`[File Upload] Reusing existing blob for: ${file.name} (CID: ${fileCID})`);
+
+       return {
+         result: {
+           hash: existingBlob.cid,
+           blobRef: existingBlob.blobRef,
+           ...(file.compressed && {
+             encoding: 'gzip' as const,
+             mimeType: file.originalMimeType || file.mimeType,
+             base64: true
+           })
+         },
+         filePath: file.name,
+         sentMimeType: file.mimeType,
+         returnedMimeType: existingBlob.blobRef.mimeType,
+         reused: true
+       };
+     }
+
+     // File is new or changed - upload it
      // If compressed, always upload as octet-stream
      // Otherwise, workaround: PDS incorrectly processes text/html through image pipeline
      const uploadMimeType = file.compressed || file.mimeType.startsWith('text/html')
···
        : file.mimeType;

      const compressionInfo = file.compressed ? ' (gzipped)' : '';
-     logger.info(`[File Upload] Uploading file: ${file.name} (original: ${file.mimeType}, sending as: ${uploadMimeType}, ${file.size} bytes${compressionInfo})`);
+     logger.info(`[File Upload] Uploading new/changed file: ${file.name} (original: ${file.mimeType}, sending as: ${uploadMimeType}, ${file.size} bytes${compressionInfo}, CID: ${fileCID})`);

      const uploadResult = await agent.com.atproto.repo.uploadBlob(
        file.content,
···
        },
        filePath: file.name,
        sentMimeType: file.mimeType,
-       returnedMimeType: returnedBlobRef.mimeType
+       returnedMimeType: returnedBlobRef.mimeType,
+       reused: false
      };
    } catch (uploadError) {
      logger.error('Upload failed for file', uploadError);
···
  // Wait for all uploads to complete
  const uploadedBlobs = await Promise.all(uploadPromises);

+ // Count reused vs uploaded blobs
+ const reusedCount = uploadedBlobs.filter(b => (b as any).reused).length;
+ const uploadedCount = uploadedBlobs.filter(b => !(b as any).reused).length;
+ console.log(`Blob statistics: ${reusedCount} reused, ${uploadedCount} uploaded, ${uploadedBlobs.length} total`);
+ logger.info(`Blob statistics: ${reusedCount} reused, ${uploadedCount} uploaded, ${uploadedBlobs.length} total`);
+
  // Extract results and file paths in correct order
  const uploadResults: FileUploadResult[] = uploadedBlobs.map(blob => blob.result);
  const filePaths: string[] = uploadedBlobs.map(blob => blob.filePath);

  // Update directory with file blobs
+ console.log('Updating directory with blob references...');
  const updatedDirectory = updateFileBlobs(directory, uploadResults, filePaths);

  // Create manifest
+ console.log('Creating manifest...');
  const manifest = createManifest(siteName, updatedDirectory, fileCount);
+ console.log('Manifest created successfully');

  // Use site name as rkey
  const rkey = siteName;

  let record;
  try {
+   console.log('Putting record to PDS with rkey:', rkey);
    record = await agent.com.atproto.repo.putRecord({
      repo: auth.did,
      collection: 'place.wisp.fs',
      rkey: rkey,
      record: manifest
    });
+   console.log('Record successfully created on PDS:', record.data.uri);
  } catch (putRecordError: any) {
+   console.error('FAILED to create record on PDS:', putRecordError);
    logger.error('Failed to create record on PDS', putRecordError);

    throw putRecordError;
···
    fileCount,
    siteName,
    skippedFiles,
-   uploadedCount: uploadedFiles.length
+   uploadedCount: validUploadedFiles.length
  };

+ console.log('=== UPLOAD FILES COMPLETE ===');
  return result;
} catch (error) {
+ console.error('=== UPLOAD ERROR ===');
+ console.error('Error details:', error);
+ console.error('Stack trace:', error instanceof Error ? error.stack : 'N/A');
  logger.error('Upload error', error, {
    message: error instanceof Error ? error.message : 'Unknown error',
    name: error instanceof Error ? error.name : undefined
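One subtlety in the reuse lookup above: the normalization regex strips the first path segment of any name containing a slash, not just a known wrapper folder, which is why the route checks both the normalized and the original path against the manifest. A small illustration (the normalize helper is hypothetical; the regex is the one used in the route and in processUploadedFiles):

const normalize = (name: string) => name.replace(/^[^\/]*\//, '');

console.assert(normalize('cobblemon/index.html') === 'index.html'); // wrapper folder dropped
console.assert(normalize('index.html') === 'index.html');           // no slash: unchanged
console.assert(normalize('assets/styles.css') === 'styles.css');    // first segment always goes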