A third-party ATProto AppView

Refactor: Optimize CAR import and Vite config

Batch insert records in CAR import for performance.
Add PDS service check in direct-import and inspect-car.
Update Vite config for dynamic plugin loading.

Co-authored-by: dollspacegay <dollspacegay@gmail.com>

+1 -1
create-oauth-keyset.ts
···
   writeFileSync('oauth-keyset.json', JSON.stringify(keyset, null, 2));
   console.log('✅ OAuth keyset created successfully');
-  console.log('Contents:', JSON.stringify(keyset, null, 2));
+  console.log('Keyset file written to oauth-keyset.json');
+4
direct-import.ts
···
     s.id === '#atproto_pds' || s.type === 'AtprotoPersonalDataServer'
   );

+  if (!pdsService) {
+    throw new Error('PDS service not found in DID document');
+  }
+
   const pdsUrl = pdsService.serviceEndpoint;
   console.log(`[DIRECT_IMPORT] PDS: ${pdsUrl}`);
+27
import-car-batch.ts
···
 const BATCH_SIZE = 500;
 const didResolver = new IdResolver();

+// Helper function to extract blob CID from various formats
+function extractBlobCid(blob: any): string | null {
+  if (!blob) return null;
+
+  // Handle string CID
+  if (typeof blob === 'string') {
+    return blob === 'undefined' ? null : blob;
+  }
+
+  // Handle ref.$link format
+  if (blob.ref) {
+    if (typeof blob.ref === 'string') {
+      return blob.ref === 'undefined' ? null : blob.ref;
+    }
+    if (blob.ref.$link) {
+      return blob.ref.$link === 'undefined' ? null : blob.ref.$link;
+    }
+  }
+
+  // Handle direct cid property
+  if (blob.cid) {
+    return blob.cid === 'undefined' ? null : blob.cid;
+  }
+
+  return null;
+}
+
 async function importCar() {
   console.log(`[CAR_IMPORT] Starting import for ${DID}...`);
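Note: this hunk only adds the extractBlobCid helper; its call sites fall outside the lines shown. As a rough illustration of how it would be applied (the record shape and variable names below are assumptions for the sketch, not code from this PR):

    // Hypothetical call site: pull blob CIDs out of a post's image embeds.
    // `record` is assumed to be a decoded app.bsky.feed.post record from the CAR file.
    const images = (record as any).embed?.images ?? [];
    for (const image of images) {
      const blobCid = extractBlobCid(image.image); // accepts string, ref.$link, or cid forms
      if (blobCid) {
        console.log(`[CAR_IMPORT] found blob ${blobCid}`);
      }
    }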
+133 -86
import-car-no-fk.ts
···
     s.id === '#atproto_pds' || s.type === 'AtprotoPersonalDataServer'
   );

+  if (!pdsService || !pdsService.serviceEndpoint) {
+    throw new Error('PDS service not found in DID document');
+  }
+
   const pdsUrl = typeof pdsService.serviceEndpoint === 'string'
     ? pdsService.serviceEndpoint
     : pdsService.serviceEndpoint.toString();
···
   `);
   console.log(`[CAR_IMPORT] ✓ User: ${handle}`);

-  // Insert posts
+  // Batch insert posts
   console.log(`[CAR_IMPORT] Importing ${records.posts.length} posts...`);
+  const BATCH_SIZE = 500;
   let postsCreated = 0;
-  for (const { rkey, record, cid } of records.posts) {
-    const uri = `at://${DID}/app.bsky.feed.post/${rkey}`;
-    await db.execute(sql`
-      INSERT INTO posts (uri, cid, author_did, text, parent_uri, root_uri, created_at, indexed_at)
-      VALUES (
-        ${uri},
-        ${cid?.toString() || 'unknown'},
-        ${DID},
-        ${(record as any).text || ''},
-        ${(record as any).reply?.parent?.uri || null},
-        ${(record as any).reply?.root?.uri || null},
-        ${new Date((record as any).createdAt)},
-        ${new Date()}
-      )
-      ON CONFLICT (uri) DO NOTHING
-    `);
-    postsCreated++;
-    if (postsCreated % 500 === 0) {
-      console.log(`[CAR_IMPORT] ${postsCreated}/${records.posts.length} posts...`);
+
+  for (let i = 0; i < records.posts.length; i += BATCH_SIZE) {
+    const batch = records.posts.slice(i, i + BATCH_SIZE);
+
+    if (batch.length > 0) {
+      const uris = batch.map(({ rkey }) => `at://${DID}/app.bsky.feed.post/${rkey}`);
+      const cids = batch.map(({ cid }) => cid?.toString() || 'unknown');
+      const texts = batch.map(({ record }) => (record as any).text || '');
+      const parentUris = batch.map(({ record }) => (record as any).reply?.parent?.uri || null);
+      const rootUris = batch.map(({ record }) => (record as any).reply?.root?.uri || null);
+      const createdAts = batch.map(({ record }) => new Date((record as any).createdAt));
+      const indexedAt = new Date();
+
+      await db.execute(sql`
+        INSERT INTO posts (uri, cid, author_did, text, parent_uri, root_uri, created_at, indexed_at)
+        SELECT
+          unnest(${uris}::text[]),
+          unnest(${cids}::text[]),
+          ${DID},
+          unnest(${texts}::text[]),
+          unnest(${parentUris}::text[]),
+          unnest(${rootUris}::text[]),
+          unnest(${createdAts}::timestamp[]),
+          ${indexedAt}
+        ON CONFLICT (uri) DO NOTHING
+      `);
+
+      postsCreated += batch.length;
+      console.log(`[CAR_IMPORT] ${Math.min(i + BATCH_SIZE, records.posts.length)}/${records.posts.length} posts...`);
     }
   }
   console.log(`[CAR_IMPORT] ✓ Posts: ${postsCreated}`);

-  // Insert likes (ALL of them, even external posts)
+  // Batch insert likes (ALL of them, even external posts)
   console.log(`[CAR_IMPORT] Importing ${records.likes.length} likes...`);
   let likesCreated = 0;
-  for (const { rkey, record } of records.likes) {
-    const uri = `at://${DID}/app.bsky.feed.like/${rkey}`;
-    await db.execute(sql`
-      INSERT INTO likes (uri, user_did, post_uri, created_at, indexed_at)
-      VALUES (
-        ${uri},
-        ${DID},
-        ${(record as any).subject?.uri || ''},
-        ${new Date((record as any).createdAt)},
-        ${new Date()}
-      )
-      ON CONFLICT (uri) DO NOTHING
-    `);
-    likesCreated++;
-    if (likesCreated % 1000 === 0) {
-      console.log(`[CAR_IMPORT] ${likesCreated}/${records.likes.length} likes...`);
+
+  for (let i = 0; i < records.likes.length; i += BATCH_SIZE) {
+    const batch = records.likes.slice(i, i + BATCH_SIZE);
+
+    if (batch.length > 0) {
+      const uris = batch.map(({ rkey }) => `at://${DID}/app.bsky.feed.like/${rkey}`);
+      const postUris = batch.map(({ record }) => (record as any).subject?.uri || '');
+      const createdAts = batch.map(({ record }) => new Date((record as any).createdAt));
+      const indexedAt = new Date();
+
+      await db.execute(sql`
+        INSERT INTO likes (uri, user_did, post_uri, created_at, indexed_at)
+        SELECT
+          unnest(${uris}::text[]),
+          ${DID},
+          unnest(${postUris}::text[]),
+          unnest(${createdAts}::timestamp[]),
+          ${indexedAt}
+        ON CONFLICT (uri) DO NOTHING
+      `);
+
+      likesCreated += batch.length;
+      if (i + BATCH_SIZE >= records.likes.length || (i + BATCH_SIZE) % 1000 === 0) {
+        console.log(`[CAR_IMPORT] ${Math.min(i + BATCH_SIZE, records.likes.length)}/${records.likes.length} likes...`);
+      }
     }
   }
   console.log(`[CAR_IMPORT] ✓ Likes: ${likesCreated}`);

-  // Insert reposts (ALL of them)
+  // Batch insert reposts (ALL of them)
   console.log(`[CAR_IMPORT] Importing ${records.reposts.length} reposts...`);
   let repostsCreated = 0;
-  for (const { rkey, record } of records.reposts) {
-    const uri = `at://${DID}/app.bsky.feed.repost/${rkey}`;
-    await db.execute(sql`
-      INSERT INTO reposts (uri, user_did, post_uri, created_at, indexed_at)
-      VALUES (
-        ${uri},
-        ${DID},
-        ${(record as any).subject?.uri || ''},
-        ${new Date((record as any).createdAt)},
-        ${new Date()}
-      )
-      ON CONFLICT (uri) DO NOTHING
-    `);
-    repostsCreated++;
-    if (repostsCreated % 500 === 0) {
-      console.log(`[CAR_IMPORT] ${repostsCreated}/${records.reposts.length} reposts...`);
+
+  for (let i = 0; i < records.reposts.length; i += BATCH_SIZE) {
+    const batch = records.reposts.slice(i, i + BATCH_SIZE);
+
+    if (batch.length > 0) {
+      const uris = batch.map(({ rkey }) => `at://${DID}/app.bsky.feed.repost/${rkey}`);
+      const postUris = batch.map(({ record }) => (record as any).subject?.uri || '');
+      const createdAts = batch.map(({ record }) => new Date((record as any).createdAt));
+      const indexedAt = new Date();
+
+      await db.execute(sql`
+        INSERT INTO reposts (uri, user_did, post_uri, created_at, indexed_at)
+        SELECT
+          unnest(${uris}::text[]),
+          ${DID},
+          unnest(${postUris}::text[]),
+          unnest(${createdAts}::timestamp[]),
+          ${indexedAt}
+        ON CONFLICT (uri) DO NOTHING
+      `);
+
+      repostsCreated += batch.length;
+      console.log(`[CAR_IMPORT] ${Math.min(i + BATCH_SIZE, records.reposts.length)}/${records.reposts.length} reposts...`);
     }
   }
   console.log(`[CAR_IMPORT] ✓ Reposts: ${repostsCreated}`);

-  // Insert follows
+  // Batch insert follows
   console.log(`[CAR_IMPORT] Importing ${records.follows.length} follows...`);
   let followsCreated = 0;
-  for (const { rkey, record } of records.follows) {
-    const uri = `at://${DID}/app.bsky.graph.follow/${rkey}`;
-    await db.execute(sql`
-      INSERT INTO follows (uri, follower_did, following_did, created_at, indexed_at)
-      VALUES (
-        ${uri},
-        ${DID},
-        ${(record as any).subject || ''},
-        ${new Date((record as any).createdAt)},
-        ${new Date()}
-      )
-      ON CONFLICT (uri) DO NOTHING
-    `);
-    followsCreated++;
-    if (followsCreated % 200 === 0) {
-      console.log(`[CAR_IMPORT] ${followsCreated}/${records.follows.length} follows...`);
+
+  for (let i = 0; i < records.follows.length; i += BATCH_SIZE) {
+    const batch = records.follows.slice(i, i + BATCH_SIZE);
+
+    if (batch.length > 0) {
+      const uris = batch.map(({ rkey }) => `at://${DID}/app.bsky.graph.follow/${rkey}`);
+      const followingDids = batch.map(({ record }) => (record as any).subject || '');
+      const createdAts = batch.map(({ record }) => new Date((record as any).createdAt));
+      const indexedAt = new Date();
+
+      await db.execute(sql`
+        INSERT INTO follows (uri, follower_did, following_did, created_at, indexed_at)
+        SELECT
+          unnest(${uris}::text[]),
+          ${DID},
+          unnest(${followingDids}::text[]),
+          unnest(${createdAts}::timestamp[]),
+          ${indexedAt}
+        ON CONFLICT (uri) DO NOTHING
+      `);
+
+      followsCreated += batch.length;
+      console.log(`[CAR_IMPORT] ${Math.min(i + BATCH_SIZE, records.follows.length)}/${records.follows.length} follows...`);
     }
   }
   console.log(`[CAR_IMPORT] ✓ Follows: ${followsCreated}`);

-  // Insert blocks
+  // Batch insert blocks
   console.log(`[CAR_IMPORT] Importing ${records.blocks.length} blocks...`);
   let blocksCreated = 0;
-  for (const { rkey, record } of records.blocks) {
-    const uri = `at://${DID}/app.bsky.graph.block/${rkey}`;
-    await db.execute(sql`
-      INSERT INTO blocks (uri, blocker_did, blocked_did, created_at, indexed_at)
-      VALUES (
-        ${uri},
-        ${DID},
-        ${(record as any).subject || ''},
-        ${new Date((record as any).createdAt)},
-        ${new Date()}
-      )
-      ON CONFLICT (uri) DO NOTHING
-    `);
-    blocksCreated++;
+
+  for (let i = 0; i < records.blocks.length; i += BATCH_SIZE) {
+    const batch = records.blocks.slice(i, i + BATCH_SIZE);
+
+    if (batch.length > 0) {
+      const uris = batch.map(({ rkey }) => `at://${DID}/app.bsky.graph.block/${rkey}`);
+      const blockedDids = batch.map(({ record }) => (record as any).subject || '');
+      const createdAts = batch.map(({ record }) => new Date((record as any).createdAt));
+      const indexedAt = new Date();
+
+      await db.execute(sql`
+        INSERT INTO blocks (uri, blocker_did, blocked_did, created_at, indexed_at)
+        SELECT
+          unnest(${uris}::text[]),
+          ${DID},
+          unnest(${blockedDids}::text[]),
+          unnest(${createdAts}::timestamp[]),
+          ${indexedAt}
+        ON CONFLICT (uri) DO NOTHING
+      `);
+
+      blocksCreated += batch.length;
+    }
   }
   console.log(`[CAR_IMPORT] ✓ Blocks: ${blocksCreated}`);
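A note on the batching approach above: in Postgres 10+, multiple unnest() calls in the same SELECT list expand in lockstep, so the equal-length text[]/timestamp[] arrays pair up row-wise and each batch of up to 500 records lands in one INSERT ... SELECT ... ON CONFLICT statement instead of one round trip per record. A minimal sketch of the pattern in isolation (the demo table and values are placeholders; db and sql are assumed to be the same drizzle handles used in this file):

    // Minimal parallel-unnest batch insert (hypothetical `demo` table, two rows, one round trip).
    // Scalar parameters such as ${new Date()} repeat on every generated row.
    const uris = ['at://did:example:alice/app.bsky.feed.like/1', 'at://did:example:alice/app.bsky.feed.like/2'];
    const postUris = ['at://did:example:bob/app.bsky.feed.post/aaa', 'at://did:example:bob/app.bsky.feed.post/bbb'];
    await db.execute(sql`
      INSERT INTO demo (uri, post_uri, indexed_at)
      SELECT unnest(${uris}::text[]), unnest(${postUris}::text[]), ${new Date()}
      ON CONFLICT (uri) DO NOTHING
    `);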
+4
inspect-car.ts
···
     s.id === '#atproto_pds' || s.type === 'AtprotoPersonalDataServer'
   );

+  if (!pdsService) {
+    throw new Error('PDS service not found in DID document');
+  }
+
   const pdsUrl = pdsService.serviceEndpoint;
   console.log(`[INSPECT] PDS: ${pdsUrl}`);
+14 -14
vite.config.ts
···
 import path from "path";
 import runtimeErrorOverlay from "@replit/vite-plugin-runtime-error-modal";

-export default defineConfig({
-  plugins: [
+export default defineConfig(async ({ command, mode }) => {
+  const basePlugins = [
     react(),
     runtimeErrorOverlay(),
-    ...(process.env.NODE_ENV !== "production" &&
-    process.env.REPL_ID !== undefined
-      ? [
-          await import("@replit/vite-plugin-cartographer").then((m) =>
-            m.cartographer(),
-          ),
-          await import("@replit/vite-plugin-dev-banner").then((m) =>
-            m.devBanner(),
-          ),
-        ]
-      : []),
-  ],
+  ];
+
+  if (process.env.NODE_ENV !== "production" && process.env.REPL_ID !== undefined) {
+    basePlugins.push(
+      (await import("@replit/vite-plugin-cartographer")).cartographer(),
+      (await import("@replit/vite-plugin-dev-banner")).devBanner()
+    );
+  }
+
+  return {
+    plugins: basePlugins,
   resolve: {
     alias: {
       "@": path.resolve(import.meta.dirname, "client", "src"),
···
       deny: ["**/.*"],
     },
   },
+  };
 });
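Design note on the Vite change: exporting an async config function lets the Replit-only plugins be loaded with await import() only when REPL_ID is set, so production builds never evaluate them. The destructured command and mode arguments are not used yet; if the intent is to eventually gate on Vite's own mode rather than NODE_ENV, that follow-up might look roughly like this (a sketch, not part of this PR):

    // Hypothetical follow-up: gate the dev-only plugins on Vite's `mode` instead of NODE_ENV.
    if (mode !== "production" && process.env.REPL_ID !== undefined) {
      basePlugins.push(
        (await import("@replit/vite-plugin-cartographer")).cartographer(),
        (await import("@replit/vite-plugin-dev-banner")).devBanner()
      );
    }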