a tool for shared writing and social publishing

Compare changes


+317 -74
+6 -3
actions/publishToPublication.ts
···
  }

  // Determine the collection to use - preserve existing schema if updating
- const existingCollection = existingDocUri ? new AtUri(existingDocUri).collection : undefined;
+ const existingCollection = existingDocUri
+   ? new AtUri(existingDocUri).collection
+   : undefined;
  const documentType = getDocumentType(existingCollection);

  // Build the pages array (used by both formats)
···
  if (documentType === "site.standard.document") {
    // site.standard.document format
    // For standalone docs, use HTTPS URL; for publication docs, use the publication AT-URI
-   const siteUri = publication_uri || `https://leaflet.pub/p/${credentialSession.did}`;
+   const siteUri =
+     publication_uri || `https://leaflet.pub/p/${credentialSession.did}`;

    record = {
      $type: "site.standard.document",
      title: title || "Untitled",
      site: siteUri,
-     path: rkey,
+     path: "/" + rkey,
      publishedAt:
        publishedAt || existingRecord.publishedAt || new Date().toISOString(),
      ...(description && { description }),
+10
app/api/inngest/client.ts
···
      did: string;
    };
  };
+ "user/cleanup-expired-oauth-sessions": {
+   data: {};
+ };
+ "user/check-oauth-session": {
+   data: {
+     identityId: string;
+     did: string;
+     tokenCount: number;
+   };
+ };
};

// Create a client to send and receive events
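For context, this Events map is presumably wired into the Inngest client with EventSchemas so that inngest.send is type-checked against these event names; a minimal sketch of that pattern (the Events type name and client id are assumptions, not shown in this hunk):

import { EventSchemas, Inngest } from "inngest";

export const inngest = new Inngest({
  id: "leaflet", // assumed client id
  schemas: new EventSchemas().fromRecord<Events>(),
});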
+123
app/api/inngest/functions/cleanup_expired_oauth_sessions.ts
import { supabaseServerClient } from "supabase/serverClient";
import { inngest } from "../client";
import { restoreOAuthSession } from "src/atproto-oauth";

// Main function that fetches identities and publishes events for each one
export const cleanup_expired_oauth_sessions = inngest.createFunction(
  { id: "cleanup_expired_oauth_sessions" },
  { event: "user/cleanup-expired-oauth-sessions" },
  async ({ step }) => {
    // Get all identities with an atp_did (OAuth users) that have at least one auth token
    const identities = await step.run("fetch-oauth-identities", async () => {
      const { data, error } = await supabaseServerClient
        .from("identities")
        .select("id, atp_did, email_auth_tokens(count)")
        .not("atp_did", "is", null);

      if (error) {
        throw new Error(`Failed to fetch identities: ${error.message}`);
      }

      // Filter to only include identities with at least one auth token
      return (data || [])
        .filter((identity) => {
          const tokenCount = identity.email_auth_tokens?.[0]?.count ?? 0;
          return tokenCount > 0;
        })
        .map((identity) => ({
          id: identity.id,
          atp_did: identity.atp_did!,
          tokenCount: identity.email_auth_tokens?.[0]?.count ?? 0,
        }));
    });

    console.log(
      `Found ${identities.length} OAuth identities with active sessions to check`,
    );

    // Publish events for each identity in batches
    const BATCH_SIZE = 100;
    let totalSent = 0;

    for (let i = 0; i < identities.length; i += BATCH_SIZE) {
      const batch = identities.slice(i, i + BATCH_SIZE);

      await step.run(`send-events-batch-${i}`, async () => {
        const events = batch.map((identity) => ({
          name: "user/check-oauth-session" as const,
          data: {
            identityId: identity.id,
            did: identity.atp_did,
            tokenCount: identity.tokenCount,
          },
        }));

        await inngest.send(events);
        return events.length;
      });

      totalSent += batch.length;
    }

    console.log(`Published ${totalSent} check-oauth-session events`);

    return {
      success: true,
      identitiesQueued: totalSent,
    };
  },
);

// Function that checks a single identity's OAuth session and cleans up if expired
export const check_oauth_session = inngest.createFunction(
  { id: "check_oauth_session" },
  { event: "user/check-oauth-session" },
  async ({ event, step }) => {
    const { identityId, did, tokenCount } = event.data;

    const result = await step.run("check-and-cleanup", async () => {
      console.log(`Checking OAuth session for DID: ${did} (${tokenCount} tokens)`);

      const sessionResult = await restoreOAuthSession(did);

      if (sessionResult.ok) {
        console.log(`  Session valid for ${did}`);
        return { valid: true, tokensDeleted: 0 };
      }

      // Session is expired/invalid - delete associated auth tokens
      console.log(
        `  Session expired for ${did}: ${sessionResult.error.message}`,
      );

      const { error: deleteError } = await supabaseServerClient
        .from("email_auth_tokens")
        .delete()
        .eq("identity", identityId);

      if (deleteError) {
        console.error(
          `  Error deleting tokens for identity ${identityId}: ${deleteError.message}`,
        );
        return {
          valid: false,
          tokensDeleted: 0,
          error: deleteError.message,
        };
      }

      console.log(`  Deleted ${tokenCount} auth tokens for identity ${identityId}`);

      return {
        valid: false,
        tokensDeleted: tokenCount,
      };
    });

    return {
      identityId,
      did,
      ...result,
    };
  },
);
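Nothing in this diff schedules the fan-out event itself; the function only reacts to "user/cleanup-expired-oauth-sessions". If a periodic sweep is wanted, one option Inngest supports is a small cron-triggered function that emits the event (the schedule and function id below are illustrative, not part of the PR):

export const cleanup_expired_oauth_sessions_cron = inngest.createFunction(
  { id: "cleanup_expired_oauth_sessions_cron" },
  { cron: "0 4 * * *" }, // hypothetical: once a day at 04:00 UTC
  async () => {
    await inngest.send({
      name: "user/cleanup-expired-oauth-sessions",
      data: {},
    });
  },
);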
+61 -52
app/api/inngest/functions/migrate_user_to_standard.ts
···
  };

  // Step 1: Verify OAuth session is valid
- await step.run("verify-oauth-session", async () => {
+ const oauthValid = await step.run("verify-oauth-session", async () => {
    const result = await restoreOAuthSession(did);
    if (!result.ok) {
-     throw new Error(
-       `Failed to restore OAuth session: ${result.error.message}`,
-     );
+     // Mark identity as needing migration so we can retry later
+     await supabaseServerClient
+       .from("identities")
+       .update({
+         metadata: { needsStandardSiteMigration: true },
+       })
+       .eq("atp_did", did);
+
+     return { success: false, error: result.error.message };
    }
    return { success: true };
  });
+
+ if (!oauthValid.success) {
+   return {
+     success: false,
+     error: `Failed to restore OAuth session`,
+     stats,
+     publicationUriMap: {},
+     documentUriMap: {},
+     userSubscriptionUriMap: {},
+   };
+ }

  // Step 2: Get user's pub.leaflet.publication records
  const oldPublications = await step.run(
···
    })
    .filter((x) => x !== null);

- // Run all PDS writes in parallel
- const pubPdsResults = await Promise.all(
-   publicationsToMigrate.map(({ pub, rkey, newRecord }) =>
-     step.run(`pds-write-publication-${pub.uri}`, async () => {
+ // Run PDS + DB writes together for each publication
+ const pubResults = await Promise.all(
+   publicationsToMigrate.map(({ pub, rkey, normalized, newRecord }) =>
+     step.run(`migrate-publication-${pub.uri}`, async () => {
+       // PDS write
        const agent = await createAuthenticatedAgent(did);
        const putResult = await agent.com.atproto.repo.putRecord({
          repo: did,
···
          record: newRecord,
          validate: false,
        });
-       return { oldUri: pub.uri, newUri: putResult.data.uri };
-     }),
-   ),
- );
-
- // Run all DB writes in parallel
- const pubDbResults = await Promise.all(
-   publicationsToMigrate.map(({ pub, normalized, newRecord }, index) => {
-     const newUri = pubPdsResults[index].newUri;
-     return step.run(`db-write-publication-${pub.uri}`, async () => {
+       const newUri = putResult.data.uri;
+
+       // DB write
        const { error: dbError } = await supabaseServerClient
          .from("publications")
          .upsert({
···
          };
        }
        return { success: true as const, oldUri: pub.uri, newUri };
-     });
-   }),
+     }),
+   ),
  );

  // Process results
- for (const result of pubDbResults) {
+ for (const result of pubResults) {
    if (result.success) {
      publicationUriMap[result.oldUri] = result.newUri;
      stats.publicationsMigrated++;
···
          $type: "site.standard.document",
          title: normalized.title || "Untitled",
          site: siteValue,
-         path: rkey,
+         path: "/" + rkey,
          publishedAt: normalized.publishedAt || new Date().toISOString(),
          description: normalized.description,
          content: normalized.content,
···
    })
    .filter((x) => x !== null);

- // Run all PDS writes in parallel
- const docPdsResults = await Promise.all(
-   documentsToMigrate.map(({ doc, rkey, newRecord }) =>
-     step.run(`pds-write-document-${doc.uri}`, async () => {
+ // Run PDS + DB writes together for each document
+ const docResults = await Promise.all(
+   documentsToMigrate.map(({ doc, rkey, newRecord, oldPubUri }) =>
+     step.run(`migrate-document-${doc.uri}`, async () => {
+       // PDS write
        const agent = await createAuthenticatedAgent(did);
        const putResult = await agent.com.atproto.repo.putRecord({
          repo: did,
···
          record: newRecord,
          validate: false,
        });
-       return { oldUri: doc.uri, newUri: putResult.data.uri };
-     }),
-   ),
- );
-
- // Run all DB writes in parallel
- const docDbResults = await Promise.all(
-   documentsToMigrate.map(({ doc, newRecord, oldPubUri }, index) => {
-     const newUri = docPdsResults[index].newUri;
-     return step.run(`db-write-document-${doc.uri}`, async () => {
+       const newUri = putResult.data.uri;
+
+       // DB write
        const { error: dbError } = await supabaseServerClient
          .from("documents")
          .upsert({
···
        }

        return { success: true as const, oldUri: doc.uri, newUri };
-     });
-   }),
+     }),
+   ),
  );

  // Process results
- for (const result of docDbResults) {
+ for (const result of docResults) {
    if (result.success) {
      documentUriMap[result.oldUri] = result.newUri;
      stats.documentsMigrated++;
···
    })
    .filter((x) => x !== null);

- // Run all PDS writes in parallel
- const subPdsResults = await Promise.all(
+ // Run PDS + DB writes together for each subscription
+ const subResults = await Promise.all(
    subscriptionsToMigrate.map(({ sub, rkey, newRecord }) =>
-     step.run(`pds-write-subscription-${sub.uri}`, async () => {
+     step.run(`migrate-subscription-${sub.uri}`, async () => {
+       // PDS write
        const agent = await createAuthenticatedAgent(did);
        const putResult = await agent.com.atproto.repo.putRecord({
          repo: did,
···
          record: newRecord,
          validate: false,
        });
-       return { oldUri: sub.uri, newUri: putResult.data.uri };
-     }),
-   ),
- );
-
- // Run all DB writes in parallel
- const subDbResults = await Promise.all(
-   subscriptionsToMigrate.map(({ sub, newRecord }, index) => {
-     const newUri = subPdsResults[index].newUri;
-     return step.run(`db-write-subscription-${sub.uri}`, async () => {
+       const newUri = putResult.data.uri;
+
+       // DB write
        const { error: dbError } = await supabaseServerClient
          .from("publication_subscriptions")
          .update({
···
          };
        }
        return { success: true as const, oldUri: sub.uri, newUri };
-     });
-   }),
+     }),
+   ),
  );

  // Process results
- for (const result of subDbResults) {
+ for (const result of subResults) {
    if (result.success) {
      userSubscriptionUriMap[result.oldUri] = result.newUri;
      stats.userSubscriptionsMigrated++;
···
  // 2. External references (e.g., from other AT Proto apps) to old URIs continue to work
  // 3. The normalization layer handles both schemas transparently for reads
  // Old records are also kept on the user's PDS so existing AT-URI references remain valid.
+
+ // Clear the migration flag on success
+ if (stats.errors.length === 0) {
+   await step.run("clear-migration-flag", async () => {
+     await supabaseServerClient
+       .from("identities")
+       .update({ metadata: null })
+       .eq("atp_did", did);
+   });
+ }

  return {
    success: stats.errors.length === 0,
+6
app/api/inngest/route.tsx
···
  import { batched_update_profiles } from "./functions/batched_update_profiles";
  import { index_follows } from "./functions/index_follows";
  import { migrate_user_to_standard } from "./functions/migrate_user_to_standard";
+ import {
+   cleanup_expired_oauth_sessions,
+   check_oauth_session,
+ } from "./functions/cleanup_expired_oauth_sessions";

  export const { GET, POST, PUT } = serve({
    client: inngest,
···
      batched_update_profiles,
      index_follows,
      migrate_user_to_standard,
+     cleanup_expired_oauth_sessions,
+     check_oauth_session,
    ],
  });
+11
app/api/oauth/[route]/route.ts
···
    ActionAfterSignIn,
    parseActionFromSearchParam,
  } from "./afterSignInActions";
+ import { inngest } from "app/api/inngest/client";

  type OauthRequestClientState = {
    redirect: string | null;
···
      .single();
    identity = data;
  }
+
+ // Trigger migration if identity needs it
+ const metadata = identity?.metadata as Record<string, unknown> | null;
+ if (metadata?.needsStandardSiteMigration) {
+   await inngest.send({
+     name: "user/migrate-to-standard",
+     data: { did: session.did },
+   });
+ }
+
  let { data: token } = await supabaseServerClient
    .from("email_auth_tokens")
    .insert({
+34
app/lish/[did]/[publication]/.well-known/site.standard.publication/route.ts
import { publicationNameOrUriFilter } from "src/utils/uriHelpers";
import { supabaseServerClient } from "supabase/serverClient";

export async function GET(
  req: Request,
  props: {
    params: Promise<{ publication: string; did: string }>;
  },
) {
  let params = await props.params;
  let did = decodeURIComponent(params.did);
  let publication_name = decodeURIComponent(params.publication);
  let [{ data: publications }] = await Promise.all([
    supabaseServerClient
      .from("publications")
      .select(
        `*,
        publication_subscriptions(*),
        documents_in_publications(documents(
          *,
          comments_on_documents(count),
          document_mentions_in_bsky(count)
        ))
        `,
      )
      .eq("identity_did", did)
      .or(publicationNameOrUriFilter(did, publication_name))
      .order("uri", { ascending: false })
      .limit(1),
  ]);
  let publication = publications?.[0];
  if (!did || !publication) return new Response(null, { status: 404 });
  return new Response(publication.uri);
}
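A consumer can resolve a publication's AT-URI from this endpoint with a plain GET; the body is the bare URI and unknown publications return 404. A sketch, assuming the route is served under the /lish path that mirrors the folder structure (a rewrite or custom domain may expose it elsewhere):

async function resolvePublicationUri(did: string, publication: string) {
  const res = await fetch(
    `https://leaflet.pub/lish/${encodeURIComponent(did)}/${encodeURIComponent(
      publication,
    )}/.well-known/site.standard.publication`,
  );
  if (!res.ok) return null; // 404 when the publication is unknown
  return await res.text(); // body is the publication's AT-URI
}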
+7 -4
app/lish/[did]/[publication]/[rkey]/page.tsx
···
      sizes: "32x32",
      type: "image/png",
    },
-   other: {
-     rel: "alternate",
-     url: document.uri,
-   },
+   other: [
+     {
+       rel: "alternate",
+       url: document.uri,
+     },
+     { rel: "site.standard.document", url: document.uri },
+   ],
  },
  title:
    docRecord.title +
+7 -4
app/p/[didOrHandle]/[rkey]/page.tsx
···

  return {
    icons: {
-     other: {
-       rel: "alternate",
-       url: document.uri,
-     },
+     other: [
+       {
+         rel: "alternate",
+         url: document.uri,
+       },
+       { rel: "site.standard.document", url: document.uri },
+     ],
    },
    title: docRecord.title,
    description: docRecord?.description || "",
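With Next.js's Metadata API, each entry in icons.other should render as a <link> tag in the document head, so standalone and publication documents now expose both <link rel="alternate"> and <link rel="site.standard.document"> pointing at the record's AT-URI, giving site.standard-aware clients a discovery hook (the rendering detail is assumed from Next.js behavior, not shown in this diff).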
+2 -1
components/Blocks/PublicationPollBlock.tsx
···
    setAreYouSure?: (value: boolean) => void;
  },
) => {
- let { data: publicationData } = useLeafletPublicationData();
+ let { data: publicationData, normalizedDocument } =
+   useLeafletPublicationData();
  let isSelected = useUIState((s) =>
    s.selectedBlocks.find((b) => b.value === props.entityID),
  );
+1
drizzle/schema.ts
···
    email: text("email"),
    atp_did: text("atp_did"),
    interface_state: jsonb("interface_state"),
+   metadata: jsonb("metadata"),
  },
  (table) => {
    return {
+2 -2
lexicons/api/lexicons.ts
···
    type: 'ref',
  },
  theme: {
-   type: 'ref',
-   ref: 'lex:pub.leaflet.publication#theme',
+   type: 'union',
+   refs: ['lex:pub.leaflet.publication#theme'],
  },
  description: {
    maxGraphemes: 300,
+1 -1
lexicons/api/types/site/standard/publication.ts
···
  export interface Record {
    $type: 'site.standard.publication'
    basicTheme?: SiteStandardThemeBasic.Main
-   theme?: PubLeafletPublication.Theme
+   theme?: $Typed<PubLeafletPublication.Theme> | { $type: string }
    description?: string
    icon?: BlobRef
    name: string
+2 -2
lexicons/site/standard/publication.json
··· 9 "type": "ref" 10 }, 11 "theme": { 12 - "type": "ref", 13 - "ref": "pub.leaflet.publication#theme" 14 }, 15 "description": { 16 "maxGraphemes": 300,
··· 9 "type": "ref" 10 }, 11 "theme": { 12 + "type": "union", 13 + "refs": ["pub.leaflet.publication#theme"] 14 }, 15 "description": { 16 "maxGraphemes": 300,
+40 -5
lexicons/src/normalize.ts
···
  */

  import type * as PubLeafletDocument from "../api/types/pub/leaflet/document";
- import type * as PubLeafletPublication from "../api/types/pub/leaflet/publication";
+ import * as PubLeafletPublication from "../api/types/pub/leaflet/publication";
  import type * as PubLeafletContent from "../api/types/pub/leaflet/content";
  import type * as SiteStandardDocument from "../api/types/site/standard/document";
  import type * as SiteStandardPublication from "../api/types/site/standard/publication";
···
  };

  // Normalized publication type - uses the generated site.standard.publication type
- export type NormalizedPublication = SiteStandardPublication.Record;
+ // with the theme narrowed to only the valid pub.leaflet.publication#theme type
+ // (isTheme validates that $type is present, so we use $Typed)
+ // Note: We explicitly list fields rather than using Omit because the generated Record type
+ // has an index signature [k: string]: unknown that interferes with property typing
+ export type NormalizedPublication = {
+   $type: "site.standard.publication";
+   name: string;
+   url: string;
+   description?: string;
+   icon?: SiteStandardPublication.Record["icon"];
+   basicTheme?: SiteStandardThemeBasic.Main;
+   theme?: $Typed<PubLeafletPublication.Theme>;
+   preferences?: SiteStandardPublication.Preferences;
+ };

  /**
   * Checks if the record is a pub.leaflet.document
···
  ): NormalizedPublication | null {
    if (!record || typeof record !== "object") return null;

-   // Pass through site.standard records directly
+   // Pass through site.standard records directly, but validate the theme
    if (isStandardPublication(record)) {
-     return record;
+     // Validate theme - only keep if it's a valid pub.leaflet.publication#theme
+     const theme = PubLeafletPublication.isTheme(record.theme)
+       ? (record.theme as $Typed<PubLeafletPublication.Theme>)
+       : undefined;
+     return {
+       ...record,
+       theme,
+     };
    }

    if (isLeafletPublication(record)) {
···

    const basicTheme = leafletThemeToBasicTheme(record.theme);

+   // Validate theme - only keep if it's a valid pub.leaflet.publication#theme with $type set
+   // For legacy records without $type, add it during normalization
+   let theme: $Typed<PubLeafletPublication.Theme> | undefined;
+   if (record.theme) {
+     if (PubLeafletPublication.isTheme(record.theme)) {
+       theme = record.theme as $Typed<PubLeafletPublication.Theme>;
+     } else {
+       // Legacy theme without $type - add it
+       theme = {
+         ...record.theme,
+         $type: "pub.leaflet.publication#theme",
+       };
+     }
+   }
+
    // Convert preferences to site.standard format (strip/replace $type)
    const preferences: SiteStandardPublication.Preferences | undefined =
      record.preferences
···
      description: record.description,
      icon: record.icon,
      basicTheme,
-     theme: record.theme,
+     theme,
      preferences,
    };
  }
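A hypothetical call site (the exported normalizer's name is not visible in this hunk; normalizePublication is assumed) shows the payoff: both schemas come back as NormalizedPublication, and theme is either a $Typed pub.leaflet.publication#theme or undefined:

const normalized = normalizePublication(recordFromPds);
if (normalized?.theme) {
  // Safe to read pub.leaflet.publication#theme fields here,
  // whether the input was a legacy or a site.standard record.
}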
+3
supabase/database.types.ts
···
    home_page: string
    id: string
    interface_state: Json | null
+   metadata: Json | null
  }
  Insert: {
    atp_did?: string | null
···
    home_page?: string
    id?: string
    interface_state?: Json | null
+   metadata?: Json | null
  }
  Update: {
    atp_did?: string | null
···
    home_page?: string
    id?: string
    interface_state?: Json | null
+   metadata?: Json | null
  }
  Relationships: [
    {
+1
supabase/migrations/20260123000000_add_metadata_to_identities.sql
alter table "public"."identities" add column "metadata" jsonb;
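Since the retry flag lives in a jsonb column, a sweep for identities still awaiting migration could filter on the JSON key directly; a sketch using supabase-js's ->> filter syntax (the column and flag name come from this PR, the query itself does not appear in it):

const { data: pending } = await supabaseServerClient
  .from("identities")
  .select("id, atp_did")
  .eq("metadata->>needsStandardSiteMigration", "true");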