zat: atproto utils for zig (zat.dev)

Compare changes


Changed files
+332 -204
+2 -2
.tangled/workflows/deploy-docs.yml
···
      WISP_SITE_NAME: "docs"

    steps:
-     - name: build docs site
+     - name: build site
        command: |
-         bun ./scripts/build-wisp-docs.mjs
+         bun ./scripts/build-site.mjs

      - name: deploy docs to wisp
        command: |
+4
CHANGELOG.md
···
  # changelog

+ ## 0.1.2
+
+ - `extractAt` now logs diagnostic info on parse failures (enable with `.zat` debug scope)
+
  ## 0.1.1

  - xrpc client sets `Content-Type: application/json` for POST requests
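The `.zat` debug scope mentioned in the 0.1.2 entry is opt-in on the consumer side. Below is a minimal sketch of enabling it from an application's root source file, assuming the `std.Options` layout of recent Zig releases (the exact shape of `std_options` varies between Zig versions):

```zig
const std = @import("std");

// Raise the log level for zat's `.zat` scope only; other scopes keep the default.
// (Field names follow std.Options in recent Zig; adjust for your toolchain version.)
pub const std_options: std.Options = .{
    .log_scope_levels = &.{
        .{ .scope = .zat, .level = .debug },
    },
};

pub fn main() void {
    // With the scope enabled, failed extractAt calls emit lines like:
    //   debug(zat): extractAt: parse failed for ... (see src/internal/json.zig below)
    std.log.scoped(.zat).debug("zat debug logging is active", .{});
}
```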
+215
scripts/build-site.mjs
···
+ import {
+   readdir,
+   readFile,
+   mkdir,
+   rm,
+   cp,
+   writeFile,
+   access,
+ } from "node:fs/promises";
+ import path from "node:path";
+ import { execFile } from "node:child_process";
+ import { promisify } from "node:util";
+
+ const repoRoot = path.resolve(new URL("..", import.meta.url).pathname);
+ const docsDir = path.join(repoRoot, "docs");
+ const devlogDir = path.join(repoRoot, "devlog");
+ const siteSrcDir = path.join(repoRoot, "site");
+ const outDir = path.join(repoRoot, "site-out");
+ const outDocsDir = path.join(outDir, "docs");
+
+ const execFileAsync = promisify(execFile);
+
+ async function exists(filePath) {
+   try {
+     await access(filePath);
+     return true;
+   } catch {
+     return false;
+   }
+ }
+
+ function isMarkdown(filePath) {
+   return filePath.toLowerCase().endsWith(".md");
+ }
+
+ async function listMarkdownFiles(dir, prefix = "") {
+   const entries = await readdir(dir, { withFileTypes: true });
+   const out = [];
+   for (const e of entries) {
+     if (e.name.startsWith(".")) continue;
+     const rel = path.join(prefix, e.name);
+     const abs = path.join(dir, e.name);
+     if (e.isDirectory()) {
+       out.push(...(await listMarkdownFiles(abs, rel)));
+     } else if (e.isFile() && isMarkdown(e.name)) {
+       out.push(rel.replaceAll(path.sep, "/"));
+     }
+   }
+   return out.sort((a, b) => a.localeCompare(b));
+ }
+
+ function titleFromMarkdown(md, fallback) {
+   const lines = md.split(/\r?\n/);
+   for (const line of lines) {
+     const m = /^#\s+(.+)\s*$/.exec(line);
+     if (m) return m[1].trim();
+   }
+   return fallback.replace(/\.md$/i, "");
+ }
+
+ function normalizeTitle(title) {
+   let t = String(title || "").trim();
+   // Strip markdown links: [text](url) -> text
+   t = t.replace(/\[([^\]]+)\]\([^)]+\)/g, "$1");
+   // If pages follow a "zat - ..." style, drop the redundant prefix in the nav.
+   t = t.replace(/^zat\s*-\s*/i, "");
+   // Cheaply capitalize (keeps the rest as-authored).
+   if (t.length) t = t[0].toUpperCase() + t.slice(1);
+   return t;
+ }
+
+ async function getBuildId() {
+   try {
+     const { stdout } = await execFileAsync("git", ["rev-parse", "HEAD"], {
+       cwd: repoRoot,
+     });
+     const full = String(stdout || "").trim();
+     if (full) return full.slice(0, 12);
+   } catch {
+     // ignore
+   }
+   return String(Date.now());
+ }
+
+ async function main() {
+   await rm(outDir, { recursive: true, force: true });
+   await mkdir(outDir, { recursive: true });
+
+   // Copy static site shell
+   await cp(siteSrcDir, outDir, { recursive: true });
+
+   // Cache-bust immutable assets on Wisp by appending a per-commit query string.
+   const buildId = await getBuildId();
+   const outIndex = path.join(outDir, "index.html");
+   if (await exists(outIndex)) {
+     let html = await readFile(outIndex, "utf8");
+     html = html.replaceAll('href="./style.css"', `href="./style.css?v=${buildId}"`);
+     html = html.replaceAll(
+       'src="./vendor/marked.min.js"',
+       `src="./vendor/marked.min.js?v=${buildId}"`,
+     );
+     html = html.replaceAll(
+       'src="./app.js"',
+       `src="./app.js?v=${buildId}"`,
+     );
+     html = html.replaceAll(
+       'href="./favicon.svg"',
+       `href="./favicon.svg?v=${buildId}"`,
+     );
+     await writeFile(outIndex, html, "utf8");
+   }
+
+   // Copy docs
+   await mkdir(outDocsDir, { recursive: true });
+
+   const pages = [];
+
+   // Prefer an explicit docs homepage if present; otherwise use repo README as index.
+   const docsIndex = path.join(docsDir, "index.md");
+   if (!(await exists(docsIndex))) {
+     const readme = path.join(repoRoot, "README.md");
+     if (await exists(readme)) {
+       let md = await readFile(readme, "utf8");
+       // Strip docs/ prefix from links since we're now inside the docs context.
+       md = md.replace(/\]\(docs\//g, "](");
+       await writeFile(path.join(outDocsDir, "index.md"), md, "utf8");
+       pages.push({
+         path: "index.md",
+         title: normalizeTitle(titleFromMarkdown(md, "index.md")),
+       });
+     }
+   }
+
+   const changelog = path.join(repoRoot, "CHANGELOG.md");
+   const docsChangelog = path.join(docsDir, "changelog.md");
+   if ((await exists(changelog)) && !(await exists(docsChangelog))) {
+     const md = await readFile(changelog, "utf8");
+     await writeFile(path.join(outDocsDir, "changelog.md"), md, "utf8");
+     pages.push({
+       path: "changelog.md",
+       title: normalizeTitle(titleFromMarkdown(md, "changelog.md")),
+     });
+   }
+
+   const mdFiles = (await exists(docsDir)) ? await listMarkdownFiles(docsDir) : [];
+
+   // Copy all markdown under docs/ (including archives), but only include non-archive
+   // paths in the sidebar manifest.
+   for (const rel of mdFiles) {
+     const src = path.join(docsDir, rel);
+     const dst = path.join(outDocsDir, rel);
+     await mkdir(path.dirname(dst), { recursive: true });
+     await cp(src, dst);
+
+     const md = await readFile(src, "utf8");
+     if (!rel.startsWith("archive/")) {
+       pages.push({ path: rel, title: normalizeTitle(titleFromMarkdown(md, rel)) });
+     }
+   }
+
+   // Copy devlog files to docs/devlog/ and generate an index
+   const devlogFiles = (await exists(devlogDir)) ? await listMarkdownFiles(devlogDir) : [];
+   const devlogEntries = [];
+
+   for (const rel of devlogFiles) {
+     const src = path.join(devlogDir, rel);
+     const dst = path.join(outDocsDir, "devlog", rel);
+     await mkdir(path.dirname(dst), { recursive: true });
+     await cp(src, dst);
+
+     const md = await readFile(src, "utf8");
+     devlogEntries.push({
+       path: `devlog/${rel}`,
+       title: titleFromMarkdown(md, rel),
+     });
+   }
+
+   // Generate devlog index listing all entries (newest first by filename)
+   if (devlogEntries.length > 0) {
+     devlogEntries.sort((a, b) => b.path.localeCompare(a.path));
+     const indexMd = [
+       "# devlog",
+       "",
+       ...devlogEntries.map((e) => `- [${e.title}](${e.path})`),
+       "",
+     ].join("\n");
+     await writeFile(path.join(outDocsDir, "devlog", "index.md"), indexMd, "utf8");
+   }
+
+   // Stable nav order: README homepage, then roadmap, then changelog, then the rest.
+   pages.sort((a, b) => {
+     const order = (p) => {
+       if (p === "index.md") return 0;
+       if (p === "roadmap.md") return 1;
+       if (p === "changelog.md") return 2;
+       return 3;
+     };
+     const ao = order(a.path);
+     const bo = order(b.path);
+     if (ao !== bo) return ao - bo;
+     return a.title.localeCompare(b.title);
+   });
+
+   await writeFile(
+     path.join(outDir, "manifest.json"),
+     JSON.stringify({ pages }, null, 2) + "\n",
+     "utf8",
+   );
+
+   process.stdout.write(
+     `Built Wisp docs site: ${pages.length} markdown file(s) -> ${outDir}\n`,
+   );
+ }
+
+ await main();
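For reference, the manifest the script writes for the SPA sidebar has the shape `{ "pages": [{ "path", "title" }] }`. A small illustrative sketch (sample values are made up, not part of this change) of consuming that shape from Zig with `std.json`:

```zig
const std = @import("std");

// Mirrors the { "pages": [{ "path", "title" }] } layout emitted by build-site.mjs.
const Page = struct {
    path: []const u8,
    title: []const u8,
};
const Manifest = struct {
    pages: []const Page,
};

test "parse a manifest.json sample" {
    // Sample data only; real titles come from normalizeTitle() in the build script.
    const sample =
        \\{
        \\  "pages": [
        \\    { "path": "index.md", "title": "Zat" },
        \\    { "path": "changelog.md", "title": "Changelog" }
        \\  ]
        \\}
    ;
    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena.deinit();

    const manifest = try std.json.parseFromSliceLeaky(Manifest, arena.allocator(), sample, .{
        // tolerate extra fields a later build step might add
        .ignore_unknown_fields = true,
    });
    try std.testing.expectEqualStrings("index.md", manifest.pages[0].path);
    try std.testing.expectEqualStrings("Changelog", manifest.pages[1].title);
}
```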
-195
scripts/build-wisp-docs.mjs
···
- import {
-   readdir,
-   readFile,
-   mkdir,
-   rm,
-   cp,
-   writeFile,
-   access,
- } from "node:fs/promises";
- import path from "node:path";
- import { execFile } from "node:child_process";
- import { promisify } from "node:util";
-
- const repoRoot = path.resolve(new URL("..", import.meta.url).pathname);
- const docsDir = path.join(repoRoot, "docs");
- const devlogDir = path.join(repoRoot, "devlog");
- const siteSrcDir = path.join(repoRoot, "site");
- const outDir = path.join(repoRoot, "site-out");
- const outDocsDir = path.join(outDir, "docs");
-
- const execFileAsync = promisify(execFile);
-
- async function exists(filePath) {
-   try {
-     await access(filePath);
-     return true;
-   } catch {
-     return false;
-   }
- }
-
- function isMarkdown(filePath) {
-   return filePath.toLowerCase().endsWith(".md");
- }
-
- async function listMarkdownFiles(dir, prefix = "") {
-   const entries = await readdir(dir, { withFileTypes: true });
-   const out = [];
-   for (const e of entries) {
-     if (e.name.startsWith(".")) continue;
-     const rel = path.join(prefix, e.name);
-     const abs = path.join(dir, e.name);
-     if (e.isDirectory()) {
-       out.push(...(await listMarkdownFiles(abs, rel)));
-     } else if (e.isFile() && isMarkdown(e.name)) {
-       out.push(rel.replaceAll(path.sep, "/"));
-     }
-   }
-   return out.sort((a, b) => a.localeCompare(b));
- }
-
- function titleFromMarkdown(md, fallback) {
-   const lines = md.split(/\r?\n/);
-   for (const line of lines) {
-     const m = /^#\s+(.+)\s*$/.exec(line);
-     if (m) return m[1].trim();
-   }
-   return fallback.replace(/\.md$/i, "");
- }
-
- function normalizeTitle(title) {
-   let t = String(title || "").trim();
-   // Strip markdown links: [text](url) -> text
-   t = t.replace(/\[([^\]]+)\]\([^)]+\)/g, "$1");
-   // If pages follow a "zat - ..." style, drop the redundant prefix in the nav.
-   t = t.replace(/^zat\s*-\s*/i, "");
-   // Cheaply capitalize (keeps the rest as-authored).
-   if (t.length) t = t[0].toUpperCase() + t.slice(1);
-   return t;
- }
-
- async function getBuildId() {
-   try {
-     const { stdout } = await execFileAsync("git", ["rev-parse", "HEAD"], {
-       cwd: repoRoot,
-     });
-     const full = String(stdout || "").trim();
-     if (full) return full.slice(0, 12);
-   } catch {
-     // ignore
-   }
-   return String(Date.now());
- }
-
- async function main() {
-   await rm(outDir, { recursive: true, force: true });
-   await mkdir(outDir, { recursive: true });
-
-   // Copy static site shell
-   await cp(siteSrcDir, outDir, { recursive: true });
-
-   // Cache-bust immutable assets on Wisp by appending a per-commit query string.
-   const buildId = await getBuildId();
-   const outIndex = path.join(outDir, "index.html");
-   if (await exists(outIndex)) {
-     let html = await readFile(outIndex, "utf8");
-     html = html.replaceAll('href="./style.css"', `href="./style.css?v=${buildId}"`);
-     html = html.replaceAll(
-       'src="./vendor/marked.min.js"',
-       `src="./vendor/marked.min.js?v=${buildId}"`,
-     );
-     html = html.replaceAll(
-       'src="./app.js"',
-       `src="./app.js?v=${buildId}"`,
-     );
-     html = html.replaceAll(
-       'href="./favicon.svg"',
-       `href="./favicon.svg?v=${buildId}"`,
-     );
-     await writeFile(outIndex, html, "utf8");
-   }
-
-   // Copy docs
-   await mkdir(outDocsDir, { recursive: true });
-
-   const pages = [];
-
-   // Prefer an explicit docs homepage if present; otherwise use repo README as index.
-   const docsIndex = path.join(docsDir, "index.md");
-   if (!(await exists(docsIndex))) {
-     const readme = path.join(repoRoot, "README.md");
-     if (await exists(readme)) {
-       let md = await readFile(readme, "utf8");
-       // Strip docs/ prefix from links since we're now inside the docs context.
-       md = md.replace(/\]\(docs\//g, "](");
-       await writeFile(path.join(outDocsDir, "index.md"), md, "utf8");
-       pages.push({
-         path: "index.md",
-         title: normalizeTitle(titleFromMarkdown(md, "index.md")),
-       });
-     }
-   }
-
-   const changelog = path.join(repoRoot, "CHANGELOG.md");
-   const docsChangelog = path.join(docsDir, "changelog.md");
-   if ((await exists(changelog)) && !(await exists(docsChangelog))) {
-     const md = await readFile(changelog, "utf8");
-     await writeFile(path.join(outDocsDir, "changelog.md"), md, "utf8");
-     pages.push({
-       path: "changelog.md",
-       title: normalizeTitle(titleFromMarkdown(md, "changelog.md")),
-     });
-   }
-
-   const mdFiles = (await exists(docsDir)) ? await listMarkdownFiles(docsDir) : [];
-
-   // Copy all markdown under docs/ (including archives), but only include non-archive
-   // paths in the sidebar manifest.
-   for (const rel of mdFiles) {
-     const src = path.join(docsDir, rel);
-     const dst = path.join(outDocsDir, rel);
-     await mkdir(path.dirname(dst), { recursive: true });
-     await cp(src, dst);
-
-     const md = await readFile(src, "utf8");
-     if (!rel.startsWith("archive/")) {
-       pages.push({ path: rel, title: normalizeTitle(titleFromMarkdown(md, rel)) });
-     }
-   }
-
-   // Copy devlog files to docs/devlog/ (accessible via SPA but not in sidebar)
-   const devlogFiles = (await exists(devlogDir)) ? await listMarkdownFiles(devlogDir) : [];
-   for (const rel of devlogFiles) {
-     const src = path.join(devlogDir, rel);
-     const dst = path.join(outDocsDir, "devlog", rel);
-     await mkdir(path.dirname(dst), { recursive: true });
-     await cp(src, dst);
-   }
-
-   // Stable nav order: README homepage, then roadmap, then changelog, then the rest.
-   pages.sort((a, b) => {
-     const order = (p) => {
-       if (p === "index.md") return 0;
-       if (p === "roadmap.md") return 1;
-       if (p === "changelog.md") return 2;
-       return 3;
-     };
-     const ao = order(a.path);
-     const bo = order(b.path);
-     if (ao !== bo) return ao - bo;
-     return a.title.localeCompare(b.title);
-   });
-
-   await writeFile(
-     path.join(outDir, "manifest.json"),
-     JSON.stringify({ pages }, null, 2) + "\n",
-     "utf8",
-   );
-
-   process.stdout.write(
-     `Built Wisp docs site: ${pages.length} markdown file(s) -> ${outDir}\n`,
-   );
- }
-
- await main();
+2 -2
scripts/publish-docs.zig
···

  /// devlog entries
  const devlog = [_]DocEntry{
-     .{ .path = "/001", .file = "devlog/001-self-publishing-docs.md" },
+     .{ .path = "/devlog/001", .file = "devlog/001-self-publishing-docs.md" },
  };

  pub fn main() !void {
···
      // devlog publication (clock_id 100 to separate from docs)
      const devlog_tid = zat.Tid.fromTimestamp(1704067200000000, 100);
      const devlog_pub = Publication{
-         .url = "https://zat.dev/devlog",
+         .url = "https://zat.dev",
          .name = "zat devlog",
          .description = "building zat in public",
      };
+1 -1
site/app.js
···
    }

    try {
-     const md = await fetchText(`./docs/${encodeURIComponent(activePath)}`);
+     const md = await fetchText(`./docs/${activePath}`);
      const html = globalThis.marked.parse(md);
      contentEl.innerHTML = html;

+1 -1
site/index.html
···
      </button>
      <a class="brand" href="./">zat.dev</a>
      <div class="header-links">
-       <a class="header-link" href="#devlog/001-self-publishing-docs.md">devlog</a>
+       <a class="header-link" href="#devlog/index.md">devlog</a>
        <a class="header-link" href="https://tangled.sh/zat.dev/zat" target="_blank" rel="noopener noreferrer">repo</a>
      </div>
    </header>
+107 -3
src/internal/json.zig
···
  //! two approaches:
  //! - runtime paths: getString(value, "embed.external.uri") - for dynamic paths
  //! - comptime paths: extractAt(T, alloc, value, .{"embed", "external"}) - for static paths with type safety
+ //!
+ //! debug logging:
+ //! enable with `pub const std_options = .{ .log_scope_levels = &.{.{ .scope = .zat, .level = .debug }} };`

  const std = @import("std");
+ const log = std.log.scoped(.zat);

  /// navigate a json value by dot-separated path
  /// returns null if any segment is missing or wrong type
···
  /// extract a typed struct from a nested path
  /// uses comptime tuple for path segments - no runtime string parsing
  /// leverages std.json.parseFromValueLeaky for type-safe extraction
+ ///
+ /// on failure, logs diagnostic info when debug logging is enabled for .zat scope
  pub fn extractAt(
      comptime T: type,
      allocator: std.mem.Allocator,
···
      var current = value;
      inline for (path) |segment| {
          current = switch (current) {
-             .object => |obj| obj.get(segment) orelse return error.MissingField,
-             else => return error.UnexpectedToken,
+             .object => |obj| obj.get(segment) orelse {
+                 log.debug("extractAt: missing field \"{s}\" in path {any}, expected {s}", .{
+                     segment,
+                     path,
+                     @typeName(T),
+                 });
+                 return error.MissingField;
+             },
+             else => {
+                 log.debug("extractAt: expected object at \"{s}\" in path {any}, got {s}", .{
+                     segment,
+                     path,
+                     @tagName(current),
+                 });
+                 return error.UnexpectedToken;
+             },
          };
      }
-     return std.json.parseFromValueLeaky(T, allocator, current, .{});
+     return std.json.parseFromValueLeaky(T, allocator, current, .{ .ignore_unknown_fields = true }) catch |err| {
+         log.debug("extractAt: parse failed for {s} at path {any}: {s} (json type: {s})", .{
+             @typeName(T),
+             path,
+             @errorName(err),
+             @tagName(current),
+         });
+         return err;
+     };
  }

  /// extract a typed value, returning null if path doesn't exist
···
      const missing = extractAtOptional(Thing, arena.allocator(), parsed.value, .{"missing"});
      try std.testing.expect(missing == null);
  }
+
+ test "extractAt logs diagnostic on enum parse failure" {
+     // simulates the issue: unknown enum value from external API
+     const json_str =
+         \\{"op": {"action": "archive", "path": "app.bsky.feed.post/abc"}}
+     ;
+     var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
+     defer arena.deinit();
+
+     const parsed = try std.json.parseFromSlice(std.json.Value, arena.allocator(), json_str, .{});
+
+     const Action = enum { create, update, delete };
+     const Op = struct {
+         action: Action,
+         path: []const u8,
+     };
+
+     // "archive" is not a valid Action variant - this should fail
+     // with debug logging enabled, you'd see:
+     // debug(zat): extractAt: parse failed for json.Op at path { "op" }: InvalidEnumTag (json type: object)
+     const result = extractAtOptional(Op, arena.allocator(), parsed.value, .{"op"});
+     try std.testing.expect(result == null);
+ }
+
+ test "extractAt logs diagnostic on missing field" {
+     const json_str =
+         \\{"data": {"name": "test"}}
+     ;
+     var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
+     defer arena.deinit();
+
+     const parsed = try std.json.parseFromSlice(std.json.Value, arena.allocator(), json_str, .{});
+
+     const Thing = struct { value: i64 };
+
+     // path "data.missing" doesn't exist
+     // with debug logging enabled, you'd see:
+     // debug(zat): extractAt: missing field "missing" in path { "data", "missing" }, expected json.Thing
+     const result = extractAtOptional(Thing, arena.allocator(), parsed.value, .{ "data", "missing" });
+     try std.testing.expect(result == null);
+ }
+
+ test "extractAt ignores unknown fields" {
+     // real-world case: TAP messages have extra fields (live, rev, cid) that we don't need
+     const json_str =
+         \\{
+         \\  "record": {
+         \\    "live": true,
+         \\    "did": "did:plc:abc123",
+         \\    "rev": "3mbspmpaidl2a",
+         \\    "collection": "pub.leaflet.document",
+         \\    "rkey": "xyz789",
+         \\    "action": "create",
+         \\    "cid": "bafyreitest"
+         \\  }
+         \\}
+     ;
+     var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
+     defer arena.deinit();
+
+     const parsed = try std.json.parseFromSlice(std.json.Value, arena.allocator(), json_str, .{});
+
+     // only extract the fields we care about
+     const Record = struct {
+         collection: []const u8,
+         action: []const u8,
+         did: []const u8,
+         rkey: []const u8,
+     };
+
+     const rec = try extractAt(Record, arena.allocator(), parsed.value, .{"record"});
+     try std.testing.expectEqualStrings("pub.leaflet.document", rec.collection);
+     try std.testing.expectEqualStrings("create", rec.action);
+     try std.testing.expectEqualStrings("did:plc:abc123", rec.did);
+     try std.testing.expectEqualStrings("xyz789", rec.rkey);
+ }
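Taken together with the tests above, here is a hypothetical call-site sketch of how a consumer might use `extractAt` on a TAP-style firehose message after 0.1.2. The `@import` path and the message payload are assumptions for illustration; the diagnostics land in the `.zat` debug log rather than in the returned error:

```zig
const std = @import("std");
// hypothetical import path; the helpers live in src/internal/json.zig inside the SDK
const json = @import("internal/json.zig");

const Record = struct {
    collection: []const u8,
    action: []const u8,
    did: []const u8,
    rkey: []const u8,
};

pub fn main() !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const alloc = arena.allocator();

    // Assumed TAP-like payload; extra fields such as "cid" are now ignored
    // thanks to .ignore_unknown_fields = true in extractAt.
    const msg =
        \\{"record": {"did": "did:plc:abc123", "collection": "pub.leaflet.document",
        \\ "rkey": "xyz789", "action": "create", "cid": "bafyreitest"}}
    ;
    const value = try std.json.parseFromSliceLeaky(std.json.Value, alloc, msg, .{});

    // On failure this returns error.MissingField / error.UnexpectedToken (bad path)
    // or a std.json parse error (shape mismatch); details go to the .zat debug log.
    const rec = try json.extractAt(Record, alloc, value, .{"record"});
    std.debug.print("{s} {s}/{s}\n", .{ rec.action, rec.collection, rec.rkey });
}
```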