atproto utils for zig zat.dev
atproto sdk zig

Compare changes

Choose any two refs to compare.

+3 -5
.tangled/workflows/deploy-docs.yml
··· 6 6 7 7 dependencies: 8 8 nixpkgs: 9 - - nodejs 10 - - coreutils 9 + - bun 11 10 - curl 12 11 13 12 environment: 14 - # Use DID directly to avoid handle-resolution issues in CI. 15 13 WISP_DID: "did:plc:mkqt76xvfgxuemlwlx6ruc3w" 16 14 WISP_SITE_NAME: "docs" 17 15 18 16 steps: 19 - - name: build docs site 17 + - name: build site 20 18 command: | 21 - node ./scripts/build-wisp-docs.mjs 19 + bun ./scripts/build-site.mjs 22 20 23 21 - name: deploy docs to wisp 24 22 command: |
+15
.tangled/workflows/publish-docs.yml
··· 1 + when: 2 + - event: push 3 + tag: "v*" 4 + 5 + engine: nixery 6 + 7 + dependencies: 8 + nixpkgs: 9 + - zig 10 + 11 + steps: 12 + - name: build and publish docs to ATProto 13 + command: | 14 + zig build 15 + ./zig-out/bin/publish-docs
+10 -72
CHANGELOG.md
··· 1 1 # changelog 2 2 3 - ## 0.1.0 3 + ## 0.1.2 4 + 5 + - `extractAt` now logs diagnostic info on parse failures (enable with `.zat` debug scope) 6 + 7 + ## 0.1.1 4 8 5 - first feature release. adds protocol-level enums for firehose consumption. 9 + - xrpc client sets `Content-Type: application/json` for POST requests 10 + - docs published as `site.standard.document` records on tag releases 6 11 7 - ### what's new 12 + ## 0.1.0 8 13 9 - **sync types** - enums from `com.atproto.sync.subscribeRepos` lexicon: 14 + sync types for firehose consumption: 10 15 11 16 - `CommitAction` - `.create`, `.update`, `.delete` 12 17 - `EventKind` - `.commit`, `.sync`, `.identity`, `.account`, `.info` 13 18 - `AccountStatus` - `.takendown`, `.suspended`, `.deleted`, `.deactivated`, `.desynchronized`, `.throttled` 14 19 15 - these integrate with zig's `std.json` for automatic parsing. define struct fields as enums instead of strings, and get exhaustive switch checking. 16 - 17 - ### migration 18 - 19 - if you're currently doing string comparisons: 20 - 21 - ```zig 22 - // before: string comparisons everywhere 23 - const TapRecord = struct { 24 - action: []const u8, 25 - collection: []const u8, 26 - // ... 27 - }; 28 - 29 - if (mem.eql(u8, rec.action, "create") or mem.eql(u8, rec.action, "update")) { 30 - // handle upsert 31 - } else if (mem.eql(u8, rec.action, "delete")) { 32 - // handle delete 33 - } 34 - ``` 35 - 36 - switch to enum fields: 37 - 38 - ```zig 39 - // after: type-safe enums 40 - const TapRecord = struct { 41 - action: zat.CommitAction, // parsed automatically by std.json 42 - collection: []const u8, 43 - // ... 44 - }; 45 - 46 - switch (rec.action) { 47 - .create, .update => processUpsert(rec), 48 - .delete => processDelete(rec), 49 - } 50 - ``` 51 - 52 - the compiler enforces exhaustive handling - if AT Protocol adds a new action, your code won't compile until you handle it. 53 - 54 - **this is backwards compatible.** your existing code continues to work. 
adopt the new types when you're ready. 55 - 56 - ### library overview 57 - 58 - zat provides zig primitives for AT Protocol: 59 - 60 - | feature | description | 61 - |---------|-------------| 62 - | string primitives | `Tid`, `Did`, `Handle`, `Nsid`, `Rkey`, `AtUri` - parsing and validation | 63 - | did resolution | resolve `did:plc` and `did:web` to documents | 64 - | handle resolution | resolve handles to DIDs via HTTP well-known | 65 - | xrpc client | call AT Protocol endpoints (queries and procedures) | 66 - | sync types | enums for firehose consumption | 67 - | json helpers | navigate nested json without verbose if-chains | 68 - | jwt verification | verify service auth tokens (ES256, ES256K) | 69 - | multibase/multicodec | decode public keys from DID documents | 70 - 71 - ### install 72 - 73 - ```bash 74 - zig fetch --save https://tangled.sh/zzstoatzz.io/zat/archive/main 75 - ``` 76 - 77 - ```zig 78 - // build.zig 79 - const zat = b.dependency("zat", .{}).module("zat"); 80 - exe.root_module.addImport("zat", zat); 81 - ``` 20 + these integrate with `std.json` for automatic parsing. 82 21 83 22 ## 0.0.2 84 23 ··· 87 26 88 27 ## 0.0.1 89 28 90 - - initial release 91 29 - string primitives (Tid, Did, Handle, Nsid, Rkey, AtUri) 92 30 - did/handle resolution 93 31 - json helpers
+15
CONTRIBUTING.md
··· 1 + # contributing 2 + 3 + ## before committing 4 + 5 + ```sh 6 + just fmt 7 + ``` 8 + 9 + or without just: 10 + 11 + ```sh 12 + zig fmt . 13 + ``` 14 + 15 + CI runs `zig fmt --check .` and will fail on unformatted code.
+16 -3
README.md
··· 1 - # zat 1 + # [zat](https://zat.dev) 2 + 3 + AT Protocol building blocks for zig. 4 + 5 + <details> 6 + <summary><strong>this readme is an ATProto record</strong></summary> 7 + 8 + โ†’ [view in zat.dev's repository](https://at-me.zzstoatzz.io/view?handle=zat.dev) 2 9 3 - zig primitives for AT Protocol. 10 + zat publishes these docs as [`site.standard.document`](https://standard.site) records, signed by its DID. 11 + 12 + </details> 4 13 5 14 ## install 6 15 7 16 ```bash 8 - zig fetch --save https://tangled.sh/zzstoatzz.io/zat/archive/main 17 + zig fetch --save https://tangled.sh/zat.dev/zat/archive/main 9 18 ``` 10 19 11 20 then in `build.zig`: ··· 185 194 ## license 186 195 187 196 MIT 197 + 198 + --- 199 + 200 + [roadmap](docs/roadmap.md) ยท [changelog](CHANGELOG.md)
+12
build.zig
··· 15 15 16 16 const test_step = b.step("test", "run unit tests"); 17 17 test_step.dependOn(&run_tests.step); 18 + 19 + // publish-docs script (uses zat to publish docs to ATProto) 20 + const publish_docs = b.addExecutable(.{ 21 + .name = "publish-docs", 22 + .root_module = b.createModule(.{ 23 + .root_source_file = b.path("scripts/publish-docs.zig"), 24 + .target = target, 25 + .optimize = optimize, 26 + .imports = &.{.{ .name = "zat", .module = mod }}, 27 + }), 28 + }); 29 + b.installArtifact(publish_docs); 18 30 }
+35
devlog/001-self-publishing-docs.md
··· 1 + # zat publishes its own docs to ATProto 2 + 3 + zat uses itself to publish these docs as `site.standard.document` records. here's how. 4 + 5 + ## the idea 6 + 7 + i'm working on [search for leaflet](https://leaflet-search.pages.dev/) and more generally, search for [standard.site](https://standard.site/) records. many are [currently thinking about how to facilitate better idea sharing on atproto right now](https://bsky.app/profile/eugenevinitsky.bsky.social/post/3mbpqpylv3s2e). 8 + 9 + this is me doing a rep of shipping a "standard.site", so i know what i'll be searching through, and to better understand why blogging platforms choose their schema extensions etc for i start indexing/searching their record types. 10 + 11 + ## what we built 12 + 13 + a zig script ([`scripts/publish-docs.zig`](https://tangled.sh/zat.dev/zat/tree/main/scripts/publish-docs.zig)) that: 14 + 15 + 1. authenticates with the PDS via `com.atproto.server.createSession` 16 + 2. creates a `site.standard.publication` record 17 + 3. publishes each doc as a `site.standard.document` pointing to that publication 18 + 4. uses deterministic TIDs so records get the same rkey every time (idempotent updates) 19 + 20 + ## the mechanics 21 + 22 + ### TIDs 23 + 24 + timestamp identifiers. base32-sortable. we use a fixed base timestamp with incrementing clock_id so each doc gets a stable rkey: 25 + 26 + ```zig 27 + const pub_tid = zat.Tid.fromTimestamp(1704067200000000, 0); // publication 28 + const doc_tid = zat.Tid.fromTimestamp(1704067200000000, i + 1); // docs get 1, 2, 3... 29 + ``` 30 + 31 + ### CI 32 + 33 + [`.tangled/workflows/publish-docs.yml`](https://tangled.sh/zat.dev/zat/tree/main/.tangled/workflows/publish-docs.yml) triggers on `v*` tags. tag a release, docs publish automatically. 34 + 35 + `putRecord` with the same rkey overwrites, so the CI job overwrites `standard.site` records when you cut a tag.
+24 -24
docs/roadmap.md
··· 1 1 # roadmap 2 2 3 - `zat` is a grab bag of **AT Protocol building blocks** in Zig: parsers, validators, resolvers, and small protocol helpers. 3 + zat started as a small set of string primitives for AT Protocol - the types everyone reimplements (`Tid`, `Did`, `Handle`, `Nsid`, `Rkey`, `AtUri`). the scope grew based on real usage. 4 4 5 - This roadmap is intentionally short. If it doesnโ€™t fit into one file, it probably belongs in issues. 5 + ## history 6 6 7 - ## now 7 + **initial scope** - string primitives with parsing and validation. the philosophy: primitives not frameworks, layered design, zig idioms, minimal scope. 8 8 9 - - keep current APIs stable (0.x semver) 10 - - tighten docs/examples as real apps discover sharp edges 11 - - keep the โ€œprimitives, not frameworkโ€ ethos 9 + **what grew from usage:** 10 + - DID resolution was originally "out of scope" - real projects needed it, so `DidResolver` and `DidDocument` got added 11 + - XRPC client and JSON helpers - same story 12 + - JWT verification for service auth 13 + - handle resolution via HTTP well-known 14 + - handle resolution via DNS-over-HTTP (community contribution) 15 + - sync types for firehose consumption (`CommitAction`, `EventKind`, `AccountStatus`) 12 16 13 - ## next 17 + this pattern - start minimal, expand based on real pain - continues. 14 18 15 - ### polish 19 + ## now 16 20 17 - - improve docs around common workflows: 18 - - resolving handle โ†’ DID โ†’ PDS 19 - - making XRPC calls + parsing JSON 20 - - verifying JWTs from DID documents 21 - - add more integration tests that hit real-world edge cases (without becoming flaky) 21 + use zat in real projects. let usage drive what's next. 22 22 23 - ### primitives 23 + the primitives are reasonably complete. what's missing will show up when people build things. until then, no speculative features. 
24 24 25 - - fill gaps that show up repeatedly in other atproto projects: 26 - - CIDs and common multiformats plumbing 27 - - richer `AtUri` helpers (safe joins, parsing variants) 28 - - more ergonomic JSON navigation patterns (still optional, no forced codegen) 25 + ## maybe later 29 26 30 - ## later (maybe) 27 + these stay out of scope unless real demand emerges: 31 28 32 - - lexicon codegen is still โ€œprobably a separate projectโ€ 33 - - higher-level clients/frameworks stay out of scope 29 + - lexicon codegen - probably a separate project 30 + - higher-level clients/frameworks - too opinionated 31 + - token refresh/session management - app-specific 32 + - feed generator scaffolding - each feed is unique 34 33 35 34 ## non-goals 36 35 37 - - token refresh/session frameworks 38 - - opinionated app scaffolding 39 - - โ€œone true SDKโ€ that tries to do everything 36 + zat is not trying to be: 40 37 38 + - a "one true SDK" that does everything 39 + - an opinionated app framework 40 + - a replacement for understanding the protocol
+17
justfile
··· 1 + # zat 2 + 3 + # show available commands 4 + default: 5 + @just --list 6 + 7 + # format code 8 + fmt: 9 + zig fmt . 10 + 11 + # check formatting (CI) 12 + check: 13 + zig fmt --check . 14 + 15 + # run tests 16 + test: 17 + zig build test
+215
scripts/build-site.mjs
··· 1 + import { 2 + readdir, 3 + readFile, 4 + mkdir, 5 + rm, 6 + cp, 7 + writeFile, 8 + access, 9 + } from "node:fs/promises"; 10 + import path from "node:path"; 11 + import { execFile } from "node:child_process"; 12 + import { promisify } from "node:util"; 13 + 14 + const repoRoot = path.resolve(new URL("..", import.meta.url).pathname); 15 + const docsDir = path.join(repoRoot, "docs"); 16 + const devlogDir = path.join(repoRoot, "devlog"); 17 + const siteSrcDir = path.join(repoRoot, "site"); 18 + const outDir = path.join(repoRoot, "site-out"); 19 + const outDocsDir = path.join(outDir, "docs"); 20 + 21 + const execFileAsync = promisify(execFile); 22 + 23 + async function exists(filePath) { 24 + try { 25 + await access(filePath); 26 + return true; 27 + } catch { 28 + return false; 29 + } 30 + } 31 + 32 + function isMarkdown(filePath) { 33 + return filePath.toLowerCase().endsWith(".md"); 34 + } 35 + 36 + async function listMarkdownFiles(dir, prefix = "") { 37 + const entries = await readdir(dir, { withFileTypes: true }); 38 + const out = []; 39 + for (const e of entries) { 40 + if (e.name.startsWith(".")) continue; 41 + const rel = path.join(prefix, e.name); 42 + const abs = path.join(dir, e.name); 43 + if (e.isDirectory()) { 44 + out.push(...(await listMarkdownFiles(abs, rel))); 45 + } else if (e.isFile() && isMarkdown(e.name)) { 46 + out.push(rel.replaceAll(path.sep, "/")); 47 + } 48 + } 49 + return out.sort((a, b) => a.localeCompare(b)); 50 + } 51 + 52 + function titleFromMarkdown(md, fallback) { 53 + const lines = md.split(/\r?\n/); 54 + for (const line of lines) { 55 + const m = /^#\s+(.+)\s*$/.exec(line); 56 + if (m) return m[1].trim(); 57 + } 58 + return fallback.replace(/\.md$/i, ""); 59 + } 60 + 61 + function normalizeTitle(title) { 62 + let t = String(title || "").trim(); 63 + // Strip markdown links: [text](url) -> text 64 + t = t.replace(/\[([^\]]+)\]\([^)]+\)/g, "$1"); 65 + // If pages follow a "zat - ..." style, drop the redundant prefix in the nav. 
66 + t = t.replace(/^zat\s*-\s*/i, ""); 67 + // Cheaply capitalize (keeps the rest as-authored). 68 + if (t.length) t = t[0].toUpperCase() + t.slice(1); 69 + return t; 70 + } 71 + 72 + async function getBuildId() { 73 + try { 74 + const { stdout } = await execFileAsync("git", ["rev-parse", "HEAD"], { 75 + cwd: repoRoot, 76 + }); 77 + const full = String(stdout || "").trim(); 78 + if (full) return full.slice(0, 12); 79 + } catch { 80 + // ignore 81 + } 82 + return String(Date.now()); 83 + } 84 + 85 + async function main() { 86 + await rm(outDir, { recursive: true, force: true }); 87 + await mkdir(outDir, { recursive: true }); 88 + 89 + // Copy static site shell 90 + await cp(siteSrcDir, outDir, { recursive: true }); 91 + 92 + // Cache-bust immutable assets on Wisp by appending a per-commit query string. 93 + const buildId = await getBuildId(); 94 + const outIndex = path.join(outDir, "index.html"); 95 + if (await exists(outIndex)) { 96 + let html = await readFile(outIndex, "utf8"); 97 + html = html.replaceAll('href="./style.css"', `href="./style.css?v=${buildId}"`); 98 + html = html.replaceAll( 99 + 'src="./vendor/marked.min.js"', 100 + `src="./vendor/marked.min.js?v=${buildId}"`, 101 + ); 102 + html = html.replaceAll( 103 + 'src="./app.js"', 104 + `src="./app.js?v=${buildId}"`, 105 + ); 106 + html = html.replaceAll( 107 + 'href="./favicon.svg"', 108 + `href="./favicon.svg?v=${buildId}"`, 109 + ); 110 + await writeFile(outIndex, html, "utf8"); 111 + } 112 + 113 + // Copy docs 114 + await mkdir(outDocsDir, { recursive: true }); 115 + 116 + const pages = []; 117 + 118 + // Prefer an explicit docs homepage if present; otherwise use repo README as index. 119 + const docsIndex = path.join(docsDir, "index.md"); 120 + if (!(await exists(docsIndex))) { 121 + const readme = path.join(repoRoot, "README.md"); 122 + if (await exists(readme)) { 123 + let md = await readFile(readme, "utf8"); 124 + // Strip docs/ prefix from links since we're now inside the docs context. 
125 + md = md.replace(/\]\(docs\//g, "]("); 126 + await writeFile(path.join(outDocsDir, "index.md"), md, "utf8"); 127 + pages.push({ 128 + path: "index.md", 129 + title: normalizeTitle(titleFromMarkdown(md, "index.md")), 130 + }); 131 + } 132 + } 133 + 134 + const changelog = path.join(repoRoot, "CHANGELOG.md"); 135 + const docsChangelog = path.join(docsDir, "changelog.md"); 136 + if ((await exists(changelog)) && !(await exists(docsChangelog))) { 137 + const md = await readFile(changelog, "utf8"); 138 + await writeFile(path.join(outDocsDir, "changelog.md"), md, "utf8"); 139 + pages.push({ 140 + path: "changelog.md", 141 + title: normalizeTitle(titleFromMarkdown(md, "changelog.md")), 142 + }); 143 + } 144 + 145 + const mdFiles = (await exists(docsDir)) ? await listMarkdownFiles(docsDir) : []; 146 + 147 + // Copy all markdown under docs/ (including archives), but only include non-archive 148 + // paths in the sidebar manifest. 149 + for (const rel of mdFiles) { 150 + const src = path.join(docsDir, rel); 151 + const dst = path.join(outDocsDir, rel); 152 + await mkdir(path.dirname(dst), { recursive: true }); 153 + await cp(src, dst); 154 + 155 + const md = await readFile(src, "utf8"); 156 + if (!rel.startsWith("archive/")) { 157 + pages.push({ path: rel, title: normalizeTitle(titleFromMarkdown(md, rel)) }); 158 + } 159 + } 160 + 161 + // Copy devlog files to docs/devlog/ and generate an index 162 + const devlogFiles = (await exists(devlogDir)) ? 
await listMarkdownFiles(devlogDir) : []; 163 + const devlogEntries = []; 164 + 165 + for (const rel of devlogFiles) { 166 + const src = path.join(devlogDir, rel); 167 + const dst = path.join(outDocsDir, "devlog", rel); 168 + await mkdir(path.dirname(dst), { recursive: true }); 169 + await cp(src, dst); 170 + 171 + const md = await readFile(src, "utf8"); 172 + devlogEntries.push({ 173 + path: `devlog/${rel}`, 174 + title: titleFromMarkdown(md, rel), 175 + }); 176 + } 177 + 178 + // Generate devlog index listing all entries (newest first by filename) 179 + if (devlogEntries.length > 0) { 180 + devlogEntries.sort((a, b) => b.path.localeCompare(a.path)); 181 + const indexMd = [ 182 + "# devlog", 183 + "", 184 + ...devlogEntries.map((e) => `- [${e.title}](${e.path})`), 185 + "", 186 + ].join("\n"); 187 + await writeFile(path.join(outDocsDir, "devlog", "index.md"), indexMd, "utf8"); 188 + } 189 + 190 + // Stable nav order: README homepage, then roadmap, then changelog, then the rest. 191 + pages.sort((a, b) => { 192 + const order = (p) => { 193 + if (p === "index.md") return 0; 194 + if (p === "roadmap.md") return 1; 195 + if (p === "changelog.md") return 2; 196 + return 3; 197 + }; 198 + const ao = order(a.path); 199 + const bo = order(b.path); 200 + if (ao !== bo) return ao - bo; 201 + return a.title.localeCompare(b.title); 202 + }); 203 + 204 + await writeFile( 205 + path.join(outDir, "manifest.json"), 206 + JSON.stringify({ pages }, null, 2) + "\n", 207 + "utf8", 208 + ); 209 + 210 + process.stdout.write( 211 + `Built Wisp docs site: ${pages.length} markdown file(s) -> ${outDir}\n`, 212 + ); 213 + } 214 + 215 + await main();
-181
scripts/build-wisp-docs.mjs
··· 1 - import { 2 - readdir, 3 - readFile, 4 - mkdir, 5 - rm, 6 - cp, 7 - writeFile, 8 - access, 9 - } from "node:fs/promises"; 10 - import path from "node:path"; 11 - import { execFile } from "node:child_process"; 12 - import { promisify } from "node:util"; 13 - 14 - const repoRoot = path.resolve(new URL("..", import.meta.url).pathname); 15 - const docsDir = path.join(repoRoot, "docs"); 16 - const siteSrcDir = path.join(repoRoot, "site"); 17 - const outDir = path.join(repoRoot, "site-out"); 18 - const outDocsDir = path.join(outDir, "docs"); 19 - 20 - const execFileAsync = promisify(execFile); 21 - 22 - async function exists(filePath) { 23 - try { 24 - await access(filePath); 25 - return true; 26 - } catch { 27 - return false; 28 - } 29 - } 30 - 31 - function isMarkdown(filePath) { 32 - return filePath.toLowerCase().endsWith(".md"); 33 - } 34 - 35 - async function listMarkdownFiles(dir, prefix = "") { 36 - const entries = await readdir(dir, { withFileTypes: true }); 37 - const out = []; 38 - for (const e of entries) { 39 - if (e.name.startsWith(".")) continue; 40 - const rel = path.join(prefix, e.name); 41 - const abs = path.join(dir, e.name); 42 - if (e.isDirectory()) { 43 - out.push(...(await listMarkdownFiles(abs, rel))); 44 - } else if (e.isFile() && isMarkdown(e.name)) { 45 - out.push(rel.replaceAll(path.sep, "/")); 46 - } 47 - } 48 - return out.sort((a, b) => a.localeCompare(b)); 49 - } 50 - 51 - function titleFromMarkdown(md, fallback) { 52 - const lines = md.split(/\r?\n/); 53 - for (const line of lines) { 54 - const m = /^#\s+(.+)\s*$/.exec(line); 55 - if (m) return m[1].trim(); 56 - } 57 - return fallback.replace(/\.md$/i, ""); 58 - } 59 - 60 - function normalizeTitle(title) { 61 - let t = String(title || "").trim(); 62 - // If pages follow a "zat - ..." style, drop the redundant prefix in the nav. 63 - t = t.replace(/^zat\s*-\s*/i, ""); 64 - // Cheaply capitalize (keeps the rest as-authored). 
65 - if (t.length) t = t[0].toUpperCase() + t.slice(1); 66 - return t; 67 - } 68 - 69 - async function getBuildId() { 70 - try { 71 - const { stdout } = await execFileAsync("git", ["rev-parse", "HEAD"], { 72 - cwd: repoRoot, 73 - }); 74 - const full = String(stdout || "").trim(); 75 - if (full) return full.slice(0, 12); 76 - } catch { 77 - // ignore 78 - } 79 - return String(Date.now()); 80 - } 81 - 82 - async function main() { 83 - await rm(outDir, { recursive: true, force: true }); 84 - await mkdir(outDir, { recursive: true }); 85 - 86 - // Copy static site shell 87 - await cp(siteSrcDir, outDir, { recursive: true }); 88 - 89 - // Cache-bust immutable assets on Wisp by appending a per-commit query string. 90 - const buildId = await getBuildId(); 91 - const outIndex = path.join(outDir, "index.html"); 92 - if (await exists(outIndex)) { 93 - let html = await readFile(outIndex, "utf8"); 94 - html = html.replaceAll('href="./style.css"', `href="./style.css?v=${buildId}"`); 95 - html = html.replaceAll( 96 - 'src="./vendor/marked.min.js"', 97 - `src="./vendor/marked.min.js?v=${buildId}"`, 98 - ); 99 - html = html.replaceAll( 100 - 'src="./app.js"', 101 - `src="./app.js?v=${buildId}"`, 102 - ); 103 - html = html.replaceAll( 104 - 'href="./favicon.svg"', 105 - `href="./favicon.svg?v=${buildId}"`, 106 - ); 107 - await writeFile(outIndex, html, "utf8"); 108 - } 109 - 110 - // Copy docs 111 - await mkdir(outDocsDir, { recursive: true }); 112 - 113 - const pages = []; 114 - 115 - // Prefer an explicit docs homepage if present; otherwise use repo README as index. 
116 - const docsIndex = path.join(docsDir, "index.md"); 117 - if (!(await exists(docsIndex))) { 118 - const readme = path.join(repoRoot, "README.md"); 119 - if (await exists(readme)) { 120 - const md = await readFile(readme, "utf8"); 121 - await writeFile(path.join(outDocsDir, "index.md"), md, "utf8"); 122 - pages.push({ 123 - path: "index.md", 124 - title: normalizeTitle(titleFromMarkdown(md, "index.md")), 125 - }); 126 - } 127 - } 128 - 129 - const changelog = path.join(repoRoot, "CHANGELOG.md"); 130 - const docsChangelog = path.join(docsDir, "changelog.md"); 131 - if ((await exists(changelog)) && !(await exists(docsChangelog))) { 132 - const md = await readFile(changelog, "utf8"); 133 - await writeFile(path.join(outDocsDir, "changelog.md"), md, "utf8"); 134 - pages.push({ 135 - path: "changelog.md", 136 - title: normalizeTitle(titleFromMarkdown(md, "changelog.md")), 137 - }); 138 - } 139 - 140 - const mdFiles = (await exists(docsDir)) ? await listMarkdownFiles(docsDir) : []; 141 - 142 - // Copy all markdown under docs/ (including archives), but only include non-archive 143 - // paths in the sidebar manifest. 144 - for (const rel of mdFiles) { 145 - const src = path.join(docsDir, rel); 146 - const dst = path.join(outDocsDir, rel); 147 - await mkdir(path.dirname(dst), { recursive: true }); 148 - await cp(src, dst); 149 - 150 - const md = await readFile(src, "utf8"); 151 - if (!rel.startsWith("archive/")) { 152 - pages.push({ path: rel, title: normalizeTitle(titleFromMarkdown(md, rel)) }); 153 - } 154 - } 155 - 156 - // Stable nav order: README homepage, then roadmap, then changelog, then the rest. 
157 - pages.sort((a, b) => { 158 - const order = (p) => { 159 - if (p === "index.md") return 0; 160 - if (p === "roadmap.md") return 1; 161 - if (p === "changelog.md") return 2; 162 - return 3; 163 - }; 164 - const ao = order(a.path); 165 - const bo = order(b.path); 166 - if (ao !== bo) return ao - bo; 167 - return a.title.localeCompare(b.title); 168 - }); 169 - 170 - await writeFile( 171 - path.join(outDir, "manifest.json"), 172 - JSON.stringify({ pages }, null, 2) + "\n", 173 - "utf8", 174 - ); 175 - 176 - process.stdout.write( 177 - `Built Wisp docs site: ${pages.length} markdown file(s) -> ${outDir}\n`, 178 - ); 179 - } 180 - 181 - await main();
+263
scripts/publish-docs.zig
··· 1 + const std = @import("std"); 2 + const zat = @import("zat"); 3 + 4 + const Allocator = std.mem.Allocator; 5 + 6 + const DocEntry = struct { path: []const u8, file: []const u8 }; 7 + 8 + /// docs to publish as site.standard.document records 9 + const docs = [_]DocEntry{ 10 + .{ .path = "/", .file = "README.md" }, 11 + .{ .path = "/roadmap", .file = "docs/roadmap.md" }, 12 + .{ .path = "/changelog", .file = "CHANGELOG.md" }, 13 + }; 14 + 15 + /// devlog entries 16 + const devlog = [_]DocEntry{ 17 + .{ .path = "/devlog/001", .file = "devlog/001-self-publishing-docs.md" }, 18 + }; 19 + 20 + pub fn main() !void { 21 + // use page_allocator for CLI tool - OS reclaims on exit 22 + const allocator = std.heap.page_allocator; 23 + 24 + const handle = "zat.dev"; 25 + 26 + const password = std.posix.getenv("ATPROTO_PASSWORD") orelse { 27 + std.debug.print("error: ATPROTO_PASSWORD not set\n", .{}); 28 + return error.MissingEnv; 29 + }; 30 + 31 + const pds = std.posix.getenv("ATPROTO_PDS") orelse "https://bsky.social"; 32 + 33 + var client = zat.XrpcClient.init(allocator, pds); 34 + defer client.deinit(); 35 + 36 + const session = try createSession(&client, allocator, handle, password); 37 + defer { 38 + allocator.free(session.did); 39 + allocator.free(session.access_token); 40 + } 41 + 42 + std.debug.print("authenticated as {s}\n", .{session.did}); 43 + client.setAuth(session.access_token); 44 + 45 + // generate TID for publication (fixed timestamp for deterministic rkey) 46 + // using 2024-01-01 00:00:00 UTC as base timestamp (1704067200 seconds = 1704067200000000 microseconds) 47 + const pub_tid = zat.Tid.fromTimestamp(1704067200000000, 0); 48 + const pub_record = Publication{ 49 + .url = "https://zat.dev", 50 + .name = "zat", 51 + .description = "AT Protocol building blocks for zig", 52 + }; 53 + 54 + try putRecord(&client, allocator, session.did, "site.standard.publication", pub_tid.str(), pub_record); 55 + std.debug.print("created publication: 
at://{s}/site.standard.publication/{s}\n", .{ session.did, pub_tid.str() }); 56 + 57 + var pub_uri_buf: std.ArrayList(u8) = .empty; 58 + defer pub_uri_buf.deinit(allocator); 59 + try pub_uri_buf.print(allocator, "at://{s}/site.standard.publication/{s}", .{ session.did, pub_tid.str() }); 60 + const pub_uri = pub_uri_buf.items; 61 + 62 + // publish each doc with deterministic TIDs (same base timestamp, incrementing clock_id) 63 + const now = timestamp(); 64 + 65 + for (docs, 0..) |doc, i| { 66 + const content = std.fs.cwd().readFileAlloc(allocator, doc.file, 1024 * 1024) catch |err| { 67 + std.debug.print("warning: could not read {s}: {}\n", .{ doc.file, err }); 68 + continue; 69 + }; 70 + defer allocator.free(content); 71 + 72 + const title = extractTitle(content) orelse doc.file; 73 + const tid = zat.Tid.fromTimestamp(1704067200000000, @intCast(i + 1)); // clock_id 1, 2, 3... 74 + 75 + const doc_record = Document{ 76 + .site = pub_uri, 77 + .title = title, 78 + .path = doc.path, 79 + .textContent = content, 80 + .publishedAt = &now, 81 + }; 82 + 83 + try putRecord(&client, allocator, session.did, "site.standard.document", tid.str(), doc_record); 84 + std.debug.print("published: {s} -> at://{s}/site.standard.document/{s}\n", .{ doc.file, session.did, tid.str() }); 85 + } 86 + 87 + // devlog publication (clock_id 100 to separate from docs) 88 + const devlog_tid = zat.Tid.fromTimestamp(1704067200000000, 100); 89 + const devlog_pub = Publication{ 90 + .url = "https://zat.dev", 91 + .name = "zat devlog", 92 + .description = "building zat in public", 93 + }; 94 + 95 + try putRecord(&client, allocator, session.did, "site.standard.publication", devlog_tid.str(), devlog_pub); 96 + std.debug.print("created publication: at://{s}/site.standard.publication/{s}\n", .{ session.did, devlog_tid.str() }); 97 + 98 + var devlog_uri_buf: std.ArrayList(u8) = .empty; 99 + defer devlog_uri_buf.deinit(allocator); 100 + try devlog_uri_buf.print(allocator, 
"at://{s}/site.standard.publication/{s}", .{ session.did, devlog_tid.str() }); 101 + const devlog_uri = devlog_uri_buf.items; 102 + 103 + // publish devlog entries (clock_id 101, 102, ...) 104 + for (devlog, 0..) |entry, i| { 105 + const content = std.fs.cwd().readFileAlloc(allocator, entry.file, 1024 * 1024) catch |err| { 106 + std.debug.print("warning: could not read {s}: {}\n", .{ entry.file, err }); 107 + continue; 108 + }; 109 + defer allocator.free(content); 110 + 111 + const title = extractTitle(content) orelse entry.file; 112 + const tid = zat.Tid.fromTimestamp(1704067200000000, @intCast(101 + i)); 113 + 114 + const doc_record = Document{ 115 + .site = devlog_uri, 116 + .title = title, 117 + .path = entry.path, 118 + .textContent = content, 119 + .publishedAt = &now, 120 + }; 121 + 122 + try putRecord(&client, allocator, session.did, "site.standard.document", tid.str(), doc_record); 123 + std.debug.print("published: {s} -> at://{s}/site.standard.document/{s}\n", .{ entry.file, session.did, tid.str() }); 124 + } 125 + 126 + std.debug.print("done\n", .{}); 127 + } 128 + 129 + const Publication = struct { 130 + @"$type": []const u8 = "site.standard.publication", 131 + url: []const u8, 132 + name: []const u8, 133 + description: ?[]const u8 = null, 134 + }; 135 + 136 + const Document = struct { 137 + @"$type": []const u8 = "site.standard.document", 138 + site: []const u8, 139 + title: []const u8, 140 + path: ?[]const u8 = null, 141 + textContent: ?[]const u8 = null, 142 + publishedAt: []const u8, 143 + }; 144 + 145 + const Session = struct { 146 + did: []const u8, 147 + access_token: []const u8, 148 + }; 149 + 150 + fn createSession(client: *zat.XrpcClient, allocator: Allocator, handle: []const u8, password: []const u8) !Session { 151 + const CreateSessionInput = struct { 152 + identifier: []const u8, 153 + password: []const u8, 154 + }; 155 + 156 + var buf: std.ArrayList(u8) = .empty; 157 + defer buf.deinit(allocator); 158 + try buf.print(allocator, "{f}", 
.{std.json.fmt(CreateSessionInput{ 159 + .identifier = handle, 160 + .password = password, 161 + }, .{})}); 162 + 163 + const nsid = zat.Nsid.parse("com.atproto.server.createSession").?; 164 + var response = try client.procedure(nsid, buf.items); 165 + defer response.deinit(); 166 + 167 + if (!response.ok()) { 168 + std.debug.print("createSession failed: {s}\n", .{response.body}); 169 + return error.AuthFailed; 170 + } 171 + 172 + var parsed = try response.json(); 173 + defer parsed.deinit(); 174 + 175 + const did = zat.json.getString(parsed.value, "did") orelse return error.MissingDid; 176 + const token = zat.json.getString(parsed.value, "accessJwt") orelse return error.MissingToken; 177 + 178 + return .{ 179 + .did = try allocator.dupe(u8, did), 180 + .access_token = try allocator.dupe(u8, token), 181 + }; 182 + } 183 + 184 + fn putRecord(client: *zat.XrpcClient, allocator: Allocator, repo: []const u8, collection: []const u8, rkey: []const u8, record: anytype) !void { 185 + // serialize record to json 186 + var record_buf: std.ArrayList(u8) = .empty; 187 + defer record_buf.deinit(allocator); 188 + try record_buf.print(allocator, "{f}", .{std.json.fmt(record, .{})}); 189 + 190 + // build request body 191 + var body: std.ArrayList(u8) = .empty; 192 + defer body.deinit(allocator); 193 + 194 + try body.appendSlice(allocator, "{\"repo\":\""); 195 + try body.appendSlice(allocator, repo); 196 + try body.appendSlice(allocator, "\",\"collection\":\""); 197 + try body.appendSlice(allocator, collection); 198 + try body.appendSlice(allocator, "\",\"rkey\":\""); 199 + try body.appendSlice(allocator, rkey); 200 + try body.appendSlice(allocator, "\",\"record\":"); 201 + try body.appendSlice(allocator, record_buf.items); 202 + try body.append(allocator, '}'); 203 + 204 + const nsid = zat.Nsid.parse("com.atproto.repo.putRecord").?; 205 + var response = try client.procedure(nsid, body.items); 206 + defer response.deinit(); 207 + 208 + if (!response.ok()) { 209 + 
std.debug.print("putRecord failed: {s}\n", .{response.body}); 210 + return error.PutFailed; 211 + } 212 + } 213 + 214 + fn extractTitle(content: []const u8) ?[]const u8 { 215 + var lines = std.mem.splitScalar(u8, content, '\n'); 216 + while (lines.next()) |line| { 217 + const trimmed = std.mem.trim(u8, line, " \t\r"); 218 + if (trimmed.len > 2 and trimmed[0] == '#' and trimmed[1] == ' ') { 219 + var title = trimmed[2..]; 220 + // strip markdown link: [text](url) -> text 221 + if (std.mem.indexOf(u8, title, "](")) |bracket| { 222 + if (title[0] == '[') { 223 + title = title[1..bracket]; 224 + } 225 + } 226 + return title; 227 + } 228 + } 229 + return null; 230 + } 231 + 232 + fn timestamp() [20]u8 { 233 + const epoch_seconds = std.time.timestamp(); 234 + const days: i32 = @intCast(@divFloor(epoch_seconds, std.time.s_per_day)); 235 + const day_secs: u32 = @intCast(@mod(epoch_seconds, std.time.s_per_day)); 236 + 237 + // calculate year/month/day from days since epoch (1970-01-01) 238 + var y: i32 = 1970; 239 + var remaining = days; 240 + while (true) { 241 + const year_days: i32 = if (@mod(y, 4) == 0 and (@mod(y, 100) != 0 or @mod(y, 400) == 0)) 366 else 365; 242 + if (remaining < year_days) break; 243 + remaining -= year_days; 244 + y += 1; 245 + } 246 + 247 + const is_leap = @mod(y, 4) == 0 and (@mod(y, 100) != 0 or @mod(y, 400) == 0); 248 + const month_days = [12]u8{ 31, if (is_leap) 29 else 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 }; 249 + var m: usize = 0; 250 + while (m < 12 and remaining >= month_days[m]) : (m += 1) { 251 + remaining -= month_days[m]; 252 + } 253 + 254 + const hours = day_secs / 3600; 255 + const mins = (day_secs % 3600) / 60; 256 + const secs = day_secs % 60; 257 + 258 + var buf: [20]u8 = undefined; 259 + _ = std.fmt.bufPrint(&buf, "{d:0>4}-{d:0>2}-{d:0>2}T{d:0>2}:{d:0>2}:{d:0>2}Z", .{ 260 + @as(u32, @intCast(y)), @as(u32, @intCast(m + 1)), @as(u32, @intCast(remaining + 1)), hours, mins, secs, 261 + }) catch unreachable; 262 + return buf; 
263 + }
+20 -1
site/app.js
··· 1 1 const navEl = document.getElementById("nav"); 2 2 const contentEl = document.getElementById("content"); 3 + const menuToggle = document.querySelector(".menu-toggle"); 4 + const sidebar = document.querySelector(".sidebar"); 5 + const overlay = document.querySelector(".overlay"); 6 + 7 + function toggleMenu(open) { 8 + const isOpen = open ?? !sidebar.classList.contains("open"); 9 + sidebar.classList.toggle("open", isOpen); 10 + overlay?.classList.toggle("open", isOpen); 11 + menuToggle?.setAttribute("aria-expanded", isOpen); 12 + document.body.style.overflow = isOpen ? "hidden" : ""; 13 + } 14 + 15 + menuToggle?.addEventListener("click", () => toggleMenu()); 16 + overlay?.addEventListener("click", () => toggleMenu(false)); 17 + 18 + // Close menu when nav link clicked (mobile) 19 + navEl?.addEventListener("click", (e) => { 20 + if (e.target.closest("a")) toggleMenu(false); 21 + }); 3 22 4 23 const buildId = new URL(import.meta.url).searchParams.get("v") || ""; 5 24 ··· 120 139 } 121 140 122 141 try { 123 - const md = await fetchText(`./docs/${encodeURIComponent(activePath)}`); 142 + const md = await fetchText(`./docs/${activePath}`); 124 143 const html = globalThis.marked.parse(md); 125 144 contentEl.innerHTML = html; 126 145
+8 -8
site/index.html
··· 11 11 <body> 12 12 <div class="app"> 13 13 <header class="header"> 14 + <button class="menu-toggle" aria-label="Toggle navigation" aria-expanded="false"> 15 + <span></span> 16 + </button> 14 17 <a class="brand" href="./">zat.dev</a> 15 - <a 16 - class="header-link" 17 - href="https://tangled.org/zzstoatzz.io/zat" 18 - target="_blank" 19 - rel="noopener noreferrer" 20 - > 21 - repo 22 - </a> 18 + <div class="header-links"> 19 + <a class="header-link" href="#devlog/index.md">devlog</a> 20 + <a class="header-link" href="https://tangled.sh/zat.dev/zat" target="_blank" rel="noopener noreferrer">repo</a> 21 + </div> 23 22 </header> 24 23 24 + <div class="overlay"></div> 25 25 <div class="layout"> 26 26 <nav class="sidebar"> 27 27 <div id="nav" class="nav"></div>
+202 -60
site/style.css
··· 10 10 --shadow: rgba(0, 0, 0, 0.35); 11 11 --max: 900px; 12 12 --radius: 12px; 13 + --gutter: 16px; 13 14 --mono: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", 14 15 "Courier New", monospace; 15 16 --sans: ui-sans-serif, system-ui, -apple-system, Segoe UI, Roboto, Helvetica, ··· 27 28 --codebg: rgba(0, 0, 0, 0.04); 28 29 --shadow: rgba(0, 0, 0, 0.08); 29 30 } 31 + } 32 + 33 + * { 34 + box-sizing: border-box; 30 35 } 31 36 32 37 html, ··· 49 54 text-decoration: underline; 50 55 } 51 56 57 + /* App shell */ 52 58 .app { 53 59 min-height: 100%; 54 60 display: flex; 55 61 flex-direction: column; 56 62 } 57 63 64 + /* Header */ 58 65 .header { 59 66 position: sticky; 60 67 top: 0; 61 - z-index: 5; 68 + z-index: 20; 62 69 display: flex; 63 - gap: 12px; 64 70 align-items: center; 65 - padding: 12px 16px; 71 + gap: 12px; 72 + padding: 12px var(--gutter); 66 73 border-bottom: 1px solid var(--border); 67 74 background: color-mix(in srgb, var(--panel) 92%, transparent); 68 75 backdrop-filter: blur(10px); 69 76 } 70 77 78 + .menu-toggle { 79 + display: none; 80 + align-items: center; 81 + justify-content: center; 82 + width: 36px; 83 + height: 36px; 84 + padding: 0; 85 + background: transparent; 86 + border: 1px solid var(--border); 87 + border-radius: 8px; 88 + cursor: pointer; 89 + flex-shrink: 0; 90 + } 91 + .menu-toggle span { 92 + display: block; 93 + width: 16px; 94 + height: 2px; 95 + background: var(--text); 96 + border-radius: 1px; 97 + position: relative; 98 + } 99 + .menu-toggle span::before, 100 + .menu-toggle span::after { 101 + content: ""; 102 + position: absolute; 103 + left: 0; 104 + width: 16px; 105 + height: 2px; 106 + background: var(--text); 107 + border-radius: 1px; 108 + transition: transform 0.2s; 109 + } 110 + .menu-toggle span::before { 111 + top: -5px; 112 + } 113 + .menu-toggle span::after { 114 + top: 5px; 115 + } 116 + .menu-toggle[aria-expanded="true"] span { 117 + background: transparent; 118 + } 119 + 
.menu-toggle[aria-expanded="true"] span::before { 120 + transform: translateY(5px) rotate(45deg); 121 + } 122 + .menu-toggle[aria-expanded="true"] span::after { 123 + transform: translateY(-5px) rotate(-45deg); 124 + } 125 + 71 126 .brand { 72 127 font-weight: 700; 73 - letter-spacing: 0.2px; 74 - padding: 6px 10px; 75 - border-radius: 10px; 128 + font-size: 15px; 129 + color: var(--text); 130 + padding: 6px 0; 76 131 } 77 132 .brand:hover { 78 - background: color-mix(in srgb, var(--codebg) 70%, transparent); 79 133 text-decoration: none; 134 + opacity: 0.8; 135 + } 136 + 137 + .header-links { 138 + display: flex; 139 + gap: 8px; 140 + margin-left: auto; 80 141 } 81 142 82 143 .header-link { 83 - margin-left: auto; 84 - padding: 8px 10px; 85 - border-radius: 10px; 144 + padding: 6px 12px; 145 + font-size: 14px; 146 + border-radius: 8px; 86 147 border: 1px solid var(--border); 87 148 color: var(--text); 88 - opacity: 0.9; 89 149 } 90 150 .header-link:hover { 91 - background: color-mix(in srgb, var(--codebg) 70%, transparent); 151 + background: var(--codebg); 92 152 text-decoration: none; 93 - opacity: 1; 94 153 } 95 154 155 + /* Overlay */ 156 + .overlay { 157 + display: none; 158 + position: fixed; 159 + inset: 0; 160 + z-index: 15; 161 + background: rgba(0, 0, 0, 0.5); 162 + } 163 + .overlay.open { 164 + display: block; 165 + } 166 + 167 + /* Layout */ 96 168 .layout { 97 - display: grid; 98 - grid-template-columns: 280px 1fr; 99 - gap: 16px; 100 - padding: 16px; 169 + display: flex; 170 + gap: var(--gutter); 171 + padding: var(--gutter); 101 172 flex: 1; 102 - } 103 - 104 - @media (max-width: 980px) { 105 - .layout { 106 - grid-template-columns: 1fr; 107 - } 108 - .sidebar { 109 - position: relative; 110 - top: auto; 111 - max-height: none; 112 - } 173 + max-width: 1200px; 174 + margin: 0 auto; 175 + width: 100%; 113 176 } 114 177 178 + /* Sidebar */ 115 179 .sidebar { 180 + width: 240px; 181 + flex-shrink: 0; 116 182 position: sticky; 117 - top: 64px; 118 - 
align-self: start; 119 - max-height: calc(100vh - 84px); 120 - overflow: auto; 183 + top: 72px; 184 + align-self: flex-start; 185 + max-height: calc(100vh - 88px); 186 + overflow-y: auto; 121 187 border: 1px solid var(--border); 122 188 border-radius: var(--radius); 123 189 background: var(--panel); 124 - box-shadow: 0 12px 40px var(--shadow); 125 190 } 126 191 127 192 .nav { ··· 133 198 134 199 .nav a { 135 200 display: block; 136 - padding: 8px 10px; 137 - border-radius: 10px; 201 + padding: 10px 12px; 202 + border-radius: 8px; 138 203 color: var(--text); 139 - opacity: 0.9; 204 + font-size: 14px; 140 205 } 141 206 .nav a:hover { 142 - background: color-mix(in srgb, var(--codebg) 70%, transparent); 207 + background: var(--codebg); 143 208 text-decoration: none; 144 209 } 145 210 .nav a[aria-current="page"] { 146 - background: color-mix(in srgb, var(--link) 14%, var(--codebg)); 147 - border: 1px solid color-mix(in srgb, var(--link) 20%, var(--border)); 211 + background: color-mix(in srgb, var(--link) 15%, transparent); 148 212 } 149 213 214 + /* Main content */ 150 215 .main { 151 - display: flex; 152 - justify-content: center; 216 + flex: 1; 217 + min-width: 0; 153 218 } 154 219 155 220 .content { 156 - width: min(var(--max), 100%); 157 221 border: 1px solid var(--border); 158 222 border-radius: var(--radius); 159 223 background: var(--panel); 160 - box-shadow: 0 12px 40px var(--shadow); 161 224 padding: 24px; 162 225 } 163 226 227 + /* Footer */ 164 228 .site-footer { 165 - display: flex; 166 - justify-content: center; 167 - padding: 12px 16px; 229 + padding: 16px var(--gutter); 230 + text-align: center; 168 231 border-top: 1px solid var(--border); 169 - background: var(--panel); 170 232 } 171 233 172 234 .footer-link { 173 235 font-size: 13px; 174 236 color: var(--muted); 175 - padding: 6px 10px; 176 - border-radius: 10px; 177 - border: 1px solid transparent; 178 237 } 179 238 .footer-link:hover { 180 239 color: var(--text); 181 - background: color-mix(in srgb, 
var(--codebg) 70%, transparent); 182 - border-color: var(--border); 183 240 text-decoration: none; 184 241 } 185 242 243 + /* Content typography */ 186 244 .content h1, 187 245 .content h2, 188 246 .content h3 { 189 - scroll-margin-top: 84px; 247 + scroll-margin-top: 80px; 190 248 } 191 249 192 250 .content h1 { 193 251 margin-top: 0; 194 - font-size: 34px; 252 + font-size: 28px; 253 + } 254 + 255 + .content h2 { 256 + font-size: 20px; 257 + margin-top: 32px; 258 + } 259 + 260 + .content h3 { 261 + font-size: 16px; 262 + margin-top: 24px; 195 263 } 196 264 197 265 .content p, 198 266 .content li { 199 - line-height: 1.6; 267 + line-height: 1.65; 200 268 } 201 269 202 270 .content code { 203 271 font-family: var(--mono); 204 - font-size: 0.95em; 272 + font-size: 0.9em; 205 273 background: var(--codebg); 206 274 padding: 2px 6px; 207 - border-radius: 8px; 275 + border-radius: 6px; 208 276 } 209 277 210 278 .content pre { 211 - overflow: auto; 212 - padding: 14px 16px; 213 - border-radius: 12px; 279 + overflow-x: auto; 280 + padding: 16px; 281 + border-radius: 10px; 214 282 background: var(--codebg); 215 283 border: 1px solid var(--border); 284 + font-size: 14px; 285 + line-height: 1.5; 216 286 } 217 287 218 288 .content pre code { ··· 220 290 padding: 0; 221 291 } 222 292 293 + .content details { 294 + margin: 16px 0; 295 + } 296 + 297 + .content details summary { 298 + cursor: pointer; 299 + padding: 8px 0; 300 + } 301 + 223 302 .empty { 224 303 color: var(--muted); 225 304 } 305 + 306 + /* Mobile */ 307 + @media (max-width: 768px) { 308 + :root { 309 + --gutter: 16px; 310 + } 311 + 312 + .menu-toggle { 313 + display: flex; 314 + } 315 + 316 + .layout { 317 + flex-direction: column; 318 + } 319 + 320 + .sidebar { 321 + position: fixed; 322 + top: 0; 323 + left: 0; 324 + bottom: 0; 325 + width: 280px; 326 + max-width: 80vw; 327 + z-index: 16; 328 + border: none; 329 + border-radius: 0; 330 + border-right: 1px solid var(--border); 331 + max-height: none; 332 + 
padding-top: 60px; 333 + transform: translateX(-100%); 334 + transition: transform 0.2s ease-out; 335 + } 336 + 337 + .sidebar.open { 338 + transform: translateX(0); 339 + } 340 + 341 + .nav { 342 + padding: 12px; 343 + } 344 + 345 + .nav a { 346 + padding: 12px 14px; 347 + font-size: 15px; 348 + } 349 + 350 + .content { 351 + padding: 20px; 352 + border-radius: 10px; 353 + } 354 + 355 + .content h1 { 356 + font-size: 24px; 357 + } 358 + 359 + .content h2 { 360 + font-size: 18px; 361 + } 362 + 363 + .content pre { 364 + font-size: 13px; 365 + padding: 14px; 366 + } 367 + }
+1 -1
src/internal/handle_resolver.zig
··· 70 70 return try self.allocator.dupe(u8, did_str); 71 71 } 72 72 73 - /// resolve via DoH  default: https://cloudflare-dns.com/dns-query 73 + /// resolve via DoH (default: https://cloudflare-dns.com/dns-query) 74 74 pub fn resolveDns(self: *HandleResolver, handle: Handle) ![]const u8 { 75 75 const dns_name = try std.fmt.allocPrint( 76 76 self.allocator,
+107 -3
src/internal/json.zig
··· 6 6 //! two approaches: 7 7 //! - runtime paths: getString(value, "embed.external.uri") - for dynamic paths 8 8 //! - comptime paths: extractAt(T, alloc, value, .{"embed", "external"}) - for static paths with type safety 9 + //! 10 + //! debug logging: 11 + //! enable with `pub const std_options: std.Options = .{ .log_scope_levels = &.{.{ .scope = .zat, .level = .debug }} };` 9 12 10 13 const std = @import("std"); 14 + const log = std.log.scoped(.zat); 11 15 12 16 /// navigate a json value by dot-separated path 13 17 /// returns null if any segment is missing or wrong type ··· 92 96 /// extract a typed struct from a nested path 93 97 /// uses comptime tuple for path segments - no runtime string parsing 94 98 /// leverages std.json.parseFromValueLeaky for type-safe extraction 99 + /// 100 + /// on failure, logs diagnostic info when debug logging is enabled for .zat scope 95 101 pub fn extractAt( 96 102 comptime T: type, 97 103 allocator: std.mem.Allocator, ··· 101 107 var current = value; 102 108 inline for (path) |segment| { 103 109 current = switch (current) { 104 - .object => |obj| obj.get(segment) orelse return error.MissingField, 105 - else => return error.UnexpectedToken, 110 + .object => |obj| obj.get(segment) orelse { 111 + log.debug("extractAt: missing field \"{s}\" in path {any}, expected {s}", .{ 112 + segment, 113 + path, 114 + @typeName(T), 115 + }); 116 + return error.MissingField; 117 + }, 118 + else => { 119 + log.debug("extractAt: expected object at \"{s}\" in path {any}, got {s}", .{ 120 + segment, 121 + path, 122 + @tagName(current), 123 + }); 124 + return error.UnexpectedToken; 125 + }, 106 126 }; 107 127 } 108 - return std.json.parseFromValueLeaky(T, allocator, current, .{}); 128 + return std.json.parseFromValueLeaky(T, allocator, current, .{ .ignore_unknown_fields = true }) catch |err| { 129 + log.debug("extractAt: parse failed for {s} at path {any}: {s} (json type: {s})", .{ 130 + @typeName(T), 131 + path, 132 + @errorName(err), 133 + 
@tagName(current), 134 + }); 135 + return err; 136 + }; 109 137 } 110 138 111 139 /// extract a typed value, returning null if path doesn't exist ··· 278 306 const missing = extractAtOptional(Thing, arena.allocator(), parsed.value, .{"missing"}); 279 307 try std.testing.expect(missing == null); 280 308 } 309 + 310 + test "extractAt logs diagnostic on enum parse failure" { 311 + // simulates the issue: unknown enum value from external API 312 + const json_str = 313 + \\{"op": {"action": "archive", "path": "app.bsky.feed.post/abc"}} 314 + ; 315 + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); 316 + defer arena.deinit(); 317 + 318 + const parsed = try std.json.parseFromSlice(std.json.Value, arena.allocator(), json_str, .{}); 319 + 320 + const Action = enum { create, update, delete }; 321 + const Op = struct { 322 + action: Action, 323 + path: []const u8, 324 + }; 325 + 326 + // "archive" is not a valid Action variant - this should fail 327 + // with debug logging enabled, you'd see: 328 + // debug(zat): extractAt: parse failed for json.Op at path { "op" }: InvalidEnumTag (json type: object) 329 + const result = extractAtOptional(Op, arena.allocator(), parsed.value, .{"op"}); 330 + try std.testing.expect(result == null); 331 + } 332 + 333 + test "extractAt logs diagnostic on missing field" { 334 + const json_str = 335 + \\{"data": {"name": "test"}} 336 + ; 337 + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); 338 + defer arena.deinit(); 339 + 340 + const parsed = try std.json.parseFromSlice(std.json.Value, arena.allocator(), json_str, .{}); 341 + 342 + const Thing = struct { value: i64 }; 343 + 344 + // path "data.missing" doesn't exist 345 + // with debug logging enabled, you'd see: 346 + // debug(zat): extractAt: missing field "missing" in path { "data", "missing" }, expected json.Thing 347 + const result = extractAtOptional(Thing, arena.allocator(), parsed.value, .{ "data", "missing" }); 348 + try std.testing.expect(result == 
null); 349 + } 350 + 351 + test "extractAt ignores unknown fields" { 352 + // real-world case: TAP messages have extra fields (live, rev, cid) that we don't need 353 + const json_str = 354 + \\{ 355 + \\ "record": { 356 + \\ "live": true, 357 + \\ "did": "did:plc:abc123", 358 + \\ "rev": "3mbspmpaidl2a", 359 + \\ "collection": "pub.leaflet.document", 360 + \\ "rkey": "xyz789", 361 + \\ "action": "create", 362 + \\ "cid": "bafyreitest" 363 + \\ } 364 + \\} 365 + ; 366 + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); 367 + defer arena.deinit(); 368 + 369 + const parsed = try std.json.parseFromSlice(std.json.Value, arena.allocator(), json_str, .{}); 370 + 371 + // only extract the fields we care about 372 + const Record = struct { 373 + collection: []const u8, 374 + action: []const u8, 375 + did: []const u8, 376 + rkey: []const u8, 377 + }; 378 + 379 + const rec = try extractAt(Record, arena.allocator(), parsed.value, .{"record"}); 380 + try std.testing.expectEqualStrings("pub.leaflet.document", rec.collection); 381 + try std.testing.expectEqualStrings("create", rec.action); 382 + try std.testing.expectEqualStrings("did:plc:abc123", rec.did); 383 + try std.testing.expectEqualStrings("xyz789", rec.rkey); 384 + }
+5 -1
src/internal/xrpc.zig
··· 18 18 /// bearer token for authenticated requests 19 19 access_token: ?[]const u8 = null, 20 20 21 + /// atproto JWTs are ~1KB; buffer needs room for "Bearer " prefix 22 + const max_auth_header_len = 2048; 23 + 21 24 pub fn init(allocator: std.mem.Allocator, host: []const u8) XrpcClient { 22 25 return .{ 23 26 .allocator = allocator, ··· 89 92 // https://github.com/ziglang/zig/issues/25021 90 93 var extra_headers: std.http.Client.Request.Headers = .{ 91 94 .accept_encoding = .{ .override = "identity" }, 95 + .content_type = if (body != null) .{ .override = "application/json" } else .default, 92 96 }; 93 - var auth_header_buf: [256]u8 = undefined; 97 + var auth_header_buf: [max_auth_header_len]u8 = undefined; 94 98 if (self.access_token) |token| { 95 99 const auth_value = try std.fmt.bufPrint(&auth_header_buf, "Bearer {s}", .{token}); 96 100 extra_headers.authorization = .{ .override = auth_value };