···1414 WISP_SITE_NAME: "docs"
15151616steps:
1717- - name: build docs site
1717+ - name: build site
1818 command: |
1919- bun ./scripts/build-wisp-docs.mjs
1919+ bun ./scripts/build-site.mjs
20202121 - name: deploy docs to wisp
2222 command: |
+4
CHANGELOG.md
···11# changelog
2233+## 0.1.2
44+55+- `extractAt` now logs diagnostic info on parse failures (enable with `.zat` debug scope)
66+37## 0.1.1
4859- xrpc client sets `Content-Type: application/json` for POST requests
+35
devlog/001-self-publishing-docs.md
···11+# zat publishes its own docs to ATProto
22+33+zat uses itself to publish these docs as `site.standard.document` records. here's how.
44+55+## the idea
66+77+i'm working on [search for leaflet](https://leaflet-search.pages.dev/) and more generally, search for [standard.site](https://standard.site/) records. many are [currently thinking about how to facilitate better idea sharing on atproto](https://bsky.app/profile/eugenevinitsky.bsky.social/post/3mbpqpylv3s2e).
88+99+this is me doing a rep of shipping a "standard.site", so i know what i'll be searching through, and to better understand why blogging platforms choose their schema extensions etc. before i start indexing/searching their record types.
1010+1111+## what we built
1212+1313+a zig script ([`scripts/publish-docs.zig`](https://tangled.sh/zat.dev/zat/tree/main/scripts/publish-docs.zig)) that:
1414+1515+1. authenticates with the PDS via `com.atproto.server.createSession`
1616+2. creates a `site.standard.publication` record
1717+3. publishes each doc as a `site.standard.document` pointing to that publication
1818+4. uses deterministic TIDs so records get the same rkey every time (idempotent updates)
1919+2020+## the mechanics
2121+2222+### TIDs
2323+2424+timestamp identifiers. base32-sortable. we use a fixed base timestamp with incrementing clock_id so each doc gets a stable rkey:
2525+2626+```zig
2727+const pub_tid = zat.Tid.fromTimestamp(1704067200000000, 0); // publication
2828+const doc_tid = zat.Tid.fromTimestamp(1704067200000000, i + 1); // docs get 1, 2, 3...
2929+```
3030+3131+### CI
3232+3333+[`.tangled/workflows/publish-docs.yml`](https://tangled.sh/zat.dev/zat/tree/main/.tangled/workflows/publish-docs.yml) triggers on `v*` tags. tag a release, docs publish automatically.
3434+3535+`putRecord` with the same rkey overwrites the existing record, so the CI job refreshes the `standard.site` records whenever you cut a tag.
+215
scripts/build-site.mjs
import {
  readdir,
  readFile,
  mkdir,
  rm,
  cp,
  writeFile,
  access,
} from "node:fs/promises";
import path from "node:path";
import { execFile } from "node:child_process";
import { promisify } from "node:util";
import { fileURLToPath } from "node:url";
// Repo-relative paths, resolved from this script's own location so the
// build works regardless of the current working directory.
// fileURLToPath (instead of URL.pathname) correctly handles Windows
// drive letters and percent-encoded characters such as spaces.
const repoRoot = path.resolve(fileURLToPath(new URL("..", import.meta.url)));
const docsDir = path.join(repoRoot, "docs");
const devlogDir = path.join(repoRoot, "devlog");
const siteSrcDir = path.join(repoRoot, "site");
const outDir = path.join(repoRoot, "site-out");
const outDocsDir = path.join(outDir, "docs");

// Promise-returning execFile, used for shelling out to git.
const execFileAsync = promisify(execFile);
// Report whether `filePath` is accessible on disk. Any access error
// (missing file, permission denied) is treated as "does not exist".
async function exists(filePath) {
  let reachable = true;
  try {
    await access(filePath);
  } catch {
    reachable = false;
  }
  return reachable;
}
// A path counts as markdown when its name ends in ".md", case-insensitively.
function isMarkdown(filePath) {
  return /\.md$/i.test(filePath);
}
// Recursively collect markdown files under `dir`, returning their paths
// relative to `dir` with forward slashes, sorted lexicographically.
// Dot-prefixed files and directories are skipped entirely.
async function listMarkdownFiles(dir, prefix = "") {
  const found = [];
  for (const entry of await readdir(dir, { withFileTypes: true })) {
    if (entry.name.startsWith(".")) continue;
    const relPath = path.join(prefix, entry.name);
    const absPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      const nested = await listMarkdownFiles(absPath, relPath);
      found.push(...nested);
    } else if (entry.isFile() && isMarkdown(entry.name)) {
      found.push(relPath.replaceAll(path.sep, "/"));
    }
  }
  return found.sort((x, y) => x.localeCompare(y));
}
// Pull the first level-1 heading out of markdown `md`; when none is
// found, fall back to `fallback` with any trailing ".md" stripped.
function titleFromMarkdown(md, fallback) {
  for (const line of md.split(/\r?\n/)) {
    const heading = /^#\s+(.+)\s*$/.exec(line);
    if (heading) return heading[1].trim();
  }
  return fallback.replace(/\.md$/i, "");
}
// Tidy a page title for the sidebar nav: flatten markdown links to their
// text, drop a redundant leading "zat - " prefix, and capitalize the
// first character (the rest is kept as-authored).
function normalizeTitle(title) {
  const flattened = String(title || "")
    .trim()
    .replace(/\[([^\]]+)\]\([^)]+\)/g, "$1")
    .replace(/^zat\s*-\s*/i, "");
  if (flattened.length === 0) return flattened;
  return flattened[0].toUpperCase() + flattened.slice(1);
}
// Short build identifier for cache-busting: the current git HEAD
// truncated to 12 hex chars when git is available, otherwise a
// millisecond timestamp so every fallback build still gets a fresh id.
async function getBuildId() {
  let head = "";
  try {
    const result = await execFileAsync("git", ["rev-parse", "HEAD"], {
      cwd: repoRoot,
    });
    head = String(result.stdout || "").trim();
  } catch {
    // git missing or not a repo — fall through to the timestamp.
  }
  return head ? head.slice(0, 12) : String(Date.now());
}
/**
 * Build the static site into `site-out/`:
 *   1. copy the `site/` shell, cache-busting asset URLs in index.html,
 *   2. copy docs/ markdown (with a README fallback homepage and a
 *      CHANGELOG.md mirror when docs/ has no copies of its own),
 *   3. copy devlog/ entries and generate a devlog index page,
 *   4. write `manifest.json` describing the sidebar nav (sorted,
 *      archive/ and devlog files excluded).
 * Reads the module-level path constants; any I/O error propagates.
 */
async function main() {
  // Start every build from a clean output directory.
  await rm(outDir, { recursive: true, force: true });
  await mkdir(outDir, { recursive: true });

  // Copy static site shell
  await cp(siteSrcDir, outDir, { recursive: true });

  // Cache-bust immutable assets on Wisp by appending a per-commit query string.
  // Only exact attribute strings are rewritten; new assets added to
  // index.html need a matching replaceAll here.
  const buildId = await getBuildId();
  const outIndex = path.join(outDir, "index.html");
  if (await exists(outIndex)) {
    let html = await readFile(outIndex, "utf8");
    html = html.replaceAll('href="./style.css"', `href="./style.css?v=${buildId}"`);
    html = html.replaceAll(
      'src="./vendor/marked.min.js"',
      `src="./vendor/marked.min.js?v=${buildId}"`,
    );
    html = html.replaceAll(
      'src="./app.js"',
      `src="./app.js?v=${buildId}"`,
    );
    html = html.replaceAll(
      'href="./favicon.svg"',
      `href="./favicon.svg?v=${buildId}"`,
    );
    await writeFile(outIndex, html, "utf8");
  }

  // Copy docs
  await mkdir(outDocsDir, { recursive: true });

  // Sidebar nav entries ({ path, title }), serialized into manifest.json.
  const pages = [];

  // Prefer an explicit docs homepage if present; otherwise use repo README as index.
  const docsIndex = path.join(docsDir, "index.md");
  if (!(await exists(docsIndex))) {
    const readme = path.join(repoRoot, "README.md");
    if (await exists(readme)) {
      let md = await readFile(readme, "utf8");
      // Strip docs/ prefix from links since we're now inside the docs context.
      md = md.replace(/\]\(docs\//g, "](");
      await writeFile(path.join(outDocsDir, "index.md"), md, "utf8");
      pages.push({
        path: "index.md",
        title: normalizeTitle(titleFromMarkdown(md, "index.md")),
      });
    }
  }

  // Mirror the repo CHANGELOG into docs unless docs/ ships its own copy.
  const changelog = path.join(repoRoot, "CHANGELOG.md");
  const docsChangelog = path.join(docsDir, "changelog.md");
  if ((await exists(changelog)) && !(await exists(docsChangelog))) {
    const md = await readFile(changelog, "utf8");
    await writeFile(path.join(outDocsDir, "changelog.md"), md, "utf8");
    pages.push({
      path: "changelog.md",
      title: normalizeTitle(titleFromMarkdown(md, "changelog.md")),
    });
  }

  const mdFiles = (await exists(docsDir)) ? await listMarkdownFiles(docsDir) : [];

  // Copy all markdown under docs/ (including archives), but only include non-archive
  // paths in the sidebar manifest.
  for (const rel of mdFiles) {
    const src = path.join(docsDir, rel);
    const dst = path.join(outDocsDir, rel);
    await mkdir(path.dirname(dst), { recursive: true });
    await cp(src, dst);

    const md = await readFile(src, "utf8");
    if (!rel.startsWith("archive/")) {
      pages.push({ path: rel, title: normalizeTitle(titleFromMarkdown(md, rel)) });
    }
  }

  // Copy devlog files to docs/devlog/ and generate an index
  const devlogFiles = (await exists(devlogDir)) ? await listMarkdownFiles(devlogDir) : [];
  const devlogEntries = [];

  for (const rel of devlogFiles) {
    const src = path.join(devlogDir, rel);
    const dst = path.join(outDocsDir, "devlog", rel);
    await mkdir(path.dirname(dst), { recursive: true });
    await cp(src, dst);

    const md = await readFile(src, "utf8");
    // NOTE(review): entry paths are docs-root-relative ("devlog/<file>")
    // while the index below is written at docs/devlog/index.md — these
    // links only resolve if the client router treats markdown links as
    // docs-root-relative; confirm against site/app.js.
    devlogEntries.push({
      path: `devlog/${rel}`,
      title: titleFromMarkdown(md, rel),
    });
  }

  // Generate devlog index listing all entries (newest first by filename)
  if (devlogEntries.length > 0) {
    devlogEntries.sort((a, b) => b.path.localeCompare(a.path));
    const indexMd = [
      "# devlog",
      "",
      ...devlogEntries.map((e) => `- [${e.title}](${e.path})`),
      "",
    ].join("\n");
    await writeFile(path.join(outDocsDir, "devlog", "index.md"), indexMd, "utf8");
  }

  // Stable nav order: README homepage, then roadmap, then changelog, then the rest.
  // Ties within the same bucket fall back to title order.
  pages.sort((a, b) => {
    const order = (p) => {
      if (p === "index.md") return 0;
      if (p === "roadmap.md") return 1;
      if (p === "changelog.md") return 2;
      return 3;
    };
    const ao = order(a.path);
    const bo = order(b.path);
    if (ao !== bo) return ao - bo;
    return a.title.localeCompare(b.title);
  });

  await writeFile(
    path.join(outDir, "manifest.json"),
    JSON.stringify({ pages }, null, 2) + "\n",
    "utf8",
  );

  // The count reflects nav pages only; copied archive/devlog files are
  // excluded from it.
  process.stdout.write(
    `Built Wisp docs site: ${pages.length} markdown file(s) -> ${outDir}\n`,
  );
}

// Top-level await: any build failure rejects and exits the process non-zero.
await main();
-185
scripts/build-wisp-docs.mjs
···11-import {
22- readdir,
33- readFile,
44- mkdir,
55- rm,
66- cp,
77- writeFile,
88- access,
99-} from "node:fs/promises";
1010-import path from "node:path";
1111-import { execFile } from "node:child_process";
1212-import { promisify } from "node:util";
1313-1414-const repoRoot = path.resolve(new URL("..", import.meta.url).pathname);
1515-const docsDir = path.join(repoRoot, "docs");
1616-const siteSrcDir = path.join(repoRoot, "site");
1717-const outDir = path.join(repoRoot, "site-out");
1818-const outDocsDir = path.join(outDir, "docs");
1919-2020-const execFileAsync = promisify(execFile);
2121-2222-async function exists(filePath) {
2323- try {
2424- await access(filePath);
2525- return true;
2626- } catch {
2727- return false;
2828- }
2929-}
3030-3131-function isMarkdown(filePath) {
3232- return filePath.toLowerCase().endsWith(".md");
3333-}
3434-3535-async function listMarkdownFiles(dir, prefix = "") {
3636- const entries = await readdir(dir, { withFileTypes: true });
3737- const out = [];
3838- for (const e of entries) {
3939- if (e.name.startsWith(".")) continue;
4040- const rel = path.join(prefix, e.name);
4141- const abs = path.join(dir, e.name);
4242- if (e.isDirectory()) {
4343- out.push(...(await listMarkdownFiles(abs, rel)));
4444- } else if (e.isFile() && isMarkdown(e.name)) {
4545- out.push(rel.replaceAll(path.sep, "/"));
4646- }
4747- }
4848- return out.sort((a, b) => a.localeCompare(b));
4949-}
5050-5151-function titleFromMarkdown(md, fallback) {
5252- const lines = md.split(/\r?\n/);
5353- for (const line of lines) {
5454- const m = /^#\s+(.+)\s*$/.exec(line);
5555- if (m) return m[1].trim();
5656- }
5757- return fallback.replace(/\.md$/i, "");
5858-}
5959-6060-function normalizeTitle(title) {
6161- let t = String(title || "").trim();
6262- // Strip markdown links: [text](url) -> text
6363- t = t.replace(/\[([^\]]+)\]\([^)]+\)/g, "$1");
6464- // If pages follow a "zat - ..." style, drop the redundant prefix in the nav.
6565- t = t.replace(/^zat\s*-\s*/i, "");
6666- // Cheaply capitalize (keeps the rest as-authored).
6767- if (t.length) t = t[0].toUpperCase() + t.slice(1);
6868- return t;
6969-}
7070-7171-async function getBuildId() {
7272- try {
7373- const { stdout } = await execFileAsync("git", ["rev-parse", "HEAD"], {
7474- cwd: repoRoot,
7575- });
7676- const full = String(stdout || "").trim();
7777- if (full) return full.slice(0, 12);
7878- } catch {
7979- // ignore
8080- }
8181- return String(Date.now());
8282-}
8383-8484-async function main() {
8585- await rm(outDir, { recursive: true, force: true });
8686- await mkdir(outDir, { recursive: true });
8787-8888- // Copy static site shell
8989- await cp(siteSrcDir, outDir, { recursive: true });
9090-9191- // Cache-bust immutable assets on Wisp by appending a per-commit query string.
9292- const buildId = await getBuildId();
9393- const outIndex = path.join(outDir, "index.html");
9494- if (await exists(outIndex)) {
9595- let html = await readFile(outIndex, "utf8");
9696- html = html.replaceAll('href="./style.css"', `href="./style.css?v=${buildId}"`);
9797- html = html.replaceAll(
9898- 'src="./vendor/marked.min.js"',
9999- `src="./vendor/marked.min.js?v=${buildId}"`,
100100- );
101101- html = html.replaceAll(
102102- 'src="./app.js"',
103103- `src="./app.js?v=${buildId}"`,
104104- );
105105- html = html.replaceAll(
106106- 'href="./favicon.svg"',
107107- `href="./favicon.svg?v=${buildId}"`,
108108- );
109109- await writeFile(outIndex, html, "utf8");
110110- }
111111-112112- // Copy docs
113113- await mkdir(outDocsDir, { recursive: true });
114114-115115- const pages = [];
116116-117117- // Prefer an explicit docs homepage if present; otherwise use repo README as index.
118118- const docsIndex = path.join(docsDir, "index.md");
119119- if (!(await exists(docsIndex))) {
120120- const readme = path.join(repoRoot, "README.md");
121121- if (await exists(readme)) {
122122- let md = await readFile(readme, "utf8");
123123- // Strip docs/ prefix from links since we're now inside the docs context.
124124- md = md.replace(/\]\(docs\//g, "](");
125125- await writeFile(path.join(outDocsDir, "index.md"), md, "utf8");
126126- pages.push({
127127- path: "index.md",
128128- title: normalizeTitle(titleFromMarkdown(md, "index.md")),
129129- });
130130- }
131131- }
132132-133133- const changelog = path.join(repoRoot, "CHANGELOG.md");
134134- const docsChangelog = path.join(docsDir, "changelog.md");
135135- if ((await exists(changelog)) && !(await exists(docsChangelog))) {
136136- const md = await readFile(changelog, "utf8");
137137- await writeFile(path.join(outDocsDir, "changelog.md"), md, "utf8");
138138- pages.push({
139139- path: "changelog.md",
140140- title: normalizeTitle(titleFromMarkdown(md, "changelog.md")),
141141- });
142142- }
143143-144144- const mdFiles = (await exists(docsDir)) ? await listMarkdownFiles(docsDir) : [];
145145-146146- // Copy all markdown under docs/ (including archives), but only include non-archive
147147- // paths in the sidebar manifest.
148148- for (const rel of mdFiles) {
149149- const src = path.join(docsDir, rel);
150150- const dst = path.join(outDocsDir, rel);
151151- await mkdir(path.dirname(dst), { recursive: true });
152152- await cp(src, dst);
153153-154154- const md = await readFile(src, "utf8");
155155- if (!rel.startsWith("archive/")) {
156156- pages.push({ path: rel, title: normalizeTitle(titleFromMarkdown(md, rel)) });
157157- }
158158- }
159159-160160- // Stable nav order: README homepage, then roadmap, then changelog, then the rest.
161161- pages.sort((a, b) => {
162162- const order = (p) => {
163163- if (p === "index.md") return 0;
164164- if (p === "roadmap.md") return 1;
165165- if (p === "changelog.md") return 2;
166166- return 3;
167167- };
168168- const ao = order(a.path);
169169- const bo = order(b.path);
170170- if (ao !== bo) return ao - bo;
171171- return a.title.localeCompare(b.title);
172172- });
173173-174174- await writeFile(
175175- path.join(outDir, "manifest.json"),
176176- JSON.stringify({ pages }, null, 2) + "\n",
177177- "utf8",
178178- );
179179-180180- process.stdout.write(
181181- `Built Wisp docs site: ${pages.length} markdown file(s) -> ${outDir}\n`,
182182- );
183183-}
184184-185185-await main();