Monorepo for Aesthetic.Computer
aesthetic.computer
1#!/usr/bin/env node
2// papers cli — build, deploy, and track all AC papers
3//
4// Usage:
5// node papers/cli.mjs build [lang] Build PDFs (all langs, or: en, da, es, zh)
6// node papers/cli.mjs build --force Rebuild everything (skip mtime check)
7// node papers/cli.mjs deploy Copy built PDFs to site directory
8// node papers/cli.mjs publish Build all + deploy + update index
9// node papers/cli.mjs publish --force Full pipeline, force-rebuilding everything
10// node papers/cli.mjs status Show build status for all papers
11// node papers/cli.mjs log Show build log
12//
13// Builds are incremental: a paper is only rebuilt when its source files (.tex,
14// .bib, .sty, figures/*) are newer than the output PDF. Pass --force to bypass.
15//
16// Examples:
17// node papers/cli.mjs build Build changed papers (en + da + es + zh)
18// node papers/cli.mjs build en Build changed English PDFs only
19// node papers/cli.mjs build --force Rebuild everything
20// node papers/cli.mjs publish Full incremental pipeline
21
22import { execSync } from "child_process";
23import {
24 existsSync,
25 copyFileSync,
26 mkdirSync,
27 readFileSync,
28 writeFileSync,
29 statSync,
30 readdirSync,
31} from "fs";
32import { join, basename } from "path";
33
// Absolute path of the papers/ directory, derived from this module's URL.
const PAPERS_DIR = new URL(".", import.meta.url).pathname;
// Deploy target: statically served papers site inside system/public.
const SITE_DIR = join(
  PAPERS_DIR,
  "../system/public/papers.aesthetic.computer",
);
// Markdown build history, appended to by appendBuildLog().
const BUILDLOG = join(PAPERS_DIR, "BUILDLOG.md");
// Per-paper bookkeeping (created / updated / revisions), JSON on disk.
const METADATA_PATH = join(PAPERS_DIR, "metadata.json");
// Supported languages; "en" is the canonical source, others are suffixed.
const LANGS = ["en", "da", "es", "zh"];
// Display names for the status table header.
const LANG_NAMES = { en: "English", da: "Danish", es: "Spanish", zh: "Chinese" };
43
// Read metadata.json (per-paper created/updated/revisions records).
// Returns an empty object when the file has not been created yet.
function loadMetadata() {
  if (existsSync(METADATA_PATH)) {
    return JSON.parse(readFileSync(METADATA_PATH, "utf8"));
  }
  return {};
}
48
// Persist the metadata object as pretty-printed JSON with a trailing newline.
function saveMetadata(meta) {
  const json = `${JSON.stringify(meta, null, 2)}\n`;
  writeFileSync(METADATA_PATH, json, "utf8");
}
52
// Translation key for a paper directory (matches keys in index.html's
// inline translations): strip the first "arxiv-" occurrence, then apply
// the couple of manual overrides where the site uses a shorter key.
function translationKey(dir) {
  const stripped = dir.replace("arxiv-", "");
  switch (stripped) {
    case "kidlisp-reference":
      return "kidlisp-ref";
    case "sustainability":
      return "who-pays";
    default:
      return stripped;
  }
}
59
// Map paper dir → tex base name + site PDF name.
//   base:     basename of the .tex source inside the paper directory
//             (language variants add "-{lang}", cards variants add "-cards")
//   siteName: public PDF filename (without ".pdf") under SITE_DIR
//   title:    human-readable title used in the build log and index.html
//   psycho:   optional flag, rendered as data-psycho="1" on the index entry
const PAPER_MAP = {
  "arxiv-ac": {
    base: "ac",
    siteName: "aesthetic-computer-26-arxiv",
    title: "Aesthetic Computer '26",
  },
  "arxiv-api": {
    base: "api",
    siteName: "piece-api-26-arxiv",
    title: "From setup() to boot()",
  },
  "arxiv-archaeology": {
    base: "archaeology",
    siteName: "repo-archaeology-26-arxiv",
    title: "Repository Archaeology",
  },
  "arxiv-dead-ends": {
    base: "dead-ends",
    siteName: "dead-ends-26-arxiv",
    title: "Vestigial Features",
  },
  "arxiv-diversity": {
    base: "diversity",
    siteName: "citation-diversity-audit-26",
    title: "Citation Diversity Audit",
  },
  "arxiv-goodiepal": {
    base: "goodiepal",
    siteName: "radical-computer-art-26-arxiv",
    title: "Radical Computer Art",
  },
  "arxiv-kidlisp": {
    base: "kidlisp",
    siteName: "kidlisp-26-arxiv",
    title: "KidLisp '26",
  },
  "arxiv-kidlisp-reference": {
    base: "kidlisp-reference",
    siteName: "kidlisp-reference-26-arxiv",
    title: "KidLisp Language Reference",
  },
  "arxiv-network-audit": {
    base: "network-audit",
    siteName: "network-audit-26-arxiv",
    title: "Network Audit",
  },
  "arxiv-notepat": {
    base: "notepat",
    siteName: "notepat-26-arxiv",
    title: "notepat.com",
  },
  "arxiv-os": {
    base: "os",
    siteName: "ac-native-os-26-arxiv",
    title: "AC Native OS",
  },
  "arxiv-pieces": {
    base: "pieces",
    siteName: "pieces-not-programs-26-arxiv",
    title: "Pieces Not Programs",
  },
  "arxiv-sustainability": {
    base: "sustainability",
    siteName: "who-pays-for-creative-tools-26-arxiv",
    title: "Who Pays for Creative Tools?",
  },
  "arxiv-whistlegraph": {
    base: "whistlegraph",
    siteName: "whistlegraph-26-arxiv",
    title: "Whistlegraph",
  },
  "arxiv-plork": {
    base: "plork",
    siteName: "plorking-the-planet-26-arxiv",
    title: "PLOrk'ing the Planet",
  },
  "arxiv-folk-songs": {
    base: "folk-songs",
    siteName: "folk-songs-26-arxiv",
    title: "Playable Folk Songs",
  },
  "arxiv-complex": {
    base: "complex",
    siteName: "sucking-on-the-complex-26-arxiv",
    title: "Sucking on the Complex",
  },
  "arxiv-kidlisp-cards": {
    base: "kidlisp-cards",
    siteName: "kidlisp-cards-26-arxiv",
    title: "KidLisp Cards",
  },
  "arxiv-score-analysis": {
    base: "score-analysis",
    siteName: "reading-the-score-26-arxiv",
    title: "Reading the Score",
  },
  "arxiv-calarts": {
    base: "calarts",
    siteName: "calarts-callouts-papers-26-arxiv",
    title: "CalArts, Callouts, and Papers",
    psycho: true,
  },
  "arxiv-open-schools": {
    base: "open-schools",
    siteName: "open-schools-26-arxiv",
    title: "Get Closed Source Out of Schools",
  },
  "arxiv-futures": {
    base: "futures",
    siteName: "five-years-from-now-26-arxiv",
    title: "Five Years from Now",
  },
  "arxiv-identity": {
    base: "identity",
    siteName: "handle-identity-atproto-26-arxiv",
    title: "Handle Identity on the AT Protocol",
  },
  "arxiv-ucla-arts": {
    base: "ucla-arts",
    siteName: "ucla-arts-funding-26-arxiv",
    title: "Two Departments, One Building",
  },
  "arxiv-holden": {
    base: "holden",
    siteName: "potter-and-prompt-26-arxiv",
    title: "The Potter and the Prompt",
  },
  "arxiv-url-tradition": {
    base: "url-tradition",
    siteName: "url-tradition-26-arxiv",
    title: "The URL Tradition",
    psycho: true,
  },
};
195
// Tex source basename for a language: English uses the bare base name,
// every other language appends "-{lang}" (e.g. "ac" → "ac-da").
function texName(base, lang) {
  if (lang === "en") return base;
  return `${base}-${lang}`;
}
199
// Public PDF filename for a language: English gets "{siteName}.pdf",
// every other language gets "{siteName}-{lang}.pdf".
function sitePdfName(siteName, lang) {
  if (lang === "en") return `${siteName}.pdf`;
  return `${siteName}-${lang}.pdf`;
}
203
// Enumerate every buildable (paper, lang) pair from PAPER_MAP, with
// resolved source/output/site paths and existence flags. When a paper
// has a "{base}-cards.tex" next to it, a pseudo-language "cards" entry
// is appended as well (only when no filter, or filtering for "en").
function findAll(langFilter) {
  const entries = [];
  for (const [dir, info] of Object.entries(PAPER_MAP)) {
    const paperDir = join(PAPERS_DIR, dir);
    if (!existsSync(paperDir)) continue; // listed but not checked out

    const wanted = langFilter ? [langFilter] : LANGS;
    for (const lang of wanted) {
      const stem = texName(info.base, lang);
      const texFile = join(paperDir, `${stem}.tex`);
      const pdfFile = join(paperDir, `${stem}.pdf`);
      const sitePdf = join(SITE_DIR, sitePdfName(info.siteName, lang));
      entries.push({
        dir,
        lang,
        base: info.base,
        title: info.title,
        siteName: info.siteName,
        psycho: !!info.psycho,
        texFile,
        pdfFile,
        texExists: existsSync(texFile),
        pdfExists: existsSync(pdfFile),
        sitePdf,
        sitePdfExists: existsSync(sitePdf),
      });
    }

    // Auto-detect the printable cards variant.
    const wantCards = !langFilter || langFilter === "en";
    const cardsTex = join(paperDir, `${info.base}-cards.tex`);
    if (wantCards && existsSync(cardsTex)) {
      const cardsPdf = join(paperDir, `${info.base}-cards.pdf`);
      const cardsSitePdf = join(SITE_DIR, `${info.siteName}-cards.pdf`);
      entries.push({
        dir,
        lang: "cards",
        base: info.base,
        title: info.title,
        siteName: info.siteName,
        psycho: !!info.psycho,
        texFile: cardsTex,
        pdfFile: cardsPdf,
        texExists: true, // just proven by existsSync above
        pdfExists: existsSync(cardsPdf),
        sitePdf: cardsSitePdf,
        sitePdfExists: existsSync(cardsSitePdf),
      });
    }
  }
  return entries;
}
255
// Shared style files at the papers/ root — changes here affect all papers.
// Resolved once at startup; absent files are filtered out so sourcesMtime()
// never treats a missing shared .sty as a forced rebuild.
const SHARED_STY = [
  join(PAPERS_DIR, "ac-paper-layout.sty"),
  join(PAPERS_DIR, "ac-paper-cards.sty"),
].filter(existsSync);
261
// Most recent mtime (ms) across every source file that feeds a paper's
// PDF: the .tex itself, sibling .bib/.sty files, everything under
// figures/, and the shared root .sty files. Returns Infinity when any
// collected source can no longer be stat'ed, forcing a rebuild.
function sourcesMtime(entry) {
  const paperDir = join(PAPERS_DIR, entry.dir);
  const sources = [entry.texFile];

  // Sibling .bib / .sty files — directory listing is best-effort.
  try {
    for (const name of readdirSync(paperDir)) {
      if (name.endsWith(".bib") || name.endsWith(".sty")) {
        sources.push(join(paperDir, name));
      }
    }
  } catch {}

  // Everything under figures/ — the directory is optional.
  const figDir = join(paperDir, "figures");
  try {
    for (const name of readdirSync(figDir)) {
      sources.push(join(figDir, name));
    }
  } catch {}

  sources.push(...SHARED_STY);

  let newest = 0;
  for (const src of sources) {
    try {
      newest = Math.max(newest, statSync(src).mtimeMs);
    } catch {
      return Infinity; // missing source → must rebuild
    }
  }
  return newest;
}
302
// A paper needs rebuilding when its PDF is absent, unreadable, or older
// than the newest of its source files.
function needsRebuild(entry) {
  if (!entry.pdfExists) return true;
  let pdfTime;
  try {
    pdfTime = statSync(entry.pdfFile).mtimeMs;
  } catch {
    return true; // PDF flagged as existing but not stat-able
  }
  try {
    return sourcesMtime(entry) > pdfTime;
  } catch {
    return true;
  }
}
313
// Compile one (paper, lang) entry: 3-pass xelatex with bibtex between
// the passes. Success is judged by whether the output PDF exists, not
// by exit codes — xelatex exits non-zero on mere warnings.
// Returns true when a PDF was produced, false otherwise.
function buildOne(entry) {
  if (!entry.texExists) {
    console.log(` SKIP ${entry.dir}/${texName(entry.base, entry.lang)}.tex (not found)`);
    return false;
  }
  const paperDir = join(PAPERS_DIR, entry.dir);
  const tex = texName(entry.base, entry.lang);
  console.log(` BUILD ${entry.dir}/${tex}.tex ...`);
  try {
    // Run xelatex 3-pass with bibtex. Use semicolons (not &&) so bibtex
    // warnings don't kill the chain. Check for PDF existence, not exit code.
    execSync(
      `cd "${paperDir}" && xelatex -interaction=nonstopmode "${tex}.tex"; bibtex "${tex}" 2>/dev/null; xelatex -interaction=nonstopmode "${tex}.tex"; xelatex -interaction=nonstopmode "${tex}.tex"`,
      { stdio: "pipe", timeout: 180000 },
    );
  } catch {
    // xelatex may return non-zero on warnings but still produce a PDF.
    // Only the PDF-existence check below decides success or failure.
  }
  // Check if PDF was actually produced (the real success criterion).
  const pdfPath = join(paperDir, `${tex}.pdf`);
  if (existsSync(pdfPath)) {
    console.log(` OK ${tex}.pdf`);
    return true;
  }
  console.error(` FAIL ${tex}.tex — no PDF produced`);
  // Show the tail of the .log for diagnosis. Read the file directly
  // instead of shelling out to `tail` — portable and no subprocess.
  try {
    const log = readFileSync(join(paperDir, `${tex}.log`), "utf8");
    const tail = log.split("\n").slice(-20).join("\n");
    console.error(` LOG:\n${tail}`);
  } catch {
    // Log file missing or unreadable — nothing more to report.
  }
  return false;
}
349
// Copy one built PDF into the site directory under its public name.
// Returns false when there is no local PDF to deploy, true otherwise.
function deployOne(entry) {
  if (!entry.pdfExists) {
    return false;
  }
  mkdirSync(SITE_DIR, { recursive: true }); // idempotent
  copyFileSync(entry.pdfFile, entry.sitePdf);
  console.log(` DEPLOY ${basename(entry.sitePdf)}`);
  return true;
}
357
// Current UTC timestamp with minute precision, e.g. "2026-02-14 09:30".
function now() {
  const iso = new Date().toISOString(); // "YYYY-MM-DDTHH:MM:SS.mmmZ"
  return `${iso.slice(0, 10)} ${iso.slice(11, 16)}`;
}
361
// Append a timestamped section to BUILDLOG.md listing what was built
// and what failed. Creates the file with a header on first run.
function appendBuildLog(built, failed) {
  const lines = [`\n## ${now()}\n`];
  if (built.length) {
    lines.push(
      "Built:",
      ...built.map((e) => `- ${e.title} [${e.lang}] → ${basename(e.sitePdf)}`),
    );
  }
  if (failed.length) {
    lines.push("\nFailed:", ...failed.map((e) => `- ${e.title} [${e.lang}]`));
  }
  lines.push("");

  const section = lines.join("\n");
  if (existsSync(BUILDLOG)) {
    writeFileSync(BUILDLOG, readFileSync(BUILDLOG, "utf8") + section);
  } else {
    writeFileSync(
      BUILDLOG,
      `# Papers Build Log\n\nGeneration history for all AC paper PDFs.\n${section}`,
    );
  }
  console.log(`\n Build log updated: ${BUILDLOG}`);
}
388
// Regenerate the paper listing inside SITE_DIR/index.html.
// Replaces the HTML between <!-- papers-start/end --> (and, when guest
// entries exist, <!-- guest-start/end -->) markers. Timestamps and
// revision counts come from metadata.json via loadMetadata(); papers are
// ordered by the curated IMPORTANCE ranking; JOSS/ELS extras (not in
// PAPER_MAP) are appended after the ranked papers.
function updateIndex(entries) {
  const indexPath = join(SITE_DIR, "index.html");
  if (!existsSync(indexPath)) {
    console.log(" SKIP index update (index.html not found)");
    return;
  }

  const meta = loadMetadata();

  // Importance ranking — curated order for 2026 impact.
  // Lower rank sorts earlier; papers not listed fall to rank 99.
  const IMPORTANCE = {
    "aesthetic-computer-26-arxiv": 1,
    "kidlisp-26-arxiv": 2,
    "plorking-the-planet-26-arxiv": 3,
    "ac-native-os-26-arxiv": 4,
    "piece-api-26-arxiv": 5,
    "who-pays-for-creative-tools-26-arxiv": 6,
    "pieces-not-programs-26-arxiv": 7,
    "notepat-26-arxiv": 8,
    "radical-computer-art-26-arxiv": 9,
    "whistlegraph-26-arxiv": 10,
    "sucking-on-the-complex-26-arxiv": 11,
    "dead-ends-26-arxiv": 12,
    "folk-songs-26-arxiv": 13,
    "repo-archaeology-26-arxiv": 14,
    "network-audit-26-arxiv": 15,
    "kidlisp-reference-26-arxiv": 16,
    "citation-diversity-audit-26": 17,
    "open-schools-26-arxiv": 18,
    "five-years-from-now-26-arxiv": 19,
    "calarts-callouts-papers-26-arxiv": 20,
    "handle-identity-atproto-26-arxiv": 21,
    "ucla-arts-funding-26-arxiv": 22,
    "potter-and-prompt-26-arxiv": 23,
  };

  // Collect deployed English PDFs sorted by importance.
  // Only lang === "en" entries appear in the index; translations share
  // the same entry via the client-side translation keys.
  const papers = [];
  for (const e of entries.filter((e) => e.lang === "en" && e.sitePdfExists)) {
    const stat = statSync(e.sitePdf);
    const m = meta[e.dir] || {};
    const rank = IMPORTANCE[e.siteName] || 99;
    // Prefer stored updated timestamp over file mtime (deploy copies all PDFs, clobbering mtime)
    const updated = m.updated ? new Date(m.updated) : stat.mtime;
    papers.push({ ...e, mtime: updated, created: m.created || null, revisions: m.revisions || 0, rank });
  }
  papers.sort((a, b) => a.rank - b.rank);

  // Also include JOSS/ELS papers that aren't in PAPER_MAP.
  // These are hand-maintained PDFs dropped directly into SITE_DIR.
  const extraPdfs = [
    {
      file: "aesthetic-computer-26-joss.pdf",
      title: "Aesthetic Computer '26",
      detail: "JOSS Summary · 2pp",
      abstract:
        "A compact JOSS summary of Aesthetic Computer for archival and citation purposes. It distills the platform into a conventional software paper format.",
      metaKey: "joss-ac",
    },
    {
      file: "kidlisp-26-joss.pdf",
      title: "KidLisp '26",
      detail: "JOSS Summary · 3pp",
      abstract:
        "A compact JOSS summary of KidLisp for archival and citation purposes. It frames the language as a small but expressive tool for generative art.",
      metaKey: "joss-kidlisp",
    },
    {
      file: "kidlisp-els-2026.pdf",
      title: "KidLisp (ELS 2026)",
      detail:
        "A Minimal Lisp for Generative Art with Social Composition · ELS ACM SIGS 4pp",
      abstract:
        "An ELS conference version of KidLisp that emphasizes social composition. It positions the language as a shared practice rather than a solo scripting environment.",
      metaKey: "els-kidlisp",
    },
  ];

  // Guest papers — moved to platter readings (OCR'd text files).
  // Kept as an empty list so the rendering loop below stays in place.
  const guestPdfs = [];
  const extras = [];
  for (const ex of extraPdfs) {
    const fp = join(SITE_DIR, ex.file);
    if (existsSync(fp)) {
      const stat = statSync(fp);
      const m = meta[ex.metaKey] || {};
      const updated = m.updated ? new Date(m.updated) : stat.mtime;
      extras.push({ ...ex, mtime: updated, created: m.created || null, revisions: m.revisions || 0 });
    }
  }
  extras.sort((a, b) => b.mtime - a.mtime); // newest first (unlike papers)

  // Paper detail descriptions and short previews, keyed by siteName.
  // Entries with an empty detail still contribute their abstract.
  const PAPER_COPY = {
    "aesthetic-computer-26-arxiv": {
      detail: "A Mobile-First Runtime for Creative Computing · arXiv 5pp",
      abstract:
        "Aesthetic Computer is presented as a mobile-first creative computing runtime where the interface, publishing flow, and community feedback loop are part of the medium. The paper argues that small pieces can make software feel more social, more portable, and easier to share.",
    },
    "kidlisp-26-arxiv": {
      detail: "A Minimal Lisp for Generative Art on a Social Platform · arXiv 6pp",
      abstract:
        "KidLisp is the platform's tiny Lisp for building visual and musical pieces in the browser. The paper shows how a minimal language can stay approachable while still supporting generative art and composition.",
    },
    "plorking-the-planet-26-arxiv": {
      detail: "Laptop Orchestras, PLOrk Heritage, and Aesthetic Computer · arXiv",
      abstract:
        "This paper connects Aesthetic Computer to laptop orchestras and the collaborative traditions of PLOrk. It treats the browser as a place for ensemble practice, not just solo desktop programming.",
    },
    "ac-native-os-26-arxiv": {
      detail: "A Bare-Metal Creative Computing Operating System · arXiv 5pp",
      abstract:
        "AC Native OS describes a bare-metal runtime for creative computing. It focuses on boot-time simplicity and the idea that the operating system itself can be a programmable art surface.",
    },
    "piece-api-26-arxiv": {
      detail: "Processing at the Core of the Piece API · arXiv 7pp",
      abstract:
        "The Piece API rethinks creative software around composable pieces instead of monolithic apps. It uses Processing's lineage to connect setup(), boot(), and the act of publishing.",
    },
    "who-pays-for-creative-tools-26-arxiv": {
      detail: "Funding, Burnout, and Survival in Open-Source Creative Computing · arXiv 5pp",
      abstract:
        "A short look at who supports open-source creative tools and what that labor costs. The paper connects funding, burnout, and long-term maintenance to the life of artistic software.",
    },
    "pieces-not-programs-26-arxiv": {
      detail: "The Piece as a Unit of Creative Cognition · arXiv 4pp",
      abstract:
        "A piece is treated here as the basic unit of creative cognition in AC. The paper argues that smaller, shareable pieces encourage composition, remix, and publication.",
    },
    "notepat-26-arxiv": {
      detail: "From Keyboard Toy to System Front Door · arXiv 5pp",
      abstract:
        "notepat.com is framed as a keyboard-first front door to the system. The paper follows the toy-like input surface as it grows into a fuller creative interface.",
    },
    "radical-computer-art-26-arxiv": {
      detail: "Goodiepalian Approaches in Aesthetic Computer · arXiv 5pp",
      abstract:
        "This paper treats Goodiepalian practice as a model for radical computer art. It emphasizes play, notation, and the social life of systems over polished product design.",
    },
    "whistlegraph-26-arxiv": {
      detail: "Drawing, Singing, and the Graphic Score as Viral Form · arXiv 4pp",
      abstract:
        "Whistlegraph explores drawing, singing, and score-making as forms that can spread like software. The paper links graphic notation to performance, remix, and browser-native sharing.",
    },
    "sucking-on-the-complex-26-arxiv": {
      detail: "Platform Hegemony, Critique-as-Content, and Anti-Environments · arXiv 5pp",
      abstract:
        "Sucking on the Complex critiques platform hegemony and the way critique becomes content. It looks for anti-environments that stay messy, resistant, and alive.",
    },
    "dead-ends-26-arxiv": {
      detail: "Dormant Paths, Evolutionary Branches, and Abandoned Approaches · arXiv 4pp",
      abstract:
        "The paper catalogs dormant branches, abandoned experiments, and paths that never became default. It treats dead ends as useful history rather than failure.",
    },
    "folk-songs-26-arxiv": {
      detail: "Oral Tradition Meets the Browser Keyboard · arXiv",
      abstract:
        "Playable Folk Songs brings oral tradition into the browser keyboard. The paper asks how simple interaction can carry collective memory and repetition.",
    },
    "repo-archaeology-26-arxiv": {
      detail: 'Tracing the Evolution of AC Through Its Git History · arXiv 3pp · <a href="/ac-repo-archaeology">interactive timeline</a>',
      abstract:
        "Repository Archaeology traces the project through its git history. The paper shows how version control can become a narrative medium for design evolution.",
    },
    "network-audit-26-arxiv": {
      detail: "Who Uses Aesthetic Computer and What Do They Make? · arXiv 4pp",
      abstract:
        "Network Audit asks who uses Aesthetic Computer and what they make with it. The paper turns usage patterns into a portrait of a community in motion.",
    },
    "kidlisp-reference-26-arxiv": {
      detail: "118 Built-ins in 12 Categories · arXiv 4pp",
      abstract:
        "The KidLisp reference compresses the language into a usable field guide. It groups 118 built-ins into 12 categories for quick browsing and recall.",
    },
    "citation-diversity-audit-26": {
      detail: "Diversity and Inclusion in AC Paper Citations · 4pp",
      abstract:
        "Citation Diversity Audit looks at who gets cited in the papers and where the archive is thin. The paper uses citation patterns as a proxy for inclusion and intellectual range.",
    },
    "open-schools-26-arxiv": {
      detail: "",
      abstract:
        "Get Closed Source Out of Schools makes the case that creative computing should be teachable, inspectable, and modifiable. The paper argues for open tools as infrastructure for learning.",
    },
    "five-years-from-now-26-arxiv": {
      detail: "",
      abstract:
        "Five Years from Now is a projection paper about where the project could go if current habits continue. It uses the near future to test the consequences of today's decisions.",
    },
    "calarts-callouts-papers-26-arxiv": {
      detail: "",
      abstract:
        "CalArts, Callouts, and Papers turns a local institutional context into a study of friction, attention, and production. The paper leans into psycho style to show how academic labor is staged and performed.",
    },
    "handle-identity-atproto-26-arxiv": {
      detail: "",
      abstract:
        "Handle Identity on the AT Protocol treats naming as a social and technical problem. The paper explores how handles, identity, and publishing can be tied together without losing portability.",
    },
    "ucla-arts-funding-26-arxiv": {
      detail: "",
      abstract:
        "Two Departments, One Building examines how funding and infrastructure shape creative work in shared spaces. The paper looks at administrative boundaries as part of the artistic system.",
    },
    "kidlisp-cards-26-arxiv": {
      detail: "",
      abstract:
        "KidLisp Cards condenses the language into a pocketable card format. It is meant to make the language easier to browse, teach, and carry.",
    },
    "reading-the-score-26-arxiv": {
      detail: "",
      abstract:
        "Reading the Score looks at the graphic score as an interface for interpretation and collaboration. The paper treats notation as a computational and social object.",
    },
    "potter-and-prompt-26-arxiv": {
      detail: "John Holden's Proto-Cognitive Music Theory and Aesthetic Computer · arXiv 7pp",
      abstract:
        "The Potter and the Prompt argues that AC independently converges on the core principles of John Holden's 1770 proto-cognitive music theory. It proposes AC as a computational laboratory for advancing Holden's unfinished program on grouping, attention, and the module.",
    },
    "url-tradition-26-arxiv": {
      detail: "Addressable Creative Computing from HyperCard to Aesthetic Computer · arXiv 5pp",
      abstract:
        "The URL Tradition traces how URL-addressability reshapes creative computing. From HyperCard's landlocked stacks to AC's prompt-as-address-bar, the paper argues the URL is not a feature but a medium property that transforms pedagogy, distribution, authorship, and social interaction.",
    },
  };

  // Format a Date as e.g. "Feb 14 09:30".
  // NOTE(review): the month name is rendered in America/Los_Angeles but
  // getHours()/getMinutes() use the machine's local zone — confirm this
  // mixed-zone output is intended.
  function fmtTime(d) {
    const m = d.toLocaleString("en-US", { month: "short", timeZone: "America/Los_Angeles" });
    const day = d.getDate();
    const h = String(d.getHours()).padStart(2, "0");
    const min = String(d.getMinutes()).padStart(2, "0");
    return `${m} ${day} ${h}:${min}`;
  }

  // "MM/DD" from an ISO-style "YYYY-MM-DD…" string (positional slice).
  function fmtDate(d) {
    return `${d.slice(5, 7)}/${d.slice(8, 10)}`;
  }

  // Copy lookup with an empty-object fallback for unknown siteNames.
  function paperCopy(key) {
    return PAPER_COPY[key] || {};
  }

  // Build paper entries HTML.
  let papersHtml = "";
  for (const p of papers) {
    const copy = paperCopy(p.siteName);
    const detail = copy.detail || "";
    const abstract = copy.abstract || "";
    // A deployed "{siteName}-cards.pdf" toggles the cards affordance.
    const hasCards = existsSync(join(SITE_DIR, `${p.siteName}-cards.pdf`));
    const createdStr = p.created ? fmtDate(p.created) : "";
    // revStr is computed but unused in this loop — the meta-row below
    // renders "revision N" directly from p.revisions instead.
    const revStr = p.revisions > 0 ? `r${p.revisions}` : "";
    const tKey = translationKey(p.dir);
    const updatedISO = p.mtime.toISOString();
    papersHtml += `
    <div class="p" data-paper-id="${tKey}"${hasCards ? "" : ` data-no-cards="1"`}${p.psycho ? ` data-psycho="1"` : ""} data-created="${p.created || ""}" data-updated="${updatedISO}">
      <div class="title"><a href="/${p.siteName}.pdf" data-base="/${p.siteName}">${p.title}</a></div>
      <div class="detail">${detail}</div>
      <div class="abstract">${abstract}</div>
      <div class="meta-row"><span class="author">@jeffrey</span>${createdStr ? `<span class="created" title="Created">${createdStr}</span>` : ""}<span class="revisions" title="Revision count">revision ${p.revisions || 1}</span><span class="updated" title="Last updated">${fmtTime(p.mtime)}</span></div>
    </div>\n`;
  }
  // Extras (JOSS/ELS) render after the ranked papers, without an author span.
  for (const ex of extras) {
    const createdStr = ex.created ? fmtDate(ex.created) : "";
    const revStr = ex.revisions > 0 ? `r${ex.revisions}` : "";
    // Remap metaKey to the translation id used by index.html.
    const exKey = { "joss-ac": "joss-ac", "joss-kidlisp": "joss-kidlisp", "els-kidlisp": "els" }[ex.metaKey] || ex.metaKey;
    papersHtml += `
    <div class="p" data-paper-id="${exKey}">
      <div class="title"><a href="/${ex.file}">${ex.title}</a></div>
      <div class="detail">${ex.detail}</div>
      <div class="abstract">${ex.abstract}</div>
      <div class="meta-row"><span class="created" title="Created">${createdStr}</span><span class="revisions" title="Revisions">${revStr}</span><span class="updated" title="Last updated">${fmtTime(ex.mtime)}</span></div>
    </div>\n`;
  }

  // Build guest papers HTML.
  // NOTE(review): guestPdfs is currently always empty, so this loop is
  // dormant; it reads g.year/g.author fields that a future guest entry
  // would need to supply.
  let guestHtml = "";
  for (const g of guestPdfs) {
    const fp = join(SITE_DIR, g.file);
    if (existsSync(fp)) {
      guestHtml += `
    <div class="p guest" data-paper-id="${g.metaKey}" data-no-cards="1" data-created="${g.year}-01-01" data-updated="${g.year}-01-01T00:00:00.000Z">
      <div class="title"><a href="/${g.file}">${g.title}</a></div>
      <div class="detail">${g.detail}</div>
      <div class="abstract">${g.abstract}</div>
      <div class="meta-row"><span class="author">${g.author}</span><span class="created" title="Published">${g.year}</span></div>
    </div>\n`;
    }
  }

  // Read current index, replace paper entries between markers.
  let html = readFileSync(indexPath, "utf8");

  // Replace everything between the sub line and the footer.
  const startMarker = "<!-- papers-start -->";
  const endMarker = "<!-- papers-end -->";

  if (html.includes(startMarker)) {
    const before = html.slice(0, html.indexOf(startMarker) + startMarker.length);
    const after = html.slice(html.indexOf(endMarker));
    html = before + "\n" + papersHtml + "\n  " + after;
  } else {
    // Add markers on first run — replace from first .p div to footer.
    const firstP = html.indexOf('<div class="p">');
    const footer = html.indexOf('<div class="footer">');
    if (firstP !== -1 && footer !== -1) {
      html =
        html.slice(0, firstP) +
        startMarker +
        "\n" +
        papersHtml +
        "\n  " +
        endMarker +
        "\n\n  " +
        html.slice(footer);
    }
  }

  // Replace guest papers between guest markers (only when there is
  // guest content to insert; existing guest HTML is otherwise left alone).
  const guestStart = "<!-- guest-start -->";
  const guestEnd = "<!-- guest-end -->";
  if (guestHtml && html.includes(guestStart)) {
    const gBefore = html.slice(0, html.indexOf(guestStart) + guestStart.length);
    const gAfter = html.slice(html.indexOf(guestEnd));
    html = gBefore + "\n" + guestHtml + "\n  " + gAfter;
  }

  writeFileSync(indexPath, html);
  const guestCount = guestPdfs.filter(g => existsSync(join(SITE_DIR, g.file))).length;
  console.log(` INDEX updated with ${papers.length + extras.length} papers + ${guestCount} guest papers.`);
}
718
// Check that every PDF linked from the generated index.html exists in
// SITE_DIR. Prints each broken link and returns true when all resolve
// (or when there is no index.html to check).
function verify() {
  const indexPath = join(SITE_DIR, "index.html");
  if (!existsSync(indexPath)) {
    console.log(" SKIP verify (index.html not found)");
    return true;
  }
  const html = readFileSync(indexPath, "utf8");

  // Extract all href links to PDFs from the generated paper entries.
  const hrefRe = /href="\/([^"]+\.pdf)"/g;
  let match;
  const linked = new Set();
  while ((match = hrefRe.exec(html)) !== null) {
    linked.add(match[1]);
  }

  // Also check cards links — only for papers that have cards (no data-no-cards attr).
  // The tempered prefix (?:(?!data-no-cards)[^>])* scans the opening tag
  // up to its first ">", so any data-no-cards attribute there suppresses
  // the implied "{base}-cards.pdf" link for that entry.
  const paperBlockRe = /<div class="p"(?:(?!data-no-cards)[^>])*>[\s\S]*?data-base="\/([^"]+)"[\s\S]*?<\/div>\s*<\/div>/g;
  while ((match = paperBlockRe.exec(html)) !== null) {
    linked.add(`${match[1]}-cards.pdf`);
  }

  let ok = 0;
  let broken = 0;
  // Sorted for stable, readable output.
  for (const pdf of [...linked].sort()) {
    const fp = join(SITE_DIR, pdf);
    if (existsSync(fp)) {
      ok++;
    } else {
      console.log(` BROKEN /${pdf}`);
      broken++;
    }
  }
  console.log(` ${ok} OK, ${broken} broken link${broken !== 1 ? "s" : ""}`);
  if (broken > 0) {
    console.log(
      "\n ⚠ Some papers have broken PDF links. Build cards or remove dead links.",
    );
  }
  return broken === 0;
}
760
// --- CLI ---
// Parse flags and positionals: "--force" anywhere; first positional is
// the command, second the optional language filter.
const args = process.argv.slice(2);
const force = args.includes("--force");
const positional = args.filter((a) => !a.startsWith("--"));
const [cmd, langFilter] = positional;

if (cmd === "status" || !cmd) {
  // Status table: one row per paper, one column per language.
  const files = findAll();
  console.log("\nPapers Build Status\n");
  console.log(
    "Paper".padEnd(32) + LANGS.map((l) => LANG_NAMES[l].padEnd(12)).join(""),
  );
  console.log("-".repeat(32 + LANGS.length * 12));
  let currentDir = "";
  for (const f of files) {
    // Skip pseudo-language "cards" entries that findAll() appends after
    // the language entries: the table only has LANGS columns, and a
    // cards row would print a stray fifth cell without ever emitting the
    // row's newline (LANGS.indexOf("cards") is -1), garbling the layout.
    if (!LANGS.includes(f.lang)) continue;
    if (f.dir !== currentDir) {
      currentDir = f.dir;
      process.stdout.write(f.dir.padEnd(32));
    }
    const hasTex = f.texExists;
    const hasPdf = f.sitePdfExists;
    // OK = deployed on site, tex = source only, --- = nothing yet.
    const status = hasPdf ? "OK" : hasTex ? "tex" : "---";
    process.stdout.write(status.padEnd(12));
    if (LANGS.indexOf(f.lang) === LANGS.length - 1) process.stdout.write("\n");
  }
  console.log();
} else if (cmd === "build") {
  // Incremental build; optional language filter, --force bypasses mtimes.
  const filter = langFilter && LANGS.includes(langFilter) ? langFilter : null;
  const files = findAll(filter);
  const candidates = files.filter((f) => f.texExists);
  const toBuild = force ? candidates : candidates.filter(needsRebuild);
  const skipped = candidates.length - toBuild.length;
  const mode = force ? " (forced)" : "";
  console.log(
    `\nBuilding ${toBuild.length} paper${toBuild.length !== 1 ? "s" : ""}${filter ? ` (${LANG_NAMES[filter]})` : " (all languages)"}${mode}...`,
  );
  if (skipped > 0) console.log(` (${skipped} up-to-date, skipped)`);
  console.log();
  const built = [];
  const failed = [];
  for (const entry of toBuild) {
    if (buildOne(entry)) built.push(entry);
    else failed.push(entry);
  }
  console.log(`\nDone: ${built.length} built, ${skipped} skipped, ${failed.length} failed.\n`);
  if (built.length) appendBuildLog(built, failed);
} else if (cmd === "deploy") {
  // Copy every locally built PDF into the site directory.
  const files = findAll();
  const toDeploy = files.filter((f) => f.pdfExists);
  console.log(
    `\nDeploying ${toDeploy.length} PDF${toDeploy.length !== 1 ? "s" : ""}...\n`,
  );
  for (const entry of toDeploy) {
    deployOne(entry);
  }
  console.log("\nDone.\n");
} else if (cmd === "publish") {
  // Full pipeline: build (incremental) → deploy → update index → verify
  const mode = force ? " (forced)" : " (incremental)";
  console.log(`\n=== PUBLISH${mode}: build → deploy → update index → verify ===\n`);

  const files = findAll();
  const candidates = files.filter((f) => f.texExists);
  const toBuild = force ? candidates : candidates.filter(needsRebuild);
  const skipped = candidates.length - toBuild.length;
  console.log(`Building ${toBuild.length} papers (all languages)...`);
  if (skipped > 0) console.log(` (${skipped} up-to-date, skipped)`);
  console.log();
  const built = [];
  const failed = [];
  for (const entry of toBuild) {
    if (buildOne(entry)) built.push(entry);
    else failed.push(entry);
  }
  console.log(`\nBuild: ${built.length} OK, ${skipped} skipped, ${failed.length} failed.\n`);

  // Re-scan after build to pick up new PDFs
  const deployFiles = findAll();
  const toDeploy = deployFiles.filter((f) => f.pdfExists);
  console.log(`Deploying ${toDeploy.length} PDFs...\n`);
  for (const entry of toDeploy) {
    deployOne(entry);
  }

  // Increment revisions in metadata for all built papers (English only to avoid double-counting)
  const meta = loadMetadata();
  // Block-scoped const shadows the module-level now() inside this branch.
  const now = new Date().toISOString();
  const builtDirs = new Set(built.filter((e) => e.lang === "en").map((e) => e.dir));
  for (const dir of builtDirs) {
    if (!meta[dir]) meta[dir] = { created: now.slice(0, 10), revisions: 0 };
    meta[dir].revisions = (meta[dir].revisions || 0) + 1;
    meta[dir].updated = now;
  }
  saveMetadata(meta);
  console.log(` METADATA updated (${builtDirs.size} papers incremented).\n`);

  // Update index
  console.log("Updating index...\n");
  const indexEntries = findAll();
  // Refresh sitePdfExists after deploy
  for (const e of indexEntries) {
    e.sitePdfExists = existsSync(e.sitePdf);
  }
  updateIndex(indexEntries);

  if (built.length) appendBuildLog(built, failed);

  // Verify all linked PDFs exist
  console.log("\n=== VERIFY ===\n");
  verify();

  console.log("\nPublish complete.\n");
} else if (cmd === "index") {
  // Regenerate index.html without building or deploying.
  console.log("\nUpdating index...\n");
  const indexEntries = findAll();
  for (const e of indexEntries) {
    e.sitePdfExists = existsSync(e.sitePdf);
  }
  updateIndex(indexEntries);
  console.log("\nDone.\n");
} else if (cmd === "verify") {
  // Exit non-zero when any linked PDF is missing (CI-friendly).
  console.log("\n=== VERIFY: checking all linked PDFs ===\n");
  const allOk = verify();
  process.exit(allOk ? 0 : 1);
} else if (cmd === "log") {
  if (existsSync(BUILDLOG)) {
    console.log(readFileSync(BUILDLOG, "utf8"));
  } else {
    console.log("No build log yet. Run 'build' or 'publish' first.");
  }
} else {
  // Unknown command → usage text.
  console.log(`
papers cli — build, deploy, and track all AC papers

Usage:
  node papers/cli.mjs build [lang]     Build changed PDFs (all langs, or: en, da, es, zh)
  node papers/cli.mjs build --force    Rebuild all PDFs (skip mtime check)
  node papers/cli.mjs deploy           Copy built PDFs to site directory
  node papers/cli.mjs publish          Incremental build + deploy + update index + verify
  node papers/cli.mjs publish --force  Full rebuild + deploy + update index + verify
  node papers/cli.mjs status           Show build status for all papers
  node papers/cli.mjs verify           Check all linked PDFs exist
  node papers/cli.mjs log              Show build log

Builds are incremental by default — only papers with source files newer than
their output PDF are rebuilt. Use --force to bypass.
`);
}