Monorepo for Aesthetic.Computer
aesthetic.computer
1#!/usr/bin/env node
2// Render KidLisp piece to animated WebP + WAV audio via self-contained WASM.
3// Usage: node sonify.mjs <piece.lisp> [frames] [fps] [size] [sampleRate]
4
5import { readFileSync, writeFileSync, mkdirSync } from "fs";
6import { basename } from "path";
7import { execSync } from "child_process";
8import sharp from "sharp";
9import { Compiler } from "./compiler.mjs";
10
11const OUT_DIR = new URL("./output/", import.meta.url).pathname;
12mkdirSync(OUT_DIR, { recursive: true });
13
14const input = process.argv[2] || "roz.lisp";
15const FRAMES = parseInt(process.argv[3]) || 300;
16const FPS = parseInt(process.argv[4]) || 30;
17const SIZE = parseInt(process.argv[5]) || 256;
18const SAMPLE_RATE = parseInt(process.argv[6]) || 44100;
19const SAMPLES_PER_FRAME = Math.floor(SAMPLE_RATE / FPS);
20
21const path = new URL(input, import.meta.url).pathname;
22const source = readFileSync(path, "utf-8");
23const name = basename(input, ".lisp");
24
25console.log(`Compiling ${input}...`);
26const compiler = new Compiler();
27const wasmBytes = compiler.compile(source);
28const mathImports = {
29 math: {
30 sin: (x) => Math.fround(Math.sin(x)),
31 cos: (x) => Math.fround(Math.cos(x)),
32 random: () => Math.fround(Math.random()),
33 },
34};
35const { instance } = await WebAssembly.instantiate(wasmBytes, mathImports);
36console.log(
37 `WASM: ${wasmBytes.length} bytes | ${FRAMES} frames @ ${FPS}fps | ${SIZE}x${SIZE} | ${SAMPLE_RATE}Hz`,
38);
39
40// Audio buffer offset: w * h * 8 (after pixel + temp buffers)
41const audioOffset = SIZE * SIZE * 8;
42const totalSamples = FRAMES * SAMPLES_PER_FRAME;
43const allAudio = new Float32Array(totalSamples);
44
45// Render frames
46const framePngs = [];
47for (let f = 0; f < FRAMES; f++) {
48 instance.exports.paint(SIZE, SIZE, f);
49 instance.exports.sound(SAMPLE_RATE, FPS, f);
50
51 // Read pixels
52 const mem = new Uint8Array(instance.exports.memory.buffer);
53 const pixels = Buffer.from(mem.slice(0, SIZE * SIZE * 4));
54 const png = await sharp(pixels, {
55 raw: { width: SIZE, height: SIZE, channels: 4 },
56 })
57 .png()
58 .toBuffer();
59 framePngs.push(png);
60
61 // Read audio samples (f32 at audioOffset)
62 const audioView = new Float32Array(
63 instance.exports.memory.buffer,
64 audioOffset,
65 SAMPLES_PER_FRAME,
66 );
67 allAudio.set(audioView, f * SAMPLES_PER_FRAME);
68
69 if ((f + 1) % 30 === 0 || f === FRAMES - 1) {
70 process.stdout.write(`\r Rendered ${f + 1}/${FRAMES} frames`);
71 }
72}
73console.log();
74
75// ─── Write WAV ───────────────────────────────────────────────────────
76function writeWav(filename, samples, sampleRate) {
77 const numSamples = samples.length;
78 const bytesPerSample = 2; // 16-bit PCM
79 const dataSize = numSamples * bytesPerSample;
80 const buffer = Buffer.alloc(44 + dataSize);
81
82 // RIFF header
83 buffer.write("RIFF", 0);
84 buffer.writeUInt32LE(36 + dataSize, 4);
85 buffer.write("WAVE", 8);
86
87 // fmt chunk
88 buffer.write("fmt ", 12);
89 buffer.writeUInt32LE(16, 16); // chunk size
90 buffer.writeUInt16LE(1, 20); // PCM
91 buffer.writeUInt16LE(1, 22); // mono
92 buffer.writeUInt32LE(sampleRate, 24);
93 buffer.writeUInt32LE(sampleRate * bytesPerSample, 28); // byte rate
94 buffer.writeUInt16LE(bytesPerSample, 32); // block align
95 buffer.writeUInt16LE(16, 34); // bits per sample
96
97 // data chunk
98 buffer.write("data", 36);
99 buffer.writeUInt32LE(dataSize, 40);
100
101 // Convert f32 samples to 16-bit PCM
102 for (let i = 0; i < numSamples; i++) {
103 const s = Math.max(-1, Math.min(1, samples[i]));
104 const pcm = Math.round(s * 32767);
105 buffer.writeInt16LE(pcm, 44 + i * 2);
106 }
107
108 writeFileSync(filename, buffer);
109}
110
111const wavPath = `${OUT_DIR}${name}.wav`;
112writeWav(wavPath, allAudio, SAMPLE_RATE);
113const wavSize = (allAudio.length * 2) / 1024;
114console.log(`${name}.wav (${SAMPLE_RATE}Hz mono, ${(totalSamples / SAMPLE_RATE).toFixed(1)}s, ${wavSize.toFixed(0)}KB)`);
115
116// ─── Encode WebP ─────────────────────────────────────────────────────
117console.log("Encoding animated WebP...");
118const tmpDir = `${OUT_DIR}.frames-${name}`;
119mkdirSync(tmpDir, { recursive: true });
120
121for (let f = 0; f < framePngs.length; f++) {
122 const framePath = `${tmpDir}/frame-${String(f).padStart(5, "0")}.png`;
123 await sharp(framePngs[f]).toFile(framePath);
124}
125
126const webpPath = `${OUT_DIR}${name}.webp`;
127execSync(
128 `ffmpeg -y -framerate ${FPS} -i "${tmpDir}/frame-%05d.png" -loop 0 -lossless 1 "${webpPath}" 2>/dev/null`,
129);
130execSync(`rm -rf "${tmpDir}"`);
131
132const { statSync } = await import("fs");
133const webpSize = statSync(webpPath).size;
134console.log(`${name}.webp (${SIZE}x${SIZE}, ${FRAMES} frames, ${(webpSize / 1024).toFixed(1)}KB)`);
135
136// ─── Mux video + audio to mp4 ────────────────────────────────────────
137try {
138 const mp4Path = `${OUT_DIR}${name}.mp4`;
139 const mp4Video = `${OUT_DIR}${name}_video.mp4`;
140
141 // Step 1: encode video-only mp4 from frames
142 const frameDir2 = `${OUT_DIR}.frames2-${name}`;
143 mkdirSync(frameDir2, { recursive: true });
144 for (let f = 0; f < framePngs.length; f++) {
145 await sharp(framePngs[f]).toFile(
146 `${frameDir2}/frame-${String(f).padStart(5, "0")}.png`,
147 );
148 }
149 execSync(
150 `ffmpeg -y -framerate ${FPS} -i "${frameDir2}/frame-%05d.png" -c:v libx264 -pix_fmt yuv420p "${mp4Video}" 2>/dev/null`,
151 );
152 execSync(`rm -rf "${frameDir2}"`);
153
154 // Step 2: mux video + audio
155 execSync(
156 `ffmpeg -y -i "${mp4Video}" -i "${wavPath}" -map 0:v -map 1:a -c:v copy -c:a libmp3lame -ar 24000 -ac 2 -b:a 160k -movflags +faststart -shortest "${mp4Path}" 2>/dev/null`,
157 );
158 execSync(`rm -f "${mp4Video}"`);
159
160 const mp4Size = statSync(mp4Path).size;
161 console.log(`${name}.mp4 (video+audio, ${(mp4Size / 1024).toFixed(1)}KB)`);
162} catch {
163 console.log("(ffmpeg mp4 mux not available, WAV + WebP saved separately)");
164}