Monorepo for Aesthetic.Computer (aesthetic.computer)

kidlisp-wasm: add audio synthesis — pixel buffer becomes sound

WASM module now exports a `sound(sampleRate, fps, frame)` function that
reads the middle row of the pixel buffer, interleaves R/G/B channels as
sub-samples, and writes f32 audio to memory. Visual state directly drives
the audio — vortex patterns create evolving timbral changes.

New sonify.mjs runner renders video + WAV + muxed MP4 (h264 + MP3 for
VS Code compatibility on Fedora).

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>

+225 -1
+61 -1
kidlisp-wasm/compiler.mjs
··· 115 115 // Memory 116 116 st8(off = 0) { this.b.push(0x3a, 0x00, ...uleb128(off)); return this; } 117 117 ld8u(off = 0){ this.b.push(0x2d, 0x00, ...uleb128(off)); return this; } 118 + stf32(off = 0){ this.b.push(0x38, 0x02, ...uleb128(off)); return this; } // f32.store 119 + ldf32(off = 0){ this.b.push(0x2a, 0x02, ...uleb128(off)); return this; } // f32.load 118 120 // Control 119 121 if_() { this.b.push(0x04, 0x40); return this; } 120 122 else_(){ this.b.push(0x05); return this; } ··· 244 246 const F_SPIN = 12; // (f32) → () 245 247 const F_ZOOM = 13; // (f32) → () 246 248 const F_CONTRAST = 14; // (f32) → () 247 - const F_PAINT = 15; // (f32, f32, f32) → () 249 + const F_SOUND = 15; // (f32, f32, f32) → () [sampleRate, fps, frame] 250 + const F_PAINT = 16; // (f32, f32, f32) → () 248 251 249 252 // ─── Runtime Function Emitters ────────────────────────────────────── 250 253 ··· 853 856 return { locals: [[4, I32], [1, F32]], code: e.bytes() }; 854 857 } 855 858 859 + // $sound(sampleRate: f32, fps: f32, frame: f32) — generate audio from pixel buffer 860 + // Scans the middle row horizontally as a waveform: pixel RGB → sample amplitude. 861 + // The visual state directly becomes the sound — vortex patterns create timbral evolution. 
862 + function emitSound() { 863 + const e = new E(); 864 + // params: 0=sampleRate, 1=fps, 2=frame 865 + // f32 locals: 3=sample 866 + // i32 locals: 4=spf, 5=i, 6=row, 7=col, 8=pixOff, 9=audioOff, 10=globalSample, 11=chanSum 867 + 868 + // spf = floor(sampleRate / fps) 869 + e.lg(0).lg(1).fdiv().ffloor().f2i().ls(4); 870 + // row = h / 2 (middle row) 871 + e.gg(G_H).i32c(2).idiv().ls(6); 872 + // audioOff = w * h * 8 (after pixel + temp buffers) 873 + e.gg(G_W).gg(G_H).imul().i32c(8).imul().ls(9); 874 + 875 + // for i = 0; i < spf; i++ 876 + e.i32c(0).ls(5); 877 + e.block().loop(); 878 + e.lg(5).lg(4).ige().brif(1); 879 + 880 + // globalSample = frame * spf + i 881 + e.lg(2).f2i().lg(4).imul().lg(5).iadd().ls(10); 882 + 883 + // col = globalSample % w (scan horizontally across the row) 884 + e.lg(10).gg(G_W).irem().ls(7); 885 + e.lg(7).i32c(0).ilt().if_(); 886 + e.lg(7).gg(G_W).iadd().ls(7); 887 + e.end(); 888 + 889 + // Interleave R, G, B: each channel is a separate sub-sample 890 + // pixelIdx = (globalSample / 3) % w, channel = globalSample % 3 891 + e.lg(10).i32c(3).idiv().gg(G_W).irem().ls(7); // col = pixelIdx 892 + e.lg(7).i32c(0).ilt().if_(); e.lg(7).gg(G_W).iadd().ls(7); e.end(); 893 + e.lg(10).i32c(3).irem().ls(11); // channel index (0=R, 1=G, 2=B) 894 + 895 + // pixOff = (row * w + col) * 4 + channel 896 + e.lg(6).gg(G_W).imul().lg(7).iadd().i32c(4).imul().lg(11).iadd().ls(8); 897 + 898 + // sample = (pixelValue - 128) / 128.0 * 0.8 899 + e.lg(8).ld8u().i2f().f32c(128).fsub().f32c(128).fdiv().f32c(0.8).fmul().ls(3); 900 + 901 + // f32.store at audioOff + i * 4 902 + e.lg(9).lg(5).i32c(4).imul().iadd(); 903 + e.lg(3); 904 + e.stf32(); 905 + 906 + e.lg(5).i32c(1).iadd().ls(5); 907 + e.br(0); 908 + e.end().end(); 909 + e.end(); 910 + return { locals: [[1, F32], [8, I32]], code: e.bytes() }; 911 + } 912 + 856 913 // ─── Compiler ─────────────────────────────────────────────────────── 857 914 858 915 export class Compiler { ··· 1296 1353 T_F32_VOID, // spin 
1297 1354 T_F32_VOID, // zoom 1298 1355 T_F32_VOID, // contrast 1356 + T_F32_F32_F32, // sound 1299 1357 T_F32_F32_F32, // paint 1300 1358 ]; 1301 1359 out.push(...section(3, vecOf(funcTypes.map(t => [...uleb128(t)])))); ··· 1319 1377 // ── Exports ── 1320 1378 const exports = [ 1321 1379 [...encodeString("paint"), 0x00, ...uleb128(F_PAINT)], 1380 + [...encodeString("sound"), 0x00, ...uleb128(F_SOUND)], 1322 1381 [...encodeString("memory"), 0x02, ...uleb128(0)], 1323 1382 ]; 1324 1383 out.push(...section(7, vecOf(exports))); ··· 1337 1396 emitSpin(), 1338 1397 emitZoom(), 1339 1398 emitContrast(), 1399 + emitSound(), 1340 1400 ]; 1341 1401 1342 1402 const paintBody = { locals: this.paintLocals, code: [...this.code.bytes(), 0x0b] };
+164
kidlisp-wasm/sonify.mjs
··· 1 + #!/usr/bin/env node 2 + // Render KidLisp piece to animated WebP + WAV audio via self-contained WASM. 3 + // Usage: node sonify.mjs <piece.lisp> [frames] [fps] [size] [sampleRate] 4 + 5 + import { readFileSync, writeFileSync, mkdirSync } from "fs"; 6 + import { basename } from "path"; 7 + import { execSync } from "child_process"; 8 + import sharp from "sharp"; 9 + import { Compiler } from "./compiler.mjs"; 10 + 11 + const OUT_DIR = new URL("./output/", import.meta.url).pathname; 12 + mkdirSync(OUT_DIR, { recursive: true }); 13 + 14 + const input = process.argv[2] || "roz.lisp"; 15 + const FRAMES = parseInt(process.argv[3]) || 300; 16 + const FPS = parseInt(process.argv[4]) || 30; 17 + const SIZE = parseInt(process.argv[5]) || 256; 18 + const SAMPLE_RATE = parseInt(process.argv[6]) || 44100; 19 + const SAMPLES_PER_FRAME = Math.floor(SAMPLE_RATE / FPS); 20 + 21 + const path = new URL(input, import.meta.url).pathname; 22 + const source = readFileSync(path, "utf-8"); 23 + const name = basename(input, ".lisp"); 24 + 25 + console.log(`Compiling ${input}...`); 26 + const compiler = new Compiler(); 27 + const wasmBytes = compiler.compile(source); 28 + const mathImports = { 29 + math: { 30 + sin: (x) => Math.fround(Math.sin(x)), 31 + cos: (x) => Math.fround(Math.cos(x)), 32 + random: () => Math.fround(Math.random()), 33 + }, 34 + }; 35 + const { instance } = await WebAssembly.instantiate(wasmBytes, mathImports); 36 + console.log( 37 + `WASM: ${wasmBytes.length} bytes | ${FRAMES} frames @ ${FPS}fps | ${SIZE}x${SIZE} | ${SAMPLE_RATE}Hz`, 38 + ); 39 + 40 + // Audio buffer offset: w * h * 8 (after pixel + temp buffers) 41 + const audioOffset = SIZE * SIZE * 8; 42 + const totalSamples = FRAMES * SAMPLES_PER_FRAME; 43 + const allAudio = new Float32Array(totalSamples); 44 + 45 + // Render frames 46 + const framePngs = []; 47 + for (let f = 0; f < FRAMES; f++) { 48 + instance.exports.paint(SIZE, SIZE, f); 49 + instance.exports.sound(SAMPLE_RATE, FPS, f); 50 + 51 + // Read 
pixels 52 + const mem = new Uint8Array(instance.exports.memory.buffer); 53 + const pixels = Buffer.from(mem.slice(0, SIZE * SIZE * 4)); 54 + const png = await sharp(pixels, { 55 + raw: { width: SIZE, height: SIZE, channels: 4 }, 56 + }) 57 + .png() 58 + .toBuffer(); 59 + framePngs.push(png); 60 + 61 + // Read audio samples (f32 at audioOffset) 62 + const audioView = new Float32Array( 63 + instance.exports.memory.buffer, 64 + audioOffset, 65 + SAMPLES_PER_FRAME, 66 + ); 67 + allAudio.set(audioView, f * SAMPLES_PER_FRAME); 68 + 69 + if ((f + 1) % 30 === 0 || f === FRAMES - 1) { 70 + process.stdout.write(`\r Rendered ${f + 1}/${FRAMES} frames`); 71 + } 72 + } 73 + console.log(); 74 + 75 + // ─── Write WAV ─────────────────────────────────────────────────────── 76 + function writeWav(filename, samples, sampleRate) { 77 + const numSamples = samples.length; 78 + const bytesPerSample = 2; // 16-bit PCM 79 + const dataSize = numSamples * bytesPerSample; 80 + const buffer = Buffer.alloc(44 + dataSize); 81 + 82 + // RIFF header 83 + buffer.write("RIFF", 0); 84 + buffer.writeUInt32LE(36 + dataSize, 4); 85 + buffer.write("WAVE", 8); 86 + 87 + // fmt chunk 88 + buffer.write("fmt ", 12); 89 + buffer.writeUInt32LE(16, 16); // chunk size 90 + buffer.writeUInt16LE(1, 20); // PCM 91 + buffer.writeUInt16LE(1, 22); // mono 92 + buffer.writeUInt32LE(sampleRate, 24); 93 + buffer.writeUInt32LE(sampleRate * bytesPerSample, 28); // byte rate 94 + buffer.writeUInt16LE(bytesPerSample, 32); // block align 95 + buffer.writeUInt16LE(16, 34); // bits per sample 96 + 97 + // data chunk 98 + buffer.write("data", 36); 99 + buffer.writeUInt32LE(dataSize, 40); 100 + 101 + // Convert f32 samples to 16-bit PCM 102 + for (let i = 0; i < numSamples; i++) { 103 + const s = Math.max(-1, Math.min(1, samples[i])); 104 + const pcm = Math.round(s * 32767); 105 + buffer.writeInt16LE(pcm, 44 + i * 2); 106 + } 107 + 108 + writeFileSync(filename, buffer); 109 + } 110 + 111 + const wavPath = 
`${OUT_DIR}${name}.wav`; 112 + writeWav(wavPath, allAudio, SAMPLE_RATE); 113 + const wavSize = (allAudio.length * 2) / 1024; 114 + console.log(`${name}.wav (${SAMPLE_RATE}Hz mono, ${(totalSamples / SAMPLE_RATE).toFixed(1)}s, ${wavSize.toFixed(0)}KB)`); 115 + 116 + // ─── Encode WebP ───────────────────────────────────────────────────── 117 + console.log("Encoding animated WebP..."); 118 + const tmpDir = `${OUT_DIR}.frames-${name}`; 119 + mkdirSync(tmpDir, { recursive: true }); 120 + 121 + for (let f = 0; f < framePngs.length; f++) { 122 + const framePath = `${tmpDir}/frame-${String(f).padStart(5, "0")}.png`; 123 + await sharp(framePngs[f]).toFile(framePath); 124 + } 125 + 126 + const webpPath = `${OUT_DIR}${name}.webp`; 127 + execSync( 128 + `ffmpeg -y -framerate ${FPS} -i "${tmpDir}/frame-%05d.png" -loop 0 -lossless 1 "${webpPath}" 2>/dev/null`, 129 + ); 130 + execSync(`rm -rf "${tmpDir}"`); 131 + 132 + const { statSync } = await import("fs"); 133 + const webpSize = statSync(webpPath).size; 134 + console.log(`${name}.webp (${SIZE}x${SIZE}, ${FRAMES} frames, ${(webpSize / 1024).toFixed(1)}KB)`); 135 + 136 + // ─── Mux video + audio to mp4 ──────────────────────────────────────── 137 + try { 138 + const mp4Path = `${OUT_DIR}${name}.mp4`; 139 + const mp4Video = `${OUT_DIR}${name}_video.mp4`; 140 + 141 + // Step 1: encode video-only mp4 from frames 142 + const frameDir2 = `${OUT_DIR}.frames2-${name}`; 143 + mkdirSync(frameDir2, { recursive: true }); 144 + for (let f = 0; f < framePngs.length; f++) { 145 + await sharp(framePngs[f]).toFile( 146 + `${frameDir2}/frame-${String(f).padStart(5, "0")}.png`, 147 + ); 148 + } 149 + execSync( 150 + `ffmpeg -y -framerate ${FPS} -i "${frameDir2}/frame-%05d.png" -c:v libx264 -pix_fmt yuv420p "${mp4Video}" 2>/dev/null`, 151 + ); 152 + execSync(`rm -rf "${frameDir2}"`); 153 + 154 + // Step 2: mux video + audio 155 + execSync( 156 + `ffmpeg -y -i "${mp4Video}" -i "${wavPath}" -map 0:v -map 1:a -c:v copy -c:a libmp3lame -ar 24000 -ac 2 
-b:a 160k -movflags +faststart -shortest "${mp4Path}" 2>/dev/null`, 157 + ); 158 + execSync(`rm -f "${mp4Video}"`); 159 + 160 + const mp4Size = statSync(mp4Path).size; 161 + console.log(`${name}.mp4 (video+audio, ${(mp4Size / 1024).toFixed(1)}KB)`); 162 + } catch { 163 + console.log("(ffmpeg mp4 mux not available, WAV + WebP saved separately)"); 164 + }