/**
 * bootstrap.js — ES module entry point.
 *
 * The PatchRouter class mirrors the visual patch bay into a Web Audio graph.
 * Sound only flows when the Output node has a cable connected; the signal
 * travels through whatever chain the user has assembled. The engine's
 * AnalyserNode sits after the Output node, so the oscilloscope and spectrum
 * always reflect exactly what feeds the Output.
 */
import init, {
  AudioEngine,
  OscilloscopeView,
  SpectrumView,
  PatchBay,
  VirtualKeyboard,
  SynthParams,
} from "./pkg/synth_visualiser.js";

// ── Canvas buffer sizing ──────────────────────────────────────────────────────

// Match a canvas's pixel buffer to its CSS layout size. The buffer is only
// touched when the rounded size actually changed (resizing clears a canvas),
// and degenerate zero/negative layouts are ignored.
function fitCanvas(canvas) {
  const targetW = Math.round(canvas.clientWidth);
  const targetH = Math.round(canvas.clientHeight);
  if (targetW <= 0 || targetH <= 0) return;
  if (canvas.width === targetW && canvas.height === targetH) return;
  canvas.width = targetW;
  canvas.height = targetH;
}

// ── Resize handle ─────────────────────────────────────────────────────────────

// Lets the user drag the patch-bay panel taller or shorter. Pointer capture on
// the handle keeps the drag alive even when the cursor leaves it.
function initResizeHandle() {
  const handle = document.getElementById("resize-handle");
  const panel = document.getElementById("patchbay-panel");

  // Non-null while a drag is in progress.
  let drag = null; // { originY, originHeight }

  handle.addEventListener("pointerdown", (e) => {
    drag = { originY: e.clientY, originHeight: panel.offsetHeight };
    handle.setPointerCapture(e.pointerId);
    handle.classList.add("active");
  });

  handle.addEventListener("pointermove", (e) => {
    if (drag === null) return;
    // Dragging upward grows the panel; clamp to a usable minimum height.
    const newHeight = drag.originHeight - (e.clientY - drag.originY);
    panel.style.height = `${Math.max(80, newHeight)}px`;
  });

  const endDrag = () => {
    drag = null;
    handle.classList.remove("active");
  };
  handle.addEventListener("pointerup", endDrag);
  handle.addEventListener("pointercancel", endDrag);
}

// ── PatchRouter ───────────────────────────────────────────────────────────────
//
// Reads get_patch_json() whenever patch_version() changes, then rebuilds a
// Web Audio graph that mirrors the visual patch bay topology.
// // Module → Web Audio mapping: // vco → OscillatorNode (sawtooth default; frequency driven by keyboard) // svf → BiquadFilterNode (cutoff + Q from params; LFO can modulate frequency) // vca → GainNode (gain from param; driven by ADSR envelope if patched) // adsr → (no node) drives the VCA GainNode gain via parameter automation // lfo → OscillatorNode + GainNode (low-freq oscillator modulating AudioParams) // out → GainNode (level param) → masterEnv → engine analyser → speakers // // Signal only reaches the speakers when an "out" module has an audio_in cable. class PatchRouter { constructor(audioCtx, engineInputNode) { this.ctx = audioCtx; // masterEnv: a final GainNode between the Out module and the engine // input (analyser). When no ADSR envelope is in the chain, noteOn/Off // drives this gain directly. When an ADSR drives a VCA, masterEnv // stays at 1.0 and the VCA gain carries the envelope shape. this.masterEnv = audioCtx.createGain(); this.masterEnv.gain.value = 0; this.masterEnv.connect(engineInputNode); this.nodes = new Map(); // moduleId → nodeInfo object this.lastVersion = -1; this.outHasCable = false; // true when Out.audio_in has a cable this.hasAdsrVca = false; // true when a VCA is driven by ADSR this.activeNote = -1; } // Call each frame. Rebuilds the audio graph only when the patch changes. sync(version, patchJson) { if (version === this.lastVersion) return; this.lastVersion = version; this._rebuild(JSON.parse(patchJson)); } // Call each frame when params_version changes. Updates audio params in-place // without tearing down and rebuilding the Web Audio graph. updateParams(patchJson) { const patch = JSON.parse(patchJson); for (const m of patch.modules) { const info = this.nodes.get(m.id); if (!info) continue; const now = this.ctx.currentTime; switch (m.kind) { case "vco": { if (this.activeNote < 0) { info.node.frequency.setTargetAtTime(m.params.freq_hz ?? 
440, now, 0.01); } const wt = (["sine","sawtooth","square","triangle","sawtooth"])[Math.round(m.params.waveform ?? 1)] ?? "sawtooth"; if (info.node.type !== wt) info.node.type = wt; break; } case "svf": info.node.frequency.setTargetAtTime(m.params.cutoff_hz ?? 2000, now, 0.01); info.node.Q.setTargetAtTime(0.5 + (m.params.resonance ?? 0.5) * 19.5, now, 0.01); break; case "vca": if (!info.adsrControlled) { info.node.gain.setTargetAtTime(m.params.gain ?? 1.0, now, 0.01); } info.staticGain = m.params.gain ?? 1.0; break; case "adsr": info.attack = m.params.attack_s ?? 0.01; info.decay = m.params.decay_s ?? 0.1; info.sustain = m.params.sustain ?? 0.7; info.release = m.params.release_s ?? 0.3; break; case "lfo": info.oscNode.frequency.setTargetAtTime(m.params.rate_hz ?? 2, now, 0.01); info.node.gain.setTargetAtTime((m.params.depth ?? 0.5) * 600, now, 0.01); break; case "out": info.node.gain.setTargetAtTime(m.params.level ?? 0.8, now, 0.01); break; } } } // ── noteOn / noteOff ────────────────────────────────────────────────────── noteOn(midiNote, retrigger = true) { // Unblock AudioContext on first user gesture if (this.ctx.state !== "running") this.ctx.resume(); const wasActive = this.activeNote >= 0; this.activeNote = midiNote; if (!this.outHasCable) return; const hz = this._midiHz(midiNote); const now = this.ctx.currentTime; // Update all VCO frequencies for (const [, info] of this.nodes) { if (info.kind !== "vco") continue; if (wasActive && !retrigger) { // Glide: smooth frequency transition, no envelope retrigger info.node.frequency.setTargetAtTime(hz, now, 0.02); } else { info.node.frequency.setValueAtTime(hz, now); } } if (!retrigger && wasActive) return; // glide: done // Trigger envelope if (this.hasAdsrVca) { // ADSR-driven VCA: masterEnv is a pass-through at 1.0 this.masterEnv.gain.cancelScheduledValues(now); this.masterEnv.gain.setValueAtTime(1.0, now); for (const [, info] of this.nodes) { if (info.kind !== "vca" || !info.adsrControlled) continue; const adsr = 
this.nodes.get(info.adsrId); if (!adsr) continue; const g = info.node.gain; g.cancelScheduledValues(now); g.setValueAtTime(0.0001, now); g.linearRampToValueAtTime(info.staticGain, now + adsr.attack); g.linearRampToValueAtTime(info.staticGain * adsr.sustain, now + adsr.attack + adsr.decay); } } else { // No ADSR: simple attack gate on masterEnv const g = this.masterEnv.gain; g.cancelScheduledValues(now); g.setValueAtTime(0.0001, now); g.exponentialRampToValueAtTime(0.3, now + 0.015); } } noteOff() { this.activeNote = -1; const now = this.ctx.currentTime; if (this.hasAdsrVca) { for (const [, info] of this.nodes) { if (info.kind !== "vca" || !info.adsrControlled) continue; const adsr = this.nodes.get(info.adsrId); if (!adsr) continue; const g = info.node.gain; g.cancelScheduledValues(now); g.setValueAtTime(Math.max(g.value, 0.0001), now); g.exponentialRampToValueAtTime(0.0001, now + adsr.release); } } else { const g = this.masterEnv.gain; g.cancelScheduledValues(now); g.setValueAtTime(Math.max(g.value, 0.0001), now); g.exponentialRampToValueAtTime(0.0001, now + 0.2); } } isOutputPatched() { return this.outHasCable; } // ── Private ─────────────────────────────────────────────────────────────── _rebuild(patch) { // Tear down: stop oscillators, disconnect all nodes for (const [, info] of this.nodes) { if (info.oscNode) { try { info.oscNode.stop(); info.oscNode.disconnect(); } catch(_){} } if (info.node) { try { info.node.disconnect(); } catch(_){} } } this.nodes.clear(); // Create Web Audio nodes for each visual module for (const m of patch.modules) { const info = this._makeNode(m); if (info) this.nodes.set(m.id, info); } // Determine topology flags before wiring this.outHasCable = patch.cables.some(c => { const dst = patch.modules.find(m => m.id === c.dst); return dst?.kind === "out" && c.dst_jack === "audio_in"; }); // Mark VCAs that have an ADSR patched into any of their CV param inputs for (const c of patch.cables) { if (!c.dst_jack.startsWith("cv_")) continue; 
const srcMod = patch.modules.find(m => m.id === c.src); const dstInfo = this.nodes.get(c.dst); if (srcMod?.kind === "adsr" && dstInfo?.kind === "vca") { dstInfo.adsrControlled = true; dstInfo.adsrId = c.src; } } this.hasAdsrVca = [...this.nodes.values()].some(n => n.kind === "vca" && n.adsrControlled); // Wire audio and CV cables for (const c of patch.cables) this._wire(c, patch); // If a note is held across the rebuild, reapply it if (this.activeNote >= 0 && this.outHasCable) { this.noteOn(this.activeNote, true); } else if (!this.outHasCable) { // Ensure silence immediately when output becomes unpatched this.masterEnv.gain.cancelScheduledValues(0); this.masterEnv.gain.setValueAtTime(0, 0); } } _makeNode(m) { const ctx = this.ctx; switch (m.kind) { case "vco": { const osc = ctx.createOscillator(); osc.type = (["sine","sawtooth","square","triangle","sawtooth"])[Math.round(m.params.waveform ?? 1)] ?? "sawtooth"; osc.frequency.value = m.params.freq_hz ?? 440; osc.start(); return { kind: "vco", node: osc, oscNode: osc }; } case "svf": { const f = ctx.createBiquadFilter(); f.type = "lowpass"; f.frequency.value = m.params.cutoff_hz ?? 2000; f.Q.value = 0.5 + (m.params.resonance ?? 0.5) * 19.5; return { kind: "svf", node: f }; } case "vca": { const g = ctx.createGain(); g.gain.value = m.params.gain ?? 1.0; return { kind: "vca", node: g, adsrControlled: false, adsrId: null, staticGain: m.params.gain ?? 1.0 }; } case "adsr": { // Pure data — drives VCA gain automation in noteOn/Off, no Web Audio node return { kind: "adsr", node: null, attack: m.params.attack_s ?? 0.01, decay: m.params.decay_s ?? 0.1, sustain: m.params.sustain ?? 0.7, release: m.params.release_s ?? 0.3 }; } case "lfo": { const osc = ctx.createOscillator(); osc.type = "sine"; osc.frequency.value = m.params.rate_hz ?? 2; const gain = ctx.createGain(); gain.gain.value = (m.params.depth ?? 
0.5) * 600; // modulation depth in Hz osc.connect(gain); osc.start(); return { kind: "lfo", node: gain, oscNode: osc }; } case "out": { const g = ctx.createGain(); g.gain.value = m.params.level ?? 0.8; g.connect(this.masterEnv); // always routes to the analyser chain return { kind: "out", node: g }; } } return null; } _wire(cable, patch) { const src = this.nodes.get(cable.src); const dst = this.nodes.get(cable.dst); if (!src?.node || !dst?.node) return; // ADSR has node:null — skip // ── Audio signal ─────────────────────────────────────────────────── if (cable.dst_jack === "audio_in") { try { src.node.connect(dst.node); } catch(_) {} return; } // ── Per-parameter CV modulation: cv_{param_id} ──────────────────── if (cable.dst_jack.startsWith("cv_")) { // ADSR → VCA: handled by parameter automation in noteOn/Off (not a node connection) if (src.kind === "adsr") return; const ap = this._getAudioParam(dst, cable.dst_jack.slice(3)); if (ap) try { src.node.connect(ap); } catch(_) {} } } // Returns the Web Audio AudioParam that corresponds to a param id on a node, // or null if no mapping exists. 
_getAudioParam(nodeInfo, paramId) { switch (nodeInfo.kind) { case "vco": if (paramId === "freq_hz") return nodeInfo.node.frequency; break; case "svf": if (paramId === "cutoff_hz") return nodeInfo.node.frequency; if (paramId === "resonance") return nodeInfo.node.Q; break; case "vca": if (paramId === "gain") return nodeInfo.node.gain; break; case "lfo": if (paramId === "rate_hz") return nodeInfo.oscNode.frequency; if (paramId === "depth") return nodeInfo.node.gain; break; case "out": if (paramId === "level") return nodeInfo.node.gain; break; case "adsr": // ADSR has no Web Audio node; envelope is driven via parameter automation break; } return null; } _midiHz(note) { return 440 * Math.pow(2, (note - 69) / 12); } } // ── Computer keyboard map (standard DAW layout) ─────────────────────────────── const KEY_NOTE = { z:48, s:49, x:50, d:51, c:52, v:53, g:54, b:55, h:56, n:57, j:58, m:59, q:60, 2:61, w:62, 3:63, e:64, r:65, 5:66, t:67, 6:68, y:69, 7:70, u:71, }; // ── Bootstrap ───────────────────────────────────────────────────────────────── const loader = document.getElementById("loader"); const status = document.getElementById("status"); const srLabel = document.getElementById("sample-rate"); const frameTime = document.getElementById("frame-time"); async function bootstrap() { initResizeHandle(); try { await init(); // ── WASM objects ────────────────────────────────────────────────────── const engine = new AudioEngine(); await engine.attach(); const analyser = engine.analyser_node(); const oscilloscope = new OscilloscopeView("oscilloscope-canvas", analyser); const spectrum = new SpectrumView("spectrum-canvas", analyser); const patchbay = new PatchBay("patchbay-canvas"); const keyboard = new VirtualKeyboard("keyboard-canvas"); // ── Canvas sizing ───────────────────────────────────────────────────── const pbCanvas = document.getElementById("patchbay-canvas"); const oscCanvas = document.getElementById("oscilloscope-canvas"); const spCanvas = 
document.getElementById("spectrum-canvas"); const kbCanvas = document.getElementById("keyboard-canvas"); const allCanvases = [oscCanvas, spCanvas, pbCanvas, kbCanvas]; allCanvases.forEach(fitCanvas); const ro = new ResizeObserver(() => allCanvases.forEach(fitCanvas)); allCanvases.forEach(c => ro.observe(c)); // ── Default patch bay layout ────────────────────────────────────────── const cw = pbCanvas.width || pbCanvas.clientWidth || 800; patchbay.add_module("vco", cw * 0.08, 80); patchbay.add_module("adsr", cw * 0.26, 80); patchbay.add_module("svf", cw * 0.46, 80); patchbay.add_module("vca", cw * 0.66, 80); patchbay.add_module("out", cw * 0.84, 80); // ── Patch bay pointer events ────────────────────────────────────────── pbCanvas.addEventListener("pointerdown", e => patchbay.on_pointer_down(e.offsetX, e.offsetY)); pbCanvas.addEventListener("pointermove", e => patchbay.on_pointer_move(e.offsetX, e.offsetY)); pbCanvas.addEventListener("pointerup", e => patchbay.on_pointer_up(e.offsetX, e.offsetY)); pbCanvas.addEventListener("dblclick", e => patchbay.on_double_click(e.offsetX, e.offsetY)); // ── Patch router: Web Audio graph driven by patch bay topology ──────── const audioCtx = engine.audio_context(); const inputNode = engine.input_node(); const router = new PatchRouter(audioCtx, inputNode); // ── Virtual keyboard pointer events ─────────────────────────────────── let kbDown = false; kbCanvas.addEventListener("pointerdown", e => { kbDown = true; kbCanvas.setPointerCapture(e.pointerId); const note = keyboard.on_pointer_down(e.offsetX, e.offsetY); if (note >= 0) router.noteOn(note, true); else router.noteOff(); }); kbCanvas.addEventListener("pointermove", e => { const result = keyboard.on_pointer_move(e.offsetX, e.offsetY); if (result === -2) return; // hover only if (kbDown) { if (result >= 0) router.noteOn(result, false); // glide else router.noteOff(); } }); kbCanvas.addEventListener("pointerup", e => { kbDown = false; keyboard.on_pointer_up(e.offsetX, e.offsetY); 
router.noteOff(); }); kbCanvas.addEventListener("pointerleave", () => { if (kbDown) { kbDown = false; keyboard.on_pointer_leave(); router.noteOff(); } else { keyboard.on_pointer_leave(); } }); // ── Computer keyboard events ────────────────────────────────────────── const heldKeys = new Set(); document.addEventListener("keydown", e => { if (e.repeat || e.ctrlKey || e.metaKey || e.altKey) return; const note = KEY_NOTE[e.key.toLowerCase()]; if (note === undefined) return; heldKeys.add(e.key.toLowerCase()); keyboard.set_active_note(note); router.noteOn(note, !heldKeys.size > 1); // retrigger only if first key }); document.addEventListener("keyup", e => { const key = e.key.toLowerCase(); heldKeys.delete(key); if (KEY_NOTE[key] === undefined) return; const remaining = [...heldKeys].filter(k => KEY_NOTE[k] !== undefined); if (remaining.length > 0) { const last = KEY_NOTE[remaining.at(-1)]; keyboard.set_active_note(last); router.noteOn(last, false); // glide to last held key } else { keyboard.set_active_note(-1); router.noteOff(); } }); // ── Params + engine start ───────────────────────────────────────────── const params = new SynthParams(); engine.set_params(params.to_json()); srLabel.textContent = `SR: ${engine.sample_rate()} Hz`; status.textContent = "Running"; engine.start(); // ── Render loop ─────────────────────────────────────────────────────── let last = performance.now(); let lastParamsVersion = -1; function frame(now) { // Sync Web Audio graph to patch bay topology when patch changes router.sync(patchbay.patch_version(), patchbay.get_patch_json()); // Apply live param changes without full graph rebuild const pv = patchbay.params_version(); if (pv !== lastParamsVersion) { lastParamsVersion = pv; router.updateParams(patchbay.get_patch_json()); } // Update output-node status label const outLabel = router.isOutputPatched() ? "patched" : "unpatched"; status.textContent = router.isOutputPatched() ? 
"Running · output patched" : "Running · output unpatched"; oscilloscope.draw(); spectrum.draw(); patchbay.draw(now); keyboard.draw(); frameTime.textContent = `frame: ${(now - last).toFixed(1)} ms`; last = now; requestAnimationFrame(frame); } requestAnimationFrame(frame); loader.classList.add("hidden"); // Brief keyboard hint const hint = document.getElementById("kb-hint"); if (hint) { hint.style.transition = "opacity 1s"; setTimeout(() => { hint.style.opacity = "1"; }, 2000); setTimeout(() => { hint.style.opacity = "0"; }, 7000); } } catch (err) { console.error("[bootstrap] Fatal:", err); loader.textContent = `Error: ${err.message ?? err}`; } } bootstrap();