// recording.js
// Records per-synth segments and merges them into one WAV on stop.
// Attach to window.recording
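//
// Usage sketch (synthA / synthB are hypothetical synth objects exposing the
// audioContext and masterGain properties that start() requires):
//
//   await recording.start(synthA);      // begin the first segment
//   await recording.switchTo(synthB);   // close that segment, start the next
//   await recording.stopAndMerge();     // concatenate all segments, download WAV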

(function () {
  window.recording = {
    // internal state
    _currentRecorder: null,
    _currentDestination: null,
    _currentChunks: null,
    _currentSynth: null,
    segments: [], // array of Blob segments in order

    // Start recording for the given synth.
    // If a recorder is already running, it stops that segment first.
    async start(synth) {
      if (!synth || !synth.audioContext || !synth.masterGain) {
        console.warn("recording.start: invalid synth");
        return;
      }

      // If currently recording on another synth, stop that segment first
      if (this._currentRecorder) {
        await this._stopCurrentSegment(); // pushes to segments
      }

      // create a destination inside this synth's audioContext
      const dest = synth.audioContext.createMediaStreamDestination();
      try {
        // connect synth audio into this destination
        synth.masterGain.connect(dest);
      } catch (e) {
        console.error("recording.start: connect failed", e);
        return;
      }

      const mime = this._chooseMimeType();
      const recorder = new MediaRecorder(dest.stream, mime ? { mimeType: mime } : undefined);

      this._currentRecorder = recorder;
      this._currentDestination = dest;
      this._currentChunks = [];
      this._currentSynth = synth;

      recorder.ondataavailable = (ev) => {
        if (ev.data && ev.data.size) this._currentChunks.push(ev.data);
      };

      recorder.onstart = () => {
        console.log("Recording segment started (synth):", synth.constructor ? synth.constructor.name : synth);
      };

      recorder.onerror = (e) => {
        console.error("MediaRecorder error:", e);
      };

      recorder.start();

      // hand the waveform/timer display over to this synth
      // (visualize is attached by the visualization module below)
      window.recording.visualize.switch(synth.masterGain, synth.audioContext);
    },

    // Switch recording to a new synth (stop previous segment, start a new one)
    async switchTo(synth) {
      await this.start(synth);
    },

    // Stop current recorder and keep the segment, but do not merge/download yet.
    async stopSegment() {
      if (!this._currentRecorder) {
        console.warn("recording.stopSegment: nothing to stop");
        return;
      }
      await this._stopCurrentSegment(); // pushes to segments
    },

    // Stop all recording and merge segments into a single WAV, then trigger download.
    // filename defaults to a timestamped name, e.g. music-of-life-1700000000000.wav
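    //
    // Usage sketch:
    //   await recording.stopAndMerge();              // timestamped default name
    //   await recording.stopAndMerge("take-1.wav");  // explicit filename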
    async stopAndMerge(filename = `music-of-life-${Date.now()}.wav`) {
      // stop current active segment if any
      if (this._currentRecorder) {
        await this._stopCurrentSegment();
      }

      if (!this.segments || this.segments.length === 0) {
        console.warn("recording.stopAndMerge: no segments recorded");
        return;
      }

      try {
        // 1) decode all blobs to AudioBuffers
        const decodedBuffers = await this._decodeAllSegments(this.segments);

        // 2) choose a target sampleRate: the max across segments (min 44100),
        //    so no segment is downsampled
        const targetSampleRate = decodedBuffers.reduce((m, b) => Math.max(m, b.sampleRate), 44100);

        // 3) resample buffers that need it
        const resampled = await Promise.all(
          decodedBuffers.map((buf) =>
            buf.sampleRate === targetSampleRate ? Promise.resolve(buf) : this._resampleBuffer(buf, targetSampleRate)
          )
        );

        // 4) compute total length and channel count
        const maxChannels = resampled.reduce((m, b) => Math.max(m, b.numberOfChannels), 1);
        const totalLength = resampled.reduce((sum, b) => sum + b.length, 0);

        // 5) create an OfflineAudioContext to render the concatenated audio
        const offline = new OfflineAudioContext(maxChannels, totalLength, targetSampleRate);

        // 6) schedule each buffer back-to-back on the offline timeline
        let writeOffset = 0; // in sample frames
        for (const buf of resampled) {
          const src = offline.createBufferSource();
          // buffers with fewer channels than the offline destination are
          // upmixed by the normal channel-mixing rules, so mismatches are fine
          src.buffer = buf;
          src.connect(offline.destination);
          src.start(writeOffset / targetSampleRate);
          writeOffset += buf.length;
        }

        // 7) render final buffer
        const finalBuffer = await offline.startRendering();

        // 8) encode to WAV (PCM16) and download
        const wavBlob = this._audioBufferToWavBlob(finalBuffer);
        this._downloadBlob(wavBlob, filename);
      } catch (e) {
        console.error("recording.stopAndMerge error:", e);
      } finally {
        // clear stored segments (we consumed them)
        this.segments.length = 0;
      }
    },

    // Internal: stop current recorder, push blob to segments
    _stopCurrentSegment() {
      const self = this;
      return new Promise((resolve) => {
        if (!self._currentRecorder) return resolve();

        const currentRecorder = self._currentRecorder;
        const chunks = self._currentChunks || [];
        const dest = self._currentDestination;
        const synth = self._currentSynth;

        currentRecorder.onstop = async () => {
          try {
            const blob = new Blob(chunks, { type: currentRecorder.mimeType || "audio/webm" });
            self.segments.push(blob);
            console.log("Recording segment saved. segments:", self.segments.length);
            // disconnect the synth from the destination
            if (synth && synth.masterGain && dest) {
              try { synth.masterGain.disconnect(dest); } catch (e) { /* ignore */ }
            }
          } catch (err) {
            console.error("Error finishing segment:", err);
          } finally {
            // reset current recorder state
            self._currentRecorder = null;
            self._currentChunks = null;
            self._currentDestination = null;
            self._currentSynth = null;
            resolve();
          }
        };

        try {
          currentRecorder.stop();
        } catch (e) {
          console.warn("Error stopping recorder:", e);
          // fall back: still try to clean up
          try {
            if (synth && synth.masterGain && dest) synth.masterGain.disconnect(dest);
          } catch (_) { }
          self._currentRecorder = null;
          self._currentChunks = null;
          self._currentDestination = null;
          self._currentSynth = null;
          resolve();
        }
      });
    },

    // decode blobs -> AudioBuffer[] using a temporary AudioContext
    async _decodeAllSegments(blobs) {
      const ac = new (window.AudioContext || window.webkitAudioContext)();
      try {
        const buffers = [];
        for (const b of blobs) {
          const ab = await b.arrayBuffer();
          // decodeAudioData returns a promise in modern browsers
          const decoded = await ac.decodeAudioData(ab.slice(0));
          buffers.push(decoded);
        }
        return buffers;
      } finally {
        // close decode context
        try { ac.close(); } catch (e) { }
      }
    },

    // resample an AudioBuffer to targetSampleRate using OfflineAudioContext
    async _resampleBuffer(buffer, targetSampleRate) {
      const channels = buffer.numberOfChannels;
      const frames = Math.ceil(buffer.duration * targetSampleRate);
      const offline = new OfflineAudioContext(channels, frames, targetSampleRate);
      const src = offline.createBufferSource();
      src.buffer = buffer;
      src.connect(offline.destination);
      src.start(0);
      return offline.startRendering();
    },

    // convert AudioBuffer to WAV Blob (16-bit PCM)
    _audioBufferToWavBlob(buffer) {
      const numChannels = buffer.numberOfChannels;
      const sampleRate = buffer.sampleRate;
      const format = 1; // PCM
      const bitsPerSample = 16;

      // total size of the interleaved 16-bit sample data
      const length = buffer.length * numChannels * (bitsPerSample / 8);
      const headerLength = 44;
      const totalLength = headerLength + length;
      const arrayBuffer = new ArrayBuffer(totalLength);
      const view = new DataView(arrayBuffer);

      let offset = 0;

      function writeString(s) {
        for (let i = 0; i < s.length; i++) {
          view.setUint8(offset + i, s.charCodeAt(i));
        }
        offset += s.length;
      }
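
      // Canonical 44-byte WAV header, as written below (multi-byte fields are
      // little-endian):
      //   offset  0: "RIFF"             offset 22: channel count
      //   offset  4: file size - 8      offset 24: sample rate
      //   offset  8: "WAVE"             offset 28: byte rate
      //   offset 12: "fmt "             offset 32: block align
      //   offset 16: fmt chunk size     offset 34: bits per sample
      //   offset 20: format (1 = PCM)   offset 36: "data", then data size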
      // write RIFF header
      writeString("RIFF");
      view.setUint32(offset, totalLength - 8, true); offset += 4; // file length - 8
      writeString("WAVE");
      writeString("fmt ");
      view.setUint32(offset, 16, true); offset += 4; // fmt chunk length
      view.setUint16(offset, format, true); offset += 2; // audio format (1 = PCM)
      view.setUint16(offset, numChannels, true); offset += 2;
      view.setUint32(offset, sampleRate, true); offset += 4;
      view.setUint32(offset, sampleRate * numChannels * bitsPerSample / 8, true); offset += 4; // byte rate
      view.setUint16(offset, numChannels * bitsPerSample / 8, true); offset += 2; // block align
      view.setUint16(offset, bitsPerSample, true); offset += 2;
      writeString("data");
      view.setUint32(offset, totalLength - headerLength, true); offset += 4;

      // write PCM samples
      const interleaved = new Float32Array(buffer.length * numChannels);
      // read per channel and interleave
      for (let ch = 0; ch < numChannels; ch++) {
        const channelData = buffer.getChannelData(ch);
        for (let i = 0; i < channelData.length; i++) {
          interleaved[i * numChannels + ch] = channelData[i];
        }
      }

      // write samples as 16-bit PCM, clamped to [-1, 1]; negatives scale by
      // 0x8000 and positives by 0x7fff because int16 is asymmetric (-32768..32767)
      let index = 0;
      for (let i = 0; i < interleaved.length; i++, index += 2) {
        let s = Math.max(-1, Math.min(1, interleaved[i]));
        s = s < 0 ? s * 0x8000 : s * 0x7fff;
        view.setInt16(offset + index, s, true);
      }

      return new Blob([view], { type: "audio/wav" });
    },

    _downloadBlob(blob, filename) {
      const url = URL.createObjectURL(blob);
      const a = document.createElement("a");
      a.style.display = "none";
      a.href = url;
      a.download = filename;
      document.body.appendChild(a);
      a.click();
      setTimeout(() => {
        URL.revokeObjectURL(url);
        a.remove();
      }, 1000);
    },

    // Pick a supported MediaRecorder container. MediaRecorder generally cannot
    // produce WAV directly, which is why segments are captured as webm/ogg and
    // re-encoded to WAV on merge.
    _chooseMimeType() {
      // prefer webm/opus if available
      if (MediaRecorder.isTypeSupported && MediaRecorder.isTypeSupported("audio/webm;codecs=opus")) {
        return "audio/webm;codecs=opus";
      }
      if (MediaRecorder.isTypeSupported && MediaRecorder.isTypeSupported("audio/ogg")) {
        return "audio/ogg";
      }
      return null; // let the browser pick its default
    }
  };
})();

// --- Timer & Waveform visualization ---
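//
// Exposed as window.recording.visualize (hooks are attached at the bottom of
// this module). Usage sketch:
//   recording.visualize.start(synth.masterGain, synth.audioContext);
//   recording.visualize.pause();   // freeze the timer, keep the last frame
//   recording.visualize.resume();  // continue timing
//   recording.visualize.stop();    // tear down the analyser, reset the timer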

(function () {
  let analyser = null;
  let dataArray = null;
  let animationId = null;
  let timerInterval = null;
  let elapsedTime = 0; // total elapsed seconds
  let lastUpdate = 0; // for pausing/resuming

  const waveformCanvas = document.createElement('canvas');
  waveformCanvas.width = 400;
  waveformCanvas.height = 80;
  waveformCanvas.style.border = '1px solid #ccc';
  waveformCanvas.style.display = 'block';
  waveformCanvas.style.marginTop = '6px';
  document.body.appendChild(waveformCanvas);
  const ctx = waveformCanvas.getContext('2d');

  const timerDisplay = document.createElement('div');
  timerDisplay.style.color = '#cfe7ff';
  timerDisplay.style.margin = '4px 0';
  timerDisplay.textContent = '00:00.0'; // match the format resetTimer() uses
  document.body.appendChild(timerDisplay);

  // (Re)create the analyser for the given context, detaching any previous source.
  function ensureAnalyser(audioContext) {
    if (analyser && analyser.source) {
      try { analyser.source.disconnect(analyser); } catch { }
    }
    analyser = audioContext.createAnalyser();
    analyser.fftSize = 2048;
    dataArray = new Uint8Array(analyser.fftSize);
  }

  // Route a synth node through the analyser so its signal can be drawn.
  function attachSynth(synthNode, audioContext) {
    if (!synthNode) return;
    ensureAnalyser(audioContext);

    // Rewire only if this node isn't already attached
    if (synthNode !== analyser.source) {
      try { synthNode.disconnect(analyser); } catch { }
      try { synthNode.disconnect(audioContext.destination); } catch { }
      synthNode.connect(analyser);
      analyser.connect(audioContext.destination); // pass-through
      analyser.source = synthNode; // track current node
    }
  }

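  // Resulting graph while visualizing (the analyser taps the signal for
  // drawing and passes it through unchanged):
  //
  //   synthNode ──► analyser ──► audioContext.destination
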
  function drawWaveform() {
    if (!analyser) return;
    analyser.getByteTimeDomainData(dataArray);

    ctx.fillStyle = '#071020';
    ctx.fillRect(0, 0, waveformCanvas.width, waveformCanvas.height);

    ctx.lineWidth = 2;
    ctx.strokeStyle = '#4fd1c5';
    ctx.beginPath();

    const sliceWidth = waveformCanvas.width / dataArray.length;
    let x = 0;
    const midY = waveformCanvas.height / 2;

    for (let i = 0; i < dataArray.length; i++) {
      // scale v from [0..255] to [-1..1]
      const v = (dataArray[i] - 128) / 128;
      // scale to canvas height; the 0.95 keeps the trace off the edges
      const y = midY + v * midY * 0.95;
      if (i === 0) ctx.moveTo(x, y);
      else ctx.lineTo(x, y);
      x += sliceWidth;
    }
    ctx.stroke();

    animationId = requestAnimationFrame(drawWaveform);
  }

  function startTimer() {
    lastUpdate = Date.now();
    if (timerInterval) return; // already running

    timerInterval = setInterval(() => {
      const now = Date.now();
      elapsedTime += (now - lastUpdate) / 1000;
      lastUpdate = now;

      const mins = String(Math.floor(elapsedTime / 60)).padStart(2, '0');
      const secs = String(Math.floor(elapsedTime % 60)).padStart(2, '0');
      const tenths = Math.floor((elapsedTime % 1) * 10);
      timerDisplay.textContent = `${mins}:${secs}.${tenths}`;
    }, 100);
  }

  function pauseTimer() {
    if (timerInterval) {
      clearInterval(timerInterval);
      timerInterval = null;
    }
  }

  function resetTimer() {
    pauseTimer();
    elapsedTime = 0;
    lastUpdate = 0;
    timerDisplay.textContent = '00:00.0';
  }

  // Expose hooks
  window.recording.visualize = {
    start(synthNode, audioContext) {
      attachSynth(synthNode, audioContext);
      if (!animationId) drawWaveform();
      startTimer();
    },
    pause() {
      pauseTimer();
    },
    resume() {
      lastUpdate = Date.now();
      startTimer();
    },
    stop() {
      cancelAnimationFrame(animationId);
      animationId = null;
      if (analyser && analyser.source) {
        try { analyser.source.disconnect(analyser); } catch { }
      }
      analyser = null;
      dataArray = null;
      resetTimer();
      ctx.clearRect(0, 0, waveformCanvas.width, waveformCanvas.height);
    },
    attachSynth,
    // Re-point the visualization at a new synth without resetting the timer
    switch(synthNode, audioContext) {
      cancelAnimationFrame(animationId);
      animationId = null;
      if (analyser && analyser.source) {
        try { analyser.source.disconnect(analyser); } catch { }
      }
      analyser = null;
      dataArray = null;
      ctx.clearRect(0, 0, waveformCanvas.width, waveformCanvas.height);

      attachSynth(synthNode, audioContext);
      if (!animationId) drawWaveform();
    }
  };
})();