Files
rsgp/src/lib/audio/processors/ConvolutionReverb.ts

256 lines
7.9 KiB
TypeScript

import type { AudioProcessor, ProcessorCategory } from './AudioProcessor';
/** Preset room characters used to shape the synthetic impulse response. */
type RoomType = 'small' | 'medium' | 'large' | 'hall' | 'plate' | 'chamber';

/** Randomized settings that drive IR generation and the wet/dry blend. */
interface ReverbParams {
  /** Which room preset the IR is modeled on. */
  roomType: RoomType;
  /** Reverb tail decay time, in seconds (drives IR length and tail envelope). */
  decayTime: number;
  /** Silence before the first reflection, in seconds. */
  preDelay: number;
  /** Wet/dry balance, 0..1; applied via an equal-power (sqrt) crossfade. */
  wetMix: number;
  /** Amplitude scalar for the sparse early-reflection impulses, 0..1. */
  earlyReflections: number;
  /** Amplitude scalar for the diffuse noise tail, 0..1. */
  diffusion: number;
}
export class ConvolutionReverb implements AudioProcessor {
  /** Fixed render rate in Hz; input channels are treated as being at this rate. */
  private readonly sampleRate = 44100;

  getName(): string {
    return 'Convolution Reverb';
  }

  getDescription(): string {
    return 'Realistic room ambience using Web Audio ConvolverNode with synthetic impulse responses';
  }

  getCategory(): ProcessorCategory {
    return 'Space';
  }

  /**
   * Applies a randomized convolution reverb to a stereo pair.
   *
   * Builds a synthetic stereo impulse response, convolves the input with it in
   * an OfflineAudioContext, blends wet and dry paths with an equal-power
   * crossfade, and returns channels of exactly the input length (any reverb
   * tail beyond the input length is truncated by the offline render).
   *
   * @param leftChannel  Left input samples; its length defines the output length.
   * @param rightChannel Right input samples (expected to match left's length).
   * @returns Processed [left, right] channels, peak-limited to avoid clipping.
   */
  async process(
    leftChannel: Float32Array,
    rightChannel: Float32Array
  ): Promise<[Float32Array, Float32Array]> {
    const inputLength = leftChannel.length;

    // Guard: an OfflineAudioContext cannot be constructed with length 0.
    if (inputLength === 0) {
      return [new Float32Array(0), new Float32Array(0)];
    }

    const params = this.randomParams();

    // Offline context sized to the input so the output length matches exactly.
    const offlineContext = new OfflineAudioContext(2, inputLength, this.sampleRate);

    // Create input buffer from our channels (copy to ensure proper ArrayBuffer type)
    const inputBuffer = offlineContext.createBuffer(2, inputLength, this.sampleRate);
    inputBuffer.copyToChannel(new Float32Array(leftChannel), 0);
    inputBuffer.copyToChannel(new Float32Array(rightChannel), 1);

    // Generate impulse response with slight stereo differences
    const irBuffer = this.generateImpulseResponseBuffer(offlineContext, params);

    // Audio graph: source -> (dry gain, convolver -> wet gain) -> destination
    const source = offlineContext.createBufferSource();
    source.buffer = inputBuffer;

    const convolver = offlineContext.createConvolver();
    // BUG FIX: per the Web Audio spec, `normalize` is applied at the moment
    // `buffer` is assigned, so it must be set to false BEFORE the buffer.
    // Previously it was set after, which had no effect and let the node
    // rescale the IR on top of our own normalization.
    convolver.normalize = false; // We normalize the IR ourselves for consistent gain
    convolver.buffer = irBuffer;

    const dryGain = offlineContext.createGain();
    const wetGain = offlineContext.createGain();
    // Equal-power crossfade keeps perceived loudness steady across wetMix values.
    dryGain.gain.value = Math.sqrt(1 - params.wetMix);
    wetGain.gain.value = Math.sqrt(params.wetMix);

    // Dry path
    source.connect(dryGain);
    dryGain.connect(offlineContext.destination);

    // Wet path
    source.connect(convolver);
    convolver.connect(wetGain);
    wetGain.connect(offlineContext.destination);

    source.start(0);
    const renderedBuffer = await offlineContext.startRendering();

    // Extract channels; destination arrays are pre-sized to the input length.
    const outLeft = new Float32Array(inputLength);
    const outRight = new Float32Array(inputLength);
    renderedBuffer.copyFromChannel(outLeft, 0);
    renderedBuffer.copyFromChannel(outRight, 1);

    // Tame peaks introduced by summing the wet and dry paths.
    this.normalizeInPlace(outLeft, outRight);
    return [outLeft, outRight];
  }

  /**
   * Picks a random room preset and derives decay/diffusion ranges tuned to it,
   * plus randomized pre-delay, wet mix, and early-reflection level.
   */
  private randomParams(): ReverbParams {
    const roomTypes: RoomType[] = ['small', 'medium', 'large', 'hall', 'plate', 'chamber'];
    const roomType = roomTypes[Math.floor(Math.random() * roomTypes.length)];

    // Decay time (seconds) and tail diffusion are tuned per room character.
    let decayTime: number;
    let diffusion: number;
    switch (roomType) {
      case 'small':
        decayTime = Math.random() * 0.3 + 0.2; // 0.2 – 0.5 s
        diffusion = Math.random() * 0.3 + 0.5;
        break;
      case 'medium':
        decayTime = Math.random() * 0.5 + 0.4; // 0.4 – 0.9 s
        diffusion = Math.random() * 0.2 + 0.6;
        break;
      case 'large':
        decayTime = Math.random() * 0.7 + 0.6; // 0.6 – 1.3 s
        diffusion = Math.random() * 0.2 + 0.7;
        break;
      case 'hall':
        decayTime = Math.random() * 0.5 + 0.8; // 0.8 – 1.3 s
        diffusion = Math.random() * 0.15 + 0.75;
        break;
      case 'plate':
        decayTime = Math.random() * 0.4 + 0.5; // 0.5 – 0.9 s
        diffusion = Math.random() * 0.3 + 0.6;
        break;
      case 'chamber':
        decayTime = Math.random() * 0.6 + 0.5; // 0.5 – 1.1 s
        diffusion = Math.random() * 0.2 + 0.65;
        break;
    }

    return {
      roomType,
      decayTime,
      preDelay: Math.random() * 0.025 + 0.005, // 5 – 30 ms
      wetMix: Math.random() * 0.4 + 0.3, // 30 – 70 % wet
      earlyReflections: Math.random() * 0.3 + 0.4,
      diffusion
    };
  }

  /**
   * Builds a stereo AudioBuffer impulse response for the given params.
   * Left and right channels are generated independently for stereo width.
   */
  private generateImpulseResponseBuffer(context: OfflineAudioContext, params: ReverbParams): AudioBuffer {
    // IR length scales with decay time, capped at 4 seconds.
    const irDuration = Math.min(params.decayTime * 4, 4.0);
    const irLength = Math.floor(irDuration * this.sampleRate);
    const irBuffer = context.createBuffer(2, irLength, this.sampleRate);

    const irLeft = new Float32Array(irLength);
    const irRight = new Float32Array(irLength);
    // Generate stereo IRs with slight differences for width
    this.generateImpulseResponse(irLeft, params, 0);
    this.generateImpulseResponse(irRight, params, 1);
    irBuffer.copyToChannel(irLeft, 0);
    irBuffer.copyToChannel(irRight, 1);
    return irBuffer;
  }

  /**
   * Fills `ir` in place with a synthetic impulse response: sparse early
   * reflections, then a multi-band noise tail with exponential decay and a
   * one-pole low-pass for high-frequency damping, then peak-normalizes the
   * channel to 0.5.
   *
   * @param ir      Destination sample buffer (assumed zero-initialized).
   * @param params  Reverb parameters controlling timing and levels.
   * @param channel 0 = left, 1 = right; flips the early-reflection stereo offset.
   */
  private generateImpulseResponse(ir: Float32Array, params: ReverbParams, channel: number): void {
    const irLength = ir.length;
    const preDelaySamples = Math.floor(params.preDelay * this.sampleRate);

    // Relative room size per preset; scales reflection count and timing.
    const roomSizes: Record<RoomType, number> = {
      small: 0.2,
      medium: 0.4,
      large: 0.6,
      hall: 0.85,
      plate: 0.5,
      chamber: 0.55
    };
    const roomSize = roomSizes[params.roomType];
    const earlyDecayTime = roomSize * 0.05; // early-reflection window, seconds

    // Early reflections: sparse impulses biased toward the start (pow 1.5).
    const numEarlyReflections = Math.floor(roomSize * 30) + 8;
    const earlyReflectionTime = earlyDecayTime * this.sampleRate;
    for (let i = 0; i < numEarlyReflections; i++) {
      const time = preDelaySamples + Math.pow(Math.random(), 1.5) * earlyReflectionTime;
      const index = Math.floor(time);
      if (index < irLength) {
        // Later reflections travel further, so they arrive quieter.
        const distanceAttenuation = 1.0 / (1.0 + time / this.sampleRate);
        const amplitude = params.earlyReflections * distanceAttenuation * (0.5 + Math.random() * 0.5);
        // Opposite-signed random offset per channel widens the stereo image.
        const stereoOffset = (channel === 0 ? -1 : 1) * Math.random() * 0.3;
        ir[index] += amplitude * (1.0 + stereoOffset);
      }
    }

    // Diffuse tail: filtered noise starting after the early-reflection window.
    const diffuseStart = preDelaySamples + Math.floor(earlyReflectionTime);
    const decaySamples = params.decayTime * this.sampleRate;

    // Per-band random gains give each render a slightly different tone.
    const numBands = 4;
    const bandGains = new Float32Array(numBands);
    for (let band = 0; band < numBands; band++) {
      bandGains[band] = 0.7 + Math.random() * 0.3;
    }

    // Noise tail with exponential decay; higher bands decay faster.
    for (let i = diffuseStart; i < irLength; i++) {
      const t = (i - diffuseStart) / decaySamples;
      const envelope = Math.exp(-t * 6.0);
      if (envelope < 0.001) break; // tail is inaudible past this point

      let sample = 0;
      for (let band = 0; band < numBands; band++) {
        const noise = (Math.random() * 2 - 1) * bandGains[band];
        const bandEnvelope = Math.exp(-t * (3.0 + band * 2.0));
        sample += noise * bandEnvelope;
      }
      sample *= envelope * params.diffusion;

      // One-pole low-pass against the previous sample damps highs naturally;
      // plates are brighter, so they get slightly less damping.
      if (i > diffuseStart) {
        const damping = params.roomType === 'plate' ? 0.4 : 0.5;
        sample = sample * (1 - damping) + ir[i - 1] * damping;
      }
      ir[i] += sample;
    }

    // Normalize this channel's IR to a 0.5 peak for consistent wet level.
    let maxAmp = 0;
    for (let i = 0; i < irLength; i++) {
      maxAmp = Math.max(maxAmp, Math.abs(ir[i]));
    }
    if (maxAmp > 0) {
      const normGain = 0.5 / maxAmp;
      for (let i = 0; i < irLength; i++) {
        ir[i] *= normGain;
      }
    }
  }

  /**
   * Scales both channels down to a 0.95 peak, but only when the combined peak
   * exceeds 0.98 — quieter signals keep their dynamics untouched.
   */
  private normalizeInPlace(left: Float32Array, right: Float32Array): void {
    let maxAmp = 0;
    for (let i = 0; i < left.length; i++) {
      maxAmp = Math.max(maxAmp, Math.abs(left[i]), Math.abs(right[i]));
    }
    if (maxAmp > 0.98) {
      const gain = 0.95 / maxAmp;
      for (let i = 0; i < left.length; i++) {
        left[i] *= gain;
        right[i] *= gain;
      }
    }
  }
}