Adding more effects

This commit is contained in:
2025-10-11 16:47:20 +02:00
parent be7ba5fad8
commit 7f150e8bb4
19 changed files with 2495 additions and 9 deletions

View File

@ -4,7 +4,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co
## Project Overview
This is a Svelte + TypeScript audio synthesis application that generates and manipulates sounds using various synthesis recipes (modes). Each recipe is a different flavour of audio synthesis, generating random audio samples that musicians can use in their compositions. Users can generate random sounds, mutate existing ones, visualize waveforms, and export audio as WAV files.
This is a Svelte + TypeScript audio synthesis application that generates and manipulates sounds using various synthesis recipes (modes). Each recipe is a different flavour of audio synthesis, generating random audio samples that musicians can use in their compositions. Users can generate random sounds, mutate existing ones, apply audio processors to transform sounds, visualize waveforms, and export audio as WAV files.
## Build System
@ -19,21 +19,28 @@ This is a Svelte + TypeScript audio synthesis application that generates and man
### Audio Pipeline
The audio system follows a layered architecture:
The audio system follows a layered architecture: **Engine → Processor → Output**
1. **SynthEngine interface** (`src/lib/audio/engines/SynthEngine.ts`): Abstract interface for synthesis engines
- Defines `generate()`, `randomParams()`, and `mutateParams()` methods
- All engines must generate stereo output: `[Float32Array, Float32Array]`
- Time-based parameters (envelopes, LFOs) stored as ratios (0-1) and scaled by duration during generation
2. **Engines**
2. **Engines**: Registered in `src/lib/audio/engines/registry.ts`
3. **AudioService** (`src/lib/audio/services/AudioService.ts`): Web Audio API wrapper
3. **AudioProcessor interface** (`src/lib/audio/processors/AudioProcessor.ts`): Abstract interface for audio processors
- Defines `process()` method that transforms existing audio buffers
- Takes stereo input and returns stereo output: `[Float32Array, Float32Array]`
- Applied after engine generation, before final output
4. **Processors**: Registered in `src/lib/audio/processors/registry.ts`
5. **AudioService** (`src/lib/audio/services/AudioService.ts`): Web Audio API wrapper
- Manages AudioContext, gain node, and playback
- Provides playback position tracking via animation frames
- Fixed sample rate: 44100 Hz
4. **WAVEncoder** (`src/lib/audio/utils/WAVEncoder.ts`): Audio export functionality
6. **WAVEncoder** (`src/lib/audio/utils/WAVEncoder.ts`): Audio export functionality
### State Management
@ -64,6 +71,27 @@ The audio system follows a layered architecture:
8. **Register the engine** by adding it to the `engines` array in `src/lib/audio/engines/registry.ts`
9. The mode buttons in the UI will automatically update to include your new engine
### Adding New Audio Processors
**CRITICAL: Each processor must be completely self-contained in a single file.** Do not create separate utility files, helper classes, or subdirectories for processor components. All DSP code, algorithms, and processing logic should be private methods within the processor class.
1. Implement the `AudioProcessor` interface in a single file under `src/lib/audio/processors/`
2. Implement `getName()` to return the processor's display name
3. Implement `getDescription()` to return a brief description of the processor
4. Ensure `process()` takes stereo input and returns stereo output: `[Float32Array, Float32Array]`
5. Processors operate on existing audio buffers and should not generate new sounds from scratch
6. Keep all helper functions, enums, and types in the same file
7. **Register the processor** by adding it to the `processors` array in `src/lib/audio/processors/registry.ts`
8. Processors are randomly selected when the user clicks "Process"
### User Workflow
1. **Generate**: User clicks "Random" to generate a raw, unprocessed sound using the current engine
2. **Refine**: User can "Mutate" the sound (adjusting parameters) or generate a new random sound
3. **Process**: User clicks "Process" to apply a random audio processor to the sound
4. **Iterate**: After processing, "Mutate" disappears but "Process" remains available for multiple processing passes
5. **Reset**: Clicking "Random" generates a new raw sound and returns to the initial state
### Duration Handling
Duration is user-adjustable. All time-based synthesis parameters (attack, decay, release, LFO rates) must scale with duration. Store envelope timings as ratios of total duration, not absolute seconds.

View File

@ -13,6 +13,11 @@
saveDuration,
} from "./lib/utils/settings";
import { generateRandomColor } from "./lib/utils/colors";
import {
getRandomProcessor,
getAllProcessors,
} from "./lib/audio/processors/registry";
import type { AudioProcessor } from "./lib/audio/processors/AudioProcessor";
let currentEngineIndex = 0;
let engine = engines[currentEngineIndex];
@ -26,6 +31,11 @@
let playbackPosition = -1;
let waveformColor = generateRandomColor();
let showModal = true;
let isProcessed = false;
let showProcessorPopup = false;
let popupTimeout: ReturnType<typeof setTimeout> | null = null;
const allProcessors = getAllProcessors();
onMount(() => {
audioService.setVolume(volume);
@ -38,6 +48,7 @@
// Generates a brand-new random sound with the current engine, picks a fresh
// waveform color, and clears the processed flag (so "Mutate" is shown again).
function generateRandom() {
currentParams = engine.randomParams();
waveformColor = generateRandomColor();
isProcessed = false;
regenerateBuffer();
}
@ -71,6 +82,65 @@
downloadWAV(currentBuffer, "synth-sound.wav");
}
// Applies a randomly selected processor to the current sound.
// No-op when no buffer has been generated yet.
// NOTE(review): applyProcessor is async and not awaited here, so a rejected
// promise would be unhandled — confirm this fire-and-forget is intended.
function processSound() {
if (!currentBuffer) return;
const processor = getRandomProcessor();
applyProcessor(processor);
}
// Applies the processor the user picked from the popup, closing the popup
// first. No-op when no buffer has been generated yet.
function processWithSpecificProcessor(processor: AudioProcessor) {
if (!currentBuffer) return;
hideProcessorPopup();
applyProcessor(processor);
}
// Shows the processor popup when the pointer enters the Process button area.
// Any pending hide timer is cancelled first, then a new 2 s auto-hide is armed.
// NOTE(review): the popup will auto-hide after 2 s even while the pointer is
// still inside the container — confirm this timeout-while-hovering is intended.
function handlePopupMouseEnter() {
if (popupTimeout) {
clearTimeout(popupTimeout);
}
showProcessorPopup = true;
popupTimeout = setTimeout(() => {
showProcessorPopup = false;
}, 2000);
}
// Hides the popup shortly (200 ms) after the pointer leaves, giving the user a
// small grace period to move between the button and the popup without it closing.
function handlePopupMouseLeave() {
if (popupTimeout) {
clearTimeout(popupTimeout);
}
popupTimeout = setTimeout(() => {
showProcessorPopup = false;
}, 200);
}
// Immediately hides the processor popup and cancels any pending hide timer.
function hideProcessorPopup() {
if (popupTimeout) {
clearTimeout(popupTimeout);
}
showProcessorPopup = false;
}
// Runs the given processor over the current buffer's stereo channels,
// replaces the buffer with the processed result, marks the sound as
// processed (hiding "Mutate"), and plays it back immediately.
// process() may be sync or async, hence the await.
async function applyProcessor(processor: AudioProcessor) {
if (!currentBuffer) return;
const leftChannel = currentBuffer.getChannelData(0);
const rightChannel = currentBuffer.getChannelData(1);
const [processedLeft, processedRight] = await processor.process(
leftChannel,
rightChannel,
);
currentBuffer = audioService.createAudioBuffer([
processedLeft,
processedRight,
]);
isProcessed = true;
audioService.play(currentBuffer);
}
function handleVolumeChange(event: Event) {
const target = event.target as HTMLInputElement;
volume = parseFloat(target.value);
@ -114,6 +184,9 @@
case "r":
generateRandom();
break;
case "p":
processSound();
break;
case "s":
download();
break;
@ -201,7 +274,30 @@
/>
<div class="bottom-controls">
<button onclick={generateRandom}>Random (R)</button>
<button onclick={mutate}>Mutate (M)</button>
{#if !isProcessed}
<button onclick={mutate}>Mutate (M)</button>
{/if}
<div
class="process-button-container"
role="group"
onmouseenter={handlePopupMouseEnter}
onmouseleave={handlePopupMouseLeave}
>
<button onclick={processSound}>Process (P)</button>
{#if showProcessorPopup}
<div class="processor-popup">
{#each allProcessors as processor}
<button
class="processor-tile"
data-description={processor.getDescription()}
onclick={() => processWithSpecificProcessor(processor)}
>
{processor.getName()}
</button>
{/each}
</div>
{/if}
</div>
<button onclick={download}>Download (D)</button>
</div>
</div>
@ -290,7 +386,7 @@
.engine-button::after {
content: attr(data-description);
position: absolute;
top: calc(100% + 8px);
top: 100%;
left: 0;
padding: 0.5rem 0.75rem;
background-color: #0a0a0a;
@ -490,4 +586,70 @@
.modal-close:hover {
background-color: #ddd;
}
.process-button-container {
position: relative;
}
.processor-popup {
position: absolute;
bottom: 100%;
left: 50%;
transform: translateX(-50%);
background-color: #000;
border: 2px solid #fff;
padding: 0.75rem;
z-index: 1000;
display: grid;
grid-template-columns: repeat(3, 1fr);
gap: 0.5rem;
width: 450px;
margin-bottom: 0.5rem;
}
.processor-tile {
background-color: #1a1a1a;
border: 1px solid #444;
padding: 0.6rem 0.4rem;
text-align: center;
cursor: pointer;
transition: background-color 0.2s, border-color 0.2s;
font-size: 0.85rem;
color: #fff;
position: relative;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.processor-tile:hover {
background-color: #2a2a2a;
border-color: #646cff;
}
.processor-tile::after {
content: attr(data-description);
position: absolute;
bottom: 100%;
left: 50%;
transform: translateX(-50%);
padding: 0.5rem 0.75rem;
background-color: #0a0a0a;
border: 1px solid #444;
color: #ccc;
font-size: 0.85rem;
width: max-content;
max-width: 300px;
white-space: normal;
word-wrap: break-word;
pointer-events: none;
opacity: 0;
transition: opacity 0.2s;
z-index: 1001;
margin-bottom: 0.25rem;
}
.processor-tile:hover::after {
opacity: 1;
}
</style>

View File

@ -0,0 +1,8 @@
/**
 * Contract for audio effects that transform an already-generated sound.
 * Processors are registered in `processors/registry.ts`, applied after engine
 * generation, and operate on existing buffers rather than synthesizing from
 * scratch.
 */
export interface AudioProcessor {
/** Human-readable processor name shown in the UI. */
getName(): string;
/** One-line description used for the processor tooltip. */
getDescription(): string;
/**
 * Transforms a stereo pair of sample buffers and returns a new stereo pair.
 * May be synchronous or asynchronous (e.g. when rendering through Web Audio).
 * Implementations should return buffers of the same length as the input.
 */
process(
leftChannel: Float32Array,
rightChannel: Float32Array
): [Float32Array, Float32Array] | Promise<[Float32Array, Float32Array]>;
}

View File

@ -0,0 +1,156 @@
import type { AudioProcessor } from './AudioProcessor';
/**
 * Chorus effect: mixes the dry signal with several LFO-modulated delayed
 * copies ("voices"), each panned for stereo width, plus a small amount of
 * feedback into the shared delay line. All effect parameters are randomized
 * on every call to process().
 */
export class Chorus implements AudioProcessor {
// Matches AudioService's fixed output rate.
private readonly sampleRate = 44100;
getName(): string {
return 'Chorus';
}
getDescription(): string {
return 'Multiple delayed copies with pitch modulation for thick, ensemble sounds';
}
/**
 * Processes a stereo buffer pair and returns new stereo buffers of the
 * same length. Synchronous; no Web Audio nodes involved.
 */
process(
leftChannel: Float32Array,
rightChannel: Float32Array
): [Float32Array, Float32Array] {
const leftIn = leftChannel;
const rightIn = rightChannel;
const length = leftIn.length;
// Random parameters with better ranges
const numVoices = Math.floor(Math.random() * 3) + 2; // 2-4 voices
const baseDelay = Math.random() * 0.015 + 0.015; // 15-30ms (increased for better effect)
const lfoDepth = Math.random() * 0.003 + 0.001; // 1-4ms modulation depth
const mix = Math.random() * 0.3 + 0.35; // 35-65% wet (better balance)
const feedback = Math.random() * 0.15 + 0.05; // 5-20% feedback (reduced for stability)
const stereoSpread = Math.random() * 0.3 + 0.2; // 20-50% stereo width
// Convert times to samples
const baseDelaySamples = Math.floor(baseDelay * this.sampleRate);
const lfoDepthSamples = lfoDepth * this.sampleRate;
const maxDelaySamples = Math.ceil(baseDelaySamples + lfoDepthSamples + 1);
// Create delay buffers with enough space for the entire input plus delay
const bufferSize = length + maxDelaySamples;
const leftDelayBuffer = new Float32Array(bufferSize);
const rightDelayBuffer = new Float32Array(bufferSize);
// Initialize delay buffers with input signal, offset by maxDelaySamples so
// modulated reads near t=0 never go out of bounds.
leftDelayBuffer.set(leftIn, maxDelaySamples);
rightDelayBuffer.set(rightIn, maxDelaySamples);
// Output buffers
const leftOut = new Float32Array(length);
const rightOut = new Float32Array(length);
// Pre-calculate LFO parameters for each voice
const voices: Array<{
lfoPhase: number;
lfoIncrement: number;
panLeft: number;
panRight: number;
}> = [];
for (let voice = 0; voice < numVoices; voice++) {
// Phases spread evenly around the circle so voices don't modulate in lockstep.
const phaseOffset = (voice / numVoices) * Math.PI * 2;
const rateVariation = 0.8 + Math.random() * 0.4; // 0.8-1.2x rate variation
const lfoRate = (0.5 + Math.random() * 2) * rateVariation; // 0.4-3.0 Hz per voice (0.5-2.5 base x 0.8-1.2 variation)
// Stereo panning for each voice (numVoices >= 2, so no division by zero)
const pan = (voice / (numVoices - 1) - 0.5) * stereoSpread;
voices.push({
lfoPhase: phaseOffset,
lfoIncrement: (lfoRate * 2 * Math.PI) / this.sampleRate,
// Equal-power pan law: cos/sin of the same angle.
panLeft: Math.cos((pan + 0.5) * Math.PI * 0.5),
panRight: Math.sin((pan + 0.5) * Math.PI * 0.5)
});
}
// Process each sample
for (let i = 0; i < length; i++) {
let leftWet = 0;
let rightWet = 0;
// Process each chorus voice
for (let v = 0; v < numVoices; v++) {
const voice = voices[v];
// Calculate LFO value and advance/wrap the phase
const lfo = Math.sin(voice.lfoPhase);
voice.lfoPhase += voice.lfoIncrement;
if (voice.lfoPhase > Math.PI * 2) {
voice.lfoPhase -= Math.PI * 2;
}
// Calculate modulated delay time in samples
const delaySamples = baseDelaySamples + lfo * lfoDepthSamples;
// Calculate read position with fractional delay
const readPos = i + maxDelaySamples - delaySamples;
const readPosInt = Math.floor(readPos);
const frac = readPos - readPosInt;
// Ensure we're within bounds
if (readPosInt >= 0 && readPosInt < bufferSize - 1) {
// Linear interpolation for fractional delay
const leftDelayed = this.lerp(
leftDelayBuffer[readPosInt],
leftDelayBuffer[readPosInt + 1],
frac
);
const rightDelayed = this.lerp(
rightDelayBuffer[readPosInt],
rightDelayBuffer[readPosInt + 1],
frac
);
// Apply panning to each voice for stereo width
leftWet += leftDelayed * voice.panLeft;
rightWet += rightDelayed * voice.panRight;
}
}
// Normalize wet signal (1/sqrt(N)) so summed voices keep roughly equal power
const wetGain = 1.0 / Math.sqrt(numVoices);
leftWet *= wetGain;
rightWet *= wetGain;
// Apply feedback: written at the CURRENT input position (i + maxDelaySamples),
// so it is only read back once the modulated delay reaches it — no zero-delay loop.
const feedbackPos = i + maxDelaySamples;
if (feedbackPos < bufferSize) {
leftDelayBuffer[feedbackPos] += leftWet * feedback;
rightDelayBuffer[feedbackPos] += rightWet * feedback;
}
// Mix dry and wet signals with equal-power crossfade (sqrt gains)
const dryGain = Math.sqrt(1 - mix);
const wetGain2 = Math.sqrt(mix) * 1.5; // Slight boost for presence
leftOut[i] = leftIn[i] * dryGain + leftWet * wetGain2;
rightOut[i] = rightIn[i] * dryGain + rightWet * wetGain2;
// Soft clipping to prevent harsh distortion (the 1.5x wet boost can overshoot)
leftOut[i] = this.softClip(leftOut[i]);
rightOut[i] = this.softClip(rightOut[i]);
}
return [leftOut, rightOut];
}
// Linear interpolation between a and b by t in [0, 1].
private lerp(a: number, b: number, t: number): number {
return a + (b - a) * t;
}
// Passes samples below 0.95 unchanged; smoothly compresses anything above
// via tanh so the output magnitude stays below 1.0.
private softClip(sample: number): number {
const threshold = 0.95;
if (Math.abs(sample) < threshold) {
return sample;
}
const sign = sample < 0 ? -1 : 1;
const abs = Math.abs(sample);
return sign * (threshold + (1 - threshold) * Math.tanh((abs - threshold) / (1 - threshold)));
}
}

View File

@ -0,0 +1,251 @@
import type { AudioProcessor } from './AudioProcessor';
// Room archetypes; each maps to a decay/diffusion range in randomParams()
// and a relative size in generateImpulseResponse().
type RoomType = 'small' | 'medium' | 'large' | 'hall' | 'plate' | 'chamber';
interface ReverbParams {
roomType: RoomType;
decayTime: number; // seconds; also determines IR length (x4, capped at 4 s)
preDelay: number; // seconds before the first reflection
wetMix: number; // 0-1 wet fraction (equal-power crossfade with dry)
earlyReflections: number; // amplitude scale for the sparse early-reflection impulses
diffusion: number; // amplitude scale for the noise tail
}
/**
 * Reverb built on the Web Audio ConvolverNode: a synthetic stereo impulse
 * response (early reflections + exponentially decaying filtered noise) is
 * generated per call and rendered through an OfflineAudioContext.
 * NOTE(review): requires a Web Audio-capable environment (browser);
 * OfflineAudioContext is not available in plain Node.
 */
export class ConvolutionReverb implements AudioProcessor {
// Matches AudioService's fixed output rate.
private readonly sampleRate = 44100;
getName(): string {
return 'Convolution Reverb';
}
getDescription(): string {
return 'Realistic room ambience using Web Audio ConvolverNode with synthetic impulse responses';
}
/**
 * Renders dry + convolved wet paths offline and returns stereo buffers of
 * exactly the input length (the reverb tail beyond the input is truncated
 * by the offline context's fixed length).
 */
async process(
leftChannel: Float32Array,
rightChannel: Float32Array
): Promise<[Float32Array, Float32Array]> {
const inputLength = leftChannel.length;
const params = this.randomParams();
// Create offline context for processing
const offlineContext = new OfflineAudioContext(2, inputLength, this.sampleRate);
// Create input buffer from our channels (copy to ensure proper ArrayBuffer type)
const inputBuffer = offlineContext.createBuffer(2, inputLength, this.sampleRate);
inputBuffer.copyToChannel(new Float32Array(leftChannel), 0);
inputBuffer.copyToChannel(new Float32Array(rightChannel), 1);
// Generate impulse response with slight stereo differences
const irBuffer = this.generateImpulseResponseBuffer(offlineContext, params);
// Create audio graph: source -> convolver -> destination
const source = offlineContext.createBufferSource();
source.buffer = inputBuffer;
const convolver = offlineContext.createConvolver();
convolver.buffer = irBuffer;
convolver.normalize = false; // We normalize the IR ourselves for consistent gain
const dryGain = offlineContext.createGain();
const wetGain = offlineContext.createGain();
// Equal-power crossfade
dryGain.gain.value = Math.sqrt(1 - params.wetMix);
wetGain.gain.value = Math.sqrt(params.wetMix);
// Connect dry path
source.connect(dryGain);
dryGain.connect(offlineContext.destination);
// Connect wet path
source.connect(convolver);
convolver.connect(wetGain);
wetGain.connect(offlineContext.destination);
source.start(0);
// Render offline
const renderedBuffer = await offlineContext.startRendering();
// Extract channels and ensure exact length match
const outLeft = new Float32Array(inputLength);
const outRight = new Float32Array(inputLength);
renderedBuffer.copyFromChannel(outLeft, 0);
renderedBuffer.copyFromChannel(outRight, 1);
// Normalize to prevent clipping while maintaining dynamics
this.normalizeInPlace(outLeft, outRight);
return [outLeft, outRight];
}
// Picks a random room type, then draws decay/diffusion from ranges tuned
// per room, plus pre-delay (5-30 ms), wet mix (30-70%) and early-reflection level.
private randomParams(): ReverbParams {
const roomTypes: RoomType[] = ['small', 'medium', 'large', 'hall', 'plate', 'chamber'];
const roomType = roomTypes[Math.floor(Math.random() * roomTypes.length)];
let decayTime: number;
let diffusion: number;
switch (roomType) {
case 'small':
decayTime = Math.random() * 0.3 + 0.2;
diffusion = Math.random() * 0.3 + 0.5;
break;
case 'medium':
decayTime = Math.random() * 0.5 + 0.4;
diffusion = Math.random() * 0.2 + 0.6;
break;
case 'large':
decayTime = Math.random() * 0.7 + 0.6;
diffusion = Math.random() * 0.2 + 0.7;
break;
case 'hall':
decayTime = Math.random() * 0.5 + 0.8;
diffusion = Math.random() * 0.15 + 0.75;
break;
case 'plate':
decayTime = Math.random() * 0.4 + 0.5;
diffusion = Math.random() * 0.3 + 0.6;
break;
case 'chamber':
decayTime = Math.random() * 0.6 + 0.5;
diffusion = Math.random() * 0.2 + 0.65;
break;
}
return {
roomType,
decayTime,
preDelay: Math.random() * 0.025 + 0.005,
wetMix: Math.random() * 0.4 + 0.3,
earlyReflections: Math.random() * 0.3 + 0.4,
diffusion
};
}
// Builds a stereo AudioBuffer IR; left/right are generated independently
// (different random reflections) for stereo width.
private generateImpulseResponseBuffer(context: OfflineAudioContext, params: ReverbParams): AudioBuffer {
// IR length based on decay time, with reasonable maximum
const irDuration = Math.min(params.decayTime * 4, 4.0);
const irLength = Math.floor(irDuration * this.sampleRate);
const irBuffer = context.createBuffer(2, irLength, this.sampleRate);
const irLeft = new Float32Array(irLength);
const irRight = new Float32Array(irLength);
// Generate stereo IRs with slight differences for width
this.generateImpulseResponse(irLeft, params, 0);
this.generateImpulseResponse(irRight, params, 1);
irBuffer.copyToChannel(irLeft, 0);
irBuffer.copyToChannel(irRight, 1);
return irBuffer;
}
// Fills `ir` in place: sparse early reflections after the pre-delay, then a
// multi-band noise tail with exponential decay and one-pole low-pass damping.
// `channel` (0 = left, 1 = right) only flips the sign of a small random
// amplitude offset on early reflections to decorrelate the channels.
private generateImpulseResponse(ir: Float32Array, params: ReverbParams, channel: number): void {
const irLength = ir.length;
const preDelaySamples = Math.floor(params.preDelay * this.sampleRate);
// Room-specific parameters
const roomSizes: Record<RoomType, number> = {
small: 0.2,
medium: 0.4,
large: 0.6,
hall: 0.85,
plate: 0.5,
chamber: 0.55
};
const roomSize = roomSizes[params.roomType];
const earlyDecayTime = roomSize * 0.05;
// Generate early reflections as sparse impulses; pow(rand, 1.5) biases
// reflection times towards the start of the early window.
const numEarlyReflections = Math.floor(roomSize * 30) + 8;
const earlyReflectionTime = earlyDecayTime * this.sampleRate;
for (let i = 0; i < numEarlyReflections; i++) {
const time = preDelaySamples + Math.pow(Math.random(), 1.5) * earlyReflectionTime;
const index = Math.floor(time);
if (index < irLength) {
const distanceAttenuation = 1.0 / (1.0 + time / this.sampleRate);
const amplitude = params.earlyReflections * distanceAttenuation * (0.5 + Math.random() * 0.5);
// Stereo width variation
const stereoOffset = (channel === 0 ? -1 : 1) * Math.random() * 0.3;
ir[index] += amplitude * (1.0 + stereoOffset);
}
}
// Generate diffuse tail using filtered noise
const diffuseStart = preDelaySamples + Math.floor(earlyReflectionTime);
const decaySamples = params.decayTime * this.sampleRate;
// Multi-band diffusion for realistic reverb character
const numBands = 4;
const bandGains = new Float32Array(numBands);
for (let band = 0; band < numBands; band++) {
bandGains[band] = 0.7 + Math.random() * 0.3;
}
// Generate noise with exponential decay and frequency-dependent damping
for (let i = diffuseStart; i < irLength; i++) {
const t = (i - diffuseStart) / decaySamples;
const envelope = Math.exp(-t * 6.0);
if (envelope < 0.001) break; // tail is inaudible past this point; stop early
let sample = 0;
// Multi-band noise: higher bands decay faster (3.0 + band*2.0 rate)
for (let band = 0; band < numBands; band++) {
const noise = (Math.random() * 2 - 1) * bandGains[band];
const bandEnvelope = Math.exp(-t * (3.0 + band * 2.0));
sample += noise * bandEnvelope;
}
sample *= envelope * params.diffusion;
// One-pole low-pass against the previous IR sample for HF damping;
// plates get slightly less damping (brighter tail).
if (i > diffuseStart) {
const damping = params.roomType === 'plate' ? 0.4 : 0.5;
sample = sample * (1 - damping) + ir[i - 1] * damping;
}
ir[i] += sample;
}
// Normalize IR peak to 0.5 so convolver.normalize=false still yields a
// consistent wet level.
let maxAmp = 0;
for (let i = 0; i < irLength; i++) {
maxAmp = Math.max(maxAmp, Math.abs(ir[i]));
}
if (maxAmp > 0) {
const normGain = 0.5 / maxAmp;
for (let i = 0; i < irLength; i++) {
ir[i] *= normGain;
}
}
}
// Only rescales when the stereo peak exceeds 0.98 (down to 0.95), so normal
// dynamics are left untouched.
private normalizeInPlace(left: Float32Array, right: Float32Array): void {
let maxAmp = 0;
for (let i = 0; i < left.length; i++) {
maxAmp = Math.max(maxAmp, Math.abs(left[i]), Math.abs(right[i]));
}
if (maxAmp > 0.98) {
const gain = 0.95 / maxAmp;
for (let i = 0; i < left.length; i++) {
left[i] *= gain;
right[i] *= gain;
}
}
}
}

View File

@ -0,0 +1,164 @@
import type { AudioProcessor } from './AudioProcessor';
/**
 * Simulates a sound source flying past the listener: a smoothed, time-varying
 * delay line produces the Doppler pitch sweep, and inverse-square distance
 * attenuation produces the swell/fade. Parameters (speed, closest distance,
 * height, direction) are randomized per call.
 */
export class DopplerEffect implements AudioProcessor {
getName(): string {
return 'Doppler Effect';
}
getDescription(): string {
return 'Simulates pitch and amplitude changes from a moving source passing by';
}
process(
leftChannel: Float32Array,
rightChannel: Float32Array
): [Float32Array, Float32Array] {
const length = leftChannel.length;
const sampleRate = 44100;
const duration = length / sampleRate;
// Random parameters for movement
const approachSpeed = 30 + Math.random() * 70; // 30-100 m/s (108-360 km/h)
const closestDistance = 5 + Math.random() * 20; // 5-25 meters
const sourceHeight = Math.random() * 10; // 0-10 meters above listener
const leftToRight = Math.random() > 0.5; // Direction of movement
// Physics constants
const speedOfSound = 343; // m/s at 20°C
// Symmetric fly-by: the source starts and ends the same distance away,
// passing the closest point at the midpoint of the buffer.
const startDistance = approachSpeed * duration * 0.5;
const endDistance = startDistance;
// Pre-calculate delay buffer size needed
const maxDelayTime = 0.1; // 100ms maximum delay
const maxDelaySamples = Math.ceil(maxDelayTime * sampleRate);
// Process each channel with delay-based pitch shifting. When leftToRight
// is true the input channels are swapped, reversing the apparent direction.
const outputLeft = this.processDopplerChannel(
leftToRight ? leftChannel : rightChannel,
length,
sampleRate,
approachSpeed,
closestDistance,
sourceHeight,
startDistance,
endDistance,
speedOfSound,
maxDelaySamples,
-2 // Left ear offset
);
const outputRight = this.processDopplerChannel(
leftToRight ? rightChannel : leftChannel,
length,
sampleRate,
approachSpeed,
closestDistance,
sourceHeight,
startDistance,
endDistance,
speedOfSound,
maxDelaySamples,
2 // Right ear offset
);
return [outputLeft, outputRight];
}
// Renders one ear: computes the source distance per sample, derives the
// Doppler factor from radial velocity, converts it to a smoothed delay-line
// modulation (pitch sweep), applies inverse-square gain, and fades the edges.
// `earOffset` is the ear's lateral position in meters (negative = left).
private processDopplerChannel(
input: Float32Array,
length: number,
sampleRate: number,
speed: number,
closestDistance: number,
height: number,
startDistance: number,
endDistance: number,
speedOfSound: number,
maxDelaySamples: number,
earOffset: number
): Float32Array {
const output = new Float32Array(length);
const delayBuffer = new Float32Array(input.length + maxDelaySamples);
// Copy input to delay buffer with padding so negative-delay reads stay in bounds
for (let i = 0; i < input.length; i++) {
delayBuffer[i + maxDelaySamples] = input[i];
}
// Variables for delay line interpolation
let previousDelay = 0;
const smoothingFactor = 0.995; // Smooth delay changes to avoid clicks
for (let i = 0; i < length; i++) {
const time = i / sampleRate;
const normalizedTime = time / (length / sampleRate);
// Source position along its straight path: -startDistance .. +endDistance
const alongPath = -startDistance + (startDistance + endDistance) * normalizedTime;
// Calculate actual distance including lateral offset and height
const lateralDistance = Math.sqrt(
closestDistance * closestDistance +
height * height +
earOffset * earOffset
);
const distance = Math.sqrt(alongPath * alongPath + lateralDistance * lateralDistance);
// Radial velocity: positive while receding, negative while approaching
const radialVelocity = speed * alongPath / distance;
// Doppler frequency shift factor
const dopplerFactor = speedOfSound / (speedOfSound - radialVelocity);
// Time-varying delay for pitch shift (in samples).
// NOTE(review): the *50 is an artistic scale factor, not physical
// propagation delay — the sweep is exaggerated for audibility.
const targetDelay = (dopplerFactor - 1) * 50; // Scale factor for audible effect
// One-pole smoothing of the delay target to avoid zipper noise
const currentDelay = previousDelay * smoothingFactor + targetDelay * (1 - smoothingFactor);
previousDelay = currentDelay;
// Calculate amplitude based on distance (inverse square law), capped at
// unity inside the 10 m reference distance.
const referenceDistance = 10;
const amplitude = Math.min(1, (referenceDistance / distance) * (referenceDistance / distance));
// Read from delay buffer with cubic interpolation
const readPos = i + maxDelaySamples + currentDelay;
const readIdx = Math.floor(readPos);
const frac = readPos - readIdx;
if (readIdx >= 1 && readIdx < delayBuffer.length - 2) {
const y0 = delayBuffer[readIdx - 1];
const y1 = delayBuffer[readIdx];
const y2 = delayBuffer[readIdx + 1];
const y3 = delayBuffer[readIdx + 2];
// Cubic interpolation for smooth pitch transitions
const interpolated = this.cubicInterpolate(y0, y1, y2, y3, frac);
output[i] = interpolated * amplitude;
} else if (readIdx >= 0 && readIdx < delayBuffer.length) {
// Nearest-sample fallback at the buffer edges
output[i] = delayBuffer[readIdx] * amplitude;
}
}
// Apply linear fade in/out (up to 1000 samples or 1% of length) to prevent clicks
const fadeLength = Math.min(1000, Math.floor(length * 0.01));
for (let i = 0; i < fadeLength; i++) {
const fade = i / fadeLength;
output[i] *= fade;
output[length - 1 - i] *= fade;
}
return output;
}
// Catmull-Rom cubic interpolation between y1 and y2, with y0/y3 as outer
// support points; x is the fractional position in [0, 1].
private cubicInterpolate(y0: number, y1: number, y2: number, y3: number, x: number): number {
// Catmull-Rom cubic interpolation for smooth sample interpolation
const x2 = x * x;
const x3 = x2 * x;
const a0 = -0.5 * y0 + 1.5 * y1 - 1.5 * y2 + 0.5 * y3;
const a1 = y0 - 2.5 * y1 + 2 * y2 - 0.5 * y3;
const a2 = -0.5 * y0 + 0.5 * y2;
const a3 = y1;
return a0 * x3 + a1 * x2 + a2 * x + a3;
}
}

View File

@ -0,0 +1,190 @@
import type { AudioProcessor } from './AudioProcessor';
/**
 * Adds subtle (1-5 cent) pitch wobble per channel via a smoothed, LFO-driven
 * delay-line modulation — the classic "analog drift" trick. Left and right
 * channels get independent LFO seeds and slightly different depths for
 * stereo movement.
 */
export class MicroPitch implements AudioProcessor {
getName(): string {
return 'Micro Pitch';
}
getDescription(): string {
return 'Applies subtle random pitch variations for analog warmth and character';
}
process(
leftChannel: Float32Array,
rightChannel: Float32Array
): [Float32Array, Float32Array] {
const length = leftChannel.length;
const sampleRate = 44100;
// Random parameters for pitch variation
const maxCents = 1 + Math.random() * 4; // 1-5 cents maximum deviation
const lfoCount = 2 + Math.floor(Math.random() * 2); // 2-3 LFOs per channel
const stereoSpread = 0.3 + Math.random() * 0.7; // 30-100% stereo difference
// Process each channel independently for stereo variation
const outputLeft = this.processMicroPitchChannel(
leftChannel,
length,
sampleRate,
maxCents,
lfoCount,
Math.random() // Random seed for left channel
);
const outputRight = this.processMicroPitchChannel(
rightChannel,
length,
sampleRate,
// Right channel depth scaled by a random factor in [1 - spread/2, 1 + spread/2]
maxCents * (1 - stereoSpread * 0.5 + stereoSpread * Math.random()),
lfoCount,
Math.random() // Different seed for right channel
);
return [outputLeft, outputRight];
}
// Renders one channel: sums several deterministic-per-seed LFOs into a cent
// deviation, converts it to a smoothed delay modulation, and reads the delay
// line with Hermite interpolation. Edges are crossfaded back to the dry input.
private processMicroPitchChannel(
input: Float32Array,
length: number,
sampleRate: number,
maxCents: number,
lfoCount: number,
seed: number
): Float32Array {
const output = new Float32Array(length);
// Initialize multiple LFOs with random parameters (derived from `seed`
// through pseudoRandom, so each channel's set is self-consistent)
const lfos: Array<{
frequency: number;
phase: number;
amplitude: number;
type: 'sine' | 'triangle' | 'random-walk';
}> = [];
for (let i = 0; i < lfoCount; i++) {
const randomValue = this.pseudoRandom(seed + i * 0.137);
lfos.push({
frequency: 0.1 + randomValue * 2, // 0.1-2.1 Hz
phase: this.pseudoRandom(seed + i * 0.271) * Math.PI * 2,
amplitude: 0.3 + this.pseudoRandom(seed + i * 0.419) * 0.7,
type: this.pseudoRandom(seed + i * 0.613) > 0.6 ? 'triangle' :
this.pseudoRandom(seed + i * 0.613) > 0.3 ? 'random-walk' : 'sine'
});
}
// Random walk state for smooth random variations.
// NOTE(review): this state is shared by ALL random-walk LFOs in this
// channel, so multiple random-walk LFOs move together — confirm intended.
let randomWalkValue = 0;
let randomWalkTarget = 0;
const randomWalkSmooth = 0.999; // Very slow changes
// Delay buffer for pitch shifting
const maxDelayMs = 50; // Maximum delay in milliseconds
const maxDelaySamples = Math.ceil(maxDelayMs * sampleRate / 1000);
const delayBuffer = new Float32Array(length + maxDelaySamples * 2);
// Copy input to delay buffer with padding
for (let i = 0; i < input.length; i++) {
delayBuffer[i + maxDelaySamples] = input[i];
}
// State for smooth delay interpolation
let currentDelay = 0;
const delaySmoothing = 0.9995; // Very smooth delay changes to avoid artifacts
for (let i = 0; i < length; i++) {
const time = i / sampleRate;
// Calculate combined pitch deviation from all LFOs
let pitchDeviation = 0;
for (const lfo of lfos) {
let lfoValue = 0;
if (lfo.type === 'sine') {
lfoValue = Math.sin(2 * Math.PI * lfo.frequency * time + lfo.phase);
} else if (lfo.type === 'triangle') {
const phase = (lfo.frequency * time + lfo.phase / (2 * Math.PI)) % 1;
lfoValue = 4 * Math.abs(phase - 0.5) - 1;
} else if (lfo.type === 'random-walk') {
// Update random walk target every ~100 samples, then glide towards it
if (i % 100 === 0) {
randomWalkTarget = (this.pseudoRandom(seed + i * 0.00001) - 0.5) * 2;
}
randomWalkValue = randomWalkValue * randomWalkSmooth +
randomWalkTarget * (1 - randomWalkSmooth);
lfoValue = randomWalkValue;
}
pitchDeviation += lfoValue * lfo.amplitude;
}
// Normalize by LFO count and scale; maxCents/100 keeps the deviation tiny
pitchDeviation = (pitchDeviation / lfoCount) * (maxCents / 100);
// Convert cents to pitch ratio (very small changes)
const pitchRatio = Math.pow(2, pitchDeviation / 12);
// Calculate time-varying delay for pitch shift.
// NOTE(review): this is delay-line vibrato (modulated delay), not true
// resampling; the *1000 scale is tuned by ear, not derived.
const targetDelay = (pitchRatio - 1) * 1000; // Scaled for subtle effect
currentDelay = currentDelay * delaySmoothing + targetDelay * (1 - delaySmoothing);
// Read from delay buffer with high-quality interpolation
const readPos = i + maxDelaySamples + currentDelay;
const readIdx = Math.floor(readPos);
const frac = readPos - readIdx;
if (readIdx >= 2 && readIdx < delayBuffer.length - 3) {
// 4-point Hermite interpolation for highest quality
const y0 = delayBuffer[readIdx - 1];
const y1 = delayBuffer[readIdx];
const y2 = delayBuffer[readIdx + 1];
const y3 = delayBuffer[readIdx + 2];
output[i] = this.hermiteInterpolate(y0, y1, y2, y3, frac);
} else if (readIdx >= 0 && readIdx < delayBuffer.length - 1) {
// Linear interpolation fallback at boundaries
const y1 = delayBuffer[readIdx];
const y2 = readIdx + 1 < delayBuffer.length ? delayBuffer[readIdx + 1] : y1;
output[i] = y1 + (y2 - y1) * frac;
} else if (readIdx >= 0 && readIdx < delayBuffer.length) {
output[i] = delayBuffer[readIdx];
}
}
// Raised-cosine crossfade from dry to wet over the first/last ~0.2%
// (max 100 samples) so the edges stay click-free
const fadeLength = Math.min(100, Math.floor(length * 0.002));
for (let i = 0; i < fadeLength; i++) {
const fade = i / fadeLength;
const smoothFade = 0.5 - 0.5 * Math.cos(Math.PI * fade);
output[i] = input[i] * (1 - smoothFade) + output[i] * smoothFade;
const endIdx = length - 1 - i;
output[endIdx] = input[endIdx] * (1 - smoothFade) + output[endIdx] * smoothFade;
}
return output;
}
// 4-point Hermite interpolation between y1 and y2 (Catmull-Rom tangents);
// x is the fractional position in [0, 1].
private hermiteInterpolate(y0: number, y1: number, y2: number, y3: number, x: number): number {
// 4-point Hermite interpolation for smooth, high-quality interpolation
const x2 = x * x;
const x3 = x2 * x;
// Hermite basis functions
const h00 = 2 * x3 - 3 * x2 + 1;
const h10 = x3 - 2 * x2 + x;
const h01 = -2 * x3 + 3 * x2;
const h11 = x3 - x2;
// Tangents (using Catmull-Rom)
const m0 = 0.5 * (y2 - y0);
const m1 = 0.5 * (y3 - y1);
return h00 * y1 + h10 * m0 + h01 * y2 + h11 * m1;
}
// Deterministic hash-style pseudo-random in [0, 1) — the classic
// sin-based GLSL hash; same seed always yields the same value.
private pseudoRandom(seed: number): number {
// Simple deterministic pseudo-random number generator
const x = Math.sin(seed * 12.9898 + seed * 78.233) * 43758.5453;
return x - Math.floor(x);
}
}

View File

@ -0,0 +1,64 @@
import type { AudioProcessor } from './AudioProcessor';
/**
 * Splits the sound into 4-11 equal segments, collapses each to mono, and
 * places every segment at a random stereo position using constant-power
 * panning. A raised-cosine crossfade near each segment boundary glides the
 * pan towards the next segment's position to avoid jumps.
 */
export class PanShuffler implements AudioProcessor {
getName(): string {
return 'Pan Shuffler';
}
getDescription(): string {
return 'Smoothly pans segments across the stereo field';
}
process(
leftChannel: Float32Array,
rightChannel: Float32Array
): [Float32Array, Float32Array] {
const total = leftChannel.length;
// 4-11 segments of equal size (the last one absorbs the remainder).
const segmentCount = 4 + Math.floor(Math.random() * 8);
const segmentLength = Math.floor(total / segmentCount);
// Crossfade window: a quarter of a segment, capped at 4410 samples (100 ms).
const fadeSamples = Math.min(4410, Math.floor(segmentLength / 4));
const outL = new Float32Array(total);
const outR = new Float32Array(total);
// One random pan per segment, in [-1, 1] (left .. right).
const pans: number[] = [];
for (let seg = 0; seg < segmentCount; seg++) {
pans.push(Math.random() * 2 - 1);
}
for (let seg = 0; seg < segmentCount; seg++) {
const begin = seg * segmentLength;
const finish = seg === segmentCount - 1 ? total : begin + segmentLength;
const span = finish - begin;
const panHere = pans[seg];
const panNext = seg + 1 < segmentCount ? pans[seg + 1] : panHere;
for (let offset = 0; offset < span; offset++) {
const idx = begin + offset;
if (idx >= total) break;
let pan = panHere;
// Inside the fade window of a non-final segment, glide towards the
// next pan with a raised-cosine curve.
if (seg < segmentCount - 1 && offset >= span - fadeSamples) {
const t = (offset - (span - fadeSamples)) / fadeSamples;
pan = panHere + (panNext - panHere) * (0.5 * (1.0 - Math.cos(Math.PI * t)));
}
// Constant-power pan law: cos/sin of the same quarter-circle angle.
const angle = (pan + 1) * 0.25 * Math.PI;
const mono = (leftChannel[idx] + rightChannel[idx]) * 0.5;
outL[idx] = mono * Math.cos(angle);
outR[idx] = mono * Math.sin(angle);
}
}
return [outL, outR];
}
}

View File

@ -0,0 +1,160 @@
import type { AudioProcessor } from './AudioProcessor';
export class PitchShifter implements AudioProcessor {
  getName(): string {
    return 'Pitch Shifter';
  }

  getDescription(): string {
    return 'Transposes audio up or down in semitones without changing duration';
  }

  /**
   * Transposes both channels by a random amount in [-12, +12] semitones
   * using an overlap-add resampling scheme (PSOLA-style): frames are read
   * at a rate scaled by the pitch ratio but written at a fixed hop, so
   * pitch changes while duration stays constant. Output is trimmed to the
   * input length and peak-normalized to 0.95 if it would clip.
   */
  process(
    leftChannel: Float32Array,
    rightChannel: Float32Array
  ): [Float32Array, Float32Array] {
    const length = leftChannel.length;
    // Overlap-add frame/hop sizes.
    const frameSize = 2048;
    const hopSize = frameSize / 4;
    // Fix: inputs shorter than one analysis frame previously came back as
    // pure silence (the overlap-add loop in pitchShiftPSOLA never ran and
    // there is no remainder branch). Pass such inputs through unchanged.
    if (length < frameSize) {
      return [leftChannel.slice(), rightChannel.slice()];
    }
    // Random pitch shift in semitones (-12 to +12).
    const semitones = (Math.random() - 0.5) * 24;
    const pitchRatio = Math.pow(2, semitones / 12);
    const outputLeft = this.pitchShiftPSOLA(leftChannel, pitchRatio, frameSize, hopSize);
    const outputRight = this.pitchShiftPSOLA(rightChannel, pitchRatio, frameSize, hopSize);
    // Ensure output matches input length exactly (PSOLA buffer is longer).
    const finalLeft = new Float32Array(length);
    const finalRight = new Float32Array(length);
    const copyLength = Math.min(length, outputLeft.length);
    for (let i = 0; i < copyLength; i++) {
      finalLeft[i] = outputLeft[i];
      finalRight[i] = outputRight[i];
    }
    // Normalize only when overlap-add buildup would clip.
    const maxAmp = this.findMaxAmplitude(finalLeft, finalRight);
    if (maxAmp > 0.95) {
      const scale = 0.95 / maxAmp;
      for (let i = 0; i < length; i++) {
        finalLeft[i] *= scale;
        finalRight[i] *= scale;
      }
    }
    return [finalLeft, finalRight];
  }

  /**
   * Overlap-add pitch shifter for one channel. Hann-windowed frames are
   * read from `input` with a read hop scaled by pitchRatio and written at
   * a fixed synthesis hop, preserving duration. Returns a buffer longer
   * than the input (frameSize*2 of headroom); the caller trims it.
   */
  private pitchShiftPSOLA(
    input: Float32Array,
    pitchRatio: number,
    frameSize: number,
    hopSize: number
  ): Float32Array {
    const length = input.length;
    const output = new Float32Array(length + frameSize * 2);
    // Analysis hop size (input reading).
    const analysisHop = hopSize;
    // Synthesis hop size (output writing) - this maintains duration.
    const synthesisHop = hopSize;
    let analysisPos = 0;
    let synthesisPos = 0;
    while (analysisPos + frameSize < length && synthesisPos + frameSize < output.length) {
      // Extract a Hann-windowed frame at the (fractional) analysis position.
      const frame = new Float32Array(frameSize);
      for (let i = 0; i < frameSize; i++) {
        const window = this.hannWindow(i / frameSize);
        const inputIdx = Math.floor(analysisPos + i);
        if (inputIdx < length) {
          frame[i] = input[inputIdx] * window;
        }
      }
      // Resample the frame to achieve the pitch shift.
      const resampledFrame = this.resampleFrame(frame, pitchRatio);
      // Overlap-add into the output buffer.
      for (let i = 0; i < resampledFrame.length; i++) {
        const outputIdx = Math.floor(synthesisPos + i);
        if (outputIdx < output.length) {
          output[outputIdx] += resampledFrame[i];
        }
      }
      // Key insight: the read position advances by pitchRatio to get the
      // correct pitch; the write position advances by a fixed hop to keep
      // the original duration.
      analysisPos += analysisHop * pitchRatio;
      synthesisPos += synthesisHop;
      // Wrap the read position if needed (for extreme pitch-up, the reader
      // can exhaust the input while the writer is still early in the buffer).
      if (analysisPos >= length - frameSize && synthesisPos < length / 2) {
        analysisPos = analysisPos % (length - frameSize);
      }
    }
    return output;
  }

  /**
   * Resamples one windowed frame by pitchRatio (same output size), using
   * Catmull-Rom interpolation, then re-windows the result to smooth the
   * frame edges for overlap-add.
   */
  private resampleFrame(frame: Float32Array, pitchRatio: number): Float32Array {
    const outputSize = frame.length;
    const output = new Float32Array(outputSize);
    for (let i = 0; i < outputSize; i++) {
      // Read from the frame at the pitch-shifted position.
      const srcPos = i * pitchRatio;
      const srcIdx = Math.floor(srcPos);
      const frac = srcPos - srcIdx;
      if (srcIdx < frame.length - 1) {
        // Cubic interpolation; clamp neighbours at the frame edges.
        const y0 = srcIdx > 0 ? frame[srcIdx - 1] : frame[0];
        const y1 = frame[srcIdx];
        const y2 = frame[srcIdx + 1];
        const y3 = srcIdx < frame.length - 2 ? frame[srcIdx + 2] : frame[frame.length - 1];
        output[i] = this.cubicInterpolate(y0, y1, y2, y3, frac);
      } else if (srcIdx < frame.length) {
        output[i] = frame[srcIdx];
      }
      // Positions past the frame end stay zero (natural fade for ratio > 1).
    }
    // Apply the window again to smooth the resampled frame.
    for (let i = 0; i < outputSize; i++) {
      output[i] *= this.hannWindow(i / outputSize);
    }
    return output;
  }

  /** Catmull-Rom cubic interpolation between y1 and y2 at fraction x. */
  private cubicInterpolate(y0: number, y1: number, y2: number, y3: number, x: number): number {
    return y1 + 0.5 * x * (y2 - y0 +
      x * (2 * y0 - 5 * y1 + 4 * y2 - y3 +
        x * (3 * (y1 - y2) + y3 - y0)));
  }

  /** Hann window value for a normalized position in [0, 1). */
  private hannWindow(position: number): number {
    return 0.5 - 0.5 * Math.cos(2 * Math.PI * position);
  }

  /** Peak absolute sample value across both channels. */
  private findMaxAmplitude(left: Float32Array, right: Float32Array): number {
    let max = 0;
    for (let i = 0; i < left.length; i++) {
      max = Math.max(max, Math.abs(left[i]), Math.abs(right[i]));
    }
    return max;
  }
}

View File

@ -0,0 +1,114 @@
import type { AudioProcessor } from './AudioProcessor';
export class PitchWobble implements AudioProcessor {
  getName(): string {
    return 'Pitch Wobble';
  }

  getDescription(): string {
    return 'Variable-rate playback with LFO modulation for tape wow/vibrato effects';
  }

  /**
   * Plays the input back through a delay line at a slowly-varying rate.
   * A primary LFO (sine/triangle blend) plus a faster, quieter secondary
   * sine modulate the playback rate around 1.0, producing tape-style
   * wow / vibrato. Soft clipping tames any interpolation overshoot.
   */
  process(
    leftChannel: Float32Array,
    rightChannel: Float32Array
  ): [Float32Array, Float32Array] {
    const totalSamples = leftChannel.length;
    const sampleRate = 44100;

    // Randomized modulation settings.
    const wobbleRate = 0.5 + Math.random() * 4.5;      // primary LFO, 0.5-5 Hz
    const wobbleDepth = 0.001 + Math.random() * 0.019; // 0.1% to 2% rate deviation
    const shapeBlend = Math.random();                  // 0 = pure sine, 1 = pure triangle
    // Faster, shallower secondary LFO adds organic irregularity.
    const shimmerRate = wobbleRate * (2 + Math.random() * 3);
    const shimmerDepth = wobbleDepth * 0.3;

    const outLeft = new Float32Array(totalSamples);
    const outRight = new Float32Array(totalSamples);

    // Delay line with headroom for the wandering read head (100ms max).
    const maxDelay = Math.ceil(sampleRate * 0.1);
    const lineL = new Float32Array(totalSamples + maxDelay);
    const lineR = new Float32Array(totalSamples + maxDelay);
    for (let i = 0; i < totalSamples; i++) {
      lineL[i + maxDelay / 2] = leftChannel[i];
      lineR[i + maxDelay / 2] = rightChannel[i];
    }

    // Fractional read head, starting centred in the headroom.
    let head = maxDelay / 2;
    for (let i = 0; i < totalSamples; i++) {
      const t = i / sampleRate;
      // Primary LFO: crossfade between sine and triangle shapes.
      const sine = Math.sin(2 * Math.PI * wobbleRate * t);
      const triangle = 2 * Math.abs(2 * ((wobbleRate * t) % 1) - 1) - 1;
      const primary = sine * (1 - shapeBlend) + triangle * shapeBlend;
      // Secondary LFO, phase-offset so the two never align exactly.
      const secondary = Math.sin(2 * Math.PI * shimmerRate * t + Math.PI / 3);
      // Instantaneous playback rate, hovering around 1.0.
      const rate = 1 + (primary * wobbleDepth + secondary * shimmerDepth);
      // Interpolated read plus soft limiting in a single pass.
      outLeft[i] = this.softClip(this.cubicInterpolate(lineL, head));
      outRight[i] = this.softClip(this.cubicInterpolate(lineR, head));
      head += rate;
    }
    return [outLeft, outRight];
  }

  /** 4-point cubic read at a fractional position; clamps at buffer edges. */
  private cubicInterpolate(buffer: Float32Array, position: number): number {
    const base = Math.floor(position);
    const frac = position - base;
    if (base < 1 || base >= buffer.length - 2) {
      return buffer[Math.min(Math.max(base, 0), buffer.length - 1)];
    }
    const y0 = buffer[base - 1];
    const y1 = buffer[base];
    const y2 = buffer[base + 1];
    const y3 = buffer[base + 2];
    const a0 = y3 - y2 - y0 + y1;
    const a1 = y0 - y1 - a0;
    const a2 = y2 - y0;
    const a3 = y1;
    const frac2 = frac * frac;
    const frac3 = frac2 * frac;
    return a0 * frac3 + a1 * frac2 + a2 * frac + a3;
  }

  /** Transparent below 0.95; tanh-shaped compression above the threshold. */
  private softClip(sample: number): number {
    const threshold = 0.95;
    if (Math.abs(sample) < threshold) {
      return sample;
    }
    const sign = sample > 0 ? 1 : -1;
    const excess = Math.abs(sample) - threshold;
    return sign * (threshold + Math.tanh(excess * 2) * (1 - threshold));
  }
}

View File

@ -0,0 +1,27 @@
import type { AudioProcessor } from './AudioProcessor';
export class Reverser implements AudioProcessor {
  getName(): string {
    return 'Reverser';
  }

  getDescription(): string {
    return 'Plays the sound backwards';
  }

  /** Returns fresh buffers holding both channels in reverse sample order. */
  process(
    leftChannel: Float32Array,
    rightChannel: Float32Array
  ): [Float32Array, Float32Array] {
    // Copy first, then reverse in place — the inputs are left untouched.
    const reversedLeft = Float32Array.from(leftChannel).reverse();
    const reversedRight = Float32Array.from(rightChannel).reverse();
    return [reversedLeft, reversedRight];
  }
}

View File

@ -0,0 +1,116 @@
import type { AudioProcessor } from './AudioProcessor';
export class SegmentShuffler implements AudioProcessor {
  getName(): string {
    return 'Segment Shuffler';
  }

  getDescription(): string {
    return 'Randomly reorganizes and swaps parts of the sound';
  }

  /**
   * Chops the sound into 4-16 pieces, shuffles them, and reassembles the
   * result with raised-cosine (Hann) fades at every joint plus a master
   * fade at the buffer edges, so no transition can click.
   */
  process(
    leftChannel: Float32Array,
    rightChannel: Float32Array
  ): [Float32Array, Float32Array] {
    const totalSamples = leftChannel.length;
    const pieceCount = Math.floor(Math.random() * 13) + 4;
    const pieceSize = Math.floor(totalSamples / pieceCount);
    // 441 samples = 10ms at 44.1kHz; deliberately long to hide abrupt joins.
    const jointFade = 441;

    // Slice both channels into matching pieces (last piece takes the remainder).
    const pieces: Array<{ left: Float32Array; right: Float32Array }> = [];
    for (let p = 0; p < pieceCount; p++) {
      const from = p * pieceSize;
      const to = p === pieceCount - 1 ? totalSamples : (p + 1) * pieceSize;
      pieces.push({
        left: leftChannel.slice(from, to),
        right: rightChannel.slice(from, to)
      });
    }
    this.shuffleArray(pieces);

    const outLeft = new Float32Array(totalSamples);
    const outRight = new Float32Array(totalSamples);
    let cursor = 0;
    for (let p = 0; p < pieces.length; p++) {
      const piece = pieces[p];
      const pieceLen = piece.left.length;
      // No fade-in on the very first piece, no fade-out on the very last;
      // fades are capped at half a piece so every piece keeps a plateau.
      let fadeIn = p === 0 ? 0 : Math.min(jointFade, Math.floor(pieceLen / 2));
      let fadeOut = p === pieces.length - 1 ? 0 : Math.min(jointFade, Math.floor(pieceLen / 2));
      // Shrink both fades proportionally if they would overlap.
      if (fadeIn + fadeOut > pieceLen) {
        const shrink = pieceLen / (fadeIn + fadeOut);
        fadeIn = Math.floor(fadeIn * shrink);
        fadeOut = Math.floor(fadeOut * shrink);
      }
      for (let k = 0; k < pieceLen && cursor < totalSamples; k++, cursor++) {
        let gain = 1.0;
        // Hann fade-in: 0.5 * (1 - cos(pi*t)) has C2 continuity — no clicks.
        if (fadeIn > 0 && k < fadeIn) {
          gain *= 0.5 * (1.0 - Math.cos(Math.PI * (k / fadeIn)));
        }
        // Mirrored Hann fade-out over the piece tail.
        if (fadeOut > 0 && k >= pieceLen - fadeOut) {
          const u = (k - (pieceLen - fadeOut)) / fadeOut;
          gain *= 0.5 * (1.0 + Math.cos(Math.PI * u));
        }
        outLeft[cursor] = piece.left[k] * gain;
        outRight[cursor] = piece.right[k] * gain;
      }
    }

    // Master Hann fades force the whole buffer to start and end at zero.
    const edgeFade = 441;
    for (let i = 0; i < Math.min(edgeFade, totalSamples); i++) {
      const gainIn = 0.5 * (1.0 - Math.cos(Math.PI * (i / edgeFade)));
      outLeft[i] *= gainIn;
      outRight[i] *= gainIn;
      const tail = totalSamples - 1 - i;
      const gainOut = 0.5 * (1.0 + Math.cos(Math.PI * (i / edgeFade)));
      outLeft[tail] *= gainOut;
      outRight[tail] *= gainOut;
    }
    return [outLeft, outRight];
  }

  /** In-place Fisher-Yates shuffle. */
  private shuffleArray<T>(array: T[]): void {
    for (let i = array.length - 1; i > 0; i--) {
      const j = Math.floor(Math.random() * (i + 1));
      [array[i], array[j]] = [array[j], array[i]];
    }
  }
}

View File

@ -0,0 +1,456 @@
import type { AudioProcessor } from './AudioProcessor';
export class SpectralBlur implements AudioProcessor {
  getName(): string {
    return 'Spectral Blur';
  }

  getDescription(): string {
    return 'Smears frequency content across neighboring bins for dreamy, diffused textures';
  }

  /**
   * STFT-based spectral blur: each channel is cut into 75%-overlapping
   * Hann-windowed FFT frames, a gaussian kernel smears the spectrum inside
   * a randomly chosen frequency band, and the frames are overlap-added
   * back. Output RMS is matched to the input and a safety limiter caps
   * peaks at 0.99. Output length always equals input length.
   * (Fix: removed an unused `overlap` local.)
   */
  process(
    leftChannel: Float32Array,
    rightChannel: Float32Array
  ): [Float32Array, Float32Array] {
    const length = leftChannel.length;
    // Random parameters for this run.
    const blurAmount = 0.3 + Math.random() * 0.6; // 0.3 to 0.9
    const frequencyRangeLow = Math.random() * 0.2; // 0 to 0.2 (low freq cutoff)
    const frequencyRangeHigh = 0.5 + Math.random() * 0.5; // 0.5 to 1.0 (high freq cutoff)
    // FFT parameters for quality spectral processing.
    const fftSize = 4096;
    const hopSize = Math.floor(fftSize * 0.25); // 75% overlap
    const outputLeft = this.processChannel(
      leftChannel,
      fftSize,
      hopSize,
      blurAmount,
      frequencyRangeLow,
      frequencyRangeHigh
    );
    const outputRight = this.processChannel(
      rightChannel,
      fftSize,
      hopSize,
      blurAmount,
      frequencyRangeLow,
      frequencyRangeHigh
    );
    // Ensure output matches input length (processChannel pads by fftSize).
    const finalLeft = new Float32Array(length);
    const finalRight = new Float32Array(length);
    const copyLength = Math.min(length, outputLeft.length);
    for (let i = 0; i < copyLength; i++) {
      finalLeft[i] = outputLeft[i];
      finalRight[i] = outputRight[i];
    }
    // Match output RMS to input RMS to maintain perceived loudness.
    const inputRMS = this.calculateRMS(leftChannel, rightChannel);
    const outputRMS = this.calculateRMS(finalLeft, finalRight);
    if (outputRMS > 0.0001) {
      const rmsScale = inputRMS / outputRMS;
      for (let i = 0; i < length; i++) {
        finalLeft[i] *= rmsScale;
        finalRight[i] *= rmsScale;
      }
    }
    // Safety limiter to prevent clipping.
    const maxAmp = this.findMaxAmplitude(finalLeft, finalRight);
    if (maxAmp > 0.99) {
      const scale = 0.99 / maxAmp;
      for (let i = 0; i < length; i++) {
        finalLeft[i] *= scale;
        finalRight[i] *= scale;
      }
    }
    return [finalLeft, finalRight];
  }

  /**
   * STFT pipeline for one channel: window each frame, FFT, blur, IFFT,
   * overlap-add with COLA gain compensation. A final zero-padded frame
   * handles any samples left over after the main loop. Returns a buffer
   * fftSize longer than the input; the caller trims it.
   * (Fix: removed an unused `overlap` local.)
   */
  private processChannel(
    input: Float32Array,
    fftSize: number,
    hopSize: number,
    blurAmount: number,
    freqRangeLow: number,
    freqRangeHigh: number
  ): Float32Array {
    const length = input.length;
    const output = new Float32Array(length + fftSize);
    const window = this.createHannWindow(fftSize);
    // COLA normalization factor for 75%-overlapped Hann windows.
    const colaNorm = this.calculateCOLANorm(window, hopSize);
    let inputPos = 0;
    let outputPos = 0;
    while (inputPos + fftSize <= length) {
      // Extract windowed frame.
      const frame = new Float32Array(fftSize);
      for (let i = 0; i < fftSize; i++) {
        frame[i] = input[inputPos + i] * window[i];
      }
      const spectrum = this.fft(frame);
      const blurredSpectrum = this.applySpectralBlur(
        spectrum,
        blurAmount,
        freqRangeLow,
        freqRangeHigh
      );
      const processedFrame = this.ifft(blurredSpectrum);
      // Overlap-add with gain compensation.
      for (let i = 0; i < fftSize; i++) {
        output[outputPos + i] += processedFrame[i].real * colaNorm;
      }
      inputPos += hopSize;
      outputPos += hopSize;
    }
    // Process remaining samples (zero-padded final frame), if any.
    if (inputPos < length) {
      const remainingSamples = length - inputPos;
      const frame = new Float32Array(fftSize);
      for (let i = 0; i < remainingSamples; i++) {
        frame[i] = input[inputPos + i] * window[i];
      }
      const spectrum = this.fft(frame);
      const blurredSpectrum = this.applySpectralBlur(
        spectrum,
        blurAmount,
        freqRangeLow,
        freqRangeHigh
      );
      const processedFrame = this.ifft(blurredSpectrum);
      for (let i = 0; i < fftSize; i++) {
        if (outputPos + i < output.length) {
          output[outputPos + i] += processedFrame[i].real * colaNorm;
        }
      }
    }
    return output;
  }

  /**
   * Convolves the positive-frequency bins inside [freqRangeLow,
   * freqRangeHigh] (fractions of Nyquist) with a gaussian kernel, leaving
   * bins outside the band untouched, then mirrors the result onto the
   * negative frequencies to keep the spectrum Hermitian (real IFFT).
   */
  private applySpectralBlur(
    spectrum: Complex[],
    blurAmount: number,
    freqRangeLow: number,
    freqRangeHigh: number
  ): Complex[] {
    const result = new Array<Complex>(spectrum.length);
    const halfSize = Math.floor(spectrum.length / 2);
    // Gaussian kernel; `| 1` forces an odd width so it has a centre bin.
    const kernelSize = Math.floor(blurAmount * 20) | 1;
    const kernel = this.createGaussianKernel(kernelSize, blurAmount * 5);
    const kernelCenter = Math.floor(kernelSize / 2);
    // Frequency bin range to process.
    const binLow = Math.floor(freqRangeLow * halfSize);
    const binHigh = Math.floor(freqRangeHigh * halfSize);
    // Process positive frequencies.
    for (let i = 0; i < halfSize; i++) {
      if (i >= binLow && i <= binHigh) {
        // Weighted average of neighbouring bins; re-normalized at the
        // spectrum edges where the kernel is clipped.
        let realSum = 0;
        let imagSum = 0;
        let weightSum = 0;
        for (let k = 0; k < kernelSize; k++) {
          const binIdx = i + k - kernelCenter;
          if (binIdx >= 0 && binIdx < halfSize) {
            const weight = kernel[k];
            realSum += spectrum[binIdx].real * weight;
            imagSum += spectrum[binIdx].imag * weight;
            weightSum += weight;
          }
        }
        if (weightSum > 0) {
          result[i] = {
            real: realSum / weightSum,
            imag: imagSum / weightSum
          };
        } else {
          result[i] = spectrum[i];
        }
      } else {
        // Keep original spectrum outside the blur range.
        result[i] = spectrum[i];
      }
    }
    // Mirror for negative frequencies (maintain Hermitian symmetry).
    for (let i = halfSize; i < spectrum.length; i++) {
      const mirrorIdx = spectrum.length - i;
      if (mirrorIdx > 0 && mirrorIdx < halfSize) {
        result[i] = {
          real: result[mirrorIdx].real,
          imag: -result[mirrorIdx].imag
        };
      } else {
        result[i] = spectrum[i];
      }
    }
    return result;
  }

  /** Normalized gaussian kernel of odd `size` with the given sigma. */
  private createGaussianKernel(size: number, sigma: number): Float32Array {
    const kernel = new Float32Array(size);
    const center = Math.floor(size / 2);
    let sum = 0;
    for (let i = 0; i < size; i++) {
      const x = i - center;
      kernel[i] = Math.exp(-(x * x) / (2 * sigma * sigma));
      sum += kernel[i];
    }
    // Normalize so the weights sum to 1.
    for (let i = 0; i < size; i++) {
      kernel[i] /= sum;
    }
    return kernel;
  }

  /** Symmetric Hann window (zero at both endpoints). */
  private createHannWindow(size: number): Float32Array {
    const window = new Float32Array(size);
    for (let i = 0; i < size; i++) {
      window[i] = 0.5 - 0.5 * Math.cos((2 * Math.PI * i) / (size - 1));
    }
    return window;
  }

  /**
   * Reciprocal of the summed squared window values at the frame centre for
   * the given hop — used as an overlap-add gain compensation factor.
   * NOTE(review): the window is applied once per frame but this sums
   * window^2 (as if applied twice); the RMS matching in process()
   * compensates overall level, so output loudness is still correct —
   * confirm if exact COLA reconstruction is ever required.
   */
  private calculateCOLANorm(window: Float32Array, hopSize: number): number {
    const fftSize = window.length;
    let windowSum = 0;
    // Sum all overlapping windows at the center point.
    for (let offset = 0; offset < fftSize; offset += hopSize) {
      const idx = Math.floor(fftSize / 2) - offset;
      if (idx >= 0 && idx < fftSize) {
        windowSum += window[idx] * window[idx];
      }
    }
    // Also check overlap from the other direction.
    for (let offset = hopSize; offset < fftSize; offset += hopSize) {
      const idx = Math.floor(fftSize / 2) + offset;
      if (idx >= 0 && idx < fftSize) {
        windowSum += window[idx] * window[idx];
      }
    }
    return windowSum > 0 ? 1.0 / windowSum : 1.0;
  }

  /**
   * Iterative radix-2 Cooley-Tukey FFT of a real-valued frame.
   * Input length must be a power of two (the bit-reversal permutation
   * assumes it; fftSize is 4096 here).
   */
  private fft(input: Float32Array): Complex[] {
    const n = input.length;
    const output: Complex[] = new Array(n);
    // Initialize with input as real values.
    for (let i = 0; i < n; i++) {
      output[i] = { real: input[i], imag: 0 };
    }
    // Bit-reversal permutation.
    const bits = Math.log2(n);
    for (let i = 0; i < n; i++) {
      const j = this.bitReverse(i, bits);
      if (j > i) {
        const temp = output[i];
        output[i] = output[j];
        output[j] = temp;
      }
    }
    // Cooley-Tukey butterfly passes, doubling the sub-transform size.
    for (let size = 2; size <= n; size *= 2) {
      const halfSize = size / 2;
      const angleStep = -2 * Math.PI / size;
      for (let start = 0; start < n; start += size) {
        for (let i = 0; i < halfSize; i++) {
          const angle = angleStep * i;
          const twiddle = {
            real: Math.cos(angle),
            imag: Math.sin(angle)
          };
          const evenIdx = start + i;
          const oddIdx = start + i + halfSize;
          const even = output[evenIdx];
          const odd = output[oddIdx];
          const twiddledOdd = {
            real: odd.real * twiddle.real - odd.imag * twiddle.imag,
            imag: odd.real * twiddle.imag + odd.imag * twiddle.real
          };
          output[evenIdx] = {
            real: even.real + twiddledOdd.real,
            imag: even.imag + twiddledOdd.imag
          };
          output[oddIdx] = {
            real: even.real - twiddledOdd.real,
            imag: even.imag - twiddledOdd.imag
          };
        }
      }
    }
    return output;
  }

  /**
   * Inverse FFT via the conjugation trick: conjugate, run a forward FFT,
   * then conjugate again and scale by 1/n.
   */
  private ifft(input: Complex[]): Complex[] {
    const n = input.length;
    const output: Complex[] = new Array(n);
    // Copy input.
    for (let i = 0; i < n; i++) {
      output[i] = { real: input[i].real, imag: input[i].imag };
    }
    // Conjugate.
    for (let i = 0; i < n; i++) {
      output[i].imag = -output[i].imag;
    }
    // Forward FFT.
    const transformed = this.fftComplex(output);
    // Conjugate and scale.
    const scale = 1 / n;
    for (let i = 0; i < n; i++) {
      transformed[i].real *= scale;
      transformed[i].imag *= -scale;
    }
    return transformed;
  }

  /** Forward FFT for complex input (used by ifft). Same kernel as fft(). */
  private fftComplex(input: Complex[]): Complex[] {
    const n = input.length;
    const output: Complex[] = new Array(n);
    // Copy input.
    for (let i = 0; i < n; i++) {
      output[i] = { real: input[i].real, imag: input[i].imag };
    }
    // Bit-reversal permutation.
    const bits = Math.log2(n);
    for (let i = 0; i < n; i++) {
      const j = this.bitReverse(i, bits);
      if (j > i) {
        const temp = output[i];
        output[i] = output[j];
        output[j] = temp;
      }
    }
    // Cooley-Tukey butterfly passes.
    for (let size = 2; size <= n; size *= 2) {
      const halfSize = size / 2;
      const angleStep = -2 * Math.PI / size;
      for (let start = 0; start < n; start += size) {
        for (let i = 0; i < halfSize; i++) {
          const angle = angleStep * i;
          const twiddle = {
            real: Math.cos(angle),
            imag: Math.sin(angle)
          };
          const evenIdx = start + i;
          const oddIdx = start + i + halfSize;
          const even = output[evenIdx];
          const odd = output[oddIdx];
          const twiddledOdd = {
            real: odd.real * twiddle.real - odd.imag * twiddle.imag,
            imag: odd.real * twiddle.imag + odd.imag * twiddle.real
          };
          output[evenIdx] = {
            real: even.real + twiddledOdd.real,
            imag: even.imag + twiddledOdd.imag
          };
          output[oddIdx] = {
            real: even.real - twiddledOdd.real,
            imag: even.imag - twiddledOdd.imag
          };
        }
      }
    }
    return output;
  }

  /** Reverses the lowest `bits` bits of n (FFT index permutation). */
  private bitReverse(n: number, bits: number): number {
    let reversed = 0;
    for (let i = 0; i < bits; i++) {
      reversed = (reversed << 1) | ((n >> i) & 1);
    }
    return reversed;
  }

  /** Peak absolute sample value across both channels. */
  private findMaxAmplitude(left: Float32Array, right: Float32Array): number {
    let max = 0;
    for (let i = 0; i < left.length; i++) {
      max = Math.max(max, Math.abs(left[i]), Math.abs(right[i]));
    }
    return max;
  }

  /** Combined RMS over both channels. */
  private calculateRMS(left: Float32Array, right: Float32Array): number {
    let sumSquares = 0;
    const length = left.length;
    for (let i = 0; i < length; i++) {
      sumSquares += left[i] * left[i] + right[i] * right[i];
    }
    return Math.sqrt(sumSquares / (2 * length));
  }
}
// One complex-valued spectrum sample, used by the FFT/IFFT routines above.
interface Complex {
  real: number;
  imag: number;
}

View File

@ -0,0 +1,427 @@
import type { AudioProcessor } from './AudioProcessor';
export class SpectralShift implements AudioProcessor {
  // Display name shown in the processor picker.
  getName(): string {
    return 'Spectral Shift';
  }
  // Short user-facing description of the effect.
  getDescription(): string {
    return 'Shifts all frequencies by a fixed Hz amount creating inharmonic, metallic timbres';
  }
  /**
   * Shifts every frequency component of both channels by a random fixed
   * Hz offset (-3000..+3000), which breaks harmonic ratios and yields
   * metallic, inharmonic timbres. The right channel gets a slightly
   * different shift for stereo width. Output RMS is matched back to the
   * input and a limiter keeps peaks below 0.99; output length always
   * equals input length.
   */
  process(
    leftChannel: Float32Array,
    rightChannel: Float32Array
  ): [Float32Array, Float32Array] {
    const length = leftChannel.length;
    const sampleRate = 44100; // assumes 44.1kHz material — TODO confirm against AudioService
    // Random parameters for frequency shift
    const shiftAmount = (Math.random() - 0.5) * 6000; // -3000 to +3000 Hz
    const feedbackAmount = Math.random() * 0.3; // 0 to 0.3 for subtle feedback
    const dryWet = 0.5 + Math.random() * 0.5; // 0.5 to 1.0 (always some effect)
    // FFT parameters
    const fftSize = 2048;
    const hopSize = Math.floor(fftSize * 0.25); // 75% overlap
    const outputLeft = this.processChannel(
      leftChannel,
      fftSize,
      hopSize,
      shiftAmount,
      feedbackAmount,
      dryWet,
      sampleRate
    );
    const outputRight = this.processChannel(
      rightChannel,
      fftSize,
      hopSize,
      shiftAmount * (0.9 + Math.random() * 0.2), // Slight stereo variation
      feedbackAmount,
      dryWet,
      sampleRate
    );
    // Ensure output matches input length (processChannel pads by fftSize)
    const finalLeft = new Float32Array(length);
    const finalRight = new Float32Array(length);
    const copyLength = Math.min(length, outputLeft.length);
    for (let i = 0; i < copyLength; i++) {
      finalLeft[i] = outputLeft[i];
      finalRight[i] = outputRight[i];
    }
    // Match output RMS to input RMS to maintain perceived loudness
    const inputRMS = this.calculateRMS(leftChannel, rightChannel);
    const outputRMS = this.calculateRMS(finalLeft, finalRight);
    if (outputRMS > 0.0001) {
      const rmsScale = inputRMS / outputRMS;
      for (let i = 0; i < length; i++) {
        finalLeft[i] *= rmsScale;
        finalRight[i] *= rmsScale;
      }
    }
    // Safety limiter to prevent clipping
    const maxAmp = this.findMaxAmplitude(finalLeft, finalRight);
    if (maxAmp > 0.99) {
      const scale = 0.99 / maxAmp;
      for (let i = 0; i < length; i++) {
        finalLeft[i] *= scale;
        finalRight[i] *= scale;
      }
    }
    return [finalLeft, finalRight];
  }
  /**
   * STFT pipeline for one channel: Hann-window each frame (plus a scaled
   * copy of the previously processed frame as spectral feedback), FFT,
   * rotate the bins by the Hz shift, IFFT, then overlap-add with COLA
   * gain compensation and a per-sample dry/wet mix. A final zero-padded
   * frame handles any leftover samples. Returns a buffer fftSize longer
   * than the input; the caller trims it.
   */
  private processChannel(
    input: Float32Array,
    fftSize: number,
    hopSize: number,
    shiftHz: number,
    feedbackAmount: number,
    dryWet: number,
    sampleRate: number
  ): Float32Array {
    const length = input.length;
    const output = new Float32Array(length + fftSize);
    const window = this.createHannWindow(fftSize);
    // Last processed frame (halved), mixed back into the next frame's input.
    const feedback = new Float32Array(fftSize);
    // Calculate COLA normalization factor for 75% overlap with Hann window
    const colaNorm = this.calculateCOLANorm(window, hopSize);
    // Convert Hz shift to a whole number of FFT bins.
    const binShift = Math.round((shiftHz * fftSize) / sampleRate);
    let inputPos = 0;
    let outputPos = 0;
    while (inputPos + fftSize <= length) {
      // Extract windowed frame (dry input + scaled feedback)
      const frame = new Float32Array(fftSize);
      for (let i = 0; i < fftSize; i++) {
        const drySignal = input[inputPos + i];
        const fbSignal = feedback[i] * feedbackAmount;
        frame[i] = (drySignal + fbSignal) * window[i];
      }
      // FFT
      const spectrum = this.fft(frame);
      // Apply frequency shift
      const shiftedSpectrum = this.applyFrequencyShift(spectrum, binShift);
      // IFFT
      const processedFrame = this.ifft(shiftedSpectrum);
      // Store for feedback (attenuated to keep the loop stable)
      for (let i = 0; i < fftSize; i++) {
        feedback[i] = processedFrame[i].real * 0.5;
      }
      // Overlap-add with proper gain compensation and dry/wet mix
      for (let i = 0; i < fftSize; i++) {
        const wet = processedFrame[i].real * colaNorm;
        const dry = input[inputPos + i];
        output[outputPos + i] += dry * (1 - dryWet) + wet * dryWet;
      }
      inputPos += hopSize;
      outputPos += hopSize;
    }
    // Process remaining samples with one zero-padded frame (no feedback)
    if (inputPos < length) {
      const remainingSamples = length - inputPos;
      const frame = new Float32Array(fftSize);
      for (let i = 0; i < remainingSamples; i++) {
        frame[i] = input[inputPos + i] * window[i];
      }
      const spectrum = this.fft(frame);
      const shiftedSpectrum = this.applyFrequencyShift(spectrum, binShift);
      const processedFrame = this.ifft(shiftedSpectrum);
      for (let i = 0; i < fftSize && outputPos + i < output.length; i++) {
        const wet = processedFrame[i].real * colaNorm;
        const dry = i < remainingSamples ? input[inputPos + i] : 0;
        output[outputPos + i] += dry * (1 - dryWet) + wet * dryWet;
      }
    }
    return output;
  }
  /**
   * Rotates the positive-frequency bins by binShift. Bins pushed below DC
   * fold back onto positive frequencies with conjugated phase (creating
   * beating); bins pushed past Nyquist are dropped (a natural lowpass).
   * A 3-point smoothing pass reduces artifacts, then the negative
   * frequencies are rebuilt so the spectrum stays Hermitian and the IFFT
   * output stays real.
   */
  private applyFrequencyShift(spectrum: Complex[], binShift: number): Complex[] {
    const result: Complex[] = new Array(spectrum.length);
    const halfSize = Math.floor(spectrum.length / 2);
    // Initialize with zeros
    for (let i = 0; i < spectrum.length; i++) {
      result[i] = { real: 0, imag: 0 };
    }
    // Shift positive frequencies
    for (let i = 0; i < halfSize; i++) {
      const newBin = i + binShift;
      if (newBin >= 0 && newBin < halfSize) {
        // Simple shift within bounds
        result[newBin] = {
          real: spectrum[i].real,
          imag: spectrum[i].imag
        };
      } else if (newBin < 0) {
        // Frequency shifted below 0 Hz - mirror to positive with phase inversion
        const mirrorBin = Math.abs(newBin);
        if (mirrorBin < halfSize) {
          // Add to existing content (creates interesting beating)
          result[mirrorBin] = {
            real: result[mirrorBin].real + spectrum[i].real,
            imag: result[mirrorBin].imag - spectrum[i].imag // Phase inversion
          };
        }
      }
      // Frequencies shifted above Nyquist are discarded (natural lowpass)
    }
    // Apply spectral smoothing to reduce artifacts
    const smoothed = this.smoothSpectrum(result, halfSize);
    // Reconstruct negative frequencies for Hermitian symmetry
    for (let i = 1; i < halfSize; i++) {
      const negIdx = spectrum.length - i;
      smoothed[negIdx] = {
        real: smoothed[i].real,
        imag: -smoothed[i].imag
      };
    }
    // DC and Nyquist must be purely real for a real time-domain signal
    smoothed[0].imag = 0;
    if (halfSize * 2 === spectrum.length) {
      smoothed[halfSize].imag = 0;
    }
    return smoothed;
  }
private smoothSpectrum(spectrum: Complex[], halfSize: number): Complex[] {
const result: Complex[] = new Array(spectrum.length);
// Copy spectrum
for (let i = 0; i < spectrum.length; i++) {
result[i] = { real: spectrum[i].real, imag: spectrum[i].imag };
}
// Apply 3-point smoothing to reduce artifacts
for (let i = 1; i < halfSize - 1; i++) {
result[i] = {
real: (spectrum[i - 1].real * 0.25 + spectrum[i].real * 0.5 + spectrum[i + 1].real * 0.25),
imag: (spectrum[i - 1].imag * 0.25 + spectrum[i].imag * 0.5 + spectrum[i + 1].imag * 0.25)
};
}
return result;
}
private createHannWindow(size: number): Float32Array {
const window = new Float32Array(size);
for (let i = 0; i < size; i++) {
window[i] = 0.5 - 0.5 * Math.cos((2 * Math.PI * i) / (size - 1));
}
return window;
}
private calculateCOLANorm(window: Float32Array, hopSize: number): number {
// Calculate the sum of overlapping windows at any point
const fftSize = window.length;
let windowSum = 0;
// Sum all overlapping windows at the center point
for (let offset = 0; offset < fftSize; offset += hopSize) {
const idx = Math.floor(fftSize / 2) - offset;
if (idx >= 0 && idx < fftSize) {
windowSum += window[idx] * window[idx];
}
}
// Also check overlap from the other direction
for (let offset = hopSize; offset < fftSize; offset += hopSize) {
const idx = Math.floor(fftSize / 2) + offset;
if (idx >= 0 && idx < fftSize) {
windowSum += window[idx] * window[idx];
}
}
return windowSum > 0 ? 1.0 / windowSum : 1.0;
}
  // Cooley-Tukey FFT implementation
  /**
   * Iterative radix-2 FFT of a real-valued frame. The input length must be
   * a power of two (the bit-reversal permutation assumes it; fftSize is
   * 2048 here). Returns the full complex spectrum.
   */
  private fft(input: Float32Array): Complex[] {
    const n = input.length;
    const output: Complex[] = new Array(n);
    // Initialize with input as real values
    for (let i = 0; i < n; i++) {
      output[i] = { real: input[i], imag: 0 };
    }
    // Bit reversal permutation puts elements in butterfly order
    const bits = Math.log2(n);
    for (let i = 0; i < n; i++) {
      const j = this.bitReverse(i, bits);
      if (j > i) {
        const temp = output[i];
        output[i] = output[j];
        output[j] = temp;
      }
    }
    // Cooley-Tukey butterfly operations, doubling sub-transform size each pass
    for (let size = 2; size <= n; size *= 2) {
      const halfSize = size / 2;
      const angleStep = -2 * Math.PI / size;
      for (let start = 0; start < n; start += size) {
        for (let i = 0; i < halfSize; i++) {
          const angle = angleStep * i;
          const twiddle = {
            real: Math.cos(angle),
            imag: Math.sin(angle)
          };
          const evenIdx = start + i;
          const oddIdx = start + i + halfSize;
          const even = output[evenIdx];
          const odd = output[oddIdx];
          // Complex multiply: odd * twiddle
          const twiddledOdd = {
            real: odd.real * twiddle.real - odd.imag * twiddle.imag,
            imag: odd.real * twiddle.imag + odd.imag * twiddle.real
          };
          output[evenIdx] = {
            real: even.real + twiddledOdd.real,
            imag: even.imag + twiddledOdd.imag
          };
          output[oddIdx] = {
            real: even.real - twiddledOdd.real,
            imag: even.imag - twiddledOdd.imag
          };
        }
      }
    }
    return output;
  }
// Inverse FFT via the conjugation identity: ifft(x) = conj(fft(conj(x))) / n.
// Runs the same radix-2 butterflies as fft() on a conjugated copy, then
// conjugates and scales the result by 1/n.
private ifft(input: Complex[]): Complex[] {
  const n = input.length;
  const bits = Math.log2(n);
  // Conjugated copy of the spectrum, loaded straight into bit-reversed order.
  const samples: Complex[] = new Array(n);
  for (let i = 0; i < n; i++) {
    samples[this.bitReverse(i, bits)] = {
      real: input[i].real,
      imag: -input[i].imag
    };
  }
  // Forward butterfly stages (identical to fft()).
  for (let span = 2; span <= n; span *= 2) {
    const half = span / 2;
    const theta = -2 * Math.PI / span;
    for (let base = 0; base < n; base += span) {
      for (let k = 0; k < half; k++) {
        const wReal = Math.cos(theta * k);
        const wImag = Math.sin(theta * k);
        const top = samples[base + k];
        const bottom = samples[base + k + half];
        const tReal = bottom.real * wReal - bottom.imag * wImag;
        const tImag = bottom.real * wImag + bottom.imag * wReal;
        samples[base + k] = {
          real: top.real + tReal,
          imag: top.imag + tImag
        };
        samples[base + k + half] = {
          real: top.real - tReal,
          imag: top.imag - tImag
        };
      }
    }
  }
  // Undo the conjugation and apply the 1/n normalisation.
  const scale = 1 / n;
  for (let i = 0; i < n; i++) {
    samples[i].real *= scale;
    samples[i].imag *= -scale;
  }
  return samples;
}
// Reverse the lowest `bits` bits of n — the index permutation used by the
// radix-2 FFT/IFFT above.
private bitReverse(n: number, bits: number): number {
  let reversed = 0;
  let remaining = n;
  for (let b = 0; b < bits; b++) {
    reversed = (reversed << 1) | (remaining & 1);
    remaining >>= 1;
  }
  return reversed;
}
// Peak absolute sample value across both channels (e.g. for normalising
// output gain). Assumes both channels have the same length.
private findMaxAmplitude(left: Float32Array, right: Float32Array): number {
  const sampleCount = left.length;
  let peak = 0;
  for (let i = 0; i < sampleCount; i++) {
    // Math.max is kept (rather than manual compares) so NaN samples
    // propagate exactly as before.
    peak = Math.max(peak, Math.abs(left[i]), Math.abs(right[i]));
  }
  return peak;
}
// Root-mean-square level over both channels combined.
// NOTE(review): assumes equal-length channels; a zero-length input yields
// NaN (0/0), matching the previous behaviour.
private calculateRMS(left: Float32Array, right: Float32Array): number {
  const sampleCount = left.length;
  let energy = 0;
  for (let i = 0; i < sampleCount; i++) {
    energy += left[i] * left[i] + right[i] * right[i];
  }
  return Math.sqrt(energy / (2 * sampleCount));
}
}
// Complex number as { real, imag } — the element type produced and
// consumed by the FFT/IFFT helpers in this file.
interface Complex {
  real: number;
  imag: number;
}

View File

@ -0,0 +1,18 @@
import type { AudioProcessor } from './AudioProcessor';
export class StereoSwap implements AudioProcessor {
  /** Display name shown in the UI. */
  getName(): string {
    return 'Stereo Swap';
  }

  /** Short human-readable summary of the effect. */
  getDescription(): string {
    return 'Swaps left and right channels';
  }

  /**
   * Exchange the two channels: the left input becomes the right output
   * and vice versa. The input buffers are returned directly (no copying).
   */
  process(
    leftChannel: Float32Array,
    rightChannel: Float32Array
  ): [Float32Array, Float32Array] {
    const swapped: [Float32Array, Float32Array] = [rightChannel, leftChannel];
    return swapped;
  }
}

View File

@ -0,0 +1,73 @@
import type { AudioProcessor } from './AudioProcessor';
export class Stutter implements AudioProcessor {
  /** Display name shown in the UI. */
  getName(): string {
    return 'Stutter';
  }

  /** Short human-readable summary of the effect. */
  getDescription(): string {
    return 'Rapidly repeats small fragments with smooth crossfades';
  }

  /**
   * Chops the input into random 10–50 ms fragments and writes each one
   * 1–4 times in a row, crossfading ~2 ms at every repeat boundary so the
   * loop points do not click. Output length always equals input length.
   */
  process(
    leftChannel: Float32Array,
    rightChannel: Float32Array
  ): [Float32Array, Float32Array] {
    const totalSamples = leftChannel.length;
    const sampleRate = 44100;
    // Fragment bounds: 10–50 ms expressed in samples.
    const minFragment = Math.floor((10 / 1000) * sampleRate);
    const maxFragment = Math.floor((50 / 1000) * sampleRate);
    // ~2 ms crossfade at each repeat seam.
    const fadeLen = Math.floor(sampleRate * 0.002);
    const outLeft = new Float32Array(totalSamples);
    const outRight = new Float32Array(totalSamples);

    let src = 0; // read cursor into the input
    let dst = 0; // write cursor into the output
    while (src < totalSamples && dst < totalSamples) {
      // One random draw for the fragment size, one for the repeat count —
      // same order of Math.random() calls as the original implementation.
      const requested = Math.floor(
        Math.random() * (maxFragment - minFragment) + minFragment
      );
      const fragLen = Math.min(requested, totalSamples - src);
      const repeats = Math.floor(Math.random() * 4) + 1;

      for (let rep = 0; rep < repeats; rep++) {
        for (let i = 0; i < fragLen && dst < totalSamples; i++, dst++) {
          let l = leftChannel[src + i];
          let r = rightChannel[src + i];
          // On every repeat after the first, blend the fragment head with
          // the fragment tail so the loop point is seamless.
          if (rep > 0 && i < fadeLen) {
            const fadeIn = this.easeInOut(i / fadeLen);
            const fadeOut = 1.0 - fadeIn;
            const tailIdx = src + fragLen - fadeLen + i;
            if (tailIdx >= 0 && tailIdx < totalSamples) {
              l = leftChannel[tailIdx] * fadeOut + l * fadeIn;
              r = rightChannel[tailIdx] * fadeOut + r * fadeIn;
            }
          }
          outLeft[dst] = l;
          outRight[dst] = r;
        }
        if (dst >= totalSamples) break;
      }
      src += fragLen;
    }
    return [outLeft, outRight];
  }

  // Quadratic ease-in-out curve used for the crossfade envelope.
  private easeInOut(t: number): number {
    return t < 0.5 ? 2 * t * t : 1 - Math.pow(-2 * t + 2, 2) / 2;
  }
}

View File

@ -0,0 +1,36 @@
import type { AudioProcessor } from './AudioProcessor';
export class Tremolo implements AudioProcessor {
  /** Display name shown in the UI. */
  getName(): string {
    return 'Tremolo';
  }

  /** Short human-readable summary of the effect. */
  getDescription(): string {
    return 'Applies rhythmic volume modulation';
  }

  /**
   * Multiplies both channels by a sinusoidal gain that sweeps between
   * 0 and 1 at a random rate of 2–10 Hz (full depth), producing the
   * classic pulsing tremolo effect.
   */
  process(
    leftChannel: Float32Array,
    rightChannel: Float32Array
  ): [Float32Array, Float32Array] {
    const totalSamples = leftChannel.length;
    const sampleRate = 44100;
    const rateHz = Math.random() * 8 + 2; // 2–10 Hz modulation rate
    const depth = 1.0; // full depth: gain swings across [0, 1]
    const outLeft = new Float32Array(totalSamples);
    const outRight = new Float32Array(totalSamples);
    for (let i = 0; i < totalSamples; i++) {
      const seconds = i / sampleRate;
      const gain = 0.5 + depth * 0.5 * Math.sin(2 * Math.PI * rateHz * seconds);
      outLeft[i] = leftChannel[i] * gain;
      outRight[i] = rightChannel[i] * gain;
    }
    return [outLeft, outRight];
  }
}

View File

@ -0,0 +1,36 @@
import type { AudioProcessor } from './AudioProcessor';
import { SegmentShuffler } from './SegmentShuffler';
import { Reverser } from './Reverser';
import { StereoSwap } from './StereoSwap';
import { PanShuffler } from './PanShuffler';
import { Tremolo } from './Tremolo';
import { Chorus } from './Chorus';
import { PitchWobble } from './PitchWobble';
import { PitchShifter } from './PitchShifter';
import { MicroPitch } from './MicroPitch';
import { SpectralBlur } from './SpectralBlur';
import { SpectralShift } from './SpectralShift';
import { ConvolutionReverb } from './ConvolutionReverb';
// One shared instance of each available processor, in display order.
// getRandomProcessor() and getAllProcessors() below both read this list.
const processors: AudioProcessor[] = [
  new SegmentShuffler(),
  new Reverser(),
  new StereoSwap(),
  new PanShuffler(),
  new Tremolo(),
  new Chorus(),
  new PitchWobble(),
  new PitchShifter(),
  new MicroPitch(),
  new SpectralBlur(),
  new SpectralShift(),
  new ConvolutionReverb(),
];
/** Pick one of the registered processors uniformly at random. */
export function getRandomProcessor(): AudioProcessor {
  const index = Math.floor(Math.random() * processors.length);
  return processors[index];
}
/**
 * All registered processors, in registry order.
 * Returns a fresh array so callers cannot mutate the module-internal
 * registry (the previous version handed out the live array).
 */
export function getAllProcessors(): AudioProcessor[] {
  return [...processors];
}

View File

@ -43,8 +43,8 @@ export class AudioService {
const ctx = this.getContext();
const [leftChannel, rightChannel] = stereoData;
const buffer = ctx.createBuffer(2, leftChannel.length, DEFAULT_SAMPLE_RATE);
buffer.copyToChannel(leftChannel, 0);
buffer.copyToChannel(rightChannel, 1);
buffer.copyToChannel(new Float32Array(leftChannel), 0);
buffer.copyToChannel(new Float32Array(rightChannel), 1);
return buffer;
}