//! cagire — Forth-based live coding sequencer (terminal UI entry point).
mod app;
mod commands;
mod engine;
mod input;
mod midi;
mod model;
mod page;
mod services;
mod settings;
mod state;
mod theme;
mod views;
mod widgets;

use std::io;
use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, AtomicI64, AtomicU32, AtomicU64, Ordering};
use std::sync::Arc;
use std::time::Duration;

use clap::Parser;
use crossterm::event::{self, DisableBracketedPaste, EnableBracketedPaste, Event};
use crossterm::terminal::{
    disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen,
};
use crossterm::ExecutableCommand;
use doux::EngineMetrics;
use ratatui::prelude::CrosstermBackend;
use ratatui::Terminal;

use app::App;
use engine::{
    build_stream, spawn_sequencer, AudioStreamConfig, LinkState, ScopeBuffer, SequencerConfig,
    SpectrumBuffer,
};
use input::{handle_key, InputContext, InputResult};
use settings::Settings;
use state::audio::RefreshRate;
// Command-line options, parsed via clap's derive API.
//
// NOTE: the `///` doc comments on each field below are emitted verbatim by
// clap as `--help` text, i.e. they are runtime-visible strings — they are
// intentionally left byte-identical here. Each field also gets an
// auto-derived short flag from its first letter (-s, -o, -i, -c, -b).
#[derive(Parser)]
#[command(name = "cagire", version, about = "Forth-based live coding sequencer")]
struct Args {
    /// Directory containing audio samples to load (can be specified multiple times)
    #[arg(short, long)]
    samples: Vec<PathBuf>,

    /// Output audio device (name or index)
    #[arg(short, long)]
    output: Option<String>,

    /// Input audio device (name or index)
    #[arg(short, long)]
    input: Option<String>,

    /// Number of output channels
    #[arg(short, long)]
    channels: Option<u16>,

    /// Audio buffer size in samples
    #[arg(short, long)]
    buffer: Option<u32>,
}
/// Entry point: merges CLI args with persisted settings, spins up the Link
/// state, sequencer thread, audio stream, and MIDI connections, then runs
/// the ratatui event/render loop until the user quits.
///
/// Returns any terminal I/O error propagated via `?`.
fn main() -> io::Result<()> {
    // Lock memory BEFORE any threads are spawned to prevent page faults in RT context
    #[cfg(unix)]
    engine::realtime::lock_memory();

    let args = Args::parse();
    let settings = Settings::load();

    // Ableton-Link-style shared transport state; only enabled if configured.
    let link = Arc::new(LinkState::new(settings.link.tempo, settings.link.quantum));
    if settings.link.enabled {
        link.enable();
    }

    // Transport flag and timing nudge (microseconds), shared with the
    // sequencer thread via atomics.
    let playing = Arc::new(AtomicBool::new(true));
    let nudge_us = Arc::new(AtomicI64::new(0));

    let mut app = App::new();

    // Stage bank 0 / pattern 0 to start immediately on launch.
    app.playback
        .queued_changes
        .push(crate::state::StagedChange {
            change: engine::PatternChange::Start {
                bank: 0,
                pattern: 0,
            },
            quantization: crate::model::LaunchQuantization::Immediate,
            sync_mode: crate::model::SyncMode::Reset,
        });

    // CLI flags take precedence over persisted settings (`or` / `unwrap_or`).
    app.audio.config.output_device = args.output.or(settings.audio.output_device);
    app.audio.config.input_device = args.input.or(settings.audio.input_device);
    app.audio.config.channels = args.channels.unwrap_or(settings.audio.channels);
    app.audio.config.buffer_size = args.buffer.unwrap_or(settings.audio.buffer_size);
    app.audio.config.max_voices = settings.audio.max_voices;
    app.audio.config.lookahead_ms = settings.audio.lookahead_ms;
    app.audio.config.sample_paths = args.samples;
    app.audio.config.refresh_rate = RefreshRate::from_fps(settings.display.fps);
    app.ui.runtime_highlight = settings.display.runtime_highlight;
    app.audio.config.show_scope = settings.display.show_scope;
    app.audio.config.show_spectrum = settings.display.show_spectrum;
    app.ui.show_completion = settings.display.show_completion;
    app.ui.color_scheme = settings.display.color_scheme;
    app.ui.hue_rotation = settings.display.hue_rotation;
    app.audio.config.layout = settings.display.layout;

    // Install the configured color scheme, with optional hue rotation applied.
    let base_theme = settings.display.color_scheme.to_theme();
    let rotated =
        cagire_ratatui::theme::transform::rotate_theme(base_theme, settings.display.hue_rotation);
    theme::set(rotated);

    // Load MIDI settings: reconnect remembered devices per slot, matching by
    // name. Empty names mean "unconfigured"; connect failures are ignored
    // (best-effort — device may be unplugged).
    let outputs = midi::list_midi_outputs();
    let inputs = midi::list_midi_inputs();
    for (slot, name) in settings.midi.output_devices.iter().enumerate() {
        if !name.is_empty() {
            if let Some(idx) = outputs.iter().position(|d| &d.name == name) {
                let _ = app.midi.connect_output(slot, idx);
            }
        }
    }
    for (slot, name) in settings.midi.input_devices.iter().enumerate() {
        if !name.is_empty() {
            if let Some(idx) = inputs.iter().position(|d| &d.name == name) {
                let _ = app.midi.connect_input(slot, idx);
            }
        }
    }

    // Buffers shared between the audio/analysis side (writer) and the UI
    // loop below (reader).
    let metrics = Arc::new(EngineMetrics::default());
    let scope_buffer = Arc::new(ScopeBuffer::new());
    let spectrum_buffer = Arc::new(SpectrumBuffer::new());

    // Cross-thread audio clock: sample position, sample rate (44100 is a
    // placeholder until the stream reports the actual rate), and lookahead.
    let audio_sample_pos = Arc::new(AtomicU64::new(0));
    let sample_rate_shared = Arc::new(AtomicU32::new(44100));
    let lookahead_ms = Arc::new(AtomicU32::new(settings.audio.lookahead_ms));

    // Scan every --samples directory up front so the first stream build
    // starts with the full sample index.
    let mut initial_samples = Vec::new();
    for path in &app.audio.config.sample_paths {
        let index = doux::sampling::scan_samples_dir(path);
        app.audio.config.sample_count += index.len();
        initial_samples.extend(index);
    }

    // Desktop builds share mouse x/y/button with the sequencer as f32
    // bit-patterns stored in AtomicU32s (lock-free float sharing).
    #[cfg(feature = "desktop")]
    let mouse_x = Arc::new(AtomicU32::new(0.5_f32.to_bits()));
    #[cfg(feature = "desktop")]
    let mouse_y = Arc::new(AtomicU32::new(0.5_f32.to_bits()));
    #[cfg(feature = "desktop")]
    let mouse_down = Arc::new(AtomicU32::new(0.0_f32.to_bits()));

    let seq_config = SequencerConfig {
        audio_sample_pos: Arc::clone(&audio_sample_pos),
        sample_rate: Arc::clone(&sample_rate_shared),
        lookahead_ms: Arc::clone(&lookahead_ms),
        cc_access: Some(Arc::new(app.midi.cc_memory.clone()) as Arc<dyn crate::model::CcAccess>),
        #[cfg(feature = "desktop")]
        mouse_x: Arc::clone(&mouse_x),
        #[cfg(feature = "desktop")]
        mouse_y: Arc::clone(&mouse_y),
        #[cfg(feature = "desktop")]
        mouse_down: Arc::clone(&mouse_down),
    };

    // Spawn the sequencer thread; we keep its command sender plus the
    // receiving ends of the audio-event and MIDI-command channels.
    let (sequencer, initial_audio_rx, mut midi_rx) = spawn_sequencer(
        Arc::clone(&link),
        Arc::clone(&playing),
        Arc::clone(&app.variables),
        Arc::clone(&app.dict),
        Arc::clone(&app.rng),
        settings.link.quantum,
        Arc::clone(&app.live_keys),
        Arc::clone(&nudge_us),
        seq_config,
    );

    let stream_config = AudioStreamConfig {
        output_device: app.audio.config.output_device.clone(),
        channels: app.audio.config.channels,
        buffer_size: app.audio.config.buffer_size,
        max_voices: app.audio.config.max_voices,
    };

    // Audio failure is non-fatal: the UI still runs (with `_stream = None`)
    // so the user can adjust device settings and restart from inside the app.
    let (mut _stream, mut _analysis_handle) = match build_stream(
        &stream_config,
        initial_audio_rx,
        Arc::clone(&scope_buffer),
        Arc::clone(&spectrum_buffer),
        Arc::clone(&metrics),
        initial_samples,
        Arc::clone(&audio_sample_pos),
    ) {
        Ok((s, info, analysis)) => {
            // Record what the backend actually granted (may differ from the
            // requested config), and publish the real sample rate.
            app.audio.config.sample_rate = info.sample_rate;
            app.audio.config.host_name = info.host_name;
            app.audio.config.channels = info.channels;
            sample_rate_shared.store(info.sample_rate as u32, Ordering::Relaxed);
            (Some(s), Some(analysis))
        }
        Err(e) => {
            app.ui.set_status(format!("Audio failed: {e}"));
            app.audio.error = Some(e);
            (None, None)
        }
    };
    app.mark_all_patterns_dirty();

    // Terminal setup: raw mode + bracketed paste + alternate screen.
    // NOTE(review): if any `?` after enable_raw_mode() fails, or the loop
    // below panics, raw mode / the alternate screen are never torn down —
    // consider an RAII guard or a panic hook that restores the terminal.
    enable_raw_mode()?;
    io::stdout().execute(EnableBracketedPaste)?;
    io::stdout().execute(EnterAlternateScreen)?;
    let backend = CrosstermBackend::new(io::stdout());
    let mut terminal = Terminal::new(backend)?;
    terminal.clear()?;

    // Main UI loop: handle audio restarts, drain MIDI, refresh metrics,
    // process input, redraw.
    loop {
        // Tear down and rebuild the audio stream when the user changed
        // device/buffer settings in the UI.
        if app.audio.restart_pending {
            app.audio.restart_pending = false;
            // Drop the old stream and analysis handle before opening new ones.
            _stream = None;
            _analysis_handle = None;

            // Swap in fresh channels from the sequencer (presumably so stale
            // queued events are discarded — TODO confirm in swap_* impls).
            let new_audio_rx = sequencer.swap_audio_channel();
            midi_rx = sequencer.swap_midi_channel();

            let new_config = AudioStreamConfig {
                output_device: app.audio.config.output_device.clone(),
                channels: app.audio.config.channels,
                buffer_size: app.audio.config.buffer_size,
                max_voices: app.audio.config.max_voices,
            };

            // Re-scan sample directories (directory contents may have changed
            // since startup); this resets, not accumulates, sample_count.
            let mut restart_samples = Vec::new();
            for path in &app.audio.config.sample_paths {
                let index = doux::sampling::scan_samples_dir(path);
                restart_samples.extend(index);
            }
            app.audio.config.sample_count = restart_samples.len();

            // Restart the shared audio clock from zero.
            audio_sample_pos.store(0, Ordering::Relaxed);

            match build_stream(
                &new_config,
                new_audio_rx,
                Arc::clone(&scope_buffer),
                Arc::clone(&spectrum_buffer),
                Arc::clone(&metrics),
                restart_samples,
                Arc::clone(&audio_sample_pos),
            ) {
                Ok((new_stream, info, new_analysis)) => {
                    _stream = Some(new_stream);
                    _analysis_handle = Some(new_analysis);
                    app.audio.config.sample_rate = info.sample_rate;
                    app.audio.config.host_name = info.host_name;
                    app.audio.config.channels = info.channels;
                    sample_rate_shared.store(info.sample_rate as u32, Ordering::Relaxed);
                    app.audio.error = None;
                    app.ui.set_status("Audio restarted".to_string());
                }
                Err(e) => {
                    app.audio.error = Some(e.clone());
                    app.ui.set_status(format!("Audio failed: {e}"));
                }
            }
        }

        // Mirror the sequencer's transport flag into the UI model.
        app.playback.playing = playing.load(Ordering::Relaxed);

        // Process pending MIDI commands: drain everything the sequencer
        // queued since the last frame and forward to the connected devices.
        while let Ok(midi_cmd) = midi_rx.try_recv() {
            match midi_cmd {
                engine::MidiCommand::NoteOn {
                    device,
                    channel,
                    note,
                    velocity,
                } => {
                    app.midi.send_note_on(device, channel, note, velocity);
                }
                engine::MidiCommand::NoteOff {
                    device,
                    channel,
                    note,
                } => {
                    app.midi.send_note_off(device, channel, note);
                }
                engine::MidiCommand::CC {
                    device,
                    channel,
                    cc,
                    value,
                } => {
                    app.midi.send_cc(device, channel, cc, value);
                }
                engine::MidiCommand::PitchBend {
                    device,
                    channel,
                    value,
                } => {
                    app.midi.send_pitch_bend(device, channel, value);
                }
                engine::MidiCommand::Pressure {
                    device,
                    channel,
                    value,
                } => {
                    app.midi.send_pressure(device, channel, value);
                }
                engine::MidiCommand::ProgramChange {
                    device,
                    channel,
                    program,
                } => {
                    app.midi.send_program_change(device, channel, program);
                }
                // Standard MIDI system-realtime status bytes:
                // 0xF8 Clock, 0xFA Start, 0xFC Stop, 0xFB Continue.
                engine::MidiCommand::Clock { device } => app.midi.send_realtime(device, 0xF8),
                engine::MidiCommand::Start { device } => app.midi.send_realtime(device, 0xFA),
                engine::MidiCommand::Stop { device } => app.midi.send_realtime(device, 0xFC),
                engine::MidiCommand::Continue { device } => app.midi.send_realtime(device, 0xFB),
            }
        }

        // Snapshot engine metrics into the UI model for this frame's render.
        {
            app.metrics.active_voices = metrics.active_voices.load(Ordering::Relaxed) as usize;
            app.metrics.peak_voices = app.metrics.peak_voices.max(app.metrics.active_voices);
            app.metrics.cpu_load = metrics.load.get_load();
            app.metrics.schedule_depth = metrics.schedule_depth.load(Ordering::Relaxed) as usize;
            app.metrics.scope = scope_buffer.read();
            (app.metrics.peak_left, app.metrics.peak_right) = scope_buffer.peaks();
            app.metrics.spectrum = spectrum_buffer.read();
            // Stored in microseconds; displayed in milliseconds.
            app.metrics.nudge_ms = nudge_us.load(Ordering::Relaxed) as f64 / 1000.0;
        }

        let seq_snapshot = sequencer.snapshot();
        app.metrics.event_count = seq_snapshot.event_count;
        app.metrics.dropped_events = seq_snapshot.dropped_events;

        // Push any staged pattern changes and edited (dirty) patterns to the
        // sequencer thread.
        app.flush_queued_changes(&sequencer.cmd_tx);
        app.flush_dirty_patterns(&sequencer.cmd_tx);

        // Wait up to one refresh interval for terminal input; this also
        // paces the render loop.
        let had_event = event::poll(Duration::from_millis(
            app.audio.config.refresh_rate.millis(),
        ))?;

        if had_event {
            match event::read()? {
                Event::Key(key) => {
                    // Bundle everything key handlers may touch into one
                    // context so handle_key has a single entry point.
                    let mut ctx = InputContext {
                        app: &mut app,
                        link: &link,
                        snapshot: &seq_snapshot,
                        playing: &playing,
                        audio_tx: &sequencer.audio_tx,
                        seq_cmd_tx: &sequencer.cmd_tx,
                        nudge_us: &nudge_us,
                        lookahead_ms: &lookahead_ms,
                    };

                    // Quit is the only result that exits the loop.
                    if let InputResult::Quit = handle_key(&mut ctx, key) {
                        break;
                    }
                }
                Event::Paste(text) => {
                    // Bracketed paste goes straight into the editor, but only
                    // while the editor modal is open.
                    if matches!(app.ui.modal, state::Modal::Editor) {
                        app.editor_ctx.editor.insert_str(&text);
                    }
                }
                _ => {}
            }
        }

        // Skip redraws while paused with no input and no title animation,
        // to save CPU.
        if app.playback.playing || had_event || app.ui.show_title {
            if app.ui.show_title {
                app.ui.sparkles.tick(terminal.get_frame().area());
            }
            terminal.draw(|frame| views::render(frame, &app, &link, &seq_snapshot))?;
        }
    }

    // Restore the terminal, then stop the sequencer thread.
    disable_raw_mode()?;
    io::stdout().execute(DisableBracketedPaste)?;
    io::stdout().execute(LeaveAlternateScreen)?;

    sequencer.shutdown();

    Ok(())
}