// File: Cagire/src/main.rs (326 lines, 11 KiB, Rust)
mod app;
mod commands;
mod engine;
mod input;
mod midi;
mod model;
mod page;
mod services;
mod settings;
mod state;
mod theme;
mod views;
mod widgets;
use std::io;
use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, AtomicI64, AtomicU32, AtomicU64, Ordering};
use std::sync::Arc;
use std::time::Duration;
use clap::Parser;
use crossterm::event::{self, DisableBracketedPaste, EnableBracketedPaste, Event};
use crossterm::terminal::{
disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen,
};
use crossterm::ExecutableCommand;
use doux::EngineMetrics;
use ratatui::prelude::CrosstermBackend;
use ratatui::Terminal;
use app::App;
use engine::{
build_stream, spawn_sequencer, AudioStreamConfig, LinkState, ScopeBuffer, SequencerConfig,
SpectrumBuffer,
};
use input::{handle_key, InputContext, InputResult};
use settings::Settings;
use state::audio::RefreshRate;
// Command-line interface, parsed with clap's derive API.
// NOTE: each `///` field doc doubles as the flag's `--help` text, and each
// field name defines its `--long` flag (with a derived short flag), so both
// are part of the program's external interface and must not be renamed.
#[derive(Parser)]
#[command(name = "cagire", about = "A step sequencer with Ableton Link support")]
struct Args {
    /// Directory containing audio samples to load (can be specified multiple times)
    #[arg(short, long)]
    samples: Vec<PathBuf>,
    /// Output audio device (name or index)
    #[arg(short, long)]
    output: Option<String>,
    /// Input audio device (name or index)
    #[arg(short, long)]
    input: Option<String>,
    /// Number of output channels
    #[arg(short, long)]
    channels: Option<u16>,
    /// Audio buffer size in samples
    #[arg(short, long)]
    buffer: Option<u32>,
}
/// Application entry point.
///
/// Parses CLI arguments, merges them with persisted settings, brings up the
/// Ableton Link state, sequencer, and audio stream, then runs the terminal
/// UI loop until the user quits. Returns any terminal I/O error via `?`.
fn main() -> io::Result<()> {
    let args = Args::parse();
    let settings = Settings::load();

    // Shared Ableton Link session state (tempo/quantum), enabled per settings.
    let link = Arc::new(LinkState::new(settings.link.tempo, settings.link.quantum));
    if settings.link.enabled {
        link.enable();
    }

    // Transport flag and timing nudge (microseconds) shared with the sequencer.
    let playing = Arc::new(AtomicBool::new(true));
    let nudge_us = Arc::new(AtomicI64::new(0));

    let mut app = App::new();
    // Queue an immediate start of bank 0 / pattern 0 so playback begins at launch.
    app.playback
        .queued_changes
        .push(crate::state::StagedChange {
            change: engine::PatternChange::Start {
                bank: 0,
                pattern: 0,
            },
            quantization: crate::model::LaunchQuantization::Immediate,
            sync_mode: crate::model::SyncMode::Reset,
        });

    // CLI flags take precedence over persisted settings where both are present.
    app.audio.config.output_device = args.output.or(settings.audio.output_device);
    app.audio.config.input_device = args.input.or(settings.audio.input_device);
    app.audio.config.channels = args.channels.unwrap_or(settings.audio.channels);
    app.audio.config.buffer_size = args.buffer.unwrap_or(settings.audio.buffer_size);
    app.audio.config.max_voices = settings.audio.max_voices;
    app.audio.config.lookahead_ms = settings.audio.lookahead_ms;
    app.audio.config.sample_paths = args.samples;
    app.audio.config.refresh_rate = RefreshRate::from_fps(settings.display.fps);
    app.ui.runtime_highlight = settings.display.runtime_highlight;
    app.audio.config.show_scope = settings.display.show_scope;
    app.audio.config.show_spectrum = settings.display.show_spectrum;
    app.ui.show_completion = settings.display.show_completion;
    app.ui.flash_brightness = settings.display.flash_brightness;
    app.ui.color_scheme = settings.display.color_scheme;
    theme::set(settings.display.color_scheme.to_theme());

    // Load MIDI settings
    // Reconnect saved MIDI devices by name; stale names and connection
    // failures are deliberately ignored (`let _ =`).
    if let Some(output_name) = &settings.midi.output_device {
        let outputs = midi::list_midi_outputs();
        if let Some(idx) = outputs.iter().position(|d| &d.name == output_name) {
            let _ = app.midi.connect_output(idx);
        }
    }
    if let Some(input_name) = &settings.midi.input_device {
        let inputs = midi::list_midi_inputs();
        if let Some(idx) = inputs.iter().position(|d| &d.name == input_name) {
            let _ = app.midi.connect_input(idx);
        }
    }

    // Shared state between the audio/sequencer side and the UI thread.
    let metrics = Arc::new(EngineMetrics::default());
    let scope_buffer = Arc::new(ScopeBuffer::new());
    let spectrum_buffer = Arc::new(SpectrumBuffer::new());
    let audio_sample_pos = Arc::new(AtomicU64::new(0));
    // 44100 is only a placeholder; overwritten with the device's actual rate
    // once the stream is built (both below and on restart).
    let sample_rate_shared = Arc::new(AtomicU32::new(44100));
    let lookahead_ms = Arc::new(AtomicU32::new(settings.audio.lookahead_ms));

    // Scan every sample directory supplied on the command line, accumulating
    // both the total count (for the UI) and the combined index.
    let mut initial_samples = Vec::new();
    for path in &app.audio.config.sample_paths {
        let index = doux::sampling::scan_samples_dir(path);
        app.audio.config.sample_count += index.len();
        initial_samples.extend(index);
    }

    let seq_config = SequencerConfig {
        audio_sample_pos: Arc::clone(&audio_sample_pos),
        sample_rate: Arc::clone(&sample_rate_shared),
        lookahead_ms: Arc::clone(&lookahead_ms),
        cc_memory: Some(Arc::clone(&app.midi.cc_memory)),
    };
    // Start the sequencer; it feeds audio events and MIDI commands back to us
    // through the returned receiver channels.
    let (sequencer, initial_audio_rx, mut midi_rx) = spawn_sequencer(
        Arc::clone(&link),
        Arc::clone(&playing),
        Arc::clone(&app.variables),
        Arc::clone(&app.dict),
        Arc::clone(&app.rng),
        settings.link.quantum,
        Arc::clone(&app.live_keys),
        Arc::clone(&nudge_us),
        seq_config,
    );

    let stream_config = AudioStreamConfig {
        output_device: app.audio.config.output_device.clone(),
        channels: app.audio.config.channels,
        buffer_size: app.audio.config.buffer_size,
        max_voices: app.audio.config.max_voices,
    };
    // A failed audio stream is not fatal: the UI still runs and shows the error.
    // The leading underscores keep the stream/analysis handles alive without
    // unused-variable warnings; dropping them stops the audio.
    let (mut _stream, mut _analysis_handle) = match build_stream(
        &stream_config,
        initial_audio_rx,
        Arc::clone(&scope_buffer),
        Arc::clone(&spectrum_buffer),
        Arc::clone(&metrics),
        initial_samples,
        Arc::clone(&audio_sample_pos),
    ) {
        Ok((s, sample_rate, analysis)) => {
            app.audio.config.sample_rate = sample_rate;
            sample_rate_shared.store(sample_rate as u32, Ordering::Relaxed);
            (Some(s), Some(analysis))
        }
        Err(e) => {
            app.ui.set_status(format!("Audio failed: {e}"));
            app.audio.error = Some(e);
            (None, None)
        }
    };
    app.mark_all_patterns_dirty();

    // Terminal setup: raw mode, bracketed paste, alternate screen.
    enable_raw_mode()?;
    io::stdout().execute(EnableBracketedPaste)?;
    io::stdout().execute(EnterAlternateScreen)?;
    let backend = CrosstermBackend::new(io::stdout());
    let mut terminal = Terminal::new(backend)?;
    terminal.clear()?;

    // Main UI loop: handle pending audio restarts, drain MIDI, refresh
    // metrics, draw one frame, then poll input for one refresh interval.
    loop {
        // Rebuild the audio stream when requested (e.g. device/buffer change).
        if app.audio.restart_pending {
            app.audio.restart_pending = false;
            // Drop the old stream and analysis handle before opening new ones.
            _stream = None;
            _analysis_handle = None;
            let new_audio_rx = sequencer.swap_audio_channel();
            midi_rx = sequencer.swap_midi_channel();
            let new_config = AudioStreamConfig {
                output_device: app.audio.config.output_device.clone(),
                channels: app.audio.config.channels,
                buffer_size: app.audio.config.buffer_size,
                max_voices: app.audio.config.max_voices,
            };
            // Re-scan the sample directories so newly added files are picked up;
            // the count is reset (not accumulated) on restart.
            let mut restart_samples = Vec::new();
            for path in &app.audio.config.sample_paths {
                let index = doux::sampling::scan_samples_dir(path);
                restart_samples.extend(index);
            }
            app.audio.config.sample_count = restart_samples.len();
            audio_sample_pos.store(0, Ordering::Relaxed);
            match build_stream(
                &new_config,
                new_audio_rx,
                Arc::clone(&scope_buffer),
                Arc::clone(&spectrum_buffer),
                Arc::clone(&metrics),
                restart_samples,
                Arc::clone(&audio_sample_pos),
            ) {
                Ok((new_stream, sr, new_analysis)) => {
                    _stream = Some(new_stream);
                    _analysis_handle = Some(new_analysis);
                    app.audio.config.sample_rate = sr;
                    sample_rate_shared.store(sr as u32, Ordering::Relaxed);
                    app.audio.error = None;
                    app.ui.set_status("Audio restarted".to_string());
                }
                Err(e) => {
                    app.audio.error = Some(e.clone());
                    app.ui.set_status(format!("Audio failed: {e}"));
                }
            }
        }
        // Mirror the shared transport flag into the UI model for this frame.
        app.playback.playing = playing.load(Ordering::Relaxed);
        // Process pending MIDI commands
        while let Ok(midi_cmd) = midi_rx.try_recv() {
            match midi_cmd {
                engine::MidiCommand::NoteOn { channel, note, velocity } => {
                    app.midi.send_note_on(channel, note, velocity);
                }
                engine::MidiCommand::NoteOff { channel, note } => {
                    app.midi.send_note_off(channel, note);
                }
                engine::MidiCommand::CC { channel, cc, value } => {
                    app.midi.send_cc(channel, cc, value);
                }
            }
        }
        // Snapshot engine metrics and analysis buffers for rendering.
        {
            app.metrics.active_voices = metrics.active_voices.load(Ordering::Relaxed) as usize;
            app.metrics.peak_voices = app.metrics.peak_voices.max(app.metrics.active_voices);
            app.metrics.cpu_load = metrics.load.get_load();
            app.metrics.schedule_depth = metrics.schedule_depth.load(Ordering::Relaxed) as usize;
            app.metrics.scope = scope_buffer.read();
            (app.metrics.peak_left, app.metrics.peak_right) = scope_buffer.peaks();
            app.metrics.spectrum = spectrum_buffer.read();
            // Stored as microseconds; displayed as milliseconds.
            app.metrics.nudge_ms = nudge_us.load(Ordering::Relaxed) as f64 / 1000.0;
        }
        let seq_snapshot = sequencer.snapshot();
        app.metrics.event_count = seq_snapshot.event_count;
        app.metrics.dropped_events = seq_snapshot.dropped_events;
        // Decay the event-flash indicator each frame, then re-brighten it in
        // proportion to how many new events fired since the last frame.
        app.ui.event_flash = (app.ui.event_flash - 0.1).max(0.0);
        let new_events = app
            .metrics
            .event_count
            .saturating_sub(app.ui.last_event_count);
        if new_events > 0 {
            app.ui.event_flash = (new_events as f32 * 0.4).min(1.0);
        }
        app.ui.last_event_count = app.metrics.event_count;
        // Push queued pattern changes and edited (dirty) patterns to the sequencer.
        app.flush_queued_changes(&sequencer.cmd_tx);
        app.flush_dirty_patterns(&sequencer.cmd_tx);
        if app.ui.show_title {
            app.ui.sparkles.tick(terminal.get_frame().area());
        }
        terminal.draw(|frame| views::render(frame, &app, &link, &seq_snapshot))?;
        // Wait up to one refresh interval for terminal input; this poll also
        // paces the frame rate.
        if event::poll(Duration::from_millis(
            app.audio.config.refresh_rate.millis(),
        ))? {
            match event::read()? {
                Event::Key(key) => {
                    let mut ctx = InputContext {
                        app: &mut app,
                        link: &link,
                        snapshot: &seq_snapshot,
                        playing: &playing,
                        audio_tx: &sequencer.audio_tx,
                        seq_cmd_tx: &sequencer.cmd_tx,
                        nudge_us: &nudge_us,
                        lookahead_ms: &lookahead_ms,
                    };
                    if let InputResult::Quit = handle_key(&mut ctx, key) {
                        break;
                    }
                }
                // Bracketed paste goes straight into the editor when it is open.
                Event::Paste(text) => {
                    if matches!(app.ui.modal, state::Modal::Editor) {
                        app.editor_ctx.editor.insert_str(&text);
                    }
                }
                _ => {}
            }
        }
    }

    // Restore the terminal and stop the sequencer before exiting.
    disable_raw_mode()?;
    io::stdout().execute(DisableBracketedPaste)?;
    io::stdout().execute(LeaveAlternateScreen)?;
    sequencer.shutdown();
    Ok(())
}