//! Cagire — a terminal step sequencer with Ableton Link support.
//! Binary entry point (`src/main.rs`).

mod app;
mod commands;
mod engine;
mod input;
mod model;
mod page;
mod services;
mod settings;
mod state;
mod views;
mod widgets;
use std::io;
use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, AtomicI64, Ordering};
use std::sync::Arc;
use std::time::Duration;
use clap::Parser;
use crossterm::event::{self, DisableBracketedPaste, EnableBracketedPaste, Event};
use crossterm::terminal::{
disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen,
};
use crossterm::ExecutableCommand;
use doux::EngineMetrics;
use ratatui::prelude::CrosstermBackend;
use ratatui::Terminal;
use app::App;
use engine::{
build_stream, spawn_sequencer, AudioStreamConfig, LinkState, ScopeBuffer, SpectrumBuffer,
};
use input::{handle_key, InputContext, InputResult};
use settings::Settings;
use state::audio::RefreshRate;
#[derive(Parser)]
#[command(name = "cagire", about = "A step sequencer with Ableton Link support")]
struct Args {
/// Directory containing audio samples to load (can be specified multiple times)
#[arg(short, long)]
samples: Vec<PathBuf>,
/// Output audio device (name or index)
#[arg(short, long)]
output: Option<String>,
/// Input audio device (name or index)
#[arg(short, long)]
input: Option<String>,
/// Number of output channels
#[arg(short, long)]
channels: Option<u16>,
/// Audio buffer size in samples
#[arg(short, long)]
buffer: Option<u32>,
}
/// Entry point: parses CLI arguments, loads persisted settings, spawns the
/// sequencer and audio stream, then runs the terminal UI event loop.
///
/// Fix: the event loop now runs inside an immediately-invoked closure so that
/// any `?` error raised while the terminal is in raw mode / the alternate
/// screen (e.g. from `terminal.draw` or `event::poll`) still falls through to
/// the terminal-restore code and `sequencer.shutdown()`. Previously such an
/// error returned straight out of `main`, leaving the user's terminal broken.
fn main() -> io::Result<()> {
    let args = Args::parse();
    let settings = Settings::load();

    // Ableton Link session state, shared with the sequencer thread.
    let link = Arc::new(LinkState::new(settings.link.tempo, settings.link.quantum));
    if settings.link.enabled {
        link.enable();
    }

    // Cross-thread flags: transport state and the tempo-nudge offset (in µs).
    let playing = Arc::new(AtomicBool::new(true));
    let nudge_us = Arc::new(AtomicI64::new(0));

    let mut app = App::new();
    // Queue an immediate start of bank 0 / pattern 0 so playback begins at launch.
    app.playback
        .queued_changes
        .push(crate::state::StagedChange {
            change: engine::PatternChange::Start {
                bank: 0,
                pattern: 0,
            },
            quantization: crate::model::LaunchQuantization::Immediate,
            sync_mode: crate::model::SyncMode::Reset,
        });

    // CLI arguments take precedence over persisted settings.
    app.audio.config.output_device = args.output.or(settings.audio.output_device);
    app.audio.config.input_device = args.input.or(settings.audio.input_device);
    app.audio.config.channels = args.channels.unwrap_or(settings.audio.channels);
    app.audio.config.buffer_size = args.buffer.unwrap_or(settings.audio.buffer_size);
    app.audio.config.max_voices = settings.audio.max_voices;
    app.audio.config.sample_paths = args.samples;
    app.audio.config.refresh_rate = RefreshRate::from_fps(settings.display.fps);
    app.ui.runtime_highlight = settings.display.runtime_highlight;
    app.audio.config.show_scope = settings.display.show_scope;
    app.audio.config.show_spectrum = settings.display.show_spectrum;
    app.ui.show_completion = settings.display.show_completion;

    // Buffers shared with the audio thread for metering/visualisation.
    let metrics = Arc::new(EngineMetrics::default());
    let scope_buffer = Arc::new(ScopeBuffer::new());
    let spectrum_buffer = Arc::new(SpectrumBuffer::new());

    // Scan every sample directory up front so the stream starts preloaded.
    let mut initial_samples = Vec::new();
    for path in &app.audio.config.sample_paths {
        let index = doux::loader::scan_samples_dir(path);
        app.audio.config.sample_count += index.len();
        initial_samples.extend(index);
    }

    let (sequencer, initial_audio_rx) = spawn_sequencer(
        Arc::clone(&link),
        Arc::clone(&playing),
        Arc::clone(&app.variables),
        Arc::clone(&app.dict),
        Arc::clone(&app.rng),
        settings.link.quantum,
        Arc::clone(&app.live_keys),
        Arc::clone(&nudge_us),
    );

    let stream_config = AudioStreamConfig {
        output_device: app.audio.config.output_device.clone(),
        channels: app.audio.config.channels,
        buffer_size: app.audio.config.buffer_size,
        max_voices: app.audio.config.max_voices,
    };
    // Audio failure is non-fatal: the UI still runs and shows the error so the
    // user can pick a different device and trigger a restart.
    let (mut _stream, mut _analysis_handle) = match build_stream(
        &stream_config,
        initial_audio_rx,
        Arc::clone(&scope_buffer),
        Arc::clone(&spectrum_buffer),
        Arc::clone(&metrics),
        initial_samples,
    ) {
        Ok((s, sample_rate, analysis)) => {
            app.audio.config.sample_rate = sample_rate;
            (Some(s), Some(analysis))
        }
        Err(e) => {
            app.ui.set_status(format!("Audio failed: {e}"));
            app.audio.error = Some(e);
            (None, None)
        }
    };
    app.mark_all_patterns_dirty();

    enable_raw_mode()?;
    io::stdout().execute(EnableBracketedPaste)?;
    io::stdout().execute(EnterAlternateScreen)?;
    let backend = CrosstermBackend::new(io::stdout());
    let mut terminal = Terminal::new(backend)?;
    terminal.clear()?;

    // Run the event loop in a closure so every `?` inside lands here instead
    // of returning from `main`, guaranteeing the terminal is restored below.
    let run_result: io::Result<()> = (|| {
        loop {
            // Rebuild the audio stream when the UI requested a restart (device
            // or buffer-size change). Drop the old stream before building the
            // new one so the device is released.
            if app.audio.restart_pending {
                app.audio.restart_pending = false;
                _stream = None;
                _analysis_handle = None;
                let new_audio_rx = sequencer.swap_audio_channel();
                let new_config = AudioStreamConfig {
                    output_device: app.audio.config.output_device.clone(),
                    channels: app.audio.config.channels,
                    buffer_size: app.audio.config.buffer_size,
                    max_voices: app.audio.config.max_voices,
                };
                // Re-scan sample directories; contents may have changed.
                let mut restart_samples = Vec::new();
                for path in &app.audio.config.sample_paths {
                    let index = doux::loader::scan_samples_dir(path);
                    restart_samples.extend(index);
                }
                app.audio.config.sample_count = restart_samples.len();
                match build_stream(
                    &new_config,
                    new_audio_rx,
                    Arc::clone(&scope_buffer),
                    Arc::clone(&spectrum_buffer),
                    Arc::clone(&metrics),
                    restart_samples,
                ) {
                    Ok((new_stream, sr, new_analysis)) => {
                        _stream = Some(new_stream);
                        _analysis_handle = Some(new_analysis);
                        app.audio.config.sample_rate = sr;
                        app.audio.error = None;
                        app.ui.set_status("Audio restarted".to_string());
                    }
                    Err(e) => {
                        app.audio.error = Some(e.clone());
                        app.ui.set_status(format!("Audio failed: {e}"));
                    }
                }
            }

            // Pull fresh state from the audio/sequencer threads for display.
            app.playback.playing = playing.load(Ordering::Relaxed);
            {
                app.metrics.active_voices =
                    metrics.active_voices.load(Ordering::Relaxed) as usize;
                app.metrics.peak_voices =
                    app.metrics.peak_voices.max(app.metrics.active_voices);
                app.metrics.cpu_load = metrics.load.get_load();
                app.metrics.schedule_depth =
                    metrics.schedule_depth.load(Ordering::Relaxed) as usize;
                app.metrics.scope = scope_buffer.read();
                (app.metrics.peak_left, app.metrics.peak_right) = scope_buffer.peaks();
                app.metrics.spectrum = spectrum_buffer.read();
                app.metrics.nudge_ms = nudge_us.load(Ordering::Relaxed) as f64 / 1000.0;
            }
            let seq_snapshot = sequencer.snapshot();
            app.metrics.event_count = seq_snapshot.event_count;
            app.metrics.dropped_events = seq_snapshot.dropped_events;

            // Push pending pattern edits and launch requests to the sequencer.
            app.flush_queued_changes(&sequencer.cmd_tx);
            app.flush_dirty_patterns(&sequencer.cmd_tx);

            if app.ui.show_title {
                app.ui.sparkles.tick(terminal.get_frame().area());
            }
            terminal.draw(|frame| views::render(frame, &app, &link, &seq_snapshot))?;

            // Wait for input for at most one refresh interval; this also paces
            // the redraw rate.
            if event::poll(Duration::from_millis(
                app.audio.config.refresh_rate.millis(),
            ))? {
                match event::read()? {
                    Event::Key(key) => {
                        let mut ctx = InputContext {
                            app: &mut app,
                            link: &link,
                            snapshot: &seq_snapshot,
                            playing: &playing,
                            audio_tx: &sequencer.audio_tx,
                            seq_cmd_tx: &sequencer.cmd_tx,
                            nudge_us: &nudge_us,
                        };
                        if let InputResult::Quit = handle_key(&mut ctx, key) {
                            break;
                        }
                    }
                    // Bracketed paste goes straight into the editor modal.
                    Event::Paste(text) => {
                        if matches!(app.ui.modal, state::Modal::Editor) {
                            app.editor_ctx.editor.insert_str(&text);
                        }
                    }
                    _ => {}
                }
            }
        }
        Ok(())
    })();

    // Always restore the terminal and stop the sequencer, even if the loop
    // errored; the loop's error (if any) takes precedence over restore errors.
    let restore_result: io::Result<()> = (|| {
        disable_raw_mode()?;
        io::stdout().execute(DisableBracketedPaste)?;
        io::stdout().execute(LeaveAlternateScreen)?;
        Ok(())
    })();
    sequencer.shutdown();
    run_result.and(restore_result)
}