//! cagire entry point — CLI parsing, subsystem boot, and the terminal UI event loop.
mod app;
|
|
mod init;
|
|
mod commands;
|
|
mod engine;
|
|
mod input;
|
|
mod midi;
|
|
mod model;
|
|
mod page;
|
|
mod services;
|
|
mod settings;
|
|
mod state;
|
|
mod theme;
|
|
mod views;
|
|
mod widgets;
|
|
|
|
use std::io;
|
|
use std::path::PathBuf;
|
|
use std::sync::atomic::Ordering;
|
|
use std::sync::Arc;
|
|
use std::time::{Duration, Instant};
|
|
|
|
use clap::Parser;
|
|
use crossterm::event::{self, DisableBracketedPaste, EnableBracketedPaste, Event};
|
|
use crossterm::terminal::{
|
|
disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen,
|
|
};
|
|
use crossterm::ExecutableCommand;
|
|
use ratatui::prelude::CrosstermBackend;
|
|
use ratatui::Terminal;
|
|
|
|
use engine::{build_stream, AudioStreamConfig};
|
|
use init::InitArgs;
|
|
use input::{handle_key, InputContext, InputResult};
|
|
|
|
// Command-line interface, derived by clap. The `///` doc comments below are
// user-visible: clap renders them as the per-flag help text, so they are part
// of the program's behavior and must not be reworded casually. Each field gets
// both a short (`-s`, `-o`, `-i`, `-c`, `-b`) and a long (`--samples`, ...) flag
// via `#[arg(short, long)]`; the flag names are derived from the field names.
#[derive(Parser)]
#[command(name = "cagire", version, about = "Forth-based live coding sequencer")]
struct Args {
    /// Directory containing audio samples to load (can be specified multiple times)
    // Vec<PathBuf>: clap collects every occurrence of -s/--samples.
    #[arg(short, long)]
    samples: Vec<PathBuf>,

    /// Output audio device (name or index)
    // None = let the audio backend pick its default output device.
    #[arg(short, long)]
    output: Option<String>,

    /// Input audio device (name or index)
    // None = no explicit input device selection.
    #[arg(short, long)]
    input: Option<String>,

    /// Number of output channels
    // None = use the device/config default chosen during init.
    #[arg(short, long)]
    channels: Option<u16>,

    /// Audio buffer size in samples
    // Smaller = lower latency, higher risk of underruns. None = backend default.
    #[arg(short, long)]
    buffer: Option<u32>,
}
|
|
|
|
fn main() -> io::Result<()> {
|
|
#[cfg(unix)]
|
|
engine::realtime::lock_memory();
|
|
|
|
let args = Args::parse();
|
|
|
|
let b = init::init(InitArgs {
|
|
samples: args.samples,
|
|
output: args.output,
|
|
input: args.input,
|
|
channels: args.channels,
|
|
buffer: args.buffer,
|
|
});
|
|
|
|
let mut app = b.app;
|
|
let link = b.link;
|
|
let sequencer = b.sequencer;
|
|
let playing = b.playing;
|
|
let nudge_us = b.nudge_us;
|
|
let lookahead_ms = b.lookahead_ms;
|
|
let metrics = b.metrics;
|
|
let scope_buffer = b.scope_buffer;
|
|
let spectrum_buffer = b.spectrum_buffer;
|
|
let audio_sample_pos = b.audio_sample_pos;
|
|
let sample_rate_shared = b.sample_rate_shared;
|
|
let mut _stream = b.stream;
|
|
let mut _analysis_handle = b.analysis_handle;
|
|
let mut midi_rx = b.midi_rx;
|
|
|
|
enable_raw_mode()?;
|
|
io::stdout().execute(EnableBracketedPaste)?;
|
|
io::stdout().execute(EnterAlternateScreen)?;
|
|
let backend = CrosstermBackend::new(io::stdout());
|
|
let mut terminal = Terminal::new(backend)?;
|
|
terminal.clear()?;
|
|
|
|
let mut last_frame = Instant::now();
|
|
|
|
loop {
|
|
if app.audio.restart_pending {
|
|
app.audio.restart_pending = false;
|
|
_stream = None;
|
|
_analysis_handle = None;
|
|
|
|
let new_audio_rx = sequencer.swap_audio_channel();
|
|
midi_rx = sequencer.swap_midi_channel();
|
|
|
|
let new_config = AudioStreamConfig {
|
|
output_device: app.audio.config.output_device.clone(),
|
|
channels: app.audio.config.channels,
|
|
buffer_size: app.audio.config.buffer_size,
|
|
max_voices: app.audio.config.max_voices,
|
|
};
|
|
|
|
let mut restart_samples = Vec::new();
|
|
for path in &app.audio.config.sample_paths {
|
|
let index = doux::sampling::scan_samples_dir(path);
|
|
restart_samples.extend(index);
|
|
}
|
|
app.audio.config.sample_count = restart_samples.len();
|
|
|
|
audio_sample_pos.store(0, Ordering::Relaxed);
|
|
|
|
match build_stream(
|
|
&new_config,
|
|
new_audio_rx,
|
|
Arc::clone(&scope_buffer),
|
|
Arc::clone(&spectrum_buffer),
|
|
Arc::clone(&metrics),
|
|
restart_samples,
|
|
Arc::clone(&audio_sample_pos),
|
|
) {
|
|
Ok((new_stream, info, new_analysis)) => {
|
|
_stream = Some(new_stream);
|
|
_analysis_handle = Some(new_analysis);
|
|
app.audio.config.sample_rate = info.sample_rate;
|
|
app.audio.config.host_name = info.host_name;
|
|
app.audio.config.channels = info.channels;
|
|
sample_rate_shared.store(info.sample_rate as u32, Ordering::Relaxed);
|
|
app.audio.error = None;
|
|
app.ui.set_status("Audio restarted".to_string());
|
|
}
|
|
Err(e) => {
|
|
app.audio.error = Some(e.clone());
|
|
app.ui.set_status(format!("Audio failed: {e}"));
|
|
}
|
|
}
|
|
}
|
|
|
|
app.playback.playing = playing.load(Ordering::Relaxed);
|
|
|
|
while let Ok(midi_cmd) = midi_rx.try_recv() {
|
|
match midi_cmd {
|
|
engine::MidiCommand::NoteOn {
|
|
device,
|
|
channel,
|
|
note,
|
|
velocity,
|
|
} => {
|
|
app.midi.send_note_on(device, channel, note, velocity);
|
|
}
|
|
engine::MidiCommand::NoteOff {
|
|
device,
|
|
channel,
|
|
note,
|
|
} => {
|
|
app.midi.send_note_off(device, channel, note);
|
|
}
|
|
engine::MidiCommand::CC {
|
|
device,
|
|
channel,
|
|
cc,
|
|
value,
|
|
} => {
|
|
app.midi.send_cc(device, channel, cc, value);
|
|
}
|
|
engine::MidiCommand::PitchBend {
|
|
device,
|
|
channel,
|
|
value,
|
|
} => {
|
|
app.midi.send_pitch_bend(device, channel, value);
|
|
}
|
|
engine::MidiCommand::Pressure {
|
|
device,
|
|
channel,
|
|
value,
|
|
} => {
|
|
app.midi.send_pressure(device, channel, value);
|
|
}
|
|
engine::MidiCommand::ProgramChange {
|
|
device,
|
|
channel,
|
|
program,
|
|
} => {
|
|
app.midi.send_program_change(device, channel, program);
|
|
}
|
|
engine::MidiCommand::Clock { device } => app.midi.send_realtime(device, 0xF8),
|
|
engine::MidiCommand::Start { device } => app.midi.send_realtime(device, 0xFA),
|
|
engine::MidiCommand::Stop { device } => app.midi.send_realtime(device, 0xFC),
|
|
engine::MidiCommand::Continue { device } => app.midi.send_realtime(device, 0xFB),
|
|
}
|
|
}
|
|
|
|
{
|
|
app.metrics.active_voices = metrics.active_voices.load(Ordering::Relaxed) as usize;
|
|
app.metrics.peak_voices = app.metrics.peak_voices.max(app.metrics.active_voices);
|
|
app.metrics.cpu_load = metrics.load.get_load();
|
|
app.metrics.schedule_depth = metrics.schedule_depth.load(Ordering::Relaxed) as usize;
|
|
app.metrics.scope = scope_buffer.read();
|
|
(app.metrics.peak_left, app.metrics.peak_right) = scope_buffer.peaks();
|
|
app.metrics.spectrum = spectrum_buffer.read();
|
|
app.metrics.nudge_ms = nudge_us.load(Ordering::Relaxed) as f64 / 1000.0;
|
|
}
|
|
|
|
let seq_snapshot = sequencer.snapshot();
|
|
app.metrics.event_count = seq_snapshot.event_count;
|
|
|
|
app.flush_queued_changes(&sequencer.cmd_tx);
|
|
app.flush_dirty_patterns(&sequencer.cmd_tx);
|
|
|
|
let had_event = event::poll(Duration::from_millis(
|
|
app.audio.config.refresh_rate.millis(),
|
|
))?;
|
|
|
|
if had_event {
|
|
match event::read()? {
|
|
Event::Key(key) => {
|
|
let mut ctx = InputContext {
|
|
app: &mut app,
|
|
link: &link,
|
|
snapshot: &seq_snapshot,
|
|
playing: &playing,
|
|
audio_tx: &sequencer.audio_tx,
|
|
seq_cmd_tx: &sequencer.cmd_tx,
|
|
nudge_us: &nudge_us,
|
|
lookahead_ms: &lookahead_ms,
|
|
};
|
|
|
|
if let InputResult::Quit = handle_key(&mut ctx, key) {
|
|
break;
|
|
}
|
|
}
|
|
Event::Paste(text) => {
|
|
if matches!(app.ui.modal, state::Modal::Editor) {
|
|
app.editor_ctx.editor.insert_str(&text);
|
|
}
|
|
}
|
|
_ => {}
|
|
}
|
|
}
|
|
|
|
state::effects::tick_effects(&mut app.ui, app.page);
|
|
|
|
let elapsed = last_frame.elapsed();
|
|
last_frame = Instant::now();
|
|
|
|
let effects_active = app.ui.effects.borrow().is_running()
|
|
|| app.ui.modal_fx.borrow().is_some()
|
|
|| app.ui.title_fx.borrow().is_some();
|
|
if app.playback.playing || had_event || app.ui.show_title || effects_active {
|
|
if app.ui.show_title {
|
|
app.ui.sparkles.tick(terminal.get_frame().area());
|
|
}
|
|
terminal.draw(|frame| views::render(frame, &app, &link, &seq_snapshot, elapsed))?;
|
|
}
|
|
}
|
|
|
|
disable_raw_mode()?;
|
|
io::stdout().execute(DisableBracketedPaste)?;
|
|
io::stdout().execute(LeaveAlternateScreen)?;
|
|
|
|
sequencer.shutdown();
|
|
|
|
Ok(())
|
|
}
|