use iced::mouse;
use iced::widget::{self, Canvas, button, canvas, column, container, image, mouse_area, text};
use iced::{Color, Font, Length, Point, Rectangle, Size};
use iced::{Subscription, Task, Theme, time};
use std::collections::VecDeque;
use std::path::PathBuf;
use std::sync::Mutex;
use std::sync::{
    Arc,
    atomic::{AtomicBool, Ordering},
};
use std::time::{Duration, Instant};
use streamtools_core::{MicInfo, SharedLevels, spawn_mic_listener};

#[derive(Debug)]
struct Model {
    is_active: bool,
    flag: Arc<AtomicBool>,
    /// current image (if any)
    current_image: Option<image::Handle>,
    /// path to the last used image
    last_image_path: Option<PathBuf>,
    show_panel: bool,
    /// copy of the recent mic RMS values for drawing
    levels: SharedLevels,
    /// device metadata
    info: MicInfo,
    /// track the last click for double-click detection
    last_click: Option<Instant>,
    /// current audio level for border intensity (0.0 - 1.0)
    current_level: f32,
}

#[derive(Debug, Clone)]
enum Message {
    Tick,
    WindowEvent(iced::window::Event),
    ReplaceImagePressed,
    ImageChosen(Option<PathBuf>),
    ImageClicked,
    ClosePanel,
}

const JETBRAINS_MONO: Font = Font::with_name("JetBrains Mono");

/// Waveform visualizer for audio levels
#[derive(Debug)]
struct Waveform {
    levels: SharedLevels,
}

impl canvas::Program<Message> for Waveform {
    type State = ();

    /// Notes:
    ///
    /// Typical mic levels are around 0.01-0.1 RMS, so each bar is amplified
    /// 50x (clamped to 1.0) and drawn in bright cyan-green for visibility.
    fn draw(
        &self,
        _: &Self::State,
        renderer: &iced::Renderer,
        _: &Theme,
        bounds: Rectangle,
        _: mouse::Cursor,
    ) -> Vec<canvas::Geometry> {
        let mut frame = canvas::Frame::new(renderer, bounds.size());
        frame.fill_rectangle(Point::new(0.0, 0.0), bounds.size(), Color::from_rgb(0.1, 0.1, 0.12));

        if let Ok(levels) = self.levels.lock() {
            if !levels.is_empty() {
                let width = bounds.width;
                let height = bounds.height;
                let count = levels.len();
                let bar_width = width / count as f32;

                for (i, &level) in levels.iter().enumerate() {
                    let x = i as f32 * bar_width;
                    let amplified = (level * 50.0).min(1.0);
                    let bar_height = (amplified * height).min(height);
                    let y = height - bar_height;

                    frame.fill_rectangle(
                        Point::new(x, y),
                        Size::new(bar_width * 0.9, bar_height),
                        Color::from_rgb(0.2, 1.0, 0.8),
                    );
                }
            }
        }

        vec![frame.into_geometry()]
    }
}

impl Model {
    fn update(&mut self, message: Message) -> Task<Message> {
        match message {
            Message::Tick => {
                let was_active = self.is_active;
                self.is_active = self.flag.load(Ordering::Relaxed);

                if let Ok(levels) = self.levels.lock() {
                    self.current_level = levels.back().copied().unwrap_or(0.0);
                }

                if was_active != self.is_active {
                    tracing::info!("Mic activity changed: is_active={}", self.is_active);
                }

                Task::none()
            }
            Message::ImageClicked => {
                let now = Instant::now();

                if let Some(last) = self.last_click {
                    let elapsed = now.duration_since(last);
                    if elapsed < Duration::from_millis(500) {
                        // Second click within 500 ms: treat it as a double-click.
                        self.show_panel = !self.show_panel;
                        self.last_click = None;
                    } else {
                        self.last_click = Some(now);
                    }
                } else {
                    self.last_click = Some(now);
                }

                Task::none()
            }
            Message::WindowEvent(iced::window::Event::FileDropped(path)) => {
                if let Some(ext) = path.extension().and_then(|s| s.to_str()) {
                    let ext_lower = ext.to_ascii_lowercase();
                    if !["png", "jpg", "jpeg", "gif", "webp"].contains(&ext_lower.as_str()) {
                        eprintln!("Unsupported image type: {ext_lower}");
                        return Task::none();
                    }
                }

                let handle = image::Handle::from_path(&path);
                self.current_image = Some(handle);
                self.last_image_path = Some(path.clone());

                if let Some(config_file) = Self::get_config_file() {
                    if let Err(e) = std::fs::write(config_file, path.to_string_lossy().as_ref()) {
                        eprintln!("failed to write config file: {e}");
                    }
                }

                Task::none()
            }
            Message::ReplaceImagePressed => {
                let path = rfd::FileDialog::new()
                    .add_filter("Images", &["png", "jpg", "jpeg", "gif", "webp"])
                    .set_title("Choose mic indicator image")
                    .pick_file();

                Task::done(Message::ImageChosen(path))
            }
            Message::ImageChosen(Some(path)) => {
                let handle = image::Handle::from_path(&path);
                self.current_image = Some(handle);
                self.last_image_path = Some(path.clone());

                if let Some(config_file) = Self::get_config_file() {
                    let _ = std::fs::write(config_file, path.to_string_lossy().as_ref());
                }

                Task::none()
            }
            Message::ImageChosen(None) | Message::WindowEvent(_) => Task::none(),
            Message::ClosePanel => {
                self.show_panel = false;
                Task::none()
            }
        }
    }

    fn get_config_file() -> Option<PathBuf> {
        dirs::config_dir().and_then(|mut config| {
            config.push("streamtools");
            std::fs::create_dir_all(&config).ok()?;
            config.push("last_image.txt");
            Some(config)
        })
    }

    fn load_last_image() -> (Option<PathBuf>, Option<image::Handle>) {
        if let Some(config_file) = Self::get_config_file() {
            if let Ok(path) = std::fs::read_to_string(config_file) {
                let path = PathBuf::from(path.trim());
                if path.exists() {
                    let handle = image::Handle::from_path(&path);
                    return (Some(path), Some(handle));
                }
            }
        }

        (None, None)
    }

    fn new(flag: Arc<AtomicBool>, levels: SharedLevels, info: MicInfo) -> Self {
        let (last_image_path, current_image) = Self::load_last_image();

        Self {
            show_panel: false,
            is_active: false,
            flag,
            current_image,
            last_image_path,
            levels,
            info,
            last_click: None,
            current_level: 0.0,
        }
    }

    fn build_panel(&self) -> iced::Element<'_, Message> {
        let close_button = button(text("Close").font(JETBRAINS_MONO))
            .on_press(Message::ClosePanel)
            .padding(12);

        let info_text = column![
            text("Microphone Information").size(18).font(JETBRAINS_MONO),
            text(format!("Device: {}", self.info.name))
                .size(14)
                .font(JETBRAINS_MONO),
            text(format!("Sample Rate: {} Hz", self.info.sample_rate))
                .size(14)
                .font(JETBRAINS_MONO),
            text(format!("Channels: {}", self.info.channels))
                .size(14)
                .font(JETBRAINS_MONO),
        ]
        .spacing(8);

        let waveform = Canvas::new(Waveform { levels: self.levels.clone() })
            .width(Length::Fill)
            .height(Length::Fixed(150.0));

        let replace_button = button(text("Replace Image"))
            .on_press(Message::ReplaceImagePressed)
            .padding(10);

        let panel_content = column![close_button, info_text, waveform, replace_button]
            .spacing(16)
            .padding(20);

        container(panel_content)
            .width(Length::Fill)
            .height(Length::Fill)
            .padding(10)
            .style(|_: &Theme| container::Style {
                background: Some(Color::from_rgb(0.3, 0.3, 0.35).into()),
                border: iced::Border {
                    color: Color::from_rgb(0.4, 0.4, 0.45),
                    width: 1.0,
                    radius: 0.0.into(),
                },
                ..Default::default()
            })
            .into()
    }

    fn view(&self) -> iced::Element<'_, Message> {
        let image_content: iced::Element<'_, Message> = if let Some(img) = &self.current_image {
            container(
                mouse_area(widget::image(img.clone()).width(Length::Shrink).height(Length::Shrink))
                    .on_press(Message::ImageClicked),
            )
            .padding(8)
            .style(move |_: &Theme| Self::mic_style(self.current_level))
            .into()
        } else {
            widget::text("Drag an image onto this window to use it as the mic indicator.")
                .size(20)
                .into()
        };

        let image_with_border = container(image_content).padding(20);

        if self.show_panel {
            column![
                container(image_with_border)
                    .center_x(Length::Fill)
                    .width(Length::Fill)
                    .height(Length::FillPortion(3)),
                container(self.build_panel())
                    .width(Length::Fill)
                    .height(Length::FillPortion(2))
            ]
            .width(Length::Fill)
            .height(Length::Fill)
            .into()
        } else {
            container(image_with_border)
                .center_x(Length::Fill)
                .center_y(Length::Fill)
                .width(Length::Fill)
                .height(Length::Fill)
                .into()
        }
    }
    fn mic_style(level: f32) -> container::Style {
        let amplified = (level * 10.0).min(1.0);
        let emerald_green = Color::from_rgb(0.0, 0.84, 0.47);

        container::Style {
            background: None,
            border: iced::Border {
                color: if amplified > 0.01 {
                    emerald_green
                } else {
                    Color::from_rgba(0.0, 0.0, 0.0, 0.0)
                },
                width: if amplified > 0.01 { 5.0 } else { 0.0 },
                radius: 4.0.into(),
            },
            ..Default::default()
        }
    }

    fn subscription(&self) -> Subscription<Message> {
        Subscription::batch([
            time::every(std::time::Duration::from_millis(100)).map(|_| Message::Tick),
            iced::window::events().map(|(_, ev)| Message::WindowEvent(ev)),
        ])
    }
}

pub fn main() -> iced::Result {
    tracing_subscriber::fmt()
        .with_env_filter(
            tracing_subscriber::EnvFilter::from_default_env()
                .add_directive("streamtools=info".parse().unwrap()),
        )
        .init();

    tracing::info!("Starting StreamTools");
    tracing::info!("Note: On macOS, you may be prompted to grant microphone permissions");

    let flag = Arc::new(AtomicBool::new(false));
    let shared_levels: SharedLevels = Arc::new(Mutex::new(VecDeque::new()));

    let mic_info = MicInfo::new();
    tracing::info!("Microphone device: {}", mic_info.name);

    spawn_mic_listener(flag.clone(), shared_levels.clone()).expect("failed to start mic listener");
    tracing::info!("Microphone listener started successfully");

    iced::application("Mic Activity", Model::update, Model::view)
        .subscription(Model::subscription)
        .font(include_bytes!("../../fonts/JetBrainsMono-VariableFont_wght.ttf").as_slice())
        .centered()
        .run_with(move || {
            (
                Model::new(flag.clone(), shared_levels.clone(), mic_info.clone()),
                iced::Task::none(),
            )
        })
}
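
// Illustrative sketch, not part of the original source: a minimal unit test for
// the pure `Model::mic_style` helper above. It assumes the `border.width` values
// set in `mic_style` (0.0 when the 10x-amplified level is below the 0.01 cutoff,
// 5.0 otherwise); the audio and windowing code is intentionally not exercised here.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn mic_style_border_reflects_level() {
        // A silent mic (level 0.0) should produce no visible border...
        let quiet = Model::mic_style(0.0);
        assert_eq!(quiet.border.width, 0.0);

        // ...while a level well past the threshold gets the 5 px emerald border.
        let loud = Model::mic_style(0.5);
        assert_eq!(loud.border.width, 5.0);
    }
}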