//! Analytics and time-series data queries.
//!
//! This module handles database operations for generating analytics data,
//! including sparkline time-series data for record indexing activity.

use super::client::Database;
use crate::errors::DatabaseError;
use crate::models::SparklinePoint;
use std::collections::HashMap;

impl Database {
    /// Gets sparkline data for multiple slices, issuing one query per slice.
    ///
    /// Generates time-bucketed counts of indexed records for visualization.
    ///
    /// # Arguments
    /// * `slice_uris` - Slice URIs to get data for
    /// * `interval` - Time bucket size: "minute", "hour", or "day" (anything else falls back to "hour")
    /// * `duration_hours` - How many hours of history to include
    ///
    /// # Returns
    /// HashMap mapping slice_uri -> array of (timestamp, count) data points
    pub async fn get_batch_sparkline_data(
        &self,
        slice_uris: &[String],
        interval: &str,
        duration_hours: i32,
    ) -> Result<HashMap<String, Vec<SparklinePoint>>, DatabaseError> {
        use chrono::{DateTime, Duration, Utc};

        let cutoff_time = Utc::now() - Duration::hours(duration_hours as i64);
        let mut sparklines = HashMap::new();

        // Whitelist the interval before interpolating it into the query text;
        // unknown values fall back to hourly buckets.
        let interval_validated = match interval {
            "minute" => "minute",
            "day" => "day",
            _ => "hour",
        };

        for slice_uri in slice_uris {
            let query = format!(
                r#"
                SELECT date_trunc('{}', indexed_at) as bucket, COUNT(*) as count
                FROM record
                WHERE indexed_at >= $1 AND slice_uri = $2
                GROUP BY bucket
                ORDER BY bucket
                "#,
                interval_validated
            );

            let rows = sqlx::query_as::<_, (Option<DateTime<Utc>>, Option<i64>)>(&query)
                .bind(cutoff_time)
                .bind(slice_uri)
                .fetch_all(&self.pool)
                .await?;

            let data_points = rows
                .into_iter()
                .filter_map(|(bucket, count)| {
                    // `bucket` can only be NULL if `indexed_at` is NULL, which the
                    // WHERE clause already excludes; skip such rows instead of panicking.
                    bucket.map(|ts| SparklinePoint {
                        timestamp: ts.to_rfc3339(),
                        count: count.unwrap_or(0),
                    })
                })
                .collect();

            sparklines.insert(slice_uri.clone(), data_points);
        }

        Ok(sparklines)
    }
}
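
// Usage sketch (not part of this module): how a caller such as an API handler
// might fetch hour-bucketed sparklines for a set of slices. The handler name,
// the slice URI strings, and the way the `Database` value is obtained are
// assumptions for illustration only.
//
// async fn sparkline_handler(db: &Database) -> Result<(), DatabaseError> {
//     let slice_uris = vec![
//         "slice://example/one".to_string(),
//         "slice://example/two".to_string(),
//     ];
//
//     // 24 hours of history, bucketed by hour.
//     let sparklines = db
//         .get_batch_sparkline_data(&slice_uris, "hour", 24)
//         .await?;
//
//     for (uri, points) in &sparklines {
//         println!("{uri}: {} buckets", points.len());
//     }
//
//     Ok(())
// }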