//! Cache module providing flexible caching with Redis and in-memory backends.
//!
//! This module provides a unified caching interface with automatic fallback
//! from Redis to in-memory caching when Redis is unavailable.

mod in_memory;
mod redis;

use anyhow::Result;
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use std::collections::HashSet;
use tracing::{info, warn};

pub use in_memory::InMemoryCache;
pub use redis::RedisCache;

/// Generic cache trait implemented by all cache backends.
///
/// Provides async methods for get, set, delete, and bulk operations.
///
/// NOTE(review): the generic parameters in this file were stripped during
/// extraction (`Result>`, `Option)`, bare `Result`). The signatures below are
/// reconstructed from the call sites visible in this module; in particular the
/// `Result<bool>` for `ping` and `Result<String>` for `get_info` are the most
/// plausible readings — confirm against the `in_memory` and `redis` impls.
#[async_trait]
pub trait Cache: Send + Sync {
    /// Get a value from cache, returning `None` if not found or expired.
    async fn get<T>(&mut self, key: &str) -> Result<Option<T>>
    where
        T: for<'de> Deserialize<'de> + Send;

    /// Set a value in cache with optional TTL in seconds (`None` = backend default).
    async fn set<T>(&mut self, key: &str, value: &T, ttl_seconds: Option<u64>) -> Result<()>
    where
        T: Serialize + Send + Sync;

    /// Delete a key from cache.
    async fn delete(&mut self, key: &str) -> Result<()>;

    /// Set multiple key-value pairs efficiently (uses pipelining for Redis).
    async fn set_multiple<T>(&mut self, items: Vec<(&str, &T, Option<u64>)>) -> Result<()>
    where
        T: Serialize + Send + Sync;

    /// Test cache connection/health.
    async fn ping(&mut self) -> Result<bool>;

    /// Get cache info/statistics for monitoring.
    async fn get_info(&mut self) -> Result<String>;
}

/// Cache backend implementation enum to avoid trait object overhead.
pub enum CacheBackendImpl { InMemory(InMemoryCache), Redis(RedisCache), } impl CacheBackendImpl { pub async fn get(&mut self, key: &str) -> Result> where T: for<'de> Deserialize<'de> + Send, { match self { CacheBackendImpl::InMemory(cache) => cache.get(key).await, CacheBackendImpl::Redis(cache) => cache.get(key).await, } } pub async fn set(&mut self, key: &str, value: &T, ttl_seconds: Option) -> Result<()> where T: Serialize + Send + Sync, { match self { CacheBackendImpl::InMemory(cache) => cache.set(key, value, ttl_seconds).await, CacheBackendImpl::Redis(cache) => cache.set(key, value, ttl_seconds).await, } } pub async fn delete(&mut self, key: &str) -> Result<()> { match self { CacheBackendImpl::InMemory(cache) => cache.delete(key).await, CacheBackendImpl::Redis(cache) => cache.delete(key).await, } } pub async fn set_multiple(&mut self, items: Vec<(&str, &T, Option)>) -> Result<()> where T: Serialize + Send + Sync, { match self { CacheBackendImpl::InMemory(cache) => cache.set_multiple(items).await, CacheBackendImpl::Redis(cache) => cache.set_multiple(items).await, } } pub async fn ping(&mut self) -> Result { match self { CacheBackendImpl::InMemory(cache) => cache.ping().await, CacheBackendImpl::Redis(cache) => cache.ping().await, } } pub async fn get_info(&mut self) -> Result { match self { CacheBackendImpl::InMemory(cache) => cache.get_info().await, CacheBackendImpl::Redis(cache) => cache.get_info().await, } } } /// Domain-specific cache wrapper with convenience methods for Slices operations. /// /// Provides typed methods for actors, lexicons, domains, collections, auth, and DID resolution. 
pub struct SliceCache { cache: CacheBackendImpl, } impl SliceCache { pub fn new(cache: CacheBackendImpl) -> Self { Self { cache } } /// Actor cache methods (permanent cache - no TTL) pub async fn is_actor(&mut self, did: &str, slice_uri: &str) -> Result> { let key = format!("actor:{}:{}", did, slice_uri); self.cache.get::(&key).await } pub async fn cache_actor_exists(&mut self, did: &str, slice_uri: &str) -> Result<()> { let key = format!("actor:{}:{}", did, slice_uri); self.cache.set(&key, &true, None).await } pub async fn remove_actor(&mut self, did: &str, slice_uri: &str) -> Result<()> { let key = format!("actor:{}:{}", did, slice_uri); self.cache.delete(&key).await } pub async fn preload_actors(&mut self, actors: Vec<(String, String)>) -> Result<()> { if actors.is_empty() { return Ok(()); } let items: Vec<(String, bool, Option)> = actors .into_iter() .map(|(did, slice_uri)| (format!("actor:{}:{}", did, slice_uri), true, None)) .collect(); let items_ref: Vec<(&str, &bool, Option)> = items .iter() .map(|(key, value, ttl)| (key.as_str(), value, *ttl)) .collect(); self.cache.set_multiple(items_ref).await } /// Lexicon cache methods (2 hour TTL) pub async fn cache_lexicons( &mut self, slice_uri: &str, lexicons: &Vec, ) -> Result<()> { let key = format!("lexicons:{}", slice_uri); self.cache.set(&key, lexicons, Some(7200)).await } pub async fn get_lexicons( &mut self, slice_uri: &str, ) -> Result>> { let key = format!("lexicons:{}", slice_uri); self.cache.get::>(&key).await } /// Domain cache methods (4 hour TTL) pub async fn cache_slice_domain(&mut self, slice_uri: &str, domain: &str) -> Result<()> { let key = format!("domain:{}", slice_uri); self.cache.set(&key, &domain.to_string(), Some(14400)).await } pub async fn get_slice_domain(&mut self, slice_uri: &str) -> Result> { let key = format!("domain:{}", slice_uri); self.cache.get::(&key).await } /// Collections cache methods (2 hour TTL) pub async fn cache_slice_collections( &mut self, slice_uri: &str, collections: 
&HashSet, ) -> Result<()> { let key = format!("collections:{}", slice_uri); self.cache.set(&key, collections, Some(7200)).await } pub async fn get_slice_collections( &mut self, slice_uri: &str, ) -> Result>> { let key = format!("collections:{}", slice_uri); self.cache.get::>(&key).await } /// Auth cache methods (5 minute TTL) pub async fn get_cached_oauth_userinfo( &mut self, token: &str, ) -> Result> { let key = format!("oauth_userinfo:{}", token); self.cache.get(&key).await } pub async fn cache_oauth_userinfo( &mut self, token: &str, userinfo: &serde_json::Value, ttl_seconds: u64, ) -> Result<()> { let key = format!("oauth_userinfo:{}", token); self.cache.set(&key, userinfo, Some(ttl_seconds)).await } pub async fn get_cached_atproto_session( &mut self, token: &str, ) -> Result> { let key = format!("atproto_session:{}", token); self.cache.get(&key).await } pub async fn cache_atproto_session( &mut self, token: &str, session: &serde_json::Value, ttl_seconds: u64, ) -> Result<()> { let key = format!("atproto_session:{}", token); self.cache.set(&key, session, Some(ttl_seconds)).await } /// DID resolution cache methods (24 hour TTL) pub async fn get_cached_did_resolution( &mut self, did: &str, ) -> Result> { let key = format!("did_resolution:{}", did); self.cache.get(&key).await } pub async fn cache_did_resolution( &mut self, did: &str, actor_data: &serde_json::Value, ) -> Result<()> { let key = format!("did_resolution:{}", did); self.cache.set(&key, actor_data, Some(86400)).await } pub async fn invalidate_did_resolution(&mut self, did: &str) -> Result<()> { let key = format!("did_resolution:{}", did); self.cache.delete(&key).await } /// Generic get/set for custom caching needs pub async fn get(&mut self, key: &str) -> Result> where T: for<'de> Deserialize<'de> + Send, { self.cache.get(key).await } pub async fn set(&mut self, key: &str, value: &T, ttl_seconds: Option) -> Result<()> where T: Serialize + Send + Sync, { self.cache.set(key, value, ttl_seconds).await } /// 
Utility methods pub async fn ping(&mut self) -> Result { self.cache.ping().await } pub async fn get_info(&mut self) -> Result { self.cache.get_info().await } } /// Cache backend configuration enum. #[derive(Debug, Clone)] pub enum CacheBackend { InMemory { ttl_seconds: Option, }, Redis { url: String, ttl_seconds: Option, }, } /// Factory for creating cache instances with automatic Redis fallback. pub struct CacheFactory; impl CacheFactory { /// Create a cache backend, falling back to in-memory if Redis fails. pub async fn create_cache(backend: CacheBackend) -> Result { match backend { CacheBackend::InMemory { ttl_seconds } => { let ttl_display = ttl_seconds .map(|t| format!("{}s", t)) .unwrap_or_else(|| "default".to_string()); info!("Creating in-memory cache with TTL: {}", ttl_display); Ok(CacheBackendImpl::InMemory(InMemoryCache::new(ttl_seconds))) } CacheBackend::Redis { url, ttl_seconds } => { info!("Attempting to create Redis cache at: {}", url); match RedisCache::new(&url, ttl_seconds).await { Ok(redis_cache) => { info!("Created Redis cache successfully"); Ok(CacheBackendImpl::Redis(redis_cache)) } Err(e) => { warn!( error = ?e, "Failed to create Redis cache, falling back to in-memory" ); Ok(CacheBackendImpl::InMemory(InMemoryCache::new(ttl_seconds))) } } } } } /// Create a SliceCache with the specified backend. pub async fn create_slice_cache(backend: CacheBackend) -> Result { let cache = Self::create_cache(backend).await?; Ok(SliceCache::new(cache)) } }