Rust library to generate static websites

Compare changes

Choose any two refs to compare.

+4865 -485
+1 -1
.github/workflows/benchmark.yaml
··· 41 41 uses: actions/setup-node@v4 42 42 with: 43 43 node-version: latest 44 - cache: 'pnpm' 44 + cache: "pnpm" 45 45 46 46 - name: Install dependencies 47 47 run: pnpm install
+4 -4
.github/workflows/ci.yaml
··· 38 38 uses: actions/setup-node@v4 39 39 with: 40 40 node-version: latest 41 - cache: 'pnpm' 41 + cache: "pnpm" 42 42 43 43 - name: Install dependencies 44 44 run: pnpm install ··· 66 66 uses: actions/setup-node@v4 67 67 with: 68 68 node-version: latest 69 - cache: 'pnpm' 69 + cache: "pnpm" 70 70 71 71 - name: Install dependencies 72 72 run: pnpm install ··· 94 94 uses: actions/setup-node@v4 95 95 with: 96 96 node-version: latest 97 - cache: 'pnpm' 97 + cache: "pnpm" 98 98 99 99 - name: Install dependencies 100 100 run: pnpm install ··· 126 126 uses: actions/setup-node@v4 127 127 with: 128 128 node-version: latest 129 - cache: 'pnpm' 129 + cache: "pnpm" 130 130 131 131 - name: Install dependencies 132 132 run: pnpm install
+1 -1
.github/workflows/release.yml
··· 30 30 uses: actions/setup-node@v4 31 31 with: 32 32 node-version: latest 33 - cache: 'pnpm' 33 + cache: "pnpm" 34 34 35 35 - name: Install dependencies 36 36 run: pnpm install
+2 -6
.vscode/extensions.json
··· 1 1 { 2 - "recommendations": [ 3 - "oxc.oxc-vscode", 4 - "TypeScriptTeam.native-preview", 5 - "rust-lang.rust-analyzer" 6 - ] 7 - } 2 + "recommendations": ["oxc.oxc-vscode", "TypeScriptTeam.native-preview", "rust-lang.rust-analyzer"] 3 + }
+14 -14
.vscode/settings.json
··· 1 1 { 2 - "typescript.experimental.useTsgo": true, 3 - "editor.defaultFormatter": "oxc.oxc-vscode", 4 - "oxc.typeAware": true, 5 - "oxc.fixKind": "safe_fix", 6 - "oxc.unusedDisableDirectives": "deny", 7 - "[rust]": { 8 - "editor.defaultFormatter": "rust-lang.rust-analyzer" 9 - }, 10 - "editor.codeActionsOnSave": { 11 - "source.fixAll.oxc": "explicit" 12 - }, 13 - "biome.enabled": false, 14 - "css.lint.unknownAtRules": "ignore", 15 - } 2 + "typescript.experimental.useTsgo": true, 3 + "editor.defaultFormatter": "oxc.oxc-vscode", 4 + "oxc.typeAware": true, 5 + "oxc.fixKind": "safe_fix", 6 + "oxc.unusedDisableDirectives": "deny", 7 + "[rust]": { 8 + "editor.defaultFormatter": "rust-lang.rust-analyzer" 9 + }, 10 + "editor.codeActionsOnSave": { 11 + "source.fixAll.oxc": "explicit" 12 + }, 13 + "biome.enabled": false, 14 + "css.lint.unknownAtRules": "ignore" 15 + }
+75 -2
Cargo.lock
··· 1330 1330 checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea" 1331 1331 1332 1332 [[package]] 1333 + name = "depinfo" 1334 + version = "0.7.3" 1335 + source = "registry+https://github.com/rust-lang/crates.io-index" 1336 + checksum = "ef6dbc1a9be8240ab2bf1f337cd232ca39f361f698227fae35eff7b11690278f" 1337 + dependencies = [ 1338 + "thiserror 2.0.18", 1339 + ] 1340 + 1341 + [[package]] 1333 1342 name = "deranged" 1334 1343 version = "0.5.5" 1335 1344 source = "registry+https://github.com/rust-lang/crates.io-index" ··· 1667 1676 dependencies = [ 1668 1677 "maud", 1669 1678 "maudit", 1679 + ] 1680 + 1681 + [[package]] 1682 + name = "fixtures-incremental-build" 1683 + version = "0.1.0" 1684 + dependencies = [ 1685 + "maud", 1686 + "maudit", 1687 + "serde", 1670 1688 ] 1671 1689 1672 1690 [[package]] ··· 2574 2592 dependencies = [ 2575 2593 "base64", 2576 2594 "brk_rolldown", 2595 + "brk_rolldown_common", 2577 2596 "brk_rolldown_plugin_replace", 2578 2597 "chrono", 2579 2598 "colored 3.1.1", ··· 2592 2611 "rayon", 2593 2612 "rustc-hash", 2594 2613 "serde", 2614 + "serde_json", 2595 2615 "serde_yaml", 2596 2616 "slug", 2597 2617 "syntect", ··· 2611 2631 "chrono", 2612 2632 "clap", 2613 2633 "colored 3.1.1", 2634 + "depinfo", 2614 2635 "flate2", 2615 2636 "futures", 2616 2637 "inquire", ··· 2622 2643 "serde_json", 2623 2644 "spinach", 2624 2645 "tar", 2646 + "tempfile", 2625 2647 "tokio", 2626 2648 "tokio-util", 2649 + "toml", 2627 2650 "toml_edit 0.24.0+spec-1.1.0", 2628 2651 "tower-http", 2629 2652 "tracing", ··· 4522 4545 ] 4523 4546 4524 4547 [[package]] 4548 + name = "serde_spanned" 4549 + version = "0.6.9" 4550 + source = "registry+https://github.com/rust-lang/crates.io-index" 4551 + checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" 4552 + dependencies = [ 4553 + "serde", 4554 + ] 4555 + 4556 + [[package]] 4525 4557 name = "serde_urlencoded" 4526 4558 version = "0.7.1" 4527 4559 source = 
"registry+https://github.com/rust-lang/crates.io-index" ··· 5019 5051 ] 5020 5052 5021 5053 [[package]] 5054 + name = "toml" 5055 + version = "0.8.23" 5056 + source = "registry+https://github.com/rust-lang/crates.io-index" 5057 + checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" 5058 + dependencies = [ 5059 + "serde", 5060 + "serde_spanned", 5061 + "toml_datetime 0.6.11", 5062 + "toml_edit 0.22.27", 5063 + ] 5064 + 5065 + [[package]] 5066 + name = "toml_datetime" 5067 + version = "0.6.11" 5068 + source = "registry+https://github.com/rust-lang/crates.io-index" 5069 + checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" 5070 + dependencies = [ 5071 + "serde", 5072 + ] 5073 + 5074 + [[package]] 5022 5075 name = "toml_datetime" 5023 5076 version = "0.7.5+spec-1.1.0" 5024 5077 source = "registry+https://github.com/rust-lang/crates.io-index" ··· 5029 5082 5030 5083 [[package]] 5031 5084 name = "toml_edit" 5085 + version = "0.22.27" 5086 + source = "registry+https://github.com/rust-lang/crates.io-index" 5087 + checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" 5088 + dependencies = [ 5089 + "indexmap", 5090 + "serde", 5091 + "serde_spanned", 5092 + "toml_datetime 0.6.11", 5093 + "toml_write", 5094 + "winnow", 5095 + ] 5096 + 5097 + [[package]] 5098 + name = "toml_edit" 5032 5099 version = "0.23.10+spec-1.0.0" 5033 5100 source = "registry+https://github.com/rust-lang/crates.io-index" 5034 5101 checksum = "84c8b9f757e028cee9fa244aea147aab2a9ec09d5325a9b01e0a49730c2b5269" 5035 5102 dependencies = [ 5036 5103 "indexmap", 5037 - "toml_datetime", 5104 + "toml_datetime 0.7.5+spec-1.1.0", 5038 5105 "toml_parser", 5039 5106 "winnow", 5040 5107 ] ··· 5046 5113 checksum = "8c740b185920170a6d9191122cafef7010bd6270a3824594bff6784c04d7f09e" 5047 5114 dependencies = [ 5048 5115 "indexmap", 5049 - "toml_datetime", 5116 + "toml_datetime 0.7.5+spec-1.1.0", 5050 5117 "toml_parser", 5051 5118 "toml_writer", 5052 
5119 "winnow", ··· 5060 5127 dependencies = [ 5061 5128 "winnow", 5062 5129 ] 5130 + 5131 + [[package]] 5132 + name = "toml_write" 5133 + version = "0.1.2" 5134 + source = "registry+https://github.com/rust-lang/crates.io-index" 5135 + checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" 5063 5136 5064 5137 [[package]] 5065 5138 name = "toml_writer"
+4 -1
benchmarks/realistic-blog/src/routes/article.rs
··· 16 16 &self, 17 17 ctx: &mut DynamicRouteContext, 18 18 ) -> Pages<ArticlesParams, PaginatedContentPage<ArticleContent>> { 19 - let articles = &ctx.content.get_source::<ArticleContent>("articles").entries; 19 + let articles = ctx 20 + .content 21 + .get_source::<ArticleContent>("articles") 22 + .entries(); 20 23 21 24 let mut articles = articles.to_vec(); 22 25 articles.sort_by(|a, b| b.data(ctx).date.cmp(&a.data(ctx).date));
+3 -4
benchmarks/realistic-blog/src/routes/index.rs
··· 5 5 content::ArticleContent, 6 6 layout::layout, 7 7 routes::{ 8 - Article, Articles, 9 8 article::{ArticleParams, ArticlesParams}, 9 + Article, Articles, 10 10 }, 11 11 }; 12 12 ··· 18 18 let mut articles = ctx 19 19 .content 20 20 .get_source::<ArticleContent>("articles") 21 - .entries 22 - .iter() 23 - .collect::<Vec<_>>(); // Collect into a Vec to allow sorting 21 + .entries() 22 + .to_vec(); // Clone into a Vec to allow sorting 24 23 25 24 // Sort by date, newest first 26 25 articles.sort_by(|a, b| b.data(ctx).date.cmp(&a.data(ctx).date));
+3 -1
crates/maudit/Cargo.toml
··· 23 23 24 24 # TODO: Allow making those optional 25 25 rolldown = { package = "brk_rolldown", version = "0.8.0" } 26 + rolldown_common = { package = "brk_rolldown_common", version = "0.8.0" } 26 27 serde = { workspace = true } 28 + serde_json = "1.0" 27 29 serde_yaml = "0.9.34" 28 30 pulldown-cmark = "0.13.0" 29 31 tokio = { version = "1", features = ["macros", "rt-multi-thread"] } ··· 48 50 rayon = "1.11.0" 49 51 rapidhash = "4.2.1" 50 52 pathdiff = "0.2.3" 51 - rolldown_plugin_replace = {package = "brk_rolldown_plugin_replace", version = "0.8.0"} 53 + rolldown_plugin_replace = { package = "brk_rolldown_plugin_replace", version = "0.8.0" } 52 54 53 55 [dev-dependencies] 54 56 tempfile = "3.24.0"
+4 -8
crates/maudit/src/assets/image_cache.rs
··· 338 338 339 339 #[test] 340 340 fn test_build_options_integration() { 341 - use crate::build::options::{AssetsOptions, BuildOptions}; 341 + use crate::build::options::BuildOptions; 342 342 343 343 // Test that BuildOptions can configure the cache directory 344 344 let custom_cache = PathBuf::from("/tmp/custom_maudit_cache"); 345 345 let build_options = BuildOptions { 346 - assets: AssetsOptions { 347 - image_cache_dir: custom_cache.clone(), 348 - ..Default::default() 349 - }, 346 + cache_dir: custom_cache.clone(), 350 347 ..Default::default() 351 348 }; 352 349 353 - // Create cache with build options 354 - let cache = ImageCache::with_cache_dir(&build_options.assets.image_cache_dir); 350 + let cache = ImageCache::with_cache_dir(build_options.assets_cache_dir()); 355 351 356 352 // Verify it uses the configured directory 357 - assert_eq!(cache.get_cache_dir(), custom_cache); 353 + assert_eq!(cache.get_cache_dir(), custom_cache.join("assets")); 358 354 } 359 355 360 356 #[test]
+104 -13
crates/maudit/src/build/options.rs
··· 1 - use std::{env, path::PathBuf}; 1 + use std::{fs, path::PathBuf}; 2 2 3 3 use crate::{assets::RouteAssetsOptions, is_dev, sitemap::SitemapOptions}; 4 4 ··· 36 36 /// assets: AssetsOptions { 37 37 /// assets_dir: "_assets".into(), 38 38 /// tailwind_binary_path: "./node_modules/.bin/tailwindcss".into(), 39 - /// image_cache_dir: ".cache/maudit/images".into(), 40 39 /// ..Default::default() 41 40 /// }, 42 41 /// prefetch: PrefetchOptions { ··· 61 60 /// At the speed Maudit operates at, not cleaning the output directory may offer a significant performance improvement at the cost of potentially serving stale content. 62 61 pub clean_output_dir: bool, 63 62 63 + /// Whether to enable incremental builds. 64 + /// 65 + /// When enabled, Maudit tracks which assets are used by which routes and only rebuilds 66 + /// routes affected by changed files. This can significantly speed up rebuilds when only 67 + /// a few files have changed. 68 + /// 69 + /// Defaults to `true` in dev mode (`maudit dev`) and `false` in production builds. 70 + pub incremental: bool, 71 + 72 + /// Directory for build cache storage (incremental build state, etc.). 73 + /// 74 + /// Defaults to `target/maudit_cache/{package_name}` where `{package_name}` is derived 75 + /// from the current directory name. 76 + pub cache_dir: PathBuf, 77 + 78 + /// Directory for caching processed assets (images, etc.). 79 + /// 80 + /// If `None`, defaults to `{cache_dir}/assets`. 81 + pub assets_cache_dir: Option<PathBuf>, 82 + 64 83 pub assets: AssetsOptions, 65 84 66 85 pub prefetch: PrefetchOptions, ··· 124 143 hashing_strategy: self.assets.hashing_strategy, 125 144 } 126 145 } 146 + 147 + /// Returns the directory for caching processed assets (images, etc.). 148 + /// Uses `assets_cache_dir` if set, otherwise defaults to `{cache_dir}/assets`. 
149 + pub fn assets_cache_dir(&self) -> PathBuf { 150 + self.assets_cache_dir 151 + .clone() 152 + .unwrap_or_else(|| self.cache_dir.join("assets")) 153 + } 127 154 } 128 155 129 156 #[derive(Clone)] ··· 139 166 /// Note that this value is not automatically joined with the `output_dir` in `BuildOptions`. Use [`BuildOptions::route_assets_options()`] to get a `RouteAssetsOptions` with the correct final path. 140 167 pub assets_dir: PathBuf, 141 168 142 - /// Directory to use for image cache storage. 143 - /// Defaults to `target/maudit_cache/images`. 144 - /// 145 - /// This cache is used to store processed images and their placeholders to speed up subsequent builds. 146 - pub image_cache_dir: PathBuf, 147 - 148 169 /// Strategy to use when hashing assets for fingerprinting. 149 170 /// 150 171 /// Defaults to [`AssetHashingStrategy::Precise`] in production builds, and [`AssetHashingStrategy::FastImprecise`] in development builds. Note that this means that the cache isn't shared between dev and prod builds by default, if you have a lot of assets you may want to set this to the same value in both environments. 
··· 164 185 Self { 165 186 tailwind_binary_path: "tailwindcss".into(), 166 187 assets_dir: "_maudit".into(), 167 - image_cache_dir: { 168 - let target_dir = 169 - env::var("CARGO_TARGET_DIR").unwrap_or_else(|_| "target".to_string()); 170 - PathBuf::from(target_dir).join("maudit_cache/images") 171 - }, 172 188 hashing_strategy: if is_dev() { 173 189 AssetHashingStrategy::FastImprecise 174 190 } else { ··· 196 212 /// ``` 197 213 impl Default for BuildOptions { 198 214 fn default() -> Self { 215 + let site_name = get_site_name(); 216 + let cache_dir = find_target_dir() 217 + .unwrap_or_else(|_| PathBuf::from("target")) 218 + .join("maudit_cache") 219 + .join(&site_name); 220 + 199 221 Self { 200 222 base_url: None, 201 223 output_dir: "dist".into(), 202 224 static_dir: "static".into(), 203 225 clean_output_dir: true, 226 + incremental: is_dev(), 227 + cache_dir, 228 + assets_cache_dir: None, 204 229 prefetch: PrefetchOptions::default(), 205 230 assets: AssetsOptions::default(), 206 231 sitemap: SitemapOptions::default(), 207 232 } 208 233 } 209 234 } 235 + 236 + /// Get the site name for cache directory purposes. 237 + /// 238 + /// Uses the current executable's name (which matches the package/binary name), 239 + /// falling back to the current directory name. 240 + fn get_site_name() -> String { 241 + // Get the binary name from the current executable 242 + std::env::current_exe() 243 + .ok() 244 + .and_then(|p| p.file_name().map(|s| s.to_string_lossy().to_string())) 245 + .unwrap_or_else(|| { 246 + // Fallback to current directory name 247 + std::env::current_dir() 248 + .ok() 249 + .and_then(|p| p.file_name().map(|s| s.to_string_lossy().to_string())) 250 + .unwrap_or_else(|| "default".to_string()) 251 + }) 252 + } 253 + 254 + /// Find the target directory using multiple strategies 255 + /// 256 + /// This function tries multiple approaches to locate the target directory: 257 + /// 1. CARGO_TARGET_DIR / CARGO_BUILD_TARGET_DIR environment variables 258 + /// 2. 
Local ./target directory 259 + /// 3. Workspace root target directory (walking up to find [workspace]) 260 + /// 4. Fallback to relative "target" path 261 + fn find_target_dir() -> Result<PathBuf, std::io::Error> { 262 + // 1. Check CARGO_TARGET_DIR and CARGO_BUILD_TARGET_DIR environment variables 263 + for env_var in ["CARGO_TARGET_DIR", "CARGO_BUILD_TARGET_DIR"] { 264 + if let Ok(target_dir) = std::env::var(env_var) { 265 + let path = PathBuf::from(&target_dir); 266 + if path.exists() { 267 + return Ok(path); 268 + } 269 + } 270 + } 271 + 272 + // 2. Look for target directory in current directory 273 + let local_target = PathBuf::from("target"); 274 + if local_target.exists() { 275 + return Ok(local_target); 276 + } 277 + 278 + // 3. Try to find workspace root by looking for Cargo.toml with [workspace] 279 + let mut current = std::env::current_dir()?; 280 + loop { 281 + let cargo_toml = current.join("Cargo.toml"); 282 + if cargo_toml.exists() 283 + && let Ok(content) = fs::read_to_string(&cargo_toml) 284 + && content.contains("[workspace]") 285 + { 286 + let workspace_target = current.join("target"); 287 + if workspace_target.exists() { 288 + return Ok(workspace_target); 289 + } 290 + } 291 + 292 + // Move up to parent directory 293 + if !current.pop() { 294 + break; 295 + } 296 + } 297 + 298 + // 4. Final fallback to relative path 299 + Ok(PathBuf::from("target")) 300 + }
+1137
crates/maudit/src/build/state.rs
··· 1 + use rustc_hash::{FxHashMap, FxHashSet}; 2 + use serde::{Deserialize, Serialize}; 3 + use std::fs; 4 + use std::path::{Path, PathBuf}; 5 + 6 + /// Identifies a specific route or variant for incremental rebuilds 7 + #[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)] 8 + pub enum RouteIdentifier { 9 + /// A base route with optional page parameters 10 + /// Params are stored as a sorted Vec for hashing purposes 11 + Base { 12 + route_path: String, 13 + params: Option<Vec<(String, Option<String>)>>, 14 + }, 15 + /// A variant route with optional page parameters 16 + /// Params are stored as a sorted Vec for hashing purposes 17 + Variant { 18 + variant_id: String, 19 + variant_path: String, 20 + params: Option<Vec<(String, Option<String>)>>, 21 + }, 22 + } 23 + 24 + impl RouteIdentifier { 25 + pub fn base(route_path: String, params: Option<FxHashMap<String, Option<String>>>) -> Self { 26 + Self::Base { 27 + route_path, 28 + params: params.map(|p| { 29 + let mut sorted: Vec<_> = p.into_iter().collect(); 30 + sorted.sort_by(|a, b| a.0.cmp(&b.0)); 31 + sorted 32 + }), 33 + } 34 + } 35 + 36 + pub fn variant( 37 + variant_id: String, 38 + variant_path: String, 39 + params: Option<FxHashMap<String, Option<String>>>, 40 + ) -> Self { 41 + Self::Variant { 42 + variant_id, 43 + variant_path, 44 + params: params.map(|p| { 45 + let mut sorted: Vec<_> = p.into_iter().collect(); 46 + sorted.sort_by(|a, b| a.0.cmp(&b.0)); 47 + sorted 48 + }), 49 + } 50 + } 51 + } 52 + 53 + /// Tracks build state for incremental builds 54 + #[derive(Debug, Default, Serialize, Deserialize)] 55 + pub struct BuildState { 56 + /// Maps asset paths to routes that use them 57 + /// Key: canonicalized asset path 58 + /// Value: set of routes using this asset 59 + pub asset_to_routes: FxHashMap<PathBuf, FxHashSet<RouteIdentifier>>, 60 + 61 + /// Maps source file paths to routes defined in them 62 + /// Key: canonicalized source file path (e.g., src/pages/index.rs) 63 + /// Value: set 
of routes defined in this source file 64 + pub source_to_routes: FxHashMap<PathBuf, FxHashSet<RouteIdentifier>>, 65 + 66 + /// Maps content file paths to routes that use them 67 + /// Key: canonicalized content file path (e.g., content/articles/hello.md) 68 + /// Value: set of routes using this specific content file 69 + /// This provides granular tracking - if only hello.md changes, only routes 70 + /// that accessed hello.md need to be rebuilt. 71 + pub content_file_to_routes: FxHashMap<PathBuf, FxHashSet<RouteIdentifier>>, 72 + 73 + /// Maps content file paths to the content source that owns them 74 + /// Key: canonicalized content file path (e.g., content/articles/hello.md) 75 + /// Value: content source name (e.g., "articles") 76 + /// This allows selective re-initialization of only the content sources 77 + /// whose files have changed. 78 + pub content_file_to_source: FxHashMap<PathBuf, String>, 79 + 80 + /// Stores all bundler input paths from the last build 81 + /// This needs to be preserved to ensure consistent bundling 82 + pub bundler_inputs: Vec<String>, 83 + } 84 + 85 + impl BuildState { 86 + pub fn new() -> Self { 87 + Self::default() 88 + } 89 + 90 + /// Load build state from disk cache 91 + pub fn load(cache_dir: &Path) -> Result<Self, Box<dyn std::error::Error>> { 92 + let state_path = cache_dir.join("build_state.json"); 93 + 94 + if !state_path.exists() { 95 + return Ok(Self::new()); 96 + } 97 + 98 + let content = fs::read_to_string(&state_path)?; 99 + let state: BuildState = serde_json::from_str(&content)?; 100 + Ok(state) 101 + } 102 + 103 + /// Save build state to disk cache 104 + pub fn save(&self, cache_dir: &Path) -> Result<(), Box<dyn std::error::Error>> { 105 + fs::create_dir_all(cache_dir)?; 106 + let state_path = cache_dir.join("build_state.json"); 107 + let content = serde_json::to_string_pretty(self)?; 108 + fs::write(state_path, content)?; 109 + Ok(()) 110 + } 111 + 112 + /// Add an asset->route mapping 113 + pub fn track_asset(&mut 
self, asset_path: PathBuf, route_id: RouteIdentifier) { 114 + self.asset_to_routes 115 + .entry(asset_path) 116 + .or_default() 117 + .insert(route_id); 118 + } 119 + 120 + /// Add a source file->route mapping 121 + /// This tracks which .rs file defines which routes for incremental rebuilds 122 + pub fn track_source_file(&mut self, source_path: PathBuf, route_id: RouteIdentifier) { 123 + self.source_to_routes 124 + .entry(source_path) 125 + .or_default() 126 + .insert(route_id); 127 + } 128 + 129 + /// Add a content file->route mapping 130 + /// This tracks which specific content files are used by which routes for incremental rebuilds. 131 + /// This provides granular tracking - only routes that actually accessed a specific file 132 + /// will be rebuilt when that file changes. 133 + /// 134 + /// The file path is canonicalized before storage to ensure consistent lookups when 135 + /// comparing against absolute paths from the file watcher. 136 + pub fn track_content_file(&mut self, file_path: PathBuf, route_id: RouteIdentifier) { 137 + // Canonicalize the path to ensure consistent matching with absolute paths from the watcher 138 + let canonical_path = file_path.canonicalize().unwrap_or(file_path); 139 + self.content_file_to_routes 140 + .entry(canonical_path) 141 + .or_default() 142 + .insert(route_id); 143 + } 144 + 145 + /// Add a content file->source mapping 146 + /// This tracks which content source owns each file, allowing selective re-initialization 147 + /// of only the sources whose files have changed. 148 + /// 149 + /// The file path is canonicalized before storage to ensure consistent lookups. 150 + pub fn track_content_file_source(&mut self, file_path: PathBuf, source_name: String) { 151 + let canonical_path = file_path.canonicalize().unwrap_or(file_path); 152 + self.content_file_to_source 153 + .insert(canonical_path, source_name); 154 + } 155 + 156 + /// Get the names of content sources that have files in the changed files list. 
157 + /// Returns `None` if any changed content file is not tracked (new file), indicating 158 + /// that all content sources should be re-initialized. 159 + /// 160 + /// Only considers files that look like content files (have common content extensions). 161 + pub fn get_affected_content_sources( 162 + &self, 163 + changed_files: &[PathBuf], 164 + ) -> Option<FxHashSet<String>> { 165 + let content_extensions = ["md", "mdx", "yaml", "yml", "json", "toml"]; 166 + let mut affected_sources = FxHashSet::default(); 167 + 168 + for changed_file in changed_files { 169 + // Skip files that don't look like content files 170 + let is_content_file = changed_file 171 + .extension() 172 + .and_then(|ext| ext.to_str()) 173 + .map(|ext| content_extensions.contains(&ext)) 174 + .unwrap_or(false); 175 + 176 + if !is_content_file { 177 + continue; 178 + } 179 + 180 + // Try to find the source for this file 181 + let canonical = changed_file.canonicalize().ok(); 182 + 183 + let source = canonical 184 + .as_ref() 185 + .and_then(|c| self.content_file_to_source.get(c)) 186 + .or_else(|| self.content_file_to_source.get(changed_file)); 187 + 188 + match source { 189 + Some(source_name) => { 190 + affected_sources.insert(source_name.clone()); 191 + } 192 + None => { 193 + // Unknown content file - could be a new file 194 + // Fall back to re-initializing all sources 195 + return None; 196 + } 197 + } 198 + } 199 + 200 + Some(affected_sources) 201 + } 202 + 203 + /// Get all routes affected by changes to specific files. 204 + /// 205 + /// Returns `Some(routes)` if all changed files were found in the mappings, 206 + /// or `None` if any changed file is untracked (meaning we need a full rebuild). 207 + /// 208 + /// This handles the case where files like those referenced by `include_str!()` 209 + /// are not tracked at the route level - when these change, we fall back to 210 + /// rebuilding all routes to ensure correctness. 
211 + /// 212 + /// Note: Existing directories are not considered "untracked" - they are checked 213 + /// via prefix matching, but a new/unknown directory won't trigger a full rebuild. 214 + pub fn get_affected_routes( 215 + &self, 216 + changed_files: &[PathBuf], 217 + ) -> Option<FxHashSet<RouteIdentifier>> { 218 + let mut affected_routes = FxHashSet::default(); 219 + let mut has_untracked_file = false; 220 + 221 + for changed_file in changed_files { 222 + let mut file_was_tracked = false; 223 + 224 + // Canonicalize the changed file path for consistent comparison 225 + // All asset paths in asset_to_routes are stored as canonical paths 226 + let canonical_changed = changed_file.canonicalize().ok(); 227 + 228 + // Check source file mappings first (for .rs files) 229 + if let Some(canonical) = &canonical_changed 230 + && let Some(routes) = self.source_to_routes.get(canonical) 231 + { 232 + affected_routes.extend(routes.iter().cloned()); 233 + file_was_tracked = true; 234 + // Continue to also check asset mappings (a file could be both) 235 + } 236 + 237 + // Also check with original path for source files 238 + if let Some(routes) = self.source_to_routes.get(changed_file) { 239 + affected_routes.extend(routes.iter().cloned()); 240 + file_was_tracked = true; 241 + } 242 + 243 + // Try exact match with canonical path for assets 244 + if let Some(canonical) = &canonical_changed 245 + && let Some(routes) = self.asset_to_routes.get(canonical) 246 + { 247 + affected_routes.extend(routes.iter().cloned()); 248 + file_was_tracked = true; 249 + } 250 + 251 + // Fallback: try exact match with original path (shouldn't normally match) 252 + if let Some(routes) = self.asset_to_routes.get(changed_file) { 253 + affected_routes.extend(routes.iter().cloned()); 254 + file_was_tracked = true; 255 + } 256 + 257 + // Check if this is a content file with direct file->route tracking 258 + if let Some(canonical) = &canonical_changed 259 + && let Some(routes) = 
self.content_file_to_routes.get(canonical) 260 + { 261 + affected_routes.extend(routes.iter().cloned()); 262 + file_was_tracked = true; 263 + } 264 + 265 + // Also check with original path for content files 266 + if let Some(routes) = self.content_file_to_routes.get(changed_file) { 267 + affected_routes.extend(routes.iter().cloned()); 268 + file_was_tracked = true; 269 + } 270 + 271 + // Directory prefix check: find all routes using assets within this directory. 272 + // This handles two cases: 273 + // 1. A directory was modified - rebuild all routes using assets in that dir 274 + // 2. A directory was renamed/deleted - the old path no longer exists but we 275 + // still need to rebuild routes that used assets under that path 276 + // 277 + // We do this check if: 278 + // - The path currently exists as a directory, OR 279 + // - The path doesn't exist (could be a deleted/renamed directory) 280 + let is_existing_directory = changed_file.is_dir(); 281 + let path_does_not_exist = !changed_file.exists(); 282 + 283 + if is_existing_directory || path_does_not_exist { 284 + // Use original path for prefix matching (canonical won't exist for deleted dirs) 285 + for (asset_path, routes) in &self.asset_to_routes { 286 + if asset_path.starts_with(changed_file) { 287 + affected_routes.extend(routes.iter().cloned()); 288 + file_was_tracked = true; 289 + } 290 + } 291 + // Also check source files for directory prefix 292 + for (source_path, routes) in &self.source_to_routes { 293 + if source_path.starts_with(changed_file) { 294 + affected_routes.extend(routes.iter().cloned()); 295 + file_was_tracked = true; 296 + } 297 + } 298 + // Also check content files for directory prefix 299 + for (content_path, routes) in &self.content_file_to_routes { 300 + if content_path.starts_with(changed_file) { 301 + affected_routes.extend(routes.iter().cloned()); 302 + file_was_tracked = true; 303 + } 304 + } 305 + } 306 + 307 + // Flag as untracked (triggering full rebuild) if: 308 + // 1. 
The file wasn't found in any mapping, AND 309 + // 2. It's not a currently-existing directory (new directories are OK to ignore) 310 + // 311 + // For non-existent paths that weren't matched: 312 + // - If the path has a file extension, treat it as a deleted file โ†’ full rebuild 313 + // - If the path has no extension, it might be a deleted directory โ†’ allow 314 + // (we already checked prefix matching above) 315 + // 316 + // This is conservative: we'd rather rebuild too much than too little. 317 + if !file_was_tracked && !is_existing_directory { 318 + if path_does_not_exist { 319 + // For deleted paths, check if it looks like a file (has extension) 320 + // If it has an extension, it was probably a file โ†’ trigger full rebuild 321 + // If no extension, it might have been a directory โ†’ don't trigger 322 + let has_extension = changed_file 323 + .extension() 324 + .map(|ext| !ext.is_empty()) 325 + .unwrap_or(false); 326 + 327 + if has_extension { 328 + has_untracked_file = true; 329 + } 330 + } else { 331 + // Path exists but wasn't tracked โ†’ definitely untracked file 332 + has_untracked_file = true; 333 + } 334 + } 335 + } 336 + 337 + if has_untracked_file { 338 + // Some files weren't tracked - caller should do a full rebuild 339 + None 340 + } else { 341 + Some(affected_routes) 342 + } 343 + } 344 + 345 + /// Clear all tracked data (for full rebuild) 346 + pub fn clear(&mut self) { 347 + self.asset_to_routes.clear(); 348 + self.source_to_routes.clear(); 349 + self.content_file_to_routes.clear(); 350 + self.content_file_to_source.clear(); 351 + self.bundler_inputs.clear(); 352 + } 353 + 354 + /// Clear the content file to routes mapping. 355 + /// This should be called before re-tracking content files after content sources are re-initialized. 356 + pub fn clear_content_file_mappings(&mut self) { 357 + self.content_file_to_routes.clear(); 358 + } 359 + 360 + /// Clear content file mappings for specific sources. 
361 + /// This removes both file->routes and file->source mappings for files owned by the given sources. 362 + /// Called when selectively re-initializing specific content sources. 363 + pub fn clear_content_mappings_for_sources(&mut self, source_names: &FxHashSet<String>) { 364 + // Find all files that belong to the specified sources 365 + let files_to_remove: Vec<PathBuf> = self 366 + .content_file_to_source 367 + .iter() 368 + .filter(|(_, source)| source_names.contains(*source)) 369 + .map(|(path, _)| path.clone()) 370 + .collect(); 371 + 372 + // Remove file->source mappings only 373 + // We DON'T clear file->routes mappings here because: 374 + // 1. Routes not being rebuilt should keep their mappings 375 + // 2. Routes being rebuilt will have their mappings cleared separately 376 + // via clear_content_file_mappings_for_routes() 377 + for file in &files_to_remove { 378 + self.content_file_to_source.remove(file); 379 + } 380 + } 381 + 382 + /// Remove content file mappings for specific routes. 383 + /// This is used during incremental builds to clear only the mappings for routes 384 + /// that will be rebuilt, preserving mappings for routes that won't change. 385 + pub fn clear_content_file_mappings_for_routes(&mut self, routes: &FxHashSet<RouteIdentifier>) { 386 + for routes_set in self.content_file_to_routes.values_mut() { 387 + routes_set.retain(|route| !routes.contains(route)); 388 + } 389 + // Remove any entries that have no routes left 390 + self.content_file_to_routes 391 + .retain(|_, routes_set| !routes_set.is_empty()); 392 + } 393 + 394 + /// Check if a file path is a known content file. 395 + /// This is used to determine if a new file might be a content file. 
    /// Returns `true` if `file_path` is tracked as a content file, checking the
    /// path as given first and falling back to its canonicalized form.
    #[allow(dead_code)] // Used in tests and potentially useful for debugging
    pub fn is_known_content_file(&self, file_path: &Path) -> bool {
        if self.content_file_to_routes.contains_key(file_path) {
            return true;
        }

        // Try with canonicalized path
        if let Ok(canonical) = file_path.canonicalize() {
            return self.content_file_to_routes.contains_key(&canonical);
        }

        false
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    /// Convenience constructor for a base route identifier used across tests.
    fn make_route(path: &str) -> RouteIdentifier {
        RouteIdentifier::base(path.to_string(), None)
    }

    #[test]
    fn test_get_affected_routes_exact_match() {
        let mut state = BuildState::new();
        let asset_path = PathBuf::from("/project/src/assets/logo.png");
        let route = make_route("/");

        state.track_asset(asset_path.clone(), route.clone());

        // Exact match should work and return Some
        let affected = state.get_affected_routes(&[asset_path]).unwrap();
        assert_eq!(affected.len(), 1);
        assert!(affected.contains(&route));
    }

    #[test]
    fn test_get_affected_routes_untracked_file() {
        use std::fs;
        use tempfile::TempDir;

        let mut state = BuildState::new();

        // Create temp files
        let temp_dir = TempDir::new().unwrap();
        let tracked_file = temp_dir.path().join("logo.png");
        let untracked_file = temp_dir.path().join("other.png");
        fs::write(&tracked_file, "tracked").unwrap();
        fs::write(&untracked_file, "untracked").unwrap();

        let route = make_route("/");
        state.track_asset(tracked_file.clone(), route);

        // Untracked file that EXISTS should return None (triggers full rebuild)
        let affected = state.get_affected_routes(&[untracked_file]);
        assert!(affected.is_none());
    }

    #[test]
    fn test_get_affected_routes_mixed_tracked_untracked() {
        use std::fs;
        use tempfile::TempDir;

        let mut state = BuildState::new();

        // Create temp files
        let temp_dir = TempDir::new().unwrap();
        let tracked_file = temp_dir.path().join("logo.png");
        let untracked_file = temp_dir.path().join("other.png");
        fs::write(&tracked_file, "tracked").unwrap();
        fs::write(&untracked_file, "untracked").unwrap();

        let route = make_route("/");
        state.track_asset(tracked_file.canonicalize().unwrap(), route);

        // If any file is untracked, return None (even if some are tracked)
        let affected = state.get_affected_routes(&[tracked_file, untracked_file]);
        assert!(affected.is_none());
    }

    #[test]
    fn test_get_affected_routes_deleted_directory() {
        let mut state = BuildState::new();

        // Track assets under a directory path
        let asset1 = PathBuf::from("/project/src/assets/icons/logo.png");
        let asset2 = PathBuf::from("/project/src/assets/icons/favicon.ico");
        let asset3 = PathBuf::from("/project/src/assets/styles.css");
        let route1 = make_route("/");
        let route2 = make_route("/about");

        state.track_asset(asset1, route1.clone());
        state.track_asset(asset2, route1.clone());
        state.track_asset(asset3, route2.clone());

        // Simulate a deleted/renamed directory (path doesn't exist)
        // The "icons" directory was renamed, so the old path doesn't exist
        let deleted_dir = PathBuf::from("/project/src/assets/icons");

        // Since the path doesn't exist, it should check prefix matching
        let affected = state.get_affected_routes(&[deleted_dir]).unwrap();

        // Should find route1 (uses assets under /icons/) but not route2
        assert_eq!(affected.len(), 1);
        assert!(affected.contains(&route1));
    }

    #[test]
    fn test_get_affected_routes_multiple_routes_same_asset() {
        let mut state = BuildState::new();
        let asset_path = PathBuf::from("/project/src/assets/shared.css");
        let route1 = make_route("/");
        let route2 = make_route("/about");

        state.track_asset(asset_path.clone(), route1.clone());
        state.track_asset(asset_path.clone(), route2.clone());

        let affected = state.get_affected_routes(&[asset_path]).unwrap();
        assert_eq!(affected.len(), 2);
        assert!(affected.contains(&route1));
        assert!(affected.contains(&route2));
    }

    #[test]
    fn test_get_affected_routes_source_file() {
        let mut state = BuildState::new();
        let source_path = PathBuf::from("/project/src/pages/index.rs");
        let route1 = make_route("/");
        let route2 = make_route("/about");

        // Track routes to their source files
        state.track_source_file(source_path.clone(), route1.clone());
        state.track_source_file(source_path.clone(), route2.clone());

        // When the source file changes, both routes should be affected
        let affected = state.get_affected_routes(&[source_path]).unwrap();
        assert_eq!(affected.len(), 2);
        assert!(affected.contains(&route1));
        assert!(affected.contains(&route2));
    }

    #[test]
    fn test_get_affected_routes_source_file_only_matching() {
        let mut state = BuildState::new();
        let source_index = PathBuf::from("/project/src/pages/index.rs");
        let source_about = PathBuf::from("/project/src/pages/about.rs");
        let route_index = make_route("/");
        let route_about = make_route("/about");

        state.track_source_file(source_index.clone(), route_index.clone());
        state.track_source_file(source_about.clone(), route_about.clone());

        // Changing only index.rs should only affect the index route
        let affected = state.get_affected_routes(&[source_index]).unwrap();
        assert_eq!(affected.len(), 1);
        assert!(affected.contains(&route_index));
        assert!(!affected.contains(&route_about));
    }

    #[test]
    fn test_clear_also_clears_source_files() {
        let mut state = BuildState::new();
        let source_path = PathBuf::from("/project/src/pages/index.rs");
        let asset_path = PathBuf::from("/project/src/assets/logo.png");
        let route = make_route("/");

        state.track_source_file(source_path.clone(), route.clone());
        state.track_asset(asset_path.clone(), route.clone());

        assert!(!state.source_to_routes.is_empty());
        assert!(!state.asset_to_routes.is_empty());

        state.clear();

        assert!(state.source_to_routes.is_empty());
        assert!(state.asset_to_routes.is_empty());
    }

    #[test]
    fn test_get_affected_routes_new_directory_not_untracked() {
        use std::fs;
        use tempfile::TempDir;

        let mut state = BuildState::new();

        // Create a temporary directory to simulate the "new directory" scenario
        let temp_dir = TempDir::new().unwrap();
        let new_dir = temp_dir.path().join("new-folder");
        fs::create_dir(&new_dir).unwrap();

        // Track some asset under a different path
        let asset_path = PathBuf::from("/project/src/assets/logo.png");
        let route = make_route("/");
        state.track_asset(asset_path.clone(), route.clone());

        // When a new directory appears (e.g., from renaming another folder),
        // it should NOT trigger a full rebuild (return None), even though
        // we don't have any assets tracked under it.
        let affected = state.get_affected_routes(&[new_dir]);

        // Should return Some (not None), meaning we don't trigger full rebuild
        // The set should be empty since no assets are under this new directory
        assert!(
            affected.is_some(),
            "New directory should not trigger full rebuild"
        );
        assert!(affected.unwrap().is_empty());
    }

    #[test]
    fn test_get_affected_routes_folder_rename_scenario() {
        use std::fs;
        use tempfile::TempDir;

        let mut state = BuildState::new();

        // Create temp directories to simulate folder rename
        let temp_dir = TempDir::new().unwrap();
        let new_dir = temp_dir.path().join("icons-renamed");
        fs::create_dir(&new_dir).unwrap();

        // Track assets under the OLD folder path (which no longer exists)
        let old_dir = PathBuf::from("/project/src/assets/icons");
        let asset1 = PathBuf::from("/project/src/assets/icons/logo.png");
        let route = make_route("/blog");
        state.track_asset(asset1, route.clone());

        // Simulate folder rename: old path doesn't exist, new path is a directory
        // Both paths are passed as "changed"
        let affected = state.get_affected_routes(&[old_dir, new_dir]);

        // Should return Some (not None) - we found the affected route via prefix matching
        // and the new directory doesn't trigger "untracked file" behavior
        assert!(
            affected.is_some(),
            "Folder rename should not trigger full rebuild"
        );
        let routes = affected.unwrap();
        assert_eq!(routes.len(), 1);
        assert!(routes.contains(&route));
    }

    #[test]
    fn test_get_affected_routes_deleted_untracked_file() {
        let mut state = BuildState::new();

        // Track some assets
        let tracked_asset = PathBuf::from("/project/src/assets/logo.png");
        let route = make_route("/");
        state.track_asset(tracked_asset, route);

        // Simulate a deleted file that was NEVER tracked
        // (e.g., a file used via include_str! that we don't know about)
        // This path doesn't exist and isn't in any mapping
        let deleted_untracked_file = PathBuf::from("/project/src/content/data.txt");

        let affected = state.get_affected_routes(&[deleted_untracked_file]);

        // Since the deleted path has a file extension (.txt), we treat it as
        // a deleted file that might have been a dependency we don't track.
        // We should trigger a full rebuild (return None) to be safe.
        assert!(
            affected.is_none(),
            "Deleted untracked file with extension should trigger full rebuild"
        );
    }

    #[test]
    fn test_get_affected_routes_deleted_untracked_directory() {
        let mut state = BuildState::new();

        // Track some assets
        let tracked_asset = PathBuf::from("/project/src/assets/logo.png");
        let route = make_route("/");
        state.track_asset(tracked_asset, route);

        // Simulate a deleted directory that was NEVER tracked
        // This path doesn't exist, isn't in any mapping, and has no extension
        let deleted_untracked_dir = PathBuf::from("/project/src/content");

        let affected = state.get_affected_routes(&[deleted_untracked_dir]);

        // Since the path has no extension, it might have been a directory.
        // We already did prefix matching (found nothing), so we allow this
        // without triggering a full rebuild.
        assert!(
            affected.is_some(),
            "Deleted path without extension (possible directory) should not trigger full rebuild"
        );
        assert!(affected.unwrap().is_empty());
    }

    #[test]
    fn test_get_affected_routes_deleted_tracked_file() {
        use std::fs;
        use tempfile::TempDir;

        let mut state = BuildState::new();

        // Create a temp file, track it, then delete it
        let temp_dir = TempDir::new().unwrap();
        let tracked_file = temp_dir.path().join("logo.png");
        fs::write(&tracked_file, "content").unwrap();

        let canonical_path = tracked_file.canonicalize().unwrap();
        let route = make_route("/");
        state.track_asset(canonical_path.clone(), route.clone());

        // Now delete the file
        fs::remove_file(&tracked_file).unwrap();

        // The file no longer exists, but its canonical path is still in our mapping
        // When we get the change event, notify gives us the original path
        let affected = state.get_affected_routes(std::slice::from_ref(&tracked_file));

        // This SHOULD find the route because we track by canonical path
        // and the original path should match via the mapping lookup
        println!("Result for deleted tracked file: {:?}", affected);

        // The path doesn't exist anymore, so canonicalize() fails.
        // We fall back to prefix matching, but exact path matching on
        // the non-canonical path should still work if stored that way.
        // Let's check what actually happens...
        match affected {
            Some(routes) => {
                // If we found routes, great - the system works
                assert!(
                    routes.contains(&route),
                    "Should find the route for deleted tracked file"
                );
            }
            None => {
                // If None, that means we triggered a full rebuild, which is also safe
                // This happens because the file doesn't exist and wasn't found in mappings
                println!("Deleted tracked file triggered full rebuild (safe behavior)");
            }
        }
    }

    #[test]
    fn test_track_content_file() {
        let mut state = BuildState::new();
        let route = make_route("/");
        let content_file = PathBuf::from("/project/content/articles/hello.md");

        state.track_content_file(content_file.clone(), route.clone());

        assert_eq!(state.content_file_to_routes.len(), 1);
        assert!(state.content_file_to_routes.contains_key(&content_file));
        assert!(state.content_file_to_routes[&content_file].contains(&route));
    }

    #[test]
    fn test_track_content_file_multiple_routes() {
        let mut state = BuildState::new();
        let route1 = make_route("/");
        let route2 = make_route("/blog");
        let content_file = PathBuf::from("/project/content/articles/hello.md");

        state.track_content_file(content_file.clone(), route1.clone());
        state.track_content_file(content_file.clone(), route2.clone());

        assert_eq!(state.content_file_to_routes.len(), 1);
        assert_eq!(state.content_file_to_routes[&content_file].len(), 2);
        assert!(state.content_file_to_routes[&content_file].contains(&route1));
        assert!(state.content_file_to_routes[&content_file].contains(&route2));
    }

    #[test]
    fn test_track_content_file_multiple_files() {
        let mut state = BuildState::new();
        let route = make_route("/");
        let file1 = PathBuf::from("/project/content/articles/hello.md");
        let file2 = PathBuf::from("/project/content/articles/world.md");

        state.track_content_file(file1.clone(), route.clone());
        state.track_content_file(file2.clone(), route.clone());

        assert_eq!(state.content_file_to_routes.len(), 2);
        assert!(state.content_file_to_routes[&file1].contains(&route));
        assert!(state.content_file_to_routes[&file2].contains(&route));
    }

    #[test]
    fn test_clear_also_clears_content_files() {
        let mut state = BuildState::new();
        let route = make_route("/");
        let content_file = PathBuf::from("/project/content/articles/hello.md");

        state.track_content_file(content_file, route);

        assert!(!state.content_file_to_routes.is_empty());

        state.clear();

        assert!(state.content_file_to_routes.is_empty());
    }

    #[test]
    fn test_get_affected_routes_content_file() {
        let mut state = BuildState::new();
        let route1 = make_route("/");
        let route2 = make_route("/blog/[slug]");
        let route3 = make_route("/about");

        // Track content file -> route mappings directly
        let article1 = PathBuf::from("/project/content/articles/hello.md");
        let article2 = PathBuf::from("/project/content/articles/world.md");
        let page1 = PathBuf::from("/project/content/pages/about.md");

        // Route "/" uses article1 and article2
        state.track_content_file(article1.clone(), route1.clone());
        state.track_content_file(article2.clone(), route1.clone());
        // Route "/blog/[slug]" uses only article1
        state.track_content_file(article1.clone(), route2.clone());
        // Route "/about" uses page1
        state.track_content_file(page1.clone(), route3.clone());

        // When article1 changes, only routes that used article1 should be affected
        let affected = state.get_affected_routes(&[article1]).unwrap();
        assert_eq!(affected.len(), 2);
        assert!(affected.contains(&route1));
        assert!(affected.contains(&route2));
        assert!(!affected.contains(&route3));

        // When article2 changes, only route1 should be affected (granular!)
        let affected = state.get_affected_routes(&[article2]).unwrap();
        assert_eq!(affected.len(), 1);
        assert!(affected.contains(&route1));
        assert!(!affected.contains(&route2));
        assert!(!affected.contains(&route3));

        // When page1 changes, only route3 should be affected
        let affected = state.get_affected_routes(&[page1]).unwrap();
        assert_eq!(affected.len(), 1);
        assert!(affected.contains(&route3));
        assert!(!affected.contains(&route1));
        assert!(!affected.contains(&route2));
    }

    #[test]
    fn test_get_affected_routes_content_file_multiple_files_changed() {
        let mut state = BuildState::new();
        let route1 = make_route("/");
        let route2 = make_route("/about");

        // Track content files
        let article = PathBuf::from("/project/content/articles/hello.md");
        let page = PathBuf::from("/project/content/pages/about.md");

        state.track_content_file(article.clone(), route1.clone());
        state.track_content_file(page.clone(), route2.clone());

        // When both files change, both routes should be affected
        let affected = state.get_affected_routes(&[article, page]).unwrap();
        assert_eq!(affected.len(), 2);
        assert!(affected.contains(&route1));
        assert!(affected.contains(&route2));
    }

    #[test]
    fn test_get_affected_routes_content_file_mixed_with_asset() {
        let mut state = BuildState::new();
        let route1 = make_route("/");
        let route2 = make_route("/about");

        // Track a content file for route1
        let article = PathBuf::from("/project/content/articles/hello.md");
        state.track_content_file(article.clone(), route1.clone());

        // Track an asset used by route2
        let style = PathBuf::from("/project/src/styles.css");
        state.track_asset(style.clone(), route2.clone());

        // When both content file and asset change
        let affected = state.get_affected_routes(&[article, style]).unwrap();
        assert_eq!(affected.len(), 2);
        assert!(affected.contains(&route1));
        assert!(affected.contains(&route2));
    }

    #[test]
    fn test_get_affected_routes_unknown_content_file() {
        let mut state = BuildState::new();
        let route = make_route("/");

        // Track a content file
        let article = PathBuf::from("/project/content/articles/hello.md");
        state.track_content_file(article, route);

        // A new/unknown .md file that isn't tracked
        // This could be a newly created file
        let new_file = PathBuf::from("/project/content/articles/new-post.md");

        // Should trigger full rebuild since it's an untracked file with extension
        let affected = state.get_affected_routes(&[new_file]);
        assert!(
            affected.is_none(),
            "New untracked content file should trigger full rebuild"
        );
    }

    #[test]
    fn test_is_known_content_file() {
        let mut state = BuildState::new();
        let route = make_route("/");
        let content_file = PathBuf::from("/project/content/articles/hello.md");

        state.track_content_file(content_file.clone(), route);

        assert!(state.is_known_content_file(&content_file));
        assert!(!state.is_known_content_file(Path::new("/project/content/articles/unknown.md")));
    }

    #[test]
    fn test_content_file_directory_prefix() {
        let mut state = BuildState::new();
        let route = make_route("/");

        // Track content files under a directory
        let article1 = PathBuf::from("/project/content/articles/hello.md");
        let article2 = PathBuf::from("/project/content/articles/world.md");
        state.track_content_file(article1, route.clone());
        state.track_content_file(article2, route.clone());

        // When the parent directory changes (e.g., renamed), should find affected routes
        let content_dir = PathBuf::from("/project/content/articles");
        let affected = state.get_affected_routes(&[content_dir]).unwrap();
        assert_eq!(affected.len(), 1);
        assert!(affected.contains(&route));
    }

    #[test]
    fn test_clear_content_file_mappings_for_routes() {
        let mut state = BuildState::new();
        let route1 = make_route("/articles");
        let route2 = make_route("/articles/[slug]");
        let route3 = make_route("/about");

        // Article 1 is accessed by routes 1 and 2
        let article1 = PathBuf::from("/project/content/articles/hello.md");
        state.track_content_file(article1.clone(), route1.clone());
        state.track_content_file(article1.clone(), route2.clone());

        // Article 2 is accessed by routes 1 and 2
        let article2 = PathBuf::from("/project/content/articles/world.md");
        state.track_content_file(article2.clone(), route1.clone());
        state.track_content_file(article2.clone(), route2.clone());

        // Route 3 uses a different file
        let page = PathBuf::from("/project/content/pages/about.md");
        state.track_content_file(page.clone(), route3.clone());

        assert_eq!(state.content_file_to_routes.len(), 3);

        // Clear mappings only for route2
        let mut routes_to_clear = FxHashSet::default();
        routes_to_clear.insert(route2.clone());
        state.clear_content_file_mappings_for_routes(&routes_to_clear);

        // route2 should be removed from article1 and article2 mappings
        assert!(!state.content_file_to_routes[&article1].contains(&route2));
        assert!(state.content_file_to_routes[&article1].contains(&route1));

        assert!(!state.content_file_to_routes[&article2].contains(&route2));
        assert!(state.content_file_to_routes[&article2].contains(&route1));

        // route3's mapping should be unaffected
        assert!(state.content_file_to_routes[&page].contains(&route3));
    }

    #[test]
    fn test_clear_content_file_mappings_for_routes_removes_empty_entries() {
        let mut state = BuildState::new();
        let route1 = make_route("/articles/first");
        let route2 = make_route("/articles/second");

        // Route1 uses only article1
        let article1 = PathBuf::from("/project/content/articles/first.md");
        state.track_content_file(article1.clone(), route1.clone());

        // Route2 uses only article2
        let article2 = PathBuf::from("/project/content/articles/second.md");
        state.track_content_file(article2.clone(), route2.clone());

        assert_eq!(state.content_file_to_routes.len(), 2);

        // Clear mappings for route1
        let mut routes_to_clear = FxHashSet::default();
        routes_to_clear.insert(route1);
        state.clear_content_file_mappings_for_routes(&routes_to_clear);

        // article1 entry should be completely removed (no routes left)
        assert!(!state.content_file_to_routes.contains_key(&article1));

        // article2 entry should still exist
        assert!(state.content_file_to_routes.contains_key(&article2));
        assert!(state.content_file_to_routes[&article2].contains(&route2));
    }

    #[test]
    fn test_track_content_file_source() {
        let mut state = BuildState::new();
        let file = PathBuf::from("/project/content/articles/hello.md");

        state.track_content_file_source(file.clone(), "articles".to_string());

        assert_eq!(state.content_file_to_source.len(), 1);
        assert_eq!(
            state.content_file_to_source.get(&file),
            Some(&"articles".to_string())
        );
    }

    #[test]
    fn test_get_affected_content_sources_single_source() {
        let mut state = BuildState::new();
        let article1 = PathBuf::from("/project/content/articles/hello.md");
        let article2 = PathBuf::from("/project/content/articles/world.md");

        state.track_content_file_source(article1.clone(), "articles".to_string());
        state.track_content_file_source(article2.clone(), "articles".to_string());

        // Change one article file
        let affected = state.get_affected_content_sources(&[article1]).unwrap();
        assert_eq!(affected.len(), 1);
        assert!(affected.contains("articles"));
    }

    #[test]
    fn test_get_affected_content_sources_multiple_sources() {
        let mut state = BuildState::new();
        let article = PathBuf::from("/project/content/articles/hello.md");
        let page = PathBuf::from("/project/content/pages/about.md");

        state.track_content_file_source(article.clone(), "articles".to_string());
        state.track_content_file_source(page.clone(), "pages".to_string());

        // Change both files
        let affected = state
            .get_affected_content_sources(&[article, page])
            .unwrap();
        assert_eq!(affected.len(), 2);
        assert!(affected.contains("articles"));
        assert!(affected.contains("pages"));
    }

    #[test]
    fn test_get_affected_content_sources_unknown_file_returns_none() {
        let mut state = BuildState::new();
        let article = PathBuf::from("/project/content/articles/hello.md");
        state.track_content_file_source(article, "articles".to_string());

        // A new file that's not tracked
        let new_file = PathBuf::from("/project/content/articles/new-post.md");

        // Should return None (need to re-init all sources)
        let affected = state.get_affected_content_sources(&[new_file]);
        assert!(affected.is_none());
    }

    #[test]
    fn test_get_affected_content_sources_ignores_non_content_files() {
        let mut state = BuildState::new();
        let article = PathBuf::from("/project/content/articles/hello.md");
        state.track_content_file_source(article.clone(), "articles".to_string());

        // A non-content file (e.g., .rs file) - should be ignored
        let rust_file = PathBuf::from("/project/src/pages/index.rs");

        // Should return empty set (no content sources affected)
        let affected = state
            .get_affected_content_sources(std::slice::from_ref(&rust_file))
            .unwrap();
        assert!(affected.is_empty());

        // Mixed: content file + non-content file
        let affected = state
            .get_affected_content_sources(&[article, rust_file])
            .unwrap();
        assert_eq!(affected.len(), 1);
        assert!(affected.contains("articles"));
    }

    #[test]
    fn test_clear_content_mappings_for_sources() {
        let mut state = BuildState::new();
        let route1 = make_route("/articles");
        let route2 = make_route("/pages");

        // Set up articles source
        let article1 = PathBuf::from("/project/content/articles/hello.md");
        let article2 = PathBuf::from("/project/content/articles/world.md");
        state.track_content_file_source(article1.clone(), "articles".to_string());
        state.track_content_file_source(article2.clone(), "articles".to_string());
        state.track_content_file(article1.clone(), route1.clone());
        state.track_content_file(article2.clone(), route1.clone());

        // Set up pages source
        let page = PathBuf::from("/project/content/pages/about.md");
        state.track_content_file_source(page.clone(), "pages".to_string());
        state.track_content_file(page.clone(), route2.clone());

        assert_eq!(state.content_file_to_source.len(), 3);
        assert_eq!(state.content_file_to_routes.len(), 3);

        // Clear only the articles source
        let mut sources_to_clear = FxHashSet::default();
        sources_to_clear.insert("articles".to_string());
        state.clear_content_mappings_for_sources(&sources_to_clear);

        // Articles source mappings should be removed
        assert!(!state.content_file_to_source.contains_key(&article1));
        assert!(!state.content_file_to_source.contains_key(&article2));

        // But routes mappings should be preserved (cleared separately per-route)
        assert!(state.content_file_to_routes.contains_key(&article1));
        assert!(state.content_file_to_routes.contains_key(&article2));

        // Pages should remain completely unchanged
        assert!(state.content_file_to_source.contains_key(&page));
        assert!(state.content_file_to_routes.contains_key(&page));
        assert_eq!(
            state.content_file_to_source.get(&page),
            Some(&"pages".to_string())
        );
    }

    #[test]
    fn test_clear_also_clears_content_file_to_source() {
        let mut state = BuildState::new();
        let file = PathBuf::from("/project/content/articles/hello.md");
        state.track_content_file_source(file, "articles".to_string());

        assert!(!state.content_file_to_source.is_empty());

        state.clear();

        assert!(state.content_file_to_source.is_empty());
    }
}
+694 -143
crates/maudit/src/build.rs
··· 14 14 self, HashAssetType, HashConfig, PrefetchPlugin, RouteAssets, Script, TailwindPlugin, 15 15 calculate_hash, image_cache::ImageCache, prefetch, 16 16 }, 17 - build::{images::process_image, options::PrefetchStrategy}, 18 - content::ContentSources, 17 + build::{ 18 + images::process_image, 19 + options::PrefetchStrategy, 20 + state::{BuildState, RouteIdentifier}, 21 + }, 22 + content::{ContentSources, finish_tracking_content_files, start_tracking_content_files}, 19 23 is_dev, 20 24 logging::print_title, 21 25 route::{CachedRoute, DynamicRouteContext, FullRoute, InternalRoute, PageContext, PageParams}, ··· 26 30 use log::{debug, info, trace, warn}; 27 31 use pathdiff::diff_paths; 28 32 use rolldown::{Bundler, BundlerOptions, InputItem, ModuleType}; 33 + use rolldown_common::Output; 29 34 use rolldown_plugin_replace::ReplacePlugin; 30 35 use rustc_hash::{FxHashMap, FxHashSet}; 31 36 ··· 36 41 pub mod images; 37 42 pub mod metadata; 38 43 pub mod options; 44 + pub mod state; 45 + 46 + /// Helper to check if a route should be rebuilt during incremental builds. 47 + /// Returns `true` for full builds (when `routes_to_rebuild` is `None`). 48 + fn should_rebuild_route( 49 + route_id: Option<&RouteIdentifier>, 50 + routes_to_rebuild: &Option<FxHashSet<RouteIdentifier>>, 51 + ) -> bool { 52 + match routes_to_rebuild { 53 + Some(set) => { 54 + // Incremental build - need route_id to check 55 + let route_id = route_id.expect("route_id required for incremental builds"); 56 + let result = set.contains(route_id); 57 + if !result { 58 + trace!(target: "build", "Skipping route {:?} (not in rebuild set)", route_id); 59 + } 60 + result 61 + } 62 + None => true, // Full build - always rebuild 63 + } 64 + } 65 + 66 + /// Helper to track all assets and source files used by a route. 67 + /// Only performs work when incremental builds are enabled and route_id is provided. 
68 + fn track_route_assets( 69 + build_state: &mut BuildState, 70 + route_id: Option<&RouteIdentifier>, 71 + route_assets: &RouteAssets, 72 + ) { 73 + // Skip tracking entirely when route_id is not provided (incremental disabled) 74 + let Some(route_id) = route_id else { 75 + return; 76 + }; 77 + 78 + // Track images 79 + for image in &route_assets.images { 80 + if let Ok(canonical) = image.path().canonicalize() { 81 + build_state.track_asset(canonical, route_id.clone()); 82 + } 83 + } 84 + 85 + // Track scripts 86 + for script in &route_assets.scripts { 87 + if let Ok(canonical) = script.path().canonicalize() { 88 + build_state.track_asset(canonical, route_id.clone()); 89 + } 90 + } 91 + 92 + // Track styles 93 + for style in &route_assets.styles { 94 + if let Ok(canonical) = style.path().canonicalize() { 95 + build_state.track_asset(canonical, route_id.clone()); 96 + } 97 + } 98 + } 99 + 100 + /// Helper to track the source file where a route is defined. 101 + /// Only performs work when incremental builds are enabled and route_id is provided. 102 + fn track_route_source_file( 103 + build_state: &mut BuildState, 104 + route_id: Option<&RouteIdentifier>, 105 + source_file: &str, 106 + ) { 107 + // Skip tracking entirely when route_id is not provided (incremental disabled) 108 + let Some(route_id) = route_id else { 109 + return; 110 + }; 111 + 112 + // The file!() macro returns a path relative to the cargo workspace root. 113 + // We need to canonicalize it to match against changed file paths (which are absolute). 114 + let source_path = PathBuf::from(source_file); 115 + 116 + // Try direct canonicalization first (works if CWD is workspace root) 117 + if let Ok(canonical) = source_path.canonicalize() { 118 + build_state.track_source_file(canonical, route_id.clone()); 119 + return; 120 + } 121 + 122 + // The file!() macro path is relative to the workspace root at compile time. 123 + // At runtime, we're typically running from the package directory. 
124 + // Try to find the file by walking up from CWD until we find it. 125 + if let Ok(cwd) = std::env::current_dir() { 126 + let mut current = cwd.as_path(); 127 + loop { 128 + let candidate = current.join(&source_path); 129 + if let Ok(canonical) = candidate.canonicalize() { 130 + build_state.track_source_file(canonical, route_id.clone()); 131 + return; 132 + } 133 + match current.parent() { 134 + Some(parent) => current = parent, 135 + None => break, 136 + } 137 + } 138 + } 139 + 140 + // Last resort: store the relative path (won't match absolute changed files) 141 + debug!(target: "build", "Could not canonicalize source file path: {}", source_file); 142 + build_state.track_source_file(source_path, route_id.clone()); 143 + } 144 + 145 + /// Helper to track content files accessed during page rendering. 146 + /// Only performs work when incremental builds are enabled and route_id is provided. 147 + /// This should be called after `finish_tracking_content_files()` to get the accessed files. 
148 + fn track_route_content_files( 149 + build_state: &mut BuildState, 150 + route_id: Option<&RouteIdentifier>, 151 + accessed_files: Option<FxHashSet<PathBuf>>, 152 + ) { 153 + // Skip tracking entirely when route_id is not provided (incremental disabled) 154 + let Some(route_id) = route_id else { 155 + return; 156 + }; 157 + 158 + // Skip if no files were tracked 159 + let Some(files) = accessed_files else { 160 + return; 161 + }; 162 + 163 + for file_path in files { 164 + build_state.track_content_file(file_path, route_id.clone()); 165 + } 166 + } 39 167 40 168 pub fn execute_build( 41 169 routes: &[&dyn FullRoute], 42 170 content_sources: &mut ContentSources, 43 171 options: &BuildOptions, 172 + changed_files: Option<&[PathBuf]>, 44 173 async_runtime: &tokio::runtime::Runtime, 45 174 ) -> Result<BuildOutput, Box<dyn std::error::Error>> { 46 - async_runtime.block_on(async { build(routes, content_sources, options).await }) 175 + async_runtime.block_on(async { build(routes, content_sources, options, changed_files).await }) 47 176 } 48 177 49 178 pub async fn build( 50 179 routes: &[&dyn FullRoute], 51 180 content_sources: &mut ContentSources, 52 181 options: &BuildOptions, 182 + changed_files: Option<&[PathBuf]>, 53 183 ) -> Result<BuildOutput, Box<dyn std::error::Error>> { 54 184 let build_start = Instant::now(); 55 185 let mut build_metadata = BuildOutput::new(build_start); ··· 57 187 // Create a directory for the output 58 188 trace!(target: "build", "Setting up required directories..."); 59 189 60 - let clean_up_handle = if options.clean_output_dir { 190 + // Use cache directory from options 191 + let build_cache_dir = &options.cache_dir; 192 + 193 + // Load build state for incremental builds (only if incremental is enabled) 194 + let mut build_state = if options.incremental { 195 + BuildState::load(build_cache_dir).unwrap_or_else(|e| { 196 + debug!(target: "build", "Failed to load build state: {}", e); 197 + BuildState::new() 198 + }) 199 + } else { 200 + 
BuildState::new() 201 + }; 202 + 203 + debug!(target: "build", "Loaded build state with {} asset mappings, {} source mappings, {} content file mappings", build_state.asset_to_routes.len(), build_state.source_to_routes.len(), build_state.content_file_to_routes.len()); 204 + debug!(target: "build", "options.incremental: {}, changed_files.is_some(): {}", options.incremental, changed_files.is_some()); 205 + 206 + // Determine if this is an incremental build 207 + // We need either asset mappings OR source file mappings to do incremental builds 208 + let has_build_state = 209 + !build_state.asset_to_routes.is_empty() || !build_state.source_to_routes.is_empty(); 210 + let is_incremental = options.incremental && changed_files.is_some() && has_build_state; 211 + 212 + let routes_to_rebuild = if is_incremental { 213 + let changed = changed_files.unwrap(); 214 + info!(target: "build", "Incremental build: {} files changed", changed.len()); 215 + info!(target: "build", "Changed files: {:?}", changed); 216 + 217 + info!(target: "build", "Build state has {} asset mappings, {} source mappings, {} content file mappings", build_state.asset_to_routes.len(), build_state.source_to_routes.len(), build_state.content_file_to_routes.len()); 218 + 219 + match build_state.get_affected_routes(changed) { 220 + Some(affected) => { 221 + info!(target: "build", "Rebuilding {} affected routes", affected.len()); 222 + info!(target: "build", "Affected routes: {:?}", affected); 223 + Some(affected) 224 + } 225 + None => { 226 + // Some changed files weren't tracked (e.g., include_str! 
dependencies) 227 + // Fall back to full rebuild to ensure correctness 228 + info!(target: "build", "Untracked files changed, falling back to full rebuild"); 229 + build_state.clear(); 230 + None 231 + } 232 + } 233 + } else { 234 + if changed_files.is_some() { 235 + info!(target: "build", "Full build (first run after recompilation)"); 236 + } 237 + // Full build - clear old state 238 + build_state.clear(); 239 + None 240 + }; 241 + 242 + // Check if we should rebundle during incremental builds 243 + // Rebundle if a changed file is either: 244 + // 1. A direct bundler input (entry point) 245 + // 2. A transitive dependency tracked in asset_to_routes (any file the bundler processed) 246 + let should_rebundle = if is_incremental && !build_state.bundler_inputs.is_empty() { 247 + let changed = changed_files.unwrap(); 248 + let should = changed.iter().any(|changed_file| { 249 + // Check if it's a direct bundler input 250 + let is_bundler_input = build_state.bundler_inputs.iter().any(|bundler_input| { 251 + if let (Ok(changed_canonical), Ok(bundler_canonical)) = ( 252 + changed_file.canonicalize(), 253 + PathBuf::from(bundler_input).canonicalize(), 254 + ) { 255 + changed_canonical == bundler_canonical 256 + } else { 257 + false 258 + } 259 + }); 260 + 261 + if is_bundler_input { 262 + return true; 263 + } 264 + 265 + // Check if it's a transitive dependency tracked by the bundler 266 + // (JS/TS modules, CSS files, or assets like images/fonts referenced via url()) 267 + if let Ok(canonical) = changed_file.canonicalize() { 268 + return build_state.asset_to_routes.contains_key(&canonical); 269 + } 270 + 271 + false 272 + }); 273 + 274 + if should { 275 + info!(target: "build", "Rebundling needed: changed file affects bundled assets"); 276 + } else { 277 + info!(target: "build", "Skipping bundler: no changed files affect bundled assets"); 278 + } 279 + 280 + should 281 + } else { 282 + // Not incremental or no previous bundler inputs 283 + false 284 + }; 285 + 286 + let 
clean_up_handle = if options.clean_output_dir && !is_incremental { 61 287 let old_dist_tmp_dir = { 62 288 let duration = SystemTime::now().duration_since(UNIX_EPOCH)?; 63 289 let num = (duration.as_secs() + duration.subsec_nanos() as u64) % 100000; ··· 74 300 }; 75 301 76 302 // Create the image cache early so it can be shared across routes 77 - let image_cache = ImageCache::with_cache_dir(&options.assets.image_cache_dir); 303 + let image_cache = ImageCache::with_cache_dir(options.assets_cache_dir()); 78 304 let _ = fs::create_dir_all(image_cache.get_cache_dir()); 79 305 80 306 // Create route_assets_options with the image cache ··· 84 310 85 311 let content_sources_start = Instant::now(); 86 312 print_title("initializing content sources"); 87 - content_sources.sources_mut().iter_mut().for_each(|source| { 88 - let source_start = Instant::now(); 89 - source.init(); 313 + 314 + // Determine which content sources need to be initialized 315 + // For incremental builds with specific routes to rebuild, only re-init sources whose files have changed 316 + // If routes_to_rebuild is None (full rebuild), always init all sources 317 + let sources_to_init: Option<FxHashSet<String>> = if routes_to_rebuild.is_some() { 318 + if let Some(changed) = changed_files { 319 + build_state.get_affected_content_sources(changed) 320 + } else { 321 + None // Full init 322 + } 323 + } else { 324 + None // Full init (routes_to_rebuild is None means full rebuild) 325 + }; 326 + 327 + // Initialize content sources (all or selective) 328 + let initialized_sources: Vec<String> = match &sources_to_init { 329 + Some(source_names) if !source_names.is_empty() => { 330 + info!(target: "content", "Selectively initializing {} content source(s): {:?}", source_names.len(), source_names); 331 + 332 + // Clear mappings for sources being re-initialized before init 333 + build_state.clear_content_mappings_for_sources(source_names); 334 + 335 + // Initialize only the affected sources 336 + let mut initialized = 
Vec::new(); 337 + for source in content_sources.sources_mut() { 338 + if source_names.contains(source.get_name()) { 339 + let source_start = Instant::now(); 340 + source.init(); 341 + info!(target: "content", "{} initialized in {}", source.get_name(), format_elapsed_time(source_start.elapsed(), &FormatElapsedTimeOptions::default())); 342 + initialized.push(source.get_name().to_string()); 343 + } else { 344 + info!(target: "content", "{} (unchanged, skipped)", source.get_name()); 345 + } 346 + } 347 + initialized 348 + } 349 + Some(_) => { 350 + // Empty set means no content files changed, skip all initialization 351 + info!(target: "content", "No content files changed, skipping content source initialization"); 352 + Vec::new() 353 + } 354 + None => { 355 + // Full initialization (first build, unknown files, or non-incremental) 356 + info!(target: "content", "Initializing all content sources"); 357 + 358 + // Clear all content mappings for full init 359 + build_state.clear_content_file_mappings(); 360 + build_state.content_file_to_source.clear(); 361 + 362 + let mut initialized = Vec::new(); 363 + for source in content_sources.sources_mut() { 364 + let source_start = Instant::now(); 365 + source.init(); 366 + info!(target: "content", "{} initialized in {}", source.get_name(), format_elapsed_time(source_start.elapsed(), &FormatElapsedTimeOptions::default())); 367 + initialized.push(source.get_name().to_string()); 368 + } 369 + initialized 370 + } 371 + }; 90 372 91 - info!(target: "content", "{} initialized in {}", source.get_name(), format_elapsed_time(source_start.elapsed(), &FormatElapsedTimeOptions::default())); 92 - }); 373 + // Track file->source mappings for all initialized sources 374 + for source in content_sources.sources() { 375 + if initialized_sources.contains(&source.get_name().to_string()) { 376 + let source_name = source.get_name().to_string(); 377 + for file_path in source.get_entry_file_paths() { 378 + 
build_state.track_content_file_source(file_path, source_name.clone()); 379 + } 380 + } 381 + } 93 382 94 383 info!(target: "content", "{}", format!("Content sources initialized in {}", format_elapsed_time( 95 384 content_sources_start.elapsed(), 96 385 &FormatElapsedTimeOptions::default(), 97 386 )).bold()); 387 + 388 + // Clear content file->routes mappings for routes being rebuilt 389 + // (so they get fresh tracking during this build) 390 + if let Some(ref routes) = routes_to_rebuild { 391 + build_state.clear_content_file_mappings_for_routes(routes); 392 + } 98 393 99 394 print_title("generating pages"); 100 395 let pages_start = Instant::now(); ··· 183 478 184 479 // Static base route 185 480 if base_params.is_empty() { 186 - let mut route_assets = RouteAssets::with_default_assets( 187 - &route_assets_options, 188 - Some(image_cache.clone()), 189 - default_scripts.clone(), 190 - vec![], 191 - ); 481 + // Only create RouteIdentifier when incremental builds are enabled 482 + let route_id = if options.incremental { 483 + Some(RouteIdentifier::base(base_path.clone(), None)) 484 + } else { 485 + None 486 + }; 192 487 193 - let params = PageParams::default(); 194 - let url = cached_route.url(&params); 488 + // Check if we need to rebuild this route 489 + if should_rebuild_route(route_id.as_ref(), &routes_to_rebuild) { 490 + let mut route_assets = RouteAssets::with_default_assets( 491 + &route_assets_options, 492 + Some(image_cache.clone()), 493 + default_scripts.clone(), 494 + vec![], 495 + ); 195 496 196 - let result = route.build(&mut PageContext::from_static_route( 197 - content_sources, 198 - &mut route_assets, 199 - &url, 200 - &options.base_url, 201 - None, 202 - ))?; 497 + let params = PageParams::default(); 498 + let url = cached_route.url(&params); 203 499 204 - let file_path = cached_route.file_path(&params, &options.output_dir); 500 + // Start tracking content file access for incremental builds 501 + if options.incremental { 502 + 
start_tracking_content_files(); 503 + } 205 504 206 - write_route_file(&result, &file_path)?; 505 + let result = route.build(&mut PageContext::from_static_route( 506 + content_sources, 507 + &mut route_assets, 508 + &url, 509 + &options.base_url, 510 + None, 511 + ))?; 207 512 208 - info!(target: "pages", "{} -> {} {}", url, file_path.to_string_lossy().dimmed(), format_elapsed_time(route_start.elapsed(), &route_format_options)); 513 + // Finish tracking and record accessed content files 514 + let accessed_files = if options.incremental { 515 + finish_tracking_content_files() 516 + } else { 517 + None 518 + }; 209 519 210 - build_pages_images.extend(route_assets.images); 211 - build_pages_scripts.extend(route_assets.scripts); 212 - build_pages_styles.extend(route_assets.styles); 520 + let file_path = cached_route.file_path(&params, &options.output_dir); 213 521 214 - build_metadata.add_page( 215 - base_path.clone(), 216 - file_path.to_string_lossy().to_string(), 217 - None, 218 - ); 522 + write_route_file(&result, &file_path)?; 219 523 220 - add_sitemap_entry( 221 - &mut sitemap_entries, 222 - normalized_base_url, 223 - &url, 224 - base_path, 225 - &route.sitemap_metadata(), 226 - &options.sitemap, 227 - ); 524 + info!(target: "pages", "{} -> {} {}", url, file_path.to_string_lossy().dimmed(), format_elapsed_time(route_start.elapsed(), &route_format_options)); 525 + 526 + // Track assets, source file, and content files for this route 527 + track_route_assets(&mut build_state, route_id.as_ref(), &route_assets); 528 + track_route_source_file(&mut build_state, route_id.as_ref(), route.source_file()); 529 + track_route_content_files(&mut build_state, route_id.as_ref(), accessed_files); 228 530 229 - page_count += 1; 531 + build_pages_images.extend(route_assets.images); 532 + build_pages_scripts.extend(route_assets.scripts); 533 + build_pages_styles.extend(route_assets.styles); 534 + 535 + build_metadata.add_page( 536 + base_path.clone(), 537 + 
file_path.to_string_lossy().to_string(), 538 + None, 539 + ); 540 + 541 + add_sitemap_entry( 542 + &mut sitemap_entries, 543 + normalized_base_url, 544 + &url, 545 + base_path, 546 + &route.sitemap_metadata(), 547 + &options.sitemap, 548 + ); 549 + 550 + page_count += 1; 551 + } else { 552 + trace!(target: "build", "Skipping unchanged route: {}", base_path); 553 + } 230 554 } else { 231 555 // Dynamic base route 232 556 let mut route_assets = RouteAssets::with_default_assets( ··· 250 574 251 575 // Build all pages for this route 252 576 for page in pages { 253 - let page_start = Instant::now(); 254 - let url = cached_route.url(&page.0); 255 - let file_path = cached_route.file_path(&page.0, &options.output_dir); 577 + // Only create RouteIdentifier when incremental builds are enabled 578 + let route_id = if options.incremental { 579 + Some(RouteIdentifier::base(base_path.clone(), Some(page.0.0.clone()))) 580 + } else { 581 + None 582 + }; 583 + 584 + // Check if we need to rebuild this specific page 585 + if should_rebuild_route(route_id.as_ref(), &routes_to_rebuild) { 586 + let page_start = Instant::now(); 587 + let url = cached_route.url(&page.0); 588 + let file_path = cached_route.file_path(&page.0, &options.output_dir); 589 + 590 + // Start tracking content file access for incremental builds 591 + if options.incremental { 592 + start_tracking_content_files(); 593 + } 594 + 595 + let content = route.build(&mut PageContext::from_dynamic_route( 596 + &page, 597 + content_sources, 598 + &mut route_assets, 599 + &url, 600 + &options.base_url, 601 + None, 602 + ))?; 603 + 604 + // Finish tracking and record accessed content files 605 + let accessed_files = if options.incremental { 606 + finish_tracking_content_files() 607 + } else { 608 + None 609 + }; 256 610 257 - let content = route.build(&mut PageContext::from_dynamic_route( 258 - &page, 259 - content_sources, 260 - &mut route_assets, 261 - &url, 262 - &options.base_url, 263 - None, 264 - ))?; 611 + 
write_route_file(&content, &file_path)?; 265 612 266 - write_route_file(&content, &file_path)?; 613 + info!(target: "pages", "โ”œโ”€ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(page_start.elapsed(), &route_format_options)); 267 614 268 - info!(target: "pages", "โ”œโ”€ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(page_start.elapsed(), &route_format_options)); 615 + // Track assets, source file, and content files for this page 616 + track_route_assets(&mut build_state, route_id.as_ref(), &route_assets); 617 + track_route_source_file(&mut build_state, route_id.as_ref(), route.source_file()); 618 + track_route_content_files(&mut build_state, route_id.as_ref(), accessed_files); 269 619 270 - build_metadata.add_page( 271 - base_path.clone(), 272 - file_path.to_string_lossy().to_string(), 273 - Some(page.0.0.clone()), 274 - ); 620 + build_metadata.add_page( 621 + base_path.clone(), 622 + file_path.to_string_lossy().to_string(), 623 + Some(page.0.0.clone()), 624 + ); 275 625 276 - add_sitemap_entry( 277 - &mut sitemap_entries, 278 - normalized_base_url, 279 - &url, 280 - base_path, 281 - &route.sitemap_metadata(), 282 - &options.sitemap, 283 - ); 626 + add_sitemap_entry( 627 + &mut sitemap_entries, 628 + normalized_base_url, 629 + &url, 630 + base_path, 631 + &route.sitemap_metadata(), 632 + &options.sitemap, 633 + ); 284 634 285 - page_count += 1; 635 + page_count += 1; 636 + } else { 637 + trace!(target: "build", "Skipping unchanged page: {} with params {:?}", base_path, page.0.0); 638 + } 286 639 } 287 640 } 288 641 ··· 299 652 300 653 if variant_params.is_empty() { 301 654 // Static variant 302 - let mut route_assets = RouteAssets::with_default_assets( 303 - &route_assets_options, 304 - Some(image_cache.clone()), 305 - default_scripts.clone(), 306 - vec![], 307 - ); 655 + // Only create RouteIdentifier when incremental builds are enabled 656 + let route_id = if options.incremental { 657 + 
Some(RouteIdentifier::variant(variant_id.clone(), variant_path.clone(), None)) 658 + } else { 659 + None 660 + }; 661 + 662 + // Check if we need to rebuild this variant 663 + if should_rebuild_route(route_id.as_ref(), &routes_to_rebuild) { 664 + let mut route_assets = RouteAssets::with_default_assets( 665 + &route_assets_options, 666 + Some(image_cache.clone()), 667 + default_scripts.clone(), 668 + vec![], 669 + ); 670 + 671 + let params = PageParams::default(); 672 + let url = cached_route.variant_url(&params, &variant_id)?; 673 + let file_path = cached_route.variant_file_path( 674 + &params, 675 + &options.output_dir, 676 + &variant_id, 677 + )?; 678 + 679 + // Start tracking content file access for incremental builds 680 + if options.incremental { 681 + start_tracking_content_files(); 682 + } 308 683 309 - let params = PageParams::default(); 310 - let url = cached_route.variant_url(&params, &variant_id)?; 311 - let file_path = 312 - cached_route.variant_file_path(&params, &options.output_dir, &variant_id)?; 684 + let result = route.build(&mut PageContext::from_static_route( 685 + content_sources, 686 + &mut route_assets, 687 + &url, 688 + &options.base_url, 689 + Some(variant_id.clone()), 690 + ))?; 313 691 314 - let result = route.build(&mut PageContext::from_static_route( 315 - content_sources, 316 - &mut route_assets, 317 - &url, 318 - &options.base_url, 319 - Some(variant_id.clone()), 320 - ))?; 692 + // Finish tracking and record accessed content files 693 + let accessed_files = if options.incremental { 694 + finish_tracking_content_files() 695 + } else { 696 + None 697 + }; 321 698 322 - write_route_file(&result, &file_path)?; 699 + write_route_file(&result, &file_path)?; 323 700 324 - info!(target: "pages", "โ”œโ”€ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(variant_start.elapsed(), &route_format_options)); 701 + info!(target: "pages", "โ”œโ”€ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(variant_start.elapsed(), 
&route_format_options)); 325 702 326 - build_pages_images.extend(route_assets.images); 327 - build_pages_scripts.extend(route_assets.scripts); 328 - build_pages_styles.extend(route_assets.styles); 703 + // Track assets, source file, and content files for this variant 704 + track_route_assets(&mut build_state, route_id.as_ref(), &route_assets); 705 + track_route_source_file(&mut build_state, route_id.as_ref(), route.source_file()); 706 + track_route_content_files(&mut build_state, route_id.as_ref(), accessed_files); 329 707 330 - build_metadata.add_page( 331 - variant_path.clone(), 332 - file_path.to_string_lossy().to_string(), 333 - None, 334 - ); 708 + build_pages_images.extend(route_assets.images); 709 + build_pages_scripts.extend(route_assets.scripts); 710 + build_pages_styles.extend(route_assets.styles); 335 711 336 - add_sitemap_entry( 337 - &mut sitemap_entries, 338 - normalized_base_url, 339 - &url, 340 - &variant_path, 341 - &route.sitemap_metadata(), 342 - &options.sitemap, 343 - ); 712 + build_metadata.add_page( 713 + variant_path.clone(), 714 + file_path.to_string_lossy().to_string(), 715 + None, 716 + ); 717 + 718 + add_sitemap_entry( 719 + &mut sitemap_entries, 720 + normalized_base_url, 721 + &url, 722 + &variant_path, 723 + &route.sitemap_metadata(), 724 + &options.sitemap, 725 + ); 344 726 345 - page_count += 1; 727 + page_count += 1; 728 + } else { 729 + trace!(target: "build", "Skipping unchanged variant: {}", variant_path); 730 + } 346 731 } else { 347 732 // Dynamic variant 348 733 let mut route_assets = RouteAssets::with_default_assets( ··· 365 750 366 751 // Build all pages for this variant group 367 752 for page in pages { 368 - let variant_page_start = Instant::now(); 369 - let url = cached_route.variant_url(&page.0, &variant_id)?; 370 - let file_path = cached_route.variant_file_path( 371 - &page.0, 372 - &options.output_dir, 373 - &variant_id, 374 - )?; 753 + // Only create RouteIdentifier when incremental builds are enabled 754 + let 
route_id = if options.incremental { 755 + Some(RouteIdentifier::variant( 756 + variant_id.clone(), 757 + variant_path.clone(), 758 + Some(page.0.0.clone()), 759 + )) 760 + } else { 761 + None 762 + }; 375 763 376 - let content = route.build(&mut PageContext::from_dynamic_route( 377 - &page, 378 - content_sources, 379 - &mut route_assets, 380 - &url, 381 - &options.base_url, 382 - Some(variant_id.clone()), 383 - ))?; 764 + // Check if we need to rebuild this specific variant page 765 + if should_rebuild_route(route_id.as_ref(), &routes_to_rebuild) { 766 + let variant_page_start = Instant::now(); 767 + let url = cached_route.variant_url(&page.0, &variant_id)?; 768 + let file_path = cached_route.variant_file_path( 769 + &page.0, 770 + &options.output_dir, 771 + &variant_id, 772 + )?; 384 773 385 - write_route_file(&content, &file_path)?; 774 + // Start tracking content file access for incremental builds 775 + if options.incremental { 776 + start_tracking_content_files(); 777 + } 778 + 779 + let content = route.build(&mut PageContext::from_dynamic_route( 780 + &page, 781 + content_sources, 782 + &mut route_assets, 783 + &url, 784 + &options.base_url, 785 + Some(variant_id.clone()), 786 + ))?; 787 + 788 + // Finish tracking and record accessed content files 789 + let accessed_files = if options.incremental { 790 + finish_tracking_content_files() 791 + } else { 792 + None 793 + }; 386 794 387 - info!(target: "pages", "โ”‚ โ”œโ”€ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(variant_page_start.elapsed(), &route_format_options)); 795 + write_route_file(&content, &file_path)?; 388 796 389 - build_metadata.add_page( 390 - variant_path.clone(), 391 - file_path.to_string_lossy().to_string(), 392 - Some(page.0.0.clone()), 393 - ); 797 + info!(target: "pages", "โ”‚ โ”œโ”€ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(variant_page_start.elapsed(), &route_format_options)); 394 798 395 - add_sitemap_entry( 396 - &mut sitemap_entries, 397 - 
normalized_base_url, 398 - &url, 399 - &variant_path, 400 - &route.sitemap_metadata(), 401 - &options.sitemap, 402 - ); 799 + // Track assets, source file, and content files for this variant page 800 + track_route_assets(&mut build_state, route_id.as_ref(), &route_assets); 801 + track_route_source_file(&mut build_state, route_id.as_ref(), route.source_file()); 802 + track_route_content_files(&mut build_state, route_id.as_ref(), accessed_files); 803 + 804 + build_metadata.add_page( 805 + variant_path.clone(), 806 + file_path.to_string_lossy().to_string(), 807 + Some(page.0.0.clone()), 808 + ); 809 + 810 + add_sitemap_entry( 811 + &mut sitemap_entries, 812 + normalized_base_url, 813 + &url, 814 + &variant_path, 815 + &route.sitemap_metadata(), 816 + &options.sitemap, 817 + ); 403 818 404 - page_count += 1; 819 + page_count += 1; 820 + } else { 821 + trace!(target: "build", "Skipping unchanged variant page: {} with params {:?}", variant_path, page.0.0); 822 + } 405 823 } 406 824 } 407 825 ··· 421 839 fs::create_dir_all(&route_assets_options.output_assets_dir)?; 422 840 } 423 841 424 - if !build_pages_styles.is_empty() || !build_pages_scripts.is_empty() { 842 + if !build_pages_styles.is_empty() 843 + || !build_pages_scripts.is_empty() 844 + || (is_incremental && should_rebundle) 845 + { 425 846 let assets_start = Instant::now(); 426 847 print_title("generating assets"); 427 848 ··· 439 860 }) 440 861 .collect::<Vec<InputItem>>(); 441 862 442 - let bundler_inputs = build_pages_scripts 863 + let mut bundler_inputs = build_pages_scripts 443 864 .iter() 444 865 .map(|script| InputItem { 445 866 import: script.path().to_string_lossy().to_string(), ··· 454 875 .chain(css_inputs.into_iter()) 455 876 .collect::<Vec<InputItem>>(); 456 877 878 + // During incremental builds, merge with previous bundler inputs 879 + // to ensure we bundle all assets, not just from rebuilt routes 880 + if is_incremental && !build_state.bundler_inputs.is_empty() { 881 + debug!(target: "bundling", 
"Merging with {} previous bundler inputs", build_state.bundler_inputs.len()); 882 + 883 + let current_imports: FxHashSet<String> = bundler_inputs 884 + .iter() 885 + .map(|input| input.import.clone()) 886 + .collect(); 887 + 888 + // Add previous inputs that aren't in the current set 889 + for prev_input in &build_state.bundler_inputs { 890 + if !current_imports.contains(prev_input) { 891 + bundler_inputs.push(InputItem { 892 + import: prev_input.clone(), 893 + name: Some( 894 + PathBuf::from(prev_input) 895 + .file_stem() 896 + .unwrap_or_default() 897 + .to_string_lossy() 898 + .to_string(), 899 + ), 900 + }); 901 + } 902 + } 903 + } 904 + 457 905 debug!( 458 906 target: "bundling", 459 907 "Bundler inputs: {:?}", ··· 463 911 .collect::<Vec<String>>() 464 912 ); 465 913 914 + // Store bundler inputs in build state for next incremental build 915 + if options.incremental { 916 + build_state.bundler_inputs = bundler_inputs 917 + .iter() 918 + .map(|input| input.import.clone()) 919 + .collect(); 920 + } 921 + 466 922 if !bundler_inputs.is_empty() { 467 923 let mut module_types_hashmap = FxHashMap::default(); 924 + // Fonts 468 925 module_types_hashmap.insert("woff".to_string(), ModuleType::Asset); 469 926 module_types_hashmap.insert("woff2".to_string(), ModuleType::Asset); 927 + module_types_hashmap.insert("ttf".to_string(), ModuleType::Asset); 928 + module_types_hashmap.insert("otf".to_string(), ModuleType::Asset); 929 + module_types_hashmap.insert("eot".to_string(), ModuleType::Asset); 930 + // Images 931 + module_types_hashmap.insert("png".to_string(), ModuleType::Asset); 932 + module_types_hashmap.insert("jpg".to_string(), ModuleType::Asset); 933 + module_types_hashmap.insert("jpeg".to_string(), ModuleType::Asset); 934 + module_types_hashmap.insert("gif".to_string(), ModuleType::Asset); 935 + module_types_hashmap.insert("svg".to_string(), ModuleType::Asset); 936 + module_types_hashmap.insert("webp".to_string(), ModuleType::Asset); 937 + 
module_types_hashmap.insert("avif".to_string(), ModuleType::Asset); 938 + module_types_hashmap.insert("ico".to_string(), ModuleType::Asset); 470 939 471 940 let mut bundler = Bundler::with_plugins( 472 941 BundlerOptions { ··· 500 969 ], 501 970 )?; 502 971 503 - let _result = bundler.write().await?; 972 + let result = bundler.write().await?; 504 973 505 - // TODO: Add outputted chunks to build_metadata 974 + // Track transitive dependencies from bundler output 975 + // For each chunk, map all its modules to the routes that use the entry point 976 + // For assets (images, fonts via CSS url()), map them to all routes using any entry point 977 + if options.incremental { 978 + // First, collect all routes that use any bundler entry point 979 + let mut all_bundler_routes: FxHashSet<RouteIdentifier> = FxHashSet::default(); 980 + 981 + for output in &result.assets { 982 + if let Output::Chunk(chunk) = output { 983 + // Get the entry point for this chunk 984 + if let Some(facade_module_id) = &chunk.facade_module_id { 985 + // Try to find routes using this entry point 986 + let entry_path = PathBuf::from(facade_module_id.as_str()); 987 + let canonical_entry = entry_path.canonicalize().ok(); 988 + 989 + // Look up routes for this entry point 990 + let routes = canonical_entry 991 + .as_ref() 992 + .and_then(|p| build_state.asset_to_routes.get(p)) 993 + .cloned(); 994 + 995 + if let Some(routes) = routes { 996 + // Collect routes for asset tracking later 997 + all_bundler_routes.extend(routes.iter().cloned()); 998 + 999 + // Register all modules in this chunk as dependencies for those routes 1000 + let mut transitive_count = 0; 1001 + for module_id in &chunk.module_ids { 1002 + let module_path = PathBuf::from(module_id.as_str()); 1003 + if let Ok(canonical_module) = module_path.canonicalize() { 1004 + // Skip the entry point itself (already tracked) 1005 + if Some(&canonical_module) != canonical_entry.as_ref() { 1006 + for route in &routes { 1007 + build_state.track_asset( 
1008 + canonical_module.clone(), 1009 + route.clone(), 1010 + ); 1011 + } 1012 + transitive_count += 1; 1013 + } 1014 + } 1015 + } 1016 + if transitive_count > 0 { 1017 + debug!(target: "build", "Tracked {} transitive dependencies for {}", transitive_count, facade_module_id); 1018 + } 1019 + } 1020 + } 1021 + } 1022 + } 1023 + 1024 + // Now track Output::Asset items (images, fonts, etc. referenced via CSS url() or JS imports) 1025 + // These are mapped to all routes that use any bundler entry point 1026 + if !all_bundler_routes.is_empty() { 1027 + let mut asset_count = 0; 1028 + for output in &result.assets { 1029 + if let Output::Asset(asset) = output { 1030 + for original_file in &asset.original_file_names { 1031 + let asset_path = PathBuf::from(original_file); 1032 + if let Ok(canonical_asset) = asset_path.canonicalize() { 1033 + for route in &all_bundler_routes { 1034 + build_state 1035 + .track_asset(canonical_asset.clone(), route.clone()); 1036 + } 1037 + asset_count += 1; 1038 + } 1039 + } 1040 + } 1041 + } 1042 + if asset_count > 0 { 1043 + debug!(target: "build", "Tracked {} bundler assets for {} routes", asset_count, all_bundler_routes.len()); 1044 + } 1045 + } 1046 + } 506 1047 } 507 1048 508 1049 info!(target: "build", "{}", format!("Assets generated in {}", format_elapsed_time(assets_start.elapsed(), &section_format_options)).bold()); ··· 598 1139 info!(target: "SKIP_FORMAT", "{}", ""); 599 1140 info!(target: "build", "{}", format!("Build completed in {}", format_elapsed_time(build_start.elapsed(), &section_format_options)).bold()); 600 1141 1142 + // Save build state for next incremental build (only if incremental is enabled) 1143 + if options.incremental { 1144 + if let Err(e) = build_state.save(build_cache_dir) { 1145 + warn!(target: "build", "Failed to save build state: {}", e); 1146 + } else { 1147 + debug!(target: "build", "Build state saved to {}", build_cache_dir.join("build_state.json").display()); 1148 + } 1149 + } 1150 + 601 1151 if let 
Some(clean_up_handle) = clean_up_handle { 602 1152 clean_up_handle.await?; 603 1153 } ··· 680 1230 fs::create_dir_all(parent_dir)? 681 1231 } 682 1232 1233 + trace!(target: "build", "Writing HTML file: {}", file_path.display()); 683 1234 fs::write(file_path, content)?; 684 1235 685 1236 Ok(())
+110 -6
crates/maudit/src/content.rs
··· 1 1 //! Core functions and structs to define the content sources of your website. 2 2 //! 3 3 //! Content sources represent the content of your website, such as articles, blog posts, etc. Then, content sources can be passed to [`coronate()`](crate::coronate), through the [`content_sources!`](crate::content_sources) macro, to be loaded. 4 - use std::{any::Any, path::PathBuf, sync::Arc}; 4 + use std::{ 5 + any::Any, 6 + cell::RefCell, 7 + path::{Path, PathBuf}, 8 + sync::Arc, 9 + }; 5 10 6 - use rustc_hash::FxHashMap; 11 + use rustc_hash::{FxHashMap, FxHashSet}; 7 12 8 13 mod highlight; 9 14 pub mod markdown; ··· 25 30 }; 26 31 27 32 pub use highlight::{HighlightOptions, highlight_code}; 33 + 34 + // Thread-local storage for tracking content file access during page rendering. 35 + // This allows us to transparently track which content files a page uses 36 + // without requiring changes to user code. 37 + thread_local! { 38 + static ACCESSED_CONTENT_FILES: RefCell<Option<FxHashSet<PathBuf>>> = const { RefCell::new(None) }; 39 + } 40 + 41 + /// Start tracking content file access for a page render. 42 + /// Call this before rendering a page, then call `finish_tracking_content_files()` 43 + /// after rendering to get the set of accessed content files. 44 + pub(crate) fn start_tracking_content_files() { 45 + ACCESSED_CONTENT_FILES.with(|cell| { 46 + *cell.borrow_mut() = Some(FxHashSet::default()); 47 + }); 48 + } 49 + 50 + /// Finish tracking content file access and return the set of accessed files. 51 + /// Returns `None` if tracking was not started. 52 + pub(crate) fn finish_tracking_content_files() -> Option<FxHashSet<PathBuf>> { 53 + ACCESSED_CONTENT_FILES.with(|cell| cell.borrow_mut().take()) 54 + } 55 + 56 + /// Record that a content file was accessed. 57 + /// This is called internally when entries are accessed. 
58 + fn track_content_file_access(file_path: &Path) { 59 + ACCESSED_CONTENT_FILES.with(|cell| { 60 + if let Some(ref mut set) = *cell.borrow_mut() { 61 + set.insert(file_path.to_path_buf()); 62 + } 63 + }); 64 + } 28 65 29 66 /// Helps implement a struct as a Markdown content entry. 30 67 /// ··· 302 339 } 303 340 } 304 341 342 + /// Initialize only the content sources with the given names. 343 + /// Sources not in the set are left untouched (their entries remain as-is). 344 + /// Returns the names of sources that were actually initialized. 345 + pub fn init_sources(&mut self, source_names: &rustc_hash::FxHashSet<String>) -> Vec<String> { 346 + let mut initialized = Vec::new(); 347 + for source in &mut self.0 { 348 + if source_names.contains(source.get_name()) { 349 + source.init(); 350 + initialized.push(source.get_name().to_string()); 351 + } 352 + } 353 + initialized 354 + } 355 + 305 356 pub fn get_untyped_source(&self, name: &str) -> &ContentSource<Untyped> { 306 357 self.get_source::<Untyped>(name) 307 358 } ··· 337 388 /// A source of content such as articles, blog posts, etc. 
338 389 pub struct ContentSource<T = Untyped> { 339 390 pub name: String, 340 - pub entries: Vec<Arc<EntryInner<T>>>, 391 + entries: Vec<Arc<EntryInner<T>>>, 341 392 pub(crate) init_method: ContentSourceInitMethod<T>, 342 393 } 343 394 ··· 354 405 } 355 406 356 407 pub fn get_entry(&self, id: &str) -> &Entry<T> { 357 - self.entries 408 + let entry = self 409 + .entries 358 410 .iter() 359 411 .find(|entry| entry.id == id) 360 - .unwrap_or_else(|| panic!("Entry with id '{}' not found", id)) 412 + .unwrap_or_else(|| panic!("Entry with id '{}' not found", id)); 413 + 414 + // Track file access for incremental builds 415 + if let Some(ref file_path) = entry.file_path { 416 + track_content_file_access(file_path); 417 + } 418 + 419 + entry 361 420 } 362 421 363 422 pub fn get_entry_safe(&self, id: &str) -> Option<&Entry<T>> { 364 - self.entries.iter().find(|entry| entry.id == id) 423 + let entry = self.entries.iter().find(|entry| entry.id == id); 424 + 425 + // Track file access for incremental builds 426 + if let Some(entry) = &entry 427 + && let Some(ref file_path) = entry.file_path 428 + { 429 + track_content_file_access(file_path); 430 + } 431 + 432 + entry 365 433 } 366 434 367 435 pub fn into_params<P>(&self, cb: impl FnMut(&Entry<T>) -> P) -> Vec<P> 368 436 where 369 437 P: Into<PageParams>, 370 438 { 439 + // Track all entries accessed for incremental builds 440 + for entry in &self.entries { 441 + if let Some(ref file_path) = entry.file_path { 442 + track_content_file_access(file_path); 443 + } 444 + } 371 445 self.entries.iter().map(cb).collect() 372 446 } 373 447 ··· 378 452 where 379 453 Params: Into<PageParams>, 380 454 { 455 + // Track all entries accessed for incremental builds 456 + for entry in &self.entries { 457 + if let Some(ref file_path) = entry.file_path { 458 + track_content_file_access(file_path); 459 + } 460 + } 381 461 self.entries.iter().map(cb).collect() 382 462 } 463 + 464 + /// Get all entries, tracking access for incremental builds. 
465 + /// 466 + /// This returns a slice of all entries in the content source. 467 + /// You can use standard slice methods like `.iter()`, `.len()`, `.is_empty()`, etc. 468 + pub fn entries(&self) -> &[Entry<T>] { 469 + // Track all entries accessed for incremental builds 470 + for entry in &self.entries { 471 + if let Some(ref file_path) = entry.file_path { 472 + track_content_file_access(file_path); 473 + } 474 + } 475 + &self.entries 476 + } 383 477 } 384 478 385 479 #[doc(hidden)] ··· 389 483 fn init(&mut self); 390 484 fn get_name(&self) -> &str; 391 485 fn as_any(&self) -> &dyn Any; // Used for type checking at runtime 486 + 487 + /// Get all file paths for entries in this content source. 488 + /// Used for incremental builds to map content files to their source. 489 + fn get_entry_file_paths(&self) -> Vec<PathBuf>; 392 490 } 393 491 394 492 impl<T: 'static + Sync + Send> ContentSourceInternal for ContentSource<T> { ··· 400 498 } 401 499 fn as_any(&self) -> &dyn Any { 402 500 self 501 + } 502 + fn get_entry_file_paths(&self) -> Vec<PathBuf> { 503 + self.entries 504 + .iter() 505 + .filter_map(|entry| entry.file_path.clone()) 506 + .collect() 403 507 } 404 508 }
+22 -3
crates/maudit/src/lib.rs
··· 54 54 // Internal modules 55 55 mod logging; 56 56 57 - use std::env; 57 + use std::sync::LazyLock; 58 + use std::{env, path::PathBuf}; 58 59 59 60 use build::execute_build; 60 61 use content::ContentSources; 61 62 use logging::init_logging; 62 63 use route::FullRoute; 63 64 65 + static IS_DEV: LazyLock<bool> = LazyLock::new(|| { 66 + std::env::var("MAUDIT_DEV") 67 + .map(|v| v == "true") 68 + .unwrap_or(false) 69 + }); 70 + 64 71 /// Returns whether Maudit is running in development mode (through `maudit dev`). 65 72 /// 66 73 /// This can be useful to conditionally enable features or logging that should only be active during development. 67 74 /// Oftentimes, this is used to disable some expensive operations that would slow down build times during development. 68 75 pub fn is_dev() -> bool { 69 - env::var("MAUDIT_DEV").map(|v| v == "true").unwrap_or(false) 76 + *IS_DEV 70 77 } 71 78 72 79 #[macro_export] ··· 212 219 .enable_all() 213 220 .build()?; 214 221 215 - execute_build(routes, &mut content_sources, &options, &async_runtime) 222 + // Check for changed files from environment variable (set by CLI in dev mode) 223 + let changed_files = env::var("MAUDIT_CHANGED_FILES") 224 + .ok() 225 + .and_then(|s| serde_json::from_str::<Vec<String>>(&s).ok()) 226 + .map(|paths| paths.into_iter().map(PathBuf::from).collect::<Vec<_>>()); 227 + 228 + execute_build( 229 + routes, 230 + &mut content_sources, 231 + &options, 232 + changed_files.as_deref(), 233 + &async_runtime, 234 + ) 216 235 }
+6 -2
crates/maudit/src/logging.rs
··· 29 29 30 30 let _ = Builder::from_env(logging_env) 31 31 .format(|buf, record| { 32 - if std::env::args().any(|arg| arg == "--quiet") || std::env::var("MAUDIT_QUIET").is_ok() 33 - { 32 + if std::env::args().any(|arg| arg == "--quiet") { 33 + return Ok(()); 34 + } 35 + 36 + // In quiet mode, only show build target logs (for debugging incremental builds) 37 + if std::env::var("MAUDIT_QUIET").is_ok() && record.target() != "build" { 34 38 return Ok(()); 35 39 } 36 40
+18 -5
crates/maudit/src/route.rs
··· 9 9 use std::any::Any; 10 10 use std::path::{Path, PathBuf}; 11 11 12 - use lol_html::{RewriteStrSettings, element, rewrite_str}; 12 + use lol_html::{element, rewrite_str, RewriteStrSettings}; 13 13 14 14 /// The result of a page render, can be either text, raw bytes, or an error. 15 15 /// ··· 282 282 /// impl Route for Index { 283 283 /// fn render(&self, ctx: &mut PageContext) -> impl Into<RenderResult> { 284 284 /// let logo = ctx.assets.add_image("logo.png")?; 285 - /// let last_entries = &ctx.content.get_source::<ArticleContent>("articles").entries; 285 + /// let last_entries = ctx.content.get_source::<ArticleContent>("articles").entries(); 286 286 /// 287 287 /// Ok(html! { 288 288 /// main { ··· 504 504 pub trait InternalRoute { 505 505 fn route_raw(&self) -> Option<String>; 506 506 507 + /// Returns the source file path where this route is defined. 508 + /// This is used for incremental builds to track which routes are affected 509 + /// when a source file changes. 510 + fn source_file(&self) -> &'static str; 511 + 507 512 fn variants(&self) -> Vec<(String, String)> { 508 513 vec![] 509 514 } ··· 796 801 self.inner.route_raw() 797 802 } 798 803 804 + fn source_file(&self) -> &'static str { 805 + self.inner.source_file() 806 + } 807 + 799 808 fn variants(&self) -> Vec<(String, String)> { 800 809 self.inner.variants() 801 810 } ··· 957 966 //! use maudit::route::prelude::*; 958 967 //! 
``` 959 968 pub use super::{ 960 - CachedRoute, DynamicRouteContext, FullRoute, Page, PageContext, PageParams, Pages, 961 - PaginatedContentPage, PaginationPage, RenderResult, Route, RouteExt, paginate, redirect, 969 + paginate, redirect, CachedRoute, DynamicRouteContext, FullRoute, Page, PageContext, 970 + PageParams, Pages, PaginatedContentPage, PaginationPage, RenderResult, Route, RouteExt, 962 971 }; 963 972 pub use crate::assets::{ 964 973 Asset, Image, ImageFormat, ImageOptions, ImagePlaceholder, RenderWithAlt, Script, Style, 965 974 StyleOptions, 966 975 }; 967 976 pub use crate::content::{ContentContext, ContentEntry, Entry, EntryInner, MarkdownContent}; 968 - pub use maudit_macros::{Params, route}; 977 + pub use maudit_macros::{route, Params}; 969 978 } 970 979 971 980 #[cfg(test)] ··· 982 991 impl InternalRoute for TestPage { 983 992 fn route_raw(&self) -> Option<String> { 984 993 Some(self.route.clone()) 994 + } 995 + 996 + fn source_file(&self) -> &'static str { 997 + file!() 985 998 } 986 999 } 987 1000
+6
crates/maudit-cli/Cargo.toml
··· 28 28 ureq = "3.1.4" 29 29 tar = "0.4.44" 30 30 toml_edit = "0.24.0" 31 + toml = "0.8" 31 32 local-ip-address = "0.6.9" 32 33 flate2 = "1.1.8" 33 34 quanta = "0.12.6" 34 35 serde_json = "1.0" 35 36 tokio-util = "0.7" 36 37 cargo_metadata = "0.23.1" 38 + depinfo = "0.7.3" 39 + 40 + [dev-dependencies] 41 + tempfile = "3.24.0" 42 + tokio = { version = "1", features = ["macros", "rt-multi-thread", "test-util"] }
+521 -149
crates/maudit-cli/src/dev/build.rs
··· 1 1 use cargo_metadata::Message; 2 2 use quanta::Instant; 3 - use server::{StatusType, WebSocketMessage, update_status}; 3 + use std::path::PathBuf; 4 4 use std::sync::Arc; 5 5 use tokio::process::Command; 6 - use tokio::sync::broadcast; 6 + use tokio::sync::RwLock; 7 7 use tokio_util::sync::CancellationToken; 8 - use tracing::{debug, error, info}; 8 + use tracing::{debug, error, info, warn}; 9 9 10 10 use crate::{ 11 - dev::server, 11 + dev::server::{StatusManager, StatusType}, 12 12 logging::{FormatElapsedTimeOptions, format_elapsed_time}, 13 13 }; 14 14 15 + use super::dep_tracker::{DependencyTracker, find_target_dir}; 16 + 17 + /// Internal state shared across all BuildManager handles. 18 + struct BuildManagerState { 19 + current_cancel: RwLock<Option<CancellationToken>>, 20 + build_semaphore: tokio::sync::Semaphore, 21 + status_manager: StatusManager, 22 + dep_tracker: RwLock<Option<DependencyTracker>>, 23 + binary_path: RwLock<Option<PathBuf>>, 24 + // Cached values computed once at startup 25 + target_dir: Option<PathBuf>, 26 + binary_name: Option<String>, 27 + } 28 + 29 + /// Manages cargo build processes with cancellation support. 30 + /// Cheap to clone - all clones share the same underlying state. 
15 31 #[derive(Clone)] 16 32 pub struct BuildManager { 17 - current_cancel: Arc<tokio::sync::RwLock<Option<CancellationToken>>>, 18 - build_semaphore: Arc<tokio::sync::Semaphore>, 19 - websocket_tx: broadcast::Sender<WebSocketMessage>, 20 - current_status: Arc<tokio::sync::RwLock<Option<server::PersistentStatus>>>, 33 + state: Arc<BuildManagerState>, 21 34 } 22 35 23 36 impl BuildManager { 24 - pub fn new(websocket_tx: broadcast::Sender<WebSocketMessage>) -> Self { 37 + pub fn new(status_manager: StatusManager) -> Self { 38 + // Try to determine target directory and binary name at startup 39 + let target_dir = find_target_dir().ok(); 40 + let binary_name = Self::get_binary_name_from_cargo_toml().ok(); 41 + 42 + if let Some(ref name) = binary_name { 43 + debug!(name: "build", "Detected binary name at startup: {}", name); 44 + } 45 + if let Some(ref dir) = target_dir { 46 + debug!(name: "build", "Using target directory: {:?}", dir); 47 + } 48 + 25 49 Self { 26 - current_cancel: Arc::new(tokio::sync::RwLock::new(None)), 27 - build_semaphore: Arc::new(tokio::sync::Semaphore::new(1)), // Only one build at a time 28 - websocket_tx, 29 - current_status: Arc::new(tokio::sync::RwLock::new(None)), 50 + state: Arc::new(BuildManagerState { 51 + current_cancel: RwLock::new(None), 52 + build_semaphore: tokio::sync::Semaphore::new(1), 53 + status_manager, 54 + dep_tracker: RwLock::new(None), 55 + binary_path: RwLock::new(None), 56 + target_dir, 57 + binary_name, 58 + }), 30 59 } 31 60 } 32 61 33 - /// Get a reference to the current status for use with the web server 34 - pub fn current_status(&self) -> Arc<tokio::sync::RwLock<Option<server::PersistentStatus>>> { 35 - self.current_status.clone() 62 + /// Check if the given paths require recompilation based on dependency tracking. 63 + /// Returns true if recompilation is needed, false if we can just rerun the binary. 
64 + pub async fn needs_recompile(&self, changed_paths: &[PathBuf]) -> bool { 65 + let dep_tracker = self.state.dep_tracker.read().await; 66 + 67 + if let Some(tracker) = dep_tracker.as_ref() 68 + && tracker.has_dependencies() 69 + { 70 + let needs_recompile = tracker.needs_recompile(changed_paths); 71 + if !needs_recompile { 72 + debug!(name: "build", "Changed files are not dependencies, rerun binary without recompile"); 73 + } 74 + return needs_recompile; 75 + } 76 + 77 + // If we don't have a dependency tracker yet, always recompile 78 + true 36 79 } 37 80 38 - /// Do initial build that can be cancelled (but isn't stored as current build) 39 - pub async fn do_initial_build(&self) -> Result<bool, Box<dyn std::error::Error>> { 40 - self.internal_build(true).await 81 + /// Rerun the binary without recompiling. 82 + pub async fn rerun_binary( 83 + &self, 84 + changed_paths: &[PathBuf], 85 + ) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> { 86 + // Get binary path with limited lock scope 87 + let path = { 88 + let guard = self.state.binary_path.read().await; 89 + match guard.as_ref() { 90 + Some(p) if p.exists() => p.clone(), 91 + Some(p) => { 92 + warn!(name: "build", "Binary at {:?} no longer exists, falling back to full rebuild", p); 93 + return self.start_build(Some(changed_paths)).await; 94 + } 95 + None => { 96 + warn!(name: "build", "No binary path available, falling back to full rebuild"); 97 + return self.start_build(Some(changed_paths)).await; 98 + } 99 + } 100 + }; 101 + 102 + // Log that we're doing an incremental build 103 + debug!(name: "build", "Incremental build: {} files changed", changed_paths.len()); 104 + debug!(name: "build", "Changed files: {:?}", changed_paths); 105 + debug!(name: "build", "Rerunning binary without recompilation..."); 106 + 107 + self.state 108 + .status_manager 109 + .update(StatusType::Info, "Rerunning...") 110 + .await; 111 + 112 + let build_start_time = Instant::now(); 113 + 114 + // Serialize changed paths to 
JSON for the binary 115 + let changed_files_json = serde_json::to_string(changed_paths)?; 116 + 117 + let child = Command::new(&path) 118 + .envs([ 119 + ("MAUDIT_DEV", "true"), 120 + ("MAUDIT_QUIET", "true"), 121 + ("MAUDIT_CHANGED_FILES", changed_files_json.as_str()), 122 + ]) 123 + .stdout(std::process::Stdio::piped()) 124 + .stderr(std::process::Stdio::piped()) 125 + .spawn()?; 126 + 127 + let output = child.wait_with_output().await?; 128 + 129 + let duration = build_start_time.elapsed(); 130 + let formatted_elapsed_time = 131 + format_elapsed_time(duration, &FormatElapsedTimeOptions::default_dev()); 132 + 133 + if output.status.success() { 134 + if std::env::var("MAUDIT_SHOW_BINARY_OUTPUT").is_ok() { 135 + let stdout = String::from_utf8_lossy(&output.stdout); 136 + let stderr = String::from_utf8_lossy(&output.stderr); 137 + for line in stdout.lines().chain(stderr.lines()) { 138 + if !line.trim().is_empty() { 139 + info!(name: "build", "{}", line); 140 + } 141 + } 142 + } 143 + info!(name: "build", "Binary rerun finished {}", formatted_elapsed_time); 144 + self.state 145 + .status_manager 146 + .update(StatusType::Success, "Binary rerun finished successfully") 147 + .await; 148 + Ok(true) 149 + } else { 150 + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); 151 + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); 152 + error!(name: "build", "Binary rerun failed {}\nstdout: {}\nstderr: {}", 153 + formatted_elapsed_time, stdout, stderr); 154 + self.state 155 + .status_manager 156 + .update( 157 + StatusType::Error, 158 + &format!("Binary rerun failed:\n{}\n{}", stdout, stderr), 159 + ) 160 + .await; 161 + Ok(false) 162 + } 41 163 } 42 164 43 - /// Start a new build, cancelling any previous one 44 - pub async fn start_build(&self) -> Result<bool, Box<dyn std::error::Error>> { 45 - self.internal_build(false).await 165 + /// Do initial build that can be cancelled. 
166 + pub async fn do_initial_build(&self) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> { 167 + self.internal_build(true, None).await 168 + } 169 + 170 + /// Start a new build, cancelling any previous one. 171 + /// If changed_paths is provided, they will be passed to the binary for incremental builds. 172 + pub async fn start_build( 173 + &self, 174 + changed_paths: Option<&[PathBuf]>, 175 + ) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> { 176 + self.internal_build(false, changed_paths).await 46 177 } 47 178 48 - /// Internal build method that handles both initial and regular builds 49 - async fn internal_build(&self, is_initial: bool) -> Result<bool, Box<dyn std::error::Error>> { 179 + async fn internal_build( 180 + &self, 181 + is_initial: bool, 182 + changed_paths: Option<&[PathBuf]>, 183 + ) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> { 50 184 // Cancel any existing build immediately 51 185 let cancel = CancellationToken::new(); 52 186 { 53 - let mut current_cancel = self.current_cancel.write().await; 187 + let mut current_cancel = self.state.current_cancel.write().await; 54 188 if let Some(old_cancel) = current_cancel.replace(cancel.clone()) { 55 189 old_cancel.cancel(); 56 190 } 57 191 } 58 192 59 193 // Acquire semaphore to ensure only one build runs at a time 60 - // This prevents resource conflicts if cancellation fails 61 - let _ = self.build_semaphore.acquire().await?; 194 + let _permit = self.state.build_semaphore.acquire().await?; 62 195 63 - // Notify that build is starting 64 - update_status( 65 - &self.websocket_tx, 66 - self.current_status.clone(), 67 - StatusType::Info, 68 - "Building...", 69 - ) 70 - .await; 196 + self.state 197 + .status_manager 198 + .update(StatusType::Info, "Building...") 199 + .await; 200 + 201 + // Build environment variables 202 + let mut envs: Vec<(&str, String)> = vec![ 203 + ("MAUDIT_DEV", "true".to_string()), 204 + ("MAUDIT_QUIET", "true".to_string()), 205 + ("CARGO_TERM_COLOR", 
"always".to_string()), 206 + ]; 207 + 208 + // Add changed files if provided (for incremental builds after recompilation) 209 + if let Some(paths) = changed_paths 210 + && let Ok(json) = serde_json::to_string(paths) { 211 + debug!(name: "build", "Passing MAUDIT_CHANGED_FILES to cargo: {}", json); 212 + envs.push(("MAUDIT_CHANGED_FILES", json)); 213 + } 71 214 72 215 let mut child = Command::new("cargo") 73 216 .args([ ··· 76 219 "--message-format", 77 220 "json-diagnostic-rendered-ansi", 78 221 ]) 79 - .envs([ 80 - ("MAUDIT_DEV", "true"), 81 - ("MAUDIT_QUIET", "true"), 82 - ("CARGO_TERM_COLOR", "always"), 83 - ]) 222 + .envs(envs.iter().map(|(k, v)| (*k, v.as_str()))) 84 223 .stdout(std::process::Stdio::piped()) 85 224 .stderr(std::process::Stdio::piped()) 86 225 .spawn()?; 87 226 88 - // Take the stderr stream for manual handling 89 - let mut stdout = child.stdout.take().unwrap(); 90 - let mut stderr = child.stderr.take().unwrap(); 227 + // Take stdout/stderr before select! so we can use them in the completion branch 228 + // while still being able to kill the child in the cancellation branch 229 + let stdout = child.stdout.take().unwrap(); 230 + let stderr = child.stderr.take().unwrap(); 91 231 92 - let websocket_tx = self.websocket_tx.clone(); 93 - let current_status = self.current_status.clone(); 94 232 let build_start_time = Instant::now(); 95 233 96 - // Create a channel to get the build result back 97 - let (result_tx, mut result_rx) = tokio::sync::mpsc::channel::<bool>(1); 234 + tokio::select! 
{ 235 + _ = cancel.cancelled() => { 236 + debug!(name: "build", "Build cancelled"); 237 + let _ = child.kill().await; 238 + self.state.status_manager.update(StatusType::Info, "Build cancelled").await; 239 + Ok(false) 240 + } 241 + result = self.run_build_to_completion(&mut child, stdout, stderr, is_initial, build_start_time) => { 242 + result 243 + } 244 + } 245 + } 98 246 99 - // Spawn watcher task to monitor the child process 100 - tokio::spawn(async move { 101 - let output_future = async { 102 - // Read stdout concurrently with waiting for process to finish 103 - let stdout_task = tokio::spawn(async move { 104 - let mut out = Vec::new(); 105 - tokio::io::copy(&mut stdout, &mut out).await.unwrap_or(0); 247 + /// Run the cargo build process to completion and handle the output. 248 + async fn run_build_to_completion( 249 + &self, 250 + child: &mut tokio::process::Child, 251 + mut stdout: tokio::process::ChildStdout, 252 + mut stderr: tokio::process::ChildStderr, 253 + is_initial: bool, 254 + build_start_time: Instant, 255 + ) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> { 256 + // Read stdout and stderr concurrently 257 + let stdout_task = tokio::spawn(async move { 258 + let mut out = Vec::new(); 259 + tokio::io::copy(&mut stdout, &mut out).await.unwrap_or(0); 106 260 107 - let mut rendered_messages: Vec<String> = Vec::new(); 261 + let mut rendered_messages: Vec<String> = Vec::new(); 108 262 109 - // Ideally we'd stream things as they come, but I can't figure it out 110 - for message in cargo_metadata::Message::parse_stream( 111 - String::from_utf8_lossy(&out).to_string().as_bytes(), 112 - ) { 113 - match message { 114 - Err(e) => { 115 - error!(name: "build", "Failed to parse cargo message: {}", e); 116 - continue; 117 - } 118 - Ok(message) => { 119 - match message { 120 - // Compiler wants to tell us something 121 - Message::CompilerMessage(msg) => { 122 - // TODO: For now, just send through the rendered messages, but in the future let's send 123 - 
// structured messages to the frontend so we can do better formatting 124 - if let Some(rendered) = &msg.message.rendered { 125 - info!("{}", rendered); 126 - rendered_messages.push(rendered.to_string()); 127 - } 128 - } 129 - // Random text came in, just log it 130 - Message::TextLine(msg) => { 131 - info!("{}", msg); 132 - } 133 - _ => {} 134 - } 135 - } 263 + for message in cargo_metadata::Message::parse_stream( 264 + String::from_utf8_lossy(&out).to_string().as_bytes(), 265 + ) { 266 + match message { 267 + Err(e) => { 268 + error!(name: "build", "Failed to parse cargo message: {}", e); 269 + } 270 + Ok(Message::CompilerMessage(msg)) => { 271 + if let Some(rendered) = &msg.message.rendered { 272 + info!("{}", rendered); 273 + rendered_messages.push(rendered.to_string()); 136 274 } 137 275 } 276 + Ok(Message::TextLine(msg)) => { 277 + info!("{}", msg); 278 + } 279 + _ => {} 280 + } 281 + } 138 282 139 - (out, rendered_messages) 140 - }); 283 + (out, rendered_messages) 284 + }); 141 285 142 - let stderr_task = tokio::spawn(async move { 143 - let mut err = Vec::new(); 144 - tokio::io::copy(&mut stderr, &mut err).await.unwrap_or(0); 286 + let stderr_task = tokio::spawn(async move { 287 + let mut err = Vec::new(); 288 + tokio::io::copy(&mut stderr, &mut err).await.unwrap_or(0); 289 + err 290 + }); 145 291 146 - err 147 - }); 292 + let status = child.wait().await?; 293 + let (_stdout_bytes, rendered_messages) = stdout_task.await.unwrap_or_default(); 294 + let stderr_bytes = stderr_task.await.unwrap_or_default(); 148 295 149 - let status = child.wait().await?; 150 - let stdout_data = stdout_task.await.unwrap_or_default(); 151 - let stderr_data = stderr_task.await.unwrap_or_default(); 296 + let duration = build_start_time.elapsed(); 297 + let formatted_elapsed_time = 298 + format_elapsed_time(duration, &FormatElapsedTimeOptions::default_dev()); 152 299 153 - Ok::<(std::process::Output, Vec<String>), Box<dyn std::error::Error + Send + Sync>>( 154 - ( 155 - 
std::process::Output { 156 - status, 157 - stdout: stdout_data.0, 158 - stderr: stderr_data, 159 - }, 160 - stdout_data.1, 161 - ), 162 - ) 300 + if status.success() { 301 + let build_type = if is_initial { 302 + "Initial build" 303 + } else { 304 + "Rebuild" 163 305 }; 306 + info!(name: "build", "{} finished {}", build_type, formatted_elapsed_time); 307 + self.state 308 + .status_manager 309 + .update(StatusType::Success, "Build finished successfully") 310 + .await; 164 311 165 - tokio::select! { 166 - _ = cancel.cancelled() => { 167 - debug!(name: "build", "Build cancelled"); 168 - let _ = child.kill().await; 169 - update_status(&websocket_tx, current_status, StatusType::Info, "Build cancelled").await; 170 - let _ = result_tx.send(false).await; // Build failed due to cancellation 171 - } 172 - res = output_future => { 173 - let duration = build_start_time.elapsed(); 174 - let formatted_elapsed_time = format_elapsed_time( 175 - duration, 176 - &FormatElapsedTimeOptions::default_dev(), 177 - ); 312 + self.update_dependency_tracker().await; 178 313 179 - let success = match res { 180 - Ok(output) => { 181 - let (output, rendered_messages) = output; 182 - if output.status.success() { 183 - let build_type = if is_initial { "Initial build" } else { "Rebuild" }; 184 - info!(name: "build", "{} finished {}", build_type, formatted_elapsed_time); 185 - update_status(&websocket_tx, current_status, StatusType::Success, "Build finished successfully").await; 186 - true 187 - } else { 188 - let stderr = String::from_utf8_lossy(&output.stderr).to_string(); 189 - println!("{}", stderr); // Raw stderr sometimes has something to say whenever cargo fails, even if the errors messages are actually in stdout 190 - let build_type = if is_initial { "Initial build" } else { "Rebuild" }; 191 - error!(name: "build", "{} failed with errors {}", build_type, formatted_elapsed_time); 192 - if is_initial { 193 - error!(name: "build", "Initial build needs to succeed before we can start the dev 
server"); 194 - update_status(&websocket_tx, current_status, StatusType::Error, "Initial build failed - fix errors and save to retry").await; 195 - } else { 196 - update_status(&websocket_tx, current_status, StatusType::Error, &rendered_messages.join("\n")).await; 197 - } 198 - false 199 - } 200 - } 201 - Err(e) => { 202 - error!(name: "build", "Failed to wait for build: {}", e); 203 - update_status(&websocket_tx, current_status, StatusType::Error, &format!("Failed to wait for build: {}", e)).await; 204 - false 205 - } 206 - }; 207 - let _ = result_tx.send(success).await; 208 - } 314 + Ok(true) 315 + } else { 316 + let stderr_str = String::from_utf8_lossy(&stderr_bytes).to_string(); 317 + // Raw stderr sometimes has something to say whenever cargo fails 318 + println!("{}", stderr_str); 319 + 320 + let build_type = if is_initial { 321 + "Initial build" 322 + } else { 323 + "Rebuild" 324 + }; 325 + error!(name: "build", "{} failed with errors {}", build_type, formatted_elapsed_time); 326 + 327 + if is_initial { 328 + error!(name: "build", "Initial build needs to succeed before we can start the dev server"); 329 + self.state 330 + .status_manager 331 + .update( 332 + StatusType::Error, 333 + "Initial build failed - fix errors and save to retry", 334 + ) 335 + .await; 336 + } else { 337 + self.state 338 + .status_manager 339 + .update(StatusType::Error, &rendered_messages.join("\n")) 340 + .await; 209 341 } 210 - }); 342 + 343 + Ok(false) 344 + } 345 + } 346 + 347 + /// Update the dependency tracker after a successful build. 
348 + async fn update_dependency_tracker(&self) { 349 + let Some(ref name) = self.state.binary_name else { 350 + debug!(name: "build", "No binary name available, skipping dependency tracker update"); 351 + return; 352 + }; 353 + 354 + let Some(ref target) = self.state.target_dir else { 355 + debug!(name: "build", "No target directory available, skipping dependency tracker update"); 356 + return; 357 + }; 358 + 359 + // Update binary path 360 + let bin_path = target.join(name); 361 + if bin_path.exists() { 362 + *self.state.binary_path.write().await = Some(bin_path.clone()); 363 + debug!(name: "build", "Binary path set to: {:?}", bin_path); 364 + } else { 365 + debug!(name: "build", "Binary not found at expected path: {:?}", bin_path); 366 + } 367 + 368 + // Reload the dependency tracker from the .d file 369 + match DependencyTracker::load_from_binary_name(name) { 370 + Ok(tracker) => { 371 + debug!(name: "build", "Loaded {} dependencies for tracking", tracker.get_dependencies().len()); 372 + *self.state.dep_tracker.write().await = Some(tracker); 373 + } 374 + Err(e) => { 375 + debug!(name: "build", "Could not load dependency tracker: {}", e); 376 + } 377 + } 378 + } 379 + 380 + fn get_binary_name_from_cargo_toml() -> Result<String, Box<dyn std::error::Error + Send + Sync>> 381 + { 382 + let cargo_toml_path = PathBuf::from("Cargo.toml"); 383 + if !cargo_toml_path.exists() { 384 + return Err("Cargo.toml not found in current directory".into()); 385 + } 386 + 387 + let cargo_toml_content = std::fs::read_to_string(&cargo_toml_path)?; 388 + let cargo_toml: toml::Value = toml::from_str(&cargo_toml_content)?; 389 + 390 + if let Some(package_name) = cargo_toml 391 + .get("package") 392 + .and_then(|p| p.get("name")) 393 + .and_then(|n| n.as_str()) 394 + { 395 + // Check if there's a [[bin]] section with a different name 396 + if let Some(bins) = cargo_toml.get("bin").and_then(|b| b.as_array()) 397 + && let Some(first_bin) = bins.first() 398 + && let Some(bin_name) = 
first_bin.get("name").and_then(|n| n.as_str()) 399 + { 400 + return Ok(bin_name.to_string()); 401 + } 402 + 403 + return Ok(package_name.to_string()); 404 + } 405 + 406 + Err("Could not find package name in Cargo.toml".into()) 407 + } 408 + 409 + /// Set the dependency tracker directly (for testing). 410 + #[cfg(test)] 411 + pub(crate) async fn set_dep_tracker(&self, tracker: Option<DependencyTracker>) { 412 + *self.state.dep_tracker.write().await = tracker; 413 + } 414 + 415 + /// Set the binary path directly (for testing). 416 + #[cfg(test)] 417 + pub(crate) async fn set_binary_path(&self, path: Option<PathBuf>) { 418 + *self.state.binary_path.write().await = path; 419 + } 420 + 421 + /// Get the current binary path (for testing). 422 + #[cfg(test)] 423 + pub(crate) async fn get_binary_path(&self) -> Option<PathBuf> { 424 + self.state.binary_path.read().await.clone() 425 + } 426 + 427 + /// Create a BuildManager with custom target_dir and binary_name (for testing). 428 + #[cfg(test)] 429 + pub(crate) fn new_with_config( 430 + status_manager: StatusManager, 431 + target_dir: Option<PathBuf>, 432 + binary_name: Option<String>, 433 + ) -> Self { 434 + Self { 435 + state: Arc::new(BuildManagerState { 436 + current_cancel: RwLock::new(None), 437 + build_semaphore: tokio::sync::Semaphore::new(1), 438 + status_manager, 439 + dep_tracker: RwLock::new(None), 440 + binary_path: RwLock::new(None), 441 + target_dir, 442 + binary_name, 443 + }), 444 + } 445 + } 446 + } 447 + 448 + #[cfg(test)] 449 + mod tests { 450 + use super::*; 451 + use std::collections::HashMap; 452 + use std::time::SystemTime; 453 + use tempfile::TempDir; 454 + 455 + fn create_test_manager() -> BuildManager { 456 + let status_manager = StatusManager::new(); 457 + BuildManager::new_with_config(status_manager, None, None) 458 + } 459 + 460 + fn create_test_manager_with_config( 461 + target_dir: Option<PathBuf>, 462 + binary_name: Option<String>, 463 + ) -> BuildManager { 464 + let status_manager = 
StatusManager::new(); 465 + BuildManager::new_with_config(status_manager, target_dir, binary_name) 466 + } 467 + 468 + #[tokio::test] 469 + async fn test_build_manager_clone_shares_state() { 470 + let manager1 = create_test_manager(); 471 + let manager2 = manager1.clone(); 472 + 473 + // Set binary path via one clone 474 + let test_path = PathBuf::from("/test/path"); 475 + manager1.set_binary_path(Some(test_path.clone())).await; 476 + 477 + // Should be visible via the other clone 478 + assert_eq!(manager2.get_binary_path().await, Some(test_path)); 479 + } 480 + 481 + #[tokio::test] 482 + async fn test_needs_recompile_without_tracker() { 483 + let manager = create_test_manager(); 484 + 485 + // Without a dependency tracker, should always return true 486 + let changed = vec![PathBuf::from("src/main.rs")]; 487 + assert!(manager.needs_recompile(&changed).await); 488 + } 489 + 490 + #[tokio::test] 491 + async fn test_needs_recompile_with_empty_tracker() { 492 + let manager = create_test_manager(); 493 + 494 + // Set an empty tracker (no dependencies) 495 + let tracker = DependencyTracker::new(); 496 + manager.set_dep_tracker(Some(tracker)).await; 497 + 498 + // Empty tracker has no dependencies, so has_dependencies() returns false 499 + // This means we should still return true (recompile needed) 500 + let changed = vec![PathBuf::from("src/main.rs")]; 501 + assert!(manager.needs_recompile(&changed).await); 502 + } 503 + 504 + #[tokio::test] 505 + async fn test_needs_recompile_with_matching_dependency() { 506 + let manager = create_test_manager(); 507 + 508 + // Create a tracker with some dependencies 509 + let temp_dir = TempDir::new().unwrap(); 510 + let dep_file = temp_dir.path().join("src/lib.rs"); 511 + std::fs::create_dir_all(dep_file.parent().unwrap()).unwrap(); 512 + std::fs::write(&dep_file, "// test").unwrap(); 513 + 514 + // Get canonical path and current mod time 515 + let canonical_path = dep_file.canonicalize().unwrap(); 516 + let old_time = 
SystemTime::UNIX_EPOCH; // Very old time 517 + 518 + let mut tracker = DependencyTracker::new(); 519 + tracker.dependencies = HashMap::from([(canonical_path, old_time)]); 520 + 521 + manager.set_dep_tracker(Some(tracker)).await; 522 + 523 + // Changed file IS a dependency and is newer - should need recompile 524 + let changed = vec![dep_file]; 525 + assert!(manager.needs_recompile(&changed).await); 526 + } 527 + 528 + #[tokio::test] 529 + async fn test_needs_recompile_with_non_matching_file() { 530 + let manager = create_test_manager(); 531 + 532 + // Create a tracker with some dependencies 533 + let temp_dir = TempDir::new().unwrap(); 534 + let dep_file = temp_dir.path().join("src/lib.rs"); 535 + std::fs::create_dir_all(dep_file.parent().unwrap()).unwrap(); 536 + std::fs::write(&dep_file, "// test").unwrap(); 537 + 538 + let canonical_path = dep_file.canonicalize().unwrap(); 539 + let mod_time = std::fs::metadata(&dep_file).unwrap().modified().unwrap(); 540 + 541 + let mut tracker = DependencyTracker::new(); 542 + tracker.dependencies = HashMap::from([(canonical_path, mod_time)]); 543 + 544 + manager.set_dep_tracker(Some(tracker)).await; 545 + 546 + // Changed file is NOT a dependency (different file) 547 + let other_file = temp_dir.path().join("assets/style.css"); 548 + std::fs::create_dir_all(other_file.parent().unwrap()).unwrap(); 549 + std::fs::write(&other_file, "/* css */").unwrap(); 550 + 551 + let changed = vec![other_file]; 552 + assert!(!manager.needs_recompile(&changed).await); 553 + } 554 + 555 + #[tokio::test] 556 + async fn test_update_dependency_tracker_with_config_missing_binary() { 557 + let temp_dir = TempDir::new().unwrap(); 558 + let manager = create_test_manager_with_config( 559 + Some(temp_dir.path().to_path_buf()), 560 + Some("nonexistent-binary".to_string()), 561 + ); 562 + 563 + // Binary doesn't exist, so binary_path should not be set 564 + manager.update_dependency_tracker().await; 565 + 566 + 
assert!(manager.get_binary_path().await.is_none()); 567 + } 568 + 569 + #[tokio::test] 570 + async fn test_update_dependency_tracker_with_existing_binary() { 571 + let temp_dir = TempDir::new().unwrap(); 572 + let binary_name = "test-binary"; 573 + let binary_path = temp_dir.path().join(binary_name); 211 574 212 - // Wait for the build result 213 - let success = result_rx.recv().await.unwrap_or(false); 214 - Ok(success) 575 + // Create a fake binary file 576 + std::fs::write(&binary_path, "fake binary").unwrap(); 577 + 578 + let manager = create_test_manager_with_config( 579 + Some(temp_dir.path().to_path_buf()), 580 + Some(binary_name.to_string()), 581 + ); 582 + 583 + manager.update_dependency_tracker().await; 584 + 585 + // Binary path should be set 586 + assert_eq!(manager.get_binary_path().await, Some(binary_path)); 215 587 } 216 588 }
+279
crates/maudit-cli/src/dev/dep_tracker.rs
··· 1 + use depinfo::RustcDepInfo; 2 + use std::collections::HashMap; 3 + use std::fs; 4 + use std::path::{Path, PathBuf}; 5 + use std::time::SystemTime; 6 + use tracing::{debug, warn}; 7 + 8 + /// Tracks dependencies from .d files to determine if recompilation is needed 9 + #[derive(Debug, Clone)] 10 + pub struct DependencyTracker { 11 + /// Path to the .d file 12 + pub(crate) d_file_path: Option<PathBuf>, 13 + /// Map of dependency paths to their last modification times 14 + pub(crate) dependencies: HashMap<PathBuf, SystemTime>, 15 + } 16 + 17 + /// Find the target directory using multiple strategies 18 + /// 19 + /// This function tries multiple approaches to locate the target directory: 20 + /// 1. CARGO_TARGET_DIR / CARGO_BUILD_TARGET_DIR environment variables 21 + /// 2. Local ./target/debug directory 22 + /// 3. Workspace root target/debug directory (walking up to find [workspace]) 23 + /// 4. Fallback to relative "target/debug" path 24 + pub fn find_target_dir() -> Result<PathBuf, std::io::Error> { 25 + // 1. Check CARGO_TARGET_DIR and CARGO_BUILD_TARGET_DIR environment variables 26 + for env_var in ["CARGO_TARGET_DIR", "CARGO_BUILD_TARGET_DIR"] { 27 + if let Ok(target_dir) = std::env::var(env_var) { 28 + // Try with /debug appended 29 + let path = PathBuf::from(&target_dir).join("debug"); 30 + if path.exists() { 31 + debug!("Using target directory from {}: {:?}", env_var, path); 32 + return Ok(path); 33 + } 34 + // If the env var points directly to debug or release 35 + let path_no_debug = PathBuf::from(&target_dir); 36 + if path_no_debug.exists() 37 + && (path_no_debug.ends_with("debug") || path_no_debug.ends_with("release")) 38 + { 39 + debug!( 40 + "Using target directory from {} (direct): {:?}", 41 + env_var, path_no_debug 42 + ); 43 + return Ok(path_no_debug); 44 + } 45 + } 46 + } 47 + 48 + // 2. 
Look for target directory in current directory 49 + let local_target = PathBuf::from("target/debug"); 50 + if local_target.exists() { 51 + debug!("Using local target directory: {:?}", local_target); 52 + return Ok(local_target); 53 + } 54 + 55 + // 3. Try to find workspace root by looking for Cargo.toml with [workspace] 56 + let mut current = std::env::current_dir()?; 57 + loop { 58 + let cargo_toml = current.join("Cargo.toml"); 59 + if cargo_toml.exists() 60 + && let Ok(content) = fs::read_to_string(&cargo_toml) 61 + && content.contains("[workspace]") 62 + { 63 + let workspace_target = current.join("target").join("debug"); 64 + if workspace_target.exists() { 65 + debug!("Using workspace target directory: {:?}", workspace_target); 66 + return Ok(workspace_target); 67 + } 68 + } 69 + 70 + // Move up to parent directory 71 + if !current.pop() { 72 + break; 73 + } 74 + } 75 + 76 + // 4. Final fallback to relative path 77 + debug!("Falling back to relative target/debug path"); 78 + Ok(PathBuf::from("target/debug")) 79 + } 80 + 81 + impl DependencyTracker { 82 + #[allow(dead_code)] 83 + pub fn new() -> Self { 84 + Self { 85 + d_file_path: None, 86 + dependencies: HashMap::new(), 87 + } 88 + } 89 + 90 + /// Locate and load the .d file for the current binary 91 + /// The .d file is typically at target/debug/<binary-name>.d 92 + pub fn load_from_binary_name(binary_name: &str) -> Result<Self, std::io::Error> { 93 + let target_dir = find_target_dir()?; 94 + let d_file_path = target_dir.join(format!("{}.d", binary_name)); 95 + 96 + if !d_file_path.exists() { 97 + return Err(std::io::Error::new( 98 + std::io::ErrorKind::NotFound, 99 + format!(".d file not found at {:?}", d_file_path), 100 + )); 101 + } 102 + 103 + let mut tracker = Self { 104 + d_file_path: Some(d_file_path.clone()), 105 + dependencies: HashMap::new(), 106 + }; 107 + 108 + tracker.reload_dependencies()?; 109 + Ok(tracker) 110 + } 111 + 112 + /// Reload dependencies from the .d file using the depinfo crate 113 
+ pub fn reload_dependencies(&mut self) -> Result<(), std::io::Error> { 114 + let Some(d_file_path) = &self.d_file_path else { 115 + return Err(std::io::Error::new( 116 + std::io::ErrorKind::NotFound, 117 + "No .d file path set", 118 + )); 119 + }; 120 + 121 + let dep_info = RustcDepInfo::from_file(d_file_path).map_err(|e| { 122 + warn!("Failed to parse .d file at {:?}: {}", d_file_path, e); 123 + std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string()) 124 + })?; 125 + 126 + // Clear old dependencies and load new ones with their modification times 127 + self.dependencies.clear(); 128 + 129 + for dep_path in dep_info.files { 130 + match fs::metadata(&dep_path) { 131 + Ok(metadata) => { 132 + if let Ok(modified) = metadata.modified() { 133 + self.dependencies.insert(dep_path.clone(), modified); 134 + debug!("Tracking dependency: {:?}", dep_path); 135 + } 136 + } 137 + Err(e) => { 138 + // Dependency file doesn't exist or can't be read - this is okay, 139 + // it might have been deleted or moved 140 + debug!("Could not read dependency {:?}: {}", dep_path, e); 141 + } 142 + } 143 + } 144 + 145 + debug!( 146 + "Loaded {} dependencies from {:?}", 147 + self.dependencies.len(), 148 + d_file_path 149 + ); 150 + Ok(()) 151 + } 152 + 153 + /// Check if any of the given paths require recompilation 154 + /// Returns true if any path is a tracked dependency that has been modified 155 + pub fn needs_recompile(&self, changed_paths: &[PathBuf]) -> bool { 156 + for changed_path in changed_paths { 157 + // Normalize the changed path to handle relative vs absolute paths 158 + let changed_path_canonical = changed_path.canonicalize().ok(); 159 + 160 + for (dep_path, last_modified) in &self.dependencies { 161 + // Try to match both exact path and canonical path 162 + let matches = changed_path == dep_path 163 + || changed_path_canonical.as_ref() == Some(dep_path) 164 + || dep_path.canonicalize().ok().as_ref() == changed_path_canonical.as_ref(); 165 + 166 + if matches { 167 + 
// Check if the file was modified after we last tracked it 168 + if let Ok(metadata) = fs::metadata(changed_path) { 169 + if let Ok(current_modified) = metadata.modified() 170 + && current_modified > *last_modified 171 + { 172 + debug!( 173 + "Dependency {:?} was modified, recompile needed", 174 + changed_path 175 + ); 176 + return true; 177 + } 178 + } else { 179 + // File was deleted or can't be read, assume recompile is needed 180 + debug!( 181 + "Dependency {:?} no longer exists, recompile needed", 182 + changed_path 183 + ); 184 + return true; 185 + } 186 + } 187 + } 188 + } 189 + 190 + false 191 + } 192 + 193 + /// Get the list of tracked dependency paths 194 + pub fn get_dependencies(&self) -> Vec<&Path> { 195 + self.dependencies.keys().map(|p| p.as_path()).collect() 196 + } 197 + 198 + /// Check if we have any dependencies loaded 199 + pub fn has_dependencies(&self) -> bool { 200 + !self.dependencies.is_empty() 201 + } 202 + } 203 + 204 + #[cfg(test)] 205 + mod tests { 206 + use super::*; 207 + use std::fs; 208 + use std::io::Write; 209 + use tempfile::TempDir; 210 + 211 + #[test] 212 + fn test_parse_d_file() { 213 + let temp_dir = TempDir::new().unwrap(); 214 + let d_file_path = temp_dir.path().join("test.d"); 215 + 216 + // Create a mock .d file 217 + let mut d_file = fs::File::create(&d_file_path).unwrap(); 218 + writeln!( 219 + d_file, 220 + "/path/to/target: /path/to/dep1.rs /path/to/dep2.rs \\" 221 + ) 222 + .unwrap(); 223 + writeln!(d_file, " /path/to/dep3.rs").unwrap(); 224 + 225 + // Create a tracker and point it to our test file 226 + let mut tracker = DependencyTracker::new(); 227 + tracker.d_file_path = Some(d_file_path); 228 + 229 + // This will fail to load the actual files, but we can check the parsing logic 230 + let _ = tracker.reload_dependencies(); 231 + 232 + // We won't have any dependencies because the files don't exist, 233 + // but we've verified the parsing doesn't crash 234 + } 235 + 236 + #[test] 237 + fn 
test_parse_d_file_with_spaces() { 238 + let temp_dir = TempDir::new().unwrap(); 239 + let d_file_path = temp_dir.path().join("test_spaces.d"); 240 + 241 + // Create actual test files with spaces in names 242 + let dep_with_space = temp_dir.path().join("my file.rs"); 243 + fs::write(&dep_with_space, "// test").unwrap(); 244 + 245 + let normal_dep = temp_dir.path().join("normal.rs"); 246 + fs::write(&normal_dep, "// test").unwrap(); 247 + 248 + // Create a mock .d file with escaped spaces (Make format) 249 + let mut d_file = fs::File::create(&d_file_path).unwrap(); 250 + writeln!( 251 + d_file, 252 + "/path/to/target: {} {}", 253 + dep_with_space.to_str().unwrap().replace(' ', "\\ "), 254 + normal_dep.to_str().unwrap() 255 + ) 256 + .unwrap(); 257 + 258 + let mut tracker = DependencyTracker::new(); 259 + tracker.d_file_path = Some(d_file_path); 260 + 261 + // Load dependencies 262 + tracker.reload_dependencies().unwrap(); 263 + 264 + // Should have successfully parsed both files 265 + assert!(tracker.has_dependencies()); 266 + let deps = tracker.get_dependencies(); 267 + assert_eq!(deps.len(), 2); 268 + assert!( 269 + deps.iter() 270 + .any(|p| p.to_str().unwrap().contains("my file.rs")), 271 + "Should contain file with space" 272 + ); 273 + assert!( 274 + deps.iter() 275 + .any(|p| p.to_str().unwrap().contains("normal.rs")), 276 + "Should contain normal file" 277 + ); 278 + } 279 + }
+223 -64
crates/maudit-cli/src/dev/server.rs
··· 64 64 pub message: String, 65 65 } 66 66 67 + /// Manages status updates and WebSocket broadcasting. 68 + /// Cheap to clone - all clones share the same underlying state. 67 69 #[derive(Clone)] 68 - struct AppState { 70 + pub struct StatusManager { 69 71 tx: broadcast::Sender<WebSocketMessage>, 70 72 current_status: Arc<RwLock<Option<PersistentStatus>>>, 73 + } 74 + 75 + impl StatusManager { 76 + pub fn new() -> Self { 77 + let (tx, _) = broadcast::channel::<WebSocketMessage>(100); 78 + Self { 79 + tx, 80 + current_status: Arc::new(RwLock::new(None)), 81 + } 82 + } 83 + 84 + /// Update the status and broadcast to all connected WebSocket clients. 85 + pub async fn update(&self, status_type: StatusType, message: &str) { 86 + // Only store persistent states (Success clears errors, Error stores the error) 87 + let persistent_status = match status_type { 88 + StatusType::Success => None, // Clear any error state 89 + StatusType::Error => Some(PersistentStatus { 90 + status_type: StatusType::Error, 91 + message: message.to_string(), 92 + }), 93 + // Everything else just keeps the current state 94 + _ => { 95 + let status = self.current_status.read().await; 96 + status.clone() // Keep existing persistent state 97 + } 98 + }; 99 + 100 + // Update the stored status 101 + { 102 + let mut status = self.current_status.write().await; 103 + *status = persistent_status; 104 + } 105 + 106 + // Send the message to all connected clients 107 + let _ = self.tx.send(WebSocketMessage { 108 + data: json!({ 109 + "type": status_type.to_string(), 110 + "message": message 111 + }) 112 + .to_string(), 113 + }); 114 + } 115 + 116 + /// Subscribe to WebSocket messages (for new connections). 117 + pub fn subscribe(&self) -> broadcast::Receiver<WebSocketMessage> { 118 + self.tx.subscribe() 119 + } 120 + 121 + /// Get the current persistent status (for new connections). 
122 + pub async fn get_current(&self) -> Option<PersistentStatus> { 123 + self.current_status.read().await.clone() 124 + } 125 + 126 + /// Send a raw WebSocket message (for initial errors, etc.). 127 + pub fn send_raw(&self, message: WebSocketMessage) { 128 + let _ = self.tx.send(message); 129 + } 130 + } 131 + 132 + impl Default for StatusManager { 133 + fn default() -> Self { 134 + Self::new() 135 + } 136 + } 137 + 138 + #[derive(Clone)] 139 + struct AppState { 140 + status_manager: StatusManager, 71 141 } 72 142 73 143 fn inject_live_reload_script(html_content: &str, socket_addr: SocketAddr, host: bool) -> String { ··· 93 163 94 164 pub async fn start_dev_web_server( 95 165 start_time: Instant, 96 - tx: broadcast::Sender<WebSocketMessage>, 166 + status_manager: StatusManager, 97 167 host: bool, 98 168 port: Option<u16>, 99 169 initial_error: Option<String>, 100 - current_status: Arc<RwLock<Option<PersistentStatus>>>, 101 170 ) { 102 171 // TODO: The dist dir should be configurable 103 172 let dist_dir = "dist"; 104 173 105 174 // Send initial error if present 106 175 if let Some(error) = initial_error { 107 - let _ = tx.send(WebSocketMessage { 176 + status_manager.send_raw(WebSocketMessage { 108 177 data: json!({ 109 178 "type": StatusType::Error.to_string(), 110 179 "message": error ··· 172 241 .on_response(CustomOnResponse), 173 242 ) 174 243 .with_state(AppState { 175 - tx: tx.clone(), 176 - current_status: current_status.clone(), 244 + status_manager: status_manager.clone(), 177 245 }); 178 246 179 247 log_server_start( ··· 192 260 .unwrap(); 193 261 } 194 262 195 - pub async fn update_status( 196 - tx: &broadcast::Sender<WebSocketMessage>, 197 - current_status: Arc<RwLock<Option<PersistentStatus>>>, 198 - status_type: StatusType, 199 - message: &str, 200 - ) { 201 - // Only store persistent states (Success clears errors, Error stores the error) 202 - let persistent_status = match status_type { 203 - StatusType::Success => None, // Clear any error state 204 
- StatusType::Error => Some(PersistentStatus { 205 - status_type: StatusType::Error, 206 - message: message.to_string(), 207 - }), 208 - // Everything else just keeps the current state 209 - _ => { 210 - let status = current_status.read().await; 211 - status.clone() // Keep existing persistent state 212 - } 213 - }; 214 - 215 - // Update the stored status 216 - { 217 - let mut status = current_status.write().await; 218 - *status = persistent_status; 219 - } 220 - 221 - // Send the message to all connected clients 222 - let _ = tx.send(WebSocketMessage { 223 - data: json!({ 224 - "type": status_type.to_string(), 225 - "message": message 226 - }) 227 - .to_string(), 228 - }); 229 - } 230 - 231 263 async fn add_dev_client_script( 232 264 req: Request, 233 265 next: Next, ··· 311 343 debug!("`{addr} connected."); 312 344 // finalize the upgrade process by returning upgrade callback. 313 345 // we can customize the callback by sending additional info such as address. 314 - ws.on_upgrade(move |socket| handle_socket(socket, addr, state.tx, state.current_status)) 346 + ws.on_upgrade(move |socket| handle_socket(socket, addr, state.status_manager)) 315 347 } 316 348 317 - async fn handle_socket( 318 - socket: WebSocket, 319 - who: SocketAddr, 320 - tx: broadcast::Sender<WebSocketMessage>, 321 - current_status: Arc<RwLock<Option<PersistentStatus>>>, 322 - ) { 349 + async fn handle_socket(socket: WebSocket, who: SocketAddr, status_manager: StatusManager) { 323 350 let (mut sender, mut receiver) = socket.split(); 324 351 325 352 // Send current persistent status to new connection if there is one 326 - { 327 - let status = current_status.read().await; 328 - if let Some(persistent_status) = status.as_ref() { 329 - let _ = sender 330 - .send(Message::Text( 331 - json!({ 332 - "type": persistent_status.status_type.to_string(), 333 - "message": persistent_status.message 334 - }) 335 - .to_string() 336 - .into(), 337 - )) 338 - .await; 339 - } 353 + if let Some(persistent_status) = 
status_manager.get_current().await { 354 + let _ = sender 355 + .send(Message::Text( 356 + json!({ 357 + "type": persistent_status.status_type.to_string(), 358 + "message": persistent_status.message 359 + }) 360 + .to_string() 361 + .into(), 362 + )) 363 + .await; 340 364 } 341 365 342 - let mut rx = tx.subscribe(); 366 + let mut rx = status_manager.subscribe(); 343 367 344 368 tokio::select! { 345 369 _ = async { ··· 387 411 _ = terminate => {}, 388 412 } 389 413 } 414 + 415 + #[cfg(test)] 416 + mod tests { 417 + use super::*; 418 + 419 + #[tokio::test] 420 + async fn test_status_manager_update_error_persists() { 421 + let manager = StatusManager::new(); 422 + 423 + manager 424 + .update(StatusType::Error, "Something went wrong") 425 + .await; 426 + 427 + let status = manager.get_current().await; 428 + assert!(status.is_some()); 429 + let status = status.unwrap(); 430 + assert!(matches!(status.status_type, StatusType::Error)); 431 + assert_eq!(status.message, "Something went wrong"); 432 + } 433 + 434 + #[tokio::test] 435 + async fn test_status_manager_update_success_clears_error() { 436 + let manager = StatusManager::new(); 437 + 438 + // First set an error 439 + manager.update(StatusType::Error, "Build failed").await; 440 + assert!(manager.get_current().await.is_some()); 441 + 442 + // Then send success - should clear the error 443 + manager.update(StatusType::Success, "Build succeeded").await; 444 + assert!(manager.get_current().await.is_none()); 445 + } 446 + 447 + #[tokio::test] 448 + async fn test_status_manager_update_info_preserves_state() { 449 + let manager = StatusManager::new(); 450 + 451 + // Set an error 452 + manager.update(StatusType::Error, "Build failed").await; 453 + let original_status = manager.get_current().await; 454 + assert!(original_status.is_some()); 455 + 456 + // Send info - should preserve the error state 457 + manager.update(StatusType::Info, "Building...").await; 458 + let status = manager.get_current().await; 459 + 
assert!(status.is_some()); 460 + assert_eq!(status.unwrap().message, "Build failed"); 461 + } 462 + 463 + #[tokio::test] 464 + async fn test_status_manager_update_info_when_no_error() { 465 + let manager = StatusManager::new(); 466 + 467 + // No prior state 468 + assert!(manager.get_current().await.is_none()); 469 + 470 + // Send info - should remain None 471 + manager.update(StatusType::Info, "Building...").await; 472 + assert!(manager.get_current().await.is_none()); 473 + } 474 + 475 + #[tokio::test] 476 + async fn test_status_manager_subscribe_receives_messages() { 477 + let manager = StatusManager::new(); 478 + let mut rx = manager.subscribe(); 479 + 480 + manager.update(StatusType::Info, "Hello").await; 481 + 482 + let msg = rx.try_recv(); 483 + assert!(msg.is_ok()); 484 + let msg = msg.unwrap(); 485 + assert!(msg.data.contains("Hello")); 486 + assert!(msg.data.contains("info")); 487 + } 488 + 489 + #[tokio::test] 490 + async fn test_status_manager_multiple_subscribers() { 491 + let manager = StatusManager::new(); 492 + let mut rx1 = manager.subscribe(); 493 + let mut rx2 = manager.subscribe(); 494 + 495 + manager.update(StatusType::Success, "Done").await; 496 + 497 + // Both subscribers should receive the message 498 + assert!(rx1.try_recv().is_ok()); 499 + assert!(rx2.try_recv().is_ok()); 500 + } 501 + 502 + #[tokio::test] 503 + async fn test_status_manager_send_raw() { 504 + let manager = StatusManager::new(); 505 + let mut rx = manager.subscribe(); 506 + 507 + manager.send_raw(WebSocketMessage { 508 + data: r#"{"custom": "message"}"#.to_string(), 509 + }); 510 + 511 + let msg = rx.try_recv(); 512 + assert!(msg.is_ok()); 513 + assert_eq!(msg.unwrap().data, r#"{"custom": "message"}"#); 514 + } 515 + 516 + #[tokio::test] 517 + async fn test_status_manager_clone_shares_state() { 518 + let manager1 = StatusManager::new(); 519 + let manager2 = manager1.clone(); 520 + 521 + // Update via one clone 522 + manager1 523 + .update(StatusType::Error, "Error from clone 
1") 524 + .await; 525 + 526 + // Should be visible via the other clone 527 + let status = manager2.get_current().await; 528 + assert!(status.is_some()); 529 + assert_eq!(status.unwrap().message, "Error from clone 1"); 530 + } 531 + 532 + #[tokio::test] 533 + async fn test_status_manager_clone_shares_broadcast() { 534 + let manager1 = StatusManager::new(); 535 + let manager2 = manager1.clone(); 536 + 537 + // Subscribe via one clone 538 + let mut rx = manager2.subscribe(); 539 + 540 + // Send via the other clone 541 + manager1.update(StatusType::Info, "From clone 1").await; 542 + 543 + // Should receive the message 544 + let msg = rx.try_recv(); 545 + assert!(msg.is_ok()); 546 + assert!(msg.unwrap().data.contains("From clone 1")); 547 + } 548 + }
+66 -27
crates/maudit-cli/src/dev.rs
··· 1 1 pub(crate) mod server; 2 2 3 3 mod build; 4 + mod dep_tracker; 4 5 mod filterer; 5 6 6 7 use notify::{ ··· 9 10 }; 10 11 use notify_debouncer_full::{DebounceEventResult, DebouncedEvent, new_debouncer}; 11 12 use quanta::Instant; 12 - use server::WebSocketMessage; 13 - use std::{fs, path::Path}; 14 - use tokio::{ 15 - signal, 16 - sync::{broadcast, mpsc::channel}, 17 - task::JoinHandle, 13 + use server::StatusManager; 14 + use std::{ 15 + fs, 16 + path::{Path, PathBuf}, 18 17 }; 18 + use tokio::{signal, sync::mpsc::channel, task::JoinHandle}; 19 19 use tracing::{error, info}; 20 20 21 21 use crate::dev::build::BuildManager; 22 22 23 - pub async fn start_dev_env(cwd: &str, host: bool, port: Option<u16>) -> Result<(), Box<dyn std::error::Error>> { 23 + pub async fn start_dev_env( 24 + cwd: &str, 25 + host: bool, 26 + port: Option<u16>, 27 + ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> { 24 28 let start_time = Instant::now(); 25 29 info!(name: "dev", "Preparing dev environmentโ€ฆ"); 26 30 27 - let (sender_websocket, _) = broadcast::channel::<WebSocketMessage>(100); 31 + // Create status manager (handles WebSocket communication) 32 + let status_manager = StatusManager::new(); 28 33 29 - // Create build manager (it will create its own status state internally) 30 - let build_manager = BuildManager::new(sender_websocket.clone()); 34 + // Create build manager 35 + let build_manager = BuildManager::new(status_manager.clone()); 31 36 32 37 // Do initial build 33 38 info!(name: "build", "Doing initial buildโ€ฆ"); ··· 48 53 .collect::<Vec<_>>(); 49 54 50 55 let mut debouncer = new_debouncer( 51 - std::time::Duration::from_millis(100), 56 + std::time::Duration::from_millis(200), // Longer debounce to better batch rapid file changes 52 57 None, 53 58 move |result: DebounceEventResult| { 54 59 tx.blocking_send(result).unwrap_or(()); ··· 73 78 info!(name: "dev", "Starting web server..."); 74 79 web_server_thread = Some(tokio::spawn(server::start_dev_web_server( 
75 80 start_time, 76 - sender_websocket.clone(), 81 + status_manager.clone(), 77 82 host, 78 83 port, 79 84 None, 80 - build_manager.current_status(), 81 85 ))); 82 86 } 83 87 84 88 // Clone build manager for the file watcher task 85 89 let build_manager_watcher = build_manager.clone(); 86 - let sender_websocket_watcher = sender_websocket.clone(); 90 + let status_manager_watcher = status_manager.clone(); 87 91 88 92 let file_watcher_task = tokio::spawn(async move { 89 93 let mut dev_server_started = initial_build_success; ··· 147 151 dev_server_handle = 148 152 Some(tokio::spawn(server::start_dev_web_server( 149 153 start_time, 150 - sender_websocket_watcher.clone(), 154 + status_manager_watcher.clone(), 151 155 host, 152 156 port, 153 157 None, 154 - build_manager_watcher.current_status(), 155 158 ))); 156 159 } 157 160 Ok(false) => { ··· 162 165 } 163 166 } 164 167 } else { 165 - // Normal rebuild - spawn in background so file watcher can continue 166 - info!(name: "watch", "Files changed, rebuilding..."); 167 - let build_manager_clone = build_manager_watcher.clone(); 168 - tokio::spawn(async move { 169 - match build_manager_clone.start_build().await { 170 - Ok(_) => { 171 - // Build completed (success or failure already logged) 168 + // Normal rebuild - check if we need full recompilation or just rerun 169 + // Only collect paths from events that actually trigger a rebuild 170 + let mut changed_paths: Vec<PathBuf> = events.iter() 171 + .filter(|e| should_rebuild_for_event(e)) 172 + .flat_map(|e| e.paths.iter().cloned()) 173 + .collect(); 174 + 175 + // Deduplicate paths 176 + changed_paths.sort(); 177 + changed_paths.dedup(); 178 + 179 + if changed_paths.is_empty() { 180 + // No file changes, only directory changes - skip rebuild 181 + continue; 182 + } 183 + 184 + let needs_recompile = build_manager_watcher.needs_recompile(&changed_paths).await; 185 + 186 + if needs_recompile { 187 + // Need to recompile - spawn in background so file watcher can continue 188 + 
info!(name: "watch", "Files changed, rebuilding..."); 189 + let build_manager_clone = build_manager_watcher.clone(); 190 + let changed_paths_clone = changed_paths.clone(); 191 + tokio::spawn(async move { 192 + match build_manager_clone.start_build(Some(&changed_paths_clone)).await { 193 + Ok(_) => { 194 + // Build completed (success or failure already logged) 195 + } 196 + Err(e) => { 197 + error!(name: "build", "Failed to start build: {}", e); 198 + } 172 199 } 173 - Err(e) => { 174 - error!(name: "build", "Failed to start build: {}", e); 200 + }); 201 + } else { 202 + // Just rerun the binary without recompiling 203 + info!(name: "watch", "Non-dependency files changed, rerunning binary..."); 204 + let build_manager_clone = build_manager_watcher.clone(); 205 + let changed_paths_clone = changed_paths.clone(); 206 + tokio::spawn(async move { 207 + match build_manager_clone.rerun_binary(&changed_paths_clone).await { 208 + Ok(_) => { 209 + // Rerun completed (success or failure already logged) 210 + } 211 + Err(e) => { 212 + error!(name: "build", "Failed to rerun binary: {}", e); 213 + } 175 214 } 176 - } 177 - }); 215 + }); 216 + } 178 217 } 179 218 } 180 219 }
+4
crates/maudit-macros/src/lib.rs
··· 330 330 impl maudit::route::InternalRoute for #struct_name { 331 331 #route_raw_impl 332 332 333 + fn source_file(&self) -> &'static str { 334 + file!() 335 + } 336 + 333 337 #variant_method 334 338 335 339 #sitemap_method
+3 -3
crates/oubli/src/archetypes/blog.rs
··· 1 1 //! Blog archetype. 2 2 //! Represents a markdown blog archetype, with an index page and individual entry pages. 3 3 use crate::layouts::layout; 4 - use maud::{Markup, html}; 4 + use maud::{html, Markup}; 5 5 use maudit::content::markdown_entry; 6 - use maudit::route::FullRoute; 7 6 use maudit::route::prelude::*; 7 + use maudit::route::FullRoute; 8 8 9 9 pub fn blog_index_content<T: FullRoute>( 10 10 route: impl FullRoute, ··· 18 18 19 19 let markup = html! { 20 20 main { 21 - @for entry in &blog_entries.entries { 21 + @for entry in blog_entries.entries() { 22 22 a href=(route.url(&BlogEntryParams { entry: entry.id.clone() }.into())) { 23 23 h2 { (entry.data(ctx).title) } 24 24 p { (entry.data(ctx).description) }
+3
e2e/README.md
··· 13 13 ## Running Tests 14 14 15 15 The tests will automatically: 16 + 16 17 1. Build the prefetch.js bundle (via `cargo xtask build-maudit-js`) 17 18 2. Start the Maudit dev server on the test fixture site 18 19 3. Run the tests ··· 46 47 ## Features Tested 47 48 48 49 ### Basic Prefetch 50 + 49 51 - Creating link elements with `rel="prefetch"` 50 52 - Preventing duplicate prefetches 51 53 - Skipping current page prefetch 52 54 - Blocking cross-origin prefetches 53 55 54 56 ### Prerendering (Chromium only) 57 + 55 58 - Creating `<script type="speculationrules">` elements 56 59 - Different eagerness levels (immediate, eager, moderate, conservative) 57 60 - Fallback to link prefetch on non-Chromium browsers
+1
e2e/fixtures/hot-reload/data.txt
··· 1 + Test data
+1 -1
e2e/fixtures/hot-reload/src/main.rs
··· 1 - use maudit::{content_sources, coronate, routes, BuildOptions, BuildOutput}; 1 + use maudit::{BuildOptions, BuildOutput, content_sources, coronate, routes}; 2 2 3 3 mod pages { 4 4 mod index;
+10
e2e/fixtures/incremental-build/Cargo.toml
··· 1 + [package] 2 + name = "fixtures-incremental-build" 3 + version = "0.1.0" 4 + edition = "2024" 5 + publish = false 6 + 7 + [dependencies] 8 + maudit.workspace = true 9 + maud.workspace = true 10 + serde.workspace = true
+8
e2e/fixtures/incremental-build/content/articles/first-post.md
··· 1 + --- 2 + title: "First Post" 3 + description: "This is the first post" 4 + --- 5 + 6 + # First Post 7 + 8 + This is the content of the first post.
+8
e2e/fixtures/incremental-build/content/articles/second-post.md
··· 1 + --- 2 + title: "Second Post" 3 + description: "This is the second post" 4 + --- 5 + 6 + # Second Post 7 + 8 + This is the content of the second post.
+8
e2e/fixtures/incremental-build/content/articles/third-post.md
··· 1 + --- 2 + title: "Third Post" 3 + description: "This is the third post" 4 + --- 5 + 6 + # Third Post 7 + 8 + This is the content of the third post.
+3
e2e/fixtures/incremental-build/src/assets/about-content.txt
··· 1 + Learn more about us 2 + 3 + <!-- test-1-init -->
+2
e2e/fixtures/incremental-build/src/assets/about.js
··· 1 + // About script 2 + console.log("About script loaded");
e2e/fixtures/incremental-build/src/assets/bg.png

This is a binary file and will not be displayed.

+10
e2e/fixtures/incremental-build/src/assets/blog.css
··· 1 + /* Blog styles */ 2 + .blog-post { 3 + margin: 20px; 4 + } 5 + 6 + /* Background image referenced via url() - tests CSS asset dependency tracking */ 7 + .blog-header { 8 + background-image: url("./bg.png"); 9 + background-size: cover; 10 + }
+8
e2e/fixtures/incremental-build/src/assets/icons/blog-icon.css
··· 1 + /* Blog icon styles */ 2 + .blog-icon { 3 + width: 24px; 4 + height: 24px; 5 + display: inline-block; 6 + } 7 + 8 + /* init */
e2e/fixtures/incremental-build/src/assets/logo.png

This is a binary file and will not be displayed.

+5
e2e/fixtures/incremental-build/src/assets/main.js
··· 1 + // Main script 2 + import { greet } from "./utils.js"; 3 + 4 + console.log("Main script loaded"); 5 + console.log(greet("World"));
+13
e2e/fixtures/incremental-build/src/assets/styles.css
··· 1 + /* Main styles */ 2 + body { 3 + font-family: sans-serif; 4 + } 5 + /* test7 */ 6 + /* test */ 7 + /* test2 */ 8 + /* test4 */ 9 + /* change1 */ 10 + /* change1 */ 11 + /* change1 */ 12 + /* change1 */ 13 + /* change1 */
e2e/fixtures/incremental-build/src/assets/team.png

This is a binary file and will not be displayed.

+4
e2e/fixtures/incremental-build/src/assets/utils.js
··· 1 + // Utility functions 2 + export function greet(name) { 3 + return `Hello, ${name}!`; 4 + }
+12
e2e/fixtures/incremental-build/src/content.rs
··· 1 + use maudit::content::{glob_markdown, markdown_entry}; 2 + 3 + #[markdown_entry] 4 + #[derive(Debug, Clone)] 5 + pub struct ArticleContent { 6 + pub title: String, 7 + pub description: String, 8 + } 9 + 10 + pub fn load_articles() -> Vec<maudit::content::Entry<ArticleContent>> { 11 + glob_markdown("content/articles/*.md") 12 + }
+20
e2e/fixtures/incremental-build/src/main.rs
··· 1 + use maudit::{BuildOptions, BuildOutput, content_sources, coronate, routes}; 2 + 3 + mod content; 4 + mod pages; 5 + 6 + fn main() -> Result<BuildOutput, Box<dyn std::error::Error>> { 7 + coronate( 8 + routes![ 9 + pages::index::Index, 10 + pages::about::About, 11 + pages::blog::Blog, 12 + pages::articles::Articles, 13 + pages::article::Article 14 + ], 15 + content_sources![ 16 + "articles" => content::load_articles() 17 + ], 18 + BuildOptions::default(), 19 + ) 20 + }
+42
e2e/fixtures/incremental-build/src/pages/about.rs
··· 1 + use maud::html; 2 + use maudit::route::prelude::*; 3 + use std::time::{SystemTime, UNIX_EPOCH}; 4 + 5 + use super::helpers; 6 + 7 + // Include content from external file - this creates a compile-time dependency 8 + const ABOUT_CONTENT: &str = include_str!("../assets/about-content.txt"); 9 + 10 + #[route("/about")] 11 + pub struct About; 12 + 13 + impl Route for About { 14 + fn render(&self, ctx: &mut PageContext) -> impl Into<RenderResult> { 15 + let _image = ctx.assets.add_image("src/assets/team.png"); 16 + let _script = ctx.assets.add_script("src/assets/about.js"); 17 + // Shared style with index page (for testing shared assets) 18 + let _style = ctx.assets.add_style("src/assets/styles.css"); 19 + 20 + // Use shared helper function 21 + let greeting = helpers::get_greeting(); 22 + 23 + // Generate a unique build ID - uses nanoseconds for uniqueness 24 + let build_id = SystemTime::now() 25 + .duration_since(UNIX_EPOCH) 26 + .map(|d| d.as_nanos().to_string()) 27 + .unwrap_or_else(|_| "0".to_string()); 28 + 29 + html! { 30 + html { 31 + head { 32 + title { "About Page" } 33 + } 34 + body data-build-id=(build_id) { 35 + h1 id="title" { "About Us" } 36 + p id="greeting" { (greeting) } 37 + p id="content" { (ABOUT_CONTENT.trim()) } 38 + } 39 + } 40 + } 41 + } 42 + }
+56
e2e/fixtures/incremental-build/src/pages/article.rs
··· 1 + use maud::html; 2 + use maudit::route::prelude::*; 3 + use std::time::{SystemTime, UNIX_EPOCH}; 4 + 5 + use crate::content::ArticleContent; 6 + 7 + /// Dynamic route for individual articles - uses `get_entry()` which tracks only the accessed file 8 + #[route("/articles/[slug]")] 9 + pub struct Article; 10 + 11 + #[derive(Params, Clone)] 12 + pub struct ArticleParams { 13 + slug: String, 14 + } 15 + 16 + impl Route<ArticleParams> for Article { 17 + fn pages(&self, ctx: &mut DynamicRouteContext) -> Pages<ArticleParams> { 18 + let articles = ctx.content.get_source::<ArticleContent>("articles"); 19 + 20 + // into_pages tracks all files (for generating the list of pages) 21 + articles.into_pages(|entry| { 22 + Page::from_params(ArticleParams { 23 + slug: entry.id.clone(), 24 + }) 25 + }) 26 + } 27 + 28 + fn render(&self, ctx: &mut PageContext) -> impl Into<RenderResult> { 29 + let slug = ctx.params::<ArticleParams>().slug.clone(); 30 + let articles = ctx.content.get_source::<ArticleContent>("articles"); 31 + 32 + // get_entry tracks only THIS specific file 33 + let article = articles.get_entry(&slug); 34 + 35 + // Generate a unique build ID - uses nanoseconds for uniqueness 36 + let build_id = SystemTime::now() 37 + .duration_since(UNIX_EPOCH) 38 + .map(|d| d.as_nanos().to_string()) 39 + .unwrap_or_else(|_| "0".to_string()); 40 + 41 + html! { 42 + html { 43 + head { 44 + title { (article.data(ctx).title) } 45 + } 46 + body data-build-id=(build_id) { 47 + h1 id="title" { (article.data(ctx).title) } 48 + p id="description" { (article.data(ctx).description) } 49 + div id="content" { 50 + (maud::PreEscaped(article.render(ctx))) 51 + } 52 + } 53 + } 54 + } 55 + } 56 + }
+46
e2e/fixtures/incremental-build/src/pages/articles.rs
··· 1 + use maud::html; 2 + use maudit::route::prelude::*; 3 + use std::time::{SystemTime, UNIX_EPOCH}; 4 + 5 + use crate::content::ArticleContent; 6 + 7 + /// Route that lists all articles - uses `entries()` which tracks all content files 8 + #[route("/articles")] 9 + pub struct Articles; 10 + 11 + impl Route for Articles { 12 + fn render(&self, ctx: &mut PageContext) -> impl Into<RenderResult> { 13 + let articles = ctx.content.get_source::<ArticleContent>("articles"); 14 + 15 + // Using entries() tracks ALL content files in the source 16 + let article_list: Vec<_> = articles.entries().iter().collect(); 17 + 18 + // Generate a unique build ID - uses nanoseconds for uniqueness 19 + let build_id = SystemTime::now() 20 + .duration_since(UNIX_EPOCH) 21 + .map(|d| d.as_nanos().to_string()) 22 + .unwrap_or_else(|_| "0".to_string()); 23 + 24 + html! { 25 + html { 26 + head { 27 + title { "Articles" } 28 + } 29 + body data-build-id=(build_id) { 30 + h1 id="title" { "Articles" } 31 + ul id="article-list" { 32 + @for article in article_list { 33 + li { 34 + a href=(format!("/articles/{}", article.id)) { 35 + (article.data(ctx).title) 36 + } 37 + " - " 38 + (article.data(ctx).description) 39 + } 40 + } 41 + } 42 + } 43 + } 44 + } 45 + } 46 + }
+31
e2e/fixtures/incremental-build/src/pages/blog.rs
··· 1 + use maud::html; 2 + use maudit::route::prelude::*; 3 + use std::time::{SystemTime, UNIX_EPOCH}; 4 + 5 + #[route("/blog")] 6 + pub struct Blog; 7 + 8 + impl Route for Blog { 9 + fn render(&self, ctx: &mut PageContext) -> impl Into<RenderResult> { 10 + let _style = ctx.assets.add_style("src/assets/blog.css"); 11 + let _icon_style = ctx.assets.add_style("src/assets/icons/blog-icon.css"); 12 + 13 + // Generate a unique build ID - uses nanoseconds for uniqueness 14 + let build_id = SystemTime::now() 15 + .duration_since(UNIX_EPOCH) 16 + .map(|d| d.as_nanos().to_string()) 17 + .unwrap_or_else(|_| "0".to_string()); 18 + 19 + html! { 20 + html { 21 + head { 22 + title { "Blog Page" } 23 + } 24 + body data-build-id=(build_id) { 25 + h1 id="title" { "Blog" } 26 + p id="content" { "Read our latest posts" } 27 + } 28 + } 29 + } 30 + } 31 + }
+3
e2e/fixtures/incremental-build/src/pages/helpers.rs
··· 1 + pub fn get_greeting() -> &'static str { 2 + "Welcome to our site!" 3 + }
+32
e2e/fixtures/incremental-build/src/pages/index.rs
··· 1 + use maud::html; 2 + use maudit::route::prelude::*; 3 + use std::time::{SystemTime, UNIX_EPOCH}; 4 + 5 + #[route("/")] 6 + pub struct Index; 7 + 8 + impl Route for Index { 9 + fn render(&self, ctx: &mut PageContext) -> impl Into<RenderResult> { 10 + let _image = ctx.assets.add_image("src/assets/logo.png"); 11 + let _script = ctx.assets.add_script("src/assets/main.js"); 12 + let _style = ctx.assets.add_style("src/assets/styles.css"); 13 + 14 + // Generate a unique build ID - uses nanoseconds for uniqueness 15 + let build_id = SystemTime::now() 16 + .duration_since(UNIX_EPOCH) 17 + .map(|d| d.as_nanos().to_string()) 18 + .unwrap_or_else(|_| "0".to_string()); 19 + 20 + html! { 21 + html { 22 + head { 23 + title { "Home Page" } 24 + } 25 + body data-build-id=(build_id) { 26 + h1 id="title" { "Home Page" } 27 + p id="content" { "Welcome to the home page" } 28 + } 29 + } 30 + } 31 + } 32 + }
+6
e2e/fixtures/incremental-build/src/pages/mod.rs
··· 1 + pub mod about; 2 + pub mod article; 3 + pub mod articles; 4 + pub mod blog; 5 + pub mod helpers; 6 + pub mod index;
+1 -1
e2e/fixtures/prefetch-prerender/src/main.rs
··· 1 - use maudit::{content_sources, coronate, routes, BuildOptions, BuildOutput}; 1 + use maudit::{BuildOptions, BuildOutput, content_sources, coronate, routes}; 2 2 3 3 mod pages { 4 4 mod about;
+116 -8
e2e/tests/hot-reload.spec.ts
··· 12 12 13 13 test.describe.configure({ mode: "serial" }); 14 14 15 + /** 16 + * Wait for dev server to complete a build/rerun by polling logs 17 + */ 18 + async function waitForBuildComplete(devServer: any, timeoutMs = 20000): Promise<string[]> { 19 + const startTime = Date.now(); 20 + 21 + while (Date.now() - startTime < timeoutMs) { 22 + const logs = devServer.getLogs(100); 23 + const logsText = logs.join("\n").toLowerCase(); 24 + 25 + // Look for completion messages 26 + if ( 27 + logsText.includes("finished") || 28 + logsText.includes("rerun finished") || 29 + logsText.includes("build finished") 30 + ) { 31 + return logs; 32 + } 33 + 34 + // Wait 100ms before checking again 35 + await new Promise((resolve) => setTimeout(resolve, 100)); 36 + } 37 + 38 + throw new Error(`Build did not complete within ${timeoutMs}ms`); 39 + } 40 + 15 41 test.describe("Hot Reload", () => { 42 + // Increase timeout for these tests since they involve compilation 43 + test.setTimeout(60000); 44 + 16 45 const fixturePath = resolve(__dirname, "..", "fixtures", "hot-reload"); 17 46 const indexPath = resolve(fixturePath, "src", "pages", "index.rs"); 18 - let originalContent: string; 47 + const mainPath = resolve(fixturePath, "src", "main.rs"); 48 + const dataPath = resolve(fixturePath, "data.txt"); 49 + let originalIndexContent: string; 50 + let originalMainContent: string; 51 + let originalDataContent: string; 19 52 20 53 test.beforeAll(async () => { 21 54 // Save original content 22 - originalContent = readFileSync(indexPath, "utf-8"); 55 + originalIndexContent = readFileSync(indexPath, "utf-8"); 56 + originalMainContent = readFileSync(mainPath, "utf-8"); 57 + originalDataContent = readFileSync(dataPath, "utf-8"); 58 + 59 + // Ensure files are in original state 60 + writeFileSync(indexPath, originalIndexContent, "utf-8"); 61 + writeFileSync(mainPath, originalMainContent, "utf-8"); 62 + writeFileSync(dataPath, originalDataContent, "utf-8"); 23 63 }); 24 64 25 - test.afterEach(async () 
=> { 65 + test.afterEach(async ({ devServer }) => { 26 66 // Restore original content after each test 27 - writeFileSync(indexPath, originalContent, "utf-8"); 28 - // Wait a bit for the rebuild 29 - await new Promise((resolve) => setTimeout(resolve, 2000)); 67 + writeFileSync(indexPath, originalIndexContent, "utf-8"); 68 + writeFileSync(mainPath, originalMainContent, "utf-8"); 69 + writeFileSync(dataPath, originalDataContent, "utf-8"); 70 + 71 + // Only wait for build if devServer is available (startup might have failed) 72 + if (devServer) { 73 + try { 74 + devServer.clearLogs(); 75 + await waitForBuildComplete(devServer); 76 + } catch (error) { 77 + console.warn("Failed to wait for build completion in afterEach:", error); 78 + } 79 + } 30 80 }); 31 81 32 82 test.afterAll(async () => { 33 83 // Restore original content 34 - writeFileSync(indexPath, originalContent, "utf-8"); 84 + writeFileSync(indexPath, originalIndexContent, "utf-8"); 85 + writeFileSync(mainPath, originalMainContent, "utf-8"); 86 + writeFileSync(dataPath, originalDataContent, "utf-8"); 87 + }); 88 + 89 + test("should recompile when Rust code changes (dependencies)", async ({ page, devServer }) => { 90 + await page.goto(devServer.url); 91 + 92 + // Verify initial content 93 + await expect(page.locator("#title")).toHaveText("Original Title"); 94 + 95 + // Clear logs to track what happens after this point 96 + devServer.clearLogs(); 97 + 98 + // Modify main.rs - this is a tracked dependency, should trigger recompile 99 + const modifiedMain = originalMainContent.replace( 100 + "BuildOptions::default()", 101 + "BuildOptions::default() // Modified comment", 102 + ); 103 + writeFileSync(mainPath, modifiedMain, "utf-8"); 104 + 105 + // Wait for rebuild to complete 106 + const logs = await waitForBuildComplete(devServer, 20000); 107 + const logsText = logs.join("\n"); 108 + 109 + // Check logs to verify it actually recompiled (ran cargo) 110 + expect(logsText).toContain("rebuilding"); 111 + // Make sure 
it didn't just rerun the binary 112 + expect(logsText.toLowerCase()).not.toContain("rerunning binary"); 113 + }); 114 + 115 + test("should rerun without recompile when non-dependency files change", async ({ 116 + page, 117 + devServer, 118 + }) => { 119 + await page.goto(devServer.url); 120 + 121 + // Verify initial content 122 + await expect(page.locator("#title")).toHaveText("Original Title"); 123 + 124 + // Clear logs to track what happens after this point 125 + devServer.clearLogs(); 126 + 127 + // Modify data.txt - this file is NOT in the .d dependencies 128 + // So it should trigger a rerun without recompilation 129 + writeFileSync(dataPath, "Modified data", "utf-8"); 130 + 131 + // Wait for build/rerun to complete 132 + const logs = await waitForBuildComplete(devServer, 20000); 133 + const logsText = logs.join("\n"); 134 + 135 + // Should see "rerunning binary" message (case insensitive) 136 + const hasRerunMessage = logsText.toLowerCase().includes("rerunning binary"); 137 + expect(hasRerunMessage).toBe(true); 138 + 139 + // Should NOT see cargo-related rebuild messages (compiling, building crate) 140 + // Note: "Rebuilding N affected routes" is fine - that's the incremental build system 141 + expect(logsText.toLowerCase()).not.toContain("compiling"); 142 + expect(logsText.toLowerCase()).not.toContain("cargo build"); 35 143 }); 36 144 37 145 test("should show updated content after file changes", async ({ page, devServer }) => { ··· 44 152 const currentUrl = page.url(); 45 153 46 154 // Modify the file 47 - const modifiedContent = originalContent.replace( 155 + const modifiedContent = originalIndexContent.replace( 48 156 'h1 id="title" { "Original Title" }', 49 157 'h1 id="title" { "Another Update" }', 50 158 );
+1024
e2e/tests/incremental-build.spec.ts
··· 1 + import { expect } from "@playwright/test"; 2 + import { createTestWithFixture } from "./test-utils"; 3 + import { readFileSync, writeFileSync, renameSync, existsSync } from "node:fs"; 4 + import { resolve, dirname } from "node:path"; 5 + import { fileURLToPath } from "node:url"; 6 + 7 + const __filename = fileURLToPath(import.meta.url); 8 + const __dirname = dirname(__filename); 9 + 10 + // Create test instance with incremental-build fixture 11 + const test = createTestWithFixture("incremental-build"); 12 + 13 + // Run tests serially since they share state; allow retries for timing-sensitive tests 14 + test.describe.configure({ mode: "serial", retries: 2 }); 15 + 16 + /** 17 + * Wait for dev server to complete a build by polling logs. 18 + * Returns logs once build is finished. 19 + */ 20 + async function waitForBuildComplete(devServer: any, timeoutMs = 30000): Promise<string[]> { 21 + const startTime = Date.now(); 22 + const pollInterval = 50; 23 + 24 + // Phase 1: Wait for build to start 25 + while (Date.now() - startTime < timeoutMs) { 26 + const logs = devServer.getLogs(200); 27 + const logsText = logs.join("\n").toLowerCase(); 28 + 29 + if ( 30 + logsText.includes("rerunning") || 31 + logsText.includes("rebuilding") || 32 + logsText.includes("files changed") 33 + ) { 34 + break; 35 + } 36 + 37 + await new Promise((r) => setTimeout(r, pollInterval)); 38 + } 39 + 40 + // Phase 2: Wait for build to finish 41 + while (Date.now() - startTime < timeoutMs) { 42 + const logs = devServer.getLogs(200); 43 + const logsText = logs.join("\n").toLowerCase(); 44 + 45 + if ( 46 + logsText.includes("finished") || 47 + logsText.includes("rerun finished") || 48 + logsText.includes("build finished") 49 + ) { 50 + return logs; 51 + } 52 + 53 + await new Promise((r) => setTimeout(r, pollInterval)); 54 + } 55 + 56 + console.log("TIMEOUT - logs seen:", devServer.getLogs(50)); 57 + throw new Error(`Build did not complete within ${timeoutMs}ms`); 58 + } 59 + 60 + /** 61 + * 
Wait for the dev server to become idle (no builds in progress). 62 + * This polls build IDs until they stop changing. 63 + */ 64 + async function waitForIdle(htmlPaths: Record<string, string>, stableMs = 200): Promise<void> { 65 + let lastIds = recordBuildIds(htmlPaths); 66 + let stableTime = 0; 67 + 68 + while (stableTime < stableMs) { 69 + await new Promise((r) => setTimeout(r, 50)); 70 + const currentIds = recordBuildIds(htmlPaths); 71 + 72 + const allSame = Object.keys(lastIds).every( 73 + (key) => lastIds[key] === currentIds[key] 74 + ); 75 + 76 + if (allSame) { 77 + stableTime += 50; 78 + } else { 79 + stableTime = 0; 80 + lastIds = currentIds; 81 + } 82 + } 83 + } 84 + 85 + /** 86 + * Wait for a specific HTML file's build ID to change from a known value. 87 + * This is more reliable than arbitrary sleeps. 88 + */ 89 + async function waitForBuildIdChange( 90 + htmlPath: string, 91 + previousId: string | null, 92 + timeoutMs = 30000, 93 + ): Promise<string> { 94 + const startTime = Date.now(); 95 + const pollInterval = 50; 96 + 97 + while (Date.now() - startTime < timeoutMs) { 98 + const currentId = getBuildId(htmlPath); 99 + if (currentId !== null && currentId !== previousId) { 100 + // Small delay to let any concurrent writes settle 101 + await new Promise((r) => setTimeout(r, 100)); 102 + return currentId; 103 + } 104 + await new Promise((r) => setTimeout(r, pollInterval)); 105 + } 106 + 107 + throw new Error(`Build ID did not change within ${timeoutMs}ms`); 108 + } 109 + 110 + /** 111 + * Extract the build ID from an HTML file. 112 + */ 113 + function getBuildId(htmlPath: string): string | null { 114 + try { 115 + const content = readFileSync(htmlPath, "utf-8"); 116 + const match = content.match(/data-build-id="(\d+)"/); 117 + return match ? 
match[1] : null; 118 + } catch { 119 + return null; 120 + } 121 + } 122 + 123 + /** 124 + * Check if logs indicate incremental build was used 125 + */ 126 + function isIncrementalBuild(logs: string[]): boolean { 127 + return logs.join("\n").toLowerCase().includes("incremental build"); 128 + } 129 + 130 + /** 131 + * Get the number of affected routes from logs 132 + */ 133 + function getAffectedRouteCount(logs: string[]): number { 134 + const logsText = logs.join("\n"); 135 + const match = logsText.match(/Rebuilding (\d+) affected routes/i); 136 + return match ? parseInt(match[1], 10) : -1; 137 + } 138 + 139 + /** 140 + * Record build IDs for all pages 141 + */ 142 + function recordBuildIds(htmlPaths: Record<string, string>): Record<string, string | null> { 143 + const ids: Record<string, string | null> = {}; 144 + for (const [name, path] of Object.entries(htmlPaths)) { 145 + ids[name] = getBuildId(path); 146 + } 147 + return ids; 148 + } 149 + 150 + /** 151 + * Trigger a change and wait for build to complete. 152 + * Returns logs from the build. 153 + */ 154 + async function triggerAndWaitForBuild( 155 + devServer: any, 156 + modifyFn: () => void, 157 + timeoutMs = 30000, 158 + ): Promise<string[]> { 159 + devServer.clearLogs(); 160 + modifyFn(); 161 + return await waitForBuildComplete(devServer, timeoutMs); 162 + } 163 + 164 + /** 165 + * Set up incremental build state by triggering two builds. 166 + * First build establishes state, second ensures state is populated. 167 + * Returns build IDs recorded after the second build completes and server is idle. 168 + * 169 + * Note: We don't assert incremental here - the actual test will verify that. 170 + * This is because on first test run the server might still be initializing. 
171 + */ 172 + async function setupIncrementalState( 173 + devServer: any, 174 + modifyFn: (suffix: string) => void, 175 + htmlPaths: Record<string, string>, 176 + expectedChangedRoute: string, // Which route we expect to change 177 + ): Promise<Record<string, string | null>> { 178 + // First change: triggers build (establishes state) 179 + const beforeInit = getBuildId(htmlPaths[expectedChangedRoute]); 180 + await triggerAndWaitForBuild(devServer, () => modifyFn("init")); 181 + await waitForBuildIdChange(htmlPaths[expectedChangedRoute], beforeInit); 182 + 183 + // Second change: state should now exist for incremental builds 184 + const beforeSetup = getBuildId(htmlPaths[expectedChangedRoute]); 185 + await triggerAndWaitForBuild(devServer, () => modifyFn("setup")); 186 + await waitForBuildIdChange(htmlPaths[expectedChangedRoute], beforeSetup); 187 + 188 + // Wait for server to become completely idle before recording baseline 189 + await waitForIdle(htmlPaths); 190 + 191 + return recordBuildIds(htmlPaths); 192 + } 193 + 194 + test.describe("Incremental Build", () => { 195 + test.setTimeout(180000); 196 + 197 + const fixturePath = resolve(__dirname, "..", "fixtures", "incremental-build"); 198 + 199 + // Asset paths 200 + const assets = { 201 + blogCss: resolve(fixturePath, "src", "assets", "blog.css"), 202 + utilsJs: resolve(fixturePath, "src", "assets", "utils.js"), 203 + mainJs: resolve(fixturePath, "src", "assets", "main.js"), 204 + aboutJs: resolve(fixturePath, "src", "assets", "about.js"), 205 + stylesCss: resolve(fixturePath, "src", "assets", "styles.css"), 206 + logoPng: resolve(fixturePath, "src", "assets", "logo.png"), 207 + teamPng: resolve(fixturePath, "src", "assets", "team.png"), 208 + bgPng: resolve(fixturePath, "src", "assets", "bg.png"), 209 + }; 210 + 211 + // Content file paths (for granular content tracking tests) 212 + const contentFiles = { 213 + firstPost: resolve(fixturePath, "content", "articles", "first-post.md"), 214 + secondPost: 
resolve(fixturePath, "content", "articles", "second-post.md"), 215 + thirdPost: resolve(fixturePath, "content", "articles", "third-post.md"), 216 + }; 217 + 218 + // Output HTML paths 219 + const htmlPaths = { 220 + index: resolve(fixturePath, "dist", "index.html"), 221 + about: resolve(fixturePath, "dist", "about", "index.html"), 222 + blog: resolve(fixturePath, "dist", "blog", "index.html"), 223 + articles: resolve(fixturePath, "dist", "articles", "index.html"), 224 + articleFirst: resolve(fixturePath, "dist", "articles", "first-post", "index.html"), 225 + articleSecond: resolve(fixturePath, "dist", "articles", "second-post", "index.html"), 226 + articleThird: resolve(fixturePath, "dist", "articles", "third-post", "index.html"), 227 + }; 228 + 229 + // Original content storage 230 + const originals: Record<string, string | Buffer> = {}; 231 + 232 + test.beforeAll(async () => { 233 + // Store original content for all assets we might modify 234 + originals.blogCss = readFileSync(assets.blogCss, "utf-8"); 235 + originals.utilsJs = readFileSync(assets.utilsJs, "utf-8"); 236 + originals.mainJs = readFileSync(assets.mainJs, "utf-8"); 237 + originals.aboutJs = readFileSync(assets.aboutJs, "utf-8"); 238 + originals.stylesCss = readFileSync(assets.stylesCss, "utf-8"); 239 + originals.logoPng = readFileSync(assets.logoPng); // binary 240 + originals.teamPng = readFileSync(assets.teamPng); // binary 241 + originals.bgPng = readFileSync(assets.bgPng); // binary 242 + // Content files 243 + originals.firstPost = readFileSync(contentFiles.firstPost, "utf-8"); 244 + originals.secondPost = readFileSync(contentFiles.secondPost, "utf-8"); 245 + originals.thirdPost = readFileSync(contentFiles.thirdPost, "utf-8"); 246 + }); 247 + 248 + test.afterAll(async () => { 249 + // Restore all original content 250 + writeFileSync(assets.blogCss, originals.blogCss); 251 + writeFileSync(assets.utilsJs, originals.utilsJs); 252 + writeFileSync(assets.mainJs, originals.mainJs); 253 + 
writeFileSync(assets.aboutJs, originals.aboutJs); 254 + writeFileSync(assets.stylesCss, originals.stylesCss); 255 + writeFileSync(assets.logoPng, originals.logoPng); 256 + writeFileSync(assets.teamPng, originals.teamPng); 257 + writeFileSync(assets.bgPng, originals.bgPng); 258 + // Restore content files 259 + writeFileSync(contentFiles.firstPost, originals.firstPost); 260 + writeFileSync(contentFiles.secondPost, originals.secondPost); 261 + writeFileSync(contentFiles.thirdPost, originals.thirdPost); 262 + }); 263 + 264 + // ============================================================ 265 + // TEST 1: Direct CSS dependency (blog.css โ†’ /blog only) 266 + // ============================================================ 267 + test("CSS file change rebuilds only routes using it", async ({ devServer }) => { 268 + let testCounter = 0; 269 + 270 + function modifyFile(suffix: string) { 271 + testCounter++; 272 + writeFileSync(assets.blogCss, originals.blogCss + `\n/* test-${testCounter}-${suffix} */`); 273 + } 274 + 275 + const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "blog"); 276 + expect(before.index).not.toBeNull(); 277 + expect(before.about).not.toBeNull(); 278 + expect(before.blog).not.toBeNull(); 279 + 280 + // Trigger the final change and wait for build 281 + const logs = await triggerAndWaitForBuild(devServer, () => modifyFile("final")); 282 + await waitForBuildIdChange(htmlPaths.blog, before.blog); 283 + 284 + // Verify incremental build with 1 route 285 + expect(isIncrementalBuild(logs)).toBe(true); 286 + expect(getAffectedRouteCount(logs)).toBe(1); 287 + 288 + // Verify only blog was rebuilt 289 + const after = recordBuildIds(htmlPaths); 290 + expect(after.index).toBe(before.index); 291 + expect(after.about).toBe(before.about); 292 + expect(after.blog).not.toBe(before.blog); 293 + }); 294 + 295 + // ============================================================ 296 + // TEST 2: Transitive JS dependency (utils.js โ†’ main.js โ†’ /) 297 
+ // ============================================================ 298 + test("transitive JS dependency change rebuilds affected routes", async ({ devServer }) => { 299 + let testCounter = 0; 300 + 301 + function modifyFile(suffix: string) { 302 + testCounter++; 303 + writeFileSync(assets.utilsJs, originals.utilsJs + `\n// test-${testCounter}-${suffix}`); 304 + } 305 + 306 + const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "index"); 307 + expect(before.index).not.toBeNull(); 308 + 309 + const logs = await triggerAndWaitForBuild(devServer, () => modifyFile("final")); 310 + await waitForBuildIdChange(htmlPaths.index, before.index); 311 + 312 + // Verify incremental build with 1 route 313 + expect(isIncrementalBuild(logs)).toBe(true); 314 + expect(getAffectedRouteCount(logs)).toBe(1); 315 + 316 + // Only index should be rebuilt (uses main.js which imports utils.js) 317 + const after = recordBuildIds(htmlPaths); 318 + expect(after.about).toBe(before.about); 319 + expect(after.blog).toBe(before.blog); 320 + expect(after.index).not.toBe(before.index); 321 + }); 322 + 323 + // ============================================================ 324 + // TEST 3: Direct JS entry point change (about.js โ†’ /about) 325 + // ============================================================ 326 + test("direct JS entry point change rebuilds only routes using it", async ({ devServer }) => { 327 + let testCounter = 0; 328 + 329 + function modifyFile(suffix: string) { 330 + testCounter++; 331 + writeFileSync(assets.aboutJs, originals.aboutJs + `\n// test-${testCounter}-${suffix}`); 332 + } 333 + 334 + const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "about"); 335 + expect(before.about).not.toBeNull(); 336 + 337 + const logs = await triggerAndWaitForBuild(devServer, () => modifyFile("final")); 338 + await waitForBuildIdChange(htmlPaths.about, before.about); 339 + 340 + // Verify incremental build with 1 route 341 + 
expect(isIncrementalBuild(logs)).toBe(true); 342 + expect(getAffectedRouteCount(logs)).toBe(1); 343 + 344 + // Only about should be rebuilt 345 + const after = recordBuildIds(htmlPaths); 346 + expect(after.index).toBe(before.index); 347 + expect(after.blog).toBe(before.blog); 348 + expect(after.about).not.toBe(before.about); 349 + }); 350 + 351 + // ============================================================ 352 + // TEST 4: Shared asset change (styles.css โ†’ / AND /about) 353 + // ============================================================ 354 + test("shared asset change rebuilds all routes using it", async ({ devServer }) => { 355 + let testCounter = 0; 356 + 357 + function modifyFile(suffix: string) { 358 + testCounter++; 359 + writeFileSync(assets.stylesCss, originals.stylesCss + `\n/* test-${testCounter}-${suffix} */`); 360 + } 361 + 362 + const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "index"); 363 + expect(before.index).not.toBeNull(); 364 + expect(before.about).not.toBeNull(); 365 + 366 + const logs = await triggerAndWaitForBuild(devServer, () => modifyFile("final")); 367 + await waitForBuildIdChange(htmlPaths.index, before.index); 368 + 369 + // Verify incremental build with 2 routes (/ and /about both use styles.css) 370 + expect(isIncrementalBuild(logs)).toBe(true); 371 + expect(getAffectedRouteCount(logs)).toBe(2); 372 + 373 + // Index and about should be rebuilt, blog should not 374 + const after = recordBuildIds(htmlPaths); 375 + expect(after.blog).toBe(before.blog); 376 + expect(after.index).not.toBe(before.index); 377 + expect(after.about).not.toBe(before.about); 378 + }); 379 + 380 + // ============================================================ 381 + // TEST 5: Image change (logo.png โ†’ /) 382 + // ============================================================ 383 + test("image change rebuilds only routes using it", async ({ devServer }) => { 384 + let testCounter = 0; 385 + 386 + function modifyFile(suffix: 
string) { 387 + testCounter++; 388 + const modified = Buffer.concat([ 389 + originals.logoPng as Buffer, 390 + Buffer.from(`<!-- test-${testCounter}-${suffix} -->`), 391 + ]); 392 + writeFileSync(assets.logoPng, modified); 393 + } 394 + 395 + const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "index"); 396 + expect(before.index).not.toBeNull(); 397 + 398 + const logs = await triggerAndWaitForBuild(devServer, () => modifyFile("final")); 399 + await waitForBuildIdChange(htmlPaths.index, before.index); 400 + 401 + // Verify incremental build with 1 route 402 + expect(isIncrementalBuild(logs)).toBe(true); 403 + expect(getAffectedRouteCount(logs)).toBe(1); 404 + 405 + // Only index should be rebuilt (uses logo.png) 406 + const after = recordBuildIds(htmlPaths); 407 + expect(after.about).toBe(before.about); 408 + expect(after.blog).toBe(before.blog); 409 + expect(after.index).not.toBe(before.index); 410 + }); 411 + 412 + // ============================================================ 413 + // TEST 6: Multiple files changed simultaneously 414 + // ============================================================ 415 + test("multiple file changes rebuild union of affected routes", async ({ devServer }) => { 416 + let testCounter = 0; 417 + 418 + function modifyFile(suffix: string) { 419 + testCounter++; 420 + // Change both blog.css (affects /blog) and about.js (affects /about) 421 + writeFileSync(assets.blogCss, originals.blogCss + `\n/* test-${testCounter}-${suffix} */`); 422 + writeFileSync(assets.aboutJs, originals.aboutJs + `\n// test-${testCounter}-${suffix}`); 423 + } 424 + 425 + const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "blog"); 426 + expect(before.about).not.toBeNull(); 427 + expect(before.blog).not.toBeNull(); 428 + 429 + const logs = await triggerAndWaitForBuild(devServer, () => modifyFile("final")); 430 + await waitForBuildIdChange(htmlPaths.blog, before.blog); 431 + 432 + // Verify incremental build with 
2 routes (/about and /blog) 433 + expect(isIncrementalBuild(logs)).toBe(true); 434 + expect(getAffectedRouteCount(logs)).toBe(2); 435 + 436 + // About and blog should be rebuilt, index should not 437 + const after = recordBuildIds(htmlPaths); 438 + expect(after.index).toBe(before.index); 439 + expect(after.about).not.toBe(before.about); 440 + expect(after.blog).not.toBe(before.blog); 441 + }); 442 + 443 + // ============================================================ 444 + // TEST 7: CSS url() asset dependency (bg.png via blog.css โ†’ /blog) 445 + // ============================================================ 446 + test("CSS url() asset change triggers rebundling and rebuilds affected routes", async ({ 447 + devServer, 448 + }) => { 449 + let testCounter = 0; 450 + 451 + function modifyFile(suffix: string) { 452 + testCounter++; 453 + const modified = Buffer.concat([ 454 + originals.bgPng as Buffer, 455 + Buffer.from(`<!-- test-${testCounter}-${suffix} -->`), 456 + ]); 457 + writeFileSync(assets.bgPng, modified); 458 + } 459 + 460 + const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "blog"); 461 + expect(before.blog).not.toBeNull(); 462 + 463 + const logs = await triggerAndWaitForBuild(devServer, () => modifyFile("final")); 464 + await waitForBuildIdChange(htmlPaths.blog, before.blog); 465 + 466 + // Verify incremental build triggered 467 + expect(isIncrementalBuild(logs)).toBe(true); 468 + 469 + // Blog should be rebuilt (uses blog.css which references bg.png via url()) 470 + const after = recordBuildIds(htmlPaths); 471 + expect(after.blog).not.toBe(before.blog); 472 + }); 473 + 474 + // ============================================================ 475 + // TEST 8: Source file change rebuilds only routes defined in that file 476 + // ============================================================ 477 + test("source file change rebuilds only routes defined in that file", async ({ devServer }) => { 478 + // This test verifies that when a .rs 
source file changes, only routes 479 + // defined in that file are rebuilt (via source_to_routes tracking). 480 + // 481 + // Flow: 482 + // 1. Dev server starts โ†’ initial build โ†’ creates build_state.json with source file mappings 483 + // 2. Modify about.rs โ†’ cargo recompiles โ†’ binary reruns with MAUDIT_CHANGED_FILES 484 + // 3. New binary loads build_state.json and finds /about is affected by about.rs 485 + // 4. Only /about route is rebuilt 486 + // 487 + // Note: Unlike asset changes, .rs changes require cargo recompilation. 488 + // The binary's logs (showing "Incremental build") aren't captured by the 489 + // dev server's log collection, so we verify behavior through build IDs. 490 + 491 + const aboutRs = resolve(fixturePath, "src", "pages", "about.rs"); 492 + const originalAboutRs = readFileSync(aboutRs, "utf-8"); 493 + 494 + try { 495 + let testCounter = 0; 496 + 497 + function modifyFile(suffix: string) { 498 + testCounter++; 499 + writeFileSync(aboutRs, originalAboutRs + `\n// test-${testCounter}-${suffix}`); 500 + } 501 + 502 + const rsTimeout = 60000; 503 + 504 + // First change: triggers recompile + build (establishes build state with source_to_routes) 505 + const beforeInit = getBuildId(htmlPaths.about); 506 + await triggerAndWaitForBuild(devServer, () => modifyFile("init"), rsTimeout); 507 + await waitForBuildIdChange(htmlPaths.about, beforeInit, rsTimeout); 508 + 509 + // Record build IDs - state now exists with source_to_routes mappings 510 + const before = recordBuildIds(htmlPaths); 511 + expect(before.index).not.toBeNull(); 512 + expect(before.about).not.toBeNull(); 513 + expect(before.blog).not.toBeNull(); 514 + 515 + // Second change: should do incremental build (only about.rs route) 516 + await triggerAndWaitForBuild(devServer, () => modifyFile("final"), rsTimeout); 517 + await waitForBuildIdChange(htmlPaths.about, before.about, rsTimeout); 518 + 519 + // Verify only /about was rebuilt (it's defined in about.rs) 520 + const after = 
recordBuildIds(htmlPaths); 521 + expect(after.index).toBe(before.index); 522 + expect(after.blog).toBe(before.blog); 523 + expect(after.about).not.toBe(before.about); 524 + 525 + } finally { 526 + // Restore original content and wait for build to complete 527 + const beforeRestore = getBuildId(htmlPaths.about); 528 + writeFileSync(aboutRs, originalAboutRs); 529 + try { 530 + await waitForBuildIdChange(htmlPaths.about, beforeRestore, 60000); 531 + } catch { 532 + // Restoration build may not always complete, that's ok 533 + } 534 + } 535 + }); 536 + 537 + // ============================================================ 538 + // TEST 9: include_str! file change triggers full rebuild (untracked file) 539 + // ============================================================ 540 + test("include_str file change triggers full rebuild", async ({ devServer }) => { 541 + // This test verifies that changing a file referenced by include_str!() 542 + // triggers cargo recompilation and a FULL rebuild (all routes). 543 + // 544 + // Setup: about.rs uses include_str!("../assets/about-content.txt") 545 + // The .d file from cargo includes this dependency, so the dependency tracker 546 + // knows that changing about-content.txt requires recompilation. 547 + // 548 + // Flow: 549 + // 1. Dev server starts โ†’ initial build 550 + // 2. Modify about-content.txt โ†’ cargo recompiles (because .d file tracks it) 551 + // 3. Binary runs with MAUDIT_CHANGED_FILES pointing to about-content.txt 552 + // 4. Since about-content.txt is NOT in source_to_routes or asset_to_routes, 553 + // it's an "untracked file" and triggers a full rebuild of all routes 554 + // 555 + // This is the correct safe behavior - we don't know which route uses the 556 + // include_str! file, so we rebuild everything to ensure correctness. 
557 + 558 + const contentFile = resolve(fixturePath, "src", "assets", "about-content.txt"); 559 + const originalContent = readFileSync(contentFile, "utf-8"); 560 + const rsTimeout = 60000; 561 + 562 + try { 563 + let testCounter = 0; 564 + 565 + function modifyFile(suffix: string) { 566 + testCounter++; 567 + writeFileSync(contentFile, originalContent + `\n<!-- test-${testCounter}-${suffix} -->`); 568 + } 569 + 570 + // First change: triggers recompile + full build (establishes build state) 571 + const beforeInit = getBuildId(htmlPaths.about); 572 + await triggerAndWaitForBuild(devServer, () => modifyFile("init"), rsTimeout); 573 + await waitForBuildIdChange(htmlPaths.about, beforeInit, rsTimeout); 574 + 575 + // Record build IDs before the final change 576 + const before = recordBuildIds(htmlPaths); 577 + expect(before.index).not.toBeNull(); 578 + expect(before.about).not.toBeNull(); 579 + expect(before.blog).not.toBeNull(); 580 + 581 + // Trigger the content file change with unique content to verify 582 + devServer.clearLogs(); 583 + writeFileSync(contentFile, originalContent + "\nUpdated content!"); 584 + await waitForBuildComplete(devServer, rsTimeout); 585 + await waitForBuildIdChange(htmlPaths.about, before.about, rsTimeout); 586 + 587 + // All routes should be rebuilt (full rebuild due to untracked file) 588 + const after = recordBuildIds(htmlPaths); 589 + expect(after.index).not.toBe(before.index); 590 + expect(after.about).not.toBe(before.about); 591 + expect(after.blog).not.toBe(before.blog); 592 + 593 + // Verify the content was actually updated in the output 594 + const aboutHtml = readFileSync(htmlPaths.about, "utf-8"); 595 + expect(aboutHtml).toContain("Updated content!"); 596 + 597 + } finally { 598 + // Restore original content and wait for build to complete 599 + const beforeRestore = getBuildId(htmlPaths.about); 600 + writeFileSync(contentFile, originalContent); 601 + try { 602 + await waitForBuildIdChange(htmlPaths.about, beforeRestore, 60000); 
603 + } catch { 604 + // Restoration build may not always complete, that's ok 605 + } 606 + } 607 + }); 608 + 609 + // ============================================================ 610 + // TEST 10: Folder rename detection 611 + // ============================================================ 612 + test("folder rename is detected and affects routes using assets in that folder", async ({ devServer }) => { 613 + // This test verifies that renaming a folder containing tracked assets 614 + // is detected by the file watcher and affects the correct routes. 615 + // 616 + // Setup: The blog page uses src/assets/icons/blog-icon.css 617 + // Test: Rename icons -> icons-renamed, verify the blog route is identified as affected 618 + // 619 + // Note: The actual build will fail because the asset path becomes invalid, 620 + // but this test verifies the DETECTION and ROUTE MATCHING works correctly. 621 + 622 + const iconsFolder = resolve(fixturePath, "src", "assets", "icons"); 623 + const renamedFolder = resolve(fixturePath, "src", "assets", "icons-renamed"); 624 + const iconFile = resolve(iconsFolder, "blog-icon.css"); 625 + 626 + // Ensure we start with the correct state 627 + if (existsSync(renamedFolder)) { 628 + renameSync(renamedFolder, iconsFolder); 629 + // Wait briefly for any triggered build to start 630 + await new Promise((resolve) => setTimeout(resolve, 500)); 631 + } 632 + 633 + expect(existsSync(iconsFolder)).toBe(true); 634 + expect(existsSync(iconFile)).toBe(true); 635 + 636 + const originalContent = readFileSync(iconFile, "utf-8"); 637 + 638 + try { 639 + let testCounter = 0; 640 + 641 + function modifyFile(suffix: string) { 642 + testCounter++; 643 + writeFileSync(iconFile, originalContent + `\n/* test-${testCounter}-${suffix} */`); 644 + } 645 + 646 + // Use setupIncrementalState to establish tracking 647 + const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "blog"); 648 + expect(before.blog).not.toBeNull(); 649 + 650 + // Clear logs 
for the actual test 651 + devServer.clearLogs(); 652 + 653 + // Rename icons -> icons-renamed 654 + renameSync(iconsFolder, renamedFolder); 655 + 656 + // Wait for the build to be attempted (it will fail because path is now invalid) 657 + const startTime = Date.now(); 658 + const timeoutMs = 15000; 659 + let logs: string[] = []; 660 + 661 + while (Date.now() - startTime < timeoutMs) { 662 + logs = devServer.getLogs(100); 663 + const logsText = logs.join("\n"); 664 + 665 + // Wait for either success or failure indication 666 + if (logsText.includes("finished") || logsText.includes("failed") || logsText.includes("error")) { 667 + break; 668 + } 669 + 670 + await new Promise((resolve) => setTimeout(resolve, 100)); 671 + } 672 + 673 + logs = devServer.getLogs(100); 674 + const logsText = logs.join("\n"); 675 + 676 + // Key assertions: verify the detection and route matching worked 677 + // 1. The folder paths should be in changed files 678 + expect(logsText).toContain("icons"); 679 + 680 + // 2. The blog route should be identified as affected 681 + expect(logsText).toContain("Rebuilding 1 affected routes"); 682 + expect(logsText).toContain("/blog"); 683 + 684 + // 3. 
Other routes should NOT be affected (index and about don't use icons/) 685 + expect(logsText).not.toContain("/about"); 686 + 687 + } finally { 688 + // Restore: rename icons-renamed back to icons 689 + if (existsSync(renamedFolder) && !existsSync(iconsFolder)) { 690 + renameSync(renamedFolder, iconsFolder); 691 + } 692 + // Restore original content and wait for build 693 + if (existsSync(iconFile)) { 694 + const beforeRestore = getBuildId(htmlPaths.blog); 695 + writeFileSync(iconFile, originalContent); 696 + try { 697 + await waitForBuildIdChange(htmlPaths.blog, beforeRestore, 30000); 698 + } catch { 699 + // Restoration build may not always complete, that's ok 700 + } 701 + } 702 + } 703 + }); 704 + 705 + // ============================================================ 706 + // TEST 11: Shared Rust module change triggers full rebuild 707 + // ============================================================ 708 + test("shared Rust module change triggers full rebuild", async ({ devServer }) => { 709 + // This test verifies that changing a shared Rust module (not a route file) 710 + // triggers a full rebuild of all routes. 711 + // 712 + // Setup: helpers.rs contains shared functions used by about.rs 713 + // The helpers.rs file is not tracked in source_to_routes (only route files are) 714 + // so it's treated as an "untracked file" which triggers a full rebuild. 715 + // 716 + // This is the correct safe behavior - we can't determine which routes 717 + // depend on the shared module, so we rebuild everything. 
718 + 719 + const helpersRs = resolve(fixturePath, "src", "pages", "helpers.rs"); 720 + const originalContent = readFileSync(helpersRs, "utf-8"); 721 + const rsTimeout = 60000; 722 + 723 + try { 724 + let testCounter = 0; 725 + 726 + function modifyFile(suffix: string) { 727 + testCounter++; 728 + writeFileSync(helpersRs, originalContent + `\n// test-${testCounter}-${suffix}`); 729 + } 730 + 731 + // First change: triggers recompile + full build (establishes build state) 732 + const beforeInit = getBuildId(htmlPaths.index); 733 + await triggerAndWaitForBuild(devServer, () => modifyFile("init"), rsTimeout); 734 + await waitForBuildIdChange(htmlPaths.index, beforeInit, rsTimeout); 735 + 736 + // Record build IDs before the final change 737 + const before = recordBuildIds(htmlPaths); 738 + expect(before.index).not.toBeNull(); 739 + expect(before.about).not.toBeNull(); 740 + expect(before.blog).not.toBeNull(); 741 + 742 + // Trigger the shared module change 743 + await triggerAndWaitForBuild(devServer, () => modifyFile("final"), rsTimeout); 744 + await waitForBuildIdChange(htmlPaths.index, before.index, rsTimeout); 745 + 746 + // All routes should be rebuilt (full rebuild due to untracked shared module) 747 + const after = recordBuildIds(htmlPaths); 748 + expect(after.index).not.toBe(before.index); 749 + expect(after.about).not.toBe(before.about); 750 + expect(after.blog).not.toBe(before.blog); 751 + 752 + } finally { 753 + // Restore original content and wait for build to complete 754 + const beforeRestore = getBuildId(htmlPaths.index); 755 + writeFileSync(helpersRs, originalContent); 756 + try { 757 + await waitForBuildIdChange(htmlPaths.index, beforeRestore, 60000); 758 + } catch { 759 + // Restoration build may not always complete, that's ok 760 + } 761 + } 762 + }); 763 + 764 + // ============================================================ 765 + // TEST 12: Content file change rebuilds only routes accessing that specific file 766 + // 
============================================================ 767 + test("content file change rebuilds only routes accessing that file (granular tracking)", async ({ devServer }) => { 768 + // This test verifies granular content file tracking. 769 + // 770 + // Setup: 771 + // - /articles/first-post uses get_entry("first-post") โ†’ tracks only first-post.md 772 + // - /articles/second-post uses get_entry("second-post") โ†’ tracks only second-post.md 773 + // - /articles (list) uses entries() โ†’ tracks ALL content files 774 + // 775 + // When we change first-post.md: 776 + // - /articles/first-post should be rebuilt (directly uses this file) 777 + // - /articles should be rebuilt (uses entries() which tracks all files) 778 + // - /articles/second-post should NOT be rebuilt (uses different file) 779 + // - /articles/third-post should NOT be rebuilt (uses different file) 780 + // - Other routes (/, /about, /blog) should NOT be rebuilt 781 + 782 + let testCounter = 0; 783 + 784 + function modifyFile(suffix: string) { 785 + testCounter++; 786 + const newContent = (originals.firstPost as string).replace( 787 + "first post", 788 + `first post - test-${testCounter}-${suffix}` 789 + ); 790 + writeFileSync(contentFiles.firstPost, newContent); 791 + } 792 + 793 + // Setup: establish incremental state 794 + const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "articleFirst"); 795 + expect(before.articleFirst).not.toBeNull(); 796 + expect(before.articleSecond).not.toBeNull(); 797 + expect(before.articles).not.toBeNull(); 798 + 799 + // Trigger the final change 800 + const logs = await triggerAndWaitForBuild(devServer, () => modifyFile("final")); 801 + await waitForBuildIdChange(htmlPaths.articleFirst, before.articleFirst); 802 + 803 + // Verify incremental build occurred 804 + expect(isIncrementalBuild(logs)).toBe(true); 805 + 806 + // Check which routes were rebuilt 807 + const after = recordBuildIds(htmlPaths); 808 + 809 + // Routes that should NOT be 
rebuilt (don't access first-post.md) 810 + expect(after.index).toBe(before.index); 811 + expect(after.about).toBe(before.about); 812 + expect(after.blog).toBe(before.blog); 813 + expect(after.articleSecond).toBe(before.articleSecond); 814 + expect(after.articleThird).toBe(before.articleThird); 815 + 816 + // Routes that SHOULD be rebuilt (access first-post.md) 817 + expect(after.articleFirst).not.toBe(before.articleFirst); 818 + expect(after.articles).not.toBe(before.articles); // Uses entries() which tracks all files 819 + }); 820 + 821 + // ============================================================ 822 + // TEST 13: Different content file changes rebuild different routes 823 + // ============================================================ 824 + test("different content files trigger rebuilds of different routes", async ({ devServer }) => { 825 + // This test verifies that changing different content files rebuilds 826 + // different sets of routes, proving granular tracking works. 827 + // 828 + // Change second-post.md: 829 + // - /articles/second-post should be rebuilt 830 + // - /articles (list) should be rebuilt (entries() tracks all) 831 + // - /articles/first-post and /articles/third-post should NOT be rebuilt 832 + 833 + let testCounter = 0; 834 + 835 + function modifyFile(suffix: string) { 836 + testCounter++; 837 + const newContent = (originals.secondPost as string).replace( 838 + "second post", 839 + `second post - test-${testCounter}-${suffix}` 840 + ); 841 + writeFileSync(contentFiles.secondPost, newContent); 842 + } 843 + 844 + // Setup: establish incremental state 845 + const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "articleSecond"); 846 + expect(before.articleFirst).not.toBeNull(); 847 + expect(before.articleSecond).not.toBeNull(); 848 + expect(before.articleThird).not.toBeNull(); 849 + expect(before.articles).not.toBeNull(); 850 + 851 + // Trigger the final change 852 + const logs = await 
triggerAndWaitForBuild(devServer, () => modifyFile("final")); 853 + await waitForBuildIdChange(htmlPaths.articleSecond, before.articleSecond); 854 + 855 + // Verify incremental build occurred 856 + expect(isIncrementalBuild(logs)).toBe(true); 857 + 858 + // Check which routes were rebuilt 859 + const after = recordBuildIds(htmlPaths); 860 + 861 + // Routes that should NOT be rebuilt 862 + expect(after.index).toBe(before.index); 863 + expect(after.about).toBe(before.about); 864 + expect(after.blog).toBe(before.blog); 865 + expect(after.articleFirst).toBe(before.articleFirst); 866 + expect(after.articleThird).toBe(before.articleThird); 867 + 868 + // Routes that SHOULD be rebuilt 869 + expect(after.articleSecond).not.toBe(before.articleSecond); 870 + expect(after.articles).not.toBe(before.articles); 871 + }); 872 + 873 + // ============================================================ 874 + // TEST 14: Multiple content files changed rebuilds union of affected routes 875 + // ============================================================ 876 + test("multiple content file changes rebuild union of affected routes", async ({ devServer }) => { 877 + // This test verifies that changing multiple content files correctly 878 + // rebuilds the union of all routes that access any of the changed files. 
879 + // 880 + // Change both first-post.md and third-post.md simultaneously: 881 + // - /articles/first-post should be rebuilt 882 + // - /articles/third-post should be rebuilt 883 + // - /articles (list) should be rebuilt 884 + // - /articles/second-post should NOT be rebuilt 885 + 886 + let testCounter = 0; 887 + 888 + function modifyFile(suffix: string) { 889 + testCounter++; 890 + // Change both first and third posts 891 + const newFirst = (originals.firstPost as string).replace( 892 + "first post", 893 + `first post - multi-${testCounter}-${suffix}` 894 + ); 895 + const newThird = (originals.thirdPost as string).replace( 896 + "third post", 897 + `third post - multi-${testCounter}-${suffix}` 898 + ); 899 + writeFileSync(contentFiles.firstPost, newFirst); 900 + writeFileSync(contentFiles.thirdPost, newThird); 901 + } 902 + 903 + // Setup: establish incremental state 904 + const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "articleFirst"); 905 + expect(before.articleFirst).not.toBeNull(); 906 + expect(before.articleSecond).not.toBeNull(); 907 + expect(before.articleThird).not.toBeNull(); 908 + expect(before.articles).not.toBeNull(); 909 + 910 + // Trigger the final change 911 + const logs = await triggerAndWaitForBuild(devServer, () => modifyFile("final")); 912 + await waitForBuildIdChange(htmlPaths.articleFirst, before.articleFirst); 913 + 914 + // Verify incremental build occurred 915 + expect(isIncrementalBuild(logs)).toBe(true); 916 + 917 + // Check which routes were rebuilt 918 + const after = recordBuildIds(htmlPaths); 919 + 920 + // Routes that should NOT be rebuilt 921 + expect(after.index).toBe(before.index); 922 + expect(after.about).toBe(before.about); 923 + expect(after.blog).toBe(before.blog); 924 + expect(after.articleSecond).toBe(before.articleSecond); 925 + 926 + // Routes that SHOULD be rebuilt 927 + expect(after.articleFirst).not.toBe(before.articleFirst); 928 + 
expect(after.articleThird).not.toBe(before.articleThird); 929 + expect(after.articles).not.toBe(before.articles); 930 + }); 931 + 932 + // ============================================================ 933 + // TEST 15: Full rebuild from untracked file properly initializes content sources 934 + // ============================================================ 935 + test("full rebuild from untracked file properly initializes content sources", async ({ devServer }) => { 936 + // This test verifies that when an untracked Rust file (like helpers.rs) changes, 937 + // triggering a full rebuild (routes_to_rebuild = None), content sources are 938 + // still properly initialized. 939 + // 940 + // This was a bug where the code checked `is_incremental` instead of 941 + // `routes_to_rebuild.is_some()`, causing content sources to not be initialized 942 + // during full rebuilds triggered by untracked file changes. 943 + // 944 + // Setup: 945 + // - helpers.rs is a shared module not tracked in source_to_routes 946 + // - Changing it triggers routes_to_rebuild = None (full rebuild) 947 + // - Routes like /articles/* use content from the "articles" content source 948 + // - If content sources aren't initialized, the build would crash 949 + // 950 + // This test: 951 + // 1. First modifies a content file to ensure specific content exists 952 + // 2. Then modifies helpers.rs to trigger a full rebuild 953 + // 3. 
Verifies the content-using routes are properly built with correct content 954 + 955 + const helpersRs = resolve(fixturePath, "src", "pages", "helpers.rs"); 956 + const originalHelpersRs = readFileSync(helpersRs, "utf-8"); 957 + const rsTimeout = 60000; 958 + 959 + try { 960 + // Step 1: Modify content file to set up specific content we can verify 961 + const testMarker = `CONTENT-INIT-TEST-${Date.now()}`; 962 + const newContent = (originals.firstPost as string).replace( 963 + "first post", 964 + `first post - ${testMarker}` 965 + ); 966 + writeFileSync(contentFiles.firstPost, newContent); 967 + 968 + // Wait for the content change to be processed 969 + const beforeContent = getBuildId(htmlPaths.articleFirst); 970 + await waitForBuildComplete(devServer, rsTimeout); 971 + await waitForBuildIdChange(htmlPaths.articleFirst, beforeContent, rsTimeout); 972 + 973 + // Verify the content was updated 974 + let articleHtml = readFileSync(htmlPaths.articleFirst, "utf-8"); 975 + expect(articleHtml).toContain(testMarker); 976 + 977 + // Record build IDs before the helpers.rs change 978 + const before = recordBuildIds(htmlPaths); 979 + expect(before.articleFirst).not.toBeNull(); 980 + expect(before.articles).not.toBeNull(); 981 + 982 + // Step 2: Modify helpers.rs to trigger full rebuild 983 + // This is an untracked file, so it triggers routes_to_rebuild = None 984 + devServer.clearLogs(); 985 + writeFileSync(helpersRs, originalHelpersRs + `\n// content-init-test-${Date.now()}`); 986 + 987 + await waitForBuildComplete(devServer, rsTimeout); 988 + await waitForBuildIdChange(htmlPaths.articleFirst, before.articleFirst, rsTimeout); 989 + 990 + // Step 3: Verify the build succeeded and content is still correct 991 + // If content sources weren't initialized, this would fail or crash 992 + const after = recordBuildIds(htmlPaths); 993 + 994 + // All routes should be rebuilt (full rebuild) 995 + expect(after.index).not.toBe(before.index); 996 + 
expect(after.about).not.toBe(before.about); 997 + expect(after.blog).not.toBe(before.blog); 998 + expect(after.articleFirst).not.toBe(before.articleFirst); 999 + expect(after.articles).not.toBe(before.articles); 1000 + 1001 + // Most importantly: verify the content-using routes have correct content 1002 + // This proves content sources were properly initialized during the full rebuild 1003 + articleHtml = readFileSync(htmlPaths.articleFirst, "utf-8"); 1004 + expect(articleHtml).toContain(testMarker); 1005 + 1006 + // Also verify the articles list page works (uses entries()) 1007 + const articlesHtml = readFileSync(htmlPaths.articles, "utf-8"); 1008 + expect(articlesHtml).toContain("First Post"); 1009 + 1010 + } finally { 1011 + // Restore original content 1012 + writeFileSync(helpersRs, originalHelpersRs); 1013 + writeFileSync(contentFiles.firstPost, originals.firstPost as string); 1014 + 1015 + // Wait for restoration build 1016 + const beforeRestore = getBuildId(htmlPaths.articleFirst); 1017 + try { 1018 + await waitForBuildIdChange(htmlPaths.articleFirst, beforeRestore, 60000); 1019 + } catch { 1020 + // Restoration build may not always complete, that's ok 1021 + } 1022 + } 1023 + }); 1024 + });
+76 -6
e2e/tests/test-utils.ts
··· 23 23 port: number; 24 24 /** Stop the dev server */ 25 25 stop: () => Promise<void>; 26 + /** Get recent log output (last N lines) */ 27 + getLogs: (lines?: number) => string[]; 28 + /** Clear captured logs */ 29 + clearLogs: () => void; 26 30 } 27 31 28 32 /** ··· 52 56 const childProcess = spawn(command, args, { 53 57 cwd: fixturePath, 54 58 stdio: ["ignore", "pipe", "pipe"], 59 + env: { 60 + ...process.env, 61 + // Show binary output for tests so we can verify incremental build logs 62 + MAUDIT_SHOW_BINARY_OUTPUT: "1", 63 + }, 55 64 }); 56 65 57 66 // Capture output to detect when server is ready 58 67 let serverReady = false; 68 + const capturedLogs: string[] = []; 59 69 60 70 const outputPromise = new Promise<number>((resolve, reject) => { 61 71 const timeout = setTimeout(() => { 62 - reject(new Error("Dev server did not start within 30 seconds")); 63 - }, 30000); 72 + console.error("[test-utils] Dev server startup timeout. Recent logs:"); 73 + console.error(capturedLogs.slice(-20).join("\n")); 74 + reject(new Error("Dev server did not start within 120 seconds")); 75 + }, 120000); // Increased to 120 seconds for CI 64 76 65 77 childProcess.stdout?.on("data", (data: Buffer) => { 66 78 const output = data.toString(); 79 + // Capture all stdout logs 80 + output 81 + .split("\n") 82 + .filter((line) => line.trim()) 83 + .forEach((line) => { 84 + capturedLogs.push(line); 85 + }); 67 86 68 87 // Look for "waiting for requests" to know server is ready 69 88 if (output.includes("waiting for requests")) { ··· 75 94 }); 76 95 77 96 childProcess.stderr?.on("data", (data: Buffer) => { 78 - // Only log errors, not all stderr output 79 97 const output = data.toString(); 98 + // Capture all stderr logs 99 + output 100 + .split("\n") 101 + .filter((line) => line.trim()) 102 + .forEach((line) => { 103 + capturedLogs.push(line); 104 + }); 105 + 106 + // Only log errors to console, not all stderr output 80 107 if (output.toLowerCase().includes("error")) { 81 108 
console.error(`[maudit dev] ${output}`); 82 109 } ··· 113 140 }, 5000); 114 141 }); 115 142 }, 143 + getLogs: (lines?: number) => { 144 + if (lines) { 145 + return capturedLogs.slice(-lines); 146 + } 147 + return [...capturedLogs]; 148 + }, 149 + clearLogs: () => { 150 + capturedLogs.length = 0; 151 + }, 116 152 }; 117 153 } 118 154 ··· 138 174 // Worker-scoped server pool - one server per worker, shared across all tests in that worker 139 175 // Key format: "workerIndex-fixtureName" 140 176 const workerServers = new Map<string, DevServer>(); 177 + 178 + // Track used ports to avoid collisions 179 + const usedPorts = new Set<number>(); 180 + 181 + /** 182 + * Generate a deterministic port offset based on fixture name. 183 + * This ensures each fixture gets a unique port range, avoiding collisions 184 + * when multiple fixtures run on the same worker. 185 + */ 186 + function getFixturePortOffset(fixtureName: string): number { 187 + // Simple hash function to get a number from the fixture name 188 + let hash = 0; 189 + for (let i = 0; i < fixtureName.length; i++) { 190 + const char = fixtureName.charCodeAt(i); 191 + hash = (hash << 5) - hash + char; 192 + hash = hash & hash; // Convert to 32bit integer 193 + } 194 + // Use modulo to keep the offset reasonable (0-99) 195 + return Math.abs(hash) % 100; 196 + } 197 + 198 + /** 199 + * Find an available port starting from the preferred port. 200 + */ 201 + function findAvailablePort(preferredPort: number): number { 202 + let port = preferredPort; 203 + while (usedPorts.has(port)) { 204 + port++; 205 + } 206 + usedPorts.add(port); 207 + return port; 208 + } 141 209 142 210 /** 143 211 * Create a test instance with a devServer fixture for a specific fixture. 144 212 * This allows each test file to use a different fixture while sharing the same pattern. 145 213 * 146 214 * @param fixtureName - Name of the fixture directory under e2e/fixtures/ 147 - * @param basePort - Starting port number (default: 1864). 
Each worker gets basePort + workerIndex 215 + * @param basePort - Starting port number (default: 1864). Each fixture gets a unique port based on its name. 148 216 * 149 217 * @example 150 218 * ```ts ··· 167 235 let server = workerServers.get(serverKey); 168 236 169 237 if (!server) { 170 - // Assign unique port based on worker index 171 - const port = basePort + workerIndex; 238 + // Calculate port based on fixture name hash + worker index to avoid collisions 239 + const fixtureOffset = getFixturePortOffset(fixtureName); 240 + const preferredPort = basePort + workerIndex * 100 + fixtureOffset; 241 + const port = findAvailablePort(preferredPort); 172 242 173 243 server = await startDevServer({ 174 244 fixture: fixtureName,
+2 -2
examples/blog/src/routes/index.rs
··· 4 4 use crate::{ 5 5 content::ArticleContent, 6 6 layout::layout, 7 - routes::{Article, article::ArticleParams}, 7 + routes::{article::ArticleParams, Article}, 8 8 }; 9 9 10 10 #[route("/")] ··· 16 16 17 17 let markup = html! { 18 18 ul { 19 - @for entry in &articles.entries { 19 + @for entry in articles.entries() { 20 20 li { 21 21 a href=(&Article.url(ArticleParams { article: entry.id.clone() })) { 22 22 h2 { (entry.data(ctx).title) }
+2 -2
examples/library/src/routes/index.rs
··· 4 4 use crate::{ 5 5 content::ArticleContent, 6 6 layout::layout, 7 - routes::{Article, article::ArticleParams}, 7 + routes::{article::ArticleParams, Article}, 8 8 }; 9 9 10 10 #[route("/")] ··· 18 18 let markup = html! { 19 19 (logo.render("Maudit logo, a crudely drawn crown")) 20 20 ul { 21 - @for entry in &articles.entries { 21 + @for entry in articles.entries() { 22 22 li { 23 23 a href=(&Article.url(ArticleParams { article: entry.id.clone() })) { 24 24 h2 { (entry.data(ctx).title) }
+1 -1
examples/oubli-basics/src/routes/index.rs
··· 16 16 Ok(layout(html! { 17 17 (logo.render("Maudit logo, a crudely drawn crown")) 18 18 h1 { "Hello World" } 19 - @for archetype in &archetype_store.entries { 19 + @for archetype in archetype_store.entries() { 20 20 a href=(archetype.id) { (archetype.data(ctx).title) } 21 21 } 22 22 }))
+1 -1
website/content/docs/prefetching.md
··· 49 49 
 50 50 
 Note that prerendering, unlike prefetching, may require rethinking how the JavaScript on your pages works, as it'll run JavaScript from pages that the user hasn't visited yet. For example, this might result in analytics reporting incorrect page views. 51 51 
 52 - ## Possible risks 
 52 + ## Possible risks 
 53 53 
 54 54 
 Prefetching pages in static websites is typically always safe. In more traditional apps, an issue can arise if your pages cause side effects to happen on the server. For instance, if you were to prefetch `/logout`, your user might get disconnected on hover, or worse, as soon as the logout link appears in the viewport. In modern times, it is typically not recommended to have links cause such side effects anyway, reducing the risk of this happening. 55 55
+1 -1
website/content/news/2026-in-the-cursed-lands.md
··· 70 70 71 71 ### Shortcodes 72 72 73 - Embedding a YouTube video typically means copying a long, ugly iframe tag and configuring several attributes to ensure proper rendering. It'd be nice to have something friendlier, a code that would be short, you will. 73 + Embedding a YouTube video typically means copying a long, ugly iframe tag and configuring several attributes to ensure proper rendering. It'd be nice to have something friendlier, a code that would be short, if you will. 74 74 75 75 ```md 76 76 Here's my cool video:
+2 -2
website/src/layout/docs_sidebars.rs
··· 1 - use maud::{Markup, html}; 1 + use maud::{html, Markup}; 2 2 use maudit::{ 3 3 content::MarkdownHeading, 4 4 route::{PageContext, RouteExt}, ··· 14 14 15 15 let mut sections = std::collections::HashMap::new(); 16 16 17 - for entry in content.entries.iter() { 17 + for entry in content.entries() { 18 18 if let Some(section) = &entry.data(ctx).section { 19 19 sections.entry(section).or_insert_with(Vec::new).push(entry); 20 20 }
+3 -3
website/src/routes/news.rs
··· 1 1 use chrono::Datelike; 2 - use maud::PreEscaped; 3 2 use maud::html; 3 + use maud::PreEscaped; 4 4 use maudit::route::prelude::*; 5 5 use std::collections::BTreeMap; 6 6 7 7 use crate::content::NewsContent; 8 - use crate::layout::SeoMeta; 9 8 use crate::layout::layout; 9 + use crate::layout::SeoMeta; 10 10 11 11 #[route("/news/")] 12 12 pub struct NewsIndex; ··· 18 18 // Group articles by year 19 19 let mut articles_by_year: BTreeMap<String, Vec<_>> = BTreeMap::new(); 20 20 21 - for article in &content.entries { 21 + for article in content.entries() { 22 22 let year = article.data(ctx).date.year().to_string(); 23 23 articles_by_year 24 24 .entry(year)