···
154154 /// Get a placeholder for the image, suitable for low-quality image placeholder (LQIP) and similar techniques.
155155 ///
156156 /// This uses the [ThumbHash](https://evanw.github.io/thumbhash/) algorithm to generate a very small placeholder image.
157157- pub fn placeholder(&self) -> ImagePlaceholder {
157157+ ///
158158+ /// Returns an error if the image cannot be loaded.
159159+ pub fn placeholder(&self) -> Result<ImagePlaceholder, crate::errors::AssetError> {
158160 get_placeholder(&self.path, self.cache.as_ref())
159161 }
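// Illustrative caller-side sketch (not part of the diff): `placeholder()` is now fallible, so
// call sites move from `let ph = img.placeholder();` to handling the `Result`. The `Image`
// type name is assumed here; `thumbhash_base64` is the field exercised by the tests below.
fn placeholder_attr(img: &Image) -> String {
    match img.placeholder() {
        Ok(ph) => format!(r#"data-thumbhash="{}""#, ph.thumbhash_base64),
        Err(_) => String::new(), // degrade gracefully if the image cannot be loaded
    }
}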
160162
···
258260 }
259261}
260262
261261-fn get_placeholder(path: &PathBuf, cache: Option<&ImageCache>) -> ImagePlaceholder {
263263+fn get_placeholder(
264264+ path: &PathBuf,
265265+ cache: Option<&ImageCache>,
266266+) -> Result<ImagePlaceholder, crate::errors::AssetError> {
262267 // Check cache first if provided
263268 if let Some(cache) = cache
264269 && let Some(cached) = cache.get_placeholder(path)
265270 {
266271 debug!("Using cached placeholder for {}", path.display());
267272 let thumbhash_base64 = base64::engine::general_purpose::STANDARD.encode(&cached.thumbhash);
268268- return ImagePlaceholder::new(cached.thumbhash, thumbhash_base64);
273273+ return Ok(ImagePlaceholder::new(cached.thumbhash, thumbhash_base64));
269274 }
270275271276 let total_start = Instant::now();
272277273278 let load_start = Instant::now();
274274- let image = image::open(path).ok().unwrap();
279279+ let image = image::open(path).map_err(|e| crate::errors::AssetError::ImageLoadFailed {
280280+ path: path.clone(),
281281+ source: e,
282282+ })?;
275283 let (width, height) = image.dimensions();
276284 let (width, height) = (width as usize, height as usize);
277285 debug!(
···329337 cache.cache_placeholder(path, thumb_hash.clone());
330338 }
331339332332- ImagePlaceholder::new(thumb_hash, thumbhash_base64)
340340+ Ok(ImagePlaceholder::new(thumb_hash, thumbhash_base64))
333341}
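// Illustrative sketch (not part of the diff): when an `ImageCache` is supplied, a second call
// for the same path is served from the cache and never reaches `image::open`. `path` and
// `cache` are assumed `PathBuf` / `ImageCache` values.
let first = get_placeholder(&path, Some(&cache)).unwrap(); // decodes the image, stores the hash
let second = get_placeholder(&path, Some(&cache)).unwrap(); // answered from the cache
assert_eq!(first.thumbhash, second.thumbhash);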
334342335343/// Port of https://github.com/evanw/thumbhash/blob/a652ce6ed691242f459f468f0a8756cda3b90a82/js/thumbhash.js#L234
···516524 ).into()
517525 }
518526}
527527+528528+#[cfg(test)]
529529+mod tests {
530530+ use crate::errors::AssetError;
531531+532532+ use super::*;
533533+ use std::{error::Error, path::PathBuf};
534534+535535+ #[test]
536536+ fn test_placeholder_with_missing_file() {
537537+ let nonexistent_path = PathBuf::from("/this/file/does/not/exist.png");
538538+539539+ let result = get_placeholder(&nonexistent_path, None);
540540+541541+ assert!(result.is_err());
542542+ if let Err(AssetError::ImageLoadFailed { path, .. }) = result {
543543+ assert_eq!(path, nonexistent_path);
544544+ } else {
545545+ panic!("Expected ImageLoadFailed error");
546546+ }
547547+ }
548548+549549+ #[test]
550550+ fn test_placeholder_with_valid_image() {
551551+ let temp_dir = tempfile::tempdir().unwrap();
552552+ let image_path = temp_dir.path().join("test.png");
553553+554554+ // Create a minimal valid 1x1 PNG file using the image crate to ensure correct CRCs
555555+ let img = image::ImageBuffer::<image::Rgba<u8>, _>::from_fn(1, 1, |_x, _y| {
556556+ image::Rgba([255, 0, 0, 255])
557557+ });
558558+ img.save(&image_path).unwrap();
559559+560560+ let result = get_placeholder(&image_path, None);
561561+562562+ if let Err(e) = &result {
563563+ eprintln!("get_placeholder failed: {:?}", e.source());
564564+ }
565565+566566+ assert!(result.is_ok());
567567+ let placeholder = result.unwrap();
568568+ assert!(!placeholder.thumbhash.is_empty());
569569+ assert!(!placeholder.thumbhash_base64.is_empty());
570570+ }
571571+}
crates/maudit/src/assets/image_cache.rs (+4 -8)
···338338339339 #[test]
340340 fn test_build_options_integration() {
341341- use crate::build::options::{AssetsOptions, BuildOptions};
341341+ use crate::build::options::BuildOptions;
342342343343 // Test that BuildOptions can configure the cache directory
344344 let custom_cache = PathBuf::from("/tmp/custom_maudit_cache");
345345 let build_options = BuildOptions {
346346- assets: AssetsOptions {
347347- image_cache_dir: custom_cache.clone(),
348348- ..Default::default()
349349- },
346346+ cache_dir: custom_cache.clone(),
350347 ..Default::default()
351348 };
352349353353- // Create cache with build options
354354- let cache = ImageCache::with_cache_dir(&build_options.assets.image_cache_dir);
350350+ let cache = ImageCache::with_cache_dir(build_options.assets_cache_dir());
355351356352 // Verify it uses the configured directory
357357- assert_eq!(cache.get_cache_dir(), custom_cache);
353353+ assert_eq!(cache.get_cache_dir(), custom_cache.join("assets"));
358354 }
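// Illustrative sketch (not part of the diff): an explicit `assets_cache_dir` bypasses the
// `{cache_dir}/assets` default asserted above.
let build_options = BuildOptions {
    assets_cache_dir: Some(PathBuf::from("/tmp/explicit_assets_cache")),
    ..Default::default()
};
assert_eq!(
    build_options.assets_cache_dir(),
    PathBuf::from("/tmp/explicit_assets_cache")
);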
359355360356 #[test]
crates/maudit/src/assets.rs (+167 -46)
···432432}
433433434434fn make_filename(path: &Path, hash: &String, extension: Option<&str>) -> PathBuf {
435435- let file_stem = path.file_stem().unwrap();
436436- let sanitized_stem = sanitize_filename::default_sanitize_file_name(file_stem.to_str().unwrap());
435435+ let file_stem = path.file_stem().and_then(|s| s.to_str()).unwrap_or("asset");
436436+437437+ let sanitized_stem = sanitize_filename::default_sanitize_file_name(file_stem);
437438438439 let mut filename = PathBuf::new();
439440 filename.push(format!("{}.{}", sanitized_stem, hash));
···532533533534#[cfg(test)]
534535mod tests {
535535- use super::*;
536536- use std::env;
536536+ use std::path::PathBuf;
537537+538538+ use crate::{
539539+ AssetHashingStrategy,
540540+ assets::{
541541+ Asset, ImageFormat, ImageOptions, RouteAssets, RouteAssetsOptions, StyleOptions,
542542+ make_filename,
543543+ },
544544+ };
537545538538- fn setup_temp_dir() -> PathBuf {
539539- // Create a temporary directory and test files
540540- let temp_dir = env::temp_dir().join("maudit_test");
541541- std::fs::create_dir_all(&temp_dir).unwrap();
546546+ fn setup_temp_dir() -> tempfile::TempDir {
547547+ let temp_dir = tempfile::tempdir().unwrap();
542548543543- std::fs::write(temp_dir.join("style.css"), "body { background: red; }").unwrap();
544544- std::fs::write(temp_dir.join("script.js"), "console.log('Hello, world!');").unwrap();
545545- std::fs::write(temp_dir.join("image.png"), b"").unwrap();
549549+ std::fs::write(
550550+ temp_dir.path().join("style.css"),
551551+ "body { background: red; }",
552552+ )
553553+ .unwrap();
554554+ std::fs::write(
555555+ temp_dir.path().join("script.js"),
556556+ "console.log('Hello, world!');",
557557+ )
558558+ .unwrap();
559559+ std::fs::write(temp_dir.path().join("image.png"), b"").unwrap();
546560 temp_dir
547561 }
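// Design note (not part of the diff): returning the `tempfile::TempDir` guard instead of a bare
// `PathBuf` keeps the directory alive until the guard is dropped at the end of each test:
//
//     let dir = tempfile::tempdir().unwrap();
//     let path = dir.path().to_path_buf();
//     drop(dir); // the temporary directory is removed here
//     assert!(!path.exists());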
548562···550564 fn test_add_style() {
551565 let temp_dir = setup_temp_dir();
552566 let mut page_assets = RouteAssets::default();
553553- page_assets.add_style(temp_dir.join("style.css")).unwrap();
567567+ page_assets
568568+ .add_style(temp_dir.path().join("style.css"))
569569+ .unwrap();
554570555571 assert!(page_assets.styles.len() == 1);
556572 }
···561577 let mut page_assets = RouteAssets::default();
562578563579 page_assets
564564- .include_style(temp_dir.join("style.css"))
580580+ .include_style(temp_dir.path().join("style.css"))
565581 .unwrap();
566582567583 assert!(page_assets.styles.len() == 1);
···573589 let temp_dir = setup_temp_dir();
574590 let mut page_assets = RouteAssets::default();
575591576576- page_assets.add_script(temp_dir.join("script.js")).unwrap();
592592+ page_assets
593593+ .add_script(temp_dir.path().join("script.js"))
594594+ .unwrap();
577595 assert!(page_assets.scripts.len() == 1);
578596 }
579597···583601 let mut page_assets = RouteAssets::default();
584602585603 page_assets
586586- .include_script(temp_dir.join("script.js"))
604604+ .include_script(temp_dir.path().join("script.js"))
587605 .unwrap();
588606589607 assert!(page_assets.scripts.len() == 1);
···595613 let temp_dir = setup_temp_dir();
596614 let mut page_assets = RouteAssets::default();
597615598598- page_assets.add_image(temp_dir.join("image.png")).unwrap();
616616+ page_assets
617617+ .add_image(temp_dir.path().join("image.png"))
618618+ .unwrap();
599619 assert!(page_assets.images.len() == 1);
600620 }
601621···604624 let temp_dir = setup_temp_dir();
605625 let mut page_assets = RouteAssets::default();
606626607607- let image = page_assets.add_image(temp_dir.join("image.png")).unwrap();
627627+ let image = page_assets
628628+ .add_image(temp_dir.path().join("image.png"))
629629+ .unwrap();
608630 assert_eq!(image.url().chars().next(), Some('/'));
609631610610- let script = page_assets.add_script(temp_dir.join("script.js")).unwrap();
632632+ let script = page_assets
633633+ .add_script(temp_dir.path().join("script.js"))
634634+ .unwrap();
611635 assert_eq!(script.url().chars().next(), Some('/'));
612636613613- let style = page_assets.add_style(temp_dir.join("style.css")).unwrap();
637637+ let style = page_assets
638638+ .add_style(temp_dir.path().join("style.css"))
639639+ .unwrap();
614640 assert_eq!(style.url().chars().next(), Some('/'));
615641 }
616642···619645 let temp_dir = setup_temp_dir();
620646 let mut page_assets = RouteAssets::default();
621647622622- let image = page_assets.add_image(temp_dir.join("image.png")).unwrap();
648648+ let image = page_assets
649649+ .add_image(temp_dir.path().join("image.png"))
650650+ .unwrap();
623651 assert!(image.url().contains(&image.hash));
624652625625- let script = page_assets.add_script(temp_dir.join("script.js")).unwrap();
653653+ let script = page_assets
654654+ .add_script(temp_dir.path().join("script.js"))
655655+ .unwrap();
626656 assert!(script.url().contains(&script.hash));
627657628628- let style = page_assets.add_style(temp_dir.join("style.css")).unwrap();
658658+ let style = page_assets
659659+ .add_style(temp_dir.path().join("style.css"))
660660+ .unwrap();
629661 assert!(style.url().contains(&style.hash));
630662 }
631663···634666 let temp_dir = setup_temp_dir();
635667 let mut page_assets = RouteAssets::default();
636668637637- let image = page_assets.add_image(temp_dir.join("image.png")).unwrap();
669669+ let image = page_assets
670670+ .add_image(temp_dir.path().join("image.png"))
671671+ .unwrap();
638672 assert!(image.build_path().to_string_lossy().contains(&image.hash));
639673640640- let script = page_assets.add_script(temp_dir.join("script.js")).unwrap();
674674+ let script = page_assets
675675+ .add_script(temp_dir.path().join("script.js"))
676676+ .unwrap();
641677 assert!(script.build_path().to_string_lossy().contains(&script.hash));
642678643643- let style = page_assets.add_style(temp_dir.join("style.css")).unwrap();
679679+ let style = page_assets
680680+ .add_style(temp_dir.path().join("style.css"))
681681+ .unwrap();
644682 assert!(style.build_path().to_string_lossy().contains(&style.hash));
645683 }
646684647685 #[test]
648686 fn test_image_hash_different_options() {
649687 let temp_dir = setup_temp_dir();
650650- let image_path = temp_dir.join("image.png");
688688+ let image_path = temp_dir.path().join("image.png");
651689652652- // Create a simple test PNG (1x1 transparent pixel)
653653- let png_data = [
654654- 0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, 0x00, 0x00, 0x00, 0x0D, 0x49, 0x48,
655655- 0x44, 0x52, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x08, 0x06, 0x00, 0x00,
656656- 0x00, 0x1F, 0x15, 0xC4, 0x89, 0x00, 0x00, 0x00, 0x0B, 0x49, 0x44, 0x41, 0x54, 0x78,
657657- 0x9C, 0x63, 0x00, 0x01, 0x00, 0x00, 0x05, 0x00, 0x01, 0x0D, 0x0A, 0x2D, 0xB4, 0x00,
658658- 0x00, 0x00, 0x00, 0x49, 0x45, 0x4E, 0x44, 0xAE, 0x42, 0x60, 0x82,
659659- ];
660660- std::fs::write(&image_path, png_data).unwrap();
690690+ let img = image::ImageBuffer::<image::Rgba<u8>, _>::from_fn(1, 1, |_x, _y| {
691691+ image::Rgba([255, 0, 0, 255])
692692+ });
693693+ img.save(&image_path).unwrap();
661694662662- let mut page_assets = RouteAssets::default();
695695+ let mut page_assets = RouteAssets::new(
696696+ &RouteAssetsOptions {
697697+ hashing_strategy: AssetHashingStrategy::Precise,
698698+ ..Default::default()
699699+ },
700700+ None,
701701+ );
663702664703 // Test that different options produce different hashes
665704 let image_default = page_assets.add_image(&image_path).unwrap();
···716755 #[test]
717756 fn test_image_hash_same_options() {
718757 let temp_dir = setup_temp_dir();
719719- let image_path = temp_dir.join("image.png");
758758+ let image_path = temp_dir.path().join("image.png");
720759721760 // Create a simple test PNG (1x1 transparent pixel)
722761 let png_data = [
···728767 ];
729768 std::fs::write(&image_path, png_data).unwrap();
730769731731- let mut page_assets = RouteAssets::default();
770770+ let mut page_assets = RouteAssets::new(
771771+ &RouteAssetsOptions {
772772+ hashing_strategy: AssetHashingStrategy::Precise,
773773+ ..Default::default()
774774+ },
775775+ None,
776776+ );
732777733778 // Same options should produce same hash
734779 let image1 = page_assets
···762807 #[test]
763808 fn test_style_hash_different_options() {
764809 let temp_dir = setup_temp_dir();
765765- let style_path = temp_dir.join("style.css");
810810+ let style_path = temp_dir.path().join("style.css");
766811767767- let mut page_assets = RouteAssets::new(&RouteAssetsOptions::default(), None);
812812+ let mut page_assets = RouteAssets::new(
813813+ &RouteAssetsOptions {
814814+ hashing_strategy: AssetHashingStrategy::Precise,
815815+ ..Default::default()
816816+ },
817817+ None,
818818+ );
768819769820 // Test that different tailwind options produce different hashes
770821 let style_default = page_assets.add_style(&style_path).unwrap();
···784835785836 // Create two identical files with different paths
786837 let content = "body { background: blue; }";
787787- let style1_path = temp_dir.join("style1.css");
788788- let style2_path = temp_dir.join("style2.css");
838838+ let style1_path = temp_dir.path().join("style1.css");
839839+ let style2_path = temp_dir.path().join("style2.css");
789840790841 std::fs::write(&style1_path, content).unwrap();
791842 std::fs::write(&style2_path, content).unwrap();
792843793793- let mut page_assets = RouteAssets::new(&RouteAssetsOptions::default(), None);
844844+ let mut page_assets = RouteAssets::new(
845845+ &RouteAssetsOptions {
846846+ hashing_strategy: AssetHashingStrategy::Precise,
847847+ ..Default::default()
848848+ },
849849+ None,
850850+ );
794851795852 let style1 = page_assets.add_style(&style1_path).unwrap();
796853 let style2 = page_assets.add_style(&style2_path).unwrap();
···804861 #[test]
805862 fn test_hash_includes_content() {
806863 let temp_dir = setup_temp_dir();
807807- let style_path = temp_dir.join("dynamic_style.css");
864864+ let style_path = temp_dir.path().join("dynamic_style.css");
808865809809- let assets_options = RouteAssetsOptions::default();
810810- let mut page_assets = RouteAssets::new(&assets_options, None);
866866+ let mut page_assets = RouteAssets::new(
867867+ &RouteAssetsOptions {
868868+ hashing_strategy: AssetHashingStrategy::Precise,
869869+ ..Default::default()
870870+ },
871871+ None,
872872+ );
811873812874 // Write first content and get hash
813875 std::fs::write(&style_path, "body { background: red; }").unwrap();
···823885 hash1, hash2,
824886 "Different content should produce different hashes"
825887 );
888888+ }
889889+890890+ #[test]
891891+ fn test_make_filename_normal_path() {
892892+ let path = PathBuf::from("/foo/bar/test.png");
893893+ let hash = "abc12".to_string();
894894+895895+ let filename = make_filename(&path, &hash, Some("png"));
896896+
897897+ // Format is `{stem}.{hash}.{ext}`
898898+ assert_eq!(filename.to_string_lossy(), "test.abc12.png");
899899+ }
900900+901901+ #[test]
902902+ fn test_make_filename_no_extension() {
903903+ let path = PathBuf::from("/foo/bar/test");
904904+ let hash = "abc12".to_string();
905905+906906+ let filename = make_filename(&path, &hash, None);
907907+908908+ assert_eq!(filename.to_string_lossy(), "test.abc12");
909909+ }
910910+911911+ #[test]
912912+ fn test_make_filename_fallback_for_root_path() {
913913+ // Root path has no file stem
914914+ let path = PathBuf::from("/");
915915+ let hash = "abc12".to_string();
916916+917917+ let filename = make_filename(&path, &hash, Some("css"));
918918+
919919+ // Should fall back to "asset"
920920+ assert_eq!(filename.to_string_lossy(), "asset.abc12.css");
921921+ }
922922+923923+ #[test]
924924+ fn test_make_filename_fallback_for_dotdot_path() {
925925+ // Path ending with ".." has no file stem
926926+ let path = PathBuf::from("/foo/..");
927927+ let hash = "xyz99".to_string();
928928+929929+ let filename = make_filename(&path, &hash, Some("js"));
930930+
931931+ // Should fall back to "asset"
932932+ assert_eq!(filename.to_string_lossy(), "asset.xyz99.js");
933933+ }
934934+935935+ #[test]
936936+ fn test_make_filename_with_special_characters() {
937937+ // Test that special characters get sanitized
938938+ let path = PathBuf::from("/foo/test:file*.txt");
939939+ let hash = "def45".to_string();
940940+941941+ let filename = make_filename(&path, &hash, Some("txt"));
942942+943943+ // Special characters should be replaced with underscores
944944+ let result = filename.to_string_lossy();
945945+ assert!(result.contains("test_file_"));
946946+ assert!(result.ends_with(".def45.txt"));
826947 }
827948}
crates/maudit/src/build/options.rs (+104 -13)
···11-use std::{env, path::PathBuf};
11+use std::{fs, path::PathBuf};
2233use crate::{assets::RouteAssetsOptions, is_dev, sitemap::SitemapOptions};
44···3636/// assets: AssetsOptions {
3737/// assets_dir: "_assets".into(),
3838/// tailwind_binary_path: "./node_modules/.bin/tailwindcss".into(),
3939-/// image_cache_dir: ".cache/maudit/images".into(),
4039/// ..Default::default()
4140/// },
4241/// prefetch: PrefetchOptions {
···6160 /// At the speed Maudit operates at, not cleaning the output directory may offer a significant performance improvement at the cost of potentially serving stale content.
6261 pub clean_output_dir: bool,
63626363+ /// Whether to enable incremental builds.
6464+ ///
6565+ /// When enabled, Maudit tracks which assets are used by which routes and only rebuilds
6666+ /// routes affected by changed files. This can significantly speed up rebuilds when only
6767+ /// a few files have changed.
6868+ ///
6969+ /// Defaults to `true` in dev mode (`maudit dev`) and `false` in production builds.
7070+ pub incremental: bool,
7171+7272+ /// Directory for build cache storage (incremental build state, etc.).
7373+ ///
7474+ /// Defaults to `target/maudit_cache/{package_name}` where `{package_name}` is derived
7575+ /// from the binary name, falling back to the current directory name.
7676+ pub cache_dir: PathBuf,
7777+7878+ /// Directory for caching processed assets (images, etc.).
7979+ ///
8080+ /// If `None`, defaults to `{cache_dir}/assets`.
8181+ pub assets_cache_dir: Option<PathBuf>,
8282+6483 pub assets: AssetsOptions,
65846685 pub prefetch: PrefetchOptions,
···124143 hashing_strategy: self.assets.hashing_strategy,
125144 }
126145 }
146146+147147+ /// Returns the directory for caching processed assets (images, etc.).
148148+ /// Uses `assets_cache_dir` if set, otherwise defaults to `{cache_dir}/assets`.
149149+ pub fn assets_cache_dir(&self) -> PathBuf {
150150+ self.assets_cache_dir
151151+ .clone()
152152+ .unwrap_or_else(|| self.cache_dir.join("assets"))
153153+ }
127154}
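// Illustrative sketch (not part of the diff): how the new fields and the helper above interact;
// paths are examples only.
let options = BuildOptions {
    incremental: true,
    cache_dir: PathBuf::from("target/maudit_cache/my_site"),
    assets_cache_dir: None, // resolved to `{cache_dir}/assets` by `assets_cache_dir()`
    ..Default::default()
};
assert_eq!(
    options.assets_cache_dir(),
    PathBuf::from("target/maudit_cache/my_site/assets")
);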
128155129156#[derive(Clone)]
···139166 /// Note that this value is not automatically joined with the `output_dir` in `BuildOptions`. Use [`BuildOptions::route_assets_options()`] to get a `RouteAssetsOptions` with the correct final path.
140167 pub assets_dir: PathBuf,
141168142142- /// Directory to use for image cache storage.
143143- /// Defaults to `target/maudit_cache/images`.
144144- ///
145145- /// This cache is used to store processed images and their placeholders to speed up subsequent builds.
146146- pub image_cache_dir: PathBuf,
147147-148169 /// Strategy to use when hashing assets for fingerprinting.
149170 ///
150171 /// Defaults to [`AssetHashingStrategy::Precise`] in production builds, and [`AssetHashingStrategy::FastImprecise`] in development builds. Note that this means that the cache isn't shared between dev and prod builds by default, if you have a lot of assets you may want to set this to the same value in both environments.
···164185 Self {
165186 tailwind_binary_path: "tailwindcss".into(),
166187 assets_dir: "_maudit".into(),
167167- image_cache_dir: {
168168- let target_dir =
169169- env::var("CARGO_TARGET_DIR").unwrap_or_else(|_| "target".to_string());
170170- PathBuf::from(target_dir).join("maudit_cache/images")
171171- },
172188 hashing_strategy: if is_dev() {
173189 AssetHashingStrategy::FastImprecise
174190 } else {
···196212/// ```
197213impl Default for BuildOptions {
198214 fn default() -> Self {
215215+ let site_name = get_site_name();
216216+ let cache_dir = find_target_dir()
217217+ .unwrap_or_else(|_| PathBuf::from("target"))
218218+ .join("maudit_cache")
219219+ .join(&site_name);
220220+199221 Self {
200222 base_url: None,
201223 output_dir: "dist".into(),
202224 static_dir: "static".into(),
203225 clean_output_dir: true,
226226+ incremental: is_dev(),
227227+ cache_dir,
228228+ assets_cache_dir: None,
204229 prefetch: PrefetchOptions::default(),
205230 assets: AssetsOptions::default(),
206231 sitemap: SitemapOptions::default(),
207232 }
208233 }
209234}
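// Illustrative sketch (not part of the diff): `incremental` tracks the build mode by default,
// so production builds keep doing full builds unless it is enabled explicitly.
let options = BuildOptions::default();
assert_eq!(options.incremental, is_dev());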
235235+236236+/// Get the site name for cache directory purposes.
237237+///
238238+/// Uses the current executable's name (which matches the package/binary name),
239239+/// falling back to the current directory name.
240240+fn get_site_name() -> String {
241241+ // Get the binary name from the current executable
242242+ std::env::current_exe()
243243+ .ok()
244244+ .and_then(|p| p.file_name().map(|s| s.to_string_lossy().to_string()))
245245+ .unwrap_or_else(|| {
246246+ // Fallback to current directory name
247247+ std::env::current_dir()
248248+ .ok()
249249+ .and_then(|p| p.file_name().map(|s| s.to_string_lossy().to_string()))
250250+ .unwrap_or_else(|| "default".to_string())
251251+ })
252252+}
253253+254254+/// Find the target directory using multiple strategies
255255+///
256256+/// This function tries multiple approaches to locate the target directory:
257257+/// 1. CARGO_TARGET_DIR / CARGO_BUILD_TARGET_DIR environment variables
258258+/// 2. Local ./target directory
259259+/// 3. Workspace root target directory (walking up to find [workspace])
260260+/// 4. Fallback to relative "target" path
261261+fn find_target_dir() -> Result<PathBuf, std::io::Error> {
262262+ // 1. Check CARGO_TARGET_DIR and CARGO_BUILD_TARGET_DIR environment variables
263263+ for env_var in ["CARGO_TARGET_DIR", "CARGO_BUILD_TARGET_DIR"] {
264264+ if let Ok(target_dir) = std::env::var(env_var) {
265265+ let path = PathBuf::from(&target_dir);
266266+ if path.exists() {
267267+ return Ok(path);
268268+ }
269269+ }
270270+ }
271271+272272+ // 2. Look for target directory in current directory
273273+ let local_target = PathBuf::from("target");
274274+ if local_target.exists() {
275275+ return Ok(local_target);
276276+ }
277277+278278+ // 3. Try to find workspace root by looking for Cargo.toml with [workspace]
279279+ let mut current = std::env::current_dir()?;
280280+ loop {
281281+ let cargo_toml = current.join("Cargo.toml");
282282+ if cargo_toml.exists()
283283+ && let Ok(content) = fs::read_to_string(&cargo_toml)
284284+ && content.contains("[workspace]")
285285+ {
286286+ let workspace_target = current.join("target");
287287+ if workspace_target.exists() {
288288+ return Ok(workspace_target);
289289+ }
290290+ }
291291+292292+ // Move up to parent directory
293293+ if !current.pop() {
294294+ break;
295295+ }
296296+ }
297297+298298+ // 4. Final fallback to relative path
299299+ Ok(PathBuf::from("target"))
300300+}
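// Illustrative note (not part of the diff): for a site binary named `my_site` built with a
// plain `./target` directory, the two helpers above combine (in `Default for BuildOptions`)
// into a default cache directory of `target/maudit_cache/my_site`.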
crates/maudit/src/build/state.rs (new file, +1137)
···11+use rustc_hash::{FxHashMap, FxHashSet};
22+use serde::{Deserialize, Serialize};
33+use std::fs;
44+use std::path::{Path, PathBuf};
55+66+/// Identifies a specific route or variant for incremental rebuilds
77+#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
88+pub enum RouteIdentifier {
99+ /// A base route with optional page parameters
1010+ /// Params are stored as a sorted Vec for hashing purposes
1111+ Base {
1212+ route_path: String,
1313+ params: Option<Vec<(String, Option<String>)>>,
1414+ },
1515+ /// A variant route with optional page parameters
1616+ /// Params are stored as a sorted Vec for hashing purposes
1717+ Variant {
1818+ variant_id: String,
1919+ variant_path: String,
2020+ params: Option<Vec<(String, Option<String>)>>,
2121+ },
2222+}
2323+2424+impl RouteIdentifier {
2525+ pub fn base(route_path: String, params: Option<FxHashMap<String, Option<String>>>) -> Self {
2626+ Self::Base {
2727+ route_path,
2828+ params: params.map(|p| {
2929+ let mut sorted: Vec<_> = p.into_iter().collect();
3030+ sorted.sort_by(|a, b| a.0.cmp(&b.0));
3131+ sorted
3232+ }),
3333+ }
3434+ }
3535+3636+ pub fn variant(
3737+ variant_id: String,
3838+ variant_path: String,
3939+ params: Option<FxHashMap<String, Option<String>>>,
4040+ ) -> Self {
4141+ Self::Variant {
4242+ variant_id,
4343+ variant_path,
4444+ params: params.map(|p| {
4545+ let mut sorted: Vec<_> = p.into_iter().collect();
4646+ sorted.sort_by(|a, b| a.0.cmp(&b.0));
4747+ sorted
4848+ }),
4949+ }
5050+ }
5151+}
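// Illustrative sketch (not part of the diff): parameters are normalized into a key-sorted Vec,
// so identifiers built from maps with the same entries compare equal regardless of insertion
// order. The route path and parameters are examples only.
let mut params = FxHashMap::default();
params.insert("slug".to_string(), Some("hello-world".to_string()));
params.insert("page".to_string(), None);
let id = RouteIdentifier::base("/blog/[slug]".to_string(), Some(params));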
5252+5353+/// Tracks build state for incremental builds
5454+#[derive(Debug, Default, Serialize, Deserialize)]
5555+pub struct BuildState {
5656+ /// Maps asset paths to routes that use them
5757+ /// Key: canonicalized asset path
5858+ /// Value: set of routes using this asset
5959+ pub asset_to_routes: FxHashMap<PathBuf, FxHashSet<RouteIdentifier>>,
6060+6161+ /// Maps source file paths to routes defined in them
6262+ /// Key: canonicalized source file path (e.g., src/pages/index.rs)
6363+ /// Value: set of routes defined in this source file
6464+ pub source_to_routes: FxHashMap<PathBuf, FxHashSet<RouteIdentifier>>,
6565+6666+ /// Maps content file paths to routes that use them
6767+ /// Key: canonicalized content file path (e.g., content/articles/hello.md)
6868+ /// Value: set of routes using this specific content file
6969+ /// This provides granular tracking - if only hello.md changes, only routes
7070+ /// that accessed hello.md need to be rebuilt.
7171+ pub content_file_to_routes: FxHashMap<PathBuf, FxHashSet<RouteIdentifier>>,
7272+7373+ /// Maps content file paths to the content source that owns them
7474+ /// Key: canonicalized content file path (e.g., content/articles/hello.md)
7575+ /// Value: content source name (e.g., "articles")
7676+ /// This allows selective re-initialization of only the content sources
7777+ /// whose files have changed.
7878+ pub content_file_to_source: FxHashMap<PathBuf, String>,
7979+8080+ /// Stores all bundler input paths from the last build
8181+ /// This needs to be preserved to ensure consistent bundling
8282+ pub bundler_inputs: Vec<String>,
8383+}
8484+8585+impl BuildState {
8686+ pub fn new() -> Self {
8787+ Self::default()
8888+ }
8989+9090+ /// Load build state from disk cache
9191+ pub fn load(cache_dir: &Path) -> Result<Self, Box<dyn std::error::Error>> {
9292+ let state_path = cache_dir.join("build_state.json");
9393+9494+ if !state_path.exists() {
9595+ return Ok(Self::new());
9696+ }
9797+9898+ let content = fs::read_to_string(&state_path)?;
9999+ let state: BuildState = serde_json::from_str(&content)?;
100100+ Ok(state)
101101+ }
102102+103103+ /// Save build state to disk cache
104104+ pub fn save(&self, cache_dir: &Path) -> Result<(), Box<dyn std::error::Error>> {
105105+ fs::create_dir_all(cache_dir)?;
106106+ let state_path = cache_dir.join("build_state.json");
107107+ let content = serde_json::to_string_pretty(self)?;
108108+ fs::write(state_path, content)?;
109109+ Ok(())
110110+ }
111111+112112+ /// Add an asset->route mapping
113113+ pub fn track_asset(&mut self, asset_path: PathBuf, route_id: RouteIdentifier) {
114114+ self.asset_to_routes
115115+ .entry(asset_path)
116116+ .or_default()
117117+ .insert(route_id);
118118+ }
119119+120120+ /// Add a source file->route mapping
121121+ /// This tracks which .rs file defines which routes for incremental rebuilds
122122+ pub fn track_source_file(&mut self, source_path: PathBuf, route_id: RouteIdentifier) {
123123+ self.source_to_routes
124124+ .entry(source_path)
125125+ .or_default()
126126+ .insert(route_id);
127127+ }
128128+129129+ /// Add a content file->route mapping
130130+ /// This tracks which specific content files are used by which routes for incremental rebuilds.
131131+ /// This provides granular tracking - only routes that actually accessed a specific file
132132+ /// will be rebuilt when that file changes.
133133+ ///
134134+ /// The file path is canonicalized before storage to ensure consistent lookups when
135135+ /// comparing against absolute paths from the file watcher.
136136+ pub fn track_content_file(&mut self, file_path: PathBuf, route_id: RouteIdentifier) {
137137+ // Canonicalize the path to ensure consistent matching with absolute paths from the watcher
138138+ let canonical_path = file_path.canonicalize().unwrap_or(file_path);
139139+ self.content_file_to_routes
140140+ .entry(canonical_path)
141141+ .or_default()
142142+ .insert(route_id);
143143+ }
144144+145145+ /// Add a content file->source mapping
146146+ /// This tracks which content source owns each file, allowing selective re-initialization
147147+ /// of only the sources whose files have changed.
148148+ ///
149149+ /// The file path is canonicalized before storage to ensure consistent lookups.
150150+ pub fn track_content_file_source(&mut self, file_path: PathBuf, source_name: String) {
151151+ let canonical_path = file_path.canonicalize().unwrap_or(file_path);
152152+ self.content_file_to_source
153153+ .insert(canonical_path, source_name);
154154+ }
155155+156156+ /// Get the names of content sources that have files in the changed files list.
157157+ /// Returns `None` if any changed content file is not tracked (new file), indicating
158158+ /// that all content sources should be re-initialized.
159159+ ///
160160+ /// Only considers files that look like content files (have common content extensions).
161161+ pub fn get_affected_content_sources(
162162+ &self,
163163+ changed_files: &[PathBuf],
164164+ ) -> Option<FxHashSet<String>> {
165165+ let content_extensions = ["md", "mdx", "yaml", "yml", "json", "toml"];
166166+ let mut affected_sources = FxHashSet::default();
167167+168168+ for changed_file in changed_files {
169169+ // Skip files that don't look like content files
170170+ let is_content_file = changed_file
171171+ .extension()
172172+ .and_then(|ext| ext.to_str())
173173+ .map(|ext| content_extensions.contains(&ext))
174174+ .unwrap_or(false);
175175+176176+ if !is_content_file {
177177+ continue;
178178+ }
179179+180180+ // Try to find the source for this file
181181+ let canonical = changed_file.canonicalize().ok();
182182+183183+ let source = canonical
184184+ .as_ref()
185185+ .and_then(|c| self.content_file_to_source.get(c))
186186+ .or_else(|| self.content_file_to_source.get(changed_file));
187187+188188+ match source {
189189+ Some(source_name) => {
190190+ affected_sources.insert(source_name.clone());
191191+ }
192192+ None => {
193193+ // Unknown content file - could be a new file
194194+ // Fall back to re-initializing all sources
195195+ return None;
196196+ }
197197+ }
198198+ }
199199+200200+ Some(affected_sources)
201201+ }
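// Illustrative sketch (not part of the diff): deciding which content sources to re-initialize
// after a change event. `state` and `changed_files` are assumed values.
match state.get_affected_content_sources(&changed_files) {
    Some(sources) => { /* re-initialize only these sources, e.g. {"articles"} */ }
    None => { /* an untracked content file appeared: re-initialize every source */ }
}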
202202+203203+ /// Get all routes affected by changes to specific files.
204204+ ///
205205+ /// Returns `Some(routes)` if all changed files were found in the mappings,
206206+ /// or `None` if any changed file is untracked (meaning we need a full rebuild).
207207+ ///
208208+ /// This handles the case where files like those referenced by `include_str!()`
209209+ /// are not tracked at the route level - when these change, we fall back to
210210+ /// rebuilding all routes to ensure correctness.
211211+ ///
212212+ /// Note: Existing directories are not considered "untracked" - they are checked
213213+ /// via prefix matching, but a new/unknown directory won't trigger a full rebuild.
214214+ pub fn get_affected_routes(
215215+ &self,
216216+ changed_files: &[PathBuf],
217217+ ) -> Option<FxHashSet<RouteIdentifier>> {
218218+ let mut affected_routes = FxHashSet::default();
219219+ let mut has_untracked_file = false;
220220+221221+ for changed_file in changed_files {
222222+ let mut file_was_tracked = false;
223223+224224+ // Canonicalize the changed file path for consistent comparison
225225+ // All asset paths in asset_to_routes are stored as canonical paths
226226+ let canonical_changed = changed_file.canonicalize().ok();
227227+228228+ // Check source file mappings first (for .rs files)
229229+ if let Some(canonical) = &canonical_changed
230230+ && let Some(routes) = self.source_to_routes.get(canonical)
231231+ {
232232+ affected_routes.extend(routes.iter().cloned());
233233+ file_was_tracked = true;
234234+ // Continue to also check asset mappings (a file could be both)
235235+ }
236236+237237+ // Also check with original path for source files
238238+ if let Some(routes) = self.source_to_routes.get(changed_file) {
239239+ affected_routes.extend(routes.iter().cloned());
240240+ file_was_tracked = true;
241241+ }
242242+243243+ // Try exact match with canonical path for assets
244244+ if let Some(canonical) = &canonical_changed
245245+ && let Some(routes) = self.asset_to_routes.get(canonical)
246246+ {
247247+ affected_routes.extend(routes.iter().cloned());
248248+ file_was_tracked = true;
249249+ }
250250+251251+ // Fallback: try exact match with original path (shouldn't normally match)
252252+ if let Some(routes) = self.asset_to_routes.get(changed_file) {
253253+ affected_routes.extend(routes.iter().cloned());
254254+ file_was_tracked = true;
255255+ }
256256+257257+ // Check if this is a content file with direct file->route tracking
258258+ if let Some(canonical) = &canonical_changed
259259+ && let Some(routes) = self.content_file_to_routes.get(canonical)
260260+ {
261261+ affected_routes.extend(routes.iter().cloned());
262262+ file_was_tracked = true;
263263+ }
264264+265265+ // Also check with original path for content files
266266+ if let Some(routes) = self.content_file_to_routes.get(changed_file) {
267267+ affected_routes.extend(routes.iter().cloned());
268268+ file_was_tracked = true;
269269+ }
270270+271271+ // Directory prefix check: find all routes using assets within this directory.
272272+ // This handles two cases:
273273+ // 1. A directory was modified - rebuild all routes using assets in that dir
274274+ // 2. A directory was renamed/deleted - the old path no longer exists but we
275275+ // still need to rebuild routes that used assets under that path
276276+ //
277277+ // We do this check if:
278278+ // - The path currently exists as a directory, OR
279279+ // - The path doesn't exist (could be a deleted/renamed directory)
280280+ let is_existing_directory = changed_file.is_dir();
281281+ let path_does_not_exist = !changed_file.exists();
282282+283283+ if is_existing_directory || path_does_not_exist {
284284+ // Use original path for prefix matching (canonical won't exist for deleted dirs)
285285+ for (asset_path, routes) in &self.asset_to_routes {
286286+ if asset_path.starts_with(changed_file) {
287287+ affected_routes.extend(routes.iter().cloned());
288288+ file_was_tracked = true;
289289+ }
290290+ }
291291+ // Also check source files for directory prefix
292292+ for (source_path, routes) in &self.source_to_routes {
293293+ if source_path.starts_with(changed_file) {
294294+ affected_routes.extend(routes.iter().cloned());
295295+ file_was_tracked = true;
296296+ }
297297+ }
298298+ // Also check content files for directory prefix
299299+ for (content_path, routes) in &self.content_file_to_routes {
300300+ if content_path.starts_with(changed_file) {
301301+ affected_routes.extend(routes.iter().cloned());
302302+ file_was_tracked = true;
303303+ }
304304+ }
305305+ }
306306+307307+ // Flag as untracked (triggering full rebuild) if:
308308+ // 1. The file wasn't found in any mapping, AND
309309+ // 2. It's not a currently-existing directory (new directories are OK to ignore)
310310+ //
311311+ // For non-existent paths that weren't matched:
312312+ // - If the path has a file extension, treat it as a deleted file → full rebuild
313313+ // - If the path has no extension, it might be a deleted directory → allow
314314+ // (we already checked prefix matching above)
315315+ //
316316+ // This is conservative: we'd rather rebuild too much than too little.
317317+ if !file_was_tracked && !is_existing_directory {
318318+ if path_does_not_exist {
319319+ // For deleted paths, check if it looks like a file (has extension)
320320+ // If it has an extension, it was probably a file → trigger full rebuild
321321+ // If no extension, it might have been a directory → don't trigger
322322+ let has_extension = changed_file
323323+ .extension()
324324+ .map(|ext| !ext.is_empty())
325325+ .unwrap_or(false);
326326+327327+ if has_extension {
328328+ has_untracked_file = true;
329329+ }
330330+ } else {
331331+ // Path exists but wasn't tracked → definitely untracked file
332332+ has_untracked_file = true;
333333+ }
334334+ }
335335+ }
336336+337337+ if has_untracked_file {
338338+ // Some files weren't tracked - caller should do a full rebuild
339339+ None
340340+ } else {
341341+ Some(affected_routes)
342342+ }
343343+ }
344344+345345+ /// Clear all tracked data (for full rebuild)
346346+ pub fn clear(&mut self) {
347347+ self.asset_to_routes.clear();
348348+ self.source_to_routes.clear();
349349+ self.content_file_to_routes.clear();
350350+ self.content_file_to_source.clear();
351351+ self.bundler_inputs.clear();
352352+ }
353353+354354+ /// Clear the content file to routes mapping.
355355+ /// This should be called before re-tracking content files after content sources are re-initialized.
356356+ pub fn clear_content_file_mappings(&mut self) {
357357+ self.content_file_to_routes.clear();
358358+ }
359359+360360+ /// Clear content file mappings for specific sources.
361361+ /// This removes both file->routes and file->source mappings for files owned by the given sources.
362362+ /// Called when selectively re-initializing specific content sources.
363363+ pub fn clear_content_mappings_for_sources(&mut self, source_names: &FxHashSet<String>) {
364364+ // Find all files that belong to the specified sources
365365+ let files_to_remove: Vec<PathBuf> = self
366366+ .content_file_to_source
367367+ .iter()
368368+ .filter(|(_, source)| source_names.contains(*source))
369369+ .map(|(path, _)| path.clone())
370370+ .collect();
371371+372372+ // Remove file->source mappings only
373373+ // We DON'T clear file->routes mappings here because:
374374+ // 1. Routes not being rebuilt should keep their mappings
375375+ // 2. Routes being rebuilt will have their mappings cleared separately
376376+ // via clear_content_file_mappings_for_routes()
377377+ for file in &files_to_remove {
378378+ self.content_file_to_source.remove(file);
379379+ }
380380+ }
381381+382382+ /// Remove content file mappings for specific routes.
383383+ /// This is used during incremental builds to clear only the mappings for routes
384384+ /// that will be rebuilt, preserving mappings for routes that won't change.
385385+ pub fn clear_content_file_mappings_for_routes(&mut self, routes: &FxHashSet<RouteIdentifier>) {
386386+ for routes_set in self.content_file_to_routes.values_mut() {
387387+ routes_set.retain(|route| !routes.contains(route));
388388+ }
389389+ // Remove any entries that have no routes left
390390+ self.content_file_to_routes
391391+ .retain(|_, routes_set| !routes_set.is_empty());
392392+ }
393393+394394+ /// Check if a file path is a known content file.
395395+ /// This is used to determine if a new file might be a content file.
396396+ #[allow(dead_code)] // Used in tests and potentially useful for debugging
397397+ pub fn is_known_content_file(&self, file_path: &Path) -> bool {
398398+ if self.content_file_to_routes.contains_key(file_path) {
399399+ return true;
400400+ }
401401+402402+ // Try with canonicalized path
403403+ if let Ok(canonical) = file_path.canonicalize() {
404404+ return self.content_file_to_routes.contains_key(&canonical);
405405+ }
406406+407407+ false
408408+ }
409409+}
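// Illustrative sketch (not part of the diff): the round trip a dev-server rebuild would make
// through this type. `cache_dir` and the paths are examples only.
let cache_dir = PathBuf::from("target/maudit_cache/my_site");
let mut state = BuildState::load(&cache_dir).unwrap_or_default();
state.track_asset(
    PathBuf::from("/project/src/assets/logo.png"),
    RouteIdentifier::base("/".to_string(), None),
);
// On a change event: `None` means "fall back to a full rebuild".
let changed = vec![PathBuf::from("/project/src/assets/logo.png")];
let _affected = state.get_affected_routes(&changed);
state.save(&cache_dir).unwrap();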
410410+411411+#[cfg(test)]
412412+mod tests {
413413+ use super::*;
414414+415415+ fn make_route(path: &str) -> RouteIdentifier {
416416+ RouteIdentifier::base(path.to_string(), None)
417417+ }
418418+419419+ #[test]
420420+ fn test_get_affected_routes_exact_match() {
421421+ let mut state = BuildState::new();
422422+ let asset_path = PathBuf::from("/project/src/assets/logo.png");
423423+ let route = make_route("/");
424424+425425+ state.track_asset(asset_path.clone(), route.clone());
426426+427427+ // Exact match should work and return Some
428428+ let affected = state.get_affected_routes(&[asset_path]).unwrap();
429429+ assert_eq!(affected.len(), 1);
430430+ assert!(affected.contains(&route));
431431+ }
432432+433433+ #[test]
434434+ fn test_get_affected_routes_untracked_file() {
435435+ use std::fs;
436436+ use tempfile::TempDir;
437437+438438+ let mut state = BuildState::new();
439439+440440+ // Create temp files
441441+ let temp_dir = TempDir::new().unwrap();
442442+ let tracked_file = temp_dir.path().join("logo.png");
443443+ let untracked_file = temp_dir.path().join("other.png");
444444+ fs::write(&tracked_file, "tracked").unwrap();
445445+ fs::write(&untracked_file, "untracked").unwrap();
446446+447447+ let route = make_route("/");
448448+ state.track_asset(tracked_file.clone(), route);
449449+450450+ // Untracked file that EXISTS should return None (triggers full rebuild)
451451+ let affected = state.get_affected_routes(&[untracked_file]);
452452+ assert!(affected.is_none());
453453+ }
454454+455455+ #[test]
456456+ fn test_get_affected_routes_mixed_tracked_untracked() {
457457+ use std::fs;
458458+ use tempfile::TempDir;
459459+460460+ let mut state = BuildState::new();
461461+462462+ // Create temp files
463463+ let temp_dir = TempDir::new().unwrap();
464464+ let tracked_file = temp_dir.path().join("logo.png");
465465+ let untracked_file = temp_dir.path().join("other.png");
466466+ fs::write(&tracked_file, "tracked").unwrap();
467467+ fs::write(&untracked_file, "untracked").unwrap();
468468+469469+ let route = make_route("/");
470470+ state.track_asset(tracked_file.canonicalize().unwrap(), route);
471471+472472+ // If any file is untracked, return None (even if some are tracked)
473473+ let affected = state.get_affected_routes(&[tracked_file, untracked_file]);
474474+ assert!(affected.is_none());
475475+ }
476476+477477+ #[test]
478478+ fn test_get_affected_routes_deleted_directory() {
479479+ let mut state = BuildState::new();
480480+481481+ // Track assets under a directory path
482482+ let asset1 = PathBuf::from("/project/src/assets/icons/logo.png");
483483+ let asset2 = PathBuf::from("/project/src/assets/icons/favicon.ico");
484484+ let asset3 = PathBuf::from("/project/src/assets/styles.css");
485485+ let route1 = make_route("/");
486486+ let route2 = make_route("/about");
487487+488488+ state.track_asset(asset1, route1.clone());
489489+ state.track_asset(asset2, route1.clone());
490490+ state.track_asset(asset3, route2.clone());
491491+492492+ // Simulate a deleted/renamed directory (path doesn't exist)
493493+ // The "icons" directory was renamed, so the old path doesn't exist
494494+ let deleted_dir = PathBuf::from("/project/src/assets/icons");
495495+496496+ // Since the path doesn't exist, it should check prefix matching
497497+ let affected = state.get_affected_routes(&[deleted_dir]).unwrap();
498498+499499+ // Should find route1 (uses assets under /icons/) but not route2
500500+ assert_eq!(affected.len(), 1);
501501+ assert!(affected.contains(&route1));
502502+ }
503503+504504+ #[test]
505505+ fn test_get_affected_routes_multiple_routes_same_asset() {
506506+ let mut state = BuildState::new();
507507+ let asset_path = PathBuf::from("/project/src/assets/shared.css");
508508+ let route1 = make_route("/");
509509+ let route2 = make_route("/about");
510510+511511+ state.track_asset(asset_path.clone(), route1.clone());
512512+ state.track_asset(asset_path.clone(), route2.clone());
513513+514514+ let affected = state.get_affected_routes(&[asset_path]).unwrap();
515515+ assert_eq!(affected.len(), 2);
516516+ assert!(affected.contains(&route1));
517517+ assert!(affected.contains(&route2));
518518+ }
519519+520520+ #[test]
521521+ fn test_get_affected_routes_source_file() {
522522+ let mut state = BuildState::new();
523523+ let source_path = PathBuf::from("/project/src/pages/index.rs");
524524+ let route1 = make_route("/");
525525+ let route2 = make_route("/about");
526526+527527+ // Track routes to their source files
528528+ state.track_source_file(source_path.clone(), route1.clone());
529529+ state.track_source_file(source_path.clone(), route2.clone());
530530+531531+ // When the source file changes, both routes should be affected
532532+ let affected = state.get_affected_routes(&[source_path]).unwrap();
533533+ assert_eq!(affected.len(), 2);
534534+ assert!(affected.contains(&route1));
535535+ assert!(affected.contains(&route2));
536536+ }
537537+538538+ #[test]
539539+ fn test_get_affected_routes_source_file_only_matching() {
540540+ let mut state = BuildState::new();
541541+ let source_index = PathBuf::from("/project/src/pages/index.rs");
542542+ let source_about = PathBuf::from("/project/src/pages/about.rs");
543543+ let route_index = make_route("/");
544544+ let route_about = make_route("/about");
545545+546546+ state.track_source_file(source_index.clone(), route_index.clone());
547547+ state.track_source_file(source_about.clone(), route_about.clone());
548548+549549+ // Changing only index.rs should only affect the index route
550550+ let affected = state.get_affected_routes(&[source_index]).unwrap();
551551+ assert_eq!(affected.len(), 1);
552552+ assert!(affected.contains(&route_index));
553553+ assert!(!affected.contains(&route_about));
554554+ }
555555+556556+ #[test]
557557+ fn test_clear_also_clears_source_files() {
558558+ let mut state = BuildState::new();
559559+ let source_path = PathBuf::from("/project/src/pages/index.rs");
560560+ let asset_path = PathBuf::from("/project/src/assets/logo.png");
561561+ let route = make_route("/");
562562+563563+ state.track_source_file(source_path.clone(), route.clone());
564564+ state.track_asset(asset_path.clone(), route.clone());
565565+566566+ assert!(!state.source_to_routes.is_empty());
567567+ assert!(!state.asset_to_routes.is_empty());
568568+569569+ state.clear();
570570+571571+ assert!(state.source_to_routes.is_empty());
572572+ assert!(state.asset_to_routes.is_empty());
573573+ }
574574+575575+ #[test]
576576+ fn test_get_affected_routes_new_directory_not_untracked() {
577577+ use std::fs;
578578+ use tempfile::TempDir;
579579+580580+ let mut state = BuildState::new();
581581+582582+ // Create a temporary directory to simulate the "new directory" scenario
583583+ let temp_dir = TempDir::new().unwrap();
584584+ let new_dir = temp_dir.path().join("new-folder");
585585+ fs::create_dir(&new_dir).unwrap();
586586+587587+ // Track some asset under a different path
588588+ let asset_path = PathBuf::from("/project/src/assets/logo.png");
589589+ let route = make_route("/");
590590+ state.track_asset(asset_path.clone(), route.clone());
591591+592592+ // When a new directory appears (e.g., from renaming another folder),
593593+ // it should NOT trigger a full rebuild (return None), even though
594594+ // we don't have any assets tracked under it.
595595+ let affected = state.get_affected_routes(&[new_dir]);
596596+597597+ // Should return Some (not None), meaning we don't trigger full rebuild
598598+ // The set should be empty since no assets are under this new directory
599599+ assert!(
600600+ affected.is_some(),
601601+ "New directory should not trigger full rebuild"
602602+ );
603603+ assert!(affected.unwrap().is_empty());
604604+ }
605605+606606+ #[test]
607607+ fn test_get_affected_routes_folder_rename_scenario() {
608608+ use std::fs;
609609+ use tempfile::TempDir;
610610+611611+ let mut state = BuildState::new();
612612+613613+ // Create temp directories to simulate folder rename
614614+ let temp_dir = TempDir::new().unwrap();
615615+ let new_dir = temp_dir.path().join("icons-renamed");
616616+ fs::create_dir(&new_dir).unwrap();
617617+618618+ // Track assets under the OLD folder path (which no longer exists)
619619+ let old_dir = PathBuf::from("/project/src/assets/icons");
620620+ let asset1 = PathBuf::from("/project/src/assets/icons/logo.png");
621621+ let route = make_route("/blog");
622622+ state.track_asset(asset1, route.clone());
623623+624624+ // Simulate folder rename: old path doesn't exist, new path is a directory
625625+ // Both paths are passed as "changed"
626626+ let affected = state.get_affected_routes(&[old_dir, new_dir]);
627627+628628+ // Should return Some (not None) - we found the affected route via prefix matching
629629+ // and the new directory doesn't trigger "untracked file" behavior
630630+ assert!(
631631+ affected.is_some(),
632632+ "Folder rename should not trigger full rebuild"
633633+ );
634634+ let routes = affected.unwrap();
635635+ assert_eq!(routes.len(), 1);
636636+ assert!(routes.contains(&route));
637637+ }
638638+639639+ #[test]
640640+ fn test_get_affected_routes_deleted_untracked_file() {
641641+ let mut state = BuildState::new();
642642+643643+ // Track some assets
644644+ let tracked_asset = PathBuf::from("/project/src/assets/logo.png");
645645+ let route = make_route("/");
646646+ state.track_asset(tracked_asset, route);
647647+648648+ // Simulate a deleted file that was NEVER tracked
649649+ // (e.g., a file used via include_str! that we don't know about)
650650+ // This path doesn't exist and isn't in any mapping
651651+ let deleted_untracked_file = PathBuf::from("/project/src/content/data.txt");
652652+653653+ let affected = state.get_affected_routes(&[deleted_untracked_file]);
654654+655655+ // Since the deleted path has a file extension (.txt), we treat it as
656656+ // a deleted file that might have been a dependency we don't track.
657657+ // We should trigger a full rebuild (return None) to be safe.
658658+ assert!(
659659+ affected.is_none(),
660660+ "Deleted untracked file with extension should trigger full rebuild"
661661+ );
662662+ }
663663+664664+ #[test]
665665+ fn test_get_affected_routes_deleted_untracked_directory() {
666666+ let mut state = BuildState::new();
667667+668668+ // Track some assets
669669+ let tracked_asset = PathBuf::from("/project/src/assets/logo.png");
670670+ let route = make_route("/");
671671+ state.track_asset(tracked_asset, route);
672672+673673+ // Simulate a deleted directory that was NEVER tracked
674674+ // This path doesn't exist, isn't in any mapping, and has no extension
675675+ let deleted_untracked_dir = PathBuf::from("/project/src/content");
676676+677677+ let affected = state.get_affected_routes(&[deleted_untracked_dir]);
678678+679679+ // Since the path has no extension, it might have been a directory.
680680+ // We already did prefix matching (found nothing), so we allow this
681681+ // without triggering a full rebuild.
682682+ assert!(
683683+ affected.is_some(),
684684+ "Deleted path without extension (possible directory) should not trigger full rebuild"
685685+ );
686686+ assert!(affected.unwrap().is_empty());
687687+ }
688688+689689+ #[test]
690690+ fn test_get_affected_routes_deleted_tracked_file() {
691691+ use std::fs;
692692+ use tempfile::TempDir;
693693+694694+ let mut state = BuildState::new();
695695+696696+ // Create a temp file, track it, then delete it
697697+ let temp_dir = TempDir::new().unwrap();
698698+ let tracked_file = temp_dir.path().join("logo.png");
699699+ fs::write(&tracked_file, "content").unwrap();
700700+701701+ let canonical_path = tracked_file.canonicalize().unwrap();
702702+ let route = make_route("/");
703703+ state.track_asset(canonical_path.clone(), route.clone());
704704+705705+ // Now delete the file
706706+ fs::remove_file(&tracked_file).unwrap();
707707+708708+ // The file no longer exists, but its canonical path is still in our mapping
709709+ // When we get the change event, notify gives us the original path
710710+ let affected = state.get_affected_routes(std::slice::from_ref(&tracked_file));
711711+712712+ // This SHOULD find the route because we track by canonical path
713713+ // and the original path should match via the mapping lookup
714714+ println!("Result for deleted tracked file: {:?}", affected);
715715+716716+ // The path doesn't exist anymore, so canonicalize() fails.
717717+ // We fall back to prefix matching, but exact path matching on
718718+ // the non-canonical path should still work if stored that way.
719719+ // Let's check what actually happens...
720720+ match affected {
721721+ Some(routes) => {
722722+ // If we found routes, great - the system works
723723+ assert!(
724724+ routes.contains(&route),
725725+ "Should find the route for deleted tracked file"
726726+ );
727727+ }
728728+ None => {
729729+ // If None, that means we triggered a full rebuild, which is also safe
730730+ // This happens because the file doesn't exist and wasn't found in mappings
731731+ println!("Deleted tracked file triggered full rebuild (safe behavior)");
732732+ }
733733+ }
734734+ }
735735+736736+ #[test]
737737+ fn test_track_content_file() {
738738+ let mut state = BuildState::new();
739739+ let route = make_route("/");
740740+ let content_file = PathBuf::from("/project/content/articles/hello.md");
741741+742742+ state.track_content_file(content_file.clone(), route.clone());
743743+744744+ assert_eq!(state.content_file_to_routes.len(), 1);
745745+ assert!(state.content_file_to_routes.contains_key(&content_file));
746746+ assert!(state.content_file_to_routes[&content_file].contains(&route));
747747+ }
748748+749749+ #[test]
750750+ fn test_track_content_file_multiple_routes() {
751751+ let mut state = BuildState::new();
752752+ let route1 = make_route("/");
753753+ let route2 = make_route("/blog");
754754+ let content_file = PathBuf::from("/project/content/articles/hello.md");
755755+756756+ state.track_content_file(content_file.clone(), route1.clone());
757757+ state.track_content_file(content_file.clone(), route2.clone());
758758+759759+ assert_eq!(state.content_file_to_routes.len(), 1);
760760+ assert_eq!(state.content_file_to_routes[&content_file].len(), 2);
761761+ assert!(state.content_file_to_routes[&content_file].contains(&route1));
762762+ assert!(state.content_file_to_routes[&content_file].contains(&route2));
763763+ }
764764+765765+ #[test]
766766+ fn test_track_content_file_multiple_files() {
767767+ let mut state = BuildState::new();
768768+ let route = make_route("/");
769769+ let file1 = PathBuf::from("/project/content/articles/hello.md");
770770+ let file2 = PathBuf::from("/project/content/articles/world.md");
771771+772772+ state.track_content_file(file1.clone(), route.clone());
773773+ state.track_content_file(file2.clone(), route.clone());
774774+775775+ assert_eq!(state.content_file_to_routes.len(), 2);
776776+ assert!(state.content_file_to_routes[&file1].contains(&route));
777777+ assert!(state.content_file_to_routes[&file2].contains(&route));
778778+ }
779779+780780+ #[test]
781781+ fn test_clear_also_clears_content_files() {
782782+ let mut state = BuildState::new();
783783+ let route = make_route("/");
784784+ let content_file = PathBuf::from("/project/content/articles/hello.md");
785785+786786+ state.track_content_file(content_file, route);
787787+788788+ assert!(!state.content_file_to_routes.is_empty());
789789+790790+ state.clear();
791791+792792+ assert!(state.content_file_to_routes.is_empty());
793793+ }
794794+795795+ #[test]
796796+ fn test_get_affected_routes_content_file() {
797797+ let mut state = BuildState::new();
798798+ let route1 = make_route("/");
799799+ let route2 = make_route("/blog/[slug]");
800800+ let route3 = make_route("/about");
801801+802802+ // Track content file -> route mappings directly
803803+ let article1 = PathBuf::from("/project/content/articles/hello.md");
804804+ let article2 = PathBuf::from("/project/content/articles/world.md");
805805+ let page1 = PathBuf::from("/project/content/pages/about.md");
806806+807807+ // Route "/" uses article1 and article2
808808+ state.track_content_file(article1.clone(), route1.clone());
809809+ state.track_content_file(article2.clone(), route1.clone());
810810+ // Route "/blog/[slug]" uses only article1
811811+ state.track_content_file(article1.clone(), route2.clone());
812812+ // Route "/about" uses page1
813813+ state.track_content_file(page1.clone(), route3.clone());
814814+815815+ // When article1 changes, only routes that used article1 should be affected
816816+ let affected = state.get_affected_routes(&[article1]).unwrap();
817817+ assert_eq!(affected.len(), 2);
818818+ assert!(affected.contains(&route1));
819819+ assert!(affected.contains(&route2));
820820+ assert!(!affected.contains(&route3));
821821+822822+ // When article2 changes, only route1 should be affected (granular!)
823823+ let affected = state.get_affected_routes(&[article2]).unwrap();
824824+ assert_eq!(affected.len(), 1);
825825+ assert!(affected.contains(&route1));
826826+ assert!(!affected.contains(&route2));
827827+ assert!(!affected.contains(&route3));
828828+829829+ // When page1 changes, only route3 should be affected
830830+ let affected = state.get_affected_routes(&[page1]).unwrap();
831831+ assert_eq!(affected.len(), 1);
832832+ assert!(affected.contains(&route3));
833833+ assert!(!affected.contains(&route1));
834834+ assert!(!affected.contains(&route2));
835835+ }
836836+837837+ #[test]
838838+ fn test_get_affected_routes_content_file_multiple_files_changed() {
839839+ let mut state = BuildState::new();
840840+ let route1 = make_route("/");
841841+ let route2 = make_route("/about");
842842+843843+ // Track content files
844844+ let article = PathBuf::from("/project/content/articles/hello.md");
845845+ let page = PathBuf::from("/project/content/pages/about.md");
846846+847847+ state.track_content_file(article.clone(), route1.clone());
848848+ state.track_content_file(page.clone(), route2.clone());
849849+850850+ // When both files change, both routes should be affected
851851+ let affected = state.get_affected_routes(&[article, page]).unwrap();
852852+ assert_eq!(affected.len(), 2);
853853+ assert!(affected.contains(&route1));
854854+ assert!(affected.contains(&route2));
855855+ }
856856+857857+ #[test]
858858+ fn test_get_affected_routes_content_file_mixed_with_asset() {
859859+ let mut state = BuildState::new();
860860+ let route1 = make_route("/");
861861+ let route2 = make_route("/about");
862862+863863+ // Track a content file for route1
864864+ let article = PathBuf::from("/project/content/articles/hello.md");
865865+ state.track_content_file(article.clone(), route1.clone());
866866+867867+ // Track an asset used by route2
868868+ let style = PathBuf::from("/project/src/styles.css");
869869+ state.track_asset(style.clone(), route2.clone());
870870+871871+ // When both content file and asset change
872872+ let affected = state.get_affected_routes(&[article, style]).unwrap();
873873+ assert_eq!(affected.len(), 2);
874874+ assert!(affected.contains(&route1));
875875+ assert!(affected.contains(&route2));
876876+ }
877877+878878+ #[test]
879879+ fn test_get_affected_routes_unknown_content_file() {
880880+ let mut state = BuildState::new();
881881+ let route = make_route("/");
882882+883883+ // Track a content file
884884+ let article = PathBuf::from("/project/content/articles/hello.md");
885885+ state.track_content_file(article, route);
886886+887887+ // A new/unknown .md file that isn't tracked
888888+ // This could be a newly created file
889889+ let new_file = PathBuf::from("/project/content/articles/new-post.md");
890890+891891+ // Should trigger full rebuild since it's an untracked file with extension
892892+ let affected = state.get_affected_routes(&[new_file]);
893893+ assert!(
894894+ affected.is_none(),
895895+ "New untracked content file should trigger full rebuild"
896896+ );
897897+ }
898898+899899+ #[test]
900900+ fn test_is_known_content_file() {
901901+ let mut state = BuildState::new();
902902+ let route = make_route("/");
903903+ let content_file = PathBuf::from("/project/content/articles/hello.md");
904904+905905+ state.track_content_file(content_file.clone(), route);
906906+907907+ assert!(state.is_known_content_file(&content_file));
908908+ assert!(!state.is_known_content_file(Path::new("/project/content/articles/unknown.md")));
909909+ }
910910+911911+ #[test]
912912+ fn test_content_file_directory_prefix() {
913913+ let mut state = BuildState::new();
914914+ let route = make_route("/");
915915+916916+ // Track content files under a directory
917917+ let article1 = PathBuf::from("/project/content/articles/hello.md");
918918+ let article2 = PathBuf::from("/project/content/articles/world.md");
919919+ state.track_content_file(article1, route.clone());
920920+ state.track_content_file(article2, route.clone());
921921+922922+ // When the parent directory changes (e.g., renamed), should find affected routes
923923+ let content_dir = PathBuf::from("/project/content/articles");
924924+ let affected = state.get_affected_routes(&[content_dir]).unwrap();
925925+ assert_eq!(affected.len(), 1);
926926+ assert!(affected.contains(&route));
927927+ }
928928+929929+ #[test]
930930+ fn test_clear_content_file_mappings_for_routes() {
931931+ let mut state = BuildState::new();
932932+ let route1 = make_route("/articles");
933933+ let route2 = make_route("/articles/[slug]");
934934+ let route3 = make_route("/about");
935935+936936+ // Article 1 is accessed by routes 1 and 2
937937+ let article1 = PathBuf::from("/project/content/articles/hello.md");
938938+ state.track_content_file(article1.clone(), route1.clone());
939939+ state.track_content_file(article1.clone(), route2.clone());
940940+941941+ // Article 2 is accessed by routes 1 and 2
942942+ let article2 = PathBuf::from("/project/content/articles/world.md");
943943+ state.track_content_file(article2.clone(), route1.clone());
944944+ state.track_content_file(article2.clone(), route2.clone());
945945+946946+ // Route 3 uses a different file
947947+ let page = PathBuf::from("/project/content/pages/about.md");
948948+ state.track_content_file(page.clone(), route3.clone());
949949+950950+ assert_eq!(state.content_file_to_routes.len(), 3);
951951+952952+ // Clear mappings only for route2
953953+ let mut routes_to_clear = FxHashSet::default();
954954+ routes_to_clear.insert(route2.clone());
955955+ state.clear_content_file_mappings_for_routes(&routes_to_clear);
956956+957957+ // route2 should be removed from article1 and article2 mappings
958958+ assert!(!state.content_file_to_routes[&article1].contains(&route2));
959959+ assert!(state.content_file_to_routes[&article1].contains(&route1));
960960+961961+ assert!(!state.content_file_to_routes[&article2].contains(&route2));
962962+ assert!(state.content_file_to_routes[&article2].contains(&route1));
963963+964964+ // route3's mapping should be unaffected
965965+ assert!(state.content_file_to_routes[&page].contains(&route3));
966966+ }
967967+968968+ #[test]
969969+ fn test_clear_content_file_mappings_for_routes_removes_empty_entries() {
970970+ let mut state = BuildState::new();
971971+ let route1 = make_route("/articles/first");
972972+ let route2 = make_route("/articles/second");
973973+974974+ // Route1 uses only article1
975975+ let article1 = PathBuf::from("/project/content/articles/first.md");
976976+ state.track_content_file(article1.clone(), route1.clone());
977977+978978+ // Route2 uses only article2
979979+ let article2 = PathBuf::from("/project/content/articles/second.md");
980980+ state.track_content_file(article2.clone(), route2.clone());
981981+982982+ assert_eq!(state.content_file_to_routes.len(), 2);
983983+984984+ // Clear mappings for route1
985985+ let mut routes_to_clear = FxHashSet::default();
986986+ routes_to_clear.insert(route1);
987987+ state.clear_content_file_mappings_for_routes(&routes_to_clear);
988988+989989+ // article1 entry should be completely removed (no routes left)
990990+ assert!(!state.content_file_to_routes.contains_key(&article1));
991991+992992+ // article2 entry should still exist
993993+ assert!(state.content_file_to_routes.contains_key(&article2));
994994+ assert!(state.content_file_to_routes[&article2].contains(&route2));
995995+ }
996996+997997+ #[test]
998998+ fn test_track_content_file_source() {
999999+ let mut state = BuildState::new();
10001000+ let file = PathBuf::from("/project/content/articles/hello.md");
10011001+10021002+ state.track_content_file_source(file.clone(), "articles".to_string());
10031003+10041004+ assert_eq!(state.content_file_to_source.len(), 1);
10051005+ assert_eq!(
10061006+ state.content_file_to_source.get(&file),
10071007+ Some(&"articles".to_string())
10081008+ );
10091009+ }
10101010+10111011+ #[test]
10121012+ fn test_get_affected_content_sources_single_source() {
10131013+ let mut state = BuildState::new();
10141014+ let article1 = PathBuf::from("/project/content/articles/hello.md");
10151015+ let article2 = PathBuf::from("/project/content/articles/world.md");
10161016+10171017+ state.track_content_file_source(article1.clone(), "articles".to_string());
10181018+ state.track_content_file_source(article2.clone(), "articles".to_string());
10191019+10201020+ // Change one article file
10211021+ let affected = state.get_affected_content_sources(&[article1]).unwrap();
10221022+ assert_eq!(affected.len(), 1);
10231023+ assert!(affected.contains("articles"));
10241024+ }
10251025+10261026+ #[test]
10271027+ fn test_get_affected_content_sources_multiple_sources() {
10281028+ let mut state = BuildState::new();
10291029+ let article = PathBuf::from("/project/content/articles/hello.md");
10301030+ let page = PathBuf::from("/project/content/pages/about.md");
10311031+10321032+ state.track_content_file_source(article.clone(), "articles".to_string());
10331033+ state.track_content_file_source(page.clone(), "pages".to_string());
10341034+10351035+ // Change both files
10361036+ let affected = state
10371037+ .get_affected_content_sources(&[article, page])
10381038+ .unwrap();
10391039+ assert_eq!(affected.len(), 2);
10401040+ assert!(affected.contains("articles"));
10411041+ assert!(affected.contains("pages"));
10421042+ }
10431043+10441044+ #[test]
10451045+ fn test_get_affected_content_sources_unknown_file_returns_none() {
10461046+ let mut state = BuildState::new();
10471047+ let article = PathBuf::from("/project/content/articles/hello.md");
10481048+ state.track_content_file_source(article, "articles".to_string());
10491049+10501050+ // A new file that's not tracked
10511051+ let new_file = PathBuf::from("/project/content/articles/new-post.md");
10521052+10531053+ // Should return None (need to re-init all sources)
10541054+ let affected = state.get_affected_content_sources(&[new_file]);
10551055+ assert!(affected.is_none());
10561056+ }
10571057+10581058+ #[test]
10591059+ fn test_get_affected_content_sources_ignores_non_content_files() {
10601060+ let mut state = BuildState::new();
10611061+ let article = PathBuf::from("/project/content/articles/hello.md");
10621062+ state.track_content_file_source(article.clone(), "articles".to_string());
10631063+10641064+ // A non-content file (e.g., .rs file) - should be ignored
10651065+ let rust_file = PathBuf::from("/project/src/pages/index.rs");
10661066+10671067+ // Should return empty set (no content sources affected)
10681068+ let affected = state
10691069+ .get_affected_content_sources(std::slice::from_ref(&rust_file))
10701070+ .unwrap();
10711071+ assert!(affected.is_empty());
10721072+10731073+ // Mixed: content file + non-content file
10741074+ let affected = state
10751075+ .get_affected_content_sources(&[article, rust_file])
10761076+ .unwrap();
10771077+ assert_eq!(affected.len(), 1);
10781078+ assert!(affected.contains("articles"));
10791079+ }
10801080+10811081+ #[test]
10821082+ fn test_clear_content_mappings_for_sources() {
10831083+ let mut state = BuildState::new();
10841084+ let route1 = make_route("/articles");
10851085+ let route2 = make_route("/pages");
10861086+10871087+ // Set up articles source
10881088+ let article1 = PathBuf::from("/project/content/articles/hello.md");
10891089+ let article2 = PathBuf::from("/project/content/articles/world.md");
10901090+ state.track_content_file_source(article1.clone(), "articles".to_string());
10911091+ state.track_content_file_source(article2.clone(), "articles".to_string());
10921092+ state.track_content_file(article1.clone(), route1.clone());
10931093+ state.track_content_file(article2.clone(), route1.clone());
10941094+10951095+ // Set up pages source
10961096+ let page = PathBuf::from("/project/content/pages/about.md");
10971097+ state.track_content_file_source(page.clone(), "pages".to_string());
10981098+ state.track_content_file(page.clone(), route2.clone());
10991099+11001100+ assert_eq!(state.content_file_to_source.len(), 3);
11011101+ assert_eq!(state.content_file_to_routes.len(), 3);
11021102+11031103+ // Clear only the articles source
11041104+ let mut sources_to_clear = FxHashSet::default();
11051105+ sources_to_clear.insert("articles".to_string());
11061106+ state.clear_content_mappings_for_sources(&sources_to_clear);
11071107+11081108+ // Articles source mappings should be removed
11091109+ assert!(!state.content_file_to_source.contains_key(&article1));
11101110+ assert!(!state.content_file_to_source.contains_key(&article2));
11111111+11121112+ // But routes mappings should be preserved (cleared separately per-route)
11131113+ assert!(state.content_file_to_routes.contains_key(&article1));
11141114+ assert!(state.content_file_to_routes.contains_key(&article2));
11151115+11161116+ // Pages should remain completely unchanged
11171117+ assert!(state.content_file_to_source.contains_key(&page));
11181118+ assert!(state.content_file_to_routes.contains_key(&page));
11191119+ assert_eq!(
11201120+ state.content_file_to_source.get(&page),
11211121+ Some(&"pages".to_string())
11221122+ );
11231123+ }
11241124+11251125+ #[test]
11261126+ fn test_clear_also_clears_content_file_to_source() {
11271127+ let mut state = BuildState::new();
11281128+ let file = PathBuf::from("/project/content/articles/hello.md");
11291129+ state.track_content_file_source(file, "articles".to_string());
11301130+11311131+ assert!(!state.content_file_to_source.is_empty());
11321132+11331133+ state.clear();
11341134+11351135+ assert!(state.content_file_to_source.is_empty());
11361136+ }
11371137+}
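For orientation, the following is a rough sketch of the BuildState surface these tests exercise. The field and method shapes are inferred from how the tests and build.rs use them; the real definition lives in crates/maudit/src/build/state.rs and may differ (serde derives, visibility, extra fields), so treat this as an illustration rather than the actual type.

use std::path::PathBuf;

use rustc_hash::{FxHashMap, FxHashSet};

// Stand-in for the real RouteIdentifier from state.rs (which also has
// `base(...)` and `variant(...)` constructors used in build.rs).
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct RouteIdentifier {
    pub route_path: String,
    pub params: Option<String>,
}

// Inferred shape of the build state used for incremental rebuild decisions.
pub struct BuildState {
    pub asset_to_routes: FxHashMap<PathBuf, FxHashSet<RouteIdentifier>>,
    pub source_to_routes: FxHashMap<PathBuf, FxHashSet<RouteIdentifier>>,
    pub content_file_to_routes: FxHashMap<PathBuf, FxHashSet<RouteIdentifier>>,
    pub content_file_to_source: FxHashMap<PathBuf, String>,
    pub bundler_inputs: Vec<String>,
}

impl BuildState {
    /// Record that `route` read `file` while rendering.
    pub fn track_content_file(&mut self, file: PathBuf, route: RouteIdentifier) {
        self.content_file_to_routes.entry(file).or_default().insert(route);
    }

    // Contract implied by the tests (not implemented here):
    // get_affected_routes(&self, changed: &[PathBuf]) -> Option<FxHashSet<RouteIdentifier>>
    // returns Some(affected routes) when every changed file is tracked, and None to
    // request a full rebuild (for example, a brand-new untracked content file).
}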
+696-143
crates/maudit/src/build.rs
···1414 self, HashAssetType, HashConfig, PrefetchPlugin, RouteAssets, Script, TailwindPlugin,
1515 calculate_hash, image_cache::ImageCache, prefetch,
1616 },
1717- build::{images::process_image, options::PrefetchStrategy},
1818- content::ContentSources,
1717+ build::{
1818+ images::process_image,
1919+ options::PrefetchStrategy,
2020+ state::{BuildState, RouteIdentifier},
2121+ },
2222+ content::{ContentSources, finish_tracking_content_files, start_tracking_content_files},
1923 is_dev,
2024 logging::print_title,
2125 route::{CachedRoute, DynamicRouteContext, FullRoute, InternalRoute, PageContext, PageParams},
···2630use log::{debug, info, trace, warn};
2731use pathdiff::diff_paths;
2832use rolldown::{Bundler, BundlerOptions, InputItem, ModuleType};
3333+use rolldown_common::Output;
3434+use rolldown_plugin_replace::ReplacePlugin;
2935use rustc_hash::{FxHashMap, FxHashSet};
30363137use crate::assets::Asset;
···3541pub mod images;
3642pub mod metadata;
3743pub mod options;
4444+pub mod state;
4545+4646+/// Helper to check if a route should be rebuilt during incremental builds.
4747+/// Returns `true` for full builds (when `routes_to_rebuild` is `None`).
4848+fn should_rebuild_route(
4949+ route_id: Option<&RouteIdentifier>,
5050+ routes_to_rebuild: &Option<FxHashSet<RouteIdentifier>>,
5151+) -> bool {
5252+ match routes_to_rebuild {
5353+ Some(set) => {
5454+ // Incremental build - need route_id to check
5555+ let route_id = route_id.expect("route_id required for incremental builds");
5656+ let result = set.contains(route_id);
5757+ if !result {
5858+ trace!(target: "build", "Skipping route {:?} (not in rebuild set)", route_id);
5959+ }
6060+ result
6161+ }
6262+ None => true, // Full build - always rebuild
6363+ }
6464+}
6565+6666+/// Helper to track all assets and source files used by a route.
6767+/// Only performs work when incremental builds are enabled and route_id is provided.
6868+fn track_route_assets(
6969+ build_state: &mut BuildState,
7070+ route_id: Option<&RouteIdentifier>,
7171+ route_assets: &RouteAssets,
7272+) {
7373+ // Skip tracking entirely when route_id is not provided (incremental disabled)
7474+ let Some(route_id) = route_id else {
7575+ return;
7676+ };
7777+7878+ // Track images
7979+ for image in &route_assets.images {
8080+ if let Ok(canonical) = image.path().canonicalize() {
8181+ build_state.track_asset(canonical, route_id.clone());
8282+ }
8383+ }
8484+8585+ // Track scripts
8686+ for script in &route_assets.scripts {
8787+ if let Ok(canonical) = script.path().canonicalize() {
8888+ build_state.track_asset(canonical, route_id.clone());
8989+ }
9090+ }
9191+9292+ // Track styles
9393+ for style in &route_assets.styles {
9494+ if let Ok(canonical) = style.path().canonicalize() {
9595+ build_state.track_asset(canonical, route_id.clone());
9696+ }
9797+ }
9898+}
9999+100100+/// Helper to track the source file where a route is defined.
101101+/// Only performs work when incremental builds are enabled and route_id is provided.
102102+fn track_route_source_file(
103103+ build_state: &mut BuildState,
104104+ route_id: Option<&RouteIdentifier>,
105105+ source_file: &str,
106106+) {
107107+ // Skip tracking entirely when route_id is not provided (incremental disabled)
108108+ let Some(route_id) = route_id else {
109109+ return;
110110+ };
111111+112112+ // The file!() macro returns a path relative to the cargo workspace root.
113113+ // We need to canonicalize it to match against changed file paths (which are absolute).
114114+ let source_path = PathBuf::from(source_file);
115115+116116+ // Try direct canonicalization first (works if CWD is workspace root)
117117+ if let Ok(canonical) = source_path.canonicalize() {
118118+ build_state.track_source_file(canonical, route_id.clone());
119119+ return;
120120+ }
121121+122122+ // The file!() macro path is relative to the workspace root at compile time.
123123+ // At runtime, we're typically running from the package directory.
124124+ // Try to find the file by walking up from CWD until we find it.
125125+ if let Ok(cwd) = std::env::current_dir() {
126126+ let mut current = cwd.as_path();
127127+ loop {
128128+ let candidate = current.join(&source_path);
129129+ if let Ok(canonical) = candidate.canonicalize() {
130130+ build_state.track_source_file(canonical, route_id.clone());
131131+ return;
132132+ }
133133+ match current.parent() {
134134+ Some(parent) => current = parent,
135135+ None => break,
136136+ }
137137+ }
138138+ }
139139+140140+ // Last resort: store the relative path (won't match absolute changed files)
141141+ debug!(target: "build", "Could not canonicalize source file path: {}", source_file);
142142+ build_state.track_source_file(source_path, route_id.clone());
143143+}
144144+145145+/// Helper to track content files accessed during page rendering.
146146+/// Only performs work when incremental builds are enabled and route_id is provided.
147147+/// This should be called after `finish_tracking_content_files()` to get the accessed files.
148148+fn track_route_content_files(
149149+ build_state: &mut BuildState,
150150+ route_id: Option<&RouteIdentifier>,
151151+ accessed_files: Option<FxHashSet<PathBuf>>,
152152+) {
153153+ // Skip tracking entirely when route_id is not provided (incremental disabled)
154154+ let Some(route_id) = route_id else {
155155+ return;
156156+ };
157157+158158+ // Skip if no files were tracked
159159+ let Some(files) = accessed_files else {
160160+ return;
161161+ };
162162+163163+ for file_path in files {
164164+ build_state.track_content_file(file_path, route_id.clone());
165165+ }
166166+}
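As a self-contained illustration of the gating performed by should_rebuild_route (using a stand-in RouteIdentifier type, not the real one from state.rs, and omitting the trace logging), the three cases behave like this:

use rustc_hash::FxHashSet;

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct RouteIdentifier(String); // stand-in for the real type

fn should_rebuild_route(
    route_id: Option<&RouteIdentifier>,
    routes_to_rebuild: &Option<FxHashSet<RouteIdentifier>>,
) -> bool {
    match routes_to_rebuild {
        // Incremental build: rebuild only routes in the affected set.
        Some(set) => set.contains(route_id.expect("route_id required for incremental builds")),
        // Full build: no rebuild set, everything is rebuilt.
        None => true,
    }
}

fn main() {
    let index = RouteIdentifier("/".to_string());
    let about = RouteIdentifier("/about".to_string());

    // Full build.
    assert!(should_rebuild_route(Some(&index), &None));

    // Incremental build with only "/" affected.
    let mut affected = FxHashSet::default();
    affected.insert(index.clone());
    let rebuild = Some(affected);
    assert!(should_rebuild_route(Some(&index), &rebuild));
    assert!(!should_rebuild_route(Some(&about), &rebuild));
}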
3816739168pub fn execute_build(
40169 routes: &[&dyn FullRoute],
41170 content_sources: &mut ContentSources,
42171 options: &BuildOptions,
172172+ changed_files: Option<&[PathBuf]>,
43173 async_runtime: &tokio::runtime::Runtime,
44174) -> Result<BuildOutput, Box<dyn std::error::Error>> {
4545- async_runtime.block_on(async { build(routes, content_sources, options).await })
175175+ async_runtime.block_on(async { build(routes, content_sources, options, changed_files).await })
46176}
4717748178pub async fn build(
49179 routes: &[&dyn FullRoute],
50180 content_sources: &mut ContentSources,
51181 options: &BuildOptions,
182182+ changed_files: Option<&[PathBuf]>,
52183) -> Result<BuildOutput, Box<dyn std::error::Error>> {
53184 let build_start = Instant::now();
54185 let mut build_metadata = BuildOutput::new(build_start);
···56187 // Create a directory for the output
57188 trace!(target: "build", "Setting up required directories...");
581895959- let clean_up_handle = if options.clean_output_dir {
190190+ // Use cache directory from options
191191+ let build_cache_dir = &options.cache_dir;
192192+193193+ // Load build state for incremental builds (only if incremental is enabled)
194194+ let mut build_state = if options.incremental {
195195+ BuildState::load(build_cache_dir).unwrap_or_else(|e| {
196196+ debug!(target: "build", "Failed to load build state: {}", e);
197197+ BuildState::new()
198198+ })
199199+ } else {
200200+ BuildState::new()
201201+ };
202202+203203+ debug!(target: "build", "Loaded build state with {} asset mappings, {} source mappings, {} content file mappings", build_state.asset_to_routes.len(), build_state.source_to_routes.len(), build_state.content_file_to_routes.len());
204204+ debug!(target: "build", "options.incremental: {}, changed_files.is_some(): {}", options.incremental, changed_files.is_some());
205205+206206+ // Determine if this is an incremental build
207207+ // We need either asset mappings OR source file mappings to do incremental builds
208208+ let has_build_state =
209209+ !build_state.asset_to_routes.is_empty() || !build_state.source_to_routes.is_empty();
210210+ let is_incremental = options.incremental && changed_files.is_some() && has_build_state;
211211+212212+ let routes_to_rebuild = if is_incremental {
213213+ let changed = changed_files.unwrap();
214214+ info!(target: "build", "Incremental build: {} files changed", changed.len());
215215+ info!(target: "build", "Changed files: {:?}", changed);
216216+217217+ info!(target: "build", "Build state has {} asset mappings, {} source mappings, {} content file mappings", build_state.asset_to_routes.len(), build_state.source_to_routes.len(), build_state.content_file_to_routes.len());
218218+219219+ match build_state.get_affected_routes(changed) {
220220+ Some(affected) => {
221221+ info!(target: "build", "Rebuilding {} affected routes", affected.len());
222222+ info!(target: "build", "Affected routes: {:?}", affected);
223223+ Some(affected)
224224+ }
225225+ None => {
226226+ // Some changed files weren't tracked (e.g., include_str! dependencies)
227227+ // Fall back to full rebuild to ensure correctness
228228+ info!(target: "build", "Untracked files changed, falling back to full rebuild");
229229+ build_state.clear();
230230+ None
231231+ }
232232+ }
233233+ } else {
234234+ if changed_files.is_some() {
235235+ info!(target: "build", "Full build (first run after recompilation)");
236236+ }
237237+ // Full build - clear old state
238238+ build_state.clear();
239239+ None
240240+ };
241241+242242+ // Check if we should rebundle during incremental builds
243243+ // Rebundle if a changed file is either:
244244+ // 1. A direct bundler input (entry point)
245245+ // 2. A transitive dependency tracked in asset_to_routes (any file the bundler processed)
246246+ let should_rebundle = if is_incremental && !build_state.bundler_inputs.is_empty() {
247247+ let changed = changed_files.unwrap();
248248+ let should = changed.iter().any(|changed_file| {
249249+ // Check if it's a direct bundler input
250250+ let is_bundler_input = build_state.bundler_inputs.iter().any(|bundler_input| {
251251+ if let (Ok(changed_canonical), Ok(bundler_canonical)) = (
252252+ changed_file.canonicalize(),
253253+ PathBuf::from(bundler_input).canonicalize(),
254254+ ) {
255255+ changed_canonical == bundler_canonical
256256+ } else {
257257+ false
258258+ }
259259+ });
260260+261261+ if is_bundler_input {
262262+ return true;
263263+ }
264264+265265+ // Check if it's a transitive dependency tracked by the bundler
266266+ // (JS/TS modules, CSS files, or assets like images/fonts referenced via url())
267267+ if let Ok(canonical) = changed_file.canonicalize() {
268268+ return build_state.asset_to_routes.contains_key(&canonical);
269269+ }
270270+271271+ false
272272+ });
273273+274274+ if should {
275275+ info!(target: "build", "Rebundling needed: changed file affects bundled assets");
276276+ } else {
277277+ info!(target: "build", "Skipping bundler: no changed files affect bundled assets");
278278+ }
279279+280280+ should
281281+ } else {
282282+ // Not incremental or no previous bundler inputs
283283+ false
284284+ };
285285+286286+ let clean_up_handle = if options.clean_output_dir && !is_incremental {
60287 let old_dist_tmp_dir = {
61288 let duration = SystemTime::now().duration_since(UNIX_EPOCH)?;
62289 let num = (duration.as_secs() + duration.subsec_nanos() as u64) % 100000;
···73300 };
7430175302 // Create the image cache early so it can be shared across routes
7676- let image_cache = ImageCache::with_cache_dir(&options.assets.image_cache_dir);
303303+ let image_cache = ImageCache::with_cache_dir(options.assets_cache_dir());
77304 let _ = fs::create_dir_all(image_cache.get_cache_dir());
7830579306 // Create route_assets_options with the image cache
···8331084311 let content_sources_start = Instant::now();
85312 print_title("initializing content sources");
8686- content_sources.sources_mut().iter_mut().for_each(|source| {
8787- let source_start = Instant::now();
8888- source.init();
313313+314314+ // Determine which content sources need to be initialized
315315+ // For incremental builds with specific routes to rebuild, only re-init sources whose files have changed
316316+ // If routes_to_rebuild is None (full rebuild), always init all sources
317317+ let sources_to_init: Option<FxHashSet<String>> = if routes_to_rebuild.is_some() {
318318+ if let Some(changed) = changed_files {
319319+ build_state.get_affected_content_sources(changed)
320320+ } else {
321321+ None // Full init
322322+ }
323323+ } else {
324324+ None // Full init (routes_to_rebuild is None means full rebuild)
325325+ };
326326+327327+ // Initialize content sources (all or selective)
328328+ let initialized_sources: Vec<String> = match &sources_to_init {
329329+ Some(source_names) if !source_names.is_empty() => {
330330+ info!(target: "content", "Selectively initializing {} content source(s): {:?}", source_names.len(), source_names);
331331+332332+ // Clear mappings for sources being re-initialized before init
333333+ build_state.clear_content_mappings_for_sources(source_names);
334334+335335+ // Initialize only the affected sources
336336+ let mut initialized = Vec::new();
337337+ for source in content_sources.sources_mut() {
338338+ if source_names.contains(source.get_name()) {
339339+ let source_start = Instant::now();
340340+ source.init();
341341+ info!(target: "content", "{} initialized in {}", source.get_name(), format_elapsed_time(source_start.elapsed(), &FormatElapsedTimeOptions::default()));
342342+ initialized.push(source.get_name().to_string());
343343+ } else {
344344+ info!(target: "content", "{} (unchanged, skipped)", source.get_name());
345345+ }
346346+ }
347347+ initialized
348348+ }
349349+ Some(_) => {
350350+ // Empty set means no content files changed, skip all initialization
351351+ info!(target: "content", "No content files changed, skipping content source initialization");
352352+ Vec::new()
353353+ }
354354+ None => {
355355+ // Full initialization (first build, unknown files, or non-incremental)
356356+ info!(target: "content", "Initializing all content sources");
357357+358358+ // Clear all content mappings for full init
359359+ build_state.clear_content_file_mappings();
360360+ build_state.content_file_to_source.clear();
361361+362362+ let mut initialized = Vec::new();
363363+ for source in content_sources.sources_mut() {
364364+ let source_start = Instant::now();
365365+ source.init();
366366+ info!(target: "content", "{} initialized in {}", source.get_name(), format_elapsed_time(source_start.elapsed(), &FormatElapsedTimeOptions::default()));
367367+ initialized.push(source.get_name().to_string());
368368+ }
369369+ initialized
370370+ }
371371+ };
893729090- info!(target: "content", "{} initialized in {}", source.get_name(), format_elapsed_time(source_start.elapsed(), &FormatElapsedTimeOptions::default()));
9191- });
373373+ // Track file->source mappings for all initialized sources
374374+ for source in content_sources.sources() {
375375+ if initialized_sources.contains(&source.get_name().to_string()) {
376376+ let source_name = source.get_name().to_string();
377377+ for file_path in source.get_entry_file_paths() {
378378+ build_state.track_content_file_source(file_path, source_name.clone());
379379+ }
380380+ }
381381+ }
9238293383 info!(target: "content", "{}", format!("Content sources initialized in {}", format_elapsed_time(
94384 content_sources_start.elapsed(),
95385 &FormatElapsedTimeOptions::default(),
96386 )).bold());
97387388388+ // Clear content file->routes mappings for routes being rebuilt
389389+ // (so they get fresh tracking during this build)
390390+ if let Some(ref routes) = routes_to_rebuild {
391391+ build_state.clear_content_file_mappings_for_routes(routes);
392392+ }
393393+98394 print_title("generating pages");
99395 let pages_start = Instant::now();
100396···182478183479 // Static base route
184480 if base_params.is_empty() {
185185- let mut route_assets = RouteAssets::with_default_assets(
186186- &route_assets_options,
187187- Some(image_cache.clone()),
188188- default_scripts.clone(),
189189- vec![],
190190- );
481481+ // Only create RouteIdentifier when incremental builds are enabled
482482+ let route_id = if options.incremental {
483483+ Some(RouteIdentifier::base(base_path.clone(), None))
484484+ } else {
485485+ None
486486+ };
191487192192- let params = PageParams::default();
193193- let url = cached_route.url(&params);
488488+ // Check if we need to rebuild this route
489489+ if should_rebuild_route(route_id.as_ref(), &routes_to_rebuild) {
490490+ let mut route_assets = RouteAssets::with_default_assets(
491491+ &route_assets_options,
492492+ Some(image_cache.clone()),
493493+ default_scripts.clone(),
494494+ vec![],
495495+ );
194496195195- let result = route.build(&mut PageContext::from_static_route(
196196- content_sources,
197197- &mut route_assets,
198198- &url,
199199- &options.base_url,
200200- None,
201201- ))?;
497497+ let params = PageParams::default();
498498+ let url = cached_route.url(&params);
499499+500500+ // Start tracking content file access for incremental builds
501501+ if options.incremental {
502502+ start_tracking_content_files();
503503+ }
504504+505505+ let result = route.build(&mut PageContext::from_static_route(
506506+ content_sources,
507507+ &mut route_assets,
508508+ &url,
509509+ &options.base_url,
510510+ None,
511511+ ))?;
512512+513513+ // Finish tracking and record accessed content files
514514+ let accessed_files = if options.incremental {
515515+ finish_tracking_content_files()
516516+ } else {
517517+ None
518518+ };
519519+520520+ let file_path = cached_route.file_path(&params, &options.output_dir);
202521203203- let file_path = cached_route.file_path(&params, &options.output_dir);
522522+ write_route_file(&result, &file_path)?;
204523205205- write_route_file(&result, &file_path)?;
524524+ info!(target: "pages", "{} -> {} {}", url, file_path.to_string_lossy().dimmed(), format_elapsed_time(route_start.elapsed(), &route_format_options));
206525207207- info!(target: "pages", "{} -> {} {}", url, file_path.to_string_lossy().dimmed(), format_elapsed_time(route_start.elapsed(), &route_format_options));
526526+ // Track assets, source file, and content files for this route
527527+ track_route_assets(&mut build_state, route_id.as_ref(), &route_assets);
528528+ track_route_source_file(&mut build_state, route_id.as_ref(), route.source_file());
529529+ track_route_content_files(&mut build_state, route_id.as_ref(), accessed_files);
208530209209- build_pages_images.extend(route_assets.images);
210210- build_pages_scripts.extend(route_assets.scripts);
211211- build_pages_styles.extend(route_assets.styles);
531531+ build_pages_images.extend(route_assets.images);
532532+ build_pages_scripts.extend(route_assets.scripts);
533533+ build_pages_styles.extend(route_assets.styles);
212534213213- build_metadata.add_page(
214214- base_path.clone(),
215215- file_path.to_string_lossy().to_string(),
216216- None,
217217- );
535535+ build_metadata.add_page(
536536+ base_path.clone(),
537537+ file_path.to_string_lossy().to_string(),
538538+ None,
539539+ );
218540219219- add_sitemap_entry(
220220- &mut sitemap_entries,
221221- normalized_base_url,
222222- &url,
223223- base_path,
224224- &route.sitemap_metadata(),
225225- &options.sitemap,
226226- );
541541+ add_sitemap_entry(
542542+ &mut sitemap_entries,
543543+ normalized_base_url,
544544+ &url,
545545+ base_path,
546546+ &route.sitemap_metadata(),
547547+ &options.sitemap,
548548+ );
227549228228- page_count += 1;
550550+ page_count += 1;
551551+ } else {
552552+ trace!(target: "build", "Skipping unchanged route: {}", base_path);
553553+ }
229554 } else {
230555 // Dynamic base route
231556 let mut route_assets = RouteAssets::with_default_assets(
···249574250575 // Build all pages for this route
251576 for page in pages {
252252- let page_start = Instant::now();
253253- let url = cached_route.url(&page.0);
254254- let file_path = cached_route.file_path(&page.0, &options.output_dir);
577577+ // Only create RouteIdentifier when incremental builds are enabled
578578+ let route_id = if options.incremental {
579579+ Some(RouteIdentifier::base(base_path.clone(), Some(page.0.0.clone())))
580580+ } else {
581581+ None
582582+ };
583583+584584+ // Check if we need to rebuild this specific page
585585+ if should_rebuild_route(route_id.as_ref(), &routes_to_rebuild) {
586586+ let page_start = Instant::now();
587587+ let url = cached_route.url(&page.0);
588588+ let file_path = cached_route.file_path(&page.0, &options.output_dir);
589589+590590+ // Start tracking content file access for incremental builds
591591+ if options.incremental {
592592+ start_tracking_content_files();
593593+ }
594594+595595+ let content = route.build(&mut PageContext::from_dynamic_route(
596596+ &page,
597597+ content_sources,
598598+ &mut route_assets,
599599+ &url,
600600+ &options.base_url,
601601+ None,
602602+ ))?;
603603+604604+ // Finish tracking and record accessed content files
605605+ let accessed_files = if options.incremental {
606606+ finish_tracking_content_files()
607607+ } else {
608608+ None
609609+ };
255610256256- let content = route.build(&mut PageContext::from_dynamic_route(
257257- &page,
258258- content_sources,
259259- &mut route_assets,
260260- &url,
261261- &options.base_url,
262262- None,
263263- ))?;
611611+ write_route_file(&content, &file_path)?;
264612265265- write_route_file(&content, &file_path)?;
613613+ info!(target: "pages", "└─ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(page_start.elapsed(), &route_format_options));
266614267267- info!(target: "pages", "└─ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(page_start.elapsed(), &route_format_options));
615615+ // Track assets, source file, and content files for this page
616616+ track_route_assets(&mut build_state, route_id.as_ref(), &route_assets);
617617+ track_route_source_file(&mut build_state, route_id.as_ref(), route.source_file());
618618+ track_route_content_files(&mut build_state, route_id.as_ref(), accessed_files);
268619269269- build_metadata.add_page(
270270- base_path.clone(),
271271- file_path.to_string_lossy().to_string(),
272272- Some(page.0.0.clone()),
273273- );
620620+ build_metadata.add_page(
621621+ base_path.clone(),
622622+ file_path.to_string_lossy().to_string(),
623623+ Some(page.0.0.clone()),
624624+ );
274625275275- add_sitemap_entry(
276276- &mut sitemap_entries,
277277- normalized_base_url,
278278- &url,
279279- base_path,
280280- &route.sitemap_metadata(),
281281- &options.sitemap,
282282- );
626626+ add_sitemap_entry(
627627+ &mut sitemap_entries,
628628+ normalized_base_url,
629629+ &url,
630630+ base_path,
631631+ &route.sitemap_metadata(),
632632+ &options.sitemap,
633633+ );
283634284284- page_count += 1;
635635+ page_count += 1;
636636+ } else {
637637+ trace!(target: "build", "Skipping unchanged page: {} with params {:?}", base_path, page.0.0);
638638+ }
285639 }
286640 }
287641···298652299653 if variant_params.is_empty() {
300654 // Static variant
301301- let mut route_assets = RouteAssets::with_default_assets(
302302- &route_assets_options,
303303- Some(image_cache.clone()),
304304- default_scripts.clone(),
305305- vec![],
306306- );
655655+ // Only create RouteIdentifier when incremental builds are enabled
656656+ let route_id = if options.incremental {
657657+ Some(RouteIdentifier::variant(variant_id.clone(), variant_path.clone(), None))
658658+ } else {
659659+ None
660660+ };
307661308308- let params = PageParams::default();
309309- let url = cached_route.variant_url(&params, &variant_id)?;
310310- let file_path =
311311- cached_route.variant_file_path(&params, &options.output_dir, &variant_id)?;
662662+ // Check if we need to rebuild this variant
663663+ if should_rebuild_route(route_id.as_ref(), &routes_to_rebuild) {
664664+ let mut route_assets = RouteAssets::with_default_assets(
665665+ &route_assets_options,
666666+ Some(image_cache.clone()),
667667+ default_scripts.clone(),
668668+ vec![],
669669+ );
312670313313- let result = route.build(&mut PageContext::from_static_route(
314314- content_sources,
315315- &mut route_assets,
316316- &url,
317317- &options.base_url,
318318- Some(variant_id.clone()),
319319- ))?;
671671+ let params = PageParams::default();
672672+ let url = cached_route.variant_url(&params, &variant_id)?;
673673+ let file_path = cached_route.variant_file_path(
674674+ &params,
675675+ &options.output_dir,
676676+ &variant_id,
677677+ )?;
678678+679679+ // Start tracking content file access for incremental builds
680680+ if options.incremental {
681681+ start_tracking_content_files();
682682+ }
683683+684684+ let result = route.build(&mut PageContext::from_static_route(
685685+ content_sources,
686686+ &mut route_assets,
687687+ &url,
688688+ &options.base_url,
689689+ Some(variant_id.clone()),
690690+ ))?;
691691+692692+ // Finish tracking and record accessed content files
693693+ let accessed_files = if options.incremental {
694694+ finish_tracking_content_files()
695695+ } else {
696696+ None
697697+ };
320698321321- write_route_file(&result, &file_path)?;
699699+ write_route_file(&result, &file_path)?;
322700323323- info!(target: "pages", "└─ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(variant_start.elapsed(), &route_format_options));
701701+ info!(target: "pages", "└─ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(variant_start.elapsed(), &route_format_options));
324702325325- build_pages_images.extend(route_assets.images);
326326- build_pages_scripts.extend(route_assets.scripts);
327327- build_pages_styles.extend(route_assets.styles);
703703+ // Track assets, source file, and content files for this variant
704704+ track_route_assets(&mut build_state, route_id.as_ref(), &route_assets);
705705+ track_route_source_file(&mut build_state, route_id.as_ref(), route.source_file());
706706+ track_route_content_files(&mut build_state, route_id.as_ref(), accessed_files);
328707329329- build_metadata.add_page(
330330- variant_path.clone(),
331331- file_path.to_string_lossy().to_string(),
332332- None,
333333- );
708708+ build_pages_images.extend(route_assets.images);
709709+ build_pages_scripts.extend(route_assets.scripts);
710710+ build_pages_styles.extend(route_assets.styles);
711711+712712+ build_metadata.add_page(
713713+ variant_path.clone(),
714714+ file_path.to_string_lossy().to_string(),
715715+ None,
716716+ );
334717335335- add_sitemap_entry(
336336- &mut sitemap_entries,
337337- normalized_base_url,
338338- &url,
339339- &variant_path,
340340- &route.sitemap_metadata(),
341341- &options.sitemap,
342342- );
718718+ add_sitemap_entry(
719719+ &mut sitemap_entries,
720720+ normalized_base_url,
721721+ &url,
722722+ &variant_path,
723723+ &route.sitemap_metadata(),
724724+ &options.sitemap,
725725+ );
343726344344- page_count += 1;
727727+ page_count += 1;
728728+ } else {
729729+ trace!(target: "build", "Skipping unchanged variant: {}", variant_path);
730730+ }
345731 } else {
346732 // Dynamic variant
347733 let mut route_assets = RouteAssets::with_default_assets(
···364750365751 // Build all pages for this variant group
366752 for page in pages {
367367- let variant_page_start = Instant::now();
368368- let url = cached_route.variant_url(&page.0, &variant_id)?;
369369- let file_path = cached_route.variant_file_path(
370370- &page.0,
371371- &options.output_dir,
372372- &variant_id,
373373- )?;
753753+ // Only create RouteIdentifier when incremental builds are enabled
754754+ let route_id = if options.incremental {
755755+ Some(RouteIdentifier::variant(
756756+ variant_id.clone(),
757757+ variant_path.clone(),
758758+ Some(page.0.0.clone()),
759759+ ))
760760+ } else {
761761+ None
762762+ };
763763+764764+ // Check if we need to rebuild this specific variant page
765765+ if should_rebuild_route(route_id.as_ref(), &routes_to_rebuild) {
766766+ let variant_page_start = Instant::now();
767767+ let url = cached_route.variant_url(&page.0, &variant_id)?;
768768+ let file_path = cached_route.variant_file_path(
769769+ &page.0,
770770+ &options.output_dir,
771771+ &variant_id,
772772+ )?;
773773+774774+ // Start tracking content file access for incremental builds
775775+ if options.incremental {
776776+ start_tracking_content_files();
777777+ }
778778+779779+ let content = route.build(&mut PageContext::from_dynamic_route(
780780+ &page,
781781+ content_sources,
782782+ &mut route_assets,
783783+ &url,
784784+ &options.base_url,
785785+ Some(variant_id.clone()),
786786+ ))?;
787787+788788+ // Finish tracking and record accessed content files
789789+ let accessed_files = if options.incremental {
790790+ finish_tracking_content_files()
791791+ } else {
792792+ None
793793+ };
374794375375- let content = route.build(&mut PageContext::from_dynamic_route(
376376- &page,
377377- content_sources,
378378- &mut route_assets,
379379- &url,
380380- &options.base_url,
381381- Some(variant_id.clone()),
382382- ))?;
795795+ write_route_file(&content, &file_path)?;
383796384384- write_route_file(&content, &file_path)?;
797797+ info!(target: "pages", "│ └─ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(variant_page_start.elapsed(), &route_format_options));
385798386386- info!(target: "pages", "│ └─ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(variant_page_start.elapsed(), &route_format_options));
799799+ // Track assets, source file, and content files for this variant page
800800+ track_route_assets(&mut build_state, route_id.as_ref(), &route_assets);
801801+ track_route_source_file(&mut build_state, route_id.as_ref(), route.source_file());
802802+ track_route_content_files(&mut build_state, route_id.as_ref(), accessed_files);
387803388388- build_metadata.add_page(
389389- variant_path.clone(),
390390- file_path.to_string_lossy().to_string(),
391391- Some(page.0.0.clone()),
392392- );
804804+ build_metadata.add_page(
805805+ variant_path.clone(),
806806+ file_path.to_string_lossy().to_string(),
807807+ Some(page.0.0.clone()),
808808+ );
393809394394- add_sitemap_entry(
395395- &mut sitemap_entries,
396396- normalized_base_url,
397397- &url,
398398- &variant_path,
399399- &route.sitemap_metadata(),
400400- &options.sitemap,
401401- );
810810+ add_sitemap_entry(
811811+ &mut sitemap_entries,
812812+ normalized_base_url,
813813+ &url,
814814+ &variant_path,
815815+ &route.sitemap_metadata(),
816816+ &options.sitemap,
817817+ );
402818403403- page_count += 1;
819819+ page_count += 1;
820820+ } else {
821821+ trace!(target: "build", "Skipping unchanged variant page: {} with params {:?}", variant_path, page.0.0);
822822+ }
404823 }
405824 }
406825···420839 fs::create_dir_all(&route_assets_options.output_assets_dir)?;
421840 }
422841423423- if !build_pages_styles.is_empty() || !build_pages_scripts.is_empty() {
842842+ if !build_pages_styles.is_empty()
843843+ || !build_pages_scripts.is_empty()
844844+ || (is_incremental && should_rebundle)
845845+ {
424846 let assets_start = Instant::now();
425847 print_title("generating assets");
426848···438860 })
439861 .collect::<Vec<InputItem>>();
440862441441- let bundler_inputs = build_pages_scripts
863863+ let mut bundler_inputs = build_pages_scripts
442864 .iter()
443865 .map(|script| InputItem {
444866 import: script.path().to_string_lossy().to_string(),
···453875 .chain(css_inputs.into_iter())
454876 .collect::<Vec<InputItem>>();
455877878878+ // During incremental builds, merge with previous bundler inputs
879879+ // to ensure we bundle all assets, not just those from rebuilt routes
880880+ if is_incremental && !build_state.bundler_inputs.is_empty() {
881881+ debug!(target: "bundling", "Merging with {} previous bundler inputs", build_state.bundler_inputs.len());
882882+883883+ let current_imports: FxHashSet<String> = bundler_inputs
884884+ .iter()
885885+ .map(|input| input.import.clone())
886886+ .collect();
887887+888888+ // Add previous inputs that aren't in the current set
889889+ for prev_input in &build_state.bundler_inputs {
890890+ if !current_imports.contains(prev_input) {
891891+ bundler_inputs.push(InputItem {
892892+ import: prev_input.clone(),
893893+ name: Some(
894894+ PathBuf::from(prev_input)
895895+ .file_stem()
896896+ .unwrap_or_default()
897897+ .to_string_lossy()
898898+ .to_string(),
899899+ ),
900900+ });
901901+ }
902902+ }
903903+ }
904904+456905 debug!(
457906 target: "bundling",
458907 "Bundler inputs: {:?}",
···462911 .collect::<Vec<String>>()
463912 );
464913914914+ // Store bundler inputs in build state for next incremental build
915915+ if options.incremental {
916916+ build_state.bundler_inputs = bundler_inputs
917917+ .iter()
918918+ .map(|input| input.import.clone())
919919+ .collect();
920920+ }
921921+465922 if !bundler_inputs.is_empty() {
466923 let mut module_types_hashmap = FxHashMap::default();
924924+ // Fonts
467925 module_types_hashmap.insert("woff".to_string(), ModuleType::Asset);
468926 module_types_hashmap.insert("woff2".to_string(), ModuleType::Asset);
927927+ module_types_hashmap.insert("ttf".to_string(), ModuleType::Asset);
928928+ module_types_hashmap.insert("otf".to_string(), ModuleType::Asset);
929929+ module_types_hashmap.insert("eot".to_string(), ModuleType::Asset);
930930+ // Images
931931+ module_types_hashmap.insert("png".to_string(), ModuleType::Asset);
932932+ module_types_hashmap.insert("jpg".to_string(), ModuleType::Asset);
933933+ module_types_hashmap.insert("jpeg".to_string(), ModuleType::Asset);
934934+ module_types_hashmap.insert("gif".to_string(), ModuleType::Asset);
935935+ module_types_hashmap.insert("svg".to_string(), ModuleType::Asset);
936936+ module_types_hashmap.insert("webp".to_string(), ModuleType::Asset);
937937+ module_types_hashmap.insert("avif".to_string(), ModuleType::Asset);
938938+ module_types_hashmap.insert("ico".to_string(), ModuleType::Asset);
469939470940 let mut bundler = Bundler::with_plugins(
471941 BundlerOptions {
···495965 .collect::<Vec<PathBuf>>(),
496966 }),
497967 Arc::new(PrefetchPlugin {}),
968968+ Arc::new(ReplacePlugin::new(FxHashMap::default())?),
498969 ],
499970 )?;
500971501501- let _result = bundler.write().await?;
972972+ let result = bundler.write().await?;
502973503503- // TODO: Add outputted chunks to build_metadata
974974+ // Track transitive dependencies from bundler output
975975+ // For each chunk, map all its modules to the routes that use the entry point
976976+ // For assets (images, fonts via CSS url()), map them to all routes using any entry point
977977+ if options.incremental {
978978+ // First, collect all routes that use any bundler entry point
979979+ let mut all_bundler_routes: FxHashSet<RouteIdentifier> = FxHashSet::default();
980980+981981+ for output in &result.assets {
982982+ if let Output::Chunk(chunk) = output {
983983+ // Get the entry point for this chunk
984984+ if let Some(facade_module_id) = &chunk.facade_module_id {
985985+ // Try to find routes using this entry point
986986+ let entry_path = PathBuf::from(facade_module_id.as_str());
987987+ let canonical_entry = entry_path.canonicalize().ok();
988988+989989+ // Look up routes for this entry point
990990+ let routes = canonical_entry
991991+ .as_ref()
992992+ .and_then(|p| build_state.asset_to_routes.get(p))
993993+ .cloned();
994994+995995+ if let Some(routes) = routes {
996996+ // Collect routes for asset tracking later
997997+ all_bundler_routes.extend(routes.iter().cloned());
998998+999999+ // Register all modules in this chunk as dependencies for those routes
10001000+ let mut transitive_count = 0;
10011001+ for module_id in &chunk.module_ids {
10021002+ let module_path = PathBuf::from(module_id.as_str());
10031003+ if let Ok(canonical_module) = module_path.canonicalize() {
10041004+ // Skip the entry point itself (already tracked)
10051005+ if Some(&canonical_module) != canonical_entry.as_ref() {
10061006+ for route in &routes {
10071007+ build_state.track_asset(
10081008+ canonical_module.clone(),
10091009+ route.clone(),
10101010+ );
10111011+ }
10121012+ transitive_count += 1;
10131013+ }
10141014+ }
10151015+ }
10161016+ if transitive_count > 0 {
10171017+ debug!(target: "build", "Tracked {} transitive dependencies for {}", transitive_count, facade_module_id);
10181018+ }
10191019+ }
10201020+ }
10211021+ }
10221022+ }
10231023+10241024+ // Now track Output::Asset items (images, fonts, etc. referenced via CSS url() or JS imports)
10251025+ // These are mapped to all routes that use any bundler entry point
10261026+ if !all_bundler_routes.is_empty() {
10271027+ let mut asset_count = 0;
10281028+ for output in &result.assets {
10291029+ if let Output::Asset(asset) = output {
10301030+ for original_file in &asset.original_file_names {
10311031+ let asset_path = PathBuf::from(original_file);
10321032+ if let Ok(canonical_asset) = asset_path.canonicalize() {
10331033+ for route in &all_bundler_routes {
10341034+ build_state
10351035+ .track_asset(canonical_asset.clone(), route.clone());
10361036+ }
10371037+ asset_count += 1;
10381038+ }
10391039+ }
10401040+ }
10411041+ }
10421042+ if asset_count > 0 {
10431043+ debug!(target: "build", "Tracked {} bundler assets for {} routes", asset_count, all_bundler_routes.len());
10441044+ }
10451045+ }
10461046+ }
5041047 }
50510485061049 info!(target: "build", "{}", format!("Assets generated in {}", format_elapsed_time(assets_start.elapsed(), &section_format_options)).bold());
···5961139 info!(target: "SKIP_FORMAT", "{}", "");
5971140 info!(target: "build", "{}", format!("Build completed in {}", format_elapsed_time(build_start.elapsed(), &section_format_options)).bold());
598114111421142+ // Save build state for next incremental build (only if incremental is enabled)
11431143+ if options.incremental {
11441144+ if let Err(e) = build_state.save(build_cache_dir) {
11451145+ warn!(target: "build", "Failed to save build state: {}", e);
11461146+ } else {
11471147+ debug!(target: "build", "Build state saved to {}", build_cache_dir.join("build_state.json").display());
11481148+ }
11491149+ }
11501150+5991151 if let Some(clean_up_handle) = clean_up_handle {
6001152 clean_up_handle.await?;
6011153 }
···6781230 fs::create_dir_all(parent_dir)?
6791231 }
680123212331233+ trace!(target: "build", "Writing HTML file: {}", file_path.display());
6811234 fs::write(file_path, content)?;
68212356831236 Ok(())
···11//! Core functions and structs to define the content sources of your website.
22//!
33//! Content sources represent the content of your website, such as articles, blog posts, etc. Then, content sources can be passed to [`coronate()`](crate::coronate), through the [`content_sources!`](crate::content_sources) macro, to be loaded.
44-use std::{any::Any, path::PathBuf, sync::Arc};
44+use std::{
55+ any::Any,
66+ cell::RefCell,
77+ path::{Path, PathBuf},
88+ sync::Arc,
99+};
51066-use rustc_hash::FxHashMap;
1111+use rustc_hash::{FxHashMap, FxHashSet};
712813mod highlight;
914pub mod markdown;
···2530};
26312732pub use highlight::{HighlightOptions, highlight_code};
3333+3434+// Thread-local storage for tracking content file access during page rendering.
3535+// This allows us to transparently track which content files a page uses
3636+// without requiring changes to user code.
3737+thread_local! {
3838+ static ACCESSED_CONTENT_FILES: RefCell<Option<FxHashSet<PathBuf>>> = const { RefCell::new(None) };
3939+}
4040+4141+/// Start tracking content file access for a page render.
4242+/// Call this before rendering a page, then call `finish_tracking_content_files()`
4343+/// after rendering to get the set of accessed content files.
4444+pub(crate) fn start_tracking_content_files() {
4545+ ACCESSED_CONTENT_FILES.with(|cell| {
4646+ *cell.borrow_mut() = Some(FxHashSet::default());
4747+ });
4848+}
4949+5050+/// Finish tracking content file access and return the set of accessed files.
5151+/// Returns `None` if tracking was not started.
5252+pub(crate) fn finish_tracking_content_files() -> Option<FxHashSet<PathBuf>> {
5353+ ACCESSED_CONTENT_FILES.with(|cell| cell.borrow_mut().take())
5454+}
5555+5656+/// Record that a content file was accessed.
5757+/// This is called internally when entries are accessed.
5858+fn track_content_file_access(file_path: &Path) {
5959+ ACCESSED_CONTENT_FILES.with(|cell| {
6060+ if let Some(ref mut set) = *cell.borrow_mut() {
6161+ set.insert(file_path.to_path_buf());
6262+ }
6363+ });
6464+}
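A self-contained toy version of this start/record/finish pattern (using std's HashSet instead of FxHashSet) behaves as shown below; the real functions above are crate-internal and are driven from build.rs around each route render:

use std::cell::RefCell;
use std::collections::HashSet;
use std::path::{Path, PathBuf};

thread_local! {
    static ACCESSED: RefCell<Option<HashSet<PathBuf>>> = const { RefCell::new(None) };
}

fn start_tracking() {
    ACCESSED.with(|cell| *cell.borrow_mut() = Some(HashSet::new()));
}

fn record_access(path: &Path) {
    ACCESSED.with(|cell| {
        // Only record while tracking is active; otherwise this is a no-op.
        if let Some(ref mut set) = *cell.borrow_mut() {
            set.insert(path.to_path_buf());
        }
    });
}

fn finish_tracking() -> Option<HashSet<PathBuf>> {
    ACCESSED.with(|cell| cell.borrow_mut().take())
}

fn main() {
    // Accesses before tracking starts are ignored.
    record_access(Path::new("ignored.md"));
    assert_eq!(finish_tracking(), None);

    // Everything recorded between start and finish is collected for the current thread.
    start_tracking();
    record_access(Path::new("content/articles/hello.md"));
    let files = finish_tracking().unwrap();
    assert!(files.contains(Path::new("content/articles/hello.md")));
}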
28652966/// Helps implement a struct as a Markdown content entry.
3067///
···302339 }
303340 }
304341342342+ /// Initialize only the content sources with the given names.
343343+ /// Sources not in the set are left untouched (their entries remain as-is).
344344+ /// Returns the names of sources that were actually initialized.
345345+ pub fn init_sources(&mut self, source_names: &rustc_hash::FxHashSet<String>) -> Vec<String> {
346346+ let mut initialized = Vec::new();
347347+ for source in &mut self.0 {
348348+ if source_names.contains(source.get_name()) {
349349+ source.init();
350350+ initialized.push(source.get_name().to_string());
351351+ }
352352+ }
353353+ initialized
354354+ }
355355+305356 pub fn get_untyped_source(&self, name: &str) -> &ContentSource<Untyped> {
306357 self.get_source::<Untyped>(name)
307358 }
···337388/// A source of content such as articles, blog posts, etc.
338389pub struct ContentSource<T = Untyped> {
339390 pub name: String,
340340- pub entries: Vec<Arc<EntryInner<T>>>,
391391+ entries: Vec<Arc<EntryInner<T>>>,
341392 pub(crate) init_method: ContentSourceInitMethod<T>,
342393}
343394···354405 }
355406356407 pub fn get_entry(&self, id: &str) -> &Entry<T> {
357357- self.entries
408408+ let entry = self
409409+ .entries
358410 .iter()
359411 .find(|entry| entry.id == id)
360360- .unwrap_or_else(|| panic!("Entry with id '{}' not found", id))
412412+ .unwrap_or_else(|| panic!("Entry with id '{}' not found", id));
413413+414414+ // Track file access for incremental builds
415415+ if let Some(ref file_path) = entry.file_path {
416416+ track_content_file_access(file_path);
417417+ }
418418+419419+ entry
361420 }
362421363422 pub fn get_entry_safe(&self, id: &str) -> Option<&Entry<T>> {
364364- self.entries.iter().find(|entry| entry.id == id)
423423+ let entry = self.entries.iter().find(|entry| entry.id == id);
424424+425425+ // Track file access for incremental builds
426426+ if let Some(entry) = &entry
427427+ && let Some(ref file_path) = entry.file_path
428428+ {
429429+ track_content_file_access(file_path);
430430+ }
431431+432432+ entry
365433 }
366434367435 pub fn into_params<P>(&self, cb: impl FnMut(&Entry<T>) -> P) -> Vec<P>
368436 where
369437 P: Into<PageParams>,
370438 {
439439+ // Track all entries accessed for incremental builds
440440+ for entry in &self.entries {
441441+ if let Some(ref file_path) = entry.file_path {
442442+ track_content_file_access(file_path);
443443+ }
444444+ }
371445 self.entries.iter().map(cb).collect()
372446 }
373447···378452 where
379453 Params: Into<PageParams>,
380454 {
455455+ // Track all entries accessed for incremental builds
456456+ for entry in &self.entries {
457457+ if let Some(ref file_path) = entry.file_path {
458458+ track_content_file_access(file_path);
459459+ }
460460+ }
381461 self.entries.iter().map(cb).collect()
382462 }
463463+464464+ /// Get all entries, tracking access for incremental builds.
465465+ ///
466466+ /// This returns a slice of all entries in the content source.
467467+ /// You can use standard slice methods like `.iter()`, `.len()`, `.is_empty()`, etc.
468468+ pub fn entries(&self) -> &[Entry<T>] {
469469+ // Track all entries accessed for incremental builds
470470+ for entry in &self.entries {
471471+ if let Some(ref file_path) = entry.file_path {
472472+ track_content_file_access(file_path);
473473+ }
474474+ }
475475+ &self.entries
476476+ }
383477}
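To make the tracking granularity concrete, a sketch from inside a route's `render` (the `ArticleContent` type and the entry ids mirror the e2e fixture later in this diff):

    let articles = ctx.content.get_source::<ArticleContent>("articles");

    // get_entry() records only the file backing that entry, so editing another
    // article does not mark this route as affected.
    let first = articles.get_entry("first-post");

    // entries() records every entry's file, so a listing page built from it is
    // rebuilt whenever any article changes.
    for entry in articles.entries() {
        let _ = (&first.id, &entry.id);
    }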
384478385479#[doc(hidden)]
···389483 fn init(&mut self);
390484 fn get_name(&self) -> &str;
391485 fn as_any(&self) -> &dyn Any; // Used for type checking at runtime
486486+487487+ /// Get all file paths for entries in this content source.
488488+ /// Used for incremental builds to map content files to their source.
489489+ fn get_entry_file_paths(&self) -> Vec<PathBuf>;
392490}
393491394492impl<T: 'static + Sync + Send> ContentSourceInternal for ContentSource<T> {
···400498 }
401499 fn as_any(&self) -> &dyn Any {
402500 self
501501+ }
502502+ fn get_entry_file_paths(&self) -> Vec<PathBuf> {
503503+ self.entries
504504+ .iter()
505505+ .filter_map(|entry| entry.file_path.clone())
506506+ .collect()
403507 }
404508}
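One way the build side could consume `get_entry_file_paths()` to map changed content files back to their source; this is a sketch, not the actual `execute_build` code, and `sources` stands for an iterator of `&dyn ContentSourceInternal`:

    use std::{collections::HashMap, path::PathBuf};

    let mut file_to_source: HashMap<PathBuf, String> = HashMap::new();
    for source in sources {
        for path in source.get_entry_file_paths() {
            file_to_source.insert(path, source.get_name().to_string());
        }
    }
    // A changed file found in this map only needs its one source re-initialized
    // (see init_sources above); a file that is not in the map is untracked.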
···5454// Internal modules
5555mod logging;
56565757-use std::env;
5757+use std::sync::LazyLock;
5858+use std::{env, path::PathBuf};
58595960use build::execute_build;
6061use content::ContentSources;
6162use logging::init_logging;
6263use route::FullRoute;
63646565+static IS_DEV: LazyLock<bool> = LazyLock::new(|| {
6666+ std::env::var("MAUDIT_DEV")
6767+ .map(|v| v == "true")
6868+ .unwrap_or(false)
6969+});
7070+6471/// Returns whether Maudit is running in development mode (through `maudit dev`).
6572///
6673/// This can be useful to conditionally enable features or logging that should only be active during development.
6774/// Often, this is used to skip expensive operations that would otherwise slow down rebuilds.
6875pub fn is_dev() -> bool {
6969- env::var("MAUDIT_DEV").map(|v| v == "true").unwrap_or(false)
7676+ *IS_DEV
7077}
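For example, inside a route's `render` a site could skip placeholder generation while iterating locally; a sketch that assumes `add_image` returns the `Image` whose `placeholder()` appears earlier in this diff:

    let logo = ctx.assets.add_image("logo.png")?;
    // Generate the LQIP only for production builds; `maudit dev` stays fast.
    let lqip = if maudit::is_dev() {
        None
    } else {
        Some(logo.placeholder()?)
    };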
71787279#[macro_export]
···212219 .enable_all()
213220 .build()?;
214221215215- execute_build(routes, &mut content_sources, &options, &async_runtime)
222222+ // Check for changed files from environment variable (set by CLI in dev mode)
223223+ let changed_files = env::var("MAUDIT_CHANGED_FILES")
224224+ .ok()
225225+ .and_then(|s| serde_json::from_str::<Vec<String>>(&s).ok())
226226+ .map(|paths| paths.into_iter().map(PathBuf::from).collect::<Vec<_>>());
227227+228228+ execute_build(
229229+ routes,
230230+ &mut content_sources,
231231+ &options,
232232+ changed_files.as_deref(),
233233+ &async_runtime,
234234+ )
216235}
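The variable is expected to hold a JSON array of path strings; a minimal round-trip of the parsing above, with illustrative paths:

    use std::path::PathBuf;

    let raw = r#"["src/assets/blog.css", "content/articles/first-post.md"]"#;
    let changed: Vec<PathBuf> = serde_json::from_str::<Vec<String>>(raw)
        .expect("MAUDIT_CHANGED_FILES should be a JSON array of strings")
        .into_iter()
        .map(PathBuf::from)
        .collect();
    assert_eq!(changed.len(), 2);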
+6-2
crates/maudit/src/logging.rs
···29293030 let _ = Builder::from_env(logging_env)
3131 .format(|buf, record| {
3232- if std::env::args().any(|arg| arg == "--quiet") || std::env::var("MAUDIT_QUIET").is_ok()
3333- {
3232+ if std::env::args().any(|arg| arg == "--quiet") {
3333+ return Ok(());
3434+ }
3535+3636+ // In quiet mode, only show build target logs (for debugging incremental builds)
3737+ if std::env::var("MAUDIT_QUIET").is_ok() && record.target() != "build" {
3438 return Ok(());
3539 }
3640
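Under `MAUDIT_QUIET`, a record therefore only reaches the output if it was logged with the `build` target; the message below is purely illustrative:

    // Kept under MAUDIT_QUIET: explicit "build" target.
    log::info!(target: "build", "Rebuilding {} affected routes", 2);
    // Dropped under MAUDIT_QUIET: default target (the module path).
    log::debug!("some other log line");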
+18-5
crates/maudit/src/route.rs
···99use std::any::Any;
1010use std::path::{Path, PathBuf};
11111212-use lol_html::{RewriteStrSettings, element, rewrite_str};
1212+use lol_html::{element, rewrite_str, RewriteStrSettings};
13131414/// The result of a page render, can be either text, raw bytes, or an error.
1515///
···282282/// impl Route for Index {
283283/// fn render(&self, ctx: &mut PageContext) -> impl Into<RenderResult> {
284284/// let logo = ctx.assets.add_image("logo.png")?;
285285-/// let last_entries = &ctx.content.get_source::<ArticleContent>("articles").entries;
285285+/// let last_entries = ctx.content.get_source::<ArticleContent>("articles").entries();
286286///
287287/// Ok(html! {
288288/// main {
···504504pub trait InternalRoute {
505505 fn route_raw(&self) -> Option<String>;
506506507507+ /// Returns the source file path where this route is defined.
508508+ /// This is used for incremental builds to track which routes are affected
509509+ /// when a source file changes.
510510+ fn source_file(&self) -> &'static str;
511511+507512 fn variants(&self) -> Vec<(String, String)> {
508513 vec![]
509514 }
···796801 self.inner.route_raw()
797802 }
798803804804+ fn source_file(&self) -> &'static str {
805805+ self.inner.source_file()
806806+ }
807807+799808 fn variants(&self) -> Vec<(String, String)> {
800809 self.inner.variants()
801810 }
···957966 //! use maudit::route::prelude::*;
958967 //! ```
959968 pub use super::{
960960- CachedRoute, DynamicRouteContext, FullRoute, Page, PageContext, PageParams, Pages,
961961- PaginatedContentPage, PaginationPage, RenderResult, Route, RouteExt, paginate, redirect,
969969+ paginate, redirect, CachedRoute, DynamicRouteContext, FullRoute, Page, PageContext,
970970+ PageParams, Pages, PaginatedContentPage, PaginationPage, RenderResult, Route, RouteExt,
962971 };
963972 pub use crate::assets::{
964973 Asset, Image, ImageFormat, ImageOptions, ImagePlaceholder, RenderWithAlt, Script, Style,
965974 StyleOptions,
966975 };
967976 pub use crate::content::{ContentContext, ContentEntry, Entry, EntryInner, MarkdownContent};
968968- pub use maudit_macros::{Params, route};
977977+ pub use maudit_macros::{route, Params};
969978}
970979971980#[cfg(test)]
···982991 impl InternalRoute for TestPage {
983992 fn route_raw(&self) -> Option<String> {
984993 Some(self.route.clone())
994994+ }
995995+996996+ fn source_file(&self) -> &'static str {
997997+ file!()
985998 }
986999 }
9871000
···11//! Blog archetype.
22//! Represents a markdown blog archetype, with an index page and individual entry pages.
33use crate::layouts::layout;
44-use maud::{Markup, html};
44+use maud::{html, Markup};
55use maudit::content::markdown_entry;
66-use maudit::route::FullRoute;
76use maudit::route::prelude::*;
77+use maudit::route::FullRoute;
8899pub fn blog_index_content<T: FullRoute>(
1010 route: impl FullRoute,
···18181919 let markup = html! {
2020 main {
2121- @for entry in &blog_entries.entries {
2121+ @for entry in blog_entries.entries() {
2222 a href=(route.url(&BlogEntryParams { entry: entry.id.clone() }.into())) {
2323 h2 { (entry.data(ctx).title) }
2424 p { (entry.data(ctx).description) }
+3
e2e/README.md
···1313## Running Tests
14141515The tests will automatically:
1616+16171. Build the prefetch.js bundle (via `cargo xtask build-maudit-js`)
17182. Start the Maudit dev server on the test fixture site
18193. Run the tests
···4647## Features Tested
47484849### Basic Prefetch
5050+4951- Creating link elements with `rel="prefetch"`
5052- Preventing duplicate prefetches
5153- Skipping current page prefetch
5254- Blocking cross-origin prefetches
53555456### Prerendering (Chromium only)
5757+5558- Creating `<script type="speculationrules">` elements
5659- Different eagerness levels (immediate, eager, moderate, conservative)
5760- Fallback to link prefetch on non-Chromium browsers
···11+import { expect } from "@playwright/test";
22+import { createTestWithFixture } from "./test-utils";
33+import { readFileSync, writeFileSync } from "node:fs";
44+import { resolve, dirname } from "node:path";
55+import { fileURLToPath } from "node:url";
66+77+const __filename = fileURLToPath(import.meta.url);
88+const __dirname = dirname(__filename);
99+1010+// Create test instance with hot-reload fixture
1111+const test = createTestWithFixture("hot-reload");
1212+1313+test.describe.configure({ mode: "serial" });
1414+1515+/**
1616+ * Wait for dev server to complete a build/rerun by polling logs
1717+ */
1818+async function waitForBuildComplete(devServer: any, timeoutMs = 20000): Promise<string[]> {
1919+ const startTime = Date.now();
2020+2121+ while (Date.now() - startTime < timeoutMs) {
2222+ const logs = devServer.getLogs(100);
2323+ const logsText = logs.join("\n").toLowerCase();
2424+2525+ // Look for completion messages
2626+ if (
2727+ logsText.includes("finished") ||
2828+ logsText.includes("rerun finished") ||
2929+ logsText.includes("build finished")
3030+ ) {
3131+ return logs;
3232+ }
3333+3434+ // Wait 100ms before checking again
3535+ await new Promise((resolve) => setTimeout(resolve, 100));
3636+ }
3737+3838+ throw new Error(`Build did not complete within ${timeoutMs}ms`);
3939+}
4040+4141+test.describe("Hot Reload", () => {
4242+ // Increase timeout for these tests since they involve compilation
4343+ test.setTimeout(60000);
4444+4545+ const fixturePath = resolve(__dirname, "..", "fixtures", "hot-reload");
4646+ const indexPath = resolve(fixturePath, "src", "pages", "index.rs");
4747+ const mainPath = resolve(fixturePath, "src", "main.rs");
4848+ const dataPath = resolve(fixturePath, "data.txt");
4949+ let originalIndexContent: string;
5050+ let originalMainContent: string;
5151+ let originalDataContent: string;
5252+5353+ test.beforeAll(async () => {
5454+ // Save original content
5555+ originalIndexContent = readFileSync(indexPath, "utf-8");
5656+ originalMainContent = readFileSync(mainPath, "utf-8");
5757+ originalDataContent = readFileSync(dataPath, "utf-8");
5858+5959+ // Ensure files are in original state
6060+ writeFileSync(indexPath, originalIndexContent, "utf-8");
6161+ writeFileSync(mainPath, originalMainContent, "utf-8");
6262+ writeFileSync(dataPath, originalDataContent, "utf-8");
6363+ });
6464+6565+ test.afterEach(async ({ devServer }) => {
6666+ // Restore original content after each test
6767+ writeFileSync(indexPath, originalIndexContent, "utf-8");
6868+ writeFileSync(mainPath, originalMainContent, "utf-8");
6969+ writeFileSync(dataPath, originalDataContent, "utf-8");
7070+7171+ // Only wait for build if devServer is available (startup might have failed)
7272+ if (devServer) {
7373+ try {
7474+ devServer.clearLogs();
7575+ await waitForBuildComplete(devServer);
7676+ } catch (error) {
7777+ console.warn("Failed to wait for build completion in afterEach:", error);
7878+ }
7979+ }
8080+ });
8181+8282+ test.afterAll(async () => {
8383+ // Restore original content
8484+ writeFileSync(indexPath, originalIndexContent, "utf-8");
8585+ writeFileSync(mainPath, originalMainContent, "utf-8");
8686+ writeFileSync(dataPath, originalDataContent, "utf-8");
8787+ });
8888+8989+ test("should recompile when Rust code changes (dependencies)", async ({ page, devServer }) => {
9090+ await page.goto(devServer.url);
9191+9292+ // Verify initial content
9393+ await expect(page.locator("#title")).toHaveText("Original Title");
9494+9595+ // Clear logs to track what happens after this point
9696+ devServer.clearLogs();
9797+9898+ // Modify main.rs - this is a tracked dependency, should trigger recompile
9999+ const modifiedMain = originalMainContent.replace(
100100+ "BuildOptions::default()",
101101+ "BuildOptions::default() // Modified comment",
102102+ );
103103+ writeFileSync(mainPath, modifiedMain, "utf-8");
104104+105105+ // Wait for rebuild to complete
106106+ const logs = await waitForBuildComplete(devServer, 20000);
107107+ const logsText = logs.join("\n");
108108+109109+ // Check logs to verify it actually recompiled (ran cargo)
110110+ expect(logsText).toContain("rebuilding");
111111+ // Make sure it didn't just rerun the binary
112112+ expect(logsText.toLowerCase()).not.toContain("rerunning binary");
113113+ });
114114+115115+ test("should rerun without recompile when non-dependency files change", async ({
116116+ page,
117117+ devServer,
118118+ }) => {
119119+ await page.goto(devServer.url);
120120+121121+ // Verify initial content
122122+ await expect(page.locator("#title")).toHaveText("Original Title");
123123+124124+ // Clear logs to track what happens after this point
125125+ devServer.clearLogs();
126126+127127+ // Modify data.txt - this file is NOT in the .d dependencies
128128+ // So it should trigger a rerun without recompilation
129129+ writeFileSync(dataPath, "Modified data", "utf-8");
130130+131131+ // Wait for build/rerun to complete
132132+ const logs = await waitForBuildComplete(devServer, 20000);
133133+ const logsText = logs.join("\n");
134134+135135+ // Should see "rerunning binary" message (case insensitive)
136136+ const hasRerunMessage = logsText.toLowerCase().includes("rerunning binary");
137137+ expect(hasRerunMessage).toBe(true);
138138+139139+ // Should NOT see cargo-related rebuild messages (compiling, building crate)
140140+ // Note: "Rebuilding N affected routes" is fine - that's the incremental build system
141141+ expect(logsText.toLowerCase()).not.toContain("compiling");
142142+ expect(logsText.toLowerCase()).not.toContain("cargo build");
143143+ });
144144+145145+ test("should show updated content after file changes", async ({ page, devServer }) => {
146146+ await page.goto(devServer.url);
147147+148148+ // Verify initial content
149149+ await expect(page.locator("#title")).toHaveText("Original Title");
150150+151151+ // Prepare to wait for actual reload by waiting for the same URL to reload
152152+ const currentUrl = page.url();
153153+154154+ // Modify the file
155155+ const modifiedContent = originalIndexContent.replace(
156156+ 'h1 id="title" { "Original Title" }',
157157+ 'h1 id="title" { "Another Update" }',
158158+ );
159159+ writeFileSync(indexPath, modifiedContent, "utf-8");
160160+161161+ // Wait for the page to actually reload on the same URL
162162+ await page.waitForURL(currentUrl, { timeout: 15000 });
163163+ // Verify the updated content
164164+ await expect(page.locator("#title")).toHaveText("Another Update", { timeout: 15000 });
165165+ });
166166+});
+1024
e2e/tests/incremental-build.spec.ts
···11+import { expect } from "@playwright/test";
22+import { createTestWithFixture } from "./test-utils";
33+import { readFileSync, writeFileSync, renameSync, existsSync } from "node:fs";
44+import { resolve, dirname } from "node:path";
55+import { fileURLToPath } from "node:url";
66+77+const __filename = fileURLToPath(import.meta.url);
88+const __dirname = dirname(__filename);
99+1010+// Create test instance with incremental-build fixture
1111+const test = createTestWithFixture("incremental-build");
1212+1313+// Run tests serially since they share state; allow retries for timing-sensitive tests
1414+test.describe.configure({ mode: "serial", retries: 2 });
1515+1616+/**
1717+ * Wait for dev server to complete a build by polling logs.
1818+ * Returns logs once build is finished.
1919+ */
2020+async function waitForBuildComplete(devServer: any, timeoutMs = 30000): Promise<string[]> {
2121+ const startTime = Date.now();
2222+ const pollInterval = 50;
2323+2424+ // Phase 1: Wait for build to start
2525+ while (Date.now() - startTime < timeoutMs) {
2626+ const logs = devServer.getLogs(200);
2727+ const logsText = logs.join("\n").toLowerCase();
2828+2929+ if (
3030+ logsText.includes("rerunning") ||
3131+ logsText.includes("rebuilding") ||
3232+ logsText.includes("files changed")
3333+ ) {
3434+ break;
3535+ }
3636+3737+ await new Promise((r) => setTimeout(r, pollInterval));
3838+ }
3939+4040+ // Phase 2: Wait for build to finish
4141+ while (Date.now() - startTime < timeoutMs) {
4242+ const logs = devServer.getLogs(200);
4343+ const logsText = logs.join("\n").toLowerCase();
4444+4545+ if (
4646+ logsText.includes("finished") ||
4747+ logsText.includes("rerun finished") ||
4848+ logsText.includes("build finished")
4949+ ) {
5050+ return logs;
5151+ }
5252+5353+ await new Promise((r) => setTimeout(r, pollInterval));
5454+ }
5555+5656+ console.log("TIMEOUT - logs seen:", devServer.getLogs(50));
5757+ throw new Error(`Build did not complete within ${timeoutMs}ms`);
5858+}
5959+6060+/**
6161+ * Wait for the dev server to become idle (no builds in progress).
6262+ * This polls build IDs until they stop changing.
6363+ */
6464+async function waitForIdle(htmlPaths: Record<string, string>, stableMs = 200): Promise<void> {
6565+ let lastIds = recordBuildIds(htmlPaths);
6666+ let stableTime = 0;
6767+6868+ while (stableTime < stableMs) {
6969+ await new Promise((r) => setTimeout(r, 50));
7070+ const currentIds = recordBuildIds(htmlPaths);
7171+7272+ const allSame = Object.keys(lastIds).every(
7373+ (key) => lastIds[key] === currentIds[key]
7474+ );
7575+7676+ if (allSame) {
7777+ stableTime += 50;
7878+ } else {
7979+ stableTime = 0;
8080+ lastIds = currentIds;
8181+ }
8282+ }
8383+}
8484+8585+/**
8686+ * Wait for a specific HTML file's build ID to change from a known value.
8787+ * This is more reliable than arbitrary sleeps.
8888+ */
8989+async function waitForBuildIdChange(
9090+ htmlPath: string,
9191+ previousId: string | null,
9292+ timeoutMs = 30000,
9393+): Promise<string> {
9494+ const startTime = Date.now();
9595+ const pollInterval = 50;
9696+9797+ while (Date.now() - startTime < timeoutMs) {
9898+ const currentId = getBuildId(htmlPath);
9999+ if (currentId !== null && currentId !== previousId) {
100100+ // Small delay to let any concurrent writes settle
101101+ await new Promise((r) => setTimeout(r, 100));
102102+ return currentId;
103103+ }
104104+ await new Promise((r) => setTimeout(r, pollInterval));
105105+ }
106106+107107+ throw new Error(`Build ID did not change within ${timeoutMs}ms`);
108108+}
109109+110110+/**
111111+ * Extract the build ID from an HTML file.
112112+ */
113113+function getBuildId(htmlPath: string): string | null {
114114+ try {
115115+ const content = readFileSync(htmlPath, "utf-8");
116116+ const match = content.match(/data-build-id="(\d+)"/);
117117+ return match ? match[1] : null;
118118+ } catch {
119119+ return null;
120120+ }
121121+}
122122+123123+/**
124124+ * Check if logs indicate incremental build was used
125125+ */
126126+function isIncrementalBuild(logs: string[]): boolean {
127127+ return logs.join("\n").toLowerCase().includes("incremental build");
128128+}
129129+130130+/**
131131+ * Get the number of affected routes from logs
132132+ */
133133+function getAffectedRouteCount(logs: string[]): number {
134134+ const logsText = logs.join("\n");
135135+ const match = logsText.match(/Rebuilding (\d+) affected routes/i);
136136+ return match ? parseInt(match[1], 10) : -1;
137137+}
138138+139139+/**
140140+ * Record build IDs for all pages
141141+ */
142142+function recordBuildIds(htmlPaths: Record<string, string>): Record<string, string | null> {
143143+ const ids: Record<string, string | null> = {};
144144+ for (const [name, path] of Object.entries(htmlPaths)) {
145145+ ids[name] = getBuildId(path);
146146+ }
147147+ return ids;
148148+}
149149+150150+/**
151151+ * Trigger a change and wait for build to complete.
152152+ * Returns logs from the build.
153153+ */
154154+async function triggerAndWaitForBuild(
155155+ devServer: any,
156156+ modifyFn: () => void,
157157+ timeoutMs = 30000,
158158+): Promise<string[]> {
159159+ devServer.clearLogs();
160160+ modifyFn();
161161+ return await waitForBuildComplete(devServer, timeoutMs);
162162+}
163163+164164+/**
165165+ * Set up incremental build state by triggering two builds.
166166+ * First build establishes state, second ensures state is populated.
167167+ * Returns build IDs recorded after the second build completes and server is idle.
168168+ *
169169+ * Note: We don't assert incremental here - the actual test will verify that.
170170+ * This is because on first test run the server might still be initializing.
171171+ */
172172+async function setupIncrementalState(
173173+ devServer: any,
174174+ modifyFn: (suffix: string) => void,
175175+ htmlPaths: Record<string, string>,
176176+ expectedChangedRoute: string, // Which route we expect to change
177177+): Promise<Record<string, string | null>> {
178178+ // First change: triggers build (establishes state)
179179+ const beforeInit = getBuildId(htmlPaths[expectedChangedRoute]);
180180+ await triggerAndWaitForBuild(devServer, () => modifyFn("init"));
181181+ await waitForBuildIdChange(htmlPaths[expectedChangedRoute], beforeInit);
182182+183183+ // Second change: state should now exist for incremental builds
184184+ const beforeSetup = getBuildId(htmlPaths[expectedChangedRoute]);
185185+ await triggerAndWaitForBuild(devServer, () => modifyFn("setup"));
186186+ await waitForBuildIdChange(htmlPaths[expectedChangedRoute], beforeSetup);
187187+188188+ // Wait for server to become completely idle before recording baseline
189189+ await waitForIdle(htmlPaths);
190190+191191+ return recordBuildIds(htmlPaths);
192192+}
193193+194194+test.describe("Incremental Build", () => {
195195+ test.setTimeout(180000);
196196+197197+ const fixturePath = resolve(__dirname, "..", "fixtures", "incremental-build");
198198+199199+ // Asset paths
200200+ const assets = {
201201+ blogCss: resolve(fixturePath, "src", "assets", "blog.css"),
202202+ utilsJs: resolve(fixturePath, "src", "assets", "utils.js"),
203203+ mainJs: resolve(fixturePath, "src", "assets", "main.js"),
204204+ aboutJs: resolve(fixturePath, "src", "assets", "about.js"),
205205+ stylesCss: resolve(fixturePath, "src", "assets", "styles.css"),
206206+ logoPng: resolve(fixturePath, "src", "assets", "logo.png"),
207207+ teamPng: resolve(fixturePath, "src", "assets", "team.png"),
208208+ bgPng: resolve(fixturePath, "src", "assets", "bg.png"),
209209+ };
210210+211211+ // Content file paths (for granular content tracking tests)
212212+ const contentFiles = {
213213+ firstPost: resolve(fixturePath, "content", "articles", "first-post.md"),
214214+ secondPost: resolve(fixturePath, "content", "articles", "second-post.md"),
215215+ thirdPost: resolve(fixturePath, "content", "articles", "third-post.md"),
216216+ };
217217+218218+ // Output HTML paths
219219+ const htmlPaths = {
220220+ index: resolve(fixturePath, "dist", "index.html"),
221221+ about: resolve(fixturePath, "dist", "about", "index.html"),
222222+ blog: resolve(fixturePath, "dist", "blog", "index.html"),
223223+ articles: resolve(fixturePath, "dist", "articles", "index.html"),
224224+ articleFirst: resolve(fixturePath, "dist", "articles", "first-post", "index.html"),
225225+ articleSecond: resolve(fixturePath, "dist", "articles", "second-post", "index.html"),
226226+ articleThird: resolve(fixturePath, "dist", "articles", "third-post", "index.html"),
227227+ };
228228+229229+ // Original content storage
230230+ const originals: Record<string, string | Buffer> = {};
231231+232232+ test.beforeAll(async () => {
233233+ // Store original content for all assets we might modify
234234+ originals.blogCss = readFileSync(assets.blogCss, "utf-8");
235235+ originals.utilsJs = readFileSync(assets.utilsJs, "utf-8");
236236+ originals.mainJs = readFileSync(assets.mainJs, "utf-8");
237237+ originals.aboutJs = readFileSync(assets.aboutJs, "utf-8");
238238+ originals.stylesCss = readFileSync(assets.stylesCss, "utf-8");
239239+ originals.logoPng = readFileSync(assets.logoPng); // binary
240240+ originals.teamPng = readFileSync(assets.teamPng); // binary
241241+ originals.bgPng = readFileSync(assets.bgPng); // binary
242242+ // Content files
243243+ originals.firstPost = readFileSync(contentFiles.firstPost, "utf-8");
244244+ originals.secondPost = readFileSync(contentFiles.secondPost, "utf-8");
245245+ originals.thirdPost = readFileSync(contentFiles.thirdPost, "utf-8");
246246+ });
247247+248248+ test.afterAll(async () => {
249249+ // Restore all original content
250250+ writeFileSync(assets.blogCss, originals.blogCss);
251251+ writeFileSync(assets.utilsJs, originals.utilsJs);
252252+ writeFileSync(assets.mainJs, originals.mainJs);
253253+ writeFileSync(assets.aboutJs, originals.aboutJs);
254254+ writeFileSync(assets.stylesCss, originals.stylesCss);
255255+ writeFileSync(assets.logoPng, originals.logoPng);
256256+ writeFileSync(assets.teamPng, originals.teamPng);
257257+ writeFileSync(assets.bgPng, originals.bgPng);
258258+ // Restore content files
259259+ writeFileSync(contentFiles.firstPost, originals.firstPost);
260260+ writeFileSync(contentFiles.secondPost, originals.secondPost);
261261+ writeFileSync(contentFiles.thirdPost, originals.thirdPost);
262262+ });
263263+264264+ // ============================================================
265265+ // TEST 1: Direct CSS dependency (blog.css → /blog only)
266266+ // ============================================================
267267+ test("CSS file change rebuilds only routes using it", async ({ devServer }) => {
268268+ let testCounter = 0;
269269+270270+ function modifyFile(suffix: string) {
271271+ testCounter++;
272272+ writeFileSync(assets.blogCss, originals.blogCss + `\n/* test-${testCounter}-${suffix} */`);
273273+ }
274274+275275+ const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "blog");
276276+ expect(before.index).not.toBeNull();
277277+ expect(before.about).not.toBeNull();
278278+ expect(before.blog).not.toBeNull();
279279+280280+ // Trigger the final change and wait for build
281281+ const logs = await triggerAndWaitForBuild(devServer, () => modifyFile("final"));
282282+ await waitForBuildIdChange(htmlPaths.blog, before.blog);
283283+284284+ // Verify incremental build with 1 route
285285+ expect(isIncrementalBuild(logs)).toBe(true);
286286+ expect(getAffectedRouteCount(logs)).toBe(1);
287287+288288+ // Verify only blog was rebuilt
289289+ const after = recordBuildIds(htmlPaths);
290290+ expect(after.index).toBe(before.index);
291291+ expect(after.about).toBe(before.about);
292292+ expect(after.blog).not.toBe(before.blog);
293293+ });
294294+295295+ // ============================================================
296296+ // TEST 2: Transitive JS dependency (utils.js → main.js → /)
297297+ // ============================================================
298298+ test("transitive JS dependency change rebuilds affected routes", async ({ devServer }) => {
299299+ let testCounter = 0;
300300+301301+ function modifyFile(suffix: string) {
302302+ testCounter++;
303303+ writeFileSync(assets.utilsJs, originals.utilsJs + `\n// test-${testCounter}-${suffix}`);
304304+ }
305305+306306+ const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "index");
307307+ expect(before.index).not.toBeNull();
308308+309309+ const logs = await triggerAndWaitForBuild(devServer, () => modifyFile("final"));
310310+ await waitForBuildIdChange(htmlPaths.index, before.index);
311311+312312+ // Verify incremental build with 1 route
313313+ expect(isIncrementalBuild(logs)).toBe(true);
314314+ expect(getAffectedRouteCount(logs)).toBe(1);
315315+316316+ // Only index should be rebuilt (uses main.js which imports utils.js)
317317+ const after = recordBuildIds(htmlPaths);
318318+ expect(after.about).toBe(before.about);
319319+ expect(after.blog).toBe(before.blog);
320320+ expect(after.index).not.toBe(before.index);
321321+ });
322322+323323+ // ============================================================
324324+ // TEST 3: Direct JS entry point change (about.js → /about)
325325+ // ============================================================
326326+ test("direct JS entry point change rebuilds only routes using it", async ({ devServer }) => {
327327+ let testCounter = 0;
328328+329329+ function modifyFile(suffix: string) {
330330+ testCounter++;
331331+ writeFileSync(assets.aboutJs, originals.aboutJs + `\n// test-${testCounter}-${suffix}`);
332332+ }
333333+334334+ const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "about");
335335+ expect(before.about).not.toBeNull();
336336+337337+ const logs = await triggerAndWaitForBuild(devServer, () => modifyFile("final"));
338338+ await waitForBuildIdChange(htmlPaths.about, before.about);
339339+340340+ // Verify incremental build with 1 route
341341+ expect(isIncrementalBuild(logs)).toBe(true);
342342+ expect(getAffectedRouteCount(logs)).toBe(1);
343343+344344+ // Only about should be rebuilt
345345+ const after = recordBuildIds(htmlPaths);
346346+ expect(after.index).toBe(before.index);
347347+ expect(after.blog).toBe(before.blog);
348348+ expect(after.about).not.toBe(before.about);
349349+ });
350350+351351+ // ============================================================
352352+ // TEST 4: Shared asset change (styles.css → / AND /about)
353353+ // ============================================================
354354+ test("shared asset change rebuilds all routes using it", async ({ devServer }) => {
355355+ let testCounter = 0;
356356+357357+ function modifyFile(suffix: string) {
358358+ testCounter++;
359359+ writeFileSync(assets.stylesCss, originals.stylesCss + `\n/* test-${testCounter}-${suffix} */`);
360360+ }
361361+362362+ const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "index");
363363+ expect(before.index).not.toBeNull();
364364+ expect(before.about).not.toBeNull();
365365+366366+ const logs = await triggerAndWaitForBuild(devServer, () => modifyFile("final"));
367367+ await waitForBuildIdChange(htmlPaths.index, before.index);
368368+369369+ // Verify incremental build with 2 routes (/ and /about both use styles.css)
370370+ expect(isIncrementalBuild(logs)).toBe(true);
371371+ expect(getAffectedRouteCount(logs)).toBe(2);
372372+373373+ // Index and about should be rebuilt, blog should not
374374+ const after = recordBuildIds(htmlPaths);
375375+ expect(after.blog).toBe(before.blog);
376376+ expect(after.index).not.toBe(before.index);
377377+ expect(after.about).not.toBe(before.about);
378378+ });
379379+380380+ // ============================================================
381381+ // TEST 5: Image change (logo.png → /)
382382+ // ============================================================
383383+ test("image change rebuilds only routes using it", async ({ devServer }) => {
384384+ let testCounter = 0;
385385+386386+ function modifyFile(suffix: string) {
387387+ testCounter++;
388388+ const modified = Buffer.concat([
389389+ originals.logoPng as Buffer,
390390+ Buffer.from(`<!-- test-${testCounter}-${suffix} -->`),
391391+ ]);
392392+ writeFileSync(assets.logoPng, modified);
393393+ }
394394+395395+ const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "index");
396396+ expect(before.index).not.toBeNull();
397397+398398+ const logs = await triggerAndWaitForBuild(devServer, () => modifyFile("final"));
399399+ await waitForBuildIdChange(htmlPaths.index, before.index);
400400+401401+ // Verify incremental build with 1 route
402402+ expect(isIncrementalBuild(logs)).toBe(true);
403403+ expect(getAffectedRouteCount(logs)).toBe(1);
404404+405405+ // Only index should be rebuilt (uses logo.png)
406406+ const after = recordBuildIds(htmlPaths);
407407+ expect(after.about).toBe(before.about);
408408+ expect(after.blog).toBe(before.blog);
409409+ expect(after.index).not.toBe(before.index);
410410+ });
411411+412412+ // ============================================================
413413+ // TEST 6: Multiple files changed simultaneously
414414+ // ============================================================
415415+ test("multiple file changes rebuild union of affected routes", async ({ devServer }) => {
416416+ let testCounter = 0;
417417+418418+ function modifyFile(suffix: string) {
419419+ testCounter++;
420420+ // Change both blog.css (affects /blog) and about.js (affects /about)
421421+ writeFileSync(assets.blogCss, originals.blogCss + `\n/* test-${testCounter}-${suffix} */`);
422422+ writeFileSync(assets.aboutJs, originals.aboutJs + `\n// test-${testCounter}-${suffix}`);
423423+ }
424424+425425+ const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "blog");
426426+ expect(before.about).not.toBeNull();
427427+ expect(before.blog).not.toBeNull();
428428+429429+ const logs = await triggerAndWaitForBuild(devServer, () => modifyFile("final"));
430430+ await waitForBuildIdChange(htmlPaths.blog, before.blog);
431431+432432+ // Verify incremental build with 2 routes (/about and /blog)
433433+ expect(isIncrementalBuild(logs)).toBe(true);
434434+ expect(getAffectedRouteCount(logs)).toBe(2);
435435+436436+ // About and blog should be rebuilt, index should not
437437+ const after = recordBuildIds(htmlPaths);
438438+ expect(after.index).toBe(before.index);
439439+ expect(after.about).not.toBe(before.about);
440440+ expect(after.blog).not.toBe(before.blog);
441441+ });
442442+443443+ // ============================================================
444444+ // TEST 7: CSS url() asset dependency (bg.png via blog.css → /blog)
445445+ // ============================================================
446446+ test("CSS url() asset change triggers rebundling and rebuilds affected routes", async ({
447447+ devServer,
448448+ }) => {
449449+ let testCounter = 0;
450450+451451+ function modifyFile(suffix: string) {
452452+ testCounter++;
453453+ const modified = Buffer.concat([
454454+ originals.bgPng as Buffer,
455455+ Buffer.from(`<!-- test-${testCounter}-${suffix} -->`),
456456+ ]);
457457+ writeFileSync(assets.bgPng, modified);
458458+ }
459459+460460+ const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "blog");
461461+ expect(before.blog).not.toBeNull();
462462+463463+ const logs = await triggerAndWaitForBuild(devServer, () => modifyFile("final"));
464464+ await waitForBuildIdChange(htmlPaths.blog, before.blog);
465465+466466+ // Verify incremental build triggered
467467+ expect(isIncrementalBuild(logs)).toBe(true);
468468+469469+ // Blog should be rebuilt (uses blog.css which references bg.png via url())
470470+ const after = recordBuildIds(htmlPaths);
471471+ expect(after.blog).not.toBe(before.blog);
472472+ });
473473+474474+ // ============================================================
475475+ // TEST 8: Source file change rebuilds only routes defined in that file
476476+ // ============================================================
477477+ test("source file change rebuilds only routes defined in that file", async ({ devServer }) => {
478478+ // This test verifies that when a .rs source file changes, only routes
479479+ // defined in that file are rebuilt (via source_to_routes tracking).
480480+ //
481481+ // Flow:
482482+ // 1. Dev server starts → initial build → creates build_state.json with source file mappings
483483+ // 2. Modify about.rs → cargo recompiles → binary reruns with MAUDIT_CHANGED_FILES
484484+ // 3. New binary loads build_state.json and finds /about is affected by about.rs
485485+ // 4. Only /about route is rebuilt
486486+ //
487487+ // Note: Unlike asset changes, .rs changes require cargo recompilation.
488488+ // The binary's logs (showing "Incremental build") aren't captured by the
489489+ // dev server's log collection, so we verify behavior through build IDs.
490490+491491+ const aboutRs = resolve(fixturePath, "src", "pages", "about.rs");
492492+ const originalAboutRs = readFileSync(aboutRs, "utf-8");
493493+494494+ try {
495495+ let testCounter = 0;
496496+497497+ function modifyFile(suffix: string) {
498498+ testCounter++;
499499+ writeFileSync(aboutRs, originalAboutRs + `\n// test-${testCounter}-${suffix}`);
500500+ }
501501+502502+ const rsTimeout = 60000;
503503+504504+ // First change: triggers recompile + build (establishes build state with source_to_routes)
505505+ const beforeInit = getBuildId(htmlPaths.about);
506506+ await triggerAndWaitForBuild(devServer, () => modifyFile("init"), rsTimeout);
507507+ await waitForBuildIdChange(htmlPaths.about, beforeInit, rsTimeout);
508508+509509+ // Record build IDs - state now exists with source_to_routes mappings
510510+ const before = recordBuildIds(htmlPaths);
511511+ expect(before.index).not.toBeNull();
512512+ expect(before.about).not.toBeNull();
513513+ expect(before.blog).not.toBeNull();
514514+515515+ // Second change: should do incremental build (only about.rs route)
516516+ await triggerAndWaitForBuild(devServer, () => modifyFile("final"), rsTimeout);
517517+ await waitForBuildIdChange(htmlPaths.about, before.about, rsTimeout);
518518+519519+ // Verify only /about was rebuilt (it's defined in about.rs)
520520+ const after = recordBuildIds(htmlPaths);
521521+ expect(after.index).toBe(before.index);
522522+ expect(after.blog).toBe(before.blog);
523523+ expect(after.about).not.toBe(before.about);
524524+525525+ } finally {
526526+ // Restore original content and wait for build to complete
527527+ const beforeRestore = getBuildId(htmlPaths.about);
528528+ writeFileSync(aboutRs, originalAboutRs);
529529+ try {
530530+ await waitForBuildIdChange(htmlPaths.about, beforeRestore, 60000);
531531+ } catch {
532532+ // Restoration build may not always complete, that's ok
533533+ }
534534+ }
535535+ });
536536+537537+ // ============================================================
538538+ // TEST 9: include_str! file change triggers full rebuild (untracked file)
539539+ // ============================================================
540540+ test("include_str file change triggers full rebuild", async ({ devServer }) => {
541541+ // This test verifies that changing a file referenced by include_str!()
542542+ // triggers cargo recompilation and a FULL rebuild (all routes).
543543+ //
544544+ // Setup: about.rs uses include_str!("../assets/about-content.txt")
545545+ // The .d file from cargo includes this dependency, so the dependency tracker
546546+ // knows that changing about-content.txt requires recompilation.
547547+ //
548548+ // Flow:
549549+ // 1. Dev server starts → initial build
550550+ // 2. Modify about-content.txt → cargo recompiles (because .d file tracks it)
551551+ // 3. Binary runs with MAUDIT_CHANGED_FILES pointing to about-content.txt
552552+ // 4. Since about-content.txt is NOT in source_to_routes or asset_to_routes,
553553+ // it's an "untracked file" and triggers a full rebuild of all routes
554554+ //
555555+ // This is the correct safe behavior - we don't know which route uses the
556556+ // include_str! file, so we rebuild everything to ensure correctness.
557557+558558+ const contentFile = resolve(fixturePath, "src", "assets", "about-content.txt");
559559+ const originalContent = readFileSync(contentFile, "utf-8");
560560+ const rsTimeout = 60000;
561561+562562+ try {
563563+ let testCounter = 0;
564564+565565+ function modifyFile(suffix: string) {
566566+ testCounter++;
567567+ writeFileSync(contentFile, originalContent + `\n<!-- test-${testCounter}-${suffix} -->`);
568568+ }
569569+570570+ // First change: triggers recompile + full build (establishes build state)
571571+ const beforeInit = getBuildId(htmlPaths.about);
572572+ await triggerAndWaitForBuild(devServer, () => modifyFile("init"), rsTimeout);
573573+ await waitForBuildIdChange(htmlPaths.about, beforeInit, rsTimeout);
574574+575575+ // Record build IDs before the final change
576576+ const before = recordBuildIds(htmlPaths);
577577+ expect(before.index).not.toBeNull();
578578+ expect(before.about).not.toBeNull();
579579+ expect(before.blog).not.toBeNull();
580580+581581+ // Trigger the content file change with unique content to verify
582582+ devServer.clearLogs();
583583+ writeFileSync(contentFile, originalContent + "\nUpdated content!");
584584+ await waitForBuildComplete(devServer, rsTimeout);
585585+ await waitForBuildIdChange(htmlPaths.about, before.about, rsTimeout);
586586+587587+ // All routes should be rebuilt (full rebuild due to untracked file)
588588+ const after = recordBuildIds(htmlPaths);
589589+ expect(after.index).not.toBe(before.index);
590590+ expect(after.about).not.toBe(before.about);
591591+ expect(after.blog).not.toBe(before.blog);
592592+593593+ // Verify the content was actually updated in the output
594594+ const aboutHtml = readFileSync(htmlPaths.about, "utf-8");
595595+ expect(aboutHtml).toContain("Updated content!");
596596+597597+ } finally {
598598+ // Restore original content and wait for build to complete
599599+ const beforeRestore = getBuildId(htmlPaths.about);
600600+ writeFileSync(contentFile, originalContent);
601601+ try {
602602+ await waitForBuildIdChange(htmlPaths.about, beforeRestore, 60000);
603603+ } catch {
604604+ // Restoration build may not always complete, that's ok
605605+ }
606606+ }
607607+ });
608608+609609+ // ============================================================
610610+ // TEST 10: Folder rename detection
611611+ // ============================================================
612612+ test("folder rename is detected and affects routes using assets in that folder", async ({ devServer }) => {
613613+ // This test verifies that renaming a folder containing tracked assets
614614+ // is detected by the file watcher and affects the correct routes.
615615+ //
616616+ // Setup: The blog page uses src/assets/icons/blog-icon.css
617617+ // Test: Rename icons -> icons-renamed, verify the blog route is identified as affected
618618+ //
619619+ // Note: The actual build will fail because the asset path becomes invalid,
620620+ // but this test verifies that the DETECTION and ROUTE MATCHING work correctly.
621621+622622+ const iconsFolder = resolve(fixturePath, "src", "assets", "icons");
623623+ const renamedFolder = resolve(fixturePath, "src", "assets", "icons-renamed");
624624+ const iconFile = resolve(iconsFolder, "blog-icon.css");
625625+626626+ // Ensure we start with the correct state
627627+ if (existsSync(renamedFolder)) {
628628+ renameSync(renamedFolder, iconsFolder);
629629+ // Wait briefly for any triggered build to start
630630+ await new Promise((resolve) => setTimeout(resolve, 500));
631631+ }
632632+633633+ expect(existsSync(iconsFolder)).toBe(true);
634634+ expect(existsSync(iconFile)).toBe(true);
635635+636636+ const originalContent = readFileSync(iconFile, "utf-8");
637637+638638+ try {
639639+ let testCounter = 0;
640640+641641+ function modifyFile(suffix: string) {
642642+ testCounter++;
643643+ writeFileSync(iconFile, originalContent + `\n/* test-${testCounter}-${suffix} */`);
644644+ }
645645+646646+ // Use setupIncrementalState to establish tracking
647647+ const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "blog");
648648+ expect(before.blog).not.toBeNull();
649649+650650+ // Clear logs for the actual test
651651+ devServer.clearLogs();
652652+653653+ // Rename icons -> icons-renamed
654654+ renameSync(iconsFolder, renamedFolder);
655655+656656+ // Wait for the build to be attempted (it will fail because path is now invalid)
657657+ const startTime = Date.now();
658658+ const timeoutMs = 15000;
659659+ let logs: string[] = [];
660660+661661+ while (Date.now() - startTime < timeoutMs) {
662662+ logs = devServer.getLogs(100);
663663+ const logsText = logs.join("\n");
664664+665665+ // Wait for either success or failure indication
666666+ if (logsText.includes("finished") || logsText.includes("failed") || logsText.includes("error")) {
667667+ break;
668668+ }
669669+670670+ await new Promise((resolve) => setTimeout(resolve, 100));
671671+ }
672672+673673+ logs = devServer.getLogs(100);
674674+ const logsText = logs.join("\n");
675675+676676+ // Key assertions: verify the detection and route matching worked
677677+ // 1. The folder paths should be in changed files
678678+ expect(logsText).toContain("icons");
679679+680680+ // 2. The blog route should be identified as affected
681681+ expect(logsText).toContain("Rebuilding 1 affected routes");
682682+ expect(logsText).toContain("/blog");
683683+684684+ // 3. Other routes should NOT be affected (index and about don't use icons/)
685685+ expect(logsText).not.toContain("/about");
686686+687687+ } finally {
688688+ // Restore: rename icons-renamed back to icons
689689+ if (existsSync(renamedFolder) && !existsSync(iconsFolder)) {
690690+ renameSync(renamedFolder, iconsFolder);
691691+ }
692692+ // Restore original content and wait for build
693693+ if (existsSync(iconFile)) {
694694+ const beforeRestore = getBuildId(htmlPaths.blog);
695695+ writeFileSync(iconFile, originalContent);
696696+ try {
697697+ await waitForBuildIdChange(htmlPaths.blog, beforeRestore, 30000);
698698+ } catch {
699699+ // Restoration build may not always complete, that's ok
700700+ }
701701+ }
702702+ }
703703+ });
704704+705705+ // ============================================================
706706+ // TEST 11: Shared Rust module change triggers full rebuild
707707+ // ============================================================
708708+ test("shared Rust module change triggers full rebuild", async ({ devServer }) => {
709709+ // This test verifies that changing a shared Rust module (not a route file)
710710+ // triggers a full rebuild of all routes.
711711+ //
712712+ // Setup: helpers.rs contains shared functions used by about.rs
713713+ // The helpers.rs file is not tracked in source_to_routes (only route files are)
714714+ // so it's treated as an "untracked file" which triggers a full rebuild.
715715+ //
716716+ // This is the correct safe behavior - we can't determine which routes
717717+ // depend on the shared module, so we rebuild everything.
718718+719719+ const helpersRs = resolve(fixturePath, "src", "pages", "helpers.rs");
720720+ const originalContent = readFileSync(helpersRs, "utf-8");
721721+ const rsTimeout = 60000;
722722+723723+ try {
724724+ let testCounter = 0;
725725+726726+ function modifyFile(suffix: string) {
727727+ testCounter++;
728728+ writeFileSync(helpersRs, originalContent + `\n// test-${testCounter}-${suffix}`);
729729+ }
730730+731731+ // First change: triggers recompile + full build (establishes build state)
732732+ const beforeInit = getBuildId(htmlPaths.index);
733733+ await triggerAndWaitForBuild(devServer, () => modifyFile("init"), rsTimeout);
734734+ await waitForBuildIdChange(htmlPaths.index, beforeInit, rsTimeout);
735735+736736+ // Record build IDs before the final change
737737+ const before = recordBuildIds(htmlPaths);
738738+ expect(before.index).not.toBeNull();
739739+ expect(before.about).not.toBeNull();
740740+ expect(before.blog).not.toBeNull();
741741+742742+ // Trigger the shared module change
743743+ await triggerAndWaitForBuild(devServer, () => modifyFile("final"), rsTimeout);
744744+ await waitForBuildIdChange(htmlPaths.index, before.index, rsTimeout);
745745+746746+ // All routes should be rebuilt (full rebuild due to untracked shared module)
747747+ const after = recordBuildIds(htmlPaths);
748748+ expect(after.index).not.toBe(before.index);
749749+ expect(after.about).not.toBe(before.about);
750750+ expect(after.blog).not.toBe(before.blog);
751751+752752+ } finally {
753753+ // Restore original content and wait for build to complete
754754+ const beforeRestore = getBuildId(htmlPaths.index);
755755+ writeFileSync(helpersRs, originalContent);
756756+ try {
757757+ await waitForBuildIdChange(htmlPaths.index, beforeRestore, 60000);
758758+ } catch {
759759+ // Restoration build may not always complete, that's ok
760760+ }
761761+ }
762762+ });
763763+764764+ // ============================================================
765765+ // TEST 12: Content file change rebuilds only routes accessing that specific file
766766+ // ============================================================
767767+ test("content file change rebuilds only routes accessing that file (granular tracking)", async ({ devServer }) => {
768768+ // This test verifies granular content file tracking.
769769+ //
770770+ // Setup:
771771+ // - /articles/first-post uses get_entry("first-post") → tracks only first-post.md
772772+ // - /articles/second-post uses get_entry("second-post") → tracks only second-post.md
773773+ // - /articles (list) uses entries() → tracks ALL content files
774774+ //
775775+ // When we change first-post.md:
776776+ // - /articles/first-post should be rebuilt (directly uses this file)
777777+ // - /articles should be rebuilt (uses entries() which tracks all files)
778778+ // - /articles/second-post should NOT be rebuilt (uses different file)
779779+ // - /articles/third-post should NOT be rebuilt (uses different file)
780780+ // - Other routes (/, /about, /blog) should NOT be rebuilt
781781+782782+ let testCounter = 0;
783783+784784+ function modifyFile(suffix: string) {
785785+ testCounter++;
786786+ const newContent = (originals.firstPost as string).replace(
787787+ "first post",
788788+ `first post - test-${testCounter}-${suffix}`
789789+ );
790790+ writeFileSync(contentFiles.firstPost, newContent);
791791+ }
792792+793793+ // Setup: establish incremental state
794794+ const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "articleFirst");
795795+ expect(before.articleFirst).not.toBeNull();
796796+ expect(before.articleSecond).not.toBeNull();
797797+ expect(before.articles).not.toBeNull();
798798+799799+ // Trigger the final change
800800+ const logs = await triggerAndWaitForBuild(devServer, () => modifyFile("final"));
801801+ await waitForBuildIdChange(htmlPaths.articleFirst, before.articleFirst);
802802+803803+ // Verify incremental build occurred
804804+ expect(isIncrementalBuild(logs)).toBe(true);
805805+806806+ // Check which routes were rebuilt
807807+ const after = recordBuildIds(htmlPaths);
808808+809809+ // Routes that should NOT be rebuilt (don't access first-post.md)
810810+ expect(after.index).toBe(before.index);
811811+ expect(after.about).toBe(before.about);
812812+ expect(after.blog).toBe(before.blog);
813813+ expect(after.articleSecond).toBe(before.articleSecond);
814814+ expect(after.articleThird).toBe(before.articleThird);
815815+816816+ // Routes that SHOULD be rebuilt (access first-post.md)
817817+ expect(after.articleFirst).not.toBe(before.articleFirst);
818818+ expect(after.articles).not.toBe(before.articles); // Uses entries() which tracks all files
819819+ });
820820+821821+ // ============================================================
822822+ // TEST 13: Different content file changes rebuild different routes
823823+ // ============================================================
824824+ test("different content files trigger rebuilds of different routes", async ({ devServer }) => {
825825+ // This test verifies that changing different content files rebuilds
826826+ // different sets of routes, proving granular tracking works.
827827+ //
828828+ // Change second-post.md:
829829+ // - /articles/second-post should be rebuilt
830830+ // - /articles (list) should be rebuilt (entries() tracks all)
831831+ // - /articles/first-post and /articles/third-post should NOT be rebuilt
832832+833833+ let testCounter = 0;
834834+835835+ function modifyFile(suffix: string) {
836836+ testCounter++;
837837+ const newContent = (originals.secondPost as string).replace(
838838+ "second post",
839839+ `second post - test-${testCounter}-${suffix}`
840840+ );
841841+ writeFileSync(contentFiles.secondPost, newContent);
842842+ }
843843+844844+ // Setup: establish incremental state
845845+ const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "articleSecond");
846846+ expect(before.articleFirst).not.toBeNull();
847847+ expect(before.articleSecond).not.toBeNull();
848848+ expect(before.articleThird).not.toBeNull();
849849+ expect(before.articles).not.toBeNull();
850850+851851+ // Trigger the final change
852852+ const logs = await triggerAndWaitForBuild(devServer, () => modifyFile("final"));
853853+ await waitForBuildIdChange(htmlPaths.articleSecond, before.articleSecond);
854854+855855+ // Verify incremental build occurred
856856+ expect(isIncrementalBuild(logs)).toBe(true);
857857+858858+ // Check which routes were rebuilt
859859+ const after = recordBuildIds(htmlPaths);
860860+861861+ // Routes that should NOT be rebuilt
862862+ expect(after.index).toBe(before.index);
863863+ expect(after.about).toBe(before.about);
864864+ expect(after.blog).toBe(before.blog);
865865+ expect(after.articleFirst).toBe(before.articleFirst);
866866+ expect(after.articleThird).toBe(before.articleThird);
867867+868868+ // Routes that SHOULD be rebuilt
869869+ expect(after.articleSecond).not.toBe(before.articleSecond);
870870+ expect(after.articles).not.toBe(before.articles);
871871+ });
872872+873873+ // ============================================================
874874+ // TEST 14: Multiple content files changed rebuilds union of affected routes
875875+ // ============================================================
876876+ test("multiple content file changes rebuild union of affected routes", async ({ devServer }) => {
877877+ // This test verifies that changing multiple content files correctly
878878+ // rebuilds the union of all routes that access any of the changed files.
879879+ //
880880+ // Change both first-post.md and third-post.md simultaneously:
881881+ // - /articles/first-post should be rebuilt
882882+ // - /articles/third-post should be rebuilt
883883+ // - /articles (list) should be rebuilt
884884+ // - /articles/second-post should NOT be rebuilt
885885+886886+ let testCounter = 0;
887887+888888+ function modifyFile(suffix: string) {
889889+ testCounter++;
890890+ // Change both first and third posts
891891+ const newFirst = (originals.firstPost as string).replace(
892892+ "first post",
893893+ `first post - multi-${testCounter}-${suffix}`
894894+ );
895895+ const newThird = (originals.thirdPost as string).replace(
896896+ "third post",
897897+ `third post - multi-${testCounter}-${suffix}`
898898+ );
899899+ writeFileSync(contentFiles.firstPost, newFirst);
900900+ writeFileSync(contentFiles.thirdPost, newThird);
901901+ }
902902+903903+ // Setup: establish incremental state
904904+ const before = await setupIncrementalState(devServer, modifyFile, htmlPaths, "articleFirst");
905905+ expect(before.articleFirst).not.toBeNull();
906906+ expect(before.articleSecond).not.toBeNull();
907907+ expect(before.articleThird).not.toBeNull();
908908+ expect(before.articles).not.toBeNull();
909909+910910+ // Trigger the final change
911911+ const logs = await triggerAndWaitForBuild(devServer, () => modifyFile("final"));
912912+ await waitForBuildIdChange(htmlPaths.articleFirst, before.articleFirst);
913913+914914+ // Verify incremental build occurred
915915+ expect(isIncrementalBuild(logs)).toBe(true);
916916+917917+ // Check which routes were rebuilt
918918+ const after = recordBuildIds(htmlPaths);
919919+920920+ // Routes that should NOT be rebuilt
921921+ expect(after.index).toBe(before.index);
922922+ expect(after.about).toBe(before.about);
923923+ expect(after.blog).toBe(before.blog);
924924+ expect(after.articleSecond).toBe(before.articleSecond);
925925+926926+ // Routes that SHOULD be rebuilt
927927+ expect(after.articleFirst).not.toBe(before.articleFirst);
928928+ expect(after.articleThird).not.toBe(before.articleThird);
929929+ expect(after.articles).not.toBe(before.articles);
930930+ });
931931+932932+ // ============================================================
933933+ // TEST 15: Full rebuild from untracked file properly initializes content sources
934934+ // ============================================================
935935+ test("full rebuild from untracked file properly initializes content sources", async ({ devServer }) => {
936936+ // This test verifies that when an untracked Rust file (like helpers.rs) changes,
937937+ // triggering a full rebuild (routes_to_rebuild = None), content sources are
938938+ // still properly initialized.
939939+ //
940940+ // This was a bug where the code checked `is_incremental` instead of
941941+ // `routes_to_rebuild.is_some()`, causing content sources to not be initialized
942942+ // during full rebuilds triggered by untracked file changes.
943943+ //
944944+ // Setup:
945945+ // - helpers.rs is a shared module not tracked in source_to_routes
946946+ // - Changing it triggers routes_to_rebuild = None (full rebuild)
947947+ // - Routes like /articles/* use content from the "articles" content source
948948+ // - If content sources aren't initialized, the build would crash
949949+ //
950950+ // This test:
951951+ // 1. First modifies a content file to ensure specific content exists
952952+ // 2. Then modifies helpers.rs to trigger a full rebuild
953953+ // 3. Verifies the content-using routes are properly built with correct content
954954+955955+ const helpersRs = resolve(fixturePath, "src", "pages", "helpers.rs");
956956+ const originalHelpersRs = readFileSync(helpersRs, "utf-8");
957957+ const rsTimeout = 60000;
958958+959959+ try {
960960+ // Step 1: Modify content file to set up specific content we can verify
961961+ const testMarker = `CONTENT-INIT-TEST-${Date.now()}`;
962962+ const newContent = (originals.firstPost as string).replace(
963963+ "first post",
964964+ `first post - ${testMarker}`
965965+ );
966966+ writeFileSync(contentFiles.firstPost, newContent);
967967+968968+ // Wait for the content change to be processed
969969+ const beforeContent = getBuildId(htmlPaths.articleFirst);
970970+ await waitForBuildComplete(devServer, rsTimeout);
971971+ await waitForBuildIdChange(htmlPaths.articleFirst, beforeContent, rsTimeout);
972972+973973+ // Verify the content was updated
974974+ let articleHtml = readFileSync(htmlPaths.articleFirst, "utf-8");
975975+ expect(articleHtml).toContain(testMarker);
976976+977977+ // Record build IDs before the helpers.rs change
978978+ const before = recordBuildIds(htmlPaths);
979979+ expect(before.articleFirst).not.toBeNull();
980980+ expect(before.articles).not.toBeNull();
981981+982982+ // Step 2: Modify helpers.rs to trigger full rebuild
983983+ // This is an untracked file, so it triggers routes_to_rebuild = None
984984+ devServer.clearLogs();
985985+ writeFileSync(helpersRs, originalHelpersRs + `\n// content-init-test-${Date.now()}`);
986986+987987+ await waitForBuildComplete(devServer, rsTimeout);
988988+ await waitForBuildIdChange(htmlPaths.articleFirst, before.articleFirst, rsTimeout);
989989+990990+ // Step 3: Verify the build succeeded and content is still correct
991991+ // If content sources weren't initialized, this would fail or crash
992992+ const after = recordBuildIds(htmlPaths);
993993+994994+ // All routes should be rebuilt (full rebuild)
995995+ expect(after.index).not.toBe(before.index);
996996+ expect(after.about).not.toBe(before.about);
997997+ expect(after.blog).not.toBe(before.blog);
998998+ expect(after.articleFirst).not.toBe(before.articleFirst);
999999+ expect(after.articles).not.toBe(before.articles);
10001000+10011001+ // Most importantly: verify the content-using routes have correct content
10021002+ // This proves content sources were properly initialized during the full rebuild
10031003+ articleHtml = readFileSync(htmlPaths.articleFirst, "utf-8");
10041004+ expect(articleHtml).toContain(testMarker);
10051005+10061006+ // Also verify the articles list page works (uses entries())
10071007+ const articlesHtml = readFileSync(htmlPaths.articles, "utf-8");
10081008+ expect(articlesHtml).toContain("First Post");
10091009+10101010+ } finally {
10111011+ // Restore original content
10121012+ writeFileSync(helpersRs, originalHelpersRs);
10131013+ writeFileSync(contentFiles.firstPost, originals.firstPost as string);
10141014+10151015+ // Wait for restoration build
10161016+ const beforeRestore = getBuildId(htmlPaths.articleFirst);
10171017+ try {
10181018+ await waitForBuildIdChange(htmlPaths.articleFirst, beforeRestore, 60000);
10191019+ } catch {
10201020+ // The restoration build may not always complete; that's fine.
10211021+ }
10221022+ }
10231023+ });
10241024+});
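
The bug exercised by test 15 comes down to a single gating decision in the rebuild path: when to (re)initialize content sources. The sketch below is illustrative only; `ContentSources`, `prepare_build`, and the exact signatures are invented for the example and mirror the test comment above, not maudit's actual internals.

```rust
// Illustrative sketch only; all names and types here are hypothetical.
struct ContentSources {
    initialized: bool,
}

impl ContentSources {
    fn init(&mut self) {
        // The real build would glob content directories, parse markdown, etc.
        self.initialized = true;
    }
}

fn prepare_build(
    sources: &mut ContentSources,
    routes_to_rebuild: Option<&[String]>,
    is_incremental: bool,
) {
    // Buggy gate (as described in the test comment): skipping initialization
    // whenever the session is incremental also skips it for a full rebuild
    // triggered by an untracked file, because `is_incremental` stays true
    // while `routes_to_rebuild` is `None`.
    //
    //     if !is_incremental { sources.init(); }
    //
    // Fixed gate: a full rebuild (`routes_to_rebuild == None`) always
    // re-initializes content sources; only a targeted route rebuild may
    // reuse the already-initialized ones.
    if routes_to_rebuild.is_none() || !sources.initialized {
        sources.init();
    }
    let _ = is_incremental;
}

fn main() {
    let mut sources = ContentSources { initialized: false };
    // Dev-server session, untracked helpers.rs changed: incremental session,
    // but a full rebuild with no specific routes to rebuild.
    prepare_build(&mut sources, None, true);
    assert!(sources.initialized);
}
```

The e2e test asserts the observable consequence of this gate (content-backed routes such as `/articles/*` still render the expected markdown after a `helpers.rs` change) rather than the gate itself.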
+3-1
e2e/tests/prefetch.spec.ts
···11-import { test, expect } from "./test-utils";
11+import { createTestWithFixture, expect } from "./test-utils";
22import { prefetchScript } from "./utils";
33+44+const test = createTestWithFixture("prefetch-prerender");
3546test.describe("Prefetch", () => {
57 test("should create prefetch via speculation rules on Chromium or link element elsewhere", async ({
+3-1
e2e/tests/prerender.spec.ts
···11-import { test, expect } from "./test-utils";
11+import { createTestWithFixture, expect } from "./test-utils";
22import { prefetchScript } from "./utils";
33+44+const test = createTestWithFixture("prefetch-prerender");
3546test.describe("Prefetch - Speculation Rules (Prerender)", () => {
57 test("should create speculation rules on Chromium or link prefetch elsewhere when prerender is enabled", async ({
+117-26
e2e/tests/test-utils.ts
···11-import { spawn, execFile, type ChildProcess } from "node:child_process";
22-import { join, resolve, dirname } from "node:path";
11+import { spawn } from "node:child_process";
22+import { resolve, dirname } from "node:path";
33import { existsSync } from "node:fs";
44import { fileURLToPath } from "node:url";
55import { test as base } from "@playwright/test";
···2323 port: number;
2424 /** Stop the dev server */
2525 stop: () => Promise<void>;
2626+ /** Get recent log output (last N lines) */
2727+ getLogs: (lines?: number) => string[];
2828+ /** Clear captured logs */
2929+ clearLogs: () => void;
2630}
27312832/**
···5256 const childProcess = spawn(command, args, {
5357 cwd: fixturePath,
5458 stdio: ["ignore", "pipe", "pipe"],
5959+ env: {
6060+ ...process.env,
6161+ // Show binary output for tests so we can verify incremental build logs
6262+ MAUDIT_SHOW_BINARY_OUTPUT: "1",
6363+ },
5564 });
56655766 // Capture output to detect when server is ready
5867 let serverReady = false;
6868+ const capturedLogs: string[] = [];
59696070 const outputPromise = new Promise<number>((resolve, reject) => {
6171 const timeout = setTimeout(() => {
6262- reject(new Error("Dev server did not start within 30 seconds"));
6363- }, 30000);
7272+ console.error("[test-utils] Dev server startup timeout. Recent logs:");
7373+ console.error(capturedLogs.slice(-20).join("\n"));
7474+ reject(new Error("Dev server did not start within 120 seconds"));
7575+ }, 120000); // Increased to 120 seconds for CI
64766577 childProcess.stdout?.on("data", (data: Buffer) => {
6678 const output = data.toString();
7979+ // Capture all stdout logs
8080+ output
8181+ .split("\n")
8282+ .filter((line) => line.trim())
8383+ .forEach((line) => {
8484+ capturedLogs.push(line);
8585+ });
67866887 // Look for "waiting for requests" to know server is ready
6988 if (output.includes("waiting for requests")) {
···7594 });
76957796 childProcess.stderr?.on("data", (data: Buffer) => {
7878- // Only log errors, not all stderr output
7997 const output = data.toString();
9898+ // Capture all stderr logs
9999+ output
100100+ .split("\n")
101101+ .filter((line) => line.trim())
102102+ .forEach((line) => {
103103+ capturedLogs.push(line);
104104+ });
105105+106106+ // Only log errors to console, not all stderr output
80107 if (output.toLowerCase().includes("error")) {
81108 console.error(`[maudit dev] ${output}`);
82109 }
···113140 }, 5000);
114141 });
115142 },
143143+ getLogs: (lines?: number) => {
144144+ if (lines) {
145145+ return capturedLogs.slice(-lines);
146146+ }
147147+ return [...capturedLogs];
148148+ },
149149+ clearLogs: () => {
150150+ capturedLogs.length = 0;
151151+ },
116152 };
117153}
118154···136172}
137173138174// Worker-scoped server pool - one server per worker, shared across all tests in that worker
139139-const workerServers = new Map<number, DevServer>();
175175+// Key format: "workerIndex-fixtureName"
176176+const workerServers = new Map<string, DevServer>();
177177+178178+// Track used ports to avoid collisions
179179+const usedPorts = new Set<number>();
180180+181181+/**
182182+ * Generate a deterministic port offset based on fixture name.
183183+ * This gives each fixture a distinct port range, reducing collisions
184184+ * when multiple fixtures run on the same worker.
185185+ */
186186+function getFixturePortOffset(fixtureName: string): number {
187187+ // Simple hash function to get a number from the fixture name
188188+ let hash = 0;
189189+ for (let i = 0; i < fixtureName.length; i++) {
190190+ const char = fixtureName.charCodeAt(i);
191191+ hash = (hash << 5) - hash + char;
192192+ hash = hash & hash; // Convert to 32bit integer
193193+ }
194194+ // Use modulo to keep the offset reasonable (0-99)
195195+ return Math.abs(hash) % 100;
196196+}
197197+198198+/**
199199+ * Find an available port starting from the preferred port.
200200+ */
201201+function findAvailablePort(preferredPort: number): number {
202202+ let port = preferredPort;
203203+ while (usedPorts.has(port)) {
204204+ port++;
205205+ }
206206+ usedPorts.add(port);
207207+ return port;
208208+}
140209141141-// Extend Playwright's test with a devServer fixture
142142-export const test = base.extend<{ devServer: DevServer }>({
143143- devServer: async ({}, use, testInfo) => {
144144- // Use worker index to get or create a server for this worker
145145- const workerIndex = testInfo.workerIndex;
210210+/**
211211+ * Create a test instance with a devServer fixture for a specific fixture.
212212+ * This allows each test file to use a different fixture while sharing the same pattern.
213213+ *
214214+ * @param fixtureName - Name of the fixture directory under e2e/fixtures/
215215+ * @param basePort - Starting port number (default: 1864). Each fixture gets a unique port based on its name.
216216+ *
217217+ * @example
218218+ * ```ts
219219+ * import { createTestWithFixture } from "./test-utils";
220220+ * const test = createTestWithFixture("my-fixture");
221221+ *
222222+ * test("my test", async ({ devServer }) => {
223223+ * // devServer is automatically started for "my-fixture"
224224+ * });
225225+ * ```
226226+ */
227227+export function createTestWithFixture(fixtureName: string, basePort = 1864) {
228228+ return base.extend<{ devServer: DevServer }>({
229229+ // oxlint-disable-next-line no-empty-pattern
230230+ devServer: async ({}, use, testInfo) => {
231231+ // Use worker index to get or create a server for this worker
232232+ const workerIndex = testInfo.workerIndex;
233233+ const serverKey = `${workerIndex}-${fixtureName}`;
146234147147- let server = workerServers.get(workerIndex);
235235+ let server = workerServers.get(serverKey);
148236149149- if (!server) {
150150- // Assign unique port based on worker index
151151- const port = 1864 + workerIndex;
237237+ if (!server) {
238238+ // Calculate port based on fixture name hash + worker index to avoid collisions
239239+ const fixtureOffset = getFixturePortOffset(fixtureName);
240240+ const preferredPort = basePort + workerIndex * 100 + fixtureOffset;
241241+ const port = findAvailablePort(preferredPort);
152242153153- server = await startDevServer({
154154- fixture: "prefetch-prerender",
155155- port,
156156- });
243243+ server = await startDevServer({
244244+ fixture: fixtureName,
245245+ port,
246246+ });
157247158158- workerServers.set(workerIndex, server);
159159- }
248248+ workerServers.set(serverKey, server);
249249+ }
160250161161- await use(server);
251251+ await use(server);
162252163163- // Don't stop the server here - it stays alive for all tests in this worker
164164- // Playwright will clean up when the worker exits
165165- },
166166-});
253253+ // Don't stop the server here - it stays alive for all tests in this worker
254254+ // Playwright will clean up when the worker exits
255255+ },
256256+ });
257257+}
167258168259export { expect } from "@playwright/test";
+1-1
e2e/tests/utils.ts
···44// Find the actual prefetch bundle file (hash changes on each build)
55const distDir = join(process.cwd(), "../crates/maudit/js/dist");
66const prefetchFile = readdirSync(distDir).find(
77- (f) => f.startsWith("prefetch-") && f.endsWith(".js"),
77+ (f) => f.startsWith("prefetch") && f.endsWith(".js"),
88);
99if (!prefetchFile) throw new Error("Could not find prefetch bundle");
1010
···9696impl Route for ImagePage {
9797 fn render(&self, ctx: &mut PageContext) -> impl Into<RenderResult> {
9898 let image = ctx.assets.add_image("path/to/image.jpg")?;
9999- let placeholder = image.placeholder();
9999+ let placeholder = image.placeholder()?;
100100101101 Ok(format!("<img src=\"{}\" alt=\"Image with placeholder\" style=\"background-image: url('{}'); background-size: cover;\" />", image.url(), placeholder.data_uri()))
102102 }
+1-1
website/content/docs/prefetching.md
···49495050Note that prerendering, unlike prefetching, may require rethinking how the JavaScript on your pages works, as it'll run JavaScript from pages that the user hasn't visited yet. For example, this might result in analytics reporting incorrect page views.
51515252-## Possible risks
5252+## Possible risks
53535454Prefetching pages on static websites is generally safe. In more traditional apps, an issue can arise if your pages cause side effects on the server. For instance, if you were to prefetch `/logout`, your user might get logged out on hover, or worse, as soon as the logout link appears in the viewport. It is generally not recommended to have links cause such side effects anyway, which reduces the risk of this happening.
5555
+2-2
website/content/news/2026-in-the-cursed-lands.md
···5555impl Route for ImagePage {
5656 fn render(&self, ctx: &mut PageContext) -> impl Into<RenderResult> {
5757 let image = ctx.assets.add_image("path/to/image.jpg")?;
5858- let placeholder = image.placeholder();
5858+ let placeholder = image.placeholder()?;
59596060 Ok(format!("<img src=\"{}\" alt=\"Image with placeholder\" style=\"background-image: url('{}'); background-size: cover;\" />", image.url(), placeholder.data_uri()))
6161 }
···70707171### Shortcodes
72727373-Embedding a YouTube video typically means copying a long, ugly iframe tag and configuring several attributes to ensure proper rendering. It'd be nice to have something friendlier, a code that would be short, you will.
7373+Embedding a YouTube video typically means copying a long, ugly iframe tag and configuring several attributes to ensure proper rendering. It'd be nice to have something friendlier, a code that would be short, if you will.
74747575```md
7676Here's my cool video:
+2-2
website/src/layout/docs_sidebars.rs
···11-use maud::{Markup, html};
11+use maud::{html, Markup};
22use maudit::{
33 content::MarkdownHeading,
44 route::{PageContext, RouteExt},
···14141515 let mut sections = std::collections::HashMap::new();
16161717- for entry in content.entries.iter() {
1717+ for entry in content.entries() {
1818 if let Some(section) = &entry.data(ctx).section {
1919 sections.entry(section).or_insert_with(Vec::new).push(entry);
2020 }
+3-3
website/src/routes/news.rs
···11use chrono::Datelike;
22-use maud::PreEscaped;
32use maud::html;
33+use maud::PreEscaped;
44use maudit::route::prelude::*;
55use std::collections::BTreeMap;
6677use crate::content::NewsContent;
88-use crate::layout::SeoMeta;
98use crate::layout::layout;
99+use crate::layout::SeoMeta;
10101111#[route("/news/")]
1212pub struct NewsIndex;
···1818 // Group articles by year
1919 let mut articles_by_year: BTreeMap<String, Vec<_>> = BTreeMap::new();
20202121- for article in &content.entries {
2121+ for article in content.entries() {
2222 let year = article.data(ctx).date.year().to_string();
2323 articles_by_year
2424 .entry(year)
+1-1
xtask/Cargo.toml
···55publish = false
6677[dependencies]
88-rolldown = { package = "brk_rolldown", version = "0.2.3" }
88+rolldown = { package = "brk_rolldown", version = "0.8.0" }
99tokio = { version = "1", features = ["rt"] }