+1
-1
.github/workflows/benchmark.yaml
+4
-4
.github/workflows/ci.yaml
···
38
38
uses: actions/setup-node@v4
39
39
with:
40
40
node-version: latest
41
-
cache: 'pnpm'
41
+
cache: "pnpm"
42
42
43
43
- name: Install dependencies
44
44
run: pnpm install
···
66
66
uses: actions/setup-node@v4
67
67
with:
68
68
node-version: latest
69
-
cache: 'pnpm'
69
+
cache: "pnpm"
70
70
71
71
- name: Install dependencies
72
72
run: pnpm install
···
94
94
uses: actions/setup-node@v4
95
95
with:
96
96
node-version: latest
97
-
cache: 'pnpm'
97
+
cache: "pnpm"
98
98
99
99
- name: Install dependencies
100
100
run: pnpm install
···
126
126
uses: actions/setup-node@v4
127
127
with:
128
128
node-version: latest
129
-
cache: 'pnpm'
129
+
cache: "pnpm"
130
130
131
131
- name: Install dependencies
132
132
run: pnpm install
+1
-1
.github/workflows/release.yml
+2
-6
.vscode/extensions.json
+14
-14
.vscode/settings.json
···
1
1
{
2
-
"typescript.experimental.useTsgo": true,
3
-
"editor.defaultFormatter": "oxc.oxc-vscode",
4
-
"oxc.typeAware": true,
5
-
"oxc.fixKind": "safe_fix",
6
-
"oxc.unusedDisableDirectives": "deny",
7
-
"[rust]": {
8
-
"editor.defaultFormatter": "rust-lang.rust-analyzer"
9
-
},
10
-
"editor.codeActionsOnSave": {
11
-
"source.fixAll.oxc": "explicit"
12
-
},
13
-
"biome.enabled": false,
14
-
"css.lint.unknownAtRules": "ignore",
15
-
}
2
+
"typescript.experimental.useTsgo": true,
3
+
"editor.defaultFormatter": "oxc.oxc-vscode",
4
+
"oxc.typeAware": true,
5
+
"oxc.fixKind": "safe_fix",
6
+
"oxc.unusedDisableDirectives": "deny",
7
+
"[rust]": {
8
+
"editor.defaultFormatter": "rust-lang.rust-analyzer"
9
+
},
10
+
"editor.codeActionsOnSave": {
11
+
"source.fixAll.oxc": "explicit"
12
+
},
13
+
"biome.enabled": false,
14
+
"css.lint.unknownAtRules": "ignore"
15
+
}
+64
-2
Cargo.lock
···
1670
1670
]
1671
1671
1672
1672
[[package]]
1673
+
name = "fixtures-incremental-build"
1674
+
version = "0.1.0"
1675
+
dependencies = [
1676
+
"maud",
1677
+
"maudit",
1678
+
]
1679
+
1680
+
[[package]]
1673
1681
name = "fixtures-prefetch-prerender"
1674
1682
version = "0.1.0"
1675
1683
dependencies = [
···
2574
2582
dependencies = [
2575
2583
"base64",
2576
2584
"brk_rolldown",
2585
+
"brk_rolldown_common",
2577
2586
"brk_rolldown_plugin_replace",
2578
2587
"chrono",
2579
2588
"colored 3.1.1",
···
2592
2601
"rayon",
2593
2602
"rustc-hash",
2594
2603
"serde",
2604
+
"serde_json",
2595
2605
"serde_yaml",
2596
2606
"slug",
2597
2607
"syntect",
···
2622
2632
"serde_json",
2623
2633
"spinach",
2624
2634
"tar",
2635
+
"tempfile",
2625
2636
"tokio",
2626
2637
"tokio-util",
2638
+
"toml",
2627
2639
"toml_edit 0.24.0+spec-1.1.0",
2628
2640
"tower-http",
2629
2641
"tracing",
···
4522
4534
]
4523
4535
4524
4536
[[package]]
4537
+
name = "serde_spanned"
4538
+
version = "0.6.9"
4539
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4540
+
checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3"
4541
+
dependencies = [
4542
+
"serde",
4543
+
]
4544
+
4545
+
[[package]]
4525
4546
name = "serde_urlencoded"
4526
4547
version = "0.7.1"
4527
4548
source = "registry+https://github.com/rust-lang/crates.io-index"
···
5019
5040
]
5020
5041
5021
5042
[[package]]
5043
+
name = "toml"
5044
+
version = "0.8.23"
5045
+
source = "registry+https://github.com/rust-lang/crates.io-index"
5046
+
checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362"
5047
+
dependencies = [
5048
+
"serde",
5049
+
"serde_spanned",
5050
+
"toml_datetime 0.6.11",
5051
+
"toml_edit 0.22.27",
5052
+
]
5053
+
5054
+
[[package]]
5055
+
name = "toml_datetime"
5056
+
version = "0.6.11"
5057
+
source = "registry+https://github.com/rust-lang/crates.io-index"
5058
+
checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c"
5059
+
dependencies = [
5060
+
"serde",
5061
+
]
5062
+
5063
+
[[package]]
5022
5064
name = "toml_datetime"
5023
5065
version = "0.7.5+spec-1.1.0"
5024
5066
source = "registry+https://github.com/rust-lang/crates.io-index"
···
5029
5071
5030
5072
[[package]]
5031
5073
name = "toml_edit"
5074
+
version = "0.22.27"
5075
+
source = "registry+https://github.com/rust-lang/crates.io-index"
5076
+
checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"
5077
+
dependencies = [
5078
+
"indexmap",
5079
+
"serde",
5080
+
"serde_spanned",
5081
+
"toml_datetime 0.6.11",
5082
+
"toml_write",
5083
+
"winnow",
5084
+
]
5085
+
5086
+
[[package]]
5087
+
name = "toml_edit"
5032
5088
version = "0.23.10+spec-1.0.0"
5033
5089
source = "registry+https://github.com/rust-lang/crates.io-index"
5034
5090
checksum = "84c8b9f757e028cee9fa244aea147aab2a9ec09d5325a9b01e0a49730c2b5269"
5035
5091
dependencies = [
5036
5092
"indexmap",
5037
-
"toml_datetime",
5093
+
"toml_datetime 0.7.5+spec-1.1.0",
5038
5094
"toml_parser",
5039
5095
"winnow",
5040
5096
]
···
5046
5102
checksum = "8c740b185920170a6d9191122cafef7010bd6270a3824594bff6784c04d7f09e"
5047
5103
dependencies = [
5048
5104
"indexmap",
5049
-
"toml_datetime",
5105
+
"toml_datetime 0.7.5+spec-1.1.0",
5050
5106
"toml_parser",
5051
5107
"toml_writer",
5052
5108
"winnow",
···
5060
5116
dependencies = [
5061
5117
"winnow",
5062
5118
]
5119
+
5120
+
[[package]]
5121
+
name = "toml_write"
5122
+
version = "0.1.2"
5123
+
source = "registry+https://github.com/rust-lang/crates.io-index"
5124
+
checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801"
5063
5125
5064
5126
[[package]]
5065
5127
name = "toml_writer"
+3
-1
crates/maudit/Cargo.toml
···
23
23
24
24
# TODO: Allow making those optional
25
25
rolldown = { package = "brk_rolldown", version = "0.8.0" }
26
+
rolldown_common = { package = "brk_rolldown_common", version = "0.8.0" }
26
27
serde = { workspace = true }
28
+
serde_json = "1.0"
27
29
serde_yaml = "0.9.34"
28
30
pulldown-cmark = "0.13.0"
29
31
tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
···
48
50
rayon = "1.11.0"
49
51
rapidhash = "4.2.1"
50
52
pathdiff = "0.2.3"
51
-
rolldown_plugin_replace = {package = "brk_rolldown_plugin_replace", version = "0.8.0"}
53
+
rolldown_plugin_replace = { package = "brk_rolldown_plugin_replace", version = "0.8.0" }
52
54
53
55
[dev-dependencies]
54
56
tempfile = "3.24.0"
+4
-8
crates/maudit/src/assets/image_cache.rs
···
338
338
339
339
#[test]
340
340
fn test_build_options_integration() {
341
-
use crate::build::options::{AssetsOptions, BuildOptions};
341
+
use crate::build::options::BuildOptions;
342
342
343
343
// Test that BuildOptions can configure the cache directory
344
344
let custom_cache = PathBuf::from("/tmp/custom_maudit_cache");
345
345
let build_options = BuildOptions {
346
-
assets: AssetsOptions {
347
-
image_cache_dir: custom_cache.clone(),
348
-
..Default::default()
349
-
},
346
+
cache_dir: custom_cache.clone(),
350
347
..Default::default()
351
348
};
352
349
353
-
// Create cache with build options
354
-
let cache = ImageCache::with_cache_dir(&build_options.assets.image_cache_dir);
350
+
let cache = ImageCache::with_cache_dir(build_options.assets_cache_dir());
355
351
356
352
// Verify it uses the configured directory
357
-
assert_eq!(cache.get_cache_dir(), custom_cache);
353
+
assert_eq!(cache.get_cache_dir(), custom_cache.join("assets"));
358
354
}
359
355
360
356
#[test]
+104
-13
crates/maudit/src/build/options.rs
···
1
-
use std::{env, path::PathBuf};
1
+
use std::{fs, path::PathBuf};
2
2
3
3
use crate::{assets::RouteAssetsOptions, is_dev, sitemap::SitemapOptions};
4
4
···
36
36
/// assets: AssetsOptions {
37
37
/// assets_dir: "_assets".into(),
38
38
/// tailwind_binary_path: "./node_modules/.bin/tailwindcss".into(),
39
-
/// image_cache_dir: ".cache/maudit/images".into(),
40
39
/// ..Default::default()
41
40
/// },
42
41
/// prefetch: PrefetchOptions {
···
61
60
/// At the speed Maudit operates at, not cleaning the output directory may offer a significant performance improvement at the cost of potentially serving stale content.
62
61
pub clean_output_dir: bool,
63
62
63
+
/// Whether to enable incremental builds.
64
+
///
65
+
/// When enabled, Maudit tracks which assets are used by which routes and only rebuilds
66
+
/// routes affected by changed files. This can significantly speed up rebuilds when only
67
+
/// a few files have changed.
68
+
///
69
+
/// Defaults to `true` in dev mode (`maudit dev`) and `false` in production builds.
70
+
pub incremental: bool,
71
+
72
+
/// Directory for build cache storage (incremental build state, etc.).
73
+
///
74
+
/// Defaults to `target/maudit_cache/{package_name}` where `{package_name}` is derived
75
+
/// from the current executable's name, falling back to the current directory name.
76
+
pub cache_dir: PathBuf,
77
+
78
+
/// Directory for caching processed assets (images, etc.).
79
+
///
80
+
/// If `None`, defaults to `{cache_dir}/assets`.
81
+
pub assets_cache_dir: Option<PathBuf>,
82
+
64
83
pub assets: AssetsOptions,
65
84
66
85
pub prefetch: PrefetchOptions,
···
124
143
hashing_strategy: self.assets.hashing_strategy,
125
144
}
126
145
}
146
+
147
+
/// Returns the directory for caching processed assets (images, etc.).
148
+
/// Uses `assets_cache_dir` if set, otherwise defaults to `{cache_dir}/assets`.
149
+
pub fn assets_cache_dir(&self) -> PathBuf {
150
+
self.assets_cache_dir
151
+
.clone()
152
+
.unwrap_or_else(|| self.cache_dir.join("assets"))
153
+
}
127
154
}
128
155
129
156
#[derive(Clone)]
···
139
166
/// Note that this value is not automatically joined with the `output_dir` in `BuildOptions`. Use [`BuildOptions::route_assets_options()`] to get a `RouteAssetsOptions` with the correct final path.
140
167
pub assets_dir: PathBuf,
141
168
142
-
/// Directory to use for image cache storage.
143
-
/// Defaults to `target/maudit_cache/images`.
144
-
///
145
-
/// This cache is used to store processed images and their placeholders to speed up subsequent builds.
146
-
pub image_cache_dir: PathBuf,
147
-
148
169
/// Strategy to use when hashing assets for fingerprinting.
149
170
///
150
171
/// Defaults to [`AssetHashingStrategy::Precise`] in production builds, and [`AssetHashingStrategy::FastImprecise`] in development builds. Note that this means that the cache isn't shared between dev and prod builds by default, if you have a lot of assets you may want to set this to the same value in both environments.
···
164
185
Self {
165
186
tailwind_binary_path: "tailwindcss".into(),
166
187
assets_dir: "_maudit".into(),
167
-
image_cache_dir: {
168
-
let target_dir =
169
-
env::var("CARGO_TARGET_DIR").unwrap_or_else(|_| "target".to_string());
170
-
PathBuf::from(target_dir).join("maudit_cache/images")
171
-
},
172
188
hashing_strategy: if is_dev() {
173
189
AssetHashingStrategy::FastImprecise
174
190
} else {
···
196
212
/// ```
197
213
impl Default for BuildOptions {
198
214
fn default() -> Self {
215
+
let site_name = get_site_name();
216
+
let cache_dir = find_target_dir()
217
+
.unwrap_or_else(|_| PathBuf::from("target"))
218
+
.join("maudit_cache")
219
+
.join(&site_name);
220
+
199
221
Self {
200
222
base_url: None,
201
223
output_dir: "dist".into(),
202
224
static_dir: "static".into(),
203
225
clean_output_dir: true,
226
+
incremental: is_dev(),
227
+
cache_dir,
228
+
assets_cache_dir: None,
204
229
prefetch: PrefetchOptions::default(),
205
230
assets: AssetsOptions::default(),
206
231
sitemap: SitemapOptions::default(),
207
232
}
208
233
}
209
234
}
235
+
236
+
/// Get the site name for cache directory purposes.
237
+
///
238
+
/// Uses the current executable's name (which matches the package/binary name),
239
+
/// falling back to the current directory name.
240
+
fn get_site_name() -> String {
241
+
// Get the binary name from the current executable
242
+
std::env::current_exe()
243
+
.ok()
244
+
.and_then(|p| p.file_name().map(|s| s.to_string_lossy().to_string()))
245
+
.unwrap_or_else(|| {
246
+
// Fallback to current directory name
247
+
std::env::current_dir()
248
+
.ok()
249
+
.and_then(|p| p.file_name().map(|s| s.to_string_lossy().to_string()))
250
+
.unwrap_or_else(|| "default".to_string())
251
+
})
252
+
}
253
+
254
+
/// Find the target directory using multiple strategies
255
+
///
256
+
/// This function tries multiple approaches to locate the target directory:
257
+
/// 1. CARGO_TARGET_DIR / CARGO_BUILD_TARGET_DIR environment variables
258
+
/// 2. Local ./target directory
259
+
/// 3. Workspace root target directory (walking up to find [workspace])
260
+
/// 4. Fallback to relative "target" path
261
+
fn find_target_dir() -> Result<PathBuf, std::io::Error> {
262
+
// 1. Check CARGO_TARGET_DIR and CARGO_BUILD_TARGET_DIR environment variables
263
+
for env_var in ["CARGO_TARGET_DIR", "CARGO_BUILD_TARGET_DIR"] {
264
+
if let Ok(target_dir) = std::env::var(env_var) {
265
+
let path = PathBuf::from(&target_dir);
266
+
if path.exists() {
267
+
return Ok(path);
268
+
}
269
+
}
270
+
}
271
+
272
+
// 2. Look for target directory in current directory
273
+
let local_target = PathBuf::from("target");
274
+
if local_target.exists() {
275
+
return Ok(local_target);
276
+
}
277
+
278
+
// 3. Try to find workspace root by looking for Cargo.toml with [workspace]
279
+
let mut current = std::env::current_dir()?;
280
+
loop {
281
+
let cargo_toml = current.join("Cargo.toml");
282
+
if cargo_toml.exists()
283
+
&& let Ok(content) = fs::read_to_string(&cargo_toml)
284
+
&& content.contains("[workspace]")
285
+
{
286
+
let workspace_target = current.join("target");
287
+
if workspace_target.exists() {
288
+
return Ok(workspace_target);
289
+
}
290
+
}
291
+
292
+
// Move up to parent directory
293
+
if !current.pop() {
294
+
break;
295
+
}
296
+
}
297
+
298
+
// 4. Final fallback to relative path
299
+
Ok(PathBuf::from("target"))
300
+
}
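Reviewer note: a minimal sketch of how the new `incremental`, `cache_dir`, and `assets_cache_dir` options compose with the `assets_cache_dir()` helper added above. The `/tmp/site-cache` path is an arbitrary example and the `BuildOptions` import is assumed; this is not code from the PR.

    use std::path::PathBuf;
    // assumed import: use maudit::build::options::BuildOptions;

    let options = BuildOptions {
        incremental: true,                           // track assets, skip unchanged routes
        cache_dir: PathBuf::from("/tmp/site-cache"), // illustrative path
        assets_cache_dir: None,                      // falls back to {cache_dir}/assets
        ..Default::default()
    };

    // The helper resolves the effective assets cache directory.
    assert_eq!(options.assets_cache_dir(), PathBuf::from("/tmp/site-cache/assets"));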
+233
crates/maudit/src/build/state.rs
···
1
+
use rustc_hash::{FxHashMap, FxHashSet};
2
+
use serde::{Deserialize, Serialize};
3
+
use std::fs;
4
+
use std::path::{Path, PathBuf};
5
+
6
+
/// Identifies a specific route or variant for incremental rebuilds
7
+
#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
8
+
pub enum RouteIdentifier {
9
+
/// A base route with optional page parameters
10
+
/// Params are stored as a sorted Vec for hashing purposes
11
+
Base {
12
+
route_path: String,
13
+
params: Option<Vec<(String, Option<String>)>>,
14
+
},
15
+
/// A variant route with optional page parameters
16
+
/// Params are stored as a sorted Vec for hashing purposes
17
+
Variant {
18
+
variant_id: String,
19
+
variant_path: String,
20
+
params: Option<Vec<(String, Option<String>)>>,
21
+
},
22
+
}
23
+
24
+
impl RouteIdentifier {
25
+
pub fn base(route_path: String, params: Option<FxHashMap<String, Option<String>>>) -> Self {
26
+
Self::Base {
27
+
route_path,
28
+
params: params.map(|p| {
29
+
let mut sorted: Vec<_> = p.into_iter().collect();
30
+
sorted.sort_by(|a, b| a.0.cmp(&b.0));
31
+
sorted
32
+
}),
33
+
}
34
+
}
35
+
36
+
pub fn variant(
37
+
variant_id: String,
38
+
variant_path: String,
39
+
params: Option<FxHashMap<String, Option<String>>>,
40
+
) -> Self {
41
+
Self::Variant {
42
+
variant_id,
43
+
variant_path,
44
+
params: params.map(|p| {
45
+
let mut sorted: Vec<_> = p.into_iter().collect();
46
+
sorted.sort_by(|a, b| a.0.cmp(&b.0));
47
+
sorted
48
+
}),
49
+
}
50
+
}
51
+
}
52
+
53
+
/// Tracks build state for incremental builds
54
+
#[derive(Debug, Default, Serialize, Deserialize)]
55
+
pub struct BuildState {
56
+
/// Maps asset paths to routes that use them
57
+
/// Key: canonicalized asset path
58
+
/// Value: set of routes using this asset
59
+
pub asset_to_routes: FxHashMap<PathBuf, FxHashSet<RouteIdentifier>>,
60
+
61
+
/// Stores all bundler input paths from the last build
62
+
/// This needs to be preserved to ensure consistent bundling
63
+
pub bundler_inputs: Vec<String>,
64
+
}
65
+
66
+
impl BuildState {
67
+
pub fn new() -> Self {
68
+
Self::default()
69
+
}
70
+
71
+
/// Load build state from disk cache
72
+
pub fn load(cache_dir: &Path) -> Result<Self, Box<dyn std::error::Error>> {
73
+
let state_path = cache_dir.join("build_state.json");
74
+
75
+
if !state_path.exists() {
76
+
return Ok(Self::new());
77
+
}
78
+
79
+
let content = fs::read_to_string(&state_path)?;
80
+
let state: BuildState = serde_json::from_str(&content)?;
81
+
Ok(state)
82
+
}
83
+
84
+
/// Save build state to disk cache
85
+
pub fn save(&self, cache_dir: &Path) -> Result<(), Box<dyn std::error::Error>> {
86
+
fs::create_dir_all(cache_dir)?;
87
+
let state_path = cache_dir.join("build_state.json");
88
+
let content = serde_json::to_string_pretty(self)?;
89
+
fs::write(state_path, content)?;
90
+
Ok(())
91
+
}
92
+
93
+
/// Add an asset->route mapping
94
+
pub fn track_asset(&mut self, asset_path: PathBuf, route_id: RouteIdentifier) {
95
+
self.asset_to_routes
96
+
.entry(asset_path)
97
+
.or_default()
98
+
.insert(route_id);
99
+
}
100
+
101
+
/// Get all routes affected by changes to specific files
102
+
pub fn get_affected_routes(&self, changed_files: &[PathBuf]) -> FxHashSet<RouteIdentifier> {
103
+
let mut affected_routes = FxHashSet::default();
104
+
105
+
for changed_file in changed_files {
106
+
// Canonicalize the changed file path for consistent comparison
107
+
// All asset paths in asset_to_routes are stored as canonical paths
108
+
let canonical_changed = changed_file.canonicalize().ok();
109
+
110
+
// Try exact match with canonical path
111
+
if let Some(canonical) = &canonical_changed
112
+
&& let Some(routes) = self.asset_to_routes.get(canonical)
113
+
{
114
+
affected_routes.extend(routes.iter().cloned());
115
+
continue; // Found exact match, no need for directory check
116
+
}
117
+
118
+
// Fallback: try an exact match with the original path (covers files that could not be canonicalized, e.g. already-deleted files)
119
+
if let Some(routes) = self.asset_to_routes.get(changed_file) {
120
+
affected_routes.extend(routes.iter().cloned());
121
+
continue;
122
+
}
123
+
124
+
// Directory prefix check: find all routes using assets within this directory.
125
+
// This handles two cases:
126
+
// 1. A directory was modified - rebuild all routes using assets in that dir
127
+
// 2. A directory was renamed/deleted - the old path no longer exists but we
128
+
// still need to rebuild routes that used assets under that path
129
+
//
130
+
// We do this check if:
131
+
// - The path currently exists as a directory, OR
132
+
// - The path doesn't exist (could be a deleted/renamed directory)
133
+
let should_check_prefix = changed_file.is_dir() || !changed_file.exists();
134
+
135
+
if should_check_prefix {
136
+
// Use original path for prefix matching (canonical won't exist for deleted dirs)
137
+
for (asset_path, routes) in &self.asset_to_routes {
138
+
if asset_path.starts_with(changed_file) {
139
+
affected_routes.extend(routes.iter().cloned());
140
+
}
141
+
}
142
+
}
143
+
}
144
+
145
+
affected_routes
146
+
}
147
+
148
+
/// Clear all tracked data (for full rebuild)
149
+
pub fn clear(&mut self) {
150
+
self.asset_to_routes.clear();
151
+
self.bundler_inputs.clear();
152
+
}
153
+
}
154
+
155
+
#[cfg(test)]
156
+
mod tests {
157
+
use super::*;
158
+
159
+
fn make_route(path: &str) -> RouteIdentifier {
160
+
RouteIdentifier::base(path.to_string(), None)
161
+
}
162
+
163
+
#[test]
164
+
fn test_get_affected_routes_exact_match() {
165
+
let mut state = BuildState::new();
166
+
let asset_path = PathBuf::from("/project/src/assets/logo.png");
167
+
let route = make_route("/");
168
+
169
+
state.track_asset(asset_path.clone(), route.clone());
170
+
171
+
// Exact match should work
172
+
let affected = state.get_affected_routes(&[asset_path]);
173
+
assert_eq!(affected.len(), 1);
174
+
assert!(affected.contains(&route));
175
+
}
176
+
177
+
#[test]
178
+
fn test_get_affected_routes_no_match() {
179
+
let mut state = BuildState::new();
180
+
let asset_path = PathBuf::from("/project/src/assets/logo.png");
181
+
let route = make_route("/");
182
+
183
+
state.track_asset(asset_path, route);
184
+
185
+
// Different file should not match
186
+
let other_path = PathBuf::from("/project/src/assets/other.png");
187
+
let affected = state.get_affected_routes(&[other_path]);
188
+
assert!(affected.is_empty());
189
+
}
190
+
191
+
#[test]
192
+
fn test_get_affected_routes_deleted_directory() {
193
+
let mut state = BuildState::new();
194
+
195
+
// Track assets under a directory path
196
+
let asset1 = PathBuf::from("/project/src/assets/icons/logo.png");
197
+
let asset2 = PathBuf::from("/project/src/assets/icons/favicon.ico");
198
+
let asset3 = PathBuf::from("/project/src/assets/styles.css");
199
+
let route1 = make_route("/");
200
+
let route2 = make_route("/about");
201
+
202
+
state.track_asset(asset1, route1.clone());
203
+
state.track_asset(asset2, route1.clone());
204
+
state.track_asset(asset3, route2.clone());
205
+
206
+
// Simulate a deleted/renamed directory (path doesn't exist)
207
+
// The "icons" directory was renamed, so the old path doesn't exist
208
+
let deleted_dir = PathBuf::from("/project/src/assets/icons");
209
+
210
+
// Since the path doesn't exist, it should check prefix matching
211
+
let affected = state.get_affected_routes(&[deleted_dir]);
212
+
213
+
// Should find route1 (uses assets under /icons/) but not route2
214
+
assert_eq!(affected.len(), 1);
215
+
assert!(affected.contains(&route1));
216
+
}
217
+
218
+
#[test]
219
+
fn test_get_affected_routes_multiple_routes_same_asset() {
220
+
let mut state = BuildState::new();
221
+
let asset_path = PathBuf::from("/project/src/assets/shared.css");
222
+
let route1 = make_route("/");
223
+
let route2 = make_route("/about");
224
+
225
+
state.track_asset(asset_path.clone(), route1.clone());
226
+
state.track_asset(asset_path.clone(), route2.clone());
227
+
228
+
let affected = state.get_affected_routes(&[asset_path]);
229
+
assert_eq!(affected.len(), 2);
230
+
assert!(affected.contains(&route1));
231
+
assert!(affected.contains(&route2));
232
+
}
233
+
}
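Reviewer note: a usage sketch of the `BuildState` API introduced in this file, mirroring what the tests above exercise. Paths and the cache directory are illustrative, and error handling is elided with `unwrap()`; this is not code from the PR.

    use std::path::PathBuf;

    let mut state = BuildState::new();

    // Record that the homepage used this asset (real callers store canonical paths).
    let logo = PathBuf::from("/project/src/assets/logo.png");
    state.track_asset(logo.clone(), RouteIdentifier::base("/".into(), None));

    // Persist across builds; `load` returns an empty state when no cache file exists.
    let cache_dir = PathBuf::from("target/maudit_cache/site");
    state.save(&cache_dir).unwrap();
    let state = BuildState::load(&cache_dir).unwrap();

    // On the next incremental build, map changed files back to affected routes.
    let affected = state.get_affected_routes(&[logo]);
    assert!(affected.contains(&RouteIdentifier::base("/".into(), None)));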
+457
-137
crates/maudit/src/build.rs
···
14
14
self, HashAssetType, HashConfig, PrefetchPlugin, RouteAssets, Script, TailwindPlugin,
15
15
calculate_hash, image_cache::ImageCache, prefetch,
16
16
},
17
-
build::{images::process_image, options::PrefetchStrategy},
17
+
build::{
18
+
images::process_image,
19
+
options::PrefetchStrategy,
20
+
state::{BuildState, RouteIdentifier},
21
+
},
18
22
content::ContentSources,
19
23
is_dev,
20
24
logging::print_title,
···
26
30
use log::{debug, info, trace, warn};
27
31
use pathdiff::diff_paths;
28
32
use rolldown::{Bundler, BundlerOptions, InputItem, ModuleType};
33
+
use rolldown_common::Output;
29
34
use rolldown_plugin_replace::ReplacePlugin;
30
35
use rustc_hash::{FxHashMap, FxHashSet};
31
36
···
36
41
pub mod images;
37
42
pub mod metadata;
38
43
pub mod options;
44
+
pub mod state;
45
+
46
+
/// Helper to check if a route should be rebuilt during incremental builds
47
+
fn should_rebuild_route(
48
+
route_id: &RouteIdentifier,
49
+
routes_to_rebuild: &Option<FxHashSet<RouteIdentifier>>,
50
+
) -> bool {
51
+
let result = match routes_to_rebuild {
52
+
Some(set) => set.contains(route_id),
53
+
None => true, // Full build
54
+
};
55
+
56
+
if !result {
57
+
trace!(target: "build", "Skipping route {:?} (not in rebuild set)", route_id);
58
+
}
59
+
60
+
result
61
+
}
62
+
63
+
/// Helper to track all assets used by a route
64
+
fn track_route_assets(
65
+
build_state: &mut BuildState,
66
+
route_id: &RouteIdentifier,
67
+
route_assets: &RouteAssets,
68
+
) {
69
+
// Track images
70
+
for image in &route_assets.images {
71
+
if let Ok(canonical) = image.path().canonicalize() {
72
+
build_state.track_asset(canonical, route_id.clone());
73
+
}
74
+
}
75
+
76
+
// Track scripts
77
+
for script in &route_assets.scripts {
78
+
if let Ok(canonical) = script.path().canonicalize() {
79
+
build_state.track_asset(canonical, route_id.clone());
80
+
}
81
+
}
82
+
83
+
// Track styles
84
+
for style in &route_assets.styles {
85
+
if let Ok(canonical) = style.path().canonicalize() {
86
+
build_state.track_asset(canonical, route_id.clone());
87
+
}
88
+
}
89
+
}
39
90
40
91
pub fn execute_build(
41
92
routes: &[&dyn FullRoute],
42
93
content_sources: &mut ContentSources,
43
94
options: &BuildOptions,
95
+
changed_files: Option<&[PathBuf]>,
44
96
async_runtime: &tokio::runtime::Runtime,
45
97
) -> Result<BuildOutput, Box<dyn std::error::Error>> {
46
-
async_runtime.block_on(async { build(routes, content_sources, options).await })
98
+
async_runtime.block_on(async { build(routes, content_sources, options, changed_files).await })
47
99
}
48
100
49
101
pub async fn build(
50
102
routes: &[&dyn FullRoute],
51
103
content_sources: &mut ContentSources,
52
104
options: &BuildOptions,
105
+
changed_files: Option<&[PathBuf]>,
53
106
) -> Result<BuildOutput, Box<dyn std::error::Error>> {
54
107
let build_start = Instant::now();
55
108
let mut build_metadata = BuildOutput::new(build_start);
···
57
110
// Create a directory for the output
58
111
trace!(target: "build", "Setting up required directories...");
59
112
60
-
let clean_up_handle = if options.clean_output_dir {
113
+
// Use cache directory from options
114
+
let build_cache_dir = &options.cache_dir;
115
+
116
+
// Load build state for incremental builds (only if incremental is enabled)
117
+
let mut build_state = if options.incremental {
118
+
BuildState::load(build_cache_dir).unwrap_or_else(|e| {
119
+
debug!(target: "build", "Failed to load build state: {}", e);
120
+
BuildState::new()
121
+
})
122
+
} else {
123
+
BuildState::new()
124
+
};
125
+
126
+
debug!(target: "build", "Loaded build state with {} asset mappings", build_state.asset_to_routes.len());
127
+
debug!(target: "build", "options.incremental: {}, changed_files.is_some(): {}", options.incremental, changed_files.is_some());
128
+
129
+
// Determine if this is an incremental build
130
+
let is_incremental =
131
+
options.incremental && changed_files.is_some() && !build_state.asset_to_routes.is_empty();
132
+
133
+
let routes_to_rebuild = if is_incremental {
134
+
let changed = changed_files.unwrap();
135
+
info!(target: "build", "Incremental build: {} files changed", changed.len());
136
+
info!(target: "build", "Changed files: {:?}", changed);
137
+
138
+
info!(target: "build", "Build state has {} asset mappings", build_state.asset_to_routes.len());
139
+
140
+
let affected = build_state.get_affected_routes(changed);
141
+
info!(target: "build", "Rebuilding {} affected routes", affected.len());
142
+
info!(target: "build", "Affected routes: {:?}", affected);
143
+
144
+
Some(affected)
145
+
} else {
146
+
if changed_files.is_some() {
147
+
info!(target: "build", "Full build (first run after recompilation)");
148
+
}
149
+
// Full build - clear old state
150
+
build_state.clear();
151
+
None
152
+
};
153
+
154
+
// Check if we should rebundle during incremental builds
155
+
// Rebundle if a changed file is either:
156
+
// 1. A direct bundler input (entry point)
157
+
// 2. A transitive dependency tracked in asset_to_routes (any file the bundler processed)
158
+
let should_rebundle = if is_incremental && !build_state.bundler_inputs.is_empty() {
159
+
let changed = changed_files.unwrap();
160
+
let should = changed.iter().any(|changed_file| {
161
+
// Check if it's a direct bundler input
162
+
let is_bundler_input = build_state.bundler_inputs.iter().any(|bundler_input| {
163
+
if let (Ok(changed_canonical), Ok(bundler_canonical)) = (
164
+
changed_file.canonicalize(),
165
+
PathBuf::from(bundler_input).canonicalize(),
166
+
) {
167
+
changed_canonical == bundler_canonical
168
+
} else {
169
+
false
170
+
}
171
+
});
172
+
173
+
if is_bundler_input {
174
+
return true;
175
+
}
176
+
177
+
// Check if it's a transitive dependency tracked by the bundler
178
+
// (JS/TS modules, CSS files, or assets like images/fonts referenced via url())
179
+
if let Ok(canonical) = changed_file.canonicalize() {
180
+
return build_state.asset_to_routes.contains_key(&canonical);
181
+
}
182
+
183
+
false
184
+
});
185
+
186
+
if should {
187
+
info!(target: "build", "Rebundling needed: changed file affects bundled assets");
188
+
} else {
189
+
info!(target: "build", "Skipping bundler: no changed files affect bundled assets");
190
+
}
191
+
192
+
should
193
+
} else {
194
+
// Not incremental or no previous bundler inputs
195
+
false
196
+
};
197
+
198
+
let clean_up_handle = if options.clean_output_dir && !is_incremental {
61
199
let old_dist_tmp_dir = {
62
200
let duration = SystemTime::now().duration_since(UNIX_EPOCH)?;
63
201
let num = (duration.as_secs() + duration.subsec_nanos() as u64) % 100000;
···
74
212
};
75
213
76
214
// Create the image cache early so it can be shared across routes
77
-
let image_cache = ImageCache::with_cache_dir(&options.assets.image_cache_dir);
215
+
let image_cache = ImageCache::with_cache_dir(options.assets_cache_dir());
78
216
let _ = fs::create_dir_all(image_cache.get_cache_dir());
79
217
80
218
// Create route_assets_options with the image cache
···
183
321
184
322
// Static base route
185
323
if base_params.is_empty() {
186
-
let mut route_assets = RouteAssets::with_default_assets(
187
-
&route_assets_options,
188
-
Some(image_cache.clone()),
189
-
default_scripts.clone(),
190
-
vec![],
191
-
);
324
+
let route_id = RouteIdentifier::base(base_path.clone(), None);
325
+
326
+
// Check if we need to rebuild this route
327
+
if should_rebuild_route(&route_id, &routes_to_rebuild) {
328
+
let mut route_assets = RouteAssets::with_default_assets(
329
+
&route_assets_options,
330
+
Some(image_cache.clone()),
331
+
default_scripts.clone(),
332
+
vec![],
333
+
);
192
334
193
-
let params = PageParams::default();
194
-
let url = cached_route.url(¶ms);
335
+
let params = PageParams::default();
336
+
let url = cached_route.url(¶ms);
195
337
196
-
let result = route.build(&mut PageContext::from_static_route(
197
-
content_sources,
198
-
&mut route_assets,
199
-
&url,
200
-
&options.base_url,
201
-
None,
202
-
))?;
338
+
let result = route.build(&mut PageContext::from_static_route(
339
+
content_sources,
340
+
&mut route_assets,
341
+
&url,
342
+
&options.base_url,
343
+
None,
344
+
))?;
203
345
204
-
let file_path = cached_route.file_path(¶ms, &options.output_dir);
346
+
let file_path = cached_route.file_path(¶ms, &options.output_dir);
205
347
206
-
write_route_file(&result, &file_path)?;
348
+
write_route_file(&result, &file_path)?;
207
349
208
-
info!(target: "pages", "{} -> {} {}", url, file_path.to_string_lossy().dimmed(), format_elapsed_time(route_start.elapsed(), &route_format_options));
350
+
info!(target: "pages", "{} -> {} {}", url, file_path.to_string_lossy().dimmed(), format_elapsed_time(route_start.elapsed(), &route_format_options));
209
351
210
-
build_pages_images.extend(route_assets.images);
211
-
build_pages_scripts.extend(route_assets.scripts);
212
-
build_pages_styles.extend(route_assets.styles);
352
+
// Track assets for this route
353
+
track_route_assets(&mut build_state, &route_id, &route_assets);
213
354
214
-
build_metadata.add_page(
215
-
base_path.clone(),
216
-
file_path.to_string_lossy().to_string(),
217
-
None,
218
-
);
355
+
build_pages_images.extend(route_assets.images);
356
+
build_pages_scripts.extend(route_assets.scripts);
357
+
build_pages_styles.extend(route_assets.styles);
358
+
359
+
build_metadata.add_page(
360
+
base_path.clone(),
361
+
file_path.to_string_lossy().to_string(),
362
+
None,
363
+
);
219
364
220
-
add_sitemap_entry(
221
-
&mut sitemap_entries,
222
-
normalized_base_url,
223
-
&url,
224
-
base_path,
225
-
&route.sitemap_metadata(),
226
-
&options.sitemap,
227
-
);
365
+
add_sitemap_entry(
366
+
&mut sitemap_entries,
367
+
normalized_base_url,
368
+
&url,
369
+
base_path,
370
+
&route.sitemap_metadata(),
371
+
&options.sitemap,
372
+
);
228
373
229
-
page_count += 1;
374
+
page_count += 1;
375
+
} else {
376
+
trace!(target: "build", "Skipping unchanged route: {}", base_path);
377
+
}
230
378
} else {
231
379
// Dynamic base route
232
380
let mut route_assets = RouteAssets::with_default_assets(
···
250
398
251
399
// Build all pages for this route
252
400
for page in pages {
253
-
let page_start = Instant::now();
254
-
let url = cached_route.url(&page.0);
255
-
let file_path = cached_route.file_path(&page.0, &options.output_dir);
401
+
let route_id =
402
+
RouteIdentifier::base(base_path.clone(), Some(page.0.0.clone()));
256
403
257
-
let content = route.build(&mut PageContext::from_dynamic_route(
258
-
&page,
259
-
content_sources,
260
-
&mut route_assets,
261
-
&url,
262
-
&options.base_url,
263
-
None,
264
-
))?;
404
+
// Check if we need to rebuild this specific page
405
+
if should_rebuild_route(&route_id, &routes_to_rebuild) {
406
+
let page_start = Instant::now();
407
+
let url = cached_route.url(&page.0);
408
+
let file_path = cached_route.file_path(&page.0, &options.output_dir);
265
409
266
-
write_route_file(&content, &file_path)?;
410
+
let content = route.build(&mut PageContext::from_dynamic_route(
411
+
&page,
412
+
content_sources,
413
+
&mut route_assets,
414
+
&url,
415
+
&options.base_url,
416
+
None,
417
+
))?;
267
418
268
-
info!(target: "pages", "โโ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(page_start.elapsed(), &route_format_options));
419
+
write_route_file(&content, &file_path)?;
269
420
270
-
build_metadata.add_page(
271
-
base_path.clone(),
272
-
file_path.to_string_lossy().to_string(),
273
-
Some(page.0.0.clone()),
274
-
);
421
+
info!(target: "pages", "โโ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(page_start.elapsed(), &route_format_options));
422
+
423
+
// Track assets for this page
424
+
track_route_assets(&mut build_state, &route_id, &route_assets);
425
+
426
+
build_metadata.add_page(
427
+
base_path.clone(),
428
+
file_path.to_string_lossy().to_string(),
429
+
Some(page.0.0.clone()),
430
+
);
275
431
276
-
add_sitemap_entry(
277
-
&mut sitemap_entries,
278
-
normalized_base_url,
279
-
&url,
280
-
base_path,
281
-
&route.sitemap_metadata(),
282
-
&options.sitemap,
283
-
);
432
+
add_sitemap_entry(
433
+
&mut sitemap_entries,
434
+
normalized_base_url,
435
+
&url,
436
+
base_path,
437
+
&route.sitemap_metadata(),
438
+
&options.sitemap,
439
+
);
284
440
285
-
page_count += 1;
441
+
page_count += 1;
442
+
} else {
443
+
trace!(target: "build", "Skipping unchanged page: {} with params {:?}", base_path, page.0.0);
444
+
}
286
445
}
287
446
}
288
447
···
299
458
300
459
if variant_params.is_empty() {
301
460
// Static variant
302
-
let mut route_assets = RouteAssets::with_default_assets(
303
-
&route_assets_options,
304
-
Some(image_cache.clone()),
305
-
default_scripts.clone(),
306
-
vec![],
307
-
);
461
+
let route_id =
462
+
RouteIdentifier::variant(variant_id.clone(), variant_path.clone(), None);
308
463
309
-
let params = PageParams::default();
310
-
let url = cached_route.variant_url(¶ms, &variant_id)?;
311
-
let file_path =
312
-
cached_route.variant_file_path(¶ms, &options.output_dir, &variant_id)?;
464
+
// Check if we need to rebuild this variant
465
+
if should_rebuild_route(&route_id, &routes_to_rebuild) {
466
+
let mut route_assets = RouteAssets::with_default_assets(
467
+
&route_assets_options,
468
+
Some(image_cache.clone()),
469
+
default_scripts.clone(),
470
+
vec![],
471
+
);
313
472
314
-
let result = route.build(&mut PageContext::from_static_route(
315
-
content_sources,
316
-
&mut route_assets,
317
-
&url,
318
-
&options.base_url,
319
-
Some(variant_id.clone()),
320
-
))?;
473
+
let params = PageParams::default();
474
+
let url = cached_route.variant_url(¶ms, &variant_id)?;
475
+
let file_path = cached_route.variant_file_path(
476
+
¶ms,
477
+
&options.output_dir,
478
+
&variant_id,
479
+
)?;
480
+
481
+
let result = route.build(&mut PageContext::from_static_route(
482
+
content_sources,
483
+
&mut route_assets,
484
+
&url,
485
+
&options.base_url,
486
+
Some(variant_id.clone()),
487
+
))?;
488
+
489
+
write_route_file(&result, &file_path)?;
321
490
322
-
write_route_file(&result, &file_path)?;
491
+
info!(target: "pages", "โโ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(variant_start.elapsed(), &route_format_options));
323
492
324
-
info!(target: "pages", "โโ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(variant_start.elapsed(), &route_format_options));
493
+
// Track assets for this variant
494
+
track_route_assets(&mut build_state, &route_id, &route_assets);
325
495
326
-
build_pages_images.extend(route_assets.images);
327
-
build_pages_scripts.extend(route_assets.scripts);
328
-
build_pages_styles.extend(route_assets.styles);
496
+
build_pages_images.extend(route_assets.images);
497
+
build_pages_scripts.extend(route_assets.scripts);
498
+
build_pages_styles.extend(route_assets.styles);
329
499
330
-
build_metadata.add_page(
331
-
variant_path.clone(),
332
-
file_path.to_string_lossy().to_string(),
333
-
None,
334
-
);
500
+
build_metadata.add_page(
501
+
variant_path.clone(),
502
+
file_path.to_string_lossy().to_string(),
503
+
None,
504
+
);
335
505
336
-
add_sitemap_entry(
337
-
&mut sitemap_entries,
338
-
normalized_base_url,
339
-
&url,
340
-
&variant_path,
341
-
&route.sitemap_metadata(),
342
-
&options.sitemap,
343
-
);
506
+
add_sitemap_entry(
507
+
&mut sitemap_entries,
508
+
normalized_base_url,
509
+
&url,
510
+
&variant_path,
511
+
&route.sitemap_metadata(),
512
+
&options.sitemap,
513
+
);
344
514
345
-
page_count += 1;
515
+
page_count += 1;
516
+
} else {
517
+
trace!(target: "build", "Skipping unchanged variant: {}", variant_path);
518
+
}
346
519
} else {
347
520
// Dynamic variant
348
521
let mut route_assets = RouteAssets::with_default_assets(
···
365
538
366
539
// Build all pages for this variant group
367
540
for page in pages {
368
-
let variant_page_start = Instant::now();
369
-
let url = cached_route.variant_url(&page.0, &variant_id)?;
370
-
let file_path = cached_route.variant_file_path(
371
-
&page.0,
372
-
&options.output_dir,
373
-
&variant_id,
374
-
)?;
541
+
let route_id = RouteIdentifier::variant(
542
+
variant_id.clone(),
543
+
variant_path.clone(),
544
+
Some(page.0.0.clone()),
545
+
);
375
546
376
-
let content = route.build(&mut PageContext::from_dynamic_route(
377
-
&page,
378
-
content_sources,
379
-
&mut route_assets,
380
-
&url,
381
-
&options.base_url,
382
-
Some(variant_id.clone()),
383
-
))?;
547
+
// Check if we need to rebuild this specific variant page
548
+
if should_rebuild_route(&route_id, &routes_to_rebuild) {
549
+
let variant_page_start = Instant::now();
550
+
let url = cached_route.variant_url(&page.0, &variant_id)?;
551
+
let file_path = cached_route.variant_file_path(
552
+
&page.0,
553
+
&options.output_dir,
554
+
&variant_id,
555
+
)?;
384
556
385
-
write_route_file(&content, &file_path)?;
557
+
let content = route.build(&mut PageContext::from_dynamic_route(
558
+
&page,
559
+
content_sources,
560
+
&mut route_assets,
561
+
&url,
562
+
&options.base_url,
563
+
Some(variant_id.clone()),
564
+
))?;
565
+
566
+
write_route_file(&content, &file_path)?;
567
+
568
+
info!(target: "pages", "โ โโ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(variant_page_start.elapsed(), &route_format_options));
386
569
387
-
info!(target: "pages", "โ โโ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(variant_page_start.elapsed(), &route_format_options));
570
+
// Track assets for this variant page
571
+
track_route_assets(&mut build_state, &route_id, &route_assets);
388
572
389
-
build_metadata.add_page(
390
-
variant_path.clone(),
391
-
file_path.to_string_lossy().to_string(),
392
-
Some(page.0.0.clone()),
393
-
);
573
+
build_metadata.add_page(
574
+
variant_path.clone(),
575
+
file_path.to_string_lossy().to_string(),
576
+
Some(page.0.0.clone()),
577
+
);
394
578
395
-
add_sitemap_entry(
396
-
&mut sitemap_entries,
397
-
normalized_base_url,
398
-
&url,
399
-
&variant_path,
400
-
&route.sitemap_metadata(),
401
-
&options.sitemap,
402
-
);
579
+
add_sitemap_entry(
580
+
&mut sitemap_entries,
581
+
normalized_base_url,
582
+
&url,
583
+
&variant_path,
584
+
&route.sitemap_metadata(),
585
+
&options.sitemap,
586
+
);
403
587
404
-
page_count += 1;
588
+
page_count += 1;
589
+
} else {
590
+
trace!(target: "build", "Skipping unchanged variant page: {} with params {:?}", variant_path, page.0.0);
591
+
}
405
592
}
406
593
}
407
594
···
421
608
fs::create_dir_all(&route_assets_options.output_assets_dir)?;
422
609
}
423
610
424
-
if !build_pages_styles.is_empty() || !build_pages_scripts.is_empty() {
611
+
if !build_pages_styles.is_empty()
612
+
|| !build_pages_scripts.is_empty()
613
+
|| (is_incremental && should_rebundle)
614
+
{
425
615
let assets_start = Instant::now();
426
616
print_title("generating assets");
427
617
···
439
629
})
440
630
.collect::<Vec<InputItem>>();
441
631
442
-
let bundler_inputs = build_pages_scripts
632
+
let mut bundler_inputs = build_pages_scripts
443
633
.iter()
444
634
.map(|script| InputItem {
445
635
import: script.path().to_string_lossy().to_string(),
···
454
644
.chain(css_inputs.into_iter())
455
645
.collect::<Vec<InputItem>>();
456
646
647
+
// During incremental builds, merge with previous bundler inputs
648
+
// to ensure we bundle all assets, not just from rebuilt routes
649
+
if is_incremental && !build_state.bundler_inputs.is_empty() {
650
+
debug!(target: "bundling", "Merging with {} previous bundler inputs", build_state.bundler_inputs.len());
651
+
652
+
let current_imports: FxHashSet<String> = bundler_inputs
653
+
.iter()
654
+
.map(|input| input.import.clone())
655
+
.collect();
656
+
657
+
// Add previous inputs that aren't in the current set
658
+
for prev_input in &build_state.bundler_inputs {
659
+
if !current_imports.contains(prev_input) {
660
+
bundler_inputs.push(InputItem {
661
+
import: prev_input.clone(),
662
+
name: Some(
663
+
PathBuf::from(prev_input)
664
+
.file_stem()
665
+
.unwrap_or_default()
666
+
.to_string_lossy()
667
+
.to_string(),
668
+
),
669
+
});
670
+
}
671
+
}
672
+
}
673
+
457
674
debug!(
458
675
target: "bundling",
459
676
"Bundler inputs: {:?}",
···
463
680
.collect::<Vec<String>>()
464
681
);
465
682
683
+
// Store bundler inputs in build state for next incremental build
684
+
if options.incremental {
685
+
build_state.bundler_inputs = bundler_inputs
686
+
.iter()
687
+
.map(|input| input.import.clone())
688
+
.collect();
689
+
}
690
+
466
691
if !bundler_inputs.is_empty() {
467
692
let mut module_types_hashmap = FxHashMap::default();
693
+
// Fonts
468
694
module_types_hashmap.insert("woff".to_string(), ModuleType::Asset);
469
695
module_types_hashmap.insert("woff2".to_string(), ModuleType::Asset);
696
+
module_types_hashmap.insert("ttf".to_string(), ModuleType::Asset);
697
+
module_types_hashmap.insert("otf".to_string(), ModuleType::Asset);
698
+
module_types_hashmap.insert("eot".to_string(), ModuleType::Asset);
699
+
// Images
700
+
module_types_hashmap.insert("png".to_string(), ModuleType::Asset);
701
+
module_types_hashmap.insert("jpg".to_string(), ModuleType::Asset);
702
+
module_types_hashmap.insert("jpeg".to_string(), ModuleType::Asset);
703
+
module_types_hashmap.insert("gif".to_string(), ModuleType::Asset);
704
+
module_types_hashmap.insert("svg".to_string(), ModuleType::Asset);
705
+
module_types_hashmap.insert("webp".to_string(), ModuleType::Asset);
706
+
module_types_hashmap.insert("avif".to_string(), ModuleType::Asset);
707
+
module_types_hashmap.insert("ico".to_string(), ModuleType::Asset);
470
708
471
709
let mut bundler = Bundler::with_plugins(
472
710
BundlerOptions {
···
500
738
],
501
739
)?;
502
740
503
-
let _result = bundler.write().await?;
741
+
let result = bundler.write().await?;
504
742
505
-
// TODO: Add outputted chunks to build_metadata
743
+
// Track transitive dependencies from bundler output
744
+
// For each chunk, map all its modules to the routes that use the entry point
745
+
// For assets (images, fonts via CSS url()), map them to all routes using any entry point
746
+
if options.incremental {
747
+
// First, collect all routes that use any bundler entry point
748
+
let mut all_bundler_routes: FxHashSet<RouteIdentifier> = FxHashSet::default();
749
+
750
+
for output in &result.assets {
751
+
if let Output::Chunk(chunk) = output {
752
+
// Get the entry point for this chunk
753
+
if let Some(facade_module_id) = &chunk.facade_module_id {
754
+
// Try to find routes using this entry point
755
+
let entry_path = PathBuf::from(facade_module_id.as_str());
756
+
let canonical_entry = entry_path.canonicalize().ok();
757
+
758
+
// Look up routes for this entry point
759
+
let routes = canonical_entry
760
+
.as_ref()
761
+
.and_then(|p| build_state.asset_to_routes.get(p))
762
+
.cloned();
763
+
764
+
if let Some(routes) = routes {
765
+
// Collect routes for asset tracking later
766
+
all_bundler_routes.extend(routes.iter().cloned());
767
+
768
+
// Register all modules in this chunk as dependencies for those routes
769
+
let mut transitive_count = 0;
770
+
for module_id in &chunk.module_ids {
771
+
let module_path = PathBuf::from(module_id.as_str());
772
+
if let Ok(canonical_module) = module_path.canonicalize() {
773
+
// Skip the entry point itself (already tracked)
774
+
if Some(&canonical_module) != canonical_entry.as_ref() {
775
+
for route in &routes {
776
+
build_state.track_asset(
777
+
canonical_module.clone(),
778
+
route.clone(),
779
+
);
780
+
}
781
+
transitive_count += 1;
782
+
}
783
+
}
784
+
}
785
+
if transitive_count > 0 {
786
+
debug!(target: "build", "Tracked {} transitive dependencies for {}", transitive_count, facade_module_id);
787
+
}
788
+
}
789
+
}
790
+
}
791
+
}
792
+
793
+
// Now track Output::Asset items (images, fonts, etc. referenced via CSS url() or JS imports)
794
+
// These are mapped to all routes that use any bundler entry point
795
+
if !all_bundler_routes.is_empty() {
796
+
let mut asset_count = 0;
797
+
for output in &result.assets {
798
+
if let Output::Asset(asset) = output {
799
+
for original_file in &asset.original_file_names {
800
+
let asset_path = PathBuf::from(original_file);
801
+
if let Ok(canonical_asset) = asset_path.canonicalize() {
802
+
for route in &all_bundler_routes {
803
+
build_state
804
+
.track_asset(canonical_asset.clone(), route.clone());
805
+
}
806
+
asset_count += 1;
807
+
}
808
+
}
809
+
}
810
+
}
811
+
if asset_count > 0 {
812
+
debug!(target: "build", "Tracked {} bundler assets for {} routes", asset_count, all_bundler_routes.len());
813
+
}
814
+
}
815
+
}
506
816
}
507
817
508
818
info!(target: "build", "{}", format!("Assets generated in {}", format_elapsed_time(assets_start.elapsed(), §ion_format_options)).bold());
···
598
908
info!(target: "SKIP_FORMAT", "{}", "");
599
909
info!(target: "build", "{}", format!("Build completed in {}", format_elapsed_time(build_start.elapsed(), §ion_format_options)).bold());
600
910
911
+
// Save build state for next incremental build (only if incremental is enabled)
912
+
if options.incremental {
913
+
if let Err(e) = build_state.save(build_cache_dir) {
914
+
warn!(target: "build", "Failed to save build state: {}", e);
915
+
} else {
916
+
debug!(target: "build", "Build state saved to {}", build_cache_dir.join("build_state.json").display());
917
+
}
918
+
}
919
+
601
920
if let Some(clean_up_handle) = clean_up_handle {
602
921
clean_up_handle.await?;
603
922
}
···
680
999
fs::create_dir_all(parent_dir)?
681
1000
}
682
1001
1002
+
trace!(target: "build", "Writing HTML file: {}", file_path.display());
683
1003
fs::write(file_path, content)?;
684
1004
685
1005
Ok(())
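Reviewer note: condensed, the gating logic `build()` gains in this file reads as the decision function below. This is a paraphrase for review, not a helper that exists in the PR; the real full-build path additionally calls `build_state.clear()` to drop stale state.

    use rustc_hash::FxHashSet;
    use std::path::PathBuf;

    // `None` means "full build"; `Some(set)` means "rebuild only these routes".
    fn plan_rebuild(
        incremental: bool,
        changed_files: Option<&[PathBuf]>,
        build_state: &BuildState,
    ) -> Option<FxHashSet<RouteIdentifier>> {
        match changed_files {
            // Incremental only when enabled AND state from a previous build exists.
            Some(changed) if incremental && !build_state.asset_to_routes.is_empty() => {
                Some(build_state.get_affected_routes(changed))
            }
            // First run after recompilation, or incremental disabled: full build.
            _ => None,
        }
    }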
+22
-3
crates/maudit/src/lib.rs
···
54
54
// Internal modules
55
55
mod logging;
56
56
57
-
use std::env;
57
+
use std::sync::LazyLock;
58
+
use std::{env, path::PathBuf};
58
59
59
60
use build::execute_build;
60
61
use content::ContentSources;
61
62
use logging::init_logging;
62
63
use route::FullRoute;
63
64
65
+
static IS_DEV: LazyLock<bool> = LazyLock::new(|| {
66
+
std::env::var("MAUDIT_DEV")
67
+
.map(|v| v == "true")
68
+
.unwrap_or(false)
69
+
});
70
+
64
71
/// Returns whether Maudit is running in development mode (through `maudit dev`).
65
72
///
66
73
/// This can be useful to conditionally enable features or logging that should only be active during development.
67
74
/// Oftentimes, this is used to disable some expensive operations that would slow down build times during development.
68
75
pub fn is_dev() -> bool {
69
-
env::var("MAUDIT_DEV").map(|v| v == "true").unwrap_or(false)
76
+
*IS_DEV
70
77
}
71
78
72
79
#[macro_export]
···
212
219
.enable_all()
213
220
.build()?;
214
221
215
-
execute_build(routes, &mut content_sources, &options, &async_runtime)
222
+
// Check for changed files from environment variable (set by CLI in dev mode)
223
+
let changed_files = env::var("MAUDIT_CHANGED_FILES")
224
+
.ok()
225
+
.and_then(|s| serde_json::from_str::<Vec<String>>(&s).ok())
226
+
.map(|paths| paths.into_iter().map(PathBuf::from).collect::<Vec<_>>());
227
+
228
+
execute_build(
229
+
routes,
230
+
&mut content_sources,
231
+
&options,
232
+
changed_files.as_deref(),
233
+
&async_runtime,
234
+
)
216
235
}
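Reviewer note: the CLI-to-binary handshake added here is a JSON array of paths in `MAUDIT_CHANGED_FILES`; roughly as sketched below (illustrative path, mirroring `rerun_binary` in maudit-cli further down).

    // CLI side: serialize the changed paths before spawning the binary.
    let changed = vec![std::path::PathBuf::from("src/assets/logo.png")];
    let json = serde_json::to_string(&changed).unwrap(); // ["src/assets/logo.png"]

    // Binary side (this file): parse it back, tolerating absence or malformed JSON.
    let parsed: Option<Vec<std::path::PathBuf>> = std::env::var("MAUDIT_CHANGED_FILES")
        .ok()
        .and_then(|s| serde_json::from_str::<Vec<String>>(&s).ok())
        .map(|paths| paths.into_iter().map(std::path::PathBuf::from).collect());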
+6
-2
crates/maudit/src/logging.rs
···
29
29
30
30
let _ = Builder::from_env(logging_env)
31
31
.format(|buf, record| {
32
-
if std::env::args().any(|arg| arg == "--quiet") || std::env::var("MAUDIT_QUIET").is_ok()
33
-
{
32
+
if std::env::args().any(|arg| arg == "--quiet") {
33
+
return Ok(());
34
+
}
35
+
36
+
// In quiet mode, only show build target logs (for debugging incremental builds)
37
+
if std::env::var("MAUDIT_QUIET").is_ok() && record.target() != "build" {
34
38
return Ok(());
35
39
}
36
40
+5
crates/maudit-cli/Cargo.toml
···
28
28
ureq = "3.1.4"
29
29
tar = "0.4.44"
30
30
toml_edit = "0.24.0"
31
+
toml = "0.8"
31
32
local-ip-address = "0.6.9"
32
33
flate2 = "1.1.8"
33
34
quanta = "0.12.6"
34
35
serde_json = "1.0"
35
36
tokio-util = "0.7"
36
37
cargo_metadata = "0.23.1"
38
+
39
+
[dev-dependencies]
40
+
tempfile = "3.24.0"
41
+
tokio = { version = "1", features = ["macros", "rt-multi-thread", "test-util"] }
+499
-142
crates/maudit-cli/src/dev/build.rs
···
1
1
use cargo_metadata::Message;
2
2
use quanta::Instant;
3
-
use server::{StatusType, WebSocketMessage, update_status};
3
+
use std::path::PathBuf;
4
4
use std::sync::Arc;
5
5
use tokio::process::Command;
6
-
use tokio::sync::broadcast;
6
+
use tokio::sync::RwLock;
7
7
use tokio_util::sync::CancellationToken;
8
-
use tracing::{debug, error, info};
8
+
use tracing::{debug, error, info, warn};
9
9
10
10
use crate::{
11
-
dev::server,
11
+
dev::server::{StatusManager, StatusType},
12
12
logging::{FormatElapsedTimeOptions, format_elapsed_time},
13
13
};
14
14
15
+
use super::dep_tracker::{DependencyTracker, find_target_dir};
16
+
17
+
/// Internal state shared across all BuildManager handles.
18
+
struct BuildManagerState {
19
+
current_cancel: RwLock<Option<CancellationToken>>,
20
+
build_semaphore: tokio::sync::Semaphore,
21
+
status_manager: StatusManager,
22
+
dep_tracker: RwLock<Option<DependencyTracker>>,
23
+
binary_path: RwLock<Option<PathBuf>>,
24
+
// Cached values computed once at startup
25
+
target_dir: Option<PathBuf>,
26
+
binary_name: Option<String>,
27
+
}
28
+
29
+
/// Manages cargo build processes with cancellation support.
30
+
/// Cheap to clone - all clones share the same underlying state.
15
31
#[derive(Clone)]
16
32
pub struct BuildManager {
17
-
current_cancel: Arc<tokio::sync::RwLock<Option<CancellationToken>>>,
18
-
build_semaphore: Arc<tokio::sync::Semaphore>,
19
-
websocket_tx: broadcast::Sender<WebSocketMessage>,
20
-
current_status: Arc<tokio::sync::RwLock<Option<server::PersistentStatus>>>,
33
+
state: Arc<BuildManagerState>,
21
34
}
22
35
23
36
impl BuildManager {
24
-
pub fn new(websocket_tx: broadcast::Sender<WebSocketMessage>) -> Self {
37
+
pub fn new(status_manager: StatusManager) -> Self {
38
+
// Try to determine target directory and binary name at startup
39
+
let target_dir = find_target_dir().ok();
40
+
let binary_name = Self::get_binary_name_from_cargo_toml().ok();
41
+
42
+
if let Some(ref name) = binary_name {
43
+
debug!(name: "build", "Detected binary name at startup: {}", name);
44
+
}
45
+
if let Some(ref dir) = target_dir {
46
+
debug!(name: "build", "Using target directory: {:?}", dir);
47
+
}
48
+
25
49
Self {
26
-
current_cancel: Arc::new(tokio::sync::RwLock::new(None)),
27
-
build_semaphore: Arc::new(tokio::sync::Semaphore::new(1)), // Only one build at a time
28
-
websocket_tx,
29
-
current_status: Arc::new(tokio::sync::RwLock::new(None)),
50
+
state: Arc::new(BuildManagerState {
51
+
current_cancel: RwLock::new(None),
52
+
build_semaphore: tokio::sync::Semaphore::new(1),
53
+
status_manager,
54
+
dep_tracker: RwLock::new(None),
55
+
binary_path: RwLock::new(None),
56
+
target_dir,
57
+
binary_name,
58
+
}),
30
59
}
31
60
}
32
61
33
-
/// Get a reference to the current status for use with the web server
34
-
pub fn current_status(&self) -> Arc<tokio::sync::RwLock<Option<server::PersistentStatus>>> {
35
-
self.current_status.clone()
62
+
/// Check if the given paths require recompilation based on dependency tracking.
63
+
/// Returns true if recompilation is needed, false if we can just rerun the binary.
64
+
pub async fn needs_recompile(&self, changed_paths: &[PathBuf]) -> bool {
65
+
let dep_tracker = self.state.dep_tracker.read().await;
66
+
67
+
if let Some(tracker) = dep_tracker.as_ref()
68
+
&& tracker.has_dependencies()
69
+
{
70
+
let needs_recompile = tracker.needs_recompile(changed_paths);
71
+
if !needs_recompile {
72
+
debug!(name: "build", "Changed files are not dependencies, rerun binary without recompile");
73
+
}
74
+
return needs_recompile;
75
+
}
76
+
77
+
// If we don't have a dependency tracker yet, always recompile
78
+
true
79
+
}
80
+
81
+
/// Rerun the binary without recompiling.
82
+
pub async fn rerun_binary(
83
+
&self,
84
+
changed_paths: &[PathBuf],
85
+
) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> {
86
+
// Get binary path with limited lock scope
87
+
let path = {
88
+
let guard = self.state.binary_path.read().await;
89
+
match guard.as_ref() {
90
+
Some(p) if p.exists() => p.clone(),
91
+
Some(p) => {
92
+
warn!(name: "build", "Binary at {:?} no longer exists, falling back to full rebuild", p);
93
+
return self.start_build().await;
94
+
}
95
+
None => {
96
+
warn!(name: "build", "No binary path available, falling back to full rebuild");
97
+
return self.start_build().await;
98
+
}
99
+
}
100
+
};
101
+
102
+
// Log that we're doing an incremental build
103
+
debug!(name: "build", "Incremental build: {} files changed", changed_paths.len());
104
+
debug!(name: "build", "Changed files: {:?}", changed_paths);
105
+
debug!(name: "build", "Rerunning binary without recompilation...");
106
+
107
+
self.state
108
+
.status_manager
109
+
.update(StatusType::Info, "Rerunning...")
110
+
.await;
111
+
112
+
let build_start_time = Instant::now();
113
+
114
+
// Serialize changed paths to JSON for the binary
115
+
let changed_files_json = serde_json::to_string(changed_paths)?;
116
+
117
+
let child = Command::new(&path)
118
+
.envs([
119
+
("MAUDIT_DEV", "true"),
120
+
("MAUDIT_QUIET", "true"),
121
+
("MAUDIT_CHANGED_FILES", changed_files_json.as_str()),
122
+
])
123
+
.stdout(std::process::Stdio::piped())
124
+
.stderr(std::process::Stdio::piped())
125
+
.spawn()?;
126
+
127
+
let output = child.wait_with_output().await?;
128
+
129
+
let duration = build_start_time.elapsed();
130
+
let formatted_elapsed_time =
131
+
format_elapsed_time(duration, &FormatElapsedTimeOptions::default_dev());
132
+
133
+
if output.status.success() {
134
+
if std::env::var("MAUDIT_SHOW_BINARY_OUTPUT").is_ok() {
135
+
let stdout = String::from_utf8_lossy(&output.stdout);
136
+
let stderr = String::from_utf8_lossy(&output.stderr);
137
+
for line in stdout.lines().chain(stderr.lines()) {
138
+
if !line.trim().is_empty() {
139
+
info!(name: "build", "{}", line);
140
+
}
141
+
}
142
+
}
143
+
info!(name: "build", "Binary rerun finished {}", formatted_elapsed_time);
144
+
self.state
145
+
.status_manager
146
+
.update(StatusType::Success, "Binary rerun finished successfully")
147
+
.await;
148
+
Ok(true)
149
+
} else {
150
+
let stderr = String::from_utf8_lossy(&output.stderr).to_string();
151
+
let stdout = String::from_utf8_lossy(&output.stdout).to_string();
152
+
error!(name: "build", "Binary rerun failed {}\nstdout: {}\nstderr: {}",
153
+
formatted_elapsed_time, stdout, stderr);
154
+
self.state
155
+
.status_manager
156
+
.update(
157
+
StatusType::Error,
158
+
&format!("Binary rerun failed:\n{}\n{}", stdout, stderr),
159
+
)
160
+
.await;
161
+
Ok(false)
162
+
}
36
163
}
37
164
38
-
/// Do initial build that can be cancelled (but isn't stored as current build)
39
-
pub async fn do_initial_build(&self) -> Result<bool, Box<dyn std::error::Error>> {
165
+
/// Do initial build that can be cancelled.
166
+
pub async fn do_initial_build(&self) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> {
40
167
self.internal_build(true).await
41
168
}
42
169
43
-
/// Start a new build, cancelling any previous one
44
-
pub async fn start_build(&self) -> Result<bool, Box<dyn std::error::Error>> {
170
+
/// Start a new build, cancelling any previous one.
171
+
pub async fn start_build(&self) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> {
45
172
self.internal_build(false).await
46
173
}
47
174
48
-
/// Internal build method that handles both initial and regular builds
49
-
async fn internal_build(&self, is_initial: bool) -> Result<bool, Box<dyn std::error::Error>> {
175
+
async fn internal_build(
176
+
&self,
177
+
is_initial: bool,
178
+
) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> {
50
179
// Cancel any existing build immediately
51
180
let cancel = CancellationToken::new();
52
181
{
53
-
let mut current_cancel = self.current_cancel.write().await;
182
+
let mut current_cancel = self.state.current_cancel.write().await;
54
183
if let Some(old_cancel) = current_cancel.replace(cancel.clone()) {
55
184
old_cancel.cancel();
56
185
}
57
186
}
58
187
59
188
// Acquire semaphore to ensure only one build runs at a time
60
-
// This prevents resource conflicts if cancellation fails
61
-
let _ = self.build_semaphore.acquire().await?;
189
+
let _permit = self.state.build_semaphore.acquire().await?;
62
190
63
-
// Notify that build is starting
64
-
update_status(
65
-
&self.websocket_tx,
66
-
self.current_status.clone(),
67
-
StatusType::Info,
68
-
"Building...",
69
-
)
70
-
.await;
191
+
self.state
192
+
.status_manager
193
+
.update(StatusType::Info, "Building...")
194
+
.await;
71
195
72
196
let mut child = Command::new("cargo")
73
197
.args([
···
85
209
.stderr(std::process::Stdio::piped())
86
210
.spawn()?;
87
211
88
-
// Take the stderr stream for manual handling
89
-
let mut stdout = child.stdout.take().unwrap();
90
-
let mut stderr = child.stderr.take().unwrap();
212
+
// Take stdout/stderr before select! so we can use them in the completion branch
213
+
// while still being able to kill the child in the cancellation branch
214
+
let stdout = child.stdout.take().unwrap();
215
+
let stderr = child.stderr.take().unwrap();
91
216
92
-
let websocket_tx = self.websocket_tx.clone();
93
-
let current_status = self.current_status.clone();
94
217
let build_start_time = Instant::now();
95
218
96
-
// Create a channel to get the build result back
97
-
let (result_tx, mut result_rx) = tokio::sync::mpsc::channel::<bool>(1);
219
+
tokio::select! {
220
+
_ = cancel.cancelled() => {
221
+
debug!(name: "build", "Build cancelled");
222
+
let _ = child.kill().await;
223
+
self.state.status_manager.update(StatusType::Info, "Build cancelled").await;
224
+
Ok(false)
225
+
}
226
+
result = self.run_build_to_completion(&mut child, stdout, stderr, is_initial, build_start_time) => {
227
+
result
228
+
}
229
+
}
230
+
}
98
231
99
-
// Spawn watcher task to monitor the child process
100
-
tokio::spawn(async move {
101
-
let output_future = async {
102
-
// Read stdout concurrently with waiting for process to finish
103
-
let stdout_task = tokio::spawn(async move {
104
-
let mut out = Vec::new();
105
-
tokio::io::copy(&mut stdout, &mut out).await.unwrap_or(0);
232
+
/// Run the cargo build process to completion and handle the output.
233
+
async fn run_build_to_completion(
234
+
&self,
235
+
child: &mut tokio::process::Child,
236
+
mut stdout: tokio::process::ChildStdout,
237
+
mut stderr: tokio::process::ChildStderr,
238
+
is_initial: bool,
239
+
build_start_time: Instant,
240
+
) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> {
241
+
// Read stdout and stderr concurrently
242
+
let stdout_task = tokio::spawn(async move {
243
+
let mut out = Vec::new();
244
+
tokio::io::copy(&mut stdout, &mut out).await.unwrap_or(0);
106
245
107
-
let mut rendered_messages: Vec<String> = Vec::new();
246
+
let mut rendered_messages: Vec<String> = Vec::new();
108
247
109
-
// Ideally we'd stream things as they come, but I can't figure it out
110
-
for message in cargo_metadata::Message::parse_stream(
111
-
String::from_utf8_lossy(&out).to_string().as_bytes(),
112
-
) {
113
-
match message {
114
-
Err(e) => {
115
-
error!(name: "build", "Failed to parse cargo message: {}", e);
116
-
continue;
117
-
}
118
-
Ok(message) => {
119
-
match message {
120
-
// Compiler wants to tell us something
121
-
Message::CompilerMessage(msg) => {
122
-
// TODO: For now, just send through the rendered messages, but in the future let's send
123
-
// structured messages to the frontend so we can do better formatting
124
-
if let Some(rendered) = &msg.message.rendered {
125
-
info!("{}", rendered);
126
-
rendered_messages.push(rendered.to_string());
127
-
}
128
-
}
129
-
// Random text came in, just log it
130
-
Message::TextLine(msg) => {
131
-
info!("{}", msg);
132
-
}
133
-
_ => {}
134
-
}
135
-
}
248
+
for message in cargo_metadata::Message::parse_stream(
249
+
String::from_utf8_lossy(&out).to_string().as_bytes(),
250
+
) {
251
+
match message {
252
+
Err(e) => {
253
+
error!(name: "build", "Failed to parse cargo message: {}", e);
254
+
}
255
+
Ok(Message::CompilerMessage(msg)) => {
256
+
if let Some(rendered) = &msg.message.rendered {
257
+
info!("{}", rendered);
258
+
rendered_messages.push(rendered.to_string());
136
259
}
137
260
}
261
+
Ok(Message::TextLine(msg)) => {
262
+
info!("{}", msg);
263
+
}
264
+
_ => {}
265
+
}
266
+
}
138
267
139
-
(out, rendered_messages)
140
-
});
268
+
(out, rendered_messages)
269
+
});
141
270
142
-
let stderr_task = tokio::spawn(async move {
143
-
let mut err = Vec::new();
144
-
tokio::io::copy(&mut stderr, &mut err).await.unwrap_or(0);
271
+
let stderr_task = tokio::spawn(async move {
272
+
let mut err = Vec::new();
273
+
tokio::io::copy(&mut stderr, &mut err).await.unwrap_or(0);
274
+
err
275
+
});
145
276
146
-
err
147
-
});
277
+
let status = child.wait().await?;
278
+
let (_stdout_bytes, rendered_messages) = stdout_task.await.unwrap_or_default();
279
+
let stderr_bytes = stderr_task.await.unwrap_or_default();
148
280
149
-
let status = child.wait().await?;
150
-
let stdout_data = stdout_task.await.unwrap_or_default();
151
-
let stderr_data = stderr_task.await.unwrap_or_default();
281
+
let duration = build_start_time.elapsed();
282
+
let formatted_elapsed_time =
283
+
format_elapsed_time(duration, &FormatElapsedTimeOptions::default_dev());
284
+
285
+
if status.success() {
286
+
let build_type = if is_initial {
287
+
"Initial build"
288
+
} else {
289
+
"Rebuild"
290
+
};
291
+
info!(name: "build", "{} finished {}", build_type, formatted_elapsed_time);
292
+
self.state
293
+
.status_manager
294
+
.update(StatusType::Success, "Build finished successfully")
295
+
.await;
296
+
297
+
self.update_dependency_tracker().await;
298
+
299
+
Ok(true)
300
+
} else {
301
+
let stderr_str = String::from_utf8_lossy(&stderr_bytes).to_string();
302
+
// Raw stderr sometimes has something to say when cargo fails, even though the error messages are in stdout
303
+
println!("{}", stderr_str);
152
304
153
-
Ok::<(std::process::Output, Vec<String>), Box<dyn std::error::Error + Send + Sync>>(
154
-
(
155
-
std::process::Output {
156
-
status,
157
-
stdout: stdout_data.0,
158
-
stderr: stderr_data,
159
-
},
160
-
stdout_data.1,
161
-
),
162
-
)
305
+
let build_type = if is_initial {
306
+
"Initial build"
307
+
} else {
308
+
"Rebuild"
163
309
};
310
+
error!(name: "build", "{} failed with errors {}", build_type, formatted_elapsed_time);
164
311
165
-
tokio::select! {
166
-
_ = cancel.cancelled() => {
167
-
debug!(name: "build", "Build cancelled");
168
-
let _ = child.kill().await;
169
-
update_status(&websocket_tx, current_status, StatusType::Info, "Build cancelled").await;
170
-
let _ = result_tx.send(false).await; // Build failed due to cancellation
171
-
}
172
-
res = output_future => {
173
-
let duration = build_start_time.elapsed();
174
-
let formatted_elapsed_time = format_elapsed_time(
175
-
duration,
176
-
&FormatElapsedTimeOptions::default_dev(),
177
-
);
312
+
if is_initial {
313
+
error!(name: "build", "Initial build needs to succeed before we can start the dev server");
314
+
self.state
315
+
.status_manager
316
+
.update(
317
+
StatusType::Error,
318
+
"Initial build failed - fix errors and save to retry",
319
+
)
320
+
.await;
321
+
} else {
322
+
self.state
323
+
.status_manager
324
+
.update(StatusType::Error, &rendered_messages.join("\n"))
325
+
.await;
326
+
}
178
327
179
-
let success = match res {
180
-
Ok(output) => {
181
-
let (output, rendered_messages) = output;
182
-
if output.status.success() {
183
-
let build_type = if is_initial { "Initial build" } else { "Rebuild" };
184
-
info!(name: "build", "{} finished {}", build_type, formatted_elapsed_time);
185
-
update_status(&websocket_tx, current_status, StatusType::Success, "Build finished successfully").await;
186
-
true
187
-
} else {
188
-
let stderr = String::from_utf8_lossy(&output.stderr).to_string();
189
-
println!("{}", stderr); // Raw stderr sometimes has something to say whenever cargo fails, even if the errors messages are actually in stdout
190
-
let build_type = if is_initial { "Initial build" } else { "Rebuild" };
191
-
error!(name: "build", "{} failed with errors {}", build_type, formatted_elapsed_time);
192
-
if is_initial {
193
-
error!(name: "build", "Initial build needs to succeed before we can start the dev server");
194
-
update_status(&websocket_tx, current_status, StatusType::Error, "Initial build failed - fix errors and save to retry").await;
195
-
} else {
196
-
update_status(&websocket_tx, current_status, StatusType::Error, &rendered_messages.join("\n")).await;
197
-
}
198
-
false
199
-
}
200
-
}
201
-
Err(e) => {
202
-
error!(name: "build", "Failed to wait for build: {}", e);
203
-
update_status(&websocket_tx, current_status, StatusType::Error, &format!("Failed to wait for build: {}", e)).await;
204
-
false
205
-
}
206
-
};
207
-
let _ = result_tx.send(success).await;
208
-
}
328
+
Ok(false)
329
+
}
330
+
}
331
+
332
+
/// Update the dependency tracker after a successful build.
333
+
async fn update_dependency_tracker(&self) {
334
+
let Some(ref name) = self.state.binary_name else {
335
+
debug!(name: "build", "No binary name available, skipping dependency tracker update");
336
+
return;
337
+
};
338
+
339
+
let Some(ref target) = self.state.target_dir else {
340
+
debug!(name: "build", "No target directory available, skipping dependency tracker update");
341
+
return;
342
+
};
343
+
344
+
// Update binary path
345
+
let bin_path = target.join(name);
346
+
if bin_path.exists() {
347
+
*self.state.binary_path.write().await = Some(bin_path.clone());
348
+
debug!(name: "build", "Binary path set to: {:?}", bin_path);
349
+
} else {
350
+
debug!(name: "build", "Binary not found at expected path: {:?}", bin_path);
351
+
}
352
+
353
+
// Reload the dependency tracker from the .d file
354
+
match DependencyTracker::load_from_binary_name(name) {
355
+
Ok(tracker) => {
356
+
debug!(name: "build", "Loaded {} dependencies for tracking", tracker.get_dependencies().len());
357
+
*self.state.dep_tracker.write().await = Some(tracker);
209
358
}
210
-
});
359
+
Err(e) => {
360
+
debug!(name: "build", "Could not load dependency tracker: {}", e);
361
+
}
362
+
}
363
+
}
211
364
212
-
// Wait for the build result
213
-
let success = result_rx.recv().await.unwrap_or(false);
214
-
Ok(success)
365
+
fn get_binary_name_from_cargo_toml() -> Result<String, Box<dyn std::error::Error + Send + Sync>>
366
+
{
367
+
let cargo_toml_path = PathBuf::from("Cargo.toml");
368
+
if !cargo_toml_path.exists() {
369
+
return Err("Cargo.toml not found in current directory".into());
370
+
}
371
+
372
+
let cargo_toml_content = std::fs::read_to_string(&cargo_toml_path)?;
373
+
let cargo_toml: toml::Value = toml::from_str(&cargo_toml_content)?;
374
+
375
+
if let Some(package_name) = cargo_toml
376
+
.get("package")
377
+
.and_then(|p| p.get("name"))
378
+
.and_then(|n| n.as_str())
379
+
{
380
+
// Check if there's a [[bin]] section with a different name
381
+
if let Some(bins) = cargo_toml.get("bin").and_then(|b| b.as_array())
382
+
&& let Some(first_bin) = bins.first()
383
+
&& let Some(bin_name) = first_bin.get("name").and_then(|n| n.as_str())
384
+
{
385
+
return Ok(bin_name.to_string());
386
+
}
387
+
388
+
return Ok(package_name.to_string());
389
+
}
390
+
391
+
Err("Could not find package name in Cargo.toml".into())
392
+
}
393
+
394
+
/// Set the dependency tracker directly (for testing).
395
+
#[cfg(test)]
396
+
pub(crate) async fn set_dep_tracker(&self, tracker: Option<DependencyTracker>) {
397
+
*self.state.dep_tracker.write().await = tracker;
398
+
}
399
+
400
+
/// Set the binary path directly (for testing).
401
+
#[cfg(test)]
402
+
pub(crate) async fn set_binary_path(&self, path: Option<PathBuf>) {
403
+
*self.state.binary_path.write().await = path;
404
+
}
405
+
406
+
/// Get the current binary path (for testing).
407
+
#[cfg(test)]
408
+
pub(crate) async fn get_binary_path(&self) -> Option<PathBuf> {
409
+
self.state.binary_path.read().await.clone()
410
+
}
411
+
412
+
/// Create a BuildManager with custom target_dir and binary_name (for testing).
413
+
#[cfg(test)]
414
+
pub(crate) fn new_with_config(
415
+
status_manager: StatusManager,
416
+
target_dir: Option<PathBuf>,
417
+
binary_name: Option<String>,
418
+
) -> Self {
419
+
Self {
420
+
state: Arc::new(BuildManagerState {
421
+
current_cancel: RwLock::new(None),
422
+
build_semaphore: tokio::sync::Semaphore::new(1),
423
+
status_manager,
424
+
dep_tracker: RwLock::new(None),
425
+
binary_path: RwLock::new(None),
426
+
target_dir,
427
+
binary_name,
428
+
}),
429
+
}
430
+
}
431
+
}
432
+
433
+
#[cfg(test)]
434
+
mod tests {
435
+
use super::*;
436
+
use std::collections::HashMap;
437
+
use std::time::SystemTime;
438
+
use tempfile::TempDir;
439
+
440
+
fn create_test_manager() -> BuildManager {
441
+
let status_manager = StatusManager::new();
442
+
BuildManager::new_with_config(status_manager, None, None)
443
+
}
444
+
445
+
fn create_test_manager_with_config(
446
+
target_dir: Option<PathBuf>,
447
+
binary_name: Option<String>,
448
+
) -> BuildManager {
449
+
let status_manager = StatusManager::new();
450
+
BuildManager::new_with_config(status_manager, target_dir, binary_name)
451
+
}
452
+
453
+
#[tokio::test]
454
+
async fn test_build_manager_clone_shares_state() {
455
+
let manager1 = create_test_manager();
456
+
let manager2 = manager1.clone();
457
+
458
+
// Set binary path via one clone
459
+
let test_path = PathBuf::from("/test/path");
460
+
manager1.set_binary_path(Some(test_path.clone())).await;
461
+
462
+
// Should be visible via the other clone
463
+
assert_eq!(manager2.get_binary_path().await, Some(test_path));
464
+
}
465
+
466
+
#[tokio::test]
467
+
async fn test_needs_recompile_without_tracker() {
468
+
let manager = create_test_manager();
469
+
470
+
// Without a dependency tracker, should always return true
471
+
let changed = vec![PathBuf::from("src/main.rs")];
472
+
assert!(manager.needs_recompile(&changed).await);
473
+
}
474
+
475
+
#[tokio::test]
476
+
async fn test_needs_recompile_with_empty_tracker() {
477
+
let manager = create_test_manager();
478
+
479
+
// Set an empty tracker (no dependencies)
480
+
let tracker = DependencyTracker::new();
481
+
manager.set_dep_tracker(Some(tracker)).await;
482
+
483
+
// Empty tracker has no dependencies, so has_dependencies() returns false
484
+
// This means we should still return true (recompile needed)
485
+
let changed = vec![PathBuf::from("src/main.rs")];
486
+
assert!(manager.needs_recompile(&changed).await);
487
+
}
488
+
489
+
#[tokio::test]
490
+
async fn test_needs_recompile_with_matching_dependency() {
491
+
let manager = create_test_manager();
492
+
493
+
// Create a tracker with some dependencies
494
+
let temp_dir = TempDir::new().unwrap();
495
+
let dep_file = temp_dir.path().join("src/lib.rs");
496
+
std::fs::create_dir_all(dep_file.parent().unwrap()).unwrap();
497
+
std::fs::write(&dep_file, "// test").unwrap();
498
+
499
+
// Get canonical path and current mod time
500
+
let canonical_path = dep_file.canonicalize().unwrap();
501
+
let old_time = SystemTime::UNIX_EPOCH; // Very old time
502
+
503
+
let mut tracker = DependencyTracker::new();
504
+
tracker.dependencies = HashMap::from([(canonical_path, old_time)]);
505
+
506
+
manager.set_dep_tracker(Some(tracker)).await;
507
+
508
+
// Changed file IS a dependency and is newer - should need recompile
509
+
let changed = vec![dep_file];
510
+
assert!(manager.needs_recompile(&changed).await);
511
+
}
512
+
513
+
#[tokio::test]
514
+
async fn test_needs_recompile_with_non_matching_file() {
515
+
let manager = create_test_manager();
516
+
517
+
// Create a tracker with some dependencies
518
+
let temp_dir = TempDir::new().unwrap();
519
+
let dep_file = temp_dir.path().join("src/lib.rs");
520
+
std::fs::create_dir_all(dep_file.parent().unwrap()).unwrap();
521
+
std::fs::write(&dep_file, "// test").unwrap();
522
+
523
+
let canonical_path = dep_file.canonicalize().unwrap();
524
+
let mod_time = std::fs::metadata(&dep_file).unwrap().modified().unwrap();
525
+
526
+
let mut tracker = DependencyTracker::new();
527
+
tracker.dependencies = HashMap::from([(canonical_path, mod_time)]);
528
+
529
+
manager.set_dep_tracker(Some(tracker)).await;
530
+
531
+
// Changed file is NOT a dependency (different file)
532
+
let other_file = temp_dir.path().join("assets/style.css");
533
+
std::fs::create_dir_all(other_file.parent().unwrap()).unwrap();
534
+
std::fs::write(&other_file, "/* css */").unwrap();
535
+
536
+
let changed = vec![other_file];
537
+
assert!(!manager.needs_recompile(&changed).await);
538
+
}
539
+
540
+
#[tokio::test]
541
+
async fn test_update_dependency_tracker_with_config_missing_binary() {
542
+
let temp_dir = TempDir::new().unwrap();
543
+
let manager = create_test_manager_with_config(
544
+
Some(temp_dir.path().to_path_buf()),
545
+
Some("nonexistent-binary".to_string()),
546
+
);
547
+
548
+
// Binary doesn't exist, so binary_path should not be set
549
+
manager.update_dependency_tracker().await;
550
+
551
+
assert!(manager.get_binary_path().await.is_none());
552
+
}
553
+
554
+
#[tokio::test]
555
+
async fn test_update_dependency_tracker_with_existing_binary() {
556
+
let temp_dir = TempDir::new().unwrap();
557
+
let binary_name = "test-binary";
558
+
let binary_path = temp_dir.path().join(binary_name);
559
+
560
+
// Create a fake binary file
561
+
std::fs::write(&binary_path, "fake binary").unwrap();
562
+
563
+
let manager = create_test_manager_with_config(
564
+
Some(temp_dir.path().to_path_buf()),
565
+
Some(binary_name.to_string()),
566
+
);
567
+
568
+
manager.update_dependency_tracker().await;
569
+
570
+
// Binary path should be set
571
+
assert_eq!(manager.get_binary_path().await, Some(binary_path));
215
572
}
216
573
}
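The env contract in rerun_binary above is the whole interface between the CLI and the site binary during an incremental rerun: MAUDIT_DEV and MAUDIT_QUIET are plain flags, and MAUDIT_CHANGED_FILES carries the changed paths as a JSON array. How maudit itself consumes these variables is outside this diff; a minimal consumer sketch, assuming serde_json is available on the binary side and using a hypothetical helper name, could look like:

use std::path::PathBuf;

/// Illustrative only: read the contract rerun_binary establishes.
/// Returns None when the process was not launched by the rerun path.
fn changed_files_from_env() -> Option<Vec<PathBuf>> {
    // MAUDIT_CHANGED_FILES holds a JSON array of paths,
    // e.g. ["src/assets/styles.css"].
    let raw = std::env::var("MAUDIT_CHANGED_FILES").ok()?;
    serde_json::from_str(&raw).ok()
}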
+377
crates/maudit-cli/src/dev/dep_tracker.rs
···
1
+
use std::collections::HashMap;
2
+
use std::fs;
3
+
use std::path::{Path, PathBuf};
4
+
use std::time::SystemTime;
5
+
use tracing::{debug, warn};
6
+
7
+
/// Tracks dependencies from .d files to determine if recompilation is needed
8
+
#[derive(Debug, Clone)]
9
+
pub struct DependencyTracker {
10
+
/// Path to the .d file
11
+
pub(crate) d_file_path: Option<PathBuf>,
12
+
/// Map of dependency paths to their last modification times
13
+
pub(crate) dependencies: HashMap<PathBuf, SystemTime>,
14
+
}
15
+
16
+
/// Find the target directory using multiple strategies
17
+
///
18
+
/// This function tries multiple approaches to locate the target directory:
19
+
/// 1. CARGO_TARGET_DIR / CARGO_BUILD_TARGET_DIR environment variables
20
+
/// 2. Local ./target/debug directory
21
+
/// 3. Workspace root target/debug directory (walking up to find [workspace])
22
+
/// 4. Fallback to relative "target/debug" path
23
+
pub fn find_target_dir() -> Result<PathBuf, std::io::Error> {
24
+
// 1. Check CARGO_TARGET_DIR and CARGO_BUILD_TARGET_DIR environment variables
25
+
for env_var in ["CARGO_TARGET_DIR", "CARGO_BUILD_TARGET_DIR"] {
26
+
if let Ok(target_dir) = std::env::var(env_var) {
27
+
// Try with /debug appended
28
+
let path = PathBuf::from(&target_dir).join("debug");
29
+
if path.exists() {
30
+
debug!("Using target directory from {}: {:?}", env_var, path);
31
+
return Ok(path);
32
+
}
33
+
// If the env var points directly to debug or release
34
+
let path_no_debug = PathBuf::from(&target_dir);
35
+
if path_no_debug.exists()
36
+
&& (path_no_debug.ends_with("debug") || path_no_debug.ends_with("release"))
37
+
{
38
+
debug!(
39
+
"Using target directory from {} (direct): {:?}",
40
+
env_var, path_no_debug
41
+
);
42
+
return Ok(path_no_debug);
43
+
}
44
+
}
45
+
}
46
+
47
+
// 2. Look for target directory in current directory
48
+
let local_target = PathBuf::from("target/debug");
49
+
if local_target.exists() {
50
+
debug!("Using local target directory: {:?}", local_target);
51
+
return Ok(local_target);
52
+
}
53
+
54
+
// 3. Try to find workspace root by looking for Cargo.toml with [workspace]
55
+
let mut current = std::env::current_dir()?;
56
+
loop {
57
+
let cargo_toml = current.join("Cargo.toml");
58
+
if cargo_toml.exists()
59
+
&& let Ok(content) = fs::read_to_string(&cargo_toml)
60
+
&& content.contains("[workspace]")
61
+
{
62
+
let workspace_target = current.join("target").join("debug");
63
+
if workspace_target.exists() {
64
+
debug!("Using workspace target directory: {:?}", workspace_target);
65
+
return Ok(workspace_target);
66
+
}
67
+
}
68
+
69
+
// Move up to parent directory
70
+
if !current.pop() {
71
+
break;
72
+
}
73
+
}
74
+
75
+
// 4. Final fallback to relative path
76
+
debug!("Falling back to relative target/debug path");
77
+
Ok(PathBuf::from("target/debug"))
78
+
}
79
+
80
+
impl DependencyTracker {
81
+
#[allow(dead_code)]
82
+
pub fn new() -> Self {
83
+
Self {
84
+
d_file_path: None,
85
+
dependencies: HashMap::new(),
86
+
}
87
+
}
88
+
89
+
/// Locate and load the .d file for the current binary
90
+
/// The .d file is typically at target/debug/<binary-name>.d
91
+
pub fn load_from_binary_name(binary_name: &str) -> Result<Self, std::io::Error> {
92
+
let target_dir = find_target_dir()?;
93
+
let d_file_path = target_dir.join(format!("{}.d", binary_name));
94
+
95
+
if !d_file_path.exists() {
96
+
return Err(std::io::Error::new(
97
+
std::io::ErrorKind::NotFound,
98
+
format!(".d file not found at {:?}", d_file_path),
99
+
));
100
+
}
101
+
102
+
let mut tracker = Self {
103
+
d_file_path: Some(d_file_path.clone()),
104
+
dependencies: HashMap::new(),
105
+
};
106
+
107
+
tracker.reload_dependencies()?;
108
+
Ok(tracker)
109
+
}
110
+
111
+
/// Parse space-separated paths from a string, handling escaped spaces
112
+
/// In Make-style .d files, spaces in filenames are escaped with backslashes
113
+
fn parse_paths(input: &str) -> Vec<PathBuf> {
114
+
let mut paths = Vec::new();
115
+
let mut current_path = String::new();
116
+
let mut chars = input.chars().peekable();
117
+
118
+
while let Some(ch) = chars.next() {
119
+
match ch {
120
+
'\\' => {
121
+
// Check if this is escaping a space or newline
122
+
if let Some(&next_ch) = chars.peek() {
123
+
if next_ch == ' ' {
124
+
// Escaped space - add it to the current path
125
+
current_path.push(' ');
126
+
chars.next(); // consume the space
127
+
} else if next_ch == '\n' || next_ch == '\r' {
128
+
// Line continuation - skip the backslash and newline
129
+
chars.next();
130
+
if next_ch == '\r' {
131
+
// Handle \r\n
132
+
if chars.peek() == Some(&'\n') {
133
+
chars.next();
134
+
}
135
+
}
136
+
} else {
137
+
// Not escaping space or newline, keep the backslash
138
+
current_path.push('\\');
139
+
}
140
+
} else {
141
+
// Backslash at end of string
142
+
current_path.push('\\');
143
+
}
144
+
}
145
+
' ' | '\t' | '\n' | '\r' => {
146
+
// Unescaped whitespace - end current path
147
+
if !current_path.is_empty() {
148
+
paths.push(PathBuf::from(current_path.clone()));
149
+
current_path.clear();
150
+
}
151
+
}
152
+
_ => {
153
+
current_path.push(ch);
154
+
}
155
+
}
156
+
}
157
+
158
+
// Don't forget the last path
159
+
if !current_path.is_empty() {
160
+
paths.push(PathBuf::from(current_path));
161
+
}
162
+
163
+
paths
164
+
}
165
+
166
+
/// Reload dependencies from the .d file
167
+
pub fn reload_dependencies(&mut self) -> Result<(), std::io::Error> {
168
+
let Some(d_file_path) = &self.d_file_path else {
169
+
return Err(std::io::Error::new(
170
+
std::io::ErrorKind::NotFound,
171
+
"No .d file path set",
172
+
));
173
+
};
174
+
175
+
let content = fs::read_to_string(d_file_path)?;
176
+
177
+
// Parse the .d file format: "target: dep1 dep2 dep3 ..."
178
+
// The first line contains the target and dependencies, separated by ':'
179
+
let deps = if let Some(colon_pos) = content.find(':') {
180
+
// Everything after the colon is dependencies
181
+
&content[colon_pos + 1..]
182
+
} else {
183
+
// Malformed .d file
184
+
warn!("Malformed .d file at {:?}", d_file_path);
185
+
return Ok(());
186
+
};
187
+
188
+
// Dependencies are space-separated and may span multiple lines (with line continuations)
189
+
// Spaces in filenames are escaped with backslashes
190
+
let dep_paths = Self::parse_paths(deps);
191
+
192
+
// Clear old dependencies and load new ones with their modification times
193
+
self.dependencies.clear();
194
+
195
+
for dep_path in dep_paths {
196
+
match fs::metadata(&dep_path) {
197
+
Ok(metadata) => {
198
+
if let Ok(modified) = metadata.modified() {
199
+
self.dependencies.insert(dep_path.clone(), modified);
200
+
debug!("Tracking dependency: {:?}", dep_path);
201
+
}
202
+
}
203
+
Err(e) => {
204
+
// Dependency file doesn't exist or can't be read - this is okay,
205
+
// it might have been deleted or moved
206
+
debug!("Could not read dependency {:?}: {}", dep_path, e);
207
+
}
208
+
}
209
+
}
210
+
211
+
debug!(
212
+
"Loaded {} dependencies from {:?}",
213
+
self.dependencies.len(),
214
+
d_file_path
215
+
);
216
+
Ok(())
217
+
}
218
+
219
+
/// Check if any of the given paths require recompilation
220
+
/// Returns true if any path is a tracked dependency that has been modified
221
+
pub fn needs_recompile(&self, changed_paths: &[PathBuf]) -> bool {
222
+
for changed_path in changed_paths {
223
+
// Normalize the changed path to handle relative vs absolute paths
224
+
let changed_path_canonical = changed_path.canonicalize().ok();
225
+
226
+
for (dep_path, last_modified) in &self.dependencies {
227
+
// Try to match both exact path and canonical path
228
+
let matches = changed_path == dep_path
229
+
|| changed_path_canonical.as_ref() == Some(dep_path)
230
+
|| dep_path.canonicalize().ok().as_ref() == changed_path_canonical.as_ref();
231
+
232
+
if matches {
233
+
// Check if the file was modified after we last tracked it
234
+
if let Ok(metadata) = fs::metadata(changed_path) {
235
+
if let Ok(current_modified) = metadata.modified()
236
+
&& current_modified > *last_modified
237
+
{
238
+
debug!(
239
+
"Dependency {:?} was modified, recompile needed",
240
+
changed_path
241
+
);
242
+
return true;
243
+
}
244
+
} else {
245
+
// File was deleted or can't be read, assume recompile is needed
246
+
debug!(
247
+
"Dependency {:?} no longer exists, recompile needed",
248
+
changed_path
249
+
);
250
+
return true;
251
+
}
252
+
}
253
+
}
254
+
}
255
+
256
+
false
257
+
}
258
+
259
+
/// Get the list of tracked dependency paths
260
+
pub fn get_dependencies(&self) -> Vec<&Path> {
261
+
self.dependencies.keys().map(|p| p.as_path()).collect()
262
+
}
263
+
264
+
/// Check if we have any dependencies loaded
265
+
pub fn has_dependencies(&self) -> bool {
266
+
!self.dependencies.is_empty()
267
+
}
268
+
}
269
+
270
+
#[cfg(test)]
271
+
mod tests {
272
+
use super::*;
273
+
use std::fs;
274
+
use std::io::Write;
275
+
use tempfile::TempDir;
276
+
277
+
#[test]
278
+
fn test_parse_d_file() {
279
+
let temp_dir = TempDir::new().unwrap();
280
+
let d_file_path = temp_dir.path().join("test.d");
281
+
282
+
// Create a mock .d file
283
+
let mut d_file = fs::File::create(&d_file_path).unwrap();
284
+
writeln!(
285
+
d_file,
286
+
"/path/to/target: /path/to/dep1.rs /path/to/dep2.rs \\"
287
+
)
288
+
.unwrap();
289
+
writeln!(d_file, " /path/to/dep3.rs").unwrap();
290
+
291
+
// Create a tracker and point it to our test file
292
+
let mut tracker = DependencyTracker::new();
293
+
tracker.d_file_path = Some(d_file_path);
294
+
295
+
// This will fail to load the actual files, but we can check the parsing logic
296
+
let _ = tracker.reload_dependencies();
297
+
298
+
// We won't have any dependencies because the files don't exist,
299
+
// but we've verified the parsing doesn't crash
300
+
}
301
+
302
+
#[test]
303
+
fn test_parse_d_file_with_spaces() {
304
+
let temp_dir = TempDir::new().unwrap();
305
+
let d_file_path = temp_dir.path().join("test_spaces.d");
306
+
307
+
// Create actual test files with spaces in names
308
+
let dep_with_space = temp_dir.path().join("my file.rs");
309
+
fs::write(&dep_with_space, "// test").unwrap();
310
+
311
+
let normal_dep = temp_dir.path().join("normal.rs");
312
+
fs::write(&normal_dep, "// test").unwrap();
313
+
314
+
// Create a mock .d file with escaped spaces (Make format)
315
+
let mut d_file = fs::File::create(&d_file_path).unwrap();
316
+
writeln!(
317
+
d_file,
318
+
"/path/to/target: {} {}",
319
+
dep_with_space.to_str().unwrap().replace(' ', "\\ "),
320
+
normal_dep.to_str().unwrap()
321
+
)
322
+
.unwrap();
323
+
324
+
let mut tracker = DependencyTracker::new();
325
+
tracker.d_file_path = Some(d_file_path);
326
+
327
+
// Load dependencies
328
+
tracker.reload_dependencies().unwrap();
329
+
330
+
// Should have successfully parsed both files
331
+
assert!(tracker.has_dependencies());
332
+
let deps = tracker.get_dependencies();
333
+
assert_eq!(deps.len(), 2);
334
+
assert!(
335
+
deps.iter()
336
+
.any(|p| p.to_str().unwrap().contains("my file.rs")),
337
+
"Should contain file with space"
338
+
);
339
+
assert!(
340
+
deps.iter()
341
+
.any(|p| p.to_str().unwrap().contains("normal.rs")),
342
+
"Should contain normal file"
343
+
);
344
+
}
345
+
346
+
#[test]
347
+
fn test_parse_escaped_paths() {
348
+
// Test basic space-separated paths
349
+
let paths = DependencyTracker::parse_paths("a.rs b.rs c.rs");
350
+
assert_eq!(paths.len(), 3);
351
+
assert_eq!(paths[0], PathBuf::from("a.rs"));
352
+
assert_eq!(paths[1], PathBuf::from("b.rs"));
353
+
assert_eq!(paths[2], PathBuf::from("c.rs"));
354
+
355
+
// Test escaped spaces
356
+
let paths = DependencyTracker::parse_paths("my\\ file.rs another.rs");
357
+
assert_eq!(paths.len(), 2);
358
+
assert_eq!(paths[0], PathBuf::from("my file.rs"));
359
+
assert_eq!(paths[1], PathBuf::from("another.rs"));
360
+
361
+
// Test line continuation
362
+
let paths = DependencyTracker::parse_paths("a.rs b.rs \\\nc.rs");
363
+
assert_eq!(paths.len(), 3);
364
+
assert_eq!(paths[0], PathBuf::from("a.rs"));
365
+
assert_eq!(paths[1], PathBuf::from("b.rs"));
366
+
assert_eq!(paths[2], PathBuf::from("c.rs"));
367
+
368
+
// Test multiple escaped spaces
369
+
let paths = DependencyTracker::parse_paths("path/to/my\\ file\\ name.rs");
370
+
assert_eq!(paths.len(), 1);
371
+
assert_eq!(paths[0], PathBuf::from("path/to/my file name.rs"));
372
+
373
+
// Test mixed whitespace
374
+
let paths = DependencyTracker::parse_paths("a.rs\tb.rs\nc.rs");
375
+
assert_eq!(paths.len(), 3);
376
+
}
377
+
}
+223
-64
crates/maudit-cli/src/dev/server.rs
···
64
64
pub message: String,
65
65
}
66
66
67
+
/// Manages status updates and WebSocket broadcasting.
68
+
/// Cheap to clone - all clones share the same underlying state.
67
69
#[derive(Clone)]
68
-
struct AppState {
70
+
pub struct StatusManager {
69
71
tx: broadcast::Sender<WebSocketMessage>,
70
72
current_status: Arc<RwLock<Option<PersistentStatus>>>,
73
+
}
74
+
75
+
impl StatusManager {
76
+
pub fn new() -> Self {
77
+
let (tx, _) = broadcast::channel::<WebSocketMessage>(100);
78
+
Self {
79
+
tx,
80
+
current_status: Arc::new(RwLock::new(None)),
81
+
}
82
+
}
83
+
84
+
/// Update the status and broadcast to all connected WebSocket clients.
85
+
pub async fn update(&self, status_type: StatusType, message: &str) {
86
+
// Only store persistent states (Success clears errors, Error stores the error)
87
+
let persistent_status = match status_type {
88
+
StatusType::Success => None, // Clear any error state
89
+
StatusType::Error => Some(PersistentStatus {
90
+
status_type: StatusType::Error,
91
+
message: message.to_string(),
92
+
}),
93
+
// Everything else just keeps the current state
94
+
_ => {
95
+
let status = self.current_status.read().await;
96
+
status.clone() // Keep existing persistent state
97
+
}
98
+
};
99
+
100
+
// Update the stored status
101
+
{
102
+
let mut status = self.current_status.write().await;
103
+
*status = persistent_status;
104
+
}
105
+
106
+
// Send the message to all connected clients
107
+
let _ = self.tx.send(WebSocketMessage {
108
+
data: json!({
109
+
"type": status_type.to_string(),
110
+
"message": message
111
+
})
112
+
.to_string(),
113
+
});
114
+
}
115
+
116
+
/// Subscribe to WebSocket messages (for new connections).
117
+
pub fn subscribe(&self) -> broadcast::Receiver<WebSocketMessage> {
118
+
self.tx.subscribe()
119
+
}
120
+
121
+
/// Get the current persistent status (for new connections).
122
+
pub async fn get_current(&self) -> Option<PersistentStatus> {
123
+
self.current_status.read().await.clone()
124
+
}
125
+
126
+
/// Send a raw WebSocket message (for initial errors, etc.).
127
+
pub fn send_raw(&self, message: WebSocketMessage) {
128
+
let _ = self.tx.send(message);
129
+
}
130
+
}
131
+
132
+
impl Default for StatusManager {
133
+
fn default() -> Self {
134
+
Self::new()
135
+
}
136
+
}
137
+
138
+
#[derive(Clone)]
139
+
struct AppState {
140
+
status_manager: StatusManager,
71
141
}
72
142
73
143
fn inject_live_reload_script(html_content: &str, socket_addr: SocketAddr, host: bool) -> String {
···
93
163
94
164
pub async fn start_dev_web_server(
95
165
start_time: Instant,
96
-
tx: broadcast::Sender<WebSocketMessage>,
166
+
status_manager: StatusManager,
97
167
host: bool,
98
168
port: Option<u16>,
99
169
initial_error: Option<String>,
100
-
current_status: Arc<RwLock<Option<PersistentStatus>>>,
101
170
) {
102
171
// TODO: The dist dir should be configurable
103
172
let dist_dir = "dist";
104
173
105
174
// Send initial error if present
106
175
if let Some(error) = initial_error {
107
-
let _ = tx.send(WebSocketMessage {
176
+
status_manager.send_raw(WebSocketMessage {
108
177
data: json!({
109
178
"type": StatusType::Error.to_string(),
110
179
"message": error
···
172
241
.on_response(CustomOnResponse),
173
242
)
174
243
.with_state(AppState {
175
-
tx: tx.clone(),
176
-
current_status: current_status.clone(),
244
+
status_manager: status_manager.clone(),
177
245
});
178
246
179
247
log_server_start(
···
192
260
.unwrap();
193
261
}
194
262
195
-
pub async fn update_status(
196
-
tx: &broadcast::Sender<WebSocketMessage>,
197
-
current_status: Arc<RwLock<Option<PersistentStatus>>>,
198
-
status_type: StatusType,
199
-
message: &str,
200
-
) {
201
-
// Only store persistent states (Success clears errors, Error stores the error)
202
-
let persistent_status = match status_type {
203
-
StatusType::Success => None, // Clear any error state
204
-
StatusType::Error => Some(PersistentStatus {
205
-
status_type: StatusType::Error,
206
-
message: message.to_string(),
207
-
}),
208
-
// Everything else just keeps the current state
209
-
_ => {
210
-
let status = current_status.read().await;
211
-
status.clone() // Keep existing persistent state
212
-
}
213
-
};
214
-
215
-
// Update the stored status
216
-
{
217
-
let mut status = current_status.write().await;
218
-
*status = persistent_status;
219
-
}
220
-
221
-
// Send the message to all connected clients
222
-
let _ = tx.send(WebSocketMessage {
223
-
data: json!({
224
-
"type": status_type.to_string(),
225
-
"message": message
226
-
})
227
-
.to_string(),
228
-
});
229
-
}
230
-
231
263
async fn add_dev_client_script(
232
264
req: Request,
233
265
next: Next,
···
311
343
debug!("`{addr} connected.");
312
344
// finalize the upgrade process by returning upgrade callback.
313
345
// we can customize the callback by sending additional info such as address.
314
-
ws.on_upgrade(move |socket| handle_socket(socket, addr, state.tx, state.current_status))
346
+
ws.on_upgrade(move |socket| handle_socket(socket, addr, state.status_manager))
315
347
}
316
348
317
-
async fn handle_socket(
318
-
socket: WebSocket,
319
-
who: SocketAddr,
320
-
tx: broadcast::Sender<WebSocketMessage>,
321
-
current_status: Arc<RwLock<Option<PersistentStatus>>>,
322
-
) {
349
+
async fn handle_socket(socket: WebSocket, who: SocketAddr, status_manager: StatusManager) {
323
350
let (mut sender, mut receiver) = socket.split();
324
351
325
352
// Send current persistent status to new connection if there is one
326
-
{
327
-
let status = current_status.read().await;
328
-
if let Some(persistent_status) = status.as_ref() {
329
-
let _ = sender
330
-
.send(Message::Text(
331
-
json!({
332
-
"type": persistent_status.status_type.to_string(),
333
-
"message": persistent_status.message
334
-
})
335
-
.to_string()
336
-
.into(),
337
-
))
338
-
.await;
339
-
}
353
+
if let Some(persistent_status) = status_manager.get_current().await {
354
+
let _ = sender
355
+
.send(Message::Text(
356
+
json!({
357
+
"type": persistent_status.status_type.to_string(),
358
+
"message": persistent_status.message
359
+
})
360
+
.to_string()
361
+
.into(),
362
+
))
363
+
.await;
340
364
}
341
365
342
-
let mut rx = tx.subscribe();
366
+
let mut rx = status_manager.subscribe();
343
367
344
368
tokio::select! {
345
369
_ = async {
···
387
411
_ = terminate => {},
388
412
}
389
413
}
414
+
415
+
#[cfg(test)]
416
+
mod tests {
417
+
use super::*;
418
+
419
+
#[tokio::test]
420
+
async fn test_status_manager_update_error_persists() {
421
+
let manager = StatusManager::new();
422
+
423
+
manager
424
+
.update(StatusType::Error, "Something went wrong")
425
+
.await;
426
+
427
+
let status = manager.get_current().await;
428
+
assert!(status.is_some());
429
+
let status = status.unwrap();
430
+
assert!(matches!(status.status_type, StatusType::Error));
431
+
assert_eq!(status.message, "Something went wrong");
432
+
}
433
+
434
+
#[tokio::test]
435
+
async fn test_status_manager_update_success_clears_error() {
436
+
let manager = StatusManager::new();
437
+
438
+
// First set an error
439
+
manager.update(StatusType::Error, "Build failed").await;
440
+
assert!(manager.get_current().await.is_some());
441
+
442
+
// Then send success - should clear the error
443
+
manager.update(StatusType::Success, "Build succeeded").await;
444
+
assert!(manager.get_current().await.is_none());
445
+
}
446
+
447
+
#[tokio::test]
448
+
async fn test_status_manager_update_info_preserves_state() {
449
+
let manager = StatusManager::new();
450
+
451
+
// Set an error
452
+
manager.update(StatusType::Error, "Build failed").await;
453
+
let original_status = manager.get_current().await;
454
+
assert!(original_status.is_some());
455
+
456
+
// Send info - should preserve the error state
457
+
manager.update(StatusType::Info, "Building...").await;
458
+
let status = manager.get_current().await;
459
+
assert!(status.is_some());
460
+
assert_eq!(status.unwrap().message, "Build failed");
461
+
}
462
+
463
+
#[tokio::test]
464
+
async fn test_status_manager_update_info_when_no_error() {
465
+
let manager = StatusManager::new();
466
+
467
+
// No prior state
468
+
assert!(manager.get_current().await.is_none());
469
+
470
+
// Send info - should remain None
471
+
manager.update(StatusType::Info, "Building...").await;
472
+
assert!(manager.get_current().await.is_none());
473
+
}
474
+
475
+
#[tokio::test]
476
+
async fn test_status_manager_subscribe_receives_messages() {
477
+
let manager = StatusManager::new();
478
+
let mut rx = manager.subscribe();
479
+
480
+
manager.update(StatusType::Info, "Hello").await;
481
+
482
+
let msg = rx.try_recv();
483
+
assert!(msg.is_ok());
484
+
let msg = msg.unwrap();
485
+
assert!(msg.data.contains("Hello"));
486
+
assert!(msg.data.contains("info"));
487
+
}
488
+
489
+
#[tokio::test]
490
+
async fn test_status_manager_multiple_subscribers() {
491
+
let manager = StatusManager::new();
492
+
let mut rx1 = manager.subscribe();
493
+
let mut rx2 = manager.subscribe();
494
+
495
+
manager.update(StatusType::Success, "Done").await;
496
+
497
+
// Both subscribers should receive the message
498
+
assert!(rx1.try_recv().is_ok());
499
+
assert!(rx2.try_recv().is_ok());
500
+
}
501
+
502
+
#[tokio::test]
503
+
async fn test_status_manager_send_raw() {
504
+
let manager = StatusManager::new();
505
+
let mut rx = manager.subscribe();
506
+
507
+
manager.send_raw(WebSocketMessage {
508
+
data: r#"{"custom": "message"}"#.to_string(),
509
+
});
510
+
511
+
let msg = rx.try_recv();
512
+
assert!(msg.is_ok());
513
+
assert_eq!(msg.unwrap().data, r#"{"custom": "message"}"#);
514
+
}
515
+
516
+
#[tokio::test]
517
+
async fn test_status_manager_clone_shares_state() {
518
+
let manager1 = StatusManager::new();
519
+
let manager2 = manager1.clone();
520
+
521
+
// Update via one clone
522
+
manager1
523
+
.update(StatusType::Error, "Error from clone 1")
524
+
.await;
525
+
526
+
// Should be visible via the other clone
527
+
let status = manager2.get_current().await;
528
+
assert!(status.is_some());
529
+
assert_eq!(status.unwrap().message, "Error from clone 1");
530
+
}
531
+
532
+
#[tokio::test]
533
+
async fn test_status_manager_clone_shares_broadcast() {
534
+
let manager1 = StatusManager::new();
535
+
let manager2 = manager1.clone();
536
+
537
+
// Subscribe via one clone
538
+
let mut rx = manager2.subscribe();
539
+
540
+
// Send via the other clone
541
+
manager1.update(StatusType::Info, "From clone 1").await;
542
+
543
+
// Should receive the message
544
+
let msg = rx.try_recv();
545
+
assert!(msg.is_ok());
546
+
assert!(msg.unwrap().data.contains("From clone 1"));
547
+
}
548
+
}
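The persistence rules the tests above verify one at a time compose into a simple lifecycle for late-joining WebSocket clients. A compact sketch, written as if it were one more test in the module above:

#[tokio::test]
async fn status_lifecycle_sketch() {
    let manager = StatusManager::new();
    manager.update(StatusType::Error, "Build failed").await;
    manager.update(StatusType::Info, "Rebuilding...").await;
    // Info keeps the stored error, so a client connecting now still
    // receives "Build failed" via get_current().
    assert_eq!(manager.get_current().await.unwrap().message, "Build failed");
    manager.update(StatusType::Success, "Build finished").await;
    // Success clears it; new connections see no persistent status.
    assert!(manager.get_current().await.is_none());
}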
+65
-27
crates/maudit-cli/src/dev.rs
···
1
1
pub(crate) mod server;
2
2
3
3
mod build;
4
+
mod dep_tracker;
4
5
mod filterer;
5
6
6
7
use notify::{
···
9
10
};
10
11
use notify_debouncer_full::{DebounceEventResult, DebouncedEvent, new_debouncer};
11
12
use quanta::Instant;
12
-
use server::WebSocketMessage;
13
-
use std::{fs, path::Path};
14
-
use tokio::{
15
-
signal,
16
-
sync::{broadcast, mpsc::channel},
17
-
task::JoinHandle,
13
+
use server::StatusManager;
14
+
use std::{
15
+
fs,
16
+
path::{Path, PathBuf},
18
17
};
18
+
use tokio::{signal, sync::mpsc::channel, task::JoinHandle};
19
19
use tracing::{error, info};
20
20
21
21
use crate::dev::build::BuildManager;
22
22
23
-
pub async fn start_dev_env(cwd: &str, host: bool, port: Option<u16>) -> Result<(), Box<dyn std::error::Error>> {
23
+
pub async fn start_dev_env(
24
+
cwd: &str,
25
+
host: bool,
26
+
port: Option<u16>,
27
+
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
24
28
let start_time = Instant::now();
25
29
info!(name: "dev", "Preparing dev environmentโฆ");
26
30
27
-
let (sender_websocket, _) = broadcast::channel::<WebSocketMessage>(100);
31
+
// Create status manager (handles WebSocket communication)
32
+
let status_manager = StatusManager::new();
28
33
29
-
// Create build manager (it will create its own status state internally)
30
-
let build_manager = BuildManager::new(sender_websocket.clone());
34
+
// Create build manager
35
+
let build_manager = BuildManager::new(status_manager.clone());
31
36
32
37
// Do initial build
33
38
info!(name: "build", "Doing initial buildโฆ");
···
48
53
.collect::<Vec<_>>();
49
54
50
55
let mut debouncer = new_debouncer(
51
-
std::time::Duration::from_millis(100),
56
+
std::time::Duration::from_millis(200), // Longer debounce to better batch rapid file changes
52
57
None,
53
58
move |result: DebounceEventResult| {
54
59
tx.blocking_send(result).unwrap_or(());
···
73
78
info!(name: "dev", "Starting web server...");
74
79
web_server_thread = Some(tokio::spawn(server::start_dev_web_server(
75
80
start_time,
76
-
sender_websocket.clone(),
81
+
status_manager.clone(),
77
82
host,
78
83
port,
79
84
None,
80
-
build_manager.current_status(),
81
85
)));
82
86
}
83
87
84
88
// Clone build manager for the file watcher task
85
89
let build_manager_watcher = build_manager.clone();
86
-
let sender_websocket_watcher = sender_websocket.clone();
90
+
let status_manager_watcher = status_manager.clone();
87
91
88
92
let file_watcher_task = tokio::spawn(async move {
89
93
let mut dev_server_started = initial_build_success;
···
147
151
dev_server_handle =
148
152
Some(tokio::spawn(server::start_dev_web_server(
149
153
start_time,
150
-
sender_websocket_watcher.clone(),
154
+
status_manager_watcher.clone(),
151
155
host,
152
156
port,
153
157
None,
154
-
build_manager_watcher.current_status(),
155
158
)));
156
159
}
157
160
Ok(false) => {
···
162
165
}
163
166
}
164
167
} else {
165
-
// Normal rebuild - spawn in background so file watcher can continue
166
-
info!(name: "watch", "Files changed, rebuilding...");
167
-
let build_manager_clone = build_manager_watcher.clone();
168
-
tokio::spawn(async move {
169
-
match build_manager_clone.start_build().await {
170
-
Ok(_) => {
171
-
// Build completed (success or failure already logged)
168
+
// Normal rebuild - check if we need full recompilation or just rerun
169
+
// Only collect paths from events that actually trigger a rebuild
170
+
let mut changed_paths: Vec<PathBuf> = events.iter()
171
+
.filter(|e| should_rebuild_for_event(e))
172
+
.flat_map(|e| e.paths.iter().cloned())
173
+
.collect();
174
+
175
+
// Deduplicate paths
176
+
changed_paths.sort();
177
+
changed_paths.dedup();
178
+
179
+
if changed_paths.is_empty() {
180
+
// No file changes, only directory changes - skip rebuild
181
+
continue;
182
+
}
183
+
184
+
let needs_recompile = build_manager_watcher.needs_recompile(&changed_paths).await;
185
+
186
+
if needs_recompile {
187
+
// Need to recompile - spawn in background so file watcher can continue
188
+
info!(name: "watch", "Files changed, rebuilding...");
189
+
let build_manager_clone = build_manager_watcher.clone();
190
+
tokio::spawn(async move {
191
+
match build_manager_clone.start_build().await {
192
+
Ok(_) => {
193
+
// Build completed (success or failure already logged)
194
+
}
195
+
Err(e) => {
196
+
error!(name: "build", "Failed to start build: {}", e);
197
+
}
172
198
}
173
-
Err(e) => {
174
-
error!(name: "build", "Failed to start build: {}", e);
199
+
});
200
+
} else {
201
+
// Just rerun the binary without recompiling
202
+
info!(name: "watch", "Non-dependency files changed, rerunning binary...");
203
+
let build_manager_clone = build_manager_watcher.clone();
204
+
let changed_paths_clone = changed_paths.clone();
205
+
tokio::spawn(async move {
206
+
match build_manager_clone.rerun_binary(&changed_paths_clone).await {
207
+
Ok(_) => {
208
+
// Rerun completed (success or failure already logged)
209
+
}
210
+
Err(e) => {
211
+
error!(name: "build", "Failed to rerun binary: {}", e);
212
+
}
175
213
}
176
-
}
177
-
});
214
+
});
215
+
}
178
216
}
179
217
}
180
218
}
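The watcher loop above filters events through should_rebuild_for_event, which comes from the crate's filterer module and is not part of this diff. Purely as a hypothetical stand-in for reading the loop, such a filter might keep only events that touch file contents:

// Hypothetical sketch only; the real filter lives in dev/filterer.rs and
// may check more (for example, ignoring output directories).
fn should_rebuild_for_event(event: &DebouncedEvent) -> bool {
    use notify::EventKind;
    matches!(
        event.kind,
        EventKind::Create(_) | EventKind::Modify(_) | EventKind::Remove(_)
    )
}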
+3
e2e/README.md
···
13
13
## Running Tests
14
14
15
15
The tests will automatically:
16
+
16
17
1. Build the prefetch.js bundle (via `cargo xtask build-maudit-js`)
17
18
2. Start the Maudit dev server on the test fixture site
18
19
3. Run the tests
···
46
47
## Features Tested
47
48
48
49
### Basic Prefetch
50
+
49
51
- Creating link elements with `rel="prefetch"`
50
52
- Preventing duplicate prefetches
51
53
- Skipping current page prefetch
52
54
- Blocking cross-origin prefetches
53
55
54
56
### Prerendering (Chromium only)
57
+
55
58
- Creating `<script type="speculationrules">` elements
56
59
- Different eagerness levels (immediate, eager, moderate, conservative)
57
60
- Fallback to link prefetch on non-Chromium browsers
+1
e2e/fixtures/hot-reload/data.txt
···
1
+
Test data
+1
-1
e2e/fixtures/hot-reload/src/main.rs
+9
e2e/fixtures/incremental-build/Cargo.toml
+2
e2e/fixtures/incremental-build/src/assets/about.js
e2e/fixtures/incremental-build/src/assets/bg.png
This is a binary file and will not be displayed.
+10
e2e/fixtures/incremental-build/src/assets/blog.css
+8
e2e/fixtures/incremental-build/src/assets/icons/blog-icon.css
e2e/fixtures/incremental-build/src/assets/logo.png
This is a binary file and will not be displayed.
+5
e2e/fixtures/incremental-build/src/assets/main.js
+13
e2e/fixtures/incremental-build/src/assets/styles.css
e2e/fixtures/incremental-build/src/assets/team.png
This is a binary file and will not be displayed.
+4
e2e/fixtures/incremental-build/src/assets/utils.js
+11
e2e/fixtures/incremental-build/src/main.rs
···
1
+
use maudit::{BuildOptions, BuildOutput, content_sources, coronate, routes};
2
+
3
+
mod pages;
4
+
5
+
fn main() -> Result<BuildOutput, Box<dyn std::error::Error>> {
6
+
coronate(
7
+
routes![pages::index::Index, pages::about::About, pages::blog::Blog],
8
+
content_sources![],
9
+
BuildOptions::default(),
10
+
)
11
+
}
+33
e2e/fixtures/incremental-build/src/pages/about.rs
···
1
+
use maud::html;
2
+
use maudit::route::prelude::*;
3
+
use std::time::{SystemTime, UNIX_EPOCH};
4
+
5
+
#[route("/about")]
6
+
pub struct About;
7
+
8
+
impl Route for About {
9
+
fn render(&self, ctx: &mut PageContext) -> impl Into<RenderResult> {
10
+
let _image = ctx.assets.add_image("src/assets/team.png");
11
+
let _script = ctx.assets.add_script("src/assets/about.js");
12
+
// Shared style with index page (for testing shared assets)
13
+
let _style = ctx.assets.add_style("src/assets/styles.css");
14
+
15
+
// Generate a unique build ID - uses nanoseconds for uniqueness
16
+
let build_id = SystemTime::now()
17
+
.duration_since(UNIX_EPOCH)
18
+
.map(|d| d.as_nanos().to_string())
19
+
.unwrap_or_else(|_| "0".to_string());
20
+
21
+
html! {
22
+
html {
23
+
head {
24
+
title { "About Page" }
25
+
}
26
+
body data-build-id=(build_id) {
27
+
h1 id="title" { "About Us" }
28
+
p id="content" { "Learn more about us" }
29
+
}
30
+
}
31
+
}
32
+
}
33
+
}
+31
e2e/fixtures/incremental-build/src/pages/blog.rs
···
1
+
use maud::html;
2
+
use maudit::route::prelude::*;
3
+
use std::time::{SystemTime, UNIX_EPOCH};
4
+
5
+
#[route("/blog")]
6
+
pub struct Blog;
7
+
8
+
impl Route for Blog {
9
+
fn render(&self, ctx: &mut PageContext) -> impl Into<RenderResult> {
10
+
let _style = ctx.assets.add_style("src/assets/blog.css");
11
+
let _icon_style = ctx.assets.add_style("src/assets/icons/blog-icon.css");
12
+
13
+
// Generate a unique build ID - uses nanoseconds for uniqueness
14
+
let build_id = SystemTime::now()
15
+
.duration_since(UNIX_EPOCH)
16
+
.map(|d| d.as_nanos().to_string())
17
+
.unwrap_or_else(|_| "0".to_string());
18
+
19
+
html! {
20
+
html {
21
+
head {
22
+
title { "Blog Page" }
23
+
}
24
+
body data-build-id=(build_id) {
25
+
h1 id="title" { "Blog" }
26
+
p id="content" { "Read our latest posts" }
27
+
}
28
+
}
29
+
}
30
+
}
31
+
}
+32
e2e/fixtures/incremental-build/src/pages/index.rs
···
1
+
use maud::html;
2
+
use maudit::route::prelude::*;
3
+
use std::time::{SystemTime, UNIX_EPOCH};
4
+
5
+
#[route("/")]
6
+
pub struct Index;
7
+
8
+
impl Route for Index {
9
+
fn render(&self, ctx: &mut PageContext) -> impl Into<RenderResult> {
10
+
let _image = ctx.assets.add_image("src/assets/logo.png");
11
+
let _script = ctx.assets.add_script("src/assets/main.js");
12
+
let _style = ctx.assets.add_style("src/assets/styles.css");
13
+
14
+
// Generate a unique build ID - uses nanoseconds for uniqueness
15
+
let build_id = SystemTime::now()
16
+
.duration_since(UNIX_EPOCH)
17
+
.map(|d| d.as_nanos().to_string())
18
+
.unwrap_or_else(|_| "0".to_string());
19
+
20
+
html! {
21
+
html {
22
+
head {
23
+
title { "Home Page" }
24
+
}
25
+
body data-build-id=(build_id) {
26
+
h1 id="title" { "Home Page" }
27
+
p id="content" { "Welcome to the home page" }
28
+
}
29
+
}
30
+
}
31
+
}
32
+
}
+3
e2e/fixtures/incremental-build/src/pages/mod.rs
+1
-1
e2e/fixtures/prefetch-prerender/src/main.rs
+116
-8
e2e/tests/hot-reload.spec.ts
···
 
 test.describe.configure({ mode: "serial" });
 
+/**
+ * Wait for dev server to complete a build/rerun by polling logs
+ */
+async function waitForBuildComplete(devServer: any, timeoutMs = 20000): Promise<string[]> {
+  const startTime = Date.now();
+
+  while (Date.now() - startTime < timeoutMs) {
+    const logs = devServer.getLogs(100);
+    const logsText = logs.join("\n").toLowerCase();
+
+    // Look for completion messages
+    if (
+      logsText.includes("finished") ||
+      logsText.includes("rerun finished") ||
+      logsText.includes("build finished")
+    ) {
+      return logs;
+    }
+
+    // Wait 100ms before checking again
+    await new Promise((resolve) => setTimeout(resolve, 100));
+  }
+
+  throw new Error(`Build did not complete within ${timeoutMs}ms`);
+}
+
 test.describe("Hot Reload", () => {
+  // Increase timeout for these tests since they involve compilation
+  test.setTimeout(60000);
+
   const fixturePath = resolve(__dirname, "..", "fixtures", "hot-reload");
   const indexPath = resolve(fixturePath, "src", "pages", "index.rs");
-  let originalContent: string;
+  const mainPath = resolve(fixturePath, "src", "main.rs");
+  const dataPath = resolve(fixturePath, "data.txt");
+  let originalIndexContent: string;
+  let originalMainContent: string;
+  let originalDataContent: string;
 
   test.beforeAll(async () => {
     // Save original content
-    originalContent = readFileSync(indexPath, "utf-8");
+    originalIndexContent = readFileSync(indexPath, "utf-8");
+    originalMainContent = readFileSync(mainPath, "utf-8");
+    originalDataContent = readFileSync(dataPath, "utf-8");
+
+    // Ensure files are in original state
+    writeFileSync(indexPath, originalIndexContent, "utf-8");
+    writeFileSync(mainPath, originalMainContent, "utf-8");
+    writeFileSync(dataPath, originalDataContent, "utf-8");
   });
 
-  test.afterEach(async () => {
+  test.afterEach(async ({ devServer }) => {
     // Restore original content after each test
-    writeFileSync(indexPath, originalContent, "utf-8");
-    // Wait a bit for the rebuild
-    await new Promise((resolve) => setTimeout(resolve, 2000));
+    writeFileSync(indexPath, originalIndexContent, "utf-8");
+    writeFileSync(mainPath, originalMainContent, "utf-8");
+    writeFileSync(dataPath, originalDataContent, "utf-8");
+
+    // Only wait for build if devServer is available (startup might have failed)
+    if (devServer) {
+      try {
+        devServer.clearLogs();
+        await waitForBuildComplete(devServer);
+      } catch (error) {
+        console.warn("Failed to wait for build completion in afterEach:", error);
+      }
+    }
   });
 
   test.afterAll(async () => {
     // Restore original content
-    writeFileSync(indexPath, originalContent, "utf-8");
+    writeFileSync(indexPath, originalIndexContent, "utf-8");
+    writeFileSync(mainPath, originalMainContent, "utf-8");
+    writeFileSync(dataPath, originalDataContent, "utf-8");
+  });
+
+  test("should recompile when Rust code changes (dependencies)", async ({ page, devServer }) => {
+    await page.goto(devServer.url);
+
+    // Verify initial content
+    await expect(page.locator("#title")).toHaveText("Original Title");
+
+    // Clear logs to track what happens after this point
+    devServer.clearLogs();
+
+    // Modify main.rs - this is a tracked dependency, should trigger recompile
+    const modifiedMain = originalMainContent.replace(
+      "BuildOptions::default()",
+      "BuildOptions::default() // Modified comment",
+    );
+    writeFileSync(mainPath, modifiedMain, "utf-8");
+
+    // Wait for rebuild to complete
+    const logs = await waitForBuildComplete(devServer, 20000);
+    const logsText = logs.join("\n");
+
+    // Check logs to verify it actually recompiled (ran cargo)
+    expect(logsText).toContain("rebuilding");
+    // Make sure it didn't just rerun the binary
+    expect(logsText.toLowerCase()).not.toContain("rerunning binary");
+  });
+
+  test("should rerun without recompile when non-dependency files change", async ({
+    page,
+    devServer,
+  }) => {
+    await page.goto(devServer.url);
+
+    // Verify initial content
+    await expect(page.locator("#title")).toHaveText("Original Title");
+
+    // Clear logs to track what happens after this point
+    devServer.clearLogs();
+
+    // Modify data.txt - this file is NOT in the .d dependencies
+    // So it should trigger a rerun without recompilation
+    writeFileSync(dataPath, "Modified data", "utf-8");
+
+    // Wait for build/rerun to complete
+    const logs = await waitForBuildComplete(devServer, 20000);
+    const logsText = logs.join("\n");
+
+    // Should see "rerunning binary" message (case insensitive)
+    const hasRerunMessage = logsText.toLowerCase().includes("rerunning binary");
+    expect(hasRerunMessage).toBe(true);
+
+    // Should NOT see cargo-related rebuild messages (compiling, building crate)
+    // Note: "Rebuilding N affected routes" is fine - that's the incremental build system
+    expect(logsText.toLowerCase()).not.toContain("compiling");
+    expect(logsText.toLowerCase()).not.toContain("cargo build");
   });
 
   test("should show updated content after file changes", async ({ page, devServer }) => {
···
     const currentUrl = page.url();
 
     // Modify the file
-    const modifiedContent = originalContent.replace(
+    const modifiedContent = originalIndexContent.replace(
       'h1 id="title" { "Original Title" }',
       'h1 id="title" { "Another Update" }',
     );
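
Both this suite and the incremental-build suite below poll the dev-server logs instead of sleeping for a fixed duration. A minimal sketch of that shared pattern factored into a reusable helper, assuming only the `getLogs` shape that the `DevServer` interface in `test-utils.ts` declares; the `pollLogs` name and its `predicate` parameter are illustrative, not part of this PR:

```ts
// Sketch only: generalizes the waitForBuildComplete polling loop.
// Assumes the getLogs(lines?) shape from e2e/tests/test-utils.ts.
interface LogSource {
  getLogs: (lines?: number) => string[];
}

async function pollLogs(
  server: LogSource,
  predicate: (logsText: string) => boolean,
  timeoutMs = 20000,
  intervalMs = 100,
): Promise<string[]> {
  const startTime = Date.now();
  while (Date.now() - startTime < timeoutMs) {
    const logs = server.getLogs(100);
    // Predicate sees the joined, lowercased log text, as the suites do
    if (predicate(logs.join("\n").toLowerCase())) {
      return logs;
    }
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
  throw new Error(`Condition not met within ${timeoutMs}ms`);
}

// Usage, mirroring waitForBuildComplete:
// const logs = await pollLogs(devServer, (text) => text.includes("build finished"));
```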
+521  e2e/tests/incremental-build.spec.ts (new file)
import { expect } from "@playwright/test";
import { createTestWithFixture } from "./test-utils";
import { readFileSync, writeFileSync, renameSync, rmSync, existsSync } from "node:fs";
import { resolve, dirname } from "node:path";
import { fileURLToPath } from "node:url";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

// Create test instance with incremental-build fixture
const test = createTestWithFixture("incremental-build");

// Allow retries for timing-sensitive tests
test.describe.configure({ mode: "serial", retries: 2 });

/**
 * Wait for dev server to complete a build by looking for specific patterns.
 * Waits for the build to START, then waits for it to FINISH.
 */
async function waitForBuildComplete(devServer: any, timeoutMs = 30000): Promise<string[]> {
  const startTime = Date.now();

  // Phase 1: Wait for build to start
  while (Date.now() - startTime < timeoutMs) {
    const logs = devServer.getLogs(200);
    const logsText = logs.join("\n").toLowerCase();

    if (
      logsText.includes("rerunning") ||
      logsText.includes("rebuilding") ||
      logsText.includes("files changed")
    ) {
      break;
    }

    await new Promise((resolve) => setTimeout(resolve, 50));
  }

  // Phase 2: Wait for build to finish
  while (Date.now() - startTime < timeoutMs) {
    const logs = devServer.getLogs(200);
    const logsText = logs.join("\n").toLowerCase();

    if (
      logsText.includes("finished") ||
      logsText.includes("rerun finished") ||
      logsText.includes("build finished")
    ) {
      // Wait for filesystem to fully sync
      await new Promise((resolve) => setTimeout(resolve, 500));
      return devServer.getLogs(200);
    }

    await new Promise((resolve) => setTimeout(resolve, 100));
  }

  // On timeout, log what we DID see for debugging
  console.log("TIMEOUT - logs seen:", devServer.getLogs(50));
  throw new Error(`Build did not complete within ${timeoutMs}ms`);
}

/**
 * Extract the build ID from an HTML file.
 */
function getBuildId(htmlPath: string): string | null {
  try {
    const content = readFileSync(htmlPath, "utf-8");
    const match = content.match(/data-build-id="(\d+)"/);
    return match ? match[1] : null;
  } catch {
    return null;
  }
}

/**
 * Check if logs indicate incremental build was used
 */
function isIncrementalBuild(logs: string[]): boolean {
  return logs.join("\n").toLowerCase().includes("incremental build");
}

/**
 * Get the number of affected routes from logs
 */
function getAffectedRouteCount(logs: string[]): number {
  const logsText = logs.join("\n");
  const match = logsText.match(/Rebuilding (\d+) affected routes/i);
  return match ? parseInt(match[1], 10) : -1;
}

/**
 * Helper to set up incremental build state
 */
async function setupIncrementalState(
  devServer: any,
  triggerChange: (suffix: string) => Promise<string[]>,
): Promise<void> {
  // First change triggers a full build (no previous state)
  await triggerChange("init");
  await new Promise((resolve) => setTimeout(resolve, 500));

  // Second change should be incremental (state now exists)
  const logs = await triggerChange("setup");
  expect(isIncrementalBuild(logs)).toBe(true);
  await new Promise((resolve) => setTimeout(resolve, 500));
}

/**
 * Record build IDs for all pages
 */
function recordBuildIds(htmlPaths: Record<string, string>): Record<string, string | null> {
  const ids: Record<string, string | null> = {};
  for (const [name, path] of Object.entries(htmlPaths)) {
    ids[name] = getBuildId(path);
  }
  return ids;
}

test.describe("Incremental Build", () => {
  test.setTimeout(180000);

  const fixturePath = resolve(__dirname, "..", "fixtures", "incremental-build");

  // Asset paths
  const assets = {
    blogCss: resolve(fixturePath, "src", "assets", "blog.css"),
    utilsJs: resolve(fixturePath, "src", "assets", "utils.js"),
    mainJs: resolve(fixturePath, "src", "assets", "main.js"),
    aboutJs: resolve(fixturePath, "src", "assets", "about.js"),
    stylesCss: resolve(fixturePath, "src", "assets", "styles.css"),
    logoPng: resolve(fixturePath, "src", "assets", "logo.png"),
    teamPng: resolve(fixturePath, "src", "assets", "team.png"),
    bgPng: resolve(fixturePath, "src", "assets", "bg.png"),
  };

  // Output HTML paths
  const htmlPaths = {
    index: resolve(fixturePath, "dist", "index.html"),
    about: resolve(fixturePath, "dist", "about", "index.html"),
    blog: resolve(fixturePath, "dist", "blog", "index.html"),
  };

  // Original content storage
  const originals: Record<string, string | Buffer> = {};

  test.beforeAll(async () => {
    // Store original content for all assets we might modify
    originals.blogCss = readFileSync(assets.blogCss, "utf-8");
    originals.utilsJs = readFileSync(assets.utilsJs, "utf-8");
    originals.mainJs = readFileSync(assets.mainJs, "utf-8");
    originals.aboutJs = readFileSync(assets.aboutJs, "utf-8");
    originals.stylesCss = readFileSync(assets.stylesCss, "utf-8");
    originals.logoPng = readFileSync(assets.logoPng); // binary
    originals.teamPng = readFileSync(assets.teamPng); // binary
    originals.bgPng = readFileSync(assets.bgPng); // binary
  });

  test.afterAll(async () => {
    // Restore all original content
    writeFileSync(assets.blogCss, originals.blogCss);
    writeFileSync(assets.utilsJs, originals.utilsJs);
    writeFileSync(assets.mainJs, originals.mainJs);
    writeFileSync(assets.aboutJs, originals.aboutJs);
    writeFileSync(assets.stylesCss, originals.stylesCss);
    writeFileSync(assets.logoPng, originals.logoPng);
    writeFileSync(assets.teamPng, originals.teamPng);
    writeFileSync(assets.bgPng, originals.bgPng);
  });

  // ============================================================
  // TEST 1: Direct CSS dependency (blog.css → /blog only)
  // ============================================================
  test("CSS file change rebuilds only routes using it", async ({ devServer }) => {
    let testCounter = 0;

    async function triggerChange(suffix: string) {
      testCounter++;
      devServer.clearLogs();
      writeFileSync(assets.blogCss, originals.blogCss + `\n/* test-${testCounter}-${suffix} */`);
      return await waitForBuildComplete(devServer, 30000);
    }

    await setupIncrementalState(devServer, triggerChange);

    // Record build IDs before
    const before = recordBuildIds(htmlPaths);
    expect(before.index).not.toBeNull();
    expect(before.about).not.toBeNull();
    expect(before.blog).not.toBeNull();

    await new Promise((resolve) => setTimeout(resolve, 500));

    // Trigger the change
    const logs = await triggerChange("final");

    // Verify incremental build with 1 route
    expect(isIncrementalBuild(logs)).toBe(true);
    expect(getAffectedRouteCount(logs)).toBe(1);

    // Verify only blog was rebuilt
    const after = recordBuildIds(htmlPaths);
    expect(after.index).toBe(before.index);
    expect(after.about).toBe(before.about);
    expect(after.blog).not.toBe(before.blog);
  });

  // ============================================================
  // TEST 2: Transitive JS dependency (utils.js → main.js → /)
  // ============================================================
  test("transitive JS dependency change rebuilds affected routes", async ({ devServer }) => {
    let testCounter = 0;

    async function triggerChange(suffix: string) {
      testCounter++;
      devServer.clearLogs();
      writeFileSync(assets.utilsJs, originals.utilsJs + `\n// test-${testCounter}-${suffix}`);
      return await waitForBuildComplete(devServer, 30000);
    }

    await setupIncrementalState(devServer, triggerChange);

    const before = recordBuildIds(htmlPaths);
    expect(before.index).not.toBeNull();

    await new Promise((resolve) => setTimeout(resolve, 500));

    const logs = await triggerChange("final");

    // Verify incremental build with 1 route
    expect(isIncrementalBuild(logs)).toBe(true);
    expect(getAffectedRouteCount(logs)).toBe(1);

    // Only index should be rebuilt (uses main.js which imports utils.js)
    const after = recordBuildIds(htmlPaths);
    expect(after.about).toBe(before.about);
    expect(after.blog).toBe(before.blog);
    expect(after.index).not.toBe(before.index);
  });

  // ============================================================
  // TEST 3: Direct JS entry point change (about.js → /about)
  // ============================================================
  test("direct JS entry point change rebuilds only routes using it", async ({ devServer }) => {
    let testCounter = 0;

    async function triggerChange(suffix: string) {
      testCounter++;
      devServer.clearLogs();
      writeFileSync(assets.aboutJs, originals.aboutJs + `\n// test-${testCounter}-${suffix}`);
      return await waitForBuildComplete(devServer, 30000);
    }

    await setupIncrementalState(devServer, triggerChange);

    const before = recordBuildIds(htmlPaths);
    expect(before.about).not.toBeNull();

    await new Promise((resolve) => setTimeout(resolve, 500));

    const logs = await triggerChange("final");

    // Verify incremental build with 1 route
    expect(isIncrementalBuild(logs)).toBe(true);
    expect(getAffectedRouteCount(logs)).toBe(1);

    // Only about should be rebuilt
    const after = recordBuildIds(htmlPaths);
    expect(after.index).toBe(before.index);
    expect(after.blog).toBe(before.blog);
    expect(after.about).not.toBe(before.about);
  });

  // ============================================================
  // TEST 4: Shared asset change (styles.css → / AND /about)
  // ============================================================
  test("shared asset change rebuilds all routes using it", async ({ devServer }) => {
    let testCounter = 0;

    async function triggerChange(suffix: string) {
      testCounter++;
      devServer.clearLogs();
      writeFileSync(
        assets.stylesCss,
        originals.stylesCss + `\n/* test-${testCounter}-${suffix} */`,
      );
      return await waitForBuildComplete(devServer, 30000);
    }

    await setupIncrementalState(devServer, triggerChange);

    const before = recordBuildIds(htmlPaths);
    expect(before.index).not.toBeNull();
    expect(before.about).not.toBeNull();

    await new Promise((resolve) => setTimeout(resolve, 500));

    const logs = await triggerChange("final");

    // Verify incremental build with 2 routes (/ and /about both use styles.css)
    expect(isIncrementalBuild(logs)).toBe(true);
    expect(getAffectedRouteCount(logs)).toBe(2);

    // Index and about should be rebuilt, blog should not
    const after = recordBuildIds(htmlPaths);
    expect(after.blog).toBe(before.blog);
    expect(after.index).not.toBe(before.index);
    expect(after.about).not.toBe(before.about);
  });

  // ============================================================
  // TEST 5: Image change (logo.png → /)
  // ============================================================
  test("image change rebuilds only routes using it", async ({ devServer }) => {
    let testCounter = 0;

    async function triggerChange(suffix: string) {
      testCounter++;
      devServer.clearLogs();
      // For images, we append bytes to change the file
      // This simulates modifying an image file
      const modified = Buffer.concat([
        originals.logoPng as Buffer,
        Buffer.from(`<!-- test-${testCounter}-${suffix} -->`),
      ]);
      writeFileSync(assets.logoPng, modified);
      return await waitForBuildComplete(devServer, 30000);
    }

    await setupIncrementalState(devServer, triggerChange);

    const before = recordBuildIds(htmlPaths);
    expect(before.index).not.toBeNull();

    await new Promise((resolve) => setTimeout(resolve, 500));

    const logs = await triggerChange("final");

    // Verify incremental build with 1 route
    expect(isIncrementalBuild(logs)).toBe(true);
    expect(getAffectedRouteCount(logs)).toBe(1);

    // Only index should be rebuilt (uses logo.png)
    const after = recordBuildIds(htmlPaths);
    expect(after.about).toBe(before.about);
    expect(after.blog).toBe(before.blog);
    expect(after.index).not.toBe(before.index);
  });

  // ============================================================
  // TEST 6: Multiple files changed simultaneously
  // ============================================================
  test("multiple file changes rebuild union of affected routes", async ({ devServer }) => {
    let testCounter = 0;

    async function triggerChange(suffix: string) {
      testCounter++;
      devServer.clearLogs();
      // Change both blog.css (affects /blog) and about.js (affects /about)
      writeFileSync(assets.blogCss, originals.blogCss + `\n/* test-${testCounter}-${suffix} */`);
      writeFileSync(assets.aboutJs, originals.aboutJs + `\n// test-${testCounter}-${suffix}`);
      return await waitForBuildComplete(devServer, 30000);
    }

    await setupIncrementalState(devServer, triggerChange);

    const before = recordBuildIds(htmlPaths);
    expect(before.about).not.toBeNull();
    expect(before.blog).not.toBeNull();

    await new Promise((resolve) => setTimeout(resolve, 500));

    const logs = await triggerChange("final");

    // Verify incremental build with 2 routes (/about and /blog)
    expect(isIncrementalBuild(logs)).toBe(true);
    expect(getAffectedRouteCount(logs)).toBe(2);

    // About and blog should be rebuilt, index should not
    const after = recordBuildIds(htmlPaths);
    expect(after.index).toBe(before.index);
    expect(after.about).not.toBe(before.about);
    expect(after.blog).not.toBe(before.blog);
  });

  // ============================================================
  // TEST 7: CSS url() asset dependency (bg.png via blog.css → /blog)
  // ============================================================
  test("CSS url() asset change triggers rebundling and rebuilds affected routes", async ({
    devServer,
  }) => {
    let testCounter = 0;

    async function triggerChange(suffix: string) {
      testCounter++;
      devServer.clearLogs();
      // Modify bg.png - this is referenced via url() in blog.css
      // Changing it should trigger rebundling and rebuild /blog
      const modified = Buffer.concat([
        originals.bgPng as Buffer,
        Buffer.from(`<!-- test-${testCounter}-${suffix} -->`),
      ]);
      writeFileSync(assets.bgPng, modified);
      return await waitForBuildComplete(devServer, 30000);
    }

    await setupIncrementalState(devServer, triggerChange);

    const before = recordBuildIds(htmlPaths);
    expect(before.blog).not.toBeNull();

    await new Promise((resolve) => setTimeout(resolve, 500));

    const logs = await triggerChange("final");

    // Verify incremental build triggered
    expect(isIncrementalBuild(logs)).toBe(true);

    // Blog should be rebuilt (uses blog.css which references bg.png via url())
    // The bundler should have been re-run to update the hashed asset reference
    const after = recordBuildIds(htmlPaths);
    expect(after.blog).not.toBe(before.blog);
  });

  // ============================================================
  // TEST 8: Folder rename detection
  // ============================================================
  test("folder rename is detected and affects routes using assets in that folder", async ({ devServer }) => {
    // This test verifies that renaming a folder containing tracked assets
    // is detected by the file watcher and affects the correct routes.
    //
    // Setup: The blog page uses src/assets/icons/blog-icon.css
    // Test: Rename icons -> icons-renamed, verify the blog route is identified as affected
    //
    // Note: The actual build will fail because the asset path becomes invalid,
    // but this test verifies the DETECTION and ROUTE MATCHING works correctly.

    const iconsFolder = resolve(fixturePath, "src", "assets", "icons");
    const renamedFolder = resolve(fixturePath, "src", "assets", "icons-renamed");
    const iconFile = resolve(iconsFolder, "blog-icon.css");

    // Ensure we start with the correct state
    if (existsSync(renamedFolder)) {
      // Restore from previous failed run
      renameSync(renamedFolder, iconsFolder);
      await new Promise((resolve) => setTimeout(resolve, 1000));
    }

    // Make sure the icons folder exists with the file
    expect(existsSync(iconsFolder)).toBe(true);
    expect(existsSync(iconFile)).toBe(true);

    try {
      // First, trigger TWO builds to establish the asset tracking
      // The first build creates the state, the second ensures the icon is tracked
      const originalContent = readFileSync(iconFile, "utf-8");

      // Build 1: Ensure blog-icon.css is used and tracked
      devServer.clearLogs();
      writeFileSync(iconFile, originalContent + "\n/* setup1 */");
      await waitForBuildComplete(devServer, 30000);
      await new Promise((resolve) => setTimeout(resolve, 500));

      // Build 2: Now the asset should definitely be in the state
      devServer.clearLogs();
      writeFileSync(iconFile, originalContent + "\n/* setup2 */");
      await waitForBuildComplete(devServer, 30000);
      await new Promise((resolve) => setTimeout(resolve, 500));

      // Clear for the actual test
      devServer.clearLogs();

      // Rename icons -> icons-renamed
      renameSync(iconsFolder, renamedFolder);

      // Wait for the build to be attempted (it will fail because path is now invalid)
      const startTime = Date.now();
      const timeoutMs = 15000;
      let logs: string[] = [];

      while (Date.now() - startTime < timeoutMs) {
        logs = devServer.getLogs(100);
        const logsText = logs.join("\n");

        // Wait for either success or failure
        if (logsText.includes("finished") || logsText.includes("failed")) {
          break;
        }

        await new Promise((resolve) => setTimeout(resolve, 100));
      }

      console.log("Logs after folder rename:", logs.slice(-15));

      const logsText = logs.join("\n");

      // Key assertions: verify the detection and route matching worked
      // 1. The folder paths should be in changed files
      expect(logsText).toContain("icons");

      // 2. The blog route should be identified as affected
      expect(logsText).toContain("Rebuilding 1 affected routes");
      expect(logsText).toContain("/blog");

      // 3. Other routes should NOT be affected (index and about don't use icons/)
      expect(logsText).not.toContain("/about");
    } finally {
      // Restore: rename icons-renamed back to icons
      if (existsSync(renamedFolder) && !existsSync(iconsFolder)) {
        renameSync(renamedFolder, iconsFolder);
      }
      // Restore original content
      if (existsSync(iconFile)) {
        const content = readFileSync(iconFile, "utf-8");
        writeFileSync(iconFile, content.replace(/\n\/\* setup[12] \*\//g, ""));
      }
      // Wait for restoration to be processed
      await new Promise((resolve) => setTimeout(resolve, 1000));
    }
  });
});
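
Each scenario above repeats the same assertion shape: snapshot `data-build-id` values with `recordBuildIds`, trigger a change, then check that exactly the expected routes received new IDs. A sketch of how that could be folded into one helper; `expectOnlyRebuilt` is hypothetical and not part of this PR:

```ts
// Hypothetical helper (not in the suite): asserts that exactly the named
// routes changed their data-build-id between two recordBuildIds() snapshots.
import { expect } from "@playwright/test";

function expectOnlyRebuilt(
  before: Record<string, string | null>,
  after: Record<string, string | null>,
  rebuilt: string[],
): void {
  for (const name of Object.keys(before)) {
    if (rebuilt.includes(name)) {
      expect(after[name]).not.toBe(before[name]); // this route was rebuilt
    } else {
      expect(after[name]).toBe(before[name]); // this route must be untouched
    }
  }
}

// Usage inside a test, e.g. after a blog.css change:
// expectOnlyRebuilt(before, after, ["blog"]);
```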
+76 -6  e2e/tests/test-utils.ts
···
   port: number;
   /** Stop the dev server */
   stop: () => Promise<void>;
+  /** Get recent log output (last N lines) */
+  getLogs: (lines?: number) => string[];
+  /** Clear captured logs */
+  clearLogs: () => void;
 }
 
 /**
···
   const childProcess = spawn(command, args, {
     cwd: fixturePath,
     stdio: ["ignore", "pipe", "pipe"],
+    env: {
+      ...process.env,
+      // Show binary output for tests so we can verify incremental build logs
+      MAUDIT_SHOW_BINARY_OUTPUT: "1",
+    },
   });
 
   // Capture output to detect when server is ready
   let serverReady = false;
+  const capturedLogs: string[] = [];
 
   const outputPromise = new Promise<number>((resolve, reject) => {
     const timeout = setTimeout(() => {
-      reject(new Error("Dev server did not start within 30 seconds"));
-    }, 30000);
+      console.error("[test-utils] Dev server startup timeout. Recent logs:");
+      console.error(capturedLogs.slice(-20).join("\n"));
+      reject(new Error("Dev server did not start within 120 seconds"));
+    }, 120000); // Increased to 120 seconds for CI
 
     childProcess.stdout?.on("data", (data: Buffer) => {
       const output = data.toString();
+      // Capture all stdout logs
+      output
+        .split("\n")
+        .filter((line) => line.trim())
+        .forEach((line) => {
+          capturedLogs.push(line);
+        });
 
       // Look for "waiting for requests" to know server is ready
       if (output.includes("waiting for requests")) {
···
     });
 
     childProcess.stderr?.on("data", (data: Buffer) => {
-      // Only log errors, not all stderr output
       const output = data.toString();
+      // Capture all stderr logs
+      output
+        .split("\n")
+        .filter((line) => line.trim())
+        .forEach((line) => {
+          capturedLogs.push(line);
+        });
+
+      // Only log errors to console, not all stderr output
       if (output.toLowerCase().includes("error")) {
         console.error(`[maudit dev] ${output}`);
       }
···
       }, 5000);
     });
   },
+  getLogs: (lines?: number) => {
+    if (lines) {
+      return capturedLogs.slice(-lines);
+    }
+    return [...capturedLogs];
+  },
+  clearLogs: () => {
+    capturedLogs.length = 0;
+  },
  };
 }
 
···
 // Worker-scoped server pool - one server per worker, shared across all tests in that worker
 // Key format: "workerIndex-fixtureName"
 const workerServers = new Map<string, DevServer>();
+
+// Track used ports to avoid collisions
+const usedPorts = new Set<number>();
+
+/**
+ * Generate a deterministic port offset based on fixture name.
+ * This ensures each fixture gets a unique port range, avoiding collisions
+ * when multiple fixtures run on the same worker.
+ */
+function getFixturePortOffset(fixtureName: string): number {
+  // Simple hash function to get a number from the fixture name
+  let hash = 0;
+  for (let i = 0; i < fixtureName.length; i++) {
+    const char = fixtureName.charCodeAt(i);
+    hash = (hash << 5) - hash + char;
+    hash = hash & hash; // Convert to 32bit integer
+  }
+  // Use modulo to keep the offset reasonable (0-99)
+  return Math.abs(hash) % 100;
+}
+
+/**
+ * Find an available port starting from the preferred port.
+ */
+function findAvailablePort(preferredPort: number): number {
+  let port = preferredPort;
+  while (usedPorts.has(port)) {
+    port++;
+  }
+  usedPorts.add(port);
+  return port;
+}
 
 /**
  * Create a test instance with a devServer fixture for a specific fixture.
  * This allows each test file to use a different fixture while sharing the same pattern.
  *
  * @param fixtureName - Name of the fixture directory under e2e/fixtures/
- * @param basePort - Starting port number (default: 1864). Each worker gets basePort + workerIndex
+ * @param basePort - Starting port number (default: 1864). Each fixture gets a unique port based on its name.
  *
  * @example
  * ```ts
···
   let server = workerServers.get(serverKey);
 
   if (!server) {
-    // Assign unique port based on worker index
-    const port = basePort + workerIndex;
+    // Calculate port based on fixture name hash + worker index to avoid collisions
+    const fixtureOffset = getFixturePortOffset(fixtureName);
+    const preferredPort = basePort + workerIndex * 100 + fixtureOffset;
+    const port = findAvailablePort(preferredPort);
 
     server = await startDevServer({
       fixture: fixtureName,
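
The port scheme above composes three pieces: a per-fixture hash offset (0-99), a per-worker stride of 100, and a linear probe past ports already claimed in-process. A small standalone sketch, with `getFixturePortOffset` reproduced from the diff; the fixture names, `basePort`, and the printed ports are only examples:

```ts
// Sketch: demonstrates how the deterministic port preference is derived.
// getFixturePortOffset is copied from the diff above; values printed here
// depend on the hash and are not asserted anywhere.
function getFixturePortOffset(fixtureName: string): number {
  let hash = 0;
  for (let i = 0; i < fixtureName.length; i++) {
    hash = (hash << 5) - hash + fixtureName.charCodeAt(i);
    hash = hash & hash; // clamp to a 32-bit integer
  }
  return Math.abs(hash) % 100;
}

const basePort = 1864; // default from createTestWithFixture
for (const fixture of ["hot-reload", "incremental-build"]) {
  for (const workerIndex of [0, 1]) {
    const preferred = basePort + workerIndex * 100 + getFixturePortOffset(fixture);
    console.log(`${fixture} / worker ${workerIndex} -> prefers port ${preferred}`);
  }
}
```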
+1 -1  website/content/docs/prefetching.md
···
 
 Note that prerendering, unlike prefetching, may require rethinking how the JavaScript on your pages works, as it'll run JavaScript from pages that the user hasn't visited yet. For example, this might result in analytics reporting incorrect page views.
 
-## Possible risks 
+## Possible risks
 
 Prefetching pages in static websites is typically always safe. In more traditional apps, an issue can arise if your pages cause side effects to happen on the server. For instance, if you were to prefetch `/logout`, your user might get disconnected on hover, or worse as soon as the log out link appear in the viewport. In modern times, it is typically not recommended to have links cause such side effects anyway, reducing the risk of this happening.
 
+1 -1  website/content/news/2026-in-the-cursed-lands.md
···
 
 ### Shortcodes
 
-Embedding a YouTube video typically means copying a long, ugly iframe tag and configuring several attributes to ensure proper rendering. It'd be nice to have something friendlier, a code that would be short, you will.
+Embedding a YouTube video typically means copying a long, ugly iframe tag and configuring several attributes to ensure proper rendering. It'd be nice to have something friendlier, a code that would be short, if you will.
 
 ```md
 Here's my cool video:
20 commits:
- feat: smoother hot reload
- fix: fix
- fix: e2e tests
- fix: longer timeout perhaps
- fix: make it more reliable
- fix: don't recompute dirs
- fix: use same target finding function in assets
- feat: some sort of incremental builds
- fix: some things
- fix: lint
- fix: remove unrelated changes
- fix: update hot reload tests
- fix: just some clean up
- fix: transitive dependencies
- fix: bunch of fixes
- fix: allow showing the binary output in dev if needs be
- perf: cache is_dev()
- fix: no idea what im doing
- fix: folders
- fix: folders