mlf-cli/src/config.rs (+15 -3)
···

 #[derive(Debug, Serialize, Deserialize)]
 pub struct SourceConfig {
-    #[serde(default = "default_source_directory")]
+    #[serde(default = "default_source_directory", skip_serializing_if = "is_default_source_directory")]
     pub directory: String,
 }

···
     "./lexicons".to_string()
 }

+fn is_default_source_directory(s: &str) -> bool {
+    s == default_source_directory()
+}
+
 #[derive(Debug, Serialize, Deserialize)]
 pub struct OutputConfig {
     pub r#type: String,
···
     #[serde(default)]
     pub dependencies: Vec<String>,

-    #[serde(default = "default_allow_transitive_deps")]
+    #[serde(default = "default_allow_transitive_deps", skip_serializing_if = "is_default_allow_transitive_deps")]
     pub allow_transitive_deps: bool,

-    #[serde(default = "default_optimize_transitive_fetches")]
+    #[serde(default = "default_optimize_transitive_fetches", skip_serializing_if = "is_default_optimize_transitive_fetches")]
     pub optimize_transitive_fetches: bool,
 }

···

 fn default_optimize_transitive_fetches() -> bool {
     false
+}
+
+fn is_default_allow_transitive_deps(b: &bool) -> bool {
+    *b == default_allow_transitive_deps()
+}
+
+fn is_default_optimize_transitive_fetches(b: &bool) -> bool {
+    *b == default_optimize_transitive_fetches()
 }

 impl Default for DependenciesConfig {
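
Side note on the config change above: with `skip_serializing_if`, fields that still hold their default value are omitted when the config is written back out. A minimal sketch of the serde pattern, separate from the real types (the struct below is a cut-down stand-in, the default of `true` is only assumed for the example, and it relies on the `serde` derive feature plus the `toml` crate):

```rust
use serde::{Deserialize, Serialize};

// Stand-in for the real DependenciesConfig in mlf-cli/src/config.rs.
#[derive(Debug, Serialize, Deserialize)]
struct DependenciesConfigSketch {
    #[serde(
        default = "default_allow_transitive_deps",
        skip_serializing_if = "is_default_allow_transitive_deps"
    )]
    allow_transitive_deps: bool,
}

// Assumed default value, for illustration only.
fn default_allow_transitive_deps() -> bool {
    true
}

fn is_default_allow_transitive_deps(b: &bool) -> bool {
    *b == default_allow_transitive_deps()
}

fn main() {
    // A value equal to the default is skipped, so a freshly written mlf.toml stays minimal.
    let default_cfg = DependenciesConfigSketch { allow_transitive_deps: true };
    println!("{:?}", toml::to_string(&default_cfg).unwrap()); // ""

    // A non-default value is still serialized explicitly.
    let custom_cfg = DependenciesConfigSketch { allow_transitive_deps: false };
    println!("{:?}", toml::to_string(&custom_cfg).unwrap()); // "allow_transitive_deps = false\n"
}
```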
mlf-cli/src/fetch.rs (+140 -107)
···

     match nsid {
         Some(namespace) => {
-            // Fetch single namespace
-            fetch_lexicon(&namespace, &project_root)?;
+            // Fetch single namespace with transitive dependencies
+            let lockfile_path = project_root.join("mlf-lock.toml");
+            let mut lockfile = LockFile::load(&lockfile_path).unwrap_or_else(|_| LockFile::new());
+
+            // Load config to check if transitive deps are enabled
+            let config_path = project_root.join("mlf.toml");
+            let config = MlfConfig::load(&config_path).map_err(FetchError::NoProjectRoot)?;
+
+            fetch_lexicon_with_lock(&namespace, &project_root, &mut lockfile)?;
+
+            // Handle transitive dependencies if enabled
+            if config.dependencies.allow_transitive_deps {
+                println!("\n→ Checking for transitive dependencies...");
+                fetch_transitive_dependencies(
+                    &project_root,
+                    &mut lockfile,
+                    config.dependencies.optimize_transitive_fetches
+                )?;
+            }
+
+            // Save lockfile
+            lockfile.save(&lockfile_path).map_err(FetchError::NoProjectRoot)?;
+            println!("\n→ Updated mlf-lock.toml");

             // Save to mlf.toml if --save flag is provided
             if save {
···
         }
     }

-    // If transitive dependencies are enabled, iteratively fetch missing deps
+    // If transitive dependencies are enabled, fetch them
     if allow_transitive {
-        let mut iteration = 0;
-        let max_iterations = 10; // Prevent infinite loops
+        fetch_transitive_dependencies(&project_root, &mut lockfile, config.dependencies.optimize_transitive_fetches)?;
+    }

-        loop {
-            iteration += 1;
-            if iteration > max_iterations {
-                eprintln!("\nWarning: Reached maximum iteration limit for transitive dependencies");
-                break;
-            }
+    // Save the lockfile
+    lockfile.save(&lockfile_path).map_err(FetchError::NoProjectRoot)?;
+    println!("\n→ Updated mlf-lock.toml");

-            // Collect unresolved references
-            let unresolved = match collect_unresolved_references(project_root) {
-                Ok(refs) => refs,
-                Err(e) => {
-                    eprintln!("\nWarning: Failed to analyze dependencies: {}", e);
-                    break;
-                }
-            };
+    if !errors.is_empty() {
+        eprintln!(
+            "\n{} dependency(ies) fetched successfully, {} error(s):",
+            success_count,
+            errors.len()
+        );
+        for (dep, error) in &errors {
+            eprintln!("  {} - {}", dep, error);
+        }
+        return Err(FetchError::HttpError(format!(
+            "Failed to fetch {} dependencies",
+            errors.len()
+        )));
+    }
+
+    println!("\n✓ Successfully fetched all {} dependencies", success_count);
+    Ok(())
+}
+
+/// Fetch transitive dependencies by iteratively resolving unresolved references
+fn fetch_transitive_dependencies(
+    project_root: &std::path::Path,
+    lockfile: &mut LockFile,
+    optimize_fetches: bool
+) -> Result<(), FetchError> {
+    let mut fetched_nsids = HashSet::new();
+    // Track NSIDs from lockfile as already fetched
+    for nsid in lockfile.lexicons.keys() {
+        fetched_nsids.insert(nsid.clone());
+    }
+
+    let mut iteration = 0;
+    const MAX_ITERATIONS: usize = 10;

-            // Filter out NSIDs we've already fetched or tried to fetch
-            let new_deps: HashSet<String> = unresolved
-                .into_iter()
-                .filter(|nsid| !fetched_nsids.contains(nsid))
-                .collect();
+    loop {
+        iteration += 1;
+        if iteration > MAX_ITERATIONS {
+            eprintln!("\nWarning: Reached maximum iteration limit for transitive dependencies");
+            break;
+        }

-            if new_deps.is_empty() {
+        // Collect unresolved references
+        let unresolved = match collect_unresolved_references(project_root) {
+            Ok(refs) => refs,
+            Err(e) => {
+                eprintln!("\nWarning: Failed to analyze dependencies: {}", e);
                 break;
             }
+        };

-            // Determine whether to optimize transitive fetches
-            let should_optimize = config.dependencies.optimize_transitive_fetches;
+        // Filter out NSIDs we've already fetched or tried to fetch
+        let new_deps: HashSet<String> = unresolved
+            .into_iter()
+            .filter(|nsid| !fetched_nsids.contains(nsid))
+            .collect();
+
+        if new_deps.is_empty() {
+            break;
+        }

-            if should_optimize {
-                // Optimize the fetch patterns to reduce number of fetches
-                let optimized_patterns = optimize_fetch_patterns(&new_deps);
+        if optimize_fetches {
+            // Optimize the fetch patterns to reduce number of fetches
+            let optimized_patterns = optimize_fetch_patterns(&new_deps);

-                println!("\n→ Found {} unresolved reference(s), fetching {} optimized pattern(s)...",
-                    new_deps.len(), optimized_patterns.len());
+            println!("\n→ Found {} unresolved reference(s), fetching {} optimized pattern(s)...",
+                new_deps.len(), optimized_patterns.len());

-                // Track which patterns are wildcards and their constituent NSIDs
-                let mut wildcard_failures: Vec<(String, Vec<String>)> = Vec::new();
+            // Track which patterns are wildcards and their constituent NSIDs
+            let mut wildcard_failures: Vec<(String, Vec<String>)> = Vec::new();

-                for pattern in optimized_patterns {
-                    let is_wildcard = pattern.ends_with(".*");
-                    println!("\nFetching transitive dependency: {}", pattern);
-                    fetched_nsids.insert(pattern.clone());
+            for pattern in optimized_patterns {
+                let is_wildcard = pattern.ends_with(".*");
+                println!("\nFetching transitive dependency: {}", pattern);
+                fetched_nsids.insert(pattern.clone());

-                    match fetch_lexicon_with_lock(&pattern, project_root, &mut lockfile) {
-                        Ok(()) => {
-                            success_count += 1;
-                        }
-                        Err(e) => {
-                            eprintln!("  Warning: Failed to fetch {}: {}", pattern, e);
+                match fetch_lexicon_with_lock(&pattern, project_root, lockfile) {
+                    Ok(()) => {}
+                    Err(e) => {
+                        eprintln!("  Warning: Failed to fetch {}: {}", pattern, e);

-                            // If this was a wildcard that failed, collect the individual NSIDs for retry
-                            if is_wildcard {
-                                let pattern_prefix = pattern.strip_suffix(".*").unwrap();
-                                let matching_nsids: Vec<String> = new_deps.iter()
-                                    .filter(|nsid| nsid.starts_with(pattern_prefix))
-                                    .cloned()
-                                    .collect();
+                        // If this was a wildcard that failed, collect the individual NSIDs for retry
+                        if is_wildcard {
+                            let pattern_prefix = pattern.strip_suffix(".*").unwrap();
+                            let matching_nsids: Vec<String> = new_deps.iter()
+                                .filter(|nsid| nsid.starts_with(pattern_prefix))
+                                .cloned()
+                                .collect();

-                                if !matching_nsids.is_empty() {
-                                    wildcard_failures.push((pattern.clone(), matching_nsids));
-                                }
+                            if !matching_nsids.is_empty() {
+                                wildcard_failures.push((pattern.clone(), matching_nsids));
                             }
                         }
                     }
                 }
+            }

-                // Retry failed wildcards with individual NSIDs
-                if !wildcard_failures.is_empty() {
-                    println!("\n→ Retrying failed wildcard patterns with individual NSIDs...");
+            // Retry failed wildcards with individual NSIDs
+            if !wildcard_failures.is_empty() {
+                println!("\n→ Retrying failed wildcard patterns with individual NSIDs...");

-                    for (failed_pattern, nsids) in wildcard_failures {
-                        println!("  Retrying {} NSIDs from failed pattern: {}", nsids.len(), failed_pattern);
+                for (failed_pattern, nsids) in wildcard_failures {
+                    println!("  Retrying {} NSIDs from failed pattern: {}", nsids.len(), failed_pattern);

-                        for nsid in nsids {
-                            if !fetched_nsids.contains(&nsid) {
-                                println!("    Fetching: {}", nsid);
-                                fetched_nsids.insert(nsid.clone());
+                    for nsid in nsids {
+                        if !fetched_nsids.contains(&nsid) {
+                            println!("    Fetching: {}", nsid);
+                            fetched_nsids.insert(nsid.clone());

-                                match fetch_lexicon_with_lock(&nsid, project_root, &mut lockfile) {
-                                    Ok(()) => {
-                                        success_count += 1;
-                                    }
-                                    Err(e) => {
-                                        eprintln!("  Warning: Failed to fetch {}: {}", nsid, e);
-                                    }
+                            match fetch_lexicon_with_lock(&nsid, project_root, lockfile) {
+                                Ok(()) => {}
+                                Err(e) => {
+                                    eprintln!("  Warning: Failed to fetch {}: {}", nsid, e);
                                 }
                             }
                         }
                     }
                 }
-            } else {
-                // Fetch individually without optimization (safer, more predictable)
-                println!("\n→ Found {} unresolved reference(s), fetching individually...",
-                    new_deps.len());
+            }
+        } else {
+            // Fetch individually without optimization (safer, more predictable)
+            println!("\n→ Found {} unresolved reference(s), fetching individually...",
+                new_deps.len());

-                for nsid in &new_deps {
-                    println!("\nFetching transitive dependency: {}", nsid);
-                    fetched_nsids.insert(nsid.clone());
+            for nsid in &new_deps {
+                println!("\nFetching transitive dependency: {}", nsid);
+                fetched_nsids.insert(nsid.clone());

-                    match fetch_lexicon_with_lock(nsid, project_root, &mut lockfile) {
-                        Ok(()) => {
-                            success_count += 1;
-                        }
-                        Err(e) => {
-                            // Don't fail the entire fetch for transitive deps
-                            eprintln!("  Warning: Failed to fetch {}: {}", nsid, e);
-                        }
+                match fetch_lexicon_with_lock(nsid, project_root, lockfile) {
+                    Ok(()) => {}
+                    Err(e) => {
+                        // Don't fail the entire fetch for transitive deps
+                        eprintln!("  Warning: Failed to fetch {}: {}", nsid, e);
                     }
                 }
             }
         }
     }

-    // Save the lockfile
-    lockfile.save(&lockfile_path).map_err(FetchError::NoProjectRoot)?;
-    println!("\n→ Updated mlf-lock.toml");
-
-    if !errors.is_empty() {
-        eprintln!(
-            "\n{} dependency(ies) fetched successfully, {} error(s):",
-            success_count,
-            errors.len()
-        );
-        for (dep, error) in &errors {
-            eprintln!("  {} - {}", dep, error);
-        }
-        return Err(FetchError::HttpError(format!(
-            "Failed to fetch {} dependencies",
-            errors.len()
-        )));
-    }
-
-    println!("\n✓ Successfully fetched all {} dependencies", success_count);
     Ok(())
 }

···
         return Ok(HashSet::new());
     }

-    // Build a workspace from all fetched MLF files
-    let mut workspace = Workspace::new();
+    // Build a workspace with std library to avoid fetching std types
+    let mut workspace = Workspace::with_std()
+        .map_err(|e| FetchError::IoError(std::io::Error::new(
+            std::io::ErrorKind::Other,
+            format!("Failed to load standard library: {:?}", e)
+        )))?;
     let mut unresolved = HashSet::new();

     // Recursively find all .mlf files
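
The new `fetch_transitive_dependencies` is essentially a bounded fixed-point loop: collect unresolved references, fetch anything not yet attempted, and repeat until nothing new turns up or the iteration cap is hit. A self-contained sketch of that control flow, with closures standing in for `collect_unresolved_references` and `fetch_lexicon_with_lock` (the names and signatures here are illustrative, not the real API):

```rust
use std::collections::HashSet;

/// Sketch of the loop in fetch_transitive_dependencies: resolve, fetch what
/// is newly unresolved, and stop at a fixed point or at the iteration cap.
fn fetch_until_settled(
    mut unresolved_refs: impl FnMut() -> HashSet<String>,
    mut fetch: impl FnMut(&str) -> Result<(), String>,
) {
    const MAX_ITERATIONS: usize = 10; // same guard as the real loop
    let mut fetched: HashSet<String> = HashSet::new();

    for _ in 0..MAX_ITERATIONS {
        // Only NSIDs we have not already tried are candidates this round.
        let new_deps: Vec<String> = unresolved_refs()
            .into_iter()
            .filter(|nsid| !fetched.contains(nsid))
            .collect();

        if new_deps.is_empty() {
            return; // fixed point: every reference resolves
        }

        for nsid in new_deps {
            fetched.insert(nsid.clone());
            if let Err(e) = fetch(&nsid) {
                // Transitive failures are warnings, not hard errors.
                eprintln!("  Warning: Failed to fetch {}: {}", nsid, e);
            }
        }
    }
    eprintln!("Warning: Reached maximum iteration limit for transitive dependencies");
}

fn main() {
    // Toy resolver: each round discovers one more missing lexicon, then settles.
    // (pop() takes from the end, so the rounds run bottom-up.)
    let mut rounds = vec![
        HashSet::new(),                                      // round 3: settled
        HashSet::from(["com.example.defs".to_string()]),     // round 2
        HashSet::from(["com.example.profile".to_string()]),  // round 1
    ];
    fetch_until_settled(
        move || rounds.pop().unwrap_or_default(),
        |nsid| {
            println!("fetching {}", nsid);
            Ok(())
        },
    );
}
```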
mlf-cli/src/generate/mlf.rs (+3 -2)
···
             .collect();

         if !param_strs.is_empty() {
-            output.push_str(&param_strs.join(","));
+            output.push_str(&param_strs.join(",\n "));
         }
     }
 }
···
             .collect();

         if !param_strs.is_empty() {
-            output.push_str(&param_strs.join(","));
+            output.push_str(&param_strs.join(",\n "));
         }
     }
 }
···
     }

     // Use last segment of NSID for "main" definitions
+    // Keywords are now allowed by the parser, so just escape with backticks
     let def_name = if name == "main" {
         escape_name(last_segment)
     } else {
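
For context on the two `join` changes above: the only behavioral difference is the separator, so each parameter in the generated MLF now starts on its own line instead of running together. A toy illustration (the parameter strings are invented, and the real generator's indentation width may differ):

```rust
fn main() {
    let param_strs = vec!["limit: integer".to_string(), "cursor: string".to_string()];

    // Old separator: parameters run together on one line.
    assert_eq!(param_strs.join(","), "limit: integer,cursor: string");

    // New separator: each parameter begins on its own indented line.
    assert_eq!(param_strs.join(",\n "), "limit: integer,\n cursor: string");
}
```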
mlf-lang/src/parser.rs (+1 -1)
···
     fn parse_def_type(&mut self, docs: Vec<DocComment>, annotations: Vec<Annotation>) -> Result<Item, ParseError> {
         let start = self.expect(LexToken::Def)?;
         self.expect(LexToken::Type)?;
-        let name = self.parse_ident()?;
+        let name = self.parse_ident()?; // Backticked keywords are already converted to Ident by lexer
         self.expect(LexToken::Equals)?;
         let ty = self.parse_type()?;
         let end = self.expect(LexToken::Semicolon)?;
mlf-lang/src/workspace.rs (+264 -42)
···
 struct ImportTable {
     mappings: BTreeMap<String, ImportedSymbol>,
     used_imports: BTreeSet<String>,
+    // Maps namespace alias to full namespace path
+    // e.g., "example" -> "com.example"
+    namespace_aliases: BTreeMap<String, String>,
 }

 #[derive(Debug, Clone, PartialEq)]
···
             return Err(errors);
         }

-        // Check for unused imports
+        // Check for unused imports (warnings only, don't fail)
         if let Err(mut unused_import_errors) = self.check_unused_imports() {
             errors.append(&mut unused_import_errors);
         }
···
             errors.append(&mut typecheck_errors);
         }

-        if errors.is_empty() {
+        // Filter out warnings (UnusedImport) from blocking errors
+        let blocking_errors: Vec<ValidationError> = errors.errors.into_iter()
+            .filter(|e| !matches!(e, ValidationError::UnusedImport { .. }))
+            .collect();
+
+        if blocking_errors.is_empty() {
             Ok(())
         } else {
-            Err(errors)
+            Err(ValidationErrors { errors: blocking_errors })
         }
     }

···
             }
         };

-        if !self.modules.contains_key(&target_namespace) {
+        // Check if the target namespace exists or if there are modules with that prefix
+        let namespace_exists = self.modules.contains_key(&target_namespace);
+        let has_children = self.modules.keys().any(|ns| ns.starts_with(&alloc::format!("{}.", target_namespace)));
+
+        if !namespace_exists && !has_children {
             errors.push(ValidationError::UndefinedReference {
                 name: target_namespace.clone(),
                 span: use_stmt.path.span,
···
             return Err(errors);
         }

+        let namespace_alias_to_add: Option<(String, String)> = match &use_stmt.imports {
+            UseImports::All => {
+                // Check for implicit main resolution
+                // If namespace suffix matches a type name, import only that type
+                // Otherwise, this is a namespace alias for path shortening
+                let namespace_suffix = target_namespace.split('.').last().unwrap_or(&target_namespace);
+
+                if let Some(target_module) = self.modules.get(&target_namespace) {
+                    // Module exists - check if there's a type matching the namespace suffix
+                    if target_module.symbols.types.contains_key(namespace_suffix) {
+                        // Implicit main resolution: no namespace alias, just type import
+                        None
+                    } else {
+                        // No type matching namespace suffix - this is a namespace alias
+                        // Create alias: namespace_suffix -> target_namespace
+                        // e.g., "use com.example;" creates alias "example" -> "com.example"
+                        Some((namespace_suffix.to_string(), target_namespace.clone()))
+                    }
+                } else {
+                    // Module doesn't exist but has children - create namespace alias
+                    // e.g., "use com.example;" with only "com.example.defs" module
+                    Some((namespace_suffix.to_string(), target_namespace.clone()))
+                }
+            }
+            UseImports::Items(_) => {
+                // Items imports don't create namespace aliases
+                None
+            }
+        };
+
         let imports_to_add: Vec<(String, ImportedSymbol)> = match &use_stmt.imports {
             UseImports::All => {
-                // Import all types from the namespace
-                let target_module = self.modules.get(&target_namespace).unwrap();
-                target_module.symbols.types.keys()
-                    .map(|type_name| {
+                // Check for implicit main resolution
+                // If namespace suffix matches a type name, import only that type
+                let namespace_suffix = target_namespace.split('.').last().unwrap_or(&target_namespace);
+
+                if let Some(target_module) = self.modules.get(&target_namespace) {
+                    // Check if there's a type matching the namespace suffix
+                    if target_module.symbols.types.contains_key(namespace_suffix) {
+                        // Implicit main resolution: import only the type matching the namespace suffix
                         let imported = ImportedSymbol {
                             original_path: use_stmt.path.segments.iter()
                                 .map(|s| s.name.clone())
-                                .chain(core::iter::once(type_name.clone()))
                                 .collect(),
-                            local_name: type_name.clone(),
+                            local_name: namespace_suffix.to_string(),
                             span: use_stmt.path.span,
                         };
-                        (type_name.clone(), imported)
-                    })
-                    .collect()
+                        vec![(namespace_suffix.to_string(), imported)]
+                    } else {
+                        // Namespace alias only, no type imports
+                        vec![]
+                    }
+                } else {
+                    // Module doesn't exist - namespace alias only
+                    vec![]
+                }
             }
             UseImports::Items(items) => {
                 // Import specific items from the namespace
···
             module.imports.mappings.insert(local_name, imported);
         }

+        // Add namespace alias if one was created
+        if let Some((alias, full_namespace)) = namespace_alias_to_add {
+            module.imports.namespace_aliases.insert(alias, full_namespace);
+        }
+
         if errors.is_empty() {
             Ok(())
         } else {
···
             return Err(errors);
         }

-        let target_namespace = path.segments[..path.segments.len() - 1]
-            .iter()
-            .map(|s| s.name.as_str())
-            .collect::<Vec<_>>()
-            .join(".");
+        // Multi-segment path: check for namespace alias
+        // If the first segment is a namespace alias, expand it
+        let first_segment = &path.segments[0].name;
         let type_name = &path.segments[path.segments.len() - 1].name;

+        let target_namespace = if let Some(module) = self.modules.get(current_namespace) {
+            if let Some(full_ns) = module.imports.namespace_aliases.get(first_segment) {
+                // Found a namespace alias! Expand it
+                // e.g., "example.defs.foo" with alias "example" -> "com.example"
+                // The namespace part is "example.defs" which expands to "com.example.defs"
+                // (excluding the last segment "foo" which is the type name)
+                let middle_segments: Vec<&str> = path.segments[1..path.segments.len() - 1]
+                    .iter()
+                    .map(|s| s.name.as_str())
+                    .collect();
+                if middle_segments.is_empty() {
+                    // e.g., "example.foo" -> "com.example"
+                    full_ns.clone()
+                } else {
+                    // e.g., "example.defs.foo" -> "com.example.defs"
+                    alloc::format!("{}.{}", full_ns, middle_segments.join("."))
+                }
+            } else {
+                // No alias, use the path as-is (excluding type name)
+                path.segments[..path.segments.len() - 1]
+                    .iter()
+                    .map(|s| s.name.as_str())
+                    .collect::<Vec<_>>()
+                    .join(".")
+            }
+        } else {
+            path.segments[..path.segments.len() - 1]
+                .iter()
+                .map(|s| s.name.as_str())
+                .collect::<Vec<_>>()
+                .join(".")
+        };
+
         // First try: normal resolution (namespace + type)
         if let Some(module) = self.modules.get(&target_namespace) {
             if module.symbols.types.contains_key(type_name) {
···
         // Second try: implicit main resolution
         // If com.atproto.repo.strongRef fails, try treating the full path as a namespace
         // and look for a type named "strongRef" (matching the namespace suffix)
-        let full_namespace = &full_path;
-        if let Some(module) = self.modules.get(full_namespace) {
-            let namespace_suffix = full_namespace.split('.').last().unwrap_or(full_namespace);
+        // Need to use the expanded path if an alias was used
+        let expanded_full_path = if target_namespace.is_empty() {
+            type_name.to_string()
+        } else {
+            alloc::format!("{}.{}", target_namespace, type_name)
+        };
+        if let Some(module) = self.modules.get(&expanded_full_path) {
+            let namespace_suffix = expanded_full_path.split('.').last().unwrap_or(&expanded_full_path);
             if namespace_suffix == type_name && module.symbols.types.contains_key(type_name) {
                 return Ok(());
             }
···

         // Prelude should be accessible
         assert!(ws.modules.contains_key("prelude"));
-    }
-
-    #[test]
-    fn test_use_import_all() {
-        let mut ws = Workspace::new();
-
-        let a = parse_lexicon("record foo {} inline type bar = string;").unwrap();
-        ws.add_module("a".into(), a).unwrap();
-
-        let b = parse_lexicon("use a; record baz { x: foo, y: bar, }").unwrap();
-        ws.add_module("b".into(), b).unwrap();
-
-        assert!(ws.resolve().is_ok());
     }

     #[test]
···
         let b = parse_lexicon("use a.foo as Foo; record bar {}").unwrap();
         ws.add_module("b".into(), b).unwrap();

+        // UnusedImport is now a warning, not a blocking error
         let result = ws.resolve();
-        assert!(result.is_err());
-        let errors = result.unwrap_err();
-        assert!(errors.errors.iter().any(|e| matches!(e, ValidationError::UnusedImport { .. })));
+        assert!(result.is_ok());
     }

     #[test]
···
         let b = parse_lexicon("use a; record baz {}").unwrap();
         ws.add_module("b".into(), b).unwrap();

+        // UnusedImport is now a warning, not a blocking error
         let result = ws.resolve();
-        assert!(result.is_err());
-        let errors = result.unwrap_err();
-        // Should have 2 unused import errors (foo and bar)
-        let unused_count = errors.errors.iter().filter(|e| matches!(e, ValidationError::UnusedImport { .. })).count();
-        assert_eq!(unused_count, 2);
+        assert!(result.is_ok());
+    }
+
+    #[test]
+    fn test_use_implicit_main_resolution() {
+        let mut ws = Workspace::new();
+
+        // Create a namespace where the namespace suffix matches a type name
+        // No @main annotation needed for implicit main resolution
+        let profile_ns = parse_lexicon(r#"
+            def type color = {
+                red!: integer,
+                green!: integer,
+                blue!: integer,
+            };
+
+            record profile {
+                color: color,
+            }
+        "#).unwrap();
+        ws.add_module("place.stream.chat.profile".into(), profile_ns).unwrap();
+
+        // Import using implicit main resolution - should only import profile, not color
+        let bookmark = parse_lexicon(r#"
+            use place.stream.chat.profile;
+
+            record bookmark {
+                owner!: profile,
+            }
+        "#).unwrap();
+        ws.add_module("com.example.bookmark".into(), bookmark).unwrap();
+
+        // Should resolve without unused import warning for color
+        let result = ws.resolve();
+        assert!(result.is_ok());
+
+        // Verify that only profile was imported (implicit main resolution)
+        let imports = ws.get_imports("com.example.bookmark");
+        assert_eq!(imports.len(), 1);
+        assert_eq!(imports[0].0, "profile");
+    }
+
+    #[test]
+    fn test_use_namespace_alias() {
+        let mut ws = Workspace::new();
+
+        // Create a namespace where the suffix doesn't match a type name
+        let defs = parse_lexicon(r#"
+            def type foo = string;
+            def type bar = integer;
+        "#).unwrap();
+        ws.add_module("com.example.defs".into(), defs).unwrap();
+
+        // Using "use com.example;" creates a namespace alias "example" -> "com.example"
+        // This allows referencing types via the shortened path
+        let app = parse_lexicon(r#"
+            use com.example;
+
+            record thing {
+                x!: example.defs.foo,
+                y!: example.defs.bar,
+            }
+        "#).unwrap();
+        ws.add_module("com.example.app".into(), app).unwrap();
+
+        // Should resolve successfully using the namespace alias
+        let result = ws.resolve();
+        if let Err(ref e) = result {
+            eprintln!("Errors: {:?}", e);
+        }
+        assert!(result.is_ok());
+
+        // Verify that no types were imported (it's just a namespace alias)
+        let imports = ws.get_imports("com.example.app");
+        assert_eq!(imports.len(), 0);
+    }
+
+    #[test]
+    fn test_use_namespace_alias_nested() {
+        let mut ws = Workspace::new();
+
+        // Create nested namespaces
+        let actor_defs = parse_lexicon(r#"
+            def type profileView = {
+                did!: string,
+                handle!: string,
+            };
+        "#).unwrap();
+        ws.add_module("app.bsky.actor.defs".into(), actor_defs).unwrap();
+
+        let feed_post = parse_lexicon(r#"
+            def type post = {
+                text!: string,
+            };
+        "#).unwrap();
+        ws.add_module("app.bsky.feed.post".into(), feed_post).unwrap();
+
+        // Use namespace alias to shorten references
+        let like = parse_lexicon(r#"
+            use app.bsky;
+
+            record like {
+                subject!: bsky.feed.post,
+                actor!: bsky.actor.defs.profileView,
+            }
+        "#).unwrap();
+        ws.add_module("app.bsky.feed.like".into(), like).unwrap();
+
+        let result = ws.resolve();
+        if let Err(ref e) = result {
+            eprintln!("Errors: {:?}", e);
+        }
+        assert!(result.is_ok());
+    }
+
+    #[test]
+    fn test_use_namespace_reference_full_path() {
+        let mut ws = Workspace::new();
+
+        // Create a namespace where the suffix doesn't match a type name
+        let defs = parse_lexicon(r#"
+            def type foo = string;
+            def type bar = integer;
+        "#).unwrap();
+        ws.add_module("com.example.defs".into(), defs).unwrap();
+
+        // Using "use com.example.defs;" where "defs" is not a type name
+        // creates a namespace alias "defs" -> "com.example.defs"
+        let app = parse_lexicon(r#"
+            use com.example.defs;
+
+            record thing {
+                x!: defs.foo,
+                y!: defs.bar,
+            }
+        "#).unwrap();
+        ws.add_module("com.example.app".into(), app).unwrap();
+
+        // Should resolve successfully using the namespace alias
+        let result = ws.resolve();
+        if let Err(ref e) = result {
+            eprintln!("Errors: {:?}", e);
+        }
+        assert!(result.is_ok());
+
+        // Verify that no types were imported (it's just a namespace alias)
+        let imports = ws.get_imports("com.example.app");
+        assert_eq!(imports.len(), 0);
     }
 }
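
The heart of the namespace-alias feature is the path expansion done during reference resolution: if the first segment of a multi-segment path matches an alias recorded in `ImportTable::namespace_aliases`, it is replaced with the full namespace before lookup. A standalone sketch of that rule (the helper name and alias map are illustrative only; the real logic is inlined in the resolver above):

```rust
use std::collections::BTreeMap;

// Hypothetical helper mirroring the alias-expansion rule: the last path
// segment is the type name, and the first segment may be an alias.
// Assumes a multi-segment path (at least namespace + type name).
fn expand_namespace(path: &[&str], aliases: &BTreeMap<String, String>) -> String {
    let middle = &path[1..path.len() - 1];
    match aliases.get(path[0]) {
        Some(full_ns) if middle.is_empty() => full_ns.clone(),
        Some(full_ns) => format!("{}.{}", full_ns, middle.join(".")),
        // No alias: the path minus the type name is used as-is.
        None => path[..path.len() - 1].join("."),
    }
}

fn main() {
    let mut aliases = BTreeMap::new();
    aliases.insert("example".to_string(), "com.example".to_string());

    // "example.defs.foo" resolves against the namespace "com.example.defs".
    assert_eq!(expand_namespace(&["example", "defs", "foo"], &aliases), "com.example.defs");
    // "example.foo" resolves against "com.example".
    assert_eq!(expand_namespace(&["example", "foo"], &aliases), "com.example");
    // Unaliased paths behave exactly as before.
    assert_eq!(expand_namespace(&["com", "other", "bar"], &aliases), "com.other");
}
```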