···100100}
101101102102#[derive(Debug, Deserialize)]
103103-struct AtProtoRepoListRecords {
104104- records: Vec<AtProtoRecord>,
105105-}
106106-107107-#[derive(Debug, Deserialize)]
108103struct AtProtoRecord {
109104 uri: String,
110105 value: serde_json::Value,
···249244 return Ok(());
250245 }
251246252252- // Extract authority from NSID (e.g., "place.stream" from "place.stream.key")
253253- let authority = extract_authority(nsid_pattern)?;
247247+ // Extract authority and name segments from NSID
248248+ // For "app.bsky.actor.profile", authority is "app.bsky", name is "actor.profile"
249249+ // For DNS lookup, we need "_lexicon.actor.bsky.app"
250250+ let (authority, name_segments) = extract_authority_and_name(nsid_pattern)?;
254251 println!("Fetching lexicons for pattern: {}", nsid);
255252256253 // Step 1: DNS TXT lookup
257257- let did = resolve_lexicon_did(&authority)?;
254254+ let did = resolve_lexicon_did(&authority, &name_segments)?;
258255 println!(" → Resolved DID: {}", did);
259256260257 // Step 2: Query ATProto repo for lexicon schemas
···278275 // Match against pattern
279276 let matches = if is_wildcard {
280277 // Wildcard: match all records starting with the pattern
281281- record_nsid.starts_with(nsid_pattern) && record_nsid.len() > nsid_pattern.len()
278278+ // For "app.bsky.actor.*", nsid_pattern is "app.bsky.actor"
279279+ // Should match "app.bsky.actor.defs", "app.bsky.actor.profile", etc.
280280+ let starts_with_pattern = record_nsid.starts_with(nsid_pattern);
281281+ let has_more_segments = record_nsid.len() > nsid_pattern.len();
282282+ let is_direct_child = if starts_with_pattern && has_more_segments {
283283+ // Check if the next character after the pattern is a dot
284284+ record_nsid.chars().nth(nsid_pattern.len()) == Some('.')
285285+ } else {
286286+ false
287287+ };
288288+289289+ starts_with_pattern && has_more_segments && is_direct_child
282290 } else {
283291 // Specific: exact match only
284292 record_nsid == nsid
···355363356364 let parts: Vec<&str> = nsid_base.split('.').collect();
357365358358- // NSID must have at least 3 segments (authority + name)
359359- // e.g., "place.stream.key" or "place.stream.*"
360360- if parts.len() < 3 {
366366+ // NSID must have at least 2 segments (authority)
367367+ // e.g., "place.stream", "place.stream.key", or "place.stream.*"
368368+ if parts.len() < 2 {
361369 return Err(FetchError::InvalidNsid(format!(
362362- "NSID must have at least 3 segments or use wildcard (e.g., 'place.stream.key' or 'place.stream.*'): {}",
370370+ "NSID must have at least 2 segments (e.g., 'place.stream' or 'com.atproto.repo.strongRef'): {}",
363371 nsid
364372 )));
365373 }
···367375 Ok(())
368376}
369377370370-fn extract_authority(nsid_pattern: &str) -> Result<String, FetchError> {
378378+fn extract_authority_and_name(nsid_pattern: &str) -> Result<(String, String), FetchError> {
371379 // NSID format: authority.name(.name)*
372372- // For "place.stream.key", authority is "place.stream"
373373- // Typically authority is the first 2 segments (reversed domain)
380380+ // For "place.stream.key", authority is "place.stream" (first 2), name is "key"
381381+ // For "app.bsky.actor.profile", authority is "app.bsky" (first 2), name is "actor.profile"
374382 let parts: Vec<&str> = nsid_pattern.split('.').collect();
375383376384 if parts.len() < 2 {
···380388 )));
381389 }
382390383383- // Take first 2 segments as authority (reversed domain)
384384- Ok(format!("{}.{}", parts[0], parts[1]))
391391+ // Authority is first 2 segments (reversed domain)
392392+ let authority = format!("{}.{}", parts[0], parts[1]);
393393+394394+ // Name segments are everything after the authority
395395+ let name_segments = if parts.len() > 2 {
396396+ parts[2..].join(".")
397397+ } else {
398398+ String::new()
399399+ };
400400+401401+ Ok((authority, name_segments))
385402}
386403387387-fn resolve_lexicon_did(authority: &str) -> Result<String, FetchError> {
388388- // Reverse the authority for DNS lookup
389389- // "stream.place" -> "place.stream" -> "_lexicon.place.stream"
390390- let parts: Vec<&str> = authority.split('.').collect();
391391- let reversed_parts: Vec<&str> = parts.iter().rev().copied().collect();
392392- let dns_name = format!("_lexicon.{}", reversed_parts.join("."));
404404+fn resolve_lexicon_did(authority: &str, name_segments: &str) -> Result<String, FetchError> {
405405+ // Reverse the authority for DNS lookup and prepend name segments
406406+ // For "app.bsky" + "actor": "_lexicon.actor.bsky.app"
407407+ // For "place.stream" + "key": "_lexicon.key.stream.place"
408408+ let auth_parts: Vec<&str> = authority.split('.').collect();
409409+ let reversed_auth: Vec<&str> = auth_parts.iter().rev().copied().collect();
410410+411411+ let dns_name = if name_segments.is_empty() {
412412+ // No name segments, just use reversed authority
413413+ // For "place.stream": "_lexicon.stream.place"
414414+ format!("_lexicon.{}", reversed_auth.join("."))
415415+ } else {
416416+ // Prepend name segments before reversed authority
417417+ // For "app.bsky" + "actor": "_lexicon.actor.bsky.app"
418418+ format!("_lexicon.{}.{}", name_segments, reversed_auth.join("."))
419419+ };
393420394421 println!(" Looking up DNS TXT record: {}", dns_name);
395422···422449fn fetch_lexicon_records(did: &str) -> Result<Vec<AtProtoRecord>, FetchError> {
423450 // Query the ATProto repo for records in com.atproto.lexicon.schema collection
424451 // We need to use the repo.listRecords XRPC endpoint
452452+ // Note: This API is paginated, so we need to fetch all pages
425453426454 // First, resolve the DID to a PDS endpoint
427455 let pds_url = resolve_did_to_pds(did)?;
428456429457 println!(" → Using PDS: {}", pds_url);
430458431431- // Query listRecords endpoint
432432- let url = format!(
433433- "{}/xrpc/com.atproto.repo.listRecords?repo={}&collection=com.atproto.lexicon.schema",
434434- pds_url, did
435435- );
459459+ let mut all_records = Vec::new();
460460+ let mut cursor: Option<String> = None;
461461+ let mut page_num = 1;
462462+463463+ loop {
464464+ // Build URL with optional cursor
465465+ let url = if let Some(ref c) = cursor {
466466+ format!(
467467+ "{}/xrpc/com.atproto.repo.listRecords?repo={}&collection=com.atproto.lexicon.schema&cursor={}",
468468+ pds_url, did, c
469469+ )
470470+ } else {
471471+ format!(
472472+ "{}/xrpc/com.atproto.repo.listRecords?repo={}&collection=com.atproto.lexicon.schema",
473473+ pds_url, did
474474+ )
475475+ };
476476+477477+ println!(" Fetching lexicon records (page {})...", page_num);
478478+479479+ let response = reqwest::blocking::get(&url)
480480+ .map_err(|e| FetchError::HttpError(format!("Failed to fetch records: {}", e)))?;
481481+482482+ if !response.status().is_success() {
483483+ return Err(FetchError::HttpError(format!(
484484+ "HTTP {} when fetching records",
485485+ response.status()
486486+ )));
487487+ }
488488+489489+ let mut list_response: serde_json::Value = response
490490+ .json()
491491+ .map_err(|e| FetchError::HttpError(format!("Failed to parse response: {}", e)))?;
492492+493493+ // Extract records
494494+ if let Some(records_array) = list_response.get_mut("records") {
495495+ if let Some(records) = records_array.as_array_mut() {
496496+ for record_value in records.drain(..) {
497497+ let record: AtProtoRecord = serde_json::from_value(record_value)
498498+ .map_err(|e| FetchError::HttpError(format!("Failed to parse record: {}", e)))?;
499499+ all_records.push(record);
500500+ }
501501+ }
502502+ }
436503437437- println!(" Fetching lexicon records from: {}", url);
504504+ // Check for cursor to continue pagination
505505+ cursor = list_response.get("cursor")
506506+ .and_then(|c| c.as_str())
507507+ .map(|s| s.to_string());
438508439439- let response = reqwest::blocking::get(&url)
440440- .map_err(|e| FetchError::HttpError(format!("Failed to fetch records: {}", e)))?;
509509+ if cursor.is_none() {
510510+ break;
511511+ }
441512442442- if !response.status().is_success() {
443443- return Err(FetchError::HttpError(format!(
444444- "HTTP {} when fetching records",
445445- response.status()
446446- )));
513513+ page_num += 1;
447514 }
448515449449- let list_response: AtProtoRepoListRecords = response
450450- .json()
451451- .map_err(|e| FetchError::HttpError(format!("Failed to parse response: {}", e)))?;
452452-453453- Ok(list_response.records)
516516+ Ok(all_records)
454517}
455518456519fn resolve_did_to_pds(did: &str) -> Result<String, FetchError> {
···11+/// Context detection for smart completions
#[derive(Debug, PartialEq)]
pub enum CompletionContext {
    /// Top-level (suggesting keywords like record, query, etc.)
    TopLevel,
    /// After "use" keyword (suggesting module paths)
    UseStatement,
    /// In a type position (field type, parameter type, etc.)
    TypePosition,
    /// Inside constrained { } block
    ConstraintBlock,
    /// Unknown context
    Unknown,
}

/// Detect the completion context based on the text before the cursor.
///
/// Checks are applied in priority order:
/// 1. a `use` statement on the current line (checked before trimming so a
///    trailing space after `use` is preserved),
/// 2. an unclosed `constrained { ... }` block,
/// 3. a type position (cursor after a `:`),
/// 4. the inside of any other unbalanced `{`/`(` block,
/// falling back to `TopLevel`.
pub fn detect_context(text_before_cursor: &str) -> CompletionContext {
    // Check if we're in a use statement (check before trimming!)
    if let Some(last_line) = text_before_cursor.lines().last() {
        let last_line = last_line.trim_start(); // Only trim left side

        // "use ", "use com.", "use com.atproto."
        if last_line.starts_with("use ") && !last_line.contains(';') {
            return CompletionContext::UseStatement;
        }
    }

    let trimmed = text_before_cursor.trim_end();

    // Check if we're inside a constrained block
    // Count open and closed braces after "constrained"
    // NOTE(review): rfind matches "constrained" as a raw substring, so an
    // identifier containing it (e.g. "unconstrained") would also trigger
    // this branch — a token-boundary check would be more precise. TODO confirm
    // whether such identifiers can occur in this language.
    if let Some(constrained_pos) = trimmed.rfind("constrained") {
        let after_constrained = &trimmed[constrained_pos..];
        let open_braces = after_constrained.matches('{').count();
        let close_braces = after_constrained.matches('}').count();

        if open_braces > close_braces {
            return CompletionContext::ConstraintBlock;
        }
    }

    // Check if we're in a type position (after : or after field name).
    // BUGFIX: the previous `|| trimmed.ends_with(": ")` branch was dead code —
    // `trimmed` has trailing whitespace removed by trim_end(), so it can never
    // end with a space; "field: " already ends with ':' here.
    if trimmed.ends_with(':') {
        return CompletionContext::TypePosition;
    }

    // Check if the last "word" before cursor looks like we're after a colon
    // This handles "field:string" where cursor is after string
    let words: Vec<&str> = trimmed.split_whitespace().collect();
    if let Some(last_word) = words.last() {
        if last_word.contains(':') && !last_word.ends_with(':') {
            // We're potentially completing a type that was already started
            return CompletionContext::TypePosition;
        }
    }

    // Check if we're in a record/query/procedure body (inside braces or parens)
    let open_braces = trimmed.matches('{').count();
    let close_braces = trimmed.matches('}').count();
    let open_parens = trimmed.matches('(').count();
    let close_parens = trimmed.matches(')').count();

    if (open_braces > close_braces) || (open_parens > close_parens) {
        // We're inside a block - could be fields or parameters
        // If the line contains a colon, we're likely after it (type position)
        if let Some(last_line) = trimmed.lines().last() {
            if last_line.contains(':') {
                return CompletionContext::TypePosition;
            }
        }
        // Otherwise, might be starting a new field (but not type position yet)
        return CompletionContext::Unknown;
    }

    // Default to top-level
    CompletionContext::TopLevel
}
#[cfg(test)]
mod tests {
    use super::*;

    // `use` on the current line — bare, or with a partial dotted path —
    // must always be detected as a use-statement context.
    #[test]
    fn test_use_statement() {
        assert_eq!(
            detect_context("use "),
            CompletionContext::UseStatement
        );
        assert_eq!(
            detect_context("use com."),
            CompletionContext::UseStatement
        );
        assert_eq!(
            detect_context("use com.atproto."),
            CompletionContext::UseStatement
        );
    }

    // A cursor directly after "field:" (with or without a trailing space)
    // is a type position.
    #[test]
    fn test_type_position() {
        assert_eq!(
            detect_context("record foo {\n  bar:"),
            CompletionContext::TypePosition
        );
        assert_eq!(
            detect_context("record foo {\n  bar: "),
            CompletionContext::TypePosition
        );
    }

    // An unbalanced brace after the `constrained` keyword puts us inside a
    // constraint block, even once entries have been typed.
    #[test]
    fn test_constraint_block() {
        assert_eq!(
            detect_context("field: string constrained {"),
            CompletionContext::ConstraintBlock
        );
        assert_eq!(
            detect_context("field: string constrained {\n  maxLength: 100,"),
            CompletionContext::ConstraintBlock
        );
    }

    // Empty input, or a bare partial keyword, is top-level.
    #[test]
    fn test_top_level() {
        assert_eq!(detect_context(""), CompletionContext::TopLevel);
        assert_eq!(detect_context("rec"), CompletionContext::TopLevel);
    }
}
+2
mlf-lsp/src/lib.rs
···11+pub mod context;
22+pub mod namespace_completion;
13pub mod server;
24pub mod utils;
35
+22-2
mlf-lsp/src/main.rs
···4455#[tokio::main]
66async fn main() {
77- // Initialize logging
77+ // Set up panic hook to log panics
88+ std::panic::set_hook(Box::new(|panic_info| {
99+ eprintln!("LSP PANIC: {:?}", panic_info);
1010+ if let Some(location) = panic_info.location() {
1111+ eprintln!(" at {}:{}:{}", location.file(), location.line(), location.column());
1212+ }
1313+ if let Some(message) = panic_info.payload().downcast_ref::<&str>() {
1414+ eprintln!(" message: {}", message);
1515+ } else if let Some(message) = panic_info.payload().downcast_ref::<String>() {
1616+ eprintln!(" message: {}", message);
1717+ }
1818+ }));
1919+2020+ // Initialize logging with debug level
821 tracing_subscriber::fmt()
99- .with_env_filter(EnvFilter::from_default_env())
2222+ .with_env_filter(
2323+ EnvFilter::try_from_default_env()
2424+ .unwrap_or_else(|_| EnvFilter::new("debug"))
2525+ )
1026 .with_writer(std::io::stderr)
1127 .init();
1228···17331834 let (service, socket) = LspService::new(|client| MlfLanguageServer::new(client));
19353636+ tracing::info!("Server created, starting to serve...");
3737+2038 Server::new(stdin, stdout, socket).serve(service).await;
3939+4040+ tracing::info!("Server stopped");
2141}
+162
mlf-lsp/src/namespace_completion.rs
···11+/// Shared namespace path completion logic
22+///
33+/// This module provides utilities for completing namespace paths in both
44+/// `use` statements and type positions (e.g., `com.atproto.repo.strongRef`).
55+66+use std::collections::{HashMap, HashSet};
77+use tower_lsp::lsp_types::*;
88+use mlf_lang::Workspace;
99+1010+use crate::server::DocumentState;
/// Complete a namespace path based on a partial input.
///
/// This function handles intelligent namespace completion:
/// - If `partial_path` is empty: suggests top-level namespaces only (e.g., "com", "app")
/// - If `partial_path` ends with '.': suggests next-level segments or types in that namespace
/// - Otherwise: suggests matching namespace extensions
///
/// Candidate namespaces come from the currently open `documents`; type names
/// within a namespace come from `workspace.get_namespace_types`.
///
/// Returns a vector of completion items with proper text edits covering
/// `replace_start..replace_end`.
pub fn complete_namespace_path(
    workspace: &Workspace,
    documents: &HashMap<Url, DocumentState>,
    current_namespace: Option<&String>,
    partial_path: &str,
    has_trailing_dot: bool,
    replace_start: Position,
    replace_end: Position,
    _full_document_text: &str,
    suggest_types_after_dot: bool, // Whether to suggest { }, *, and types after a trailing dot
) -> Vec<CompletionItem> {
    let mut completions = vec![];
    // Labels already emitted — several open documents can share a namespace
    // prefix, and we must not suggest the same module/type twice.
    let mut seen = HashSet::new();

    // If user typed "namespace." — suggest items from that namespace (and { } and *).
    // These entries use `insert_text` (inserted at the cursor) rather than a
    // text edit replacing the typed path.
    if has_trailing_dot && !partial_path.is_empty() && suggest_types_after_dot {
        let types = workspace.get_namespace_types(partial_path);

        if !types.is_empty() {
            // Suggest wildcard
            completions.push(CompletionItem {
                label: "*".to_string(),
                kind: Some(CompletionItemKind::KEYWORD),
                detail: Some("Import all types".to_string()),
                insert_text: Some("*".to_string()),
                ..Default::default()
            });

            // Suggest braces for item list ($0 is the snippet cursor stop)
            completions.push(CompletionItem {
                label: "{ }".to_string(),
                kind: Some(CompletionItemKind::SNIPPET),
                detail: Some("Import specific items".to_string()),
                insert_text: Some("{ $0 }".to_string()),
                insert_text_format: Some(InsertTextFormat::SNIPPET),
                ..Default::default()
            });

            // Suggest individual types, each inserted as a braced item list
            for type_name in types {
                completions.push(CompletionItem {
                    label: type_name.clone(),
                    kind: Some(CompletionItemKind::CLASS),
                    detail: Some(format!("from {}", partial_path)),
                    insert_text: Some(format!("{{ {} }}", type_name)),
                    ..Default::default()
                });
            }
        }
    }

    // Get all module namespaces from open documents
    for (_, doc) in documents.iter() {
        if let Some(ns) = &doc.namespace {
            // Skip self
            if Some(ns) == current_namespace {
                continue;
            }

            // Skip prelude (it's auto-imported)
            if ns == "prelude" {
                continue;
            }

            // Determine what to suggest based on partial path
            let suggestion = if partial_path.is_empty() {
                // No path yet - suggest top-level modules only
                ns.split('.').next().map(|s| s.to_string())
            } else {
                // Check if this namespace matches or extends the partial path
                if ns.starts_with(partial_path) {
                    // Full match - include it
                    if ns == partial_path {
                        Some(ns.clone())
                    } else if ns.chars().nth(partial_path.len()) == Some('.') {
                        // Namespace continues after partial path with a dot.
                        // Suggest the next level: com.atproto.server -> suggest
                        // "com.atproto" when partial is "com".
                        // NOTE(review): nth(partial_path.len()) mixes a byte
                        // length with a char index — fine while namespaces are
                        // ASCII; confirm that invariant holds.
                        let after_partial = &ns[partial_path.len() + 1..]; // Skip the dot
                        if let Some(next_segment) = after_partial.split('.').next() {
                            Some(format!("{}.{}", partial_path, next_segment))
                        } else {
                            None
                        }
                    } else {
                        // Partial path is a prefix but not at a segment boundary
                        // (e.g. "com.at" inside "com.atproto") — no suggestion
                        None
                    }
                } else {
                    None
                }
            };

            if let Some(label) = suggestion {
                // `seen.insert` returns false for duplicates, so each module
                // label is emitted at most once
                if seen.insert(label.clone()) {
                    let text_edit = TextEdit {
                        range: Range {
                            start: replace_start,
                            end: replace_end,
                        },
                        new_text: label.clone(),
                    };

                    completions.push(CompletionItem {
                        label: label.clone(),
                        kind: Some(CompletionItemKind::MODULE),
                        detail: Some("module".to_string()),
                        text_edit: Some(CompletionTextEdit::Edit(text_edit)),
                        ..Default::default()
                    });
                }
            }
        }
    }

    // If we're completing a path with a trailing dot in "type position" mode
    // (suggest_types_after_dot == false), suggest fully-qualified type NSIDs
    // from that namespace instead of import-style items.
    if has_trailing_dot && !partial_path.is_empty() && !suggest_types_after_dot {
        let types = workspace.get_namespace_types(partial_path);
        for type_name in types {
            let full_nsid = format!("{}.{}", partial_path, type_name);
            if seen.insert(full_nsid.clone()) {
                let text_edit = TextEdit {
                    range: Range {
                        start: replace_start,
                        end: replace_end,
                    },
                    new_text: full_nsid.clone(),
                };

                completions.push(CompletionItem {
                    label: full_nsid.clone(),
                    kind: Some(CompletionItemKind::REFERENCE),
                    detail: Some(format!("{} from {}", type_name, partial_path)),
                    text_edit: Some(CompletionTextEdit::Edit(text_edit)),
                    filter_text: Some(full_nsid.clone()),
                    sort_text: Some(format!("a{}", type_name)), // Sort types first
                    ..Default::default()
                });
            }
        }
    }

    completions
}
+1023-93
mlf-lsp/src/server.rs
···66use tower_lsp::lsp_types::*;
77use tower_lsp::{Client, LanguageServer};
8899+use crate::context::{detect_context, CompletionContext as MlfCompletionContext};
1010+use crate::namespace_completion;
911use crate::utils::*;
10121113pub struct MlfLanguageServer {
···1416 workspace: tokio::sync::RwLock<Option<Workspace>>,
1517}
16181717-struct DocumentState {
1818- text: String,
1919- lexicon: Option<Lexicon>,
2020- namespace: Option<String>,
/// Per-document state cached by the language server.
pub struct DocumentState {
    /// Full source text of the document, as last received from the client.
    pub text: String,
    /// Parsed lexicon; `None` when no parse result is available for the
    /// document (e.g. after a parse failure).
    pub lexicon: Option<Lexicon>,
    /// Namespace derived from the file path, when it could be determined.
    pub namespace: Option<String>,
}
22242325impl MlfLanguageServer {
···3032 }
31333234 async fn parse_document(&self, uri: &Url, text: &str) {
3535+ // Extract namespace from file path (always available, even on parse error)
3636+ let namespace = extract_namespace_from_uri(uri);
3737+3338 // Parse the document
3439 match mlf_lang::parser::parse_lexicon(text) {
3540 Ok(lexicon) => {
3636- // Extract namespace from file path
3737- let namespace = extract_namespace_from_uri(uri);
3838-3941 // Store parsed state
4042 self.documents.write().await.insert(
4143 uri.clone(),
···4648 },
4749 );
48504949- // Update workspace
5050- self.update_workspace(uri, lexicon, namespace).await;
5151+ // Update workspace and get validation diagnostics
5252+ let mut diagnostics = self.update_workspace(uri, lexicon, namespace, text).await;
51535252- // Clear diagnostics on success
5454+ // Add read-only warning for std library files
5555+ if is_std_library_file(uri) {
5656+ diagnostics.insert(0, Diagnostic {
5757+ range: Range {
5858+ start: Position { line: 0, character: 0 },
5959+ end: Position { line: 0, character: 0 },
6060+ },
6161+ severity: Some(DiagnosticSeverity::INFORMATION),
6262+ code: None,
6363+ code_description: None,
6464+ source: Some("mlf".to_string()),
6565+ message: "This is a standard library file and should not be edited. Changes may be overwritten.".to_string(),
6666+ related_information: None,
6767+ tags: None,
6868+ data: None,
6969+ });
7070+ }
7171+7272+ // Publish diagnostics (empty if no errors)
5373 self.client
5454- .publish_diagnostics(uri.clone(), vec![], None)
7474+ .publish_diagnostics(uri.clone(), diagnostics, None)
5575 .await;
5676 }
5777 Err(err) => {
5858- // Store failed state
7878+ // Even on parse error, try to extract partial information
7979+ // Parse individual top-level items and keep what works
8080+ let partial_lexicon = self.parse_partial_lexicon(text);
8181+8282+ // Store partial state (with namespace so completion still works)
5983 self.documents.write().await.insert(
6084 uri.clone(),
6185 DocumentState {
6286 text: text.to_string(),
6363- lexicon: None,
6464- namespace: None,
8787+ lexicon: partial_lexicon.clone(),
8888+ namespace: namespace.clone(),
6589 },
6690 );
67916868- // Convert parse error to LSP diagnostic
6969- let diagnostic = Diagnostic {
7070- range: Range {
7171- start: Position {
7272- line: 0,
7373- character: 0,
7474- },
7575- end: Position {
7676- line: 0,
7777- character: 0,
7878- },
7979- },
9292+ // Try to update workspace with partial lexicon if available
9393+ let mut diagnostics = if let Some(ref lex) = partial_lexicon {
9494+ self.update_workspace(uri, lex.clone(), namespace, text).await
9595+ } else {
9696+ vec![]
9797+ };
9898+9999+ // Add parse error diagnostic
100100+ let error_range = if let Some(span) = err.span() {
101101+ span_to_range(text, span)
102102+ } else {
103103+ Range {
104104+ start: Position { line: 0, character: 0 },
105105+ end: Position { line: 0, character: 0 },
106106+ }
107107+ };
108108+109109+ diagnostics.push(Diagnostic {
110110+ range: error_range,
80111 severity: Some(DiagnosticSeverity::ERROR),
81112 code: None,
82113 code_description: None,
83114 source: Some("mlf".to_string()),
8484- message: format!("{:?}", err),
115115+ message: err.message(),
85116 related_information: None,
86117 tags: None,
87118 data: None,
8888- };
119119+ });
8912090121 self.client
9191- .publish_diagnostics(uri.clone(), vec![diagnostic], None)
122122+ .publish_diagnostics(uri.clone(), diagnostics, None)
92123 .await;
93124 }
94125 }
95126 }
961279797- async fn update_workspace(&self, _uri: &Url, lexicon: Lexicon, namespace: Option<String>) {
128128+ /// Try to parse individual items from a document, skipping errors
129129+ /// This allows LSP features to work on the valid parts of a document
130130+ fn parse_partial_lexicon(&self, _text: &str) -> Option<Lexicon> {
131131+ // Split by top-level keywords and try to parse each section
132132+ // For now, just return None - a full implementation would parse
133133+ // line-by-line or use error recovery in the parser
134134+ // TODO: Implement proper error recovery
135135+ None
136136+ }
137137+138138+ async fn update_workspace(&self, _uri: &Url, lexicon: Lexicon, namespace: Option<String>, text: &str) -> Vec<Diagnostic> {
139139+ let mut diagnostics = vec![];
140140+98141 if let Some(ns) = namespace {
99142 let mut workspace_guard = self.workspace.write().await;
100143101101- // Initialize workspace if needed
144144+ // Initialize workspace if needed (with full std library)
102145 if workspace_guard.is_none() {
103146 match Workspace::with_std() {
104104- Ok(ws) => *workspace_guard = Some(ws),
147147+ Ok(ws) => {
148148+ // Populate document storage with std library files for navigation
149149+ self.load_std_documents().await;
150150+ *workspace_guard = Some(ws);
151151+ }
105152 Err(_) => {
106106- *workspace_guard = Some(Workspace::new());
153153+ // Fallback to prelude if std loading fails
154154+ match Workspace::with_prelude() {
155155+ Ok(ws) => {
156156+ self.load_prelude_document().await;
157157+ *workspace_guard = Some(ws);
158158+ }
159159+ Err(_) => *workspace_guard = Some(Workspace::new()),
160160+ }
107161 }
108162 }
109163 }
···111165 // Add or update module in workspace
112166 if let Some(workspace) = workspace_guard.as_mut() {
113167 let _ = workspace.add_module(ns.clone(), lexicon);
114114- let _ = workspace.resolve();
168168+169169+ // Resolve and collect validation errors
170170+ if let Err(errors) = workspace.resolve() {
171171+ use mlf_lang::error::ValidationError;
172172+173173+ for error in errors.errors {
174174+ // Only show diagnostics for the current file
175175+ if mlf_diagnostics::get_error_module_namespace_str(&error) == ns {
176176+ // Extract span and message from error variant
177177+ let (span, message, is_unused) = match &error {
178178+ ValidationError::DuplicateDefinition { name, second_span, .. } => {
179179+ (*second_span, format!("Duplicate definition: {}", name), false)
180180+ }
181181+ ValidationError::UndefinedReference { name, span, .. } => {
182182+ (*span, format!("Undefined reference: {}", name), false)
183183+ }
184184+ ValidationError::InvalidConstraint { message, span, .. } => {
185185+ (*span, format!("Invalid constraint: {}", message), false)
186186+ }
187187+ ValidationError::TypeMismatch { expected, found, span, .. } => {
188188+ (*span, format!("Type mismatch: expected {}, found {}", expected, found), false)
189189+ }
190190+ ValidationError::ConstraintTooPermissive { message, span, .. } => {
191191+ (*span, format!("Constraint too permissive: {}", message), false)
192192+ }
193193+ ValidationError::ReservedName { name, span, .. } => {
194194+ (*span, format!("Reserved name: {}", name), false)
195195+ }
196196+ ValidationError::AmbiguousMain { name, first_span, .. } => {
197197+ (*first_span, format!("Ambiguous main: {}", name), false)
198198+ }
199199+ ValidationError::MultipleMain { name, first_span, .. } => {
200200+ (*first_span, format!("Multiple @main annotations: {}", name), false)
201201+ }
202202+ ValidationError::ConflictNotAllowed { name, span, .. } => {
203203+ (*span, format!("Conflict not allowed: {}", name), false)
204204+ }
205205+ ValidationError::CircularImport { cycle, span, .. } => {
206206+ (*span, format!("Circular import: {}", cycle.join(" -> ")), false)
207207+ }
208208+ ValidationError::UnusedImport { name, span, .. } => {
209209+ (*span, format!("Unused import: {}", name), true)
210210+ }
211211+ };
212212+213213+ let range = span_to_range(text, span);
214214+215215+ diagnostics.push(Diagnostic {
216216+ range,
217217+ severity: Some(if is_unused { DiagnosticSeverity::HINT } else { DiagnosticSeverity::ERROR }),
218218+ code: None,
219219+ code_description: None,
220220+ source: Some("mlf".to_string()),
221221+ message,
222222+ related_information: None,
223223+ tags: if is_unused {
224224+ Some(vec![DiagnosticTag::UNNECESSARY])
225225+ } else {
226226+ None
227227+ },
228228+ data: None,
229229+ });
230230+ }
231231+ }
232232+ }
233233+ }
234234+ }
235235+236236+ diagnostics
237237+ }
    /// Ensure the std library exists on disk under `~/.mlf/lexicons/mlf/`
    /// (extracting the embedded copy on first run) and load those files into
    /// document storage so navigation into std sources works.
    async fn load_std_documents(&self) {
        self.client
            .log_message(MessageType::INFO, "Loading std library documents...")
            .await;

        // Use global ~/.mlf/lexicons/mlf/ directory (matches project structure);
        // fall back to a relative ".mlf" when no home directory is available
        let global_mlf_dir = dirs::home_dir()
            .map(|h| h.join(".mlf"))
            .unwrap_or_else(|| PathBuf::from(".mlf"));

        let std_dir = global_mlf_dir.join("lexicons").join("mlf");

        self.client
            .log_message(MessageType::INFO, format!("Global MLF directory: {}", global_mlf_dir.display()))
            .await;

        // Ensure std directory exists and has files.
        // prelude.mlf is used as the sentinel for "std is installed".
        if !std_dir.join("prelude.mlf").exists() {
            self.client
                .log_message(MessageType::INFO, "Std library not found in ~/.mlf/lexicons/mlf/, extracting embedded files...")
                .await;

            // Create directory; abort std loading (but keep the server alive)
            // if that fails
            if let Err(e) = std::fs::create_dir_all(&std_dir) {
                self.client
                    .log_message(MessageType::ERROR, format!("Failed to create ~/.mlf/lexicons/mlf/: {}", e))
                    .await;
                return;
            }

            // Extract embedded files
            self.extract_embedded_std_to_directory(&std_dir).await;
        } else {
            self.client
                .log_message(MessageType::INFO, "Using existing std library from ~/.mlf/lexicons/mlf/")
                .await;
        }

        // Load std files from ~/.mlf/lexicons/mlf/
        self.load_std_from_directory(&std_dir).await;

        self.client
            .log_message(MessageType::INFO, "Finished loading std library documents")
            .await;
    }
    /// Load fetched lexicons from project's .mlf cache directory.
    ///
    /// Locates the project root starting from the current working directory
    /// (presumably by finding `mlf.toml` — see `find_mlf_project_root`) and,
    /// if `<root>/.mlf/lexicons/mlf` exists, loads its `.mlf` files into
    /// `workspace`. Returns `Ok(())` even when no project root is found.
    async fn load_project_lexicons(&self, workspace: &mut Workspace) -> std::result::Result<(), String> {
        // Try to find project root by looking for mlf.toml
        // We'll check a few common locations
        let possible_roots = vec![
            std::env::current_dir().ok(),
            // Could add more heuristics here
        ];

        for maybe_root in possible_roots {
            if let Some(root) = maybe_root {
                if let Some(project_root) = find_mlf_project_root(&root) {
                    let mlf_cache_dir = project_root.join(".mlf");
                    let lexicons_dir = mlf_cache_dir.join("lexicons").join("mlf");

                    if lexicons_dir.exists() {
                        self.client
                            .log_message(MessageType::INFO, format!("Loading project lexicons from {}", lexicons_dir.display()))
                            .await;

                        // base_dir == lexicons_dir so namespaces are computed
                        // relative to the cache root
                        self.load_lexicons_from_directory(workspace, &lexicons_dir, &lexicons_dir).await?;

                        self.client
                            .log_message(MessageType::INFO, "Finished loading project lexicons")
                            .await;

                        return Ok(());
                    }
                }
            }
        }

        Ok(())
    }
319319+320320+ /// Recursively load .mlf files from a directory into the workspace
321321+ async fn load_lexicons_from_directory(
322322+ &self,
323323+ workspace: &mut Workspace,
324324+ dir: &PathBuf,
325325+ base_dir: &PathBuf,
326326+ ) -> std::result::Result<(), String> {
327327+ if !dir.exists() {
328328+ return Ok(());
329329+ }
330330+331331+ let entries = std::fs::read_dir(dir)
332332+ .map_err(|e| format!("Failed to read directory {}: {}", dir.display(), e))?;
333333+334334+ for entry in entries.flatten() {
335335+ let path = entry.path();
336336+337337+ if path.is_file() && path.extension().map_or(false, |e| e == "mlf") {
338338+ // Extract namespace from file path
339339+ if let Ok(rel_path) = path.strip_prefix(base_dir) {
340340+ if let Some(path_str) = rel_path.to_str() {
341341+ let namespace = path_str
342342+ .strip_suffix(".mlf")
343343+ .unwrap_or(path_str)
344344+ .replace('/', ".")
345345+ .replace('\\', ".");
346346+347347+ // Read and parse the file
348348+ if let Ok(contents) = std::fs::read_to_string(&path) {
349349+ if let Ok(lexicon) = mlf_lang::parser::parse_lexicon(&contents) {
350350+ // Add to workspace (skip if already exists to avoid overwriting std)
351351+ if let Err(e) = workspace.add_module(namespace.clone(), lexicon) {
352352+ self.client
353353+ .log_message(
354354+ MessageType::WARNING,
355355+ format!("Failed to add module {}: {:?}", namespace, e)
356356+ )
357357+ .await;
358358+ }
359359+360360+ // Also add to documents for navigation
361361+ if let Ok(uri) = Url::from_file_path(&path) {
362362+ self.documents.write().await.insert(
363363+ uri.clone(),
364364+ DocumentState {
365365+ text: contents.clone(),
366366+ lexicon: Some(mlf_lang::parser::parse_lexicon(&contents).unwrap()),
367367+ namespace: Some(namespace.clone()),
368368+ },
369369+ );
370370+371371+ self.client
372372+ .log_message(
373373+ MessageType::INFO,
374374+ format!("Loaded project lexicon: {} (namespace: {})", uri, namespace)
375375+ )
376376+ .await;
377377+ }
378378+ }
379379+ }
380380+ }
381381+ }
382382+ } else if path.is_dir() {
383383+ // Recurse into subdirectory
384384+ Box::pin(self.load_lexicons_from_directory(workspace, &path, base_dir)).await?;
385385+ }
386386+ }
387387+388388+ Ok(())
389389+ }
390390+391391+ async fn load_std_from_directory(&self, std_dir: &PathBuf) {
392392+ // Read real files from directory
393393+ fn load_dir_recursive<'a>(
394394+ server: &'a MlfLanguageServer,
395395+ dir: &'a PathBuf,
396396+ base_dir: &'a PathBuf,
397397+ ) -> std::pin::Pin<Box<dyn std::future::Future<Output = ()> + Send + 'a>> {
398398+ Box::pin(async move {
399399+ if let Ok(entries) = std::fs::read_dir(dir) {
400400+ for entry in entries.flatten() {
401401+ let path = entry.path();
402402+ if path.is_file() && path.extension().map_or(false, |e| e == "mlf") {
403403+ if let Ok(contents) = std::fs::read_to_string(&path) {
404404+ if let Ok(lexicon) = mlf_lang::parser::parse_lexicon(&contents) {
405405+ if let Ok(uri) = Url::from_file_path(&path) {
406406+ // Extract relative path for namespace
407407+ if let Ok(rel_path) = path.strip_prefix(base_dir) {
408408+ if let Some(path_str) = rel_path.to_str() {
409409+ let namespace = path_str
410410+ .strip_suffix(".mlf")
411411+ .unwrap_or(path_str)
412412+ .replace('/', ".")
413413+ .replace('\\', ".");
414414+415415+ server.client
416416+ .log_message(
417417+ MessageType::INFO,
418418+ format!("Loaded std document: {} (namespace: {})", uri, namespace)
419419+ )
420420+ .await;
421421+422422+ server.documents.write().await.insert(
423423+ uri,
424424+ DocumentState {
425425+ text: contents,
426426+ lexicon: Some(lexicon),
427427+ namespace: Some(namespace),
428428+ },
429429+ );
430430+ }
431431+ }
432432+ }
433433+ }
434434+ }
435435+ } else if path.is_dir() {
436436+ load_dir_recursive(server, &path, base_dir).await;
437437+ }
438438+ }
439439+ }
440440+ })
441441+ }
442442+443443+ load_dir_recursive(self, std_dir, std_dir).await;
444444+ }
445445+446446+ async fn extract_embedded_std_to_directory(&self, std_dir: &PathBuf) {
447447+ // Extract embedded files to ~/.mlf/std/
448448+ fn extract_embedded_dir<'a>(
449449+ server: &'a MlfLanguageServer,
450450+ dir: &'static include_dir::Dir<'static>,
451451+ base_std_dir: &'a PathBuf,
452452+ ) -> std::pin::Pin<Box<dyn std::future::Future<Output = ()> + Send + 'a>> {
453453+ Box::pin(async move {
454454+ for file in dir.files() {
455455+ if let Some(path_str) = file.path().to_str() {
456456+ if path_str.ends_with(".mlf") {
457457+ if let Some(contents_str) = file.contents_utf8() {
458458+ let file_path = base_std_dir.join(path_str);
459459+460460+ // Create parent directories
461461+ if let Some(parent) = file_path.parent() {
462462+ let _ = std::fs::create_dir_all(parent);
463463+ }
464464+465465+ // Write file
466466+ if let Err(e) = std::fs::write(&file_path, contents_str) {
467467+ server.client
468468+ .log_message(
469469+ MessageType::ERROR,
470470+ format!("Failed to write std file {}: {}", file_path.display(), e)
471471+ )
472472+ .await;
473473+ continue;
474474+ }
475475+476476+ server.client
477477+ .log_message(
478478+ MessageType::INFO,
479479+ format!("Extracted: {}", file_path.display())
480480+ )
481481+ .await;
482482+ }
483483+ }
484484+ }
485485+ }
486486+487487+ // Recursively process subdirectories
488488+ for subdir in dir.dirs() {
489489+ extract_embedded_dir(server, subdir, base_std_dir).await;
490490+ }
491491+ })
492492+ }
493493+494494+ extract_embedded_dir(self, &mlf_lang::STD_DIR, std_dir).await;
495495+ }
    /// No-op retained for backward compatibility.
    ///
    /// The prelude ships inside `STD_DIR`, so it is loaded together with the
    /// rest of the standard library by the std-loading path.
    async fn load_prelude_document(&self) {
        // Prelude is already included in STD_DIR, so this is handled by load_std_documents
        // This function kept for backward compatibility but does nothing
    }
118501119502 async fn find_definition_in_workspace(
···122505 current_namespace: &str,
123506 path: &Path,
124507 ) -> Option<(Url, mlf_lang::span::Span)> {
508508+ self.client
509509+ .log_message(
510510+ MessageType::INFO,
511511+ format!("find_definition_in_workspace: target_name={}, current_namespace={}, path={}",
512512+ target_name, current_namespace, path.to_string())
513513+ )
514514+ .await;
515515+125516 let workspace_guard = self.workspace.read().await;
126517 let workspace = workspace_guard.as_ref()?;
127518128519 // Resolve the reference to find which namespace it's in
129520 let target_namespace = workspace.resolve_reference_namespace(path, current_namespace)?;
130521522522+ self.client
523523+ .log_message(
524524+ MessageType::INFO,
525525+ format!("Resolved target namespace: {}", target_namespace)
526526+ )
527527+ .await;
528528+131529 // Find the document with that namespace
132530 let documents = self.documents.read().await;
531531+532532+ self.client
533533+ .log_message(
534534+ MessageType::INFO,
535535+ format!("Searching {} documents for namespace '{}'", documents.len(), target_namespace)
536536+ )
537537+ .await;
538538+133539 for (doc_uri, doc_state) in documents.iter() {
134540 if let Some(doc_ns) = &doc_state.namespace {
541541+ self.client
542542+ .log_message(
543543+ MessageType::INFO,
544544+ format!("Checking document {} with namespace '{}'", doc_uri, doc_ns)
545545+ )
546546+ .await;
547547+135548 if doc_ns == &target_namespace {
549549+ self.client
550550+ .log_message(
551551+ MessageType::INFO,
552552+ format!("Found matching namespace! Searching for item '{}'", target_name)
553553+ )
554554+ .await;
555555+136556 if let Some(lexicon) = &doc_state.lexicon {
137557 // Find the item in this lexicon
138558 for item in &lexicon.items {
139139- if get_item_name(item) == target_name {
559559+ let item_name = get_item_name(item);
560560+ if item_name == target_name {
140561 let def_span = match item {
141562 Item::Record(r) => r.name.span,
142563 Item::InlineType(i) => i.name.span,
···147568 Item::Subscription(s) => s.name.span,
148569 Item::Use(_) => continue,
149570 };
571571+572572+ self.client
573573+ .log_message(
574574+ MessageType::INFO,
575575+ format!("Found definition of '{}' in {}", target_name, doc_uri)
576576+ )
577577+ .await;
150578151579 return Some((doc_uri.clone(), def_span));
152580 }
···156584 }
157585 }
158586587587+ self.client
588588+ .log_message(
589589+ MessageType::INFO,
590590+ format!("Definition not found for '{}'", target_name)
591591+ )
592592+ .await;
593593+594594+ None
595595+ }
596596+597597+ /// Find the definition for a use statement
598598+ /// For "use a.b.c", navigate to the definition of c in namespace a.b
599599+ /// For "use a.b", navigate to the first definition in namespace a.b
600600+ async fn find_definition_in_workspace_for_use(
601601+ &self,
602602+ use_stmt: &Use,
603603+ _current_namespace: &str,
604604+ ) -> Option<(Url, mlf_lang::span::Span)> {
605605+ // For "use a.b.c" or "use a.b.c { ... }", we want to navigate to the namespace a.b
606606+ // and find the type c (or the first type if it's just "use a.b")
607607+608608+ let path = &use_stmt.path;
609609+610610+ // Check if this looks like "use namespace.typename" (old syntax)
611611+ // or "use namespace" (new syntax)
612612+ let (target_namespace, target_type) = if let UseImports::Items(items) = &use_stmt.imports {
613613+ if items.len() == 1 && path.segments.len() >= 2
614614+ && items[0].name.name == path.segments.last().unwrap().name {
615615+ // Old syntax: use a.b.c as Foo
616616+ // Navigate to type "c" in namespace "a.b"
617617+ let ns = path.segments[..path.segments.len() - 1]
618618+ .iter()
619619+ .map(|s| s.name.as_str())
620620+ .collect::<Vec<_>>()
621621+ .join(".");
622622+ let type_name = path.segments.last().unwrap().name.clone();
623623+ (ns, Some(type_name))
624624+ } else {
625625+ // New syntax: use a.b { c, d }
626626+ // Navigate to namespace a.b
627627+ (path.to_string(), None)
628628+ }
629629+ } else {
630630+ // use a.b; or use a.b.*;
631631+ (path.to_string(), None)
632632+ };
633633+634634+ self.find_definition_in_namespace(&target_namespace, target_type.as_deref().unwrap_or("")).await
635635+ }
636636+637637+ /// Find a definition in a specific namespace
638638+ async fn find_definition_in_namespace(
639639+ &self,
640640+ target_namespace: &str,
641641+ target_name: &str,
642642+ ) -> Option<(Url, mlf_lang::span::Span)> {
643643+ let documents = self.documents.read().await;
644644+645645+ self.client
646646+ .log_message(
647647+ MessageType::INFO,
648648+ format!("find_definition_in_namespace: namespace='{}', name='{}'", target_namespace, target_name)
649649+ )
650650+ .await;
651651+652652+ for (doc_uri, doc_state) in documents.iter() {
653653+ if let Some(doc_ns) = &doc_state.namespace {
654654+ if doc_ns == target_namespace {
655655+ if let Some(lexicon) = &doc_state.lexicon {
656656+ // If target_name is empty, return the first definition
657657+ if target_name.is_empty() {
658658+ for item in &lexicon.items {
659659+ let def_span = match item {
660660+ Item::Record(r) => Some(r.name.span),
661661+ Item::InlineType(i) => Some(i.name.span),
662662+ Item::DefType(d) => Some(d.name.span),
663663+ Item::Token(t) => Some(t.name.span),
664664+ Item::Query(q) => Some(q.name.span),
665665+ Item::Procedure(p) => Some(p.name.span),
666666+ Item::Subscription(s) => Some(s.name.span),
667667+ Item::Use(_) => None,
668668+ };
669669+670670+ if let Some(span) = def_span {
671671+ return Some((doc_uri.clone(), span));
672672+ }
673673+ }
674674+ } else {
675675+ // Find specific item by name
676676+ for item in &lexicon.items {
677677+ let item_name = get_item_name(item);
678678+ if item_name == target_name {
679679+ let def_span = match item {
680680+ Item::Record(r) => r.name.span,
681681+ Item::InlineType(i) => i.name.span,
682682+ Item::DefType(d) => d.name.span,
683683+ Item::Token(t) => t.name.span,
684684+ Item::Query(q) => q.name.span,
685685+ Item::Procedure(p) => p.name.span,
686686+ Item::Subscription(s) => s.name.span,
687687+ Item::Use(_) => continue,
688688+ };
689689+690690+ return Some((doc_uri.clone(), def_span));
691691+ }
692692+ }
693693+ }
694694+ }
695695+ }
696696+ }
697697+ }
698698+159699 None
160700 }
161701}
···201741 Some(components.join("."))
202742}
203743744744+/// Check if a URI points to a std library file that shouldn't be edited
745745+fn is_std_library_file(uri: &Url) -> bool {
746746+ if let Ok(path) = uri.to_file_path() {
747747+ // Check if the path contains ~/.mlf/lexicons/mlf/
748748+ if let Some(home_dir) = dirs::home_dir() {
749749+ let std_dir = home_dir.join(".mlf").join("lexicons").join("mlf");
750750+ if let Ok(canonical_path) = path.canonicalize() {
751751+ if let Ok(canonical_std_dir) = std_dir.canonicalize() {
752752+ return canonical_path.starts_with(canonical_std_dir);
753753+ }
754754+ }
755755+ }
756756+ }
757757+ false
758758+}
759759+760760+/// Find the project root by looking for mlf.toml
761761+fn find_mlf_project_root(start_path: &std::path::Path) -> Option<PathBuf> {
762762+ let mut current = start_path;
763763+764764+ loop {
765765+ if current.join("mlf.toml").exists() {
766766+ return Some(current.to_path_buf());
767767+ }
768768+769769+ current = current.parent()?;
770770+771771+ // Stop at filesystem root
772772+ if current.parent().is_none() {
773773+ break;
774774+ }
775775+ }
776776+777777+ None
778778+}
779779+204780#[tower_lsp::async_trait]
205781impl LanguageServer for MlfLanguageServer {
206782 async fn initialize(&self, _: InitializeParams) -> Result<InitializeResult> {
···212788 hover_provider: Some(HoverProviderCapability::Simple(true)),
213789 completion_provider: Some(CompletionOptions {
214790 resolve_provider: Some(false),
215215- trigger_characters: Some(vec![".".to_string(), ":".to_string()]),
791791+ trigger_characters: Some(vec![
792792+ ".".to_string(),
793793+ ":".to_string(),
794794+ " ".to_string(),
795795+ ]),
216796 ..Default::default()
217797 }),
218798 definition_provider: Some(OneOf::Left(true)),
···232812 self.client
233813 .log_message(MessageType::INFO, "MLF Language Server initialized")
234814 .await;
815815+816816+ // Initialize workspace with std library immediately on startup
817817+ // This ensures completion works even if did_open hasn't been called yet (e.g., after lsp-restart)
818818+ let mut workspace_guard = self.workspace.write().await;
819819+ if workspace_guard.is_none() {
820820+ tracing::info!("Initializing workspace with std library on startup");
821821+ match Workspace::with_std() {
822822+ Ok(mut ws) => {
823823+ // Populate document storage with std library files for navigation
824824+ self.load_std_documents().await;
825825+826826+ // Load project lexicons from .mlf cache
827827+ if let Err(e) = self.load_project_lexicons(&mut ws).await {
828828+ tracing::warn!("Failed to load project lexicons: {}", e);
829829+ self.client
830830+ .log_message(MessageType::WARNING, format!("Failed to load project lexicons: {}", e))
831831+ .await;
832832+ }
833833+834834+ *workspace_guard = Some(ws);
835835+ tracing::info!("Workspace initialized successfully");
836836+ }
837837+ Err(e) => {
838838+ tracing::error!("Failed to initialize workspace: {:?}", e);
839839+ // Fallback to prelude if std loading fails
840840+ match Workspace::with_prelude() {
841841+ Ok(ws) => {
842842+ self.load_prelude_document().await;
843843+ *workspace_guard = Some(ws);
844844+ tracing::info!("Workspace initialized with prelude only");
845845+ }
846846+ Err(_) => {
847847+ *workspace_guard = Some(Workspace::new());
848848+ tracing::warn!("Workspace initialized empty (no std or prelude)");
849849+ }
850850+ }
851851+ }
852852+ }
853853+ }
235854 }
236855237856 async fn shutdown(&self) -> Result<()> {
···3991018 }
40010194011020 async fn completion(&self, params: CompletionParams) -> Result<Option<CompletionResponse>> {
10211021+ tracing::debug!("Completion request received");
10221022+4021023 let uri = params.text_document_position.text_document.uri;
4031024 let position = params.text_document_position.position;
40410254051026 let documents = self.documents.read().await;
10271027+ tracing::debug!("Got documents lock");
10281028+4061029 if let Some(doc_state) = documents.get(&uri) {
4071030 let mut completions = vec![];
4081031409409- // Always provide keywords
410410- let keywords = vec![
411411- ("record", CompletionItemKind::KEYWORD, "Define a record type"),
412412- ("inline type", CompletionItemKind::KEYWORD, "Define an inline type"),
413413- ("def type", CompletionItemKind::KEYWORD, "Define a def type"),
414414- ("token", CompletionItemKind::KEYWORD, "Define a token"),
415415- ("query", CompletionItemKind::KEYWORD, "Define a query"),
416416- ("procedure", CompletionItemKind::KEYWORD, "Define a procedure"),
417417- ("subscription", CompletionItemKind::KEYWORD, "Define a subscription"),
418418- ("use", CompletionItemKind::KEYWORD, "Import types from another module"),
419419- ("constrained", CompletionItemKind::KEYWORD, "Add constraints to a type"),
420420- ];
10321032+ // Detect context
10331033+ let text_before_cursor = if let Some(offset) = position_to_offset(&doc_state.text, position) {
10341034+ &doc_state.text[..offset]
10351035+ } else {
10361036+ ""
10371037+ };
4211038422422- for (label, kind, detail) in keywords {
423423- completions.push(CompletionItem {
424424- label: label.to_string(),
425425- kind: Some(kind),
426426- detail: Some(detail.to_string()),
427427- ..Default::default()
428428- });
429429- }
10391039+ let context = detect_context(text_before_cursor);
10401040+ tracing::debug!("Context detected: {:?}", context);
4301041431431- // Primitive types
432432- let primitives = vec![
433433- "null", "boolean", "integer", "string", "bytes", "blob",
434434- ];
10421042+ match context {
10431043+ MlfCompletionContext::UseStatement => {
10441044+ tracing::debug!("UseStatement completion");
4351045436436- for prim in primitives {
437437- completions.push(CompletionItem {
438438- label: prim.to_string(),
439439- kind: Some(CompletionItemKind::TYPE_PARAMETER),
440440- detail: Some("Primitive type".to_string()),
441441- ..Default::default()
442442- });
443443- }
10461046+ // Extract the partial path already typed (e.g., "use com.atproto." -> "com.atproto")
10471047+ let last_line = text_before_cursor.lines().last().unwrap_or("");
10481048+ let after_use = last_line
10491049+ .trim_start()
10501050+ .strip_prefix("use ")
10511051+ .unwrap_or("")
10521052+ .trim_end();
10531053+10541054+ // Track if there's a trailing dot
10551055+ let has_trailing_dot = after_use.ends_with('.');
10561056+ let partial_path = after_use.trim_end_matches('.'); // Remove trailing dot for matching
10571057+10581058+ tracing::debug!("Partial path: '{}', has_trailing_dot: {}", partial_path, has_trailing_dot);
4441059445445- // If we have a parsed lexicon, provide type completions from current file
446446- if let Some(lexicon) = &doc_state.lexicon {
447447- for item in &lexicon.items {
448448- let (name, kind, detail) = match item {
449449- Item::Record(r) => (r.name.name.as_str(), CompletionItemKind::CLASS, "record"),
450450- Item::InlineType(i) => (i.name.name.as_str(), CompletionItemKind::TYPE_PARAMETER, "inline type"),
451451- Item::DefType(d) => (d.name.name.as_str(), CompletionItemKind::TYPE_PARAMETER, "def type"),
452452- Item::Token(t) => (t.name.name.as_str(), CompletionItemKind::ENUM, "token"),
453453- Item::Query(q) => (q.name.name.as_str(), CompletionItemKind::FUNCTION, "query"),
454454- Item::Procedure(p) => (p.name.name.as_str(), CompletionItemKind::FUNCTION, "procedure"),
455455- Item::Subscription(s) => (s.name.name.as_str(), CompletionItemKind::EVENT, "subscription"),
456456- Item::Use(_) => continue,
457457- };
10601060+ // Calculate the range to replace
10611061+ let use_start_offset = text_before_cursor.rfind("use ").map(|i| i + 4).unwrap_or(0);
10621062+ let use_start_pos = offset_to_position(&doc_state.text, use_start_offset);
4581063459459- completions.push(CompletionItem {
460460- label: name.to_string(),
461461- kind: Some(kind),
462462- detail: Some(detail.to_string()),
463463- ..Default::default()
464464- });
10641064+ // Use shared namespace completion logic
10651065+ let workspace_guard = self.workspace.read().await;
10661066+ if let Some(workspace) = workspace_guard.as_ref() {
10671067+ completions.extend(namespace_completion::complete_namespace_path(
10681068+ workspace,
10691069+ &documents,
10701070+ doc_state.namespace.as_ref(),
10711071+ partial_path,
10721072+ has_trailing_dot,
10731073+ use_start_pos,
10741074+ position,
10751075+ &doc_state.text,
10761076+ true, // Suggest { }, *, and types after trailing dot in use statements
10771077+ ));
10781078+ }
4651079 }
466466- }
4671080468468- // Check if we're at a field position to provide constraint completions
469469- if let Some(offset) = position_to_offset(&doc_state.text, position) {
470470- // Check if "constrained {" appears before cursor
471471- if doc_state.text[..offset].ends_with("constrained {")
472472- || doc_state.text[..offset].contains("constrained {") {
10811081+ MlfCompletionContext::ConstraintBlock => {
10821082+ // Only suggest constraint names
4731083 let constraint_items = vec![
4741084 ("maxLength", "Maximum string/array length"),
4751085 ("minLength", "Minimum string/array length"),
···4961106 });
4971107 }
4981108 }
11091109+11101110+ MlfCompletionContext::TypePosition => {
11111111+ // Extract text after the last ':' to detect if user is typing a namespace path
11121112+ let last_line = text_before_cursor.lines().last().unwrap_or("");
11131113+ let after_colon = last_line.rfind(':')
11141114+ .map(|idx| &last_line[idx + 1..])
11151115+ .unwrap_or("");
11161116+11171117+ // Trim only for detection purposes, but preserve space in offset calculation
11181118+ let after_colon_trimmed = after_colon.trim_start();
11191119+11201120+ // Check if user is typing a path (contains a dot)
11211121+ let is_typing_path = after_colon_trimmed.contains('.');
11221122+11231123+ tracing::debug!("Type position: after_colon_trimmed='{}', is_typing_path={}", after_colon_trimmed, is_typing_path);
11241124+11251125+ if is_typing_path {
11261126+ // User is typing a namespace path like "com.atproto."
11271127+ // Only suggest namespace path completions
11281128+ let has_trailing_dot = after_colon_trimmed.ends_with('.');
11291129+ let partial_path = after_colon_trimmed.trim_end_matches('.');
11301130+11311131+ tracing::debug!("Namespace path mode: partial_path='{}', has_trailing_dot={}", partial_path, has_trailing_dot);
11321132+11331133+ // Calculate the range to replace (from after ':' and any spaces to cursor)
11341134+ let colon_offset = text_before_cursor.rfind(':').map(|i| i + 1).unwrap_or(0);
11351135+ // Skip leading spaces to get to where the actual path starts
11361136+ let space_count = after_colon.len() - after_colon_trimmed.len();
11371137+ let replace_start_pos = offset_to_position(&doc_state.text, colon_offset + space_count);
11381138+11391139+ let workspace_guard = self.workspace.read().await;
11401140+ if let Some(workspace) = workspace_guard.as_ref() {
11411141+ completions.extend(namespace_completion::complete_namespace_path(
11421142+ workspace,
11431143+ &documents,
11441144+ doc_state.namespace.as_ref(),
11451145+ partial_path,
11461146+ has_trailing_dot,
11471147+ replace_start_pos,
11481148+ position,
11491149+ &doc_state.text,
11501150+ false, // Don't suggest { } or * in type positions
11511151+ ));
11521152+ }
11531153+ } else {
11541154+ // Not typing a path - suggest local types, primitives, imports, prelude
11551155+ tracing::debug!("Local type mode");
11561156+11571157+ // Primitive types
11581158+ let primitives = vec![
11591159+ "null", "boolean", "integer", "string", "bytes", "blob",
11601160+ ];
11611161+11621162+ for prim in primitives {
11631163+ completions.push(CompletionItem {
11641164+ label: prim.to_string(),
11651165+ kind: Some(CompletionItemKind::TYPE_PARAMETER),
11661166+ detail: Some("Primitive type".to_string()),
11671167+ ..Default::default()
11681168+ });
11691169+ }
11701170+11711171+ // Add defined types (records, inline types, def types, tokens only)
11721172+ if let Some(lexicon) = &doc_state.lexicon {
11731173+ for item in &lexicon.items {
11741174+ match item {
11751175+ Item::Record(r) => {
11761176+ completions.push(CompletionItem {
11771177+ label: r.name.name.clone(),
11781178+ kind: Some(CompletionItemKind::CLASS),
11791179+ detail: Some("record".to_string()),
11801180+ ..Default::default()
11811181+ });
11821182+ }
11831183+ Item::InlineType(i) => {
11841184+ completions.push(CompletionItem {
11851185+ label: i.name.name.clone(),
11861186+ kind: Some(CompletionItemKind::TYPE_PARAMETER),
11871187+ detail: Some("inline type".to_string()),
11881188+ ..Default::default()
11891189+ });
11901190+ }
11911191+ Item::DefType(d) => {
11921192+ completions.push(CompletionItem {
11931193+ label: d.name.name.clone(),
11941194+ kind: Some(CompletionItemKind::TYPE_PARAMETER),
11951195+ detail: Some("def type".to_string()),
11961196+ ..Default::default()
11971197+ });
11981198+ }
11991199+ Item::Token(t) => {
12001200+ completions.push(CompletionItem {
12011201+ label: t.name.name.clone(),
12021202+ kind: Some(CompletionItemKind::ENUM),
12031203+ detail: Some("token".to_string()),
12041204+ ..Default::default()
12051205+ });
12061206+ }
12071207+ // Don't suggest queries, procedures, subscriptions as types
12081208+ _ => {}
12091209+ }
12101210+ }
12111211+ }
12121212+12131213+ // Add imported types
12141214+ if let Some(current_namespace) = &doc_state.namespace {
12151215+ let workspace_guard = self.workspace.read().await;
12161216+ if let Some(workspace) = workspace_guard.as_ref() {
12171217+ let imports = workspace.get_imports(current_namespace);
12181218+ tracing::debug!("Found {} imports for namespace '{}'", imports.len(), current_namespace);
12191219+12201220+ for (local_name, original_path) in imports {
12211221+ // Format the original path for display
12221222+ let path_str = original_path.join(".");
12231223+ tracing::debug!(" - {} from {}", local_name, path_str);
12241224+ completions.push(CompletionItem {
12251225+ label: local_name.clone(),
12261226+ kind: Some(CompletionItemKind::TYPE_PARAMETER),
12271227+ detail: Some(format!("imported from {}", path_str)),
12281228+ ..Default::default()
12291229+ });
12301230+ }
12311231+12321232+ // Add prelude types
12331233+ let prelude_types = workspace.get_namespace_types("prelude");
12341234+ tracing::debug!("Found {} prelude types", prelude_types.len());
12351235+ for type_name in prelude_types {
12361236+ completions.push(CompletionItem {
12371237+ label: type_name.clone(),
12381238+ kind: Some(CompletionItemKind::TYPE_PARAMETER),
12391239+ detail: Some("from prelude".to_string()),
12401240+ ..Default::default()
12411241+ });
12421242+ }
12431243+12441244+ // Add NSIDs (fully qualified type references)
12451245+ // This allows users to type full NSIDs like "com.atproto.repo.strongRef"
12461246+ let nsids = workspace.get_all_nsids();
12471247+ tracing::debug!("Found {} NSIDs", nsids.len());
12481248+ for (nsid, namespace, type_name) in nsids {
12491249+ completions.push(CompletionItem {
12501250+ label: nsid.clone(),
12511251+ kind: Some(CompletionItemKind::REFERENCE),
12521252+ detail: Some(format!("{} from {}", type_name, namespace)),
12531253+ filter_text: Some(nsid.clone()),
12541254+ sort_text: Some(format!("z{}", nsid)), // Sort NSIDs after local types
12551255+ ..Default::default()
12561256+ });
12571257+ }
12581258+ }
12591259+ }
12601260+ }
12611261+12621262+ tracing::debug!("Total completions: {}", completions.len());
12631263+ }
12641264+12651265+ MlfCompletionContext::TopLevel => {
12661266+ // Suggest keywords for top-level declarations
12671267+ let keywords = vec![
12681268+ ("record", CompletionItemKind::KEYWORD, "Define a record type"),
12691269+ ("inline type", CompletionItemKind::KEYWORD, "Define an inline type"),
12701270+ ("def type", CompletionItemKind::KEYWORD, "Define a def type"),
12711271+ ("token", CompletionItemKind::KEYWORD, "Define a token"),
12721272+ ("query", CompletionItemKind::KEYWORD, "Define a query"),
12731273+ ("procedure", CompletionItemKind::KEYWORD, "Define a procedure"),
12741274+ ("subscription", CompletionItemKind::KEYWORD, "Define a subscription"),
12751275+ ("use", CompletionItemKind::KEYWORD, "Import types from another module"),
12761276+ ];
12771277+12781278+ for (label, kind, detail) in keywords {
12791279+ completions.push(CompletionItem {
12801280+ label: label.to_string(),
12811281+ kind: Some(kind),
12821282+ detail: Some(detail.to_string()),
12831283+ ..Default::default()
12841284+ });
12851285+ }
12861286+ }
12871287+12881288+ MlfCompletionContext::Unknown => {
12891289+ // Unknown context - don't suggest anything to avoid incorrect completions
12901290+ // For example, when user is typing a field name but hasn't typed ':' yet
12911291+ tracing::debug!("Unknown context - no completions");
12921292+ }
4991293 }
50012945011295 return Ok(Some(CompletionResponse::Array(completions)));
···53413285351329 if let Some(lexicon) = lexicon {
5361330 if let Some(offset) = position_to_offset(&text, position) {
13311331+ // Check if cursor is on a use statement
13321332+ for item in &lexicon.items {
13331333+ if let Item::Use(use_stmt) = item {
13341334+ // Check if the cursor is within the use statement's path
13351335+ if use_stmt.path.span.start <= offset && offset <= use_stmt.path.span.end {
13361336+ self.client
13371337+ .log_message(
13381338+ MessageType::INFO,
13391339+ format!("Found use statement at cursor: {}", use_stmt.path.to_string())
13401340+ )
13411341+ .await;
13421342+13431343+ // Determine which type is being referenced
13441344+ // For "use a.b.c", we navigate to the definition of c in namespace a.b
13451345+ // For "use a.b { c }", we would navigate to c in namespace a.b
13461346+13471347+ // Handle go-to-definition for use statements
13481348+ if let Some(ref current_ns) = current_namespace {
13491349+ if let Some((def_uri, def_span)) =
13501350+ self.find_definition_in_workspace_for_use(use_stmt, current_ns).await {
13511351+13521352+ let documents = self.documents.read().await;
13531353+ if let Some(target_doc) = documents.get(&def_uri) {
13541354+ let range = span_to_range(&target_doc.text, def_span);
13551355+13561356+ self.client
13571357+ .log_message(
13581358+ MessageType::INFO,
13591359+ format!("Returning use statement definition from: {}", def_uri)
13601360+ )
13611361+ .await;
13621362+13631363+ return Ok(Some(GotoDefinitionResponse::Scalar(
13641364+ Location {
13651365+ uri: def_uri,
13661366+ range,
13671367+ },
13681368+ )));
13691369+ }
13701370+ }
13711371+ }
13721372+ }
13731373+13741374+ // Check if cursor is on an imported item name
13751375+ if let UseImports::Items(items) = &use_stmt.imports {
13761376+ for import_item in items {
13771377+ if import_item.name.span.start <= offset && offset <= import_item.name.span.end {
13781378+ self.client
13791379+ .log_message(
13801380+ MessageType::INFO,
13811381+ format!("Found use item at cursor: {}", import_item.name.name)
13821382+ )
13831383+ .await;
13841384+13851385+ // Navigate to the definition of this specific item
13861386+ if current_namespace.is_some() {
13871387+ // Construct the full path to the item
13881388+ let target_namespace = use_stmt.path.to_string();
13891389+ let item_name = if import_item.name.name == "main" {
13901390+ // Special case: "main" resolves to namespace suffix
13911391+ target_namespace.split('.').last().unwrap_or(&import_item.name.name)
13921392+ } else {
13931393+ &import_item.name.name
13941394+ };
13951395+13961396+ if let Some((def_uri, def_span)) =
13971397+ self.find_definition_in_namespace(&target_namespace, item_name).await {
13981398+13991399+ let documents = self.documents.read().await;
14001400+ if let Some(target_doc) = documents.get(&def_uri) {
14011401+ let range = span_to_range(&target_doc.text, def_span);
14021402+14031403+ return Ok(Some(GotoDefinitionResponse::Scalar(
14041404+ Location {
14051405+ uri: def_uri,
14061406+ range,
14071407+ },
14081408+ )));
14091409+ }
14101410+ }
14111411+ }
14121412+ }
14131413+ }
14141414+ }
14151415+ }
14161416+ }
14171417+5371418 // Find type reference at this position
5381419 for item in &lexicon.items {
5391420 let type_to_check = match item {
···55314345541435 if let Some(ty) = type_to_check {
5551436 if let Some(Type::Reference { path, .. }) = find_type_at_offset(ty, offset) {
14371437+ self.client
14381438+ .log_message(
14391439+ MessageType::INFO,
14401440+ format!("Found reference at cursor: {}", path.to_string())
14411441+ )
14421442+ .await;
14431443+5561444 // Find the definition of this type
5571445 let target_name = if path.segments.len() == 1 {
5581446 &path.segments[0].name
···5601448 &path.segments.last().unwrap().name
5611449 };
562145014511451+ self.client
14521452+ .log_message(
14531453+ MessageType::INFO,
14541454+ format!("Target name: {}, path segments: {}", target_name, path.segments.len())
14551455+ )
14561456+ .await;
14571457+5631458 // First try to find in current file
5641459 for target_item in &lexicon.items {
5651460 if get_item_name(target_item) == target_name {
···57614715771472 let range = span_to_range(&text, def_span);
578147314741474+ self.client
14751475+ .log_message(
14761476+ MessageType::INFO,
14771477+ format!("Found definition in current file")
14781478+ )
14791479+ .await;
14801480+5791481 return Ok(Some(GotoDefinitionResponse::Scalar(
5801482 Location {
5811483 uri: uri.clone(),
···5851487 }
5861488 }
587148914901490+ self.client
14911491+ .log_message(
14921492+ MessageType::INFO,
14931493+ format!("Not found in current file, searching workspace...")
14941494+ )
14951495+ .await;
14961496+5881497 // Not found in current file - try workspace
5891498 if let Some(ref current_ns) = current_namespace {
14991499+ self.client
15001500+ .log_message(
15011501+ MessageType::INFO,
15021502+ format!("Current namespace: {}", current_ns)
15031503+ )
15041504+ .await;
15051505+5901506 if let Some((def_uri, def_span)) =
5911507 self.find_definition_in_workspace(target_name, current_ns, path).await {
5921508···5951511 if let Some(target_doc) = documents.get(&def_uri) {
5961512 let range = span_to_range(&target_doc.text, def_span);
597151315141514+ self.client
15151515+ .log_message(
15161516+ MessageType::INFO,
15171517+ format!("Returning definition from workspace: {}", def_uri)
15181518+ )
15191519+ .await;
15201520+5981521 return Ok(Some(GotoDefinitionResponse::Scalar(
5991522 Location {
6001523 uri: def_uri,
···6031526 )));
6041527 }
6051528 }
15291529+ } else {
15301530+ self.client
15311531+ .log_message(
15321532+ MessageType::WARNING,
15331533+ format!("No current namespace available")
15341534+ )
15351535+ .await;
6061536 }
6071537 }
6081538 }