//! A better Rust ATProto crate
1use crate::corpus::LexiconCorpus;
2use crate::error::{CodegenError, Result};
3use crate::lexicon::{LexArrayItem, LexUserType};
4use proc_macro2::TokenStream;
5use quote::quote;
6
7pub(crate) mod builder_gen;
8pub(crate) mod builder_heuristics;
9pub(crate) mod collect;
10pub(crate) mod lifetime;
11pub(crate) mod names;
12pub(crate) mod nsid_utils;
13pub(crate) mod output;
14pub(crate) mod prettify;
15pub(crate) mod schema_impl;
16pub(crate) mod structs;
17pub(crate) mod types;
18pub(crate) mod union_codegen;
19pub(crate) mod utils;
20pub(crate) mod xrpc;
21
22// Re-export types that external consumers need (binaries, test crates).
23pub use prettify::{CodegenMode, GeneratedCode, FileOutput};
24
/// Code generator for lexicon types
pub struct CodeGenerator<'c> {
    /// Corpus of lexicon documents the generator reads defs from (borrowed for `'c`)
    corpus: &'c LexiconCorpus,
    /// Root module name prefixed onto fully-qualified generated paths
    /// (e.g. `jacquard_api` — see the path assertions in the tests below)
    root_module: String,
    /// Output mode: `Macro` emits fully-qualified paths, `Pretty` emits short
    /// names (see `default_resolved_imports` docs)
    mode: prettify::CodegenMode,
    /// Track namespace dependencies (namespace -> set of namespaces it depends on)
    namespace_deps:
        std::cell::RefCell<std::collections::HashMap<String, std::collections::HashSet<String>>>,
    /// Track which file paths contain subscription endpoints
    subscription_files: std::cell::RefCell<std::collections::HashSet<std::path::PathBuf>>,
    /// Track which NSIDs have already generated their shared lexicon_doc function
    generated_shared_docs: std::cell::RefCell<std::collections::HashSet<String>>,
}
38
39impl<'c> CodeGenerator<'c> {
40 /// Create a new code generator
41 pub fn new(corpus: &'c LexiconCorpus, root_module: impl Into<String>) -> Self {
42 Self {
43 corpus,
44 root_module: root_module.into(),
45 mode: prettify::CodegenMode::Macro,
46 namespace_deps: std::cell::RefCell::new(std::collections::HashMap::new()),
47 subscription_files: std::cell::RefCell::new(std::collections::HashSet::new()),
48 generated_shared_docs: std::cell::RefCell::new(std::collections::HashSet::new()),
49 }
50 }
51
52 /// Create a new code generator with a specific codegen mode
53 pub fn with_mode(
54 corpus: &'c LexiconCorpus,
55 root_module: impl Into<String>,
56 mode: prettify::CodegenMode,
57 ) -> Self {
58 Self {
59 corpus,
60 root_module: root_module.into(),
61 mode,
62 namespace_deps: std::cell::RefCell::new(std::collections::HashMap::new()),
63 subscription_files: std::cell::RefCell::new(std::collections::HashSet::new()),
64 generated_shared_docs: std::cell::RefCell::new(std::collections::HashSet::new()),
65 }
66 }
67
    /// Generate doc comment from optional description (wrapper for utils function)
    ///
    /// Thin delegation to [`utils::generate_doc_comment`]; `self` is unused,
    /// the method form just mirrors the other `generate_*` helpers.
    fn generate_doc_comment(&self, desc: Option<&jacquard_common::CowStr>) -> TokenStream {
        utils::generate_doc_comment(desc)
    }
72
73 /// Create a ResolvedImports instance for this generator's mode with no collisions.
74 /// Used in tests and as a fallback when per-file ResolvedImports isn't available.
75 /// In Macro mode, this produces fully-qualified paths for all types.
76 /// In Pretty mode, this produces short names (but should normally use per-file ResolvedImports from Phase 2).
77 #[cfg(test)]
78 pub(crate) fn default_resolved_imports(&self) -> prettify::ResolvedImports {
79 prettify::ResolvedImports::resolve(
80 &prettify::ImportSet::default(),
81 &std::collections::HashSet::new(),
82 &std::collections::HashSet::new(),
83 self.mode,
84 &std::collections::BTreeMap::new(),
85 )
86 }
87
88 /// Track namespace dependency when a ref crosses namespace boundaries
89 pub(crate) fn track_ref_namespace_dep(&self, current_nsid: &str, ref_str: &str) {
90 use nsid_utils::NsidPath;
91
92 let current_path = NsidPath::parse(current_nsid);
93 let ref_path = nsid_utils::RefPath::parse(ref_str, None);
94 let ref_nsid_path = NsidPath::parse(ref_path.nsid());
95
96 let current_ns = current_path.namespace();
97 let ref_ns = ref_nsid_path.namespace();
98
99 // Only track if crossing namespace boundaries
100 if current_ns != ref_ns {
101 self.namespace_deps
102 .borrow_mut()
103 .entry(current_ns)
104 .or_default()
105 .insert(ref_ns);
106 }
107 }
108
109 /// Generate or reference the shared lexicon_doc function for this NSID.
110 /// Returns (optional shared function, trait impl tokens).
111 pub(crate) fn generate_schema_impl_with_shared(
112 &self,
113 type_name: &str,
114 nsid: &str,
115 def_name: &str,
116 has_lifetime: bool,
117 resolved: &prettify::ResolvedImports,
118 ) -> (Option<TokenStream>, TokenStream) {
119 let lex_doc = self.corpus.get(nsid).expect("nsid exists in corpus");
120
121 // Generate shared function name from NSID (use sanitize_name for proper handling)
122 let shared_fn_name = format!("lexicon_doc_{}", utils::sanitize_name(nsid));
123 let shared_fn_ident = syn::Ident::new(&shared_fn_name, proc_macro2::Span::call_site());
124
125 // Check if we need to generate the shared function
126 let mut generated = self.generated_shared_docs.borrow_mut();
127 let shared_fn = if !generated.contains(nsid) {
128 generated.insert(nsid.to_string());
129 // Codegen from JSON doesn't have union_fields (those are for Rust -> lexicon derive).
130 use crate::derive_impl::doc_to_tokens::{DocPaths, doc_to_tokens_with_paths};
131 let doc_paths = match self.mode {
132 prettify::CodegenMode::Pretty => DocPaths::short(),
133 prettify::CodegenMode::Macro => DocPaths::qualified(),
134 };
135 let scoped_imports = doc_paths.scoped_imports();
136 let doc_literal = doc_to_tokens_with_paths(
137 lex_doc,
138 &std::collections::BTreeMap::new(),
139 &doc_paths,
140 );
141 let lexicon_doc_path = resolved.external_type_tokens(&prettify::ExternalImport::LexiconDoc);
142 Some(quote! {
143 fn #shared_fn_ident() -> #lexicon_doc_path<'static> {
144 #scoped_imports
145 #doc_literal
146 }
147 })
148 } else {
149 None
150 };
151
152 // Generate lightweight trait impl that calls shared function
153 let type_ident = syn::Ident::new(type_name, proc_macro2::Span::call_site());
154 let (impl_generics, type_generics) = if has_lifetime {
155 (quote! { <'a> }, quote! { <'a> })
156 } else {
157 (quote! {}, quote! {})
158 };
159
160 // Extract validation checks for this specific def.
161 let validation_checks = schema_impl::extract_validation_checks(lex_doc, def_name);
162 let validation_code =
163 crate::derive_impl::doc_to_tokens::validations_to_tokens_resolved(
164 &validation_checks,
165 Some(resolved),
166 );
167
168 let constraint_error_type = resolved.external_type_tokens(&prettify::ExternalImport::ConstraintError);
169 let schema_path = resolved.external_type_tokens(&prettify::ExternalImport::LexiconSchema);
170 let lexicon_doc_path = resolved.external_type_tokens(&prettify::ExternalImport::LexiconDoc);
171
172 let trait_impl = quote! {
173 impl #impl_generics #schema_path for #type_ident #type_generics {
174 fn nsid() -> &'static str {
175 #nsid
176 }
177
178 fn def_name() -> &'static str {
179 #def_name
180 }
181
182 fn lexicon_doc() -> #lexicon_doc_path<'static> {
183 #shared_fn_ident()
184 }
185
186 fn validate(&self) -> Result<(), #constraint_error_type> {
187 #validation_code
188 }
189 }
190 };
191
192 (shared_fn, trait_impl)
193 }
194
    /// Generate code for a lexicon def
    ///
    /// Dispatches on the def's user type. Records, objects, queries,
    /// procedures, subscriptions, unions and known-values strings each have a
    /// dedicated `generate_*` helper; the remaining scalar-like types become
    /// simple type aliases. Blob is the one top-level type that is rejected.
    pub fn generate_def(
        &self,
        nsid: &str,
        def_name: &str,
        def: &LexUserType<'static>,
        resolved: &prettify::ResolvedImports,
    ) -> Result<GeneratedCode> {
        match def {
            LexUserType::Record(record) => self.generate_record(nsid, def_name, record, resolved),
            LexUserType::Object(obj) => self.generate_object(nsid, def_name, obj, resolved),
            LexUserType::XrpcQuery(query) => self.generate_query(nsid, def_name, query, resolved),
            LexUserType::XrpcProcedure(proc) => self.generate_procedure(nsid, def_name, proc, resolved),
            LexUserType::Token(token) => {
                // Token types are marker structs that can be used as union refs
                let type_name = self.def_to_type_name(nsid, def_name);
                let ident = syn::Ident::new(&type_name, proc_macro2::Span::call_site());
                let doc = self.generate_doc_comment(token.description.as_ref());

                // Token name for Display impl (just the def name, not the full ref)
                let token_name = def_name;

                let derive_attr = resolved.derive_standard_with(quote! { Hash });
                let tokens = quote! {
                    #doc
                    #derive_attr
                    pub struct #ident;

                    impl core::fmt::Display for #ident {
                        fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
                            // #token_name interpolates as a string literal here
                            write!(f, #token_name)
                        }
                    }
                };
                Ok(GeneratedCode::type_only(tokens))
            }
            // Strings with an enumerated `knownValues` list get a real enum.
            LexUserType::String(s) if s.known_values.is_some() => {
                self.generate_known_values_enum(nsid, def_name, s, resolved)
            }
            LexUserType::String(s) => {
                // Plain string type alias
                let type_name = self.def_to_type_name(nsid, def_name);
                let ident = syn::Ident::new(&type_name, proc_macro2::Span::call_site());
                let rust_type = self.string_to_rust_type(s, resolved);
                let doc = self.generate_doc_comment(s.description.as_ref());
                let tokens = quote! {
                    #doc
                    pub type #ident<'a> = #rust_type;
                };
                Ok(GeneratedCode::type_only(tokens))
            }
            LexUserType::Integer(i) if i.r#enum.is_some() => {
                self.generate_integer_enum(nsid, def_name, i)
            }
            LexUserType::Array(array) => {
                // Top-level array becomes type alias to Vec<ItemType>
                let type_name = self.def_to_type_name(nsid, def_name);
                let ident = syn::Ident::new(&type_name, proc_macro2::Span::call_site());
                let doc = self.generate_doc_comment(array.description.as_ref());
                let needs_lifetime = self.array_item_needs_lifetime(&array.items);

                // Check if items are a union - if so, generate the union enum first
                if let LexArrayItem::Union(union) = &array.items {
                    // The item enum is named `<Alias>Item` and emitted next to the alias.
                    let union_name = format!("{}Item", type_name);
                    let refs: Vec<_> = union.refs.iter().cloned().collect();
                    let union_generated = self.generate_union(nsid, &union_name, &refs, None, union.closed, resolved)?;

                    let union_ident = syn::Ident::new(&union_name, proc_macro2::Span::call_site());
                    let union_tokens = union_generated.into_tokens();
                    let type_alias = if needs_lifetime {
                        quote! {
                            #doc
                            pub type #ident<'a> = Vec<#union_ident<'a>>;
                        }
                    } else {
                        quote! {
                            #doc
                            pub type #ident = Vec<#union_ident>;
                        }
                    };

                    let type_defs = quote! {
                        #union_tokens

                        #type_alias
                    };
                    Ok(GeneratedCode::type_only(type_defs))
                } else {
                    // Regular array item type
                    let item_type = self.array_item_to_rust_type(nsid, &array.items, resolved)?;
                    let tokens = if needs_lifetime {
                        quote! {
                            #doc
                            pub type #ident<'a> = Vec<#item_type>;
                        }
                    } else {
                        quote! {
                            #doc
                            pub type #ident = Vec<#item_type>;
                        }
                    };
                    Ok(GeneratedCode::type_only(tokens))
                }
            }
            LexUserType::Boolean(_)
            | LexUserType::Integer(_)
            | LexUserType::Bytes(_)
            | LexUserType::CidLink(_)
            | LexUserType::Unknown(_) => {
                // These are rarely top-level defs, but if they are, make type aliases
                let type_name = self.def_to_type_name(nsid, def_name);
                let ident = syn::Ident::new(&type_name, proc_macro2::Span::call_site());
                // Second match narrows the same `def` to pick the alias target;
                // the bool records whether the alias needs a `'a` parameter.
                let (rust_type, needs_lifetime) = match def {
                    LexUserType::Boolean(_) => (quote! { bool }, false),
                    LexUserType::Integer(_) => (quote! { i64 }, false),
                    LexUserType::Bytes(_) => {
                        (resolved.external_type_tokens(&prettify::ExternalImport::Bytes), false)
                    }
                    LexUserType::CidLink(_) => {
                        (resolved.type_tokens(&prettify::CommonType::CidLink), true)
                    }
                    LexUserType::Unknown(_) => {
                        (resolved.type_tokens(&prettify::CommonType::Data), true)
                    }
                    // Unreachable: the outer arm only matches the five variants above.
                    _ => unreachable!(),
                };
                let tokens = if needs_lifetime {
                    quote! {
                        pub type #ident<'a> = #rust_type;
                    }
                } else {
                    quote! {
                        pub type #ident = #rust_type;
                    }
                };
                Ok(GeneratedCode::type_only(tokens))
            }
            LexUserType::Blob(_) => Err(CodegenError::unsupported(
                format!("top-level def type {:?}", def),
                nsid,
                None::<String>,
            )),
            LexUserType::XrpcSubscription(sub) => {
                // Track this file as containing a subscription
                let file_path = self.nsid_to_file_path(nsid);
                self.subscription_files.borrow_mut().insert(file_path);
                self.generate_subscription(nsid, def_name, sub, resolved)
            }
            LexUserType::Union(union) => {
                // Top-level union generates an enum
                let type_name = self.def_to_type_name(nsid, def_name);
                let refs: Vec<_> = union.refs.iter().cloned().collect();
                // Union already returns GeneratedCode, so just forward it
                self.generate_union(
                    nsid,
                    &type_name,
                    &refs,
                    union.description.as_ref().map(|d| d.as_ref()),
                    union.closed,
                    resolved,
                )
            }
        }
    }
359}
360
// Fixture-driven tests: each loads the corpus from tests/fixtures/test_lexicons,
// generates code for a specific NSID/def, pretty-prints it, and asserts on the
// formatted output's structure.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_generate_record() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus.get("app.bsky.feed.post").expect("get post");
        let def = doc.defs.get("main").expect("get main def");

        let resolved = codegen.default_resolved_imports();
        let generated = codegen
            .generate_def("app.bsky.feed.post", "main", def, &resolved)
            .expect("generate");
        let tokens = generated.into_tokens();

        // Format and print for inspection
        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check basic structure
        assert!(formatted.contains("struct Post"));
        assert!(formatted.contains("pub text"));
        assert!(formatted.contains("CowStr<'a>"));
    }

    #[test]
    fn test_generate_union() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        // Create a union with embed types
        let refs = vec![
            "app.bsky.embed.images".into(),
            "app.bsky.embed.video".into(),
            "app.bsky.embed.external".into(),
        ];

        let resolved = codegen.default_resolved_imports();
        let generated = codegen
            .generate_union(
                "app.bsky.feed.post",
                "RecordEmbed",
                &refs,
                Some("Post embed union"),
                None,
                &resolved,
            )
            .expect("generate union");
        let tokens = generated.into_tokens();

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check structure: variants named from refs, $type-tagged serde enum,
        // and the open_union attribute for non-closed unions.
        assert!(formatted.contains("enum RecordEmbed"));
        assert!(formatted.contains("Images"));
        assert!(formatted.contains("Video"));
        assert!(formatted.contains("External"));
        assert!(formatted.contains("#[serde(tag = \"$type\")]"));
        assert!(formatted.contains("#[jacquard_derive::open_union]"));
    }

    #[test]
    fn test_generate_query() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus
            .get("app.bsky.feed.getAuthorFeed")
            .expect("get getAuthorFeed");
        let def = doc.defs.get("main").expect("get main def");

        let resolved = codegen.default_resolved_imports();
        let generated = codegen
            .generate_def("app.bsky.feed.getAuthorFeed", "main", def, &resolved)
            .expect("generate");
        let tokens = generated.into_tokens();

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check structure: queries generate params struct + Output + Error enum.
        assert!(formatted.contains("struct GetAuthorFeed"));
        assert!(formatted.contains("struct GetAuthorFeedOutput"));
        assert!(formatted.contains("enum GetAuthorFeedError"));
        assert!(formatted.contains("pub actor"));
        assert!(formatted.contains("pub limit"));
        assert!(formatted.contains("pub cursor"));
        assert!(formatted.contains("pub feed"));
        assert!(formatted.contains("BlockedActor"));
        assert!(formatted.contains("BlockedByActor"));
    }

    #[test]
    fn test_generate_known_values_enum() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus
            .get("com.atproto.label.defs")
            .expect("get label defs");
        let def = doc.defs.get("labelValue").expect("get labelValue def");
        let resolved = codegen.default_resolved_imports();

        let generated = codegen
            .generate_def("com.atproto.label.defs", "labelValue", def, &resolved)
            .expect("generate");
        let tokens = generated.into_tokens();

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check structure: known values become variants, with an Other(CowStr)
        // catch-all plus From<&str> and as_str conversions.
        assert!(formatted.contains("enum LabelValue"));
        assert!(formatted.contains("Hide"));
        assert!(formatted.contains("NoPromote"));
        assert!(formatted.contains("Warn"));
        assert!(formatted.contains("DmcaViolation"));
        assert!(formatted.contains("Other(jacquard_common::CowStr"));
        assert!(formatted.contains("impl<'a> From<&'a str>"));
        assert!(formatted.contains("fn as_str(&self)"));
    }

    #[test]
    fn test_nsid_to_file_path() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        // Regular paths
        assert_eq!(
            codegen.nsid_to_file_path("app.bsky.feed.post"),
            std::path::PathBuf::from("app_bsky/feed/post.rs")
        );

        assert_eq!(
            codegen.nsid_to_file_path("app.bsky.feed.getAuthorFeed"),
            std::path::PathBuf::from("app_bsky/feed/get_author_feed.rs")
        );

        // Defs paths - should go in parent
        assert_eq!(
            codegen.nsid_to_file_path("com.atproto.label.defs"),
            std::path::PathBuf::from("com_atproto/label.rs")
        );
    }

    #[test]
    fn test_write_to_disk() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "test_generated");

        let tmp_dir =
            tempfile::tempdir().expect("should be able to create temp directory for output");
        let output_dir = std::path::PathBuf::from(tmp_dir.path());

        // Clean up any previous test output (no-op for a fresh tempdir; kept
        // as a safety measure)
        let _ = std::fs::remove_dir_all(&output_dir);

        // Generate and write
        codegen.write_to_disk(&output_dir).expect("write to disk");

        // Verify some files were created
        assert!(output_dir.join("app_bsky/feed/post.rs").exists());
        assert!(output_dir.join("app_bsky/feed/get_author_feed.rs").exists());
        assert!(output_dir.join("com_atproto/label.rs").exists());

        // Verify module files were created
        assert!(output_dir.join("lib.rs").exists());
        assert!(output_dir.join("app_bsky.rs").exists());

        // Read and verify post.rs contains expected content
        let post_content = std::fs::read_to_string(output_dir.join("app_bsky/feed/post.rs"))
            .expect("read post.rs");
        assert!(post_content.contains("pub struct Post"));
        assert!(post_content.contains("jacquard_common"));
    }

    #[test]
    fn test_generate_procedure() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus
            .get("com.atproto.repo.createRecord")
            .expect("get createRecord");
        let def = doc.defs.get("main").expect("get main def");
        let resolved = codegen.default_resolved_imports();

        let generated = codegen
            .generate_def("com.atproto.repo.createRecord", "main", def, &resolved)
            .expect("generate");
        let tokens = generated.into_tokens();

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check structure - procedures have input, output, and error types
        assert!(formatted.contains("struct CreateRecord"));
        assert!(formatted.contains("struct CreateRecordOutput"));
        assert!(formatted.contains("enum CreateRecordError"));
        // Check input fields
        assert!(formatted.contains("pub repo"));
        assert!(formatted.contains("pub collection"));
        assert!(formatted.contains("pub record"));
        // Check output fields
        assert!(formatted.contains("pub uri"));
        assert!(formatted.contains("pub cid"));
        // Check error variants
        assert!(formatted.contains("InvalidSwap"));
        assert!(formatted.contains("InvalidRecord"));
    }

    #[test]
    fn test_generate_subscription() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus
            .get("com.atproto.sync.subscribeRepos")
            .expect("get subscribeRepos");
        let def = doc.defs.get("main").expect("get main def");
        let resolved = codegen.default_resolved_imports();

        let generated = codegen
            .generate_def("com.atproto.sync.subscribeRepos", "main", def, &resolved)
            .expect("generate");
        let tokens = generated.into_tokens();

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check subscription structure
        assert!(formatted.contains("struct SubscribeRepos"));
        assert!(formatted.contains("enum SubscribeReposMessage"));
        // Check message union variants
        assert!(formatted.contains("Commit"));
        assert!(formatted.contains("Identity"));
        assert!(formatted.contains("Account"));
    }

    // NOTE(review): disabled test below predates the `ResolvedImports`
    // parameter (`&resolved` is referenced but never constructed) — update to
    // build a `resolved` via default_resolved_imports before re-enabling.
    // #[test]
    // fn test_generate_token_type() {
    //     let corpus =
    //         LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
    //     let codegen = CodeGenerator::new(&corpus, "jacquard_api");

    //     let doc = corpus.get("app.bsky.embed.images").expect("get images");
    //     let def = doc.defs.get("viewImage").expect("get viewImage def");

    //     let tokens = codegen
    //         .generate_def("app.bsky.embed.images", "viewImage", def, &resolved)
    //         .expect("generate");

    //     let file: syn::File = syn::parse2(tokens).expect("parse tokens");
    //     let formatted = prettyplease::unparse(&file);
    //     println!("\n{}\n", formatted);

    //     // Token types are unit structs
    //     assert!(formatted.contains("struct ViewImage"));
    //     // Should have Display implementation
    //     assert!(formatted.contains("impl std::fmt::Display"));
    // }

    #[test]
    fn test_generate_array_types() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus.get("test.array.types").expect("get array types");
        let def = doc.defs.get("main").expect("get main def");
        let resolved = codegen.default_resolved_imports();

        let generated = codegen
            .generate_def("test.array.types", "main", def, &resolved)
            .expect("generate");

        let tokens = generated.into_tokens();

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check different array item types
        assert!(formatted.contains("simple_strings"));
        assert!(formatted.contains("Vec<"));
        // Union array items should generate enum
        assert!(formatted.contains("union_items"));
        // Ref array items
        assert!(formatted.contains("ref_items"));
        // CID link arrays
        assert!(formatted.contains("cid_links"));
    }

    #[test]
    fn test_generate_binary_types() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus.get("test.binary.types").expect("get binary types");
        let def = doc.defs.get("main").expect("get main def");
        let resolved = codegen.default_resolved_imports();

        let generated = codegen
            .generate_def("test.binary.types", "main", def, &resolved)
            .expect("generate");

        let tokens = generated.into_tokens();

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check binary field types
        assert!(formatted.contains("pub cid"));
        assert!(formatted.contains("CidLink") || formatted.contains("types::cid"));
        assert!(formatted.contains("pub data"));
        assert!(formatted.contains("Bytes"));
        assert!(formatted.contains("pub avatar"));
        assert!(formatted.contains("BlobRef") || formatted.contains("types::blob"));
    }

    #[test]
    fn test_generate_empty_object() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus.get("test.empty.object").expect("get empty object");
        let def = doc.defs.get("emptyDef").expect("get emptyDef");
        let resolved = codegen.default_resolved_imports();

        let generated = codegen
            .generate_def("test.empty.object", "emptyDef", def, &resolved)
            .expect("generate");

        let tokens = generated.into_tokens();

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Empty objects should generate type alias to Data<'a>
        assert!(formatted.contains("type EmptyDef") || formatted.contains("Data<'a>"));
    }

    #[test]
    fn test_generate_multi_def_lexicon() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus
            .get("pub.leaflet.poll.definition")
            .expect("get poll definition");

        // Test main def
        let main_def = doc.defs.get("main").expect("get main def");
        let resolved = codegen.default_resolved_imports();
        let main_generated = codegen
            .generate_def("pub.leaflet.poll.definition", "main", main_def, &resolved)
            .expect("generate main");
        let main_tokens = main_generated.into_tokens();
        let main_file: syn::File = syn::parse2(main_tokens).expect("parse main tokens");
        let main_formatted = prettyplease::unparse(&main_file);
        println!("\nMain:\n{}\n", main_formatted);
        assert!(main_formatted.contains("struct Definition"));
        assert!(main_formatted.contains("pub question"));
        assert!(main_formatted.contains("pub options"));

        // Test option fragment - non-main defs get the main type name prefixed
        let option_def = doc.defs.get("option").expect("get option def");
        let option_generated = codegen
            .generate_def("pub.leaflet.poll.definition", "option", option_def, &resolved)
            .expect("generate option");
        let option_tokens = option_generated.into_tokens();
        let option_file: syn::File = syn::parse2(option_tokens).expect("parse option tokens");
        let option_formatted = prettyplease::unparse(&option_file);
        println!("\nOption:\n{}\n", option_formatted);
        assert!(option_formatted.contains("struct DefinitionOption"));
        assert!(option_formatted.contains("pub text"));

        // Test vote fragment
        let vote_def = doc.defs.get("vote").expect("get vote def");
        let vote_generated = codegen
            .generate_def("pub.leaflet.poll.definition", "vote", vote_def, &resolved)
            .expect("generate vote");
        let vote_tokens = vote_generated.into_tokens();
        let vote_file: syn::File = syn::parse2(vote_tokens).expect("parse vote tokens");
        let vote_formatted = prettyplease::unparse(&vote_file);
        println!("\nVote:\n{}\n", vote_formatted);
        assert!(
            vote_formatted.contains("struct DefinitionVote")
                || vote_formatted.contains("struct Vote")
        );
        assert!(vote_formatted.contains("pub poll_ref"));
        assert!(vote_formatted.contains("pub option_index"));
    }

    #[test]
    fn test_generate_with_constraints_and_defaults() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus
            .get("test.constraints.validation")
            .expect("get constraints");
        let def = doc.defs.get("main").expect("get main def");
        let resolved = codegen.default_resolved_imports();

        let generated = codegen
            .generate_def("test.constraints.validation", "main", def, &resolved)
            .expect("generate");

        let tokens = generated.into_tokens();

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check fields with constraints are generated
        assert!(formatted.contains("pub username"));
        assert!(formatted.contains("pub bio"));
        assert!(formatted.contains("pub age"));
        assert!(formatted.contains("pub enabled"));
        assert!(formatted.contains("pub tags"));
        assert!(formatted.contains("pub role"));

        // Constraints should be in docs or validation metadata
        // (exact format depends on codegen implementation)
    }

    #[test]
    fn test_local_refs_in_definitions() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus
            .get("pub.leaflet.poll.definition")
            .expect("get poll definition");
        let def = doc.defs.get("main").expect("get main def");
        let resolved = codegen.default_resolved_imports();

        let generated = codegen
            .generate_def("pub.leaflet.poll.definition", "main", def, &resolved)
            .expect("generate");

        let tokens = generated.into_tokens();

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Local ref #option should resolve to DefinitionOption type (fully qualified or local)
        assert!(
            formatted.contains("Vec<DefinitionOption")
                || formatted
                    .contains("Vec<jacquard_api::pub_leaflet::poll::definition::DefinitionOption")
        );
    }

    #[test]
    fn test_nullable_optional_properties() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus.get("test.binary.types").expect("get binary types");
        let def = doc.defs.get("main").expect("get main def");
        let resolved = codegen.default_resolved_imports();

        let generated = codegen
            .generate_def("test.binary.types", "main", def, &resolved)
            .expect("generate");

        let tokens = generated.into_tokens();

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Optional fields should use Option<T>
        assert!(formatted.contains("optional_cid"));
        assert!(formatted.contains("Option<"));
    }
}