+502
parakeet-lexgen/src/codegen.rs
+502
parakeet-lexgen/src/codegen.rs
···
1
+
use crate::types::*;
2
+
use std::collections::{BTreeMap, BTreeSet};
3
+
use std::fmt::Write as FmtWrite;
4
+
use std::io::{self, Write};
5
+
6
+
pub struct CodeGenerator {
7
+
lexicons: BTreeMap<String, Lexicon>,
8
+
output: Box<dyn Write>,
9
+
generated_types: BTreeSet<String>,
10
+
generated_enums: BTreeSet<String>,
11
+
pending_enums: Vec<(String, Vec<String>)>, // (enum_name, values)
12
+
imports: BTreeSet<String>,
13
+
}
14
+
15
+
impl CodeGenerator {
16
+
pub fn new(lexicons: BTreeMap<String, Lexicon>, output: Box<dyn Write>) -> Self {
17
+
let mut imports = BTreeSet::new();
18
+
// Always include these basic imports
19
+
imports.insert("serde::{Deserialize, Serialize}".to_string());
20
+
21
+
Self {
22
+
lexicons,
23
+
output,
24
+
generated_types: BTreeSet::new(),
25
+
generated_enums: BTreeSet::new(),
26
+
pending_enums: Vec::new(),
27
+
imports,
28
+
}
29
+
}
30
+
31
+
pub fn generate(&mut self) -> io::Result<()> {
32
+
// Write header
33
+
self.write_header()?;
34
+
35
+
// Group lexicons by namespace
36
+
let namespaces = self.group_by_namespace();
37
+
38
+
// Generate code for each namespace
39
+
for (ns, lexicon_ids) in namespaces {
40
+
self.generate_namespace(&ns, &lexicon_ids)?;
41
+
}
42
+
43
+
Ok(())
44
+
}
45
+
46
+
fn write_header(&mut self) -> io::Result<()> {
47
+
let header = r#"// THIS FILE IS AUTO-GENERATED. DO NOT EDIT.
48
+
// Generated from AT Protocol lexicons
49
+
50
+
use chrono::{DateTime, Utc};
51
+
use serde::{Deserialize, Serialize};
52
+
use serde_json::Value;
53
+
use std::collections::BTreeMap;
54
+
55
+
"#;
56
+
self.output.write_all(header.as_bytes())
57
+
}
58
+
59
+
fn group_by_namespace(&self) -> BTreeMap<String, Vec<String>> {
60
+
let mut namespaces: BTreeMap<String, Vec<String>> = BTreeMap::new();
61
+
62
+
for id in self.lexicons.keys() {
63
+
let parts: Vec<&str> = id.split('.').collect();
64
+
if parts.len() >= 3 {
65
+
// Group by first 3 parts (e.g., app.bsky.actor)
66
+
let namespace = parts[0..3].join(".");
67
+
namespaces.entry(namespace).or_default().push(id.clone());
68
+
}
69
+
}
70
+
71
+
namespaces
72
+
}
73
+
74
+
fn generate_namespace(&mut self, namespace: &str, lexicon_ids: &[String]) -> io::Result<()> {
75
+
// Convert namespace to module name (e.g., app.bsky.actor -> actor)
76
+
let module_name = namespace.split('.').last().unwrap_or(namespace);
77
+
writeln!(self.output, "pub mod {} {{", module_name)?;
78
+
writeln!(self.output, " use super::*;")?;
79
+
writeln!(self.output)?;
80
+
81
+
// Clear pending enums for this namespace
82
+
self.pending_enums.clear();
83
+
84
+
for id in lexicon_ids {
85
+
// Clone to avoid borrow issues
86
+
let lexicon = self.lexicons.get(id).cloned();
87
+
if let Some(lexicon) = lexicon {
88
+
self.generate_lexicon(&lexicon)?;
89
+
}
90
+
}
91
+
92
+
// Generate any enums that were collected
93
+
let enums_to_generate = self.pending_enums.clone();
94
+
for (enum_name, values) in enums_to_generate {
95
+
self.generate_enum(&enum_name, &values)?;
96
+
}
97
+
98
+
writeln!(self.output, "}}")?;
99
+
writeln!(self.output)?;
100
+
Ok(())
101
+
}
102
+
103
+
fn generate_enum(&mut self, name: &str, values: &[String]) -> io::Result<()> {
104
+
let mut code = String::new();
105
+
106
+
// Determine if this is a simple enum (all lowercase) or complex
107
+
let is_simple = values.iter().all(|v| v.chars().all(|c| c.is_lowercase() || c == '-'));
108
+
109
+
// Add derives
110
+
if is_simple {
111
+
writeln!(&mut code, " #[derive(Copy, Clone, Debug, Serialize, Deserialize)]").unwrap();
112
+
writeln!(&mut code, " #[serde(rename_all = \"lowercase\")]").unwrap();
113
+
} else {
114
+
writeln!(&mut code, " #[derive(Clone, Debug, Serialize, Deserialize)]").unwrap();
115
+
}
116
+
writeln!(&mut code, " pub enum {} {{", name).unwrap();
117
+
118
+
// Generate variants
119
+
for value in values {
120
+
if value.contains('.') || value.contains('#') {
121
+
// Complex value, use explicit rename
122
+
let variant_name = to_pascal_case(value.split('#').last().unwrap_or(value));
123
+
writeln!(&mut code, " #[serde(rename = \"{}\")]", value).unwrap();
124
+
writeln!(&mut code, " {},", variant_name).unwrap();
125
+
} else {
126
+
// Simple value
127
+
let variant_name = to_pascal_case(value);
128
+
writeln!(&mut code, " {},", variant_name).unwrap();
129
+
}
130
+
}
131
+
132
+
writeln!(&mut code, " }}").unwrap();
133
+
writeln!(&mut code).unwrap();
134
+
135
+
self.output.write_all(code.as_bytes())
136
+
}
137
+
138
+
fn generate_lexicon(&mut self, lexicon: &Lexicon) -> io::Result<()> {
139
+
// Generate types for each definition in the lexicon
140
+
for (name, def) in &lexicon.defs {
141
+
// Better type naming: use the definition name directly for non-main items
142
+
let type_name = if name == "main" {
143
+
to_type_name(&lexicon.id)
144
+
} else {
145
+
to_pascal_case(name)
146
+
};
147
+
148
+
if self.generated_types.contains(&type_name) {
149
+
continue;
150
+
}
151
+
152
+
match def {
153
+
LexiconDef::Object(obj) => {
154
+
self.generate_object(&type_name, obj)?;
155
+
self.generated_types.insert(type_name);
156
+
}
157
+
LexiconDef::Record(record) => {
158
+
self.generate_record_object(&type_name, &record.record)?;
159
+
self.generated_types.insert(type_name);
160
+
}
161
+
LexiconDef::Query(query) => {
162
+
self.generate_query(&lexicon.id, query)?;
163
+
}
164
+
LexiconDef::Procedure(procedure) => {
165
+
self.generate_procedure(&lexicon.id, procedure)?;
166
+
}
167
+
LexiconDef::String(string_def) => {
168
+
self.generate_string_type(&type_name, string_def)?;
169
+
self.generated_types.insert(type_name);
170
+
}
171
+
LexiconDef::Array(array_def) => {
172
+
self.generate_array_type(&type_name, array_def)?;
173
+
self.generated_types.insert(type_name);
174
+
}
175
+
LexiconDef::Token { .. } | LexiconDef::PermissionSet { .. } | LexiconDef::Subscription(_) => {
176
+
// Skip these for now
177
+
}
178
+
}
179
+
}
180
+
181
+
Ok(())
182
+
}
183
+
184
+
fn generate_object(&mut self, name: &str, obj: &LexiconObject) -> io::Result<()> {
185
+
let mut code = String::new();
186
+
187
+
// Add description as doc comment if available
188
+
if let Some(desc) = &obj.description {
189
+
for line in desc.lines() {
190
+
writeln!(&mut code, " /// {}", line).unwrap();
191
+
}
192
+
}
193
+
194
+
// Check if we should add Default derive
195
+
let has_all_optional = obj.required.as_ref()
196
+
.map(|r| r.is_empty())
197
+
.unwrap_or(true);
198
+
199
+
// Add derives
200
+
if has_all_optional {
201
+
writeln!(&mut code, " #[derive(Clone, Default, Debug, Serialize, Deserialize)]").unwrap();
202
+
} else {
203
+
writeln!(&mut code, " #[derive(Clone, Debug, Serialize, Deserialize)]").unwrap();
204
+
}
205
+
writeln!(&mut code, " #[serde(rename_all = \"camelCase\")]").unwrap();
206
+
writeln!(&mut code, " pub struct {} {{", name).unwrap();
207
+
208
+
// Generate fields
209
+
for (field_name, field_schema) in &obj.properties {
210
+
let rust_field = to_field_name(field_name);
211
+
212
+
// Check if this field is an enum and generate it
213
+
let rust_type = if let LexiconSchema::String(s) = field_schema {
214
+
if let Some(known_values) = &s.known_values {
215
+
if !known_values.is_empty() {
216
+
// Generate enum name from struct name and field name
217
+
let enum_name = format!("{}{}", name, to_pascal_case(field_name));
218
+
if !self.generated_enums.contains(&enum_name) {
219
+
self.pending_enums.push((enum_name.clone(), known_values.clone()));
220
+
self.generated_enums.insert(enum_name.clone());
221
+
}
222
+
enum_name
223
+
} else {
224
+
self.schema_to_rust_type(field_schema)
225
+
}
226
+
} else {
227
+
self.schema_to_rust_type(field_schema)
228
+
}
229
+
} else {
230
+
self.schema_to_rust_type(field_schema)
231
+
};
232
+
233
+
// Add field description if available
234
+
if let Some(desc) = self.get_schema_description(field_schema) {
235
+
for line in desc.lines() {
236
+
writeln!(&mut code, " /// {}", line).unwrap();
237
+
}
238
+
}
239
+
240
+
let is_required = obj.required.as_ref()
241
+
.map(|r| r.contains(field_name))
242
+
.unwrap_or(false);
243
+
244
+
if !is_required {
245
+
writeln!(&mut code, " #[serde(skip_serializing_if = \"Option::is_none\")]").unwrap();
246
+
writeln!(&mut code, " pub {}: Option<{}>,", rust_field, rust_type).unwrap();
247
+
} else {
248
+
writeln!(&mut code, " pub {}: {},", rust_field, rust_type).unwrap();
249
+
}
250
+
}
251
+
252
+
writeln!(&mut code, " }}").unwrap();
253
+
writeln!(&mut code).unwrap();
254
+
255
+
self.output.write_all(code.as_bytes())
256
+
}
257
+
258
+
/// Records have the same shape as plain objects; delegate to `generate_object`.
fn generate_record_object(&mut self, name: &str, obj: &LexiconObject) -> io::Result<()> {
    self.generate_object(name, obj)
}
261
+
262
+
fn generate_query(&mut self, id: &str, query: &LexiconQuery) -> io::Result<()> {
263
+
let name = to_type_name(id);
264
+
265
+
// Generate params struct if exists
266
+
if let Some(params) = &query.parameters {
267
+
let params_name = format!("{}Params", name);
268
+
self.generate_params_struct(¶ms_name, params, query.description.as_deref())?;
269
+
}
270
+
271
+
// Generate output struct if exists
272
+
if let Some(output) = &query.output {
273
+
if let Some(schema) = &output.schema {
274
+
let output_name = format!("{}Response", name);
275
+
match schema {
276
+
LexiconSchema::Object(obj) => {
277
+
self.generate_object(&output_name, obj)?;
278
+
}
279
+
_ => {}
280
+
}
281
+
}
282
+
}
283
+
284
+
Ok(())
285
+
}
286
+
287
+
fn generate_procedure(&mut self, id: &str, procedure: &LexiconProcedure) -> io::Result<()> {
288
+
let name = to_type_name(id);
289
+
290
+
// Generate input struct if exists
291
+
if let Some(input) = &procedure.input {
292
+
if let Some(schema) = &input.schema {
293
+
let input_name = format!("{}Input", name);
294
+
match schema {
295
+
LexiconSchema::Object(obj) => {
296
+
self.generate_object(&input_name, obj)?;
297
+
}
298
+
_ => {}
299
+
}
300
+
}
301
+
}
302
+
303
+
// Generate output struct if exists
304
+
if let Some(output) = &procedure.output {
305
+
if let Some(schema) = &output.schema {
306
+
let output_name = format!("{}Output", name);
307
+
match schema {
308
+
LexiconSchema::Object(obj) => {
309
+
self.generate_object(&output_name, obj)?;
310
+
}
311
+
_ => {}
312
+
}
313
+
}
314
+
}
315
+
316
+
Ok(())
317
+
}
318
+
319
+
fn generate_params_struct(&mut self, name: &str, params: &LexiconParams, description: Option<&str>) -> io::Result<()> {
320
+
let mut code = String::new();
321
+
322
+
// Add description if available
323
+
if let Some(desc) = description.or(params.description.as_deref()) {
324
+
for line in desc.lines() {
325
+
writeln!(&mut code, " /// {}", line).unwrap();
326
+
}
327
+
}
328
+
329
+
writeln!(&mut code, " #[derive(Clone, Debug, Deserialize)]").unwrap();
330
+
writeln!(&mut code, " #[serde(rename_all = \"camelCase\")]").unwrap();
331
+
writeln!(&mut code, " pub struct {} {{", name).unwrap();
332
+
333
+
for (param_name, param_schema) in ¶ms.properties {
334
+
let rust_field = to_field_name(param_name);
335
+
let rust_type = self.schema_to_rust_type(param_schema);
336
+
337
+
// Add field description if available
338
+
if let Some(desc) = self.get_schema_description(param_schema) {
339
+
for line in desc.lines() {
340
+
writeln!(&mut code, " /// {}", line).unwrap();
341
+
}
342
+
}
343
+
344
+
// Parameters are typically optional in query strings
345
+
let is_required = false;
346
+
347
+
if !is_required {
348
+
writeln!(&mut code, " pub {}: Option<{}>,", rust_field, rust_type).unwrap();
349
+
} else {
350
+
writeln!(&mut code, " pub {}: {},", rust_field, rust_type).unwrap();
351
+
}
352
+
}
353
+
354
+
writeln!(&mut code, " }}").unwrap();
355
+
writeln!(&mut code).unwrap();
356
+
357
+
self.output.write_all(code.as_bytes())
358
+
}
359
+
360
+
fn generate_string_type(&mut self, name: &str, _string_def: &LexiconString) -> io::Result<()> {
361
+
// For now, just create a type alias
362
+
let mut code = String::new();
363
+
writeln!(&mut code, " pub type {} = String;", name).unwrap();
364
+
writeln!(&mut code).unwrap();
365
+
self.output.write_all(code.as_bytes())
366
+
}
367
+
368
+
fn generate_array_type(&mut self, name: &str, array_def: &LexiconArray) -> io::Result<()> {
369
+
let inner_type = self.schema_to_rust_type(&array_def.items);
370
+
let mut code = String::new();
371
+
writeln!(&mut code, " pub type {} = Vec<{}>;", name, inner_type).unwrap();
372
+
writeln!(&mut code).unwrap();
373
+
self.output.write_all(code.as_bytes())
374
+
}
375
+
376
+
fn get_schema_description<'a>(&self, schema: &'a LexiconSchema) -> Option<&'a String> {
377
+
match schema {
378
+
LexiconSchema::Null { description } |
379
+
LexiconSchema::Boolean { description, .. } |
380
+
LexiconSchema::Bytes { description, .. } |
381
+
LexiconSchema::CidLink { description } |
382
+
LexiconSchema::Blob { description, .. } |
383
+
LexiconSchema::Ref { description, .. } |
384
+
LexiconSchema::Unknown { description } => description.as_ref(),
385
+
LexiconSchema::Integer(i) => i.description.as_ref(),
386
+
LexiconSchema::String(s) => s.description.as_ref(),
387
+
LexiconSchema::Array(a) => a.description.as_ref(),
388
+
LexiconSchema::Object(o) => o.description.as_ref(),
389
+
LexiconSchema::Union(u) => u.description.as_ref(),
390
+
}
391
+
}
392
+
393
+
fn schema_to_rust_type(&self, schema: &LexiconSchema) -> String {
394
+
match schema {
395
+
LexiconSchema::Boolean { .. } => "bool".to_string(),
396
+
LexiconSchema::Integer(_) => "i64".to_string(),
397
+
LexiconSchema::String(s) => {
398
+
// Check for special formats
399
+
if let Some(format) = &s.format {
400
+
match format {
401
+
StringFormats::Datetime => return "DateTime<Utc>".to_string(),
402
+
StringFormats::Uri | StringFormats::AtUri => return "String".to_string(),
403
+
StringFormats::Did => return "String".to_string(),
404
+
StringFormats::Handle => return "String".to_string(),
405
+
StringFormats::Cid => return "String".to_string(),
406
+
_ => {}
407
+
}
408
+
}
409
+
// Check if this is an enum
410
+
if let Some(known_values) = &s.known_values {
411
+
if !known_values.is_empty() {
412
+
// Generate an enum name based on context (will need to track this)
413
+
return "String".to_string(); // For now, will improve this
414
+
}
415
+
}
416
+
"String".to_string()
417
+
}
418
+
LexiconSchema::Bytes { .. } => "Vec<u8>".to_string(),
419
+
LexiconSchema::CidLink { .. } => "String".to_string(),
420
+
LexiconSchema::Array(arr) => {
421
+
format!("Vec<{}>", self.schema_to_rust_type(&arr.items))
422
+
}
423
+
LexiconSchema::Object(_) => "Value".to_string(),
424
+
LexiconSchema::Blob { .. } => "Blob".to_string(),
425
+
LexiconSchema::Ref { ref_to, .. } => {
426
+
// Handle cross-module references
427
+
if ref_to.contains('.') && !ref_to.starts_with('#') {
428
+
// This is a cross-module reference
429
+
// For now, just use the type name, will add imports later
430
+
to_type_name(ref_to)
431
+
} else {
432
+
to_type_name(ref_to)
433
+
}
434
+
}
435
+
LexiconSchema::Union(_) => "Value".to_string(), // Will handle later
436
+
LexiconSchema::Unknown { .. } => "Value".to_string(),
437
+
LexiconSchema::Null { .. } => "()".to_string(),
438
+
}
439
+
}
440
+
}
441
+
442
+
// Helper functions to convert names to Rust conventions
443
+
fn to_type_name(s: &str) -> String {
444
+
// Handle references like "app.bsky.actor.defs#profileView"
445
+
let (base, fragment) = if s.contains('#') {
446
+
let parts: Vec<&str> = s.split('#').collect();
447
+
(parts[0], Some(parts[1]))
448
+
} else {
449
+
(s, None)
450
+
};
451
+
452
+
// If we have a fragment and it's not "main", use that as the primary name
453
+
if let Some(frag) = fragment {
454
+
if frag != "main" {
455
+
return to_pascal_case(frag);
456
+
}
457
+
}
458
+
459
+
// Otherwise use the last part of the base
460
+
let base_parts: Vec<&str> = base.split('.').collect();
461
+
let base_name = base_parts.last().unwrap_or(&base);
462
+
to_pascal_case(base_name)
463
+
}
464
+
465
+
/// Converts snake_case / kebab-case (e.g. "follow-request") to PascalCase.
/// Separators ('_' and '-') are dropped and the following character is
/// uppercased; the first character is always uppercased.
fn to_pascal_case(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    let mut upper_next = true;

    for c in s.chars() {
        match c {
            '_' | '-' => upper_next = true,
            c if upper_next => {
                // Push only the first char of the uppercase mapping, matching
                // the original behavior for multi-char case mappings.
                out.push(c.to_uppercase().next().unwrap_or(c));
                upper_next = false;
            }
            c => out.push(c),
        }
    }

    out
}
482
+
483
+
/// Converts a lexicon field name (typically camelCase) to snake_case,
/// escaping Rust keywords with a raw-identifier prefix.
///
/// The old version escaped only `type` and `ref`; any lexicon field named
/// `match`, `struct`, `async`, etc. produced generated code that would not
/// compile. All keywords that admit the `r#` form are now covered.
/// (`self`, `Self`, `crate`, and `super` cannot be raw identifiers and would
/// need renaming instead — none appear in current lexicons as far as this
/// file shows; TODO confirm.)
fn to_field_name(s: &str) -> String {
    // camelCase -> snake_case: underscore at each lower->upper boundary,
    // but not inside an acronym run ("URI" -> "uri", not "u_r_i").
    let mut result = String::with_capacity(s.len());
    let mut prev_upper = false;

    for (i, ch) in s.chars().enumerate() {
        if ch.is_uppercase() && i > 0 && !prev_upper {
            result.push('_');
        }
        result.push(ch.to_lowercase().next().unwrap_or(ch));
        prev_upper = ch.is_uppercase();
    }

    // Keywords (strict + reserved) that are valid as raw identifiers.
    const KEYWORDS: &[&str] = &[
        "abstract", "as", "async", "await", "become", "box", "break", "const",
        "continue", "do", "dyn", "else", "enum", "extern", "false", "final",
        "fn", "for", "if", "impl", "in", "let", "loop", "macro", "match",
        "mod", "move", "mut", "override", "priv", "pub", "ref", "return",
        "static", "struct", "trait", "true", "try", "type", "typeof",
        "unsafe", "unsized", "use", "virtual", "where", "while", "yield",
    ];

    if KEYWORDS.contains(&result.as_str()) {
        format!("r#{}", result)
    } else {
        result
    }
}
+8
parakeet-lexgen/src/main.rs
+8
parakeet-lexgen/src/main.rs
···
2
2
use std::collections::BTreeMap;
3
3
use walkdir::WalkDir;
4
4
5
+
mod codegen;
5
6
mod types;
6
7
mod validate;
7
8
···
82
83
eprintln!("Lexicon {fail_nsid} failed validation: {fail_err:?}");
83
84
}
84
85
}
86
+
87
+
// Generate code
88
+
let output_file = std::fs::File::create(&args.out)?;
89
+
let mut generator = codegen::CodeGenerator::new(parsed, Box::new(output_file));
90
+
generator.generate()?;
91
+
92
+
println!("Generated code written to: {}", args.out);
85
93
86
94
Ok(())
87
95
}
+25
-22
parakeet-lexgen/src/types.rs
+25
-22
parakeet-lexgen/src/types.rs
···
2
2
use std::collections::BTreeMap;
3
3
4
4
#[expect(dead_code, reason = "Lexicon type used for parsing AT Protocol lexicon schemas")]
5
-
#[derive(Debug, Deserialize)]
5
+
#[derive(Debug, Clone, Deserialize)]
6
6
pub struct Lexicon {
7
-
/// Lexicon language version
8
7
pub lexicon: i32,
9
-
// Lexicon NSID
10
8
pub id: String,
11
9
pub revision: Option<i32>,
12
10
pub description: Option<String>,
···
14
12
}
15
13
16
14
#[expect(dead_code, reason = "Enum variants used for lexicon definition deserialization")]
17
-
#[derive(Debug, Deserialize)]
15
+
#[derive(Debug, Clone, Deserialize)]
18
16
#[serde(tag = "type")]
19
-
#[serde(rename_all = "lowercase")]
17
+
#[serde(rename_all = "kebab-case")]
20
18
pub enum LexiconDef {
21
19
Query(LexiconQuery),
22
20
Procedure(Box<LexiconProcedure>),
···
26
24
Array(LexiconArray),
27
25
Object(LexiconObject),
28
26
Token { description: Option<String> },
27
+
#[serde(rename = "permission-set")]
28
+
PermissionSet {
29
+
description: Option<String>,
30
+
permissions: Option<Vec<String>>,
31
+
},
29
32
}
30
33
31
34
#[expect(dead_code, reason = "Schema variants used for AT Protocol type definitions")]
32
-
#[derive(Debug, Deserialize)]
35
+
#[derive(Debug, Clone, Deserialize)]
33
36
#[serde(tag = "type")]
34
37
#[serde(rename_all = "kebab-case")]
35
38
pub enum LexiconSchema {
···
73
76
},
74
77
}
75
78
76
-
#[derive(Debug, Deserialize)]
79
+
#[derive(Debug, Clone, Deserialize)]
77
80
#[serde(rename_all = "lowercase")]
78
81
pub enum RKey {
79
82
Tid,
···
82
85
}
83
86
84
87
#[expect(dead_code, reason = "Used for AT Protocol lexicon schema deserialization")]
85
-
#[derive(Debug, Deserialize)]
88
+
#[derive(Debug, Clone, Deserialize)]
86
89
#[serde(untagged)]
87
90
pub enum RecordKey {
88
91
Typed(RKey),
89
92
Other(String),
90
93
}
91
94
92
-
#[derive(Debug, Deserialize)]
95
+
#[derive(Debug, Clone, Deserialize)]
93
96
#[serde(rename_all = "kebab-case")]
94
97
pub enum StringFormats {
95
98
AtIdentifier,
···
106
109
}
107
110
108
111
#[expect(dead_code, reason = "Used for AT Protocol lexicon schema deserialization")]
109
-
#[derive(Debug, Deserialize)]
112
+
#[derive(Debug, Clone, Deserialize)]
110
113
pub struct HttpApiExchange {
111
114
pub description: Option<String>,
112
115
pub encoding: String,
···
114
117
}
115
118
116
119
#[expect(dead_code, reason = "Used for AT Protocol lexicon schema deserialization")]
117
-
#[derive(Debug, Deserialize)]
120
+
#[derive(Debug, Clone, Deserialize)]
118
121
pub struct LexiconError {
119
122
pub name: String,
120
123
pub description: Option<String>,
121
124
}
122
125
123
126
#[expect(dead_code, reason = "Used for AT Protocol lexicon schema deserialization")]
124
-
#[derive(Debug, Deserialize)]
127
+
#[derive(Debug, Clone, Deserialize)]
125
128
pub struct LexiconSubMessage {
126
129
pub description: Option<String>,
127
130
pub schema: Option<LexiconUnion>,
128
131
}
129
132
130
133
#[expect(dead_code, reason = "Used for AT Protocol lexicon schema deserialization")]
131
-
#[derive(Debug, Deserialize)]
134
+
#[derive(Debug, Clone, Deserialize)]
132
135
pub struct LexiconRecord {
133
136
pub description: Option<String>,
134
137
pub key: RecordKey,
···
136
139
}
137
140
138
141
#[expect(dead_code, reason = "Used for AT Protocol lexicon schema deserialization")]
139
-
#[derive(Debug, Deserialize)]
142
+
#[derive(Debug, Clone, Deserialize)]
140
143
pub struct LexiconQuery {
141
144
pub description: Option<String>,
142
145
pub parameters: Option<LexiconParams>,
···
145
148
}
146
149
147
150
#[expect(dead_code, reason = "Used for AT Protocol lexicon schema deserialization")]
148
-
#[derive(Debug, Deserialize)]
151
+
#[derive(Debug, Clone, Deserialize)]
149
152
pub struct LexiconProcedure {
150
153
pub description: Option<String>,
151
154
pub parameters: Option<LexiconParams>,
···
155
158
}
156
159
157
160
#[expect(dead_code, reason = "Used for AT Protocol lexicon schema deserialization")]
158
-
#[derive(Debug, Deserialize)]
161
+
#[derive(Debug, Clone, Deserialize)]
159
162
pub struct LexiconSubscription {
160
163
pub description: Option<String>,
161
164
pub parameters: Option<LexiconParams>,
···
164
167
}
165
168
166
169
#[expect(dead_code, reason = "Used for AT Protocol lexicon schema deserialization")]
167
-
#[derive(Debug, Deserialize)]
170
+
#[derive(Debug, Clone, Deserialize)]
168
171
pub struct LexiconInteger {
169
172
pub description: Option<String>,
170
173
···
178
181
}
179
182
180
183
#[expect(dead_code, reason = "Used for AT Protocol lexicon schema deserialization")]
181
-
#[derive(Debug, Deserialize)]
184
+
#[derive(Debug, Clone, Deserialize)]
182
185
#[serde(rename_all = "camelCase")]
183
186
pub struct LexiconString {
184
187
pub description: Option<String>,
···
200
203
}
201
204
202
205
#[expect(dead_code, reason = "Used for AT Protocol lexicon schema deserialization")]
203
-
#[derive(Debug, Deserialize)]
206
+
#[derive(Debug, Clone, Deserialize)]
204
207
#[serde(rename_all = "camelCase")]
205
208
pub struct LexiconArray {
206
209
pub description: Option<String>,
···
211
214
}
212
215
213
216
#[expect(dead_code, reason = "Used for AT Protocol lexicon schema deserialization")]
214
-
#[derive(Debug, Deserialize)]
217
+
#[derive(Debug, Clone, Deserialize)]
215
218
#[serde(tag = "type")]
216
219
#[serde(rename = "object")]
217
220
pub struct LexiconObject {
···
222
225
}
223
226
224
227
#[expect(dead_code, reason = "Used for AT Protocol lexicon schema deserialization")]
225
-
#[derive(Debug, Deserialize)]
228
+
#[derive(Debug, Clone, Deserialize)]
226
229
#[serde(tag = "type")]
227
230
#[serde(rename = "params")]
228
231
pub struct LexiconParams {
···
231
234
}
232
235
233
236
#[expect(dead_code, reason = "Used for AT Protocol lexicon schema deserialization")]
234
-
#[derive(Debug, Deserialize)]
237
+
#[derive(Debug, Clone, Deserialize)]
235
238
#[serde(tag = "type")]
236
239
#[serde(rename = "union")]
237
240
pub struct LexiconUnion {
+1
-1
parakeet-lexgen/src/validate.rs
+1
-1
parakeet-lexgen/src/validate.rs
···
42
42
LexiconDef::String(lex_str) => validate_string(lex_str, lexica, lexicon_id),
43
43
LexiconDef::Array(array) => validate_schema(&array.items, lexica, lexicon_id),
44
44
LexiconDef::Object(obj) => validate_object(obj, lexica, lexicon_id),
45
-
LexiconDef::Token { .. } => continue,
45
+
LexiconDef::Token { .. } | LexiconDef::PermissionSet { .. } => continue,
46
46
};
47
47
48
48
if let Some(err) = pass {