api/Cargo.lock (+2 -2)
···
 
 [[package]]
 name = "slices"
-version = "0.1.0"
+version = "0.2.0"
 dependencies = [
  "anyhow",
  "async-graphql",
···
 
 [[package]]
 name = "slices-lexicon"
-version = "0.2.2"
+version = "0.2.3"
 dependencies = [
  "base64 0.21.7",
  "chrono",
crates/slices-lexicon/Cargo.lock (+1 -1)
crates/slices-lexicon/Cargo.toml (+2 -1)
···
 [package]
 name = "slices-lexicon"
-version = "0.2.2"
+version = "0.2.3"
 edition = "2021"
 description = "AT Protocol lexicon validation library for Slices"
 license = "MIT"
+repository = "https://tangled.org/@slices.network/slices/tree/main/crates/slices-lexicon"
 documentation = "https://docs.rs/slices-lexicon"
 readme = "README.md"
 keywords = ["atproto", "lexicon", "validation", "wasm", "bluesky"]
crates/slices-lexicon/README.md (+1 -1)
crates/slices-lexicon/src/validation/context.rs (+114 -2)
···
     /// Creates a new context with the reference added to the resolution stack
     ///
     /// This is used during recursive reference resolution to detect circular references.
+    /// Also updates the current lexicon context to the referenced lexicon, ensuring that
+    /// local references within the resolved schema are resolved in the correct lexicon.
     ///
     /// # Arguments
     ///
···
     ///
     /// # Returns
     ///
-    /// A new `ValidationContext` with the reference added to the stack
+    /// A new `ValidationContext` with the reference added to the stack and current lexicon updated
     pub fn with_reference(&self, reference: &str) -> Self {
         let mut reference_stack = self.reference_stack.clone();
         reference_stack.insert(reference.to_string());
 
+        // Parse the reference to extract the target lexicon ID
+        // This ensures local references in the resolved schema are resolved in the correct context
+        let new_current_lexicon_id = if let Ok((lexicon_id, _def_name)) = self.parse_reference(reference) {
+            Some(lexicon_id)
+        } else {
+            // If parsing fails, keep the current lexicon ID
+            self.current_lexicon_id.clone()
+        };
+
         Self {
             lexicons: self.lexicons.clone(),
             path: self.path.clone(),
             strict_mode: self.strict_mode,
-            current_lexicon_id: self.current_lexicon_id.clone(),
+            current_lexicon_id: new_current_lexicon_id,
             reference_stack,
         }
     }
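
In effect, once a cross-lexicon reference has been followed, a fragment-only reference such as `#bold` now resolves against the referenced lexicon rather than the lexicon that initiated the resolution. A minimal sketch of the intended call sequence, using the `ValidationContext` builder API exercised by the tests below; the lexicon IDs, the `source_doc`/`target_doc` documents, and the `#someDef` fragment are illustrative:

// Sketch only; assumes two JSON lexicon documents where com.example.source
// holds a ref to com.example.target, which defines #someDef locally.
let ctx = ValidationContext::builder()
    .with_lexicons(vec![source_doc, target_doc])
    .unwrap()
    .build()
    .unwrap()
    .with_current_lexicon("com.example.source");

// Following the cross-lexicon reference switches the current lexicon...
let ref_ctx = ctx.with_reference("com.example.target");
assert_eq!(ref_ctx.current_lexicon_id(), Some("com.example.target"));

// ...so the local fragment resolves inside com.example.target, not com.example.source.
let local_schema = ref_ctx.resolve_reference("#someDef").unwrap();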
···
         let image_schema = ctx.resolve_reference("com.example.media#image").unwrap();
         assert_eq!(image_schema.get("type").and_then(|t| t.as_str()), Some("object"));
         assert!(image_schema.get("required").is_some());
+    }
+
+    #[test]
+    fn test_cross_lexicon_union_with_local_refs() {
+        // Regression test for a bug where local references in a union weren't resolved
+        // in the correct lexicon context after a cross-lexicon reference.
+        // This mirrors the pub.leaflet.blocks.text -> pub.leaflet.richtext.facet scenario.
+
+        let ctx = ValidationContext::builder()
+            .with_lexicons(vec![
+                json!({
+                    "lexicon": 1,
+                    "id": "pub.leaflet.blocks.text",
+                    "defs": {
+                        "main": {
+                            "type": "object",
+                            "required": ["plaintext"],
+                            "properties": {
+                                "plaintext": {"type": "string"},
+                                "facets": {
+                                    "type": "array",
+                                    "items": {
+                                        "type": "ref",
+                                        "ref": "pub.leaflet.richtext.facet"
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }),
+                json!({
+                    "lexicon": 1,
+                    "id": "pub.leaflet.richtext.facet",
+                    "defs": {
+                        "main": {
+                            "type": "object",
+                            "required": ["index", "features"],
+                            "properties": {
+                                "index": {"type": "ref", "ref": "#byteSlice"},
+                                "features": {
+                                    "type": "array",
+                                    "items": {
+                                        "type": "union",
+                                        "refs": ["#bold", "#italic", "#link"]
+                                    }
+                                }
+                            }
+                        },
+                        "byteSlice": {
+                            "type": "object",
+                            "required": ["byteStart", "byteEnd"],
+                            "properties": {
+                                "byteStart": {"type": "integer", "minimum": 0},
+                                "byteEnd": {"type": "integer", "minimum": 0}
+                            }
+                        },
+                        "bold": {
+                            "type": "object",
+                            "description": "Facet feature for bold text",
+                            "properties": {}
+                        },
+                        "italic": {
+                            "type": "object",
+                            "description": "Facet feature for italic text",
+                            "properties": {}
+                        },
+                        "link": {
+                            "type": "object",
+                            "required": ["uri"],
+                            "properties": {
+                                "uri": {"type": "string", "format": "uri"}
+                            }
+                        }
+                    }
+                })
+            ])
+            .unwrap()
+            .build()
+            .unwrap()
+            .with_current_lexicon("pub.leaflet.blocks.text");
+
+        // Test 1: Verify we can resolve the cross-lexicon reference
+        let facet_schema = ctx.resolve_reference("pub.leaflet.richtext.facet").unwrap();
+        assert_eq!(facet_schema.get("type").and_then(|t| t.as_str()), Some("object"));
+
+        // Test 2: Verify that with_reference updates the current lexicon context correctly
+        let ref_ctx = ctx.with_reference("pub.leaflet.richtext.facet");
+        assert_eq!(ref_ctx.current_lexicon_id(), Some("pub.leaflet.richtext.facet"));
+
+        // Test 3: Most importantly, verify that local references resolve in the TARGET lexicon.
+        // This is the bug we fixed: #bold should resolve in pub.leaflet.richtext.facet,
+        // not pub.leaflet.blocks.text.
+        let bold_schema = ref_ctx.resolve_reference("#bold").unwrap();
+        assert_eq!(bold_schema.get("type").and_then(|t| t.as_str()), Some("object"));
+        assert_eq!(bold_schema.get("description").and_then(|d| d.as_str()), Some("Facet feature for bold text"));
+
+        // Test 4: Verify other local references in the union also work
+        let italic_schema = ref_ctx.resolve_reference("#italic").unwrap();
+        assert_eq!(italic_schema.get("description").and_then(|d| d.as_str()), Some("Facet feature for italic text"));
+
+        let link_schema = ref_ctx.resolve_reference("#link").unwrap();
+        assert!(link_schema.get("required").is_some());
     }
 
 }
crates/slices-lexicon/src/validation/field/array.rs (+2 -1)
crates/slices-lexicon/src/validation/field/union.rs (+7 -2)
···
             .with_lexicons(vec![json!({
                 "lexicon": 1,
                 "id": "com.example.test",
-                "defs": { "main": union_def.clone() }
+                "defs": {
+                    "main": union_def.clone(),
+                    "post": { "type": "object", "properties": {} },
+                    "repost": { "type": "object", "properties": {} }
+                }
             })])
             .unwrap()
             .build()
-            .unwrap();
+            .unwrap()
+            .with_current_lexicon("com.example.test");
 
         let validator = UnionValidator;
         assert!(validator.validate(&union_def, &ctx).is_ok());
crates/slices-lexicon/src/validation/primary/record.rs (+2 -1)
crates/slices-lexicon/src/validation/primary/subscription.rs (+7 -2)
···
             .with_lexicons(vec![json!({
                 "lexicon": 1,
                 "id": "com.example.test",
-                "defs": { "main": subscription.clone() }
+                "defs": {
+                    "main": subscription.clone(),
+                    "post": { "type": "object", "properties": {} },
+                    "like": { "type": "object", "properties": {} }
+                }
             })])
             .unwrap()
             .build()
-            .unwrap();
+            .unwrap()
+            .with_current_lexicon("com.example.test");
 
         let validator = SubscriptionValidator;
         assert!(validator.validate(&subscription, &ctx).is_ok());