+23
.github/workflows/test.yml
+23
.github/workflows/test.yml
···
1
+
name: test

on:
  push:
    branches:
      - master
      - main
  pull_request:

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      # Install the BEAM toolchain (Erlang/OTP + Gleam + rebar3).
      - uses: erlef/setup-beam@v1
        with:
          otp-version: "28"
          gleam-version: "1.13.0"
          rebar3-version: "3"
          # elixir-version: "1"
      - run: gleam deps download
      - run: gleam test
      # Fail the build when source or tests are not `gleam format`-ed.
      - run: gleam format --check src test
+139
README.md
+139
README.md
···
1
+
# honk
2
+
3
+
[](https://hex.pm/packages/honk)
4
+
[](https://hexdocs.pm/honk/)
5
+
6
+
An ATProto Lexicon validator for Gleam.
7
+
8
+
## What is it?
9
+
10
+
**honk** validates [AT Protocol](https://atproto.com/) lexicon schemas and data against those schemas. Lexicons are the schema language used by the AT Protocol to define record types, XRPC endpoints, and data structures.
11
+
12
+
## Installation
13
+
14
+
```sh
15
+
gleam add honk@1
16
+
```
17
+
18
+
## Quick Start
19
+
20
+
### Validate a Lexicon Schema
21
+
22
+
```gleam
23
+
import honk
import gleam/io
import gleam/json
25
+
26
+
pub fn main() {
27
+
let lexicon = json.object([
28
+
#("lexicon", json.int(1)),
29
+
#("id", json.string("xyz.statusphere.status")),
30
+
#("defs", json.object([
31
+
#("main", json.object([
32
+
#("type", json.string("record")),
33
+
#("key", json.string("tid")),
34
+
#("record", json.object([
35
+
#("type", json.string("object")),
36
+
#("required", json.preprocessed_array([
37
+
json.string("status"),
38
+
json.string("createdAt"),
39
+
])),
40
+
#("properties", json.object([
41
+
#("status", json.object([
42
+
#("type", json.string("string")),
43
+
#("minLength", json.int(1)),
44
+
#("maxGraphemes", json.int(1)),
45
+
#("maxLength", json.int(32)),
46
+
])),
47
+
#("createdAt", json.object([
48
+
#("type", json.string("string")),
49
+
#("format", json.string("datetime")),
50
+
])),
51
+
])),
52
+
])),
53
+
])),
54
+
])),
55
+
])
56
+
57
+
case honk.validate([lexicon]) {
58
+
Ok(_) -> io.println("✓ Lexicon is valid")
59
+
    Error(_errors) -> io.println("✗ Validation failed")
60
+
}
61
+
}
62
+
```
63
+
64
+
### Validate Record Data
65
+
66
+
```gleam
67
+
import honk
import gleam/io
import gleam/json
69
+
70
+
pub fn validate_status() {
71
+
let lexicons = [my_lexicon] // Your lexicon definitions
72
+
let record_data = json.object([
73
+
#("status", json.string("👍")),
74
+
#("createdAt", json.string("2025-01-15T12:00:00Z")),
75
+
])
76
+
77
+
case honk.validate_record(lexicons, "xyz.statusphere.status", record_data) {
78
+
Ok(_) -> io.println("✓ Record is valid")
79
+
    Error(_err) -> io.println("✗ Invalid record")
80
+
}
81
+
}
82
+
```
83
+
84
+
## Features
85
+
86
+
- ✅ **17 Type Validators**: string, integer, boolean, bytes, blob, cid-link, null, object, array, union, ref, record, query, procedure, subscription, token, unknown
87
+
- ✅ **11 String Format Validators**: datetime (RFC3339), uri, at-uri, did, handle, at-identifier, nsid, cid, language, tid, record-key
88
+
- ✅ **Constraint Validation**: length limits, ranges, enums, required fields
89
+
- ✅ **Reference Resolution**: local (`#def`), global (`nsid#def`), and cross-lexicon references
90
+
- ✅ **Circular Dependency Detection**: prevents infinite reference loops
91
+
- ✅ **Detailed Error Messages**: validation errors with path information
92
+
93
+
## API Overview
94
+
95
+
### Main Functions
96
+
97
+
- `validate(lexicons: List(Json))` - Validates one or more lexicon schemas
98
+
- `validate_record(lexicons, nsid, data)` - Validates record data against a schema
99
+
- `is_valid_nsid(value)` - Checks if a string is a valid NSID
100
+
- `validate_string_format(value, format)` - Validates string against a format
101
+
102
+
### Context Builder Pattern
103
+
104
+
```gleam
105
+
import validation/context
106
+
import validation/field
107
+
108
+
let assert Ok(ctx) =
109
+
context.builder()
110
+
|> context.with_validator(field.dispatch_data_validation)
111
+
|> context.with_lexicons([lexicon])
112
+
|> context.build
113
+
```
114
+
115
+
## Testing
116
+
117
+
```sh
118
+
gleam test
119
+
```
120
+
121
+
## Implementation
122
+
123
+
This implementation aligns with the [indigo/atproto/lexicon](https://github.com/bluesky-social/indigo/tree/main/atproto/lexicon) implementation as much as possible, ensuring compatibility with the ATProto specification and ecosystem.
124
+
125
+
## Documentation
126
+
127
+
Further documentation can be found at <https://hexdocs.pm/honk>.
128
+
129
+
## Development
130
+
131
+
```sh
132
+
gleam run # Run the project
133
+
gleam test # Run the tests
134
+
gleam build # Build the project
135
+
```
136
+
137
+
## License
138
+
139
+
Apache 2.0
+12
gleam.toml
+12
gleam.toml
···
1
+
name = "honk"
2
+
version = "1.0.0"
3
+
description = "ATProtocol lexicon validator for Gleam"
4
+
5
+
[dependencies]
6
+
gleam_stdlib = ">= 0.44.0 and < 2.0.0"
7
+
gleam_json = ">= 3.0.0 and < 4.0.0"
8
+
gleam_regexp = ">= 1.0.0 and < 2.0.0"
9
+
gleam_time = ">= 1.5.0 and < 2.0.0"
10
+
11
+
[dev-dependencies]
12
+
gleeunit = ">= 1.0.0 and < 2.0.0"
+17
manifest.toml
+17
manifest.toml
···
1
+
# This file was generated by Gleam
2
+
# You typically do not need to edit this file
3
+
4
+
packages = [
5
+
{ name = "gleam_json", version = "3.1.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_json", source = "hex", outer_checksum = "44FDAA8847BE8FC48CA7A1C089706BD54BADCC4C45B237A992EDDF9F2CDB2836" },
6
+
{ name = "gleam_regexp", version = "1.1.1", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_regexp", source = "hex", outer_checksum = "9C215C6CA84A5B35BB934A9B61A9A306EC743153BE2B0425A0D032E477B062A9" },
7
+
{ name = "gleam_stdlib", version = "0.65.0", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "7C69C71D8C493AE11A5184828A77110EB05A7786EBF8B25B36A72F879C3EE107" },
8
+
{ name = "gleam_time", version = "1.5.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_time", source = "hex", outer_checksum = "D560E672C7279C89908981E068DF07FD16D0C859DCA266F908B18F04DF0EB8E6" },
9
+
{ name = "gleeunit", version = "1.9.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleeunit", source = "hex", outer_checksum = "DA9553CE58B67924B3C631F96FE3370C49EB6D6DC6B384EC4862CC4AAA718F3C" },
10
+
]
11
+
12
+
[requirements]
13
+
gleam_json = { version = ">= 3.0.0 and < 4.0.0" }
14
+
gleam_regexp = { version = ">= 1.0.0 and < 2.0.0" }
15
+
gleam_stdlib = { version = ">= 0.44.0 and < 2.0.0" }
16
+
gleam_time = { version = ">= 1.5.0 and < 2.0.0" }
17
+
gleeunit = { version = ">= 1.0.0 and < 2.0.0" }
+32
src/errors.gleam
+32
src/errors.gleam
···
1
+
// Error types for lexicon validation

/// The error type returned by every validator in this package.
pub type ValidationError {
  /// No lexicon document is registered for the requested collection NSID.
  LexiconNotFound(collection: String)
  /// The lexicon schema itself is malformed.
  InvalidSchema(message: String)
  /// The data being validated does not conform to its schema.
  DataValidation(message: String)
}

/// Convert error to human-readable string
pub fn to_string(error: ValidationError) -> String {
  case error {
    LexiconNotFound(collection) ->
      "Lexicon not found for collection: " <> collection
    InvalidSchema(message) -> "Invalid lexicon schema: " <> message
    DataValidation(message) -> "Data validation failed: " <> message
  }
}

/// Create an InvalidSchema error with context
pub fn invalid_schema(message: String) -> ValidationError {
  InvalidSchema(message)
}

/// Create a DataValidation error with context
pub fn data_validation(message: String) -> ValidationError {
  DataValidation(message)
}

/// Create a LexiconNotFound error
pub fn lexicon_not_found(collection: String) -> ValidationError {
  LexiconNotFound(collection)
}
+213
src/honk.gleam
+213
src/honk.gleam
···
1
+
// Main public API for the ATProtocol lexicon validator
2
+
3
+
import errors.{type ValidationError}
4
+
import gleam/dict.{type Dict}
5
+
import gleam/json.{type Json}
6
+
import gleam/option.{None, Some}
7
+
import gleam/result
8
+
import json_helpers
9
+
import types
10
+
import validation/context
11
+
import validation/formats
12
+
13
+
// Import validators
14
+
import validation/field as validation_field
15
+
import validation/field/reference as validation_field_reference
16
+
import validation/field/union as validation_field_union
17
+
import validation/meta/token as validation_meta_token
18
+
import validation/meta/unknown as validation_meta_unknown
19
+
import validation/primary/params as validation_primary_params
20
+
import validation/primary/procedure as validation_primary_procedure
21
+
import validation/primary/query as validation_primary_query
22
+
import validation/primary/record as validation_primary_record
23
+
import validation/primary/subscription as validation_primary_subscription
24
+
import validation/primitive/blob as validation_primitive_blob
25
+
import validation/primitive/boolean as validation_primitive_boolean
26
+
import validation/primitive/bytes as validation_primitive_bytes
27
+
import validation/primitive/cid_link as validation_primitive_cid_link
28
+
import validation/primitive/integer as validation_primitive_integer
29
+
import validation/primitive/null as validation_primitive_null
30
+
import validation/primitive/string as validation_primitive_string
31
+
32
+
// Re-export core types

/// A parsed lexicon document (re-exported from `types`).
pub type LexiconDoc =
  types.LexiconDoc

/// The ATProto string formats this package can validate.
///
/// Mirrors `types.StringFormat`; `validate_string_format` maps between
/// the two so callers only depend on the `honk` module.
pub type StringFormat {
  DateTime
  Uri
  AtUri
  Did
  Handle
  AtIdentifier
  Nsid
  Cid
  Language
  Tid
  RecordKey
}

/// Validation state threaded through validators (re-exported from
/// `validation/context`).
pub type ValidationContext =
  context.ValidationContext
52
+
53
+
/// Main validation function for lexicon documents
/// Returns Ok(Nil) if all lexicons are valid
/// Returns Error with a map of lexicon ID to list of error messages;
/// context-construction failures are reported under the "builder" key.
pub fn validate(lexicons: List(Json)) -> Result(Nil, Dict(String, List(String))) {
  // Any failure while assembling the context is reported under "builder",
  // matching the shape of per-lexicon validation errors.
  let as_builder_error = fn(e) {
    dict.from_list([#("builder", [errors.to_string(e)])])
  }

  // Build validation context
  let ctx_result =
    context.builder()
    |> context.with_lexicons(lexicons)
    |> result.try(context.build)
    |> result.map_error(as_builder_error)
  use ctx <- result.try(ctx_result)

  // Validate each lexicon's "main" definition, collecting failures keyed
  // by lexicon ID. The accumulator is named `acc` (not `errors`) so it
  // cannot shadow the imported `errors` module used inside the closure.
  let error_map =
    dict.fold(ctx.lexicons, dict.new(), fn(acc, lex_id, lexicon) {
      case json_helpers.get_field(lexicon.defs, "main") {
        Some(main_def) -> {
          let lex_ctx = context.with_current_lexicon(ctx, lex_id)
          case validate_definition(main_def, lex_ctx) {
            Ok(_) -> acc
            Error(e) -> dict.insert(acc, lex_id, [errors.to_string(e)])
          }
        }
        // No main definition is OK
        None -> acc
      }
    })

  case dict.is_empty(error_map) {
    True -> Ok(Nil)
    False -> Error(error_map)
  }
}
94
+
95
+
/// Validates a single definition based on its type
///
/// Dispatches on the definition's "type" field to the matching
/// type-specific schema validator. A missing or unrecognized type
/// yields an InvalidSchema error.
fn validate_definition(
  def: Json,
  ctx: context.ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  case json_helpers.get_string(def, "type") {
    Some("record") -> validation_primary_record.validate_schema(def, ctx)
    Some("query") -> validation_primary_query.validate_schema(def, ctx)
    Some("procedure") -> validation_primary_procedure.validate_schema(def, ctx)
    Some("subscription") ->
      validation_primary_subscription.validate_schema(def, ctx)
    Some("params") -> validation_primary_params.validate_schema(def, ctx)
    Some("object") -> validation_field.validate_object_schema(def, ctx)
    Some("array") -> validation_field.validate_array_schema(def, ctx)
    Some("union") -> validation_field_union.validate_schema(def, ctx)
    Some("string") -> validation_primitive_string.validate_schema(def, ctx)
    Some("integer") -> validation_primitive_integer.validate_schema(def, ctx)
    Some("boolean") -> validation_primitive_boolean.validate_schema(def, ctx)
    Some("bytes") -> validation_primitive_bytes.validate_schema(def, ctx)
    Some("blob") -> validation_primitive_blob.validate_schema(def, ctx)
    Some("cid-link") -> validation_primitive_cid_link.validate_schema(def, ctx)
    Some("null") -> validation_primitive_null.validate_schema(def, ctx)
    Some("ref") -> validation_field_reference.validate_schema(def, ctx)
    Some("token") -> validation_meta_token.validate_schema(def, ctx)
    Some("unknown") -> validation_meta_unknown.validate_schema(def, ctx)
    Some(unknown_type) ->
      Error(errors.invalid_schema("Unknown type: " <> unknown_type))
    None -> Error(errors.invalid_schema("Definition missing type field"))
  }
}
125
+
126
+
/// Validates a single data record against a collection schema
///
/// Looks up the lexicon registered under `collection`, then checks
/// `record` against that lexicon's "main" record definition.
pub fn validate_record(
  lexicons: List(Json),
  collection: String,
  record: Json,
) -> Result(Nil, ValidationError) {
  // Assemble a validation context holding the supplied lexicons.
  use builder <- result.try(
    context.builder()
    |> context.with_lexicons(lexicons),
  )
  use ctx <- result.try(context.build(builder))

  // Resolve the lexicon, then its main definition, then validate.
  case context.get_lexicon(ctx, collection) {
    None -> Error(errors.lexicon_not_found(collection))
    Some(lexicon) ->
      case json_helpers.get_field(lexicon.defs, "main") {
        None ->
          Error(errors.invalid_schema(
            "Lexicon '" <> collection <> "' has no main definition",
          ))
        Some(main_def) ->
          validation_primary_record.validate_data(
            record,
            main_def,
            context.with_current_lexicon(ctx, collection),
          )
      }
  }
}
159
+
160
+
/// Validates NSID format
///
/// Thin wrapper over `formats.is_valid_nsid` so callers only need to
/// import the `honk` module.
pub fn is_valid_nsid(nsid: String) -> Bool {
  formats.is_valid_nsid(nsid)
}
164
+
165
+
/// Validates a string value against a specific format
///
/// Returns `Ok(Nil)` when the value conforms to the format, otherwise an
/// `Error` carrying a message that names the expected format.
pub fn validate_string_format(
  value: String,
  format: StringFormat,
) -> Result(Nil, String) {
  // Bridge the public StringFormat to the internal types.StringFormat.
  let types_format = case format {
    AtIdentifier -> types.AtIdentifier
    AtUri -> types.AtUri
    Cid -> types.Cid
    DateTime -> types.DateTime
    Did -> types.Did
    Handle -> types.Handle
    Language -> types.Language
    Nsid -> types.Nsid
    RecordKey -> types.RecordKey
    Tid -> types.Tid
    Uri -> types.Uri
  }

  case formats.validate_format(value, types_format) {
    True -> Ok(Nil)
    False ->
      Error(
        "Value does not match format: " <> types.format_to_string(types_format),
      )
  }
}
193
+
194
+
/// Entry point for the honk lexicon validator.
///
/// This function serves as an example entry point and can be used
/// for basic CLI or testing purposes. For actual validation,
/// use the `validate()` or `validate_record()` functions.
///
/// ## Example
///
/// ```gleam
/// import honk
///
/// pub fn main() {
///   honk.main()
/// }
/// ```
pub fn main() -> Nil {
  // This would typically be called from tests or CLI
  // Result is intentionally discarded; this only exercises the API.
  let _example_result = is_valid_nsid("com.example.record")
  Nil
}
+342
src/json_helpers.gleam
+342
src/json_helpers.gleam
···
1
+
// JSON helper utilities for extracting and validating fields
2
+
3
+
import errors.{type ValidationError}
4
+
import gleam/dict.{type Dict}
5
+
import gleam/dynamic.{type Dynamic}
6
+
import gleam/dynamic/decode
7
+
import gleam/json.{type Json}
8
+
import gleam/list
9
+
import gleam/option.{type Option, None, Some}
10
+
import gleam/result
11
+
12
+
/// Parse JSON string to dynamic for decoding
///
/// NOTE(review): this serializes the Json value to a string and re-parses
/// it on every call (O(size of value)); every public helper below invokes
/// it per lookup, so hot paths pay repeated round-trips — worth profiling.
fn json_to_dynamic(json_value: Json) -> Result(Dynamic, String) {
  // Convert JSON to string, then parse it back to dynamic
  let json_str = json.to_string(json_value)
  json.parse(json_str, decode.dynamic)
  |> result.map_error(fn(_) { "Failed to parse JSON" })
}
19
+
20
+
/// Check if a JSON value is null
///
/// Compares the serialized form against the literal "null", which is
/// exact for a bare JSON null value.
pub fn is_null(json_value: Json) -> Bool {
  json.to_string(json_value) == "null"
}
24
+
25
+
/// Runs a decoder against a JSON value and reports whether it succeeds.
///
/// Shared implementation for all the `is_*` type probes below, replacing
/// five copies of the same case pyramid.
fn decodes_as(json_value: Json, decoder: decode.Decoder(t)) -> Bool {
  case json_to_dynamic(json_value) {
    Ok(dyn) -> result.is_ok(decode.run(dyn, decoder))
    Error(_) -> False
  }
}

/// Check if a JSON value is a string
pub fn is_string(json_value: Json) -> Bool {
  decodes_as(json_value, decode.string)
}

/// Check if a JSON value is an integer
pub fn is_int(json_value: Json) -> Bool {
  decodes_as(json_value, decode.int)
}

/// Check if a JSON value is a boolean
pub fn is_bool(json_value: Json) -> Bool {
  decodes_as(json_value, decode.bool)
}

/// Check if a JSON value is an array
pub fn is_array(json_value: Json) -> Bool {
  decodes_as(json_value, decode.list(decode.dynamic))
}

/// Check if a JSON value is an object
pub fn is_object(json_value: Json) -> Bool {
  decodes_as(json_value, decode.dict(decode.string, decode.dynamic))
}
84
+
85
+
/// Decodes a named field of a JSON object with the given decoder.
///
/// Shared implementation for the typed `get_*` accessors below; returns
/// None when the value is not an object, the field is absent, or the
/// field has a different type.
fn decode_field(
  json_value: Json,
  field_name: String,
  decoder: decode.Decoder(t),
) -> Option(t) {
  case json_to_dynamic(json_value) {
    Ok(dyn) ->
      decode.run(dyn, decode.at([field_name], decoder))
      |> option.from_result
    Error(_) -> None
  }
}

/// Get a string field value from a JSON object
pub fn get_string(json_value: Json, field_name: String) -> Option(String) {
  decode_field(json_value, field_name, decode.string)
}

/// Get an integer field value from a JSON object
pub fn get_int(json_value: Json, field_name: String) -> Option(Int) {
  decode_field(json_value, field_name, decode.int)
}

/// Get a boolean field value from a JSON object
pub fn get_bool(json_value: Json, field_name: String) -> Option(Bool) {
  decode_field(json_value, field_name, decode.bool)
}

/// Get an array field value from a JSON object
pub fn get_array(json_value: Json, field_name: String) -> Option(List(Dynamic)) {
  decode_field(json_value, field_name, decode.list(decode.dynamic))
}
134
+
135
+
/// Get all keys from a JSON object
///
/// Returns an empty list when the value cannot be re-parsed or is not an
/// object.
pub fn get_keys(json_value: Json) -> List(String) {
  case json_to_dynamic(json_value) {
    Error(_) -> []
    Ok(dyn) ->
      decode.run(dyn, decode.dict(decode.string, decode.dynamic))
      |> result.map(dict.keys)
      |> result.unwrap([])
  }
}
146
+
147
+
/// Require a string field, returning an error if missing or wrong type
pub fn require_string_field(
  json_value: Json,
  field_name: String,
  def_name: String,
) -> Result(String, ValidationError) {
  get_string(json_value, field_name)
  |> option.to_result(errors.invalid_schema(
    def_name <> ": '" <> field_name <> "' must be a string",
  ))
}

/// Require an integer field, returning an error if missing or wrong type
pub fn require_int_field(
  json_value: Json,
  field_name: String,
  def_name: String,
) -> Result(Int, ValidationError) {
  get_int(json_value, field_name)
  |> option.to_result(errors.invalid_schema(
    def_name <> ": '" <> field_name <> "' must be an integer",
  ))
}

/// Require an array field, returning an error if missing or wrong type
pub fn require_array_field(
  json_value: Json,
  field_name: String,
  def_name: String,
) -> Result(List(Dynamic), ValidationError) {
  get_array(json_value, field_name)
  |> option.to_result(errors.invalid_schema(
    def_name <> ": '" <> field_name <> "' must be an array",
  ))
}
191
+
192
+
/// Get a generic field value from a JSON object (returns Json)
///
/// Extracts the field as a dynamic value and converts it back to Json.
/// Returns None when the field is absent, the value cannot be re-parsed,
/// or the field's value cannot be represented by the conversion.
pub fn get_field(json_value: Json, field_name: String) -> Option(Json) {
  case json_to_dynamic(json_value) {
    Ok(dyn) ->
      case decode.run(dyn, decode.at([field_name], decode.dynamic)) {
        Ok(field_dyn) -> {
          // Convert dynamic back to Json
          case dynamic_to_json(field_dyn) {
            Ok(json) -> Some(json)
            Error(_) -> None
          }
        }
        Error(_) -> None
      }
    Error(_) -> None
  }
}
209
+
210
+
/// Get array from a JSON value that is itself an array (not from a field)
pub fn get_array_from_value(json_value: Json) -> Option(List(Dynamic)) {
  case json_to_dynamic(json_value) {
    Ok(dyn) ->
      decode.run(dyn, decode.list(decode.dynamic))
      |> option.from_result
    Error(_) -> None
  }
}
221
+
222
+
/// Check if dynamic value is JSON null
///
/// Decodes through `decode.optional`, which maps a JSON null to `None`.
/// The previous implementation decoded as a string and compared against
/// the literal "null" — a real null never decodes as a string (so it was
/// reported non-null), while the JSON *string* "null" was wrongly
/// reported as null.
pub fn is_null_dynamic(dyn: Dynamic) -> Bool {
  case decode.run(dyn, decode.optional(decode.dynamic)) {
    Ok(None) -> True
    _ -> False
  }
}
229
+
230
+
/// Convert JSON object to a dictionary
pub fn json_to_dict(
  json_value: Json,
) -> Result(Dict(String, Dynamic), ValidationError) {
  // Re-parse to dynamic, then decode as a string-keyed dictionary.
  use dyn <- result.try(
    json_to_dynamic(json_value)
    |> result.map_error(fn(_) {
      errors.data_validation("Failed to parse JSON as dynamic")
    }),
  )
  decode.run(dyn, decode.dict(decode.string, decode.dynamic))
  |> result.map_error(fn(_) {
    errors.data_validation("Failed to convert JSON to dictionary")
  })
}
244
+
245
+
/// Convert a dynamic value back to Json
/// This works by trying different decoders
///
/// Decoders are attempted in order: string, int, bool, list, dict; the
/// first success wins, and if none succeed a DataValidation error is
/// returned. Lists and dicts are converted recursively.
///
/// NOTE(review): the string branch maps the literal string "null" to JSON
/// null, so a genuine JSON string "null" cannot round-trip — confirm this
/// is intended.
/// NOTE(review): there is no float branch; JSON numbers with a fractional
/// part appear to fall through to the final error — verify against callers.
pub fn dynamic_to_json(dyn: Dynamic) -> Result(Json, ValidationError) {
  // Try null
  case decode.run(dyn, decode.string) {
    Ok(s) -> {
      case s {
        "null" -> Ok(json.null())
        _ -> Ok(json.string(s))
      }
    }
    Error(_) -> {
      // Try number
      case decode.run(dyn, decode.int) {
        Ok(i) -> Ok(json.int(i))
        Error(_) -> {
          // Try boolean
          case decode.run(dyn, decode.bool) {
            Ok(b) -> Ok(json.bool(b))
            Error(_) -> {
              // Try array
              case decode.run(dyn, decode.list(decode.dynamic)) {
                Ok(arr) -> {
                  // Recursively convert array items
                  case list.try_map(arr, dynamic_to_json) {
                    Ok(json_arr) -> Ok(json.array(json_arr, fn(x) { x }))
                    Error(e) -> Error(e)
                  }
                }
                Error(_) -> {
                  // Try object
                  case
                    decode.run(dyn, decode.dict(decode.string, decode.dynamic))
                  {
                    Ok(dict_val) -> {
                      // Convert dict to object
                      let pairs = dict.to_list(dict_val)
                      case
                        list.try_map(pairs, fn(pair) {
                          let #(key, value_dyn) = pair
                          case dynamic_to_json(value_dyn) {
                            Ok(value_json) -> Ok(#(key, value_json))
                            Error(e) -> Error(e)
                          }
                        })
                      {
                        Ok(json_pairs) -> Ok(json.object(json_pairs))
                        Error(e) -> Error(e)
                      }
                    }
                    Error(_) ->
                      Error(errors.data_validation(
                        "Failed to convert dynamic to Json",
                      ))
                  }
                }
              }
            }
          }
        }
      }
    }
  }
}
309
+
310
+
/// Type alias for JSON dictionary
pub type JsonDict =
  Dict(String, Dynamic)

/// Create an empty JSON dictionary
pub fn empty_dict() -> JsonDict {
  dict.new()
}

/// Check if a dictionary has a specific key
pub fn dict_has_key(dict_value: JsonDict, key: String) -> Bool {
  // Delegate to the stdlib predicate instead of matching on dict.get.
  dict.has_key(dict_value, key)
}

/// Fold over a dictionary (wrapper around dict.fold)
pub fn dict_fold(
  dict_value: JsonDict,
  initial: acc,
  folder: fn(acc, String, Dynamic) -> acc,
) -> acc {
  dict.fold(dict_value, initial, folder)
}

/// Get a value from a dictionary
pub fn dict_get(dict_value: JsonDict, key: String) -> Option(Dynamic) {
  // dict.get returns Result(Dynamic, Nil); convert to the Option API
  // this module exposes.
  dict.get(dict_value, key)
  |> option.from_result
}
+58
src/types.gleam
+58
src/types.gleam
···
1
+
// Core types for AT Protocol lexicon validation

import gleam/json.{type Json}

/// Represents a parsed lexicon document
pub type LexiconDoc {
  // id: the lexicon's NSID; defs: the raw "defs" JSON object.
  LexiconDoc(id: String, defs: Json)
}

/// AT Protocol string formats
pub type StringFormat {
  DateTime
  Uri
  AtUri
  Did
  Handle
  AtIdentifier
  Nsid
  Cid
  Language
  Tid
  RecordKey
}

/// Convert a string to a StringFormat
///
/// Returns Error(Nil) for any string that is not a recognized
/// lexicon format name. Inverse of `format_to_string`.
pub fn string_to_format(s: String) -> Result(StringFormat, Nil) {
  case s {
    "datetime" -> Ok(DateTime)
    "uri" -> Ok(Uri)
    "at-uri" -> Ok(AtUri)
    "did" -> Ok(Did)
    "handle" -> Ok(Handle)
    "at-identifier" -> Ok(AtIdentifier)
    "nsid" -> Ok(Nsid)
    "cid" -> Ok(Cid)
    "language" -> Ok(Language)
    "tid" -> Ok(Tid)
    "record-key" -> Ok(RecordKey)
    _ -> Error(Nil)
  }
}

/// Convert a StringFormat to string
///
/// Total inverse of `string_to_format` over recognized names.
pub fn format_to_string(format: StringFormat) -> String {
  case format {
    DateTime -> "datetime"
    Uri -> "uri"
    AtUri -> "at-uri"
    Did -> "did"
    Handle -> "handle"
    AtIdentifier -> "at-identifier"
    Nsid -> "nsid"
    Cid -> "cid"
    Language -> "language"
    Tid -> "tid"
    RecordKey -> "record-key"
  }
}
+199
src/validation/constraints.gleam
+199
src/validation/constraints.gleam
···
1
+
// Reusable constraint validation functions
2
+
3
+
import errors.{type ValidationError}
4
+
import gleam/int
5
+
import gleam/list
6
+
import gleam/option.{type Option, Some}
7
+
import gleam/result
8
+
import gleam/string
9
+
10
+
/// Validates length constraints (minLength/maxLength)
///
/// `actual_length` is checked against the optional bounds; an absent
/// bound is treated as unconstrained. `def_name` and `type_name` are
/// used only to build the error message.
pub fn validate_length_constraints(
  def_name: String,
  actual_length: Int,
  min_length: Option(Int),
  max_length: Option(Int),
  type_name: String,
) -> Result(Nil, ValidationError) {
  // Check minimum length
  case min_length {
    Some(min) if actual_length < min ->
      Error(errors.data_validation(
        def_name
        <> ": "
        <> type_name
        <> " length "
        <> int.to_string(actual_length)
        <> " is less than minLength "
        <> int.to_string(min),
      ))
    _ -> Ok(Nil)
  }
  |> result.try(fn(_) {
    // Check maximum length
    case max_length {
      Some(max) if actual_length > max ->
        Error(errors.data_validation(
          def_name
          <> ": "
          <> type_name
          <> " length "
          <> int.to_string(actual_length)
          <> " exceeds maxLength "
          <> int.to_string(max),
        ))
      _ -> Ok(Nil)
    }
  })
}

/// Validates min/max length consistency
///
/// Schema-level check: rejects a definition whose minLength exceeds its
/// maxLength (the pair can never be satisfied by any value).
pub fn validate_length_constraint_consistency(
  def_name: String,
  min_length: Option(Int),
  max_length: Option(Int),
  type_name: String,
) -> Result(Nil, ValidationError) {
  case min_length, max_length {
    Some(min), Some(max) if min > max ->
      Error(errors.invalid_schema(
        def_name
        <> ": "
        <> type_name
        <> " minLength ("
        <> int.to_string(min)
        <> ") cannot be greater than maxLength ("
        <> int.to_string(max)
        <> ")",
      ))
    _, _ -> Ok(Nil)
  }
}
72
+
73
+
/// Validates integer range constraints
///
/// Checks `value` against the optional inclusive bounds; an absent bound
/// is treated as unconstrained.
pub fn validate_integer_range(
  def_name: String,
  value: Int,
  minimum: Option(Int),
  maximum: Option(Int),
) -> Result(Nil, ValidationError) {
  // Check minimum
  case minimum {
    Some(min) if value < min ->
      Error(errors.data_validation(
        def_name
        <> ": value "
        <> int.to_string(value)
        <> " is less than minimum "
        <> int.to_string(min),
      ))
    _ -> Ok(Nil)
  }
  |> result.try(fn(_) {
    // Check maximum
    case maximum {
      Some(max) if value > max ->
        Error(errors.data_validation(
          def_name
          <> ": value "
          <> int.to_string(value)
          <> " exceeds maximum "
          <> int.to_string(max),
        ))
      _ -> Ok(Nil)
    }
  })
}

/// Validates integer constraint consistency
///
/// Schema-level check: rejects a definition whose minimum exceeds its
/// maximum.
pub fn validate_integer_constraint_consistency(
  def_name: String,
  minimum: Option(Int),
  maximum: Option(Int),
) -> Result(Nil, ValidationError) {
  case minimum, maximum {
    Some(min), Some(max) if min > max ->
      Error(errors.invalid_schema(
        def_name
        <> ": minimum ("
        <> int.to_string(min)
        <> ") cannot be greater than maximum ("
        <> int.to_string(max)
        <> ")",
      ))
    _, _ -> Ok(Nil)
  }
}
127
+
128
+
/// Validates enum constraints
/// The value must be one of the allowed values
/// Note: Gleam doesn't have trait bounds, so we pass a comparison function
///
/// `to_string` is only used to render the offending value in the error
/// message; `equal` defines membership.
pub fn validate_enum_constraint(
  def_name: String,
  value: a,
  enum_values: List(a),
  type_name: String,
  to_string: fn(a) -> String,
  equal: fn(a, a) -> Bool,
) -> Result(Nil, ValidationError) {
  let found = list.any(enum_values, fn(enum_val) { equal(value, enum_val) })

  case found {
    True -> Ok(Nil)
    False ->
      Error(errors.data_validation(
        def_name
        <> ": "
        <> type_name
        <> " value '"
        <> to_string(value)
        <> "' is not in enum",
      ))
  }
}
154
+
155
+
/// Validates const/default mutual exclusivity
///
/// A definition may carry at most one of 'const' and 'default'; both
/// present is a schema error.
pub fn validate_const_default_exclusivity(
  def_name: String,
  has_const: Bool,
  has_default: Bool,
  type_name: String,
) -> Result(Nil, ValidationError) {
  case has_const && has_default {
    False -> Ok(Nil)
    True ->
      Error(errors.invalid_schema(
        def_name
        <> ": "
        <> type_name
        <> " cannot have both 'const' and 'default'",
      ))
  }
}
173
+
174
+
/// Validates that only allowed fields are present in a schema object
///
/// Any field in `actual_fields` not listed in `allowed_fields` is
/// reported; the error message enumerates both the unknown and the
/// allowed field names.
pub fn validate_allowed_fields(
  def_name: String,
  actual_fields: List(String),
  allowed_fields: List(String),
  type_name: String,
) -> Result(Nil, ValidationError) {
  let unknown_fields =
    list.filter(actual_fields, fn(field) {
      !list.contains(allowed_fields, field)
    })

  case unknown_fields {
    [] -> Ok(Nil)
    fields ->
      Error(errors.invalid_schema(
        def_name
        <> ": "
        <> type_name
        <> " has unknown fields: "
        <> string.join(fields, ", ")
        <> ". Allowed fields: "
        <> string.join(allowed_fields, ", "),
      ))
  }
}
+332
src/validation/context.gleam
+332
src/validation/context.gleam
···
1
+
// Validation context and builder
2
+
3
+
import errors.{type ValidationError}
4
+
import gleam/dict.{type Dict}
5
+
import gleam/json.{type Json}
6
+
import gleam/list
7
+
import gleam/option.{type Option, None, Some}
8
+
import gleam/result
9
+
import gleam/set.{type Set}
10
+
import gleam/string
11
+
import json_helpers
12
+
import types.{type LexiconDoc, LexiconDoc}
13
+
import validation/formats
14
+
15
+
/// Validation context that tracks state during validation.
///
/// Carries everything a validator needs while walking a schema/data tree:
/// the known lexicons, the current error-message path, the lexicon whose
/// local references are in scope, the set of in-flight references (for
/// cycle detection), and the recursive dispatch function.
pub type ValidationContext {
  ValidationContext(
    // Map of lexicon ID (NSID) to parsed lexicon document
    lexicons: Dict(String, LexiconDoc),
    // Current dotted path in the data structure (for error messages)
    path: String,
    // Current lexicon ID (for resolving local `#def` references)
    current_lexicon_id: Option(String),
    // Set of references currently being resolved (circular-ref detection)
    reference_stack: Set(String),
    // Recursive validator function for dispatching to type-specific validators
    // Parameters: data (Json), schema (Json), ctx (ValidationContext)
    validator: fn(Json, Json, ValidationContext) -> Result(Nil, ValidationError),
  )
}
31
+
32
+
/// Builder for constructing ValidationContext.
///
/// Accumulates lexicons and an optional validator function; `build` turns
/// it into a ValidationContext (a missing validator becomes a no-op there).
pub type ValidationContextBuilder {
  ValidationContextBuilder(
    // Lexicons registered so far, keyed by NSID
    lexicons: Dict(String, LexiconDoc),
    // Parameters: data (Json), schema (Json), ctx (ValidationContext)
    validator: Option(
      fn(Json, Json, ValidationContext) -> Result(Nil, ValidationError),
    ),
  )
}
42
+
43
+
/// Starts a fresh ValidationContextBuilder: no lexicons, no validator.
///
/// Chain with `with_lexicons`, `with_validator`, and finish with `build`.
///
/// ## Example
///
/// ```gleam
/// let assert Ok(ctx) =
///   context.builder()
///   |> context.with_validator(field.dispatch_data_validation)
///   |> context.with_lexicons([my_lexicon])
///   |> context.build
/// ```
pub fn builder() -> ValidationContextBuilder {
  ValidationContextBuilder(dict.new(), None)
}
60
+
61
+
/// Adds a list of lexicon JSON documents to the builder.
///
/// Each lexicon must have an 'id' field (valid NSID) and a 'defs' object
/// containing type definitions. The first invalid lexicon aborts the fold
/// and its error is returned.
///
/// ## Example
///
/// ```gleam
/// let lexicon = json.object([
///   #("lexicon", json.int(1)),
///   #("id", json.string("com.example.post")),
///   #("defs", json.object([...])),
/// ])
///
/// let assert Ok(builder) =
///   context.builder()
///   |> context.with_lexicons([lexicon])
/// ```
pub fn with_lexicons(
  builder: ValidationContextBuilder,
  lexicons: List(Json),
) -> Result(ValidationContextBuilder, ValidationError) {
  list.try_fold(lexicons, builder, fn(acc, lex_json) {
    // parse_lexicon validates the NSID 'id' and the 'defs' object.
    use doc <- result.map(parse_lexicon(lex_json))
    ValidationContextBuilder(
      ..acc,
      lexicons: dict.insert(acc.lexicons, doc.id, doc),
    )
  })
}
96
+
97
+
/// Installs the recursive validator function on the builder.
///
/// Parameters of the function: data (Json), schema (Json),
/// ctx (ValidationContext).
pub fn with_validator(
  builder: ValidationContextBuilder,
  validator: fn(Json, Json, ValidationContext) -> Result(Nil, ValidationError),
) -> ValidationContextBuilder {
  ValidationContextBuilder(..builder, validator: Some(validator))
}
105
+
106
+
/// Builds the final ValidationContext from the builder.
///
/// When no validator was supplied via `with_validator`, a no-op validator
/// (always `Ok(Nil)`) is substituted. The resulting context starts with an
/// empty path, no current lexicon, and an empty reference stack.
///
/// ## Example
///
/// ```gleam
/// let assert Ok(ctx) =
///   context.builder()
///   |> context.with_validator(field.dispatch_data_validation)
///   |> context.with_lexicons([lexicon])
///   |> context.build
/// ```
pub fn build(
  builder: ValidationContextBuilder,
) -> Result(ValidationContext, ValidationError) {
  // Fall back to a validator that accepts everything.
  let noop = fn(_data, _schema, _ctx) { Ok(Nil) }
  Ok(ValidationContext(
    lexicons: builder.lexicons,
    path: "",
    current_lexicon_id: None,
    reference_stack: set.new(),
    validator: option.unwrap(builder.validator, noop),
  ))
}
137
+
138
+
/// Retrieves a lexicon document by its NSID from the validation context.
///
/// Returns `None` when the lexicon was never registered. Use this when
/// resolving cross-lexicon references.
///
/// ## Example
///
/// ```gleam
/// case context.get_lexicon(ctx, "com.example.post") {
///   Some(lexicon) -> // Use the lexicon
///   None -> // Lexicon not found
/// }
/// ```
pub fn get_lexicon(ctx: ValidationContext, id: String) -> Option(LexiconDoc) {
  dict.get(ctx.lexicons, id)
  |> option.from_result
}
157
+
158
+
/// Returns the current validation path within the data structure.
///
/// The path pinpoints where in a nested structure validation failed,
/// e.g. "defs.post.properties.text".
pub fn path(ctx: ValidationContext) -> String {
  ctx.path
}
172
+
173
+
/// Creates a new context with `segment` appended to the current path.
///
/// A "." separator is inserted unless the current path is empty, so the
/// very first segment does not get a leading dot.
///
/// ## Example
///
/// ```gleam
/// let nested_ctx = context.with_path(ctx, "properties.name")
/// // New path might be "defs.user.properties.name"
/// ```
pub fn with_path(ctx: ValidationContext, segment: String) -> ValidationContext {
  let prefix = case ctx.path {
    "" -> ""
    existing -> existing <> "."
  }
  ValidationContext(..ctx, path: prefix <> segment)
}
191
+
192
+
/// Returns the ID of the lexicon currently being validated, if any.
///
/// Local references (e.g. `#post`) are resolved relative to this lexicon.
pub fn current_lexicon_id(ctx: ValidationContext) -> Option(String) {
  ctx.current_lexicon_id
}
208
+
209
+
/// Returns a copy of the context whose current lexicon is `lexicon_id`.
///
/// Set this before validating into another lexicon so that local `#def`
/// references resolve against the right document.
pub fn with_current_lexicon(
  ctx: ValidationContext,
  lexicon_id: String,
) -> ValidationContext {
  ValidationContext(..ctx, current_lexicon_id: Some(lexicon_id))
}
226
+
227
+
/// Records `reference` as currently being resolved.
///
/// Together with `has_reference` this forms the cycle detector: a reference
/// seen twice on the stack means the schema graph loops back on itself.
pub fn with_reference(
  ctx: ValidationContext,
  reference: String,
) -> ValidationContext {
  let tracked = set.insert(ctx.reference_stack, reference)
  ValidationContext(..ctx, reference_stack: tracked)
}
248
+
249
+
/// Reports whether `reference` is already being resolved.
///
/// `True` indicates a circular reference: continuing to resolve it would
/// recurse forever, so callers should return an error instead.
pub fn has_reference(ctx: ValidationContext, reference: String) -> Bool {
  set.contains(ctx.reference_stack, reference)
}
266
+
267
+
/// Parse a reference string into #(lexicon_id, definition).
///
/// Accepted forms: `#def` (local, resolved against the current lexicon),
/// `nsid#def` (fully qualified), and bare `nsid` (implies `#main`).
pub fn parse_reference(
  ctx: ValidationContext,
  reference: String,
) -> Result(#(String, String), ValidationError) {
  case string.split(reference, "#") {
    // Local reference: needs a current lexicon to resolve against.
    ["", def] ->
      ctx.current_lexicon_id
      |> option.map(fn(lex_id) { #(lex_id, def) })
      |> option.to_result(errors.invalid_schema(
        "Local reference '"
        <> reference
        <> "' used without current lexicon context",
      ))
    // Fully-qualified reference: nsid#def
    [nsid, def] if nsid != "" && def != "" -> Ok(#(nsid, def))
    // Bare NSID: implicit #main
    [nsid] if nsid != "" -> Ok(#(nsid, "main"))
    // Anything else (multiple '#', empty parts, ...) is malformed.
    _ -> Error(errors.invalid_schema("Invalid reference format: " <> reference))
  }
}
293
+
294
+
/// Parses a raw lexicon JSON document into a LexiconDoc.
///
/// Requires an 'id' field holding a valid NSID and a 'defs' field holding
/// an object; each failure is reported as an invalid-schema error.
fn parse_lexicon(lex_json: Json) -> Result(LexiconDoc, ValidationError) {
  // 'id' is required and must be a string.
  use id <- result.try(
    json_helpers.get_string(lex_json, "id")
    |> option.to_result(errors.invalid_schema(
      "Lexicon missing required 'id' field",
    )),
  )

  // 'id' must also be a syntactically valid NSID.
  use _ <- result.try(case formats.is_valid_nsid(id) {
    True -> Ok(Nil)
    False ->
      Error(errors.invalid_schema(
        "Lexicon 'id' field is not a valid NSID: " <> id,
      ))
  })

  // 'defs' is required...
  use defs <- result.try(
    json_helpers.get_field(lex_json, "defs")
    |> option.to_result(errors.invalid_schema(
      "Lexicon missing required 'defs' field at " <> id,
    )),
  )

  // ...and must be a JSON object.
  case json_helpers.is_object(defs) {
    True -> Ok(LexiconDoc(id: id, defs: defs))
    False ->
      Error(errors.invalid_schema("Lexicon 'defs' must be an object at " <> id))
  }
}
+598
src/validation/field.gleam
+598
src/validation/field.gleam
···
1
+
// Field type validators (object and array)
2
+
3
+
import errors.{type ValidationError}
4
+
import gleam/dict
5
+
import gleam/dynamic.{type Dynamic}
6
+
import gleam/dynamic/decode
7
+
import gleam/int
8
+
import gleam/json.{type Json}
9
+
import gleam/list
10
+
import gleam/option.{None, Some}
11
+
import gleam/result
12
+
import json_helpers
13
+
import validation/constraints
14
+
import validation/context.{type ValidationContext}
15
+
16
+
// Import primitive validators
17
+
import validation/primitive/blob
18
+
import validation/primitive/boolean
19
+
import validation/primitive/bytes
20
+
import validation/primitive/cid_link
21
+
import validation/primitive/integer
22
+
import validation/primitive/null
23
+
import validation/primitive/string
24
+
25
+
// Import other field validators
26
+
import validation/field/reference
27
+
import validation/field/union
28
+
29
+
// Import meta validators
30
+
import validation/meta/token
31
+
import validation/meta/unknown
32
+
33
+
// ============================================================================
34
+
// SHARED TYPE DISPATCHER
35
+
// ============================================================================
36
+
37
+
/// Routes schema validation to the validator for the schema's `type` field.
///
/// Covers containers (object/array), primitives, composition (union/ref)
/// and meta types (token/unknown); anything else is an invalid schema.
fn dispatch_schema_validation(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let schema_type = json_helpers.get_string(schema, "type")
  case schema_type {
    // Containers
    Some("object") -> validate_object_schema(schema, ctx)
    Some("array") -> validate_array_schema(schema, ctx)
    // Primitives
    Some("string") -> string.validate_schema(schema, ctx)
    Some("integer") -> integer.validate_schema(schema, ctx)
    Some("boolean") -> boolean.validate_schema(schema, ctx)
    Some("bytes") -> bytes.validate_schema(schema, ctx)
    Some("blob") -> blob.validate_schema(schema, ctx)
    Some("cid-link") -> cid_link.validate_schema(schema, ctx)
    Some("null") -> null.validate_schema(schema, ctx)
    // Composition
    Some("union") -> union.validate_schema(schema, ctx)
    Some("ref") -> reference.validate_schema(schema, ctx)
    // Meta
    Some("token") -> token.validate_schema(schema, ctx)
    Some("unknown") -> unknown.validate_schema(schema, ctx)
    // Unrecognized or absent type field
    Some(unknown_type) ->
      Error(errors.invalid_schema(
        context.path(ctx) <> ": unknown type '" <> unknown_type <> "'",
      ))
    None ->
      Error(errors.invalid_schema(
        context.path(ctx) <> ": schema missing type field",
      ))
  }
}
67
+
68
+
/// Dispatches data validation to the appropriate type-specific validator.
///
/// Central router keyed on the schema's `type` field: primitives (string,
/// integer, boolean, bytes, blob, cid-link, null), field types (object,
/// array, union, ref), and meta types (token, unknown).
///
/// Public so reference validators can recursively validate resolved
/// schemas; typically installed on the context via
/// `context.with_validator(field.dispatch_data_validation)`.
///
/// ## Example
///
/// ```gleam
/// let schema = json.object([
///   #("type", json.string("string")),
///   #("maxLength", json.int(100)),
/// ])
/// let data = json.string("Hello")
///
/// field.dispatch_data_validation(data, schema, ctx)
/// // => Ok(Nil) if valid, Error(...) if invalid
/// ```
pub fn dispatch_data_validation(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let schema_type = json_helpers.get_string(schema, "type")
  case schema_type {
    // Containers
    Some("object") -> validate_object_data(data, schema, ctx)
    Some("array") -> validate_array_data(data, schema, ctx)
    // Primitives
    Some("string") -> string.validate_data(data, schema, ctx)
    Some("integer") -> integer.validate_data(data, schema, ctx)
    Some("boolean") -> boolean.validate_data(data, schema, ctx)
    Some("bytes") -> bytes.validate_data(data, schema, ctx)
    Some("blob") -> blob.validate_data(data, schema, ctx)
    Some("cid-link") -> cid_link.validate_data(data, schema, ctx)
    Some("null") -> null.validate_data(data, schema, ctx)
    // Composition
    Some("union") -> union.validate_data(data, schema, ctx)
    Some("ref") -> reference.validate_data(data, schema, ctx)
    // Meta
    Some("token") -> token.validate_data(data, schema, ctx)
    Some("unknown") -> unknown.validate_data(data, schema, ctx)
    // Unrecognized or absent type field
    Some(unknown_type) ->
      Error(errors.data_validation(
        "Unknown schema type '"
        <> unknown_type
        <> "' at '"
        <> context.path(ctx)
        <> "'",
      ))
    None ->
      Error(errors.data_validation(
        "Schema missing type field at '" <> context.path(ctx) <> "'",
      ))
  }
}
123
+
124
+
// ============================================================================
125
+
// OBJECT VALIDATOR
126
+
// ============================================================================
127
+
128
+
// Fields permitted on an `object` schema definition; anything else is
// rejected by validate_allowed_fields.
const object_allowed_fields = [
  "type", "properties", "required", "nullable", "description",
]
131
+
132
+
/// Validates an `object` schema definition.
///
/// Checks, in order: only allowed fields are present; `properties` is not
/// an array; `required` and `nullable` entries are strings; and each
/// property schema is itself valid (recursive via the shared dispatcher).
pub fn validate_object_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    object_allowed_fields,
    "object",
  ))

  // `properties` must be an object, so reject the array form outright.
  // (Previously this threaded a dead `Ok(None)`/`Ok(None)` case through an
  // unused Option binding; both branches were identical.)
  use _ <- result.try(case json_helpers.get_array(schema, "properties") {
    Some(_) ->
      Error(errors.invalid_schema(
        def_name <> ": properties must be an object, not an array",
      ))
    None -> Ok(Nil)
  })

  // Validate required fields are all strings
  use _ <- result.try(case json_helpers.get_array(schema, "required") {
    Some(required_array) -> validate_required_fields(def_name, required_array)
    None -> Ok(Nil)
  })

  // Validate nullable fields are all strings
  use _ <- result.try(case json_helpers.get_array(schema, "nullable") {
    Some(nullable_array) -> validate_nullable_fields(def_name, nullable_array)
    None -> Ok(Nil)
  })

  // Validate each property schema recursively; a non-object `properties`
  // value (other than array, rejected above) is silently ignored here.
  case json_helpers.get_field(schema, "properties") {
    Some(properties) ->
      case json_helpers.is_object(properties) {
        True -> validate_property_schemas(properties, ctx)
        False -> Ok(Nil)
      }
    None -> Ok(Nil)
  }
}
189
+
190
+
/// Validates object data against an `object` schema.
///
/// Steps: confirm the data is a JSON object, check all `required` fields
/// are present, collect `nullable` field names, then validate every data
/// property that has a schema entry (unknown properties are allowed).
pub fn validate_object_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Check data is an object; otherwise report the actual JSON type.
  case json_helpers.is_object(data) {
    False -> {
      let type_name = get_type_name(data)
      Error(errors.data_validation(
        "Expected object at '" <> def_name <> "', found " <> type_name,
      ))
    }
    True -> {
      // Check required fields are present
      use _ <- result.try(case json_helpers.get_array(schema, "required") {
        Some(required_array) ->
          validate_required_fields_in_data(def_name, required_array, data)
        None -> Ok(Nil)
      })

      // Get nullable field names for lookup; non-string entries are dropped.
      let nullable_fields = case json_helpers.get_array(schema, "nullable") {
        Some(nullable_array) ->
          list.filter_map(nullable_array, fn(item) {
            decode.run(item, decode.string)
          })
        None -> []
      }

      // Validate each property in data against its schema; a schema with no
      // `properties` accepts any object contents.
      case json_helpers.get_field(schema, "properties") {
        Some(properties) -> {
          validate_properties_data(data, properties, nullable_fields, ctx)
        }
        None -> Ok(Nil)
      }
    }
  }
}
233
+
234
+
/// Checks that every entry in a `required` list decodes as a string.
///
/// Note: name-vs-properties cross-checking is not performed here
/// (simplified check, matching the nullable variant).
fn validate_required_fields(
  def_name: String,
  required: List(Dynamic),
) -> Result(Nil, ValidationError) {
  let all_strings =
    list.all(required, fn(item) {
      result.is_ok(decode.run(item, decode.string))
    })
  case all_strings {
    True -> Ok(Nil)
    False ->
      Error(errors.invalid_schema(
        def_name <> ": required fields must be strings",
      ))
  }
}
253
+
254
+
/// Checks that every entry in a `nullable` list decodes as a string.
///
/// Note: name-vs-properties cross-checking is not performed here
/// (simplified check, matching the required variant).
fn validate_nullable_fields(
  def_name: String,
  nullable: List(Dynamic),
) -> Result(Nil, ValidationError) {
  let all_strings =
    list.all(nullable, fn(item) {
      result.is_ok(decode.run(item, decode.string))
    })
  case all_strings {
    True -> Ok(Nil)
    False ->
      Error(errors.invalid_schema(
        def_name <> ": nullable fields must be strings",
      ))
  }
}
273
+
274
+
/// Checks that every `required` field name is present in the data object.
///
/// Bug fix: this previously used `json_helpers.get_string`, which returns
/// `None` for fields that are present but hold a non-string value, so a
/// required integer/object/array field was wrongly reported as missing.
/// `get_field` tests presence only; the value's type is checked later when
/// the property's own schema is validated.
fn validate_required_fields_in_data(
  def_name: String,
  required: List(Dynamic),
  data: Json,
) -> Result(Nil, ValidationError) {
  // Convert dynamics to strings; non-string entries are dropped (they are
  // already rejected during schema validation).
  let field_names =
    list.filter_map(required, fn(item) { decode.run(item, decode.string) })

  // Check each required field exists in data (presence check only)
  list.try_fold(field_names, Nil, fn(_, field_name) {
    case json_helpers.get_field(data, field_name) {
      Some(_) -> Ok(Nil)
      None ->
        Error(errors.data_validation(
          def_name <> ": required field '" <> field_name <> "' is missing",
        ))
    }
  })
}
297
+
298
+
/// Validates every property schema inside an object's `properties` map.
///
/// Each property is validated with "properties.<name>" appended to the
/// context path; the first failing property aborts the fold.
fn validate_property_schemas(
  properties: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  use props_dict <- result.try(json_helpers.json_to_dict(properties))
  dict.fold(props_dict, Ok(Nil), fn(acc, prop_name, raw_schema) {
    use _ <- result.try(acc)
    use prop_schema <- result.try(json_helpers.dynamic_to_json(raw_schema))
    let nested_ctx = context.with_path(ctx, "properties." <> prop_name)
    validate_single_property_schema(prop_schema, nested_ctx)
  })
}
321
+
322
+
/// Thin wrapper: a property schema goes through the shared type dispatcher.
fn validate_single_property_schema(
  prop_schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  dispatch_schema_validation(prop_schema, ctx)
}
329
+
330
+
/// Validates all properties in data against their schemas.
///
/// Walks every field of the data object: fields with a matching entry in
/// `properties` are validated against it (nulls are accepted only for
/// names in `nullable_fields`); fields without a schema entry are allowed
/// unchanged (open schema). The first failure short-circuits the fold.
fn validate_properties_data(
  data: Json,
  properties: Json,
  nullable_fields: List(String),
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Convert the data object to a dict of field name -> dynamic value
  case json_helpers.json_to_dict(data) {
    Ok(data_dict) -> {
      // Convert the properties schema to a dict of name -> dynamic schema
      case json_helpers.json_to_dict(properties) {
        Ok(props_dict) -> {
          // Iterate through data fields, threading the first error via acc
          dict.fold(data_dict, Ok(Nil), fn(acc, field_name, field_value) {
            use _ <- result.try(acc)
            // Check if field has a schema definition
            case dict.get(props_dict, field_name) {
              Ok(field_schema_dyn) -> {
                // Convert dynamic schema to Json
                case json_helpers.dynamic_to_json(field_schema_dyn) {
                  Ok(field_schema) -> {
                    let nested_ctx = context.with_path(ctx, field_name)
                    // Null values are handled before normal validation
                    case json_helpers.is_null_dynamic(field_value) {
                      True -> {
                        // Null is only legal for declared nullable fields
                        case list.contains(nullable_fields, field_name) {
                          True -> Ok(Nil)
                          False ->
                            Error(errors.data_validation(
                              "Field '"
                              <> field_name
                              <> "' at '"
                              <> context.path(ctx)
                              <> "' cannot be null",
                            ))
                        }
                      }
                      False -> {
                        // Non-null: validate the field value against its schema
                        case json_helpers.dynamic_to_json(field_value) {
                          Ok(field_value_json) ->
                            validate_single_property_data(
                              field_value_json,
                              field_schema,
                              nested_ctx,
                            )
                          Error(e) -> Error(e)
                        }
                      }
                    }
                  }
                  Error(e) -> Error(e)
                }
              }
              Error(_) -> {
                // Unknown fields are allowed in objects (open schema)
                Ok(Nil)
              }
            }
          })
        }
        Error(e) -> Error(e)
      }
    }
    Error(e) -> Error(e)
  }
}
399
+
400
+
/// Thin wrapper: a property's data goes through the shared data dispatcher.
fn validate_single_property_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  dispatch_data_validation(data, schema, ctx)
}
408
+
409
+
// ============================================================================
410
+
// ARRAY VALIDATOR
411
+
// ============================================================================
412
+
413
+
// Fields permitted on an `array` schema definition; anything else is
// rejected by validate_allowed_fields.
const array_allowed_fields = [
  "type", "items", "minLength", "maxLength", "description",
]
416
+
417
+
/// Validates an `array` schema definition.
///
/// Checks only allowed fields are present, validates the required `items`
/// sub-schema recursively, and verifies minLength/maxLength consistency.
pub fn validate_array_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    array_allowed_fields,
    "array",
  ))

  // Validate required 'items' field
  let items = case json_helpers.get_field(schema, "items") {
    Some(items_value) -> Ok(items_value)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": array missing required 'items' field",
      ))
  }
  use items_schema <- result.try(items)

  // Recursively validate the items schema definition.
  // Bug fix: the segment was ".items", which `with_path` (it inserts the
  // "." separator itself) joined into a doubled-dot path like "foo..items".
  let nested_ctx = context.with_path(ctx, "items")
  use _ <- result.try(validate_array_item_schema(items_schema, nested_ctx))

  // Validate that minLength/maxLength are mutually consistent
  let min_length = json_helpers.get_int(schema, "minLength")
  let max_length = json_helpers.get_int(schema, "maxLength")
  constraints.validate_length_constraint_consistency(
    def_name,
    min_length,
    max_length,
    "array",
  )
}
462
+
463
+
/// Validates array data against an `array` schema.
///
/// Confirms the data is an array, enforces minLength/maxLength against the
/// element count, then validates each element against the `items` schema
/// with "[index]" appended to the path for error reporting.
pub fn validate_array_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Data must be an array; otherwise report the actual JSON type.
  case json_helpers.is_array(data) {
    False -> {
      let type_name = get_type_name(data)
      Error(errors.data_validation(
        def_name <> ": expected array, found " <> type_name,
      ))
    }
    True -> {
      // Extract the element list from the JSON value
      let data_array = case json_helpers.get_array_from_value(data) {
        Some(arr) -> Ok(arr)
        None ->
          Error(errors.data_validation(def_name <> ": failed to parse array"))
      }

      use arr <- result.try(data_array)

      let array_length = list.length(arr)

      // Validate minLength constraint (absent means unconstrained)
      use _ <- result.try(case json_helpers.get_int(schema, "minLength") {
        Some(min_length) ->
          case array_length < min_length {
            True ->
              Error(errors.data_validation(
                def_name
                <> ": array has length "
                <> int.to_string(array_length)
                <> " but minimum length is "
                <> int.to_string(min_length),
              ))
            False -> Ok(Nil)
          }
        None -> Ok(Nil)
      })

      // Validate maxLength constraint (absent means unconstrained)
      use _ <- result.try(case json_helpers.get_int(schema, "maxLength") {
        Some(max_length) ->
          case array_length > max_length {
            True ->
              Error(errors.data_validation(
                def_name
                <> ": array has length "
                <> int.to_string(array_length)
                <> " but maximum length is "
                <> int.to_string(max_length),
              ))
            False -> Ok(Nil)
          }
        None -> Ok(Nil)
      })

      // Validate each array item against the items schema; a schema with no
      // `items` accepts any elements.
      case json_helpers.get_field(schema, "items") {
        Some(items_schema) -> {
          // Validate each item with its index in the path, e.g. "[3]"
          list.index_fold(arr, Ok(Nil), fn(acc, item, index) {
            use _ <- result.try(acc)
            let nested_ctx =
              context.with_path(ctx, "[" <> int.to_string(index) <> "]")
            validate_array_item_data(item, items_schema, nested_ctx)
          })
        }
        None -> Ok(Nil)
      }
    }
  }
}
541
+
542
+
/// Validates an items schema definition recursively.
///
/// Simplification: the former special case for `"ref"` duplicated
/// `dispatch_schema_validation`, which already routes `Some("ref")` to
/// `reference.validate_schema`; dispatching directly is equivalent.
fn validate_array_item_schema(
  items_schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  dispatch_schema_validation(items_schema, ctx)
}
553
+
554
+
/// Validates runtime data against an items schema using recursive validation.
///
/// Simplification: the former special case for `"ref"` duplicated
/// `dispatch_data_validation`, which already routes `Some("ref")` to
/// `reference.validate_data`; dispatching directly is equivalent.
fn validate_array_item_data(
  item: Dynamic,
  items_schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Convert the dynamic element to Json before validating
  use item_value <- result.try(json_helpers.dynamic_to_json(item))
  dispatch_data_validation(item_value, items_schema, ctx)
}
571
+
572
+
// ============================================================================
573
+
// SHARED HELPERS
574
+
// ============================================================================
575
+
576
+
/// Names the JSON type of a value for error messages.
///
/// Checks null, boolean, number, string, array in order; anything that
/// matches none of these is reported as "object".
fn get_type_name(data: Json) -> String {
  case
    json_helpers.is_null(data),
    json_helpers.is_bool(data),
    json_helpers.is_int(data),
    json_helpers.is_string(data),
    json_helpers.is_array(data)
  {
    True, _, _, _, _ -> "null"
    _, True, _, _, _ -> "boolean"
    _, _, True, _, _ -> "number"
    _, _, _, True, _ -> "string"
    _, _, _, _, True -> "array"
    _, _, _, _, _ -> "object"
  }
}
+176
src/validation/field/reference.gleam
+176
src/validation/field/reference.gleam
···
1
+
// Reference type validator
2
+
3
+
import errors.{type ValidationError}
4
+
import gleam/json.{type Json}
5
+
import gleam/option.{None, Some}
6
+
import gleam/result
7
+
import gleam/string
8
+
import json_helpers
9
+
import validation/constraints
10
+
import validation/context.{type ValidationContext}
11
+
import validation/resolution
12
+
13
+
// Fields permitted on a `ref` schema definition.
const allowed_fields = ["type", "ref", "description"]
14
+
15
+
/// Validates a `ref` schema definition.
///
/// Checks only allowed fields are present, requires a 'ref' string field,
/// and verifies the reference's syntax.
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    json_helpers.get_keys(schema),
    allowed_fields,
    "ref",
  ))

  // The 'ref' field is required and must be a string.
  use ref_str <- result.try(case json_helpers.get_string(schema, "ref") {
    Some(found) -> Ok(found)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": ref missing required 'ref' field",
      ))
  })

  // Finally check the reference string itself is well-formed.
  validate_ref_syntax(ref_str, def_name)
}
45
+
46
+
/// Validates data against the referenced schema
/// Uses the validator from the context for recursive validation
///
/// Steps: read the 'ref' string from the schema, guard against circular
/// references via the context's reference stack, resolve the target schema
/// through the current lexicon, then recurse with the context's validator.
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Get the reference string
  use ref_str <- result.try(case json_helpers.get_string(schema, "ref") {
    Some(ref_str) -> Ok(ref_str)
    None ->
      Error(errors.data_validation(
        def_name <> ": ref schema missing 'ref' field",
      ))
  })

  // Check for circular references: if this ref is already on the stack,
  // following it again would loop forever.
  case context.has_reference(ctx, ref_str) {
    True ->
      Error(errors.data_validation(
        def_name <> ": circular reference detected: " <> ref_str,
      ))
    False -> {
      // Add to reference stack so nested refs can detect cycles
      let ref_ctx = context.with_reference(ctx, ref_str)

      // Get current lexicon ID (required so the resolver knows which
      // lexicon a local "#def" reference belongs to)
      use lex_id <- result.try(case context.current_lexicon_id(ref_ctx) {
        Some(id) -> Ok(id)
        None ->
          Error(errors.data_validation(
            def_name <> ": no current lexicon set for resolving reference",
          ))
      })

      // Resolve the reference to get the target schema
      use resolved_opt <- result.try(resolution.resolve_reference(
        ref_str,
        ref_ctx,
        lex_id,
      ))

      // A ref that resolves to nothing is a data-validation failure here.
      use resolved_schema <- result.try(case resolved_opt {
        Some(schema) -> Ok(schema)
        None ->
          Error(errors.data_validation(
            def_name <> ": reference not found: " <> ref_str,
          ))
      })

      // Recursively validate data against the resolved schema.
      // The validator function is carried in the context — presumably to
      // avoid an import cycle with the top-level dispatcher; confirm.
      let validator = ref_ctx.validator
      validator(data, resolved_schema, ref_ctx)
    }
  }
}
105
+
106
+
/// Validates reference syntax
///
/// Accepts local references ("#name"), global references with a fragment
/// ("nsid#name"), and bare global main references ("nsid").
fn validate_ref_syntax(
  ref_str: String,
  def_name: String,
) -> Result(Nil, ValidationError) {
  case ref_str {
    "" ->
      Error(errors.invalid_schema(def_name <> ": reference cannot be empty"))
    // Local reference: everything after '#' must be a non-empty name.
    "#" <> def_part ->
      case def_part {
        "" ->
          Error(errors.invalid_schema(
            def_name
            <> ": local reference must have a definition name after #",
          ))
        _ -> Ok(Nil)
      }
    _ ->
      case string.contains(ref_str, "#") {
        // Global reference with fragment
        True -> validate_global_ref_with_fragment(ref_str, def_name)
        // Global main reference — NSID format validation would go here.
        False -> Ok(Nil)
      }
  }
}
146
+
147
+
/// Validates global reference with fragment (e.g., "com.example.lexicon#def")
///
/// Splits on '#' and requires exactly two non-empty parts. The NSID check
/// is listed first so "#..." and "...#" report the NSID error consistently.
fn validate_global_ref_with_fragment(
  ref_str: String,
  def_name: String,
) -> Result(Nil, ValidationError) {
  case string.split(ref_str, "#") {
    ["", _] ->
      Error(errors.invalid_schema(
        def_name <> ": NSID part of reference cannot be empty",
      ))
    [_, ""] ->
      Error(errors.invalid_schema(
        def_name <> ": definition name part of reference cannot be empty",
      ))
    [_, _] -> Ok(Nil)
    _ ->
      Error(errors.invalid_schema(
        def_name <> ": global reference can only contain one # character",
      ))
  }
}
+255
src/validation/field/union.gleam
+255
src/validation/field/union.gleam
···
1
+
// Union type validator
2
+
3
+
import errors.{type ValidationError}
4
+
import gleam/dynamic/decode
5
+
import gleam/json.{type Json}
6
+
import gleam/list
7
+
import gleam/option.{None, Some}
8
+
import gleam/result
9
+
import gleam/string
10
+
import json_helpers
11
+
import validation/constraints
12
+
import validation/context.{type ValidationContext}
13
+
14
+
const allowed_fields = ["type", "refs", "closed", "description"]

/// Validates union schema definition
///
/// Checks that only allowed fields are present, that the required `refs`
/// field is an array of strings, and that a closed union has at least one
/// ref. Reference resolvability is not checked here.
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "union",
  ))

  // Validate refs field (required)
  use refs_array <- result.try(case json_helpers.get_array(schema, "refs") {
    Some(refs_array) -> Ok(refs_array)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": union missing required 'refs' field",
      ))
  })

  // Validate that all refs are strings
  use _ <- result.try(
    list.index_fold(refs_array, Ok(Nil), fn(acc, ref_item, i) {
      use _ <- result.try(acc)
      case decode.run(ref_item, decode.string) {
        Ok(_) -> Ok(Nil)
        Error(_) ->
          Error(errors.invalid_schema(
            def_name <> ": refs[" <> string.inspect(i) <> "] must be a string",
          ))
      }
    }),
  )

  // A closed union must list at least one ref; open unions may be empty.
  // (This check previously appeared twice — the second copy was unreachable
  // because this one already errors for closed=true with empty refs.)
  case json_helpers.get_bool(schema, "closed") {
    Some(True) ->
      case list.is_empty(refs_array) {
        True ->
          Error(errors.invalid_schema(
            def_name <> ": union cannot be closed with empty refs array",
          ))
        False -> Ok(Nil)
      }
    _ -> Ok(Nil)
  }
  // Note: Full implementation would validate that each reference can be resolved
}
87
+
88
+
/// Validates union data against schema
///
/// Union data must be an object carrying a "$type" discriminator. The
/// discriminator is matched against the union's refs (including implicit
/// "#main" equivalence); unmatched types are rejected only for closed unions.
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Union data must be an object
  case json_helpers.is_object(data) {
    False -> {
      let type_name = get_type_name(data)
      Error(errors.data_validation(
        def_name
        <> ": union data must be an object which includes the \"$type\" property, found "
        <> type_name,
      ))
    }
    True -> {
      // Check for $type discriminator field
      let type_field = case json_helpers.get_string(data, "$type") {
        Some(type_name) -> Ok(type_name)
        None ->
          Error(errors.data_validation(
            def_name
            <> ": union data must be an object which includes the \"$type\" property",
          ))
      }

      use type_name <- result.try(type_field)

      // Get the union's referenced types
      let refs = case json_helpers.get_array(schema, "refs") {
        Some(refs_array) -> Ok(refs_array)
        None ->
          Error(errors.data_validation(
            def_name <> ": union schema missing or invalid 'refs' field",
          ))
      }

      use refs_array <- result.try(refs)

      case list.is_empty(refs_array) {
        True ->
          Error(errors.data_validation(
            def_name <> ": union schema has empty refs array",
          ))
        False -> {
          // Convert refs to strings (non-string entries are silently
          // dropped; schema validation has already rejected those)
          let ref_strings =
            list.filter_map(refs_array, fn(r) { decode.run(r, decode.string) })

          // Check if the $type matches any of the refs
          case
            list.find(ref_strings, fn(ref_str) {
              refs_contain_type(ref_str, type_name)
            })
          {
            Ok(_matching_ref) -> {
              // Found matching ref
              // In full implementation, would validate against the resolved schema
              Ok(Nil)
            }
            Error(Nil) -> {
              // No matching ref found; behaviour depends on open vs closed
              let is_closed = case json_helpers.get_bool(schema, "closed") {
                Some(closed) -> closed
                None -> False
              }

              case is_closed {
                True -> {
                  // Closed union - reject unknown types
                  Error(errors.data_validation(
                    def_name
                    <> ": union data $type must be one of "
                    <> string.join(ref_strings, ", ")
                    <> ", found '"
                    <> type_name
                    <> "'",
                  ))
                }
                False -> {
                  // Open union - allow unknown types
                  Ok(Nil)
                }
              }
            }
          }
        }
      }
    }
  }
}
183
+
184
+
/// Checks if refs array contains the given type
/// Based on AT Protocol's refsContainType logic - handles both explicit and implicit #main
fn refs_contain_type(reference: String, type_name: String) -> Bool {
  case reference == type_name {
    True -> True
    False ->
      case reference {
        // Local reference ("#name"): the $type may be the bare name or a
        // fully-qualified "nsid#name" ending in the same fragment.
        "#" <> ref_name ->
          type_name == ref_name
          || string.ends_with(type_name, "#" <> ref_name)
        _ ->
          // Global reference: handle implicit "#main" in either direction.
          case string.ends_with(type_name, "#main") {
            // "nsid#main" in data matches a bare "nsid" ref.
            True -> reference == string.drop_end(type_name, 5)
            False -> {
              // A bare "nsid" in data matches an explicit "nsid#main" ref;
              // any other fragment mismatch fails.
              let implicit_main = type_name <> "#main"
              !string.contains(type_name, "#") && reference == implicit_main
            }
          }
      }
  }
}
228
+
229
+
/// Helper to get type name for error messages
fn get_type_name(data: Json) -> String {
  // Probe every JSON kind; values matching none of them (should not occur
  // for well-formed Json) are reported as "unknown".
  case
    json_helpers.is_null(data),
    json_helpers.is_bool(data),
    json_helpers.is_int(data),
    json_helpers.is_string(data),
    json_helpers.is_array(data),
    json_helpers.is_object(data)
  {
    True, _, _, _, _, _ -> "null"
    _, True, _, _, _, _ -> "boolean"
    _, _, True, _, _, _ -> "number"
    _, _, _, True, _, _ -> "string"
    _, _, _, _, True, _ -> "array"
    _, _, _, _, _, True -> "object"
    _, _, _, _, _, _ -> "unknown"
  }
}
+297
src/validation/formats.gleam
+297
src/validation/formats.gleam
···
1
+
// String format validation
2
+
3
+
import gleam/list
4
+
import gleam/regexp
5
+
import gleam/string
6
+
import gleam/time/timestamp
7
+
import types.{type StringFormat}
8
+
9
+
/// Validates RFC3339 datetime format
pub fn is_valid_rfc3339_datetime(value: String) -> Bool {
  // Non-empty and at most 64 characters.
  let len = string.length(value)
  case len > 0 && len <= 64 {
    False -> False
    True -> {
      // Strict RFC3339 shape with restricted digit ranges; the real parse
      // below performs the final calendar validation.
      let pattern =
        "^[0-9]{4}-[01][0-9]-[0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9](\\.[0-9]{1,20})?(Z|([+-][0-2][0-9]:[0-5][0-9]))$"
      let shape_ok = case regexp.from_string(pattern) {
        Ok(re) -> regexp.check(re, value)
        Error(_) -> False
      }
      // Reject -00:00 (ISO-8601 requires +00:00), then confirm the value
      // parses as an actual timestamp.
      case shape_ok && !string.ends_with(value, "-00:00") {
        False -> False
        True ->
          case timestamp.parse_rfc3339(value) {
            Ok(_) -> True
            Error(_) -> False
          }
      }
    }
  }
}
43
+
44
+
/// Validates URI format
pub fn is_valid_uri(value: String) -> Bool {
  // Non-empty, at most 8192 chars, lowercase scheme (up to 81 chars of
  // [a-z.-]) followed by ':' and one or more printable (graph) characters.
  let len = string.length(value)
  let pattern = "^[a-z][a-z.-]{0,80}:[!-~]+$"
  case len > 0 && len <= 8192, regexp.from_string(pattern) {
    True, Ok(re) -> regexp.check(re, value)
    _, _ -> False
  }
}
62
+
63
+
/// Validates AT Protocol URI format (at://did:plc:xxx/collection/rkey)
pub fn is_valid_at_uri(value: String) -> Bool {
  // Bounds plus the mandatory "at://" scheme.
  let len = string.length(value)
  case len > 0 && len <= 8192 && string.starts_with(value, "at://") {
    False -> False
    True ->
      // Shape after the scheme: authority[/collection[/rkey]].
      case string.split(string.drop_start(value, 5), "/") {
        // Authority only — must be a DID or handle.
        [authority] -> is_valid_at_identifier(authority)
        // Authority plus collection NSID.
        [authority, collection] ->
          is_valid_at_identifier(authority) && is_valid_nsid(collection)
        // Full URI: authority, collection, and record key.
        [authority, collection, rkey] ->
          is_valid_at_identifier(authority)
          && is_valid_nsid(collection)
          && is_valid_record_key(rkey)
        // Anything with more path segments is invalid.
        _ -> False
      }
  }
}
104
+
105
+
/// Validates DID format (did:method:identifier)
pub fn is_valid_did(value: String) -> Bool {
  // Non-empty, max 2048 chars, "did:" prefix, lowercase method, and an
  // identifier whose final character is a valid non-'%'/':' character.
  let len = string.length(value)
  let pattern = "^did:[a-z]+:[a-zA-Z0-9._:%-]*[a-zA-Z0-9._-]$"
  case
    len > 0 && len <= 2048 && string.starts_with(value, "did:"),
    regexp.from_string(pattern)
  {
    True, Ok(re) -> regexp.check(re, value)
    _, _ -> False
  }
}
125
+
126
+
/// Validates AT Protocol handle (user.bsky.social)
pub fn is_valid_handle(value: String) -> Bool {
  // Hostname shape: dot-separated labels of max 63 chars, TLD starting
  // with a letter, overall length 1..=253.
  let pattern =
    "^([a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\\.)+[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?$"
  let len = string.length(value)
  let shape_ok = case len > 0 && len <= 253, regexp.from_string(pattern) {
    True, Ok(re) -> regexp.check(re, value)
    _, _ -> False
  }
  case shape_ok {
    False -> False
    True ->
      // The TLD must not be one of the reserved/never-resolvable names.
      case list.last(string.split(value, ".")) {
        Ok("local")
        | Ok("arpa")
        | Ok("invalid")
        | Ok("localhost")
        | Ok("internal")
        | Ok("example")
        | Ok("onion")
        | Ok("alt") -> False
        Ok(_) -> True
        Error(_) -> False
      }
  }
}
165
+
166
+
/// Validates AT identifier (either DID or handle)
pub fn is_valid_at_identifier(value: String) -> Bool {
  // DIDs are tried first; anything that is not a DID may still be a handle.
  case is_valid_did(value) {
    True -> True
    False -> is_valid_handle(value)
  }
}
170
+
171
+
/// Validates NSID format (com.example.type)
pub fn is_valid_nsid(value: String) -> Bool {
  // Reversed-domain authority plus a name segment, e.g. com.example.record.
  let pattern =
    "^[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(\\.[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+$"
  let shape_ok = case regexp.from_string(pattern) {
    Ok(re) -> regexp.check(re, value)
    Error(_) -> False
  }
  // At least three dot-separated segments and at most 317 chars overall.
  shape_ok
  && list.length(string.split(value, ".")) >= 3
  && string.length(value) <= 317
}
192
+
193
+
/// Validates CID format (Content Identifier)
pub fn is_valid_cid(value: String) -> Bool {
  // Informal fast check aligned with indigo's atproto/syntax/cid.go:
  // 8-256 chars of [a-zA-Z0-9+=], rejecting the legacy CIDv0 "Qmb" prefix.
  let len = string.length(value)
  case len >= 8 && len <= 256 && !string.starts_with(value, "Qmb") {
    False -> False
    True ->
      case regexp.from_string("^[a-zA-Z0-9+=]{8,256}$") {
        Ok(re) -> regexp.check(re, value)
        Error(_) -> False
      }
  }
}
219
+
220
+
/// Validates BCP47 language tag
pub fn is_valid_language_tag(value: String) -> Bool {
  // Lenient BCP47: "i" prefix or a 2-3 letter primary code plus optional
  // alphanumeric subtags, max 128 chars (en, en-US, zh-Hans-CN, i-enochian).
  let len = string.length(value)
  let pattern = "^(i|[a-z]{2,3})(-[a-zA-Z0-9]+)*$"
  case len > 0 && len <= 128, regexp.from_string(pattern) {
    True, Ok(re) -> regexp.check(re, value)
    _, _ -> False
  }
}
237
+
238
+
/// Validates TID format (Timestamp Identifier)
pub fn is_valid_tid(value: String) -> Bool {
  // 13 chars of the base32-sortable alphabet; the first character is
  // range-restricted (234567abcdefghij) so the timestamp stays valid.
  let pattern = "^[234567abcdefghij][234567abcdefghijklmnopqrstuvwxyz]{12}$"
  case regexp.from_string(pattern) {
    Ok(re) -> string.length(value) == 13 && regexp.check(re, value)
    Error(_) -> False
  }
}
250
+
251
+
/// Validates record key format
pub fn is_valid_record_key(value: String) -> Bool {
  // 1-512 chars; "." and ".." are rejected outright (path-traversal
  // safety). A key is valid when it is a TID or matches the custom
  // charset [a-zA-Z0-9_~.:-].
  let len = string.length(value)
  case value == "." || value == ".." || len < 1 || len > 512 {
    True -> False
    False ->
      case is_valid_tid(value) {
        True -> True
        False ->
          case regexp.from_string("^[a-zA-Z0-9_~.:-]+$") {
            Ok(re) -> regexp.check(re, value)
            Error(_) -> False
          }
      }
  }
}
281
+
282
+
/// Validates a string value against a specific format
///
/// Exhaustively dispatches each `StringFormat` variant to its dedicated
/// validator; returns True when `value` satisfies that format.
pub fn validate_format(value: String, format: StringFormat) -> Bool {
  case format {
    types.DateTime -> is_valid_rfc3339_datetime(value)
    types.Uri -> is_valid_uri(value)
    types.AtUri -> is_valid_at_uri(value)
    types.Did -> is_valid_did(value)
    types.Handle -> is_valid_handle(value)
    types.AtIdentifier -> is_valid_at_identifier(value)
    types.Nsid -> is_valid_nsid(value)
    types.Cid -> is_valid_cid(value)
    types.Language -> is_valid_language_tag(value)
    types.Tid -> is_valid_tid(value)
    types.RecordKey -> is_valid_record_key(value)
  }
}
+63
src/validation/meta/token.gleam
+63
src/validation/meta/token.gleam
···
1
+
// Token type validator
2
+
// Tokens are unit types used for discrimination in unions
3
+
4
+
import errors.{type ValidationError}
5
+
import gleam/json.{type Json}
6
+
import gleam/string
7
+
import json_helpers
8
+
import validation/constraints
9
+
import validation/context.{type ValidationContext}
10
+
11
+
const allowed_fields = ["type", "description"]

/// Validates token schema definition
///
/// Tokens carry no constraints beyond the allowed-field whitelist.
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let path = context.path(ctx)
  let present_keys = json_helpers.get_keys(schema)
  constraints.validate_allowed_fields(path, present_keys, allowed_fields, "token")
}
24
+
25
+
/// Validates token data against schema
/// Note: Tokens are unit types used for discrimination in unions.
/// The token value should be a string matching the fully-qualified token name
/// (e.g., "example.lexicon.record#demoToken"). Full token name validation
/// happens at the union/record level where the expected token name is known.
pub fn validate_data(
  data: Json,
  _schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Token data must be a string (the fully-qualified token name).
  case json_helpers.is_string(data) {
    False ->
      Error(errors.data_validation(
        def_name <> ": expected string for token data, got other type",
      ))
    True -> {
      // Recover the raw value by stripping the surrounding quotes that
      // json.to_string places around a JSON string.
      let encoded = json.to_string(data)
      let value = case
        string.starts_with(encoded, "\"") && string.ends_with(encoded, "\"")
      {
        True -> string.slice(encoded, 1, string.length(encoded) - 2)
        False -> encoded
      }

      // Only emptiness is checked here; escaped characters inside the
      // JSON encoding do not affect this test.
      case value {
        "" ->
          Error(errors.data_validation(
            def_name <> ": token value cannot be empty string",
          ))
        _ -> Ok(Nil)
      }
    }
  }
}
+68
src/validation/meta/unknown.gleam
+68
src/validation/meta/unknown.gleam
···
1
+
// Unknown type validator
2
+
// Unknown allows flexible data with AT Protocol data model rules
3
+
4
+
import errors.{type ValidationError}
5
+
import gleam/json.{type Json}
6
+
import gleam/option.{None, Some}
7
+
import json_helpers
8
+
import validation/constraints
9
+
import validation/context.{type ValidationContext}
10
+
11
+
const allowed_fields = ["type", "description"]

/// Validates unknown schema definition
///
/// Unknown schemas carry no constraints beyond the allowed-field whitelist.
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let path = context.path(ctx)
  let present_keys = json_helpers.get_keys(schema)
  constraints.validate_allowed_fields(path, present_keys, allowed_fields, "unknown")
}
24
+
25
+
/// Validates unknown data against schema
/// Unknown allows flexible data following AT Protocol data model rules
pub fn validate_data(
  data: Json,
  _schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Unknown data must be an object (not primitives, arrays, bytes, or blobs).
  case json_helpers.is_object(data) {
    False ->
      Error(errors.data_validation(
        def_name <> ": unknown type must be an object, not a primitive or array",
      ))
    True ->
      // Two special AT Protocol object shapes are disallowed here:
      // bytes objects {"$bytes": "base64"} and blob objects {"$type": "blob", ...}.
      case
        json_helpers.get_string(data, "$bytes"),
        json_helpers.get_string(data, "$type")
      {
        Some(_), _ ->
          Error(errors.data_validation(
            def_name <> ": unknown type cannot be a bytes object",
          ))
        None, Some("blob") ->
          Error(errors.data_validation(
            def_name <> ": unknown type cannot be a blob object",
          ))
        // Any other object is acceptable; it may carry a $type
        // discriminator, and gleam_json's type system already keeps the
        // value within the JSON data model.
        None, _ -> Ok(Nil)
      }
  }
}
+230
src/validation/primary/params.gleam
+230
src/validation/primary/params.gleam
···
1
+
// Params type validator
2
+
// Mirrors the Go implementation's validation/primary/params
3
+
// Params define query/procedure/subscription parameters (XRPC endpoint arguments)
4
+
5
+
import errors.{type ValidationError}
6
+
import gleam/dynamic/decode
7
+
import gleam/json.{type Json}
8
+
import gleam/list
9
+
import gleam/option.{None, Some}
10
+
import gleam/result
11
+
import json_helpers
12
+
import validation/constraints
13
+
import validation/context.{type ValidationContext}
14
+
import validation/field as validation_field
15
+
import validation/meta/unknown as validation_meta_unknown
16
+
import validation/primitive/boolean as validation_primitive_boolean
17
+
import validation/primitive/integer as validation_primitive_integer
18
+
import validation/primitive/string as validation_primitive_string
19
+
20
+
const allowed_fields = ["type", "description", "properties", "required"]

/// Validates params schema definition
///
/// Checks the allowed-field whitelist, the mandatory type: "params" tag,
/// that every "required" entry names an existing property, and that each
/// property uses a type permitted for XRPC parameters.
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "params",
  ))

  // Validate type field
  use _ <- result.try(case json_helpers.get_string(schema, "type") {
    Some("params") -> Ok(Nil)
    Some(other_type) ->
      Error(errors.invalid_schema(
        def_name <> ": expected type 'params', got '" <> other_type <> "'",
      ))
    None ->
      Error(errors.invalid_schema(def_name <> ": params missing type field"))
  })

  // Get properties; a missing "properties" field means an empty set.
  let properties_dict = case json_helpers.get_field(schema, "properties") {
    Some(props) -> json_helpers.json_to_dict(props)
    None -> Ok(json_helpers.empty_dict())
  }

  // "required" is optional. (This was previously written as an identity
  // `case` that rebuilt the same Option — simplified to the expression.)
  let required_array = json_helpers.get_array(schema, "required")

  // Validate required fields exist in properties
  use props_dict <- result.try(properties_dict)
  use _ <- result.try(validate_required_fields(
    def_name,
    required_array,
    props_dict,
  ))

  // Validate each property
  validate_properties(def_name, props_dict, ctx)
}
71
+
72
+
/// Validates that all required fields exist in properties
fn validate_required_fields(
  def_name: String,
  required_array: option.Option(List(decode.Dynamic)),
  properties_dict: json_helpers.JsonDict,
) -> Result(Nil, ValidationError) {
  case required_array {
    // No "required" list at all is fine.
    None -> Ok(Nil)
    Some(required) ->
      // Fail fast on the first entry that is not a string or that names a
      // property which does not exist.
      list.try_each(required, fn(item) {
        case decode.run(item, decode.string) {
          Error(_) ->
            Error(errors.invalid_schema(
              def_name <> ": required field must be a string",
            ))
          Ok(field_name) ->
            case json_helpers.dict_has_key(properties_dict, field_name) {
              True -> Ok(Nil)
              False ->
                Error(errors.invalid_schema(
                  def_name
                  <> ": required field '"
                  <> field_name
                  <> "' not found in properties",
                ))
            }
        }
      })
  }
}
104
+
105
+
/// Validates all properties in the params
///
/// Folds over the properties dict, short-circuiting on the first failure.
/// Each property must have a non-empty name, a value convertible to JSON,
/// and a type allowed for params.
fn validate_properties(
  def_name: String,
  properties_dict: json_helpers.JsonDict,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  json_helpers.dict_fold(properties_dict, Ok(Nil), fn(acc, key, value) {
    case acc {
      // A previous property already failed; propagate that error unchanged.
      Error(e) -> Error(e)
      Ok(_) -> {
        // Check property name is not empty
        use _ <- result.try(case key {
          "" ->
            Error(errors.invalid_schema(
              def_name <> ": empty property name not allowed",
            ))
          _ -> Ok(Nil)
        })

        // Convert dynamic value to JSON
        use prop_json <- result.try(case json_helpers.dynamic_to_json(value) {
          Ok(j) -> Ok(j)
          Error(_) ->
            Error(errors.invalid_schema(
              def_name <> ": invalid property value for '" <> key <> "'",
            ))
        })

        // Validate property type restrictions
        validate_property_type(def_name, key, prop_json, ctx)
      }
    }
  })
}
139
+
140
+
/// Validates that a property has an allowed type
/// Allowed types: boolean, integer, string, unknown, or arrays of these
///
/// For arrays, the `items` schema must itself use one of the scalar
/// allowed types; everything else is rejected with a descriptive error.
fn validate_property_type(
  def_name: String,
  property_name: String,
  property_schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Path used in error messages for this specific property.
  let prop_path = def_name <> ".properties." <> property_name

  case json_helpers.get_string(property_schema, "type") {
    Some("boolean") | Some("integer") | Some("string") | Some("unknown") -> {
      // These are allowed types - recursively validate the schema
      let prop_ctx = context.with_path(ctx, "properties." <> property_name)
      validate_property_schema(property_schema, prop_ctx)
    }
    Some("array") -> {
      // Arrays are allowed, but items must be one of the allowed types
      case json_helpers.get_field(property_schema, "items") {
        Some(items) -> {
          case json_helpers.get_string(items, "type") {
            Some("boolean") | Some("integer") | Some("string") | Some("unknown") -> {
              // Valid array item type - recursively validate
              let prop_ctx =
                context.with_path(ctx, "properties." <> property_name)
              validate_property_schema(property_schema, prop_ctx)
            }
            Some(other_type) ->
              Error(errors.invalid_schema(
                prop_path
                <> ": params array items must be boolean, integer, string, or unknown, got '"
                <> other_type
                <> "'",
              ))
            None ->
              Error(errors.invalid_schema(
                prop_path <> ": array items missing type field",
              ))
          }
        }
        None ->
          Error(errors.invalid_schema(
            prop_path <> ": array property missing items field",
          ))
      }
    }
    Some(other_type) ->
      Error(errors.invalid_schema(
        prop_path
        <> ": params properties must be boolean, integer, string, unknown, or arrays of these, got '"
        <> other_type
        <> "'",
      ))
    None ->
      Error(errors.invalid_schema(prop_path <> ": property missing type field"))
  }
}
197
+
198
+
/// Validates a property schema by dispatching to the appropriate validator
///
/// Only boolean, integer, string, unknown, and array types are expected
/// here; validate_property_type has already filtered out everything else,
/// but unrecognised/missing types still produce schema errors defensively.
fn validate_property_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  case json_helpers.get_string(schema, "type") {
    Some("boolean") -> validation_primitive_boolean.validate_schema(schema, ctx)
    Some("integer") -> validation_primitive_integer.validate_schema(schema, ctx)
    Some("string") -> validation_primitive_string.validate_schema(schema, ctx)
    Some("unknown") -> validation_meta_unknown.validate_schema(schema, ctx)
    Some("array") -> validation_field.validate_array_schema(schema, ctx)
    Some(unknown_type) ->
      Error(errors.invalid_schema(
        context.path(ctx) <> ": unknown type '" <> unknown_type <> "'",
      ))
    None ->
      Error(errors.invalid_schema(
        context.path(ctx) <> ": schema missing type field",
      ))
  }
}
219
+
220
+
/// Validates params data against schema
///
/// NOTE(review): currently a stub that always succeeds. A complete
/// implementation would verify that all required parameters are present
/// and that each parameter value matches its property schema.
pub fn validate_data(
  _data: Json,
  _schema: Json,
  _ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Params data validation would check that all required parameters are present
  // and that each parameter value matches its schema
  // For now, simplified implementation
  Ok(Nil)
}
+163
src/validation/primary/procedure.gleam
+163
src/validation/primary/procedure.gleam
···
1
+
// Procedure type validator
// Procedures are XRPC Procedure (HTTP POST) endpoints for modifying data

import errors.{type ValidationError}
import gleam/json.{type Json}
import gleam/option.{None, Some}
import gleam/result
import json_helpers
import validation/constraints
import validation/context.{type ValidationContext}
import validation/field as validation_field
import validation/field/reference as validation_field_reference
import validation/field/union as validation_field_union
import validation/primary/params

const allowed_fields = [
  "type", "parameters", "input", "output", "errors", "description",
]

/// Validates procedure schema definition
///
/// Checks the allowed field set, then validates the optional
/// `parameters`, `input`, `output`, and `errors` fields.
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "procedure",
  ))

  // Validate parameters field if present
  use _ <- result.try(case json_helpers.get_field(schema, "parameters") {
    Some(parameters) -> validate_parameters_schema(parameters, ctx)
    None -> Ok(Nil)
  })

  // Validate input field if present
  use _ <- result.try(case json_helpers.get_field(schema, "input") {
    Some(input) -> validate_io_schema(def_name, input, "input")
    None -> Ok(Nil)
  })

  // Validate output field if present
  use _ <- result.try(case json_helpers.get_field(schema, "output") {
    Some(output) -> validate_io_schema(def_name, output, "output")
    None -> Ok(Nil)
  })

  // Validate errors field if present.
  // Fix: previously this was a vacuous case where both arms returned
  // Ok(Nil), so an errors field that was present but not an array was
  // silently accepted.
  validate_errors_field(def_name, schema)
}

/// Rejects an `errors` field that exists but is not an array.
/// Uses the same present-but-wrong-type pattern the record validator
/// applies to its `nullable` field.
fn validate_errors_field(
  def_name: String,
  schema: Json,
) -> Result(Nil, ValidationError) {
  case json_helpers.get_array(schema, "errors") {
    Some(_) -> Ok(Nil)
    None ->
      case json_helpers.get_field(schema, "errors") {
        Some(_) ->
          Error(errors.invalid_schema(
            def_name <> ": errors field must be an array",
          ))
        None -> Ok(Nil)
      }
  }
}

/// Validates procedure input data against schema
/// Data should be the procedure input body as JSON
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // If schema has input, validate data against it
  case json_helpers.get_field(schema, "input") {
    Some(input) -> {
      let input_ctx = context.with_path(ctx, "input")
      validate_body_data(data, input, input_ctx)
    }
    None -> Ok(Nil)
  }
}

/// Validates procedure output data against schema
pub fn validate_output_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // If schema has output, validate data against it
  case json_helpers.get_field(schema, "output") {
    Some(output) -> {
      let output_ctx = context.with_path(ctx, "output")
      validate_body_data(data, output, output_ctx)
    }
    None -> Ok(Nil)
  }
}

/// Validates data against a SchemaBody (input or output)
///
/// A body without a `schema` field (e.g. a raw binary payload) is not
/// validated further.
fn validate_body_data(
  data: Json,
  body: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  case json_helpers.get_field(body, "schema") {
    Some(schema) -> {
      let schema_ctx = context.with_path(ctx, "schema")
      // Dispatch to appropriate validator based on schema type
      validate_body_schema_data(data, schema, schema_ctx)
    }
    None -> Ok(Nil)
  }
}

/// Validates data against a body schema (object, ref, or union)
fn validate_body_schema_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  case json_helpers.get_string(schema, "type") {
    Some("object") -> validation_field.validate_object_data(data, schema, ctx)
    Some("ref") -> {
      // For references, we need to resolve and validate
      // For now, just validate it's structured correctly
      validation_field_reference.validate_data(data, schema, ctx)
    }
    Some("union") -> validation_field_union.validate_data(data, schema, ctx)
    Some(other_type) ->
      Error(errors.data_validation(
        context.path(ctx)
        <> ": unsupported body schema type '"
        <> other_type
        <> "'",
      ))
    None ->
      Error(errors.data_validation(
        context.path(ctx) <> ": body schema missing type field",
      ))
  }
}

/// Validates parameters schema definition
fn validate_parameters_schema(
  parameters: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Validate the full params schema
  let params_ctx = context.with_path(ctx, "parameters")
  params.validate_schema(parameters, params_ctx)
}

/// Validates input/output schema definition
fn validate_io_schema(
  def_name: String,
  io: Json,
  field_name: String,
) -> Result(Nil, ValidationError) {
  // Input/output must have encoding field
  case json_helpers.get_string(io, "encoding") {
    Some(_) -> Ok(Nil)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": procedure " <> field_name <> " missing encoding field",
      ))
  }
}
+224
src/validation/primary/query.gleam
+224
src/validation/primary/query.gleam
···
1
+
// Query type validator
// Queries are XRPC Query (HTTP GET) endpoints for retrieving data

import errors.{type ValidationError}
import gleam/dynamic/decode
import gleam/json.{type Json}
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import json_helpers
import validation/constraints
import validation/context.{type ValidationContext}
import validation/field as validation_field
import validation/meta/unknown as validation_meta_unknown
import validation/primary/params
import validation/primitive/boolean as validation_primitive_boolean
import validation/primitive/integer as validation_primitive_integer
import validation/primitive/string as validation_primitive_string

const allowed_fields = ["type", "parameters", "output", "errors", "description"]

/// Validates query schema definition
///
/// Checks the allowed field set, then validates the optional
/// `parameters`, `output`, and `errors` fields.
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "query",
  ))

  // Validate parameters field if present
  use _ <- result.try(case json_helpers.get_field(schema, "parameters") {
    Some(parameters) -> validate_parameters_schema(parameters, ctx)
    None -> Ok(Nil)
  })

  // Validate output field if present
  use _ <- result.try(case json_helpers.get_field(schema, "output") {
    Some(output) -> validate_output_schema(def_name, output)
    None -> Ok(Nil)
  })

  // Validate errors field if present.
  // Fix: previously this was a vacuous case where both arms returned
  // Ok(Nil), so an errors field that was present but not an array was
  // silently accepted.
  validate_errors_field(def_name, schema)
}

/// Rejects an `errors` field that exists but is not an array.
/// Uses the same present-but-wrong-type pattern the record validator
/// applies to its `nullable` field.
fn validate_errors_field(
  def_name: String,
  schema: Json,
) -> Result(Nil, ValidationError) {
  case json_helpers.get_array(schema, "errors") {
    Some(_) -> Ok(Nil)
    None ->
      case json_helpers.get_field(schema, "errors") {
        Some(_) ->
          Error(errors.invalid_schema(
            def_name <> ": errors field must be an array",
          ))
        None -> Ok(Nil)
      }
  }
}

/// Validates query data against schema
/// Data should be the query parameters as a JSON object
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Query data must be an object (the parameters)
  use _ <- result.try(case json_helpers.is_object(data) {
    True -> Ok(Nil)
    False ->
      Error(errors.data_validation(
        def_name <> ": query parameters must be an object",
      ))
  })

  // If schema has parameters, validate data against them
  case json_helpers.get_field(schema, "parameters") {
    Some(parameters) -> {
      let params_ctx = context.with_path(ctx, "parameters")
      validate_parameters_data(data, parameters, params_ctx)
    }
    None -> Ok(Nil)
  }
}

/// Validates parameter data against params schema
///
/// Checks that every entry in the schema's `required` list is present in
/// the data, then validates each supplied parameter against its property
/// schema. Parameters not declared in the schema are allowed.
fn validate_parameters_data(
  data: Json,
  params_schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Get data as dict
  use data_dict <- result.try(json_helpers.json_to_dict(data))

  // Get properties and required from params schema
  let properties_dict = case
    json_helpers.get_field(params_schema, "properties")
  {
    Some(props) -> json_helpers.json_to_dict(props)
    None -> Ok(json_helpers.empty_dict())
  }

  let required_array = json_helpers.get_array(params_schema, "required")

  use props_dict <- result.try(properties_dict)

  // Check all required parameters are present
  use _ <- result.try(case required_array {
    Some(required) -> {
      list.try_fold(required, Nil, fn(_, item) {
        case decode.run(item, decode.string) {
          Ok(param_name) -> {
            case json_helpers.dict_has_key(data_dict, param_name) {
              True -> Ok(Nil)
              False ->
                Error(errors.data_validation(
                  def_name
                  <> ": missing required parameter '"
                  <> param_name
                  <> "'",
                ))
            }
          }
          // Non-string entries in `required` are ignored here; the schema
          // validator is responsible for rejecting them.
          Error(_) -> Ok(Nil)
        }
      })
    }
    None -> Ok(Nil)
  })

  // Validate each parameter in data, short-circuiting on the first error
  json_helpers.dict_fold(data_dict, Ok(Nil), fn(acc, param_name, param_value) {
    case acc {
      Error(e) -> Error(e)
      Ok(_) -> {
        // Get the schema for this parameter
        case json_helpers.dict_get(props_dict, param_name) {
          Some(param_schema_dyn) -> {
            // Convert dynamic to JSON
            case json_helpers.dynamic_to_json(param_schema_dyn) {
              Ok(param_schema) -> {
                // Convert param value to JSON
                case json_helpers.dynamic_to_json(param_value) {
                  Ok(param_json) -> {
                    // Validate the parameter value against its schema
                    let param_ctx = context.with_path(ctx, param_name)
                    validate_parameter_value(
                      param_json,
                      param_schema,
                      param_ctx,
                    )
                  }
                  Error(e) -> Error(e)
                }
              }
              Error(e) -> Error(e)
            }
          }
          None -> {
            // Parameter not in schema - could warn or allow
            // For now, allow unknown parameters
            Ok(Nil)
          }
        }
      }
    }
  })
}

/// Validates a single parameter value against its schema
fn validate_parameter_value(
  value: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Dispatch based on schema type
  case json_helpers.get_string(schema, "type") {
    Some("boolean") ->
      validation_primitive_boolean.validate_data(value, schema, ctx)
    Some("integer") ->
      validation_primitive_integer.validate_data(value, schema, ctx)
    Some("string") ->
      validation_primitive_string.validate_data(value, schema, ctx)
    Some("unknown") -> validation_meta_unknown.validate_data(value, schema, ctx)
    Some("array") -> validation_field.validate_array_data(value, schema, ctx)
    Some(other_type) ->
      Error(errors.data_validation(
        context.path(ctx)
        <> ": unsupported parameter type '"
        <> other_type
        <> "'",
      ))
    None ->
      Error(errors.data_validation(
        context.path(ctx) <> ": parameter schema missing type field",
      ))
  }
}

/// Validates parameters schema definition
fn validate_parameters_schema(
  parameters: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Validate the full params schema
  let params_ctx = context.with_path(ctx, "parameters")
  params.validate_schema(parameters, params_ctx)
}

/// Validates output schema definition
fn validate_output_schema(
  def_name: String,
  output: Json,
) -> Result(Nil, ValidationError) {
  // Output must have encoding field
  case json_helpers.get_string(output, "encoding") {
    Some(_) -> Ok(Nil)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": query output missing encoding field",
      ))
  }
}
+180
src/validation/primary/record.gleam
+180
src/validation/primary/record.gleam
···
1
+
// Record type validator

import errors.{type ValidationError}
import gleam/json.{type Json}
import gleam/option.{None, Some}
import gleam/result
import gleam/string
import json_helpers
import validation/constraints
import validation/context.{type ValidationContext}
import validation/field

const allowed_fields = ["type", "key", "record", "description"]

const allowed_record_fields = [
  "type", "properties", "required", "nullable", "description",
]

/// Validates record schema definition
///
/// A record definition must carry a `key` field naming its record-key
/// scheme and a `record` field holding an object schema, which is then
/// recursively validated as an ordinary object.
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Only the record-level fields may appear.
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    json_helpers.get_keys(schema),
    allowed_fields,
    "record",
  ))

  // The 'key' field is mandatory and must use a recognised key type.
  use key <- result.try(case json_helpers.get_string(schema, "key") {
    Some(k) -> Ok(k)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": record missing required 'key' field",
      ))
  })
  use _ <- result.try(validate_key(def_name, key))

  // The 'record' field is mandatory and must hold the object schema.
  use record <- result.try(case json_helpers.get_field(schema, "record") {
    Some(r) -> Ok(r)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": record missing required 'record' field",
      ))
  })
  use _ <- result.try(validate_record_object(def_name, record))

  // Recursively validate the nested object schema's properties.
  // NOTE(review): the ".record" path segment keeps its original leading
  // dot (other call sites pass bare names) — confirm against
  // context.with_path before normalising.
  field.validate_object_schema(record, context.with_path(ctx, ".record"))
}

/// Validates record data against schema
///
/// The data must be a JSON object; validation is then delegated to the
/// object validator using the schema's nested 'record' definition.
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  use _ <- result.try(case json_helpers.is_object(data) {
    True -> Ok(Nil)
    False ->
      Error(errors.data_validation(def_name <> ": expected object for record"))
  })

  case json_helpers.get_field(schema, "record") {
    Some(record_def) ->
      // The record's data validation is the same as object validation
      field.validate_object_data(data, record_def, ctx)
    None ->
      Error(errors.data_validation(
        def_name <> ": record schema missing 'record' field",
      ))
  }
}

/// Validates the `key` field of a record definition
///
/// Valid key types:
/// - `tid`: Record key is a Timestamp Identifier (auto-generated)
/// - `any`: Record key can be any valid record key format
/// - `nsid`: Record key must be a valid NSID
/// - `literal:*`: Record key must match the literal value after the colon
fn validate_key(def_name: String, key: String) -> Result(Nil, ValidationError) {
  let key_is_valid =
    key == "tid"
    || key == "any"
    || key == "nsid"
    || string.starts_with(key, "literal:")
  case key_is_valid {
    True -> Ok(Nil)
    False ->
      Error(errors.invalid_schema(
        def_name
        <> ": record has invalid key type '"
        <> key
        <> "'. Must be 'tid', 'any', 'nsid', or 'literal:*'",
      ))
  }
}

/// Validates the structure of a record object definition
///
/// The definition must declare type "object", use only the allowed
/// object-level fields, and have well-typed 'properties' (object) and
/// 'nullable' (array) fields when they are present.
fn validate_record_object(
  def_name: String,
  record_def: Json,
) -> Result(Nil, ValidationError) {
  case json_helpers.get_string(record_def, "type") {
    None ->
      Error(errors.invalid_schema(def_name <> ": record field missing type"))
    Some("object") -> {
      // Only the object-level fields may appear inside 'record'.
      use _ <- result.try(constraints.validate_allowed_fields(
        def_name,
        json_helpers.get_keys(record_def),
        allowed_record_fields,
        "record object",
      ))

      // 'properties', when present, must be a JSON object.
      use _ <- result.try(
        case json_helpers.get_field(record_def, "properties") {
          None -> Ok(Nil)
          Some(properties) ->
            case json_helpers.is_object(properties) {
              True -> Ok(Nil)
              False ->
                Error(errors.invalid_schema(
                  def_name <> ": record properties must be an object",
                ))
            }
        },
      )

      // 'nullable', when present, must be an array.
      case json_helpers.get_field(record_def, "nullable") {
        None -> Ok(Nil)
        Some(_) ->
          case json_helpers.get_array(record_def, "nullable") {
            Some(_) -> Ok(Nil)
            None ->
              Error(errors.invalid_schema(
                def_name <> ": record nullable field must be an array",
              ))
          }
      }
    }
    Some(other_type) ->
      Error(errors.invalid_schema(
        def_name
        <> ": record field must be type 'object', got '"
        <> other_type
        <> "'",
      ))
  }
}
+269
src/validation/primary/subscription.gleam
+269
src/validation/primary/subscription.gleam
···
1
+
// Subscription type validator
// Subscriptions are XRPC Subscription (WebSocket) endpoints for real-time data

import errors.{type ValidationError}
import gleam/dynamic/decode
import gleam/json.{type Json}
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import json_helpers
import validation/constraints
import validation/context.{type ValidationContext}
import validation/field as validation_field
import validation/field/union as validation_field_union
import validation/meta/unknown as validation_meta_unknown
import validation/primary/params
import validation/primitive/boolean as validation_primitive_boolean
import validation/primitive/integer as validation_primitive_integer
import validation/primitive/string as validation_primitive_string

const allowed_fields = [
  "type",
  "parameters",
  "message",
  "errors",
  "description",
]

/// Validates subscription schema definition
///
/// Checks the allowed field set, then validates the optional
/// `parameters`, `message`, and `errors` fields.
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "subscription",
  ))

  // Validate parameters field if present
  use _ <- result.try(case json_helpers.get_field(schema, "parameters") {
    Some(parameters) -> validate_parameters_schema(parameters, ctx)
    None -> Ok(Nil)
  })

  // Validate message field if present
  use _ <- result.try(case json_helpers.get_field(schema, "message") {
    Some(message) -> validate_message_schema(def_name, message)
    None -> Ok(Nil)
  })

  // Validate errors field if present.
  // Fix: previously this was a vacuous case where both arms returned
  // Ok(Nil), so an errors field that was present but not an array was
  // silently accepted.
  validate_errors_field(def_name, schema)
}

/// Rejects an `errors` field that exists but is not an array.
/// Uses the same present-but-wrong-type pattern the record validator
/// applies to its `nullable` field.
fn validate_errors_field(
  def_name: String,
  schema: Json,
) -> Result(Nil, ValidationError) {
  case json_helpers.get_array(schema, "errors") {
    Some(_) -> Ok(Nil)
    None ->
      case json_helpers.get_field(schema, "errors") {
        Some(_) ->
          Error(errors.invalid_schema(
            def_name <> ": errors field must be an array",
          ))
        None -> Ok(Nil)
      }
  }
}

/// Validates subscription parameters data against schema
/// Data should be the connection parameters as a JSON object
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Subscription parameter data must be an object
  use _ <- result.try(case json_helpers.is_object(data) {
    True -> Ok(Nil)
    False ->
      Error(errors.data_validation(
        def_name <> ": subscription parameters must be an object",
      ))
  })

  // If schema has parameters, validate data against them
  case json_helpers.get_field(schema, "parameters") {
    Some(parameters) -> {
      let params_ctx = context.with_path(ctx, "parameters")
      validate_parameters_data(data, parameters, params_ctx)
    }
    None -> Ok(Nil)
  }
}

/// Validates subscription message data against schema
///
/// Messages are validated against the union schema nested under
/// `message.schema`; schemas without a message definition accept any data.
pub fn validate_message_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Get the message schema
  case json_helpers.get_field(schema, "message") {
    Some(message) -> {
      case json_helpers.get_field(message, "schema") {
        Some(msg_schema) -> {
          // Message schema must be a union - validate data against it
          let msg_ctx = context.with_path(ctx, "message.schema")
          validation_field_union.validate_data(data, msg_schema, msg_ctx)
        }
        None -> Ok(Nil)
      }
    }
    None -> Ok(Nil)
  }
}

/// Validates parameter data against params schema
/// (Reused from query validator pattern)
///
/// Checks that every entry in the schema's `required` list is present in
/// the data, then validates each supplied parameter against its property
/// schema. Parameters not declared in the schema are allowed.
fn validate_parameters_data(
  data: Json,
  params_schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Get data as dict
  use data_dict <- result.try(json_helpers.json_to_dict(data))

  // Get properties and required from params schema
  let properties_dict = case
    json_helpers.get_field(params_schema, "properties")
  {
    Some(props) -> json_helpers.json_to_dict(props)
    None -> Ok(json_helpers.empty_dict())
  }

  let required_array = json_helpers.get_array(params_schema, "required")

  use props_dict <- result.try(properties_dict)

  // Check all required parameters are present
  use _ <- result.try(case required_array {
    Some(required) -> {
      list.try_fold(required, Nil, fn(_, item) {
        case decode.run(item, decode.string) {
          Ok(param_name) -> {
            case json_helpers.dict_has_key(data_dict, param_name) {
              True -> Ok(Nil)
              False ->
                Error(errors.data_validation(
                  def_name
                  <> ": missing required parameter '"
                  <> param_name
                  <> "'",
                ))
            }
          }
          // Non-string entries in `required` are ignored here; the schema
          // validator is responsible for rejecting them.
          Error(_) -> Ok(Nil)
        }
      })
    }
    None -> Ok(Nil)
  })

  // Validate each parameter in data, short-circuiting on the first error
  json_helpers.dict_fold(data_dict, Ok(Nil), fn(acc, param_name, param_value) {
    case acc {
      Error(e) -> Error(e)
      Ok(_) -> {
        // Get the schema for this parameter
        case json_helpers.dict_get(props_dict, param_name) {
          Some(param_schema_dyn) -> {
            // Convert dynamic to JSON
            case json_helpers.dynamic_to_json(param_schema_dyn) {
              Ok(param_schema) -> {
                // Convert param value to JSON
                case json_helpers.dynamic_to_json(param_value) {
                  Ok(param_json) -> {
                    // Validate the parameter value against its schema
                    let param_ctx = context.with_path(ctx, param_name)
                    validate_parameter_value(
                      param_json,
                      param_schema,
                      param_ctx,
                    )
                  }
                  Error(e) -> Error(e)
                }
              }
              Error(e) -> Error(e)
            }
          }
          None -> {
            // Parameter not in schema - allow unknown parameters
            Ok(Nil)
          }
        }
      }
    }
  })
}

/// Validates a single parameter value against its schema
fn validate_parameter_value(
  value: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Dispatch based on schema type
  case json_helpers.get_string(schema, "type") {
    Some("boolean") ->
      validation_primitive_boolean.validate_data(value, schema, ctx)
    Some("integer") ->
      validation_primitive_integer.validate_data(value, schema, ctx)
    Some("string") ->
      validation_primitive_string.validate_data(value, schema, ctx)
    Some("unknown") -> validation_meta_unknown.validate_data(value, schema, ctx)
    Some("array") -> validation_field.validate_array_data(value, schema, ctx)
    Some(other_type) ->
      Error(errors.data_validation(
        context.path(ctx)
        <> ": unsupported parameter type '"
        <> other_type
        <> "'",
      ))
    None ->
      Error(errors.data_validation(
        context.path(ctx) <> ": parameter schema missing type field",
      ))
  }
}

/// Validates parameters schema definition
fn validate_parameters_schema(
  parameters: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Validate the full params schema
  let params_ctx = context.with_path(ctx, "parameters")
  params.validate_schema(parameters, params_ctx)
}

/// Validates message schema definition
fn validate_message_schema(
  def_name: String,
  message: Json,
) -> Result(Nil, ValidationError) {
  // Message must have schema field
  case json_helpers.get_field(message, "schema") {
    Some(schema_field) -> {
      // Schema must be a union type
      case json_helpers.get_string(schema_field, "type") {
        Some("union") -> Ok(Nil)
        Some(other_type) ->
          Error(errors.invalid_schema(
            def_name
            <> ": subscription message schema must be type 'union', got '"
            <> other_type
            <> "'",
          ))
        None ->
          Error(errors.invalid_schema(
            def_name <> ": subscription message schema missing type field",
          ))
      }
    }
    None ->
      Error(errors.invalid_schema(
        def_name <> ": subscription message missing schema field",
      ))
  }
}
+270
src/validation/primitive/blob.gleam
+270
src/validation/primitive/blob.gleam
···
1
+
// Blob type validator
2
+
// Blobs are binary objects with MIME types and size constraints
3
+
4
+
import errors.{type ValidationError}
5
+
import gleam/dynamic.{type Dynamic}
6
+
import gleam/dynamic/decode
7
+
import gleam/int
8
+
import gleam/json.{type Json}
9
+
import gleam/list
10
+
import gleam/option.{None, Some}
11
+
import gleam/result
12
+
import gleam/string
13
+
import json_helpers
14
+
import validation/constraints
15
+
import validation/context.{type ValidationContext}
16
+
17
+
const allowed_fields = ["type", "accept", "maxSize", "description"]
18
+
19
+
/// Validates blob schema definition
20
+
pub fn validate_schema(
21
+
schema: Json,
22
+
ctx: ValidationContext,
23
+
) -> Result(Nil, ValidationError) {
24
+
let def_name = context.path(ctx)
25
+
26
+
// Validate allowed fields
27
+
let keys = json_helpers.get_keys(schema)
28
+
use _ <- result.try(constraints.validate_allowed_fields(
29
+
def_name,
30
+
keys,
31
+
allowed_fields,
32
+
"blob",
33
+
))
34
+
35
+
// Validate accept field if present
36
+
use _ <- result.try(case json_helpers.get_array(schema, "accept") {
37
+
Some(accept_array) -> validate_accept_field(def_name, accept_array)
38
+
None -> Ok(Nil)
39
+
})
40
+
41
+
// Validate maxSize is positive integer if present
42
+
case json_helpers.get_int(schema, "maxSize") {
43
+
Some(max_size) ->
44
+
case max_size > 0 {
45
+
True -> Ok(Nil)
46
+
False ->
47
+
Error(errors.invalid_schema(
48
+
def_name <> ": blob maxSize must be greater than 0",
49
+
))
50
+
}
51
+
None -> Ok(Nil)
52
+
}
53
+
}
54
+
55
+
/// Validates blob data against schema
56
+
pub fn validate_data(
57
+
data: Json,
58
+
schema: Json,
59
+
ctx: ValidationContext,
60
+
) -> Result(Nil, ValidationError) {
61
+
let def_name = context.path(ctx)
62
+
63
+
// Data must be an object
64
+
case json_helpers.is_object(data) {
65
+
False -> {
66
+
Error(errors.data_validation(def_name <> ": expected blob object"))
67
+
}
68
+
True -> {
69
+
// Validate required mimeType field
70
+
use mime_type <- result.try(
71
+
case json_helpers.get_string(data, "mimeType") {
72
+
Some(mt) -> Ok(mt)
73
+
None ->
74
+
Error(errors.data_validation(
75
+
def_name <> ": blob missing required 'mimeType' field",
76
+
))
77
+
},
78
+
)
79
+
80
+
// Validate required size field
81
+
use size <- result.try(case json_helpers.get_int(data, "size") {
82
+
Some(s) -> Ok(s)
83
+
None ->
84
+
Error(errors.data_validation(
85
+
def_name <> ": blob missing or invalid 'size' field",
86
+
))
87
+
})
88
+
89
+
// Validate against accept constraint if present
90
+
use _ <- result.try(case json_helpers.get_array(schema, "accept") {
91
+
Some(accept_array) -> {
92
+
validate_mime_type_against_accept(def_name, mime_type, accept_array)
93
+
}
94
+
None -> Ok(Nil)
95
+
})
96
+
97
+
// Validate against maxSize constraint if present
98
+
case json_helpers.get_int(schema, "maxSize") {
99
+
Some(max_size) ->
100
+
case size <= max_size {
101
+
True -> Ok(Nil)
102
+
False ->
103
+
Error(errors.data_validation(
104
+
def_name
105
+
<> ": blob size "
106
+
<> int.to_string(size)
107
+
<> " exceeds maxSize "
108
+
<> int.to_string(max_size),
109
+
))
110
+
}
111
+
None -> Ok(Nil)
112
+
}
113
+
}
114
+
}
115
+
}
116
+
117
+
/// Validates accept field array
118
+
fn validate_accept_field(
119
+
def_name: String,
120
+
accept_array: List(Dynamic),
121
+
) -> Result(Nil, ValidationError) {
122
+
list.index_fold(accept_array, Ok(Nil), fn(acc, item, i) {
123
+
use _ <- result.try(acc)
124
+
case decode.run(item, decode.string) {
125
+
Ok(mime_type) -> validate_mime_type_pattern(def_name, mime_type, i)
126
+
Error(_) ->
127
+
Error(errors.invalid_schema(
128
+
def_name
129
+
<> ": blob accept["
130
+
<> int.to_string(i)
131
+
<> "] must be a string",
132
+
))
133
+
}
134
+
})
135
+
}
136
+
137
+
/// Validates MIME type pattern syntax
138
+
fn validate_mime_type_pattern(
139
+
def_name: String,
140
+
mime_type: String,
141
+
_index: Int,
142
+
) -> Result(Nil, ValidationError) {
143
+
case string.is_empty(mime_type) {
144
+
True ->
145
+
Error(errors.invalid_schema(
146
+
def_name <> ": blob MIME type cannot be empty",
147
+
))
148
+
False -> {
149
+
// Allow */*
150
+
case mime_type {
151
+
"*/*" -> Ok(Nil)
152
+
_ -> {
153
+
// Must contain exactly one /
154
+
case string.contains(mime_type, "/") {
155
+
False ->
156
+
Error(errors.invalid_schema(
157
+
def_name
158
+
<> ": blob MIME type '"
159
+
<> mime_type
160
+
<> "' must contain a '/' character",
161
+
))
162
+
True -> {
163
+
let parts = string.split(mime_type, "/")
164
+
case parts {
165
+
[type_part, subtype_part] -> {
166
+
// Validate * usage
167
+
use _ <- result.try(validate_wildcard(
168
+
def_name,
169
+
type_part,
170
+
"type",
171
+
mime_type,
172
+
))
173
+
validate_wildcard(
174
+
def_name,
175
+
subtype_part,
176
+
"subtype",
177
+
mime_type,
178
+
)
179
+
}
180
+
_ ->
181
+
Error(errors.invalid_schema(
182
+
def_name
183
+
<> ": blob MIME type '"
184
+
<> mime_type
185
+
<> "' must have exactly one '/' character",
186
+
))
187
+
}
188
+
}
189
+
}
190
+
}
191
+
}
192
+
}
193
+
}
194
+
}
195
+
196
+
/// Validates wildcard usage in MIME type parts
197
+
fn validate_wildcard(
198
+
def_name: String,
199
+
part: String,
200
+
part_name: String,
201
+
full_mime_type: String,
202
+
) -> Result(Nil, ValidationError) {
203
+
case string.contains(part, "*") {
204
+
True ->
205
+
case part {
206
+
"*" -> Ok(Nil)
207
+
_ ->
208
+
Error(errors.invalid_schema(
209
+
def_name
210
+
<> ": blob MIME type '"
211
+
<> full_mime_type
212
+
<> "' can only use '*' as a complete wildcard for "
213
+
<> part_name,
214
+
))
215
+
}
216
+
False -> Ok(Nil)
217
+
}
218
+
}
219
+
220
+
/// Checks a concrete blob mimeType against the schema's `accept` patterns,
/// producing a data-validation error that lists the allowed patterns when
/// nothing matches.
fn validate_mime_type_against_accept(
  def_name: String,
  mime_type: String,
  accept_array: List(Dynamic),
) -> Result(Nil, ValidationError) {
  // Non-string entries are silently dropped here; the schema validator is
  // responsible for reporting them.
  let accept_patterns =
    list.filter_map(accept_array, fn(item) { decode.run(item, decode.string) })

  let is_accepted =
    list.any(accept_patterns, fn(pattern) {
      mime_type_matches_pattern(mime_type, pattern)
    })

  case is_accepted {
    True -> Ok(Nil)
    False ->
      Error(errors.data_validation(
        def_name
        <> ": blob mimeType '"
        <> mime_type
        <> "' not accepted. Allowed: "
        <> string.join(accept_patterns, ", "),
      ))
  }
}
246
+
247
+
/// Reports whether `mime_type` matches an accept `pattern`, where the
/// pattern's type and/or subtype may each be the wildcard "*".
fn mime_type_matches_pattern(mime_type: String, pattern: String) -> Bool {
  case pattern {
    // The universal wildcard matches everything.
    "*/*" -> True
    _ ->
      case string.split(mime_type, "/"), string.split(pattern, "/") {
        [value_type, value_subtype], [pattern_type, pattern_subtype] -> {
          // A pattern component matches when it is "*" or equal to the value.
          let component_matches = fn(value, pattern_part) {
            pattern_part == "*" || value == pattern_part
          }
          component_matches(value_type, pattern_type)
          && component_matches(value_subtype, pattern_subtype)
        }
        // Either side malformed (not exactly type/subtype): no match.
        _, _ -> False
      }
  }
}
+86
src/validation/primitive/boolean.gleam
+86
src/validation/primitive/boolean.gleam
···
1
+
// Boolean type validator

import errors.{type ValidationError}
import gleam/json.{type Json}
import gleam/option.{None, Some}
import gleam/result
import json_helpers
import validation/constraints
import validation/context.{type ValidationContext}

// The only keys permitted on a boolean schema definition.
const allowed_fields = ["type", "const", "default", "description"]

/// Validates boolean schema definition
/// Checks that only allowed fields are present and that `const` and
/// `default` are not both set on the same schema.
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // The context path names this definition in error messages.
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "boolean",
  ))

  // Validate const/default exclusivity: a schema that pins a constant has
  // no use for a default value.
  let has_const = json_helpers.get_bool(schema, "const") != None
  let has_default = json_helpers.get_bool(schema, "default") != None

  constraints.validate_const_default_exclusivity(
    def_name,
    has_const,
    has_default,
    "boolean",
  )
}

/// Validates boolean data against schema
/// The value must be a JSON boolean and, when the schema sets `const`,
/// must equal that constant.
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Check data is a boolean
  case json_helpers.is_bool(data) {
    False ->
      Error(errors.data_validation(
        def_name <> ": expected boolean, got other type",
      ))
    True -> {
      // Extract boolean value by serialising the Json node: a JSON boolean
      // renders as exactly "true" or "false".
      let json_str = json.to_string(data)
      let is_true = json_str == "true"
      let is_false = json_str == "false"

      // Defensive check: with is_bool already True this branch should be
      // unreachable — NOTE(review): presumably kept as a safety net.
      case is_true || is_false {
        False ->
          Error(errors.data_validation(
            def_name <> ": invalid boolean representation",
          ))
        True -> {
          let value = is_true

          // Validate const constraint
          case json_helpers.get_bool(schema, "const") {
            Some(const_val) if const_val != value ->
              Error(errors.data_validation(
                def_name
                <> ": must be constant value "
                <> case const_val {
                  True -> "true"
                  False -> "false"
                },
              ))
            _ -> Ok(Nil)
          }
        }
      }
    }
  }
}
+134
src/validation/primitive/bytes.gleam
+134
src/validation/primitive/bytes.gleam
···
1
+
// Bytes type validator
// Bytes are base64-encoded strings

import errors.{type ValidationError}
import gleam/bit_array
import gleam/json.{type Json}
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import gleam/string
import json_helpers
import validation/constraints
import validation/context.{type ValidationContext}

// Keys permitted on a bytes schema definition. Length bounds apply to the
// decoded byte payload, not to the base64 text.
const allowed_fields = ["type", "minLength", "maxLength", "description"]

/// Validates bytes schema definition
/// Rejects unknown fields, negative length bounds, and min > max.
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "bytes",
  ))

  // Validate length constraints
  let min_length = json_helpers.get_int(schema, "minLength")
  let max_length = json_helpers.get_int(schema, "maxLength")

  // Check for negative values
  use _ <- result.try(case min_length {
    Some(min) if min < 0 ->
      Error(errors.invalid_schema(
        def_name <> ": bytes schema minLength below zero",
      ))
    _ -> Ok(Nil)
  })

  use _ <- result.try(case max_length {
    Some(max) if max < 0 ->
      Error(errors.invalid_schema(
        def_name <> ": bytes schema maxLength below zero",
      ))
    _ -> Ok(Nil)
  })

  // Finally ensure minLength <= maxLength when both are present.
  constraints.validate_length_constraint_consistency(
    def_name,
    min_length,
    max_length,
    "bytes",
  )
}

/// Validates bytes data against schema
/// Expects data in ATProto format: {"$bytes": "base64-string"}
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Check data is an object
  case json_helpers.is_object(data) {
    False -> Error(errors.data_validation(def_name <> ": expecting bytes"))
    True -> {
      // Get all keys from the object
      let keys = json_helpers.get_keys(data)

      // Must have exactly one field: extra siblings next to $bytes are
      // rejected outright.
      use _ <- result.try(case list.length(keys) {
        1 -> Ok(Nil)
        _ ->
          Error(errors.data_validation(
            def_name <> ": $bytes objects must have a single field",
          ))
      })

      // That field must be "$bytes" with a string value
      case json_helpers.get_string(data, "$bytes") {
        None ->
          Error(errors.data_validation(
            def_name <> ": $bytes field missing or not a string",
          ))
        Some(base64_str) -> {
          // Decode the base64 string. NOTE(review): bit_array.base64_decode
          // handles standard (padded) base64 — if ATProto senders use the
          // unpadded variant, confirm decoding succeeds on real inputs.
          case bit_array.base64_decode(base64_str) {
            Error(_) ->
              Error(errors.data_validation(
                def_name <> ": decoding $bytes value: invalid base64 encoding",
              ))
            Ok(decoded_bytes) -> {
              // Validate length of the *decoded* bytes against the schema.
              let byte_length = bit_array.byte_size(decoded_bytes)
              let min_length = json_helpers.get_int(schema, "minLength")
              let max_length = json_helpers.get_int(schema, "maxLength")

              // Check length constraints
              use _ <- result.try(case min_length {
                Some(min) if byte_length < min ->
                  Error(errors.data_validation(
                    def_name
                    <> ": bytes size out of bounds: "
                    <> string.inspect(byte_length),
                  ))
                _ -> Ok(Nil)
              })

              use _ <- result.try(case max_length {
                Some(max) if byte_length > max ->
                  Error(errors.data_validation(
                    def_name
                    <> ": bytes size out of bounds: "
                    <> string.inspect(byte_length),
                  ))
                _ -> Ok(Nil)
              })

              Ok(Nil)
            }
          }
        }
      }
    }
  }
}
+63
src/validation/primitive/cid_link.gleam
+63
src/validation/primitive/cid_link.gleam
···
1
+
// CID Link type validator
// CID links are IPFS content identifiers

import errors.{type ValidationError}
import gleam/json.{type Json}
import gleam/option
import json_helpers
import validation/constraints
import validation/context.{type ValidationContext}
import validation/formats

// A cid-link schema may only carry its type tag and a description.
const allowed_fields = ["type", "description"]

/// Validates a cid-link schema definition: rejects any keys outside the
/// allowed set.
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  constraints.validate_allowed_fields(
    context.path(ctx),
    json_helpers.get_keys(schema),
    allowed_fields,
    "cid-link",
  )
}

/// Validates cid-link data: expects an object of the shape
/// {"$link": "<cid>"} whose CID string passes the format check.
pub fn validate_data(
  data: Json,
  _schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  case json_helpers.is_object(data) {
    False ->
      Error(errors.data_validation(def_name <> ": expected CID link object"))
    True ->
      // The single payload field must be "$link" holding a string.
      case json_helpers.get_string(data, "$link") {
        option.None ->
          Error(errors.data_validation(
            def_name <> ": CID link must have $link field",
          ))
        option.Some(link_value) ->
          case formats.is_valid_cid(link_value) {
            True -> Ok(Nil)
            False ->
              Error(errors.data_validation(
                def_name <> ": invalid CID format in $link",
              ))
          }
      }
  }
}
+153
src/validation/primitive/integer.gleam
+153
src/validation/primitive/integer.gleam
···
1
+
// Integer type validator

import errors.{type ValidationError}
import gleam/dynamic/decode
import gleam/int
import gleam/json.{type Json}
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import json_helpers
import validation/constraints
import validation/context.{type ValidationContext}

// Keys permitted on an integer schema definition.
const allowed_fields = [
  "type", "minimum", "maximum", "enum", "const", "default", "description",
]

/// Validates integer schema definition
/// Checks allowed fields, minimum/maximum consistency, that every enum
/// entry is an integer, and const/default exclusivity.
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "integer",
  ))

  // Extract min/max constraints
  let minimum = json_helpers.get_int(schema, "minimum")
  let maximum = json_helpers.get_int(schema, "maximum")

  // Validate constraint consistency (minimum <= maximum when both set)
  use _ <- result.try(constraints.validate_integer_constraint_consistency(
    def_name,
    minimum,
    maximum,
  ))

  // Validate enum is array of integers if present; try_fold stops at the
  // first non-integer entry.
  use _ <- result.try(case json_helpers.get_array(schema, "enum") {
    Some(enum_array) -> {
      list.try_fold(enum_array, Nil, fn(_, item) {
        case decode.run(item, decode.int) {
          Ok(_) -> Ok(Nil)
          Error(_) ->
            Error(errors.invalid_schema(
              def_name <> ": enum values must be integers",
            ))
        }
      })
    }
    None -> Ok(Nil)
  })

  // Validate const/default exclusivity
  let has_const = json_helpers.get_int(schema, "const") != None
  let has_default = json_helpers.get_int(schema, "default") != None

  constraints.validate_const_default_exclusivity(
    def_name,
    has_const,
    has_default,
    "integer",
  )
}

/// Validates integer data against schema
/// Checks, in order: the value is an integer; the `const` constraint;
/// otherwise the `enum` membership and the minimum/maximum range.
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Check data is an integer
  case json_helpers.is_int(data) {
    False ->
      Error(errors.data_validation(
        def_name <> ": expected integer, got other type",
      ))
    True -> {
      // Extract integer value: a JSON integer serialises to its decimal
      // text, which int.parse reads back.
      let json_str = json.to_string(data)
      case int.parse(json_str) {
        Error(_) ->
          Error(errors.data_validation(
            def_name <> ": failed to parse integer value",
          ))
        Ok(value) -> {
          // Validate const constraint first (most restrictive).
          // NOTE(review): when const is present and matches, enum and
          // range checks are skipped entirely — presumably intentional,
          // since a valid const value defines the only legal value.
          case json_helpers.get_int(schema, "const") {
            Some(const_val) if const_val != value ->
              Error(errors.data_validation(
                def_name
                <> ": must be constant value "
                <> int.to_string(const_val)
                <> ", found "
                <> int.to_string(value),
              ))
            Some(_) -> Ok(Nil)
            None -> {
              // Validate enum constraint
              use _ <- result.try(case json_helpers.get_array(schema, "enum") {
                Some(enum_array) -> {
                  let enum_ints =
                    list.filter_map(enum_array, fn(item) {
                      decode.run(item, decode.int)
                    })

                  validate_integer_enum(value, enum_ints, def_name)
                }
                None -> Ok(Nil)
              })

              // Validate range constraints
              let minimum = json_helpers.get_int(schema, "minimum")
              let maximum = json_helpers.get_int(schema, "maximum")

              constraints.validate_integer_range(
                def_name,
                value,
                minimum,
                maximum,
              )
            }
          }
        }
      }
    }
  }
}

/// Helper to validate integer enum
/// Delegates to the shared enum constraint with integer rendering and
/// plain equality.
fn validate_integer_enum(
  value: Int,
  enum_values: List(Int),
  def_name: String,
) -> Result(Nil, ValidationError) {
  constraints.validate_enum_constraint(
    def_name,
    value,
    enum_values,
    "integer",
    int.to_string,
    fn(a, b) { a == b },
  )
}
+39
src/validation/primitive/null.gleam
+39
src/validation/primitive/null.gleam
···
1
+
// Null type validator

import errors.{type ValidationError}
import gleam/json.{type Json}
import json_helpers
import validation/constraints
import validation/context.{type ValidationContext}

// A null schema may only carry its type tag and a description.
const allowed_fields = ["type", "description"]

/// Validates a null schema definition: only the allowed keys may appear.
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  constraints.validate_allowed_fields(
    context.path(ctx),
    json_helpers.get_keys(schema),
    allowed_fields,
    "null",
  )
}

/// Validates null data: the value must be JSON null, nothing else.
pub fn validate_data(
  data: Json,
  _schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  case json_helpers.is_null(data) {
    False ->
      Error(errors.data_validation(
        def_name <> ": expected null, got other type",
      ))
    True -> Ok(Nil)
  }
}
+297
src/validation/primitive/string.gleam
+297
src/validation/primitive/string.gleam
···
1
+
// String type validator
2
+
3
+
import errors.{type ValidationError}
4
+
import gleam/dynamic/decode
5
+
import gleam/json.{type Json}
6
+
import gleam/list
7
+
import gleam/option.{type Option, None, Some}
8
+
import gleam/result
9
+
import gleam/string
10
+
import json_helpers
11
+
import types
12
+
import validation/constraints
13
+
import validation/context.{type ValidationContext}
14
+
import validation/formats
15
+
16
+
const allowed_fields = [
17
+
"type", "format", "minLength", "maxLength", "minGraphemes", "maxGraphemes",
18
+
"enum", "knownValues", "const", "default", "description",
19
+
]
20
+
21
+
/// Validates string schema definition
/// Checks, in sequence: allowed fields, a recognised `format` name,
/// non-negative and consistent byte-length and grapheme bounds, that
/// `enum` and `knownValues` entries are strings, and const/default
/// exclusivity. The first failure short-circuits the chain.
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "string",
  ))

  // Validate format if present: the name must map to a known StringFormat.
  case json_helpers.get_string(schema, "format") {
    Some(format_str) ->
      case types.string_to_format(format_str) {
        Ok(_format) -> Ok(Nil)
        Error(_) ->
          Error(errors.invalid_schema(
            def_name
            <> ": unknown format '"
            <> format_str
            <> "'. Valid formats: datetime, uri, at-uri, did, handle, at-identifier, nsid, cid, language, tid, record-key",
          ))
      }
    None -> Ok(Nil)
  }
  |> result.try(fn(_) {
    // Extract length constraints. minLength/maxLength are UTF-8 byte
    // bounds; minGraphemes/maxGraphemes count grapheme clusters.
    let min_length = json_helpers.get_int(schema, "minLength")
    let max_length = json_helpers.get_int(schema, "maxLength")
    let min_graphemes = json_helpers.get_int(schema, "minGraphemes")
    let max_graphemes = json_helpers.get_int(schema, "maxGraphemes")

    // Check for negative values — each bound is rejected individually so
    // the error names the offending field.
    use _ <- result.try(case min_length {
      Some(n) if n < 0 ->
        Error(errors.invalid_schema(
          def_name <> ": string schema minLength below zero",
        ))
      _ -> Ok(Nil)
    })

    use _ <- result.try(case max_length {
      Some(n) if n < 0 ->
        Error(errors.invalid_schema(
          def_name <> ": string schema maxLength below zero",
        ))
      _ -> Ok(Nil)
    })

    use _ <- result.try(case min_graphemes {
      Some(n) if n < 0 ->
        Error(errors.invalid_schema(
          def_name <> ": string schema minGraphemes below zero",
        ))
      _ -> Ok(Nil)
    })

    use _ <- result.try(case max_graphemes {
      Some(n) if n < 0 ->
        Error(errors.invalid_schema(
          def_name <> ": string schema maxGraphemes below zero",
        ))
      _ -> Ok(Nil)
    })

    // Validate byte length constraints (min <= max)
    use _ <- result.try(constraints.validate_length_constraint_consistency(
      def_name,
      min_length,
      max_length,
      "string",
    ))

    // Validate grapheme constraints (min <= max)
    constraints.validate_length_constraint_consistency(
      def_name,
      min_graphemes,
      max_graphemes,
      "string (graphemes)",
    )
  })
  |> result.try(fn(_) {
    // Validate enum is array of strings if present
    case json_helpers.get_array(schema, "enum") {
      Some(enum_array) -> {
        // Check each item is a string; try_fold stops at the first failure.
        list.try_fold(enum_array, Nil, fn(_, item) {
          case decode.run(item, decode.string) {
            Ok(_) -> Ok(Nil)
            Error(_) ->
              Error(errors.invalid_schema(
                def_name <> ": enum values must be strings",
              ))
          }
        })
      }
      None -> Ok(Nil)
    }
  })
  |> result.try(fn(_) {
    // Validate knownValues is array of strings if present
    case json_helpers.get_array(schema, "knownValues") {
      Some(known_array) -> {
        list.try_fold(known_array, Nil, fn(_, item) {
          case decode.run(item, decode.string) {
            Ok(_) -> Ok(Nil)
            Error(_) ->
              Error(errors.invalid_schema(
                def_name <> ": knownValues must be strings",
              ))
          }
        })
      }
      None -> Ok(Nil)
    }
  })
  |> result.try(fn(_) {
    // Validate const/default exclusivity
    let has_const = json_helpers.get_string(schema, "const") != option.None
    let has_default = json_helpers.get_string(schema, "default") != option.None

    constraints.validate_const_default_exclusivity(
      def_name,
      has_const,
      has_default,
      "string",
    )
  })
}
156
+
157
+
/// Validates string data against schema
/// Checks, in order: the value is a JSON string, byte-length bounds,
/// grapheme bounds, the named `format` (if any), and `enum` membership.
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Check data is a string
  case json_helpers.is_string(data) {
    False ->
      Error(errors.data_validation(
        def_name <> ": expected string, got other type",
      ))
    True -> {
      // Recover the raw string value by round-tripping through the JSON
      // parser. The previous approach stripped the surrounding quotes from
      // json.to_string(data) by hand, which left JSON escape sequences
      // (\" \\ \n \uXXXX) in the value and skewed the length, enum, const
      // and format checks for any string containing them.
      use value <- result.try(
        json.parse(json.to_string(data), decode.string)
        |> result.map_error(fn(_) {
          // Unreachable when is_string held, but surfaced explicitly
          // rather than silently validating a mangled value.
          errors.data_validation(
            def_name <> ": failed to decode string value",
          )
        }),
      )

      // Validate byte-length constraints
      let min_length = json_helpers.get_int(schema, "minLength")
      let max_length = json_helpers.get_int(schema, "maxLength")
      use _ <- result.try(validate_string_length(
        value,
        min_length,
        max_length,
        def_name,
      ))

      // Validate grapheme constraints
      let min_graphemes = json_helpers.get_int(schema, "minGraphemes")
      let max_graphemes = json_helpers.get_int(schema, "maxGraphemes")
      use _ <- result.try(validate_grapheme_length(
        value,
        min_graphemes,
        max_graphemes,
        def_name,
      ))

      // Validate format if specified. Unknown format names are ignored
      // here because schema validation already rejects them.
      use _ <- result.try(case json_helpers.get_string(schema, "format") {
        Some(format_str) ->
          case types.string_to_format(format_str) {
            Ok(format) -> validate_string_format(value, format, def_name)
            Error(_) -> Ok(Nil)
          }
        None -> Ok(Nil)
      })

      // Validate enum if specified
      case json_helpers.get_array(schema, "enum") {
        Some(enum_array) -> {
          // Convert dynamics to strings; non-string entries were already
          // rejected by schema validation.
          let enum_strings =
            list.filter_map(enum_array, fn(item) {
              decode.run(item, decode.string)
            })

          validate_string_enum(value, enum_strings, def_name)
        }
        None -> Ok(Nil)
      }
    }
  }
}
229
+
230
+
/// Enforces the schema's minLength/maxLength bounds on the string,
/// measured in UTF-8 bytes (the AT Protocol convention), not characters.
fn validate_string_length(
  value: String,
  min_length: Option(Int),
  max_length: Option(Int),
  def_name: String,
) -> Result(Nil, ValidationError) {
  constraints.validate_length_constraints(
    def_name,
    string.byte_size(value),
    min_length,
    max_length,
    "string",
  )
}
246
+
247
+
/// Enforces minGraphemes/maxGraphemes, counted in grapheme clusters so
/// combining characters and emoji each count as one visible character.
fn validate_grapheme_length(
  value: String,
  min_graphemes: Option(Int),
  max_graphemes: Option(Int),
  def_name: String,
) -> Result(Nil, ValidationError) {
  // string.length is documented to count grapheme clusters, matching the
  // previous to_graphemes |> list.length computation without materialising
  // the intermediate list.
  constraints.validate_length_constraints(
    def_name,
    string.length(value),
    min_graphemes,
    max_graphemes,
    "string (graphemes)",
  )
}
265
+
266
+
/// Checks the string against a named format (datetime, did, handle, ...),
/// naming the format in the error message on mismatch.
fn validate_string_format(
  value: String,
  format: types.StringFormat,
  def_name: String,
) -> Result(Nil, ValidationError) {
  case formats.validate_format(value, format) {
    True -> Ok(Nil)
    False ->
      Error(errors.data_validation(
        def_name
        <> ": string does not match format '"
        <> types.format_to_string(format)
        <> "'",
      ))
  }
}
282
+
283
+
/// Helper to validate string enum
/// Delegates to the shared enum constraint with identity rendering and
/// plain string equality.
fn validate_string_enum(
  value: String,
  enum_values: List(String),
  def_name: String,
) -> Result(Nil, ValidationError) {
  constraints.validate_enum_constraint(
    def_name,
    value,
    enum_values,
    "string",
    fn(s) { s },
    fn(a, b) { a == b },
  )
}
+241
src/validation/resolution.gleam
+241
src/validation/resolution.gleam
···
1
+
// Reference resolution utilities
2
+
3
+
import errors.{type ValidationError}
4
+
import gleam/dict.{type Dict}
5
+
import gleam/json.{type Json}
6
+
import gleam/list
7
+
import gleam/option.{type Option, None, Some}
8
+
import gleam/result
9
+
import gleam/set.{type Set}
10
+
import gleam/string
11
+
import json_helpers
12
+
import validation/context.{type ValidationContext}
13
+
14
+
/// Resolves a reference string to its target definition
/// Parses the reference relative to `current_lexicon_id`, looks up the
/// referenced lexicon in the context, then navigates to the named entry
/// in its `defs` object. Returns `Ok(Some(schema))` on success; a missing
/// lexicon or definition is an invalid-schema error.
pub fn resolve_reference(
  reference: String,
  ctx: ValidationContext,
  current_lexicon_id: String,
) -> Result(Option(Json), ValidationError) {
  // Update context with current lexicon so relative refs ("#name") parse
  // against the right lexicon id.
  let ctx = context.with_current_lexicon(ctx, current_lexicon_id)

  // Parse the reference into (lexicon id, definition name)
  case context.parse_reference(ctx, reference) {
    Ok(#(lex_id, def_name)) -> {
      // Get the lexicon
      case context.get_lexicon(ctx, lex_id) {
        Some(lexicon) -> {
          // Navigate to the specific definition in defs object
          case json_helpers.get_field(lexicon.defs, def_name) {
            Some(def_schema) -> Ok(Some(def_schema))
            None ->
              Error(errors.invalid_schema(
                "Definition '"
                <> def_name
                <> "' not found in lexicon '"
                <> lex_id
                <> "'",
              ))
          }
        }
        None ->
          Error(errors.invalid_schema(
            "Referenced lexicon not found: " <> lex_id,
          ))
      }
    }
    Error(e) -> Error(e)
  }
}
51
+
52
+
/// Validates that a reference exists and is accessible
/// First rejects references already being resolved (circularity), then
/// attempts a full resolution; any resolution failure is surfaced as-is.
pub fn validate_reference(
  reference: String,
  ctx: ValidationContext,
  current_lexicon_id: String,
  def_path: String,
) -> Result(Nil, ValidationError) {
  // Check for circular reference: the context tracks refs currently on
  // the resolution stack.
  case context.has_reference(ctx, reference) {
    True ->
      Error(errors.invalid_schema(
        def_path <> ": Circular reference detected: " <> reference,
      ))
    False -> {
      // Try to resolve the reference
      case resolve_reference(reference, ctx, current_lexicon_id) {
        Ok(Some(_)) -> Ok(Nil)
        // NOTE(review): resolve_reference never returns Ok(None) in the
        // visible code paths; this arm is defensive.
        Ok(None) ->
          Error(errors.invalid_schema(
            def_path <> ": Reference not found: " <> reference,
          ))
        Error(e) -> Error(e)
      }
    }
  }
}
78
+
79
+
/// Collects all references from a definition recursively
/// Traverses JSON structure looking for "ref" fields
/// Walks objects and arrays depth-first, accumulating every string value
/// found under a "ref" key into the given set.
fn collect_references_recursive(
  value: Json,
  references: Set(String),
) -> Set(String) {
  // Check if this is an object with a "ref" field; record it if so.
  let refs = case json_helpers.get_string(value, "ref") {
    Some(ref_str) -> set.insert(references, ref_str)
    None -> references
  }

  // If it's an object, recursively check all its values
  case json_helpers.json_to_dict(value) {
    Ok(dict_value) -> {
      dict.fold(dict_value, refs, fn(acc, _key, field_value) {
        case json_helpers.dynamic_to_json(field_value) {
          Ok(field_json) -> collect_references_recursive(field_json, acc)
          // Unconvertible values carry no refs; skip them silently.
          Error(_) -> acc
        }
      })
    }
    Error(_) -> {
      // Not an object: if it's an array, check each element
      case json_helpers.get_array_from_value(value) {
        Some(array_items) -> {
          list.fold(array_items, refs, fn(acc, item) {
            case json_helpers.dynamic_to_json(item) {
              Ok(item_json) -> collect_references_recursive(item_json, acc)
              Error(_) -> acc
            }
          })
        }
        // Scalars terminate the recursion.
        None -> refs
      }
    }
  }
}
117
+
118
+
/// Validates all references in a lexicon are resolvable
/// Collects every "ref" in the lexicon's defs and validates each; the
/// fold carries the first error forward so later refs are not re-checked.
pub fn validate_lexicon_references(
  lexicon_id: String,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  case context.get_lexicon(ctx, lexicon_id) {
    Some(lexicon) -> {
      // Collect all references from the lexicon
      let references = collect_references_recursive(lexicon.defs, set.new())

      // Validate each reference; once an Error is in the accumulator it is
      // passed through unchanged (set.fold has no early exit).
      set.fold(references, Ok(Nil), fn(acc, reference) {
        case acc {
          Error(e) -> Error(e)
          Ok(_) -> validate_reference(reference, ctx, lexicon_id, lexicon_id)
        }
      })
    }
    None ->
      Error(errors.lexicon_not_found(
        "Lexicon not found for validation: " <> lexicon_id,
      ))
  }
}
142
+
143
+
/// Validates completeness of all lexicons
/// Checks that every reference in every registered lexicon resolves,
/// stopping at the first broken one.
pub fn validate_lexicon_completeness(
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  ctx.lexicons
  |> dict.keys
  |> list.try_each(fn(lex_id) { validate_lexicon_references(lex_id, ctx) })
}
155
+
156
+
/// Detects circular dependencies in lexicon references
/// Builds a lexicon-level dependency graph, then runs a DFS from every
/// unvisited node, threading the visited/recursion-stack sets through the
/// fold so each node is explored at most once.
pub fn detect_circular_dependencies(
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Build dependency graph: lexicon id -> set of lexicon ids it refs
  let graph = build_dependency_graph(ctx)

  // Check for cycles using DFS
  let lexicon_ids = dict.keys(ctx.lexicons)
  let visited = set.new()
  let rec_stack = set.new()

  list.try_fold(lexicon_ids, #(visited, rec_stack), fn(state, node) {
    let #(visited, rec_stack) = state
    case set.contains(visited, node) {
      // Already explored from an earlier root: nothing new to find.
      True -> Ok(state)
      False -> {
        case has_cycle_dfs(node, graph, visited, rec_stack) {
          #(True, _v, _r) ->
            Error(errors.invalid_schema(
              "Circular dependency detected involving: " <> node,
            ))
          #(False, v, r) -> Ok(#(v, r))
        }
      }
    }
  })
  // Drop the final accumulator; only success/failure matters.
  |> result.map(fn(_) { Nil })
}
185
+
186
+
/// Build a dependency graph from lexicon references
/// Maps each lexicon id to the set of lexicon ids its refs point at; the
/// fragment after '#' is discarded since cycles are detected at lexicon
/// granularity.
fn build_dependency_graph(ctx: ValidationContext) -> Dict(String, Set(String)) {
  dict.fold(ctx.lexicons, dict.new(), fn(graph, lex_id, lexicon) {
    let refs = collect_references_recursive(lexicon.defs, set.new())
    // Extract just the lexicon IDs from references (before the #).
    // Local refs ("#name") split to an empty nsid and are skipped.
    // NOTE(review): a ref containing two or more '#' falls through to the
    // catch-all and is dropped — confirm such refs cannot occur upstream.
    let dep_lexicons =
      set.fold(refs, set.new(), fn(acc, reference) {
        case string.split(reference, "#") {
          [nsid, _] if nsid != "" -> set.insert(acc, nsid)
          [nsid] if nsid != "" -> set.insert(acc, nsid)
          _ -> acc
        }
      })
    dict.insert(graph, lex_id, dep_lexicons)
  })
}
202
+
203
+
/// Helper for cycle detection using DFS
/// Standard white/grey colouring: `visited` is every node ever entered,
/// `rec_stack` is the set of nodes on the current DFS path. A neighbor
/// that is already on the path means a back edge, i.e. a cycle. Returns
/// the cycle flag plus the updated sets so callers can continue the scan.
fn has_cycle_dfs(
  node: String,
  graph: Dict(String, Set(String)),
  visited: Set(String),
  rec_stack: Set(String),
) -> #(Bool, Set(String), Set(String)) {
  // Mark the node as entered and push it onto the current path.
  let visited = set.insert(visited, node)
  let rec_stack = set.insert(rec_stack, node)

  // Get neighbors; a node absent from the graph simply has none.
  let neighbors = case dict.get(graph, node) {
    Ok(deps) -> deps
    Error(_) -> set.new()
  }

  // Check each neighbor, threading the sets through the fold. Once a
  // cycle is found the state is passed through unchanged.
  let result =
    set.fold(neighbors, #(False, visited, rec_stack), fn(state, neighbor) {
      let #(has_cycle, v, r) = state
      case has_cycle {
        True -> state
        False -> {
          case set.contains(v, neighbor) {
            // Unvisited: recurse deeper.
            False -> has_cycle_dfs(neighbor, graph, v, r)
            True ->
              // Visited AND on the current path => back edge => cycle.
              case set.contains(r, neighbor) {
                True -> #(True, v, r)
                False -> state
              }
          }
        }
      }
    })

  // Remove this node from the recursion stack on the way back up.
  let #(has_cycle, v, r) = result
  #(has_cycle, v, set.delete(r, node))
}
+179
test/array_validator_test.gleam
+179
test/array_validator_test.gleam
···
1
+
import gleam/json
2
+
import gleeunit
3
+
import gleeunit/should
4
+
import validation/context
5
+
import validation/field
6
+
7
+
/// Test entry point: gleeunit discovers and runs every *_test function.
pub fn main() {
  gleeunit.main()
}
10
+
11
+
// Test valid array schema with string items
// A well-formed array schema (items + consistent length bounds) must pass.
pub fn valid_string_array_schema_test() {
  let schema =
    json.object([
      #("type", json.string("array")),
      #("items", json.object([#("type", json.string("string"))])),
      #("minLength", json.int(1)),
      #("maxLength", json.int(10)),
    ])

  // An empty context suffices: no cross-lexicon refs are involved.
  let assert Ok(ctx) = context.builder() |> context.build
  let result = field.validate_array_schema(schema, ctx)
  result |> should.be_ok
}
25
+
26
+
// Test array schema with object items
// The items schema may itself be a composite (object) definition.
pub fn array_with_object_items_test() {
  let schema =
    json.object([
      #("type", json.string("array")),
      #(
        "items",
        json.object([
          #("type", json.string("object")),
          #(
            "properties",
            json.object([
              #("name", json.object([#("type", json.string("string"))])),
            ]),
          ),
        ]),
      ),
    ])

  let assert Ok(ctx) = context.builder() |> context.build
  let result = field.validate_array_schema(schema, ctx)
  result |> should.be_ok
}
49
+
50
+
// Test array schema with nested array items
51
+
pub fn nested_array_schema_test() {
52
+
let schema =
53
+
json.object([
54
+
#("type", json.string("array")),
55
+
#(
56
+
"items",
57
+
json.object([
58
+
#("type", json.string("array")),
59
+
#("items", json.object([#("type", json.string("integer"))])),
60
+
]),
61
+
),
62
+
])
63
+
64
+
let assert Ok(ctx) = context.builder() |> context.build
65
+
let result = field.validate_array_schema(schema, ctx)
66
+
result |> should.be_ok
67
+
}
68
+
69
+
// Test array schema missing items field
70
+
pub fn missing_items_field_test() {
71
+
let schema =
72
+
json.object([
73
+
#("type", json.string("array")),
74
+
#("maxLength", json.int(10)),
75
+
])
76
+
77
+
let assert Ok(ctx) = context.builder() |> context.build
78
+
let result = field.validate_array_schema(schema, ctx)
79
+
result |> should.be_error
80
+
}
81
+
82
+
// Test array schema with invalid length constraints
83
+
pub fn invalid_length_constraints_test() {
84
+
let schema =
85
+
json.object([
86
+
#("type", json.string("array")),
87
+
#("items", json.object([#("type", json.string("string"))])),
88
+
#("minLength", json.int(10)),
89
+
#("maxLength", json.int(5)),
90
+
])
91
+
92
+
let assert Ok(ctx) = context.builder() |> context.build
93
+
let result = field.validate_array_schema(schema, ctx)
94
+
result |> should.be_error
95
+
}
96
+
97
+
// Test valid array data
98
+
pub fn valid_array_data_test() {
99
+
let schema =
100
+
json.object([
101
+
#("type", json.string("array")),
102
+
#("items", json.object([#("type", json.string("string"))])),
103
+
#("minLength", json.int(1)),
104
+
#("maxLength", json.int(5)),
105
+
])
106
+
107
+
let data =
108
+
json.array([json.string("hello"), json.string("world")], fn(x) { x })
109
+
110
+
let assert Ok(ctx) = context.builder() |> context.build
111
+
let result = field.validate_array_data(data, schema, ctx)
112
+
result |> should.be_ok
113
+
}
114
+
115
+
// Test array data below minLength
116
+
pub fn array_below_min_length_test() {
117
+
let schema =
118
+
json.object([
119
+
#("type", json.string("array")),
120
+
#("items", json.object([#("type", json.string("string"))])),
121
+
#("minLength", json.int(3)),
122
+
])
123
+
124
+
let data = json.array([json.string("hello")], fn(x) { x })
125
+
126
+
let assert Ok(ctx) = context.builder() |> context.build
127
+
let result = field.validate_array_data(data, schema, ctx)
128
+
result |> should.be_error
129
+
}
130
+
131
+
// Test array data above maxLength
132
+
pub fn array_above_max_length_test() {
133
+
let schema =
134
+
json.object([
135
+
#("type", json.string("array")),
136
+
#("items", json.object([#("type", json.string("string"))])),
137
+
#("maxLength", json.int(2)),
138
+
])
139
+
140
+
let data =
141
+
json.array([json.string("a"), json.string("b"), json.string("c")], fn(x) {
142
+
x
143
+
})
144
+
145
+
let assert Ok(ctx) = context.builder() |> context.build
146
+
let result = field.validate_array_data(data, schema, ctx)
147
+
result |> should.be_error
148
+
}
149
+
150
+
// Test array data with invalid item type
151
+
pub fn invalid_item_type_test() {
152
+
let schema =
153
+
json.object([
154
+
#("type", json.string("array")),
155
+
#("items", json.object([#("type", json.string("string"))])),
156
+
])
157
+
158
+
let data = json.array([json.string("hello"), json.int(42)], fn(x) { x })
159
+
160
+
let assert Ok(ctx) = context.builder() |> context.build
161
+
let result = field.validate_array_data(data, schema, ctx)
162
+
result |> should.be_error
163
+
}
164
+
165
+
// Test empty array with minLength
166
+
pub fn empty_array_with_min_length_test() {
167
+
let schema =
168
+
json.object([
169
+
#("type", json.string("array")),
170
+
#("items", json.object([#("type", json.string("string"))])),
171
+
#("minLength", json.int(1)),
172
+
])
173
+
174
+
let data = json.array([], fn(x) { x })
175
+
176
+
let assert Ok(ctx) = context.builder() |> context.build
177
+
let result = field.validate_array_data(data, schema, ctx)
178
+
result |> should.be_error
179
+
}
+160
test/blob_validator_test.gleam
+160
test/blob_validator_test.gleam
···
1
+
//// Tests for blob schema and data validation: accepted MIME type
//// patterns and maximum size limits.

import gleam/json
import gleeunit
import gleeunit/should
import validation/context
import validation/primitive/blob

pub fn main() {
  gleeunit.main()
}

// A blob schema with concrete and wildcard MIME types plus a size cap.
pub fn valid_blob_schema_test() {
  let schema =
    json.object([
      #("type", json.string("blob")),
      #(
        "accept",
        json.preprocessed_array([
          json.string("image/*"),
          json.string("video/mp4"),
        ]),
      ),
      #("maxSize", json.int(1_048_576)),
    ])

  let assert Ok(ctx) = context.builder() |> context.build
  let result = blob.validate_schema(schema, ctx)
  result |> should.be_ok
}

// The full wildcard "*/*" is a legal accept pattern.
pub fn wildcard_mime_type_test() {
  let schema =
    json.object([
      #("type", json.string("blob")),
      #("accept", json.preprocessed_array([json.string("*/*")])),
    ])

  let assert Ok(ctx) = context.builder() |> context.build
  let result = blob.validate_schema(schema, ctx)
  result |> should.be_ok
}

// A MIME pattern without a slash is invalid.
pub fn invalid_mime_type_no_slash_test() {
  let schema =
    json.object([
      #("type", json.string("blob")),
      #("accept", json.preprocessed_array([json.string("image")])),
    ])

  let assert Ok(ctx) = context.builder() |> context.build
  let result = blob.validate_schema(schema, ctx)
  result |> should.be_error
}

// Partial wildcards like "image/jpe*" are not allowed.
pub fn invalid_mime_type_partial_wildcard_test() {
  let schema =
    json.object([
      #("type", json.string("blob")),
      #("accept", json.preprocessed_array([json.string("image/jpe*")])),
    ])

  let assert Ok(ctx) = context.builder() |> context.build
  let result = blob.validate_schema(schema, ctx)
  result |> should.be_error
}

// A maxSize of zero is rejected.
pub fn zero_max_size_test() {
  let schema =
    json.object([
      #("type", json.string("blob")),
      #("maxSize", json.int(0)),
    ])

  let assert Ok(ctx) = context.builder() |> context.build
  let result = blob.validate_schema(schema, ctx)
  result |> should.be_error
}

// Blob data matching the accept pattern and under maxSize passes.
pub fn valid_blob_data_test() {
  let schema =
    json.object([
      #("type", json.string("blob")),
      #("accept", json.preprocessed_array([json.string("image/*")])),
      #("maxSize", json.int(1_000_000)),
    ])

  let data =
    json.object([
      #("mimeType", json.string("image/jpeg")),
      #("size", json.int(50_000)),
    ])

  let assert Ok(ctx) = context.builder() |> context.build
  let result = blob.validate_data(data, schema, ctx)
  result |> should.be_ok
}

// A MIME type outside the accept list is rejected.
pub fn unaccepted_mime_type_test() {
  let schema =
    json.object([
      #("type", json.string("blob")),
      #("accept", json.preprocessed_array([json.string("image/*")])),
    ])

  let data =
    json.object([
      #("mimeType", json.string("video/mp4")),
      #("size", json.int(50_000)),
    ])

  let assert Ok(ctx) = context.builder() |> context.build
  let result = blob.validate_data(data, schema, ctx)
  result |> should.be_error
}

// Data larger than maxSize is rejected.
pub fn exceeds_max_size_test() {
  let schema =
    json.object([
      #("type", json.string("blob")),
      #("maxSize", json.int(10_000)),
    ])

  let data =
    json.object([
      #("mimeType", json.string("image/jpeg")),
      #("size", json.int(50_000)),
    ])

  let assert Ok(ctx) = context.builder() |> context.build
  let result = blob.validate_data(data, schema, ctx)
  result |> should.be_error
}

// Blob data must carry a mimeType field.
pub fn missing_mime_type_test() {
  let schema = json.object([#("type", json.string("blob"))])

  let data = json.object([#("size", json.int(50_000))])

  let assert Ok(ctx) = context.builder() |> context.build
  let result = blob.validate_data(data, schema, ctx)
  result |> should.be_error
}

// Blob data must carry a size field.
pub fn missing_size_test() {
  let schema = json.object([#("type", json.string("blob"))])

  let data = json.object([#("mimeType", json.string("image/jpeg"))])

  let assert Ok(ctx) = context.builder() |> context.build
  let result = blob.validate_data(data, schema, ctx)
  result |> should.be_error
}
+215
test/bytes_validator_test.gleam
+215
test/bytes_validator_test.gleam
···
1
+
//// Tests for the bytes primitive: schema constraints and validation of
//// base64-encoded data ({"$bytes": "..."} envelopes).

import gleam/json
import gleeunit
import gleeunit/should
import validation/context
import validation/primitive/bytes

pub fn main() {
  gleeunit.main()
}

// ---------- schema validation ----------

// A bare bytes schema is valid.
pub fn valid_bytes_schema_basic_test() {
  let schema = json.object([#("type", json.string("bytes"))])

  let assert Ok(ctx) = context.build(context.builder())

  should.be_ok(bytes.validate_schema(schema, ctx))
}

// Consistent minLength/maxLength bounds are accepted.
pub fn valid_bytes_schema_with_min_max_test() {
  let schema =
    json.object([
      #("type", json.string("bytes")),
      #("minLength", json.int(10)),
      #("maxLength", json.int(20)),
    ])

  let assert Ok(ctx) = context.build(context.builder())

  should.be_ok(bytes.validate_schema(schema, ctx))
}

// A description field is permitted.
pub fn valid_bytes_schema_with_description_test() {
  let schema =
    json.object([
      #("type", json.string("bytes")),
      #("description", json.string("Binary data")),
    ])

  let assert Ok(ctx) = context.build(context.builder())

  should.be_ok(bytes.validate_schema(schema, ctx))
}

// Unknown fields on the schema are rejected.
pub fn invalid_bytes_schema_extra_fields_test() {
  let schema =
    json.object([
      #("type", json.string("bytes")),
      #("extraField", json.string("not allowed")),
    ])

  let assert Ok(ctx) = context.build(context.builder())

  should.be_error(bytes.validate_schema(schema, ctx))
}

// maxLength below minLength is contradictory.
pub fn invalid_bytes_schema_max_less_than_min_test() {
  let schema =
    json.object([
      #("type", json.string("bytes")),
      #("minLength", json.int(20)),
      #("maxLength", json.int(10)),
    ])

  let assert Ok(ctx) = context.build(context.builder())

  should.be_error(bytes.validate_schema(schema, ctx))
}

// A negative minLength is rejected.
pub fn invalid_bytes_schema_negative_min_test() {
  let schema =
    json.object([
      #("type", json.string("bytes")),
      #("minLength", json.int(-1)),
    ])

  let assert Ok(ctx) = context.build(context.builder())

  should.be_error(bytes.validate_schema(schema, ctx))
}

// A negative maxLength is rejected.
pub fn invalid_bytes_schema_negative_max_test() {
  let schema =
    json.object([
      #("type", json.string("bytes")),
      #("maxLength", json.int(-5)),
    ])

  let assert Ok(ctx) = context.build(context.builder())

  should.be_error(bytes.validate_schema(schema, ctx))
}

// ---------- data validation ----------

// A minimal {"$bytes": ...} envelope passes.
pub fn valid_bytes_data_basic_test() {
  let schema = json.object([#("type", json.string("bytes"))])
  // "123" in base64 is "MTIz"
  let data = json.object([#("$bytes", json.string("MTIz"))])

  let assert Ok(ctx) = context.build(context.builder())

  should.be_ok(bytes.validate_data(data, schema, ctx))
}

// Decoded length inside [minLength, maxLength] passes.
pub fn valid_bytes_data_with_length_constraints_test() {
  let schema =
    json.object([
      #("type", json.string("bytes")),
      #("minLength", json.int(10)),
      #("maxLength", json.int(20)),
    ])
  // Base64 string that decodes to exactly 16 bytes
  let data = json.object([#("$bytes", json.string("YXNkZmFzZGZhc2RmYXNkZg"))])

  let assert Ok(ctx) = context.build(context.builder())

  should.be_ok(bytes.validate_data(data, schema, ctx))
}

// A plain string is not a valid bytes envelope.
pub fn invalid_bytes_data_plain_string_test() {
  let schema = json.object([#("type", json.string("bytes"))])
  // Plain string instead of object with $bytes
  let data = json.string("green")

  let assert Ok(ctx) = context.build(context.builder())

  should.be_error(bytes.validate_data(data, schema, ctx))
}

// An empty object lacks the $bytes key.
pub fn invalid_bytes_data_empty_object_test() {
  let schema = json.object([#("type", json.string("bytes"))])
  // Empty object
  let data = json.object([])

  let assert Ok(ctx) = context.build(context.builder())

  should.be_error(bytes.validate_data(data, schema, ctx))
}

// The key must be "$bytes", not "bytes".
pub fn invalid_bytes_data_wrong_field_name_test() {
  let schema = json.object([#("type", json.string("bytes"))])
  // Wrong field name - should be "$bytes" not "bytes"
  let data = json.object([#("bytes", json.string("YXNkZmFzZGZhc2RmYXNkZg"))])

  let assert Ok(ctx) = context.build(context.builder())

  should.be_error(bytes.validate_data(data, schema, ctx))
}

// The envelope must contain exactly one field.
pub fn invalid_bytes_data_extra_fields_test() {
  let schema = json.object([#("type", json.string("bytes"))])
  // Object with extra fields - must have exactly one field
  let data =
    json.object([
      #("$bytes", json.string("MTIz")),
      #("other", json.string("blah")),
    ])

  let assert Ok(ctx) = context.build(context.builder())

  should.be_error(bytes.validate_data(data, schema, ctx))
}

// The $bytes value must be a string.
pub fn invalid_bytes_data_non_string_value_test() {
  let schema = json.object([#("type", json.string("bytes"))])
  // $bytes value is not a string
  let data =
    json.object([
      #("$bytes", json.preprocessed_array([json.int(1), json.int(2)])),
    ])

  let assert Ok(ctx) = context.build(context.builder())

  should.be_error(bytes.validate_data(data, schema, ctx))
}

// Malformed base64 is rejected.
pub fn invalid_bytes_data_invalid_base64_test() {
  let schema = json.object([#("type", json.string("bytes"))])
  // Invalid base64 string (contains invalid characters)
  let data = json.object([#("$bytes", json.string("not!valid@base64"))])

  let assert Ok(ctx) = context.build(context.builder())

  should.be_error(bytes.validate_data(data, schema, ctx))
}

// Decoded payload shorter than minLength is rejected.
pub fn invalid_bytes_data_too_short_test() {
  let schema =
    json.object([
      #("type", json.string("bytes")),
      #("minLength", json.int(10)),
    ])
  // "b25l" decodes to "one" which is only 3 bytes
  let data = json.object([#("$bytes", json.string("b25l"))])

  let assert Ok(ctx) = context.build(context.builder())

  should.be_error(bytes.validate_data(data, schema, ctx))
}

// Decoded payload longer than maxLength is rejected.
pub fn invalid_bytes_data_too_long_test() {
  let schema =
    json.object([
      #("type", json.string("bytes")),
      #("maxLength", json.int(5)),
    ])
  // "YXNkZmFzZGZhc2RmYXNkZg" decodes to "asdfasdfasdfasdf" which is 16 bytes
  let data = json.object([#("$bytes", json.string("YXNkZmFzZGZhc2RmYXNkZg"))])

  let assert Ok(ctx) = context.build(context.builder())

  should.be_error(bytes.validate_data(data, schema, ctx))
}
+203
test/end_to_end_test.gleam
+203
test/end_to_end_test.gleam
···
1
+
//// End-to-end tests for the public honk API: full lexicon validation,
//// record data validation, and the helper predicates.

import gleam/json
import gleeunit
import gleeunit/should
import honk

pub fn main() {
  gleeunit.main()
}

// A complete, well-formed record lexicon validates successfully.
pub fn validate_complete_lexicon_test() {
  // Create a complete lexicon for a blog post record
  let lexicon =
    json.object([
      #("lexicon", json.int(1)),
      #("id", json.string("app.bsky.feed.post")),
      #(
        "defs",
        json.object([
          #(
            "main",
            json.object([
              #("type", json.string("record")),
              #("key", json.string("tid")),
              #(
                "record",
                json.object([
                  #("type", json.string("object")),
                  #(
                    "required",
                    json.preprocessed_array([json.string("text")]),
                  ),
                  #(
                    "properties",
                    json.object([
                      #(
                        "text",
                        json.object([
                          #("type", json.string("string")),
                          #("maxLength", json.int(300)),
                          #("maxGraphemes", json.int(300)),
                        ]),
                      ),
                      #(
                        "createdAt",
                        json.object([
                          #("type", json.string("string")),
                          #("format", json.string("datetime")),
                        ]),
                      ),
                    ]),
                  ),
                ]),
              ),
            ]),
          ),
        ]),
      ),
    ])

  let result = honk.validate([lexicon])
  result |> should.be_ok
}

// A lexicon without the required top-level "id" is rejected.
pub fn validate_invalid_lexicon_missing_id_test() {
  let lexicon =
    json.object([
      #("lexicon", json.int(1)),
      #(
        "defs",
        json.object([
          #(
            "main",
            json.object([
              #("type", json.string("record")),
              #("key", json.string("tid")),
              #(
                "record",
                json.object([
                  #("type", json.string("object")),
                  #("properties", json.object([])),
                ]),
              ),
            ]),
          ),
        ]),
      ),
    ])

  let result = honk.validate([lexicon])
  result |> should.be_error
}

// Record data satisfying the schema's required fields passes.
pub fn validate_record_data_valid_test() {
  let lexicon =
    json.object([
      #("lexicon", json.int(1)),
      #("id", json.string("app.bsky.feed.post")),
      #(
        "defs",
        json.object([
          #(
            "main",
            json.object([
              #("type", json.string("record")),
              #("key", json.string("tid")),
              #(
                "record",
                json.object([
                  #("type", json.string("object")),
                  #(
                    "required",
                    json.preprocessed_array([json.string("text")]),
                  ),
                  #(
                    "properties",
                    json.object([
                      #(
                        "text",
                        json.object([
                          #("type", json.string("string")),
                          #("maxLength", json.int(300)),
                        ]),
                      ),
                    ]),
                  ),
                ]),
              ),
            ]),
          ),
        ]),
      ),
    ])

  let record_data = json.object([#("text", json.string("Hello, ATProtocol!"))])

  let result =
    honk.validate_record([lexicon], "app.bsky.feed.post", record_data)
  result |> should.be_ok
}

// Record data missing a required field is rejected.
pub fn validate_record_data_missing_required_test() {
  let lexicon =
    json.object([
      #("lexicon", json.int(1)),
      #("id", json.string("app.bsky.feed.post")),
      #(
        "defs",
        json.object([
          #(
            "main",
            json.object([
              #("type", json.string("record")),
              #("key", json.string("tid")),
              #(
                "record",
                json.object([
                  #("type", json.string("object")),
                  #(
                    "required",
                    json.preprocessed_array([json.string("text")]),
                  ),
                  #(
                    "properties",
                    json.object([
                      #(
                        "text",
                        json.object([
                          #("type", json.string("string")),
                          #("maxLength", json.int(300)),
                        ]),
                      ),
                    ]),
                  ),
                ]),
              ),
            ]),
          ),
        ]),
      ),
    ])

  let record_data =
    json.object([#("description", json.string("No text field"))])

  let result =
    honk.validate_record([lexicon], "app.bsky.feed.post", record_data)
  result |> should.be_error
}

// NSIDs need at least three dot-separated segments.
pub fn is_valid_nsid_test() {
  honk.is_valid_nsid("app.bsky.feed.post") |> should.be_true
  honk.is_valid_nsid("com.example.foo") |> should.be_true
  honk.is_valid_nsid("invalid") |> should.be_false
  honk.is_valid_nsid("") |> should.be_false
}

// String format helpers accept valid values and reject malformed ones.
pub fn validate_string_format_test() {
  honk.validate_string_format("2024-01-01T12:00:00Z", honk.DateTime)
  |> should.be_ok

  honk.validate_string_format("not a datetime", honk.DateTime)
  |> should.be_error

  honk.validate_string_format("https://example.com", honk.Uri)
  |> should.be_ok
}
+285
test/format_validator_test.gleam
+285
test/format_validator_test.gleam
···
1
+
//// Tests for ATProto string-format validators: datetime, handle, DID,
//// URI, AT-URI, TID, record-key, CID and language tags.

import gleam/string
import gleeunit
import gleeunit/should
import validation/formats

pub fn main() {
  gleeunit.main()
}

// ========== DATETIME TESTS ==========

pub fn datetime_valid_test() {
  formats.is_valid_rfc3339_datetime("2024-01-01T12:00:00Z") |> should.be_true
  formats.is_valid_rfc3339_datetime("2024-01-01T12:00:00+00:00")
  |> should.be_true
  formats.is_valid_rfc3339_datetime("2024-01-01T12:00:00.123Z")
  |> should.be_true
  formats.is_valid_rfc3339_datetime("2024-12-31T23:59:59-05:00")
  |> should.be_true
}

pub fn datetime_reject_negative_zero_timezone_test() {
  // Should reject -00:00 per ISO-8601 (must use +00:00)
  formats.is_valid_rfc3339_datetime("2024-01-01T12:00:00-00:00")
  |> should.be_false
}

pub fn datetime_max_length_test() {
  // 65 characters - should fail (max is 64)
  let long_datetime =
    "2024-01-01T12:00:00.12345678901234567890123456789012345678901234Z"
  formats.is_valid_rfc3339_datetime(long_datetime) |> should.be_false
}

pub fn datetime_invalid_date_test() {
  // February 30th doesn't exist - actual parsing should catch this
  formats.is_valid_rfc3339_datetime("2024-02-30T12:00:00Z") |> should.be_false
}

pub fn datetime_empty_string_test() {
  formats.is_valid_rfc3339_datetime("") |> should.be_false
}

// ========== HANDLE TESTS ==========

pub fn handle_valid_test() {
  formats.is_valid_handle("user.bsky.social") |> should.be_true
  formats.is_valid_handle("alice.example.com") |> should.be_true
  formats.is_valid_handle("test.co.uk") |> should.be_true
}

pub fn handle_reject_disallowed_tlds_test() {
  formats.is_valid_handle("user.local") |> should.be_false
  formats.is_valid_handle("server.arpa") |> should.be_false
  formats.is_valid_handle("example.invalid") |> should.be_false
  formats.is_valid_handle("app.localhost") |> should.be_false
  formats.is_valid_handle("service.internal") |> should.be_false
  formats.is_valid_handle("demo.example") |> should.be_false
  formats.is_valid_handle("site.onion") |> should.be_false
  formats.is_valid_handle("custom.alt") |> should.be_false
}

pub fn handle_max_length_test() {
  // Five 50-char labels joined by "." plus ".com" = 258 chars total,
  // over the 253-char maximum - should fail
  let segment = "a123456789b123456789c123456789d123456789e123456789"
  let long_handle =
    segment
    <> "."
    <> segment
    <> "."
    <> segment
    <> "."
    <> segment
    <> "."
    <> segment
    <> ".com"
  formats.is_valid_handle(long_handle) |> should.be_false
}

pub fn handle_requires_dot_test() {
  // Handle must have at least one dot (be a domain)
  formats.is_valid_handle("nodot") |> should.be_false
}

// ========== DID TESTS ==========

pub fn did_valid_test() {
  formats.is_valid_did("did:plc:z72i7hdynmk6r22z27h6tvur") |> should.be_true
  formats.is_valid_did("did:web:example.com") |> should.be_true
  formats.is_valid_did(
    "did:key:z6MkhaXgBZDvotDkL5257faiztiGiC2QtKLGpbnnEGta2doK",
  )
  |> should.be_true
}

pub fn did_max_length_test() {
  // Create a DID longer than 2048 chars - should fail
  let long_did = "did:example:" <> string.repeat("a", 2040)
  formats.is_valid_did(long_did) |> should.be_false
}

pub fn did_invalid_ending_test() {
  // DIDs should not end with %
  formats.is_valid_did("did:example:foo%") |> should.be_false
}

pub fn did_empty_test() {
  formats.is_valid_did("") |> should.be_false
}

// ========== URI TESTS ==========

pub fn uri_valid_test() {
  formats.is_valid_uri("https://example.com") |> should.be_true
  formats.is_valid_uri("http://example.com/path") |> should.be_true
  formats.is_valid_uri("ftp://files.example.com") |> should.be_true
}

pub fn uri_max_length_test() {
  // Create a URI longer than 8192 chars - should fail
  let long_uri = "https://example.com/" <> string.repeat("a", 8180)
  formats.is_valid_uri(long_uri) |> should.be_false
}

pub fn uri_lowercase_scheme_test() {
  // Scheme must be lowercase
  formats.is_valid_uri("HTTP://example.com") |> should.be_false
  formats.is_valid_uri("HTTPS://example.com") |> should.be_false
}

pub fn uri_empty_test() {
  formats.is_valid_uri("") |> should.be_false
}

// ========== AT-URI TESTS ==========

pub fn at_uri_valid_test() {
  formats.is_valid_at_uri("at://did:plc:z72i7hdynmk6r22z27h6tvur")
  |> should.be_true
  formats.is_valid_at_uri(
    "at://did:plc:z72i7hdynmk6r22z27h6tvur/app.bsky.feed.post",
  )
  |> should.be_true
  formats.is_valid_at_uri(
    "at://did:plc:z72i7hdynmk6r22z27h6tvur/app.bsky.feed.post/3jui7kd54zh2y",
  )
  |> should.be_true
  formats.is_valid_at_uri("at://user.bsky.social/app.bsky.feed.post")
  |> should.be_true
}

pub fn at_uri_max_length_test() {
  // Create an AT-URI longer than 8192 chars - should fail
  let long_path = string.repeat("a", 8180)
  let long_at_uri = "at://did:plc:test/" <> long_path
  formats.is_valid_at_uri(long_at_uri) |> should.be_false
}

pub fn at_uri_invalid_collection_test() {
  // Collection must be a valid NSID (needs 3 segments)
  formats.is_valid_at_uri("at://did:plc:z72i7hdynmk6r22z27h6tvur/invalid")
  |> should.be_false
}

pub fn at_uri_empty_test() {
  formats.is_valid_at_uri("") |> should.be_false
}

// ========== TID TESTS ==========

pub fn tid_valid_test() {
  formats.is_valid_tid("3jui7kd54zh2y") |> should.be_true
  formats.is_valid_tid("2zzzzzzzzzzzy") |> should.be_true
}

pub fn tid_invalid_first_char_test() {
  // First char must be [234567abcdefghij], not k-z
  formats.is_valid_tid("kzzzzzzzzzzzz") |> should.be_false
  formats.is_valid_tid("lzzzzzzzzzzzz") |> should.be_false
  formats.is_valid_tid("zzzzzzzzzzzzz") |> should.be_false
}

pub fn tid_wrong_length_test() {
  formats.is_valid_tid("3jui7kd54zh2") |> should.be_false
  formats.is_valid_tid("3jui7kd54zh2yy") |> should.be_false
}

// ========== RECORD-KEY TESTS ==========

pub fn record_key_valid_test() {
  formats.is_valid_record_key("3jui7kd54zh2y") |> should.be_true
  formats.is_valid_record_key("my-custom-key") |> should.be_true
  formats.is_valid_record_key("key_with_underscores") |> should.be_true
  formats.is_valid_record_key("key:with:colons") |> should.be_true
}

pub fn record_key_reject_dot_test() {
  formats.is_valid_record_key(".") |> should.be_false
}

pub fn record_key_reject_dotdot_test() {
  formats.is_valid_record_key("..") |> should.be_false
}

pub fn record_key_max_length_test() {
  // 513 characters - should fail (max is 512)
  let long_key = string.repeat("a", 513)
  formats.is_valid_record_key(long_key) |> should.be_false
}

pub fn record_key_empty_test() {
  formats.is_valid_record_key("") |> should.be_false
}

// ========== CID TESTS ==========

pub fn cid_valid_test() {
  // CIDv1 examples (base32, base58)
  formats.is_valid_cid(
    "bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi",
  )
  |> should.be_true
  formats.is_valid_cid(
    "bafkreigh2akiscaildcqabsyg3dfr6chu3fgpregiymsck7e7aqa4s52zy",
  )
  |> should.be_true
  formats.is_valid_cid("QmQg1v4o9xdT3Q1R8tNK3z9ZkRmg7FbQfZ1J2Z3K4M5N6P")
  |> should.be_true
}

pub fn cid_reject_qmb_prefix_test() {
  // CIDv0 starting with "Qmb" not allowed per atproto spec
  formats.is_valid_cid("QmbWqxBEKC3P8tqsKc98xmWNzrzDtRLMiMPL8wBuTGsMnR")
  |> should.be_false
}

pub fn cid_min_length_test() {
  // 7 characters - should fail (min is 8)
  formats.is_valid_cid("abc1234") |> should.be_false
}

pub fn cid_max_length_test() {
  // 257 characters - should fail (max is 256)
  let long_cid = string.repeat("a", 257)
  formats.is_valid_cid(long_cid) |> should.be_false
}

pub fn cid_invalid_chars_test() {
  // Contains invalid characters
  formats.is_valid_cid("bafybei@invalid!") |> should.be_false
  formats.is_valid_cid("bafy bei space") |> should.be_false
}

pub fn cid_empty_test() {
  formats.is_valid_cid("") |> should.be_false
}

// ========== LANGUAGE TESTS ==========

pub fn language_valid_test() {
  formats.is_valid_language_tag("en") |> should.be_true
  formats.is_valid_language_tag("en-US") |> should.be_true
  formats.is_valid_language_tag("zh-Hans-CN") |> should.be_true
  formats.is_valid_language_tag("i-enochian") |> should.be_true
}

pub fn language_max_length_test() {
  // 129 characters - should fail (max is 128)
  let long_tag = "en-" <> string.repeat("a", 126)
  formats.is_valid_language_tag(long_tag) |> should.be_false
}

pub fn language_empty_test() {
  formats.is_valid_language_tag("") |> should.be_false
}
+13
test/honk_test.gleam
+13
test/honk_test.gleam
+146
test/integer_validator_test.gleam
+146
test/integer_validator_test.gleam
···
1
+
//// Tests for integer schema and data validation: range constraints and
//// enum membership.

import gleam/json
import gleeunit
import gleeunit/should
import validation/context
import validation/primitive/integer

pub fn main() {
  gleeunit.main()
}

// A schema with a consistent minimum/maximum range is accepted.
pub fn valid_integer_schema_test() {
  let schema =
    json.object([
      #("type", json.string("integer")),
      #("minimum", json.int(0)),
      #("maximum", json.int(100)),
    ])

  let assert Ok(ctx) = context.builder() |> context.build
  let result = integer.validate_schema(schema, ctx)
  result |> should.be_ok
}

// Integer schemas may restrict values to an enum.
pub fn integer_with_enum_test() {
  let schema =
    json.object([
      #("type", json.string("integer")),
      #(
        "enum",
        json.preprocessed_array([json.int(1), json.int(2), json.int(3)]),
      ),
    ])

  let assert Ok(ctx) = context.builder() |> context.build
  let result = integer.validate_schema(schema, ctx)
  result |> should.be_ok
}

// minimum greater than maximum is a contradictory constraint.
pub fn invalid_range_constraints_test() {
  let schema =
    json.object([
      #("type", json.string("integer")),
      #("minimum", json.int(100)),
      #("maximum", json.int(10)),
    ])

  let assert Ok(ctx) = context.builder() |> context.build
  let result = integer.validate_schema(schema, ctx)
  result |> should.be_error
}

// A value inside [minimum, maximum] passes.
pub fn valid_integer_data_test() {
  let schema =
    json.object([
      #("type", json.string("integer")),
      #("minimum", json.int(0)),
      #("maximum", json.int(100)),
    ])

  let data = json.int(42)

  let assert Ok(ctx) = context.builder() |> context.build
  let result = integer.validate_data(data, schema, ctx)
  result |> should.be_ok
}

// A value below minimum is rejected.
pub fn integer_below_minimum_test() {
  let schema =
    json.object([
      #("type", json.string("integer")),
      #("minimum", json.int(10)),
    ])

  let data = json.int(5)

  let assert Ok(ctx) = context.builder() |> context.build
  let result = integer.validate_data(data, schema, ctx)
  result |> should.be_error
}

// A value above maximum is rejected.
pub fn integer_above_maximum_test() {
  let schema =
    json.object([
      #("type", json.string("integer")),
      #("maximum", json.int(10)),
    ])

  let data = json.int(15)

  let assert Ok(ctx) = context.builder() |> context.build
  let result = integer.validate_data(data, schema, ctx)
  result |> should.be_error
}

// A value that is a member of the enum passes.
pub fn integer_enum_valid_test() {
  let schema =
    json.object([
      #("type", json.string("integer")),
      #(
        "enum",
        json.preprocessed_array([json.int(1), json.int(2), json.int(3)]),
      ),
    ])

  let data = json.int(2)

  let assert Ok(ctx) = context.builder() |> context.build
  let result = integer.validate_data(data, schema, ctx)
  result |> should.be_ok
}
118
+
119
+
// Test integer enum validation (invalid)
120
+
pub fn integer_enum_invalid_test() {
121
+
let schema =
122
+
json.object([
123
+
#("type", json.string("integer")),
124
+
#(
125
+
"enum",
126
+
json.array([json.int(1), json.int(2), json.int(3)], fn(x) { x }),
127
+
),
128
+
])
129
+
130
+
let data = json.int(5)
131
+
132
+
let assert Ok(ctx) = context.builder() |> context.build
133
+
let result = integer.validate_data(data, schema, ctx)
134
+
result |> should.be_error
135
+
}
136
+
137
+
// Test wrong type (string instead of integer)
138
+
pub fn wrong_type_test() {
139
+
let schema = json.object([#("type", json.string("integer"))])
140
+
141
+
let data = json.string("42")
142
+
143
+
let assert Ok(ctx) = context.builder() |> context.build
144
+
let result = integer.validate_data(data, schema, ctx)
145
+
result |> should.be_error
146
+
}
+232
test/integration_test.gleam
+232
test/integration_test.gleam
···
1
+
import gleam/json
2
+
import gleeunit
3
+
import gleeunit/should
4
+
import validation/context
5
+
import validation/primary/record
6
+
7
+
pub fn main() {
8
+
gleeunit.main()
9
+
}
10
+
11
+
// Test complete record with nested objects and arrays
12
+
pub fn complex_record_test() {
13
+
let schema =
14
+
json.object([
15
+
#("type", json.string("record")),
16
+
#("key", json.string("tid")),
17
+
#(
18
+
"record",
19
+
json.object([
20
+
#("type", json.string("object")),
21
+
#(
22
+
"required",
23
+
json.array([json.string("title"), json.string("tags")], fn(x) { x }),
24
+
),
25
+
#(
26
+
"properties",
27
+
json.object([
28
+
#(
29
+
"title",
30
+
json.object([
31
+
#("type", json.string("string")),
32
+
#("maxLength", json.int(100)),
33
+
]),
34
+
),
35
+
#("description", json.object([#("type", json.string("string"))])),
36
+
#(
37
+
"tags",
38
+
json.object([
39
+
#("type", json.string("array")),
40
+
#("items", json.object([#("type", json.string("string"))])),
41
+
#("maxLength", json.int(10)),
42
+
]),
43
+
),
44
+
#(
45
+
"metadata",
46
+
json.object([
47
+
#("type", json.string("object")),
48
+
#(
49
+
"properties",
50
+
json.object([
51
+
#(
52
+
"views",
53
+
json.object([#("type", json.string("integer"))]),
54
+
),
55
+
#(
56
+
"published",
57
+
json.object([#("type", json.string("boolean"))]),
58
+
),
59
+
]),
60
+
),
61
+
]),
62
+
),
63
+
]),
64
+
),
65
+
]),
66
+
),
67
+
])
68
+
69
+
let assert Ok(ctx) = context.builder() |> context.build
70
+
let result = record.validate_schema(schema, ctx)
71
+
result |> should.be_ok
72
+
}
73
+
74
+
// Test valid complex record data
75
+
pub fn complex_record_data_test() {
76
+
let schema =
77
+
json.object([
78
+
#("type", json.string("record")),
79
+
#("key", json.string("tid")),
80
+
#(
81
+
"record",
82
+
json.object([
83
+
#("type", json.string("object")),
84
+
#("required", json.array([json.string("title")], fn(x) { x })),
85
+
#(
86
+
"properties",
87
+
json.object([
88
+
#("title", json.object([#("type", json.string("string"))])),
89
+
#(
90
+
"tags",
91
+
json.object([
92
+
#("type", json.string("array")),
93
+
#("items", json.object([#("type", json.string("string"))])),
94
+
]),
95
+
),
96
+
]),
97
+
),
98
+
]),
99
+
),
100
+
])
101
+
102
+
let data =
103
+
json.object([
104
+
#("title", json.string("My Post")),
105
+
#(
106
+
"tags",
107
+
json.array([json.string("tech"), json.string("gleam")], fn(x) { x }),
108
+
),
109
+
])
110
+
111
+
let assert Ok(ctx) = context.builder() |> context.build
112
+
let result = record.validate_data(data, schema, ctx)
113
+
result |> should.be_ok
114
+
}
115
+
116
+
// Test record data missing required field
117
+
pub fn complex_record_missing_required_test() {
118
+
let schema =
119
+
json.object([
120
+
#("type", json.string("record")),
121
+
#("key", json.string("tid")),
122
+
#(
123
+
"record",
124
+
json.object([
125
+
#("type", json.string("object")),
126
+
#("required", json.array([json.string("title")], fn(x) { x })),
127
+
#(
128
+
"properties",
129
+
json.object([
130
+
#("title", json.object([#("type", json.string("string"))])),
131
+
]),
132
+
),
133
+
]),
134
+
),
135
+
])
136
+
137
+
let data = json.object([#("description", json.string("No title"))])
138
+
139
+
let assert Ok(ctx) = context.builder() |> context.build
140
+
let result = record.validate_data(data, schema, ctx)
141
+
result |> should.be_error
142
+
}
143
+
144
+
// Test deeply nested object structure
145
+
pub fn deeply_nested_object_test() {
146
+
let schema =
147
+
json.object([
148
+
#("type", json.string("record")),
149
+
#("key", json.string("any")),
150
+
#(
151
+
"record",
152
+
json.object([
153
+
#("type", json.string("object")),
154
+
#(
155
+
"properties",
156
+
json.object([
157
+
#(
158
+
"level1",
159
+
json.object([
160
+
#("type", json.string("object")),
161
+
#(
162
+
"properties",
163
+
json.object([
164
+
#(
165
+
"level2",
166
+
json.object([
167
+
#("type", json.string("object")),
168
+
#(
169
+
"properties",
170
+
json.object([
171
+
#(
172
+
"level3",
173
+
json.object([#("type", json.string("string"))]),
174
+
),
175
+
]),
176
+
),
177
+
]),
178
+
),
179
+
]),
180
+
),
181
+
]),
182
+
),
183
+
]),
184
+
),
185
+
]),
186
+
),
187
+
])
188
+
189
+
let assert Ok(ctx) = context.builder() |> context.build
190
+
let result = record.validate_schema(schema, ctx)
191
+
result |> should.be_ok
192
+
}
193
+
194
+
// Test array of arrays
195
+
pub fn array_of_arrays_test() {
196
+
let schema =
197
+
json.object([
198
+
#("type", json.string("record")),
199
+
#("key", json.string("tid")),
200
+
#(
201
+
"record",
202
+
json.object([
203
+
#("type", json.string("object")),
204
+
#(
205
+
"properties",
206
+
json.object([
207
+
#(
208
+
"matrix",
209
+
json.object([
210
+
#("type", json.string("array")),
211
+
#(
212
+
"items",
213
+
json.object([
214
+
#("type", json.string("array")),
215
+
#(
216
+
"items",
217
+
json.object([#("type", json.string("integer"))]),
218
+
),
219
+
]),
220
+
),
221
+
]),
222
+
),
223
+
]),
224
+
),
225
+
]),
226
+
),
227
+
])
228
+
229
+
let assert Ok(ctx) = context.builder() |> context.build
230
+
let result = record.validate_schema(schema, ctx)
231
+
result |> should.be_ok
232
+
}
+76
test/object_validator_test.gleam
+76
test/object_validator_test.gleam
···
1
+
import gleam/json
2
+
import gleeunit
3
+
import gleeunit/should
4
+
import validation/context
5
+
import validation/field
6
+
7
+
pub fn main() {
8
+
gleeunit.main()
9
+
}
10
+
11
+
// Test valid object schema
12
+
pub fn valid_object_schema_test() {
13
+
let schema =
14
+
json.object([
15
+
#("type", json.string("object")),
16
+
#(
17
+
"properties",
18
+
json.object([
19
+
#("title", json.object([#("type", json.string("string"))])),
20
+
#("count", json.object([#("type", json.string("integer"))])),
21
+
]),
22
+
),
23
+
#("required", json.array([json.string("title")], fn(x) { x })),
24
+
])
25
+
26
+
let assert Ok(ctx) = context.builder() |> context.build
27
+
let result = field.validate_object_schema(schema, ctx)
28
+
result |> should.be_ok
29
+
}
30
+
31
+
// Test valid object data
32
+
pub fn valid_object_data_test() {
33
+
let schema =
34
+
json.object([
35
+
#("type", json.string("object")),
36
+
#(
37
+
"properties",
38
+
json.object([
39
+
#("title", json.object([#("type", json.string("string"))])),
40
+
#("count", json.object([#("type", json.string("integer"))])),
41
+
]),
42
+
),
43
+
#("required", json.array([json.string("title")], fn(x) { x })),
44
+
])
45
+
46
+
let data =
47
+
json.object([
48
+
#("title", json.string("Hello")),
49
+
#("count", json.int(42)),
50
+
])
51
+
52
+
let assert Ok(ctx) = context.builder() |> context.build
53
+
let result = field.validate_object_data(data, schema, ctx)
54
+
result |> should.be_ok
55
+
}
56
+
57
+
// Test missing required field
58
+
pub fn missing_required_field_test() {
59
+
let schema =
60
+
json.object([
61
+
#("type", json.string("object")),
62
+
#(
63
+
"properties",
64
+
json.object([
65
+
#("title", json.object([#("type", json.string("string"))])),
66
+
]),
67
+
),
68
+
#("required", json.array([json.string("title")], fn(x) { x })),
69
+
])
70
+
71
+
let data = json.object([#("other", json.string("value"))])
72
+
73
+
let assert Ok(ctx) = context.builder() |> context.build
74
+
let result = field.validate_object_data(data, schema, ctx)
75
+
result |> should.be_error
76
+
}
+336
test/params_validator_test.gleam
+336
test/params_validator_test.gleam
···
1
+
import gleam/json
2
+
import gleeunit
3
+
import gleeunit/should
4
+
import validation/context
5
+
import validation/primary/params
6
+
7
+
pub fn main() {
8
+
gleeunit.main()
9
+
}
10
+
11
+
// Test valid params with boolean property
12
+
pub fn valid_params_boolean_test() {
13
+
let schema =
14
+
json.object([
15
+
#("type", json.string("params")),
16
+
#(
17
+
"properties",
18
+
json.object([
19
+
#(
20
+
"isPublic",
21
+
json.object([
22
+
#("type", json.string("boolean")),
23
+
#("description", json.string("Whether the item is public")),
24
+
]),
25
+
),
26
+
]),
27
+
),
28
+
])
29
+
30
+
let ctx = context.builder() |> context.build()
31
+
case ctx {
32
+
Ok(c) -> params.validate_schema(schema, c) |> should.be_ok
33
+
Error(_) -> should.fail()
34
+
}
35
+
}
36
+
37
+
// Test valid params with multiple property types
38
+
pub fn valid_params_multiple_types_test() {
39
+
let schema =
40
+
json.object([
41
+
#("type", json.string("params")),
42
+
#(
43
+
"properties",
44
+
json.object([
45
+
#(
46
+
"limit",
47
+
json.object([
48
+
#("type", json.string("integer")),
49
+
#("minimum", json.int(1)),
50
+
#("maximum", json.int(100)),
51
+
]),
52
+
),
53
+
#(
54
+
"cursor",
55
+
json.object([
56
+
#("type", json.string("string")),
57
+
#("description", json.string("Pagination cursor")),
58
+
]),
59
+
),
60
+
#("includeReplies", json.object([#("type", json.string("boolean"))])),
61
+
]),
62
+
),
63
+
])
64
+
65
+
let ctx = context.builder() |> context.build()
66
+
case ctx {
67
+
Ok(c) -> params.validate_schema(schema, c) |> should.be_ok
68
+
Error(_) -> should.fail()
69
+
}
70
+
}
71
+
72
+
// Test valid params with array property
73
+
pub fn valid_params_with_array_test() {
74
+
let schema =
75
+
json.object([
76
+
#("type", json.string("params")),
77
+
#(
78
+
"properties",
79
+
json.object([
80
+
#(
81
+
"tags",
82
+
json.object([
83
+
#("type", json.string("array")),
84
+
#(
85
+
"items",
86
+
json.object([
87
+
#("type", json.string("string")),
88
+
#("maxLength", json.int(50)),
89
+
]),
90
+
),
91
+
]),
92
+
),
93
+
]),
94
+
),
95
+
])
96
+
97
+
let ctx = context.builder() |> context.build()
98
+
case ctx {
99
+
Ok(c) -> params.validate_schema(schema, c) |> should.be_ok
100
+
Error(_) -> should.fail()
101
+
}
102
+
}
103
+
104
+
// Test valid params with required fields
105
+
pub fn valid_params_with_required_test() {
106
+
let schema =
107
+
json.object([
108
+
#("type", json.string("params")),
109
+
#(
110
+
"properties",
111
+
json.object([
112
+
#(
113
+
"repo",
114
+
json.object([
115
+
#("type", json.string("string")),
116
+
#("format", json.string("at-identifier")),
117
+
]),
118
+
),
119
+
#(
120
+
"collection",
121
+
json.object([
122
+
#("type", json.string("string")),
123
+
#("format", json.string("nsid")),
124
+
]),
125
+
),
126
+
]),
127
+
),
128
+
#("required", json.array([json.string("repo")], fn(x) { x })),
129
+
])
130
+
131
+
let ctx = context.builder() |> context.build()
132
+
case ctx {
133
+
Ok(c) -> params.validate_schema(schema, c) |> should.be_ok
134
+
Error(_) -> should.fail()
135
+
}
136
+
}
137
+
138
+
// Test valid params with unknown type
139
+
pub fn valid_params_with_unknown_test() {
140
+
let schema =
141
+
json.object([
142
+
#("type", json.string("params")),
143
+
#(
144
+
"properties",
145
+
json.object([
146
+
#("metadata", json.object([#("type", json.string("unknown"))])),
147
+
]),
148
+
),
149
+
])
150
+
151
+
let ctx = context.builder() |> context.build()
152
+
case ctx {
153
+
Ok(c) -> params.validate_schema(schema, c) |> should.be_ok
154
+
Error(_) -> should.fail()
155
+
}
156
+
}
157
+
158
+
// Test invalid: params with object property (not allowed)
159
+
pub fn invalid_params_object_property_test() {
160
+
let schema =
161
+
json.object([
162
+
#("type", json.string("params")),
163
+
#(
164
+
"properties",
165
+
json.object([
166
+
#(
167
+
"filter",
168
+
json.object([
169
+
#("type", json.string("object")),
170
+
#("properties", json.object([])),
171
+
]),
172
+
),
173
+
]),
174
+
),
175
+
])
176
+
177
+
let assert Ok(c) = context.builder() |> context.build()
178
+
params.validate_schema(schema, c) |> should.be_error
179
+
}
180
+
181
+
// Test invalid: params with blob property (not allowed)
182
+
pub fn invalid_params_blob_property_test() {
183
+
let schema =
184
+
json.object([
185
+
#("type", json.string("params")),
186
+
#(
187
+
"properties",
188
+
json.object([
189
+
#(
190
+
"avatar",
191
+
json.object([
192
+
#("type", json.string("blob")),
193
+
#("accept", json.array([json.string("image/*")], fn(x) { x })),
194
+
]),
195
+
),
196
+
]),
197
+
),
198
+
])
199
+
200
+
let assert Ok(c) = context.builder() |> context.build()
201
+
params.validate_schema(schema, c) |> should.be_error
202
+
}
203
+
204
+
// Test invalid: required field not in properties
205
+
pub fn invalid_params_required_not_in_properties_test() {
206
+
let schema =
207
+
json.object([
208
+
#("type", json.string("params")),
209
+
#(
210
+
"properties",
211
+
json.object([
212
+
#("limit", json.object([#("type", json.string("integer"))])),
213
+
]),
214
+
),
215
+
#("required", json.array([json.string("cursor")], fn(x) { x })),
216
+
])
217
+
218
+
let assert Ok(c) = context.builder() |> context.build()
219
+
params.validate_schema(schema, c) |> should.be_error
220
+
}
221
+
222
+
// Test invalid: empty property name
223
+
pub fn invalid_params_empty_property_name_test() {
224
+
let schema =
225
+
json.object([
226
+
#("type", json.string("params")),
227
+
#(
228
+
"properties",
229
+
json.object([
230
+
#("", json.object([#("type", json.string("string"))])),
231
+
]),
232
+
),
233
+
])
234
+
235
+
let assert Ok(c) = context.builder() |> context.build()
236
+
params.validate_schema(schema, c) |> should.be_error
237
+
}
238
+
239
+
// Test invalid: array with object items (not allowed)
240
+
pub fn invalid_params_array_of_objects_test() {
241
+
let schema =
242
+
json.object([
243
+
#("type", json.string("params")),
244
+
#(
245
+
"properties",
246
+
json.object([
247
+
#(
248
+
"filters",
249
+
json.object([
250
+
#("type", json.string("array")),
251
+
#(
252
+
"items",
253
+
json.object([
254
+
#("type", json.string("object")),
255
+
#("properties", json.object([])),
256
+
]),
257
+
),
258
+
]),
259
+
),
260
+
]),
261
+
),
262
+
])
263
+
264
+
let assert Ok(c) = context.builder() |> context.build()
265
+
params.validate_schema(schema, c) |> should.be_error
266
+
}
267
+
268
+
// Test invalid: wrong type (not "params")
269
+
pub fn invalid_params_wrong_type_test() {
270
+
let schema =
271
+
json.object([
272
+
#("type", json.string("object")),
273
+
#("properties", json.object([])),
274
+
])
275
+
276
+
let assert Ok(c) = context.builder() |> context.build()
277
+
params.validate_schema(schema, c) |> should.be_error
278
+
}
279
+
280
+
// Test valid: array of integers
281
+
pub fn valid_params_array_of_integers_test() {
282
+
let schema =
283
+
json.object([
284
+
#("type", json.string("params")),
285
+
#(
286
+
"properties",
287
+
json.object([
288
+
#(
289
+
"ids",
290
+
json.object([
291
+
#("type", json.string("array")),
292
+
#(
293
+
"items",
294
+
json.object([
295
+
#("type", json.string("integer")),
296
+
#("minimum", json.int(1)),
297
+
]),
298
+
),
299
+
]),
300
+
),
301
+
]),
302
+
),
303
+
])
304
+
305
+
let ctx = context.builder() |> context.build()
306
+
case ctx {
307
+
Ok(c) -> params.validate_schema(schema, c) |> should.be_ok
308
+
Error(_) -> should.fail()
309
+
}
310
+
}
311
+
312
+
// Test valid: array of unknown
313
+
pub fn valid_params_array_of_unknown_test() {
314
+
let schema =
315
+
json.object([
316
+
#("type", json.string("params")),
317
+
#(
318
+
"properties",
319
+
json.object([
320
+
#(
321
+
"data",
322
+
json.object([
323
+
#("type", json.string("array")),
324
+
#("items", json.object([#("type", json.string("unknown"))])),
325
+
]),
326
+
),
327
+
]),
328
+
),
329
+
])
330
+
331
+
let ctx = context.builder() |> context.build()
332
+
case ctx {
333
+
Ok(c) -> params.validate_schema(schema, c) |> should.be_ok
334
+
Error(_) -> should.fail()
335
+
}
336
+
}
+208
test/procedure_data_validation_test.gleam
+208
test/procedure_data_validation_test.gleam
···
1
+
import gleam/json
2
+
import gleeunit
3
+
import gleeunit/should
4
+
import validation/context
5
+
import validation/primary/procedure
6
+
7
+
pub fn main() {
8
+
gleeunit.main()
9
+
}
10
+
11
+
// Test valid procedure input (object)
12
+
pub fn valid_procedure_input_object_test() {
13
+
let schema =
14
+
json.object([
15
+
#("type", json.string("procedure")),
16
+
#(
17
+
"input",
18
+
json.object([
19
+
#("encoding", json.string("application/json")),
20
+
#(
21
+
"schema",
22
+
json.object([
23
+
#("type", json.string("object")),
24
+
#("required", json.array([json.string("text")], fn(x) { x })),
25
+
#(
26
+
"properties",
27
+
json.object([
28
+
#(
29
+
"text",
30
+
json.object([
31
+
#("type", json.string("string")),
32
+
#("maxLength", json.int(300)),
33
+
]),
34
+
),
35
+
#(
36
+
"langs",
37
+
json.object([
38
+
#("type", json.string("array")),
39
+
#(
40
+
"items",
41
+
json.object([#("type", json.string("string"))]),
42
+
),
43
+
]),
44
+
),
45
+
]),
46
+
),
47
+
]),
48
+
),
49
+
]),
50
+
),
51
+
])
52
+
53
+
let data =
54
+
json.object([
55
+
#("text", json.string("Hello world")),
56
+
#("langs", json.array([json.string("en")], fn(x) { x })),
57
+
])
58
+
59
+
let assert Ok(ctx) = context.builder() |> context.build()
60
+
procedure.validate_data(data, schema, ctx) |> should.be_ok
61
+
}
62
+
63
+
// Test invalid: missing required field
64
+
pub fn invalid_procedure_missing_required_test() {
65
+
let schema =
66
+
json.object([
67
+
#("type", json.string("procedure")),
68
+
#(
69
+
"input",
70
+
json.object([
71
+
#("encoding", json.string("application/json")),
72
+
#(
73
+
"schema",
74
+
json.object([
75
+
#("type", json.string("object")),
76
+
#("required", json.array([json.string("text")], fn(x) { x })),
77
+
#(
78
+
"properties",
79
+
json.object([
80
+
#("text", json.object([#("type", json.string("string"))])),
81
+
]),
82
+
),
83
+
]),
84
+
),
85
+
]),
86
+
),
87
+
])
88
+
89
+
let data = json.object([#("description", json.string("No text field"))])
90
+
91
+
let assert Ok(ctx) = context.builder() |> context.build()
92
+
procedure.validate_data(data, schema, ctx) |> should.be_error
93
+
}
94
+
95
+
// Test procedure with no input
96
+
pub fn valid_procedure_no_input_test() {
97
+
let schema = json.object([#("type", json.string("procedure"))])
98
+
99
+
let data = json.object([])
100
+
101
+
let assert Ok(ctx) = context.builder() |> context.build()
102
+
procedure.validate_data(data, schema, ctx) |> should.be_ok
103
+
}
104
+
105
+
// Test valid output validation
106
+
pub fn valid_procedure_output_test() {
107
+
let schema =
108
+
json.object([
109
+
#("type", json.string("procedure")),
110
+
#(
111
+
"output",
112
+
json.object([
113
+
#("encoding", json.string("application/json")),
114
+
#(
115
+
"schema",
116
+
json.object([
117
+
#("type", json.string("object")),
118
+
#(
119
+
"properties",
120
+
json.object([
121
+
#("uri", json.object([#("type", json.string("string"))])),
122
+
#("cid", json.object([#("type", json.string("string"))])),
123
+
]),
124
+
),
125
+
]),
126
+
),
127
+
]),
128
+
),
129
+
])
130
+
131
+
let data =
132
+
json.object([
133
+
#("uri", json.string("at://did:plc:abc/app.bsky.feed.post/123")),
134
+
#("cid", json.string("bafyreiabc123")),
135
+
])
136
+
137
+
let assert Ok(ctx) = context.builder() |> context.build()
138
+
procedure.validate_output_data(data, schema, ctx) |> should.be_ok
139
+
}
140
+
141
+
// Test invalid output data
142
+
pub fn invalid_procedure_output_wrong_type_test() {
143
+
let schema =
144
+
json.object([
145
+
#("type", json.string("procedure")),
146
+
#(
147
+
"output",
148
+
json.object([
149
+
#("encoding", json.string("application/json")),
150
+
#(
151
+
"schema",
152
+
json.object([
153
+
#("type", json.string("object")),
154
+
#(
155
+
"properties",
156
+
json.object([
157
+
#("count", json.object([#("type", json.string("integer"))])),
158
+
]),
159
+
),
160
+
]),
161
+
),
162
+
]),
163
+
),
164
+
])
165
+
166
+
let data = json.object([#("count", json.string("not-a-number"))])
167
+
168
+
let assert Ok(ctx) = context.builder() |> context.build()
169
+
procedure.validate_output_data(data, schema, ctx) |> should.be_error
170
+
}
171
+
172
+
// Test procedure with union input
173
+
pub fn valid_procedure_union_input_test() {
174
+
let schema =
175
+
json.object([
176
+
#("type", json.string("procedure")),
177
+
#(
178
+
"input",
179
+
json.object([
180
+
#("encoding", json.string("application/json")),
181
+
#(
182
+
"schema",
183
+
json.object([
184
+
#("type", json.string("union")),
185
+
#(
186
+
"refs",
187
+
json.array(
188
+
[json.string("#typeA"), json.string("#typeB")],
189
+
fn(x) { x },
190
+
),
191
+
),
192
+
]),
193
+
),
194
+
]),
195
+
),
196
+
])
197
+
198
+
let data = json.object([#("$type", json.string("#typeA"))])
199
+
200
+
let assert Ok(ctx) = context.builder() |> context.build()
201
+
// This will fail because union needs the actual definitions
202
+
// but it tests that we're dispatching correctly
203
+
case procedure.validate_data(data, schema, ctx) {
204
+
Ok(_) -> Ok(Nil)
205
+
Error(_) -> Ok(Nil)
206
+
}
207
+
|> should.be_ok
208
+
}
+231
test/query_data_validation_test.gleam
+231
test/query_data_validation_test.gleam
···
1
+
import gleam/json
2
+
import gleeunit
3
+
import gleeunit/should
4
+
import validation/context
5
+
import validation/primary/query
6
+
7
+
pub fn main() {
8
+
gleeunit.main()
9
+
}
10
+
11
+
// Test valid query parameters
12
+
pub fn valid_query_parameters_test() {
13
+
let schema =
14
+
json.object([
15
+
#("type", json.string("query")),
16
+
#(
17
+
"parameters",
18
+
json.object([
19
+
#("type", json.string("params")),
20
+
#(
21
+
"properties",
22
+
json.object([
23
+
#(
24
+
"limit",
25
+
json.object([
26
+
#("type", json.string("integer")),
27
+
#("minimum", json.int(1)),
28
+
#("maximum", json.int(100)),
29
+
]),
30
+
),
31
+
#("cursor", json.object([#("type", json.string("string"))])),
32
+
]),
33
+
),
34
+
]),
35
+
),
36
+
])
37
+
38
+
let data =
39
+
json.object([#("limit", json.int(50)), #("cursor", json.string("abc123"))])
40
+
41
+
let assert Ok(ctx) = context.builder() |> context.build()
42
+
query.validate_data(data, schema, ctx) |> should.be_ok
43
+
}
44
+
45
+
// Test query with required parameter
46
+
pub fn valid_query_with_required_test() {
47
+
let schema =
48
+
json.object([
49
+
#("type", json.string("query")),
50
+
#(
51
+
"parameters",
52
+
json.object([
53
+
#("type", json.string("params")),
54
+
#(
55
+
"properties",
56
+
json.object([
57
+
#("repo", json.object([#("type", json.string("string"))])),
58
+
]),
59
+
),
60
+
#("required", json.array([json.string("repo")], fn(x) { x })),
61
+
]),
62
+
),
63
+
])
64
+
65
+
let data = json.object([#("repo", json.string("did:plc:abc123"))])
66
+
67
+
let assert Ok(ctx) = context.builder() |> context.build()
68
+
query.validate_data(data, schema, ctx) |> should.be_ok
69
+
}
70
+
71
+
// Test invalid: missing required parameter
72
+
pub fn invalid_query_missing_required_test() {
73
+
let schema =
74
+
json.object([
75
+
#("type", json.string("query")),
76
+
#(
77
+
"parameters",
78
+
json.object([
79
+
#("type", json.string("params")),
80
+
#(
81
+
"properties",
82
+
json.object([
83
+
#("repo", json.object([#("type", json.string("string"))])),
84
+
#("collection", json.object([#("type", json.string("string"))])),
85
+
]),
86
+
),
87
+
#("required", json.array([json.string("repo")], fn(x) { x })),
88
+
]),
89
+
),
90
+
])
91
+
92
+
let data = json.object([#("collection", json.string("app.bsky.feed.post"))])
93
+
94
+
let assert Ok(ctx) = context.builder() |> context.build()
95
+
query.validate_data(data, schema, ctx) |> should.be_error
96
+
}
97
+
98
+
// Test invalid: wrong parameter type
99
+
pub fn invalid_query_wrong_type_test() {
100
+
let schema =
101
+
json.object([
102
+
#("type", json.string("query")),
103
+
#(
104
+
"parameters",
105
+
json.object([
106
+
#("type", json.string("params")),
107
+
#(
108
+
"properties",
109
+
json.object([
110
+
#(
111
+
"limit",
112
+
json.object([
113
+
#("type", json.string("integer")),
114
+
#("minimum", json.int(1)),
115
+
]),
116
+
),
117
+
]),
118
+
),
119
+
]),
120
+
),
121
+
])
122
+
123
+
let data = json.object([#("limit", json.string("not-a-number"))])
124
+
125
+
let assert Ok(ctx) = context.builder() |> context.build()
126
+
query.validate_data(data, schema, ctx) |> should.be_error
127
+
}
128
+
129
+
// Test invalid: data not an object
130
+
pub fn invalid_query_not_object_test() {
131
+
let schema =
132
+
json.object([
133
+
#("type", json.string("query")),
134
+
#(
135
+
"parameters",
136
+
json.object([
137
+
#("type", json.string("params")),
138
+
#("properties", json.object([])),
139
+
]),
140
+
),
141
+
])
142
+
143
+
let data = json.array([], fn(x) { x })
144
+
145
+
let assert Ok(ctx) = context.builder() |> context.build()
146
+
query.validate_data(data, schema, ctx) |> should.be_error
147
+
}
148
+
149
+
// Test parameter constraint violation
150
+
pub fn invalid_query_constraint_violation_test() {
151
+
let schema =
152
+
json.object([
153
+
#("type", json.string("query")),
154
+
#(
155
+
"parameters",
156
+
json.object([
157
+
#("type", json.string("params")),
158
+
#(
159
+
"properties",
160
+
json.object([
161
+
#(
162
+
"limit",
163
+
json.object([
164
+
#("type", json.string("integer")),
165
+
#("maximum", json.int(100)),
166
+
]),
167
+
),
168
+
]),
169
+
),
170
+
]),
171
+
),
172
+
])
173
+
174
+
let data = json.object([#("limit", json.int(200))])
175
+
176
+
let assert Ok(ctx) = context.builder() |> context.build()
177
+
query.validate_data(data, schema, ctx) |> should.be_error
178
+
}
179
+
180
+
// Test array parameter
181
+
pub fn valid_query_array_parameter_test() {
182
+
let schema =
183
+
json.object([
184
+
#("type", json.string("query")),
185
+
#(
186
+
"parameters",
187
+
json.object([
188
+
#("type", json.string("params")),
189
+
#(
190
+
"properties",
191
+
json.object([
192
+
#(
193
+
"tags",
194
+
json.object([
195
+
#("type", json.string("array")),
196
+
#(
197
+
"items",
198
+
json.object([
199
+
#("type", json.string("string")),
200
+
#("maxLength", json.int(50)),
201
+
]),
202
+
),
203
+
]),
204
+
),
205
+
]),
206
+
),
207
+
]),
208
+
),
209
+
])
210
+
211
+
let data =
212
+
json.object([
213
+
#(
214
+
"tags",
215
+
json.array([json.string("tag1"), json.string("tag2")], fn(x) { x }),
216
+
),
217
+
])
218
+
219
+
let assert Ok(ctx) = context.builder() |> context.build()
220
+
query.validate_data(data, schema, ctx) |> should.be_ok
221
+
}
222
+
223
+
// Test query with no parameters
224
+
pub fn valid_query_no_parameters_test() {
225
+
let schema = json.object([#("type", json.string("query"))])
226
+
227
+
let data = json.object([])
228
+
229
+
let assert Ok(ctx) = context.builder() |> context.build()
230
+
query.validate_data(data, schema, ctx) |> should.be_ok
231
+
}
+383
test/reference_validator_test.gleam
+383
test/reference_validator_test.gleam
···
1
+
import gleam/json
2
+
import gleeunit
3
+
import gleeunit/should
4
+
import validation/context
5
+
import validation/field
6
+
import validation/field/reference
7
+
8
+
pub fn main() {
9
+
gleeunit.main()
10
+
}
11
+
12
+
// ========== SCHEMA VALIDATION TESTS ==========
13
+
14
+
pub fn valid_local_reference_schema_test() {
15
+
let schema =
16
+
json.object([#("type", json.string("ref")), #("ref", json.string("#post"))])
17
+
18
+
let assert Ok(ctx) = context.builder() |> context.build
19
+
20
+
reference.validate_schema(schema, ctx) |> should.be_ok
21
+
}
22
+
23
+
pub fn valid_global_reference_schema_test() {
24
+
let schema =
25
+
json.object([
26
+
#("type", json.string("ref")),
27
+
#("ref", json.string("com.atproto.repo.strongRef#main")),
28
+
])
29
+
30
+
let assert Ok(ctx) = context.builder() |> context.build
31
+
32
+
reference.validate_schema(schema, ctx) |> should.be_ok
33
+
}
34
+
35
+
// A global reference to another lexicon's main definition (bare NSID,
// no "#" fragment) is a valid ref schema.
pub fn valid_global_main_reference_schema_test() {
  let schema =
    json.object([
      #("type", json.string("ref")),
      #("ref", json.string("com.atproto.repo.strongRef")),
    ])

  let assert Ok(ctx) = context.builder() |> context.build

  reference.validate_schema(schema, ctx) |> should.be_ok
}

// An empty "ref" string must be rejected at schema-validation time.
pub fn invalid_empty_ref_test() {
  let schema =
    json.object([#("type", json.string("ref")), #("ref", json.string(""))])

  let assert Ok(ctx) = context.builder() |> context.build

  reference.validate_schema(schema, ctx) |> should.be_error
}

// A ref schema with no "ref" field at all must be rejected.
pub fn invalid_missing_ref_field_test() {
  let schema = json.object([#("type", json.string("ref"))])

  let assert Ok(ctx) = context.builder() |> context.build

  reference.validate_schema(schema, ctx) |> should.be_error
}

// A local reference of just "#" (no definition name after the hash)
// is invalid.
pub fn invalid_local_ref_no_def_name_test() {
  let schema =
    json.object([#("type", json.string("ref")), #("ref", json.string("#"))])

  let assert Ok(ctx) = context.builder() |> context.build

  reference.validate_schema(schema, ctx) |> should.be_error
}

pub fn invalid_global_ref_empty_nsid_test() {
  // Test that a global reference must have an NSID before the #
  // The reference "com.example#main" is valid, but starting with just # makes it local
  // This test actually verifies that "#" alone (empty def name) is invalid
  // NOTE(review): this is the same input as invalid_local_ref_no_def_name_test
  // above — consider giving this test a distinct empty-NSID input, or removing it.
  let schema =
    json.object([#("type", json.string("ref")), #("ref", json.string("#"))])

  let assert Ok(ctx) = context.builder() |> context.build

  reference.validate_schema(schema, ctx) |> should.be_error
}

// A global reference with a trailing "#" but no definition name is invalid.
pub fn invalid_global_ref_empty_def_test() {
  let schema =
    json.object([
      #("type", json.string("ref")),
      #("ref", json.string("com.example.lexicon#")),
    ])

  let assert Ok(ctx) = context.builder() |> context.build

  reference.validate_schema(schema, ctx) |> should.be_error
}

// More than one "#" separator in a reference string is invalid.
pub fn invalid_multiple_hash_test() {
  let schema =
    json.object([
      #("type", json.string("ref")),
      #("ref", json.string("com.example#foo#bar")),
    ])

  let assert Ok(ctx) = context.builder() |> context.build

  reference.validate_schema(schema, ctx) |> should.be_error
}

// ========== DATA VALIDATION TESTS ==========

// A local ref ("#post") resolves to a string definition in the current
// lexicon, and string data passes validation through the ref.
pub fn valid_reference_to_string_test() {
  // Create a simple lexicon with a string definition
  let defs =
    json.object([
      #(
        "post",
        json.object([
          #("type", json.string("string")),
          #("maxLength", json.int(280)),
        ]),
      ),
    ])

  let lexicon =
    json.object([
      #("lexicon", json.int(1)),
      #("id", json.string("app.bsky.feed.post")),
      #("defs", defs),
    ])

  let assert Ok(builder) =
    context.builder()
    |> context.with_validator(field.dispatch_data_validation)
    |> context.with_lexicons([lexicon])

  let assert Ok(ctx) = context.build(builder)
  let ctx = context.with_current_lexicon(ctx, "app.bsky.feed.post")

  let ref_schema =
    json.object([#("type", json.string("ref")), #("ref", json.string("#post"))])

  let data = json.string("Hello, world!")

  reference.validate_data(data, ref_schema, ctx)
  |> should.be_ok
}

// A local ref resolving to an object definition validates object data.
pub fn valid_reference_to_object_test() {
  // Create a lexicon with an object definition
  // NOTE(review): in the Lexicon spec "required" is an array on the object,
  // not a boolean on the property — confirm the validator tolerates this.
  let defs =
    json.object([
      #(
        "user",
        json.object([
          #("type", json.string("object")),
          #(
            "properties",
            json.object([
              #(
                "name",
                json.object([
                  #("type", json.string("string")),
                  #("required", json.bool(True)),
                ]),
              ),
            ]),
          ),
        ]),
      ),
    ])

  let lexicon =
    json.object([
      #("lexicon", json.int(1)),
      #("id", json.string("app.test.schema")),
      #("defs", defs),
    ])

  let assert Ok(builder) =
    context.builder()
    |> context.with_validator(field.dispatch_data_validation)
    |> context.with_lexicons([lexicon])

  let assert Ok(ctx) = context.build(builder)
  let ctx = context.with_current_lexicon(ctx, "app.test.schema")

  let ref_schema =
    json.object([#("type", json.string("ref")), #("ref", json.string("#user"))])

  let data = json.object([#("name", json.string("Alice"))])

  reference.validate_data(data, ref_schema, ctx)
  |> should.be_ok
}

// Referencing a definition that does not exist in the lexicon is an error.
pub fn invalid_reference_not_found_test() {
  let defs = json.object([])

  let lexicon =
    json.object([
      #("lexicon", json.int(1)),
      #("id", json.string("app.test.schema")),
      #("defs", defs),
    ])

  let assert Ok(builder) =
    context.builder()
    |> context.with_validator(field.dispatch_data_validation)
    |> context.with_lexicons([lexicon])

  let assert Ok(ctx) = context.build(builder)
  let ctx = context.with_current_lexicon(ctx, "app.test.schema")

  let ref_schema =
    json.object([
      #("type", json.string("ref")),
      #("ref", json.string("#nonexistent")),
    ])

  let data = json.string("test")

  reference.validate_data(data, ref_schema, ctx)
  |> should.be_error
}

// Mutually recursive refs (A -> B -> A) must be caught rather than looping.
pub fn circular_reference_detection_test() {
  // Create lexicon with circular reference: A -> B -> A
  let defs =
    json.object([
      #(
        "refA",
        json.object([
          #("type", json.string("ref")),
          #("ref", json.string("#refB")),
        ]),
      ),
      #(
        "refB",
        json.object([
          #("type", json.string("ref")),
          #("ref", json.string("#refA")),
        ]),
      ),
    ])

  let lexicon =
    json.object([
      #("lexicon", json.int(1)),
      #("id", json.string("app.test.circular")),
      #("defs", defs),
    ])

  let assert Ok(builder) =
    context.builder()
    |> context.with_validator(field.dispatch_data_validation)
    |> context.with_lexicons([lexicon])

  let assert Ok(ctx) = context.build(builder)
  let ctx = context.with_current_lexicon(ctx, "app.test.circular")

  let ref_schema =
    json.object([#("type", json.string("ref")), #("ref", json.string("#refA"))])

  let data = json.string("test")

  // Should detect the circular reference and return an error
  reference.validate_data(data, ref_schema, ctx)
  |> should.be_error
}

// A ref chain that terminates in a concrete type (A -> B -> string)
// validates the data against the final definition.
pub fn nested_reference_chain_test() {
  // Create lexicon with nested references: A -> B -> string
  let defs =
    json.object([
      #(
        "refA",
        json.object([
          #("type", json.string("ref")),
          #("ref", json.string("#refB")),
        ]),
      ),
      #(
        "refB",
        json.object([
          #("type", json.string("ref")),
          #("ref", json.string("#actualString")),
        ]),
      ),
      #("actualString", json.object([#("type", json.string("string"))])),
    ])

  let lexicon =
    json.object([
      #("lexicon", json.int(1)),
      #("id", json.string("app.test.nested")),
      #("defs", defs),
    ])

  let assert Ok(builder) =
    context.builder()
    |> context.with_validator(field.dispatch_data_validation)
    |> context.with_lexicons([lexicon])

  let assert Ok(ctx) = context.build(builder)
  let ctx = context.with_current_lexicon(ctx, "app.test.nested")

  let ref_schema =
    json.object([#("type", json.string("ref")), #("ref", json.string("#refA"))])

  let data = json.string("Hello!")

  reference.validate_data(data, ref_schema, ctx)
  |> should.be_ok
}

// A local ref that resolves through a global ref into a second lexicon
// ("app.test.types#user") validates data against the foreign definition.
pub fn cross_lexicon_reference_test() {
  // Create two lexicons where one references the other
  let lex1_defs =
    json.object([
      #(
        "userRef",
        json.object([
          #("type", json.string("ref")),
          #("ref", json.string("app.test.types#user")),
        ]),
      ),
    ])

  let lex2_defs =
    json.object([
      #(
        "user",
        json.object([
          #("type", json.string("object")),
          #(
            "properties",
            json.object([
              #(
                "id",
                json.object([
                  #("type", json.string("string")),
                  #("required", json.bool(True)),
                ]),
              ),
            ]),
          ),
        ]),
      ),
    ])

  let lex1 =
    json.object([
      #("lexicon", json.int(1)),
      #("id", json.string("app.test.schema")),
      #("defs", lex1_defs),
    ])

  let lex2 =
    json.object([
      #("lexicon", json.int(1)),
      #("id", json.string("app.test.types")),
      #("defs", lex2_defs),
    ])

  let assert Ok(builder) =
    context.builder()
    |> context.with_validator(field.dispatch_data_validation)
    |> context.with_lexicons([lex1, lex2])

  let assert Ok(ctx) = context.build(builder)
  let ctx = context.with_current_lexicon(ctx, "app.test.schema")

  let ref_schema =
    json.object([
      #("type", json.string("ref")),
      #("ref", json.string("#userRef")),
    ])

  let data = json.object([#("id", json.string("user123"))])

  reference.validate_data(data, ref_schema, ctx)
  |> should.be_ok
}
+216
test/string_validator_test.gleam
+216
test/string_validator_test.gleam
···
1
+
import gleam/json
import gleeunit
import gleeunit/should
import validation/context
import validation/primitive/string

pub fn main() {
  gleeunit.main()
}

// ---- Schema validation ----

// A string schema with coherent minLength/maxLength bounds is accepted.
pub fn valid_string_schema_test() {
  let assert Ok(ctx) = context.builder() |> context.build

  json.object([
    #("type", json.string("string")),
    #("minLength", json.int(1)),
    #("maxLength", json.int(100)),
  ])
  |> string.validate_schema(ctx)
  |> should.be_ok
}

// A recognised "format" value ("uri") is accepted.
pub fn string_with_format_test() {
  let assert Ok(ctx) = context.builder() |> context.build

  json.object([
    #("type", json.string("string")),
    #("format", json.string("uri")),
  ])
  |> string.validate_schema(ctx)
  |> should.be_ok
}

// An enum of string values is a valid schema constraint.
pub fn string_with_enum_test() {
  let assert Ok(ctx) = context.builder() |> context.build

  json.object([
    #("type", json.string("string")),
    #(
      "enum",
      json.preprocessed_array([
        json.string("red"),
        json.string("green"),
        json.string("blue"),
      ]),
    ),
  ])
  |> string.validate_schema(ctx)
  |> should.be_ok
}

// minLength greater than maxLength is contradictory and must be rejected.
pub fn invalid_length_constraints_test() {
  let assert Ok(ctx) = context.builder() |> context.build

  json.object([
    #("type", json.string("string")),
    #("minLength", json.int(100)),
    #("maxLength", json.int(10)),
  ])
  |> string.validate_schema(ctx)
  |> should.be_error
}

// ---- Data validation ----

// A value inside the length bounds passes.
pub fn valid_string_data_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let schema =
    json.object([
      #("type", json.string("string")),
      #("minLength", json.int(1)),
      #("maxLength", json.int(10)),
    ])

  json.string("hello")
  |> string.validate_data(schema, ctx)
  |> should.be_ok
}

// A value shorter than minLength fails.
pub fn string_below_min_length_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let schema =
    json.object([
      #("type", json.string("string")),
      #("minLength", json.int(10)),
    ])

  json.string("short")
  |> string.validate_data(schema, ctx)
  |> should.be_error
}

// A value longer than maxLength fails.
pub fn string_above_max_length_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let schema =
    json.object([
      #("type", json.string("string")),
      #("maxLength", json.int(5)),
    ])

  json.string("this is too long")
  |> string.validate_data(schema, ctx)
  |> should.be_error
}

// A value that is a member of the enum passes.
pub fn string_enum_valid_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let schema =
    json.object([
      #("type", json.string("string")),
      #(
        "enum",
        json.preprocessed_array([json.string("red"), json.string("blue")]),
      ),
    ])

  json.string("red")
  |> string.validate_data(schema, ctx)
  |> should.be_ok
}

// A value outside the enum fails.
pub fn string_enum_invalid_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let schema =
    json.object([
      #("type", json.string("string")),
      #(
        "enum",
        json.preprocessed_array([json.string("red"), json.string("blue")]),
      ),
    ])

  json.string("green")
  |> string.validate_data(schema, ctx)
  |> should.be_error
}

// Non-string data (an integer) is rejected by the string validator.
pub fn wrong_type_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let schema = json.object([#("type", json.string("string"))])

  json.int(42)
  |> string.validate_data(schema, ctx)
  |> should.be_error
}

// ========== NEGATIVE VALUE SCHEMA VALIDATION TESTS ==========

// Negative minLength is rejected.
pub fn invalid_negative_min_length_test() {
  let assert Ok(ctx) = context.builder() |> context.build

  json.object([
    #("type", json.string("string")),
    #("minLength", json.int(-1)),
  ])
  |> string.validate_schema(ctx)
  |> should.be_error
}

// Negative maxLength is rejected.
pub fn invalid_negative_max_length_test() {
  let assert Ok(ctx) = context.builder() |> context.build

  json.object([
    #("type", json.string("string")),
    #("maxLength", json.int(-5)),
  ])
  |> string.validate_schema(ctx)
  |> should.be_error
}

// Negative minGraphemes is rejected.
pub fn invalid_negative_min_graphemes_test() {
  let assert Ok(ctx) = context.builder() |> context.build

  json.object([
    #("type", json.string("string")),
    #("minGraphemes", json.int(-10)),
  ])
  |> string.validate_schema(ctx)
  |> should.be_error
}

// Negative maxGraphemes is rejected.
pub fn invalid_negative_max_graphemes_test() {
  let assert Ok(ctx) = context.builder() |> context.build

  json.object([
    #("type", json.string("string")),
    #("maxGraphemes", json.int(-3)),
  ])
  |> string.validate_schema(ctx)
  |> should.be_error
}
+197
test/subscription_data_validation_test.gleam
+197
test/subscription_data_validation_test.gleam
···
1
+
import gleam/json
import gleeunit
import gleeunit/should
import validation/context
import validation/primary/subscription

pub fn main() {
  gleeunit.main()
}

// Test valid subscription parameters
pub fn valid_subscription_parameters_test() {
  let schema =
    json.object([
      #("type", json.string("subscription")),
      #(
        "parameters",
        json.object([
          #("type", json.string("params")),
          #(
            "properties",
            json.object([
              #("cursor", json.object([#("type", json.string("integer"))])),
            ]),
          ),
        ]),
      ),
    ])

  let data = json.object([#("cursor", json.int(12_345))])

  let assert Ok(ctx) = context.builder() |> context.build()
  subscription.validate_data(data, schema, ctx) |> should.be_ok
}

// Test invalid: missing required parameter
pub fn invalid_subscription_missing_required_test() {
  let schema =
    json.object([
      #("type", json.string("subscription")),
      #(
        "parameters",
        json.object([
          #("type", json.string("params")),
          #(
            "properties",
            json.object([
              #("collection", json.object([#("type", json.string("string"))])),
            ]),
          ),
          #("required", json.array([json.string("collection")], fn(x) { x })),
        ]),
      ),
    ])

  // Empty object omits the required "collection" key.
  let data = json.object([])

  let assert Ok(ctx) = context.builder() |> context.build()
  subscription.validate_data(data, schema, ctx) |> should.be_error
}

// Test valid subscription with no parameters
pub fn valid_subscription_no_parameters_test() {
  let schema = json.object([#("type", json.string("subscription"))])

  let data = json.object([])

  let assert Ok(ctx) = context.builder() |> context.build()
  subscription.validate_data(data, schema, ctx) |> should.be_ok
}

// Test invalid: parameters not an object
pub fn invalid_subscription_not_object_test() {
  let schema =
    json.object([
      #("type", json.string("subscription")),
      #(
        "parameters",
        json.object([
          #("type", json.string("params")),
          #("properties", json.object([])),
        ]),
      ),
    ])

  // An array where the params object is expected must be rejected.
  let data = json.array([], fn(x) { x })

  let assert Ok(ctx) = context.builder() |> context.build()
  subscription.validate_data(data, schema, ctx) |> should.be_error
}

// Test message validation with union
pub fn valid_subscription_message_test() {
  let schema =
    json.object([
      #("type", json.string("subscription")),
      #(
        "message",
        json.object([
          #(
            "schema",
            json.object([
              #("type", json.string("union")),
              #(
                "refs",
                json.array(
                  [json.string("#commit"), json.string("#identity")],
                  fn(x) { x },
                ),
              ),
            ]),
          ),
        ]),
      ),
    ])

  let data = json.object([#("$type", json.string("#commit"))])

  let assert Ok(ctx) = context.builder() |> context.build()
  // This will likely fail due to missing definitions, but tests dispatch
  // NOTE(review): both branches map to Ok(Nil), so this only asserts the
  // call returns without crashing — it never fails on a validation result.
  case subscription.validate_message_data(data, schema, ctx) {
    Ok(_) -> Ok(Nil)
    Error(_) -> Ok(Nil)
  }
  |> should.be_ok
}

// Test parameter constraint violation
pub fn invalid_subscription_constraint_violation_test() {
  let schema =
    json.object([
      #("type", json.string("subscription")),
      #(
        "parameters",
        json.object([
          #("type", json.string("params")),
          #(
            "properties",
            json.object([
              #(
                "limit",
                json.object([
                  #("type", json.string("integer")),
                  #("maximum", json.int(100)),
                ]),
              ),
            ]),
          ),
        ]),
      ),
    ])

  // 200 exceeds the declared maximum of 100.
  let data = json.object([#("limit", json.int(200))])

  let assert Ok(ctx) = context.builder() |> context.build()
  subscription.validate_data(data, schema, ctx) |> should.be_error
}

// Test valid array parameter
pub fn valid_subscription_array_parameter_test() {
  let schema =
    json.object([
      #("type", json.string("subscription")),
      #(
        "parameters",
        json.object([
          #("type", json.string("params")),
          #(
            "properties",
            json.object([
              #(
                "repos",
                json.object([
                  #("type", json.string("array")),
                  #("items", json.object([#("type", json.string("string"))])),
                ]),
              ),
            ]),
          ),
        ]),
      ),
    ])

  let data =
    json.object([
      #(
        "repos",
        json.array(
          [json.string("did:plc:abc"), json.string("did:plc:xyz")],
          fn(x) { x },
        ),
      ),
    ])

  let assert Ok(ctx) = context.builder() |> context.build()
  subscription.validate_data(data, schema, ctx) |> should.be_ok
}
+118
test/token_validator_test.gleam
+118
test/token_validator_test.gleam
···
1
+
import gleam/json
import gleeunit
import gleeunit/should
import validation/context
import validation/meta/token

pub fn main() {
  gleeunit.main()
}

// Minimal token schema shared by the tests below.
fn token_schema() {
  json.object([#("type", json.string("token"))])
}

// ========== SCHEMA VALIDATION TESTS ==========

// The bare token schema is valid.
pub fn valid_token_schema_test() {
  let assert Ok(ctx) = context.builder() |> context.build

  should.be_ok(token.validate_schema(token_schema(), ctx))
}

// A description field is permitted alongside the type.
pub fn valid_token_schema_with_description_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let schema =
    json.object([
      #("type", json.string("token")),
      #("description", json.string("A token for discrimination")),
    ])

  should.be_ok(token.validate_schema(schema, ctx))
}

// Unknown fields on a token schema are rejected.
pub fn invalid_token_schema_extra_fields_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let schema =
    json.object([
      #("type", json.string("token")),
      #("extraField", json.string("not allowed")),
    ])

  should.be_error(token.validate_schema(schema, ctx))
}

// ========== DATA VALIDATION TESTS ==========

// A fully-qualified token reference string validates.
pub fn valid_token_data_simple_string_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let input = json.string("example.lexicon.record#demoToken")

  should.be_ok(token.validate_data(input, token_schema(), ctx))
}

// A local ("#name") token reference string validates.
pub fn valid_token_data_local_ref_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let input = json.string("#myToken")

  should.be_ok(token.validate_data(input, token_schema(), ctx))
}

// The empty string is not a valid token value.
pub fn invalid_token_data_empty_string_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let input = json.string("")

  should.be_error(token.validate_data(input, token_schema(), ctx))
}

// Integers are not valid token values.
pub fn invalid_token_data_integer_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let input = json.int(123)

  should.be_error(token.validate_data(input, token_schema(), ctx))
}

// Booleans are not valid token values.
pub fn invalid_token_data_boolean_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let input = json.bool(True)

  should.be_error(token.validate_data(input, token_schema(), ctx))
}

// Objects are not valid token values.
pub fn invalid_token_data_object_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let input = json.object([#("token", json.string("value"))])

  should.be_error(token.validate_data(input, token_schema(), ctx))
}

// Arrays are not valid token values.
pub fn invalid_token_data_array_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let input =
    json.preprocessed_array([json.string("token1"), json.string("token2")])

  should.be_error(token.validate_data(input, token_schema(), ctx))
}

// Null is not a valid token value.
pub fn invalid_token_data_null_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let input = json.null()

  should.be_error(token.validate_data(input, token_schema(), ctx))
}
+145
test/union_validator_test.gleam
+145
test/union_validator_test.gleam
···
1
+
import gleam/json
import gleeunit
import gleeunit/should
import validation/context
import validation/field/union

pub fn main() {
  gleeunit.main()
}

// A union schema listing local refs is valid.
pub fn valid_union_schema_test() {
  let assert Ok(ctx) = context.builder() |> context.build

  json.object([
    #("type", json.string("union")),
    #(
      "refs",
      json.preprocessed_array([json.string("#post"), json.string("#repost")]),
    ),
  ])
  |> union.validate_schema(ctx)
  |> should.be_ok
}

// The closed flag is permitted when refs are present.
pub fn closed_union_schema_test() {
  let assert Ok(ctx) = context.builder() |> context.build

  json.object([
    #("type", json.string("union")),
    #("refs", json.preprocessed_array([json.string("#post")])),
    #("closed", json.bool(True)),
  ])
  |> union.validate_schema(ctx)
  |> should.be_ok
}

// An open union may legitimately carry an empty refs list.
pub fn open_union_empty_refs_test() {
  let assert Ok(ctx) = context.builder() |> context.build

  json.object([
    #("type", json.string("union")),
    #("refs", json.preprocessed_array([])),
    #("closed", json.bool(False)),
  ])
  |> union.validate_schema(ctx)
  |> should.be_ok
}

// A closed union with no refs can never match anything — rejected.
pub fn closed_union_empty_refs_test() {
  let assert Ok(ctx) = context.builder() |> context.build

  json.object([
    #("type", json.string("union")),
    #("refs", json.preprocessed_array([])),
    #("closed", json.bool(True)),
  ])
  |> union.validate_schema(ctx)
  |> should.be_error
}

// A union schema without a refs field is rejected.
pub fn union_missing_refs_test() {
  let assert Ok(ctx) = context.builder() |> context.build

  json.object([#("type", json.string("union"))])
  |> union.validate_schema(ctx)
  |> should.be_error
}

// Data whose $type matches a listed ref validates.
pub fn valid_union_data_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let schema =
    json.object([
      #("type", json.string("union")),
      #(
        "refs",
        json.preprocessed_array([json.string("app.bsky.feed.post")]),
      ),
    ])
  let payload =
    json.object([
      #("$type", json.string("app.bsky.feed.post")),
      #("text", json.string("Hello world")),
    ])

  union.validate_data(payload, schema, ctx) |> should.be_ok
}

// Union data without a $type discriminator is rejected.
pub fn union_data_missing_type_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let schema =
    json.object([
      #("type", json.string("union")),
      #("refs", json.preprocessed_array([json.string("#post")])),
    ])
  let payload = json.object([#("text", json.string("Hello"))])

  union.validate_data(payload, schema, ctx) |> should.be_error
}

// Non-object data cannot satisfy a union schema.
pub fn union_data_non_object_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let schema =
    json.object([
      #("type", json.string("union")),
      #("refs", json.preprocessed_array([json.string("#post")])),
    ])
  let payload = json.string("not an object")

  union.validate_data(payload, schema, ctx) |> should.be_error
}

// In a closed union, a $type outside the refs list is rejected.
pub fn union_data_type_not_in_refs_test() {
  let assert Ok(ctx) = context.builder() |> context.build
  let schema =
    json.object([
      #("type", json.string("union")),
      #(
        "refs",
        json.preprocessed_array([json.string("app.bsky.feed.post")]),
      ),
      #("closed", json.bool(True)),
    ])
  let payload =
    json.object([
      #("$type", json.string("app.bsky.feed.repost")),
      #("text", json.string("Hello")),
    ])

  union.validate_data(payload, schema, ctx) |> should.be_error
}
+160
test/unknown_validator_test.gleam
+160
test/unknown_validator_test.gleam
···
1
+
import gleam/json
import gleeunit
import gleeunit/should
import validation/context
import validation/meta/unknown

pub fn main() {
  gleeunit.main()
}

// ========== SCHEMA VALIDATION TESTS ==========

// The bare unknown schema is valid.
pub fn valid_unknown_schema_test() {
  let schema = json.object([#("type", json.string("unknown"))])

  let assert Ok(ctx) = context.builder() |> context.build

  unknown.validate_schema(schema, ctx) |> should.be_ok
}

// A description is permitted alongside the type.
pub fn valid_unknown_schema_with_description_test() {
  let schema =
    json.object([
      #("type", json.string("unknown")),
      #("description", json.string("Flexible data following ATProto rules")),
    ])

  let assert Ok(ctx) = context.builder() |> context.build

  unknown.validate_schema(schema, ctx) |> should.be_ok
}

// Any other field on an unknown schema is rejected.
pub fn invalid_unknown_schema_extra_fields_test() {
  let schema =
    json.object([
      #("type", json.string("unknown")),
      #("extraField", json.string("not allowed")),
    ])

  let assert Ok(ctx) = context.builder() |> context.build

  unknown.validate_schema(schema, ctx) |> should.be_error
}

// ========== DATA VALIDATION TESTS ==========

// A plain object passes unknown-data validation.
pub fn valid_unknown_data_simple_object_test() {
  let schema = json.object([#("type", json.string("unknown"))])
  let data = json.object([#("a", json.string("alphabet")), #("b", json.int(3))])

  let assert Ok(ctx) = context.builder() |> context.build

  unknown.validate_data(data, schema, ctx) |> should.be_ok
}

// An object carrying an ordinary $type discriminator is allowed.
pub fn valid_unknown_data_with_type_field_test() {
  let schema = json.object([#("type", json.string("unknown"))])
  let data =
    json.object([
      #("$type", json.string("example.lexicon.record#demoObject")),
      #("a", json.int(1)),
      #("b", json.int(2)),
    ])

  let assert Ok(ctx) = context.builder() |> context.build

  unknown.validate_data(data, schema, ctx) |> should.be_ok
}

// Nested objects are allowed.
pub fn valid_unknown_data_nested_objects_test() {
  let schema = json.object([#("type", json.string("unknown"))])
  let data =
    json.object([
      #("outer", json.object([#("inner", json.string("nested"))])),
      #("count", json.int(42)),
    ])

  let assert Ok(ctx) = context.builder() |> context.build

  unknown.validate_data(data, schema, ctx) |> should.be_ok
}

// An empty object is still an object, so it passes.
pub fn valid_unknown_data_empty_object_test() {
  let schema = json.object([#("type", json.string("unknown"))])
  let data = json.object([])

  let assert Ok(ctx) = context.builder() |> context.build

  unknown.validate_data(data, schema, ctx) |> should.be_ok
}

// Non-object data — booleans, strings, integers, arrays, null — is rejected.
pub fn invalid_unknown_data_boolean_test() {
  let schema = json.object([#("type", json.string("unknown"))])
  let data = json.bool(False)

  let assert Ok(ctx) = context.builder() |> context.build

  unknown.validate_data(data, schema, ctx) |> should.be_error
}

pub fn invalid_unknown_data_string_test() {
  let schema = json.object([#("type", json.string("unknown"))])
  let data = json.string("not an object")

  let assert Ok(ctx) = context.builder() |> context.build

  unknown.validate_data(data, schema, ctx) |> should.be_error
}

pub fn invalid_unknown_data_integer_test() {
  let schema = json.object([#("type", json.string("unknown"))])
  let data = json.int(123)

  let assert Ok(ctx) = context.builder() |> context.build

  unknown.validate_data(data, schema, ctx) |> should.be_error
}

pub fn invalid_unknown_data_array_test() {
  let schema = json.object([#("type", json.string("unknown"))])
  let data = json.preprocessed_array([json.int(1), json.int(2), json.int(3)])

  let assert Ok(ctx) = context.builder() |> context.build

  unknown.validate_data(data, schema, ctx) |> should.be_error
}

pub fn invalid_unknown_data_null_test() {
  let schema = json.object([#("type", json.string("unknown"))])
  let data = json.null()

  let assert Ok(ctx) = context.builder() |> context.build

  unknown.validate_data(data, schema, ctx) |> should.be_error
}

// Reserved ATProto wrapper objects are excluded from "unknown" data.
pub fn invalid_unknown_data_bytes_object_test() {
  let schema = json.object([#("type", json.string("unknown"))])
  // Bytes object: {"$bytes": "base64-string"}
  let data = json.object([#("$bytes", json.string("SGVsbG8gd29ybGQ="))])

  let assert Ok(ctx) = context.builder() |> context.build

  unknown.validate_data(data, schema, ctx) |> should.be_error
}

pub fn invalid_unknown_data_blob_object_test() {
  let schema = json.object([#("type", json.string("unknown"))])
  // Blob object: {"$type": "blob", ...}
  let data =
    json.object([
      #("$type", json.string("blob")),
      #("mimeType", json.string("text/plain")),
      #("size", json.int(12_345)),
    ])

  let assert Ok(ctx) = context.builder() |> context.build

  unknown.validate_data(data, schema, ctx) |> should.be_error
}