+2 -1  .gitignore
+25  CHANGELOG.md (new file)
···
# Changelog

## 1.2.0

### Added

- Validate full ATProto blob structure with stricter field checking

## 1.1.0

### Added

- Add `ValidationContext` type for external use
- Add `build_validation_context` function to build a reusable validation context from lexicons
- Add `validate_record_with_context` function for faster batch validation using a pre-built context

## 1.0.1

### Fixed

- Fix `is_null_dynamic` to use `dynamic.classify` for consistent null detection

## 1.0.0

- Initial release
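The 1.1.0 entries above introduce a reusable-context API for batch validation. A minimal sketch of how the two additions might fit together, assuming `build_validation_context` takes the lexicon list and returns a `Result`, and that `validate_record_with_context` mirrors `validate_record` with the context replacing the raw lexicons:

```gleam
import gleam/list
import honk

// Hypothetical batch helper: build the validation context once, then reuse
// it for every record. Signatures are inferred from the changelog entries.
pub fn validate_all(lexicons, nsid, records) {
  // Parses and indexes the lexicons a single time.
  let assert Ok(ctx) = honk.build_validation_context(lexicons)
  // Each record then skips the per-call setup that validate_record repeats.
  list.map(records, fn(record) {
    honk.validate_record_with_context(ctx, nsid, record)
  })
}
```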
+201  LICENSE (new file)
···
(The standard Apache License, Version 2.0, January 2004, http://www.apache.org/licenses/, added verbatim; the full 201-line text is omitted here.)
+20 -26  README.md
···

  An [AT Protocol](https://atproto.com/) Lexicon validator for Gleam.

+ > [!WARNING]
+ > While I've tried to be as thorough as possible checking the validators against various atproto
+ > validation libraries, this may contain bugs. Please report any issues you find.
+
  ## Installation

  ```sh
···

  ## Features

- - ✅ **Type Validators**: string, integer, boolean, bytes, blob, cid-link, null, object, array, union, ref, record, query, procedure, subscription, token, unknown
- - ✅ **String Format Validators**: datetime (RFC3339), uri, at-uri, did, handle, at-identifier, nsid, cid, language, tid, record-key
- - ✅ **Constraint Validation**: length limits, ranges, enums, required fields
- - ✅ **Reference Resolution**: local (`#def`), global (`nsid#def`), and cross-lexicon references
- - ✅ **Circular Dependency Detection**: prevents infinite reference loops
- - ✅ **Detailed Error Messages**: validation errors with path information
+ - **Type Validators**: string, integer, boolean, bytes, blob, cid-link, null, object, array, union, ref, record, query, procedure, subscription, token, unknown
+ - **String Format Validators**: datetime (RFC3339), uri, at-uri, did, handle, at-identifier, nsid, cid, language, tid, record-key
+ - **Constraint Validation**: length limits, ranges, enums, required fields
+ - **Reference Resolution**: local (`#def`), global (`nsid#def`), and cross-lexicon references
+ - **Detailed Error Messages**: validation errors with path information

- ## API Overview
+ ## CLI Usage

- ### Main Functions
-
- - `validate(lexicons: List(Json))` - Validates one or more lexicon schemas
- - `validate_record(lexicons, nsid, data)` - Validates record data against a schema
- - `is_valid_nsid(value)` - Checks if a string is a valid NSID
- - `validate_string_format(value, format)` - Validates string against a format
+ Validate lexicon files from the command line:

- ### Context Builder Pattern
+ ```sh
+ # Validate a single file
+ gleam run -m honk check ./lexicons/xyz/statusphere/status.json

- ```gleam
- import validation/context
- import validation/field
+ # Validate all .json files in a directory
+ gleam run -m honk check ./lexicons/

- let assert Ok(ctx) =
-   context.builder()
-   |> context.with_validator(field.dispatch_data_validation)
-   |> context.with_lexicons([lexicon])
-   |> context.build
+ # Show help
+ gleam run -m honk help
  ```

+ When validating a directory, all lexicons are loaded together to resolve cross-lexicon references.
+
  ## Testing

  ```sh
  gleam test
  ```
-
- ## Implementation
-
- This implementation aligns with the [indigo/atproto/lexicon](https://github.com/bluesky-social/indigo/tree/main/atproto/lexicon) implementation as much as possible, ensuring compatibility with the ATProto specification and ecosystem.

  ## Documentation
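To make the cross-lexicon note above concrete, here is a sketch of two minimal lexicons validated together, where the second refers into the first through a global `nsid#def` reference. The `validate(lexicons: List(Json))` signature comes from the removed API overview; the field shapes are illustrative only:

```gleam
import gleam/json
import honk

// Illustrative schemas, not real lexicons: com.example.b can only be
// validated when com.example.a is loaded in the same call.
pub fn check() {
  let a =
    json.object([
      #("lexicon", json.int(1)),
      #("id", json.string("com.example.a")),
      #("defs", json.object([
        #("main", json.object([#("type", json.string("string"))])),
      ])),
    ])
  let b =
    json.object([
      #("lexicon", json.int(1)),
      #("id", json.string("com.example.b")),
      #("defs", json.object([
        #("main", json.object([
          #("type", json.string("ref")),
          #("ref", json.string("com.example.a#main")),
        ])),
      ])),
    ])
  // Passing both schemas together lets b's reference resolve.
  honk.validate([a, b])
}
```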
+16  build-js.sh (new file)
···
#!/bin/bash
set -e

gleam build --target javascript

rm -rf dist
mkdir -p dist

npx esbuild src/honk_bundle.mjs \
  --bundle \
  --minify \
  --format=iife \
  --global-name=honk \
  --outfile=dist/honk.min.js

echo "Built to dist/honk.min.js"
+4  dist/honk.min.js (new file)
···
(Generated, minified esbuild output: the 4-line IIFE bundle produced by build-js.sh; contents omitted here.)
")+", found '"+f+"'")):new o(void 0)}}})})}else{let s=Kl(e);return new u(h(r+': union data must be an object which includes the "$type" property, found '+s))}}var Xl=w(["type","refs","closed","description"]);function Wn(e,t){let n=b(t),r=z(e);return p(O(n,r,Xl,"union"),i=>{let s,l=T(e,"refs");if(l instanceof d){let f=l[0];s=new o(f)}else s=new u(m(n+": union missing required 'refs' field"));return p(s,f=>p(ot(f,new o(void 0),(c,$,_)=>p(c,x=>j($,L)instanceof o?new o(void 0):new u(m(n+": refs["+Ht(_)+"] must be a string")))),c=>p((()=>{let $=Je(e,"closed");return $ instanceof d?$[0]&&Ke(f)?new u(m(n+": union cannot be closed with empty refs array")):new o(void 0):new o(void 0)})(),$=>p((()=>{if(Ke(f)){let x=Je(e,"closed");return x instanceof d?x[0]?new u(m(n+": union cannot have empty refs array when closed=true")):new o(void 0):new o(void 0)}else return new o(void 0)})(),_=>Wl(f,t,n)))))})}function js(e,t,n){let r=b(n);if(Pe(e)){let s=je(e),l;return Z(s,'"')&&Oe(s,'"')?l=Wt(s,1,G(s)-2):l=s,Ce(l)?new u(h(r+": token value cannot be empty string")):new o(void 0)}else return new u(h(r+": expected string for token data, got other type"))}var Ql=w(["type","description"]);function Yn(e,t){let n=b(t),r=z(e);return O(n,r,Ql,"token")}function sn(e,t,n){let r=b(n);if(q(e)){if(k(e,"$bytes")instanceof d)return new u(h(r+": unknown type cannot be a bytes object"));{let l=k(e,"$type");return l instanceof d?l[0]==="blob"?new u(h(r+": unknown type cannot be a blob object")):new o(void 0):new o(void 0)}}else return new u(h(r+": unknown type must be an object, not a primitive or array"))}var ea=w(["type","description"]);function Mt(e,t){let n=b(t),r=z(e);return O(n,r,ea,"unknown")}function ta(e,t){let n=C(e,"ref");if(n instanceof d){let r=n[0];if(q(r)){let s=k(r,"$link");if(s instanceof d){let l=s[0];return ms(l)?new o(void 0):new u(h(t+": blob ref.$link must be a valid CID with raw multicodec (bafkrei prefix)"))}else return new u(h(t+": blob ref must have $link field"))}else return new u(h(t+": blob ref must be an object"))}else return new u(h(t+": blob missing required 'ref' field"))}function Ss(e,t,n,r){return Fe(t,"*")?t==="*"?new o(void 0):new u(m(e+": blob MIME type '"+r+"' can only use '*' as a complete wildcard for "+n)):new o(void 0)}function na(e,t,n){if(Ce(t))return new u(m(e+": blob MIME type cannot be empty"));if(t==="*/*")return new o(void 0);if(Fe(t,"/")){let s=fe(t,"/");if(s instanceof y)return new u(m(e+": blob MIME type '"+t+"' must have exactly one '/' character"));{let l=s.tail;if(l instanceof y)return new u(m(e+": blob MIME type '"+t+"' must have exactly one '/' character"));if(l.tail instanceof y){let f=s.head,c=l.head;return p(Ss(e,f,"type",t),$=>Ss(e,c,"subtype",t))}else return new u(m(e+": blob MIME type '"+t+"' must have exactly one '/' character"))}}else return new u(m(e+": blob MIME type '"+t+"' must contain a '/' character"))}function ra(e,t){return ot(t,new o(void 0),(n,r,i)=>p(n,s=>{let l=j(r,L);if(l instanceof o){let a=l[0];return na(e,a,i)}else return new u(m(e+": blob accept["+B(i)+"] must be a string"))}))}function ia(e,t){if(t==="*/*")return!0;{let n=fe(e,"/"),r=fe(t,"/");if(n instanceof y)return!1;if(r instanceof y)return!1;{let i=n.tail;if(i instanceof y)return!1;{let s=r.tail;if(s instanceof y)return!1;if(i.tail instanceof y)if(s.tail instanceof y){let f=n.head,c=r.head,$=i.head,_=s.head,x;c==="*"?x=!0:x=f===c;let g=x,S;return _==="*"?S=!0:S=$===_,g&&S}else return!1;else return!1}}}}function sa(e,t,n){let r=ie(n,s=>j(s,L));return En(r,s=>ia(t,s))?new o(void 0):new 
u(h(e+": blob mimeType '"+t+"' not accepted. Allowed: "+Ve(r,", ")))}var oa=w(["type","accept","maxSize","description"]);function Kn(e,t){let n=b(t),r=z(e);return p(O(n,r,oa,"blob"),i=>p((()=>{let s=T(e,"accept");if(s instanceof d){let l=s[0];return ra(n,l)}else return new o(void 0)})(),s=>{let l=M(e,"maxSize");return l instanceof d?l[0]>0?new o(void 0):new u(m(n+": blob maxSize must be greater than 0")):new o(void 0)}))}var la=w(["$type","ref","mimeType","size"]);function aa(e,t){let n=kn(t,r=>!st(la,r));if(n instanceof y)return new o(void 0);{let r=n.head;return new u(h(e+": blob has unexpected field '"+r+"'"))}}function Es(e,t,n){let r=b(n);if(q(e)){let s=z(e);return p(aa(r,s),l=>p((()=>{let a=k(e,"$type");if(a instanceof d){let f=a[0];if(f==="blob")return new o(void 0);{let c=f;return new u(h(r+": blob $type must be 'blob', got '"+c+"'"))}}else return new u(h(r+": blob missing required '$type' field"))})(),a=>p(ta(e,r),f=>p((()=>{let c=k(e,"mimeType");if(c instanceof d){let $=c[0];return Ce($)?new u(h(r+": blob mimeType cannot be empty")):new o($)}else return new u(h(r+": blob missing required 'mimeType' field"))})(),c=>p((()=>{let $=M(e,"size");if($ instanceof d){let _=$[0];return _>=0?new o(_):new u(h(r+": blob size must be non-negative"))}else return new u(h(r+": blob missing or invalid 'size' field"))})(),$=>p((()=>{let _=T(t,"accept");if(_ instanceof d){let x=_[0];return sa(r,c,x)}else return new o(void 0)})(),_=>{let x=M(t,"maxSize");if(x instanceof d){let g=x[0];return $<=g?new o(void 0):new u(h(r+": blob size "+B($)+" exceeds maxSize "+B(g)))}else return new o(void 0)}))))))}else return new u(h(r+": expected blob object"))}function ln(e,t,n){let r=b(n);if(ht(e)){let s=je(e),l=s==="true";if(l||s==="false"){let c=l,$=Je(t,"const");if($ instanceof d){let _=$[0];return _!==c?new u(h(r+": must be constant value "+(_?"true":"false"))):new o(void 0)}else return new o(void 0)}else return new u(h(r+": invalid boolean representation"))}else return new u(h(r+": expected boolean, got other type"))}var ua=w(["type","const","default","description"]);function Dt(e,t){let n=b(t),r=z(e);return p(O(n,r,ua,"boolean"),i=>{let s=!D(Je(e,"const"),new A),l=!D(Je(e,"default"),new A);return Ct(n,s,l,"boolean")})}function As(e,t,n){let r=b(n);if(q(e)){let s=z(e);return p(he(s)===1?new o(void 0):new u(h(r+": $bytes objects must have a single field")),l=>{let a=k(e,"$bytes");if(a instanceof d){let f=a[0],c=Zi(f);if(c instanceof o){let $=c[0],_=Cn($),x=M(t,"minLength"),g=M(t,"maxLength");return p((()=>{if(x instanceof d){let S=x[0];return _<S?new u(h(r+": bytes size out of bounds: "+Ht(_))):new o(void 0)}else return new o(void 0)})(),S=>p((()=>{if(g instanceof d){let E=g[0];return _>E?new u(h(r+": bytes size out of bounds: "+Ht(_))):new o(void 0)}else return new o(void 0)})(),E=>new o(void 0)))}else return new u(h(r+": decoding $bytes value: invalid base64 encoding"))}else return new u(h(r+": $bytes field missing or not a string"))})}else return new u(h(r+": expecting bytes"))}var fa=w(["type","minLength","maxLength","description"]);function Xn(e,t){let n=b(t),r=z(e);return p(O(n,r,fa,"bytes"),i=>{let s=M(e,"minLength"),l=M(e,"maxLength");return p(s instanceof d?s[0]<0?new u(m(n+": bytes schema minLength below zero")):new o(void 0):new o(void 0),a=>p(l instanceof d?l[0]<0?new u(m(n+": bytes schema maxLength below zero")):new o(void 0):new o(void 0),f=>tt(n,s,l,"bytes")))})}function Cs(e,t,n){let r=b(n);if(q(e)){let s=k(e,"$link");if(s instanceof d){let l=s[0];return Fn(l)?new o(void 0):new u(h(r+": invalid 
CID format in $link"))}else return new u(h(r+": CID link must have $link field"))}else return new u(h(r+": expected CID link object"))}var ca=w(["type","description"]);function Qn(e,t){let n=b(t),r=z(e);return O(n,r,ca,"cid-link")}function da(e,t,n){return Jn(n,e,t,"integer",B,(r,i)=>r===i)}function un(e,t,n){let r=b(n);if(mt(e)){let s=je(e),l=Sr(s);if(l instanceof o){let a=l[0],f=M(t,"const");if(f instanceof d){let c=f[0];return c!==a?new u(h(r+": must be constant value "+B(c)+", found "+B(a))):new o(void 0)}else return p((()=>{let c=T(t,"enum");if(c instanceof d){let $=c[0],_=ie($,x=>j(x,Ue));return da(a,_,r)}else return new o(void 0)})(),c=>{let $=M(t,"minimum"),_=M(t,"maximum");return gs(r,a,$,_)})}else return new u(h(r+": failed to parse integer value"))}else return new u(h(r+": expected integer, got other type"))}var pa=w(["type","minimum","maximum","enum","const","default","description"]);function Lt(e,t){let n=b(t),r=z(e);return p(O(n,r,pa,"integer"),i=>{let s=M(e,"minimum"),l=M(e,"maximum");return p(bs(n,s,l),a=>p((()=>{let f=T(e,"enum");if(f instanceof d){let c=f[0];return K(c,void 0,($,_)=>j(_,Ue)instanceof o?new o(void 0):new u(m(n+": enum values must be integers")))}else return new o(void 0)})(),f=>{let c=!D(M(e,"const"),new A),$=!D(M(e,"default"),new A);return Ct(n,c,$,"integer")}))})}function Ms(e,t,n){let r=b(n);return _t(e)?new o(void 0):new u(h(r+": expected null, got other type"))}var $a=w(["type","description"]);function er(e,t){let n=b(t),r=z(e);return O(n,r,$a,"null")}function _a(e,t,n,r){let i=Zt(e);return ti(r,i,t,n,"string")}function ma(e,t,n,r){let i,l=Gt(e);return i=he(l),ti(r,i,t,n,"string (graphemes)")}function ha(e,t,n){if(Vn(e,t))return new o(void 0);{let i=In(t);return new u(h(n+": string does not match format '"+i+"'"))}}function wa(e,t,n){return Jn(n,e,t,"string",r=>r,(r,i)=>r===i)}function cn(e,t,n){let r=b(n);if(Pe(e)){let s=je(e),l;Z(s,'"')&&Oe(s,'"')?l=Wt(s,1,G(s)-2):l=s;let f=l,c=M(t,"minLength"),$=M(t,"maxLength");return p(_a(f,c,$,r),_=>{let x=M(t,"minGraphemes"),g=M(t,"maxGraphemes");return p(ma(f,x,g,r),S=>{let E,I=k(t,"format");if(I instanceof d){let H=I[0],J=Pr(H);if(J instanceof o){let de=J[0];E=ha(f,de,r)}else E=new o(void 0)}else E=new o(void 0);return p(E,H=>{let J=T(t,"enum");if(J instanceof d){let de=J[0],We=ie(de,Le=>j(Le,L));return wa(f,We,r)}else return new o(void 0)})})})}else return new u(h(r+": expected string, got other type"))}var xa=w(["type","format","minLength","maxLength","minGraphemes","maxGraphemes","enum","knownValues","const","default","description"]);function Tt(e,t){let n=b(t),r=z(e);return p(O(n,r,xa,"string"),i=>{let s,l=k(e,"format");if(l instanceof d){let _=l[0];Pr(_)instanceof o?s=new o(void 0):s=new u(m(n+": unknown format '"+_+"'. 
Valid formats: datetime, uri, at-uri, did, handle, at-identifier, nsid, cid, language, tid, record-key"))}else s=new o(void 0);let f=p(s,_=>{let x=M(e,"minLength"),g=M(e,"maxLength"),S=M(e,"minGraphemes"),E=M(e,"maxGraphemes");return p(x instanceof d?x[0]<0?new u(m(n+": string schema minLength below zero")):new o(void 0):new o(void 0),I=>p(g instanceof d?g[0]<0?new u(m(n+": string schema maxLength below zero")):new o(void 0):new o(void 0),xe=>p(S instanceof d?S[0]<0?new u(m(n+": string schema minGraphemes below zero")):new o(void 0):new o(void 0),H=>p(E instanceof d?E[0]<0?new u(m(n+": string schema maxGraphemes below zero")):new o(void 0):new o(void 0),J=>p(tt(n,x,g,"string"),de=>tt(n,S,E,"string (graphemes)"))))))}),c=p(f,_=>{let x=T(e,"enum");if(x instanceof d){let g=x[0];return K(g,void 0,(S,E)=>j(E,L)instanceof o?new o(void 0):new u(m(n+": enum values must be strings")))}else return new o(void 0)}),$=p(c,_=>{let x=T(e,"knownValues");if(x instanceof d){let g=x[0];return K(g,void 0,(S,E)=>j(E,L)instanceof o?new o(void 0):new u(m(n+": knownValues must be strings")))}else return new o(void 0)});return p($,_=>{let x=!D(k(e,"const"),new A),g=!D(k(e,"default"),new A);return Ct(n,x,g,"string")})})}function ya(e,t,n){let r=ie(t,i=>j(i,L));return p(he(r)===he(t)?new o(void 0):new u(m(e+": required fields must be strings")),i=>{if(n instanceof d){let s=n[0],l=ke(s);if(l instanceof o){let a=l[0];return K(r,void 0,(f,c)=>Qe(a,c)?new o(void 0):new u(m(e+": required field '"+c+"' not found in properties")))}else return new o(void 0)}else return Ke(r)?new o(void 0):new u(m(e+": required fields specified but no properties defined"))})}function ga(e,t,n){let r=ie(t,i=>j(i,L));return p(he(r)===he(t)?new o(void 0):new u(m(e+": nullable fields must be strings")),i=>{if(n instanceof d){let s=n[0],l=ke(s);if(l instanceof o){let a=l[0];return K(r,void 0,(f,c)=>Qe(a,c)?new o(void 0):new u(m(e+": nullable field '"+c+"' not found in properties")))}else return new o(void 0)}else return Ke(r)?new o(void 0):new u(m(e+": nullable fields specified but no properties defined"))})}function ba(e,t,n){let r=ie(t,i=>j(i,L));return K(r,void 0,(i,s)=>{if(C(n,s)instanceof d)return new o(void 0);{let a;e===""?a="required field '"+s+"' is missing":a=e+": required field '"+s+"' is missing";let f=a;return new u(h(f))}})}function Ls(e){return _t(e)?"null":ht(e)?"boolean":mt(e)?"number":Pe(e)?"string":Yt(e)?"array":"object"}var va=w(["type","properties","required","nullable","description"]),ja=w(["type","items","minLength","maxLength","description"]);function ka(e,t,n){return ri(e,t,n)}function ri(e,t,n){let r=k(t,"type");if(r instanceof d){let i=r[0];if(i==="string")return cn(e,t,n);if(i==="integer")return un(e,t,n);if(i==="boolean")return ln(e,t,n);if(i==="bytes")return As(e,t,n);if(i==="blob")return Es(e,t,n);if(i==="cid-link")return Cs(e,t,n);if(i==="null")return Ms(e,t,n);if(i==="object")return tr(e,t,n);if(i==="array")return nr(e,t,n);if(i==="union")return Zn(e,t,n);if(i==="ref")return Gn(e,t,n);if(i==="token")return js(e,t,n);if(i==="unknown")return sn(e,t,n);{let s=i;return new u(h("Unknown schema type '"+s+"' at '"+b(n)+"'"))}}else return new u(h("Schema missing type field at '"+b(n)+"'"))}function tr(e,t,n){let r=b(n);if(q(e))return p((()=>{let s=T(t,"required");if(s instanceof d){let l=s[0];return ba(r,l,e)}else return new o(void 0)})(),s=>{let l,a=T(t,"nullable");if(a instanceof d){let $=a[0];l=ie($,_=>j(_,L))}else l=w([]);let f=l,c=C(t,"properties");if(c instanceof d){let $=c[0];return Sa(e,$,f,n)}else return new 
o(void 0)});{let s=Ls(e);return new u(h("Expected object at '"+r+"', found "+s))}}function Sa(e,t,n,r){let i=ke(e);if(i instanceof o){let s=i[0],l=ke(t);if(l instanceof o){let a=l[0];return me(s,new o(void 0),(f,c,$)=>p(f,_=>{let x=ue(a,c);if(x instanceof o){let g=x[0],S=te(g);if(S instanceof o){let E=S[0],I=N(r,c);if(ss($))return st(n,c)?new o(void 0):new u(h("Field '"+c+"' at '"+b(r)+"' cannot be null"));{let H=te($);if(H instanceof o){let J=H[0];return ka(J,E,I)}else return H}}else return S}else return new o(void 0)}))}else return l}else return i}function Ea(e,t,n){let r=te(e);return p(r,i=>{let s=k(t,"type");return s instanceof d&&s[0]==="ref"?Gn(i,t,n):ri(i,t,n)})}function nr(e,t,n){let r=b(n);if(Yt(e)){let s,l=Vr(e);if(l instanceof d){let f=l[0];s=new o(f)}else s=new u(h(r+": failed to parse array"));return p(s,f=>{let c=he(f);return p((()=>{let $=M(t,"minLength");if($ instanceof d){let _=$[0];return c<_?new u(h(r+": array has length "+B(c)+" but minimum length is "+B(_))):new o(void 0)}else return new o(void 0)})(),$=>p((()=>{let _=M(t,"maxLength");if(_ instanceof d){let x=_[0];return c>x?new u(h(r+": array has length "+B(c)+" but maximum length is "+B(x))):new o(void 0)}else return new o(void 0)})(),_=>{let x=C(t,"items");if(x instanceof d){let g=x[0];return ot(f,new o(void 0),(S,E,I)=>p(S,xe=>{let H=N(n,"["+B(I)+"]");return Ea(E,g,H)}))}else return new o(void 0)}))})}else{let s=Ls(e);return new u(h(r+": expected array, found "+s))}}function za(e,t){return ii(e,t)}function ii(e,t){let n=k(e,"type");if(n instanceof d){let r=n[0];if(r==="string")return Tt(e,t);if(r==="integer")return Lt(e,t);if(r==="boolean")return Dt(e,t);if(r==="bytes")return Xn(e,t);if(r==="blob")return Kn(e,t);if(r==="cid-link")return Qn(e,t);if(r==="null")return er(e,t);if(r==="object")return pn(e,t);if(r==="array")return $n(e,t);if(r==="union")return Wn(e,t);if(r==="ref")return rn(e,t);if(r==="token")return Yn(e,t);if(r==="unknown")return Mt(e,t);{let i=r;return new u(m(b(t)+": unknown type '"+i+"'"))}}else return new u(m(b(t)+": schema missing type field"))}function pn(e,t){let n=b(t),r=z(e);return p(O(n,r,va,"object"),i=>{let s;return T(e,"properties")instanceof d?s=new u(m(n+": properties must be an object, not an array")):q(e)?s=new o(new A):s=new o(new A),p(s,f=>{let c=C(e,"properties");return p((()=>{let $=T(e,"required");if($ instanceof d){let _=$[0];return ya(n,_,c)}else return new o(void 0)})(),$=>p((()=>{let _=T(e,"nullable");if(_ instanceof d){let x=_[0];return ga(n,x,c)}else return new o(void 0)})(),_=>{if(c instanceof d){let x=c[0];return q(x)?Aa(x,t):new o(void 0)}else return new o(void 0)}))})})}function Aa(e,t){let n=ke(e);if(n instanceof o){let r=n[0];return me(r,new o(void 0),(i,s,l)=>p(i,a=>{let f=te(l);if(f instanceof o){let c=f[0],$=N(t,"properties."+s);return za(c,$)}else return f}))}else return n}function Oa(e,t){let n=k(e,"type");return n instanceof d&&n[0]==="ref"?rn(e,t):ii(e,t)}function $n(e,t){let n=b(t),r=z(e);return p(O(n,r,ja,"array"),i=>{let s,l=C(e,"items");if(l instanceof d){let f=l[0];s=new o(f)}else s=new u(m(n+": array missing required 'items' field"));return p(s,f=>{let c=N(t,".items");return p(Oa(f,c),$=>{let _=M(e,"minLength"),x=M(e,"maxLength");return p(tt(n,_,x,"array"),g=>new o(void 0))})})})}function Ca(e,t,n){if(t instanceof d){let r=t[0];return K(r,void 0,(i,s)=>{let l=j(s,L);if(l instanceof o){let a=l[0];return Qe(n,a)?new o(void 0):new u(m(e+": required field '"+a+"' not found in properties"))}else return new u(m(e+": required field must be a string"))})}else 
return new o(void 0)}function Ge(e,t){let n=k(e,"type");if(n instanceof d){let r=n[0];if(r==="boolean")return Dt(e,t);if(r==="integer")return Lt(e,t);if(r==="string")return Tt(e,t);if(r==="unknown")return Mt(e,t);if(r==="array")return $n(e,t);{let i=r;return new u(m(b(t)+": unknown type '"+i+"'"))}}else return new u(m(b(t)+": schema missing type field"))}function Ba(e,t,n,r){let i=e+".properties."+t,s=k(n,"type");if(s instanceof d){let l=s[0];if(l==="boolean"){let a=N(r,"properties."+t);return Ge(n,a)}else if(l==="integer"){let a=N(r,"properties."+t);return Ge(n,a)}else if(l==="string"){let a=N(r,"properties."+t);return Ge(n,a)}else if(l==="unknown"){let a=N(r,"properties."+t);return Ge(n,a)}else if(l==="array"){let a=C(n,"items");if(a instanceof d){let f=a[0],c=k(f,"type");if(c instanceof d){let $=c[0];if($==="boolean"){let _=N(r,"properties."+t);return Ge(n,_)}else if($==="integer"){let _=N(r,"properties."+t);return Ge(n,_)}else if($==="string"){let _=N(r,"properties."+t);return Ge(n,_)}else if($==="unknown"){let _=N(r,"properties."+t);return Ge(n,_)}else{let _=$;return new u(m(i+": params array items must be boolean, integer, string, or unknown, got '"+_+"'"))}}else return new u(m(i+": array items missing type field"))}else return new u(m(i+": array property missing items field"))}else{let a=l;return new u(m(i+": params properties must be boolean, integer, string, unknown, or arrays of these, got '"+a+"'"))}}else return new u(m(i+": property missing type field"))}function Ma(e,t,n){return Ln(t,new o(void 0),(r,i,s)=>r instanceof o?p(i===""?new u(m(e+": empty property name not allowed")):new o(void 0),l=>p((()=>{let a=te(s);return a instanceof o?a:new u(m(e+": invalid property value for '"+i+"'"))})(),a=>Ba(e,i,a,n))):r)}var Da=w(["type","description","properties","required"]);function Ze(e,t){let n=b(t),r=z(e);return p(O(n,r,Da,"params"),i=>p((()=>{let s=k(e,"type");if(s instanceof d){let l=s[0];if(l==="params")return new o(void 0);{let a=l;return new u(m(n+": expected type 'params', got '"+a+"'"))}}else return new u(m(n+": params missing type field"))})(),s=>{let l,a=C(e,"properties");if(a instanceof d){let x=a[0];l=ke(x)}else l=new o(Dn());let f=l,c,$=T(e,"required");$ instanceof d,c=$;let _=c;return p(f,x=>p(Ca(n,_,x),g=>Ma(n,x,t)))}))}function La(e,t){let n=N(t,"parameters");return Ze(e,n)}function Is(e,t,n){return k(t,"encoding")instanceof d?new o(void 0):new u(m(e+": procedure "+n+" missing encoding field"))}var Ta=w(["type","parameters","input","output","errors","description"]);function Ns(e,t){let n=b(t),r=z(e);return p(O(n,r,Ta,"procedure"),i=>p((()=>{let s=C(e,"parameters");if(s instanceof d){let l=s[0];return La(l,t)}else return new o(void 0)})(),s=>p((()=>{let l=C(e,"input");if(l instanceof d){let a=l[0];return Is(n,a,"input")}else return new o(void 0)})(),l=>p((()=>{let a=C(e,"output");if(a instanceof d){let f=a[0];return Is(n,f,"output")}else return new o(void 0)})(),a=>T(e,"errors")instanceof d?new o(void 0):new o(void 0)))))}function Na(e,t){let n=N(t,"parameters");return Ze(e,n)}function qa(e,t){return k(t,"encoding")instanceof d?new o(void 0):new u(m(e+": query output missing encoding field"))}var Ua=w(["type","parameters","output","errors","description"]);function qs(e,t){let n=b(t),r=z(e);return p(O(n,r,Ua,"query"),i=>p((()=>{let s=C(e,"parameters");if(s instanceof d){let l=s[0];return Na(l,t)}else return new o(void 0)})(),s=>p((()=>{let l=C(e,"output");if(l instanceof d){let a=l[0];return qa(n,a)}else return new o(void 0)})(),l=>T(e,"errors")instanceof d?new o(void 
0):new o(void 0))))}function Us(e,t,n){let r=b(n);if(q(e)){let s=C(t,"record");if(s instanceof d){let l=s[0];return tr(e,l,n)}else return new u(h(r+": record schema missing 'record' field"))}else return new u(h(r+": expected object for record"))}function Va(e,t){return t==="tid"?new o(void 0):t==="any"?new o(void 0):t==="nsid"?new o(void 0):Z(t,"literal:")?new o(void 0):new u(m(e+": record has invalid key type '"+t+"'. Must be 'tid', 'any', 'nsid', or 'literal:*'"))}var Ra=w(["type","key","record","description"]),Pa=w(["type","properties","required","nullable","description"]);function Ja(e,t){let n=k(t,"type");if(n instanceof d){let r=n[0];if(r==="object"){let i=z(t);return p(O(e,i,Pa,"record object"),s=>p((()=>{let l=C(t,"properties");if(l instanceof d){let a=l[0];return q(a)?new o(void 0):new u(m(e+": record properties must be an object"))}else return new o(void 0)})(),l=>T(t,"nullable")instanceof d?new o(void 0):C(t,"nullable")instanceof d?new u(m(e+": record nullable field must be an array")):new o(void 0)))}else{let i=r;return new u(m(e+": record field must be type 'object', got '"+i+"'"))}}else return new u(m(e+": record field missing type"))}function Fs(e,t){let n=b(t),r=z(e);return p(O(n,r,Ra,"record"),i=>{let s,l=k(e,"key");if(l instanceof d){let f=l[0];s=new o(f)}else s=new u(m(n+": record missing required 'key' field"));return p(s,f=>p(Va(n,f),c=>{let $,_=C(e,"record");if(_ instanceof d){let g=_[0];$=new o(g)}else $=new u(m(n+": record missing required 'record' field"));return p($,g=>p(Ja(n,g),S=>{let E=N(t,".record");return pn(g,E)}))}))})}function Za(e,t){let n=N(t,"parameters");return Ze(e,n)}function Wa(e,t){let n=C(t,"schema");if(n instanceof d){let r=n[0],i=k(r,"type");if(i instanceof d){let s=i[0];if(s==="union")return new o(void 0);{let l=s;return new u(m(e+": subscription message schema must be type 'union', got '"+l+"'"))}}else return new u(m(e+": subscription message schema missing type field"))}else return new u(m(e+": subscription message missing schema field"))}var Ha=w(["type","parameters","message","errors","description"]);function Vs(e,t){let n=b(t),r=z(e);return p(O(n,r,Ha,"subscription"),i=>p((()=>{let s=C(e,"parameters");if(s instanceof d){let l=s[0];return Za(l,t)}else return new o(void 0)})(),s=>p((()=>{let l=C(e,"message");if(l instanceof d){let a=l[0];return Wa(n,a)}else return new o(void 0)})(),l=>T(e,"errors")instanceof d?new o(void 0):new o(void 0))))}function Ka(e,t){let n=k(e,"type");if(n instanceof d){let r=n[0];if(r==="record")return Fs(e,t);if(r==="query")return qs(e,t);if(r==="procedure")return Ns(e,t);if(r==="subscription")return Vs(e,t);if(r==="params")return Ze(e,t);if(r==="object")return pn(e,t);if(r==="array")return $n(e,t);if(r==="union")return Wn(e,t);if(r==="string")return Tt(e,t);if(r==="integer")return Lt(e,t);if(r==="boolean")return Dt(e,t);if(r==="bytes")return Xn(e,t);if(r==="blob")return Kn(e,t);if(r==="cid-link")return Qn(e,t);if(r==="null")return er(e,t);if(r==="ref")return rn(e,t);if(r==="token")return Yn(e,t);if(r==="unknown")return Mt(e,t);{let i=r;return new u(m("Unknown type: "+i))}}else return new u(m("Definition missing type field"))}function Xa(e){let t,n=Kr();t=ei(n,e);let r=t;if(r instanceof o){let i=r[0],s=Xr(i);if(s instanceof o){let l=s[0],a=me(l.lexicons,ee(),(c,$,_)=>{let x=z(_.defs),g=nn(l,$);return Sn(x,c,(S,E)=>{let I=C(_.defs,E);if(I instanceof d){let xe=I[0],H=Ka(xe,g);if(H instanceof o)return S;{let J=H[0],de;J instanceof pt?de="Lexicon not found: "+J.collection:(J instanceof $t,de=J.message);let 
We=de,Le;Z(We,": ")?Le=Re(We,2):Le=We;let _n=Le,Nt=$+"#"+E+": "+_n,qt=ue(S,$);if(qt instanceof o){let He=qt[0];return _e(S,$,F(Nt,He))}else return _e(S,$,w([Nt]))}}else return S})});return xr(a)?new o(void 0):new u(a)}else{let l=s[0];return new u(bn(w([["builder",w([Ur(l)])]])))}}else{let i=r[0];return new u(bn(w([["builder",w([Ur(i)])]])))}}function Rs(e){let t,n=Kr();return t=ei(n,e),p(t,i=>Xr(i))}function Ps(e,t,n){let r=Pn(e,t);if(r instanceof d){let i=r[0],s=C(i.defs,"main");if(s instanceof d){let l=s[0],a=nn(e,t),f=N(a,"defs.main");return Us(n,l,f)}else return new u(m("Lexicon '"+t+"' has no main definition"))}else return new u(Fr(t))}function Qa(e,t,n){return p(Rs(e),r=>Ps(r,t,n))}function eu(e){return et(e)}function tu(e,t){if(Vn(e,t))return new o(void 0);{let r=In(t);return new u("Value does not match format: "+r)}}function Js(e){return te(e)}function Gs(e){return p((()=>{let t=Mn(e,se);return Xe(t,n=>m("Failed to parse JSON string"))})(),t=>Js(t))}function nu(e){let n=qe(e,Gs);return Xe(n,r=>m("Failed to parse JSON strings"))}return Xs(ru);})();
+108
example/README.md
+108
example/README.md
···
1
+
# Jetstream Validation Example
2
+
3
+
This example demonstrates using **honk** to validate AT Protocol records from Bluesky's Jetstream firehose in real time.
4
+
5
+
## What it does
6
+
7
+
1. Connects to Jetstream using **goose** (WebSocket consumer)
8
+
2. Filters for `xyz.statusphere.status` records
9
+
3. Validates each record using **honk**
10
+
4. Displays validation results with emoji status
11
+
12
+
## Running the example
13
+
14
+
```sh
15
+
cd example
16
+
gleam run
17
+
```
18
+
19
+
The example will connect to the live Jetstream firehose and print a line for each event, showing the validation result, a truncated DID, the status emoji (or the validation error), and the record key:
20
+
21
+
```
22
+
🦢 Honk + Goose: Jetstream Validation Example
23
+
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
24
+
25
+
Connecting to Jetstream...
26
+
Filtering for: xyz.statusphere.status
27
+
Validating records with honk...
28
+
29
+
✅ VALID | q6gjnaw2blty... | 👍 | 3l4abc123
30
+
✅ VALID | wa7b35aakoll... | 👍 | 3l4def456
31
+
❌ INVALID | rfov6bpyztcn... | Data: status exceeds maxGraphemes | 3l4ghi789
32
+
✅ UPDATED | eygmaihciaxp... | 👍 | 3l4jkl012
33
+
🗑️ DELETED | ufbl4k27gp6k... | 3l4mno345
34
+
```
35
+
36
+
## How it works
37
+
38
+
### Lexicon Definition
39
+
40
+
The example defines the `xyz.statusphere.status` lexicon:
41
+
42
+
```json
43
+
{
44
+
"lexicon": 1,
45
+
"id": "xyz.statusphere.status",
46
+
"defs": {
47
+
"main": {
48
+
"type": "record",
49
+
"record": {
50
+
"type": "object",
51
+
"required": ["status", "createdAt"],
52
+
"properties": {
53
+
"status": {
54
+
"type": "string",
55
+
"minLength": 1,
56
+
"maxGraphemes": 1,
57
+
"maxLength": 32
58
+
},
59
+
"createdAt": {
60
+
"type": "string",
61
+
"format": "datetime"
62
+
}
63
+
}
64
+
}
65
+
}
66
+
}
67
+
}
68
+
```
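
For reference, here is a record that should pass this schema, since `status` is a single grapheme within the 32-byte limit and `createdAt` is a valid `datetime` string:

```json
{
  "status": "👍",
  "createdAt": "2025-01-01T12:00:00.000Z"
}
```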
69
+
70
+
### Validation Flow
71
+
72
+
1. **goose** receives Jetstream events via WebSocket
73
+
2. Events are parsed into typed Gleam structures
74
+
3. For `create` and `update` operations:
75
+
- Extract the `record` field (contains the status data)
76
+
- Pass to `honk.validate_record()` with the lexicon (see the sketch after this list)
77
+
- Display ✅ for valid or ❌ for invalid records
78
+
4. For `delete` operations:
79
+
- Just log the deletion (no record to validate)
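
A minimal sketch of the validation call from step 3. The `check_record` wrapper is illustrative only; the real handlers live in `src/example.gleam`:

```gleam
import gleam/io
import gleam/json
import honk
import honk/errors

/// Validate one record against the statusphere lexicon and print the
/// outcome. `lexicon` is the lexicon document shown earlier; `record`
/// is the event's record field converted to Json.
pub fn check_record(lexicon: json.Json, record: json.Json) -> Nil {
  case honk.validate_record([lexicon], "xyz.statusphere.status", record) {
    Ok(_) -> io.println("✅ valid record")
    Error(err) -> io.println("❌ " <> errors.to_string(err))
  }
}
```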
80
+
81
+
### Dependencies
82
+
83
+
- **honk**: AT Protocol lexicon validator (local path)
84
+
- **goose**: Jetstream WebSocket consumer library
85
+
- **gleam_json**: JSON encoding/decoding
86
+
- **gleam_stdlib**: Standard library
87
+
88
+
## Code Structure
89
+
90
+
```
91
+
example/
92
+
├── gleam.toml # Dependencies configuration
93
+
├── README.md # This file
94
+
└── src/
95
+
└── example.gleam # Main application
96
+
├── main() # Entry point
97
+
├── handle_event() # Process Jetstream events
98
+
├── handle_create/update() # Validate records
99
+
├── create_statusphere_lexicon() # Define lexicon
100
+
└── format_error/extract_status # Display helpers
101
+
```
102
+
103
+
## Learn More
104
+
105
+
- **honk**: https://hexdocs.pm/honk
106
+
- **goose**: https://hexdocs.pm/goose
107
+
- **Jetstream**: https://docs.bsky.app/docs/advanced-guides/jetstream
108
+
- **AT Protocol**: https://atproto.com/
+12
example/gleam.toml
+12
example/gleam.toml
···
1
+
name = "example"
2
+
version = "1.0.0"
3
+
description = "Example using honk to validate xyz.statusphere.status records from Jetstream"
4
+
5
+
[dependencies]
6
+
gleam_stdlib = ">= 0.44.0 and < 2.0.0"
7
+
gleam_json = ">= 3.0.0 and < 4.0.0"
8
+
honk = { path = ".." }
9
+
goose = ">= 2.0.0 and < 3.0.0"
10
+
11
+
[dev-dependencies]
12
+
gleeunit = ">= 1.0.0 and < 2.0.0"
+29
example/manifest.toml
+29
example/manifest.toml
···
1
+
# This file was generated by Gleam
2
+
# You typically do not need to edit this file
3
+
4
+
packages = [
5
+
{ name = "exception", version = "2.1.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "exception", source = "hex", outer_checksum = "329D269D5C2A314F7364BD2711372B6F2C58FA6F39981572E5CA68624D291F8C" },
6
+
{ name = "ezstd", version = "1.2.3", build_tools = ["rebar3"], requirements = [], otp_app = "ezstd", source = "hex", outer_checksum = "DE32E0B41BA36A9ED46DB8215DA74777D2F141BB75F67BFC05DBB4B7C3386DEE" },
7
+
{ name = "filepath", version = "1.1.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "filepath", source = "hex", outer_checksum = "B06A9AF0BF10E51401D64B98E4B627F1D2E48C154967DA7AF4D0914780A6D40A" },
8
+
{ name = "gleam_crypto", version = "1.5.1", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_crypto", source = "hex", outer_checksum = "50774BAFFF1144E7872814C566C5D653D83A3EBF23ACC3156B757A1B6819086E" },
9
+
{ name = "gleam_erlang", version = "1.3.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_erlang", source = "hex", outer_checksum = "1124AD3AA21143E5AF0FC5CF3D9529F6DB8CA03E43A55711B60B6B7B3874375C" },
10
+
{ name = "gleam_http", version = "4.3.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_http", source = "hex", outer_checksum = "82EA6A717C842456188C190AFB372665EA56CE13D8559BF3B1DD9E40F619EE0C" },
11
+
{ name = "gleam_json", version = "3.1.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_json", source = "hex", outer_checksum = "44FDAA8847BE8FC48CA7A1C089706BD54BADCC4C45B237A992EDDF9F2CDB2836" },
12
+
{ name = "gleam_otp", version = "1.2.0", build_tools = ["gleam"], requirements = ["gleam_erlang", "gleam_stdlib"], otp_app = "gleam_otp", source = "hex", outer_checksum = "BA6A294E295E428EC1562DC1C11EA7530DCB981E8359134BEABC8493B7B2258E" },
13
+
{ name = "gleam_regexp", version = "1.1.1", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_regexp", source = "hex", outer_checksum = "9C215C6CA84A5B35BB934A9B61A9A306EC743153BE2B0425A0D032E477B062A9" },
14
+
{ name = "gleam_stdlib", version = "0.65.0", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "7C69C71D8C493AE11A5184828A77110EB05A7786EBF8B25B36A72F879C3EE107" },
15
+
{ name = "gleam_time", version = "1.5.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_time", source = "hex", outer_checksum = "D560E672C7279C89908981E068DF07FD16D0C859DCA266F908B18F04DF0EB8E6" },
16
+
{ name = "gleeunit", version = "1.9.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleeunit", source = "hex", outer_checksum = "DA9553CE58B67924B3C631F96FE3370C49EB6D6DC6B384EC4862CC4AAA718F3C" },
17
+
{ name = "goose", version = "2.0.0", build_tools = ["gleam"], requirements = ["exception", "ezstd", "gleam_crypto", "gleam_erlang", "gleam_http", "gleam_json", "gleam_otp", "gleam_stdlib", "gramps", "logging", "simplifile"], otp_app = "goose", source = "hex", outer_checksum = "E991B275766D28693B8179EF77ADCCD210D58C1D3E3A1B4539C228D6CE58845B" },
18
+
{ name = "gramps", version = "6.0.0", build_tools = ["gleam"], requirements = ["gleam_crypto", "gleam_erlang", "gleam_http", "gleam_stdlib"], otp_app = "gramps", source = "hex", outer_checksum = "8B7195978FBFD30B43DF791A8A272041B81E45D245314D7A41FC57237AA882A0" },
19
+
{ name = "honk", version = "1.0.0", build_tools = ["gleam"], requirements = ["gleam_json", "gleam_regexp", "gleam_stdlib", "gleam_time"], source = "local", path = ".." },
20
+
{ name = "logging", version = "1.3.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "logging", source = "hex", outer_checksum = "1098FBF10B54B44C2C7FDF0B01C1253CAFACDACABEFB4B0D027803246753E06D" },
21
+
{ name = "simplifile", version = "2.3.1", build_tools = ["gleam"], requirements = ["filepath", "gleam_stdlib"], otp_app = "simplifile", source = "hex", outer_checksum = "957E0E5B75927659F1D2A1B7B75D7B9BA96FAA8D0C53EA71C4AD9CD0C6B848F6" },
22
+
]
23
+
24
+
[requirements]
25
+
gleam_json = { version = ">= 3.0.0 and < 4.0.0" }
26
+
gleam_stdlib = { version = ">= 0.44.0 and < 2.0.0" }
27
+
gleeunit = { version = ">= 1.0.0 and < 2.0.0" }
28
+
goose = { version = ">= 2.0.0 and < 3.0.0" }
29
+
honk = { path = ".." }
+250
example/src/example.gleam
+250
example/src/example.gleam
···
1
+
// Example: Validating xyz.statusphere.status records from Jetstream using honk
2
+
//
3
+
// This example connects to Bluesky's Jetstream firehose, filters for
4
+
// xyz.statusphere.status records, and validates them in real-time using honk.
5
+
6
+
import gleam/dynamic/decode
7
+
import gleam/io
8
+
import gleam/json
9
+
import gleam/option
10
+
import gleam/string
11
+
import goose
12
+
import honk
13
+
import honk/errors.{DataValidation, InvalidSchema, LexiconNotFound}
14
+
15
+
pub fn main() {
16
+
io.println("🦢 Honk + Goose: Jetstream Validation Example")
17
+
io.println("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━")
18
+
io.println("")
19
+
io.println("Connecting to Jetstream...")
20
+
io.println("Filtering for: xyz.statusphere.status")
21
+
io.println("Validating records with honk...")
22
+
io.println("")
23
+
24
+
// Define the xyz.statusphere.status lexicon
25
+
let lexicon = create_statusphere_lexicon()
26
+
27
+
// Configure goose to connect to Jetstream
28
+
let config =
29
+
goose.JetstreamConfig(
30
+
endpoint: "wss://jetstream2.us-west.bsky.network/subscribe",
31
+
wanted_collections: ["xyz.statusphere.status"],
32
+
wanted_dids: [],
33
+
cursor: option.None,
34
+
max_message_size_bytes: option.None,
35
+
compress: True,
36
+
require_hello: False,
37
+
)
38
+
39
+
// Start consuming events (this blocks forever)
40
+
goose.start_consumer(config, handle_event(_, lexicon))
41
+
}
42
+
43
+
/// Handles each Jetstream event
44
+
fn handle_event(json_event: String, lexicon: json.Json) -> Nil {
45
+
let event = goose.parse_event(json_event)
46
+
47
+
case event {
48
+
// Handle commit events (create/update/delete)
49
+
goose.CommitEvent(did, time_us, commit) -> {
50
+
case commit.operation {
51
+
"create" -> handle_create(did, time_us, commit, lexicon)
52
+
"update" -> handle_update(did, time_us, commit, lexicon)
53
+
"delete" -> handle_delete(did, time_us, commit)
54
+
_ -> Nil
55
+
}
56
+
}
57
+
58
+
// Ignore identity and account events for this example
59
+
goose.IdentityEvent(_, _, _) -> Nil
60
+
goose.AccountEvent(_, _, _) -> Nil
61
+
goose.UnknownEvent(raw) -> {
62
+
io.println("⚠️ Unknown event: " <> raw)
63
+
}
64
+
}
65
+
}
66
+
67
+
/// Handles create operations - validates the new record
68
+
fn handle_create(
69
+
did: String,
70
+
_time_us: Int,
71
+
commit: goose.CommitData,
72
+
lexicon: json.Json,
73
+
) -> Nil {
74
+
case commit.record {
75
+
option.Some(record_dynamic) -> {
76
+
// Convert Dynamic to JSON for honk validation
77
+
let record_json = dynamic_to_json(record_dynamic)
78
+
79
+
// Validate the record using honk
80
+
case
81
+
honk.validate_record([lexicon], "xyz.statusphere.status", record_json)
82
+
{
83
+
Ok(_) -> {
84
+
// Extract status emoji for display
85
+
let status_emoji = extract_status(record_dynamic)
86
+
io.println(
87
+
"✅ VALID | "
88
+
<> truncate_did(did)
89
+
<> " | "
90
+
<> status_emoji
91
+
<> " | "
92
+
<> commit.rkey,
93
+
)
94
+
}
95
+
Error(err) -> {
96
+
io.println(
97
+
"❌ INVALID | "
98
+
<> truncate_did(did)
99
+
<> " | "
100
+
<> format_error(err)
101
+
<> " | "
102
+
<> commit.rkey,
103
+
)
104
+
}
105
+
}
106
+
}
107
+
option.None -> {
108
+
io.println("⚠️ CREATE event without record data")
109
+
}
110
+
}
111
+
}
112
+
113
+
/// Handles update operations - validates the updated record
114
+
fn handle_update(
115
+
did: String,
116
+
_time_us: Int,
117
+
commit: goose.CommitData,
118
+
lexicon: json.Json,
119
+
) -> Nil {
120
+
case commit.record {
121
+
option.Some(record_dynamic) -> {
122
+
let record_json = dynamic_to_json(record_dynamic)
123
+
124
+
case
125
+
honk.validate_record([lexicon], "xyz.statusphere.status", record_json)
126
+
{
127
+
Ok(_) -> {
128
+
let status_emoji = extract_status(record_dynamic)
129
+
io.println(
130
+
"✅ UPDATED | "
131
+
<> truncate_did(did)
132
+
<> " | "
133
+
<> status_emoji
134
+
<> " | "
135
+
<> commit.rkey,
136
+
)
137
+
}
138
+
Error(err) -> {
139
+
io.println(
140
+
"❌ INVALID | "
141
+
<> truncate_did(did)
142
+
<> " | "
143
+
<> format_error(err)
144
+
<> " | "
145
+
<> commit.rkey,
146
+
)
147
+
}
148
+
}
149
+
}
150
+
option.None -> {
151
+
io.println("⚠️ UPDATE event without record data")
152
+
}
153
+
}
154
+
}
155
+
156
+
/// Handles delete operations - no validation needed
157
+
fn handle_delete(did: String, _time_us: Int, commit: goose.CommitData) -> Nil {
158
+
io.println("🗑️ DELETED | " <> truncate_did(did) <> " | " <> commit.rkey)
159
+
}
160
+
161
+
/// Creates the xyz.statusphere.status lexicon definition
162
+
fn create_statusphere_lexicon() -> json.Json {
163
+
json.object([
164
+
#("lexicon", json.int(1)),
165
+
#("id", json.string("xyz.statusphere.status")),
166
+
#(
167
+
"defs",
168
+
json.object([
169
+
#(
170
+
"main",
171
+
json.object([
172
+
#("type", json.string("record")),
173
+
#("key", json.string("tid")),
174
+
#(
175
+
"record",
176
+
json.object([
177
+
#("type", json.string("object")),
178
+
#(
179
+
"required",
180
+
json.preprocessed_array([
181
+
json.string("status"),
182
+
json.string("createdAt"),
183
+
]),
184
+
),
185
+
#(
186
+
"properties",
187
+
json.object([
188
+
#(
189
+
"status",
190
+
json.object([
191
+
#("type", json.string("string")),
192
+
#("minLength", json.int(1)),
193
+
#("maxGraphemes", json.int(1)),
194
+
#("maxLength", json.int(32)),
195
+
]),
196
+
),
197
+
#(
198
+
"createdAt",
199
+
json.object([
200
+
#("type", json.string("string")),
201
+
#("format", json.string("datetime")),
202
+
]),
203
+
),
204
+
]),
205
+
),
206
+
]),
207
+
),
208
+
]),
209
+
),
210
+
]),
211
+
),
212
+
])
213
+
}
214
+
215
+
/// Converts Dynamic to Json (they're the same underlying type)
216
+
@external(erlang, "gleam@dynamic", "unsafe_coerce")
217
+
fn dynamic_to_json(value: decode.Dynamic) -> json.Json
218
+
219
+
/// Extracts the status emoji from a record for display
220
+
fn extract_status(record: decode.Dynamic) -> String {
221
+
let decoder = {
222
+
use status <- decode.field("status", decode.string)
223
+
decode.success(status)
224
+
}
225
+
case decode.run(record, decoder) {
226
+
Ok(status) -> status
227
+
Error(_) -> "�"
228
+
}
229
+
}
230
+
231
+
/// Formats a validation error for display
232
+
fn format_error(err: honk.ValidationError) -> String {
233
+
case err {
234
+
InvalidSchema(msg) -> "Schema: " <> msg
235
+
DataValidation(msg) -> "Data: " <> msg
236
+
LexiconNotFound(id) -> "Not found: " <> id
237
+
}
238
+
}
239
+
240
+
/// Truncates a DID for cleaner display
241
+
fn truncate_did(did: String) -> String {
242
+
case string.split(did, ":") {
243
+
[_, _, suffix] ->
244
+
case string.length(suffix) > 12 {
245
+
True -> string.slice(suffix, 0, 12) <> "..."
246
+
False -> suffix
247
+
}
248
+
_ -> did
249
+
}
250
+
}
+13
example/test/example_test.gleam
+13
example/test/example_test.gleam
+5
-1
gleam.toml
+5
-1
gleam.toml
···
1
1
name = "honk"
2
-
version = "1.0.0"
2
+
version = "1.2.0"
3
3
description = "ATProtocol lexicon validator for Gleam"
4
4
internal_modules = ["honk/internal", "honk/internal/*"]
5
+
licences = ["Apache-2.0"]
6
+
repository = { type = "github", user = "bigmoves", repo = "honk" }
5
7
6
8
[dependencies]
7
9
gleam_stdlib = ">= 0.44.0 and < 2.0.0"
8
10
gleam_json = ">= 3.0.0 and < 4.0.0"
9
11
gleam_regexp = ">= 1.0.0 and < 2.0.0"
10
12
gleam_time = ">= 1.5.0 and < 2.0.0"
13
+
simplifile = ">= 2.3.1 and < 3.0.0"
14
+
argv = ">= 1.0.2 and < 2.0.0"
11
15
12
16
[dev-dependencies]
13
17
gleeunit = ">= 1.0.0 and < 2.0.0"
+7
-2
manifest.toml
+7
-2
manifest.toml
···
2
2
# You typically do not need to edit this file
3
3
4
4
packages = [
5
+
{ name = "argv", version = "1.0.2", build_tools = ["gleam"], requirements = [], otp_app = "argv", source = "hex", outer_checksum = "BA1FF0929525DEBA1CE67256E5ADF77A7CDDFE729E3E3F57A5BDCAA031DED09D" },
6
+
{ name = "filepath", version = "1.1.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "filepath", source = "hex", outer_checksum = "B06A9AF0BF10E51401D64B98E4B627F1D2E48C154967DA7AF4D0914780A6D40A" },
5
7
{ name = "gleam_json", version = "3.1.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_json", source = "hex", outer_checksum = "44FDAA8847BE8FC48CA7A1C089706BD54BADCC4C45B237A992EDDF9F2CDB2836" },
6
8
{ name = "gleam_regexp", version = "1.1.1", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_regexp", source = "hex", outer_checksum = "9C215C6CA84A5B35BB934A9B61A9A306EC743153BE2B0425A0D032E477B062A9" },
7
-
{ name = "gleam_stdlib", version = "0.65.0", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "7C69C71D8C493AE11A5184828A77110EB05A7786EBF8B25B36A72F879C3EE107" },
8
-
{ name = "gleam_time", version = "1.5.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_time", source = "hex", outer_checksum = "D560E672C7279C89908981E068DF07FD16D0C859DCA266F908B18F04DF0EB8E6" },
9
+
{ name = "gleam_stdlib", version = "0.67.1", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "6CE3E4189A8B8EC2F73AB61A2FBDE49F159D6C9C61C49E3B3082E439F260D3D0" },
10
+
{ name = "gleam_time", version = "1.6.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_time", source = "hex", outer_checksum = "0DF3834D20193F0A38D0EB21F0A78D48F2EC276C285969131B86DF8D4EF9E762" },
9
11
{ name = "gleeunit", version = "1.9.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleeunit", source = "hex", outer_checksum = "DA9553CE58B67924B3C631F96FE3370C49EB6D6DC6B384EC4862CC4AAA718F3C" },
12
+
{ name = "simplifile", version = "2.3.1", build_tools = ["gleam"], requirements = ["filepath", "gleam_stdlib"], otp_app = "simplifile", source = "hex", outer_checksum = "957E0E5B75927659F1D2A1B7B75D7B9BA96FAA8D0C53EA71C4AD9CD0C6B848F6" },
10
13
]
11
14
12
15
[requirements]
16
+
argv = { version = ">= 1.0.2 and < 2.0.0" }
13
17
gleam_json = { version = ">= 3.0.0 and < 4.0.0" }
14
18
gleam_regexp = { version = ">= 1.0.0 and < 2.0.0" }
15
19
gleam_stdlib = { version = ">= 0.44.0 and < 2.0.0" }
16
20
gleam_time = { version = ">= 1.5.0 and < 2.0.0" }
17
21
gleeunit = { version = ">= 1.0.0 and < 2.0.0" }
22
+
simplifile = { version = ">= 2.3.1 and < 3.0.0" }
-32
src/errors.gleam
-32
src/errors.gleam
···
1
-
// Error types for lexicon validation
2
-
3
-
pub type ValidationError {
4
-
LexiconNotFound(collection: String)
5
-
InvalidSchema(message: String)
6
-
DataValidation(message: String)
7
-
}
8
-
9
-
/// Convert error to human-readable string
10
-
pub fn to_string(error: ValidationError) -> String {
11
-
case error {
12
-
LexiconNotFound(collection) ->
13
-
"Lexicon not found for collection: " <> collection
14
-
InvalidSchema(message) -> "Invalid lexicon schema: " <> message
15
-
DataValidation(message) -> "Data validation failed: " <> message
16
-
}
17
-
}
18
-
19
-
/// Create an InvalidSchema error with context
20
-
pub fn invalid_schema(message: String) -> ValidationError {
21
-
InvalidSchema(message)
22
-
}
23
-
24
-
/// Create a DataValidation error with context
25
-
pub fn data_validation(message: String) -> ValidationError {
26
-
DataValidation(message)
27
-
}
28
-
29
-
/// Create a LexiconNotFound error
30
-
pub fn lexicon_not_found(collection: String) -> ValidationError {
31
-
LexiconNotFound(collection)
32
-
}
+32
src/honk/errors.gleam
+32
src/honk/errors.gleam
···
1
+
// Error types for lexicon validation
2
+
3
+
pub type ValidationError {
4
+
LexiconNotFound(collection: String)
5
+
InvalidSchema(message: String)
6
+
DataValidation(message: String)
7
+
}
8
+
9
+
/// Convert error to human-readable string
10
+
pub fn to_string(error: ValidationError) -> String {
11
+
case error {
12
+
LexiconNotFound(collection) ->
13
+
"Lexicon not found for collection: " <> collection
14
+
InvalidSchema(message) -> "Invalid lexicon schema: " <> message
15
+
DataValidation(message) -> "Data validation failed: " <> message
16
+
}
17
+
}
18
+
19
+
/// Create an InvalidSchema error with context
20
+
pub fn invalid_schema(message: String) -> ValidationError {
21
+
InvalidSchema(message)
22
+
}
23
+
24
+
/// Create a DataValidation error with context
25
+
pub fn data_validation(message: String) -> ValidationError {
26
+
DataValidation(message)
27
+
}
28
+
29
+
/// Create a LexiconNotFound error
30
+
pub fn lexicon_not_found(collection: String) -> ValidationError {
31
+
LexiconNotFound(collection)
32
+
}
+8
-8
src/honk/internal/constraints.gleam
+8
-8
src/honk/internal/constraints.gleam
···
1
1
// Reusable constraint validation functions
2
2
3
-
import errors.{type ValidationError}
4
3
import gleam/int
5
4
import gleam/list
6
5
import gleam/option.{type Option, Some}
7
6
import gleam/result
8
7
import gleam/string
8
+
import honk/errors
9
9
10
10
/// Validates length constraints (minLength/maxLength)
11
11
pub fn validate_length_constraints(
···
14
14
min_length: Option(Int),
15
15
max_length: Option(Int),
16
16
type_name: String,
17
-
) -> Result(Nil, ValidationError) {
17
+
) -> Result(Nil, errors.ValidationError) {
18
18
// Check minimum length
19
19
case min_length {
20
20
Some(min) if actual_length < min ->
···
53
53
min_length: Option(Int),
54
54
max_length: Option(Int),
55
55
type_name: String,
56
-
) -> Result(Nil, ValidationError) {
56
+
) -> Result(Nil, errors.ValidationError) {
57
57
case min_length, max_length {
58
58
Some(min), Some(max) if min > max ->
59
59
Error(errors.invalid_schema(
···
76
76
value: Int,
77
77
minimum: Option(Int),
78
78
maximum: Option(Int),
79
-
) -> Result(Nil, ValidationError) {
79
+
) -> Result(Nil, errors.ValidationError) {
80
80
// Check minimum
81
81
case minimum {
82
82
Some(min) if value < min ->
···
110
110
def_name: String,
111
111
minimum: Option(Int),
112
112
maximum: Option(Int),
113
-
) -> Result(Nil, ValidationError) {
113
+
) -> Result(Nil, errors.ValidationError) {
114
114
case minimum, maximum {
115
115
Some(min), Some(max) if min > max ->
116
116
Error(errors.invalid_schema(
···
135
135
type_name: String,
136
136
to_string: fn(a) -> String,
137
137
equal: fn(a, a) -> Bool,
138
-
) -> Result(Nil, ValidationError) {
138
+
) -> Result(Nil, errors.ValidationError) {
139
139
let found = list.any(enum_values, fn(enum_val) { equal(value, enum_val) })
140
140
141
141
case found {
···
158
158
has_const: Bool,
159
159
has_default: Bool,
160
160
type_name: String,
161
-
) -> Result(Nil, ValidationError) {
161
+
) -> Result(Nil, errors.ValidationError) {
162
162
case has_const, has_default {
163
163
True, True ->
164
164
Error(errors.invalid_schema(
···
177
177
actual_fields: List(String),
178
178
allowed_fields: List(String),
179
179
type_name: String,
180
-
) -> Result(Nil, ValidationError) {
180
+
) -> Result(Nil, errors.ValidationError) {
181
181
let unknown_fields =
182
182
list.filter(actual_fields, fn(field) {
183
183
!list.contains(allowed_fields, field)
+57
-61
src/honk/internal/json_helpers.gleam
+57
-61
src/honk/internal/json_helpers.gleam
···
1
1
// JSON helper utilities for extracting and validating fields
2
2
3
-
import errors.{type ValidationError}
4
3
import gleam/dict.{type Dict}
5
4
import gleam/dynamic.{type Dynamic}
6
5
import gleam/dynamic/decode
···
8
7
import gleam/list
9
8
import gleam/option.{type Option, None, Some}
10
9
import gleam/result
10
+
import honk/errors.{type ValidationError, data_validation, invalid_schema}
11
11
12
12
/// Parse JSON string to dynamic for decoding
13
13
fn json_to_dynamic(json_value: Json) -> Result(Dynamic, String) {
···
153
153
case get_string(json_value, field_name) {
154
154
Some(s) -> Ok(s)
155
155
None ->
156
-
Error(errors.invalid_schema(
156
+
Error(invalid_schema(
157
157
def_name <> ": '" <> field_name <> "' must be a string",
158
158
))
159
159
}
···
168
168
case get_int(json_value, field_name) {
169
169
Some(i) -> Ok(i)
170
170
None ->
171
-
Error(errors.invalid_schema(
171
+
Error(invalid_schema(
172
172
def_name <> ": '" <> field_name <> "' must be an integer",
173
173
))
174
174
}
···
183
183
case get_array(json_value, field_name) {
184
184
Some(arr) -> Ok(arr)
185
185
None ->
186
-
Error(errors.invalid_schema(
186
+
Error(invalid_schema(
187
187
def_name <> ": '" <> field_name <> "' must be an array",
188
188
))
189
189
}
···
221
221
222
222
/// Check if dynamic value is null
223
223
pub fn is_null_dynamic(dyn: Dynamic) -> Bool {
224
-
case decode.run(dyn, decode.string) {
225
-
Ok("null") -> True
226
-
_ -> False
227
-
}
224
+
dynamic.classify(dyn) == "Nil"
228
225
}
229
226
230
227
/// Convert JSON object to a dictionary
···
236
233
case decode.run(dyn, decode.dict(decode.string, decode.dynamic)) {
237
234
Ok(dict_val) -> Ok(dict_val)
238
235
Error(_) ->
239
-
Error(errors.data_validation("Failed to convert JSON to dictionary"))
236
+
Error(data_validation("Failed to convert JSON to dictionary"))
240
237
}
241
-
Error(_) -> Error(errors.data_validation("Failed to parse JSON as dynamic"))
238
+
Error(_) -> Error(data_validation("Failed to parse JSON as dynamic"))
242
239
}
243
240
}

 /// Convert a dynamic value back to Json
-/// This works by trying different decoders
 pub fn dynamic_to_json(dyn: Dynamic) -> Result(Json, ValidationError) {
-  // Try null
-  case decode.run(dyn, decode.string) {
-    Ok(s) -> {
-      case s {
-        "null" -> Ok(json.null())
-        _ -> Ok(json.string(s))
-      }
-    }
-    Error(_) -> {
-      // Try number
-      case decode.run(dyn, decode.int) {
-        Ok(i) -> Ok(json.int(i))
-        Error(_) -> {
-          // Try boolean
-          case decode.run(dyn, decode.bool) {
-            Ok(b) -> Ok(json.bool(b))
-            Error(_) -> {
-              // Try array
-              case decode.run(dyn, decode.list(decode.dynamic)) {
-                Ok(arr) -> {
-                  // Recursively convert array items
-                  case list.try_map(arr, dynamic_to_json) {
-                    Ok(json_arr) -> Ok(json.array(json_arr, fn(x) { x }))
-                    Error(e) -> Error(e)
-                  }
-                }
-                Error(_) -> {
-                  // Try object
-                  case
-                    decode.run(dyn, decode.dict(decode.string, decode.dynamic))
-                  {
-                    Ok(dict_val) -> {
-                      // Convert dict to object
-                      let pairs = dict.to_list(dict_val)
-                      case
-                        list.try_map(pairs, fn(pair) {
-                          let #(key, value_dyn) = pair
-                          case dynamic_to_json(value_dyn) {
-                            Ok(value_json) -> Ok(#(key, value_json))
-                            Error(e) -> Error(e)
-                          }
-                        })
-                      {
-                        Ok(json_pairs) -> Ok(json.object(json_pairs))
-                        Error(e) -> Error(e)
-                      }
-                    }
-                    Error(_) ->
-                      Error(errors.data_validation(
-                        "Failed to convert dynamic to Json",
-                      ))
-                  }
-                }
-              }
-            }
-          }
-        }
-      }
-    }
-  }
-}
+  case dynamic.classify(dyn) {
+    "Nil" -> Ok(json.null())
+    "String" -> {
+      case decode.run(dyn, decode.string) {
+        Ok(s) -> Ok(json.string(s))
+        Error(_) -> Error(data_validation("Failed to decode string"))
+      }
+    }
+    "Int" -> {
+      case decode.run(dyn, decode.int) {
+        Ok(i) -> Ok(json.int(i))
+        Error(_) -> Error(data_validation("Failed to decode int"))
+      }
+    }
+    "Float" -> {
+      case decode.run(dyn, decode.float) {
+        Ok(f) -> Ok(json.float(f))
+        Error(_) -> Error(data_validation("Failed to decode float"))
+      }
+    }
+    "Bool" -> {
+      case decode.run(dyn, decode.bool) {
+        Ok(b) -> Ok(json.bool(b))
+        Error(_) -> Error(data_validation("Failed to decode bool"))
+      }
+    }
+    "List" | "Array" -> {
+      case decode.run(dyn, decode.list(decode.dynamic)) {
+        Ok(arr) -> {
+          case list.try_map(arr, dynamic_to_json) {
+            Ok(json_arr) -> Ok(json.array(json_arr, fn(x) { x }))
+            Error(e) -> Error(e)
+          }
+        }
+        Error(_) -> Error(data_validation("Failed to decode list"))
+      }
+    }
+    "Dict" | "Object" -> {
+      case decode.run(dyn, decode.dict(decode.string, decode.dynamic)) {
+        Ok(dict_val) -> {
+          let pairs = dict.to_list(dict_val)
+          case
+            list.try_map(pairs, fn(pair) {
+              let #(key, value_dyn) = pair
+              case dynamic_to_json(value_dyn) {
+                Ok(value_json) -> Ok(#(key, value_json))
+                Error(e) -> Error(e)
+              }
+            })
+          {
+            Ok(json_pairs) -> Ok(json.object(json_pairs))
+            Error(e) -> Error(e)
+          }
+        }
+        Error(_) -> Error(data_validation("Failed to decode dict"))
+      }
+    }
+    other ->
+      Error(data_validation("Unsupported type for JSON conversion: " <> other))
+  }
+}
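Worth making the behavioral change concrete: the old decoder cascade could not tell the string `"null"` apart from an actual null, while `dynamic.classify` dispatches on the real runtime type. A minimal sketch (hypothetical test code; assumes gleam_json's `json.parse` and this package's internal `json_helpers` module):

```gleam
import gleam/dynamic/decode
import gleam/json
import honk/internal/json_helpers

pub fn classify_roundtrip() {
  // Parse a document whose field value is the *string* "null".
  let assert Ok(dyn) =
    json.parse(from: "{\"note\": \"null\"}", using: decode.dynamic)
  // With the classify-based dynamic_to_json the value stays a string;
  // the old decoder cascade mapped the string "null" to JSON null.
  json_helpers.dynamic_to_json(dyn)
}
```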
src/honk/internal/resolution.gleam (+7 -7)
···
 // Reference resolution utilities

-import errors.{type ValidationError}
 import gleam/dict.{type Dict}
 import gleam/json.{type Json}
 import gleam/list
···
 import gleam/result
 import gleam/set.{type Set}
 import gleam/string
+import honk/errors
 import honk/internal/json_helpers
-import validation/context.{type ValidationContext}
+import honk/validation/context.{type ValidationContext}

 /// Resolves a reference string to its target definition
 pub fn resolve_reference(
   reference: String,
   ctx: ValidationContext,
   current_lexicon_id: String,
-) -> Result(Option(Json), ValidationError) {
+) -> Result(Option(Json), errors.ValidationError) {
   // Update context with current lexicon
   let ctx = context.with_current_lexicon(ctx, current_lexicon_id)
···
   ctx: ValidationContext,
   current_lexicon_id: String,
   def_path: String,
-) -> Result(Nil, ValidationError) {
+) -> Result(Nil, errors.ValidationError) {
   // Check for circular reference
   case context.has_reference(ctx, reference) {
     True ->
···
 pub fn validate_lexicon_references(
   lexicon_id: String,
   ctx: ValidationContext,
-) -> Result(Nil, ValidationError) {
+) -> Result(Nil, errors.ValidationError) {
   case context.get_lexicon(ctx, lexicon_id) {
     Some(lexicon) -> {
       // Collect all references from the lexicon
···
 /// Validates completeness of all lexicons
 pub fn validate_lexicon_completeness(
   ctx: ValidationContext,
-) -> Result(Nil, ValidationError) {
+) -> Result(Nil, errors.ValidationError) {
   // Get all lexicon IDs
   let lexicon_ids = dict.keys(ctx.lexicons)
···
 /// Detects circular dependencies in lexicon references
 pub fn detect_circular_dependencies(
   ctx: ValidationContext,
-) -> Result(Nil, ValidationError) {
+) -> Result(Nil, errors.ValidationError) {
   // Build dependency graph
   let graph = build_dependency_graph(ctx)
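For orientation while reading these signature changes, a sketch of how `resolve_reference` is typically called (assumed usage, not part of this diff; `ctx` is a `ValidationContext` already loaded with lexicons):

```gleam
import gleam/option.{None, Some}
import honk/internal/resolution

pub fn resolve_example(ctx) {
  // Resolve the local ref "#author" relative to com.example.post.
  case resolution.resolve_reference("#author", ctx, "com.example.post") {
    Ok(Some(definition)) -> Ok(definition)
    Ok(None) -> Error("reference not found")
    Error(_) -> Error("resolution failed")
  }
}
```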
src/honk/types.gleam (new file, +58)
···
// Core types for AT Protocol lexicon validation

import gleam/json.{type Json}

/// Represents a parsed lexicon document
pub type LexiconDoc {
  LexiconDoc(id: String, defs: Json)
}

/// AT Protocol string formats
pub type StringFormat {
  DateTime
  Uri
  AtUri
  Did
  Handle
  AtIdentifier
  Nsid
  Cid
  Language
  Tid
  RecordKey
}

/// Convert a string to a StringFormat
pub fn string_to_format(s: String) -> Result(StringFormat, Nil) {
  case s {
    "datetime" -> Ok(DateTime)
    "uri" -> Ok(Uri)
    "at-uri" -> Ok(AtUri)
    "did" -> Ok(Did)
    "handle" -> Ok(Handle)
    "at-identifier" -> Ok(AtIdentifier)
    "nsid" -> Ok(Nsid)
    "cid" -> Ok(Cid)
    "language" -> Ok(Language)
    "tid" -> Ok(Tid)
    "record-key" -> Ok(RecordKey)
    _ -> Error(Nil)
  }
}

/// Convert a StringFormat to string
pub fn format_to_string(format: StringFormat) -> String {
  case format {
    DateTime -> "datetime"
    Uri -> "uri"
    AtUri -> "at-uri"
    Did -> "did"
    Handle -> "handle"
    AtIdentifier -> "at-identifier"
    Nsid -> "nsid"
    Cid -> "cid"
    Language -> "language"
    Tid -> "tid"
    RecordKey -> "record-key"
  }
}
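A quick round-trip sketch for the two conversion helpers (hypothetical test code):

```gleam
import honk/types

pub fn format_roundtrip() {
  // Known format names parse and print back unchanged
  let assert Ok(format) = types.string_to_format("at-uri")
  let assert "at-uri" = types.format_to_string(format)
  // Unknown names are rejected
  let assert Error(Nil) = types.string_to_format("email")
}
```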
src/honk/validation/context.gleam (new file, +339)
···
// Validation context and builder

import gleam/dict.{type Dict}
import gleam/json.{type Json}
import gleam/list
import gleam/option.{type Option, None, Some}
import gleam/result
import gleam/set.{type Set}
import gleam/string
import honk/errors
import honk/internal/json_helpers
import honk/types
import honk/validation/formats

/// Validation context that tracks state during validation
pub type ValidationContext {
  ValidationContext(
    // Map of lexicon ID to parsed lexicon document
    lexicons: Dict(String, types.LexiconDoc),
    // Current path in data structure (for error messages)
    path: String,
    // Current lexicon ID (for resolving local references)
    current_lexicon_id: Option(String),
    // Set of references being resolved (for circular detection)
    reference_stack: Set(String),
    // Recursive validator function for dispatching to type-specific validators
    // Parameters: data (Json), schema (Json), ctx (ValidationContext)
    validator: fn(Json, Json, ValidationContext) ->
      Result(Nil, errors.ValidationError),
  )
}

/// Builder for constructing ValidationContext
pub type ValidationContextBuilder {
  ValidationContextBuilder(
    lexicons: Dict(String, types.LexiconDoc),
    // Parameters: data (Json), schema (Json), ctx (ValidationContext)
    validator: Option(
      fn(Json, Json, ValidationContext) -> Result(Nil, errors.ValidationError),
    ),
  )
}

/// Creates a new ValidationContextBuilder with default settings.
///
/// Use this to start building a validation context by chaining with
/// `with_lexicons`, `with_validator`, and `build`.
///
/// ## Example
///
/// ```gleam
/// let assert Ok(builder) =
///   context.builder()
///   |> context.with_validator(field.dispatch_data_validation)
///   |> context.with_lexicons([my_lexicon])
/// let assert Ok(ctx) = context.build(builder)
/// ```
pub fn builder() -> ValidationContextBuilder {
  ValidationContextBuilder(lexicons: dict.new(), validator: None)
}

/// Adds a list of lexicon JSON documents to the builder.
///
/// Each lexicon must have an 'id' field (valid NSID) and a 'defs' object
/// containing type definitions. Returns an error if any lexicon is invalid.
///
/// ## Example
///
/// ```gleam
/// let lexicon = json.object([
///   #("lexicon", json.int(1)),
///   #("id", json.string("com.example.post")),
///   #("defs", json.object([...])),
/// ])
///
/// let assert Ok(builder) =
///   context.builder()
///   |> context.with_lexicons([lexicon])
/// ```
pub fn with_lexicons(
  builder: ValidationContextBuilder,
  lexicons: List(Json),
) -> Result(ValidationContextBuilder, errors.ValidationError) {
  // Parse each lexicon and add to the dictionary
  list.try_fold(lexicons, builder, fn(b, lex_json) {
    // Extract id and defs from the lexicon JSON
    case parse_lexicon(lex_json) {
      Ok(lexicon_doc) -> {
        let updated_lexicons =
          dict.insert(b.lexicons, lexicon_doc.id, lexicon_doc)
        Ok(ValidationContextBuilder(..b, lexicons: updated_lexicons))
      }
      Error(e) -> Error(e)
    }
  })
}

/// Set the validator function
/// Parameters: data (Json), schema (Json), ctx (ValidationContext)
pub fn with_validator(
  builder: ValidationContextBuilder,
  validator: fn(Json, Json, ValidationContext) ->
    Result(Nil, errors.ValidationError),
) -> ValidationContextBuilder {
  ValidationContextBuilder(..builder, validator: Some(validator))
}

/// Builds the final ValidationContext from the builder.
///
/// Creates a no-op validator if none was set via `with_validator`.
/// Returns a ValidationContext ready for validating lexicons and data.
///
/// ## Example
///
/// ```gleam
/// let assert Ok(builder) =
///   context.builder()
///   |> context.with_validator(field.dispatch_data_validation)
///   |> context.with_lexicons([lexicon])
/// let assert Ok(ctx) = context.build(builder)
/// ```
pub fn build(
  builder: ValidationContextBuilder,
) -> Result(ValidationContext, errors.ValidationError) {
  // Create a default no-op validator if none is set
  let validator = case builder.validator {
    Some(v) -> v
    None -> fn(_data, _schema, _ctx) { Ok(Nil) }
  }

  Ok(ValidationContext(
    lexicons: builder.lexicons,
    path: "",
    current_lexicon_id: None,
    reference_stack: set.new(),
    validator: validator,
  ))
}

/// Retrieves a lexicon document by its NSID from the validation context.
///
/// Returns `None` if the lexicon is not found. Use this to access
/// lexicon definitions when resolving references.
///
/// ## Example
///
/// ```gleam
/// case context.get_lexicon(ctx, "com.example.post") {
///   Some(lexicon) -> // Use the lexicon
///   None -> // Lexicon not found
/// }
/// ```
pub fn get_lexicon(
  ctx: ValidationContext,
  id: String,
) -> Option(types.LexiconDoc) {
  case dict.get(ctx.lexicons, id) {
    Ok(lex) -> Some(lex)
    Error(_) -> None
  }
}

/// Returns the current validation path within the data structure.
///
/// The path is used for generating detailed error messages that show
/// exactly where in a nested structure validation failed.
///
/// ## Example
///
/// ```gleam
/// let current_path = context.path(ctx)
/// // Returns something like "defs.post.properties.text"
/// ```
pub fn path(ctx: ValidationContext) -> String {
  ctx.path
}

/// Creates a new context with an updated path segment.
///
/// Used when traversing nested data structures during validation
/// to maintain accurate error location information.
///
/// ## Example
///
/// ```gleam
/// let nested_ctx = context.with_path(ctx, "properties.name")
/// // New path might be "defs.user.properties.name"
/// ```
pub fn with_path(ctx: ValidationContext, segment: String) -> ValidationContext {
  let new_path = case ctx.path {
    "" -> segment
    _ -> ctx.path <> "." <> segment
  }
  ValidationContext(..ctx, path: new_path)
}

/// Returns the ID of the lexicon currently being validated.
///
/// Used for resolving local references (e.g., `#post`) which need to
/// know which lexicon they belong to.
///
/// ## Example
///
/// ```gleam
/// case context.current_lexicon_id(ctx) {
///   Some(id) -> // id is like "com.example.post"
///   None -> // No lexicon context set
/// }
/// ```
pub fn current_lexicon_id(ctx: ValidationContext) -> Option(String) {
  ctx.current_lexicon_id
}

/// Creates a new context with a different current lexicon ID.
///
/// Used when validating cross-lexicon references to set the correct
/// lexicon context for resolving local references.
///
/// ## Example
///
/// ```gleam
/// let ctx_with_lexicon =
///   context.with_current_lexicon(ctx, "com.example.post")
/// ```
pub fn with_current_lexicon(
  ctx: ValidationContext,
  lexicon_id: String,
) -> ValidationContext {
  ValidationContext(..ctx, current_lexicon_id: Some(lexicon_id))
}

/// Adds a reference to the reference stack for circular dependency detection.
///
/// Used internally during reference resolution to track which references
/// are currently being validated. This prevents infinite loops when
/// references form a cycle.
///
/// ## Example
///
/// ```gleam
/// let ctx_with_ref =
///   context.with_reference(ctx, "com.example.post#user")
/// ```
pub fn with_reference(
  ctx: ValidationContext,
  reference: String,
) -> ValidationContext {
  ValidationContext(
    ..ctx,
    reference_stack: set.insert(ctx.reference_stack, reference),
  )
}

/// Checks if a reference is already in the reference stack.
///
/// Returns `True` if the reference is being validated, indicating a
/// circular reference that would cause infinite recursion. Used to
/// detect and prevent circular dependency errors.
///
/// ## Example
///
/// ```gleam
/// case context.has_reference(ctx, "#user") {
///   True -> Error(errors.data_validation("Circular reference detected"))
///   False -> // Safe to proceed with validation
/// }
/// ```
pub fn has_reference(ctx: ValidationContext, reference: String) -> Bool {
  set.contains(ctx.reference_stack, reference)
}

/// Parse a reference string into (lexicon_id, definition)
/// Handles: #def, nsid#def, nsid
pub fn parse_reference(
  ctx: ValidationContext,
  reference: String,
) -> Result(#(String, String), errors.ValidationError) {
  case string.split(reference, "#") {
    // Local reference: #def
    ["", def] ->
      case ctx.current_lexicon_id {
        Some(lex_id) -> Ok(#(lex_id, def))
        None ->
          Error(errors.invalid_schema(
            "Local reference '"
            <> reference
            <> "' used without current lexicon context",
          ))
      }
    // Global reference: nsid#def
    [nsid, def] if nsid != "" && def != "" -> Ok(#(nsid, def))
    // Global main: nsid (implicit #main)
    [nsid] if nsid != "" -> Ok(#(nsid, "main"))
    // Invalid
    _ -> Error(errors.invalid_schema("Invalid reference format: " <> reference))
  }
}
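The three accepted reference shapes side by side, as a sketch (hypothetical test code; the current lexicon is set so that local refs can resolve):

```gleam
import honk/validation/context

pub fn reference_shapes() {
  let assert Ok(builder) = context.builder() |> context.with_lexicons([])
  let assert Ok(ctx) = context.build(builder)
  let ctx = context.with_current_lexicon(ctx, "com.example.post")

  // Local reference: resolved against the current lexicon
  let assert Ok(#("com.example.post", "author")) =
    context.parse_reference(ctx, "#author")
  // Global reference with an explicit fragment
  let assert Ok(#("com.example.profile", "avatar")) =
    context.parse_reference(ctx, "com.example.profile#avatar")
  // Bare NSID: implies the #main definition
  let assert Ok(#("com.example.profile", "main")) =
    context.parse_reference(ctx, "com.example.profile")
}
```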

/// Helper to parse a lexicon JSON into LexiconDoc
fn parse_lexicon(
  lex_json: Json,
) -> Result(types.LexiconDoc, errors.ValidationError) {
  // Extract "id" field (required NSID)
  let id_result = case json_helpers.get_string(lex_json, "id") {
    Some(id) -> Ok(id)
    None -> Error(errors.invalid_schema("Lexicon missing required 'id' field"))
  }

  use id <- result.try(id_result)

  // Validate that id is a valid NSID
  use _ <- result.try(case formats.is_valid_nsid(id) {
    True -> Ok(Nil)
    False ->
      Error(errors.invalid_schema(
        "Lexicon 'id' field is not a valid NSID: " <> id,
      ))
  })

  // Extract "defs" field (required object containing definitions)
  let defs_result = case json_helpers.get_field(lex_json, "defs") {
    Some(defs) ->
      case json_helpers.is_object(defs) {
        True -> Ok(defs)
        False ->
          Error(errors.invalid_schema(
            "Lexicon 'defs' must be an object at " <> id,
          ))
      }
    None ->
      Error(errors.invalid_schema(
        "Lexicon missing required 'defs' field at " <> id,
      ))
  }

  use defs <- result.try(defs_result)

  Ok(types.LexiconDoc(id: id, defs: defs))
}
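For reference, the smallest lexicon shape that `parse_lexicon` accepts via `with_lexicons`: a valid NSID `id` plus a `defs` object (a sketch; the `token` definition is an arbitrary placeholder):

```gleam
import gleam/json
import honk/validation/context

pub fn minimal_lexicon() {
  let lexicon =
    json.object([
      #("lexicon", json.int(1)),
      #("id", json.string("com.example.post")),
      #("defs", json.object([
        #("main", json.object([#("type", json.string("token"))])),
      ])),
    ])
  // Errors with invalid_schema if 'id' is missing or not a valid NSID,
  // or if 'defs' is absent or not an object.
  context.builder() |> context.with_lexicons([lexicon])
}
```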
src/honk/validation/field/reference.gleam (new file, +208)
···
// Reference type validator

import gleam/json.{type Json}
import gleam/option.{None, Some}
import gleam/result
import gleam/string
import honk/errors
import honk/internal/constraints
import honk/internal/json_helpers
import honk/internal/resolution
import honk/validation/context.{type ValidationContext}

const allowed_fields = ["type", "ref", "description"]

/// Validates reference schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "ref",
  ))

  // Validate ref field (required)
  let ref_value = case json_helpers.get_string(schema, "ref") {
    Some(ref_str) -> Ok(ref_str)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": ref missing required 'ref' field",
      ))
  }

  use ref_str <- result.try(ref_value)

  // Validate reference syntax
  use _ <- result.try(validate_ref_syntax(ref_str, def_name))

  // Validate that the reference can be resolved (only for global refs with full context)
  case string.starts_with(ref_str, "#") {
    // Local ref - will be validated in same lexicon
    True -> Ok(Nil)
    False -> {
      // Global ref - check it exists in catalog if we have a current lexicon
      case context.current_lexicon_id(ctx) {
        Some(lex_id) -> {
          // We have a full validation context, so validate reference resolution
          use resolved <- result.try(resolution.resolve_reference(
            ref_str,
            ctx,
            lex_id,
          ))

          case resolved {
            Some(_) -> Ok(Nil)
            None ->
              Error(errors.invalid_schema(
                def_name <> ": reference not found: " <> ref_str,
              ))
          }
        }
        None -> {
          // No current lexicon (e.g., unit test context)
          // Just validate syntax, can't check if reference exists
          Ok(Nil)
        }
      }
    }
  }
}

/// Validates data against the referenced schema
/// Uses the validator from the context for recursive validation
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Get the reference string
  use ref_str <- result.try(case json_helpers.get_string(schema, "ref") {
    Some(ref_str) -> Ok(ref_str)
    None ->
      Error(errors.data_validation(
        def_name <> ": ref schema missing 'ref' field",
      ))
  })

  // Check for circular references
  case context.has_reference(ctx, ref_str) {
    True ->
      Error(errors.data_validation(
        def_name <> ": circular reference detected: " <> ref_str,
      ))
    False -> {
      // Add to reference stack
      let ref_ctx = context.with_reference(ctx, ref_str)

      // Get current lexicon ID
      use lex_id <- result.try(case context.current_lexicon_id(ref_ctx) {
        Some(id) -> Ok(id)
        None ->
          Error(errors.data_validation(
            def_name <> ": no current lexicon set for resolving reference",
          ))
      })

      // Resolve the reference to get the target schema
      use resolved_opt <- result.try(resolution.resolve_reference(
        ref_str,
        ref_ctx,
        lex_id,
      ))

      use resolved_schema <- result.try(case resolved_opt {
        Some(schema) -> Ok(schema)
        None ->
          Error(errors.data_validation(
            def_name <> ": reference not found: " <> ref_str,
          ))
      })

      // Recursively validate data against the resolved schema
      // Use the validator from the context
      let validator = ref_ctx.validator
      validator(data, resolved_schema, ref_ctx)
    }
  }
}

/// Validates reference syntax
fn validate_ref_syntax(
  ref_str: String,
  def_name: String,
) -> Result(Nil, errors.ValidationError) {
  case string.is_empty(ref_str) {
    True ->
      Error(errors.invalid_schema(def_name <> ": reference cannot be empty"))
    False -> {
      case string.starts_with(ref_str, "#") {
        True -> {
          // Local reference
          let def_part = string.drop_start(ref_str, 1)
          case string.is_empty(def_part) {
            True ->
              Error(errors.invalid_schema(
                def_name
                <> ": local reference must have a definition name after #",
              ))
            False -> Ok(Nil)
          }
        }
        False -> {
          // Global reference (with or without fragment)
          case string.contains(ref_str, "#") {
            True -> {
              // Global reference with fragment
              validate_global_ref_with_fragment(ref_str, def_name)
            }
            False -> {
              // Global main reference
              // Would validate NSID format here
              Ok(Nil)
            }
          }
        }
      }
    }
  }
}

/// Validates global reference with fragment (e.g., "com.example.lexicon#def")
fn validate_global_ref_with_fragment(
  ref_str: String,
  def_name: String,
) -> Result(Nil, errors.ValidationError) {
  // Split on # and validate both parts
  case string.split(ref_str, "#") {
    [nsid, definition] -> {
      case string.is_empty(nsid) {
        True ->
          Error(errors.invalid_schema(
            def_name <> ": NSID part of reference cannot be empty",
          ))
        False ->
          case string.is_empty(definition) {
            True ->
              Error(errors.invalid_schema(
                def_name
                <> ": definition name part of reference cannot be empty",
              ))
            False -> Ok(Nil)
          }
      }
    }
    _ ->
      Error(errors.invalid_schema(
        def_name <> ": global reference can only contain one # character",
      ))
  }
}
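To make the syntax rules concrete, a sketch of ref schemas that pass and fail `validate_schema` (hypothetical test code; with no current lexicon in `ctx`, only syntax is checked):

```gleam
import gleam/json
import honk/validation/field/reference

pub fn ref_syntax_examples(ctx) {
  // Accepted shapes: "#def", "nsid", and "nsid#def"
  let ok =
    json.object([
      #("type", json.string("ref")),
      #("ref", json.string("com.example.profile#avatar")),
    ])
  let assert Ok(Nil) = reference.validate_schema(ok, ctx)

  // Rejected: "#" with no definition name after it
  let bad =
    json.object([
      #("type", json.string("ref")),
      #("ref", json.string("#")),
    ])
  let assert Error(_) = reference.validate_schema(bad, ctx)
}
```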
src/honk/validation/field/union.gleam (new file, +337)
···
// Union type validator

import gleam/dynamic/decode
import gleam/json.{type Json}
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import gleam/string
import honk/errors
import honk/internal/constraints
import honk/internal/json_helpers
import honk/internal/resolution
import honk/validation/context.{type ValidationContext}

const allowed_fields = ["type", "refs", "closed", "description"]

/// Validates union schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "union",
  ))

  // Validate refs field (required)
  let refs = case json_helpers.get_array(schema, "refs") {
    Some(refs_array) -> Ok(refs_array)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": union missing required 'refs' field",
      ))
  }

  use refs_array <- result.try(refs)

  // Validate that all refs are strings
  use _ <- result.try(
    list.index_fold(refs_array, Ok(Nil), fn(acc, ref_item, i) {
      use _ <- result.try(acc)
      case decode.run(ref_item, decode.string) {
        Ok(_) -> Ok(Nil)
        Error(_) ->
          Error(errors.invalid_schema(
            def_name <> ": refs[" <> string.inspect(i) <> "] must be a string",
          ))
      }
    }),
  )

  // Validate closed field if present
  use _ <- result.try(case json_helpers.get_bool(schema, "closed") {
    Some(closed) -> {
      // If closed is true and refs is empty, that's invalid
      case closed && list.is_empty(refs_array) {
        True ->
          Error(errors.invalid_schema(
            def_name <> ": union cannot be closed with empty refs array",
          ))
        False -> Ok(Nil)
      }
    }
    None -> Ok(Nil)
  })

  // Empty refs array is only allowed for open unions
  use _ <- result.try(case list.is_empty(refs_array) {
    True -> {
      case json_helpers.get_bool(schema, "closed") {
        Some(True) ->
          Error(errors.invalid_schema(
            def_name <> ": union cannot have empty refs array when closed=true",
          ))
        _ -> Ok(Nil)
      }
    }
    False -> Ok(Nil)
  })

  // Validate that each reference can be resolved
  validate_refs_resolvable(refs_array, ctx, def_name)
}

/// Validates that all references in the refs array can be resolved
fn validate_refs_resolvable(
  refs_array: List(decode.Dynamic),
  ctx: ValidationContext,
  def_name: String,
) -> Result(Nil, errors.ValidationError) {
  // Convert refs to strings
  let ref_strings =
    list.filter_map(refs_array, fn(r) { decode.run(r, decode.string) })

  // Check each reference can be resolved (both local and global refs)
  list.try_fold(ref_strings, Nil, fn(_, ref_str) {
    case context.current_lexicon_id(ctx) {
      Some(lex_id) -> {
        // We have a full validation context, so validate reference resolution
        // This works for both local refs (#def) and global refs (nsid#def)
        use resolved <- result.try(resolution.resolve_reference(
          ref_str,
          ctx,
          lex_id,
        ))

        case resolved {
          Some(_) -> Ok(Nil)
          None ->
            Error(errors.invalid_schema(
              def_name <> ": reference not found: " <> ref_str,
            ))
        }
      }
      None -> {
        // No current lexicon (e.g., unit test context)
        // Just validate syntax, can't check if reference exists
        Ok(Nil)
      }
    }
  })
}

/// Validates union data against schema
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Union data must be an object
  case json_helpers.is_object(data) {
    False -> {
      let type_name = get_type_name(data)
      Error(errors.data_validation(
        def_name
        <> ": union data must be an object which includes the \"$type\" property, found "
        <> type_name,
      ))
    }
    True -> {
      // Check for $type discriminator field
      let type_field = case json_helpers.get_string(data, "$type") {
        Some(type_name) -> Ok(type_name)
        None ->
          Error(errors.data_validation(
            def_name
            <> ": union data must be an object which includes the \"$type\" property",
          ))
      }

      use type_name <- result.try(type_field)

      // Get the union's referenced types
      let refs = case json_helpers.get_array(schema, "refs") {
        Some(refs_array) -> Ok(refs_array)
        None ->
          Error(errors.data_validation(
            def_name <> ": union schema missing or invalid 'refs' field",
          ))
      }

      use refs_array <- result.try(refs)

      case list.is_empty(refs_array) {
        True ->
          Error(errors.data_validation(
            def_name <> ": union schema has empty refs array",
          ))
        False -> {
          // Convert refs to strings
          let ref_strings =
            list.filter_map(refs_array, fn(r) { decode.run(r, decode.string) })

          // Check if the $type matches any of the refs
          case
            list.find(ref_strings, fn(ref_str) {
              refs_contain_type(ref_str, type_name)
            })
          {
            Ok(matching_ref) -> {
              // Found matching ref - validate data against the resolved schema
              validate_against_resolved_ref(data, matching_ref, ctx, def_name)
            }
            Error(Nil) -> {
              // No matching ref found
              // Check if union is closed
              let is_closed = case json_helpers.get_bool(schema, "closed") {
                Some(closed) -> closed
                None -> False
              }

              case is_closed {
                True -> {
                  // Closed union - reject unknown types
                  Error(errors.data_validation(
                    def_name
                    <> ": union data $type must be one of "
                    <> string.join(ref_strings, ", ")
                    <> ", found '"
                    <> type_name
                    <> "'",
                  ))
                }
                False -> {
                  // Open union - allow unknown types
                  Ok(Nil)
                }
              }
            }
          }
        }
      }
    }
  }
}

/// Validates data against a resolved reference from the union
fn validate_against_resolved_ref(
  data: Json,
  ref_str: String,
  ctx: ValidationContext,
  def_name: String,
) -> Result(Nil, errors.ValidationError) {
  // Get current lexicon ID to resolve the reference
  case context.current_lexicon_id(ctx) {
    Some(lex_id) -> {
      // We have a validation context, try to resolve and validate
      use resolved_opt <- result.try(resolution.resolve_reference(
        ref_str,
        ctx,
        lex_id,
      ))

      case resolved_opt {
        Some(resolved_schema) -> {
          // Successfully resolved - validate data against the resolved schema
          let validator = ctx.validator
          validator(data, resolved_schema, ctx)
        }
        None -> {
          // Reference couldn't be resolved
          // This shouldn't happen as schema validation should have caught it,
          // but handle gracefully
          Error(errors.data_validation(
            def_name <> ": reference not found: " <> ref_str,
          ))
        }
      }
    }
    None -> {
      // No lexicon context (e.g., unit test)
      // Can't validate against resolved schema, just accept the data
      Ok(Nil)
    }
  }
}

/// Checks if refs array contains the given type
/// Based on AT Protocol's refsContainType logic - handles both explicit and implicit #main
fn refs_contain_type(reference: String, type_name: String) -> Bool {
  // Direct match
  case reference == type_name {
    True -> True
    False -> {
      // Handle local reference patterns (#ref)
      case string.starts_with(reference, "#") {
        True -> {
          let ref_name = string.drop_start(reference, 1)
          // Match bare name against local ref
          case type_name == ref_name {
            True -> True
            False -> {
              // Match full NSID#fragment against local ref
              string.ends_with(type_name, "#" <> ref_name)
            }
          }
        }
        False -> {
          // Handle implicit #main patterns
          case string.ends_with(type_name, "#main") {
            True -> {
              // Remove "#main"
              let base_type = string.drop_end(type_name, 5)
              reference == base_type
            }
            False -> {
              // type_name has no fragment, check if ref is the #main version
              case string.contains(type_name, "#") {
                True -> False
                False -> {
                  let main_ref = type_name <> "#main"
                  reference == main_ref
                }
              }
            }
          }
        }
      }
    }
  }
}

/// Helper to get type name for error messages
fn get_type_name(data: Json) -> String {
  case json_helpers.is_null(data) {
    True -> "null"
    False ->
      case json_helpers.is_bool(data) {
        True -> "boolean"
        False ->
          case json_helpers.is_int(data) {
            True -> "number"
            False ->
              case json_helpers.is_string(data) {
                True -> "string"
                False ->
                  case json_helpers.is_array(data) {
                    True -> "array"
                    False ->
                      case json_helpers.is_object(data) {
                        True -> "object"
                        False -> "unknown"
                      }
                  }
              }
          }
      }
  }
}
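Putting the union pieces together: a closed union admits only data whose `$type` matches one of `refs`, and `refs_contain_type` treats `nsid` and `nsid#main` as equivalent while matching local `#name` refs against a full `nsid#name`. A sketch (hypothetical data; `ctx` is assumed to carry the defining lexicon):

```gleam
import gleam/json
import honk/validation/field/union

pub fn closed_union_example(ctx) {
  let schema =
    json.object([
      #("type", json.string("union")),
      #("refs", json.array(["#image", "#video"], json.string)),
      #("closed", json.bool(True)),
    ])
  // The full nsid#fragment form matches the local ref #image
  let data =
    json.object([#("$type", json.string("com.example.post#image"))])
  union.validate_data(data, schema, ctx)
}
```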
src/honk/validation/field.gleam (new file, +670)
···
// Field type validators (object and array)

import gleam/dict
import gleam/dynamic.{type Dynamic}
import gleam/dynamic/decode
import gleam/int
import gleam/json.{type Json}
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import honk/errors
import honk/internal/constraints
import honk/internal/json_helpers
import honk/validation/context.{type ValidationContext}

// Import primitive validators
import honk/validation/primitive/blob
import honk/validation/primitive/boolean
import honk/validation/primitive/bytes
import honk/validation/primitive/cid_link
import honk/validation/primitive/integer
import honk/validation/primitive/null
import honk/validation/primitive/string

// Import other field validators
import honk/validation/field/reference
import honk/validation/field/union

// Import meta validators
import honk/validation/meta/token
import honk/validation/meta/unknown

// ============================================================================
// SHARED TYPE DISPATCHER
// ============================================================================

/// Dispatch schema validation based on type field
/// Handles all primitive and field types
fn dispatch_schema_validation(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  case json_helpers.get_string(schema, "type") {
    Some("string") -> string.validate_schema(schema, ctx)
    Some("integer") -> integer.validate_schema(schema, ctx)
    Some("boolean") -> boolean.validate_schema(schema, ctx)
    Some("bytes") -> bytes.validate_schema(schema, ctx)
    Some("blob") -> blob.validate_schema(schema, ctx)
    Some("cid-link") -> cid_link.validate_schema(schema, ctx)
    Some("null") -> null.validate_schema(schema, ctx)
    Some("object") -> validate_object_schema(schema, ctx)
    Some("array") -> validate_array_schema(schema, ctx)
    Some("union") -> union.validate_schema(schema, ctx)
    Some("ref") -> reference.validate_schema(schema, ctx)
    Some("token") -> token.validate_schema(schema, ctx)
    Some("unknown") -> unknown.validate_schema(schema, ctx)
    Some(unknown_type) ->
      Error(errors.invalid_schema(
        context.path(ctx) <> ": unknown type '" <> unknown_type <> "'",
      ))
    None ->
      Error(errors.invalid_schema(
        context.path(ctx) <> ": schema missing type field",
      ))
  }
}

/// Dispatches data validation to the appropriate type-specific validator.
///
/// This is the central dispatcher that routes validation based on the schema's
/// `type` field. Handles all primitive types (string, integer, boolean, etc.),
/// field types (object, array, union, ref), and meta types (token, unknown).
///
/// Made public to allow reference validators to recursively validate resolved
/// schemas. Typically set as the validator function in ValidationContext via
/// `context.with_validator(field.dispatch_data_validation)`.
///
/// ## Example
///
/// ```gleam
/// let schema = json.object([
///   #("type", json.string("string")),
///   #("maxLength", json.int(100)),
/// ])
/// let data = json.string("Hello")
///
/// field.dispatch_data_validation(data, schema, ctx)
/// // => Ok(Nil) if valid, Error(...) if invalid
/// ```
pub fn dispatch_data_validation(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  case json_helpers.get_string(schema, "type") {
    Some("string") -> string.validate_data(data, schema, ctx)
    Some("integer") -> integer.validate_data(data, schema, ctx)
    Some("boolean") -> boolean.validate_data(data, schema, ctx)
    Some("bytes") -> bytes.validate_data(data, schema, ctx)
    Some("blob") -> blob.validate_data(data, schema, ctx)
    Some("cid-link") -> cid_link.validate_data(data, schema, ctx)
    Some("null") -> null.validate_data(data, schema, ctx)
    Some("object") -> validate_object_data(data, schema, ctx)
    Some("array") -> validate_array_data(data, schema, ctx)
    Some("union") -> union.validate_data(data, schema, ctx)
    Some("ref") -> reference.validate_data(data, schema, ctx)
    Some("token") -> token.validate_data(data, schema, ctx)
    Some("unknown") -> unknown.validate_data(data, schema, ctx)
    Some(unknown_type) ->
      Error(errors.data_validation(
        "Unknown schema type '"
        <> unknown_type
        <> "' at '"
        <> context.path(ctx)
        <> "'",
      ))
    None ->
      Error(errors.data_validation(
        "Schema missing type field at '" <> context.path(ctx) <> "'",
      ))
  }
}

// ============================================================================
// OBJECT VALIDATOR
// ============================================================================

const object_allowed_fields = [
  "type", "properties", "required", "nullable", "description",
]

/// Validates object schema definition
pub fn validate_object_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    object_allowed_fields,
    "object",
  ))

  // Validate properties structure: an array is never acceptable here
  let properties = case json_helpers.get_array(schema, "properties") {
    Some(_) ->
      Error(errors.invalid_schema(
        def_name <> ": properties must be an object, not an array",
      ))
    None -> Ok(None)
  }

  use _ <- result.try(properties)

  // Get properties for validation
  let properties_json = json_helpers.get_field(schema, "properties")

  // Validate required fields reference existing properties
  use _ <- result.try(case json_helpers.get_array(schema, "required") {
    Some(required_array) ->
      validate_required_fields(def_name, required_array, properties_json)
    None -> Ok(Nil)
  })

  // Validate nullable fields reference existing properties
  use _ <- result.try(case json_helpers.get_array(schema, "nullable") {
    Some(nullable_array) ->
      validate_nullable_fields(def_name, nullable_array, properties_json)
    None -> Ok(Nil)
  })

  // Validate each property schema recursively
  case properties_json {
    Some(properties) -> {
      case json_helpers.is_object(properties) {
        True -> {
          // Get property map and validate each property schema
          validate_property_schemas(properties, ctx)
        }
        False -> Ok(Nil)
      }
    }
    None -> Ok(Nil)
  }
}

/// Validates object data against schema
pub fn validate_object_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Check data is an object
  case json_helpers.is_object(data) {
    False -> {
      let type_name = get_type_name(data)
      Error(errors.data_validation(
        "Expected object at '" <> def_name <> "', found " <> type_name,
      ))
    }
    True -> {
      // Check required fields are present
      use _ <- result.try(case json_helpers.get_array(schema, "required") {
        Some(required_array) ->
          validate_required_fields_in_data(def_name, required_array, data)
        None -> Ok(Nil)
      })

      // Get nullable fields for lookup
      let nullable_fields = case json_helpers.get_array(schema, "nullable") {
        Some(nullable_array) ->
          list.filter_map(nullable_array, fn(item) {
            decode.run(item, decode.string)
          })
        None -> []
      }

      // Validate each property in data against its schema
      case json_helpers.get_field(schema, "properties") {
        Some(properties) -> {
          validate_properties_data(data, properties, nullable_fields, ctx)
        }
        None -> Ok(Nil)
      }
    }
  }
}
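A compact sketch of object data validation: required fields must be present, and null is accepted only for fields listed in `nullable` (hypothetical usage; `ctx` is assumed built with `field.dispatch_data_validation` as its validator):

```gleam
import gleam/json
import honk/validation/field

pub fn object_example(ctx) {
  let schema =
    json.object([
      #("type", json.string("object")),
      #("properties", json.object([
        #("text", json.object([#("type", json.string("string"))])),
        #("label", json.object([#("type", json.string("string"))])),
      ])),
      #("required", json.array(["text"], json.string)),
      #("nullable", json.array(["label"], json.string)),
    ])
  // Passes: "text" is present and "label" is allowed to be null
  let data =
    json.object([
      #("text", json.string("hello")),
      #("label", json.null()),
    ])
  field.validate_object_data(data, schema, ctx)
}
```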

/// Helper to validate required fields exist in properties
fn validate_required_fields(
  def_name: String,
  required: List(Dynamic),
  properties: option.Option(Json),
) -> Result(Nil, errors.ValidationError) {
  // Convert dynamics to strings
  let field_names =
    list.filter_map(required, fn(item) { decode.run(item, decode.string) })

  // Check all items are strings
  use _ <- result.try(case list.length(field_names) == list.length(required) {
    True -> Ok(Nil)
    False ->
      Error(errors.invalid_schema(
        def_name <> ": required fields must be strings",
      ))
  })

  // Validate each required field exists in properties
  case properties {
    Some(props) -> {
      case json_helpers.json_to_dict(props) {
        Ok(props_dict) -> {
          list.try_fold(field_names, Nil, fn(_, field_name) {
            case json_helpers.dict_has_key(props_dict, field_name) {
              True -> Ok(Nil)
              False ->
                Error(errors.invalid_schema(
                  def_name
                  <> ": required field '"
                  <> field_name
                  <> "' not found in properties",
                ))
            }
          })
        }
        Error(_) -> Ok(Nil)
      }
    }
    None -> {
      // No properties defined, but required fields specified - this is an error
      case list.is_empty(field_names) {
        True -> Ok(Nil)
        False ->
          Error(errors.invalid_schema(
            def_name <> ": required fields specified but no properties defined",
          ))
      }
    }
  }
}

/// Helper to validate nullable fields exist in properties
fn validate_nullable_fields(
  def_name: String,
  nullable: List(Dynamic),
  properties: option.Option(Json),
) -> Result(Nil, errors.ValidationError) {
  // Convert dynamics to strings
  let field_names =
    list.filter_map(nullable, fn(item) { decode.run(item, decode.string) })

  // Check all items are strings
  use _ <- result.try(case list.length(field_names) == list.length(nullable) {
    True -> Ok(Nil)
    False ->
      Error(errors.invalid_schema(
        def_name <> ": nullable fields must be strings",
      ))
  })

  // Validate each nullable field exists in properties
  case properties {
    Some(props) -> {
      case json_helpers.json_to_dict(props) {
        Ok(props_dict) -> {
          list.try_fold(field_names, Nil, fn(_, field_name) {
            case json_helpers.dict_has_key(props_dict, field_name) {
              True -> Ok(Nil)
              False ->
                Error(errors.invalid_schema(
                  def_name
                  <> ": nullable field '"
                  <> field_name
                  <> "' not found in properties",
                ))
            }
          })
        }
        Error(_) -> Ok(Nil)
      }
    }
    None -> {
      // No properties defined, but nullable fields specified - this is an error
      case list.is_empty(field_names) {
        True -> Ok(Nil)
        False ->
          Error(errors.invalid_schema(
            def_name <> ": nullable fields specified but no properties defined",
          ))
      }
    }
  }
}

/// Helper to validate required fields are present in data
fn validate_required_fields_in_data(
  def_name: String,
  required: List(Dynamic),
  data: Json,
) -> Result(Nil, errors.ValidationError) {
  // Convert dynamics to strings
  let field_names =
    list.filter_map(required, fn(item) { decode.run(item, decode.string) })

  // Check each required field exists in data
  list.try_fold(field_names, Nil, fn(_, field_name) {
    case json_helpers.get_field(data, field_name) {
      Some(_) -> Ok(Nil)
      None -> {
        let message = case def_name {
          "" -> "required field '" <> field_name <> "' is missing"
          _ -> def_name <> ": required field '" <> field_name <> "' is missing"
        }
        Error(errors.data_validation(message))
      }
    }
  })
}

/// Validates all property schemas in an object
fn validate_property_schemas(
  properties: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  // Convert JSON object to dict and validate each property
  case json_helpers.json_to_dict(properties) {
    Ok(props_dict) -> {
      dict.fold(props_dict, Ok(Nil), fn(acc, prop_name, prop_schema_dyn) {
        use _ <- result.try(acc)
        // Convert dynamic to Json
        case json_helpers.dynamic_to_json(prop_schema_dyn) {
          Ok(prop_schema) -> {
            let nested_ctx = context.with_path(ctx, "properties." <> prop_name)
            validate_single_property_schema(prop_schema, nested_ctx)
          }
          Error(e) -> Error(e)
        }
      })
    }
    Error(e) -> Error(e)
  }
}

/// Dispatch validation to appropriate validator based on type
fn validate_single_property_schema(
  prop_schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  dispatch_schema_validation(prop_schema, ctx)
}

/// Validates all properties in data against their schemas
fn validate_properties_data(
  data: Json,
  properties: Json,
  nullable_fields: List(String),
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  // Convert data to dict
  case json_helpers.json_to_dict(data) {
    Ok(data_dict) -> {
      // Convert properties schema to dict
      case json_helpers.json_to_dict(properties) {
        Ok(props_dict) -> {
          // Iterate through data fields
          dict.fold(data_dict, Ok(Nil), fn(acc, field_name, field_value) {
            use _ <- result.try(acc)
            // Check if field has a schema definition
            case dict.get(props_dict, field_name) {
              Ok(field_schema_dyn) -> {
                // Convert dynamic schema to Json
                case json_helpers.dynamic_to_json(field_schema_dyn) {
                  Ok(field_schema) -> {
                    let nested_ctx = context.with_path(ctx, field_name)
                    // Check for null values
                    case json_helpers.is_null_dynamic(field_value) {
                      True -> {
                        // Check if field is nullable
                        case list.contains(nullable_fields, field_name) {
                          True -> Ok(Nil)
                          False ->
                            Error(errors.data_validation(
                              "Field '"
                              <> field_name
                              <> "' at '"
                              <> context.path(ctx)
                              <> "' cannot be null",
                            ))
                        }
                      }
                      False -> {
                        // Validate field data against schema
                        case json_helpers.dynamic_to_json(field_value) {
                          Ok(field_value_json) ->
                            validate_single_property_data(
                              field_value_json,
                              field_schema,
                              nested_ctx,
                            )
                          Error(e) -> Error(e)
                        }
                      }
                    }
                  }
                  Error(e) -> Error(e)
                }
              }
              Error(_) -> {
                // Unknown fields are allowed in objects (open schema)
                Ok(Nil)
              }
            }
          })
        }
        Error(e) -> Error(e)
      }
    }
    Error(e) -> Error(e)
  }
}

/// Dispatch data validation to appropriate validator based on type
fn validate_single_property_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  dispatch_data_validation(data, schema, ctx)
}

// ============================================================================
// ARRAY VALIDATOR
// ============================================================================

const array_allowed_fields = [
  "type", "items", "minLength", "maxLength", "description",
]

/// Validates array schema definition
pub fn validate_array_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    array_allowed_fields,
    "array",
  ))

  // Validate required 'items' field
  let items = case json_helpers.get_field(schema, "items") {
    Some(items_value) -> Ok(items_value)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": array missing required 'items' field",
      ))
  }

  use items_schema <- result.try(items)

  // Recursively validate the items schema definition
  // (with_path inserts the "." separator itself)
  let nested_ctx = context.with_path(ctx, "items")
  use _ <- result.try(validate_array_item_schema(items_schema, nested_ctx))

  // Validate length constraints
  let min_length = json_helpers.get_int(schema, "minLength")
  let max_length = json_helpers.get_int(schema, "maxLength")

  // Validate that minLength/maxLength are consistent
  use _ <- result.try(constraints.validate_length_constraint_consistency(
    def_name,
    min_length,
    max_length,
    "array",
  ))

  Ok(Nil)
}

/// Validates array data against schema
pub fn validate_array_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Data must be an array
  case json_helpers.is_array(data) {
    False -> {
      let type_name = get_type_name(data)
      Error(errors.data_validation(
        def_name <> ": expected array, found " <> type_name,
      ))
    }
    True -> {
      // Get array from data
      let data_array = case json_helpers.get_array_from_value(data) {
        Some(arr) -> Ok(arr)
        None ->
          Error(errors.data_validation(def_name <> ": failed to parse array"))
      }

      use arr <- result.try(data_array)

      let array_length = list.length(arr)

      // Validate minLength constraint
      use _ <- result.try(case json_helpers.get_int(schema, "minLength") {
        Some(min_length) ->
          case array_length < min_length {
            True ->
              Error(errors.data_validation(
                def_name
                <> ": array has length "
                <> int.to_string(array_length)
                <> " but minimum length is "
                <> int.to_string(min_length),
              ))
            False -> Ok(Nil)
          }
        None -> Ok(Nil)
      })

      // Validate maxLength constraint
      use _ <- result.try(case json_helpers.get_int(schema, "maxLength") {
        Some(max_length) ->
          case array_length > max_length {
            True ->
              Error(errors.data_validation(
                def_name
                <> ": array has length "
                <> int.to_string(array_length)
                <> " but maximum length is "
                <> int.to_string(max_length),
              ))
            False -> Ok(Nil)
          }
        None -> Ok(Nil)
      })

      // Validate each array item against the items schema
      case json_helpers.get_field(schema, "items") {
        Some(items_schema) -> {
          // Validate each item with index in path
          list.index_fold(arr, Ok(Nil), fn(acc, item, index) {
            use _ <- result.try(acc)
            let nested_ctx =
              context.with_path(ctx, "[" <> int.to_string(index) <> "]")
            validate_array_item_data(item, items_schema, nested_ctx)
          })
        }
        None -> Ok(Nil)
      }
    }
  }
}
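And the array counterpart: the length bounds are checked first, then every element is validated against `items` with its index appended to the error path (a sketch, same assumptions as the object example above):

```gleam
import gleam/json
import honk/validation/field

pub fn array_example(ctx) {
  let schema =
    json.object([
      #("type", json.string("array")),
      #("items", json.object([#("type", json.string("integer"))])),
      #("minLength", json.int(1)),
      #("maxLength", json.int(3)),
    ])
  // Valid: length within [1, 3], every element an integer
  let assert Ok(Nil) =
    field.validate_array_data(json.array([1, 2], json.int), schema, ctx)
  // Invalid: four elements exceed maxLength
  let assert Error(_) =
    field.validate_array_data(json.array([1, 2, 3, 4], json.int), schema, ctx)
}
```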

/// Validates an items schema definition recursively
fn validate_array_item_schema(
  items_schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  // Handle reference types by delegating to reference validator
  case json_helpers.get_string(items_schema, "type") {
    Some("ref") -> reference.validate_schema(items_schema, ctx)
    _ -> dispatch_schema_validation(items_schema, ctx)
  }
}

/// Validates runtime data against an items schema using recursive validation
fn validate_array_item_data(
  item: Dynamic,
  items_schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  // Convert dynamic to Json for validation
  let item_json = json_helpers.dynamic_to_json(item)

  use item_value <- result.try(item_json)

  // Handle reference types by delegating to reference validator
  case json_helpers.get_string(items_schema, "type") {
    Some("ref") -> reference.validate_data(item_value, items_schema, ctx)
    _ -> dispatch_data_validation(item_value, items_schema, ctx)
  }
}

// ============================================================================
// SHARED HELPERS
// ============================================================================

/// Helper to get type name for error messages
fn get_type_name(data: Json) -> String {
  case json_helpers.is_null(data) {
    True -> "null"
    False ->
      case json_helpers.is_bool(data) {
        True -> "boolean"
        False ->
          case json_helpers.is_int(data) {
            True -> "number"
            False ->
              case json_helpers.is_string(data) {
                True -> "string"
                False ->
                  case json_helpers.is_array(data) {
                    True -> "array"
                    False -> "object"
                  }
              }
          }
      }
  }
}
src/honk/validation/formats.gleam (new file, +306)
···
// String format validation

import gleam/list
import gleam/regexp
import gleam/string
import gleam/time/timestamp
import honk/types

/// Validates RFC3339 datetime format
pub fn is_valid_rfc3339_datetime(value: String) -> Bool {
  // Max length check (64 chars)
  let len = string.length(value)
  case len == 0 || len > 64 {
    True -> False
    False -> {
      // Stricter RFC3339 regex pattern with restricted digit ranges
      let pattern =
        "^[0-9]{4}-[01][0-9]-[0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9](\\.[0-9]{1,20})?(Z|([+-][0-2][0-9]:[0-5][0-9]))$"

      case regexp.from_string(pattern) {
        Ok(re) ->
          case regexp.check(re, value) {
            False -> False
            True -> {
              // Reject -00:00 timezone suffix (must use +00:00 per ISO-8601)
              case string.ends_with(value, "-00:00") {
                True -> False
                False -> {
                  // Attempt actual parsing to validate it's a real datetime
                  case timestamp.parse_rfc3339(value) {
                    Ok(_) -> True
                    Error(_) -> False
                  }
                }
              }
            }
          }
        Error(_) -> False
      }
    }
  }
}
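A few cases the datetime validator accepts and rejects, following the regex plus the parse check above (illustrative):

```gleam
import honk/validation/formats

pub fn datetime_examples() {
  // Accepted: Z suffix, numeric offsets, fractional seconds
  let assert True = formats.is_valid_rfc3339_datetime("2024-01-15T12:30:00Z")
  let assert True =
    formats.is_valid_rfc3339_datetime("2024-01-15T12:30:00.123+02:00")
  // Rejected: -00:00 offset (must be +00:00) and date-only strings
  let assert False =
    formats.is_valid_rfc3339_datetime("2024-01-15T12:30:00-00:00")
  let assert False = formats.is_valid_rfc3339_datetime("2024-01-15")
}
```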

/// Validates URI format
pub fn is_valid_uri(value: String) -> Bool {
  // URI validation with max length and stricter scheme
  // Max length check (8192 chars)
  let len = string.length(value)
  case len == 0 || len > 8192 {
    True -> False
    False -> {
      // Lowercase scheme only, max 81 chars, printable characters after
      // Note: Using [^ \t\n\r\x00-\x1F] for printable/graph chars
      let pattern = "^[a-z][a-z.-]{0,80}:[!-~]+$"
      case regexp.from_string(pattern) {
        Ok(re) -> regexp.check(re, value)
        Error(_) -> False
      }
    }
  }
}

/// Validates AT Protocol URI format (at://did:plc:xxx/collection/rkey)
pub fn is_valid_at_uri(value: String) -> Bool {
  // Max length check (8192 chars)
  let len = string.length(value)
  case len == 0 || len > 8192 {
    True -> False
    False ->
      case string.starts_with(value, "at://") {
        False -> False
        True -> {
          // Pattern: at://authority[/collection[/rkey]]
          let without_scheme = string.drop_start(value, 5)
          case string.split(without_scheme, "/") {
            [authority] -> {
              // Just authority - must be DID or handle
              is_valid_did(authority) || is_valid_handle(authority)
            }
            [authority, collection] -> {
              // Authority + collection - validate both
              case is_valid_did(authority) || is_valid_handle(authority) {
                False -> False
                True -> is_valid_nsid(collection)
              }
            }
            [authority, collection, rkey] -> {
              // Full URI - validate all parts
              case is_valid_did(authority) || is_valid_handle(authority) {
                False -> False
                True ->
                  case is_valid_nsid(collection) {
                    False -> False
                    True -> is_valid_record_key(rkey)
                  }
              }
            }
            _ -> False
          }
        }
      }
  }
}

/// Validates DID format (did:method:identifier)
pub fn is_valid_did(value: String) -> Bool {
  // Max length check (2048 chars)
  let len = string.length(value)
  case len == 0 || len > 2048 {
    True -> False
    False ->
      case string.starts_with(value, "did:") {
        False -> False
        True -> {
          // Pattern ensures identifier ends with valid char (not %)
          let pattern = "^did:[a-z]+:[a-zA-Z0-9._:%-]*[a-zA-Z0-9._-]$"
          case regexp.from_string(pattern) {
            Ok(re) -> regexp.check(re, value)
            Error(_) -> False
          }
        }
      }
  }
}

/// Validates AT Protocol handle (user.bsky.social)
pub fn is_valid_handle(value: String) -> Bool {
  // Handle is a domain name (hostname)
  // Must be lowercase, can have dots, no special chars except hyphen
  // Pattern requires at least one dot and TLD starts with letter
  let pattern =
    "^([a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\\.)+[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?$"

  case
    string.length(value) == 0 || string.length(value) > 253,
    regexp.from_string(pattern)
  {
    True, _ -> False
    False, Ok(re) ->
      case regexp.check(re, value) {
        False -> False
        True -> {
          // Extract TLD and check against disallowed list
          let parts = string.split(value, ".")
          case list.last(parts) {
            Ok(tld) ->
              case tld {
                "local"
                | "arpa"
                | "invalid"
                | "localhost"
                | "internal"
                | "example"
                | "onion"
                | "alt" -> False
                _ -> True
              }
            Error(_) -> False
          }
        }
      }
    False, Error(_) -> False
  }
}
165
+
166
+
/// Validates AT identifier (either DID or handle)
167
+
pub fn is_valid_at_identifier(value: String) -> Bool {
168
+
is_valid_did(value) || is_valid_handle(value)
169
+
}
170
+
171
+
/// Validates NSID format (com.example.type)
172
+
pub fn is_valid_nsid(value: String) -> Bool {
173
+
// NSID: reversed domain name with type
174
+
// Pattern: authority.name (e.g., com.example.record)
175
+
let pattern =
176
+
"^[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(\\.[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+$"
177
+
178
+
case regexp.from_string(pattern) {
179
+
Ok(re) -> {
180
+
case regexp.check(re, value) {
181
+
False -> False
182
+
True -> {
183
+
// Must have at least 3 segments and max length 317
184
+
let segments = string.split(value, ".")
185
+
list.length(segments) >= 3 && string.length(value) <= 317
186
+
}
187
+
}
188
+
}
189
+
Error(_) -> False
190
+
}
191
+
}
192
+
193
+
/// Validates CID format (Content Identifier)
194
+
pub fn is_valid_cid(value: String) -> Bool {
195
+
// Informal/incomplete helper for fast string verification
196
+
// Aligned with indigo's atproto/syntax/cid.go approach
197
+
// Length: 8-256 chars, alphanumeric plus += characters
198
+
// Rejects CIDv0 starting with "Qmb"
199
+
let len = string.length(value)
200
+
201
+
case len < 8 || len > 256 {
202
+
True -> False
203
+
False -> {
204
+
// Reject CIDv0 (not allowed in this version of atproto)
205
+
case string.starts_with(value, "Qmb") {
206
+
True -> False
207
+
False -> {
208
+
// Pattern: alphanumeric plus + and =
209
+
let pattern = "^[a-zA-Z0-9+=]{8,256}$"
210
+
case regexp.from_string(pattern) {
211
+
Ok(re) -> regexp.check(re, value)
212
+
Error(_) -> False
213
+
}
214
+
}
215
+
}
216
+
}
217
+
}
218
+
}
219
+
220
+
/// Validates CID format with raw multicodec (0x55) for blobs
221
+
/// Base32 CIDv1 with raw multicodec starts with "bafkrei"
222
+
pub fn is_valid_raw_cid(value: String) -> Bool {
223
+
case is_valid_cid(value) {
224
+
False -> False
225
+
True -> string.starts_with(value, "bafkrei")
226
+
}
227
+
}
228
+
229
+
/// Validates BCP47 language tag
230
+
pub fn is_valid_language_tag(value: String) -> Bool {
231
+
// Lenient BCP47 validation (max 128 chars)
232
+
// Allows: i prefix (IANA), 2-3 letter codes, flexible extensions
233
+
// e.g., en, en-US, zh-Hans-CN, i-enochian
234
+
let len = string.length(value)
235
+
case len == 0 || len > 128 {
236
+
True -> False
237
+
False -> {
238
+
let pattern = "^(i|[a-z]{2,3})(-[a-zA-Z0-9]+)*$"
239
+
case regexp.from_string(pattern) {
240
+
Ok(re) -> regexp.check(re, value)
241
+
Error(_) -> False
242
+
}
243
+
}
244
+
}
245
+
}
246
+
247
+
/// Validates TID format (Timestamp Identifier)
248
+
pub fn is_valid_tid(value: String) -> Bool {
249
+
// TID is base32-sortable timestamp (13 characters)
250
+
// First char restricted to ensure valid timestamp range: 234567abcdefghij
251
+
// Remaining 12 chars use full alphabet: 234567abcdefghijklmnopqrstuvwxyz
252
+
let pattern = "^[234567abcdefghij][234567abcdefghijklmnopqrstuvwxyz]{12}$"
253
+
254
+
case string.length(value) == 13, regexp.from_string(pattern) {
255
+
True, Ok(re) -> regexp.check(re, value)
256
+
_, _ -> False
257
+
}
258
+
}
259
+
260
+
/// Validates record key format
261
+
pub fn is_valid_record_key(value: String) -> Bool {
262
+
// Record keys can be TIDs or custom strings
263
+
// Custom strings: alphanumeric, dots, dashes, underscores, tildes, colons
264
+
// Length: 1-512 characters
265
+
// Explicitly reject "." and ".." for security
266
+
let len = string.length(value)
267
+
268
+
case value == "." || value == ".." {
269
+
True -> False
270
+
False ->
271
+
case len >= 1 && len <= 512 {
272
+
False -> False
273
+
True -> {
274
+
// Check if it's a TID first
275
+
case is_valid_tid(value) {
276
+
True -> True
277
+
False -> {
278
+
// Check custom format (added : to allowed chars)
279
+
let pattern = "^[a-zA-Z0-9_~.:-]+$"
280
+
case regexp.from_string(pattern) {
281
+
Ok(re) -> regexp.check(re, value)
282
+
Error(_) -> False
283
+
}
284
+
}
285
+
}
286
+
}
287
+
}
288
+
}
289
+
}
290
+
291
+
/// Validates a string value against a specific format
292
+
pub fn validate_format(value: String, format: types.StringFormat) -> Bool {
293
+
case format {
294
+
types.DateTime -> is_valid_rfc3339_datetime(value)
295
+
types.Uri -> is_valid_uri(value)
296
+
types.AtUri -> is_valid_at_uri(value)
297
+
types.Did -> is_valid_did(value)
298
+
types.Handle -> is_valid_handle(value)
299
+
types.AtIdentifier -> is_valid_at_identifier(value)
300
+
types.Nsid -> is_valid_nsid(value)
301
+
types.Cid -> is_valid_cid(value)
302
+
types.Language -> is_valid_language_tag(value)
303
+
types.Tid -> is_valid_tid(value)
304
+
types.RecordKey -> is_valid_record_key(value)
305
+
}
306
+
}
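
These helpers back the `validate_format` dispatcher above. A minimal usage sketch, assuming the module is importable as `honk/validation/formats` (the identifiers below are made up for illustration):

import honk/validation/formats

pub fn demo() {
  // Each helper returns a Bool; the sample values are hypothetical
  let assert True = formats.is_valid_did("did:plc:ewvi7nxzyoun6zhxrhs64oiz")
  let assert True = formats.is_valid_handle("user.bsky.social")
  // No dot, so the handle pattern rejects it
  let assert False = formats.is_valid_handle("localhost")
  // TIDs are exactly 13 base32-sortable characters
  let assert True = formats.is_valid_tid("3jzfcijpj2z2a")
  // "." and ".." are explicitly rejected record keys
  let assert False = formats.is_valid_record_key("..")
}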

+63 src/honk/validation/meta/token.gleam

// Token type validator
// Tokens are unit types used for discrimination in unions

import gleam/json.{type Json}
import gleam/string
import honk/errors
import honk/internal/constraints
import honk/internal/json_helpers
import honk/validation/context.{type ValidationContext}

const allowed_fields = ["type", "description"]

/// Validates token schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  constraints.validate_allowed_fields(def_name, keys, allowed_fields, "token")
}

/// Validates token data against schema
/// Note: Tokens are unit types used for discrimination in unions.
/// The token value should be a string matching the fully-qualified token name
/// (e.g., "example.lexicon.record#demoToken"). Full token name validation
/// happens at the union/record level where the expected token name is known.
pub fn validate_data(
  data: Json,
  _schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Token data must be a string (the fully-qualified token name)
  case json_helpers.is_string(data) {
    True -> {
      // Extract and validate the string value
      let json_str = json.to_string(data)
      // Remove quotes from JSON string representation
      let value = case
        string.starts_with(json_str, "\"") && string.ends_with(json_str, "\"")
      {
        True -> string.slice(json_str, 1, string.length(json_str) - 2)
        False -> json_str
      }

      case string.is_empty(value) {
        True ->
          Error(errors.data_validation(
            def_name <> ": token value cannot be empty string",
          ))
        False -> Ok(Nil)
      }
    }
    False ->
      Error(errors.data_validation(
        def_name <> ": expected string for token data, got other type",
      ))
  }
}
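
A short data-validation sketch for tokens. The `ctx` argument is assumed to come from the library's context builder (e.g. `build_validation_context`), and the token name is hypothetical:

import gleam/json
import honk/validation/meta/token

pub fn demo(ctx) {
  let schema = json.object([#("type", json.string("token"))])
  // A fully-qualified token name passes
  let assert Ok(Nil) =
    token.validate_data(
      json.string("example.lexicon.record#demoToken"),
      schema,
      ctx,
    )
  // The empty string is rejected
  let assert Error(_) = token.validate_data(json.string(""), schema, ctx)
}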

+68 src/honk/validation/meta/unknown.gleam

// Unknown type validator
// Unknown allows flexible data with AT Protocol data model rules

import gleam/json.{type Json}
import gleam/option.{None, Some}
import honk/errors
import honk/internal/constraints
import honk/internal/json_helpers
import honk/validation/context.{type ValidationContext}

const allowed_fields = ["type", "description"]

/// Validates unknown schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  constraints.validate_allowed_fields(def_name, keys, allowed_fields, "unknown")
}

/// Validates unknown data against schema
/// Unknown allows flexible data following AT Protocol data model rules
pub fn validate_data(
  data: Json,
  _schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Unknown data must be an object (not primitives, arrays, bytes, or blobs)
  case json_helpers.is_object(data) {
    False ->
      Error(errors.data_validation(
        def_name <> ": unknown type must be an object, not a primitive or array",
      ))
    True -> {
      // Check for special AT Protocol objects that are not allowed
      // Bytes objects: {"$bytes": "base64-string"}
      case json_helpers.get_string(data, "$bytes") {
        Some(_) ->
          Error(errors.data_validation(
            def_name <> ": unknown type cannot be a bytes object",
          ))
        None -> {
          // Blob objects: {"$type": "blob", "ref": {...}, "mimeType": "...", "size": ...}
          case json_helpers.get_string(data, "$type") {
            Some("blob") ->
              Error(errors.data_validation(
                def_name <> ": unknown type cannot be a blob object",
              ))
            _ -> {
              // Valid unknown object
              // AT Protocol data model rules:
              // - No floats (only integers) - enforced by gleam_json type system
              // - Objects can contain any valid JSON data
              // - May contain $type field for type discrimination
              Ok(Nil)
            }
          }
        }
      }
    }
  }
}
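
To make the rejection rules concrete, a sketch (again assuming a `ctx` built elsewhere; the shapes follow the comments above):

import gleam/json
import honk/validation/meta/unknown

pub fn demo(ctx) {
  let schema = json.object([#("type", json.string("unknown"))])
  // A plain object passes, optionally carrying a $type discriminator
  let plain =
    json.object([
      #("$type", json.string("com.example.custom")),
      #("note", json.string("anything goes")),
    ])
  let assert Ok(Nil) = unknown.validate_data(plain, schema, ctx)
  // Bytes envelopes are rejected
  let bytes = json.object([#("$bytes", json.string("aGVsbG8="))])
  let assert Error(_) = unknown.validate_data(bytes, schema, ctx)
  // Blob envelopes are rejected too
  let blob = json.object([#("$type", json.string("blob"))])
  let assert Error(_) = unknown.validate_data(blob, schema, ctx)
}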

+331 src/honk/validation/primary/params.gleam

// Params type validator
// Params define query/procedure/subscription parameters (XRPC endpoint arguments)

import gleam/dynamic/decode
import gleam/json.{type Json}
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import honk/errors
import honk/internal/constraints
import honk/internal/json_helpers
import honk/validation/context.{type ValidationContext}
import honk/validation/field as validation_field
import honk/validation/meta/unknown as validation_meta_unknown
import honk/validation/primitive/boolean as validation_primitive_boolean
import honk/validation/primitive/integer as validation_primitive_integer
import honk/validation/primitive/string as validation_primitive_string

const allowed_fields = ["type", "description", "properties", "required"]

/// Validates params schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "params",
  ))

  // Validate type field
  use _ <- result.try(case json_helpers.get_string(schema, "type") {
    Some("params") -> Ok(Nil)
    Some(other_type) ->
      Error(errors.invalid_schema(
        def_name <> ": expected type 'params', got '" <> other_type <> "'",
      ))
    None ->
      Error(errors.invalid_schema(def_name <> ": params missing type field"))
  })

  // Get properties and required fields
  let properties_dict = case json_helpers.get_field(schema, "properties") {
    Some(props) -> json_helpers.json_to_dict(props)
    None -> Ok(json_helpers.empty_dict())
  }

  let required_array = json_helpers.get_array(schema, "required")

  // Validate required fields exist in properties
  use props_dict <- result.try(properties_dict)
  use _ <- result.try(validate_required_fields(
    def_name,
    required_array,
    props_dict,
  ))

  // Validate each property
  validate_properties(def_name, props_dict, ctx)
}

/// Validates that all required fields exist in properties
fn validate_required_fields(
  def_name: String,
  required_array: option.Option(List(decode.Dynamic)),
  properties_dict: json_helpers.JsonDict,
) -> Result(Nil, errors.ValidationError) {
  case required_array {
    None -> Ok(Nil)
    Some(required) -> {
      list.try_fold(required, Nil, fn(_, item) {
        case decode.run(item, decode.string) {
          Ok(field_name) -> {
            case json_helpers.dict_has_key(properties_dict, field_name) {
              True -> Ok(Nil)
              False ->
                Error(errors.invalid_schema(
                  def_name
                  <> ": required field '"
                  <> field_name
                  <> "' not found in properties",
                ))
            }
          }
          Error(_) ->
            Error(errors.invalid_schema(
              def_name <> ": required field must be a string",
            ))
        }
      })
    }
  }
}

/// Validates all properties in the params
fn validate_properties(
  def_name: String,
  properties_dict: json_helpers.JsonDict,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  json_helpers.dict_fold(properties_dict, Ok(Nil), fn(acc, key, value) {
    case acc {
      Error(e) -> Error(e)
      Ok(_) -> {
        // Check property name is not empty
        use _ <- result.try(case key {
          "" ->
            Error(errors.invalid_schema(
              def_name <> ": empty property name not allowed",
            ))
          _ -> Ok(Nil)
        })

        // Convert dynamic value to JSON
        use prop_json <- result.try(case json_helpers.dynamic_to_json(value) {
          Ok(j) -> Ok(j)
          Error(_) ->
            Error(errors.invalid_schema(
              def_name <> ": invalid property value for '" <> key <> "'",
            ))
        })

        // Validate property type restrictions
        validate_property_type(def_name, key, prop_json, ctx)
      }
    }
  })
}

/// Validates that a property has an allowed type
/// Allowed types: boolean, integer, string, unknown, or arrays of these
fn validate_property_type(
  def_name: String,
  property_name: String,
  property_schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let prop_path = def_name <> ".properties." <> property_name

  case json_helpers.get_string(property_schema, "type") {
    Some("boolean") | Some("integer") | Some("string") | Some("unknown") -> {
      // These are allowed types - recursively validate the schema
      let prop_ctx = context.with_path(ctx, "properties." <> property_name)
      validate_property_schema(property_schema, prop_ctx)
    }
    Some("array") -> {
      // Arrays are allowed, but items must be one of the allowed types
      case json_helpers.get_field(property_schema, "items") {
        Some(items) -> {
          case json_helpers.get_string(items, "type") {
            Some("boolean") | Some("integer") | Some("string") | Some("unknown") -> {
              // Valid array item type - recursively validate
              let prop_ctx =
                context.with_path(ctx, "properties." <> property_name)
              validate_property_schema(property_schema, prop_ctx)
            }
            Some(other_type) ->
              Error(errors.invalid_schema(
                prop_path
                <> ": params array items must be boolean, integer, string, or unknown, got '"
                <> other_type
                <> "'",
              ))
            None ->
              Error(errors.invalid_schema(
                prop_path <> ": array items missing type field",
              ))
          }
        }
        None ->
          Error(errors.invalid_schema(
            prop_path <> ": array property missing items field",
          ))
      }
    }
    Some(other_type) ->
      Error(errors.invalid_schema(
        prop_path
        <> ": params properties must be boolean, integer, string, unknown, or arrays of these, got '"
        <> other_type
        <> "'",
      ))
    None ->
      Error(errors.invalid_schema(prop_path <> ": property missing type field"))
  }
}

/// Validates a property schema by dispatching to the appropriate validator
fn validate_property_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  case json_helpers.get_string(schema, "type") {
    Some("boolean") -> validation_primitive_boolean.validate_schema(schema, ctx)
    Some("integer") -> validation_primitive_integer.validate_schema(schema, ctx)
    Some("string") -> validation_primitive_string.validate_schema(schema, ctx)
    Some("unknown") -> validation_meta_unknown.validate_schema(schema, ctx)
    Some("array") -> validation_field.validate_array_schema(schema, ctx)
    Some(unknown_type) ->
      Error(errors.invalid_schema(
        context.path(ctx) <> ": unknown type '" <> unknown_type <> "'",
      ))
    None ->
      Error(errors.invalid_schema(
        context.path(ctx) <> ": schema missing type field",
      ))
  }
}

/// Validates params data against schema
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Get data as dict
  use data_dict <- result.try(json_helpers.json_to_dict(data))

  // Get properties and required from params schema
  let properties_dict = case json_helpers.get_field(schema, "properties") {
    Some(props) -> json_helpers.json_to_dict(props)
    None -> Ok(json_helpers.empty_dict())
  }

  let required_array = json_helpers.get_array(schema, "required")

  use props_dict <- result.try(properties_dict)

  // Check all required parameters are present
  use _ <- result.try(case required_array {
    Some(required) -> {
      list.try_fold(required, Nil, fn(_, item) {
        case decode.run(item, decode.string) {
          Ok(param_name) -> {
            case json_helpers.dict_has_key(data_dict, param_name) {
              True -> Ok(Nil)
              False ->
                Error(errors.data_validation(
                  def_name
                  <> ": missing required parameter '"
                  <> param_name
                  <> "'",
                ))
            }
          }
          Error(_) -> Ok(Nil)
        }
      })
    }
    None -> Ok(Nil)
  })

  // Validate each parameter in data
  json_helpers.dict_fold(data_dict, Ok(Nil), fn(acc, param_name, param_value) {
    case acc {
      Error(e) -> Error(e)
      Ok(_) -> {
        // Get the schema for this parameter
        case json_helpers.dict_get(props_dict, param_name) {
          Some(param_schema_dyn) -> {
            // Convert dynamic to JSON
            case json_helpers.dynamic_to_json(param_schema_dyn) {
              Ok(param_schema) -> {
                // Convert param value to JSON
                case json_helpers.dynamic_to_json(param_value) {
                  Ok(param_json) -> {
                    // Validate the parameter value against its schema
                    let param_ctx = context.with_path(ctx, param_name)
                    validate_parameter_value(
                      param_json,
                      param_schema,
                      param_ctx,
                    )
                  }
                  Error(e) -> Error(e)
                }
              }
              Error(e) -> Error(e)
            }
          }
          None -> {
            // Parameter not in schema - could warn or allow
            // For now, allow unknown parameters
            Ok(Nil)
          }
        }
      }
    }
  })
}

/// Validates a single parameter value against its schema
fn validate_parameter_value(
  value: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  // Dispatch based on schema type
  case json_helpers.get_string(schema, "type") {
    Some("boolean") ->
      validation_primitive_boolean.validate_data(value, schema, ctx)
    Some("integer") ->
      validation_primitive_integer.validate_data(value, schema, ctx)
    Some("string") ->
      validation_primitive_string.validate_data(value, schema, ctx)
    Some("unknown") -> validation_meta_unknown.validate_data(value, schema, ctx)
    Some("array") -> validation_field.validate_array_data(value, schema, ctx)
    Some(other_type) ->
      Error(errors.data_validation(
        context.path(ctx)
        <> ": unsupported parameter type '"
        <> other_type
        <> "'",
      ))
    None ->
      Error(errors.data_validation(
        context.path(ctx) <> ": parameter schema missing type field",
      ))
  }
}
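
A data-validation sketch for params. It assumes a `ctx` built elsewhere and that the primitive validators accept unconstrained schemas, which holds for the code shown in this diff:

import gleam/json
import honk/validation/primary/params

pub fn demo(ctx) {
  // One required string parameter and one optional integer
  let schema =
    json.object([
      #("type", json.string("params")),
      #("required", json.preprocessed_array([json.string("q")])),
      #("properties", json.object([
        #("q", json.object([#("type", json.string("string"))])),
        #("limit", json.object([#("type", json.string("integer"))])),
      ])),
    ])
  let data =
    json.object([#("q", json.string("bees")), #("limit", json.int(25))])
  let assert Ok(Nil) = params.validate_data(data, schema, ctx)
  // Omitting "q" would fail the required-parameter check
  let assert Error(_) = params.validate_data(json.object([]), schema, ctx)
}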

+163 src/honk/validation/primary/procedure.gleam

// Procedure type validator
// Procedures are XRPC Procedure (HTTP POST) endpoints for modifying data

import gleam/json.{type Json}
import gleam/option.{None, Some}
import gleam/result
import honk/errors
import honk/internal/constraints
import honk/internal/json_helpers
import honk/validation/context.{type ValidationContext}
import honk/validation/field as validation_field
import honk/validation/field/reference as validation_field_reference
import honk/validation/field/union as validation_field_union
import honk/validation/primary/params

const allowed_fields = [
  "type", "parameters", "input", "output", "errors", "description",
]

/// Validates procedure schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "procedure",
  ))

  // Validate parameters field if present
  use _ <- result.try(case json_helpers.get_field(schema, "parameters") {
    Some(parameters) -> validate_parameters_schema(parameters, ctx)
    None -> Ok(Nil)
  })

  // Validate input field if present
  use _ <- result.try(case json_helpers.get_field(schema, "input") {
    Some(input) -> validate_io_schema(def_name, input, "input")
    None -> Ok(Nil)
  })

  // Validate output field if present
  use _ <- result.try(case json_helpers.get_field(schema, "output") {
    Some(output) -> validate_io_schema(def_name, output, "output")
    None -> Ok(Nil)
  })

  // Validate errors field if present
  // (no structural constraints are enforced on the errors array yet)
  case json_helpers.get_array(schema, "errors") {
    Some(_) -> Ok(Nil)
    None -> Ok(Nil)
  }
}

/// Validates procedure input data against schema
/// Data should be the procedure input body as JSON
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  // If schema has input, validate data against it
  case json_helpers.get_field(schema, "input") {
    Some(input) -> {
      let input_ctx = context.with_path(ctx, "input")
      validate_body_data(data, input, input_ctx)
    }
    None -> Ok(Nil)
  }
}

/// Validates procedure output data against schema
pub fn validate_output_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  // If schema has output, validate data against it
  case json_helpers.get_field(schema, "output") {
    Some(output) -> {
      let output_ctx = context.with_path(ctx, "output")
      validate_body_data(data, output, output_ctx)
    }
    None -> Ok(Nil)
  }
}

/// Validates data against a SchemaBody (input or output)
fn validate_body_data(
  data: Json,
  body: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  // Get the schema field from the body
  case json_helpers.get_field(body, "schema") {
    Some(schema) -> {
      let schema_ctx = context.with_path(ctx, "schema")
      // Dispatch to appropriate validator based on schema type
      validate_body_schema_data(data, schema, schema_ctx)
    }
    None -> Ok(Nil)
  }
}

/// Validates data against a body schema (object, ref, or union)
fn validate_body_schema_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  case json_helpers.get_string(schema, "type") {
    Some("object") -> validation_field.validate_object_data(data, schema, ctx)
    Some("ref") -> {
      // For references, we need to resolve and validate
      // For now, just validate it's structured correctly
      validation_field_reference.validate_data(data, schema, ctx)
    }
    Some("union") -> validation_field_union.validate_data(data, schema, ctx)
    Some(other_type) ->
      Error(errors.data_validation(
        context.path(ctx)
        <> ": unsupported body schema type '"
        <> other_type
        <> "'",
      ))
    None ->
      Error(errors.data_validation(
        context.path(ctx) <> ": body schema missing type field",
      ))
  }
}

/// Validates parameters schema definition
fn validate_parameters_schema(
  parameters: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  // Validate the full params schema
  let params_ctx = context.with_path(ctx, "parameters")
  params.validate_schema(parameters, params_ctx)
}

/// Validates input/output schema definition
fn validate_io_schema(
  def_name: String,
  io: Json,
  field_name: String,
) -> Result(Nil, errors.ValidationError) {
  // Input/output must have encoding field
  case json_helpers.get_string(io, "encoding") {
    Some(_) -> Ok(Nil)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": procedure " <> field_name <> " missing encoding field",
      ))
  }
}
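
The only hard requirement on input/output bodies at schema level is the encoding field. A sketch (assuming a `ctx` built elsewhere):

import gleam/json
import honk/validation/primary/procedure

pub fn demo(ctx) {
  // Input carries an encoding, so the schema validates
  let schema =
    json.object([
      #("type", json.string("procedure")),
      #("input", json.object([#("encoding", json.string("application/json"))])),
    ])
  let assert Ok(Nil) = procedure.validate_schema(schema, ctx)
  // Without encoding, validation fails
  let bad =
    json.object([
      #("type", json.string("procedure")),
      #("input", json.object([])),
    ])
  let assert Error(_) = procedure.validate_schema(bad, ctx)
}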

+224 src/honk/validation/primary/query.gleam

// Query type validator
// Queries are XRPC Query (HTTP GET) endpoints for retrieving data

import gleam/dynamic/decode
import gleam/json.{type Json}
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import honk/errors
import honk/internal/constraints
import honk/internal/json_helpers
import honk/validation/context.{type ValidationContext}
import honk/validation/field as validation_field
import honk/validation/meta/unknown as validation_meta_unknown
import honk/validation/primary/params
import honk/validation/primitive/boolean as validation_primitive_boolean
import honk/validation/primitive/integer as validation_primitive_integer
import honk/validation/primitive/string as validation_primitive_string

const allowed_fields = ["type", "parameters", "output", "errors", "description"]

/// Validates query schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "query",
  ))

  // Validate parameters field if present
  use _ <- result.try(case json_helpers.get_field(schema, "parameters") {
    Some(parameters) -> validate_parameters_schema(parameters, ctx)
    None -> Ok(Nil)
  })

  // Validate output field if present
  use _ <- result.try(case json_helpers.get_field(schema, "output") {
    Some(output) -> validate_output_schema(def_name, output)
    None -> Ok(Nil)
  })

  // Validate errors field if present
  // (no structural constraints are enforced on the errors array yet)
  case json_helpers.get_array(schema, "errors") {
    Some(_) -> Ok(Nil)
    None -> Ok(Nil)
  }
}

/// Validates query data against schema
/// Data should be the query parameters as a JSON object
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Query data must be an object (the parameters)
  use _ <- result.try(case json_helpers.is_object(data) {
    True -> Ok(Nil)
    False ->
      Error(errors.data_validation(
        def_name <> ": query parameters must be an object",
      ))
  })

  // If schema has parameters, validate data against them
  case json_helpers.get_field(schema, "parameters") {
    Some(parameters) -> {
      let params_ctx = context.with_path(ctx, "parameters")
      validate_parameters_data(data, parameters, params_ctx)
    }
    None -> Ok(Nil)
  }
}

/// Validates parameter data against params schema
fn validate_parameters_data(
  data: Json,
  params_schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Get data as dict
  use data_dict <- result.try(json_helpers.json_to_dict(data))

  // Get properties and required from params schema
  let properties_dict = case
    json_helpers.get_field(params_schema, "properties")
  {
    Some(props) -> json_helpers.json_to_dict(props)
    None -> Ok(json_helpers.empty_dict())
  }

  let required_array = json_helpers.get_array(params_schema, "required")

  use props_dict <- result.try(properties_dict)

  // Check all required parameters are present
  use _ <- result.try(case required_array {
    Some(required) -> {
      list.try_fold(required, Nil, fn(_, item) {
        case decode.run(item, decode.string) {
          Ok(param_name) -> {
            case json_helpers.dict_has_key(data_dict, param_name) {
              True -> Ok(Nil)
              False ->
                Error(errors.data_validation(
                  def_name
                  <> ": missing required parameter '"
                  <> param_name
                  <> "'",
                ))
            }
          }
          Error(_) -> Ok(Nil)
        }
      })
    }
    None -> Ok(Nil)
  })

  // Validate each parameter in data
  json_helpers.dict_fold(data_dict, Ok(Nil), fn(acc, param_name, param_value) {
    case acc {
      Error(e) -> Error(e)
      Ok(_) -> {
        // Get the schema for this parameter
        case json_helpers.dict_get(props_dict, param_name) {
          Some(param_schema_dyn) -> {
            // Convert dynamic to JSON
            case json_helpers.dynamic_to_json(param_schema_dyn) {
              Ok(param_schema) -> {
                // Convert param value to JSON
                case json_helpers.dynamic_to_json(param_value) {
                  Ok(param_json) -> {
                    // Validate the parameter value against its schema
                    let param_ctx = context.with_path(ctx, param_name)
                    validate_parameter_value(
                      param_json,
                      param_schema,
                      param_ctx,
                    )
                  }
                  Error(e) -> Error(e)
                }
              }
              Error(e) -> Error(e)
            }
          }
          None -> {
            // Parameter not in schema - could warn or allow
            // For now, allow unknown parameters
            Ok(Nil)
          }
        }
      }
    }
  })
}

/// Validates a single parameter value against its schema
fn validate_parameter_value(
  value: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  // Dispatch based on schema type
  case json_helpers.get_string(schema, "type") {
    Some("boolean") ->
      validation_primitive_boolean.validate_data(value, schema, ctx)
    Some("integer") ->
      validation_primitive_integer.validate_data(value, schema, ctx)
    Some("string") ->
      validation_primitive_string.validate_data(value, schema, ctx)
    Some("unknown") -> validation_meta_unknown.validate_data(value, schema, ctx)
    Some("array") -> validation_field.validate_array_data(value, schema, ctx)
    Some(other_type) ->
      Error(errors.data_validation(
        context.path(ctx)
        <> ": unsupported parameter type '"
        <> other_type
        <> "'",
      ))
    None ->
      Error(errors.data_validation(
        context.path(ctx) <> ": parameter schema missing type field",
      ))
  }
}

/// Validates parameters schema definition
fn validate_parameters_schema(
  parameters: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  // Validate the full params schema
  let params_ctx = context.with_path(ctx, "parameters")
  params.validate_schema(parameters, params_ctx)
}

/// Validates output schema definition
fn validate_output_schema(
  def_name: String,
  output: Json,
) -> Result(Nil, errors.ValidationError) {
  // Output must have encoding field
  case json_helpers.get_string(output, "encoding") {
    Some(_) -> Ok(Nil)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": query output missing encoding field",
      ))
  }
}
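
Query data validation delegates to the params logic above; the failing case below exercises only code shown in this diff (a `ctx` is assumed from elsewhere):

import gleam/json
import honk/validation/primary/query

pub fn demo(ctx) {
  let schema =
    json.object([
      #("type", json.string("query")),
      #("parameters", json.object([
        #("type", json.string("params")),
        #("required", json.preprocessed_array([json.string("actor")])),
        #("properties", json.object([
          #("actor", json.object([#("type", json.string("string"))])),
        ])),
      ])),
    ])
  // An empty parameter object is missing the required "actor"
  let assert Error(_) = query.validate_data(json.object([]), schema, ctx)
}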

+183 src/honk/validation/primary/record.gleam

// Record type validator

import gleam/json.{type Json}
import gleam/option.{None, Some}
import gleam/result
import gleam/string
import honk/errors
import honk/internal/constraints
import honk/internal/json_helpers
import honk/validation/context.{type ValidationContext}
import honk/validation/field

const allowed_fields = ["type", "key", "record", "description"]

const allowed_record_fields = [
  "type", "properties", "required", "nullable", "description",
]

/// Validates record schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields at record level
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "record",
  ))

  // Validate required 'key' field
  let key_value = case json_helpers.get_string(schema, "key") {
    Some(key) -> Ok(key)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": record missing required 'key' field",
      ))
  }

  use key <- result.try(key_value)
  use _ <- result.try(validate_key(def_name, key))

  // Validate required 'record' field
  let record_def = case json_helpers.get_field(schema, "record") {
    Some(record) -> Ok(record)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": record missing required 'record' field",
      ))
  }

  use record <- result.try(record_def)

  // Validate record object structure
  use _ <- result.try(validate_record_object(def_name, record))

  // Recursively validate properties - delegate to object validator
  // The record field is an object, so we can use field.validate_object_schema
  // (path segment is bare, matching the other with_path call sites)
  let record_ctx = context.with_path(ctx, "record")
  field.validate_object_schema(record, record_ctx)
}

/// Validates record data against schema
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Data must be an object
  case json_helpers.is_object(data) {
    False ->
      Error(errors.data_validation(def_name <> ": expected object for record"))
    True -> {
      // Get the record definition
      case json_helpers.get_field(schema, "record") {
        Some(record_def) -> {
          // Delegate to object validator for full validation
          // The record's data validation is the same as object validation
          field.validate_object_data(data, record_def, ctx)
        }
        None ->
          Error(errors.data_validation(
            def_name <> ": record schema missing 'record' field",
          ))
      }
    }
  }
}

/// Validates the `key` field of a record definition
///
/// Valid key types:
/// - `tid`: Record key is a Timestamp Identifier (auto-generated)
/// - `any`: Record key can be any valid record key format
/// - `nsid`: Record key must be a valid NSID
/// - `literal:*`: Record key must match the literal value after the colon
fn validate_key(
  def_name: String,
  key: String,
) -> Result(Nil, errors.ValidationError) {
  case key {
    "tid" -> Ok(Nil)
    "any" -> Ok(Nil)
    "nsid" -> Ok(Nil)
    _ ->
      case string.starts_with(key, "literal:") {
        True -> Ok(Nil)
        False ->
          Error(errors.invalid_schema(
            def_name
            <> ": record has invalid key type '"
            <> key
            <> "'. Must be 'tid', 'any', 'nsid', or 'literal:*'",
          ))
      }
  }
}

/// Validates the structure of a record object definition
fn validate_record_object(
  def_name: String,
  record_def: Json,
) -> Result(Nil, errors.ValidationError) {
  // Must be type "object"
  case json_helpers.get_string(record_def, "type") {
    Some("object") -> {
      // Validate allowed fields in record object
      let keys = json_helpers.get_keys(record_def)
      use _ <- result.try(constraints.validate_allowed_fields(
        def_name,
        keys,
        allowed_record_fields,
        "record object",
      ))

      // Validate properties structure
      use _ <- result.try(
        case json_helpers.get_field(record_def, "properties") {
          Some(properties) ->
            case json_helpers.is_object(properties) {
              True -> Ok(Nil)
              False ->
                Error(errors.invalid_schema(
                  def_name <> ": record properties must be an object",
                ))
            }
          None -> Ok(Nil)
        },
      )

      // Validate nullable is an array if present
      case json_helpers.get_array(record_def, "nullable") {
        Some(_) -> Ok(Nil)
        None -> {
          // Check if nullable exists but is not an array
          case json_helpers.get_field(record_def, "nullable") {
            Some(_) ->
              Error(errors.invalid_schema(
                def_name <> ": record nullable field must be an array",
              ))
            None -> Ok(Nil)
          }
        }
      }
    }
    Some(other_type) ->
      Error(errors.invalid_schema(
        def_name
        <> ": record field must be type 'object', got '"
        <> other_type
        <> "'",
      ))
    None ->
      Error(errors.invalid_schema(def_name <> ": record field missing type"))
  }
}
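
A schema-validation sketch for records. It assumes a `ctx` built elsewhere and that `field.validate_object_schema` (not shown in this diff) accepts this object shape:

import gleam/json
import honk/validation/primary/record

pub fn demo(ctx) {
  let schema =
    json.object([
      #("type", json.string("record")),
      #("key", json.string("tid")),
      #("record", json.object([
        #("type", json.string("object")),
        #("required", json.preprocessed_array([json.string("text")])),
        #("properties", json.object([
          #("text", json.object([#("type", json.string("string"))])),
        ])),
      ])),
    ])
  let assert Ok(Nil) = record.validate_schema(schema, ctx)
  // key: "timestamp" would be rejected; only tid, any, nsid, literal:* pass
}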

+269 src/honk/validation/primary/subscription.gleam

// Subscription type validator
// Subscriptions are XRPC Subscription (WebSocket) endpoints for real-time data

import gleam/dynamic/decode
import gleam/json.{type Json}
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import honk/errors
import honk/internal/constraints
import honk/internal/json_helpers
import honk/validation/context.{type ValidationContext}
import honk/validation/field as validation_field
import honk/validation/field/union as validation_field_union
import honk/validation/meta/unknown as validation_meta_unknown
import honk/validation/primary/params
import honk/validation/primitive/boolean as validation_primitive_boolean
import honk/validation/primitive/integer as validation_primitive_integer
import honk/validation/primitive/string as validation_primitive_string

const allowed_fields = [
  "type",
  "parameters",
  "message",
  "errors",
  "description",
]

/// Validates subscription schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "subscription",
  ))

  // Validate parameters field if present
  use _ <- result.try(case json_helpers.get_field(schema, "parameters") {
    Some(parameters) -> validate_parameters_schema(parameters, ctx)
    None -> Ok(Nil)
  })

  // Validate message field if present
  use _ <- result.try(case json_helpers.get_field(schema, "message") {
    Some(message) -> validate_message_schema(def_name, message)
    None -> Ok(Nil)
  })

  // Validate errors field if present
  // (no structural constraints are enforced on the errors array yet)
  case json_helpers.get_array(schema, "errors") {
    Some(_) -> Ok(Nil)
    None -> Ok(Nil)
  }
}

/// Validates subscription parameters data against schema
/// Data should be the connection parameters as a JSON object
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Subscription parameter data must be an object
  use _ <- result.try(case json_helpers.is_object(data) {
    True -> Ok(Nil)
    False ->
      Error(errors.data_validation(
        def_name <> ": subscription parameters must be an object",
      ))
  })

  // If schema has parameters, validate data against them
  case json_helpers.get_field(schema, "parameters") {
    Some(parameters) -> {
      let params_ctx = context.with_path(ctx, "parameters")
      validate_parameters_data(data, parameters, params_ctx)
    }
    None -> Ok(Nil)
  }
}

/// Validates subscription message data against schema
pub fn validate_message_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  // Get the message schema
  case json_helpers.get_field(schema, "message") {
    Some(message) -> {
      case json_helpers.get_field(message, "schema") {
        Some(msg_schema) -> {
          // Message schema must be a union - validate data against it
          let msg_ctx = context.with_path(ctx, "message.schema")
          validation_field_union.validate_data(data, msg_schema, msg_ctx)
        }
        None -> Ok(Nil)
      }
    }
    None -> Ok(Nil)
  }
}

/// Validates parameter data against params schema
/// (Reused from query validator pattern)
fn validate_parameters_data(
  data: Json,
  params_schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Get data as dict
  use data_dict <- result.try(json_helpers.json_to_dict(data))

  // Get properties and required from params schema
  let properties_dict = case
    json_helpers.get_field(params_schema, "properties")
  {
    Some(props) -> json_helpers.json_to_dict(props)
    None -> Ok(json_helpers.empty_dict())
  }

  let required_array = json_helpers.get_array(params_schema, "required")

  use props_dict <- result.try(properties_dict)

  // Check all required parameters are present
  use _ <- result.try(case required_array {
    Some(required) -> {
      list.try_fold(required, Nil, fn(_, item) {
        case decode.run(item, decode.string) {
          Ok(param_name) -> {
            case json_helpers.dict_has_key(data_dict, param_name) {
              True -> Ok(Nil)
              False ->
                Error(errors.data_validation(
                  def_name
                  <> ": missing required parameter '"
                  <> param_name
                  <> "'",
                ))
            }
          }
          Error(_) -> Ok(Nil)
        }
      })
    }
    None -> Ok(Nil)
  })

  // Validate each parameter in data
  json_helpers.dict_fold(data_dict, Ok(Nil), fn(acc, param_name, param_value) {
    case acc {
      Error(e) -> Error(e)
      Ok(_) -> {
        // Get the schema for this parameter
        case json_helpers.dict_get(props_dict, param_name) {
          Some(param_schema_dyn) -> {
            // Convert dynamic to JSON
            case json_helpers.dynamic_to_json(param_schema_dyn) {
              Ok(param_schema) -> {
                // Convert param value to JSON
                case json_helpers.dynamic_to_json(param_value) {
                  Ok(param_json) -> {
                    // Validate the parameter value against its schema
                    let param_ctx = context.with_path(ctx, param_name)
                    validate_parameter_value(
                      param_json,
                      param_schema,
                      param_ctx,
                    )
                  }
                  Error(e) -> Error(e)
                }
              }
              Error(e) -> Error(e)
            }
          }
          None -> {
            // Parameter not in schema - allow unknown parameters
            Ok(Nil)
          }
        }
      }
    }
  })
}

/// Validates a single parameter value against its schema
fn validate_parameter_value(
  value: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  // Dispatch based on schema type
  case json_helpers.get_string(schema, "type") {
    Some("boolean") ->
      validation_primitive_boolean.validate_data(value, schema, ctx)
    Some("integer") ->
      validation_primitive_integer.validate_data(value, schema, ctx)
    Some("string") ->
      validation_primitive_string.validate_data(value, schema, ctx)
    Some("unknown") -> validation_meta_unknown.validate_data(value, schema, ctx)
    Some("array") -> validation_field.validate_array_data(value, schema, ctx)
    Some(other_type) ->
      Error(errors.data_validation(
        context.path(ctx)
        <> ": unsupported parameter type '"
        <> other_type
        <> "'",
      ))
    None ->
      Error(errors.data_validation(
        context.path(ctx) <> ": parameter schema missing type field",
      ))
  }
}

/// Validates parameters schema definition
fn validate_parameters_schema(
  parameters: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  // Validate the full params schema
  let params_ctx = context.with_path(ctx, "parameters")
  params.validate_schema(parameters, params_ctx)
}

/// Validates message schema definition
fn validate_message_schema(
  def_name: String,
  message: Json,
) -> Result(Nil, errors.ValidationError) {
  // Message must have schema field
  case json_helpers.get_field(message, "schema") {
    Some(schema_field) -> {
      // Schema must be a union type
      case json_helpers.get_string(schema_field, "type") {
        Some("union") -> Ok(Nil)
        Some(other_type) ->
          Error(errors.invalid_schema(
            def_name
            <> ": subscription message schema must be type 'union', got '"
            <> other_type
            <> "'",
          ))
        None ->
          Error(errors.invalid_schema(
            def_name <> ": subscription message schema missing type field",
          ))
      }
    }
    None ->
      Error(errors.invalid_schema(
        def_name <> ": subscription message missing schema field",
      ))
  }
}
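
The message-schema rule is strict: it must be a union. A sketch exercising the rejection path (assuming a `ctx` from elsewhere):

import gleam/json
import honk/validation/primary/subscription

pub fn demo(ctx) {
  // An object message schema is rejected; only unions are allowed
  let bad =
    json.object([
      #("type", json.string("subscription")),
      #("message", json.object([
        #("schema", json.object([#("type", json.string("object"))])),
      ])),
    ])
  let assert Error(_) = subscription.validate_schema(bad, ctx)
}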
+359
src/honk/validation/primitive/blob.gleam
+359
src/honk/validation/primitive/blob.gleam
···
1
+
// Blob type validator
2
+
// Blobs are binary objects with MIME types and size constraints
3
+
4
+
import gleam/dynamic.{type Dynamic}
5
+
import gleam/dynamic/decode
6
+
import gleam/int
7
+
import gleam/json.{type Json}
8
+
import gleam/list
9
+
import gleam/option.{None, Some}
10
+
import gleam/result
11
+
import gleam/string
12
+
import honk/errors
13
+
import honk/internal/constraints
14
+
import honk/internal/json_helpers
15
+
import honk/validation/context.{type ValidationContext}
16
+
import honk/validation/formats
17
+
18
+
const allowed_fields = ["type", "accept", "maxSize", "description"]
19
+
20
+
const allowed_data_fields = ["$type", "ref", "mimeType", "size"]
21
+
22
+
/// Validates blob schema definition
23
+
pub fn validate_schema(
24
+
schema: Json,
25
+
ctx: ValidationContext,
26
+
) -> Result(Nil, errors.ValidationError) {
27
+
let def_name = context.path(ctx)
28
+
29
+
// Validate allowed fields
30
+
let keys = json_helpers.get_keys(schema)
31
+
use _ <- result.try(constraints.validate_allowed_fields(
32
+
def_name,
33
+
keys,
34
+
allowed_fields,
35
+
"blob",
36
+
))
37
+
38
+
// Validate accept field if present
39
+
use _ <- result.try(case json_helpers.get_array(schema, "accept") {
40
+
Some(accept_array) -> validate_accept_field(def_name, accept_array)
41
+
None -> Ok(Nil)
42
+
})
43
+
44
+
// Validate maxSize is positive integer if present
45
+
case json_helpers.get_int(schema, "maxSize") {
46
+
Some(max_size) ->
47
+
case max_size > 0 {
48
+
True -> Ok(Nil)
49
+
False ->
50
+
Error(errors.invalid_schema(
51
+
def_name <> ": blob maxSize must be greater than 0",
52
+
))
53
+
}
54
+
None -> Ok(Nil)
55
+
}
56
+
}
57
+
58
+
/// Validates blob data against schema
59
+
pub fn validate_data(
60
+
data: Json,
61
+
schema: Json,
62
+
ctx: ValidationContext,
63
+
) -> Result(Nil, errors.ValidationError) {
64
+
let def_name = context.path(ctx)
65
+
66
+
// Data must be an object
67
+
case json_helpers.is_object(data) {
68
+
False -> {
69
+
Error(errors.data_validation(def_name <> ": expected blob object"))
70
+
}
71
+
True -> {
72
+
// Validate no extra fields (strict mode per atproto implementation)
73
+
let keys = json_helpers.get_keys(data)
74
+
use _ <- result.try(validate_no_extra_fields(def_name, keys))
75
+
76
+
// Validate $type field must be "blob"
77
+
use _ <- result.try(case json_helpers.get_string(data, "$type") {
78
+
Some("blob") -> Ok(Nil)
79
+
Some(other) ->
80
+
Error(errors.data_validation(
81
+
def_name <> ": blob $type must be 'blob', got '" <> other <> "'",
82
+
))
83
+
None ->
84
+
Error(errors.data_validation(
85
+
def_name <> ": blob missing required '$type' field",
86
+
))
87
+
})
88
+
89
+
// Validate ref field with $link containing raw CID
90
+
use _ <- result.try(validate_ref_field(data, def_name))
91
+
92
+
// Validate required mimeType field (non-empty)
93
+
use mime_type <- result.try(
94
+
case json_helpers.get_string(data, "mimeType") {
95
+
Some(mt) ->
96
+
case string.is_empty(mt) {
97
+
True ->
98
+
Error(errors.data_validation(
99
+
def_name <> ": blob mimeType cannot be empty",
100
+
))
101
+
False -> Ok(mt)
102
+
}
103
+
None ->
104
+
Error(errors.data_validation(
105
+
def_name <> ": blob missing required 'mimeType' field",
106
+
))
107
+
},
108
+
)
109
+
110
+
// Validate required size field (non-negative integer)
111
+
use size <- result.try(case json_helpers.get_int(data, "size") {
112
+
Some(s) ->
113
+
case s >= 0 {
114
+
True -> Ok(s)
115
+
False ->
116
+
Error(errors.data_validation(
117
+
def_name <> ": blob size must be non-negative",
118
+
))
119
+
}
120
+
None ->
121
+
Error(errors.data_validation(
122
+
def_name <> ": blob missing or invalid 'size' field",
123
+
))
124
+
})
125
+
126
+
// Validate against accept constraint if present
127
+
use _ <- result.try(case json_helpers.get_array(schema, "accept") {
128
+
Some(accept_array) -> {
129
+
validate_mime_type_against_accept(def_name, mime_type, accept_array)
130
+
}
131
+
None -> Ok(Nil)
132
+
})
133
+
134
+
// Validate against maxSize constraint if present
135
+
case json_helpers.get_int(schema, "maxSize") {
136
+
Some(max_size) ->
137
+
case size <= max_size {
138
+
True -> Ok(Nil)
139
+
False ->
140
+
Error(errors.data_validation(
141
+
def_name
142
+
<> ": blob size "
143
+
<> int.to_string(size)
144
+
<> " exceeds maxSize "
145
+
<> int.to_string(max_size),
146
+
))
147
+
}
148
+
None -> Ok(Nil)
149
+
}
150
+
}
151
+
}
152
+
}
153
+
154
+
/// Validates that blob data has no extra fields
155
+
fn validate_no_extra_fields(
156
+
def_name: String,
157
+
keys: List(String),
158
+
) -> Result(Nil, errors.ValidationError) {
159
+
let extra_keys =
160
+
list.filter(keys, fn(key) { !list.contains(allowed_data_fields, key) })
161
+
case extra_keys {
162
+
[] -> Ok(Nil)
163
+
[first, ..] ->
164
+
Error(errors.data_validation(
165
+
def_name <> ": blob has unexpected field '" <> first <> "'",
166
+
))
167
+
}
168
+
}
169
+
170
+
/// Validates the ref field containing $link with raw CID
171
+
fn validate_ref_field(
172
+
data: Json,
173
+
def_name: String,
174
+
) -> Result(Nil, errors.ValidationError) {
175
+
case json_helpers.get_field(data, "ref") {
176
+
Some(ref_json) ->
177
+
case json_helpers.is_object(ref_json) {
178
+
False ->
179
+
Error(errors.data_validation(
180
+
def_name <> ": blob ref must be an object",
181
+
))
182
+
True ->
183
+
case json_helpers.get_string(ref_json, "$link") {
184
+
Some(cid) ->
185
+
case formats.is_valid_raw_cid(cid) {
186
+
True -> Ok(Nil)
187
+
False ->
188
+
Error(errors.data_validation(
189
+
def_name
190
+
<> ": blob ref.$link must be a valid CID with raw multicodec (bafkrei prefix)",
191
+
))
192
+
}
193
+
None ->
194
+
Error(errors.data_validation(
195
+
def_name <> ": blob ref must have $link field",
196
+
))
197
+
}
198
+
}
199
+
None ->
200
+
Error(errors.data_validation(
201
+
def_name <> ": blob missing required 'ref' field",
202
+
))
203
+
}
204
+
}
205
+
206
+
/// Validates accept field array
207
+
fn validate_accept_field(
208
+
def_name: String,
209
+
accept_array: List(Dynamic),
210
+
) -> Result(Nil, errors.ValidationError) {
211
+
list.index_fold(accept_array, Ok(Nil), fn(acc, item, i) {
212
+
use _ <- result.try(acc)
213
+
case decode.run(item, decode.string) {
214
+
Ok(mime_type) -> validate_mime_type_pattern(def_name, mime_type, i)
      Error(_) ->
        Error(errors.invalid_schema(
          def_name
          <> ": blob accept["
          <> int.to_string(i)
          <> "] must be a string",
        ))
    }
  })
}

/// Validates MIME type pattern syntax
fn validate_mime_type_pattern(
  def_name: String,
  mime_type: String,
  _index: Int,
) -> Result(Nil, errors.ValidationError) {
  case string.is_empty(mime_type) {
    True ->
      Error(errors.invalid_schema(
        def_name <> ": blob MIME type cannot be empty",
      ))
    False -> {
      // Allow */*
      case mime_type {
        "*/*" -> Ok(Nil)
        _ -> {
          // Must contain exactly one /
          case string.contains(mime_type, "/") {
            False ->
              Error(errors.invalid_schema(
                def_name
                <> ": blob MIME type '"
                <> mime_type
                <> "' must contain a '/' character",
              ))
            True -> {
              let parts = string.split(mime_type, "/")
              case parts {
                [type_part, subtype_part] -> {
                  // Validate * usage
                  use _ <- result.try(validate_wildcard(
                    def_name,
                    type_part,
                    "type",
                    mime_type,
                  ))
                  validate_wildcard(
                    def_name,
                    subtype_part,
                    "subtype",
                    mime_type,
                  )
                }
                _ ->
                  Error(errors.invalid_schema(
                    def_name
                    <> ": blob MIME type '"
                    <> mime_type
                    <> "' must have exactly one '/' character",
                  ))
              }
            }
          }
        }
      }
    }
  }
}

/// Validates wildcard usage in MIME type parts
fn validate_wildcard(
  def_name: String,
  part: String,
  part_name: String,
  full_mime_type: String,
) -> Result(Nil, errors.ValidationError) {
  case string.contains(part, "*") {
    True ->
      case part {
        "*" -> Ok(Nil)
        _ ->
          Error(errors.invalid_schema(
            def_name
            <> ": blob MIME type '"
            <> full_mime_type
            <> "' can only use '*' as a complete wildcard for "
            <> part_name,
          ))
      }
    False -> Ok(Nil)
  }
}

/// Validates MIME type against accept patterns
fn validate_mime_type_against_accept(
  def_name: String,
  mime_type: String,
  accept_array: List(Dynamic),
) -> Result(Nil, errors.ValidationError) {
  let accept_patterns =
    list.filter_map(accept_array, fn(item) { decode.run(item, decode.string) })

  // Check if mime_type matches any accept pattern
  case
    list.any(accept_patterns, fn(pattern) {
      mime_type_matches_pattern(mime_type, pattern)
    })
  {
    True -> Ok(Nil)
    False ->
      Error(errors.data_validation(
        def_name
        <> ": blob mimeType '"
        <> mime_type
        <> "' not accepted. Allowed: "
        <> string.join(accept_patterns, ", "),
      ))
  }
}

/// Checks if a MIME type matches a pattern
fn mime_type_matches_pattern(mime_type: String, pattern: String) -> Bool {
  case pattern {
    "*/*" -> True
    _ -> {
      let mime_parts = string.split(mime_type, "/")
      let pattern_parts = string.split(pattern, "/")
      case mime_parts, pattern_parts {
        [mime_type_part, mime_subtype], [pattern_type, pattern_subtype] -> {
          let type_matches = case pattern_type {
            "*" -> True
            _ -> mime_type_part == pattern_type
          }
          let subtype_matches = case pattern_subtype {
            "*" -> True
            _ -> mime_subtype == pattern_subtype
          }
          type_matches && subtype_matches
        }
        _, _ -> False
      }
    }
  }
}
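
To make the wildcard semantics concrete: `*/*` matches anything, `*` is only legal as a complete type or subtype segment, and everything else must match exactly. A minimal standalone sketch of the same matching rules (the `accepts` helper is hypothetical, written here only because `mime_type_matches_pattern` is private to the module):

```gleam
import gleam/list
import gleam/string

// Hypothetical helper mirroring the private matching logic above.
fn accepts(mime_type: String, patterns: List(String)) -> Bool {
  list.any(patterns, fn(pattern) {
    case string.split(pattern, "/"), string.split(mime_type, "/") {
      ["*", "*"], _ -> True
      [pt, ps], [t, s] -> { pt == "*" || pt == t } && { ps == "*" || ps == s }
      _, _ -> False
    }
  })
}

pub fn main() {
  let assert True = accepts("image/png", ["image/*"])
  let assert False = accepts("video/mp4", ["image/*", "application/json"])
}
```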
+86
src/honk/validation/primitive/boolean.gleam
···
// Boolean type validator

import gleam/json.{type Json}
import gleam/option.{None, Some}
import gleam/result
import honk/errors
import honk/internal/constraints
import honk/internal/json_helpers
import honk/validation/context.{type ValidationContext}

const allowed_fields = ["type", "const", "default", "description"]

/// Validates boolean schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "boolean",
  ))

  // Validate const/default exclusivity
  let has_const = json_helpers.get_bool(schema, "const") != None
  let has_default = json_helpers.get_bool(schema, "default") != None

  constraints.validate_const_default_exclusivity(
    def_name,
    has_const,
    has_default,
    "boolean",
  )
}

/// Validates boolean data against schema
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Check data is a boolean
  case json_helpers.is_bool(data) {
    False ->
      Error(errors.data_validation(
        def_name <> ": expected boolean, got other type",
      ))
    True -> {
      // Extract boolean value
      let json_str = json.to_string(data)
      let is_true = json_str == "true"
      let is_false = json_str == "false"

      case is_true || is_false {
        False ->
          Error(errors.data_validation(
            def_name <> ": invalid boolean representation",
          ))
        True -> {
          let value = is_true

          // Validate const constraint
          case json_helpers.get_bool(schema, "const") {
            Some(const_val) if const_val != value ->
              Error(errors.data_validation(
                def_name
                <> ": must be constant value "
                <> case const_val {
                  True -> "true"
                  False -> "false"
                },
              ))
            _ -> Ok(Nil)
          }
        }
      }
    }
  }
}
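
A small usage sketch for the const path (assumes an empty context from `context.build(context.builder())` is sufficient here, matching how the builder is used elsewhere in this PR; the schema literal is illustrative):

```gleam
import gleam/json
import honk/validation/context
import honk/validation/primitive/boolean

pub fn demo() {
  let assert Ok(ctx) = context.build(context.builder())
  let schema =
    json.object([
      #("type", json.string("boolean")),
      #("const", json.bool(True)),
    ])
  // json.bool(True) satisfies the const; json.bool(False) would not.
  let assert Ok(Nil) = boolean.validate_data(json.bool(True), schema, ctx)
}
```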
+134
src/honk/validation/primitive/bytes.gleam
···
// Bytes type validator
// Bytes are base64-encoded strings

import gleam/bit_array
import gleam/json.{type Json}
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import gleam/string
import honk/errors
import honk/internal/constraints
import honk/internal/json_helpers
import honk/validation/context.{type ValidationContext}

const allowed_fields = ["type", "minLength", "maxLength", "description"]

/// Validates bytes schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "bytes",
  ))

  // Validate length constraints
  let min_length = json_helpers.get_int(schema, "minLength")
  let max_length = json_helpers.get_int(schema, "maxLength")

  // Check for negative values
  use _ <- result.try(case min_length {
    Some(min) if min < 0 ->
      Error(errors.invalid_schema(
        def_name <> ": bytes schema minLength below zero",
      ))
    _ -> Ok(Nil)
  })

  use _ <- result.try(case max_length {
    Some(max) if max < 0 ->
      Error(errors.invalid_schema(
        def_name <> ": bytes schema maxLength below zero",
      ))
    _ -> Ok(Nil)
  })

  constraints.validate_length_constraint_consistency(
    def_name,
    min_length,
    max_length,
    "bytes",
  )
}

/// Validates bytes data against schema
/// Expects data in ATProto format: {"$bytes": "base64-string"}
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Check data is an object
  case json_helpers.is_object(data) {
    False -> Error(errors.data_validation(def_name <> ": expecting bytes"))
    True -> {
      // Get all keys from the object
      let keys = json_helpers.get_keys(data)

      // Must have exactly one field
      use _ <- result.try(case list.length(keys) {
        1 -> Ok(Nil)
        _ ->
          Error(errors.data_validation(
            def_name <> ": $bytes objects must have a single field",
          ))
      })

      // That field must be "$bytes" with a string value
      case json_helpers.get_string(data, "$bytes") {
        None ->
          Error(errors.data_validation(
            def_name <> ": $bytes field missing or not a string",
          ))
        Some(base64_str) -> {
          // Decode the base64 string (using RawStdEncoding - no padding)
          case bit_array.base64_decode(base64_str) {
            Error(_) ->
              Error(errors.data_validation(
                def_name <> ": decoding $bytes value: invalid base64 encoding",
              ))
            Ok(decoded_bytes) -> {
              // Validate length of decoded bytes
              let byte_length = bit_array.byte_size(decoded_bytes)
              let min_length = json_helpers.get_int(schema, "minLength")
              let max_length = json_helpers.get_int(schema, "maxLength")

              // Check length constraints
              use _ <- result.try(case min_length {
                Some(min) if byte_length < min ->
                  Error(errors.data_validation(
                    def_name
                    <> ": bytes size out of bounds: "
                    <> string.inspect(byte_length),
                  ))
                _ -> Ok(Nil)
              })

              use _ <- result.try(case max_length {
                Some(max) if byte_length > max ->
                  Error(errors.data_validation(
                    def_name
                    <> ": bytes size out of bounds: "
                    <> string.inspect(byte_length),
                  ))
                _ -> Ok(Nil)
              })

              Ok(Nil)
            }
          }
        }
      }
    }
  }
}
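
The `$bytes` wrapper this validator expects is the ATProto JSON encoding for binary data. A sketch of building a conforming value (the payload is arbitrary):

```gleam
import gleam/bit_array
import gleam/json.{type Json}

/// Builds {"$bytes": "aG9uaw=="}; the validator base64-decodes the value
/// and checks the decoded byte count against minLength/maxLength.
pub fn bytes_value() -> Json {
  let encoded = bit_array.base64_encode(<<"honk":utf8>>, True)
  json.object([#("$bytes", json.string(encoded))])
}
```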
+63
src/honk/validation/primitive/cid_link.gleam
···
// CID Link type validator
// CID links are IPFS content identifiers

import gleam/json.{type Json}
import gleam/option
import honk/errors
import honk/internal/constraints
import honk/internal/json_helpers
import honk/validation/context.{type ValidationContext}
import honk/validation/formats

const allowed_fields = ["type", "description"]

/// Validates cid-link schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "cid-link",
  )
}

/// Validates cid-link data against schema
pub fn validate_data(
  data: Json,
  _schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Check data is an object with $link field
  case json_helpers.is_object(data) {
    False ->
      Error(errors.data_validation(def_name <> ": expected CID link object"))
    True -> {
      // Validate structure: {$link: CID string}
      case json_helpers.get_string(data, "$link") {
        option.Some(cid) -> {
          // Validate CID format
          case formats.is_valid_cid(cid) {
            True -> Ok(Nil)
            False ->
              Error(errors.data_validation(
                def_name <> ": invalid CID format in $link",
              ))
          }
        }
        option.None ->
          Error(errors.data_validation(
            def_name <> ": CID link must have $link field",
          ))
      }
    }
  }
}
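
Likewise, the cid-link wire shape is a single-field object. A sketch (the CID string is illustrative; any value accepted by `formats.is_valid_cid` passes):

```gleam
import gleam/json.{type Json}

// Shape accepted by validate_data above: {"$link": "<cid>"}.
pub fn cid_link_value() -> Json {
  json.object([
    #("$link", json.string("bafyreib2rxk3rh6kzwq6gkbxtbnyfkbefgnf4q4gz5g36a")),
  ])
}
```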
+153
src/honk/validation/primitive/integer.gleam
···
// Integer type validator

import gleam/dynamic/decode
import gleam/int
import gleam/json.{type Json}
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import honk/errors
import honk/internal/constraints
import honk/internal/json_helpers
import honk/validation/context.{type ValidationContext}

const allowed_fields = [
  "type", "minimum", "maximum", "enum", "const", "default", "description",
]

/// Validates integer schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "integer",
  ))

  // Extract min/max constraints
  let minimum = json_helpers.get_int(schema, "minimum")
  let maximum = json_helpers.get_int(schema, "maximum")

  // Validate constraint consistency
  use _ <- result.try(constraints.validate_integer_constraint_consistency(
    def_name,
    minimum,
    maximum,
  ))

  // Validate enum is array of integers if present
  use _ <- result.try(case json_helpers.get_array(schema, "enum") {
    Some(enum_array) -> {
      list.try_fold(enum_array, Nil, fn(_, item) {
        case decode.run(item, decode.int) {
          Ok(_) -> Ok(Nil)
          Error(_) ->
            Error(errors.invalid_schema(
              def_name <> ": enum values must be integers",
            ))
        }
      })
    }
    None -> Ok(Nil)
  })

  // Validate const/default exclusivity
  let has_const = json_helpers.get_int(schema, "const") != None
  let has_default = json_helpers.get_int(schema, "default") != None

  constraints.validate_const_default_exclusivity(
    def_name,
    has_const,
    has_default,
    "integer",
  )
}

/// Validates integer data against schema
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Check data is an integer
  case json_helpers.is_int(data) {
    False ->
      Error(errors.data_validation(
        def_name <> ": expected integer, got other type",
      ))
    True -> {
      // Extract integer value
      let json_str = json.to_string(data)
      case int.parse(json_str) {
        Error(_) ->
          Error(errors.data_validation(
            def_name <> ": failed to parse integer value",
          ))
        Ok(value) -> {
          // Validate const constraint first (most restrictive)
          case json_helpers.get_int(schema, "const") {
            Some(const_val) if const_val != value ->
              Error(errors.data_validation(
                def_name
                <> ": must be constant value "
                <> int.to_string(const_val)
                <> ", found "
                <> int.to_string(value),
              ))
            Some(_) -> Ok(Nil)
            None -> {
              // Validate enum constraint
              use _ <- result.try(case json_helpers.get_array(schema, "enum") {
                Some(enum_array) -> {
                  let enum_ints =
                    list.filter_map(enum_array, fn(item) {
                      decode.run(item, decode.int)
                    })

                  validate_integer_enum(value, enum_ints, def_name)
                }
                None -> Ok(Nil)
              })

              // Validate range constraints
              let minimum = json_helpers.get_int(schema, "minimum")
              let maximum = json_helpers.get_int(schema, "maximum")

              constraints.validate_integer_range(
                def_name,
                value,
                minimum,
                maximum,
              )
            }
          }
        }
      }
    }
  }
}

/// Helper to validate integer enum
fn validate_integer_enum(
  value: Int,
  enum_values: List(Int),
  def_name: String,
) -> Result(Nil, errors.ValidationError) {
  constraints.validate_enum_constraint(
    def_name,
    value,
    enum_values,
    "integer",
    int.to_string,
    fn(a, b) { a == b },
  )
}
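
Note the short-circuit above: when `const` is present and matches, the enum and range checks are skipped entirely. A sketch of a range-constrained schema (the field values are illustrative):

```gleam
import gleam/json.{type Json}

/// An integer that must fall in 1..5 inclusive.
pub fn rating_schema() -> Json {
  json.object([
    #("type", json.string("integer")),
    #("minimum", json.int(1)),
    #("maximum", json.int(5)),
  ])
}
```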
+39
src/honk/validation/primitive/null.gleam
···
// Null type validator

import gleam/json.{type Json}
import honk/errors
import honk/internal/constraints
import honk/internal/json_helpers
import honk/validation/context.{type ValidationContext}

const allowed_fields = ["type", "description"]

/// Validates null schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  constraints.validate_allowed_fields(def_name, keys, allowed_fields, "null")
}

/// Validates null data against schema
pub fn validate_data(
  data: Json,
  _schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Check data is null
  case json_helpers.is_null(data) {
    True -> Ok(Nil)
    False ->
      Error(errors.data_validation(
        def_name <> ": expected null, got other type",
      ))
  }
}
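
For completeness, the only data value this validator accepts:

```gleam
import gleam/json

// json.null() is the sole value that passes the null validator's validate_data.
pub fn null_value() -> json.Json {
  json.null()
}
```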
+297
src/honk/validation/primitive/string.gleam
···
// String type validator

import gleam/dynamic/decode
import gleam/json.{type Json}
import gleam/list
import gleam/option.{type Option, None, Some}
import gleam/result
import gleam/string
import honk/errors
import honk/internal/constraints
import honk/internal/json_helpers
import honk/types
import honk/validation/context.{type ValidationContext}
import honk/validation/formats

const allowed_fields = [
  "type", "format", "minLength", "maxLength", "minGraphemes", "maxGraphemes",
  "enum", "knownValues", "const", "default", "description",
]

/// Validates string schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "string",
  ))

  // Validate format if present
  case json_helpers.get_string(schema, "format") {
    Some(format_str) ->
      case types.string_to_format(format_str) {
        Ok(_format) -> Ok(Nil)
        Error(_) ->
          Error(errors.invalid_schema(
            def_name
            <> ": unknown format '"
            <> format_str
            <> "'. Valid formats: datetime, uri, at-uri, did, handle, at-identifier, nsid, cid, language, tid, record-key",
          ))
      }
    None -> Ok(Nil)
  }
  |> result.try(fn(_) {
    // Extract length constraints
    let min_length = json_helpers.get_int(schema, "minLength")
    let max_length = json_helpers.get_int(schema, "maxLength")
    let min_graphemes = json_helpers.get_int(schema, "minGraphemes")
    let max_graphemes = json_helpers.get_int(schema, "maxGraphemes")

    // Check for negative values
    use _ <- result.try(case min_length {
      Some(n) if n < 0 ->
        Error(errors.invalid_schema(
          def_name <> ": string schema minLength below zero",
        ))
      _ -> Ok(Nil)
    })

    use _ <- result.try(case max_length {
      Some(n) if n < 0 ->
        Error(errors.invalid_schema(
          def_name <> ": string schema maxLength below zero",
        ))
      _ -> Ok(Nil)
    })

    use _ <- result.try(case min_graphemes {
      Some(n) if n < 0 ->
        Error(errors.invalid_schema(
          def_name <> ": string schema minGraphemes below zero",
        ))
      _ -> Ok(Nil)
    })

    use _ <- result.try(case max_graphemes {
      Some(n) if n < 0 ->
        Error(errors.invalid_schema(
          def_name <> ": string schema maxGraphemes below zero",
        ))
      _ -> Ok(Nil)
    })

    // Validate byte length constraints
    use _ <- result.try(constraints.validate_length_constraint_consistency(
      def_name,
      min_length,
      max_length,
      "string",
    ))

    // Validate grapheme constraints
    constraints.validate_length_constraint_consistency(
      def_name,
      min_graphemes,
      max_graphemes,
      "string (graphemes)",
    )
  })
  |> result.try(fn(_) {
    // Validate enum is array of strings if present
    case json_helpers.get_array(schema, "enum") {
      Some(enum_array) -> {
        // Check each item is a string
        list.try_fold(enum_array, Nil, fn(_, item) {
          case decode.run(item, decode.string) {
            Ok(_) -> Ok(Nil)
            Error(_) ->
              Error(errors.invalid_schema(
                def_name <> ": enum values must be strings",
              ))
          }
        })
      }
      None -> Ok(Nil)
    }
  })
  |> result.try(fn(_) {
    // Validate knownValues is array of strings if present
    case json_helpers.get_array(schema, "knownValues") {
      Some(known_array) -> {
        list.try_fold(known_array, Nil, fn(_, item) {
          case decode.run(item, decode.string) {
            Ok(_) -> Ok(Nil)
            Error(_) ->
              Error(errors.invalid_schema(
                def_name <> ": knownValues must be strings",
              ))
          }
        })
      }
      None -> Ok(Nil)
    }
  })
  |> result.try(fn(_) {
    // Validate const/default exclusivity
    let has_const = json_helpers.get_string(schema, "const") != option.None
    let has_default = json_helpers.get_string(schema, "default") != option.None

    constraints.validate_const_default_exclusivity(
      def_name,
      has_const,
      has_default,
      "string",
    )
  })
}

/// Validates string data against schema
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, errors.ValidationError) {
  let def_name = context.path(ctx)

  // Check data is a string
  case json_helpers.is_string(data) {
    False ->
      Error(errors.data_validation(
        def_name <> ": expected string, got other type",
      ))
    True -> {
      // Extract the string value
      let json_str = json.to_string(data)
      // Remove quotes from JSON string representation
      let value = case
        string.starts_with(json_str, "\"") && string.ends_with(json_str, "\"")
      {
        True -> string.slice(json_str, 1, string.length(json_str) - 2)
        False -> json_str
      }

      // Validate length constraints
      let min_length = json_helpers.get_int(schema, "minLength")
      let max_length = json_helpers.get_int(schema, "maxLength")
      use _ <- result.try(validate_string_length(
        value,
        min_length,
        max_length,
        def_name,
      ))

      // Validate grapheme constraints
      let min_graphemes = json_helpers.get_int(schema, "minGraphemes")
      let max_graphemes = json_helpers.get_int(schema, "maxGraphemes")
      use _ <- result.try(validate_grapheme_length(
        value,
        min_graphemes,
        max_graphemes,
        def_name,
      ))

      // Validate format if specified
      case json_helpers.get_string(schema, "format") {
        Some(format_str) ->
          case types.string_to_format(format_str) {
            Ok(format) -> validate_string_format(value, format, def_name)
            Error(_) -> Ok(Nil)
          }
        None -> Ok(Nil)
      }
      |> result.try(fn(_) {
        // Validate enum if specified
        case json_helpers.get_array(schema, "enum") {
          Some(enum_array) -> {
            // Convert dynamics to strings
            let enum_strings =
              list.filter_map(enum_array, fn(item) {
                decode.run(item, decode.string)
              })

            validate_string_enum(value, enum_strings, def_name)
          }
          None -> Ok(Nil)
        }
      })
    }
  }
}

/// Helper to validate string length (UTF-8 bytes)
fn validate_string_length(
  value: String,
  min_length: Option(Int),
  max_length: Option(Int),
  def_name: String,
) -> Result(Nil, errors.ValidationError) {
  let byte_length = string.byte_size(value)
  constraints.validate_length_constraints(
    def_name,
    byte_length,
    min_length,
    max_length,
    "string",
  )
}

/// Helper to validate grapheme length (visual characters)
fn validate_grapheme_length(
  value: String,
  min_graphemes: Option(Int),
  max_graphemes: Option(Int),
  def_name: String,
) -> Result(Nil, errors.ValidationError) {
  // Count grapheme clusters (visual characters) using Gleam's stdlib
  // This correctly handles Unicode combining characters, emoji, etc.
  let grapheme_count = value |> string.to_graphemes() |> list.length()
  constraints.validate_length_constraints(
    def_name,
    grapheme_count,
    min_graphemes,
    max_graphemes,
    "string (graphemes)",
  )
}

/// Helper to validate string format
fn validate_string_format(
  value: String,
  format: types.StringFormat,
  def_name: String,
) -> Result(Nil, errors.ValidationError) {
  case formats.validate_format(value, format) {
    True -> Ok(Nil)
    False -> {
      let format_name = types.format_to_string(format)
      Error(errors.data_validation(
        def_name <> ": string does not match format '" <> format_name <> "'",
      ))
    }
  }
}

/// Helper to validate string enum
fn validate_string_enum(
  value: String,
  enum_values: List(String),
  def_name: String,
) -> Result(Nil, errors.ValidationError) {
  constraints.validate_enum_constraint(
    def_name,
    value,
    enum_values,
    "string",
    fn(s) { s },
    fn(a, b) { a == b },
  )
}
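
The byte/grapheme split above matters for non-ASCII input: `minLength`/`maxLength` count UTF-8 bytes, while `minGraphemes`/`maxGraphemes` count user-visible characters. A quick illustration of the two counts:

```gleam
import gleam/list
import gleam/string

pub fn counts() {
  // Precomposed "é" (U+00E9): one grapheme, two UTF-8 bytes.
  let assert 2 = string.byte_size("é")
  let assert 1 = "é" |> string.to_graphemes |> list.length
}
```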
+449
-93
src/honk.gleam
···
  // Main public API for the ATProtocol lexicon validator

- import errors.{type ValidationError}
+ @target(erlang)
+ import argv
  import gleam/dict.{type Dict}
+ import gleam/dynamic
+ import gleam/dynamic/decode
+ @target(erlang)
+ import gleam/int
+ @target(erlang)
+ import gleam/io
  import gleam/json.{type Json}
+ import gleam/list
  import gleam/option.{None, Some}
  import gleam/result
+ import gleam/string
+ import honk/errors
  import honk/internal/json_helpers
- import types
- import validation/context
- import validation/formats
+ import honk/types
+ import honk/validation/context
+ import honk/validation/formats
+ @target(erlang)
+ import simplifile

  // Import validators
- import validation/field as validation_field
- import validation/field/reference as validation_field_reference
- import validation/field/union as validation_field_union
- import validation/meta/token as validation_meta_token
- import validation/meta/unknown as validation_meta_unknown
- import validation/primary/params as validation_primary_params
- import validation/primary/procedure as validation_primary_procedure
- import validation/primary/query as validation_primary_query
- import validation/primary/record as validation_primary_record
- import validation/primary/subscription as validation_primary_subscription
- import validation/primitive/blob as validation_primitive_blob
- import validation/primitive/boolean as validation_primitive_boolean
- import validation/primitive/bytes as validation_primitive_bytes
- import validation/primitive/cid_link as validation_primitive_cid_link
- import validation/primitive/integer as validation_primitive_integer
- import validation/primitive/null as validation_primitive_null
- import validation/primitive/string as validation_primitive_string
-
- // Re-export core types
- pub type LexiconDoc =
-   types.LexiconDoc
-
- pub type StringFormat {
-   DateTime
-   Uri
-   AtUri
-   Did
-   Handle
-   AtIdentifier
-   Nsid
-   Cid
-   Language
-   Tid
-   RecordKey
- }
+ import honk/validation/field as validation_field
+ import honk/validation/field/reference as validation_field_reference
+ import honk/validation/field/union as validation_field_union
+ import honk/validation/meta/token as validation_meta_token
+ import honk/validation/meta/unknown as validation_meta_unknown
+ import honk/validation/primary/params as validation_primary_params
+ import honk/validation/primary/procedure as validation_primary_procedure
+ import honk/validation/primary/query as validation_primary_query
+ import honk/validation/primary/record as validation_primary_record
+ import honk/validation/primary/subscription as validation_primary_subscription
+ import honk/validation/primitive/blob as validation_primitive_blob
+ import honk/validation/primitive/boolean as validation_primitive_boolean
+ import honk/validation/primitive/bytes as validation_primitive_bytes
+ import honk/validation/primitive/cid_link as validation_primitive_cid_link
+ import honk/validation/primitive/integer as validation_primitive_integer
+ import honk/validation/primitive/null as validation_primitive_null
+ import honk/validation/primitive/string as validation_primitive_string

- pub type ValidationContext =
-   context.ValidationContext
+ // Re-export error type for public API error handling
+ pub type ValidationError =
+   errors.ValidationError

- /// Main validation function for lexicon documents
- /// Returns Ok(Nil) if all lexicons are valid
- /// Returns Error with a map of lexicon ID to list of error messages
+ /// Validates lexicon documents
+ ///
+ /// Validates lexicon structure (id, defs) and ALL definitions within each lexicon.
+ /// Each definition in the defs object is validated according to its type.
+ ///
+ /// Returns Ok(Nil) if all lexicons and their definitions are valid.
+ /// Returns Error with a map of lexicon ID to list of error messages.
+ /// Error messages include the definition name (e.g., "lex.id#defName: error").
  pub fn validate(lexicons: List(Json)) -> Result(Nil, Dict(String, List(String))) {
    // Build validation context
    let builder_result =
···
      Ok(builder) ->
        case context.build(builder) {
          Ok(ctx) -> {
-           // Validate each lexicon's main definition
+           // Validate ALL definitions in each lexicon
            let error_map =
              dict.fold(ctx.lexicons, dict.new(), fn(errors, lex_id, lexicon) {
-               // Validate the main definition if it exists
-               case json_helpers.get_field(lexicon.defs, "main") {
-                 Some(main_def) -> {
-                   let lex_ctx = context.with_current_lexicon(ctx, lex_id)
-                   case validate_definition(main_def, lex_ctx) {
-                     Ok(_) -> errors
-                     Error(e) ->
-                       dict.insert(errors, lex_id, [errors.to_string(e)])
+               // Get all definition names from the defs object
+               let def_keys = json_helpers.get_keys(lexicon.defs)
+               let lex_ctx = context.with_current_lexicon(ctx, lex_id)
+
+               // Validate each definition
+               list.fold(def_keys, errors, fn(errors_acc, def_name) {
+                 case json_helpers.get_field(lexicon.defs, def_name) {
+                   Some(def) -> {
+                     case validate_definition(def, lex_ctx) {
+                       Ok(_) -> errors_acc
+                       Error(e) -> {
+                         // Include def name in error for better context
+                         // Extract just the message without wrapper text
+                         let message = case e {
+                           errors.InvalidSchema(msg) -> msg
+                           errors.DataValidation(msg) -> msg
+                           errors.LexiconNotFound(msg) ->
+                             "Lexicon not found: " <> msg
+                         }
+                         // Clean up leading ": " if present
+                         let clean_message = case
+                           string.starts_with(message, ": ")
+                         {
+                           True -> string.drop_start(message, 2)
+                           False -> message
+                         }
+                         let error_msg =
+                           lex_id <> "#" <> def_name <> ": " <> clean_message
+                         case dict.get(errors_acc, lex_id) {
+                           Ok(existing_errors) ->
+                             dict.insert(errors_acc, lex_id, [
+                               error_msg,
+                               ..existing_errors
+                             ])
+                           Error(_) ->
+                             dict.insert(errors_acc, lex_id, [error_msg])
+                         }
+                       }
+                     }
                    }
+                   None -> errors_acc
                  }
-               None -> errors
-               // No main definition is OK
-             }
+               })
              })

            case dict.is_empty(error_map) {
···
    }
  }

- /// Validates a single data record against a collection schema
- pub fn validate_record(
+ /// Validation context type (re-exported for external use)
+ pub type ValidationContext =
+   context.ValidationContext
+
+ /// Build a reusable validation context from lexicons
+ /// Call this once, then use validate_record_with_context for each record
+ pub fn build_validation_context(
    lexicons: List(Json),
-   collection: String,
-   record: Json,
- ) -> Result(Nil, ValidationError) {
-   // Build validation context
+ ) -> Result(ValidationContext, ValidationError) {
    let builder_result =
      context.builder()
      |> context.with_lexicons(lexicons)

    use builder <- result.try(builder_result)
-   use ctx <- result.try(context.build(builder))
+   context.build(builder)
+ }

+ /// Validates a single data record against a collection schema using pre-built context
+ /// This is much faster when validating many records - build context once with
+ /// build_validation_context, then call this for each record
+ pub fn validate_record_with_context(
+   ctx: ValidationContext,
+   collection: String,
+   record: Json,
+ ) -> Result(Nil, ValidationError) {
    // Get the lexicon for this collection
    case context.get_lexicon(ctx, collection) {
      Some(lexicon) -> {
···
        case json_helpers.get_field(lexicon.defs, "main") {
          Some(main_def) -> {
            let lex_ctx = context.with_current_lexicon(ctx, collection)
+           // Set the path to include the definition name
+           let def_ctx = context.with_path(lex_ctx, "defs.main")
            // Validate the record data against the main definition
-           validation_primary_record.validate_data(record, main_def, lex_ctx)
+           validation_primary_record.validate_data(record, main_def, def_ctx)
          }
          None ->
            Error(errors.invalid_schema(
···
    }
  }

+ /// Validates a single data record against a collection schema
+ pub fn validate_record(
+   lexicons: List(Json),
+   collection: String,
+   record: Json,
+ ) -> Result(Nil, ValidationError) {
+   // Build validation context
+   use ctx <- result.try(build_validation_context(lexicons))
+   validate_record_with_context(ctx, collection, record)
+ }
+
  /// Validates NSID format
  pub fn is_valid_nsid(nsid: String) -> Bool {
    formats.is_valid_nsid(nsid)
···
  /// Validates a string value against a specific format
  pub fn validate_string_format(
    value: String,
-   format: StringFormat,
+   format: types.StringFormat,
  ) -> Result(Nil, String) {
-   // Convert our StringFormat to types.StringFormat
-   let types_format = case format {
-     DateTime -> types.DateTime
-     Uri -> types.Uri
-     AtUri -> types.AtUri
-     Did -> types.Did
-     Handle -> types.Handle
-     AtIdentifier -> types.AtIdentifier
-     Nsid -> types.Nsid
-     Cid -> types.Cid
-     Language -> types.Language
-     Tid -> types.Tid
-     RecordKey -> types.RecordKey
-   }
-
-   case formats.validate_format(value, types_format) {
+   case formats.validate_format(value, format) {
      True -> Ok(Nil)
      False -> {
-       let format_name = types.format_to_string(types_format)
+       let format_name = types.format_to_string(format)
        Error("Value does not match format: " <> format_name)
      }
    }
  }

- /// Entry point for the honk lexicon validator.
+ /// Convert a Dynamic value to Json
  ///
- /// This function serves as an example entry point and can be used
- /// for basic CLI or testing purposes. For actual validation,
- /// use the `validate()` or `validate_record()` functions.
+ /// This is useful when parsing JSON strings with `json.parse(str, decode.dynamic)`
+ /// and then needing to convert to Json for validation.
  ///
  /// ## Example
+ /// ```gleam
+ /// use dyn <- result.try(json.parse(json_str, decode.dynamic))
+ /// use json_val <- result.try(honk.dynamic_to_json(dyn))
+ /// honk.validate([json_val])
+ /// ```
+ pub fn dynamic_to_json(dyn: dynamic.Dynamic) -> Result(Json, ValidationError) {
+   json_helpers.dynamic_to_json(dyn)
+ }
+
+ /// Parse a JSON string and convert to Json for validation
  ///
+ /// This is a convenience function that combines `json.parse()` and `dynamic_to_json()`.
+ /// It's useful when you have JSON stored as strings (e.g., in a database) and want
+ /// to validate it with honk.
+ ///
+ /// ## Example
  /// ```gleam
- /// import honk
+ /// use json_val <- result.try(honk.parse_json_string(stored_json))
+ /// honk.validate([json_val])
+ /// ```
+ pub fn parse_json_string(json_str: String) -> Result(Json, ValidationError) {
+   use dyn <- result.try(
+     json.parse(json_str, decode.dynamic)
+     |> result.map_error(fn(_) {
+       errors.invalid_schema("Failed to parse JSON string")
+     }),
+   )
+   dynamic_to_json(dyn)
+ }
+
+ /// Parse multiple JSON strings and convert to Json for validation
+ ///
+ /// This is a convenience function for batch parsing JSON strings.
  ///
- /// pub fn main() {
- ///   honk.main()
- /// }
+ /// ## Example
+ /// ```gleam
+ /// use json_vals <- result.try(honk.parse_json_strings(stored_jsons))
+ /// honk.validate(json_vals)
  /// ```
+ pub fn parse_json_strings(
+   json_strs: List(String),
+ ) -> Result(List(Json), ValidationError) {
+   json_strs
+   |> list.try_map(parse_json_string)
+   |> result.map_error(fn(_) {
+     errors.invalid_schema("Failed to parse JSON strings")
+   })
+ }
+
+ @target(erlang)
+ /// CLI entry point for the honk lexicon validator
+ ///
+ /// Usage:
+ ///   gleam run -m honk check <path>
+ ///   gleam run -m honk help
  pub fn main() -> Nil {
-   // This would typically be called from tests or CLI
-   let _example_result = is_valid_nsid("com.example.record")
-   Nil
+   case argv.load().arguments {
+     ["check", path] -> validate_path(path)
+     ["help"] | [] -> show_help()
+     _ -> {
+       io.println_error("Unknown command. Use 'help' for usage information.")
+       Nil
+     }
+   }
+ }
+
+ @target(erlang)
+ /// Validate a path (auto-detects file or directory)
+ fn validate_path(path: String) -> Nil {
+   case simplifile.is_file(path) {
+     Ok(True) -> validate_file(path)
+     Ok(False) ->
+       case simplifile.is_directory(path) {
+         Ok(True) -> validate_directory(path)
+         Ok(False) -> {
+           io.println_error(
+             "Error: Path is neither a file nor a directory: " <> path,
+           )
+           Nil
+         }
+         Error(_) -> {
+           io.println_error("Error: Cannot access path: " <> path)
+           Nil
+         }
+       }
+     Error(_) -> {
+       io.println_error("Error: Cannot access path: " <> path)
+       Nil
+     }
+   }
+ }
+
+ @target(erlang)
+ /// Validate a single lexicon file
+ fn validate_file(file_path: String) -> Nil {
+   case read_and_validate_file(file_path) {
+     Ok(_) -> {
+       io.println("✓ " <> file_path <> " - valid")
+       Nil
+     }
+     Error(msg) -> {
+       io.println_error("✗ " <> file_path)
+       io.println_error("  " <> msg)
+       Nil
+     }
+   }
+ }
+
+ @target(erlang)
+ /// Validate all .json files in a directory
+ fn validate_directory(dir_path: String) -> Nil {
+   case simplifile.get_files(dir_path) {
+     Error(_) -> {
+       io.println_error("Error: Cannot read directory: " <> dir_path)
+       Nil
+     }
+     Ok(all_files) -> {
+       // Filter for .json files
+       let json_files =
+         all_files
+         |> list.filter(fn(path) { string.ends_with(path, ".json") })
+
+       case json_files {
+         [] -> {
+           io.println("No .json files found in " <> dir_path)
+           Nil
+         }
+         files -> {
+           // Read and parse all files
+           let file_results =
+             files
+             |> list.map(fn(file) {
+               case read_json_file(file) {
+                 Ok(json_value) -> #(file, Ok(json_value))
+                 Error(msg) -> #(file, Error(msg))
+               }
+             })
+
+           // Separate successful parses from failures
+           let #(parse_errors, parsed_files) =
+             list.partition(file_results, fn(result) {
+               case result {
+                 #(_, Error(_)) -> True
+                 #(_, Ok(_)) -> False
+               }
+             })
+
+           // Display parse errors
+           parse_errors
+           |> list.each(fn(result) {
+             case result {
+               #(file, Error(msg)) -> {
+                 io.println_error("✗ " <> file)
+                 io.println_error("  " <> msg)
+               }
+               _ -> Nil
+             }
+           })
+
+           // Get all successfully parsed lexicons
+           let lexicons =
+             parsed_files
+             |> list.filter_map(fn(result) {
+               case result {
+                 #(_, Ok(json)) -> Ok(json)
+                 _ -> Error(Nil)
+               }
+             })
+
+           // Validate all lexicons together (allows cross-lexicon references)
+           case validate(lexicons) {
+             Ok(_) -> {
+               // All lexicons are valid
+               parsed_files
+               |> list.each(fn(result) {
+                 case result {
+                   #(file, Ok(_)) -> io.println("✓ " <> file)
+                   _ -> Nil
+                 }
+               })
+             }
+             Error(error_map) -> {
+               // Some lexicons have errors - map errors back to files
+               parsed_files
+               |> list.each(fn(result) {
+                 case result {
+                   #(file, Ok(json)) -> {
+                     // Get the lexicon ID for this file
+                     case json_helpers.get_string(json, "id") {
+                       Some(lex_id) -> {
+                         case dict.get(error_map, lex_id) {
+                           Ok(errors) -> {
+                             io.println_error("✗ " <> file)
+                             errors
+                             |> list.each(fn(err) {
+                               io.println_error("  " <> err)
+                             })
+                           }
+                           Error(_) -> io.println("✓ " <> file)
+                         }
+                       }
+                       None -> {
+                         io.println_error("✗ " <> file)
+                         io.println_error("  Missing lexicon id")
+                       }
+                     }
+                   }
+                   _ -> Nil
+                 }
+               })
+             }
+           }
+
+           // Summary
+           let total = list.length(files)
+           let parse_error_count = list.length(parse_errors)
+           let validation_error_count = case validate(lexicons) {
+             Ok(_) -> 0
+             Error(error_map) -> dict.size(error_map)
+           }
+           let total_errors = parse_error_count + validation_error_count
+
+           case total_errors {
+             0 ->
+               io.println(
+                 "\nAll "
+                 <> int.to_string(total)
+                 <> " schemas validated successfully.",
+               )
+             _ ->
+               io.println_error(
+                 "\n"
+                 <> int.to_string(total_errors)
+                 <> " of "
+                 <> int.to_string(total)
+                 <> " schemas failed validation.",
+               )
+           }
+
+           Nil
+         }
+       }
+     }
+   }
+ }
+
+ @target(erlang)
+ /// Read and parse a JSON file (without validation)
+ fn read_json_file(file_path: String) -> Result(Json, String) {
+   use content <- result.try(
+     simplifile.read(file_path)
+     |> result.map_error(fn(_) { "Cannot read file" }),
+   )
+
+   use json_dynamic <- result.try(
+     json.parse(content, decode.dynamic)
+     |> result.map_error(fn(_) { "Invalid JSON" }),
+   )
+
+   json_helpers.dynamic_to_json(json_dynamic)
+   |> result.map_error(fn(_) { "Failed to convert JSON" })
+ }
+
+ @target(erlang)
+ /// Read a file and validate it as a lexicon
+ fn read_and_validate_file(file_path: String) -> Result(Nil, String) {
+   use content <- result.try(
+     simplifile.read(file_path)
+     |> result.map_error(fn(_) { "Cannot read file" }),
+   )
+
+   use json_dynamic <- result.try(
+     json.parse(content, decode.dynamic)
+     |> result.map_error(fn(_) { "Invalid JSON" }),
+   )
+
+   use json_value <- result.try(
+     json_helpers.dynamic_to_json(json_dynamic)
+     |> result.map_error(fn(_) { "Failed to convert JSON" }),
+   )
+
+   use _ <- result.try(
+     validate([json_value])
+     |> result.map_error(fn(error_map) { format_validation_errors(error_map) }),
+   )
+
+   Ok(Nil)
+ }
+
+ @target(erlang)
+ /// Format validation errors from the error map
+ fn format_validation_errors(error_map: Dict(String, List(String))) -> String {
+   error_map
+   |> dict.to_list
+   |> list.map(fn(entry) {
+     let #(_key, errors) = entry
+     string.join(errors, "\n  ")
+   })
+   |> string.join("\n  ")
+ }
+
+ @target(erlang)
+ /// Show help text
+ fn show_help() -> Nil {
+   io.println(
+     "
+ honk - ATProtocol Lexicon Validator
+
+ USAGE:
+   gleam run -m honk check <path>
+   gleam run -m honk help
+
+ COMMANDS:
+   check <path>   Check a lexicon file or directory
+                  - If <path> is a file: validates that single lexicon
+                  - If <path> is a directory: recursively validates all .json files
+
+   help           Show this help message
+
+ EXAMPLES:
+   gleam run -m honk check ./lexicons/xyz/statusphere/status.json
+   gleam run -m honk check ./lexicons
+
+ VALIDATION:
+   - Validates lexicon structure (id, defs)
+   - Validates ALL definitions in each lexicon
+   - Checks types, constraints, and references
+   - Reports errors with definition context (lex.id#defName)
+ ",
+   )
  }
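
A sketch of the batch-validation flow the new `build_validation_context` / `validate_record_with_context` pair enables (the collection NSID is a placeholder):

```gleam
import gleam/json.{type Json}
import gleam/list
import gleam/result
import honk

pub fn validate_many(
  lexicons: List(Json),
  records: List(Json),
) -> Result(Nil, honk.ValidationError) {
  // Build the context once, then reuse it for every record in the batch.
  use ctx <- result.try(honk.build_validation_context(lexicons))
  list.try_each(records, fn(record) {
    // "com.example.post" is a hypothetical collection NSID.
    honk.validate_record_with_context(ctx, "com.example.post", record)
  })
}
```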
+3
src/honk_bundle.mjs
-58
src/types.gleam
···
// Core types for AT Protocol lexicon validation

import gleam/json.{type Json}

/// Represents a parsed lexicon document
pub type LexiconDoc {
  LexiconDoc(id: String, defs: Json)
}

/// AT Protocol string formats
pub type StringFormat {
  DateTime
  Uri
  AtUri
  Did
  Handle
  AtIdentifier
  Nsid
  Cid
  Language
  Tid
  RecordKey
}

/// Convert a string to a StringFormat
pub fn string_to_format(s: String) -> Result(StringFormat, Nil) {
  case s {
    "datetime" -> Ok(DateTime)
    "uri" -> Ok(Uri)
    "at-uri" -> Ok(AtUri)
    "did" -> Ok(Did)
    "handle" -> Ok(Handle)
    "at-identifier" -> Ok(AtIdentifier)
    "nsid" -> Ok(Nsid)
    "cid" -> Ok(Cid)
    "language" -> Ok(Language)
    "tid" -> Ok(Tid)
    "record-key" -> Ok(RecordKey)
    _ -> Error(Nil)
  }
}

/// Convert a StringFormat to string
pub fn format_to_string(format: StringFormat) -> String {
  case format {
    DateTime -> "datetime"
    Uri -> "uri"
    AtUri -> "at-uri"
    Did -> "did"
    Handle -> "handle"
    AtIdentifier -> "at-identifier"
    Nsid -> "nsid"
    Cid -> "cid"
    Language -> "language"
    Tid -> "tid"
    RecordKey -> "record-key"
  }
}
-332
src/validation/context.gleam
···
// Validation context and builder

import errors.{type ValidationError}
import gleam/dict.{type Dict}
import gleam/json.{type Json}
import gleam/list
import gleam/option.{type Option, None, Some}
import gleam/result
import gleam/set.{type Set}
import gleam/string
import honk/internal/json_helpers
import types.{type LexiconDoc, LexiconDoc}
import validation/formats

/// Validation context that tracks state during validation
pub type ValidationContext {
  ValidationContext(
    // Map of lexicon ID to parsed lexicon document
    lexicons: Dict(String, LexiconDoc),
    // Current path in data structure (for error messages)
    path: String,
    // Current lexicon ID (for resolving local references)
    current_lexicon_id: Option(String),
    // Set of references being resolved (for circular detection)
    reference_stack: Set(String),
    // Recursive validator function for dispatching to type-specific validators
    // Parameters: data (Json), schema (Json), ctx (ValidationContext)
    validator: fn(Json, Json, ValidationContext) -> Result(Nil, ValidationError),
  )
}

/// Builder for constructing ValidationContext
pub type ValidationContextBuilder {
  ValidationContextBuilder(
    lexicons: Dict(String, LexiconDoc),
    // Parameters: data (Json), schema (Json), ctx (ValidationContext)
    validator: Option(
      fn(Json, Json, ValidationContext) -> Result(Nil, ValidationError),
    ),
  )
}

/// Creates a new ValidationContextBuilder with default settings.
///
/// Use this to start building a validation context by chaining with
/// `with_lexicons`, `with_validator`, and `build`.
///
/// ## Example
///
/// ```gleam
/// let assert Ok(ctx) =
///   context.builder()
///   |> context.with_validator(field.dispatch_data_validation)
///   |> context.with_lexicons([my_lexicon])
///   |> context.build
/// ```
pub fn builder() -> ValidationContextBuilder {
  ValidationContextBuilder(lexicons: dict.new(), validator: None)
}

/// Adds a list of lexicon JSON documents to the builder.
///
/// Each lexicon must have an 'id' field (valid NSID) and a 'defs' object
/// containing type definitions. Returns an error if any lexicon is invalid.
///
/// ## Example
///
/// ```gleam
/// let lexicon = json.object([
///   #("lexicon", json.int(1)),
///   #("id", json.string("com.example.post")),
///   #("defs", json.object([...])),
/// ])
///
/// let assert Ok(builder) =
///   context.builder()
///   |> context.with_lexicons([lexicon])
/// ```
pub fn with_lexicons(
  builder: ValidationContextBuilder,
  lexicons: List(Json),
) -> Result(ValidationContextBuilder, ValidationError) {
  // Parse each lexicon and add to the dictionary
  list.try_fold(lexicons, builder, fn(b, lex_json) {
    // Extract id and defs from the lexicon JSON
    case parse_lexicon(lex_json) {
      Ok(lexicon_doc) -> {
        let updated_lexicons =
          dict.insert(b.lexicons, lexicon_doc.id, lexicon_doc)
        Ok(ValidationContextBuilder(..b, lexicons: updated_lexicons))
      }
      Error(e) -> Error(e)
    }
  })
}

/// Set the validator function
/// Parameters: data (Json), schema (Json), ctx (ValidationContext)
pub fn with_validator(
  builder: ValidationContextBuilder,
  validator: fn(Json, Json, ValidationContext) -> Result(Nil, ValidationError),
) -> ValidationContextBuilder {
  ValidationContextBuilder(..builder, validator: Some(validator))
}

/// Builds the final ValidationContext from the builder.
///
/// Creates a no-op validator if none was set via `with_validator`.
/// Returns a ValidationContext ready for validating lexicons and data.
///
/// ## Example
///
/// ```gleam
/// let assert Ok(ctx) =
///   context.builder()
///   |> context.with_validator(field.dispatch_data_validation)
///   |> context.with_lexicons([lexicon])
///   |> context.build
/// ```
pub fn build(
  builder: ValidationContextBuilder,
) -> Result(ValidationContext, ValidationError) {
  // Create a default no-op validator if none is set
  let validator = case builder.validator {
    Some(v) -> v
    None -> fn(_data, _schema, _ctx) { Ok(Nil) }
  }

  Ok(ValidationContext(
    lexicons: builder.lexicons,
    path: "",
    current_lexicon_id: None,
    reference_stack: set.new(),
    validator: validator,
  ))
}

/// Retrieves a lexicon document by its NSID from the validation context.
///
/// Returns `None` if the lexicon is not found. Use this to access
/// lexicon definitions when resolving references.
///
/// ## Example
///
/// ```gleam
/// case context.get_lexicon(ctx, "com.example.post") {
///   Some(lexicon) -> // Use the lexicon
///   None -> // Lexicon not found
/// }
/// ```
pub fn get_lexicon(ctx: ValidationContext, id: String) -> Option(LexiconDoc) {
  case dict.get(ctx.lexicons, id) {
    Ok(lex) -> Some(lex)
    Error(_) -> None
  }
}

/// Returns the current validation path within the data structure.
///
/// The path is used for generating detailed error messages that show
/// exactly where in a nested structure validation failed.
///
/// ## Example
///
/// ```gleam
/// let current_path = context.path(ctx)
/// // Returns something like "defs.post.properties.text"
/// ```
pub fn path(ctx: ValidationContext) -> String {
  ctx.path
}

/// Creates a new context with an updated path segment.
///
/// Used when traversing nested data structures during validation
/// to maintain accurate error location information.
///
/// ## Example
///
/// ```gleam
/// let nested_ctx = context.with_path(ctx, "properties.name")
/// // New path might be "defs.user.properties.name"
/// ```
pub fn with_path(ctx: ValidationContext, segment: String) -> ValidationContext {
  let new_path = case ctx.path {
    "" -> segment
    _ -> ctx.path <> "." <> segment
  }
  ValidationContext(..ctx, path: new_path)
}

/// Returns the ID of the lexicon currently being validated.
///
/// Used for resolving local references (e.g., `#post`) which need to
/// know which lexicon they belong to.
///
/// ## Example
///
/// ```gleam
/// case context.current_lexicon_id(ctx) {
///   Some(id) -> // id is like "com.example.post"
///   None -> // No lexicon context set
/// }
/// ```
pub fn current_lexicon_id(ctx: ValidationContext) -> Option(String) {
  ctx.current_lexicon_id
}

/// Creates a new context with a different current lexicon ID.
///
/// Used when validating cross-lexicon references to set the correct
/// lexicon context for resolving local references.
///
/// ## Example
///
/// ```gleam
/// let ctx_with_lexicon =
///   context.with_current_lexicon(ctx, "com.example.post")
/// ```
pub fn with_current_lexicon(
  ctx: ValidationContext,
  lexicon_id: String,
) -> ValidationContext {
  ValidationContext(..ctx, current_lexicon_id: Some(lexicon_id))
}

/// Adds a reference to the reference stack for circular dependency detection.
///
/// Used internally during reference resolution to track which references
/// are currently being validated. This prevents infinite loops when
/// references form a cycle.
///
/// ## Example
///
/// ```gleam
/// let ctx_with_ref =
///   context.with_reference(ctx, "com.example.post#user")
/// ```
pub fn with_reference(
  ctx: ValidationContext,
  reference: String,
) -> ValidationContext {
  ValidationContext(
    ..ctx,
    reference_stack: set.insert(ctx.reference_stack, reference),
  )
}

/// Checks if a reference is already in the reference stack.
///
/// Returns `True` if the reference is being validated, indicating a
/// circular reference that would cause infinite recursion. Used to
/// detect and prevent circular dependency errors.
///
/// ## Example
///
/// ```gleam
/// case context.has_reference(ctx, "#user") {
///   True -> Error(errors.data_validation("Circular reference detected"))
///   False -> // Safe to proceed with validation
/// }
/// ```
pub fn has_reference(ctx: ValidationContext, reference: String) -> Bool {
  set.contains(ctx.reference_stack, reference)
}

/// Parse a reference string into (lexicon_id, definition)
/// Handles: #def, nsid#def, nsid
pub fn parse_reference(
  ctx: ValidationContext,
  reference: String,
) -> Result(#(String, String), ValidationError) {
  case string.split(reference, "#") {
    // Local reference: #def
    ["", def] ->
      case ctx.current_lexicon_id {
        Some(lex_id) -> Ok(#(lex_id, def))
        None ->
          Error(errors.invalid_schema(
            "Local reference '"
            <> reference
            <> "' used without current lexicon context",
          ))
      }
    // Global reference: nsid#def
    [nsid, def] if nsid != "" && def != "" -> Ok(#(nsid, def))
    // Global main: nsid (implicit #main)
    [nsid] if nsid != "" -> Ok(#(nsid, "main"))
    // Invalid
    _ -> Error(errors.invalid_schema("Invalid reference format: " <> reference))
  }
}

/// Helper to parse a lexicon JSON into LexiconDoc
fn parse_lexicon(lex_json: Json) -> Result(LexiconDoc, ValidationError) {
  // Extract "id" field (required NSID)
  let id_result = case json_helpers.get_string(lex_json, "id") {
    Some(id) -> Ok(id)
    None -> Error(errors.invalid_schema("Lexicon missing required 'id' field"))
  }

  use id <- result.try(id_result)

  // Validate that id is a valid NSID
  use _ <- result.try(case formats.is_valid_nsid(id) {
    True -> Ok(Nil)
    False ->
      Error(errors.invalid_schema(
        "Lexicon 'id' field is not a valid NSID: " <> id,
      ))
  })

  // Extract "defs" field (required object containing definitions)
  let defs_result = case json_helpers.get_field(lex_json, "defs") {
    Some(defs) ->
      case json_helpers.is_object(defs) {
        True -> Ok(defs)
        False ->
          Error(errors.invalid_schema(
            "Lexicon 'defs' must be an object at " <> id,
          ))
      }
    None ->
      Error(errors.invalid_schema(
        "Lexicon missing required 'defs' field at " <> id,
      ))
  }

  use defs <- result.try(defs_result)

  Ok(LexiconDoc(id: id, defs: defs))
}
-176 src/validation/field/reference.gleam

// Reference type validator

import errors.{type ValidationError}
import gleam/json.{type Json}
import gleam/option.{None, Some}
import gleam/result
import gleam/string
import honk/internal/constraints
import honk/internal/json_helpers
import honk/internal/resolution
import validation/context.{type ValidationContext}

const allowed_fields = ["type", "ref", "description"]

/// Validates reference schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "ref",
  ))

  // Validate ref field (required)
  let ref_value = case json_helpers.get_string(schema, "ref") {
    Some(ref_str) -> Ok(ref_str)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": ref missing required 'ref' field",
      ))
  }

  use ref_str <- result.try(ref_value)

  // Validate reference syntax
  validate_ref_syntax(ref_str, def_name)
}

/// Validates data against the referenced schema
/// Uses the validator from the context for recursive validation
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Get the reference string
  use ref_str <- result.try(case json_helpers.get_string(schema, "ref") {
    Some(ref_str) -> Ok(ref_str)
    None ->
      Error(errors.data_validation(
        def_name <> ": ref schema missing 'ref' field",
      ))
  })

  // Check for circular references
  case context.has_reference(ctx, ref_str) {
    True ->
      Error(errors.data_validation(
        def_name <> ": circular reference detected: " <> ref_str,
      ))
    False -> {
      // Add to reference stack
      let ref_ctx = context.with_reference(ctx, ref_str)

      // Get current lexicon ID
      use lex_id <- result.try(case context.current_lexicon_id(ref_ctx) {
        Some(id) -> Ok(id)
        None ->
          Error(errors.data_validation(
            def_name <> ": no current lexicon set for resolving reference",
          ))
      })

      // Resolve the reference to get the target schema
      use resolved_opt <- result.try(resolution.resolve_reference(
        ref_str,
        ref_ctx,
        lex_id,
      ))

      use resolved_schema <- result.try(case resolved_opt {
        Some(schema) -> Ok(schema)
        None ->
          Error(errors.data_validation(
            def_name <> ": reference not found: " <> ref_str,
          ))
      })

      // Recursively validate data against the resolved schema
      // Use the validator from the context
      let validator = ref_ctx.validator
      validator(data, resolved_schema, ref_ctx)
    }
  }
}

/// Validates reference syntax
fn validate_ref_syntax(
  ref_str: String,
  def_name: String,
) -> Result(Nil, ValidationError) {
  case string.is_empty(ref_str) {
    True ->
      Error(errors.invalid_schema(def_name <> ": reference cannot be empty"))
    False -> {
      case string.starts_with(ref_str, "#") {
        True -> {
          // Local reference
          let def_part = string.drop_start(ref_str, 1)
          case string.is_empty(def_part) {
            True ->
              Error(errors.invalid_schema(
                def_name
                <> ": local reference must have a definition name after #",
              ))
            False -> Ok(Nil)
          }
        }
        False -> {
          // Global reference (with or without fragment)
          case string.contains(ref_str, "#") {
            True -> {
              // Global reference with fragment
              validate_global_ref_with_fragment(ref_str, def_name)
            }
            False -> {
              // Global main reference
              // Would validate NSID format here
              Ok(Nil)
            }
          }
        }
      }
    }
  }
}

/// Validates global reference with fragment (e.g., "com.example.lexicon#def")
fn validate_global_ref_with_fragment(
  ref_str: String,
  def_name: String,
) -> Result(Nil, ValidationError) {
  // Split on # and validate both parts
  case string.split(ref_str, "#") {
    [nsid, definition] -> {
      case string.is_empty(nsid) {
        True ->
          Error(errors.invalid_schema(
            def_name <> ": NSID part of reference cannot be empty",
          ))
        False ->
          case string.is_empty(definition) {
            True ->
              Error(errors.invalid_schema(
                def_name
                <> ": definition name part of reference cannot be empty",
              ))
            False -> Ok(Nil)
          }
      }
    }
    _ ->
      Error(errors.invalid_schema(
        def_name <> ": global reference can only contain one # character",
      ))
  }
}
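The three reference shapes `validate_ref_syntax` distinguished are worth spelling out. A sketch, with invented NSIDs:

```gleam
import gleam/json

// The removed syntax check accepted three shapes of "ref" value:
//   "#profile"                    local reference into the same lexicon
//   "com.example.profile"         global reference to another lexicon's main def
//   "com.example.profile#avatar"  global reference with a fragment
// and rejected "", "#", and strings with more than one "#".
pub fn ref_schema() -> json.Json {
  json.object([
    #("type", json.string("ref")),
    #("ref", json.string("com.example.profile#avatar")),
  ])
}
```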
-255 src/validation/field/union.gleam

// Union type validator

import errors.{type ValidationError}
import gleam/dynamic/decode
import gleam/json.{type Json}
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import gleam/string
import honk/internal/constraints
import honk/internal/json_helpers
import validation/context.{type ValidationContext}

const allowed_fields = ["type", "refs", "closed", "description"]

/// Validates union schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "union",
  ))

  // Validate refs field (required)
  let refs = case json_helpers.get_array(schema, "refs") {
    Some(refs_array) -> Ok(refs_array)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": union missing required 'refs' field",
      ))
  }

  use refs_array <- result.try(refs)

  // Validate that all refs are strings
  use _ <- result.try(
    list.index_fold(refs_array, Ok(Nil), fn(acc, ref_item, i) {
      use _ <- result.try(acc)
      case decode.run(ref_item, decode.string) {
        Ok(_) -> Ok(Nil)
        Error(_) ->
          Error(errors.invalid_schema(
            def_name <> ": refs[" <> string.inspect(i) <> "] must be a string",
          ))
      }
    }),
  )

  // Validate closed field if present
  use _ <- result.try(case json_helpers.get_bool(schema, "closed") {
    Some(closed) -> {
      // If closed is true and refs is empty, that's invalid
      case closed && list.is_empty(refs_array) {
        True ->
          Error(errors.invalid_schema(
            def_name <> ": union cannot be closed with empty refs array",
          ))
        False -> Ok(Nil)
      }
    }
    None -> Ok(Nil)
  })

  // Empty refs array is only allowed for open unions
  case list.is_empty(refs_array) {
    True -> {
      case json_helpers.get_bool(schema, "closed") {
        Some(True) ->
          Error(errors.invalid_schema(
            def_name <> ": union cannot have empty refs array when closed=true",
          ))
        _ -> Ok(Nil)
      }
    }
    False -> Ok(Nil)
  }
  // Note: Full implementation would validate that each reference can be resolved
}

/// Validates union data against schema
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Union data must be an object
  case json_helpers.is_object(data) {
    False -> {
      let type_name = get_type_name(data)
      Error(errors.data_validation(
        def_name
        <> ": union data must be an object which includes the \"$type\" property, found "
        <> type_name,
      ))
    }
    True -> {
      // Check for $type discriminator field
      let type_field = case json_helpers.get_string(data, "$type") {
        Some(type_name) -> Ok(type_name)
        None ->
          Error(errors.data_validation(
            def_name
            <> ": union data must be an object which includes the \"$type\" property",
          ))
      }

      use type_name <- result.try(type_field)

      // Get the union's referenced types
      let refs = case json_helpers.get_array(schema, "refs") {
        Some(refs_array) -> Ok(refs_array)
        None ->
          Error(errors.data_validation(
            def_name <> ": union schema missing or invalid 'refs' field",
          ))
      }

      use refs_array <- result.try(refs)

      case list.is_empty(refs_array) {
        True ->
          Error(errors.data_validation(
            def_name <> ": union schema has empty refs array",
          ))
        False -> {
          // Convert refs to strings
          let ref_strings =
            list.filter_map(refs_array, fn(r) { decode.run(r, decode.string) })

          // Check if the $type matches any of the refs
          case
            list.find(ref_strings, fn(ref_str) {
              refs_contain_type(ref_str, type_name)
            })
          {
            Ok(_matching_ref) -> {
              // Found matching ref
              // In full implementation, would validate against the resolved schema
              Ok(Nil)
            }
            Error(Nil) -> {
              // No matching ref found
              // Check if union is closed
              let is_closed = case json_helpers.get_bool(schema, "closed") {
                Some(closed) -> closed
                None -> False
              }

              case is_closed {
                True -> {
                  // Closed union - reject unknown types
                  Error(errors.data_validation(
                    def_name
                    <> ": union data $type must be one of "
                    <> string.join(ref_strings, ", ")
                    <> ", found '"
                    <> type_name
                    <> "'",
                  ))
                }
                False -> {
                  // Open union - allow unknown types
                  Ok(Nil)
                }
              }
            }
          }
        }
      }
    }
  }
}

/// Checks if refs array contains the given type
/// Based on AT Protocol's refsContainType logic - handles both explicit and implicit #main
fn refs_contain_type(reference: String, type_name: String) -> Bool {
  // Direct match
  case reference == type_name {
    True -> True
    False -> {
      // Handle local reference patterns (#ref)
      case string.starts_with(reference, "#") {
        True -> {
          let ref_name = string.drop_start(reference, 1)
          // Match bare name against local ref
          case type_name == ref_name {
            True -> True
            False -> {
              // Match full NSID#fragment against local ref
              string.ends_with(type_name, "#" <> ref_name)
            }
          }
        }
        False -> {
          // Handle implicit #main patterns
          case string.ends_with(type_name, "#main") {
            True -> {
              // Remove "#main"
              let base_type = string.drop_end(type_name, 5)
              reference == base_type
            }
            False -> {
              // type_name has no fragment, check if ref is the #main version
              case string.contains(type_name, "#") {
                True -> False
                False -> {
                  let main_ref = type_name <> "#main"
                  reference == main_ref
                }
              }
            }
          }
        }
      }
    }
  }
}

/// Helper to get type name for error messages
fn get_type_name(data: Json) -> String {
  case json_helpers.is_null(data) {
    True -> "null"
    False ->
      case json_helpers.is_bool(data) {
        True -> "boolean"
        False ->
          case json_helpers.is_int(data) {
            True -> "number"
            False ->
              case json_helpers.is_string(data) {
                True -> "string"
                False ->
                  case json_helpers.is_array(data) {
                    True -> "array"
                    False ->
                      case json_helpers.is_object(data) {
                        True -> "object"
                        False -> "unknown"
                      }
                  }
              }
          }
      }
  }
}
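The matching rules in `refs_contain_type` are the subtle part: a local `#name` ref matches both a bare name and any `nsid#name`, and an NSID matches its implicit `#main` form in either direction. A sketch of expected outcomes, traced against the branches above (identifiers invented):

```gleam
// Each tuple is #(reference in "refs", "$type" in the data, expected match),
// mirroring the branch logic of the removed refs_contain_type.
pub fn union_match_examples() -> List(#(String, String, Bool)) {
  [
    #("#image", "image", True),
    #("#image", "com.example.post#image", True),
    #("com.example.post", "com.example.post#main", True),
    #("com.example.post#main", "com.example.post", True),
    #("#image", "com.example.post#video", False),
  ]
}
```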
-598 src/validation/field.gleam

// Field type validators (object and array)

import errors.{type ValidationError}
import gleam/dict
import gleam/dynamic.{type Dynamic}
import gleam/dynamic/decode
import gleam/int
import gleam/json.{type Json}
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import honk/internal/constraints
import honk/internal/json_helpers
import validation/context.{type ValidationContext}

// Import primitive validators
import validation/primitive/blob
import validation/primitive/boolean
import validation/primitive/bytes
import validation/primitive/cid_link
import validation/primitive/integer
import validation/primitive/null
import validation/primitive/string

// Import other field validators
import validation/field/reference
import validation/field/union

// Import meta validators
import validation/meta/token
import validation/meta/unknown

// ============================================================================
// SHARED TYPE DISPATCHER
// ============================================================================

/// Dispatch schema validation based on type field
/// Handles all primitive and field types
fn dispatch_schema_validation(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  case json_helpers.get_string(schema, "type") {
    Some("string") -> string.validate_schema(schema, ctx)
    Some("integer") -> integer.validate_schema(schema, ctx)
    Some("boolean") -> boolean.validate_schema(schema, ctx)
    Some("bytes") -> bytes.validate_schema(schema, ctx)
    Some("blob") -> blob.validate_schema(schema, ctx)
    Some("cid-link") -> cid_link.validate_schema(schema, ctx)
    Some("null") -> null.validate_schema(schema, ctx)
    Some("object") -> validate_object_schema(schema, ctx)
    Some("array") -> validate_array_schema(schema, ctx)
    Some("union") -> union.validate_schema(schema, ctx)
    Some("ref") -> reference.validate_schema(schema, ctx)
    Some("token") -> token.validate_schema(schema, ctx)
    Some("unknown") -> unknown.validate_schema(schema, ctx)
    Some(unknown_type) ->
      Error(errors.invalid_schema(
        context.path(ctx) <> ": unknown type '" <> unknown_type <> "'",
      ))
    None ->
      Error(errors.invalid_schema(
        context.path(ctx) <> ": schema missing type field",
      ))
  }
}

/// Dispatches data validation to the appropriate type-specific validator.
///
/// This is the central dispatcher that routes validation based on the schema's
/// `type` field. Handles all primitive types (string, integer, boolean, etc.),
/// field types (object, array, union, ref), and meta types (token, unknown).
///
/// Made public to allow reference validators to recursively validate resolved
/// schemas. Typically set as the validator function in ValidationContext via
/// `context.with_validator(field.dispatch_data_validation)`.
///
/// ## Example
///
/// ```gleam
/// let schema = json.object([
///   #("type", json.string("string")),
///   #("maxLength", json.int(100)),
/// ])
/// let data = json.string("Hello")
///
/// field.dispatch_data_validation(data, schema, ctx)
/// // => Ok(Nil) if valid, Error(...) if invalid
/// ```
pub fn dispatch_data_validation(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  case json_helpers.get_string(schema, "type") {
    Some("string") -> string.validate_data(data, schema, ctx)
    Some("integer") -> integer.validate_data(data, schema, ctx)
    Some("boolean") -> boolean.validate_data(data, schema, ctx)
    Some("bytes") -> bytes.validate_data(data, schema, ctx)
    Some("blob") -> blob.validate_data(data, schema, ctx)
    Some("cid-link") -> cid_link.validate_data(data, schema, ctx)
    Some("null") -> null.validate_data(data, schema, ctx)
    Some("object") -> validate_object_data(data, schema, ctx)
    Some("array") -> validate_array_data(data, schema, ctx)
    Some("union") -> union.validate_data(data, schema, ctx)
    Some("ref") -> reference.validate_data(data, schema, ctx)
    Some("token") -> token.validate_data(data, schema, ctx)
    Some("unknown") -> unknown.validate_data(data, schema, ctx)
    Some(unknown_type) ->
      Error(errors.data_validation(
        "Unknown schema type '"
        <> unknown_type
        <> "' at '"
        <> context.path(ctx)
        <> "'",
      ))
    None ->
      Error(errors.data_validation(
        "Schema missing type field at '" <> context.path(ctx) <> "'",
      ))
  }
}

// ============================================================================
// OBJECT VALIDATOR
// ============================================================================

const object_allowed_fields = [
  "type", "properties", "required", "nullable", "description",
]

/// Validates object schema definition
pub fn validate_object_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    object_allowed_fields,
    "object",
  ))

  // Validate properties structure
  let properties = case json_helpers.get_array(schema, "properties") {
    Some(_) ->
      Error(errors.invalid_schema(
        def_name <> ": properties must be an object, not an array",
      ))
    None ->
      case json_helpers.is_object(schema) {
        True -> Ok(None)
        False -> Ok(None)
      }
  }

  use _ <- result.try(properties)

  // Validate required fields reference existing properties
  use _ <- result.try(case json_helpers.get_array(schema, "required") {
    Some(required_array) -> validate_required_fields(def_name, required_array)
    None -> Ok(Nil)
  })

  // Validate nullable fields reference existing properties
  use _ <- result.try(case json_helpers.get_array(schema, "nullable") {
    Some(nullable_array) -> validate_nullable_fields(def_name, nullable_array)
    None -> Ok(Nil)
  })

  // Validate each property schema recursively
  case json_helpers.get_field(schema, "properties") {
    Some(properties) -> {
      case json_helpers.is_object(properties) {
        True -> {
          // Get property map and validate each property schema
          validate_property_schemas(properties, ctx)
        }
        False -> Ok(Nil)
      }
    }
    None -> Ok(Nil)
  }
}

/// Validates object data against schema
pub fn validate_object_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Check data is an object
  case json_helpers.is_object(data) {
    False -> {
      let type_name = get_type_name(data)
      Error(errors.data_validation(
        "Expected object at '" <> def_name <> "', found " <> type_name,
      ))
    }
    True -> {
      // Check required fields are present
      use _ <- result.try(case json_helpers.get_array(schema, "required") {
        Some(required_array) ->
          validate_required_fields_in_data(def_name, required_array, data)
        None -> Ok(Nil)
      })

      // Get nullable fields for lookup
      let nullable_fields = case json_helpers.get_array(schema, "nullable") {
        Some(nullable_array) ->
          list.filter_map(nullable_array, fn(item) {
            decode.run(item, decode.string)
          })
        None -> []
      }

      // Validate each property in data against its schema
      case json_helpers.get_field(schema, "properties") {
        Some(properties) -> {
          validate_properties_data(data, properties, nullable_fields, ctx)
        }
        None -> Ok(Nil)
      }
    }
  }
}

/// Helper to validate required fields exist in properties
fn validate_required_fields(
  def_name: String,
  required: List(Dynamic),
) -> Result(Nil, ValidationError) {
  // Convert dynamics to strings
  let field_names =
    list.filter_map(required, fn(item) { decode.run(item, decode.string) })

  // Each required field should be validated against properties
  // Simplified: just check they're strings
  case list.length(field_names) == list.length(required) {
    True -> Ok(Nil)
    False ->
      Error(errors.invalid_schema(
        def_name <> ": required fields must be strings",
      ))
  }
}

/// Helper to validate nullable fields exist in properties
fn validate_nullable_fields(
  def_name: String,
  nullable: List(Dynamic),
) -> Result(Nil, ValidationError) {
  // Convert dynamics to strings
  let field_names =
    list.filter_map(nullable, fn(item) { decode.run(item, decode.string) })

  // Each nullable field should be validated against properties
  // Simplified: just check they're strings
  case list.length(field_names) == list.length(nullable) {
    True -> Ok(Nil)
    False ->
      Error(errors.invalid_schema(
        def_name <> ": nullable fields must be strings",
      ))
  }
}

/// Helper to validate required fields are present in data
fn validate_required_fields_in_data(
  def_name: String,
  required: List(Dynamic),
  data: Json,
) -> Result(Nil, ValidationError) {
  // Convert dynamics to strings
  let field_names =
    list.filter_map(required, fn(item) { decode.run(item, decode.string) })

  // Check each required field exists in data
  list.try_fold(field_names, Nil, fn(_, field_name) {
    case json_helpers.get_string(data, field_name) {
      Some(_) -> Ok(Nil)
      None ->
        // Field might not be a string, check if it exists at all
        // Simplified: just report missing
        Error(errors.data_validation(
          def_name <> ": required field '" <> field_name <> "' is missing",
        ))
    }
  })
}

/// Validates all property schemas in an object
fn validate_property_schemas(
  properties: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Convert JSON object to dict and validate each property
  case json_helpers.json_to_dict(properties) {
    Ok(props_dict) -> {
      dict.fold(props_dict, Ok(Nil), fn(acc, prop_name, prop_schema_dyn) {
        use _ <- result.try(acc)
        // Convert dynamic to Json
        case json_helpers.dynamic_to_json(prop_schema_dyn) {
          Ok(prop_schema) -> {
            let nested_ctx = context.with_path(ctx, "properties." <> prop_name)
            validate_single_property_schema(prop_schema, nested_ctx)
          }
          Error(e) -> Error(e)
        }
      })
    }
    Error(e) -> Error(e)
  }
}

/// Dispatch validation to appropriate validator based on type
fn validate_single_property_schema(
  prop_schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  dispatch_schema_validation(prop_schema, ctx)
}

/// Validates all properties in data against their schemas
fn validate_properties_data(
  data: Json,
  properties: Json,
  nullable_fields: List(String),
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Convert data to dict
  case json_helpers.json_to_dict(data) {
    Ok(data_dict) -> {
      // Convert properties schema to dict
      case json_helpers.json_to_dict(properties) {
        Ok(props_dict) -> {
          // Iterate through data fields
          dict.fold(data_dict, Ok(Nil), fn(acc, field_name, field_value) {
            use _ <- result.try(acc)
            // Check if field has a schema definition
            case dict.get(props_dict, field_name) {
              Ok(field_schema_dyn) -> {
                // Convert dynamic schema to Json
                case json_helpers.dynamic_to_json(field_schema_dyn) {
                  Ok(field_schema) -> {
                    let nested_ctx = context.with_path(ctx, field_name)
                    // Check for null values
                    case json_helpers.is_null_dynamic(field_value) {
                      True -> {
                        // Check if field is nullable
                        case list.contains(nullable_fields, field_name) {
                          True -> Ok(Nil)
                          False ->
                            Error(errors.data_validation(
                              "Field '"
                              <> field_name
                              <> "' at '"
                              <> context.path(ctx)
                              <> "' cannot be null",
                            ))
                        }
                      }
                      False -> {
                        // Validate field data against schema
                        case json_helpers.dynamic_to_json(field_value) {
                          Ok(field_value_json) ->
                            validate_single_property_data(
                              field_value_json,
                              field_schema,
                              nested_ctx,
                            )
                          Error(e) -> Error(e)
                        }
                      }
                    }
                  }
                  Error(e) -> Error(e)
                }
              }
              Error(_) -> {
                // Unknown fields are allowed in objects (open schema)
                Ok(Nil)
              }
            }
          })
        }
        Error(e) -> Error(e)
      }
    }
    Error(e) -> Error(e)
  }
}

/// Dispatch data validation to appropriate validator based on type
fn validate_single_property_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  dispatch_data_validation(data, schema, ctx)
}

// ============================================================================
// ARRAY VALIDATOR
// ============================================================================

const array_allowed_fields = [
  "type", "items", "minLength", "maxLength", "description",
]

/// Validates array schema definition
pub fn validate_array_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    array_allowed_fields,
    "array",
  ))

  // Validate required 'items' field
  let items = case json_helpers.get_field(schema, "items") {
    Some(items_value) -> Ok(items_value)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": array missing required 'items' field",
      ))
  }

  use items_schema <- result.try(items)

  // Recursively validate the items schema definition
  let nested_ctx = context.with_path(ctx, ".items")
  use _ <- result.try(validate_array_item_schema(items_schema, nested_ctx))

  // Validate length constraints
  let min_length = json_helpers.get_int(schema, "minLength")
  let max_length = json_helpers.get_int(schema, "maxLength")

  // Validate that minLength/maxLength are consistent
  use _ <- result.try(constraints.validate_length_constraint_consistency(
    def_name,
    min_length,
    max_length,
    "array",
  ))

  Ok(Nil)
}

/// Validates array data against schema
pub fn validate_array_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Data must be an array
  case json_helpers.is_array(data) {
    False -> {
      let type_name = get_type_name(data)
      Error(errors.data_validation(
        def_name <> ": expected array, found " <> type_name,
      ))
    }
    True -> {
      // Get array from data
      let data_array = case json_helpers.get_array_from_value(data) {
        Some(arr) -> Ok(arr)
        None ->
          Error(errors.data_validation(def_name <> ": failed to parse array"))
      }

      use arr <- result.try(data_array)

      let array_length = list.length(arr)

      // Validate minLength constraint
      use _ <- result.try(case json_helpers.get_int(schema, "minLength") {
        Some(min_length) ->
          case array_length < min_length {
            True ->
              Error(errors.data_validation(
                def_name
                <> ": array has length "
                <> int.to_string(array_length)
                <> " but minimum length is "
                <> int.to_string(min_length),
              ))
            False -> Ok(Nil)
          }
        None -> Ok(Nil)
      })

      // Validate maxLength constraint
      use _ <- result.try(case json_helpers.get_int(schema, "maxLength") {
        Some(max_length) ->
          case array_length > max_length {
            True ->
              Error(errors.data_validation(
                def_name
                <> ": array has length "
                <> int.to_string(array_length)
                <> " but maximum length is "
                <> int.to_string(max_length),
              ))
            False -> Ok(Nil)
          }
        None -> Ok(Nil)
      })

      // Validate each array item against the items schema
      case json_helpers.get_field(schema, "items") {
        Some(items_schema) -> {
          // Validate each item with index in path
          list.index_fold(arr, Ok(Nil), fn(acc, item, index) {
            use _ <- result.try(acc)
            let nested_ctx =
              context.with_path(ctx, "[" <> int.to_string(index) <> "]")
            validate_array_item_data(item, items_schema, nested_ctx)
          })
        }
        None -> Ok(Nil)
      }
    }
  }
}

/// Validates an items schema definition recursively
fn validate_array_item_schema(
  items_schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Handle reference types by delegating to reference validator
  case json_helpers.get_string(items_schema, "type") {
    Some("ref") -> reference.validate_schema(items_schema, ctx)
    _ -> dispatch_schema_validation(items_schema, ctx)
  }
}

/// Validates runtime data against an items schema using recursive validation
fn validate_array_item_data(
  item: Dynamic,
  items_schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Convert dynamic to Json for validation
  let item_json = json_helpers.dynamic_to_json(item)

  use item_value <- result.try(item_json)

  // Handle reference types by delegating to reference validator
  case json_helpers.get_string(items_schema, "type") {
    Some("ref") -> reference.validate_data(item_value, items_schema, ctx)
    _ -> dispatch_data_validation(item_value, items_schema, ctx)
  }
}

// ============================================================================
// SHARED HELPERS
// ============================================================================

/// Helper to get type name for error messages
fn get_type_name(data: Json) -> String {
  case json_helpers.is_null(data) {
    True -> "null"
    False ->
      case json_helpers.is_bool(data) {
        True -> "boolean"
        False ->
          case json_helpers.is_int(data) {
            True -> "number"
            False ->
              case json_helpers.is_string(data) {
                True -> "string"
                False ->
                  case json_helpers.is_array(data) {
                    True -> "array"
                    False -> "object"
                  }
              }
          }
      }
  }
}
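Arrays are the one container validated at both the schema and the data level here, with `minLength`/`maxLength` applied to the item count. A sketch of a schema `validate_array_schema` would accept, using only the fields from `array_allowed_fields` (values invented):

```gleam
import gleam/json

// Hypothetical array schema: "items" is required; the length bounds are
// optional and are checked for consistency at schema-validation time.
pub fn tags_schema() -> json.Json {
  json.object([
    #("type", json.string("array")),
    #(
      "items",
      json.object([
        #("type", json.string("string")),
        #("maxLength", json.int(64)),
      ]),
    ),
    #("minLength", json.int(1)),
    #("maxLength", json.int(10)),
  ])
}
```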
-297 src/validation/formats.gleam

// String format validation

import gleam/list
import gleam/regexp
import gleam/string
import gleam/time/timestamp
import types.{type StringFormat}

/// Validates RFC3339 datetime format
pub fn is_valid_rfc3339_datetime(value: String) -> Bool {
  // Max length check (64 chars)
  let len = string.length(value)
  case len == 0 || len > 64 {
    True -> False
    False -> {
      // Stricter RFC3339 regex pattern with restricted digit ranges
      let pattern =
        "^[0-9]{4}-[01][0-9]-[0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9](\\.[0-9]{1,20})?(Z|([+-][0-2][0-9]:[0-5][0-9]))$"

      case regexp.from_string(pattern) {
        Ok(re) ->
          case regexp.check(re, value) {
            False -> False
            True -> {
              // Reject -00:00 timezone suffix (must use +00:00 per ISO-8601)
              case string.ends_with(value, "-00:00") {
                True -> False
                False -> {
                  // Attempt actual parsing to validate it's a real datetime
                  case timestamp.parse_rfc3339(value) {
                    Ok(_) -> True
                    Error(_) -> False
                  }
                }
              }
            }
          }
        Error(_) -> False
      }
    }
  }
}

/// Validates URI format
pub fn is_valid_uri(value: String) -> Bool {
  // URI validation with max length and stricter scheme
  // Max length check (8192 chars)
  let len = string.length(value)
  case len == 0 || len > 8192 {
    True -> False
    False -> {
      // Lowercase scheme only, max 81 chars, printable characters after
      // Note: Using [^ \t\n\r\x00-\x1F] for printable/graph chars
      let pattern = "^[a-z][a-z.-]{0,80}:[!-~]+$"
      case regexp.from_string(pattern) {
        Ok(re) -> regexp.check(re, value)
        Error(_) -> False
      }
    }
  }
}

/// Validates AT Protocol URI format (at://did:plc:xxx/collection/rkey)
pub fn is_valid_at_uri(value: String) -> Bool {
  // Max length check (8192 chars)
  let len = string.length(value)
  case len == 0 || len > 8192 {
    True -> False
    False ->
      case string.starts_with(value, "at://") {
        False -> False
        True -> {
          // Pattern: at://authority[/collection[/rkey]]
          let without_scheme = string.drop_start(value, 5)
          case string.split(without_scheme, "/") {
            [authority] -> {
              // Just authority - must be DID or handle
              is_valid_did(authority) || is_valid_handle(authority)
            }
            [authority, collection] -> {
              // Authority + collection - validate both
              case is_valid_did(authority) || is_valid_handle(authority) {
                False -> False
                True -> is_valid_nsid(collection)
              }
            }
            [authority, collection, rkey] -> {
              // Full URI - validate all parts
              case is_valid_did(authority) || is_valid_handle(authority) {
                False -> False
                True ->
                  case is_valid_nsid(collection) {
                    False -> False
                    True -> is_valid_record_key(rkey)
                  }
              }
            }
            _ -> False
          }
        }
      }
  }
}

/// Validates DID format (did:method:identifier)
pub fn is_valid_did(value: String) -> Bool {
  // Max length check (2048 chars)
  let len = string.length(value)
  case len == 0 || len > 2048 {
    True -> False
    False ->
      case string.starts_with(value, "did:") {
        False -> False
        True -> {
          // Pattern ensures identifier ends with valid char (not %)
          let pattern = "^did:[a-z]+:[a-zA-Z0-9._:%-]*[a-zA-Z0-9._-]$"
          case regexp.from_string(pattern) {
            Ok(re) -> regexp.check(re, value)
            Error(_) -> False
          }
        }
      }
  }
}

/// Validates AT Protocol handle (user.bsky.social)
pub fn is_valid_handle(value: String) -> Bool {
  // Handle is a domain name (hostname)
  // Must be lowercase, can have dots, no special chars except hyphen
  // Pattern requires at least one dot and TLD starts with letter
  let pattern =
    "^([a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\\.)+[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?$"

  case
    string.length(value) == 0 || string.length(value) > 253,
    regexp.from_string(pattern)
  {
    True, _ -> False
    False, Ok(re) ->
      case regexp.check(re, value) {
        False -> False
        True -> {
          // Extract TLD and check against disallowed list
          let parts = string.split(value, ".")
          case list.last(parts) {
            Ok(tld) ->
              case tld {
                "local"
                | "arpa"
                | "invalid"
                | "localhost"
                | "internal"
                | "example"
                | "onion"
                | "alt" -> False
                _ -> True
              }
            Error(_) -> False
          }
        }
      }
    False, Error(_) -> False
  }
}

/// Validates AT identifier (either DID or handle)
pub fn is_valid_at_identifier(value: String) -> Bool {
  is_valid_did(value) || is_valid_handle(value)
}

/// Validates NSID format (com.example.type)
pub fn is_valid_nsid(value: String) -> Bool {
  // NSID: reversed domain name with type
  // Pattern: authority.name (e.g., com.example.record)
  let pattern =
    "^[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(\\.[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+$"

  case regexp.from_string(pattern) {
    Ok(re) -> {
      case regexp.check(re, value) {
        False -> False
        True -> {
          // Must have at least 3 segments and max length 317
          let segments = string.split(value, ".")
          list.length(segments) >= 3 && string.length(value) <= 317
        }
      }
    }
    Error(_) -> False
  }
}

/// Validates CID format (Content Identifier)
pub fn is_valid_cid(value: String) -> Bool {
  // Informal/incomplete helper for fast string verification
  // Aligned with indigo's atproto/syntax/cid.go approach
  // Length: 8-256 chars, alphanumeric plus += characters
  // Rejects CIDv0 starting with "Qmb"
  let len = string.length(value)

  case len < 8 || len > 256 {
    True -> False
    False -> {
      // Reject CIDv0 (not allowed in this version of atproto)
      case string.starts_with(value, "Qmb") {
        True -> False
        False -> {
          // Pattern: alphanumeric plus + and =
          let pattern = "^[a-zA-Z0-9+=]{8,256}$"
          case regexp.from_string(pattern) {
            Ok(re) -> regexp.check(re, value)
            Error(_) -> False
          }
        }
      }
    }
  }
}

/// Validates BCP47 language tag
pub fn is_valid_language_tag(value: String) -> Bool {
  // Lenient BCP47 validation (max 128 chars)
  // Allows: i prefix (IANA), 2-3 letter codes, flexible extensions
  // e.g., en, en-US, zh-Hans-CN, i-enochian
  let len = string.length(value)
  case len == 0 || len > 128 {
    True -> False
    False -> {
      let pattern = "^(i|[a-z]{2,3})(-[a-zA-Z0-9]+)*$"
      case regexp.from_string(pattern) {
        Ok(re) -> regexp.check(re, value)
        Error(_) -> False
      }
    }
  }
}

/// Validates TID format (Timestamp Identifier)
pub fn is_valid_tid(value: String) -> Bool {
  // TID is base32-sortable timestamp (13 characters)
  // First char restricted to ensure valid timestamp range: 234567abcdefghij
  // Remaining 12 chars use full alphabet: 234567abcdefghijklmnopqrstuvwxyz
  let pattern = "^[234567abcdefghij][234567abcdefghijklmnopqrstuvwxyz]{12}$"

  case string.length(value) == 13, regexp.from_string(pattern) {
    True, Ok(re) -> regexp.check(re, value)
    _, _ -> False
  }
}

/// Validates record key format
pub fn is_valid_record_key(value: String) -> Bool {
  // Record keys can be TIDs or custom strings
  // Custom strings: alphanumeric, dots, dashes, underscores, tildes, colons
  // Length: 1-512 characters
  // Explicitly reject "." and ".." for security
  let len = string.length(value)

  case value == "." || value == ".." {
    True -> False
    False ->
      case len >= 1 && len <= 512 {
        False -> False
        True -> {
          // Check if it's a TID first
          case is_valid_tid(value) {
            True -> True
            False -> {
              // Check custom format (added : to allowed chars)
              let pattern = "^[a-zA-Z0-9_~.:-]+$"
              case regexp.from_string(pattern) {
                Ok(re) -> regexp.check(re, value)
                Error(_) -> False
              }
            }
          }
        }
      }
  }
}

/// Validates a string value against a specific format
pub fn validate_format(value: String, format: StringFormat) -> Bool {
  case format {
    types.DateTime -> is_valid_rfc3339_datetime(value)
    types.Uri -> is_valid_uri(value)
    types.AtUri -> is_valid_at_uri(value)
    types.Did -> is_valid_did(value)
    types.Handle -> is_valid_handle(value)
    types.AtIdentifier -> is_valid_at_identifier(value)
    types.Nsid -> is_valid_nsid(value)
    types.Cid -> is_valid_cid(value)
    types.Language -> is_valid_language_tag(value)
    types.Tid -> is_valid_tid(value)
    types.RecordKey -> is_valid_record_key(value)
  }
}
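A few concrete inputs make the boundaries of these checkers easier to see. A sketch of expected outcomes, traced against the rules above (sample values invented):

```gleam
// Each tuple pairs an input with the result the removed checker returned.
pub fn nsid_examples() -> List(#(String, Bool)) {
  [
    #("com.example.post", True),
    // Only two segments: NSIDs need at least three.
    #("example.post", False),
  ]
}

pub fn handle_examples() -> List(#(String, Bool)) {
  [
    #("user.bsky.social", True),
    // "localhost" is on the disallowed-TLD list.
    #("name.localhost", False),
  ]
}

pub fn record_key_examples() -> List(#(String, Bool)) {
  [
    // Matches the custom key pattern (and the TID shape).
    #("3jzfcijpj2z2a", True),
    // "." and ".." are rejected outright for path-safety.
    #("..", False),
  ]
}
```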
-63 src/validation/meta/token.gleam

// Token type validator
// Tokens are unit types used for discrimination in unions

import errors.{type ValidationError}
import gleam/json.{type Json}
import gleam/string
import honk/internal/constraints
import honk/internal/json_helpers
import validation/context.{type ValidationContext}

const allowed_fields = ["type", "description"]

/// Validates token schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  constraints.validate_allowed_fields(def_name, keys, allowed_fields, "token")
}

/// Validates token data against schema
/// Note: Tokens are unit types used for discrimination in unions.
/// The token value should be a string matching the fully-qualified token name
/// (e.g., "example.lexicon.record#demoToken"). Full token name validation
/// happens at the union/record level where the expected token name is known.
pub fn validate_data(
  data: Json,
  _schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Token data must be a string (the fully-qualified token name)
  case json_helpers.is_string(data) {
    True -> {
      // Extract and validate the string value
      let json_str = json.to_string(data)
      // Remove quotes from JSON string representation
      let value = case
        string.starts_with(json_str, "\"") && string.ends_with(json_str, "\"")
      {
        True -> string.slice(json_str, 1, string.length(json_str) - 2)
        False -> json_str
      }

      case string.is_empty(value) {
        True ->
          Error(errors.data_validation(
            def_name <> ": token value cannot be empty string",
          ))
        False -> Ok(Nil)
      }
    }
    False ->
      Error(errors.data_validation(
        def_name <> ": expected string for token data, got other type",
      ))
  }
}
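Concretely, the only token data this accepted was a non-empty JSON string; the doc comment's own example makes a good sketch:

```gleam
import gleam/json

// Token data as the removed validator expected it: a non-empty string
// carrying the fully-qualified token name.
pub fn token_value() -> json.Json {
  json.string("example.lexicon.record#demoToken")
}
```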
-68 src/validation/meta/unknown.gleam

// Unknown type validator
// Unknown allows flexible data with AT Protocol data model rules

import errors.{type ValidationError}
import gleam/json.{type Json}
import gleam/option.{None, Some}
import honk/internal/constraints
import honk/internal/json_helpers
import validation/context.{type ValidationContext}

const allowed_fields = ["type", "description"]

/// Validates unknown schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  constraints.validate_allowed_fields(def_name, keys, allowed_fields, "unknown")
}

/// Validates unknown data against schema
/// Unknown allows flexible data following AT Protocol data model rules
pub fn validate_data(
  data: Json,
  _schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Unknown data must be an object (not primitives, arrays, bytes, or blobs)
  case json_helpers.is_object(data) {
    False ->
      Error(errors.data_validation(
        def_name <> ": unknown type must be an object, not a primitive or array",
      ))
    True -> {
      // Check for special AT Protocol objects that are not allowed
      // Bytes objects: {"$bytes": "base64-string"}
      case json_helpers.get_string(data, "$bytes") {
        Some(_) ->
          Error(errors.data_validation(
            def_name <> ": unknown type cannot be a bytes object",
          ))
        None -> {
          // Blob objects: {"$type": "blob", "ref": {...}, "mimeType": "...", "size": ...}
          case json_helpers.get_string(data, "$type") {
            Some("blob") ->
              Error(errors.data_validation(
                def_name <> ": unknown type cannot be a blob object",
              ))
            _ -> {
              // Valid unknown object
              // AT Protocol data model rules:
              // - No floats (only integers) - enforced by gleam_json type system
              // - Objects can contain any valid JSON data
              // - May contain $type field for type discrimination
              Ok(Nil)
            }
          }
        }
      }
    }
  }
}
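Put differently: any plain object passed, including one carrying its own `$type`, while the two special AT Protocol shapes were turned away. A sketch of an accepted value (field names invented):

```gleam
import gleam/json

// Accepted by the removed validator: an ordinary object. Rejected:
// {"$bytes": ...} bytes objects and {"$type": "blob", ...} blob objects.
pub fn unknown_value() -> json.Json {
  json.object([
    #("$type", json.string("com.example.custom")),
    #("note", json.string("arbitrary object data")),
  ])
}
```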
-230 src/validation/primary/params.gleam

// Params type validator
// Mirrors the Go implementation's validation/primary/params
// Params define query/procedure/subscription parameters (XRPC endpoint arguments)

import errors.{type ValidationError}
import gleam/dynamic/decode
import gleam/json.{type Json}
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import honk/internal/constraints
import honk/internal/json_helpers
import validation/context.{type ValidationContext}
import validation/field as validation_field
import validation/meta/unknown as validation_meta_unknown
import validation/primitive/boolean as validation_primitive_boolean
import validation/primitive/integer as validation_primitive_integer
import validation/primitive/string as validation_primitive_string

const allowed_fields = ["type", "description", "properties", "required"]

/// Validates params schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "params",
  ))

  // Validate type field
  use _ <- result.try(case json_helpers.get_string(schema, "type") {
    Some("params") -> Ok(Nil)
    Some(other_type) ->
      Error(errors.invalid_schema(
        def_name <> ": expected type 'params', got '" <> other_type <> "'",
      ))
    None ->
      Error(errors.invalid_schema(def_name <> ": params missing type field"))
  })

  // Get properties and required fields
  let properties_dict = case json_helpers.get_field(schema, "properties") {
    Some(props) -> json_helpers.json_to_dict(props)
    None -> Ok(json_helpers.empty_dict())
  }

  let required_array = case json_helpers.get_array(schema, "required") {
    Some(arr) -> Some(arr)
    None -> None
  }

  // Validate required fields exist in properties
  use props_dict <- result.try(properties_dict)
  use _ <- result.try(validate_required_fields(
    def_name,
    required_array,
    props_dict,
  ))

  // Validate each property
  validate_properties(def_name, props_dict, ctx)
}

/// Validates that all required fields exist in properties
fn validate_required_fields(
  def_name: String,
  required_array: option.Option(List(decode.Dynamic)),
  properties_dict: json_helpers.JsonDict,
) -> Result(Nil, ValidationError) {
  case required_array {
    None -> Ok(Nil)
    Some(required) -> {
      list.try_fold(required, Nil, fn(_, item) {
        case decode.run(item, decode.string) {
          Ok(field_name) -> {
            case json_helpers.dict_has_key(properties_dict, field_name) {
              True -> Ok(Nil)
              False ->
                Error(errors.invalid_schema(
                  def_name
                  <> ": required field '"
                  <> field_name
                  <> "' not found in properties",
                ))
            }
          }
          Error(_) ->
            Error(errors.invalid_schema(
              def_name <> ": required field must be a string",
            ))
        }
      })
    }
  }
}

/// Validates all properties in the params
fn validate_properties(
  def_name: String,
  properties_dict: json_helpers.JsonDict,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  json_helpers.dict_fold(properties_dict, Ok(Nil), fn(acc, key, value) {
    case acc {
      Error(e) -> Error(e)
      Ok(_) -> {
        // Check property name is not empty
        use _ <- result.try(case key {
          "" ->
            Error(errors.invalid_schema(
              def_name <> ": empty property name not allowed",
            ))
          _ -> Ok(Nil)
        })

        // Convert dynamic value to JSON
        use prop_json <- result.try(case json_helpers.dynamic_to_json(value) {
          Ok(j) -> Ok(j)
          Error(_) ->
            Error(errors.invalid_schema(
              def_name <> ": invalid property value for '" <> key <> "'",
            ))
        })

        // Validate property type restrictions
        validate_property_type(def_name, key, prop_json, ctx)
      }
    }
  })
}

/// Validates that a property has an allowed type
/// Allowed types: boolean, integer, string, unknown, or arrays of these
fn validate_property_type(
  def_name: String,
  property_name: String,
  property_schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let prop_path = def_name <> ".properties." <> property_name

  case json_helpers.get_string(property_schema, "type") {
    Some("boolean") | Some("integer") | Some("string") | Some("unknown") -> {
      // These are allowed types - recursively validate the schema
      let prop_ctx = context.with_path(ctx, "properties." <> property_name)
      validate_property_schema(property_schema, prop_ctx)
    }
    Some("array") -> {
      // Arrays are allowed, but items must be one of the allowed types
      case json_helpers.get_field(property_schema, "items") {
        Some(items) -> {
          case json_helpers.get_string(items, "type") {
            Some("boolean") | Some("integer") | Some("string") | Some("unknown") -> {
              // Valid array item type - recursively validate
              let prop_ctx =
                context.with_path(ctx, "properties." <> property_name)
              validate_property_schema(property_schema, prop_ctx)
            }
            Some(other_type) ->
              Error(errors.invalid_schema(
                prop_path
                <> ": params array items must be boolean, integer, string, or unknown, got '"
                <> other_type
                <> "'",
              ))
            None ->
              Error(errors.invalid_schema(
                prop_path <> ": array items missing type field",
              ))
          }
        }
        None ->
          Error(errors.invalid_schema(
            prop_path <> ": array property missing items field",
          ))
      }
    }
    Some(other_type) ->
      Error(errors.invalid_schema(
        prop_path
        <> ": params properties must be boolean, integer, string, unknown, or arrays of these, got '"
        <> other_type
        <> "'",
      ))
    None ->
      Error(errors.invalid_schema(prop_path <> ": property missing type field"))
  }
}

/// Validates a property schema by dispatching to the appropriate validator
fn validate_property_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  case json_helpers.get_string(schema, "type") {
    Some("boolean") -> validation_primitive_boolean.validate_schema(schema, ctx)
    Some("integer") -> validation_primitive_integer.validate_schema(schema, ctx)
    Some("string") -> validation_primitive_string.validate_schema(schema, ctx)
    Some("unknown") -> validation_meta_unknown.validate_schema(schema, ctx)
    Some("array") -> validation_field.validate_array_schema(schema, ctx)
    Some(unknown_type) ->
      Error(errors.invalid_schema(
        context.path(ctx) <> ": unknown type '" <> unknown_type <> "'",
      ))
    None ->
      Error(errors.invalid_schema(
        context.path(ctx) <> ": schema missing type field",
      ))
  }
}

/// Validates params data against schema
pub fn validate_data(
  _data: Json,
  _schema: Json,
  _ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Params data validation would check that all required parameters are present
  // and that each parameter value matches its schema
  // For now, simplified implementation
  Ok(Nil)
}
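A params schema is thus a restricted object schema. A sketch of one that passes the property-type rules above (parameter names invented):

```gleam
import gleam/json

// Hypothetical params schema: every property must be one of the allowed
// types (boolean, integer, string, unknown, or an array of those).
pub fn list_params_schema() -> json.Json {
  json.object([
    #("type", json.string("params")),
    #("required", json.array(["limit"], json.string)),
    #(
      "properties",
      json.object([
        #("limit", json.object([#("type", json.string("integer"))])),
        #("cursor", json.object([#("type", json.string("string"))])),
      ]),
    ),
  ])
}
```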
-163 src/validation/primary/procedure.gleam

// Procedure type validator
// Procedures are XRPC Procedure (HTTP POST) endpoints for modifying data

import errors.{type ValidationError}
import gleam/json.{type Json}
import gleam/option.{None, Some}
import gleam/result
import honk/internal/constraints
import honk/internal/json_helpers
import validation/context.{type ValidationContext}
import validation/field as validation_field
import validation/field/reference as validation_field_reference
import validation/field/union as validation_field_union
import validation/primary/params

const allowed_fields = [
  "type", "parameters", "input", "output", "errors", "description",
]

/// Validates procedure schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "procedure",
  ))

  // Validate parameters field if present
  use _ <- result.try(case json_helpers.get_field(schema, "parameters") {
    Some(parameters) -> validate_parameters_schema(parameters, ctx)
    None -> Ok(Nil)
  })

  // Validate input field if present
  use _ <- result.try(case json_helpers.get_field(schema, "input") {
    Some(input) -> validate_io_schema(def_name, input, "input")
    None -> Ok(Nil)
  })

  // Validate output field if present
  use _ <- result.try(case json_helpers.get_field(schema, "output") {
    Some(output) -> validate_io_schema(def_name, output, "output")
    None -> Ok(Nil)
  })

  // Validate errors field if present
  case json_helpers.get_array(schema, "errors") {
    Some(_) -> Ok(Nil)
    None -> Ok(Nil)
  }
}

/// Validates procedure input data against schema
/// Data should be the procedure input body as JSON
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // If schema has input, validate data against it
  case json_helpers.get_field(schema, "input") {
    Some(input) -> {
      let input_ctx = context.with_path(ctx, "input")
      validate_body_data(data, input, input_ctx)
    }
    None -> Ok(Nil)
  }
}

/// Validates procedure output data against schema
pub fn validate_output_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // If schema has output, validate data against it
  case json_helpers.get_field(schema, "output") {
    Some(output) -> {
      let output_ctx = context.with_path(ctx, "output")
      validate_body_data(data, output, output_ctx)
    }
    None -> Ok(Nil)
  }
}

/// Validates data against a SchemaBody (input or output)
fn validate_body_data(
  data: Json,
  body: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Get the schema field from the body
  case json_helpers.get_field(body, "schema") {
    Some(schema) -> {
      let schema_ctx = context.with_path(ctx, "schema")
      // Dispatch to appropriate validator based on schema type
      validate_body_schema_data(data, schema, schema_ctx)
    }
    None -> Ok(Nil)
  }
}

/// Validates data against a body schema (object, ref, or union)
fn validate_body_schema_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  case json_helpers.get_string(schema, "type") {
    Some("object") -> validation_field.validate_object_data(data, schema, ctx)
    Some("ref") -> {
      // For references, we need to resolve and validate
      // For now, just validate it's structured correctly
      validation_field_reference.validate_data(data, schema, ctx)
    }
    Some("union") -> validation_field_union.validate_data(data, schema, ctx)
    Some(other_type) ->
      Error(errors.data_validation(
        context.path(ctx)
        <> ": unsupported body schema type '"
        <> other_type
        <> "'",
      ))
    None ->
      Error(errors.data_validation(
        context.path(ctx) <> ": body schema missing type field",
      ))
  }
}

/// Validates parameters schema definition
fn validate_parameters_schema(
  parameters: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Validate the full params schema
  let params_ctx = context.with_path(ctx, "parameters")
  params.validate_schema(parameters, params_ctx)
}

/// Validates input/output schema definition
fn validate_io_schema(
  def_name: String,
  io: Json,
  field_name: String,
) -> Result(Nil, ValidationError) {
  // Input/output must have encoding field
  case json_helpers.get_string(io, "encoding") {
    Some(_) -> Ok(Nil)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": procedure " <> field_name <> " missing encoding field",
      ))
  }
}
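The input/output rule here is minimal: `encoding` is mandatory, `schema` optional. A sketch of a body descriptor `validate_io_schema` would accept (values invented):

```gleam
import gleam/json

// Hypothetical procedure input body: "encoding" is the only required
// field; "schema" (object, ref, or union) is validated when present.
pub fn input_body() -> json.Json {
  json.object([
    #("encoding", json.string("application/json")),
    #("schema", json.object([#("type", json.string("object"))])),
  ])
}
```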
src/validation/primary/query.gleam (-224 lines, file deleted)

// Query type validator
// Queries are XRPC Query (HTTP GET) endpoints for retrieving data

import errors.{type ValidationError}
import gleam/dynamic/decode
import gleam/json.{type Json}
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import honk/internal/constraints
import honk/internal/json_helpers
import validation/context.{type ValidationContext}
import validation/field as validation_field
import validation/meta/unknown as validation_meta_unknown
import validation/primary/params
import validation/primitive/boolean as validation_primitive_boolean
import validation/primitive/integer as validation_primitive_integer
import validation/primitive/string as validation_primitive_string

const allowed_fields = ["type", "parameters", "output", "errors", "description"]

/// Validates query schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "query",
  ))

  // Validate parameters field if present
  use _ <- result.try(case json_helpers.get_field(schema, "parameters") {
    Some(parameters) -> validate_parameters_schema(parameters, ctx)
    None -> Ok(Nil)
  })

  // Validate output field if present
  use _ <- result.try(case json_helpers.get_field(schema, "output") {
    Some(output) -> validate_output_schema(def_name, output)
    None -> Ok(Nil)
  })

  // Validate errors field if present
  case json_helpers.get_array(schema, "errors") {
    Some(_) -> Ok(Nil)
    None -> Ok(Nil)
  }
}

/// Validates query data against schema
/// Data should be the query parameters as a JSON object
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Query data must be an object (the parameters)
  use _ <- result.try(case json_helpers.is_object(data) {
    True -> Ok(Nil)
    False ->
      Error(errors.data_validation(
        def_name <> ": query parameters must be an object",
      ))
  })

  // If schema has parameters, validate data against them
  case json_helpers.get_field(schema, "parameters") {
    Some(parameters) -> {
      let params_ctx = context.with_path(ctx, "parameters")
      validate_parameters_data(data, parameters, params_ctx)
    }
    None -> Ok(Nil)
  }
}

/// Validates parameter data against params schema
fn validate_parameters_data(
  data: Json,
  params_schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Get data as dict
  use data_dict <- result.try(json_helpers.json_to_dict(data))

  // Get properties and required from params schema
  let properties_dict = case
    json_helpers.get_field(params_schema, "properties")
  {
    Some(props) -> json_helpers.json_to_dict(props)
    None -> Ok(json_helpers.empty_dict())
  }

  let required_array = json_helpers.get_array(params_schema, "required")

  use props_dict <- result.try(properties_dict)

  // Check all required parameters are present
  use _ <- result.try(case required_array {
    Some(required) -> {
      list.try_fold(required, Nil, fn(_, item) {
        case decode.run(item, decode.string) {
          Ok(param_name) -> {
            case json_helpers.dict_has_key(data_dict, param_name) {
              True -> Ok(Nil)
              False ->
                Error(errors.data_validation(
                  def_name
                  <> ": missing required parameter '"
                  <> param_name
                  <> "'",
                ))
            }
          }
          Error(_) -> Ok(Nil)
        }
      })
    }
    None -> Ok(Nil)
  })

  // Validate each parameter in data
  json_helpers.dict_fold(data_dict, Ok(Nil), fn(acc, param_name, param_value) {
    case acc {
      Error(e) -> Error(e)
      Ok(_) -> {
        // Get the schema for this parameter
        case json_helpers.dict_get(props_dict, param_name) {
          Some(param_schema_dyn) -> {
            // Convert dynamic to JSON
            case json_helpers.dynamic_to_json(param_schema_dyn) {
              Ok(param_schema) -> {
                // Convert param value to JSON
                case json_helpers.dynamic_to_json(param_value) {
                  Ok(param_json) -> {
                    // Validate the parameter value against its schema
                    let param_ctx = context.with_path(ctx, param_name)
                    validate_parameter_value(
                      param_json,
                      param_schema,
                      param_ctx,
                    )
                  }
                  Error(e) -> Error(e)
                }
              }
              Error(e) -> Error(e)
            }
          }
          None -> {
            // Parameter not in schema - could warn or allow
            // For now, allow unknown parameters
            Ok(Nil)
          }
        }
      }
    }
  })
}

/// Validates a single parameter value against its schema
fn validate_parameter_value(
  value: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Dispatch based on schema type
  case json_helpers.get_string(schema, "type") {
    Some("boolean") ->
      validation_primitive_boolean.validate_data(value, schema, ctx)
    Some("integer") ->
      validation_primitive_integer.validate_data(value, schema, ctx)
    Some("string") ->
      validation_primitive_string.validate_data(value, schema, ctx)
    Some("unknown") -> validation_meta_unknown.validate_data(value, schema, ctx)
    Some("array") -> validation_field.validate_array_data(value, schema, ctx)
    Some(other_type) ->
      Error(errors.data_validation(
        context.path(ctx)
        <> ": unsupported parameter type '"
        <> other_type
        <> "'",
      ))
    None ->
      Error(errors.data_validation(
        context.path(ctx) <> ": parameter schema missing type field",
      ))
  }
}

/// Validates parameters schema definition
fn validate_parameters_schema(
  parameters: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Validate the full params schema
  let params_ctx = context.with_path(ctx, "parameters")
  params.validate_schema(parameters, params_ctx)
}

/// Validates output schema definition
fn validate_output_schema(
  def_name: String,
  output: Json,
) -> Result(Nil, ValidationError) {
  // Output must have encoding field
  case json_helpers.get_string(output, "encoding") {
    Some(_) -> Ok(Nil)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": query output missing encoding field",
      ))
  }
}
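
Note: as a reading aid for the dispatch above, a minimal sketch of driving query.validate_data, using the pre-move module paths from this listing. The exact parameters-schema shape ("type": "params" with "required" and "properties") is an assumption inferred from validate_parameters_data, not taken from this diff.

    import gleam/json
    import validation/context
    import validation/primary/query

    pub fn query_data_sketch() {
      // Hypothetical "limit" parameter: it is required, so a missing key
      // fails in the required-parameters fold before per-value dispatch.
      let schema =
        json.object([
          #("type", json.string("query")),
          #(
            "parameters",
            json.object([
              #("type", json.string("params")),
              #("required", json.preprocessed_array([json.string("limit")])),
              #(
                "properties",
                json.object([
                  #("limit", json.object([#("type", json.string("integer"))])),
                ]),
              ),
            ]),
          ),
        ])
      let data = json.object([#("limit", json.int(25))])
      let assert Ok(ctx) = context.builder() |> context.build
      // Dispatches to the integer validator via validate_parameter_value.
      query.validate_data(data, schema, ctx)
    }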
src/validation/primary/record.gleam (-180 lines, file deleted)

// Record type validator

import errors.{type ValidationError}
import gleam/json.{type Json}
import gleam/option.{None, Some}
import gleam/result
import gleam/string
import honk/internal/constraints
import honk/internal/json_helpers
import validation/context.{type ValidationContext}
import validation/field

const allowed_fields = ["type", "key", "record", "description"]

const allowed_record_fields = [
  "type", "properties", "required", "nullable", "description",
]

/// Validates record schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields at record level
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "record",
  ))

  // Validate required 'key' field
  let key_value = case json_helpers.get_string(schema, "key") {
    Some(key) -> Ok(key)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": record missing required 'key' field",
      ))
  }

  use key <- result.try(key_value)
  use _ <- result.try(validate_key(def_name, key))

  // Validate required 'record' field
  let record_def = case json_helpers.get_field(schema, "record") {
    Some(record) -> Ok(record)
    None ->
      Error(errors.invalid_schema(
        def_name <> ": record missing required 'record' field",
      ))
  }

  use record <- result.try(record_def)

  // Validate record object structure
  use _ <- result.try(validate_record_object(def_name, record))

  // Recursively validate properties - delegate to object validator
  // The record field is an object, so we can use field.validate_object_schema
  let record_ctx = context.with_path(ctx, ".record")
  field.validate_object_schema(record, record_ctx)
}

/// Validates record data against schema
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Data must be an object
  case json_helpers.is_object(data) {
    False -> {
      Error(errors.data_validation(def_name <> ": expected object for record"))
    }
    True -> {
      // Get the record definition
      case json_helpers.get_field(schema, "record") {
        Some(record_def) -> {
          // Delegate to object validator for full validation
          // The record's data validation is the same as object validation
          field.validate_object_data(data, record_def, ctx)
        }
        None ->
          Error(errors.data_validation(
            def_name <> ": record schema missing 'record' field",
          ))
      }
    }
  }
}

/// Validates the `key` field of a record definition
///
/// Valid key types:
/// - `tid`: Record key is a Timestamp Identifier (auto-generated)
/// - `any`: Record key can be any valid record key format
/// - `nsid`: Record key must be a valid NSID
/// - `literal:*`: Record key must match the literal value after the colon
fn validate_key(def_name: String, key: String) -> Result(Nil, ValidationError) {
  case key {
    "tid" -> Ok(Nil)
    "any" -> Ok(Nil)
    "nsid" -> Ok(Nil)
    _ ->
      case string.starts_with(key, "literal:") {
        True -> Ok(Nil)
        False ->
          Error(errors.invalid_schema(
            def_name
            <> ": record has invalid key type '"
            <> key
            <> "'. Must be 'tid', 'any', 'nsid', or 'literal:*'",
          ))
      }
  }
}

/// Validates the structure of a record object definition
fn validate_record_object(
  def_name: String,
  record_def: Json,
) -> Result(Nil, ValidationError) {
  // Must be type "object"
  case json_helpers.get_string(record_def, "type") {
    Some("object") -> {
      // Validate allowed fields in record object
      let keys = json_helpers.get_keys(record_def)
      use _ <- result.try(constraints.validate_allowed_fields(
        def_name,
        keys,
        allowed_record_fields,
        "record object",
      ))

      // Validate properties structure
      use _ <- result.try(
        case json_helpers.get_field(record_def, "properties") {
          Some(properties) ->
            case json_helpers.is_object(properties) {
              True -> Ok(Nil)
              False ->
                Error(errors.invalid_schema(
                  def_name <> ": record properties must be an object",
                ))
            }
          None -> Ok(Nil)
        },
      )

      // Validate nullable is an array if present
      case json_helpers.get_array(record_def, "nullable") {
        Some(_) -> Ok(Nil)
        None -> {
          // Check if nullable exists but is not an array
          case json_helpers.get_field(record_def, "nullable") {
            Some(_) ->
              Error(errors.invalid_schema(
                def_name <> ": record nullable field must be an array",
              ))
            None -> Ok(Nil)
          }
        }
      }
    }
    Some(other_type) ->
      Error(errors.invalid_schema(
        def_name
        <> ": record field must be type 'object', got '"
        <> other_type
        <> "'",
      ))
    None ->
      Error(errors.invalid_schema(def_name <> ": record field missing type"))
  }
}
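
Note: a minimal sketch of the key contract enforced by validate_key above, with pre-move module paths. Whether an empty properties object satisfies field.validate_object_schema is an assumption; the key strings themselves come straight from the doc comment on validate_key.

    import gleam/json
    import validation/context
    import validation/primary/record

    pub fn record_key_sketch() {
      // "tid", "any", "nsid", and any "literal:*" value pass validate_key;
      // every other key string produces an invalid_schema error.
      let schema =
        json.object([
          #("type", json.string("record")),
          #("key", json.string("literal:self")),
          #(
            "record",
            json.object([
              #("type", json.string("object")),
              // Assumed-valid empty object definition.
              #("properties", json.object([])),
            ]),
          ),
        ])
      let assert Ok(ctx) = context.builder() |> context.build
      record.validate_schema(schema, ctx)
    }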
src/validation/primary/subscription.gleam (-269 lines, file deleted)

// Subscription type validator
// Subscriptions are XRPC Subscription (WebSocket) endpoints for real-time data

import errors.{type ValidationError}
import gleam/dynamic/decode
import gleam/json.{type Json}
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import honk/internal/constraints
import honk/internal/json_helpers
import validation/context.{type ValidationContext}
import validation/field as validation_field
import validation/field/union as validation_field_union
import validation/meta/unknown as validation_meta_unknown
import validation/primary/params
import validation/primitive/boolean as validation_primitive_boolean
import validation/primitive/integer as validation_primitive_integer
import validation/primitive/string as validation_primitive_string

const allowed_fields = [
  "type",
  "parameters",
  "message",
  "errors",
  "description",
]

/// Validates subscription schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "subscription",
  ))

  // Validate parameters field if present
  use _ <- result.try(case json_helpers.get_field(schema, "parameters") {
    Some(parameters) -> validate_parameters_schema(parameters, ctx)
    None -> Ok(Nil)
  })

  // Validate message field if present
  use _ <- result.try(case json_helpers.get_field(schema, "message") {
    Some(message) -> validate_message_schema(def_name, message)
    None -> Ok(Nil)
  })

  // Validate errors field if present
  case json_helpers.get_array(schema, "errors") {
    Some(_) -> Ok(Nil)
    None -> Ok(Nil)
  }
}

/// Validates subscription parameters data against schema
/// Data should be the connection parameters as a JSON object
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Subscription parameter data must be an object
  use _ <- result.try(case json_helpers.is_object(data) {
    True -> Ok(Nil)
    False ->
      Error(errors.data_validation(
        def_name <> ": subscription parameters must be an object",
      ))
  })

  // If schema has parameters, validate data against them
  case json_helpers.get_field(schema, "parameters") {
    Some(parameters) -> {
      let params_ctx = context.with_path(ctx, "parameters")
      validate_parameters_data(data, parameters, params_ctx)
    }
    None -> Ok(Nil)
  }
}

/// Validates subscription message data against schema
pub fn validate_message_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Get the message schema
  case json_helpers.get_field(schema, "message") {
    Some(message) -> {
      case json_helpers.get_field(message, "schema") {
        Some(msg_schema) -> {
          // Message schema must be a union - validate data against it
          let msg_ctx = context.with_path(ctx, "message.schema")
          validation_field_union.validate_data(data, msg_schema, msg_ctx)
        }
        None -> Ok(Nil)
      }
    }
    None -> Ok(Nil)
  }
}

/// Validates parameter data against params schema
/// (Reused from query validator pattern)
fn validate_parameters_data(
  data: Json,
  params_schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Get data as dict
  use data_dict <- result.try(json_helpers.json_to_dict(data))

  // Get properties and required from params schema
  let properties_dict = case
    json_helpers.get_field(params_schema, "properties")
  {
    Some(props) -> json_helpers.json_to_dict(props)
    None -> Ok(json_helpers.empty_dict())
  }

  let required_array = json_helpers.get_array(params_schema, "required")

  use props_dict <- result.try(properties_dict)

  // Check all required parameters are present
  use _ <- result.try(case required_array {
    Some(required) -> {
      list.try_fold(required, Nil, fn(_, item) {
        case decode.run(item, decode.string) {
          Ok(param_name) -> {
            case json_helpers.dict_has_key(data_dict, param_name) {
              True -> Ok(Nil)
              False ->
                Error(errors.data_validation(
                  def_name
                  <> ": missing required parameter '"
                  <> param_name
                  <> "'",
                ))
            }
          }
          Error(_) -> Ok(Nil)
        }
      })
    }
    None -> Ok(Nil)
  })

  // Validate each parameter in data
  json_helpers.dict_fold(data_dict, Ok(Nil), fn(acc, param_name, param_value) {
    case acc {
      Error(e) -> Error(e)
      Ok(_) -> {
        // Get the schema for this parameter
        case json_helpers.dict_get(props_dict, param_name) {
          Some(param_schema_dyn) -> {
            // Convert dynamic to JSON
            case json_helpers.dynamic_to_json(param_schema_dyn) {
              Ok(param_schema) -> {
                // Convert param value to JSON
                case json_helpers.dynamic_to_json(param_value) {
                  Ok(param_json) -> {
                    // Validate the parameter value against its schema
                    let param_ctx = context.with_path(ctx, param_name)
                    validate_parameter_value(
                      param_json,
                      param_schema,
                      param_ctx,
                    )
                  }
                  Error(e) -> Error(e)
                }
              }
              Error(e) -> Error(e)
            }
          }
          None -> {
            // Parameter not in schema - allow unknown parameters
            Ok(Nil)
          }
        }
      }
    }
  })
}

/// Validates a single parameter value against its schema
fn validate_parameter_value(
  value: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Dispatch based on schema type
  case json_helpers.get_string(schema, "type") {
    Some("boolean") ->
      validation_primitive_boolean.validate_data(value, schema, ctx)
    Some("integer") ->
      validation_primitive_integer.validate_data(value, schema, ctx)
    Some("string") ->
      validation_primitive_string.validate_data(value, schema, ctx)
    Some("unknown") -> validation_meta_unknown.validate_data(value, schema, ctx)
    Some("array") -> validation_field.validate_array_data(value, schema, ctx)
    Some(other_type) ->
      Error(errors.data_validation(
        context.path(ctx)
        <> ": unsupported parameter type '"
        <> other_type
        <> "'",
      ))
    None ->
      Error(errors.data_validation(
        context.path(ctx) <> ": parameter schema missing type field",
      ))
  }
}

/// Validates parameters schema definition
fn validate_parameters_schema(
  parameters: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  // Validate the full params schema
  let params_ctx = context.with_path(ctx, "parameters")
  params.validate_schema(parameters, params_ctx)
}

/// Validates message schema definition
fn validate_message_schema(
  def_name: String,
  message: Json,
) -> Result(Nil, ValidationError) {
  // Message must have schema field
  case json_helpers.get_field(message, "schema") {
    Some(schema_field) -> {
      // Schema must be a union type
      case json_helpers.get_string(schema_field, "type") {
        Some("union") -> Ok(Nil)
        Some(other_type) ->
          Error(errors.invalid_schema(
            def_name
            <> ": subscription message schema must be type 'union', got '"
            <> other_type
            <> "'",
          ))
        None ->
          Error(errors.invalid_schema(
            def_name <> ": subscription message schema missing type field",
          ))
      }
    }
    None ->
      Error(errors.invalid_schema(
        def_name <> ": subscription message missing schema field",
      ))
  }
}
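
Note: a minimal sketch of the message-schema rule above (the schema under message must be type 'union'). The "refs" field is an assumed union shape for illustration only, since validate_message_schema inspects nothing beyond "type".

    import gleam/json
    import validation/context
    import validation/primary/subscription

    pub fn subscription_message_sketch() {
      let schema =
        json.object([
          #("type", json.string("subscription")),
          #(
            "message",
            json.object([
              #(
                "schema",
                json.object([
                  #("type", json.string("union")),
                  // Assumed union payload; only "type" is checked here.
                  #("refs", json.preprocessed_array([json.string("#commit")])),
                ]),
              ),
            ]),
          ),
        ])
      let assert Ok(ctx) = context.builder() |> context.build
      subscription.validate_schema(schema, ctx)
    }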
src/validation/primitive/blob.gleam (-270 lines, file deleted)

// Blob type validator
// Blobs are binary objects with MIME types and size constraints

import errors.{type ValidationError}
import gleam/dynamic.{type Dynamic}
import gleam/dynamic/decode
import gleam/int
import gleam/json.{type Json}
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import gleam/string
import honk/internal/constraints
import honk/internal/json_helpers
import validation/context.{type ValidationContext}

const allowed_fields = ["type", "accept", "maxSize", "description"]

/// Validates blob schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "blob",
  ))

  // Validate accept field if present
  use _ <- result.try(case json_helpers.get_array(schema, "accept") {
    Some(accept_array) -> validate_accept_field(def_name, accept_array)
    None -> Ok(Nil)
  })

  // Validate maxSize is positive integer if present
  case json_helpers.get_int(schema, "maxSize") {
    Some(max_size) ->
      case max_size > 0 {
        True -> Ok(Nil)
        False ->
          Error(errors.invalid_schema(
            def_name <> ": blob maxSize must be greater than 0",
          ))
      }
    None -> Ok(Nil)
  }
}

/// Validates blob data against schema
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Data must be an object
  case json_helpers.is_object(data) {
    False -> {
      Error(errors.data_validation(def_name <> ": expected blob object"))
    }
    True -> {
      // Validate required mimeType field
      use mime_type <- result.try(
        case json_helpers.get_string(data, "mimeType") {
          Some(mt) -> Ok(mt)
          None ->
            Error(errors.data_validation(
              def_name <> ": blob missing required 'mimeType' field",
            ))
        },
      )

      // Validate required size field
      use size <- result.try(case json_helpers.get_int(data, "size") {
        Some(s) -> Ok(s)
        None ->
          Error(errors.data_validation(
            def_name <> ": blob missing or invalid 'size' field",
          ))
      })

      // Validate against accept constraint if present
      use _ <- result.try(case json_helpers.get_array(schema, "accept") {
        Some(accept_array) -> {
          validate_mime_type_against_accept(def_name, mime_type, accept_array)
        }
        None -> Ok(Nil)
      })

      // Validate against maxSize constraint if present
      case json_helpers.get_int(schema, "maxSize") {
        Some(max_size) ->
          case size <= max_size {
            True -> Ok(Nil)
            False ->
              Error(errors.data_validation(
                def_name
                <> ": blob size "
                <> int.to_string(size)
                <> " exceeds maxSize "
                <> int.to_string(max_size),
              ))
          }
        None -> Ok(Nil)
      }
    }
  }
}

/// Validates accept field array
fn validate_accept_field(
  def_name: String,
  accept_array: List(Dynamic),
) -> Result(Nil, ValidationError) {
  list.index_fold(accept_array, Ok(Nil), fn(acc, item, i) {
    use _ <- result.try(acc)
    case decode.run(item, decode.string) {
      Ok(mime_type) -> validate_mime_type_pattern(def_name, mime_type, i)
      Error(_) ->
        Error(errors.invalid_schema(
          def_name
          <> ": blob accept["
          <> int.to_string(i)
          <> "] must be a string",
        ))
    }
  })
}

/// Validates MIME type pattern syntax
fn validate_mime_type_pattern(
  def_name: String,
  mime_type: String,
  _index: Int,
) -> Result(Nil, ValidationError) {
  case string.is_empty(mime_type) {
    True ->
      Error(errors.invalid_schema(
        def_name <> ": blob MIME type cannot be empty",
      ))
    False -> {
      // Allow */*
      case mime_type {
        "*/*" -> Ok(Nil)
        _ -> {
          // Must contain exactly one /
          case string.contains(mime_type, "/") {
            False ->
              Error(errors.invalid_schema(
                def_name
                <> ": blob MIME type '"
                <> mime_type
                <> "' must contain a '/' character",
              ))
            True -> {
              let parts = string.split(mime_type, "/")
              case parts {
                [type_part, subtype_part] -> {
                  // Validate * usage
                  use _ <- result.try(validate_wildcard(
                    def_name,
                    type_part,
                    "type",
                    mime_type,
                  ))
                  validate_wildcard(
                    def_name,
                    subtype_part,
                    "subtype",
                    mime_type,
                  )
                }
                _ ->
                  Error(errors.invalid_schema(
                    def_name
                    <> ": blob MIME type '"
                    <> mime_type
                    <> "' must have exactly one '/' character",
                  ))
              }
            }
          }
        }
      }
    }
  }
}

/// Validates wildcard usage in MIME type parts
fn validate_wildcard(
  def_name: String,
  part: String,
  part_name: String,
  full_mime_type: String,
) -> Result(Nil, ValidationError) {
  case string.contains(part, "*") {
    True ->
      case part {
        "*" -> Ok(Nil)
        _ ->
          Error(errors.invalid_schema(
            def_name
            <> ": blob MIME type '"
            <> full_mime_type
            <> "' can only use '*' as a complete wildcard for "
            <> part_name,
          ))
      }
    False -> Ok(Nil)
  }
}

/// Validates MIME type against accept patterns
fn validate_mime_type_against_accept(
  def_name: String,
  mime_type: String,
  accept_array: List(Dynamic),
) -> Result(Nil, ValidationError) {
  let accept_patterns =
    list.filter_map(accept_array, fn(item) { decode.run(item, decode.string) })

  // Check if mime_type matches any accept pattern
  case
    list.any(accept_patterns, fn(pattern) {
      mime_type_matches_pattern(mime_type, pattern)
    })
  {
    True -> Ok(Nil)
    False ->
      Error(errors.data_validation(
        def_name
        <> ": blob mimeType '"
        <> mime_type
        <> "' not accepted. Allowed: "
        <> string.join(accept_patterns, ", "),
      ))
  }
}

/// Checks if a MIME type matches a pattern
fn mime_type_matches_pattern(mime_type: String, pattern: String) -> Bool {
  case pattern {
    "*/*" -> True
    _ -> {
      let mime_parts = string.split(mime_type, "/")
      let pattern_parts = string.split(pattern, "/")
      case mime_parts, pattern_parts {
        [mime_type_part, mime_subtype], [pattern_type, pattern_subtype] -> {
          let type_matches = case pattern_type {
            "*" -> True
            _ -> mime_type_part == pattern_type
          }
          let subtype_matches = case pattern_subtype {
            "*" -> True
            _ -> mime_subtype == pattern_subtype
          }
          type_matches && subtype_matches
        }
        _, _ -> False
      }
    }
  }
}
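
Note: a minimal sketch of the accept-pattern matching above, reusing the call shape from test/blob_validator_test.gleam further down in this diff; module paths follow the pre-move layout of this listing.

    import gleam/json
    import validation/context
    import validation/primitive/blob

    pub fn blob_accept_sketch() {
      // "image/*" matches image/png through the wildcard-subtype branch of
      // mime_type_matches_pattern; size 50_000 is within maxSize.
      let schema =
        json.object([
          #("type", json.string("blob")),
          #("accept", json.preprocessed_array([json.string("image/*")])),
          #("maxSize", json.int(1_000_000)),
        ])
      let data =
        json.object([
          #("mimeType", json.string("image/png")),
          #("size", json.int(50_000)),
        ])
      let assert Ok(ctx) = context.builder() |> context.build
      blob.validate_data(data, schema, ctx)
    }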
src/validation/primitive/boolean.gleam (-86 lines, file deleted)

// Boolean type validator

import errors.{type ValidationError}
import gleam/json.{type Json}
import gleam/option.{None, Some}
import gleam/result
import honk/internal/constraints
import honk/internal/json_helpers
import validation/context.{type ValidationContext}

const allowed_fields = ["type", "const", "default", "description"]

/// Validates boolean schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "boolean",
  ))

  // Validate const/default exclusivity
  let has_const = json_helpers.get_bool(schema, "const") != None
  let has_default = json_helpers.get_bool(schema, "default") != None

  constraints.validate_const_default_exclusivity(
    def_name,
    has_const,
    has_default,
    "boolean",
  )
}

/// Validates boolean data against schema
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Check data is a boolean
  case json_helpers.is_bool(data) {
    False ->
      Error(errors.data_validation(
        def_name <> ": expected boolean, got other type",
      ))
    True -> {
      // Extract boolean value
      let json_str = json.to_string(data)
      let is_true = json_str == "true"
      let is_false = json_str == "false"

      case is_true || is_false {
        False ->
          Error(errors.data_validation(
            def_name <> ": invalid boolean representation",
          ))
        True -> {
          let value = is_true

          // Validate const constraint
          case json_helpers.get_bool(schema, "const") {
            Some(const_val) if const_val != value ->
              Error(errors.data_validation(
                def_name
                <> ": must be constant value "
                <> case const_val {
                  True -> "true"
                  False -> "false"
                },
              ))
            _ -> Ok(Nil)
          }
        }
      }
    }
  }
}
src/validation/primitive/bytes.gleam (-134 lines, file deleted)

// Bytes type validator
// Bytes are base64-encoded strings

import errors.{type ValidationError}
import gleam/bit_array
import gleam/json.{type Json}
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import gleam/string
import honk/internal/constraints
import honk/internal/json_helpers
import validation/context.{type ValidationContext}

const allowed_fields = ["type", "minLength", "maxLength", "description"]

/// Validates bytes schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "bytes",
  ))

  // Validate length constraints
  let min_length = json_helpers.get_int(schema, "minLength")
  let max_length = json_helpers.get_int(schema, "maxLength")

  // Check for negative values
  use _ <- result.try(case min_length {
    Some(min) if min < 0 ->
      Error(errors.invalid_schema(
        def_name <> ": bytes schema minLength below zero",
      ))
    _ -> Ok(Nil)
  })

  use _ <- result.try(case max_length {
    Some(max) if max < 0 ->
      Error(errors.invalid_schema(
        def_name <> ": bytes schema maxLength below zero",
      ))
    _ -> Ok(Nil)
  })

  constraints.validate_length_constraint_consistency(
    def_name,
    min_length,
    max_length,
    "bytes",
  )
}

/// Validates bytes data against schema
/// Expects data in ATProto format: {"$bytes": "base64-string"}
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Check data is an object
  case json_helpers.is_object(data) {
    False -> Error(errors.data_validation(def_name <> ": expecting bytes"))
    True -> {
      // Get all keys from the object
      let keys = json_helpers.get_keys(data)

      // Must have exactly one field
      use _ <- result.try(case list.length(keys) {
        1 -> Ok(Nil)
        _ ->
          Error(errors.data_validation(
            def_name <> ": $bytes objects must have a single field",
          ))
      })

      // That field must be "$bytes" with a string value
      case json_helpers.get_string(data, "$bytes") {
        None ->
          Error(errors.data_validation(
            def_name <> ": $bytes field missing or not a string",
          ))
        Some(base64_str) -> {
          // Decode the base64 string (using RawStdEncoding - no padding)
          case bit_array.base64_decode(base64_str) {
            Error(_) ->
              Error(errors.data_validation(
                def_name <> ": decoding $bytes value: invalid base64 encoding",
              ))
            Ok(decoded_bytes) -> {
              // Validate length of decoded bytes
              let byte_length = bit_array.byte_size(decoded_bytes)
              let min_length = json_helpers.get_int(schema, "minLength")
              let max_length = json_helpers.get_int(schema, "maxLength")

              // Check length constraints
              use _ <- result.try(case min_length {
                Some(min) if byte_length < min ->
                  Error(errors.data_validation(
                    def_name
                    <> ": bytes size out of bounds: "
                    <> string.inspect(byte_length),
                  ))
                _ -> Ok(Nil)
              })

              use _ <- result.try(case max_length {
                Some(max) if byte_length > max ->
                  Error(errors.data_validation(
                    def_name
                    <> ": bytes size out of bounds: "
                    <> string.inspect(byte_length),
                  ))
                _ -> Ok(Nil)
              })

              Ok(Nil)
            }
          }
        }
      }
    }
  }
}
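
Note: a minimal sketch of the {"$bytes": ...} contract above. The payload is an illustrative value of my choosing; what the validator bounds is the decoded byte count, not the length of the base64 text.

    import gleam/json
    import validation/context
    import validation/primitive/bytes

    pub fn bytes_sketch() {
      let schema =
        json.object([
          #("type", json.string("bytes")),
          #("maxLength", json.int(16)),
        ])
      // "aGkh" is base64 for the 3-byte string "hi!", so the decoded length
      // (3) is what gets checked against maxLength, not the 4 base64 chars.
      let data = json.object([#("$bytes", json.string("aGkh"))])
      let assert Ok(ctx) = context.builder() |> context.build
      bytes.validate_data(data, schema, ctx)
    }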
src/validation/primitive/cid_link.gleam (-63 lines, file deleted)

// CID Link type validator
// CID links are IPFS content identifiers

import errors.{type ValidationError}
import gleam/json.{type Json}
import gleam/option
import honk/internal/constraints
import honk/internal/json_helpers
import validation/context.{type ValidationContext}
import validation/formats

const allowed_fields = ["type", "description"]

/// Validates cid-link schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "cid-link",
  )
}

/// Validates cid-link data against schema
pub fn validate_data(
  data: Json,
  _schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Check data is an object with $link field
  case json_helpers.is_object(data) {
    False ->
      Error(errors.data_validation(def_name <> ": expected CID link object"))
    True -> {
      // Validate structure: {$link: CID string}
      case json_helpers.get_string(data, "$link") {
        option.Some(cid) -> {
          // Validate CID format
          case formats.is_valid_cid(cid) {
            True -> Ok(Nil)
            False ->
              Error(errors.data_validation(
                def_name <> ": invalid CID format in $link",
              ))
          }
        }
        option.None ->
          Error(errors.data_validation(
            def_name <> ": CID link must have $link field",
          ))
      }
    }
  }
}
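
Note: a minimal sketch of the {"$link": ...} shape this validator expects, borrowing the known-good CID used by the blob tests later in this diff; pre-move module paths.

    import gleam/json
    import validation/context
    import validation/primitive/cid_link

    pub fn cid_link_sketch() {
      let schema = json.object([#("type", json.string("cid-link"))])
      // The value must parse as a CID for formats.is_valid_cid to accept it.
      let data =
        json.object([
          #(
            "$link",
            json.string(
              "bafkreigh2akiscaildcqabsyg3dfr6chu3fgpregiymsck7e7aqa4s52zy",
            ),
          ),
        ])
      let assert Ok(ctx) = context.builder() |> context.build
      cid_link.validate_data(data, schema, ctx)
    }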
src/validation/primitive/integer.gleam (-153 lines, file deleted)

// Integer type validator

import errors.{type ValidationError}
import gleam/dynamic/decode
import gleam/int
import gleam/json.{type Json}
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import honk/internal/constraints
import honk/internal/json_helpers
import validation/context.{type ValidationContext}

const allowed_fields = [
  "type", "minimum", "maximum", "enum", "const", "default", "description",
]

/// Validates integer schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "integer",
  ))

  // Extract min/max constraints
  let minimum = json_helpers.get_int(schema, "minimum")
  let maximum = json_helpers.get_int(schema, "maximum")

  // Validate constraint consistency
  use _ <- result.try(constraints.validate_integer_constraint_consistency(
    def_name,
    minimum,
    maximum,
  ))

  // Validate enum is array of integers if present
  use _ <- result.try(case json_helpers.get_array(schema, "enum") {
    Some(enum_array) -> {
      list.try_fold(enum_array, Nil, fn(_, item) {
        case decode.run(item, decode.int) {
          Ok(_) -> Ok(Nil)
          Error(_) ->
            Error(errors.invalid_schema(
              def_name <> ": enum values must be integers",
            ))
        }
      })
    }
    None -> Ok(Nil)
  })

  // Validate const/default exclusivity
  let has_const = json_helpers.get_int(schema, "const") != None
  let has_default = json_helpers.get_int(schema, "default") != None

  constraints.validate_const_default_exclusivity(
    def_name,
    has_const,
    has_default,
    "integer",
  )
}

/// Validates integer data against schema
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Check data is an integer
  case json_helpers.is_int(data) {
    False ->
      Error(errors.data_validation(
        def_name <> ": expected integer, got other type",
      ))
    True -> {
      // Extract integer value
      let json_str = json.to_string(data)
      case int.parse(json_str) {
        Error(_) ->
          Error(errors.data_validation(
            def_name <> ": failed to parse integer value",
          ))
        Ok(value) -> {
          // Validate const constraint first (most restrictive)
          case json_helpers.get_int(schema, "const") {
            Some(const_val) if const_val != value ->
              Error(errors.data_validation(
                def_name
                <> ": must be constant value "
                <> int.to_string(const_val)
                <> ", found "
                <> int.to_string(value),
              ))
            Some(_) -> Ok(Nil)
            None -> {
              // Validate enum constraint
              use _ <- result.try(case json_helpers.get_array(schema, "enum") {
                Some(enum_array) -> {
                  let enum_ints =
                    list.filter_map(enum_array, fn(item) {
                      decode.run(item, decode.int)
                    })

                  validate_integer_enum(value, enum_ints, def_name)
                }
                None -> Ok(Nil)
              })

              // Validate range constraints
              let minimum = json_helpers.get_int(schema, "minimum")
              let maximum = json_helpers.get_int(schema, "maximum")

              constraints.validate_integer_range(
                def_name,
                value,
                minimum,
                maximum,
              )
            }
          }
        }
      }
    }
  }
}

/// Helper to validate integer enum
fn validate_integer_enum(
  value: Int,
  enum_values: List(Int),
  def_name: String,
) -> Result(Nil, ValidationError) {
  constraints.validate_enum_constraint(
    def_name,
    value,
    enum_values,
    "integer",
    int.to_string,
    fn(a, b) { a == b },
  )
}
src/validation/primitive/null.gleam (-39 lines, file deleted)

// Null type validator

import errors.{type ValidationError}
import gleam/json.{type Json}
import honk/internal/constraints
import honk/internal/json_helpers
import validation/context.{type ValidationContext}

const allowed_fields = ["type", "description"]

/// Validates null schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  constraints.validate_allowed_fields(def_name, keys, allowed_fields, "null")
}

/// Validates null data against schema
pub fn validate_data(
  data: Json,
  _schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Check data is null
  case json_helpers.is_null(data) {
    True -> Ok(Nil)
    False ->
      Error(errors.data_validation(
        def_name <> ": expected null, got other type",
      ))
  }
}
src/validation/primitive/string.gleam (-297 lines, file deleted)

// String type validator

import errors.{type ValidationError}
import gleam/dynamic/decode
import gleam/json.{type Json}
import gleam/list
import gleam/option.{type Option, None, Some}
import gleam/result
import gleam/string
import honk/internal/constraints
import honk/internal/json_helpers
import types
import validation/context.{type ValidationContext}
import validation/formats

const allowed_fields = [
  "type", "format", "minLength", "maxLength", "minGraphemes", "maxGraphemes",
  "enum", "knownValues", "const", "default", "description",
]

/// Validates string schema definition
pub fn validate_schema(
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Validate allowed fields
  let keys = json_helpers.get_keys(schema)
  use _ <- result.try(constraints.validate_allowed_fields(
    def_name,
    keys,
    allowed_fields,
    "string",
  ))

  // Validate format if present
  case json_helpers.get_string(schema, "format") {
    Some(format_str) ->
      case types.string_to_format(format_str) {
        Ok(_format) -> Ok(Nil)
        Error(_) ->
          Error(errors.invalid_schema(
            def_name
            <> ": unknown format '"
            <> format_str
            <> "'. Valid formats: datetime, uri, at-uri, did, handle, at-identifier, nsid, cid, language, tid, record-key",
          ))
      }
    None -> Ok(Nil)
  }
  |> result.try(fn(_) {
    // Extract length constraints
    let min_length = json_helpers.get_int(schema, "minLength")
    let max_length = json_helpers.get_int(schema, "maxLength")
    let min_graphemes = json_helpers.get_int(schema, "minGraphemes")
    let max_graphemes = json_helpers.get_int(schema, "maxGraphemes")

    // Check for negative values
    use _ <- result.try(case min_length {
      Some(n) if n < 0 ->
        Error(errors.invalid_schema(
          def_name <> ": string schema minLength below zero",
        ))
      _ -> Ok(Nil)
    })

    use _ <- result.try(case max_length {
      Some(n) if n < 0 ->
        Error(errors.invalid_schema(
          def_name <> ": string schema maxLength below zero",
        ))
      _ -> Ok(Nil)
    })

    use _ <- result.try(case min_graphemes {
      Some(n) if n < 0 ->
        Error(errors.invalid_schema(
          def_name <> ": string schema minGraphemes below zero",
        ))
      _ -> Ok(Nil)
    })

    use _ <- result.try(case max_graphemes {
      Some(n) if n < 0 ->
        Error(errors.invalid_schema(
          def_name <> ": string schema maxGraphemes below zero",
        ))
      _ -> Ok(Nil)
    })

    // Validate byte length constraints
    use _ <- result.try(constraints.validate_length_constraint_consistency(
      def_name,
      min_length,
      max_length,
      "string",
    ))

    // Validate grapheme constraints
    constraints.validate_length_constraint_consistency(
      def_name,
      min_graphemes,
      max_graphemes,
      "string (graphemes)",
    )
  })
  |> result.try(fn(_) {
    // Validate enum is array of strings if present
    case json_helpers.get_array(schema, "enum") {
      Some(enum_array) -> {
        // Check each item is a string
        list.try_fold(enum_array, Nil, fn(_, item) {
          case decode.run(item, decode.string) {
            Ok(_) -> Ok(Nil)
            Error(_) ->
              Error(errors.invalid_schema(
                def_name <> ": enum values must be strings",
              ))
          }
        })
      }
      None -> Ok(Nil)
    }
  })
  |> result.try(fn(_) {
    // Validate knownValues is array of strings if present
    case json_helpers.get_array(schema, "knownValues") {
      Some(known_array) -> {
        list.try_fold(known_array, Nil, fn(_, item) {
          case decode.run(item, decode.string) {
            Ok(_) -> Ok(Nil)
            Error(_) ->
              Error(errors.invalid_schema(
                def_name <> ": knownValues must be strings",
              ))
          }
        })
      }
      None -> Ok(Nil)
    }
  })
  |> result.try(fn(_) {
    // Validate const/default exclusivity
    let has_const = json_helpers.get_string(schema, "const") != option.None
    let has_default = json_helpers.get_string(schema, "default") != option.None

    constraints.validate_const_default_exclusivity(
      def_name,
      has_const,
      has_default,
      "string",
    )
  })
}

/// Validates string data against schema
pub fn validate_data(
  data: Json,
  schema: Json,
  ctx: ValidationContext,
) -> Result(Nil, ValidationError) {
  let def_name = context.path(ctx)

  // Check data is a string
  case json_helpers.is_string(data) {
    False ->
      Error(errors.data_validation(
        def_name <> ": expected string, got other type",
      ))
    True -> {
      // Extract the string value
      let json_str = json.to_string(data)
      // Remove quotes from JSON string representation
      let value = case
        string.starts_with(json_str, "\"") && string.ends_with(json_str, "\"")
      {
        True -> string.slice(json_str, 1, string.length(json_str) - 2)
        False -> json_str
      }

      // Validate length constraints
      let min_length = json_helpers.get_int(schema, "minLength")
      let max_length = json_helpers.get_int(schema, "maxLength")
      use _ <- result.try(validate_string_length(
        value,
        min_length,
        max_length,
        def_name,
      ))

      // Validate grapheme constraints
      let min_graphemes = json_helpers.get_int(schema, "minGraphemes")
      let max_graphemes = json_helpers.get_int(schema, "maxGraphemes")
      use _ <- result.try(validate_grapheme_length(
        value,
        min_graphemes,
        max_graphemes,
        def_name,
      ))

      // Validate format if specified
      case json_helpers.get_string(schema, "format") {
        Some(format_str) ->
          case types.string_to_format(format_str) {
            Ok(format) -> validate_string_format(value, format, def_name)
            Error(_) -> Ok(Nil)
          }
        None -> Ok(Nil)
      }
      |> result.try(fn(_) {
        // Validate enum if specified
        case json_helpers.get_array(schema, "enum") {
          Some(enum_array) -> {
            // Convert dynamics to strings
            let enum_strings =
              list.filter_map(enum_array, fn(item) {
                decode.run(item, decode.string)
              })

            validate_string_enum(value, enum_strings, def_name)
          }
          None -> Ok(Nil)
        }
      })
    }
  }
}

/// Helper to validate string length (UTF-8 bytes)
fn validate_string_length(
  value: String,
  min_length: Option(Int),
  max_length: Option(Int),
  def_name: String,
) -> Result(Nil, ValidationError) {
  let byte_length = string.byte_size(value)
  constraints.validate_length_constraints(
    def_name,
    byte_length,
    min_length,
    max_length,
    "string",
  )
}

/// Helper to validate grapheme length (visual characters)
fn validate_grapheme_length(
  value: String,
  min_graphemes: Option(Int),
  max_graphemes: Option(Int),
  def_name: String,
) -> Result(Nil, ValidationError) {
  // Count grapheme clusters (visual characters) using Gleam's stdlib
  // This correctly handles Unicode combining characters, emoji, etc.
  let grapheme_count = value |> string.to_graphemes() |> list.length()
  constraints.validate_length_constraints(
    def_name,
    grapheme_count,
    min_graphemes,
    max_graphemes,
    "string (graphemes)",
  )
}

/// Helper to validate string format
fn validate_string_format(
  value: String,
  format: types.StringFormat,
  def_name: String,
) -> Result(Nil, ValidationError) {
  case formats.validate_format(value, format) {
    True -> Ok(Nil)
    False -> {
      let format_name = types.format_to_string(format)
      Error(errors.data_validation(
        def_name <> ": string does not match format '" <> format_name <> "'",
      ))
    }
  }
}

/// Helper to validate string enum
fn validate_string_enum(
  value: String,
  enum_values: List(String),
  def_name: String,
) -> Result(Nil, ValidationError) {
  constraints.validate_enum_constraint(
    def_name,
    value,
    enum_values,
    "string",
    fn(s) { s },
    fn(a, b) { a == b },
  )
}
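
Note: a minimal sketch of the format dispatch above, using the datetime value that the end-to-end tests below already treat as valid; pre-move module paths, aliased to avoid clashing with gleam/string.

    import gleam/json
    import validation/context
    import validation/primitive/string as string_validator

    pub fn string_format_sketch() {
      // format routes through types.string_to_format and
      // formats.validate_format; maxLength counts UTF-8 bytes, while
      // maxGraphemes (unused here) counts grapheme clusters.
      let schema =
        json.object([
          #("type", json.string("string")),
          #("format", json.string("datetime")),
          #("maxLength", json.int(64)),
        ])
      let data = json.string("2024-01-01T12:00:00Z")
      let assert Ok(ctx) = context.builder() |> context.build
      string_validator.validate_data(data, schema, ctx)
    }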
test/array_validator_test.gleam (+2 -2)
+338
-4
test/blob_validator_test.gleam
+338
-4
test/blob_validator_test.gleam
···
1
1
import gleam/json
2
2
import gleeunit
3
3
import gleeunit/should
4
-
import validation/context
5
-
import validation/primitive/blob
4
+
import honk/validation/context
5
+
import honk/validation/primitive/blob
6
6
7
7
pub fn main() {
8
8
gleeunit.main()
···
90
90
91
91
let data =
92
92
json.object([
93
+
#("$type", json.string("blob")),
94
+
#(
95
+
"ref",
96
+
json.object([
97
+
#(
98
+
"$link",
99
+
json.string(
100
+
"bafkreigh2akiscaildcqabsyg3dfr6chu3fgpregiymsck7e7aqa4s52zy",
101
+
),
102
+
),
103
+
]),
104
+
),
93
105
#("mimeType", json.string("image/jpeg")),
94
106
#("size", json.int(50_000)),
95
107
])
···
109
121
110
122
let data =
111
123
json.object([
124
+
#("$type", json.string("blob")),
125
+
#(
126
+
"ref",
127
+
json.object([
128
+
#(
129
+
"$link",
130
+
json.string(
131
+
"bafkreigh2akiscaildcqabsyg3dfr6chu3fgpregiymsck7e7aqa4s52zy",
132
+
),
133
+
),
134
+
]),
135
+
),
112
136
#("mimeType", json.string("video/mp4")),
113
137
#("size", json.int(50_000)),
114
138
])
···
128
152
129
153
let data =
130
154
json.object([
155
+
#("$type", json.string("blob")),
156
+
#(
157
+
"ref",
158
+
json.object([
159
+
#(
160
+
"$link",
161
+
json.string(
162
+
"bafkreigh2akiscaildcqabsyg3dfr6chu3fgpregiymsck7e7aqa4s52zy",
163
+
),
164
+
),
165
+
]),
166
+
),
131
167
#("mimeType", json.string("image/jpeg")),
132
168
#("size", json.int(50_000)),
133
169
])
···
141
177
pub fn missing_mime_type_test() {
142
178
let schema = json.object([#("type", json.string("blob"))])
143
179
144
-
let data = json.object([#("size", json.int(50_000))])
180
+
let data =
181
+
json.object([
182
+
#("$type", json.string("blob")),
183
+
#(
184
+
"ref",
185
+
json.object([
186
+
#(
187
+
"$link",
188
+
json.string(
189
+
"bafkreigh2akiscaildcqabsyg3dfr6chu3fgpregiymsck7e7aqa4s52zy",
190
+
),
191
+
),
192
+
]),
193
+
),
194
+
#("size", json.int(50_000)),
195
+
])
145
196
146
197
let assert Ok(ctx) = context.builder() |> context.build
147
198
let result = blob.validate_data(data, schema, ctx)
···
152
203
pub fn missing_size_test() {
153
204
let schema = json.object([#("type", json.string("blob"))])
154
205
155
-
let data = json.object([#("mimeType", json.string("image/jpeg"))])
206
+
let data =
207
+
json.object([
208
+
#("$type", json.string("blob")),
209
+
#(
210
+
"ref",
211
+
json.object([
212
+
#(
213
+
"$link",
214
+
json.string(
215
+
"bafkreigh2akiscaildcqabsyg3dfr6chu3fgpregiymsck7e7aqa4s52zy",
216
+
),
217
+
),
218
+
]),
219
+
),
220
+
#("mimeType", json.string("image/jpeg")),
221
+
])
222
+
223
+
let assert Ok(ctx) = context.builder() |> context.build
224
+
let result = blob.validate_data(data, schema, ctx)
225
+
result |> should.be_error
226
+
}
227
+
228
+
// ========== FULL BLOB STRUCTURE TESTS ==========
229
+
230
+
// Test valid full blob structure
231
+
pub fn valid_full_blob_structure_test() {
232
+
let schema = json.object([#("type", json.string("blob"))])
233
+
234
+
let data =
235
+
json.object([
236
+
#("$type", json.string("blob")),
237
+
#(
238
+
"ref",
239
+
json.object([
240
+
#(
241
+
"$link",
242
+
json.string(
243
+
"bafkreigh2akiscaildcqabsyg3dfr6chu3fgpregiymsck7e7aqa4s52zy",
244
+
),
245
+
),
246
+
]),
247
+
),
248
+
#("mimeType", json.string("image/jpeg")),
249
+
#("size", json.int(50_000)),
250
+
])
251
+
252
+
let assert Ok(ctx) = context.builder() |> context.build
253
+
let result = blob.validate_data(data, schema, ctx)
254
+
result |> should.be_ok
255
+
}
256
+
257
+
// Test missing $type field
258
+
pub fn missing_type_field_test() {
259
+
+  let schema = json.object([#("type", json.string("blob"))])
+
+  let data =
+    json.object([
+      #(
+        "ref",
+        json.object([
+          #(
+            "$link",
+            json.string(
+              "bafkreigh2akiscaildcqabsyg3dfr6chu3fgpregiymsck7e7aqa4s52zy",
+            ),
+          ),
+        ]),
+      ),
+      #("mimeType", json.string("image/jpeg")),
+      #("size", json.int(50_000)),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let result = blob.validate_data(data, schema, ctx)
+  result |> should.be_error
+}
+
+// Test wrong $type value
+pub fn wrong_type_value_test() {
+  let schema = json.object([#("type", json.string("blob"))])
+
+  let data =
+    json.object([
+      #("$type", json.string("notblob")),
+      #(
+        "ref",
+        json.object([
+          #(
+            "$link",
+            json.string(
+              "bafkreigh2akiscaildcqabsyg3dfr6chu3fgpregiymsck7e7aqa4s52zy",
+            ),
+          ),
+        ]),
+      ),
+      #("mimeType", json.string("image/jpeg")),
+      #("size", json.int(50_000)),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let result = blob.validate_data(data, schema, ctx)
+  result |> should.be_error
+}
+
+// Test missing ref field
+pub fn missing_ref_field_test() {
+  let schema = json.object([#("type", json.string("blob"))])
+
+  let data =
+    json.object([
+      #("$type", json.string("blob")),
+      #("mimeType", json.string("image/jpeg")),
+      #("size", json.int(50_000)),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let result = blob.validate_data(data, schema, ctx)
+  result |> should.be_error
+}
+
+// Test ref without $link
+pub fn ref_missing_link_test() {
+  let schema = json.object([#("type", json.string("blob"))])
+
+  let data =
+    json.object([
+      #("$type", json.string("blob")),
+      #("ref", json.object([#("cid", json.string("bafkrei..."))])),
+      #("mimeType", json.string("image/jpeg")),
+      #("size", json.int(50_000)),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let result = blob.validate_data(data, schema, ctx)
+  result |> should.be_error
+}
+
+// Test ref with invalid CID
+pub fn ref_invalid_cid_test() {
+  let schema = json.object([#("type", json.string("blob"))])
+
+  let data =
+    json.object([
+      #("$type", json.string("blob")),
+      #("ref", json.object([#("$link", json.string("not-a-valid-cid"))])),
+      #("mimeType", json.string("image/jpeg")),
+      #("size", json.int(50_000)),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let result = blob.validate_data(data, schema, ctx)
+  result |> should.be_error
+}
+
+// Test ref with dag-cbor CID (should fail - blobs need raw multicodec)
+pub fn ref_dag_cbor_cid_test() {
+  let schema = json.object([#("type", json.string("blob"))])
+
+  let data =
+    json.object([
+      #("$type", json.string("blob")),
+      #(
+        "ref",
+        json.object([
+          #(
+            "$link",
+            json.string(
+              "bafyreidfayvfuwqa7qlnopdjiqrxzs6blmoeu4rujcjtnci5beludirz2a",
+            ),
+          ),
+        ]),
+      ),
+      #("mimeType", json.string("image/jpeg")),
+      #("size", json.int(50_000)),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let result = blob.validate_data(data, schema, ctx)
+  result |> should.be_error
+}
+
+// Test empty mimeType rejected
+pub fn empty_mime_type_test() {
+  let schema = json.object([#("type", json.string("blob"))])
+
+  let data =
+    json.object([
+      #("$type", json.string("blob")),
+      #(
+        "ref",
+        json.object([
+          #(
+            "$link",
+            json.string(
+              "bafkreigh2akiscaildcqabsyg3dfr6chu3fgpregiymsck7e7aqa4s52zy",
+            ),
+          ),
+        ]),
+      ),
+      #("mimeType", json.string("")),
+      #("size", json.int(50_000)),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let result = blob.validate_data(data, schema, ctx)
+  result |> should.be_error
+}
+
+// Test size zero is allowed (per atproto implementation)
+pub fn size_zero_allowed_test() {
+  let schema = json.object([#("type", json.string("blob"))])
+
+  let data =
+    json.object([
+      #("$type", json.string("blob")),
+      #(
+        "ref",
+        json.object([
+          #(
+            "$link",
+            json.string(
+              "bafkreigh2akiscaildcqabsyg3dfr6chu3fgpregiymsck7e7aqa4s52zy",
+            ),
+          ),
+        ]),
+      ),
+      #("mimeType", json.string("image/jpeg")),
+      #("size", json.int(0)),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let result = blob.validate_data(data, schema, ctx)
+  result |> should.be_ok
+}
+
+// Test negative size rejected
+pub fn negative_size_test() {
+  let schema = json.object([#("type", json.string("blob"))])
+
+  let data =
+    json.object([
+      #("$type", json.string("blob")),
+      #(
+        "ref",
+        json.object([
+          #(
+            "$link",
+            json.string(
+              "bafkreigh2akiscaildcqabsyg3dfr6chu3fgpregiymsck7e7aqa4s52zy",
+            ),
+          ),
+        ]),
+      ),
+      #("mimeType", json.string("image/jpeg")),
+      #("size", json.int(-100)),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let result = blob.validate_data(data, schema, ctx)
+  result |> should.be_error
+}
+
+// Test extra fields are rejected (strict mode per atproto implementation)
+pub fn extra_fields_rejected_test() {
+  let schema = json.object([#("type", json.string("blob"))])
+
+  let data =
+    json.object([
+      #("$type", json.string("blob")),
+      #(
+        "ref",
+        json.object([
+          #(
+            "$link",
+            json.string(
+              "bafkreigh2akiscaildcqabsyg3dfr6chu3fgpregiymsck7e7aqa4s52zy",
+            ),
+          ),
+        ]),
+      ),
+      #("mimeType", json.string("image/jpeg")),
+      #("size", json.int(50_000)),
+      #("extraField", json.string("not allowed")),
+    ])
 
   let assert Ok(ctx) = context.builder() |> context.build
   let result = blob.validate_data(data, schema, ctx)
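
For contrast with the failure cases above, the blob shape these tests pin down mirrors the ATProto blob structure: a `$type` of "blob", a `ref` object whose `$link` is a CIDv1 with the raw multicodec, a non-empty `mimeType`, and a non-negative integer `size`. A happy-path sketch using the same helpers (my illustration, not part of the diff; imports as in the blob tests):

pub fn well_formed_blob_sketch_test() {
  let schema = json.object([#("type", json.string("blob"))])

  // All four required pieces present and valid: $type, raw-CID $link,
  // non-empty mimeType, non-negative size, so validation should succeed.
  let data =
    json.object([
      #("$type", json.string("blob")),
      #(
        "ref",
        json.object([
          #(
            "$link",
            json.string(
              "bafkreigh2akiscaildcqabsyg3dfr6chu3fgpregiymsck7e7aqa4s52zy",
            ),
          ),
        ]),
      ),
      #("mimeType", json.string("image/jpeg")),
      #("size", json.int(50_000)),
    ])

  let assert Ok(ctx) = context.builder() |> context.build
  blob.validate_data(data, schema, ctx) |> should.be_ok
}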
+2 -2 test/bytes_validator_test.gleam

+328 -3 test/end_to_end_test.gleam
···
+import gleam/dict
 import gleam/json
+import gleam/list
+import gleam/string
 import gleeunit
 import gleeunit/should
 import honk
+import honk/errors
+import honk/types.{DateTime, Uri}
 
 pub fn main() {
   gleeunit.main()
···
 
 // Test string format validation helper
 pub fn validate_string_format_test() {
-  honk.validate_string_format("2024-01-01T12:00:00Z", honk.DateTime)
+  honk.validate_string_format("2024-01-01T12:00:00Z", DateTime)
   |> should.be_ok
 
-  honk.validate_string_format("not a datetime", honk.DateTime)
+  honk.validate_string_format("not a datetime", DateTime)
   |> should.be_error
 
-  honk.validate_string_format("https://example.com", honk.Uri)
+  honk.validate_string_format("https://example.com", Uri)
+  |> should.be_ok
+}
+
+// Test lexicon with multiple valid definitions
+pub fn validate_lexicon_multiple_defs_test() {
+  let lexicon =
+    json.object([
+      #("lexicon", json.int(1)),
+      #("id", json.string("com.example.multi")),
+      #(
+        "defs",
+        json.object([
+          #(
+            "main",
+            json.object([
+              #("type", json.string("record")),
+              #("key", json.string("tid")),
+              #(
+                "record",
+                json.object([
+                  #("type", json.string("object")),
+                  #("properties", json.object([])),
+                ]),
+              ),
+            ]),
+          ),
+          #(
+            "stringFormats",
+            json.object([
+              #("type", json.string("object")),
+              #("properties", json.object([])),
+            ]),
+          ),
+          #("additionalType", json.object([#("type", json.string("string"))])),
+        ]),
+      ),
+    ])
+
+  honk.validate([lexicon])
+  |> should.be_ok
+}
+
+// Test lexicon with only non-main definitions
+pub fn validate_lexicon_no_main_def_test() {
+  let lexicon =
+    json.object([
+      #("lexicon", json.int(1)),
+      #("id", json.string("com.example.nomain")),
+      #(
+        "defs",
+        json.object([
+          #("customType", json.object([#("type", json.string("string"))])),
+          #("anotherType", json.object([#("type", json.string("integer"))])),
+        ]),
+      ),
+    ])
+
+  honk.validate([lexicon])
+  |> should.be_ok
+}
+
+// Test lexicon with invalid non-main definition
+pub fn validate_lexicon_invalid_non_main_def_test() {
+  let lexicon =
+    json.object([
+      #("lexicon", json.int(1)),
+      #("id", json.string("com.example.invalid")),
+      #(
+        "defs",
+        json.object([
+          #(
+            "main",
+            json.object([
+              #("type", json.string("record")),
+              #("key", json.string("tid")),
+              #(
+                "record",
+                json.object([
+                  #("type", json.string("object")),
+                  #("properties", json.object([])),
+                ]),
+              ),
+            ]),
+          ),
+          #(
+            "badDef",
+            json.object([
+              #("type", json.string("string")),
+              #("minLength", json.int(10)),
+              #("maxLength", json.int(5)),
+            ]),
+          ),
+        ]),
+      ),
+    ])
+
+  case honk.validate([lexicon]) {
+    Error(error_map) -> {
+      // Should have error for this lexicon
+      case dict.get(error_map, "com.example.invalid") {
+        Ok(errors) -> {
+          // Error message should include the def name
+          list.any(errors, fn(msg) { string.contains(msg, "#badDef") })
+          |> should.be_true
+        }
+        Error(_) -> panic as "Expected error for com.example.invalid"
+      }
+    }
+    Ok(_) -> panic as "Expected validation to fail"
+  }
+}
+
+// Test empty defs object
+pub fn validate_lexicon_empty_defs_test() {
+  let lexicon =
+    json.object([
+      #("lexicon", json.int(1)),
+      #("id", json.string("com.example.empty")),
+      #("defs", json.object([])),
+    ])
+
+  honk.validate([lexicon])
   |> should.be_ok
 }
+
+// Test missing required field error message with full defs.main path
+pub fn validate_record_missing_required_field_message_test() {
+  let lexicon =
+    json.object([
+      #("lexicon", json.int(1)),
+      #("id", json.string("com.example.post")),
+      #(
+        "defs",
+        json.object([
+          #(
+            "main",
+            json.object([
+              #("type", json.string("record")),
+              #("key", json.string("tid")),
+              #(
+                "record",
+                json.object([
+                  #("type", json.string("object")),
+                  #("required", json.array([json.string("title")], fn(x) { x })),
+                  #(
+                    "properties",
+                    json.object([
+                      #(
+                        "title",
+                        json.object([#("type", json.string("string"))]),
+                      ),
+                    ]),
+                  ),
+                ]),
+              ),
+            ]),
+          ),
+        ]),
+      ),
+    ])
+
+  let data = json.object([#("description", json.string("No title"))])
+
+  let assert Error(error) =
+    honk.validate_record([lexicon], "com.example.post", data)
+
+  let error_message = errors.to_string(error)
+  error_message
+  |> should.equal(
+    "Data validation failed: defs.main: required field 'title' is missing",
+  )
+}
+
+// Test missing required field in nested object with full path
+pub fn validate_record_nested_missing_required_field_message_test() {
+  let lexicon =
+    json.object([
+      #("lexicon", json.int(1)),
+      #("id", json.string("com.example.post")),
+      #(
+        "defs",
+        json.object([
+          #(
+            "main",
+            json.object([
+              #("type", json.string("record")),
+              #("key", json.string("tid")),
+              #(
+                "record",
+                json.object([
+                  #("type", json.string("object")),
+                  #(
+                    "properties",
+                    json.object([
+                      #(
+                        "title",
+                        json.object([#("type", json.string("string"))]),
+                      ),
+                      #(
+                        "metadata",
+                        json.object([
+                          #("type", json.string("object")),
+                          #(
+                            "required",
+                            json.array([json.string("author")], fn(x) { x }),
+                          ),
+                          #(
+                            "properties",
+                            json.object([
+                              #(
+                                "author",
+                                json.object([#("type", json.string("string"))]),
+                              ),
+                            ]),
+                          ),
+                        ]),
+                      ),
+                    ]),
+                  ),
+                ]),
+              ),
+            ]),
+          ),
+        ]),
+      ),
+    ])
+
+  let data =
+    json.object([
+      #("title", json.string("My Post")),
+      #("metadata", json.object([#("tags", json.string("tech"))])),
+    ])
+
+  let assert Error(error) =
+    honk.validate_record([lexicon], "com.example.post", data)
+
+  let error_message = errors.to_string(error)
+  error_message
+  |> should.equal(
+    "Data validation failed: defs.main.metadata: required field 'author' is missing",
+  )
+}
+
+// Test schema validation error for non-main definition includes correct path
+pub fn validate_schema_non_main_definition_error_test() {
+  let lexicon =
+    json.object([
+      #("lexicon", json.int(1)),
+      #("id", json.string("com.example.test")),
+      #(
+        "defs",
+        json.object([
+          #(
+            "objectDef",
+            json.object([
+              #("type", json.string("object")),
+              #(
+                "properties",
+                json.object([
+                  #(
+                    "fieldA",
+                    json.object([
+                      #("type", json.string("string")),
+                      // Invalid: maxLength must be an integer, not a string
+                      #("maxLength", json.string("300")),
+                    ]),
+                  ),
+                ]),
+              ),
+            ]),
+          ),
+          #(
+            "recordDef",
+            json.object([
+              #("type", json.string("record")),
+              #("key", json.string("tid")),
+              #(
+                "record",
+                json.object([
+                  #("type", json.string("object")),
+                  #(
+                    "properties",
+                    json.object([
+                      #(
+                        "fieldB",
+                        json.object([
+                          #("type", json.string("ref")),
+                          // Invalid: missing required "ref" field for ref type
+                        ]),
+                      ),
+                    ]),
+                  ),
+                ]),
+              ),
+            ]),
+          ),
+        ]),
+      ),
+    ])
+
+  let result = honk.validate([lexicon])
+
+  // Should have errors
+  result |> should.be_error
+
+  case result {
+    Error(error_map) -> {
+      // Get errors for this lexicon
+      case dict.get(error_map, "com.example.test") {
+        Ok(error_list) -> {
+          // Should have exactly one error from the recordDef (ref missing 'ref' field)
+          error_list
+          |> should.equal([
+            "com.example.test#recordDef: .record.properties.fieldB: ref missing required 'ref' field",
+          ])
+        }
+        Error(_) -> should.fail()
+      }
+    }
+    Ok(_) -> should.fail()
+  }
+}
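
The case expressions in these tests imply that honk.validate reports failures as a dict keyed by lexicon id, each entry holding a list of messages. A small consumer sketch under that assumption (the report helper is hypothetical, not part of the diff):

import gleam/dict
import gleam/io
import gleam/json
import gleam/list
import gleam/string
import honk

// Hypothetical helper: print every validation failure, grouped by lexicon id.
// Assumes honk.validate returns Error(Dict(String, List(String))) on failure,
// as the pattern matches in the tests above suggest.
pub fn report(lexicons: List(json.Json)) -> Nil {
  case honk.validate(lexicons) {
    Ok(_) -> io.println("all lexicons valid")
    Error(error_map) ->
      dict.to_list(error_map)
      |> list.each(fn(entry) {
        let #(id, messages) = entry
        io.println(id <> ":\n  " <> string.join(messages, "\n  "))
      })
  }
}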
+20 test/fixtures/com.atproto.repo.strongRef.json
···
+{
+  "lexicon": 1,
+  "id": "com.atproto.repo.strongRef",
+  "defs": {
+    "main": {
+      "type": "object",
+      "required": ["uri", "cid"],
+      "properties": {
+        "uri": {
+          "type": "string",
+          "format": "at-uri"
+        },
+        "cid": {
+          "type": "string",
+          "format": "cid"
+        }
+      }
+    }
+  }
+}
+46 test/fixtures/com.example.post.json
···
+{
+  "lexicon": 1,
+  "id": "com.example.post",
+  "defs": {
+    "main": {
+      "type": "record",
+      "key": "tid",
+      "record": {
+        "type": "object",
+        "required": ["text", "createdAt"],
+        "properties": {
+          "text": {
+            "type": "string",
+            "maxLength": 300
+          },
+          "author": {
+            "type": "ref",
+            "ref": "com.example.user#profile"
+          },
+          "createdAt": {
+            "type": "string",
+            "format": "datetime"
+          },
+          "reply": {
+            "type": "ref",
+            "ref": "#replyRef"
+          }
+        }
+      }
+    },
+    "replyRef": {
+      "type": "object",
+      "required": ["parent", "root"],
+      "properties": {
+        "parent": {
+          "type": "ref",
+          "ref": "com.atproto.repo.strongRef"
+        },
+        "root": {
+          "type": "ref",
+          "ref": "com.atproto.repo.strongRef"
+        }
+      }
+    }
+  }
+}
+41 test/fixtures/com.example.user.json
···
+{
+  "lexicon": 1,
+  "id": "com.example.user",
+  "defs": {
+    "main": {
+      "type": "record",
+      "key": "tid",
+      "record": {
+        "type": "object",
+        "required": ["handle", "displayName"],
+        "properties": {
+          "handle": {
+            "type": "string",
+            "format": "handle"
+          },
+          "displayName": {
+            "type": "string",
+            "maxLength": 64
+          },
+          "bio": {
+            "type": "string",
+            "maxLength": 256
+          }
+        }
+      }
+    },
+    "profile": {
+      "type": "object",
+      "required": ["handle"],
+      "properties": {
+        "handle": {
+          "type": "string",
+          "format": "handle"
+        },
+        "displayName": {
+          "type": "string"
+        }
+      }
+    }
+  }
+}
+19 test/fixtures/invalid-ref.json
···
+{
+  "lexicon": 1,
+  "id": "com.example.invalidref",
+  "defs": {
+    "main": {
+      "type": "record",
+      "key": "tid",
+      "record": {
+        "type": "object",
+        "properties": {
+          "brokenRef": {
+            "type": "ref",
+            "ref": "com.example.nonexistent#thing"
+          }
+        }
+      }
+    }
+  }
+}
+19 test/fixtures/invalid.json
···
+{
+  "lexicon": 1,
+  "id": "com.example.invalid",
+  "defs": {
+    "main": {
+      "type": "record",
+      "key": "tid",
+      "record": {
+        "type": "object",
+        "properties": {}
+      }
+    },
+    "badDef": {
+      "type": "string",
+      "minLength": 10,
+      "maxLength": 5
+    }
+  }
+}
+26 test/fixtures/valid.json
···
+{
+  "lexicon": 1,
+  "id": "xyz.statusphere.status",
+  "defs": {
+    "main": {
+      "type": "record",
+      "key": "tid",
+      "record": {
+        "type": "object",
+        "required": ["status", "createdAt"],
+        "properties": {
+          "status": {
+            "type": "string",
+            "minLength": 1,
+            "maxGraphemes": 1,
+            "maxLength": 32
+          },
+          "createdAt": {
+            "type": "string",
+            "format": "datetime"
+          }
+        }
+      }
+    }
+  }
+}
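
The valid.json fixture doubles as a compact usage demo: a record with a single-grapheme status and a datetime. Rebuilding that lexicon inline (so the sketch stays self-contained; file loading is omitted and the emoji choice is arbitrary) and validating a conforming record with honk.validate_record:

// Illustrative sketch only; imports as in the end-to-end tests
// (gleam/json, gleeunit/should, honk).
pub fn statusphere_record_valid_sketch_test() {
  let status_lexicon =
    json.object([
      #("lexicon", json.int(1)),
      #("id", json.string("xyz.statusphere.status")),
      #(
        "defs",
        json.object([
          #(
            "main",
            json.object([
              #("type", json.string("record")),
              #("key", json.string("tid")),
              #(
                "record",
                json.object([
                  #("type", json.string("object")),
                  #(
                    "required",
                    json.array(
                      [json.string("status"), json.string("createdAt")],
                      fn(x) { x },
                    ),
                  ),
                  #(
                    "properties",
                    json.object([
                      #(
                        "status",
                        json.object([
                          #("type", json.string("string")),
                          #("minLength", json.int(1)),
                          #("maxGraphemes", json.int(1)),
                          #("maxLength", json.int(32)),
                        ]),
                      ),
                      #(
                        "createdAt",
                        json.object([
                          #("type", json.string("string")),
                          #("format", json.string("datetime")),
                        ]),
                      ),
                    ]),
                  ),
                ]),
              ),
            ]),
          ),
        ]),
      ),
    ])

  // One grapheme for status, RFC 3339 datetime for createdAt.
  let data =
    json.object([
      #("status", json.string("👍")),
      #("createdAt", json.string("2024-01-01T12:00:00Z")),
    ])

  honk.validate_record([status_lexicon], "xyz.statusphere.status", data)
  |> should.be_ok
}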
+31 -1 test/format_validator_test.gleam
···
 import gleeunit
 import gleeunit/should
-import validation/formats
+import honk/validation/formats
 
 pub fn main() {
   gleeunit.main()
···
 
 pub fn cid_empty_test() {
   formats.is_valid_cid("") |> should.be_false
+}
+
+// ========== RAW CID TESTS ==========
+
+// Test valid raw CID (bafkrei prefix = CIDv1 + raw multicodec 0x55)
+pub fn valid_raw_cid_test() {
+  formats.is_valid_raw_cid(
+    "bafkreigh2akiscaildcqabsyg3dfr6chu3fgpregiymsck7e7aqa4s52zy",
+  )
+  |> should.be_true
+}
+
+// Test dag-cbor CID rejected (bafyrei prefix = CIDv1 + dag-cbor multicodec 0x71)
+pub fn invalid_raw_cid_dag_cbor_test() {
+  formats.is_valid_raw_cid(
+    "bafyreidfayvfuwqa7qlnopdjiqrxzs6blmoeu4rujcjtnci5beludirz2a",
+  )
+  |> should.be_false
+}
+
+// Test CIDv0 rejected for raw CID
+pub fn invalid_raw_cid_v0_test() {
+  formats.is_valid_raw_cid("QmbWqxBEKC3P8tqsKc98xmWNzrzDtRLMiMPL8wBuTGsMnR")
+  |> should.be_false
+}
+
+// Test invalid CID rejected
+pub fn invalid_raw_cid_garbage_test() {
+  formats.is_valid_raw_cid("not-a-cid")
+  |> should.be_false
 }
 
 // ========== LANGUAGE TESTS ==========
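
The comments above explain the prefixes: in a base32 CIDv1 string the leading characters encode the version and multicodec, so raw (0x55) produces "bafkrei..." and dag-cbor (0x71) produces "bafyrei...". A prefix-only approximation of that distinction (illustrative; the real is_valid_raw_cid presumably decodes the CID rather than matching strings):

import gleam/string

// Illustrative heuristic only: classify CIDv1 strings by their well-known
// base32 prefixes. A proper check decodes the multibase and multicodec bytes.
pub fn looks_like_raw_cid(cid: String) -> Bool {
  string.starts_with(cid, "bafkrei")
}

pub fn looks_like_dag_cbor_cid(cid: String) -> Bool {
  string.starts_with(cid, "bafyrei")
}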
+2 -2 test/integer_validator_test.gleam
+90 -2 test/integration_test.gleam
···
 import gleam/json
 import gleeunit
 import gleeunit/should
-import validation/context
-import validation/primary/record
+import honk/errors
+import honk/validation/context
+import honk/validation/primary/record
 
 pub fn main() {
   gleeunit.main()
···
   let result = record.validate_schema(schema, ctx)
   result |> should.be_ok
 }
+
+// Test missing required field error message at record root level
+pub fn record_missing_required_field_message_test() {
+  let schema =
+    json.object([
+      #("type", json.string("record")),
+      #("key", json.string("tid")),
+      #(
+        "record",
+        json.object([
+          #("type", json.string("object")),
+          #("required", json.array([json.string("title")], fn(x) { x })),
+          #(
+            "properties",
+            json.object([
+              #("title", json.object([#("type", json.string("string"))])),
+            ]),
+          ),
+        ]),
+      ),
+    ])
+
+  let data = json.object([#("description", json.string("No title"))])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let assert Error(error) = record.validate_data(data, schema, ctx)
+
+  let error_message = errors.to_string(error)
+  error_message
+  |> should.equal("Data validation failed: required field 'title' is missing")
+}
+
+// Test missing required field error message in nested object
+pub fn record_nested_missing_required_field_message_test() {
+  let schema =
+    json.object([
+      #("type", json.string("record")),
+      #("key", json.string("tid")),
+      #(
+        "record",
+        json.object([
+          #("type", json.string("object")),
+          #(
+            "properties",
+            json.object([
+              #("title", json.object([#("type", json.string("string"))])),
+              #(
+                "metadata",
+                json.object([
+                  #("type", json.string("object")),
+                  #(
+                    "required",
+                    json.array([json.string("author")], fn(x) { x }),
+                  ),
+                  #(
+                    "properties",
+                    json.object([
+                      #(
+                        "author",
+                        json.object([#("type", json.string("string"))]),
+                      ),
+                      #("tags", json.object([#("type", json.string("string"))])),
+                    ]),
+                  ),
+                ]),
+              ),
+            ]),
+          ),
+        ]),
+      ),
+    ])
+
+  let data =
+    json.object([
+      #("title", json.string("My Post")),
+      #("metadata", json.object([#("tags", json.string("tech"))])),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let assert Error(error) = record.validate_data(data, schema, ctx)
+
+  let error_message = errors.to_string(error)
+  error_message
+  |> should.equal(
+    "Data validation failed: metadata: required field 'author' is missing",
+  )
+}
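
Both messages follow one convention: "Data validation failed: ", then an optional dotted path, then the field error. A caller that wants just the failing path could split on the separator (hypothetical helper; illustrative string handling only, and it assumes the message shapes shown in these two tests):

import gleam/string

// Hypothetical helper: pull the path segment out of a honk error message.
// "Data validation failed: metadata: required field 'author' is missing"
// yields Ok("metadata"); a root-level message with no path yields Error(Nil).
pub fn error_path(message: String) -> Result(String, Nil) {
  case string.split(message, ": ") {
    ["Data validation failed", path, _, ..] -> Ok(path)
    _ -> Error(Nil)
  }
}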
+119 -2 test/object_validator_test.gleam
···
 import gleam/json
 import gleeunit
 import gleeunit/should
-import validation/context
-import validation/field
+import honk/errors
+import honk/validation/context
+import honk/validation/field
 
 pub fn main() {
   gleeunit.main()
···
   let result = field.validate_object_data(data, schema, ctx)
   result |> should.be_error
 }
+
+// Test missing required field error message at root level (no path)
+pub fn missing_required_field_message_root_test() {
+  let schema =
+    json.object([
+      #("type", json.string("object")),
+      #(
+        "properties",
+        json.object([
+          #("title", json.object([#("type", json.string("string"))])),
+        ]),
+      ),
+      #("required", json.array([json.string("title")], fn(x) { x })),
+    ])
+
+  let data = json.object([#("other", json.string("value"))])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let assert Error(error) = field.validate_object_data(data, schema, ctx)
+
+  let error_message = errors.to_string(error)
+  error_message
+  |> should.equal("Data validation failed: required field 'title' is missing")
+}
+
+// Test nullable field accepts null value
+pub fn nullable_field_accepts_null_test() {
+  let schema =
+    json.object([
+      #("type", json.string("object")),
+      #(
+        "properties",
+        json.object([
+          #("name", json.object([#("type", json.string("string"))])),
+          #("duration", json.object([#("type", json.string("integer"))])),
+        ]),
+      ),
+      #("nullable", json.array([json.string("duration")], fn(x) { x })),
+    ])
+
+  let data =
+    json.object([
+      #("name", json.string("test")),
+      #("duration", json.null()),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let result = field.validate_object_data(data, schema, ctx)
+  result |> should.be_ok
+}
+
+// Test non-nullable field rejects null value
+pub fn non_nullable_field_rejects_null_test() {
+  let schema =
+    json.object([
+      #("type", json.string("object")),
+      #(
+        "properties",
+        json.object([
+          #("name", json.object([#("type", json.string("string"))])),
+          #("count", json.object([#("type", json.string("integer"))])),
+        ]),
+      ),
+      // No nullable array - count cannot be null
+    ])
+
+  let data =
+    json.object([
+      #("name", json.string("test")),
+      #("count", json.null()),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let result = field.validate_object_data(data, schema, ctx)
+  result |> should.be_error
+}
+
+// Test nullable field must exist in properties (schema validation)
+pub fn nullable_field_not_in_properties_fails_test() {
+  let schema =
+    json.object([
+      #("type", json.string("object")),
+      #(
+        "properties",
+        json.object([
+          #("name", json.object([#("type", json.string("string"))])),
+        ]),
+      ),
+      // "nonexistent" is not in properties
+      #("nullable", json.array([json.string("nonexistent")], fn(x) { x })),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let result = field.validate_object_schema(schema, ctx)
+  result |> should.be_error
+}
+
+// Test valid nullable schema passes validation
+pub fn valid_nullable_schema_test() {
+  let schema =
+    json.object([
+      #("type", json.string("object")),
+      #(
+        "properties",
+        json.object([
+          #("name", json.object([#("type", json.string("string"))])),
+          #("duration", json.object([#("type", json.string("integer"))])),
+        ]),
+      ),
+      #("nullable", json.array([json.string("duration")], fn(x) { x })),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let result = field.validate_object_schema(schema, ctx)
+  result |> should.be_ok
+}
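
The nullable cases reduce to two rules: data may carry null only for keys listed in the schema's nullable array, and every nullable key must name a declared property. Restated as standalone predicates (hypothetical helpers, not the library's internals):

import gleam/list

// Data rule: a null value is acceptable iff its key is declared nullable.
pub fn null_allowed(key: String, nullable: List(String)) -> Bool {
  list.contains(nullable, key)
}

// Schema rule: every nullable key must also appear in properties.
pub fn nullable_keys_declared(
  nullable: List(String),
  property_keys: List(String),
) -> Bool {
  list.all(nullable, fn(key) { list.contains(property_keys, key) })
}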
+273 -2 test/params_validator_test.gleam
···
 import gleam/json
 import gleeunit
 import gleeunit/should
-import validation/context
-import validation/primary/params
+import honk/validation/context
+import honk/validation/primary/params
 
 pub fn main() {
   gleeunit.main()
···
     Error(_) -> should.fail()
   }
 }
+
+// ==================== DATA VALIDATION TESTS ====================
+
+// Test valid data with required parameters
+pub fn valid_data_with_required_params_test() {
+  let schema =
+    json.object([
+      #("type", json.string("params")),
+      #(
+        "properties",
+        json.object([
+          #("repo", json.object([#("type", json.string("string"))])),
+          #("limit", json.object([#("type", json.string("integer"))])),
+        ]),
+      ),
+      #(
+        "required",
+        json.array([json.string("repo"), json.string("limit")], fn(x) { x }),
+      ),
+    ])
+
+  let data =
+    json.object([
+      #("repo", json.string("alice.bsky.social")),
+      #("limit", json.int(50)),
+    ])
+
+  let assert Ok(c) = context.builder() |> context.build()
+  params.validate_data(data, schema, c) |> should.be_ok
+}
+
+// Test valid data with optional parameters
+pub fn valid_data_with_optional_params_test() {
+  let schema =
+    json.object([
+      #("type", json.string("params")),
+      #(
+        "properties",
+        json.object([
+          #("repo", json.object([#("type", json.string("string"))])),
+          #("cursor", json.object([#("type", json.string("string"))])),
+        ]),
+      ),
+      #("required", json.array([json.string("repo")], fn(x) { x })),
+    ])
+
+  // Data has required param but not optional cursor
+  let data = json.object([#("repo", json.string("alice.bsky.social"))])
+
+  let assert Ok(c) = context.builder() |> context.build()
+  params.validate_data(data, schema, c) |> should.be_ok
+}
+
+// Test valid data with all parameter types
+pub fn valid_data_all_types_test() {
+  let schema =
+    json.object([
+      #("type", json.string("params")),
+      #(
+        "properties",
+        json.object([
+          #("name", json.object([#("type", json.string("string"))])),
+          #("count", json.object([#("type", json.string("integer"))])),
+          #("enabled", json.object([#("type", json.string("boolean"))])),
+          #("metadata", json.object([#("type", json.string("unknown"))])),
+        ]),
+      ),
+    ])
+
+  let data =
+    json.object([
+      #("name", json.string("test")),
+      #("count", json.int(42)),
+      #("enabled", json.bool(True)),
+      #("metadata", json.object([#("key", json.string("value"))])),
+    ])
+
+  let assert Ok(c) = context.builder() |> context.build()
+  params.validate_data(data, schema, c) |> should.be_ok
+}
+
+// Test valid data with array parameter
+pub fn valid_data_with_array_test() {
+  let schema =
+    json.object([
+      #("type", json.string("params")),
+      #(
+        "properties",
+        json.object([
+          #(
+            "tags",
+            json.object([
+              #("type", json.string("array")),
+              #("items", json.object([#("type", json.string("string"))])),
+            ]),
+          ),
+        ]),
+      ),
+    ])
+
+  let data =
+    json.object([
+      #(
+        "tags",
+        json.array([json.string("foo"), json.string("bar")], fn(x) { x }),
+      ),
+    ])
+
+  let assert Ok(c) = context.builder() |> context.build()
+  params.validate_data(data, schema, c) |> should.be_ok
+}
+
+// Test invalid data: missing required parameter
+pub fn invalid_data_missing_required_test() {
+  let schema =
+    json.object([
+      #("type", json.string("params")),
+      #(
+        "properties",
+        json.object([
+          #("repo", json.object([#("type", json.string("string"))])),
+          #("limit", json.object([#("type", json.string("integer"))])),
+        ]),
+      ),
+      #("required", json.array([json.string("repo")], fn(x) { x })),
+    ])
+
+  // Data is missing required "repo" parameter
+  let data = json.object([#("limit", json.int(50))])
+
+  let assert Ok(c) = context.builder() |> context.build()
+  params.validate_data(data, schema, c) |> should.be_error
+}
+
+// Test invalid data: wrong type for parameter
+pub fn invalid_data_wrong_type_test() {
+  let schema =
+    json.object([
+      #("type", json.string("params")),
+      #(
+        "properties",
+        json.object([
+          #("limit", json.object([#("type", json.string("integer"))])),
+        ]),
+      ),
+    ])
+
+  // limit should be integer but is string
+  let data = json.object([#("limit", json.string("not a number"))])
+
+  let assert Ok(c) = context.builder() |> context.build()
+  params.validate_data(data, schema, c) |> should.be_error
+}
+
+// Test invalid data: string exceeds maxLength
+pub fn invalid_data_string_too_long_test() {
+  let schema =
+    json.object([
+      #("type", json.string("params")),
+      #(
+        "properties",
+        json.object([
+          #(
+            "name",
+            json.object([
+              #("type", json.string("string")),
+              #("maxLength", json.int(5)),
+            ]),
+          ),
+        ]),
+      ),
+    ])
+
+  // name is longer than maxLength of 5
+  let data = json.object([#("name", json.string("toolongname"))])
+
+  let assert Ok(c) = context.builder() |> context.build()
+  params.validate_data(data, schema, c) |> should.be_error
+}
+
+// Test invalid data: integer below minimum
+pub fn invalid_data_integer_below_minimum_test() {
+  let schema =
+    json.object([
+      #("type", json.string("params")),
+      #(
+        "properties",
+        json.object([
+          #(
+            "count",
+            json.object([
+              #("type", json.string("integer")),
+              #("minimum", json.int(1)),
+            ]),
+          ),
+        ]),
+      ),
+    ])
+
+  // count is below minimum of 1
+  let data = json.object([#("count", json.int(0))])
+
+  let assert Ok(c) = context.builder() |> context.build()
+  params.validate_data(data, schema, c) |> should.be_error
+}
+
+// Test invalid data: array with wrong item type
+pub fn invalid_data_array_wrong_item_type_test() {
+  let schema =
+    json.object([
+      #("type", json.string("params")),
+      #(
+        "properties",
+        json.object([
+          #(
+            "ids",
+            json.object([
+              #("type", json.string("array")),
+              #("items", json.object([#("type", json.string("integer"))])),
+            ]),
+          ),
+        ]),
+      ),
+    ])
+
+  // Array contains strings instead of integers
+  let data =
+    json.object([
+      #(
+        "ids",
+        json.array([json.string("one"), json.string("two")], fn(x) { x }),
+      ),
+    ])
+
+  let assert Ok(c) = context.builder() |> context.build()
+  params.validate_data(data, schema, c) |> should.be_error
+}
+
+// Test valid data with no properties defined (empty schema)
+pub fn valid_data_empty_schema_test() {
+  let schema = json.object([#("type", json.string("params"))])
+
+  let data = json.object([])
+
+  let assert Ok(c) = context.builder() |> context.build()
+  params.validate_data(data, schema, c) |> should.be_ok
+}
+
+// Test valid data allows unknown parameters not in schema
+pub fn valid_data_unknown_parameters_allowed_test() {
+  let schema =
+    json.object([
+      #("type", json.string("params")),
+      #(
+        "properties",
+        json.object([
+          #("repo", json.object([#("type", json.string("string"))])),
+        ]),
+      ),
+    ])
+
+  // Data has "extra" parameter not in schema
+  let data =
+    json.object([
+      #("repo", json.string("alice.bsky.social")),
+      #("extra", json.string("allowed")),
+    ])
+
+  let assert Ok(c) = context.builder() |> context.build()
+  params.validate_data(data, schema, c) |> should.be_ok
+}
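
Together these cases fix the params contract: string, integer, boolean, unknown, and arrays of them; required keys enforced; unknown keys tolerated, as query strings routinely carry extras. A sketch of validating already-parsed query values through the same entry point (hypothetical wrapper; imports as in the params tests):

// Hypothetical wrapper: build the params JSON from parsed query values and
// reuse the validator exercised above. Returns True when the data conforms.
pub fn check_query(repo: String, limit: Int) -> Bool {
  let schema =
    json.object([
      #("type", json.string("params")),
      #(
        "properties",
        json.object([
          #("repo", json.object([#("type", json.string("string"))])),
          #("limit", json.object([#("type", json.string("integer"))])),
        ]),
      ),
      #("required", json.array([json.string("repo")], fn(x) { x })),
    ])

  let data =
    json.object([#("repo", json.string(repo)), #("limit", json.int(limit))])

  let assert Ok(c) = context.builder() |> context.build()
  case params.validate_data(data, schema, c) {
    Ok(_) -> True
    Error(_) -> False
  }
}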
+2 -2 test/procedure_data_validation_test.gleam

+2 -2 test/query_data_validation_test.gleam
+3 -3 test/reference_validator_test.gleam
···
 import gleam/json
 import gleeunit
 import gleeunit/should
-import validation/context
-import validation/field
-import validation/field/reference
+import honk/validation/context
+import honk/validation/field
+import honk/validation/field/reference
 
 pub fn main() {
   gleeunit.main()
+2 -2 test/string_validator_test.gleam

+2 -2 test/subscription_data_validation_test.gleam

+2 -2 test/token_validator_test.gleam
+634 -40 test/union_validator_test.gleam
···
 import gleam/json
 import gleeunit
 import gleeunit/should
-import validation/context
-import validation/field/union
+import honk/validation/context
+import honk/validation/field
+import honk/validation/field/union
 
 pub fn main() {
   gleeunit.main()
 }
 
-// Test valid union schema with refs
-pub fn valid_union_schema_test() {
-  let schema =
-    json.object([
-      #("type", json.string("union")),
-      #(
-        "refs",
-        json.array([json.string("#post"), json.string("#repost")], fn(x) { x }),
-      ),
-    ])
-
-  let assert Ok(ctx) = context.builder() |> context.build
-  let result = union.validate_schema(schema, ctx)
-  result |> should.be_ok
-}
-
-// Test union schema with closed flag
-pub fn closed_union_schema_test() {
-  let schema =
-    json.object([
-      #("type", json.string("union")),
-      #("refs", json.array([json.string("#post")], fn(x) { x })),
-      #("closed", json.bool(True)),
-    ])
-
-  let assert Ok(ctx) = context.builder() |> context.build
-  let result = union.validate_schema(schema, ctx)
-  result |> should.be_ok
-}
-
 // Test open union with empty refs
 pub fn open_union_empty_refs_test() {
   let schema =
···
   result |> should.be_error
 }
 
-// Test valid union data with $type
+// Test valid union data with $type matching global ref
 pub fn valid_union_data_test() {
   let schema =
     json.object([
       #("type", json.string("union")),
-      #("refs", json.array([json.string("app.bsky.feed.post")], fn(x) { x })),
+      #("refs", json.array([json.string("com.example.post")], fn(x) { x })),
     ])
 
   let data =
     json.object([
-      #("$type", json.string("app.bsky.feed.post")),
+      #("$type", json.string("com.example.post")),
       #("text", json.string("Hello world")),
     ])
···
   let schema =
     json.object([
       #("type", json.string("union")),
-      #("refs", json.array([json.string("#post")], fn(x) { x })),
+      #("refs", json.array([json.string("com.example.post")], fn(x) { x })),
     ])
 
   let data = json.object([#("text", json.string("Hello"))])
···
   let schema =
     json.object([
       #("type", json.string("union")),
-      #("refs", json.array([json.string("#post")], fn(x) { x })),
+      #("refs", json.array([json.string("com.example.post")], fn(x) { x })),
     ])
 
   let data = json.string("not an object")
···
   result |> should.be_error
 }
 
-// Test union data with $type not in refs
+// Test closed union rejects $type not in refs
 pub fn union_data_type_not_in_refs_test() {
   let schema =
     json.object([
       #("type", json.string("union")),
-      #("refs", json.array([json.string("app.bsky.feed.post")], fn(x) { x })),
+      #("refs", json.array([json.string("com.example.typeA")], fn(x) { x })),
       #("closed", json.bool(True)),
     ])
 
   let data =
     json.object([
-      #("$type", json.string("app.bsky.feed.repost")),
+      #("$type", json.string("com.example.typeB")),
+      #("data", json.string("some data")),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let result = union.validate_data(data, schema, ctx)
+  result |> should.be_error
+}
+
+// Test union with invalid ref (non-string in array)
+pub fn union_with_invalid_ref_type_test() {
+  let schema =
+    json.object([
+      #("type", json.string("union")),
+      #(
+        "refs",
+        json.array([json.int(123), json.string("com.example.post")], fn(x) { x }),
+      ),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let result = union.validate_schema(schema, ctx)
+  result |> should.be_error
+}
+
+// Test local ref matching in data validation
+pub fn union_data_local_ref_matching_test() {
+  let schema =
+    json.object([
+      #("type", json.string("union")),
+      #(
+        "refs",
+        json.array([json.string("#post"), json.string("#reply")], fn(x) { x }),
+      ),
+    ])
+
+  // Data with $type matching local ref pattern
+  let data =
+    json.object([
+      #("$type", json.string("post")),
       #("text", json.string("Hello")),
     ])
 
   let assert Ok(ctx) = context.builder() |> context.build
   let result = union.validate_data(data, schema, ctx)
+  // Should pass because local ref #post matches bare name "post"
+  result |> should.be_ok
+}
+
+// Test local ref with NSID in data
+pub fn union_data_local_ref_with_nsid_test() {
+  let schema =
+    json.object([
+      #("type", json.string("union")),
+      #("refs", json.array([json.string("#view")], fn(x) { x })),
+    ])
+
+  // Data with $type as full NSID#fragment
+  let data =
+    json.object([
+      #("$type", json.string("com.example.feed#view")),
+      #("uri", json.string("at://did:plc:abc/com.example.feed/123")),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let result = union.validate_data(data, schema, ctx)
+  // Should pass because local ref #view matches NSID with #view fragment
+  result |> should.be_ok
+}
+
+// Test multiple local refs in schema
+pub fn union_with_multiple_local_refs_test() {
+  let schema =
+    json.object([
+      #("type", json.string("union")),
+      #(
+        "refs",
+        json.array(
+          [json.string("#post"), json.string("#repost"), json.string("#reply")],
+          fn(x) { x },
+        ),
+      ),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let result = union.validate_schema(schema, ctx)
+  // In test context without lexicon catalog, local refs are syntactically valid
+  result |> should.be_ok
+}
+
+// Test mixed global and local refs
+pub fn union_with_mixed_refs_test() {
+  let schema =
+    json.object([
+      #("type", json.string("union")),
+      #(
+        "refs",
+        json.array(
+          [json.string("com.example.post"), json.string("#localDef")],
+          fn(x) { x },
+        ),
+      ),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let result = union.validate_schema(schema, ctx)
+  // In test context without lexicon catalog, both types are syntactically valid
+  result |> should.be_ok
+}
+
+// Test all primitive types for non-object validation
+pub fn union_data_all_non_object_types_test() {
+  let schema =
+    json.object([
+      #("type", json.string("union")),
+      #("refs", json.array([json.string("com.example.post")], fn(x) { x })),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+
+  // Test number
+  let number_data = json.int(123)
+  union.validate_data(number_data, schema, ctx) |> should.be_error
+
+  // Test string
+  let string_data = json.string("not an object")
+  union.validate_data(string_data, schema, ctx) |> should.be_error
+
+  // Test null
+  let null_data = json.null()
+  union.validate_data(null_data, schema, ctx) |> should.be_error
+
+  // Test array
+  let array_data = json.array([json.string("item")], fn(x) { x })
+  union.validate_data(array_data, schema, ctx) |> should.be_error
+
+  // Test boolean
+  let bool_data = json.bool(True)
+  union.validate_data(bool_data, schema, ctx) |> should.be_error
+}
+
+// Test empty refs in data validation context
+pub fn union_data_empty_refs_test() {
+  let schema =
+    json.object([
+      #("type", json.string("union")),
+      #("refs", json.array([], fn(x) { x })),
+    ])
+
+  let data =
+    json.object([
+      #("$type", json.string("any.type")),
+      #("data", json.string("some data")),
+    ])
+
+  let assert Ok(ctx) = context.builder() |> context.build
+  let result = union.validate_data(data, schema, ctx)
+  // Data validation should fail with empty refs array
   result |> should.be_error
 }
+
+// Test comprehensive reference matching with full lexicon catalog
+pub fn union_data_reference_matching_test() {
+  // Set up lexicons with local, global main, and fragment refs
+  let main_lexicon =
+    json.object([
+      #("lexicon", json.int(1)),
+      #("id", json.string("com.example.test")),
+      #(
+        "defs",
+        json.object([
+          #(
+            "main",
+            json.object([
+              #("type", json.string("union")),
+              #(
+                "refs",
+                json.array(
+                  [
+                    json.string("#localType"),
+                    json.string("com.example.global#main"),
+                    json.string("com.example.types#fragmentType"),
+                  ],
+                  fn(x) { x },
+                ),
+              ),
+            ]),
+          ),
+          #(
+            "localType",
+            json.object([
+              #("type", json.string("object")),
+              #("properties", json.object([])),
+            ]),
+          ),
+        ]),
+      ),
+    ])
+
+  let global_lexicon =
+    json.object([
+      #("lexicon", json.int(1)),
+      #("id", json.string("com.example.global")),
+      #(
+        "defs",
+        json.object([
+          #(
+            "main",
+            json.object([
+              #("type", json.string("object")),
+              #("properties", json.object([])),
+            ]),
+          ),
+        ]),
+      ),
+    ])
+
+  let types_lexicon =
+    json.object([
+      #("lexicon", json.int(1)),
+      #("id", json.string("com.example.types")),
+      #(
+        "defs",
+        json.object([
+          #(
+            "fragmentType",
+            json.object([
+              #("type", json.string("object")),
+              #("properties", json.object([])),
+            ]),
+          ),
+        ]),
+      ),
+    ])
+
+  let assert Ok(builder) =
+    context.builder()
+    |> context.with_validator(field.dispatch_data_validation)
+    |> context.with_lexicons([main_lexicon, global_lexicon, types_lexicon])
+
+  let assert Ok(ctx) = builder |> context.build()
+  let ctx = context.with_current_lexicon(ctx, "com.example.test")
+
+  let schema =
+    json.object([
+      #("type", json.string("union")),
+      #(
+        "refs",
+        json.array(
+          [
+            json.string("#localType"),
+            json.string("com.example.global#main"),
+            json.string("com.example.types#fragmentType"),
+          ],
+          fn(x) { x },
+        ),
+      ),
+    ])
+
+  // Test local reference match
+  let local_data = json.object([#("$type", json.string("localType"))])
+  union.validate_data(local_data, schema, ctx) |> should.be_ok
+
+  // Test global main reference match
+  let global_data =
+    json.object([#("$type", json.string("com.example.global#main"))])
+  union.validate_data(global_data, schema, ctx) |> should.be_ok
+
+  // Test global fragment reference match
+  let fragment_data =
+    json.object([#("$type", json.string("com.example.types#fragmentType"))])
+  union.validate_data(fragment_data, schema, ctx) |> should.be_ok
+}
+
+// Test full schema resolution with constraint validation
+pub fn union_data_with_schema_resolution_test() {
+  let main_lexicon =
+    json.object([
+      #("lexicon", json.int(1)),
+      #("id", json.string("com.example.feed")),
+      #(
+        "defs",
+        json.object([
+          #(
+            "main",
+            json.object([
+              #("type", json.string("union")),
+              #(
+                "refs",
+                json.array(
+                  [
+                    json.string("#post"),
+                    json.string("#repost"),
+                    json.string("com.example.types#like"),
+                  ],
+                  fn(x) { x },
+                ),
+              ),
+            ]),
+          ),
+          #(
+            "post",
+            json.object([
+              #("type", json.string("object")),
+              #(
+                "properties",
+                json.object([
+                  #(
+                    "title",
+                    json.object([
+                      #("type", json.string("string")),
+                      #("maxLength", json.int(100)),
+                    ]),
+                  ),
+                  #("content", json.object([#("type", json.string("string"))])),
+                ]),
+              ),
+              #("required", json.array([json.string("title")], fn(x) { x })),
+            ]),
+          ),
+          #(
+            "repost",
+            json.object([
+              #("type", json.string("object")),
+              #(
+                "properties",
+                json.object([
+                  #("original", json.object([#("type", json.string("string"))])),
+                  #("comment", json.object([#("type", json.string("string"))])),
+                ]),
+              ),
+              #("required", json.array([json.string("original")], fn(x) { x })),
+            ]),
+          ),
+        ]),
+      ),
+    ])
+
+  let types_lexicon =
+    json.object([
+      #("lexicon", json.int(1)),
+      #("id", json.string("com.example.types")),
+      #(
+        "defs",
+        json.object([
+          #(
+            "like",
+            json.object([
+              #("type", json.string("object")),
+              #(
+                "properties",
+                json.object([
+                  #("target", json.object([#("type", json.string("string"))])),
+                  #(
+                    "emoji",
+                    json.object([
+                      #("type", json.string("string")),
+                      #("maxLength", json.int(10)),
+                    ]),
+                  ),
+                ]),
+              ),
+              #("required", json.array([json.string("target")], fn(x) { x })),
+            ]),
+          ),
+        ]),
+      ),
+    ])
+
+  let assert Ok(builder) =
+    context.builder()
+    |> context.with_validator(field.dispatch_data_validation)
+    |> context.with_lexicons([main_lexicon, types_lexicon])
+
+  let assert Ok(ctx) = builder |> context.build()
+  let ctx = context.with_current_lexicon(ctx, "com.example.feed")
+
+  let union_schema =
+    json.object([
+      #("type", json.string("union")),
+      #(
+        "refs",
+        json.array(
+          [
+            json.string("#post"),
+            json.string("#repost"),
+            json.string("com.example.types#like"),
+          ],
+          fn(x) { x },
+        ),
+      ),
+    ])
+
+  // Test valid post data (with all required fields)
+  let valid_post =
+    json.object([
+      #("$type", json.string("post")),
+      #("title", json.string("My Post")),
+      #("content", json.string("This is my post content")),
+    ])
+  union.validate_data(valid_post, union_schema, ctx) |> should.be_ok
+
+  // Test invalid post data (missing required field)
+  let invalid_post =
+    json.object([
+      #("$type", json.string("post")),
+      #("content", json.string("This is missing a title")),
+    ])
+  union.validate_data(invalid_post, union_schema, ctx) |> should.be_error
+
+  // Test valid repost data (with all required fields)
+  let valid_repost =
+    json.object([
+      #("$type", json.string("repost")),
+      #("original", json.string("original-post-uri")),
+      #("comment", json.string("Great post!")),
+    ])
+  union.validate_data(valid_repost, union_schema, ctx) |> should.be_ok
+
+  // Test valid like data (global reference with all required fields)
+  let valid_like =
+    json.object([
+      #("$type", json.string("com.example.types#like")),
+      #("target", json.string("post-uri")),
+      #("emoji", json.string("👍")),
+    ])
+  union.validate_data(valid_like, union_schema, ctx) |> should.be_ok
+
+  // Test invalid like data (missing required field)
+  let invalid_like =
+    json.object([
+      #("$type", json.string("com.example.types#like")),
+      #("emoji", json.string("👍")),
+    ])
+  union.validate_data(invalid_like, union_schema, ctx) |> should.be_error
+}
+
+// Test open vs closed union comparison
+pub fn union_data_open_vs_closed_test() {
+  let lexicon =
+    json.object([
+      #("lexicon", json.int(1)),
+      #("id", json.string("com.example.test")),
+      #(
+        "defs",
+        json.object([
+          #(
+            "main",
+            json.object([
+              #("type", json.string("union")),
+              #("refs", json.array([json.string("#post")], fn(x) { x })),
+              #("closed", json.bool(False)),
+            ]),
+          ),
+          #(
+            "post",
+            json.object([
+              #("type", json.string("object")),
+              #(
+                "properties",
+                json.object([
+                  #("title", json.object([#("type", json.string("string"))])),
+                ]),
+              ),
+            ]),
+          ),
+        ]),
+      ),
+    ])
+
+  let assert Ok(builder) =
+    context.builder()
+    |> context.with_validator(field.dispatch_data_validation)
+    |> context.with_lexicons([lexicon])
+  let assert Ok(ctx) = builder |> context.build()
+  let ctx = context.with_current_lexicon(ctx, "com.example.test")
+
+  let open_union_schema =
+    json.object([
+      #("type", json.string("union")),
+      #("refs", json.array([json.string("#post")], fn(x) { x })),
+      #("closed", json.bool(False)),
+    ])
+
+  let closed_union_schema =
+    json.object([
+      #("type", json.string("union")),
+      #("refs", json.array([json.string("#post")], fn(x) { x })),
+      #("closed", json.bool(True)),
+    ])
+
+  // Known $type should work in both
+  let known_type =
+    json.object([
+      #("$type", json.string("post")),
+      #("title", json.string("Test")),
+    ])
+  union.validate_data(known_type, open_union_schema, ctx) |> should.be_ok
+  union.validate_data(known_type, closed_union_schema, ctx) |> should.be_ok
+
+  // Unknown $type - behavior differs between open/closed
+  let unknown_type =
+    json.object([
+      #("$type", json.string("unknown_type")),
+      #("data", json.string("test")),
+    ])
+  // Open union should accept unknown types
+  union.validate_data(unknown_type, open_union_schema, ctx) |> should.be_ok
+  // Closed union should reject unknown types
+  union.validate_data(unknown_type, closed_union_schema, ctx) |> should.be_error
+}
+
+// Test basic union with full lexicon context
+pub fn union_data_basic_with_full_context_test() {
+  let main_lexicon =
+    json.object([
+      #("lexicon", json.int(1)),
+      #("id", json.string("com.example.test")),
+      #(
+        "defs",
+        json.object([
+          #(
+            "main",
+            json.object([
+              #("type", json.string("union")),
+              #(
+                "refs",
+                json.array(
+                  [
+                    json.string("#post"),
+                    json.string("#repost"),
+                    json.string("com.example.like#main"),
+                  ],
+                  fn(x) { x },
+                ),
+              ),
+            ]),
+          ),
+          #(
+            "post",
+            json.object([
+              #("type", json.string("object")),
+              #(
+                "properties",
+                json.object([
+                  #("title", json.object([#("type", json.string("string"))])),
+                  #("content", json.object([#("type", json.string("string"))])),
+                ]),
+              ),
+            ]),
+          ),
+          #(
+            "repost",
+            json.object([
+              #("type", json.string("object")),
+              #(
+                "properties",
+                json.object([
+                  #("original", json.object([#("type", json.string("string"))])),
+                ]),
+              ),
+            ]),
+          ),
+        ]),
+      ),
+    ])
+
+  let like_lexicon =
+    json.object([
+      #("lexicon", json.int(1)),
+      #("id", json.string("com.example.like")),
+      #(
+        "defs",
+        json.object([
+          #(
+            "main",
+            json.object([
+              #("type", json.string("object")),
+              #(
+                "properties",
+                json.object([
+                  #("target", json.object([#("type", json.string("string"))])),
+                ]),
+              ),
+            ]),
+          ),
+        ]),
+      ),
+    ])
+
+  let assert Ok(builder) =
+    context.builder()
+    |> context.with_validator(field.dispatch_data_validation)
+    |> context.with_lexicons([main_lexicon, like_lexicon])
+
+  let assert Ok(ctx) = builder |> context.build()
+  let ctx = context.with_current_lexicon(ctx, "com.example.test")
+
+  let schema =
+    json.object([
+      #("type", json.string("union")),
+      #(
+        "refs",
+        json.array(
+          [
+            json.string("#post"),
+            json.string("#repost"),
+            json.string("com.example.like#main"),
+          ],
+          fn(x) { x },
+        ),
+      ),
+    ])
+
+  // Valid union data with local reference
+  let post_data =
+    json.object([
+      #("$type", json.string("post")),
+      #("title", json.string("My Post")),
+      #("content", json.string("Post content")),
+    ])
+  union.validate_data(post_data, schema, ctx) |> should.be_ok
+
+  // Valid union data with global reference
+  let like_data =
+    json.object([
+      #("$type", json.string("com.example.like#main")),
+      #("target", json.string("some-target")),
+    ])
+  union.validate_data(like_data, schema, ctx) |> should.be_ok
+}
}