+5
.gitignore
+26
.tangled/workflows/deploy-docs.yml
···
1
+
when:
2
+
- event: ["push"]
3
+
branch: main
4
+
5
+
engine: nixery
6
+
7
+
dependencies:
8
+
nixpkgs:
9
+
- bun
10
+
- curl
11
+
12
+
environment:
13
+
WISP_DID: "did:plc:mkqt76xvfgxuemlwlx6ruc3w"
14
+
WISP_SITE_NAME: "docs"
15
+
16
+
steps:
17
+
- name: build site
18
+
command: |
19
+
bun ./scripts/build-site.mjs
20
+
21
+
- name: deploy docs to wisp
22
+
command: |
23
+
test -n "$WISP_APP_PASSWORD"
24
+
curl -sSL https://sites.wisp.place/nekomimi.pet/wisp-cli-binaries/wisp-cli-x86_64-linux -o wisp-cli
25
+
chmod +x wisp-cli
26
+
./wisp-cli deploy "$WISP_DID" --path ./site-out --site "$WISP_SITE_NAME" --password "$WISP_APP_PASSWORD"
+15
.tangled/workflows/publish-docs.yml
+31
CHANGELOG.md
···
1
+
# changelog
2
+
3
+
## 0.1.2
4
+
5
+
- `extractAt` now logs diagnostic info on parse failures (enable with `.zat` debug scope)
6
+
7
+
## 0.1.1
8
+
9
+
- xrpc client sets `Content-Type: application/json` for POST requests
10
+
- docs published as `site.standard.document` records on tag releases
11
+
12
+
## 0.1.0
13
+
14
+
sync types for firehose consumption:
15
+
16
+
- `CommitAction` - `.create`, `.update`, `.delete`
17
+
- `EventKind` - `.commit`, `.sync`, `.identity`, `.account`, `.info`
18
+
- `AccountStatus` - `.takendown`, `.suspended`, `.deleted`, `.deactivated`, `.desynchronized`, `.throttled`
19
+
20
+
these integrate with `std.json` for automatic parsing.
21
+
22
+
## 0.0.2
23
+
24
+
- xrpc client with gzip workaround for zig 0.15.x deflate bug
25
+
- jwt parsing and verification
26
+
27
+
## 0.0.1
28
+
29
+
- string primitives (Tid, Did, Handle, Nsid, Rkey, AtUri)
30
+
- did/handle resolution
31
+
- json helpers
+15
CONTRIBUTING.md
+48
-3
README.md
···
1
-
# zat
1
+
# [zat](https://zat.dev)
2
+
3
+
AT Protocol building blocks for zig.
4
+
5
+
<details>
6
+
<summary><strong>this readme is an ATProto record</strong></summary>
7
+
8
+
→ [view in zat.dev's repository](https://at-me.zzstoatzz.io/view?handle=zat.dev)
2
9
3
-
zig primitives for AT Protocol.
10
+
zat publishes these docs as [`site.standard.document`](https://standard.site) records, signed by its DID.
11
+
12
+
</details>
4
13
5
14
## install
6
15
7
16
```bash
8
-
zig fetch --save https://tangled.sh/zzstoatzz.io/zat/archive/main
17
+
zig fetch --save https://tangled.sh/zat.dev/zat/archive/main
9
18
```
10
19
11
20
then in `build.zig`:
···
93
102
</details>
94
103
95
104
<details>
105
+
<summary><strong>sync types</strong> - enums for firehose/event stream consumption</summary>
106
+
107
+
```zig
108
+
// use in struct definitions for automatic json parsing:
109
+
const RepoOp = struct {
110
+
action: zat.CommitAction, // .create, .update, .delete
111
+
path: []const u8,
112
+
cid: ?[]const u8,
113
+
};
114
+
115
+
// then exhaustive switch:
116
+
switch (op.action) {
117
+
.create, .update => processUpsert(op),
118
+
.delete => processDelete(op),
119
+
}
120
+
```
121
+
122
+
- **CommitAction** - `.create`, `.update`, `.delete`
123
+
- **EventKind** - `.commit`, `.sync`, `.identity`, `.account`, `.info`
124
+
- **AccountStatus** - `.takendown`, `.suspended`, `.deleted`, `.deactivated`, `.desynchronized`, `.throttled`
125
+
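a minimal decode sketch to go with the struct above - the payload is illustrative (not a verbatim firehose message) and it assumes the wire value matches the enum tag name, which is what `std.json` expects by default:

```zig
const std = @import("std");
const zat = @import("zat");

const RepoOp = struct {
    action: zat.CommitAction,
    path: []const u8,
    cid: ?[]const u8 = null, // absent in the payload below, falls back to null
};

test "commit action parses straight from json" {
    const payload =
        \\{"action":"create","path":"app.bsky.feed.post/example"}
    ;
    const parsed = try std.json.parseFromSlice(RepoOp, std.testing.allocator, payload, .{});
    defer parsed.deinit();

    try std.testing.expectEqual(zat.CommitAction.create, parsed.value.action);
    try std.testing.expectEqualStrings("app.bsky.feed.post/example", parsed.value.path);
}
```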
126
+
</details>
127
+
128
+
<details>
96
129
<summary><strong>json helpers</strong> - navigate nested json without verbose if-chains</summary>
97
130
98
131
```zig
···
150
183
151
184
validation follows [atproto.com/specs](https://atproto.com/specs/atp).
152
185
186
+
## versioning
187
+
188
+
pre-1.0 semver:
189
+
- `0.x.0` - new features (backwards compatible)
190
+
- `0.x.y` - bug fixes
191
+
192
+
breaking changes bump the minor version and are documented in commit messages.
193
+
153
194
## license
154
195
155
196
MIT
197
+
198
+
---
199
+
200
+
[roadmap](docs/roadmap.md) · [changelog](CHANGELOG.md)
+12
build.zig
···
15
15
16
16
const test_step = b.step("test", "run unit tests");
17
17
test_step.dependOn(&run_tests.step);
18
+
19
+
// publish-docs script (uses zat to publish docs to ATProto)
20
+
const publish_docs = b.addExecutable(.{
21
+
.name = "publish-docs",
22
+
.root_module = b.createModule(.{
23
+
.root_source_file = b.path("scripts/publish-docs.zig"),
24
+
.target = target,
25
+
.optimize = optimize,
26
+
.imports = &.{.{ .name = "zat", .module = mod }},
27
+
}),
28
+
});
29
+
b.installArtifact(publish_docs);
18
30
}
+1
-1
build.zig.zon
+35
devlog/001-self-publishing-docs.md
···
1
+
# zat publishes its own docs to ATProto
2
+
3
+
zat uses itself to publish these docs as `site.standard.document` records. here's how.
4
+
5
+
## the idea
6
+
7
+
i'm working on [search for leaflet](https://leaflet-search.pages.dev/) and, more generally, search for [standard.site](https://standard.site/) records. many people are [thinking about how to facilitate better idea sharing on atproto right now](https://bsky.app/profile/eugenevinitsky.bsky.social/post/3mbpqpylv3s2e).
8
+
9
+
this is me doing a rep of shipping a "standard.site" so i know what i'll be searching through, and so i better understand why blogging platforms choose their schema extensions etc. before i start indexing/searching their record types.
10
+
11
+
## what we built
12
+
13
+
a zig script ([`scripts/publish-docs.zig`](https://tangled.sh/zat.dev/zat/tree/main/scripts/publish-docs.zig)) that:
14
+
15
+
1. authenticates with the PDS via `com.atproto.server.createSession`
16
+
2. creates a `site.standard.publication` record
17
+
3. publishes each doc as a `site.standard.document` pointing to that publication
18
+
4. uses deterministic TIDs so records get the same rkey every time (idempotent updates)
19
+
20
+
## the mechanics
21
+
22
+
### TIDs
23
+
24
+
timestamp identifiers. base32-sortable. we use a fixed base timestamp with incrementing clock_id so each doc gets a stable rkey:
25
+
26
+
```zig
27
+
const pub_tid = zat.Tid.fromTimestamp(1704067200000000, 0); // publication
28
+
const doc_tid = zat.Tid.fromTimestamp(1704067200000000, i + 1); // docs get 1, 2, 3...
29
+
```
30
+
31
+
### CI
32
+
33
+
[`.tangled/workflows/publish-docs.yml`](https://tangled.sh/zat.dev/zat/tree/main/.tangled/workflows/publish-docs.yml) triggers on `v*` tags. tag a release, docs publish automatically.
34
+
35
+
`putRecord` with the same rkey overwrites the existing record, so cutting a tag updates the `standard.site` records in place rather than creating new ones.
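to make the idempotency concrete, here's a minimal sketch (not part of the CI job) that only reuses `Tid.fromTimestamp` and `.str()` as they appear in `scripts/publish-docs.zig` - run it on two different days and the output is identical, which is exactly why `putRecord` overwrites instead of piling up new records:

```zig
const std = @import("std");
const zat = @import("zat");

pub fn main() void {
    // same inputs as the publish script: fixed base timestamp, small clock_ids
    const base_us: u64 = 1704067200000000; // 2024-01-01T00:00:00Z in microseconds

    const pub_tid = zat.Tid.fromTimestamp(base_us, 0); // publication rkey
    const readme_tid = zat.Tid.fromTimestamp(base_us, 1); // first doc rkey
    const roadmap_tid = zat.Tid.fromTimestamp(base_us, 2); // second doc rkey

    // deterministic: no wall clock involved, so these strings never change
    std.debug.print("{s}\n{s}\n{s}\n", .{ pub_tid.str(), readme_tid.str(), roadmap_tid.str() });
}
```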
+102
docs/archive/plan-expanded.md
···
1
+
# archived: expanded plan (partially implemented)
2
+
3
+
This file is preserved for context/history. Current direction lives in `docs/roadmap.md`.
4
+
5
+
# zat - expanded scope
6
+
7
+
the initial release delivered string primitives (Tid, Did, Handle, Nsid, Rkey, AtUri). this plan expands toward a usable AT Protocol sdk.
8
+
9
+
## motivation
10
+
11
+
real-world usage shows repeated implementations of:
12
+
- DID resolution (plc.directory lookups, did:web fetches)
13
+
- JWT parsing and signature verification
14
+
- ECDSA verification (P256, secp256k1)
15
+
- base58/base64url decoding
16
+
- XRPC calls with manual json navigation
17
+
18
+
this is shared infrastructure across any atproto app. zat can absorb it incrementally.
19
+
20
+
## next: did resolution
21
+
22
+
```zig
23
+
pub const DidResolver = struct {
24
+
/// resolve a did to its document
25
+
pub fn resolve(self: *DidResolver, did: Did) !DidDocument
26
+
27
+
/// resolve did:plc via plc.directory
28
+
fn resolvePlc(self: *DidResolver, id: []const u8) !DidDocument
29
+
30
+
/// resolve did:web via .well-known
31
+
fn resolveWeb(self: *DidResolver, domain: []const u8) !DidDocument
32
+
};
33
+
34
+
pub const DidDocument = struct {
35
+
id: Did,
36
+
also_known_as: [][]const u8, // handles
37
+
verification_methods: []VerificationMethod,
38
+
services: []Service,
39
+
40
+
pub fn pdsEndpoint(self: DidDocument) ?[]const u8
41
+
pub fn handle(self: DidDocument) ?[]const u8
42
+
};
43
+
```
44
+
45
+
## next: cid (content identifiers)
46
+
47
+
```zig
48
+
pub const Cid = struct {
49
+
raw: []const u8,
50
+
51
+
pub fn parse(s: []const u8) ?Cid
52
+
pub fn version(self: Cid) u8
53
+
pub fn codec(self: Cid) u64
54
+
pub fn hash(self: Cid) []const u8
55
+
};
56
+
```
57
+
58
+
## later: xrpc client
59
+
60
+
```zig
61
+
pub const XrpcClient = struct {
62
+
pds: []const u8,
63
+
access_token: ?[]const u8,
64
+
65
+
pub fn query(self: *XrpcClient, nsid: Nsid, params: anytype) !JsonValue
66
+
pub fn procedure(self: *XrpcClient, nsid: Nsid, input: anytype) !JsonValue
67
+
};
68
+
```
69
+
70
+
## later: jwt verification
71
+
72
+
```zig
73
+
pub const Jwt = struct {
74
+
header: JwtHeader,
75
+
payload: JwtPayload,
76
+
signature: []const u8,
77
+
78
+
pub fn parse(token: []const u8) ?Jwt
79
+
pub fn verify(self: Jwt, public_key: PublicKey) bool
80
+
};
81
+
```
82
+
83
+
## out of scope
84
+
85
+
- lexicon codegen (separate project)
86
+
- session management / token refresh (app-specific)
87
+
- jetstream client (websocket.zig + json is enough)
88
+
- application frameworks (too opinionated)
89
+
90
+
## design principles
91
+
92
+
1. **layered** - each piece usable independently (use Did without DidResolver)
93
+
2. **explicit** - no hidden allocations, pass allocators where needed
94
+
3. **borrowing** - parse returns slices into input where possible
95
+
4. **fallible** - return errors/optionals, don't panic
96
+
5. **protocol-focused** - AT Protocol primitives, not app-specific features
97
+
98
+
## open questions
99
+
100
+
- should DidResolver cache? or leave that to caller?
101
+
- should XrpcClient handle auth refresh? or just expose tokens?
102
+
- how to handle json parsing without imposing a specific json library?
+192
docs/archive/plan-initial.md
···
1
+
# archived: initial plan (out of date)
2
+
3
+
This file is preserved for context/history. Current direction lives in `docs/roadmap.md`.
4
+
5
+
# zat - zig atproto primitives
6
+
7
+
low-level building blocks for atproto applications in zig. not a full sdk - just the pieces that everyone reimplements.
8
+
9
+
## philosophy
10
+
11
+
from studying the wishlists: the pain is real, but the suggested solutions often over-engineer. we want:
12
+
13
+
1. **primitives, not frameworks** - types and parsers, not http clients or feed scaffolds
14
+
2. **layered design** - each piece usable independently
15
+
3. **zig idioms** - explicit buffers, comptime validation, no hidden allocations
16
+
4. **minimal scope** - solve the repeated pain, not every possible need
17
+
18
+
## scope
19
+
20
+
### in scope (v0.1)
21
+
22
+
**tid** - timestamp identifiers
23
+
- parse tid string to timestamp (microseconds)
24
+
- generate tid from timestamp
25
+
- extract clock id
26
+
- comptime validation of format
27
+
28
+
**at-uri** - `at://did:plc:xyz/collection/rkey`
29
+
- parse to components (did, collection, rkey)
30
+
- construct from components
31
+
- validation
32
+
33
+
**did** - decentralized identifiers
34
+
- parse did:plc and did:web
35
+
- validate format
36
+
- type-safe wrapper (not just `[]const u8`)
37
+
38
+
### maybe v0.2
39
+
40
+
**facets** - extract links/mentions/tags from post records
41
+
- given a json value with `text` and `facets`, extract urls
42
+
- byte-offset handling for utf-8
43
+
44
+
**cid** - content identifiers
45
+
- parse cid strings
46
+
- validate format
47
+
48
+
### out of scope (for now)
49
+
50
+
- lexicon codegen (too big, could be its own project)
51
+
- xrpc client (std.http.Client is fine)
52
+
- session management (app-specific)
53
+
- jetstream client (websocket.zig exists, just wire it)
54
+
- feed generator framework (each feed is unique)
55
+
- did resolution (requires http, out of primitive scope)
56
+
57
+
## design
58
+
59
+
### tid.zig
60
+
61
+
```zig
62
+
pub const Tid = struct {
63
+
raw: [13]u8,
64
+
65
+
/// parse a tid string. returns null if invalid.
66
+
pub fn parse(s: []const u8) ?Tid
67
+
68
+
/// timestamp in microseconds since unix epoch
69
+
pub fn timestamp(self: Tid) u64
70
+
71
+
/// clock identifier (lower 10 bits)
72
+
pub fn clockId(self: Tid) u10
73
+
74
+
/// generate tid for current time
75
+
pub fn now() Tid
76
+
77
+
/// generate tid for specific timestamp
78
+
pub fn fromTimestamp(ts: u64, clock_id: u10) Tid
79
+
80
+
/// format to string
81
+
pub fn format(self: Tid, buf: *[13]u8) void
82
+
};
83
+
```
84
+
85
+
encoding: base32-sortable (chars `234567abcdefghijklmnopqrstuvwxyz`), 13 chars, first 11 encode 53-bit timestamp, last 2 encode 10-bit clock id.
86
+
87
+
### at_uri.zig
88
+
89
+
```zig
90
+
pub const AtUri = struct {
91
+
/// the full uri string (borrowed, not owned)
92
+
raw: []const u8,
93
+
94
+
/// offsets into raw for each component
95
+
did_end: usize,
96
+
collection_end: usize,
97
+
98
+
pub fn parse(s: []const u8) ?AtUri
99
+
100
+
pub fn did(self: AtUri) []const u8
101
+
pub fn collection(self: AtUri) []const u8
102
+
pub fn rkey(self: AtUri) []const u8
103
+
104
+
/// construct a new uri. caller owns the buffer.
105
+
pub fn format(
106
+
buf: []u8,
107
+
did: []const u8,
108
+
collection: []const u8,
109
+
rkey: []const u8,
110
+
) ?[]const u8
111
+
};
112
+
```
113
+
114
+
### did.zig
115
+
116
+
```zig
117
+
pub const Did = union(enum) {
118
+
plc: [24]u8, // the identifier after "did:plc:"
119
+
web: []const u8, // the domain after "did:web:"
120
+
121
+
pub fn parse(s: []const u8) ?Did
122
+
123
+
/// format to string
124
+
pub fn format(self: Did, buf: []u8) ?[]const u8
125
+
126
+
/// check if this is a plc did
127
+
pub fn isPlc(self: Did) bool
128
+
};
129
+
```
130
+
131
+
## structure
132
+
133
+
```
134
+
zat/
135
+
├── build.zig
136
+
├── build.zig.zon
137
+
├── src/
138
+
│   ├── root.zig       # public API (stable exports)
139
+
│   ├── internal.zig   # internal API (experimental)
140
+
│   └── internal/
141
+
│       ├── tid.zig
142
+
│       ├── at_uri.zig
143
+
│       └── did.zig
144
+
└── docs/
145
+
    └── plan.md
146
+
```
147
+
148
+
## internal → public promotion
149
+
150
+
new features start in `internal` where we can iterate freely. when an API stabilizes:
151
+
152
+
```zig
153
+
// in root.zig, uncomment to promote:
154
+
pub const Tid = internal.Tid;
155
+
```
156
+
157
+
users who need bleeding-edge access can always use:
158
+
159
+
```zig
160
+
const zat = @import("zat");
161
+
const tid = zat.internal.Tid.parse("...");
162
+
```
163
+
164
+
this pattern exists indefinitely - even after 1.0, new experimental features start in internal.
165
+
166
+
## decisions
167
+
168
+
### why not typed lexicons?
169
+
170
+
codegen from lexicon json is a big project on its own. the core pain (json navigation) can be partially addressed by documenting patterns, and the sdk should work regardless of how people parse json.
171
+
172
+
### why not an http client wrapper?
173
+
174
+
zig 0.15's `std.http.Client` with `Io.Writer.Allocating` works well. wrapping it doesn't add much value. the real pain is around auth token refresh and rate limiting - those are better solved at the application level where retry logic is domain-specific.
175
+
176
+
### why not websocket/jetstream?
177
+
178
+
websocket.zig already exists and works well. the jetstream protocol is simple json messages. a thin wrapper doesn't justify a dependency.
179
+
180
+
### borrowing vs owning
181
+
182
+
for parse operations, we borrow slices into the input rather than allocating. callers who need owned data can dupe. this matches zig's explicit memory style.
183
+
184
+
## next steps
185
+
186
+
1. ~~implement tid.zig with tests~~ done
187
+
2. ~~implement at_uri.zig with tests~~ done
188
+
3. ~~implement did.zig with tests~~ done
189
+
4. ~~wire up build.zig as a module~~ done
190
+
5. try using it in find-bufo or music-atmosphere-feed to validate the api
191
+
6. iterate on internal APIs based on real usage
192
+
7. promote stable APIs to root.zig
-98
docs/plan-expanded.md
···
1
-
# zat - expanded scope
2
-
3
-
the initial release delivered string primitives (Tid, Did, Handle, Nsid, Rkey, AtUri). this plan expands toward a usable AT Protocol sdk.
4
-
5
-
## motivation
6
-
7
-
real-world usage shows repeated implementations of:
8
-
- DID resolution (plc.directory lookups, did:web fetches)
9
-
- JWT parsing and signature verification
10
-
- ECDSA verification (P256, secp256k1)
11
-
- base58/base64url decoding
12
-
- XRPC calls with manual json navigation
13
-
14
-
this is shared infrastructure across any atproto app. zat can absorb it incrementally.
15
-
16
-
## next: did resolution
17
-
18
-
```zig
19
-
pub const DidResolver = struct {
20
-
/// resolve a did to its document
21
-
pub fn resolve(self: *DidResolver, did: Did) !DidDocument
22
-
23
-
/// resolve did:plc via plc.directory
24
-
fn resolvePlc(self: *DidResolver, id: []const u8) !DidDocument
25
-
26
-
/// resolve did:web via .well-known
27
-
fn resolveWeb(self: *DidResolver, domain: []const u8) !DidDocument
28
-
};
29
-
30
-
pub const DidDocument = struct {
31
-
id: Did,
32
-
also_known_as: [][]const u8, // handles
33
-
verification_methods: []VerificationMethod,
34
-
services: []Service,
35
-
36
-
pub fn pdsEndpoint(self: DidDocument) ?[]const u8
37
-
pub fn handle(self: DidDocument) ?[]const u8
38
-
};
39
-
```
40
-
41
-
## next: cid (content identifiers)
42
-
43
-
```zig
44
-
pub const Cid = struct {
45
-
raw: []const u8,
46
-
47
-
pub fn parse(s: []const u8) ?Cid
48
-
pub fn version(self: Cid) u8
49
-
pub fn codec(self: Cid) u64
50
-
pub fn hash(self: Cid) []const u8
51
-
};
52
-
```
53
-
54
-
## later: xrpc client
55
-
56
-
```zig
57
-
pub const XrpcClient = struct {
58
-
pds: []const u8,
59
-
access_token: ?[]const u8,
60
-
61
-
pub fn query(self: *XrpcClient, nsid: Nsid, params: anytype) !JsonValue
62
-
pub fn procedure(self: *XrpcClient, nsid: Nsid, input: anytype) !JsonValue
63
-
};
64
-
```
65
-
66
-
## later: jwt verification
67
-
68
-
```zig
69
-
pub const Jwt = struct {
70
-
header: JwtHeader,
71
-
payload: JwtPayload,
72
-
signature: []const u8,
73
-
74
-
pub fn parse(token: []const u8) ?Jwt
75
-
pub fn verify(self: Jwt, public_key: PublicKey) bool
76
-
};
77
-
```
78
-
79
-
## out of scope
80
-
81
-
- lexicon codegen (separate project)
82
-
- session management / token refresh (app-specific)
83
-
- jetstream client (websocket.zig + json is enough)
84
-
- application frameworks (too opinionated)
85
-
86
-
## design principles
87
-
88
-
1. **layered** - each piece usable independently (use Did without DidResolver)
89
-
2. **explicit** - no hidden allocations, pass allocators where needed
90
-
3. **borrowing** - parse returns slices into input where possible
91
-
4. **fallible** - return errors/optionals, don't panic
92
-
5. **protocol-focused** - AT Protocol primitives, not app-specific features
93
-
94
-
## open questions
95
-
96
-
- should DidResolver cache? or leave that to caller?
97
-
- should XrpcClient handle auth refresh? or just expose tokens?
98
-
- how to handle json parsing without imposing a specific json library?
-188
docs/plan-initial.md
···
1
-
# zat - zig atproto primitives
2
-
3
-
low-level building blocks for atproto applications in zig. not a full sdk - just the pieces that everyone reimplements.
4
-
5
-
## philosophy
6
-
7
-
from studying the wishlists: the pain is real, but the suggested solutions often over-engineer. we want:
8
-
9
-
1. **primitives, not frameworks** - types and parsers, not http clients or feed scaffolds
10
-
2. **layered design** - each piece usable independently
11
-
3. **zig idioms** - explicit buffers, comptime validation, no hidden allocations
12
-
4. **minimal scope** - solve the repeated pain, not every possible need
13
-
14
-
## scope
15
-
16
-
### in scope (v0.1)
17
-
18
-
**tid** - timestamp identifiers
19
-
- parse tid string to timestamp (microseconds)
20
-
- generate tid from timestamp
21
-
- extract clock id
22
-
- comptime validation of format
23
-
24
-
**at-uri** - `at://did:plc:xyz/collection/rkey`
25
-
- parse to components (did, collection, rkey)
26
-
- construct from components
27
-
- validation
28
-
29
-
**did** - decentralized identifiers
30
-
- parse did:plc and did:web
31
-
- validate format
32
-
- type-safe wrapper (not just `[]const u8`)
33
-
34
-
### maybe v0.2
35
-
36
-
**facets** - extract links/mentions/tags from post records
37
-
- given a json value with `text` and `facets`, extract urls
38
-
- byte-offset handling for utf-8
39
-
40
-
**cid** - content identifiers
41
-
- parse cid strings
42
-
- validate format
43
-
44
-
### out of scope (for now)
45
-
46
-
- lexicon codegen (too big, could be its own project)
47
-
- xrpc client (std.http.Client is fine)
48
-
- session management (app-specific)
49
-
- jetstream client (websocket.zig exists, just wire it)
50
-
- feed generator framework (each feed is unique)
51
-
- did resolution (requires http, out of primitive scope)
52
-
53
-
## design
54
-
55
-
### tid.zig
56
-
57
-
```zig
58
-
pub const Tid = struct {
59
-
raw: [13]u8,
60
-
61
-
/// parse a tid string. returns null if invalid.
62
-
pub fn parse(s: []const u8) ?Tid
63
-
64
-
/// timestamp in microseconds since unix epoch
65
-
pub fn timestamp(self: Tid) u64
66
-
67
-
/// clock identifier (lower 10 bits)
68
-
pub fn clockId(self: Tid) u10
69
-
70
-
/// generate tid for current time
71
-
pub fn now() Tid
72
-
73
-
/// generate tid for specific timestamp
74
-
pub fn fromTimestamp(ts: u64, clock_id: u10) Tid
75
-
76
-
/// format to string
77
-
pub fn format(self: Tid, buf: *[13]u8) void
78
-
};
79
-
```
80
-
81
-
encoding: base32-sortable (chars `234567abcdefghijklmnopqrstuvwxyz`), 13 chars, first 11 encode 53-bit timestamp, last 2 encode 10-bit clock id.
82
-
83
-
### at_uri.zig
84
-
85
-
```zig
86
-
pub const AtUri = struct {
87
-
/// the full uri string (borrowed, not owned)
88
-
raw: []const u8,
89
-
90
-
/// offsets into raw for each component
91
-
did_end: usize,
92
-
collection_end: usize,
93
-
94
-
pub fn parse(s: []const u8) ?AtUri
95
-
96
-
pub fn did(self: AtUri) []const u8
97
-
pub fn collection(self: AtUri) []const u8
98
-
pub fn rkey(self: AtUri) []const u8
99
-
100
-
/// construct a new uri. caller owns the buffer.
101
-
pub fn format(
102
-
buf: []u8,
103
-
did: []const u8,
104
-
collection: []const u8,
105
-
rkey: []const u8,
106
-
) ?[]const u8
107
-
};
108
-
```
109
-
110
-
### did.zig
111
-
112
-
```zig
113
-
pub const Did = union(enum) {
114
-
plc: [24]u8, // the identifier after "did:plc:"
115
-
web: []const u8, // the domain after "did:web:"
116
-
117
-
pub fn parse(s: []const u8) ?Did
118
-
119
-
/// format to string
120
-
pub fn format(self: Did, buf: []u8) ?[]const u8
121
-
122
-
/// check if this is a plc did
123
-
pub fn isPlc(self: Did) bool
124
-
};
125
-
```
126
-
127
-
## structure
128
-
129
-
```
130
-
zat/
131
-
├── build.zig
132
-
├── build.zig.zon
133
-
├── src/
134
-
│   ├── root.zig       # public API (stable exports)
135
-
│   ├── internal.zig   # internal API (experimental)
136
-
│   └── internal/
137
-
│       ├── tid.zig
138
-
│       ├── at_uri.zig
139
-
│       └── did.zig
140
-
└── docs/
141
-
    └── plan.md
142
-
```
143
-
144
-
## internal → public promotion
145
-
146
-
new features start in `internal` where we can iterate freely. when an API stabilizes:
147
-
148
-
```zig
149
-
// in root.zig, uncomment to promote:
150
-
pub const Tid = internal.Tid;
151
-
```
152
-
153
-
users who need bleeding-edge access can always use:
154
-
155
-
```zig
156
-
const zat = @import("zat");
157
-
const tid = zat.internal.Tid.parse("...");
158
-
```
159
-
160
-
this pattern exists indefinitely - even after 1.0, new experimental features start in internal.
161
-
162
-
## decisions
163
-
164
-
### why not typed lexicons?
165
-
166
-
codegen from lexicon json is a big project on its own. the core pain (json navigation) can be partially addressed by documenting patterns, and the sdk should work regardless of how people parse json.
167
-
168
-
### why not an http client wrapper?
169
-
170
-
zig 0.15's `std.http.Client` with `Io.Writer.Allocating` works well. wrapping it doesn't add much value. the real pain is around auth token refresh and rate limiting - those are better solved at the application level where retry logic is domain-specific.
171
-
172
-
### why not websocket/jetstream?
173
-
174
-
websocket.zig already exists and works well. the jetstream protocol is simple json messages. a thin wrapper doesn't justify a dependency.
175
-
176
-
### borrowing vs owning
177
-
178
-
for parse operations, we borrow slices into the input rather than allocating. callers who need owned data can dupe. this matches zig's explicit memory style.
179
-
180
-
## next steps
181
-
182
-
1. ~~implement tid.zig with tests~~ done
183
-
2. ~~implement at_uri.zig with tests~~ done
184
-
3. ~~implement did.zig with tests~~ done
185
-
4. ~~wire up build.zig as a module~~ done
186
-
5. try using it in find-bufo or music-atmosphere-feed to validate the api
187
-
6. iterate on internal APIs based on real usage
188
-
7. promote stable APIs to root.zig
+40
docs/roadmap.md
···
1
+
# roadmap
2
+
3
+
zat started as a small set of string primitives for AT Protocol - the types everyone reimplements (`Tid`, `Did`, `Handle`, `Nsid`, `Rkey`, `AtUri`). the scope grew based on real usage.
4
+
5
+
## history
6
+
7
+
**initial scope** - string primitives with parsing and validation. the philosophy: primitives not frameworks, layered design, zig idioms, minimal scope.
8
+
9
+
**what grew from usage:**
10
+
- DID resolution was originally "out of scope" - real projects needed it, so `DidResolver` and `DidDocument` got added
11
+
- XRPC client and JSON helpers - same story
12
+
- JWT verification for service auth
13
+
- handle resolution via HTTP well-known
14
+
- handle resolution via DNS-over-HTTP (community contribution)
15
+
- sync types for firehose consumption (`CommitAction`, `EventKind`, `AccountStatus`)
16
+
17
+
this pattern - start minimal, expand based on real pain - continues.
18
+
19
+
## now
20
+
21
+
use zat in real projects. let usage drive what's next.
22
+
23
+
the primitives are reasonably complete. what's missing will show up when people build things. until then, no speculative features.
24
+
25
+
## maybe later
26
+
27
+
these stay out of scope unless real demand emerges:
28
+
29
+
- lexicon codegen - probably a separate project
30
+
- higher-level clients/frameworks - too opinionated
31
+
- token refresh/session management - app-specific
32
+
- feed generator scaffolding - each feed is unique
33
+
34
+
## non-goals
35
+
36
+
zat is not trying to be:
37
+
38
+
- a "one true SDK" that does everything
39
+
- an opinionated app framework
40
+
- a replacement for understanding the protocol
+17
justfile
+215
scripts/build-site.mjs
···
1
+
import {
2
+
readdir,
3
+
readFile,
4
+
mkdir,
5
+
rm,
6
+
cp,
7
+
writeFile,
8
+
access,
9
+
} from "node:fs/promises";
10
+
import path from "node:path";
11
+
import { execFile } from "node:child_process";
12
+
import { promisify } from "node:util";
13
+
14
+
const repoRoot = path.resolve(new URL("..", import.meta.url).pathname);
15
+
const docsDir = path.join(repoRoot, "docs");
16
+
const devlogDir = path.join(repoRoot, "devlog");
17
+
const siteSrcDir = path.join(repoRoot, "site");
18
+
const outDir = path.join(repoRoot, "site-out");
19
+
const outDocsDir = path.join(outDir, "docs");
20
+
21
+
const execFileAsync = promisify(execFile);
22
+
23
+
async function exists(filePath) {
24
+
try {
25
+
await access(filePath);
26
+
return true;
27
+
} catch {
28
+
return false;
29
+
}
30
+
}
31
+
32
+
function isMarkdown(filePath) {
33
+
return filePath.toLowerCase().endsWith(".md");
34
+
}
35
+
36
+
async function listMarkdownFiles(dir, prefix = "") {
37
+
const entries = await readdir(dir, { withFileTypes: true });
38
+
const out = [];
39
+
for (const e of entries) {
40
+
if (e.name.startsWith(".")) continue;
41
+
const rel = path.join(prefix, e.name);
42
+
const abs = path.join(dir, e.name);
43
+
if (e.isDirectory()) {
44
+
out.push(...(await listMarkdownFiles(abs, rel)));
45
+
} else if (e.isFile() && isMarkdown(e.name)) {
46
+
out.push(rel.replaceAll(path.sep, "/"));
47
+
}
48
+
}
49
+
return out.sort((a, b) => a.localeCompare(b));
50
+
}
51
+
52
+
function titleFromMarkdown(md, fallback) {
53
+
const lines = md.split(/\r?\n/);
54
+
for (const line of lines) {
55
+
const m = /^#\s+(.+)\s*$/.exec(line);
56
+
if (m) return m[1].trim();
57
+
}
58
+
return fallback.replace(/\.md$/i, "");
59
+
}
60
+
61
+
function normalizeTitle(title) {
62
+
let t = String(title || "").trim();
63
+
// Strip markdown links: [text](url) -> text
64
+
t = t.replace(/\[([^\]]+)\]\([^)]+\)/g, "$1");
65
+
// If pages follow a "zat - ..." style, drop the redundant prefix in the nav.
66
+
t = t.replace(/^zat\s*-\s*/i, "");
67
+
// Cheaply capitalize (keeps the rest as-authored).
68
+
if (t.length) t = t[0].toUpperCase() + t.slice(1);
69
+
return t;
70
+
}
71
+
72
+
async function getBuildId() {
73
+
try {
74
+
const { stdout } = await execFileAsync("git", ["rev-parse", "HEAD"], {
75
+
cwd: repoRoot,
76
+
});
77
+
const full = String(stdout || "").trim();
78
+
if (full) return full.slice(0, 12);
79
+
} catch {
80
+
// ignore
81
+
}
82
+
return String(Date.now());
83
+
}
84
+
85
+
async function main() {
86
+
await rm(outDir, { recursive: true, force: true });
87
+
await mkdir(outDir, { recursive: true });
88
+
89
+
// Copy static site shell
90
+
await cp(siteSrcDir, outDir, { recursive: true });
91
+
92
+
// Cache-bust immutable assets on Wisp by appending a per-commit query string.
93
+
const buildId = await getBuildId();
94
+
const outIndex = path.join(outDir, "index.html");
95
+
if (await exists(outIndex)) {
96
+
let html = await readFile(outIndex, "utf8");
97
+
html = html.replaceAll('href="./style.css"', `href="./style.css?v=${buildId}"`);
98
+
html = html.replaceAll(
99
+
'src="./vendor/marked.min.js"',
100
+
`src="./vendor/marked.min.js?v=${buildId}"`,
101
+
);
102
+
html = html.replaceAll(
103
+
'src="./app.js"',
104
+
`src="./app.js?v=${buildId}"`,
105
+
);
106
+
html = html.replaceAll(
107
+
'href="./favicon.svg"',
108
+
`href="./favicon.svg?v=${buildId}"`,
109
+
);
110
+
await writeFile(outIndex, html, "utf8");
111
+
}
112
+
113
+
// Copy docs
114
+
await mkdir(outDocsDir, { recursive: true });
115
+
116
+
const pages = [];
117
+
118
+
// Prefer an explicit docs homepage if present; otherwise use repo README as index.
119
+
const docsIndex = path.join(docsDir, "index.md");
120
+
if (!(await exists(docsIndex))) {
121
+
const readme = path.join(repoRoot, "README.md");
122
+
if (await exists(readme)) {
123
+
let md = await readFile(readme, "utf8");
124
+
// Strip docs/ prefix from links since we're now inside the docs context.
125
+
md = md.replace(/\]\(docs\//g, "](");
126
+
await writeFile(path.join(outDocsDir, "index.md"), md, "utf8");
127
+
pages.push({
128
+
path: "index.md",
129
+
title: normalizeTitle(titleFromMarkdown(md, "index.md")),
130
+
});
131
+
}
132
+
}
133
+
134
+
const changelog = path.join(repoRoot, "CHANGELOG.md");
135
+
const docsChangelog = path.join(docsDir, "changelog.md");
136
+
if ((await exists(changelog)) && !(await exists(docsChangelog))) {
137
+
const md = await readFile(changelog, "utf8");
138
+
await writeFile(path.join(outDocsDir, "changelog.md"), md, "utf8");
139
+
pages.push({
140
+
path: "changelog.md",
141
+
title: normalizeTitle(titleFromMarkdown(md, "changelog.md")),
142
+
});
143
+
}
144
+
145
+
const mdFiles = (await exists(docsDir)) ? await listMarkdownFiles(docsDir) : [];
146
+
147
+
// Copy all markdown under docs/ (including archives), but only include non-archive
148
+
// paths in the sidebar manifest.
149
+
for (const rel of mdFiles) {
150
+
const src = path.join(docsDir, rel);
151
+
const dst = path.join(outDocsDir, rel);
152
+
await mkdir(path.dirname(dst), { recursive: true });
153
+
await cp(src, dst);
154
+
155
+
const md = await readFile(src, "utf8");
156
+
if (!rel.startsWith("archive/")) {
157
+
pages.push({ path: rel, title: normalizeTitle(titleFromMarkdown(md, rel)) });
158
+
}
159
+
}
160
+
161
+
// Copy devlog files to docs/devlog/ and generate an index
162
+
const devlogFiles = (await exists(devlogDir)) ? await listMarkdownFiles(devlogDir) : [];
163
+
const devlogEntries = [];
164
+
165
+
for (const rel of devlogFiles) {
166
+
const src = path.join(devlogDir, rel);
167
+
const dst = path.join(outDocsDir, "devlog", rel);
168
+
await mkdir(path.dirname(dst), { recursive: true });
169
+
await cp(src, dst);
170
+
171
+
const md = await readFile(src, "utf8");
172
+
devlogEntries.push({
173
+
path: `devlog/${rel}`,
174
+
title: titleFromMarkdown(md, rel),
175
+
});
176
+
}
177
+
178
+
// Generate devlog index listing all entries (newest first by filename)
179
+
if (devlogEntries.length > 0) {
180
+
devlogEntries.sort((a, b) => b.path.localeCompare(a.path));
181
+
const indexMd = [
182
+
"# devlog",
183
+
"",
184
+
...devlogEntries.map((e) => `- [${e.title}](${e.path})`),
185
+
"",
186
+
].join("\n");
187
+
await writeFile(path.join(outDocsDir, "devlog", "index.md"), indexMd, "utf8");
188
+
}
189
+
190
+
// Stable nav order: README homepage, then roadmap, then changelog, then the rest.
191
+
pages.sort((a, b) => {
192
+
const order = (p) => {
193
+
if (p === "index.md") return 0;
194
+
if (p === "roadmap.md") return 1;
195
+
if (p === "changelog.md") return 2;
196
+
return 3;
197
+
};
198
+
const ao = order(a.path);
199
+
const bo = order(b.path);
200
+
if (ao !== bo) return ao - bo;
201
+
return a.title.localeCompare(b.title);
202
+
});
203
+
204
+
await writeFile(
205
+
path.join(outDir, "manifest.json"),
206
+
JSON.stringify({ pages }, null, 2) + "\n",
207
+
"utf8",
208
+
);
209
+
210
+
process.stdout.write(
211
+
`Built Wisp docs site: ${pages.length} markdown file(s) -> ${outDir}\n`,
212
+
);
213
+
}
214
+
215
+
await main();
+263
scripts/publish-docs.zig
···
1
+
const std = @import("std");
2
+
const zat = @import("zat");
3
+
4
+
const Allocator = std.mem.Allocator;
5
+
6
+
const DocEntry = struct { path: []const u8, file: []const u8 };
7
+
8
+
/// docs to publish as site.standard.document records
9
+
const docs = [_]DocEntry{
10
+
.{ .path = "/", .file = "README.md" },
11
+
.{ .path = "/roadmap", .file = "docs/roadmap.md" },
12
+
.{ .path = "/changelog", .file = "CHANGELOG.md" },
13
+
};
14
+
15
+
/// devlog entries
16
+
const devlog = [_]DocEntry{
17
+
.{ .path = "/devlog/001", .file = "devlog/001-self-publishing-docs.md" },
18
+
};
19
+
20
+
pub fn main() !void {
21
+
// use page_allocator for CLI tool - OS reclaims on exit
22
+
const allocator = std.heap.page_allocator;
23
+
24
+
const handle = "zat.dev";
25
+
26
+
const password = std.posix.getenv("ATPROTO_PASSWORD") orelse {
27
+
std.debug.print("error: ATPROTO_PASSWORD not set\n", .{});
28
+
return error.MissingEnv;
29
+
};
30
+
31
+
const pds = std.posix.getenv("ATPROTO_PDS") orelse "https://bsky.social";
32
+
33
+
var client = zat.XrpcClient.init(allocator, pds);
34
+
defer client.deinit();
35
+
36
+
const session = try createSession(&client, allocator, handle, password);
37
+
defer {
38
+
allocator.free(session.did);
39
+
allocator.free(session.access_token);
40
+
}
41
+
42
+
std.debug.print("authenticated as {s}\n", .{session.did});
43
+
client.setAuth(session.access_token);
44
+
45
+
// generate TID for publication (fixed timestamp for deterministic rkey)
46
+
// using 2024-01-01 00:00:00 UTC as base timestamp (1704067200 seconds = 1704067200000000 microseconds)
47
+
const pub_tid = zat.Tid.fromTimestamp(1704067200000000, 0);
48
+
const pub_record = Publication{
49
+
.url = "https://zat.dev",
50
+
.name = "zat",
51
+
.description = "AT Protocol building blocks for zig",
52
+
};
53
+
54
+
try putRecord(&client, allocator, session.did, "site.standard.publication", pub_tid.str(), pub_record);
55
+
std.debug.print("created publication: at://{s}/site.standard.publication/{s}\n", .{ session.did, pub_tid.str() });
56
+
57
+
var pub_uri_buf: std.ArrayList(u8) = .empty;
58
+
defer pub_uri_buf.deinit(allocator);
59
+
try pub_uri_buf.print(allocator, "at://{s}/site.standard.publication/{s}", .{ session.did, pub_tid.str() });
60
+
const pub_uri = pub_uri_buf.items;
61
+
62
+
// publish each doc with deterministic TIDs (same base timestamp, incrementing clock_id)
63
+
const now = timestamp();
64
+
65
+
for (docs, 0..) |doc, i| {
66
+
const content = std.fs.cwd().readFileAlloc(allocator, doc.file, 1024 * 1024) catch |err| {
67
+
std.debug.print("warning: could not read {s}: {}\n", .{ doc.file, err });
68
+
continue;
69
+
};
70
+
defer allocator.free(content);
71
+
72
+
const title = extractTitle(content) orelse doc.file;
73
+
const tid = zat.Tid.fromTimestamp(1704067200000000, @intCast(i + 1)); // clock_id 1, 2, 3...
74
+
75
+
const doc_record = Document{
76
+
.site = pub_uri,
77
+
.title = title,
78
+
.path = doc.path,
79
+
.textContent = content,
80
+
.publishedAt = &now,
81
+
};
82
+
83
+
try putRecord(&client, allocator, session.did, "site.standard.document", tid.str(), doc_record);
84
+
std.debug.print("published: {s} -> at://{s}/site.standard.document/{s}\n", .{ doc.file, session.did, tid.str() });
85
+
}
86
+
87
+
// devlog publication (clock_id 100 to separate from docs)
88
+
const devlog_tid = zat.Tid.fromTimestamp(1704067200000000, 100);
89
+
const devlog_pub = Publication{
90
+
.url = "https://zat.dev",
91
+
.name = "zat devlog",
92
+
.description = "building zat in public",
93
+
};
94
+
95
+
try putRecord(&client, allocator, session.did, "site.standard.publication", devlog_tid.str(), devlog_pub);
96
+
std.debug.print("created publication: at://{s}/site.standard.publication/{s}\n", .{ session.did, devlog_tid.str() });
97
+
98
+
var devlog_uri_buf: std.ArrayList(u8) = .empty;
99
+
defer devlog_uri_buf.deinit(allocator);
100
+
try devlog_uri_buf.print(allocator, "at://{s}/site.standard.publication/{s}", .{ session.did, devlog_tid.str() });
101
+
const devlog_uri = devlog_uri_buf.items;
102
+
103
+
// publish devlog entries (clock_id 101, 102, ...)
104
+
for (devlog, 0..) |entry, i| {
105
+
const content = std.fs.cwd().readFileAlloc(allocator, entry.file, 1024 * 1024) catch |err| {
106
+
std.debug.print("warning: could not read {s}: {}\n", .{ entry.file, err });
107
+
continue;
108
+
};
109
+
defer allocator.free(content);
110
+
111
+
const title = extractTitle(content) orelse entry.file;
112
+
const tid = zat.Tid.fromTimestamp(1704067200000000, @intCast(101 + i));
113
+
114
+
const doc_record = Document{
115
+
.site = devlog_uri,
116
+
.title = title,
117
+
.path = entry.path,
118
+
.textContent = content,
119
+
.publishedAt = &now,
120
+
};
121
+
122
+
try putRecord(&client, allocator, session.did, "site.standard.document", tid.str(), doc_record);
123
+
std.debug.print("published: {s} -> at://{s}/site.standard.document/{s}\n", .{ entry.file, session.did, tid.str() });
124
+
}
125
+
126
+
std.debug.print("done\n", .{});
127
+
}
128
+
129
+
const Publication = struct {
130
+
@"$type": []const u8 = "site.standard.publication",
131
+
url: []const u8,
132
+
name: []const u8,
133
+
description: ?[]const u8 = null,
134
+
};
135
+
136
+
const Document = struct {
137
+
@"$type": []const u8 = "site.standard.document",
138
+
site: []const u8,
139
+
title: []const u8,
140
+
path: ?[]const u8 = null,
141
+
textContent: ?[]const u8 = null,
142
+
publishedAt: []const u8,
143
+
};
144
+
145
+
const Session = struct {
146
+
did: []const u8,
147
+
access_token: []const u8,
148
+
};
149
+
150
+
fn createSession(client: *zat.XrpcClient, allocator: Allocator, handle: []const u8, password: []const u8) !Session {
151
+
const CreateSessionInput = struct {
152
+
identifier: []const u8,
153
+
password: []const u8,
154
+
};
155
+
156
+
var buf: std.ArrayList(u8) = .empty;
157
+
defer buf.deinit(allocator);
158
+
try buf.print(allocator, "{f}", .{std.json.fmt(CreateSessionInput{
159
+
.identifier = handle,
160
+
.password = password,
161
+
}, .{})});
162
+
163
+
const nsid = zat.Nsid.parse("com.atproto.server.createSession").?;
164
+
var response = try client.procedure(nsid, buf.items);
165
+
defer response.deinit();
166
+
167
+
if (!response.ok()) {
168
+
std.debug.print("createSession failed: {s}\n", .{response.body});
169
+
return error.AuthFailed;
170
+
}
171
+
172
+
var parsed = try response.json();
173
+
defer parsed.deinit();
174
+
175
+
const did = zat.json.getString(parsed.value, "did") orelse return error.MissingDid;
176
+
const token = zat.json.getString(parsed.value, "accessJwt") orelse return error.MissingToken;
177
+
178
+
return .{
179
+
.did = try allocator.dupe(u8, did),
180
+
.access_token = try allocator.dupe(u8, token),
181
+
};
182
+
}
183
+
184
+
fn putRecord(client: *zat.XrpcClient, allocator: Allocator, repo: []const u8, collection: []const u8, rkey: []const u8, record: anytype) !void {
185
+
// serialize record to json
186
+
var record_buf: std.ArrayList(u8) = .empty;
187
+
defer record_buf.deinit(allocator);
188
+
try record_buf.print(allocator, "{f}", .{std.json.fmt(record, .{})});
189
+
190
+
// build request body
191
+
var body: std.ArrayList(u8) = .empty;
192
+
defer body.deinit(allocator);
193
+
194
+
try body.appendSlice(allocator, "{\"repo\":\"");
195
+
try body.appendSlice(allocator, repo);
196
+
try body.appendSlice(allocator, "\",\"collection\":\"");
197
+
try body.appendSlice(allocator, collection);
198
+
try body.appendSlice(allocator, "\",\"rkey\":\"");
199
+
try body.appendSlice(allocator, rkey);
200
+
try body.appendSlice(allocator, "\",\"record\":");
201
+
try body.appendSlice(allocator, record_buf.items);
202
+
try body.append(allocator, '}');
203
+
204
+
const nsid = zat.Nsid.parse("com.atproto.repo.putRecord").?;
205
+
var response = try client.procedure(nsid, body.items);
206
+
defer response.deinit();
207
+
208
+
if (!response.ok()) {
209
+
std.debug.print("putRecord failed: {s}\n", .{response.body});
210
+
return error.PutFailed;
211
+
}
212
+
}
213
+
214
+
fn extractTitle(content: []const u8) ?[]const u8 {
215
+
var lines = std.mem.splitScalar(u8, content, '\n');
216
+
while (lines.next()) |line| {
217
+
const trimmed = std.mem.trim(u8, line, " \t\r");
218
+
if (trimmed.len > 2 and trimmed[0] == '#' and trimmed[1] == ' ') {
219
+
var title = trimmed[2..];
220
+
// strip markdown link: [text](url) -> text
221
+
if (std.mem.indexOf(u8, title, "](")) |bracket| {
222
+
if (title[0] == '[') {
223
+
title = title[1..bracket];
224
+
}
225
+
}
226
+
return title;
227
+
}
228
+
}
229
+
return null;
230
+
}
231
+
232
+
fn timestamp() [20]u8 {
233
+
const epoch_seconds = std.time.timestamp();
234
+
const days: i32 = @intCast(@divFloor(epoch_seconds, std.time.s_per_day));
235
+
const day_secs: u32 = @intCast(@mod(epoch_seconds, std.time.s_per_day));
236
+
237
+
// calculate year/month/day from days since epoch (1970-01-01)
238
+
var y: i32 = 1970;
239
+
var remaining = days;
240
+
while (true) {
241
+
const year_days: i32 = if (@mod(y, 4) == 0 and (@mod(y, 100) != 0 or @mod(y, 400) == 0)) 366 else 365;
242
+
if (remaining < year_days) break;
243
+
remaining -= year_days;
244
+
y += 1;
245
+
}
246
+
247
+
const is_leap = @mod(y, 4) == 0 and (@mod(y, 100) != 0 or @mod(y, 400) == 0);
248
+
const month_days = [12]u8{ 31, if (is_leap) 29 else 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 };
249
+
var m: usize = 0;
250
+
while (m < 12 and remaining >= month_days[m]) : (m += 1) {
251
+
remaining -= month_days[m];
252
+
}
253
+
254
+
const hours = day_secs / 3600;
255
+
const mins = (day_secs % 3600) / 60;
256
+
const secs = day_secs % 60;
257
+
258
+
var buf: [20]u8 = undefined;
259
+
_ = std.fmt.bufPrint(&buf, "{d:0>4}-{d:0>2}-{d:0>2}T{d:0>2}:{d:0>2}:{d:0>2}Z", .{
260
+
@as(u32, @intCast(y)), @as(u32, @intCast(m + 1)), @as(u32, @intCast(remaining + 1)), hours, mins, secs,
261
+
}) catch unreachable;
262
+
return buf;
263
+
}
+165
site/app.js
···
1
+
const navEl = document.getElementById("nav");
2
+
const contentEl = document.getElementById("content");
3
+
const menuToggle = document.querySelector(".menu-toggle");
4
+
const sidebar = document.querySelector(".sidebar");
5
+
const overlay = document.querySelector(".overlay");
6
+
7
+
function toggleMenu(open) {
8
+
const isOpen = open ?? !sidebar.classList.contains("open");
9
+
sidebar.classList.toggle("open", isOpen);
10
+
overlay?.classList.toggle("open", isOpen);
11
+
menuToggle?.setAttribute("aria-expanded", isOpen);
12
+
document.body.style.overflow = isOpen ? "hidden" : "";
13
+
}
14
+
15
+
menuToggle?.addEventListener("click", () => toggleMenu());
16
+
overlay?.addEventListener("click", () => toggleMenu(false));
17
+
18
+
// Close menu when nav link clicked (mobile)
19
+
navEl?.addEventListener("click", (e) => {
20
+
if (e.target.closest("a")) toggleMenu(false);
21
+
});
22
+
23
+
const buildId = new URL(import.meta.url).searchParams.get("v") || "";
24
+
25
+
function withBuild(url) {
26
+
if (!buildId) return url;
27
+
const sep = url.includes("?") ? "&" : "?";
28
+
return `${url}${sep}v=${encodeURIComponent(buildId)}`;
29
+
}
30
+
31
+
function escapeHtml(text) {
32
+
return text
33
+
.replaceAll("&", "&")
34
+
.replaceAll("<", "<")
35
+
.replaceAll(">", ">")
36
+
.replaceAll('"', """)
37
+
.replaceAll("'", "'");
38
+
}
39
+
40
+
function normalizeDocPath(docPath) {
41
+
let p = String(docPath || "").trim();
42
+
p = p.replaceAll("\\", "/");
43
+
p = p.replace(/^\/+/, "");
44
+
p = p.replace(/\.\.\//g, "");
45
+
if (!p.endsWith(".md")) p += ".md";
46
+
return p;
47
+
}
48
+
49
+
function getSelectedPath() {
50
+
const hash = (location.hash || "").replace(/^#/, "");
51
+
if (!hash) return null;
52
+
return normalizeDocPath(hash);
53
+
}
54
+
55
+
function setSelectedPath(docPath) {
56
+
location.hash = normalizeDocPath(docPath);
57
+
}
58
+
59
+
async function fetchJson(path) {
60
+
const res = await fetch(withBuild(path), { cache: "no-store" });
61
+
if (!res.ok) throw new Error(`Failed to fetch ${path}: ${res.status}`);
62
+
return res.json();
63
+
}
64
+
65
+
async function fetchText(path) {
66
+
const res = await fetch(withBuild(path), { cache: "no-store" });
67
+
if (!res.ok) throw new Error(`Failed to fetch ${path}: ${res.status}`);
68
+
return res.text();
69
+
}
70
+
71
+
function renderNav(pages, activePath) {
72
+
if (!pages.length) {
73
+
navEl.innerHTML = "";
74
+
return;
75
+
}
76
+
77
+
navEl.innerHTML = pages
78
+
.map((p) => {
79
+
const path = normalizeDocPath(p.path);
80
+
const title = escapeHtml(p.title || path);
81
+
const current = activePath === path ? ` aria-current="page"` : "";
82
+
return `<a href="#${encodeURIComponent(path)}"${current}>${title}</a>`;
83
+
})
84
+
.join("");
85
+
}
86
+
87
+
function installContentLinkHandler() {
88
+
contentEl.addEventListener("click", (e) => {
89
+
const a = e.target?.closest?.("a");
90
+
if (!a) return;
91
+
92
+
const href = a.getAttribute("href") || "";
93
+
if (
94
+
href.startsWith("http://") ||
95
+
href.startsWith("https://") ||
96
+
href.startsWith("mailto:") ||
97
+
href.startsWith("#")
98
+
) {
99
+
return;
100
+
}
101
+
102
+
// Route relative markdown links through the SPA.
103
+
if (href.endsWith(".md")) {
104
+
e.preventDefault();
105
+
setSelectedPath(href);
106
+
return;
107
+
}
108
+
});
109
+
}
110
+
111
+
async function main() {
112
+
if (!globalThis.marked) {
113
+
contentEl.innerHTML = `<p class="empty">Markdown renderer failed to load.</p>`;
114
+
return;
115
+
}
116
+
117
+
installContentLinkHandler();
118
+
119
+
let manifest;
120
+
try {
121
+
manifest = await fetchJson("./manifest.json");
122
+
} catch (e) {
123
+
contentEl.innerHTML = `<p class="empty">Missing <code>manifest.json</code>. Deploy the site via CI.</p>`;
124
+
navEl.innerHTML = "";
125
+
console.error(e);
126
+
return;
127
+
}
128
+
129
+
const pages = Array.isArray(manifest.pages) ? manifest.pages : [];
130
+
const defaultPath = pages[0]?.path ? normalizeDocPath(pages[0].path) : null;
131
+
132
+
async function render() {
133
+
const activePath = getSelectedPath() || defaultPath;
134
+
renderNav(pages, activePath);
135
+
136
+
if (!activePath) {
137
+
contentEl.innerHTML = `<p class="empty">No docs yet. Add markdown files under <code>zat/docs/</code> and push to <code>main</code>.</p>`;
138
+
return;
139
+
}
140
+
141
+
try {
142
+
const md = await fetchText(`./docs/${activePath}`);
143
+
const html = globalThis.marked.parse(md);
144
+
contentEl.innerHTML = html;
145
+
146
+
// Update current marker after navigation re-render.
147
+
for (const a of navEl.querySelectorAll("a")) {
148
+
const href = decodeURIComponent((a.getAttribute("href") || "").slice(1));
149
+
a.toggleAttribute("aria-current", normalizeDocPath(href) === activePath);
150
+
}
151
+
} catch (e) {
152
+
contentEl.innerHTML = `<p class="empty">Failed to load <code>${escapeHtml(
153
+
activePath,
154
+
)}</code>.</p>`;
155
+
console.error(e);
156
+
}
157
+
}
158
+
159
+
window.addEventListener("hashchange", () => render());
160
+
161
+
if (!getSelectedPath() && defaultPath) setSelectedPath(defaultPath);
162
+
await render();
163
+
}
164
+
165
+
main();
+4
site/favicon.svg
+53
site/index.html
···
1
+
<!doctype html>
2
+
<html lang="en">
3
+
<head>
4
+
<meta charset="utf-8" />
5
+
<meta name="viewport" content="width=device-width, initial-scale=1" />
6
+
<title>zat.dev</title>
7
+
<meta name="description" content="zat documentation" />
8
+
<link rel="icon" href="./favicon.svg" type="image/svg+xml" />
9
+
<link rel="stylesheet" href="./style.css" />
10
+
</head>
11
+
<body>
12
+
<div class="app">
13
+
<header class="header">
14
+
<button class="menu-toggle" aria-label="Toggle navigation" aria-expanded="false">
15
+
<span></span>
16
+
</button>
17
+
<a class="brand" href="./">zat.dev</a>
18
+
<div class="header-links">
19
+
<a class="header-link" href="#devlog/index.md">devlog</a>
20
+
<a class="header-link" href="https://tangled.sh/zat.dev/zat" target="_blank" rel="noopener noreferrer">repo</a>
21
+
</div>
22
+
</header>
23
+
24
+
<div class="overlay"></div>
25
+
<div class="layout">
26
+
<nav class="sidebar">
27
+
<div id="nav" class="nav"></div>
28
+
</nav>
29
+
30
+
<main class="main">
31
+
<article id="content" class="content">
32
+
<noscript>This docs site requires JavaScript.</noscript>
33
+
</article>
34
+
</main>
35
+
</div>
36
+
37
+
<footer class="site-footer">
38
+
<a
39
+
class="footer-link"
40
+
href="https://wisp.place"
41
+
target="_blank"
42
+
rel="noopener noreferrer"
43
+
>
44
+
powered by wisp.place
45
+
</a>
46
+
</footer>
47
+
</div>
48
+
49
+
<!-- Markdown renderer (no build step). -->
50
+
<script src="./vendor/marked.min.js"></script>
51
+
<script type="module" src="./app.js"></script>
52
+
</body>
53
+
</html>
+367
site/style.css
···
1
+
:root {
2
+
color-scheme: light dark;
3
+
--bg: #0b0b0f;
4
+
--panel: #10101a;
5
+
--text: #f3f4f6;
6
+
--muted: #a1a1aa;
7
+
--border: rgba(255, 255, 255, 0.08);
8
+
--link: #93c5fd;
9
+
--codebg: rgba(255, 255, 255, 0.06);
10
+
--shadow: rgba(0, 0, 0, 0.35);
11
+
--max: 900px;
12
+
--radius: 12px;
13
+
--gutter: 16px;
14
+
--mono: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono",
15
+
"Courier New", monospace;
16
+
--sans: ui-sans-serif, system-ui, -apple-system, Segoe UI, Roboto, Helvetica,
17
+
Arial, "Apple Color Emoji", "Segoe UI Emoji";
18
+
}
19
+
20
+
@media (prefers-color-scheme: light) {
21
+
:root {
22
+
--bg: #fafafa;
23
+
--panel: #ffffff;
24
+
--text: #0b0b0f;
25
+
--muted: #52525b;
26
+
--border: rgba(0, 0, 0, 0.08);
27
+
--link: #2563eb;
28
+
--codebg: rgba(0, 0, 0, 0.04);
29
+
--shadow: rgba(0, 0, 0, 0.08);
30
+
}
31
+
}
32
+
33
+
* {
34
+
box-sizing: border-box;
35
+
}
36
+
37
+
html,
38
+
body {
39
+
height: 100%;
40
+
}
41
+
42
+
body {
43
+
margin: 0;
44
+
font-family: var(--sans);
45
+
background: var(--bg);
46
+
color: var(--text);
47
+
}
48
+
49
+
a {
50
+
color: var(--link);
51
+
text-decoration: none;
52
+
}
53
+
a:hover {
54
+
text-decoration: underline;
55
+
}
56
+
57
+
/* App shell */
58
+
.app {
59
+
min-height: 100%;
60
+
display: flex;
61
+
flex-direction: column;
62
+
}
63
+
64
+
/* Header */
65
+
.header {
66
+
position: sticky;
67
+
top: 0;
68
+
z-index: 20;
69
+
display: flex;
70
+
align-items: center;
71
+
gap: 12px;
72
+
padding: 12px var(--gutter);
73
+
border-bottom: 1px solid var(--border);
74
+
background: color-mix(in srgb, var(--panel) 92%, transparent);
75
+
backdrop-filter: blur(10px);
76
+
}
77
+
78
+
.menu-toggle {
79
+
display: none;
80
+
align-items: center;
81
+
justify-content: center;
82
+
width: 36px;
83
+
height: 36px;
84
+
padding: 0;
85
+
background: transparent;
86
+
border: 1px solid var(--border);
87
+
border-radius: 8px;
88
+
cursor: pointer;
89
+
flex-shrink: 0;
90
+
}
91
+
.menu-toggle span {
92
+
display: block;
93
+
width: 16px;
94
+
height: 2px;
95
+
background: var(--text);
96
+
border-radius: 1px;
97
+
position: relative;
98
+
}
99
+
.menu-toggle span::before,
100
+
.menu-toggle span::after {
101
+
content: "";
102
+
position: absolute;
103
+
left: 0;
104
+
width: 16px;
105
+
height: 2px;
106
+
background: var(--text);
107
+
border-radius: 1px;
108
+
transition: transform 0.2s;
109
+
}
110
+
.menu-toggle span::before {
111
+
top: -5px;
112
+
}
113
+
.menu-toggle span::after {
114
+
top: 5px;
115
+
}
116
+
.menu-toggle[aria-expanded="true"] span {
117
+
background: transparent;
118
+
}
119
+
.menu-toggle[aria-expanded="true"] span::before {
120
+
transform: translateY(5px) rotate(45deg);
121
+
}
122
+
.menu-toggle[aria-expanded="true"] span::after {
123
+
transform: translateY(-5px) rotate(-45deg);
124
+
}
125
+
126
+
.brand {
127
+
font-weight: 700;
128
+
font-size: 15px;
129
+
color: var(--text);
130
+
padding: 6px 0;
131
+
}
132
+
.brand:hover {
133
+
text-decoration: none;
134
+
opacity: 0.8;
135
+
}
136
+
137
+
.header-links {
138
+
display: flex;
139
+
gap: 8px;
140
+
margin-left: auto;
141
+
}
142
+
143
+
.header-link {
144
+
padding: 6px 12px;
145
+
font-size: 14px;
146
+
border-radius: 8px;
147
+
border: 1px solid var(--border);
148
+
color: var(--text);
149
+
}
150
+
.header-link:hover {
151
+
background: var(--codebg);
152
+
text-decoration: none;
153
+
}
154
+
155
+
/* Overlay */
156
+
.overlay {
157
+
display: none;
158
+
position: fixed;
159
+
inset: 0;
160
+
z-index: 15;
161
+
background: rgba(0, 0, 0, 0.5);
162
+
}
163
+
.overlay.open {
164
+
display: block;
165
+
}
166
+
167
+
/* Layout */
168
+
.layout {
169
+
display: flex;
170
+
gap: var(--gutter);
171
+
padding: var(--gutter);
172
+
flex: 1;
173
+
max-width: 1200px;
174
+
margin: 0 auto;
175
+
width: 100%;
176
+
}
177
+
178
+
/* Sidebar */
179
+
.sidebar {
180
+
width: 240px;
181
+
flex-shrink: 0;
182
+
position: sticky;
183
+
top: 72px;
184
+
align-self: flex-start;
185
+
max-height: calc(100vh - 88px);
186
+
overflow-y: auto;
187
+
border: 1px solid var(--border);
188
+
border-radius: var(--radius);
189
+
background: var(--panel);
190
+
}
191
+
192
+
.nav {
193
+
padding: 8px;
194
+
display: flex;
195
+
flex-direction: column;
196
+
gap: 2px;
197
+
}
198
+
199
+
.nav a {
200
+
display: block;
201
+
padding: 10px 12px;
202
+
border-radius: 8px;
203
+
color: var(--text);
204
+
font-size: 14px;
205
+
}
206
+
.nav a:hover {
207
+
background: var(--codebg);
208
+
text-decoration: none;
209
+
}
210
+
.nav a[aria-current="page"] {
211
+
background: color-mix(in srgb, var(--link) 15%, transparent);
212
+
}
213
+
214
+
/* Main content */
215
+
.main {
216
+
flex: 1;
217
+
min-width: 0;
218
+
}
219
+
220
+
.content {
221
+
border: 1px solid var(--border);
222
+
border-radius: var(--radius);
223
+
background: var(--panel);
224
+
padding: 24px;
225
+
}
226
+
227
+
/* Footer */
228
+
.site-footer {
229
+
padding: 16px var(--gutter);
230
+
text-align: center;
231
+
border-top: 1px solid var(--border);
232
+
}
233
+
234
+
.footer-link {
235
+
font-size: 13px;
236
+
color: var(--muted);
237
+
}
238
+
.footer-link:hover {
239
+
color: var(--text);
240
+
text-decoration: none;
241
+
}
242
+
243
+
/* Content typography */
244
+
.content h1,
245
+
.content h2,
246
+
.content h3 {
247
+
scroll-margin-top: 80px;
248
+
}
249
+
250
+
.content h1 {
251
+
margin-top: 0;
252
+
font-size: 28px;
253
+
}
254
+
255
+
.content h2 {
256
+
font-size: 20px;
257
+
margin-top: 32px;
258
+
}
259
+
260
+
.content h3 {
261
+
font-size: 16px;
262
+
margin-top: 24px;
263
+
}
264
+
265
+
.content p,
266
+
.content li {
267
+
line-height: 1.65;
268
+
}
269
+
270
+
.content code {
271
+
font-family: var(--mono);
272
+
font-size: 0.9em;
273
+
background: var(--codebg);
274
+
padding: 2px 6px;
275
+
border-radius: 6px;
276
+
}
277
+
278
+
.content pre {
279
+
overflow-x: auto;
280
+
padding: 16px;
281
+
border-radius: 10px;
282
+
background: var(--codebg);
283
+
border: 1px solid var(--border);
284
+
font-size: 14px;
285
+
line-height: 1.5;
286
+
}
287
+
288
+
.content pre code {
289
+
background: transparent;
290
+
padding: 0;
291
+
}
292
+
293
+
.content details {
294
+
margin: 16px 0;
295
+
}
296
+
297
+
.content details summary {
298
+
cursor: pointer;
299
+
padding: 8px 0;
300
+
}
301
+
302
+
.empty {
303
+
color: var(--muted);
304
+
}
305
+
306
+
/* Mobile */
307
+
@media (max-width: 768px) {
308
+
:root {
309
+
--gutter: 16px;
310
+
}
311
+
312
+
.menu-toggle {
313
+
display: flex;
314
+
}
315
+
316
+
.layout {
317
+
flex-direction: column;
318
+
}
319
+
320
+
.sidebar {
321
+
position: fixed;
322
+
top: 0;
323
+
left: 0;
324
+
bottom: 0;
325
+
width: 280px;
326
+
max-width: 80vw;
327
+
z-index: 16;
328
+
border: none;
329
+
border-radius: 0;
330
+
border-right: 1px solid var(--border);
331
+
max-height: none;
332
+
padding-top: 60px;
333
+
transform: translateX(-100%);
334
+
transition: transform 0.2s ease-out;
335
+
}
336
+
337
+
.sidebar.open {
338
+
transform: translateX(0);
339
+
}
340
+
341
+
.nav {
342
+
padding: 12px;
343
+
}
344
+
345
+
.nav a {
346
+
padding: 12px 14px;
347
+
font-size: 15px;
348
+
}
349
+
350
+
.content {
351
+
padding: 20px;
352
+
border-radius: 10px;
353
+
}
354
+
355
+
.content h1 {
356
+
font-size: 24px;
357
+
}
358
+
359
+
.content h2 {
360
+
font-size: 18px;
361
+
}
362
+
363
+
.content pre {
364
+
font-size: 13px;
365
+
padding: 14px;
366
+
}
367
+
}
+69
site/vendor/marked.min.js
···
1
+
/**
2
+
* marked v15.0.12 - a markdown parser
3
+
* Copyright (c) 2011-2025, Christopher Jeffrey. (MIT Licensed)
4
+
* https://github.com/markedjs/marked
5
+
*/
6
+
7
+
/**
8
+
* DO NOT EDIT THIS FILE
9
+
* The code in this file is generated from files in ./src/
10
+
*/
11
+
(function(g,f){if(typeof exports=="object"&&typeof module<"u"){module.exports=f()}else if("function"==typeof define && define.amd){define("marked",f)}else {g["marked"]=f()}}(typeof globalThis < "u" ? globalThis : typeof self < "u" ? self : this,function(){var exports={};var __exports=exports;var module={exports};
12
+
"use strict";var H=Object.defineProperty;var be=Object.getOwnPropertyDescriptor;var Te=Object.getOwnPropertyNames;var we=Object.prototype.hasOwnProperty;var ye=(l,e)=>{for(var t in e)H(l,t,{get:e[t],enumerable:!0})},Re=(l,e,t,n)=>{if(e&&typeof e=="object"||typeof e=="function")for(let s of Te(e))!we.call(l,s)&&s!==t&&H(l,s,{get:()=>e[s],enumerable:!(n=be(e,s))||n.enumerable});return l};var Se=l=>Re(H({},"__esModule",{value:!0}),l);var kt={};ye(kt,{Hooks:()=>L,Lexer:()=>x,Marked:()=>E,Parser:()=>b,Renderer:()=>$,TextRenderer:()=>_,Tokenizer:()=>S,defaults:()=>w,getDefaults:()=>z,lexer:()=>ht,marked:()=>k,options:()=>it,parse:()=>pt,parseInline:()=>ct,parser:()=>ut,setOptions:()=>ot,use:()=>lt,walkTokens:()=>at});module.exports=Se(kt);function z(){return{async:!1,breaks:!1,extensions:null,gfm:!0,hooks:null,pedantic:!1,renderer:null,silent:!1,tokenizer:null,walkTokens:null}}var w=z();function N(l){w=l}var I={exec:()=>null};function h(l,e=""){let t=typeof l=="string"?l:l.source,n={replace:(s,i)=>{let r=typeof i=="string"?i:i.source;return r=r.replace(m.caret,"$1"),t=t.replace(s,r),n},getRegex:()=>new RegExp(t,e)};return n}var m={codeRemoveIndent:/^(?: {1,4}| {0,3}\t)/gm,outputLinkReplace:/\\([\[\]])/g,indentCodeCompensation:/^(\s+)(?:```)/,beginningSpace:/^\s+/,endingHash:/#$/,startingSpaceChar:/^ /,endingSpaceChar:/ $/,nonSpaceChar:/[^ ]/,newLineCharGlobal:/\n/g,tabCharGlobal:/\t/g,multipleSpaceGlobal:/\s+/g,blankLine:/^[ \t]*$/,doubleBlankLine:/\n[ \t]*\n[ \t]*$/,blockquoteStart:/^ {0,3}>/,blockquoteSetextReplace:/\n {0,3}((?:=+|-+) *)(?=\n|$)/g,blockquoteSetextReplace2:/^ {0,3}>[ \t]?/gm,listReplaceTabs:/^\t+/,listReplaceNesting:/^ {1,4}(?=( {4})*[^ ])/g,listIsTask:/^\[[ xX]\] /,listReplaceTask:/^\[[ xX]\] +/,anyLine:/\n.*\n/,hrefBrackets:/^<(.*)>$/,tableDelimiter:/[:|]/,tableAlignChars:/^\||\| *$/g,tableRowBlankLine:/\n[ \t]*$/,tableAlignRight:/^ *-+: *$/,tableAlignCenter:/^ *:-+: *$/,tableAlignLeft:/^ *:-+ *$/,startATag:/^<a /i,endATag:/^<\/a>/i,startPreScriptTag:/^<(pre|code|kbd|script)(\s|>)/i,endPreScriptTag:/^<\/(pre|code|kbd|script)(\s|>)/i,startAngleBracket:/^</,endAngleBracket:/>$/,pedanticHrefTitle:/^([^'"]*[^\s])\s+(['"])(.*)\2/,unicodeAlphaNumeric:/[\p{L}\p{N}]/u,escapeTest:/[&<>"']/,escapeReplace:/[&<>"']/g,escapeTestNoEncode:/[<>"']|&(?!(#\d{1,7}|#[Xx][a-fA-F0-9]{1,6}|\w+);)/,escapeReplaceNoEncode:/[<>"']|&(?!(#\d{1,7}|#[Xx][a-fA-F0-9]{1,6}|\w+);)/g,unescapeTest:/&(#(?:\d+)|(?:#x[0-9A-Fa-f]+)|(?:\w+));?/ig,caret:/(^|[^\[])\^/g,percentDecode:/%25/g,findPipe:/\|/g,splitPipe:/ \|/,slashPipe:/\\\|/g,carriageReturn:/\r\n|\r/g,spaceLine:/^ +$/gm,notSpaceStart:/^\S*/,endingNewline:/\n$/,listItemRegex:l=>new RegExp(`^( {0,3}${l})((?:[ ][^\\n]*)?(?:\\n|$))`),nextBulletRegex:l=>new RegExp(`^ {0,${Math.min(3,l-1)}}(?:[*+-]|\\d{1,9}[.)])((?:[ ][^\\n]*)?(?:\\n|$))`),hrRegex:l=>new RegExp(`^ {0,${Math.min(3,l-1)}}((?:- *){3,}|(?:_ *){3,}|(?:\\* *){3,})(?:\\n+|$)`),fencesBeginRegex:l=>new RegExp(`^ {0,${Math.min(3,l-1)}}(?:\`\`\`|~~~)`),headingBeginRegex:l=>new RegExp(`^ {0,${Math.min(3,l-1)}}#`),htmlBeginRegex:l=>new RegExp(`^ {0,${Math.min(3,l-1)}}<(?:[a-z].*>|!--)`,"i")},$e=/^(?:[ \t]*(?:\n|$))+/,_e=/^((?: {4}| {0,3}\t)[^\n]+(?:\n(?:[ \t]*(?:\n|$))*)?)+/,Le=/^ {0,3}(`{3,}(?=[^`\n]*(?:\n|$))|~{3,})([^\n]*)(?:\n|$)(?:|([\s\S]*?)(?:\n|$))(?: {0,3}\1[~`]* *(?=\n|$)|$)/,O=/^ {0,3}((?:-[\t ]*){3,}|(?:_[ \t]*){3,}|(?:\*[ \t]*){3,})(?:\n+|$)/,ze=/^ {0,3}(#{1,6})(?=\s|$)(.*)(?:\n+|$)/,F=/(?:[*+-]|\d{1,9}[.)])/,ie=/^(?!bull |blockCode|fences|blockquote|heading|html|table)((?:.|\n(?!\s*?\n|bull 
|blockCode|fences|blockquote|heading|html|table))+?)\n {0,3}(=+|-+) *(?:\n+|$)/,oe=h(ie).replace(/bull/g,F).replace(/blockCode/g,/(?: {4}| {0,3}\t)/).replace(/fences/g,/ {0,3}(?:`{3,}|~{3,})/).replace(/blockquote/g,/ {0,3}>/).replace(/heading/g,/ {0,3}#{1,6}/).replace(/html/g,/ {0,3}<[^\n>]+>\n/).replace(/\|table/g,"").getRegex(),Me=h(ie).replace(/bull/g,F).replace(/blockCode/g,/(?: {4}| {0,3}\t)/).replace(/fences/g,/ {0,3}(?:`{3,}|~{3,})/).replace(/blockquote/g,/ {0,3}>/).replace(/heading/g,/ {0,3}#{1,6}/).replace(/html/g,/ {0,3}<[^\n>]+>\n/).replace(/table/g,/ {0,3}\|?(?:[:\- ]*\|)+[\:\- ]*\n/).getRegex(),Q=/^([^\n]+(?:\n(?!hr|heading|lheading|blockquote|fences|list|html|table| +\n)[^\n]+)*)/,Pe=/^[^\n]+/,U=/(?!\s*\])(?:\\.|[^\[\]\\])+/,Ae=h(/^ {0,3}\[(label)\]: *(?:\n[ \t]*)?([^<\s][^\s]*|<.*?>)(?:(?: +(?:\n[ \t]*)?| *\n[ \t]*)(title))? *(?:\n+|$)/).replace("label",U).replace("title",/(?:"(?:\\"?|[^"\\])*"|'[^'\n]*(?:\n[^'\n]+)*\n?'|\([^()]*\))/).getRegex(),Ee=h(/^( {0,3}bull)([ \t][^\n]+?)?(?:\n|$)/).replace(/bull/g,F).getRegex(),v="address|article|aside|base|basefont|blockquote|body|caption|center|col|colgroup|dd|details|dialog|dir|div|dl|dt|fieldset|figcaption|figure|footer|form|frame|frameset|h[1-6]|head|header|hr|html|iframe|legend|li|link|main|menu|menuitem|meta|nav|noframes|ol|optgroup|option|p|param|search|section|summary|table|tbody|td|tfoot|th|thead|title|tr|track|ul",K=/<!--(?:-?>|[\s\S]*?(?:-->|$))/,Ce=h("^ {0,3}(?:<(script|pre|style|textarea)[\\s>][\\s\\S]*?(?:</\\1>[^\\n]*\\n+|$)|comment[^\\n]*(\\n+|$)|<\\?[\\s\\S]*?(?:\\?>\\n*|$)|<![A-Z][\\s\\S]*?(?:>\\n*|$)|<!\\[CDATA\\[[\\s\\S]*?(?:\\]\\]>\\n*|$)|</?(tag)(?: +|\\n|/?>)[\\s\\S]*?(?:(?:\\n[ ]*)+\\n|$)|<(?!script|pre|style|textarea)([a-z][\\w-]*)(?:attribute)*? */?>(?=[ \\t]*(?:\\n|$))[\\s\\S]*?(?:(?:\\n[ ]*)+\\n|$)|</(?!script|pre|style|textarea)[a-z][\\w-]*\\s*>(?=[ \\t]*(?:\\n|$))[\\s\\S]*?(?:(?:\\n[ ]*)+\\n|$))","i").replace("comment",K).replace("tag",v).replace("attribute",/ +[a-zA-Z:_][\w.:-]*(?: *= *"[^"\n]*"| *= *'[^'\n]*'| *= *[^\s"'=<>`]+)?/).getRegex(),le=h(Q).replace("hr",O).replace("heading"," {0,3}#{1,6}(?:\\s|$)").replace("|lheading","").replace("|table","").replace("blockquote"," {0,3}>").replace("fences"," {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n").replace("list"," {0,3}(?:[*+-]|1[.)]) ").replace("html","</?(?:tag)(?: +|\\n|/?>)|<(?:script|pre|style|textarea|!--)").replace("tag",v).getRegex(),Ie=h(/^( {0,3}> ?(paragraph|[^\n]*)(?:\n|$))+/).replace("paragraph",le).getRegex(),X={blockquote:Ie,code:_e,def:Ae,fences:Le,heading:ze,hr:O,html:Ce,lheading:oe,list:Ee,newline:$e,paragraph:le,table:I,text:Pe},re=h("^ *([^\\n ].*)\\n {0,3}((?:\\| *)?:?-+:? *(?:\\| *:?-+:? *)*(?:\\| *)?)(?:\\n((?:(?! 
*\\n|hr|heading|blockquote|code|fences|list|html).*(?:\\n|$))*)\\n*|$)").replace("hr",O).replace("heading"," {0,3}#{1,6}(?:\\s|$)").replace("blockquote"," {0,3}>").replace("code","(?: {4}| {0,3} )[^\\n]").replace("fences"," {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n").replace("list"," {0,3}(?:[*+-]|1[.)]) ").replace("html","</?(?:tag)(?: +|\\n|/?>)|<(?:script|pre|style|textarea|!--)").replace("tag",v).getRegex(),Oe={...X,lheading:Me,table:re,paragraph:h(Q).replace("hr",O).replace("heading"," {0,3}#{1,6}(?:\\s|$)").replace("|lheading","").replace("table",re).replace("blockquote"," {0,3}>").replace("fences"," {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n").replace("list"," {0,3}(?:[*+-]|1[.)]) ").replace("html","</?(?:tag)(?: +|\\n|/?>)|<(?:script|pre|style|textarea|!--)").replace("tag",v).getRegex()},Be={...X,html:h(`^ *(?:comment *(?:\\n|\\s*$)|<(tag)[\\s\\S]+?</\\1> *(?:\\n{2,}|\\s*$)|<tag(?:"[^"]*"|'[^']*'|\\s[^'"/>\\s]*)*?/?> *(?:\\n{2,}|\\s*$))`).replace("comment",K).replace(/tag/g,"(?!(?:a|em|strong|small|s|cite|q|dfn|abbr|data|time|code|var|samp|kbd|sub|sup|i|b|u|mark|ruby|rt|rp|bdi|bdo|span|br|wbr|ins|del|img)\\b)\\w+(?!:|[^\\w\\s@]*@)\\b").getRegex(),def:/^ *\[([^\]]+)\]: *<?([^\s>]+)>?(?: +(["(][^\n]+[")]))? *(?:\n+|$)/,heading:/^(#{1,6})(.*)(?:\n+|$)/,fences:I,lheading:/^(.+?)\n {0,3}(=+|-+) *(?:\n+|$)/,paragraph:h(Q).replace("hr",O).replace("heading",` *#{1,6} *[^
13
+
]`).replace("lheading",oe).replace("|table","").replace("blockquote"," {0,3}>").replace("|fences","").replace("|list","").replace("|html","").replace("|tag","").getRegex()},qe=/^\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/,ve=/^(`+)([^`]|[^`][\s\S]*?[^`])\1(?!`)/,ae=/^( {2,}|\\)\n(?!\s*$)/,De=/^(`+|[^`])(?:(?= {2,}\n)|[\s\S]*?(?:(?=[\\<!\[`*_]|\b_|$)|[^ ](?= {2,}\n)))/,D=/[\p{P}\p{S}]/u,W=/[\s\p{P}\p{S}]/u,ce=/[^\s\p{P}\p{S}]/u,Ze=h(/^((?![*_])punctSpace)/,"u").replace(/punctSpace/g,W).getRegex(),pe=/(?!~)[\p{P}\p{S}]/u,Ge=/(?!~)[\s\p{P}\p{S}]/u,He=/(?:[^\s\p{P}\p{S}]|~)/u,Ne=/\[[^[\]]*?\]\((?:\\.|[^\\\(\)]|\((?:\\.|[^\\\(\)])*\))*\)|`[^`]*?`|<[^<>]*?>/g,ue=/^(?:\*+(?:((?!\*)punct)|[^\s*]))|^_+(?:((?!_)punct)|([^\s_]))/,je=h(ue,"u").replace(/punct/g,D).getRegex(),Fe=h(ue,"u").replace(/punct/g,pe).getRegex(),he="^[^_*]*?__[^_*]*?\\*[^_*]*?(?=__)|[^*]+(?=[^*])|(?!\\*)punct(\\*+)(?=[\\s]|$)|notPunctSpace(\\*+)(?!\\*)(?=punctSpace|$)|(?!\\*)punctSpace(\\*+)(?=notPunctSpace)|[\\s](\\*+)(?!\\*)(?=punct)|(?!\\*)punct(\\*+)(?!\\*)(?=punct)|notPunctSpace(\\*+)(?=notPunctSpace)",Qe=h(he,"gu").replace(/notPunctSpace/g,ce).replace(/punctSpace/g,W).replace(/punct/g,D).getRegex(),Ue=h(he,"gu").replace(/notPunctSpace/g,He).replace(/punctSpace/g,Ge).replace(/punct/g,pe).getRegex(),Ke=h("^[^_*]*?\\*\\*[^_*]*?_[^_*]*?(?=\\*\\*)|[^_]+(?=[^_])|(?!_)punct(_+)(?=[\\s]|$)|notPunctSpace(_+)(?!_)(?=punctSpace|$)|(?!_)punctSpace(_+)(?=notPunctSpace)|[\\s](_+)(?!_)(?=punct)|(?!_)punct(_+)(?!_)(?=punct)","gu").replace(/notPunctSpace/g,ce).replace(/punctSpace/g,W).replace(/punct/g,D).getRegex(),Xe=h(/\\(punct)/,"gu").replace(/punct/g,D).getRegex(),We=h(/^<(scheme:[^\s\x00-\x1f<>]*|email)>/).replace("scheme",/[a-zA-Z][a-zA-Z0-9+.-]{1,31}/).replace("email",/[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+(@)[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(?![-_])/).getRegex(),Je=h(K).replace("(?:-->|$)","-->").getRegex(),Ve=h("^comment|^</[a-zA-Z][\\w:-]*\\s*>|^<[a-zA-Z][\\w-]*(?:attribute)*?\\s*/?>|^<\\?[\\s\\S]*?\\?>|^<![a-zA-Z]+\\s[\\s\\S]*?>|^<!\\[CDATA\\[[\\s\\S]*?\\]\\]>").replace("comment",Je).replace("attribute",/\s+[a-zA-Z:_][\w.:-]*(?:\s*=\s*"[^"]*"|\s*=\s*'[^']*'|\s*=\s*[^\s"'=<>`]+)?/).getRegex(),q=/(?:\[(?:\\.|[^\[\]\\])*\]|\\.|`[^`]*`|[^\[\]\\`])*?/,Ye=h(/^!?\[(label)\]\(\s*(href)(?:(?:[ \t]*(?:\n[ \t]*)?)(title))?\s*\)/).replace("label",q).replace("href",/<(?:\\.|[^\n<>\\])+>|[^ 
\t\n\x00-\x1f]*/).replace("title",/"(?:\\"?|[^"\\])*"|'(?:\\'?|[^'\\])*'|\((?:\\\)?|[^)\\])*\)/).getRegex(),ke=h(/^!?\[(label)\]\[(ref)\]/).replace("label",q).replace("ref",U).getRegex(),ge=h(/^!?\[(ref)\](?:\[\])?/).replace("ref",U).getRegex(),et=h("reflink|nolink(?!\\()","g").replace("reflink",ke).replace("nolink",ge).getRegex(),J={_backpedal:I,anyPunctuation:Xe,autolink:We,blockSkip:Ne,br:ae,code:ve,del:I,emStrongLDelim:je,emStrongRDelimAst:Qe,emStrongRDelimUnd:Ke,escape:qe,link:Ye,nolink:ge,punctuation:Ze,reflink:ke,reflinkSearch:et,tag:Ve,text:De,url:I},tt={...J,link:h(/^!?\[(label)\]\((.*?)\)/).replace("label",q).getRegex(),reflink:h(/^!?\[(label)\]\s*\[([^\]]*)\]/).replace("label",q).getRegex()},j={...J,emStrongRDelimAst:Ue,emStrongLDelim:Fe,url:h(/^((?:ftp|https?):\/\/|www\.)(?:[a-zA-Z0-9\-]+\.?)+[^\s<]*|^email/,"i").replace("email",/[A-Za-z0-9._+-]+(@)[a-zA-Z0-9-_]+(?:\.[a-zA-Z0-9-_]*[a-zA-Z0-9])+(?![-_])/).getRegex(),_backpedal:/(?:[^?!.,:;*_'"~()&]+|\([^)]*\)|&(?![a-zA-Z0-9]+;$)|[?!.,:;*_'"~)]+(?!$))+/,del:/^(~~?)(?=[^\s~])((?:\\.|[^\\])*?(?:\\.|[^\s~\\]))\1(?=[^~]|$)/,text:/^([`~]+|[^`~])(?:(?= {2,}\n)|(?=[a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-]+@)|[\s\S]*?(?:(?=[\\<!\[`*~_]|\b_|https?:\/\/|ftp:\/\/|www\.|$)|[^ ](?= {2,}\n)|[^a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-](?=[a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-]+@)))/},nt={...j,br:h(ae).replace("{2,}","*").getRegex(),text:h(j.text).replace("\\b_","\\b_| {2,}\\n").replace(/\{2,\}/g,"*").getRegex()},B={normal:X,gfm:Oe,pedantic:Be},P={normal:J,gfm:j,breaks:nt,pedantic:tt};var st={"&":"&","<":"<",">":">",'"':""","'":"'"},fe=l=>st[l];function R(l,e){if(e){if(m.escapeTest.test(l))return l.replace(m.escapeReplace,fe)}else if(m.escapeTestNoEncode.test(l))return l.replace(m.escapeReplaceNoEncode,fe);return l}function V(l){try{l=encodeURI(l).replace(m.percentDecode,"%")}catch{return null}return l}function Y(l,e){let t=l.replace(m.findPipe,(i,r,o)=>{let a=!1,c=r;for(;--c>=0&&o[c]==="\\";)a=!a;return a?"|":" |"}),n=t.split(m.splitPipe),s=0;if(n[0].trim()||n.shift(),n.length>0&&!n.at(-1)?.trim()&&n.pop(),e)if(n.length>e)n.splice(e);else for(;n.length<e;)n.push("");for(;s<n.length;s++)n[s]=n[s].trim().replace(m.slashPipe,"|");return n}function A(l,e,t){let n=l.length;if(n===0)return"";let s=0;for(;s<n;){let i=l.charAt(n-s-1);if(i===e&&!t)s++;else if(i!==e&&t)s++;else break}return l.slice(0,n-s)}function de(l,e){if(l.indexOf(e[1])===-1)return-1;let t=0;for(let n=0;n<l.length;n++)if(l[n]==="\\")n++;else if(l[n]===e[0])t++;else if(l[n]===e[1]&&(t--,t<0))return n;return t>0?-2:-1}function me(l,e,t,n,s){let i=e.href,r=e.title||null,o=l[1].replace(s.other.outputLinkReplace,"$1");n.state.inLink=!0;let a={type:l[0].charAt(0)==="!"?"image":"link",raw:t,href:i,title:r,text:o,tokens:n.inlineTokens(o)};return n.state.inLink=!1,a}function rt(l,e,t){let n=l.match(t.other.indentCodeCompensation);if(n===null)return e;let s=n[1];return e.split(`
14
+
`).map(i=>{let r=i.match(t.other.beginningSpace);if(r===null)return i;let[o]=r;return o.length>=s.length?i.slice(s.length):i}).join(`
15
+
`)}var S=class{options;rules;lexer;constructor(e){this.options=e||w}space(e){let t=this.rules.block.newline.exec(e);if(t&&t[0].length>0)return{type:"space",raw:t[0]}}code(e){let t=this.rules.block.code.exec(e);if(t){let n=t[0].replace(this.rules.other.codeRemoveIndent,"");return{type:"code",raw:t[0],codeBlockStyle:"indented",text:this.options.pedantic?n:A(n,`
16
+
`)}}}fences(e){let t=this.rules.block.fences.exec(e);if(t){let n=t[0],s=rt(n,t[3]||"",this.rules);return{type:"code",raw:n,lang:t[2]?t[2].trim().replace(this.rules.inline.anyPunctuation,"$1"):t[2],text:s}}}heading(e){let t=this.rules.block.heading.exec(e);if(t){let n=t[2].trim();if(this.rules.other.endingHash.test(n)){let s=A(n,"#");(this.options.pedantic||!s||this.rules.other.endingSpaceChar.test(s))&&(n=s.trim())}return{type:"heading",raw:t[0],depth:t[1].length,text:n,tokens:this.lexer.inline(n)}}}hr(e){let t=this.rules.block.hr.exec(e);if(t)return{type:"hr",raw:A(t[0],`
17
+
`)}}blockquote(e){let t=this.rules.block.blockquote.exec(e);if(t){let n=A(t[0],`
18
+
`).split(`
19
+
`),s="",i="",r=[];for(;n.length>0;){let o=!1,a=[],c;for(c=0;c<n.length;c++)if(this.rules.other.blockquoteStart.test(n[c]))a.push(n[c]),o=!0;else if(!o)a.push(n[c]);else break;n=n.slice(c);let p=a.join(`
20
+
`),u=p.replace(this.rules.other.blockquoteSetextReplace,`
21
+
$1`).replace(this.rules.other.blockquoteSetextReplace2,"");s=s?`${s}
22
+
${p}`:p,i=i?`${i}
23
+
${u}`:u;let d=this.lexer.state.top;if(this.lexer.state.top=!0,this.lexer.blockTokens(u,r,!0),this.lexer.state.top=d,n.length===0)break;let g=r.at(-1);if(g?.type==="code")break;if(g?.type==="blockquote"){let T=g,f=T.raw+`
24
+
`+n.join(`
25
+
`),y=this.blockquote(f);r[r.length-1]=y,s=s.substring(0,s.length-T.raw.length)+y.raw,i=i.substring(0,i.length-T.text.length)+y.text;break}else if(g?.type==="list"){let T=g,f=T.raw+`
26
+
`+n.join(`
27
+
`),y=this.list(f);r[r.length-1]=y,s=s.substring(0,s.length-g.raw.length)+y.raw,i=i.substring(0,i.length-T.raw.length)+y.raw,n=f.substring(r.at(-1).raw.length).split(`
28
+
`);continue}}return{type:"blockquote",raw:s,tokens:r,text:i}}}list(e){let t=this.rules.block.list.exec(e);if(t){let n=t[1].trim(),s=n.length>1,i={type:"list",raw:"",ordered:s,start:s?+n.slice(0,-1):"",loose:!1,items:[]};n=s?`\\d{1,9}\\${n.slice(-1)}`:`\\${n}`,this.options.pedantic&&(n=s?n:"[*+-]");let r=this.rules.other.listItemRegex(n),o=!1;for(;e;){let c=!1,p="",u="";if(!(t=r.exec(e))||this.rules.block.hr.test(e))break;p=t[0],e=e.substring(p.length);let d=t[2].split(`
29
+
`,1)[0].replace(this.rules.other.listReplaceTabs,Z=>" ".repeat(3*Z.length)),g=e.split(`
30
+
`,1)[0],T=!d.trim(),f=0;if(this.options.pedantic?(f=2,u=d.trimStart()):T?f=t[1].length+1:(f=t[2].search(this.rules.other.nonSpaceChar),f=f>4?1:f,u=d.slice(f),f+=t[1].length),T&&this.rules.other.blankLine.test(g)&&(p+=g+`
31
+
`,e=e.substring(g.length+1),c=!0),!c){let Z=this.rules.other.nextBulletRegex(f),te=this.rules.other.hrRegex(f),ne=this.rules.other.fencesBeginRegex(f),se=this.rules.other.headingBeginRegex(f),xe=this.rules.other.htmlBeginRegex(f);for(;e;){let G=e.split(`
32
+
`,1)[0],C;if(g=G,this.options.pedantic?(g=g.replace(this.rules.other.listReplaceNesting," "),C=g):C=g.replace(this.rules.other.tabCharGlobal," "),ne.test(g)||se.test(g)||xe.test(g)||Z.test(g)||te.test(g))break;if(C.search(this.rules.other.nonSpaceChar)>=f||!g.trim())u+=`
33
+
`+C.slice(f);else{if(T||d.replace(this.rules.other.tabCharGlobal," ").search(this.rules.other.nonSpaceChar)>=4||ne.test(d)||se.test(d)||te.test(d))break;u+=`
34
+
`+g}!T&&!g.trim()&&(T=!0),p+=G+`
35
+
`,e=e.substring(G.length+1),d=C.slice(f)}}i.loose||(o?i.loose=!0:this.rules.other.doubleBlankLine.test(p)&&(o=!0));let y=null,ee;this.options.gfm&&(y=this.rules.other.listIsTask.exec(u),y&&(ee=y[0]!=="[ ] ",u=u.replace(this.rules.other.listReplaceTask,""))),i.items.push({type:"list_item",raw:p,task:!!y,checked:ee,loose:!1,text:u,tokens:[]}),i.raw+=p}let a=i.items.at(-1);if(a)a.raw=a.raw.trimEnd(),a.text=a.text.trimEnd();else return;i.raw=i.raw.trimEnd();for(let c=0;c<i.items.length;c++)if(this.lexer.state.top=!1,i.items[c].tokens=this.lexer.blockTokens(i.items[c].text,[]),!i.loose){let p=i.items[c].tokens.filter(d=>d.type==="space"),u=p.length>0&&p.some(d=>this.rules.other.anyLine.test(d.raw));i.loose=u}if(i.loose)for(let c=0;c<i.items.length;c++)i.items[c].loose=!0;return i}}html(e){let t=this.rules.block.html.exec(e);if(t)return{type:"html",block:!0,raw:t[0],pre:t[1]==="pre"||t[1]==="script"||t[1]==="style",text:t[0]}}def(e){let t=this.rules.block.def.exec(e);if(t){let n=t[1].toLowerCase().replace(this.rules.other.multipleSpaceGlobal," "),s=t[2]?t[2].replace(this.rules.other.hrefBrackets,"$1").replace(this.rules.inline.anyPunctuation,"$1"):"",i=t[3]?t[3].substring(1,t[3].length-1).replace(this.rules.inline.anyPunctuation,"$1"):t[3];return{type:"def",tag:n,raw:t[0],href:s,title:i}}}table(e){let t=this.rules.block.table.exec(e);if(!t||!this.rules.other.tableDelimiter.test(t[2]))return;let n=Y(t[1]),s=t[2].replace(this.rules.other.tableAlignChars,"").split("|"),i=t[3]?.trim()?t[3].replace(this.rules.other.tableRowBlankLine,"").split(`
36
+
`):[],r={type:"table",raw:t[0],header:[],align:[],rows:[]};if(n.length===s.length){for(let o of s)this.rules.other.tableAlignRight.test(o)?r.align.push("right"):this.rules.other.tableAlignCenter.test(o)?r.align.push("center"):this.rules.other.tableAlignLeft.test(o)?r.align.push("left"):r.align.push(null);for(let o=0;o<n.length;o++)r.header.push({text:n[o],tokens:this.lexer.inline(n[o]),header:!0,align:r.align[o]});for(let o of i)r.rows.push(Y(o,r.header.length).map((a,c)=>({text:a,tokens:this.lexer.inline(a),header:!1,align:r.align[c]})));return r}}lheading(e){let t=this.rules.block.lheading.exec(e);if(t)return{type:"heading",raw:t[0],depth:t[2].charAt(0)==="="?1:2,text:t[1],tokens:this.lexer.inline(t[1])}}paragraph(e){let t=this.rules.block.paragraph.exec(e);if(t){let n=t[1].charAt(t[1].length-1)===`
37
+
`?t[1].slice(0,-1):t[1];return{type:"paragraph",raw:t[0],text:n,tokens:this.lexer.inline(n)}}}text(e){let t=this.rules.block.text.exec(e);if(t)return{type:"text",raw:t[0],text:t[0],tokens:this.lexer.inline(t[0])}}escape(e){let t=this.rules.inline.escape.exec(e);if(t)return{type:"escape",raw:t[0],text:t[1]}}tag(e){let t=this.rules.inline.tag.exec(e);if(t)return!this.lexer.state.inLink&&this.rules.other.startATag.test(t[0])?this.lexer.state.inLink=!0:this.lexer.state.inLink&&this.rules.other.endATag.test(t[0])&&(this.lexer.state.inLink=!1),!this.lexer.state.inRawBlock&&this.rules.other.startPreScriptTag.test(t[0])?this.lexer.state.inRawBlock=!0:this.lexer.state.inRawBlock&&this.rules.other.endPreScriptTag.test(t[0])&&(this.lexer.state.inRawBlock=!1),{type:"html",raw:t[0],inLink:this.lexer.state.inLink,inRawBlock:this.lexer.state.inRawBlock,block:!1,text:t[0]}}link(e){let t=this.rules.inline.link.exec(e);if(t){let n=t[2].trim();if(!this.options.pedantic&&this.rules.other.startAngleBracket.test(n)){if(!this.rules.other.endAngleBracket.test(n))return;let r=A(n.slice(0,-1),"\\");if((n.length-r.length)%2===0)return}else{let r=de(t[2],"()");if(r===-2)return;if(r>-1){let a=(t[0].indexOf("!")===0?5:4)+t[1].length+r;t[2]=t[2].substring(0,r),t[0]=t[0].substring(0,a).trim(),t[3]=""}}let s=t[2],i="";if(this.options.pedantic){let r=this.rules.other.pedanticHrefTitle.exec(s);r&&(s=r[1],i=r[3])}else i=t[3]?t[3].slice(1,-1):"";return s=s.trim(),this.rules.other.startAngleBracket.test(s)&&(this.options.pedantic&&!this.rules.other.endAngleBracket.test(n)?s=s.slice(1):s=s.slice(1,-1)),me(t,{href:s&&s.replace(this.rules.inline.anyPunctuation,"$1"),title:i&&i.replace(this.rules.inline.anyPunctuation,"$1")},t[0],this.lexer,this.rules)}}reflink(e,t){let n;if((n=this.rules.inline.reflink.exec(e))||(n=this.rules.inline.nolink.exec(e))){let s=(n[2]||n[1]).replace(this.rules.other.multipleSpaceGlobal," "),i=t[s.toLowerCase()];if(!i){let r=n[0].charAt(0);return{type:"text",raw:r,text:r}}return me(n,i,n[0],this.lexer,this.rules)}}emStrong(e,t,n=""){let s=this.rules.inline.emStrongLDelim.exec(e);if(!s||s[3]&&n.match(this.rules.other.unicodeAlphaNumeric))return;if(!(s[1]||s[2]||"")||!n||this.rules.inline.punctuation.exec(n)){let r=[...s[0]].length-1,o,a,c=r,p=0,u=s[0][0]==="*"?this.rules.inline.emStrongRDelimAst:this.rules.inline.emStrongRDelimUnd;for(u.lastIndex=0,t=t.slice(-1*e.length+r);(s=u.exec(t))!=null;){if(o=s[1]||s[2]||s[3]||s[4]||s[5]||s[6],!o)continue;if(a=[...o].length,s[3]||s[4]){c+=a;continue}else if((s[5]||s[6])&&r%3&&!((r+a)%3)){p+=a;continue}if(c-=a,c>0)continue;a=Math.min(a,a+c+p);let d=[...s[0]][0].length,g=e.slice(0,r+s.index+d+a);if(Math.min(r,a)%2){let f=g.slice(1,-1);return{type:"em",raw:g,text:f,tokens:this.lexer.inlineTokens(f)}}let T=g.slice(2,-2);return{type:"strong",raw:g,text:T,tokens:this.lexer.inlineTokens(T)}}}}codespan(e){let t=this.rules.inline.code.exec(e);if(t){let n=t[2].replace(this.rules.other.newLineCharGlobal," "),s=this.rules.other.nonSpaceChar.test(n),i=this.rules.other.startingSpaceChar.test(n)&&this.rules.other.endingSpaceChar.test(n);return s&&i&&(n=n.substring(1,n.length-1)),{type:"codespan",raw:t[0],text:n}}}br(e){let t=this.rules.inline.br.exec(e);if(t)return{type:"br",raw:t[0]}}del(e){let t=this.rules.inline.del.exec(e);if(t)return{type:"del",raw:t[0],text:t[2],tokens:this.lexer.inlineTokens(t[2])}}autolink(e){let t=this.rules.inline.autolink.exec(e);if(t){let n,s;return 
t[2]==="@"?(n=t[1],s="mailto:"+n):(n=t[1],s=n),{type:"link",raw:t[0],text:n,href:s,tokens:[{type:"text",raw:n,text:n}]}}}url(e){let t;if(t=this.rules.inline.url.exec(e)){let n,s;if(t[2]==="@")n=t[0],s="mailto:"+n;else{let i;do i=t[0],t[0]=this.rules.inline._backpedal.exec(t[0])?.[0]??"";while(i!==t[0]);n=t[0],t[1]==="www."?s="http://"+t[0]:s=t[0]}return{type:"link",raw:t[0],text:n,href:s,tokens:[{type:"text",raw:n,text:n}]}}}inlineText(e){let t=this.rules.inline.text.exec(e);if(t){let n=this.lexer.state.inRawBlock;return{type:"text",raw:t[0],text:t[0],escaped:n}}}};var x=class l{tokens;options;state;tokenizer;inlineQueue;constructor(e){this.tokens=[],this.tokens.links=Object.create(null),this.options=e||w,this.options.tokenizer=this.options.tokenizer||new S,this.tokenizer=this.options.tokenizer,this.tokenizer.options=this.options,this.tokenizer.lexer=this,this.inlineQueue=[],this.state={inLink:!1,inRawBlock:!1,top:!0};let t={other:m,block:B.normal,inline:P.normal};this.options.pedantic?(t.block=B.pedantic,t.inline=P.pedantic):this.options.gfm&&(t.block=B.gfm,this.options.breaks?t.inline=P.breaks:t.inline=P.gfm),this.tokenizer.rules=t}static get rules(){return{block:B,inline:P}}static lex(e,t){return new l(t).lex(e)}static lexInline(e,t){return new l(t).inlineTokens(e)}lex(e){e=e.replace(m.carriageReturn,`
38
+
`),this.blockTokens(e,this.tokens);for(let t=0;t<this.inlineQueue.length;t++){let n=this.inlineQueue[t];this.inlineTokens(n.src,n.tokens)}return this.inlineQueue=[],this.tokens}blockTokens(e,t=[],n=!1){for(this.options.pedantic&&(e=e.replace(m.tabCharGlobal," ").replace(m.spaceLine,""));e;){let s;if(this.options.extensions?.block?.some(r=>(s=r.call({lexer:this},e,t))?(e=e.substring(s.raw.length),t.push(s),!0):!1))continue;if(s=this.tokenizer.space(e)){e=e.substring(s.raw.length);let r=t.at(-1);s.raw.length===1&&r!==void 0?r.raw+=`
39
+
`:t.push(s);continue}if(s=this.tokenizer.code(e)){e=e.substring(s.raw.length);let r=t.at(-1);r?.type==="paragraph"||r?.type==="text"?(r.raw+=`
40
+
`+s.raw,r.text+=`
41
+
`+s.text,this.inlineQueue.at(-1).src=r.text):t.push(s);continue}if(s=this.tokenizer.fences(e)){e=e.substring(s.raw.length),t.push(s);continue}if(s=this.tokenizer.heading(e)){e=e.substring(s.raw.length),t.push(s);continue}if(s=this.tokenizer.hr(e)){e=e.substring(s.raw.length),t.push(s);continue}if(s=this.tokenizer.blockquote(e)){e=e.substring(s.raw.length),t.push(s);continue}if(s=this.tokenizer.list(e)){e=e.substring(s.raw.length),t.push(s);continue}if(s=this.tokenizer.html(e)){e=e.substring(s.raw.length),t.push(s);continue}if(s=this.tokenizer.def(e)){e=e.substring(s.raw.length);let r=t.at(-1);r?.type==="paragraph"||r?.type==="text"?(r.raw+=`
42
+
`+s.raw,r.text+=`
43
+
`+s.raw,this.inlineQueue.at(-1).src=r.text):this.tokens.links[s.tag]||(this.tokens.links[s.tag]={href:s.href,title:s.title});continue}if(s=this.tokenizer.table(e)){e=e.substring(s.raw.length),t.push(s);continue}if(s=this.tokenizer.lheading(e)){e=e.substring(s.raw.length),t.push(s);continue}let i=e;if(this.options.extensions?.startBlock){let r=1/0,o=e.slice(1),a;this.options.extensions.startBlock.forEach(c=>{a=c.call({lexer:this},o),typeof a=="number"&&a>=0&&(r=Math.min(r,a))}),r<1/0&&r>=0&&(i=e.substring(0,r+1))}if(this.state.top&&(s=this.tokenizer.paragraph(i))){let r=t.at(-1);n&&r?.type==="paragraph"?(r.raw+=`
44
+
`+s.raw,r.text+=`
45
+
`+s.text,this.inlineQueue.pop(),this.inlineQueue.at(-1).src=r.text):t.push(s),n=i.length!==e.length,e=e.substring(s.raw.length);continue}if(s=this.tokenizer.text(e)){e=e.substring(s.raw.length);let r=t.at(-1);r?.type==="text"?(r.raw+=`
46
+
`+s.raw,r.text+=`
47
+
`+s.text,this.inlineQueue.pop(),this.inlineQueue.at(-1).src=r.text):t.push(s);continue}if(e){let r="Infinite loop on byte: "+e.charCodeAt(0);if(this.options.silent){console.error(r);break}else throw new Error(r)}}return this.state.top=!0,t}inline(e,t=[]){return this.inlineQueue.push({src:e,tokens:t}),t}inlineTokens(e,t=[]){let n=e,s=null;if(this.tokens.links){let o=Object.keys(this.tokens.links);if(o.length>0)for(;(s=this.tokenizer.rules.inline.reflinkSearch.exec(n))!=null;)o.includes(s[0].slice(s[0].lastIndexOf("[")+1,-1))&&(n=n.slice(0,s.index)+"["+"a".repeat(s[0].length-2)+"]"+n.slice(this.tokenizer.rules.inline.reflinkSearch.lastIndex))}for(;(s=this.tokenizer.rules.inline.anyPunctuation.exec(n))!=null;)n=n.slice(0,s.index)+"++"+n.slice(this.tokenizer.rules.inline.anyPunctuation.lastIndex);for(;(s=this.tokenizer.rules.inline.blockSkip.exec(n))!=null;)n=n.slice(0,s.index)+"["+"a".repeat(s[0].length-2)+"]"+n.slice(this.tokenizer.rules.inline.blockSkip.lastIndex);let i=!1,r="";for(;e;){i||(r=""),i=!1;let o;if(this.options.extensions?.inline?.some(c=>(o=c.call({lexer:this},e,t))?(e=e.substring(o.raw.length),t.push(o),!0):!1))continue;if(o=this.tokenizer.escape(e)){e=e.substring(o.raw.length),t.push(o);continue}if(o=this.tokenizer.tag(e)){e=e.substring(o.raw.length),t.push(o);continue}if(o=this.tokenizer.link(e)){e=e.substring(o.raw.length),t.push(o);continue}if(o=this.tokenizer.reflink(e,this.tokens.links)){e=e.substring(o.raw.length);let c=t.at(-1);o.type==="text"&&c?.type==="text"?(c.raw+=o.raw,c.text+=o.text):t.push(o);continue}if(o=this.tokenizer.emStrong(e,n,r)){e=e.substring(o.raw.length),t.push(o);continue}if(o=this.tokenizer.codespan(e)){e=e.substring(o.raw.length),t.push(o);continue}if(o=this.tokenizer.br(e)){e=e.substring(o.raw.length),t.push(o);continue}if(o=this.tokenizer.del(e)){e=e.substring(o.raw.length),t.push(o);continue}if(o=this.tokenizer.autolink(e)){e=e.substring(o.raw.length),t.push(o);continue}if(!this.state.inLink&&(o=this.tokenizer.url(e))){e=e.substring(o.raw.length),t.push(o);continue}let a=e;if(this.options.extensions?.startInline){let c=1/0,p=e.slice(1),u;this.options.extensions.startInline.forEach(d=>{u=d.call({lexer:this},p),typeof u=="number"&&u>=0&&(c=Math.min(c,u))}),c<1/0&&c>=0&&(a=e.substring(0,c+1))}if(o=this.tokenizer.inlineText(a)){e=e.substring(o.raw.length),o.raw.slice(-1)!=="_"&&(r=o.raw.slice(-1)),i=!0;let c=t.at(-1);c?.type==="text"?(c.raw+=o.raw,c.text+=o.text):t.push(o);continue}if(e){let c="Infinite loop on byte: "+e.charCodeAt(0);if(this.options.silent){console.error(c);break}else throw new Error(c)}}return t}};var $=class{options;parser;constructor(e){this.options=e||w}space(e){return""}code({text:e,lang:t,escaped:n}){let s=(t||"").match(m.notSpaceStart)?.[0],i=e.replace(m.endingNewline,"")+`
48
+
`;return s?'<pre><code class="language-'+R(s)+'">'+(n?i:R(i,!0))+`</code></pre>
49
+
`:"<pre><code>"+(n?i:R(i,!0))+`</code></pre>
50
+
`}blockquote({tokens:e}){return`<blockquote>
51
+
${this.parser.parse(e)}</blockquote>
52
+
`}html({text:e}){return e}heading({tokens:e,depth:t}){return`<h${t}>${this.parser.parseInline(e)}</h${t}>
53
+
`}hr(e){return`<hr>
54
+
`}list(e){let t=e.ordered,n=e.start,s="";for(let o=0;o<e.items.length;o++){let a=e.items[o];s+=this.listitem(a)}let i=t?"ol":"ul",r=t&&n!==1?' start="'+n+'"':"";return"<"+i+r+`>
55
+
`+s+"</"+i+`>
56
+
`}listitem(e){let t="";if(e.task){let n=this.checkbox({checked:!!e.checked});e.loose?e.tokens[0]?.type==="paragraph"?(e.tokens[0].text=n+" "+e.tokens[0].text,e.tokens[0].tokens&&e.tokens[0].tokens.length>0&&e.tokens[0].tokens[0].type==="text"&&(e.tokens[0].tokens[0].text=n+" "+R(e.tokens[0].tokens[0].text),e.tokens[0].tokens[0].escaped=!0)):e.tokens.unshift({type:"text",raw:n+" ",text:n+" ",escaped:!0}):t+=n+" "}return t+=this.parser.parse(e.tokens,!!e.loose),`<li>${t}</li>
57
+
`}checkbox({checked:e}){return"<input "+(e?'checked="" ':"")+'disabled="" type="checkbox">'}paragraph({tokens:e}){return`<p>${this.parser.parseInline(e)}</p>
58
+
`}table(e){let t="",n="";for(let i=0;i<e.header.length;i++)n+=this.tablecell(e.header[i]);t+=this.tablerow({text:n});let s="";for(let i=0;i<e.rows.length;i++){let r=e.rows[i];n="";for(let o=0;o<r.length;o++)n+=this.tablecell(r[o]);s+=this.tablerow({text:n})}return s&&(s=`<tbody>${s}</tbody>`),`<table>
59
+
<thead>
60
+
`+t+`</thead>
61
+
`+s+`</table>
62
+
`}tablerow({text:e}){return`<tr>
63
+
${e}</tr>
64
+
`}tablecell(e){let t=this.parser.parseInline(e.tokens),n=e.header?"th":"td";return(e.align?`<${n} align="${e.align}">`:`<${n}>`)+t+`</${n}>
65
+
`}strong({tokens:e}){return`<strong>${this.parser.parseInline(e)}</strong>`}em({tokens:e}){return`<em>${this.parser.parseInline(e)}</em>`}codespan({text:e}){return`<code>${R(e,!0)}</code>`}br(e){return"<br>"}del({tokens:e}){return`<del>${this.parser.parseInline(e)}</del>`}link({href:e,title:t,tokens:n}){let s=this.parser.parseInline(n),i=V(e);if(i===null)return s;e=i;let r='<a href="'+e+'"';return t&&(r+=' title="'+R(t)+'"'),r+=">"+s+"</a>",r}image({href:e,title:t,text:n,tokens:s}){s&&(n=this.parser.parseInline(s,this.parser.textRenderer));let i=V(e);if(i===null)return R(n);e=i;let r=`<img src="${e}" alt="${n}"`;return t&&(r+=` title="${R(t)}"`),r+=">",r}text(e){return"tokens"in e&&e.tokens?this.parser.parseInline(e.tokens):"escaped"in e&&e.escaped?e.text:R(e.text)}};var _=class{strong({text:e}){return e}em({text:e}){return e}codespan({text:e}){return e}del({text:e}){return e}html({text:e}){return e}text({text:e}){return e}link({text:e}){return""+e}image({text:e}){return""+e}br(){return""}};var b=class l{options;renderer;textRenderer;constructor(e){this.options=e||w,this.options.renderer=this.options.renderer||new $,this.renderer=this.options.renderer,this.renderer.options=this.options,this.renderer.parser=this,this.textRenderer=new _}static parse(e,t){return new l(t).parse(e)}static parseInline(e,t){return new l(t).parseInline(e)}parse(e,t=!0){let n="";for(let s=0;s<e.length;s++){let i=e[s];if(this.options.extensions?.renderers?.[i.type]){let o=i,a=this.options.extensions.renderers[o.type].call({parser:this},o);if(a!==!1||!["space","hr","heading","code","table","blockquote","list","html","paragraph","text"].includes(o.type)){n+=a||"";continue}}let r=i;switch(r.type){case"space":{n+=this.renderer.space(r);continue}case"hr":{n+=this.renderer.hr(r);continue}case"heading":{n+=this.renderer.heading(r);continue}case"code":{n+=this.renderer.code(r);continue}case"table":{n+=this.renderer.table(r);continue}case"blockquote":{n+=this.renderer.blockquote(r);continue}case"list":{n+=this.renderer.list(r);continue}case"html":{n+=this.renderer.html(r);continue}case"paragraph":{n+=this.renderer.paragraph(r);continue}case"text":{let o=r,a=this.renderer.text(o);for(;s+1<e.length&&e[s+1].type==="text";)o=e[++s],a+=`
66
+
`+this.renderer.text(o);t?n+=this.renderer.paragraph({type:"paragraph",raw:a,text:a,tokens:[{type:"text",raw:a,text:a,escaped:!0}]}):n+=a;continue}default:{let o='Token with "'+r.type+'" type was not found.';if(this.options.silent)return console.error(o),"";throw new Error(o)}}}return n}parseInline(e,t=this.renderer){let n="";for(let s=0;s<e.length;s++){let i=e[s];if(this.options.extensions?.renderers?.[i.type]){let o=this.options.extensions.renderers[i.type].call({parser:this},i);if(o!==!1||!["escape","html","link","image","strong","em","codespan","br","del","text"].includes(i.type)){n+=o||"";continue}}let r=i;switch(r.type){case"escape":{n+=t.text(r);break}case"html":{n+=t.html(r);break}case"link":{n+=t.link(r);break}case"image":{n+=t.image(r);break}case"strong":{n+=t.strong(r);break}case"em":{n+=t.em(r);break}case"codespan":{n+=t.codespan(r);break}case"br":{n+=t.br(r);break}case"del":{n+=t.del(r);break}case"text":{n+=t.text(r);break}default:{let o='Token with "'+r.type+'" type was not found.';if(this.options.silent)return console.error(o),"";throw new Error(o)}}}return n}};var L=class{options;block;constructor(e){this.options=e||w}static passThroughHooks=new Set(["preprocess","postprocess","processAllTokens"]);preprocess(e){return e}postprocess(e){return e}processAllTokens(e){return e}provideLexer(){return this.block?x.lex:x.lexInline}provideParser(){return this.block?b.parse:b.parseInline}};var E=class{defaults=z();options=this.setOptions;parse=this.parseMarkdown(!0);parseInline=this.parseMarkdown(!1);Parser=b;Renderer=$;TextRenderer=_;Lexer=x;Tokenizer=S;Hooks=L;constructor(...e){this.use(...e)}walkTokens(e,t){let n=[];for(let s of e)switch(n=n.concat(t.call(this,s)),s.type){case"table":{let i=s;for(let r of i.header)n=n.concat(this.walkTokens(r.tokens,t));for(let r of i.rows)for(let o of r)n=n.concat(this.walkTokens(o.tokens,t));break}case"list":{let i=s;n=n.concat(this.walkTokens(i.items,t));break}default:{let i=s;this.defaults.extensions?.childTokens?.[i.type]?this.defaults.extensions.childTokens[i.type].forEach(r=>{let o=i[r].flat(1/0);n=n.concat(this.walkTokens(o,t))}):i.tokens&&(n=n.concat(this.walkTokens(i.tokens,t)))}}return n}use(...e){let t=this.defaults.extensions||{renderers:{},childTokens:{}};return e.forEach(n=>{let s={...n};if(s.async=this.defaults.async||s.async||!1,n.extensions&&(n.extensions.forEach(i=>{if(!i.name)throw new Error("extension name required");if("renderer"in i){let r=t.renderers[i.name];r?t.renderers[i.name]=function(...o){let a=i.renderer.apply(this,o);return a===!1&&(a=r.apply(this,o)),a}:t.renderers[i.name]=i.renderer}if("tokenizer"in i){if(!i.level||i.level!=="block"&&i.level!=="inline")throw new Error("extension level must be 'block' or 'inline'");let r=t[i.level];r?r.unshift(i.tokenizer):t[i.level]=[i.tokenizer],i.start&&(i.level==="block"?t.startBlock?t.startBlock.push(i.start):t.startBlock=[i.start]:i.level==="inline"&&(t.startInline?t.startInline.push(i.start):t.startInline=[i.start]))}"childTokens"in i&&i.childTokens&&(t.childTokens[i.name]=i.childTokens)}),s.extensions=t),n.renderer){let i=this.defaults.renderer||new $(this.defaults);for(let r in n.renderer){if(!(r in i))throw new Error(`renderer '${r}' does not exist`);if(["options","parser"].includes(r))continue;let o=r,a=n.renderer[o],c=i[o];i[o]=(...p)=>{let u=a.apply(i,p);return u===!1&&(u=c.apply(i,p)),u||""}}s.renderer=i}if(n.tokenizer){let i=this.defaults.tokenizer||new S(this.defaults);for(let r in n.tokenizer){if(!(r in i))throw new Error(`tokenizer '${r}' does not 
exist`);if(["options","rules","lexer"].includes(r))continue;let o=r,a=n.tokenizer[o],c=i[o];i[o]=(...p)=>{let u=a.apply(i,p);return u===!1&&(u=c.apply(i,p)),u}}s.tokenizer=i}if(n.hooks){let i=this.defaults.hooks||new L;for(let r in n.hooks){if(!(r in i))throw new Error(`hook '${r}' does not exist`);if(["options","block"].includes(r))continue;let o=r,a=n.hooks[o],c=i[o];L.passThroughHooks.has(r)?i[o]=p=>{if(this.defaults.async)return Promise.resolve(a.call(i,p)).then(d=>c.call(i,d));let u=a.call(i,p);return c.call(i,u)}:i[o]=(...p)=>{let u=a.apply(i,p);return u===!1&&(u=c.apply(i,p)),u}}s.hooks=i}if(n.walkTokens){let i=this.defaults.walkTokens,r=n.walkTokens;s.walkTokens=function(o){let a=[];return a.push(r.call(this,o)),i&&(a=a.concat(i.call(this,o))),a}}this.defaults={...this.defaults,...s}}),this}setOptions(e){return this.defaults={...this.defaults,...e},this}lexer(e,t){return x.lex(e,t??this.defaults)}parser(e,t){return b.parse(e,t??this.defaults)}parseMarkdown(e){return(n,s)=>{let i={...s},r={...this.defaults,...i},o=this.onError(!!r.silent,!!r.async);if(this.defaults.async===!0&&i.async===!1)return o(new Error("marked(): The async option was set to true by an extension. Remove async: false from the parse options object to return a Promise."));if(typeof n>"u"||n===null)return o(new Error("marked(): input parameter is undefined or null"));if(typeof n!="string")return o(new Error("marked(): input parameter is of type "+Object.prototype.toString.call(n)+", string expected"));r.hooks&&(r.hooks.options=r,r.hooks.block=e);let a=r.hooks?r.hooks.provideLexer():e?x.lex:x.lexInline,c=r.hooks?r.hooks.provideParser():e?b.parse:b.parseInline;if(r.async)return Promise.resolve(r.hooks?r.hooks.preprocess(n):n).then(p=>a(p,r)).then(p=>r.hooks?r.hooks.processAllTokens(p):p).then(p=>r.walkTokens?Promise.all(this.walkTokens(p,r.walkTokens)).then(()=>p):p).then(p=>c(p,r)).then(p=>r.hooks?r.hooks.postprocess(p):p).catch(o);try{r.hooks&&(n=r.hooks.preprocess(n));let p=a(n,r);r.hooks&&(p=r.hooks.processAllTokens(p)),r.walkTokens&&this.walkTokens(p,r.walkTokens);let u=c(p,r);return r.hooks&&(u=r.hooks.postprocess(u)),u}catch(p){return o(p)}}}onError(e,t){return n=>{if(n.message+=`
67
+
Please report this to https://github.com/markedjs/marked.`,e){let s="<p>An error occurred:</p><pre>"+R(n.message+"",!0)+"</pre>";return t?Promise.resolve(s):s}if(t)return Promise.reject(n);throw n}}};var M=new E;function k(l,e){return M.parse(l,e)}k.options=k.setOptions=function(l){return M.setOptions(l),k.defaults=M.defaults,N(k.defaults),k};k.getDefaults=z;k.defaults=w;k.use=function(...l){return M.use(...l),k.defaults=M.defaults,N(k.defaults),k};k.walkTokens=function(l,e){return M.walkTokens(l,e)};k.parseInline=M.parseInline;k.Parser=b;k.parser=b.parse;k.Renderer=$;k.TextRenderer=_;k.Lexer=x;k.lexer=x.lex;k.Tokenizer=S;k.Hooks=L;k.parse=k;var it=k.options,ot=k.setOptions,lt=k.use,at=k.walkTokens,ct=k.parseInline,pt=k,ut=b.parse,ht=x.lex;
68
+
69
+
if(__exports != exports)module.exports = exports;return module.exports}));
+141
-3
src/internal/handle_resolver.zig
···
15
15
pub const HandleResolver = struct {
16
16
allocator: std.mem.Allocator,
17
17
http_client: std.http.Client,
18
+
doh_endpoint: []const u8,
18
19
19
20
pub fn init(allocator: std.mem.Allocator) HandleResolver {
20
21
return .{
21
22
.allocator = allocator,
22
23
.http_client = .{ .allocator = allocator },
24
+
.doh_endpoint = "https://cloudflare-dns.com/dns-query",
23
25
};
24
26
}
25
27
···
29
31
30
32
/// resolve a handle to a DID via HTTP well-known, falling back to DNS TXT
31
33
pub fn resolve(self: *HandleResolver, handle: Handle) ![]const u8 {
32
-
return try self.resolveHttp(handle);
34
+
if (self.resolveHttp(handle)) |did| {
35
+
return did;
36
+
} else |_| {
37
+
return try self.resolveDns(handle);
38
+
}
33
39
}
34
40
35
41
/// resolve via HTTP at https://{handle}/.well-known/atproto-did
···
63
69
64
70
return try self.allocator.dupe(u8, did_str);
65
71
}
72
+
73
+
/// resolve via DNS-over-HTTPS TXT lookup of _atproto.{handle} (default endpoint: https://cloudflare-dns.com/dns-query)
74
+
pub fn resolveDns(self: *HandleResolver, handle: Handle) ![]const u8 {
75
+
const dns_name = try std.fmt.allocPrint(
76
+
self.allocator,
77
+
"_atproto.{s}",
78
+
.{handle.str()},
79
+
);
80
+
defer self.allocator.free(dns_name);
81
+
82
+
const url = try std.fmt.allocPrint(
83
+
self.allocator,
84
+
"{s}?name={s}&type=TXT",
85
+
.{ self.doh_endpoint, dns_name },
86
+
);
87
+
defer self.allocator.free(url);
88
+
89
+
var aw: std.io.Writer.Allocating = .init(self.allocator);
90
+
defer aw.deinit();
91
+
92
+
const result = self.http_client.fetch(.{
93
+
.location = .{ .url = url },
94
+
.extra_headers = &.{
95
+
.{ .name = "accept", .value = "application/dns-json" },
96
+
},
97
+
.response_writer = &aw.writer,
98
+
}) catch return error.DnsResolutionFailed;
99
+
100
+
if (result.status != .ok) {
101
+
return error.DnsResolutionFailed;
102
+
}
103
+
104
+
const response_body = aw.toArrayList().items;
105
+
const parsed = std.json.parseFromSlice(
106
+
DnsResponse,
107
+
self.allocator,
108
+
response_body,
109
+
.{},
110
+
) catch return error.InvalidDnsResponse;
111
+
defer parsed.deinit();
112
+
113
+
const dns_response = parsed.value;
114
+
if (dns_response.Answer == null or dns_response.Answer.?.len == 0) {
115
+
return error.NoDnsRecordsFound;
116
+
}
117
+
118
+
for (dns_response.Answer.?) |answer| {
119
+
const data = answer.data orelse continue;
120
+
const did_str = extractDidFromTxt(data) orelse continue;
121
+
122
+
if (Did.parse(did_str) != null) {
123
+
return try self.allocator.dupe(u8, did_str);
124
+
}
125
+
}
126
+
127
+
return error.NoValidDidFound;
128
+
}
129
+
};
130
+
131
+
fn extractDidFromTxt(txt_data: []const u8) ?[]const u8 {
132
+
var data = txt_data;
133
+
if (data.len >= 2 and data[0] == '"' and data[data.len - 1] == '"') {
134
+
data = data[1 .. data.len - 1];
135
+
}
136
+
137
+
const prefix = "did=";
138
+
if (std.mem.startsWith(u8, data, prefix)) {
139
+
return data[prefix.len..];
140
+
}
141
+
142
+
return null;
143
+
}
144
+
145
+
const DnsResponse = struct {
146
+
Status: i32,
147
+
TC: bool,
148
+
RD: bool,
149
+
RA: bool,
150
+
AD: bool,
151
+
CD: bool,
152
+
Question: ?[]Question = null,
153
+
Answer: ?[]Answer = null,
154
+
};
155
+
156
+
const Question = struct {
157
+
name: []const u8,
158
+
type: i32,
159
+
};
160
+
161
+
const Answer = struct {
162
+
name: []const u8,
163
+
type: i32,
164
+
TTL: i32,
165
+
data: ?[]const u8 = null,
66
166
};
67
167
68
168
// === integration tests ===
69
169
// these actually hit the network - run with: zig test src/internal/handle_resolver.zig
70
170
71
-
test "resolve handle - integration" {
171
+
test "resolve handle (http) - integration" {
72
172
// use arena for http client internals that may leak
73
173
var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
74
174
defer arena.deinit();
···
78
178
79
179
// resolve a known handle that has .well-known/atproto-did
80
180
const handle = Handle.parse("jay.bsky.social") orelse return error.InvalidHandle;
81
-
const did = resolver.resolve(handle) catch |err| {
181
+
const did = resolver.resolveHttp(handle) catch |err| {
82
182
// network errors are ok in CI without network access
83
183
std.debug.print("network error (expected in some CI): {}\n", .{err});
84
184
return;
···
88
188
try std.testing.expect(Did.parse(did) != null);
89
189
try std.testing.expect(std.mem.startsWith(u8, did, "did:plc:"));
90
190
}
191
+
192
+
test "resolve handle (dns over http) - integration" {
193
+
var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
194
+
defer arena.deinit();
195
+
196
+
var resolver = HandleResolver.init(arena.allocator());
197
+
defer resolver.deinit();
198
+
199
+
const handle = Handle.parse("seiso.moe") orelse return error.InvalidHandle;
200
+
const did = resolver.resolveDns(handle) catch |err| {
201
+
// network errors are ok in CI without network access
202
+
std.debug.print("network error (expected in some CI): {}\n", .{err});
203
+
return;
204
+
};
205
+
206
+
// should be a valid DID
207
+
try std.testing.expect(Did.parse(did) != null);
208
+
try std.testing.expect(std.mem.startsWith(u8, did, "did:"));
209
+
}
210
+
211
+
test "resolve handle - integration" {
212
+
var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
213
+
defer arena.deinit();
214
+
215
+
var resolver = HandleResolver.init(arena.allocator());
216
+
defer resolver.deinit();
217
+
218
+
const handle = Handle.parse("jay.bsky.social") orelse return error.InvalidHandle;
219
+
const did = resolver.resolve(handle) catch |err| {
220
+
// network errors are ok in CI without network access
221
+
std.debug.print("network error (expected in some CI): {}\n", .{err});
222
+
return;
223
+
};
224
+
225
+
// should be a valid DID
226
+
try std.testing.expect(Did.parse(did) != null);
227
+
try std.testing.expect(std.mem.startsWith(u8, did, "did:"));
228
+
}
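
the resolver now tries the HTTPS well-known document and falls back to a DoH TXT lookup. a minimal consumer sketch, assuming `HandleResolver` and `Handle` are reachable from the package root as `zat.HandleResolver` / `zat.Handle` (those re-exports are not shown in this diff):

```zig
const std = @import("std");
const zat = @import("zat"); // assumption: HandleResolver/Handle are re-exported here

fn lookupDid(allocator: std.mem.Allocator, raw: []const u8) ![]const u8 {
    const handle = zat.Handle.parse(raw) orelse return error.InvalidHandle;

    var resolver = zat.HandleResolver.init(allocator);
    defer resolver.deinit();

    // resolve() tries https://{handle}/.well-known/atproto-did first;
    // on any error it falls back to a DoH TXT lookup of _atproto.{handle}
    return try resolver.resolve(handle); // caller owns the returned DID slice
}
```

the returned DID is duped onto the resolver's allocator, so pass an arena (as the integration tests do) or free it yourself.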
+107
-3
src/internal/json.zig
···
6
6
//! two approaches:
7
7
//! - runtime paths: getString(value, "embed.external.uri") - for dynamic paths
8
8
//! - comptime paths: extractAt(T, alloc, value, .{"embed", "external"}) - for static paths with type safety
9
+
//!
10
+
//! debug logging:
11
+
//! enable with `pub const std_options: std.Options = .{ .log_scope_levels = &.{.{ .scope = .zat, .level = .debug }} };`
9
12
10
13
const std = @import("std");
14
+
const log = std.log.scoped(.zat);
11
15
12
16
/// navigate a json value by dot-separated path
13
17
/// returns null if any segment is missing or wrong type
···
92
96
/// extract a typed struct from a nested path
93
97
/// uses comptime tuple for path segments - no runtime string parsing
94
98
/// leverages std.json.parseFromValueLeaky for type-safe extraction
99
+
///
100
+
/// on failure, logs diagnostic info when debug logging is enabled for the .zat scope
95
101
pub fn extractAt(
96
102
comptime T: type,
97
103
allocator: std.mem.Allocator,
···
101
107
var current = value;
102
108
inline for (path) |segment| {
103
109
current = switch (current) {
104
-
.object => |obj| obj.get(segment) orelse return error.MissingField,
105
-
else => return error.UnexpectedToken,
110
+
.object => |obj| obj.get(segment) orelse {
111
+
log.debug("extractAt: missing field \"{s}\" in path {any}, expected {s}", .{
112
+
segment,
113
+
path,
114
+
@typeName(T),
115
+
});
116
+
return error.MissingField;
117
+
},
118
+
else => {
119
+
log.debug("extractAt: expected object at \"{s}\" in path {any}, got {s}", .{
120
+
segment,
121
+
path,
122
+
@tagName(current),
123
+
});
124
+
return error.UnexpectedToken;
125
+
},
106
126
};
107
127
}
108
-
return std.json.parseFromValueLeaky(T, allocator, current, .{});
128
+
return std.json.parseFromValueLeaky(T, allocator, current, .{ .ignore_unknown_fields = true }) catch |err| {
129
+
log.debug("extractAt: parse failed for {s} at path {any}: {s} (json type: {s})", .{
130
+
@typeName(T),
131
+
path,
132
+
@errorName(err),
133
+
@tagName(current),
134
+
});
135
+
return err;
136
+
};
109
137
}
110
138
111
139
/// extract a typed value, returning null if path doesn't exist
···
278
306
const missing = extractAtOptional(Thing, arena.allocator(), parsed.value, .{"missing"});
279
307
try std.testing.expect(missing == null);
280
308
}
309
+
310
+
test "extractAt logs diagnostic on enum parse failure" {
311
+
// simulates the issue: unknown enum value from external API
312
+
const json_str =
313
+
\\{"op": {"action": "archive", "path": "app.bsky.feed.post/abc"}}
314
+
;
315
+
var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
316
+
defer arena.deinit();
317
+
318
+
const parsed = try std.json.parseFromSlice(std.json.Value, arena.allocator(), json_str, .{});
319
+
320
+
const Action = enum { create, update, delete };
321
+
const Op = struct {
322
+
action: Action,
323
+
path: []const u8,
324
+
};
325
+
326
+
// "archive" is not a valid Action variant - this should fail
327
+
// with debug logging enabled, you'd see:
328
+
// debug(zat): extractAt: parse failed for json.Op at path { "op" }: InvalidEnumTag (json type: object)
329
+
const result = extractAtOptional(Op, arena.allocator(), parsed.value, .{"op"});
330
+
try std.testing.expect(result == null);
331
+
}
332
+
333
+
test "extractAt logs diagnostic on missing field" {
334
+
const json_str =
335
+
\\{"data": {"name": "test"}}
336
+
;
337
+
var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
338
+
defer arena.deinit();
339
+
340
+
const parsed = try std.json.parseFromSlice(std.json.Value, arena.allocator(), json_str, .{});
341
+
342
+
const Thing = struct { value: i64 };
343
+
344
+
// path "data.missing" doesn't exist
345
+
// with debug logging enabled, you'd see:
346
+
// debug(zat): extractAt: missing field "missing" in path { "data", "missing" }, expected json.Thing
347
+
const result = extractAtOptional(Thing, arena.allocator(), parsed.value, .{ "data", "missing" });
348
+
try std.testing.expect(result == null);
349
+
}
350
+
351
+
test "extractAt ignores unknown fields" {
352
+
// real-world case: TAP messages have extra fields (live, rev, cid) that we don't need
353
+
const json_str =
354
+
\\{
355
+
\\ "record": {
356
+
\\ "live": true,
357
+
\\ "did": "did:plc:abc123",
358
+
\\ "rev": "3mbspmpaidl2a",
359
+
\\ "collection": "pub.leaflet.document",
360
+
\\ "rkey": "xyz789",
361
+
\\ "action": "create",
362
+
\\ "cid": "bafyreitest"
363
+
\\ }
364
+
\\}
365
+
;
366
+
var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
367
+
defer arena.deinit();
368
+
369
+
const parsed = try std.json.parseFromSlice(std.json.Value, arena.allocator(), json_str, .{});
370
+
371
+
// only extract the fields we care about
372
+
const Record = struct {
373
+
collection: []const u8,
374
+
action: []const u8,
375
+
did: []const u8,
376
+
rkey: []const u8,
377
+
};
378
+
379
+
const rec = try extractAt(Record, arena.allocator(), parsed.value, .{"record"});
380
+
try std.testing.expectEqualStrings("pub.leaflet.document", rec.collection);
381
+
try std.testing.expectEqualStrings("create", rec.action);
382
+
try std.testing.expectEqualStrings("did:plc:abc123", rec.did);
383
+
try std.testing.expectEqualStrings("xyz789", rec.rkey);
384
+
}
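
the `.zat` debug scope is opted into from the consuming application's root source file. a minimal sketch (the explicit `std.Options` annotation is what lets the nested `log_scope_levels` literal coerce):

```zig
const std = @import("std");

// root source file of the app consuming zat
pub const std_options: std.Options = .{
    .log_scope_levels = &.{
        .{ .scope = .zat, .level = .debug },
    },
};
```

with that in place, a failed `extractAt` prints the diagnostics quoted in the tests above, e.g. `debug(zat): extractAt: parse failed for json.Op at path { "op" }: InvalidEnumTag (json type: object)`.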
+90
src/internal/sync.zig
···
1
+
//! sync types - com.atproto.sync.subscribeRepos
2
+
//!
3
+
//! enums for firehose/event stream consumption.
4
+
//! see: https://atproto.com/specs/event-stream
5
+
6
+
const std = @import("std");
7
+
8
+
/// repo operation action (create/update/delete)
9
+
///
10
+
/// from com.atproto.sync.subscribeRepos#repoOp
11
+
/// used in firehose commit messages to indicate what happened to a record.
12
+
pub const CommitAction = enum {
13
+
create,
14
+
update,
15
+
delete,
16
+
17
+
/// parse from string (for manual parsing)
18
+
pub fn parse(s: []const u8) ?CommitAction {
19
+
return std.meta.stringToEnum(CommitAction, s);
20
+
}
21
+
};
22
+
23
+
/// event stream message types
24
+
///
25
+
/// from com.atproto.sync.subscribeRepos message union
26
+
/// the top-level discriminator for firehose messages.
27
+
pub const EventKind = enum {
28
+
commit,
29
+
sync,
30
+
identity,
31
+
account,
32
+
info,
33
+
34
+
pub fn parse(s: []const u8) ?EventKind {
35
+
return std.meta.stringToEnum(EventKind, s);
36
+
}
37
+
};
38
+
39
+
/// account status reasons
40
+
///
41
+
/// from com.atproto.sync.subscribeRepos#account status field
42
+
/// indicates why an account is inactive.
43
+
pub const AccountStatus = enum {
44
+
takendown,
45
+
suspended,
46
+
deleted,
47
+
deactivated,
48
+
desynchronized,
49
+
throttled,
50
+
51
+
pub fn parse(s: []const u8) ?AccountStatus {
52
+
return std.meta.stringToEnum(AccountStatus, s);
53
+
}
54
+
};
55
+
56
+
// === tests ===
57
+
58
+
test "CommitAction parse" {
59
+
try std.testing.expectEqual(CommitAction.create, CommitAction.parse("create").?);
60
+
try std.testing.expectEqual(CommitAction.update, CommitAction.parse("update").?);
61
+
try std.testing.expectEqual(CommitAction.delete, CommitAction.parse("delete").?);
62
+
try std.testing.expect(CommitAction.parse("invalid") == null);
63
+
}
64
+
65
+
test "CommitAction json parsing" {
66
+
const json_str =
67
+
\\{"action": "create", "path": "app.bsky.feed.post/abc"}
68
+
;
69
+
70
+
const Op = struct {
71
+
action: CommitAction,
72
+
path: []const u8,
73
+
};
74
+
75
+
const parsed = try std.json.parseFromSlice(Op, std.testing.allocator, json_str, .{});
76
+
defer parsed.deinit();
77
+
78
+
try std.testing.expectEqual(CommitAction.create, parsed.value.action);
79
+
}
80
+
81
+
test "EventKind parse" {
82
+
try std.testing.expectEqual(EventKind.commit, EventKind.parse("commit").?);
83
+
try std.testing.expectEqual(EventKind.identity, EventKind.parse("identity").?);
84
+
try std.testing.expect(EventKind.parse("unknown") == null);
85
+
}
86
+
87
+
test "AccountStatus parse" {
88
+
try std.testing.expectEqual(AccountStatus.takendown, AccountStatus.parse("takendown").?);
89
+
try std.testing.expectEqual(AccountStatus.suspended, AccountStatus.parse("suspended").?);
90
+
}
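
together with the root re-exports later in this change, these enums cover the routing surface of a firehose consumer. a rough sketch, using a hypothetical `RawEvent` envelope (real firehose/jetstream payloads name these fields differently):

```zig
const std = @import("std");
const zat = @import("zat");

// hypothetical envelope shape, for illustration only
const RawEvent = struct {
    kind: []const u8,
    status: ?[]const u8 = null,
};

fn route(ev: RawEvent) void {
    const kind = zat.EventKind.parse(ev.kind) orelse {
        std.log.warn("unknown event kind: {s}", .{ev.kind});
        return;
    };
    switch (kind) {
        .commit => {}, // decode repo ops, then dispatch on zat.CommitAction
        .account => if (ev.status) |s| {
            if (zat.AccountStatus.parse(s)) |status| switch (status) {
                .takendown, .suspended, .deleted => {}, // e.g. purge cached data
                else => {},
            };
        },
        .sync, .identity, .info => {},
    }
}
```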
+5
-1
src/internal/xrpc.zig
···
18
18
/// bearer token for authenticated requests
19
19
access_token: ?[]const u8 = null,
20
20
21
+
/// atproto JWTs are ~1KB; buffer needs room for "Bearer " prefix
22
+
const max_auth_header_len = 2048;
23
+
21
24
pub fn init(allocator: std.mem.Allocator, host: []const u8) XrpcClient {
22
25
return .{
23
26
.allocator = allocator,
···
89
92
// https://github.com/ziglang/zig/issues/25021
90
93
var extra_headers: std.http.Client.Request.Headers = .{
91
94
.accept_encoding = .{ .override = "identity" },
95
+
.content_type = if (body != null) .{ .override = "application/json" } else .default,
92
96
};
93
-
var auth_header_buf: [256]u8 = undefined;
97
+
var auth_header_buf: [max_auth_header_len]u8 = undefined;
94
98
if (self.access_token) |token| {
95
99
const auth_value = try std.fmt.bufPrint(&auth_header_buf, "Bearer {s}", .{token});
96
100
extra_headers.authorization = .{ .override = auth_value };
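
two small changes here: POST requests with a body now carry `Content-Type: application/json`, and the Authorization buffer grows from 256 to 2048 bytes so a real atproto session JWT fits behind the `Bearer ` prefix. a minimal configuration sketch, assuming the client is exported from the package root as `zat.XrpcClient` (not shown in this diff):

```zig
const std = @import("std");
const zat = @import("zat"); // assumption: XrpcClient is re-exported here

fn makeClient(allocator: std.mem.Allocator, session_jwt: []const u8) zat.XrpcClient {
    var client = zat.XrpcClient.init(allocator, "https://bsky.social");
    // ~1KB session JWT + "Bearer " prefix now fits the 2048-byte header buffer
    client.access_token = session_jwt;
    return client;
}
```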
+6
src/root.zig
···
26
26
pub const Jwt = @import("internal/jwt.zig").Jwt;
27
27
pub const multibase = @import("internal/multibase.zig");
28
28
pub const multicodec = @import("internal/multicodec.zig");
29
+
30
+
// sync / firehose
31
+
const sync = @import("internal/sync.zig");
32
+
pub const CommitAction = sync.CommitAction;
33
+
pub const EventKind = sync.EventKind;
34
+
pub const AccountStatus = sync.AccountStatus;