Cargo.lock  +545 −627
···
3
3
version = 4
4
4
5
5
[[package]]
6
-
name = "addr2line"
7
-
version = "0.24.2"
8
-
source = "registry+https://github.com/rust-lang/crates.io-index"
9
-
checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1"
10
-
dependencies = [
11
-
"gimli",
12
-
]
13
-
14
-
[[package]]
15
-
name = "adler2"
16
-
version = "2.0.0"
17
-
source = "registry+https://github.com/rust-lang/crates.io-index"
18
-
checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
19
-
20
-
[[package]]
21
6
name = "aho-corasick"
22
-
version = "1.1.3"
7
+
version = "1.1.4"
23
8
source = "registry+https://github.com/rust-lang/crates.io-index"
24
-
checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
9
+
checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301"
25
10
dependencies = [
26
11
"memchr",
27
12
]
···
34
19
35
20
[[package]]
36
21
name = "anstream"
37
-
version = "0.6.19"
22
+
version = "0.6.21"
38
23
source = "registry+https://github.com/rust-lang/crates.io-index"
39
-
checksum = "301af1932e46185686725e0fad2f8f2aa7da69dd70bf6ecc44d6b703844a3933"
24
+
checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a"
40
25
dependencies = [
41
26
"anstyle",
42
27
"anstyle-parse",
···
49
34
50
35
[[package]]
51
36
name = "anstyle"
52
-
version = "1.0.11"
37
+
version = "1.0.13"
53
38
source = "registry+https://github.com/rust-lang/crates.io-index"
54
-
checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd"
39
+
checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78"
55
40
56
41
[[package]]
57
42
name = "anstyle-parse"
···
64
49
65
50
[[package]]
66
51
name = "anstyle-query"
67
-
version = "1.1.3"
52
+
version = "1.1.4"
68
53
source = "registry+https://github.com/rust-lang/crates.io-index"
69
-
checksum = "6c8bdeb6047d8983be085bab0ba1472e6dc604e7041dbf6fcd5e71523014fae9"
54
+
checksum = "9e231f6134f61b71076a3eab506c379d4f36122f2af15a9ff04415ea4c3339e2"
70
55
dependencies = [
71
-
"windows-sys 0.59.0",
56
+
"windows-sys 0.60.2",
72
57
]
73
58
74
59
[[package]]
75
60
name = "anstyle-wincon"
76
-
version = "3.0.9"
61
+
version = "3.0.10"
77
62
source = "registry+https://github.com/rust-lang/crates.io-index"
78
-
checksum = "403f75924867bb1033c59fbf0797484329750cfbe3c4325cd33127941fabc882"
63
+
checksum = "3e0633414522a32ffaac8ac6cc8f748e090c5717661fddeea04219e2344f5f2a"
79
64
dependencies = [
80
65
"anstyle",
81
66
"once_cell_polyfill",
82
-
"windows-sys 0.59.0",
67
+
"windows-sys 0.60.2",
83
68
]
84
69
85
70
[[package]]
86
71
name = "anyhow"
87
-
version = "1.0.98"
72
+
version = "1.0.100"
88
73
source = "registry+https://github.com/rust-lang/crates.io-index"
89
-
checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
74
+
checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
90
75
91
76
[[package]]
92
77
name = "async-trait"
93
-
version = "0.1.88"
78
+
version = "0.1.89"
94
79
source = "registry+https://github.com/rust-lang/crates.io-index"
95
-
checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5"
80
+
checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb"
96
81
dependencies = [
97
82
"proc-macro2",
98
83
"quote",
99
-
"syn",
84
+
"syn 2.0.109",
100
85
]
101
86
102
87
[[package]]
···
115
100
"atproto-identity",
116
101
"atproto-record",
117
102
"base64",
103
+
"chrono",
118
104
"cid",
119
105
"clap",
120
106
"elliptic-curve",
···
126
112
"serde_ipld_dagcbor",
127
113
"serde_json",
128
114
"sha2",
129
-
"thiserror 2.0.12",
115
+
"thiserror 2.0.17",
130
116
"tokio",
131
117
]
132
118
···
148
134
"secrecy",
149
135
"serde",
150
136
"serde_json",
151
-
"thiserror 2.0.12",
137
+
"thiserror 2.0.17",
152
138
"tokio",
153
139
"tracing",
154
140
"urlencoding",
155
141
]
156
142
157
143
[[package]]
144
+
name = "atproto-extras"
145
+
version = "0.13.0"
146
+
dependencies = [
147
+
"anyhow",
148
+
"async-trait",
149
+
"atproto-identity",
150
+
"atproto-record",
151
+
"clap",
152
+
"regex",
153
+
"reqwest",
154
+
"serde_json",
155
+
"tokio",
156
+
]
157
+
158
+
[[package]]
158
159
name = "atproto-identity"
159
160
version = "0.13.0"
160
161
dependencies = [
···
174
175
"serde",
175
176
"serde_ipld_dagcbor",
176
177
"serde_json",
177
-
"thiserror 2.0.12",
178
+
"thiserror 2.0.17",
178
179
"tokio",
179
180
"tracing",
180
181
"url",
···
194
195
"http",
195
196
"serde",
196
197
"serde_json",
197
-
"thiserror 2.0.12",
198
+
"thiserror 2.0.17",
198
199
"tokio",
199
200
"tokio-util",
200
201
"tokio-websockets",
···
217
218
"reqwest",
218
219
"serde",
219
220
"serde_json",
220
-
"thiserror 2.0.12",
221
+
"thiserror 2.0.17",
221
222
"tokio",
222
223
"tracing",
223
224
"zeroize",
···
248
249
"serde_ipld_dagcbor",
249
250
"serde_json",
250
251
"sha2",
251
-
"thiserror 2.0.12",
252
+
"thiserror 2.0.17",
252
253
"tokio",
253
254
"tracing",
254
255
"ulid",
···
266
267
"reqwest",
267
268
"serde",
268
269
"serde_json",
269
-
"thiserror 2.0.12",
270
+
"thiserror 2.0.17",
270
271
"zeroize",
271
272
]
272
273
···
293
294
"secrecy",
294
295
"serde",
295
296
"serde_json",
296
-
"thiserror 2.0.12",
297
+
"thiserror 2.0.17",
297
298
"tokio",
298
299
"tracing",
299
300
"zeroize",
···
316
317
"serde_ipld_dagcbor",
317
318
"serde_json",
318
319
"sha2",
319
-
"thiserror 2.0.12",
320
+
"thiserror 2.0.17",
321
+
"tokio",
322
+
]
323
+
324
+
[[package]]
325
+
name = "atproto-tap"
326
+
version = "0.13.0"
327
+
dependencies = [
328
+
"atproto-client",
329
+
"atproto-identity",
330
+
"base64",
331
+
"clap",
332
+
"compact_str",
333
+
"futures",
334
+
"http",
335
+
"itoa",
336
+
"reqwest",
337
+
"serde",
338
+
"serde_json",
339
+
"thiserror 2.0.17",
320
340
"tokio",
341
+
"tokio-stream",
342
+
"tokio-websockets",
343
+
"tracing",
344
+
"tracing-subscriber",
321
345
]
322
346
323
347
[[package]]
···
341
365
"reqwest-middleware",
342
366
"serde",
343
367
"serde_json",
344
-
"thiserror 2.0.12",
368
+
"thiserror 2.0.17",
345
369
"tokio",
346
370
"tracing",
347
371
]
···
368
392
"reqwest-middleware",
369
393
"serde",
370
394
"serde_json",
371
-
"thiserror 2.0.12",
395
+
"thiserror 2.0.17",
372
396
"tokio",
373
397
"tracing",
374
398
]
375
399
376
400
[[package]]
377
401
name = "autocfg"
378
-
version = "1.4.0"
402
+
version = "1.5.0"
379
403
source = "registry+https://github.com/rust-lang/crates.io-index"
380
-
checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
404
+
checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
381
405
382
406
[[package]]
383
407
name = "axum"
384
-
version = "0.8.4"
408
+
version = "0.8.6"
385
409
source = "registry+https://github.com/rust-lang/crates.io-index"
386
-
checksum = "021e862c184ae977658b36c4500f7feac3221ca5da43e3f25bd04ab6c79a29b5"
410
+
checksum = "8a18ed336352031311f4e0b4dd2ff392d4fbb370777c9d18d7fc9d7359f73871"
387
411
dependencies = [
388
412
"axum-core",
389
413
"axum-macros",
···
401
425
"mime",
402
426
"percent-encoding",
403
427
"pin-project-lite",
404
-
"rustversion",
405
-
"serde",
428
+
"serde_core",
406
429
"serde_json",
407
430
"serde_path_to_error",
408
431
"serde_urlencoded",
···
416
439
417
440
[[package]]
418
441
name = "axum-core"
419
-
version = "0.5.2"
442
+
version = "0.5.5"
420
443
source = "registry+https://github.com/rust-lang/crates.io-index"
421
-
checksum = "68464cd0412f486726fb3373129ef5d2993f90c34bc2bc1c1e9943b2f4fc7ca6"
444
+
checksum = "59446ce19cd142f8833f856eb31f3eb097812d1479ab224f54d72428ca21ea22"
422
445
dependencies = [
423
446
"bytes",
424
447
"futures-core",
···
427
450
"http-body-util",
428
451
"mime",
429
452
"pin-project-lite",
430
-
"rustversion",
431
453
"sync_wrapper",
432
454
"tower-layer",
433
455
"tower-service",
···
442
464
dependencies = [
443
465
"proc-macro2",
444
466
"quote",
445
-
"syn",
446
-
]
447
-
448
-
[[package]]
449
-
name = "backtrace"
450
-
version = "0.3.75"
451
-
source = "registry+https://github.com/rust-lang/crates.io-index"
452
-
checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002"
453
-
dependencies = [
454
-
"addr2line",
455
-
"cfg-if",
456
-
"libc",
457
-
"miniz_oxide",
458
-
"object",
459
-
"rustc-demangle",
460
-
"windows-targets 0.52.6",
467
+
"syn 2.0.109",
461
468
]
462
469
463
470
[[package]]
···
473
480
checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf"
474
481
475
482
[[package]]
483
+
name = "base256emoji"
484
+
version = "1.0.2"
485
+
source = "registry+https://github.com/rust-lang/crates.io-index"
486
+
checksum = "b5e9430d9a245a77c92176e649af6e275f20839a48389859d1661e9a128d077c"
487
+
dependencies = [
488
+
"const-str",
489
+
"match-lookup",
490
+
]
491
+
492
+
[[package]]
476
493
name = "base64"
477
494
version = "0.22.1"
478
495
source = "registry+https://github.com/rust-lang/crates.io-index"
···
480
497
481
498
[[package]]
482
499
name = "base64ct"
483
-
version = "1.7.3"
500
+
version = "1.8.0"
484
501
source = "registry+https://github.com/rust-lang/crates.io-index"
485
-
checksum = "89e25b6adfb930f02d1981565a6e5d9c547ac15a96606256d3b59040e5cd4ca3"
502
+
checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba"
486
503
487
504
[[package]]
488
505
name = "bitflags"
489
-
version = "2.9.1"
506
+
version = "2.10.0"
490
507
source = "registry+https://github.com/rust-lang/crates.io-index"
491
-
checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"
508
+
checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
492
509
493
510
[[package]]
494
511
name = "block-buffer"
···
501
518
502
519
[[package]]
503
520
name = "bumpalo"
504
-
version = "3.17.0"
521
+
version = "3.19.0"
505
522
source = "registry+https://github.com/rust-lang/crates.io-index"
506
-
checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf"
523
+
checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
507
524
508
525
[[package]]
509
526
name = "bytes"
···
512
529
checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
513
530
514
531
[[package]]
532
+
name = "castaway"
533
+
version = "0.2.4"
534
+
source = "registry+https://github.com/rust-lang/crates.io-index"
535
+
checksum = "dec551ab6e7578819132c713a93c022a05d60159dc86e7a7050223577484c55a"
536
+
dependencies = [
537
+
"rustversion",
538
+
]
539
+
540
+
[[package]]
515
541
name = "cbor4ii"
516
542
version = "0.2.14"
517
543
source = "registry+https://github.com/rust-lang/crates.io-index"
···
522
548
523
549
[[package]]
524
550
name = "cc"
525
-
version = "1.2.24"
551
+
version = "1.2.44"
526
552
source = "registry+https://github.com/rust-lang/crates.io-index"
527
-
checksum = "16595d3be041c03b09d08d0858631facccee9221e579704070e6e9e4915d3bc7"
553
+
checksum = "37521ac7aabe3d13122dc382493e20c9416f299d2ccd5b3a5340a2570cdeb0f3"
528
554
dependencies = [
555
+
"find-msvc-tools",
529
556
"jobserver",
530
557
"libc",
531
558
"shlex",
···
533
560
534
561
[[package]]
535
562
name = "cfg-if"
536
-
version = "1.0.0"
563
+
version = "1.0.4"
537
564
source = "registry+https://github.com/rust-lang/crates.io-index"
538
-
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
565
+
checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
539
566
540
567
[[package]]
541
568
name = "cfg_aliases"
···
545
572
546
573
[[package]]
547
574
name = "chrono"
548
-
version = "0.4.41"
575
+
version = "0.4.42"
549
576
source = "registry+https://github.com/rust-lang/crates.io-index"
550
-
checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d"
577
+
checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2"
551
578
dependencies = [
552
579
"num-traits",
553
580
"serde",
···
569
596
570
597
[[package]]
571
598
name = "clap"
572
-
version = "4.5.40"
599
+
version = "4.5.51"
573
600
source = "registry+https://github.com/rust-lang/crates.io-index"
574
-
checksum = "40b6887a1d8685cebccf115538db5c0efe625ccac9696ad45c409d96566e910f"
601
+
checksum = "4c26d721170e0295f191a69bd9a1f93efcdb0aff38684b61ab5750468972e5f5"
575
602
dependencies = [
576
603
"clap_builder",
577
604
"clap_derive",
···
579
606
580
607
[[package]]
581
608
name = "clap_builder"
582
-
version = "4.5.40"
609
+
version = "4.5.51"
583
610
source = "registry+https://github.com/rust-lang/crates.io-index"
584
-
checksum = "e0c66c08ce9f0c698cbce5c0279d0bb6ac936d8674174fe48f736533b964f59e"
611
+
checksum = "75835f0c7bf681bfd05abe44e965760fea999a5286c6eb2d59883634fd02011a"
585
612
dependencies = [
586
613
"anstream",
587
614
"anstyle",
···
591
618
592
619
[[package]]
593
620
name = "clap_derive"
594
-
version = "4.5.40"
621
+
version = "4.5.49"
595
622
source = "registry+https://github.com/rust-lang/crates.io-index"
596
-
checksum = "d2c7947ae4cc3d851207c1adb5b5e260ff0cca11446b1d6d1423788e442257ce"
623
+
checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671"
597
624
dependencies = [
598
625
"heck",
599
626
"proc-macro2",
600
627
"quote",
601
-
"syn",
628
+
"syn 2.0.109",
602
629
]
603
630
604
631
[[package]]
605
632
name = "clap_lex"
606
-
version = "0.7.5"
633
+
version = "0.7.6"
607
634
source = "registry+https://github.com/rust-lang/crates.io-index"
608
-
checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675"
635
+
checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d"
609
636
610
637
[[package]]
611
638
name = "colorchoice"
612
639
version = "1.0.4"
613
640
source = "registry+https://github.com/rust-lang/crates.io-index"
614
641
checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
642
+
643
+
[[package]]
644
+
name = "compact_str"
645
+
version = "0.8.1"
646
+
source = "registry+https://github.com/rust-lang/crates.io-index"
647
+
checksum = "3b79c4069c6cad78e2e0cdfcbd26275770669fb39fd308a752dc110e83b9af32"
648
+
dependencies = [
649
+
"castaway",
650
+
"cfg-if",
651
+
"itoa",
652
+
"rustversion",
653
+
"ryu",
654
+
"serde",
655
+
"static_assertions",
656
+
]
615
657
616
658
[[package]]
617
659
name = "const-oid"
···
620
662
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
621
663
622
664
[[package]]
665
+
name = "const-str"
666
+
version = "0.4.3"
667
+
source = "registry+https://github.com/rust-lang/crates.io-index"
668
+
checksum = "2f421161cb492475f1661ddc9815a745a1c894592070661180fdec3d4872e9c3"
669
+
670
+
[[package]]
623
671
name = "core-foundation"
624
672
version = "0.9.4"
625
673
source = "registry+https://github.com/rust-lang/crates.io-index"
···
738
786
checksum = "8d162beedaa69905488a8da94f5ac3edb4dd4788b732fadb7bd120b2625c1976"
739
787
dependencies = [
740
788
"data-encoding",
741
-
"syn",
789
+
"syn 2.0.109",
742
790
]
743
791
744
792
[[package]]
···
772
820
dependencies = [
773
821
"proc-macro2",
774
822
"quote",
775
-
"syn",
823
+
"syn 2.0.109",
776
824
]
777
825
778
826
[[package]]
···
832
880
"heck",
833
881
"proc-macro2",
834
882
"quote",
835
-
"syn",
883
+
"syn 2.0.109",
836
884
]
837
885
838
886
[[package]]
···
852
900
]
853
901
854
902
[[package]]
903
+
name = "find-msvc-tools"
904
+
version = "0.1.4"
905
+
source = "registry+https://github.com/rust-lang/crates.io-index"
906
+
checksum = "52051878f80a721bb68ebfbc930e07b65ba72f2da88968ea5c06fd6ca3d3a127"
907
+
908
+
[[package]]
855
909
name = "fnv"
856
910
version = "1.0.7"
857
911
source = "registry+https://github.com/rust-lang/crates.io-index"
···
865
919
866
920
[[package]]
867
921
name = "form_urlencoded"
868
-
version = "1.2.1"
922
+
version = "1.2.2"
869
923
source = "registry+https://github.com/rust-lang/crates.io-index"
870
-
checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456"
924
+
checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf"
871
925
dependencies = [
872
926
"percent-encoding",
873
927
]
···
928
982
dependencies = [
929
983
"proc-macro2",
930
984
"quote",
931
-
"syn",
985
+
"syn 2.0.109",
932
986
]
933
987
934
988
[[package]]
···
962
1016
]
963
1017
964
1018
[[package]]
965
-
name = "generator"
966
-
version = "0.8.5"
967
-
source = "registry+https://github.com/rust-lang/crates.io-index"
968
-
checksum = "d18470a76cb7f8ff746cf1f7470914f900252ec36bbc40b569d74b1258446827"
969
-
dependencies = [
970
-
"cc",
971
-
"cfg-if",
972
-
"libc",
973
-
"log",
974
-
"rustversion",
975
-
"windows",
976
-
]
977
-
978
-
[[package]]
979
1019
name = "generic-array"
980
-
version = "0.14.7"
1020
+
version = "0.14.9"
981
1021
source = "registry+https://github.com/rust-lang/crates.io-index"
982
-
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
1022
+
checksum = "4bb6743198531e02858aeaea5398fcc883e71851fcbcb5a2f773e2fb6cb1edf2"
983
1023
dependencies = [
984
1024
"typenum",
985
1025
"version_check",
···
995
1035
"cfg-if",
996
1036
"js-sys",
997
1037
"libc",
998
-
"wasi 0.11.0+wasi-snapshot-preview1",
1038
+
"wasi",
999
1039
"wasm-bindgen",
1000
1040
]
1001
1041
1002
1042
[[package]]
1003
1043
name = "getrandom"
1004
-
version = "0.3.3"
1044
+
version = "0.3.4"
1005
1045
source = "registry+https://github.com/rust-lang/crates.io-index"
1006
-
checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
1046
+
checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd"
1007
1047
dependencies = [
1008
1048
"cfg-if",
1009
1049
"js-sys",
1010
1050
"libc",
1011
1051
"r-efi",
1012
-
"wasi 0.14.2+wasi-0.2.4",
1052
+
"wasip2",
1013
1053
"wasm-bindgen",
1014
1054
]
1015
1055
1016
1056
[[package]]
1017
-
name = "gimli"
1018
-
version = "0.31.1"
1019
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1020
-
checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
1021
-
1022
-
[[package]]
1023
1057
name = "group"
1024
1058
version = "0.13.0"
1025
1059
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1032
1066
1033
1067
[[package]]
1034
1068
name = "h2"
1035
-
version = "0.4.10"
1069
+
version = "0.4.12"
1036
1070
source = "registry+https://github.com/rust-lang/crates.io-index"
1037
-
checksum = "a9421a676d1b147b16b82c9225157dc629087ef8ec4d5e2960f9437a90dac0a5"
1071
+
checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386"
1038
1072
dependencies = [
1039
1073
"atomic-waker",
1040
1074
"bytes",
···
1051
1085
1052
1086
[[package]]
1053
1087
name = "hashbrown"
1054
-
version = "0.15.3"
1088
+
version = "0.15.5"
1055
1089
source = "registry+https://github.com/rust-lang/crates.io-index"
1056
-
checksum = "84b26c544d002229e640969970a2e74021aadf6e2f96372b9c58eff97de08eb3"
1090
+
checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
1057
1091
dependencies = [
1058
1092
"allocator-api2",
1059
1093
"equivalent",
···
1061
1095
]
1062
1096
1063
1097
[[package]]
1098
+
name = "hashbrown"
1099
+
version = "0.16.0"
1100
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1101
+
checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d"
1102
+
1103
+
[[package]]
1064
1104
name = "heck"
1065
1105
version = "0.5.0"
1066
1106
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1082
1122
"idna",
1083
1123
"ipnet",
1084
1124
"once_cell",
1085
-
"rand 0.9.1",
1125
+
"rand 0.9.2",
1086
1126
"ring",
1087
-
"thiserror 2.0.12",
1127
+
"thiserror 2.0.17",
1088
1128
"tinyvec",
1089
1129
"tokio",
1090
1130
"tracing",
···
1104
1144
"moka",
1105
1145
"once_cell",
1106
1146
"parking_lot",
1107
-
"rand 0.9.1",
1147
+
"rand 0.9.2",
1108
1148
"resolv-conf",
1109
1149
"smallvec",
1110
-
"thiserror 2.0.12",
1150
+
"thiserror 2.0.17",
1111
1151
"tokio",
1112
1152
"tracing",
1113
1153
]
···
1178
1218
1179
1219
[[package]]
1180
1220
name = "hyper"
1181
-
version = "1.6.0"
1221
+
version = "1.7.0"
1182
1222
source = "registry+https://github.com/rust-lang/crates.io-index"
1183
-
checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80"
1223
+
checksum = "eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e"
1184
1224
dependencies = [
1225
+
"atomic-waker",
1185
1226
"bytes",
1186
1227
"futures-channel",
1187
-
"futures-util",
1228
+
"futures-core",
1188
1229
"h2",
1189
1230
"http",
1190
1231
"http-body",
···
1192
1233
"httpdate",
1193
1234
"itoa",
1194
1235
"pin-project-lite",
1236
+
"pin-utils",
1195
1237
"smallvec",
1196
1238
"tokio",
1197
1239
"want",
···
1199
1241
1200
1242
[[package]]
1201
1243
name = "hyper-rustls"
1202
-
version = "0.27.6"
1244
+
version = "0.27.7"
1203
1245
source = "registry+https://github.com/rust-lang/crates.io-index"
1204
-
checksum = "03a01595e11bdcec50946522c32dde3fc6914743000a68b93000965f2f02406d"
1246
+
checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58"
1205
1247
dependencies = [
1206
1248
"http",
1207
1249
"hyper",
···
1216
1258
1217
1259
[[package]]
1218
1260
name = "hyper-util"
1219
-
version = "0.1.13"
1261
+
version = "0.1.17"
1220
1262
source = "registry+https://github.com/rust-lang/crates.io-index"
1221
-
checksum = "b1c293b6b3d21eca78250dc7dbebd6b9210ec5530e038cbfe0661b5c47ab06e8"
1263
+
checksum = "3c6995591a8f1380fcb4ba966a252a4b29188d51d2b89e3a252f5305be65aea8"
1222
1264
dependencies = [
1223
1265
"base64",
1224
1266
"bytes",
···
1232
1274
"libc",
1233
1275
"percent-encoding",
1234
1276
"pin-project-lite",
1235
-
"socket2",
1277
+
"socket2 0.6.1",
1236
1278
"system-configuration",
1237
1279
"tokio",
1238
1280
"tower-service",
···
1242
1284
1243
1285
[[package]]
1244
1286
name = "icu_collections"
1245
-
version = "2.0.0"
1287
+
version = "2.1.1"
1246
1288
source = "registry+https://github.com/rust-lang/crates.io-index"
1247
-
checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47"
1289
+
checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43"
1248
1290
dependencies = [
1249
1291
"displaydoc",
1250
1292
"potential_utf",
···
1255
1297
1256
1298
[[package]]
1257
1299
name = "icu_locale_core"
1258
-
version = "2.0.0"
1300
+
version = "2.1.1"
1259
1301
source = "registry+https://github.com/rust-lang/crates.io-index"
1260
-
checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a"
1302
+
checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6"
1261
1303
dependencies = [
1262
1304
"displaydoc",
1263
1305
"litemap",
···
1268
1310
1269
1311
[[package]]
1270
1312
name = "icu_normalizer"
1271
-
version = "2.0.0"
1313
+
version = "2.1.1"
1272
1314
source = "registry+https://github.com/rust-lang/crates.io-index"
1273
-
checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979"
1315
+
checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599"
1274
1316
dependencies = [
1275
-
"displaydoc",
1276
1317
"icu_collections",
1277
1318
"icu_normalizer_data",
1278
1319
"icu_properties",
···
1283
1324
1284
1325
[[package]]
1285
1326
name = "icu_normalizer_data"
1286
-
version = "2.0.0"
1327
+
version = "2.1.1"
1287
1328
source = "registry+https://github.com/rust-lang/crates.io-index"
1288
-
checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3"
1329
+
checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a"
1289
1330
1290
1331
[[package]]
1291
1332
name = "icu_properties"
1292
-
version = "2.0.1"
1333
+
version = "2.1.1"
1293
1334
source = "registry+https://github.com/rust-lang/crates.io-index"
1294
-
checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b"
1335
+
checksum = "e93fcd3157766c0c8da2f8cff6ce651a31f0810eaa1c51ec363ef790bbb5fb99"
1295
1336
dependencies = [
1296
-
"displaydoc",
1297
1337
"icu_collections",
1298
1338
"icu_locale_core",
1299
1339
"icu_properties_data",
1300
1340
"icu_provider",
1301
-
"potential_utf",
1302
1341
"zerotrie",
1303
1342
"zerovec",
1304
1343
]
1305
1344
1306
1345
[[package]]
1307
1346
name = "icu_properties_data"
1308
-
version = "2.0.1"
1347
+
version = "2.1.1"
1309
1348
source = "registry+https://github.com/rust-lang/crates.io-index"
1310
-
checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632"
1349
+
checksum = "02845b3647bb045f1100ecd6480ff52f34c35f82d9880e029d329c21d1054899"
1311
1350
1312
1351
[[package]]
1313
1352
name = "icu_provider"
1314
-
version = "2.0.0"
1353
+
version = "2.1.1"
1315
1354
source = "registry+https://github.com/rust-lang/crates.io-index"
1316
-
checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af"
1355
+
checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614"
1317
1356
dependencies = [
1318
1357
"displaydoc",
1319
1358
"icu_locale_core",
1320
-
"stable_deref_trait",
1321
-
"tinystr",
1322
1359
"writeable",
1323
1360
"yoke",
1324
1361
"zerofrom",
···
1328
1365
1329
1366
[[package]]
1330
1367
name = "idna"
1331
-
version = "1.0.3"
1368
+
version = "1.1.0"
1332
1369
source = "registry+https://github.com/rust-lang/crates.io-index"
1333
-
checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e"
1370
+
checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de"
1334
1371
dependencies = [
1335
1372
"idna_adapter",
1336
1373
"smallvec",
···
1349
1386
1350
1387
[[package]]
1351
1388
name = "indexmap"
1352
-
version = "2.9.0"
1389
+
version = "2.12.0"
1353
1390
source = "registry+https://github.com/rust-lang/crates.io-index"
1354
-
checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
1391
+
checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f"
1355
1392
dependencies = [
1356
1393
"equivalent",
1357
-
"hashbrown",
1394
+
"hashbrown 0.16.0",
1358
1395
]
1359
1396
1360
1397
[[package]]
···
1363
1400
source = "registry+https://github.com/rust-lang/crates.io-index"
1364
1401
checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f"
1365
1402
dependencies = [
1366
-
"socket2",
1403
+
"socket2 0.5.10",
1367
1404
"widestring",
1368
1405
"windows-sys 0.48.0",
1369
1406
"winreg",
···
1388
1425
1389
1426
[[package]]
1390
1427
name = "iri-string"
1391
-
version = "0.7.8"
1428
+
version = "0.7.9"
1392
1429
source = "registry+https://github.com/rust-lang/crates.io-index"
1393
-
checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2"
1430
+
checksum = "4f867b9d1d896b67beb18518eda36fdb77a32ea590de864f1325b294a6d14397"
1394
1431
dependencies = [
1395
1432
"memchr",
1396
1433
"serde",
···
1398
1435
1399
1436
[[package]]
1400
1437
name = "is_terminal_polyfill"
1401
-
version = "1.70.1"
1438
+
version = "1.70.2"
1402
1439
source = "registry+https://github.com/rust-lang/crates.io-index"
1403
-
checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
1440
+
checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695"
1404
1441
1405
1442
[[package]]
1406
1443
name = "itoa"
···
1410
1447
1411
1448
[[package]]
1412
1449
name = "jobserver"
1413
-
version = "0.1.33"
1450
+
version = "0.1.34"
1414
1451
source = "registry+https://github.com/rust-lang/crates.io-index"
1415
-
checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a"
1452
+
checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
1416
1453
dependencies = [
1417
-
"getrandom 0.3.3",
1454
+
"getrandom 0.3.4",
1418
1455
"libc",
1419
1456
]
1420
1457
1421
1458
[[package]]
1422
1459
name = "js-sys"
1423
-
version = "0.3.77"
1460
+
version = "0.3.82"
1424
1461
source = "registry+https://github.com/rust-lang/crates.io-index"
1425
-
checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f"
1462
+
checksum = "b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65"
1426
1463
dependencies = [
1427
1464
"once_cell",
1428
1465
"wasm-bindgen",
···
1450
1487
1451
1488
[[package]]
1452
1489
name = "libc"
1453
-
version = "0.2.172"
1490
+
version = "0.2.177"
1454
1491
source = "registry+https://github.com/rust-lang/crates.io-index"
1455
-
checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
1492
+
checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976"
1456
1493
1457
1494
[[package]]
1458
1495
name = "litemap"
1459
-
version = "0.8.0"
1496
+
version = "0.8.1"
1460
1497
source = "registry+https://github.com/rust-lang/crates.io-index"
1461
-
checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956"
1498
+
checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77"
1462
1499
1463
1500
[[package]]
1464
1501
name = "lock_api"
1465
-
version = "0.4.13"
1502
+
version = "0.4.14"
1466
1503
source = "registry+https://github.com/rust-lang/crates.io-index"
1467
-
checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765"
1504
+
checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965"
1468
1505
dependencies = [
1469
-
"autocfg",
1470
1506
"scopeguard",
1471
1507
]
1472
1508
1473
1509
[[package]]
1474
1510
name = "log"
1475
-
version = "0.4.27"
1511
+
version = "0.4.28"
1476
1512
source = "registry+https://github.com/rust-lang/crates.io-index"
1477
-
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
1478
-
1479
-
[[package]]
1480
-
name = "loom"
1481
-
version = "0.7.2"
1482
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1483
-
checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca"
1484
-
dependencies = [
1485
-
"cfg-if",
1486
-
"generator",
1487
-
"scoped-tls",
1488
-
"tracing",
1489
-
"tracing-subscriber",
1490
-
]
1513
+
checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432"
1491
1514
1492
1515
[[package]]
1493
1516
name = "lru"
···
1495
1518
source = "registry+https://github.com/rust-lang/crates.io-index"
1496
1519
checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38"
1497
1520
dependencies = [
1498
-
"hashbrown",
1521
+
"hashbrown 0.15.5",
1499
1522
]
1500
1523
1501
1524
[[package]]
···
1503
1526
version = "0.1.2"
1504
1527
source = "registry+https://github.com/rust-lang/crates.io-index"
1505
1528
checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154"
1529
+
1530
+
[[package]]
1531
+
name = "match-lookup"
1532
+
version = "0.1.1"
1533
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1534
+
checksum = "1265724d8cb29dbbc2b0f06fffb8bf1a8c0cf73a78eede9ba73a4a66c52a981e"
1535
+
dependencies = [
1536
+
"proc-macro2",
1537
+
"quote",
1538
+
"syn 1.0.109",
1539
+
]
1506
1540
1507
1541
[[package]]
1508
1542
name = "matchers"
1509
-
version = "0.1.0"
1543
+
version = "0.2.0"
1510
1544
source = "registry+https://github.com/rust-lang/crates.io-index"
1511
-
checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
1545
+
checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9"
1512
1546
dependencies = [
1513
-
"regex-automata 0.1.10",
1547
+
"regex-automata",
1514
1548
]
1515
1549
1516
1550
[[package]]
···
1521
1555
1522
1556
[[package]]
1523
1557
name = "memchr"
1524
-
version = "2.7.4"
1558
+
version = "2.7.6"
1525
1559
source = "registry+https://github.com/rust-lang/crates.io-index"
1526
-
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
1560
+
checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273"
1527
1561
1528
1562
[[package]]
1529
1563
name = "mime"
···
1542
1576
]
1543
1577
1544
1578
[[package]]
1545
-
name = "miniz_oxide"
1546
-
version = "0.8.8"
1547
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1548
-
checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a"
1549
-
dependencies = [
1550
-
"adler2",
1551
-
]
1552
-
1553
-
[[package]]
1554
1579
name = "mio"
1555
-
version = "1.0.4"
1580
+
version = "1.1.0"
1556
1581
source = "registry+https://github.com/rust-lang/crates.io-index"
1557
-
checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c"
1582
+
checksum = "69d83b0086dc8ecf3ce9ae2874b2d1290252e2a30720bea58a5c6639b0092873"
1558
1583
dependencies = [
1559
1584
"libc",
1560
-
"wasi 0.11.0+wasi-snapshot-preview1",
1561
-
"windows-sys 0.59.0",
1585
+
"wasi",
1586
+
"windows-sys 0.61.2",
1562
1587
]
1563
1588
1564
1589
[[package]]
1565
1590
name = "moka"
1566
-
version = "0.12.10"
1591
+
version = "0.12.11"
1567
1592
source = "registry+https://github.com/rust-lang/crates.io-index"
1568
-
checksum = "a9321642ca94a4282428e6ea4af8cc2ca4eac48ac7a6a4ea8f33f76d0ce70926"
1593
+
checksum = "8261cd88c312e0004c1d51baad2980c66528dfdb2bee62003e643a4d8f86b077"
1569
1594
dependencies = [
1570
1595
"crossbeam-channel",
1571
1596
"crossbeam-epoch",
1572
1597
"crossbeam-utils",
1573
-
"loom",
1598
+
"equivalent",
1574
1599
"parking_lot",
1575
1600
"portable-atomic",
1576
1601
"rustc_version",
1577
1602
"smallvec",
1578
1603
"tagptr",
1579
-
"thiserror 1.0.69",
1580
1604
"uuid",
1581
1605
]
1582
1606
1583
1607
[[package]]
1584
1608
name = "multibase"
1585
-
version = "0.9.1"
1609
+
version = "0.9.2"
1586
1610
source = "registry+https://github.com/rust-lang/crates.io-index"
1587
-
checksum = "9b3539ec3c1f04ac9748a260728e855f261b4977f5c3406612c884564f329404"
1611
+
checksum = "8694bb4835f452b0e3bb06dbebb1d6fc5385b6ca1caf2e55fd165c042390ec77"
1588
1612
dependencies = [
1589
1613
"base-x",
1614
+
"base256emoji",
1590
1615
"data-encoding",
1591
1616
"data-encoding-macro",
1592
1617
]
···
1604
1629
1605
1630
[[package]]
1606
1631
name = "nu-ansi-term"
1607
-
version = "0.46.0"
1632
+
version = "0.50.3"
1608
1633
source = "registry+https://github.com/rust-lang/crates.io-index"
1609
-
checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
1634
+
checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5"
1610
1635
dependencies = [
1611
-
"overload",
1612
-
"winapi",
1636
+
"windows-sys 0.61.2",
1613
1637
]
1614
1638
1615
1639
[[package]]
···
1619
1643
checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
1620
1644
dependencies = [
1621
1645
"autocfg",
1622
-
]
1623
-
1624
-
[[package]]
1625
-
name = "object"
1626
-
version = "0.36.7"
1627
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1628
-
checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87"
1629
-
dependencies = [
1630
-
"memchr",
1631
1646
]
1632
1647
1633
1648
[[package]]
···
1642
1657
1643
1658
[[package]]
1644
1659
name = "once_cell_polyfill"
1645
-
version = "1.70.1"
1660
+
version = "1.70.2"
1646
1661
source = "registry+https://github.com/rust-lang/crates.io-index"
1647
-
checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad"
1662
+
checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe"
1648
1663
1649
1664
[[package]]
1650
1665
name = "openssl-probe"
1651
1666
version = "0.1.6"
1652
1667
source = "registry+https://github.com/rust-lang/crates.io-index"
1653
1668
checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e"
1654
-
1655
-
[[package]]
1656
-
name = "overload"
1657
-
version = "0.1.1"
1658
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1659
-
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
1660
1669
1661
1670
[[package]]
1662
1671
name = "p256"
···
1686
1695
1687
1696
[[package]]
1688
1697
name = "parking_lot"
1689
-
version = "0.12.4"
1698
+
version = "0.12.5"
1690
1699
source = "registry+https://github.com/rust-lang/crates.io-index"
1691
-
checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13"
1700
+
checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a"
1692
1701
dependencies = [
1693
1702
"lock_api",
1694
1703
"parking_lot_core",
···
1696
1705
1697
1706
[[package]]
1698
1707
name = "parking_lot_core"
1699
-
version = "0.9.11"
1708
+
version = "0.9.12"
1700
1709
source = "registry+https://github.com/rust-lang/crates.io-index"
1701
-
checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5"
1710
+
checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1"
1702
1711
dependencies = [
1703
1712
"cfg-if",
1704
1713
"libc",
1705
1714
"redox_syscall",
1706
1715
"smallvec",
1707
-
"windows-targets 0.52.6",
1716
+
"windows-link 0.2.1",
1708
1717
]
1709
1718
1710
1719
[[package]]
···
1718
1727
1719
1728
[[package]]
1720
1729
name = "percent-encoding"
1721
-
version = "2.3.1"
1730
+
version = "2.3.2"
1722
1731
source = "registry+https://github.com/rust-lang/crates.io-index"
1723
-
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
1732
+
checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
1724
1733
1725
1734
[[package]]
1726
1735
name = "pin-project-lite"
···
1752
1761
1753
1762
[[package]]
1754
1763
name = "portable-atomic"
1755
-
version = "1.11.0"
1764
+
version = "1.11.1"
1756
1765
source = "registry+https://github.com/rust-lang/crates.io-index"
1757
-
checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e"
1766
+
checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
1758
1767
1759
1768
[[package]]
1760
1769
name = "potential_utf"
1761
-
version = "0.1.2"
1770
+
version = "0.1.4"
1762
1771
source = "registry+https://github.com/rust-lang/crates.io-index"
1763
-
checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585"
1772
+
checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77"
1764
1773
dependencies = [
1765
1774
"zerovec",
1766
1775
]
···
1786
1795
1787
1796
[[package]]
1788
1797
name = "proc-macro2"
1789
-
version = "1.0.95"
1798
+
version = "1.0.103"
1790
1799
source = "registry+https://github.com/rust-lang/crates.io-index"
1791
-
checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778"
1800
+
checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8"
1792
1801
dependencies = [
1793
1802
"unicode-ident",
1794
1803
]
1795
1804
1796
1805
[[package]]
1797
1806
name = "quinn"
1798
-
version = "0.11.8"
1807
+
version = "0.11.9"
1799
1808
source = "registry+https://github.com/rust-lang/crates.io-index"
1800
-
checksum = "626214629cda6781b6dc1d316ba307189c85ba657213ce642d9c77670f8202c8"
1809
+
checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20"
1801
1810
dependencies = [
1802
1811
"bytes",
1803
1812
"cfg_aliases",
···
1806
1815
"quinn-udp",
1807
1816
"rustc-hash",
1808
1817
"rustls",
1809
-
"socket2",
1810
-
"thiserror 2.0.12",
1818
+
"socket2 0.6.1",
1819
+
"thiserror 2.0.17",
1811
1820
"tokio",
1812
1821
"tracing",
1813
1822
"web-time",
···
1815
1824
1816
1825
[[package]]
1817
1826
name = "quinn-proto"
1818
-
version = "0.11.12"
1827
+
version = "0.11.13"
1819
1828
source = "registry+https://github.com/rust-lang/crates.io-index"
1820
-
checksum = "49df843a9161c85bb8aae55f101bc0bac8bcafd637a620d9122fd7e0b2f7422e"
1829
+
checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31"
1821
1830
dependencies = [
1822
1831
"bytes",
1823
-
"getrandom 0.3.3",
1832
+
"getrandom 0.3.4",
1824
1833
"lru-slab",
1825
-
"rand 0.9.1",
1834
+
"rand 0.9.2",
1826
1835
"ring",
1827
1836
"rustc-hash",
1828
1837
"rustls",
1829
1838
"rustls-pki-types",
1830
1839
"slab",
1831
-
"thiserror 2.0.12",
1840
+
"thiserror 2.0.17",
1832
1841
"tinyvec",
1833
1842
"tracing",
1834
1843
"web-time",
···
1836
1845
1837
1846
[[package]]
1838
1847
name = "quinn-udp"
1839
-
version = "0.5.12"
1848
+
version = "0.5.14"
1840
1849
source = "registry+https://github.com/rust-lang/crates.io-index"
1841
-
checksum = "ee4e529991f949c5e25755532370b8af5d114acae52326361d68d47af64aa842"
1850
+
checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd"
1842
1851
dependencies = [
1843
1852
"cfg_aliases",
1844
1853
"libc",
1845
1854
"once_cell",
1846
-
"socket2",
1855
+
"socket2 0.6.1",
1847
1856
"tracing",
1848
-
"windows-sys 0.59.0",
1857
+
"windows-sys 0.60.2",
1849
1858
]
1850
1859
1851
1860
[[package]]
1852
1861
name = "quote"
1853
-
version = "1.0.40"
1862
+
version = "1.0.41"
1854
1863
source = "registry+https://github.com/rust-lang/crates.io-index"
1855
-
checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
1864
+
checksum = "ce25767e7b499d1b604768e7cde645d14cc8584231ea6b295e9c9eb22c02e1d1"
1856
1865
dependencies = [
1857
1866
"proc-macro2",
1858
1867
]
1859
1868
1860
1869
[[package]]
1861
1870
name = "r-efi"
1862
-
version = "5.2.0"
1871
+
version = "5.3.0"
1863
1872
source = "registry+https://github.com/rust-lang/crates.io-index"
1864
-
checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5"
1873
+
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
1865
1874
1866
1875
[[package]]
1867
1876
name = "rand"
···
1876
1885
1877
1886
[[package]]
1878
1887
name = "rand"
1879
-
version = "0.9.1"
1888
+
version = "0.9.2"
1880
1889
source = "registry+https://github.com/rust-lang/crates.io-index"
1881
-
checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97"
1890
+
checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
1882
1891
dependencies = [
1883
1892
"rand_chacha 0.9.0",
1884
1893
"rand_core 0.9.3",
···
1919
1928
source = "registry+https://github.com/rust-lang/crates.io-index"
1920
1929
checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38"
1921
1930
dependencies = [
1922
-
"getrandom 0.3.3",
1931
+
"getrandom 0.3.4",
1923
1932
]
1924
1933
1925
1934
[[package]]
1926
1935
name = "redox_syscall"
1927
-
version = "0.5.12"
1936
+
version = "0.5.18"
1928
1937
source = "registry+https://github.com/rust-lang/crates.io-index"
1929
-
checksum = "928fca9cf2aa042393a8325b9ead81d2f0df4cb12e1e24cef072922ccd99c5af"
1938
+
checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d"
1930
1939
dependencies = [
1931
1940
"bitflags",
1932
1941
]
1933
1942
1934
1943
[[package]]
1935
1944
name = "regex"
1936
-
version = "1.11.1"
1945
+
version = "1.12.2"
1937
1946
source = "registry+https://github.com/rust-lang/crates.io-index"
1938
-
checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
1947
+
checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4"
1939
1948
dependencies = [
1940
1949
"aho-corasick",
1941
1950
"memchr",
1942
-
"regex-automata 0.4.9",
1943
-
"regex-syntax 0.8.5",
1944
-
]
1945
-
1946
-
[[package]]
1947
-
name = "regex-automata"
1948
-
version = "0.1.10"
1949
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1950
-
checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
1951
-
dependencies = [
1952
-
"regex-syntax 0.6.29",
1951
+
"regex-automata",
1952
+
"regex-syntax",
1953
1953
]
1954
1954
1955
1955
[[package]]
1956
1956
name = "regex-automata"
1957
-
version = "0.4.9"
1957
+
version = "0.4.13"
1958
1958
source = "registry+https://github.com/rust-lang/crates.io-index"
1959
-
checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
1959
+
checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c"
1960
1960
dependencies = [
1961
1961
"aho-corasick",
1962
1962
"memchr",
1963
-
"regex-syntax 0.8.5",
1963
+
"regex-syntax",
1964
1964
]
1965
1965
1966
1966
[[package]]
1967
1967
name = "regex-syntax"
1968
-
version = "0.6.29"
1969
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1970
-
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
1971
-
1972
-
[[package]]
1973
-
name = "regex-syntax"
1974
-
version = "0.8.5"
1968
+
version = "0.8.8"
1975
1969
source = "registry+https://github.com/rust-lang/crates.io-index"
1976
-
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
1970
+
checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58"
1977
1971
1978
1972
[[package]]
1979
1973
name = "reqwest"
1980
-
version = "0.12.18"
1974
+
version = "0.12.24"
1981
1975
source = "registry+https://github.com/rust-lang/crates.io-index"
1982
-
checksum = "e98ff6b0dbbe4d5a37318f433d4fc82babd21631f194d370409ceb2e40b2f0b5"
1976
+
checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f"
1983
1977
dependencies = [
1984
1978
"base64",
1985
1979
"bytes",
···
1993
1987
"hyper",
1994
1988
"hyper-rustls",
1995
1989
"hyper-util",
1996
-
"ipnet",
1997
1990
"js-sys",
1998
1991
"log",
1999
1992
"mime",
2000
1993
"mime_guess",
2001
-
"once_cell",
2002
1994
"percent-encoding",
2003
1995
"pin-project-lite",
2004
1996
"quinn",
···
2049
2041
2050
2042
[[package]]
2051
2043
name = "resolv-conf"
2052
-
version = "0.7.4"
2044
+
version = "0.7.5"
2053
2045
source = "registry+https://github.com/rust-lang/crates.io-index"
2054
-
checksum = "95325155c684b1c89f7765e30bc1c42e4a6da51ca513615660cb8a62ef9a88e3"
2046
+
checksum = "6b3789b30bd25ba102de4beabd95d21ac45b69b1be7d14522bab988c526d6799"
2055
2047
2056
2048
[[package]]
2057
2049
name = "rfc6979"
···
2099
2091
]
2100
2092
2101
2093
[[package]]
2102
-
name = "rustc-demangle"
2103
-
version = "0.1.24"
2104
-
source = "registry+https://github.com/rust-lang/crates.io-index"
2105
-
checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f"
2106
-
2107
-
[[package]]
2108
2094
name = "rustc-hash"
2109
2095
version = "2.1.1"
2110
2096
source = "registry+https://github.com/rust-lang/crates.io-index"
···
2121
2107
2122
2108
[[package]]
2123
2109
name = "rustls"
2124
-
version = "0.23.27"
2110
+
version = "0.23.35"
2125
2111
source = "registry+https://github.com/rust-lang/crates.io-index"
2126
-
checksum = "730944ca083c1c233a75c09f199e973ca499344a2b7ba9e755c457e86fb4a321"
2112
+
checksum = "533f54bc6a7d4f647e46ad909549eda97bf5afc1585190ef692b4286b198bd8f"
2127
2113
dependencies = [
2128
2114
"once_cell",
2129
2115
"ring",
···
2135
2121
2136
2122
[[package]]
2137
2123
name = "rustls-native-certs"
2138
-
version = "0.8.1"
2124
+
version = "0.8.2"
2139
2125
source = "registry+https://github.com/rust-lang/crates.io-index"
2140
-
checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3"
2126
+
checksum = "9980d917ebb0c0536119ba501e90834767bffc3d60641457fd84a1f3fd337923"
2141
2127
dependencies = [
2142
2128
"openssl-probe",
2143
2129
"rustls-pki-types",
···
2147
2133
2148
2134
[[package]]
2149
2135
name = "rustls-pki-types"
2150
-
version = "1.12.0"
2136
+
version = "1.13.0"
2151
2137
source = "registry+https://github.com/rust-lang/crates.io-index"
2152
-
checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79"
2138
+
checksum = "94182ad936a0c91c324cd46c6511b9510ed16af436d7b5bab34beab0afd55f7a"
2153
2139
dependencies = [
2154
2140
"web-time",
2155
2141
"zeroize",
···
2157
2143
2158
2144
[[package]]
2159
2145
name = "rustls-webpki"
2160
-
version = "0.103.3"
2146
+
version = "0.103.8"
2161
2147
source = "registry+https://github.com/rust-lang/crates.io-index"
2162
-
checksum = "e4a72fe2bcf7a6ac6fd7d0b9e5cb68aeb7d4c0a0271730218b3e92d43b4eb435"
2148
+
checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52"
2163
2149
dependencies = [
2164
2150
"ring",
2165
2151
"rustls-pki-types",
···
2168
2154
2169
2155
[[package]]
2170
2156
name = "rustversion"
2171
-
version = "1.0.21"
2157
+
version = "1.0.22"
2172
2158
source = "registry+https://github.com/rust-lang/crates.io-index"
2173
-
checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d"
2159
+
checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"
2174
2160
2175
2161
[[package]]
2176
2162
name = "ryu"
···
2180
2166
2181
2167
[[package]]
2182
2168
name = "schannel"
2183
-
version = "0.1.27"
2169
+
version = "0.1.28"
2184
2170
source = "registry+https://github.com/rust-lang/crates.io-index"
2185
-
checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d"
2171
+
checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1"
2186
2172
dependencies = [
2187
-
"windows-sys 0.59.0",
2173
+
"windows-sys 0.61.2",
2188
2174
]
2189
-
2190
-
[[package]]
2191
-
name = "scoped-tls"
2192
-
version = "1.0.1"
2193
-
source = "registry+https://github.com/rust-lang/crates.io-index"
2194
-
checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294"
2195
2175
2196
2176
[[package]]
2197
2177
name = "scopeguard"
···
2226
2206
2227
2207
[[package]]
2228
2208
name = "security-framework"
2229
-
version = "3.2.0"
2209
+
version = "3.5.1"
2230
2210
source = "registry+https://github.com/rust-lang/crates.io-index"
2231
-
checksum = "271720403f46ca04f7ba6f55d438f8bd878d6b8ca0a1046e8228c4145bcbb316"
2211
+
checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef"
2232
2212
dependencies = [
2233
2213
"bitflags",
2234
2214
"core-foundation 0.10.1",
···
2239
2219
2240
2220
[[package]]
2241
2221
name = "security-framework-sys"
2242
-
version = "2.14.0"
2222
+
version = "2.15.0"
2243
2223
source = "registry+https://github.com/rust-lang/crates.io-index"
2244
-
checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32"
2224
+
checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0"
2245
2225
dependencies = [
2246
2226
"core-foundation-sys",
2247
2227
"libc",
···
2249
2229
2250
2230
[[package]]
2251
2231
name = "semver"
2252
-
version = "1.0.26"
2232
+
version = "1.0.27"
2253
2233
source = "registry+https://github.com/rust-lang/crates.io-index"
2254
-
checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0"
2234
+
checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2"
2255
2235
2256
2236
[[package]]
2257
2237
name = "serde"
2258
-
version = "1.0.219"
2238
+
version = "1.0.228"
2259
2239
source = "registry+https://github.com/rust-lang/crates.io-index"
2260
-
checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6"
2240
+
checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
2261
2241
dependencies = [
2242
+
"serde_core",
2262
2243
"serde_derive",
2263
2244
]
2264
2245
2265
2246
[[package]]
2266
2247
name = "serde_bytes"
2267
-
version = "0.11.17"
2248
+
version = "0.11.19"
2268
2249
source = "registry+https://github.com/rust-lang/crates.io-index"
2269
-
checksum = "8437fd221bde2d4ca316d61b90e337e9e702b3820b87d63caa9ba6c02bd06d96"
2250
+
checksum = "a5d440709e79d88e51ac01c4b72fc6cb7314017bb7da9eeff678aa94c10e3ea8"
2270
2251
dependencies = [
2271
2252
"serde",
2253
+
"serde_core",
2254
+
]
2255
+
2256
+
[[package]]
2257
+
name = "serde_core"
2258
+
version = "1.0.228"
2259
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2260
+
checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
2261
+
dependencies = [
2262
+
"serde_derive",
2272
2263
]
2273
2264
2274
2265
[[package]]
2275
2266
name = "serde_derive"
2276
-
version = "1.0.219"
2267
+
version = "1.0.228"
2277
2268
source = "registry+https://github.com/rust-lang/crates.io-index"
2278
-
checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
2269
+
checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
2279
2270
dependencies = [
2280
2271
"proc-macro2",
2281
2272
"quote",
2282
-
"syn",
2273
+
"syn 2.0.109",
2283
2274
]
2284
2275
2285
2276
[[package]]
2286
2277
name = "serde_ipld_dagcbor"
2287
-
version = "0.6.3"
2278
+
version = "0.6.4"
2288
2279
source = "registry+https://github.com/rust-lang/crates.io-index"
2289
-
checksum = "99600723cf53fb000a66175555098db7e75217c415bdd9a16a65d52a19dcc4fc"
2280
+
checksum = "46182f4f08349a02b45c998ba3215d3f9de826246ba02bb9dddfe9a2a2100778"
2290
2281
dependencies = [
2291
2282
"cbor4ii",
2292
2283
"ipld-core",
···
2296
2287
2297
2288
[[package]]
2298
2289
name = "serde_json"
2299
-
version = "1.0.140"
2290
+
version = "1.0.145"
2300
2291
source = "registry+https://github.com/rust-lang/crates.io-index"
2301
-
checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
2292
+
checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c"
2302
2293
dependencies = [
2294
+
"indexmap",
2303
2295
"itoa",
2304
2296
"memchr",
2305
2297
"ryu",
2306
2298
"serde",
2299
+
"serde_core",
2307
2300
]
2308
2301
2309
2302
[[package]]
2310
2303
name = "serde_path_to_error"
2311
-
version = "0.1.17"
2304
+
version = "0.1.20"
2312
2305
source = "registry+https://github.com/rust-lang/crates.io-index"
2313
-
checksum = "59fab13f937fa393d08645bf3a84bdfe86e296747b506ada67bb15f10f218b2a"
2306
+
checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457"
2314
2307
dependencies = [
2315
2308
"itoa",
2316
2309
"serde",
2310
+
"serde_core",
2317
2311
]
2318
2312
2319
2313
[[package]]
···
2366
2360
2367
2361
[[package]]
2368
2362
name = "signal-hook-registry"
2369
-
version = "1.4.5"
2363
+
version = "1.4.6"
2370
2364
source = "registry+https://github.com/rust-lang/crates.io-index"
2371
-
checksum = "9203b8055f63a2a00e2f593bb0510367fe707d7ff1e5c872de2f537b339e5410"
2365
+
checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b"
2372
2366
dependencies = [
2373
2367
"libc",
2374
2368
]
···
2391
2385
2392
2386
[[package]]
2393
2387
name = "slab"
2394
-
version = "0.4.9"
2388
+
version = "0.4.11"
2395
2389
source = "registry+https://github.com/rust-lang/crates.io-index"
2396
-
checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67"
2397
-
dependencies = [
2398
-
"autocfg",
2399
-
]
2390
+
checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589"
2400
2391
2401
2392
[[package]]
2402
2393
name = "smallvec"
2403
-
version = "1.15.0"
2394
+
version = "1.15.1"
2404
2395
source = "registry+https://github.com/rust-lang/crates.io-index"
2405
-
checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9"
2396
+
checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
2406
2397
2407
2398
[[package]]
2408
2399
name = "socket2"
···
2412
2403
dependencies = [
2413
2404
"libc",
2414
2405
"windows-sys 0.52.0",
2406
+
]
2407
+
2408
+
[[package]]
2409
+
name = "socket2"
2410
+
version = "0.6.1"
2411
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2412
+
checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881"
2413
+
dependencies = [
2414
+
"libc",
2415
+
"windows-sys 0.60.2",
2415
2416
]
2416
2417
2417
2418
[[package]]
···
2426
2427
2427
2428
[[package]]
2428
2429
name = "stable_deref_trait"
2429
-
version = "1.2.0"
2430
+
version = "1.2.1"
2430
2431
source = "registry+https://github.com/rust-lang/crates.io-index"
2431
-
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
2432
+
checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596"
2433
+
2434
+
[[package]]
2435
+
name = "static_assertions"
2436
+
version = "1.1.0"
2437
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2438
+
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
2432
2439
2433
2440
[[package]]
2434
2441
name = "strsim"
···
2444
2451
2445
2452
[[package]]
2446
2453
name = "syn"
2447
-
version = "2.0.101"
2454
+
version = "1.0.109"
2455
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2456
+
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
2457
+
dependencies = [
2458
+
"proc-macro2",
2459
+
"quote",
2460
+
"unicode-ident",
2461
+
]
2462
+
2463
+
[[package]]
2464
+
name = "syn"
2465
+
version = "2.0.109"
2448
2466
source = "registry+https://github.com/rust-lang/crates.io-index"
2449
-
checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf"
2467
+
checksum = "2f17c7e013e88258aa9543dcbe81aca68a667a9ac37cd69c9fbc07858bfe0e2f"
2450
2468
dependencies = [
2451
2469
"proc-macro2",
2452
2470
"quote",
···
2470
2488
dependencies = [
2471
2489
"proc-macro2",
2472
2490
"quote",
2473
-
"syn",
2491
+
"syn 2.0.109",
2474
2492
]
2475
2493
2476
2494
[[package]]
···
2511
2529
2512
2530
[[package]]
2513
2531
name = "thiserror"
2514
-
version = "2.0.12"
2532
+
version = "2.0.17"
2515
2533
source = "registry+https://github.com/rust-lang/crates.io-index"
2516
-
checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708"
2534
+
checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8"
2517
2535
dependencies = [
2518
-
"thiserror-impl 2.0.12",
2536
+
"thiserror-impl 2.0.17",
2519
2537
]
2520
2538
2521
2539
[[package]]
···
2526
2544
dependencies = [
2527
2545
"proc-macro2",
2528
2546
"quote",
2529
-
"syn",
2547
+
"syn 2.0.109",
2530
2548
]
2531
2549
2532
2550
[[package]]
2533
2551
name = "thiserror-impl"
2534
-
version = "2.0.12"
2552
+
version = "2.0.17"
2535
2553
source = "registry+https://github.com/rust-lang/crates.io-index"
2536
-
checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
2554
+
checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913"
2537
2555
dependencies = [
2538
2556
"proc-macro2",
2539
2557
"quote",
2540
-
"syn",
2558
+
"syn 2.0.109",
2541
2559
]
2542
2560
2543
2561
[[package]]
2544
2562
name = "thread_local"
2545
-
version = "1.1.8"
2563
+
version = "1.1.9"
2546
2564
source = "registry+https://github.com/rust-lang/crates.io-index"
2547
-
checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c"
2565
+
checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185"
2548
2566
dependencies = [
2549
2567
"cfg-if",
2550
-
"once_cell",
2551
2568
]
2552
2569
2553
2570
[[package]]
2554
2571
name = "tinystr"
2555
-
version = "0.8.1"
2572
+
version = "0.8.2"
2556
2573
source = "registry+https://github.com/rust-lang/crates.io-index"
2557
-
checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b"
2574
+
checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869"
2558
2575
dependencies = [
2559
2576
"displaydoc",
2560
2577
"zerovec",
···
2562
2579
2563
2580
[[package]]
2564
2581
name = "tinyvec"
2565
-
version = "1.9.0"
2582
+
version = "1.10.0"
2566
2583
source = "registry+https://github.com/rust-lang/crates.io-index"
2567
-
checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71"
2584
+
checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa"
2568
2585
dependencies = [
2569
2586
"tinyvec_macros",
2570
2587
]
···
2577
2594
2578
2595
[[package]]
2579
2596
name = "tokio"
2580
-
version = "1.45.1"
2597
+
version = "1.48.0"
2581
2598
source = "registry+https://github.com/rust-lang/crates.io-index"
2582
-
checksum = "75ef51a33ef1da925cea3e4eb122833cb377c61439ca401b770f54902b806779"
2599
+
checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408"
2583
2600
dependencies = [
2584
-
"backtrace",
2585
2601
"bytes",
2586
2602
"libc",
2587
2603
"mio",
2588
2604
"parking_lot",
2589
2605
"pin-project-lite",
2590
2606
"signal-hook-registry",
2591
-
"socket2",
2607
+
"socket2 0.6.1",
2592
2608
"tokio-macros",
2593
-
"windows-sys 0.52.0",
2609
+
"windows-sys 0.61.2",
2594
2610
]
2595
2611
2596
2612
[[package]]
2597
2613
name = "tokio-macros"
2598
-
version = "2.5.0"
2614
+
version = "2.6.0"
2599
2615
source = "registry+https://github.com/rust-lang/crates.io-index"
2600
-
checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8"
2616
+
checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5"
2601
2617
dependencies = [
2602
2618
"proc-macro2",
2603
2619
"quote",
2604
-
"syn",
2620
+
"syn 2.0.109",
2605
2621
]
2606
2622
2607
2623
[[package]]
2608
2624
name = "tokio-rustls"
2609
-
version = "0.26.2"
2625
+
version = "0.26.4"
2610
2626
source = "registry+https://github.com/rust-lang/crates.io-index"
2611
-
checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b"
2627
+
checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61"
2612
2628
dependencies = [
2613
2629
"rustls",
2614
2630
"tokio",
2615
2631
]
2616
2632
2617
2633
[[package]]
2634
+
name = "tokio-stream"
2635
+
version = "0.1.17"
2636
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2637
+
checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047"
2638
+
dependencies = [
2639
+
"futures-core",
2640
+
"pin-project-lite",
2641
+
"tokio",
2642
+
]
2643
+
2644
+
[[package]]
2618
2645
name = "tokio-util"
2619
-
version = "0.7.15"
2646
+
version = "0.7.17"
2620
2647
source = "registry+https://github.com/rust-lang/crates.io-index"
2621
-
checksum = "66a539a9ad6d5d281510d5bd368c973d636c02dbf8a67300bfb6b950696ad7df"
2648
+
checksum = "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594"
2622
2649
dependencies = [
2623
2650
"bytes",
2624
2651
"futures-core",
···
2639
2666
"futures-sink",
2640
2667
"http",
2641
2668
"httparse",
2642
-
"rand 0.9.1",
2669
+
"rand 0.9.2",
2643
2670
"ring",
2644
2671
"rustls-native-certs",
2645
2672
"rustls-pki-types",
···
2667
2694
2668
2695
[[package]]
2669
2696
name = "tower-http"
2670
-
version = "0.6.4"
2697
+
version = "0.6.6"
2671
2698
source = "registry+https://github.com/rust-lang/crates.io-index"
2672
-
checksum = "0fdb0c213ca27a9f57ab69ddb290fd80d970922355b83ae380b395d3986b8a2e"
2699
+
checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
2673
2700
dependencies = [
2674
2701
"bitflags",
2675
2702
"bytes",
···
2709
2736
2710
2737
[[package]]
2711
2738
name = "tracing-attributes"
2712
-
version = "0.1.28"
2739
+
version = "0.1.30"
2713
2740
source = "registry+https://github.com/rust-lang/crates.io-index"
2714
-
checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d"
2741
+
checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903"
2715
2742
dependencies = [
2716
2743
"proc-macro2",
2717
2744
"quote",
2718
-
"syn",
2745
+
"syn 2.0.109",
2719
2746
]
2720
2747
2721
2748
[[package]]
2722
2749
name = "tracing-core"
2723
-
version = "0.1.33"
2750
+
version = "0.1.34"
2724
2751
source = "registry+https://github.com/rust-lang/crates.io-index"
2725
-
checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c"
2752
+
checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678"
2726
2753
dependencies = [
2727
2754
"once_cell",
2728
2755
"valuable",
···
2741
2768
2742
2769
[[package]]
2743
2770
name = "tracing-subscriber"
2744
-
version = "0.3.19"
2771
+
version = "0.3.20"
2745
2772
source = "registry+https://github.com/rust-lang/crates.io-index"
2746
-
checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
2773
+
checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5"
2747
2774
dependencies = [
2748
2775
"matchers",
2749
2776
"nu-ansi-term",
2750
2777
"once_cell",
2751
-
"regex",
2778
+
"regex-automata",
2752
2779
"sharded-slab",
2753
2780
"smallvec",
2754
2781
"thread_local",
···
2765
2792
2766
2793
[[package]]
2767
2794
name = "typenum"
2768
-
version = "1.18.0"
2795
+
version = "1.19.0"
2769
2796
source = "registry+https://github.com/rust-lang/crates.io-index"
2770
-
checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f"
2797
+
checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb"
2771
2798
2772
2799
[[package]]
2773
2800
name = "ulid"
···
2775
2802
source = "registry+https://github.com/rust-lang/crates.io-index"
2776
2803
checksum = "470dbf6591da1b39d43c14523b2b469c86879a53e8b758c8e090a470fe7b1fbe"
2777
2804
dependencies = [
2778
-
"rand 0.9.1",
2805
+
"rand 0.9.2",
2779
2806
"web-time",
2780
2807
]
2781
2808
···
2787
2814
2788
2815
[[package]]
2789
2816
name = "unicode-ident"
2790
-
version = "1.0.18"
2817
+
version = "1.0.22"
2791
2818
source = "registry+https://github.com/rust-lang/crates.io-index"
2792
-
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
2819
+
checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5"
2793
2820
2794
2821
[[package]]
2795
2822
name = "unsigned-varint"
···
2805
2832
2806
2833
[[package]]
2807
2834
name = "url"
2808
-
version = "2.5.4"
2835
+
version = "2.5.7"
2809
2836
source = "registry+https://github.com/rust-lang/crates.io-index"
2810
-
checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60"
2837
+
checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b"
2811
2838
dependencies = [
2812
2839
"form_urlencoded",
2813
2840
"idna",
2814
2841
"percent-encoding",
2842
+
"serde",
2815
2843
]
2816
2844
2817
2845
[[package]]
···
2834
2862
2835
2863
[[package]]
2836
2864
name = "uuid"
2837
-
version = "1.17.0"
2865
+
version = "1.18.1"
2838
2866
source = "registry+https://github.com/rust-lang/crates.io-index"
2839
-
checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d"
2867
+
checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2"
2840
2868
dependencies = [
2841
-
"getrandom 0.3.3",
2869
+
"getrandom 0.3.4",
2842
2870
"js-sys",
2843
2871
"wasm-bindgen",
2844
2872
]
···
2866
2894
2867
2895
[[package]]
2868
2896
name = "wasi"
2869
-
version = "0.11.0+wasi-snapshot-preview1"
2897
+
version = "0.11.1+wasi-snapshot-preview1"
2870
2898
source = "registry+https://github.com/rust-lang/crates.io-index"
2871
-
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
2899
+
checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b"
2872
2900
2873
2901
[[package]]
2874
-
name = "wasi"
2875
-
version = "0.14.2+wasi-0.2.4"
2902
+
name = "wasip2"
2903
+
version = "1.0.1+wasi-0.2.4"
2876
2904
source = "registry+https://github.com/rust-lang/crates.io-index"
2877
-
checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3"
2905
+
checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7"
2878
2906
dependencies = [
2879
-
"wit-bindgen-rt",
2907
+
"wit-bindgen",
2880
2908
]
2881
2909
2882
2910
[[package]]
2883
2911
name = "wasm-bindgen"
2884
-
version = "0.2.100"
2912
+
version = "0.2.105"
2885
2913
source = "registry+https://github.com/rust-lang/crates.io-index"
2886
-
checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5"
2914
+
checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60"
2887
2915
dependencies = [
2888
2916
"cfg-if",
2889
2917
"once_cell",
2890
2918
"rustversion",
2891
2919
"wasm-bindgen-macro",
2892
-
]
2893
-
2894
-
[[package]]
2895
-
name = "wasm-bindgen-backend"
2896
-
version = "0.2.100"
2897
-
source = "registry+https://github.com/rust-lang/crates.io-index"
2898
-
checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6"
2899
-
dependencies = [
2900
-
"bumpalo",
2901
-
"log",
2902
-
"proc-macro2",
2903
-
"quote",
2904
-
"syn",
2905
2920
"wasm-bindgen-shared",
2906
2921
]
2907
2922
2908
2923
[[package]]
2909
2924
name = "wasm-bindgen-futures"
2910
-
version = "0.4.50"
2925
+
version = "0.4.55"
2911
2926
source = "registry+https://github.com/rust-lang/crates.io-index"
2912
-
checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61"
2927
+
checksum = "551f88106c6d5e7ccc7cd9a16f312dd3b5d36ea8b4954304657d5dfba115d4a0"
2913
2928
dependencies = [
2914
2929
"cfg-if",
2915
2930
"js-sys",
···
2920
2935
2921
2936
[[package]]
2922
2937
name = "wasm-bindgen-macro"
2923
-
version = "0.2.100"
2938
+
version = "0.2.105"
2924
2939
source = "registry+https://github.com/rust-lang/crates.io-index"
2925
-
checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407"
2940
+
checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2"
2926
2941
dependencies = [
2927
2942
"quote",
2928
2943
"wasm-bindgen-macro-support",
···
2930
2945
2931
2946
[[package]]
2932
2947
name = "wasm-bindgen-macro-support"
2933
-
version = "0.2.100"
2948
+
version = "0.2.105"
2934
2949
source = "registry+https://github.com/rust-lang/crates.io-index"
2935
-
checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
2950
+
checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc"
2936
2951
dependencies = [
2952
+
"bumpalo",
2937
2953
"proc-macro2",
2938
2954
"quote",
2939
-
"syn",
2940
-
"wasm-bindgen-backend",
2955
+
"syn 2.0.109",
2941
2956
"wasm-bindgen-shared",
2942
2957
]
2943
2958
2944
2959
[[package]]
2945
2960
name = "wasm-bindgen-shared"
2946
-
version = "0.2.100"
2961
+
version = "0.2.105"
2947
2962
source = "registry+https://github.com/rust-lang/crates.io-index"
2948
-
checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d"
2963
+
checksum = "76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76"
2949
2964
dependencies = [
2950
2965
"unicode-ident",
2951
2966
]
2952
2967
2953
2968
[[package]]
2954
2969
name = "web-sys"
2955
-
version = "0.3.77"
2970
+
version = "0.3.82"
2956
2971
source = "registry+https://github.com/rust-lang/crates.io-index"
2957
-
checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2"
2972
+
checksum = "3a1f95c0d03a47f4ae1f7a64643a6bb97465d9b740f0fa8f90ea33915c99a9a1"
2958
2973
dependencies = [
2959
2974
"js-sys",
2960
2975
"wasm-bindgen",
···
2972
2987
2973
2988
[[package]]
2974
2989
name = "webpki-roots"
2975
-
version = "1.0.0"
2990
+
version = "1.0.4"
2976
2991
source = "registry+https://github.com/rust-lang/crates.io-index"
2977
-
checksum = "2853738d1cc4f2da3a225c18ec6c3721abb31961096e9dbf5ab35fa88b19cfdb"
2992
+
checksum = "b2878ef029c47c6e8cf779119f20fcf52bde7ad42a731b2a304bc221df17571e"
2978
2993
dependencies = [
2979
2994
"rustls-pki-types",
2980
2995
]
2981
2996
2982
2997
[[package]]
2983
2998
name = "widestring"
2984
-
version = "1.2.0"
2985
-
source = "registry+https://github.com/rust-lang/crates.io-index"
2986
-
checksum = "dd7cf3379ca1aac9eea11fba24fd7e315d621f8dfe35c8d7d2be8b793726e07d"
2987
-
2988
-
[[package]]
2989
-
name = "winapi"
2990
-
version = "0.3.9"
2991
-
source = "registry+https://github.com/rust-lang/crates.io-index"
2992
-
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
2993
-
dependencies = [
2994
-
"winapi-i686-pc-windows-gnu",
2995
-
"winapi-x86_64-pc-windows-gnu",
2996
-
]
2997
-
2998
-
[[package]]
2999
-
name = "winapi-i686-pc-windows-gnu"
3000
-
version = "0.4.0"
2999
+
version = "1.2.1"
3001
3000
source = "registry+https://github.com/rust-lang/crates.io-index"
3002
-
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
3001
+
checksum = "72069c3113ab32ab29e5584db3c6ec55d416895e60715417b5b883a357c3e471"
3003
3002
3004
3003
[[package]]
3005
-
name = "winapi-x86_64-pc-windows-gnu"
3006
-
version = "0.4.0"
3004
+
name = "windows-link"
3005
+
version = "0.1.3"
3007
3006
source = "registry+https://github.com/rust-lang/crates.io-index"
3008
-
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
3007
+
checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a"
3009
3008
3010
3009
[[package]]
3011
-
name = "windows"
3012
-
version = "0.61.1"
3013
-
source = "registry+https://github.com/rust-lang/crates.io-index"
3014
-
checksum = "c5ee8f3d025738cb02bad7868bbb5f8a6327501e870bf51f1b455b0a2454a419"
3015
-
dependencies = [
3016
-
"windows-collections",
3017
-
"windows-core",
3018
-
"windows-future",
3019
-
"windows-link",
3020
-
"windows-numerics",
3021
-
]
3022
-
3023
-
[[package]]
3024
-
name = "windows-collections"
3025
-
version = "0.2.0"
3026
-
source = "registry+https://github.com/rust-lang/crates.io-index"
3027
-
checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8"
3028
-
dependencies = [
3029
-
"windows-core",
3030
-
]
3031
-
3032
-
[[package]]
3033
-
name = "windows-core"
3034
-
version = "0.61.2"
3035
-
source = "registry+https://github.com/rust-lang/crates.io-index"
3036
-
checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3"
3037
-
dependencies = [
3038
-
"windows-implement",
3039
-
"windows-interface",
3040
-
"windows-link",
3041
-
"windows-result",
3042
-
"windows-strings 0.4.2",
3043
-
]
3044
-
3045
-
[[package]]
3046
-
name = "windows-future"
3010
+
name = "windows-link"
3047
3011
version = "0.2.1"
3048
3012
source = "registry+https://github.com/rust-lang/crates.io-index"
3049
-
checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e"
3050
-
dependencies = [
3051
-
"windows-core",
3052
-
"windows-link",
3053
-
"windows-threading",
3054
-
]
3055
-
3056
-
[[package]]
3057
-
name = "windows-implement"
3058
-
version = "0.60.0"
3059
-
source = "registry+https://github.com/rust-lang/crates.io-index"
3060
-
checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836"
3061
-
dependencies = [
3062
-
"proc-macro2",
3063
-
"quote",
3064
-
"syn",
3065
-
]
3066
-
3067
-
[[package]]
3068
-
name = "windows-interface"
3069
-
version = "0.59.1"
3070
-
source = "registry+https://github.com/rust-lang/crates.io-index"
3071
-
checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8"
3072
-
dependencies = [
3073
-
"proc-macro2",
3074
-
"quote",
3075
-
"syn",
3076
-
]
3077
-
3078
-
[[package]]
3079
-
name = "windows-link"
3080
-
version = "0.1.1"
3081
-
source = "registry+https://github.com/rust-lang/crates.io-index"
3082
-
checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38"
3083
-
3084
-
[[package]]
3085
-
name = "windows-numerics"
3086
-
version = "0.2.0"
3087
-
source = "registry+https://github.com/rust-lang/crates.io-index"
3088
-
checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1"
3089
-
dependencies = [
3090
-
"windows-core",
3091
-
"windows-link",
3092
-
]
3013
+
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
3093
3014
3094
3015
[[package]]
3095
3016
name = "windows-registry"
3096
-
version = "0.4.0"
3017
+
version = "0.5.3"
3097
3018
source = "registry+https://github.com/rust-lang/crates.io-index"
3098
-
checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3"
3019
+
checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e"
3099
3020
dependencies = [
3021
+
"windows-link 0.1.3",
3100
3022
"windows-result",
3101
-
"windows-strings 0.3.1",
3102
-
"windows-targets 0.53.0",
3023
+
"windows-strings",
3103
3024
]
3104
3025
3105
3026
[[package]]
···
3108
3029
source = "registry+https://github.com/rust-lang/crates.io-index"
3109
3030
checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6"
3110
3031
dependencies = [
3111
-
"windows-link",
3112
-
]
3113
-
3114
-
[[package]]
3115
-
name = "windows-strings"
3116
-
version = "0.3.1"
3117
-
source = "registry+https://github.com/rust-lang/crates.io-index"
3118
-
checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319"
3119
-
dependencies = [
3120
-
"windows-link",
3032
+
"windows-link 0.1.3",
3121
3033
]
3122
3034
3123
3035
[[package]]
···
3126
3038
source = "registry+https://github.com/rust-lang/crates.io-index"
3127
3039
checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57"
3128
3040
dependencies = [
3129
-
"windows-link",
3041
+
"windows-link 0.1.3",
3130
3042
]
3131
3043
3132
3044
[[package]]
···
3157
3069
]
3158
3070
3159
3071
[[package]]
3072
+
name = "windows-sys"
3073
+
version = "0.60.2"
3074
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3075
+
checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
3076
+
dependencies = [
3077
+
"windows-targets 0.53.5",
3078
+
]
3079
+
3080
+
[[package]]
3081
+
name = "windows-sys"
3082
+
version = "0.61.2"
3083
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3084
+
checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
3085
+
dependencies = [
3086
+
"windows-link 0.2.1",
3087
+
]
3088
+
3089
+
[[package]]
3160
3090
name = "windows-targets"
3161
3091
version = "0.48.5"
3162
3092
source = "registry+https://github.com/rust-lang/crates.io-index"
···
3189
3119
3190
3120
[[package]]
3191
3121
name = "windows-targets"
3192
-
version = "0.53.0"
3122
+
version = "0.53.5"
3193
3123
source = "registry+https://github.com/rust-lang/crates.io-index"
3194
-
checksum = "b1e4c7e8ceaaf9cb7d7507c974735728ab453b67ef8f18febdd7c11fe59dca8b"
3124
+
checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3"
3195
3125
dependencies = [
3196
-
"windows_aarch64_gnullvm 0.53.0",
3197
-
"windows_aarch64_msvc 0.53.0",
3198
-
"windows_i686_gnu 0.53.0",
3199
-
"windows_i686_gnullvm 0.53.0",
3200
-
"windows_i686_msvc 0.53.0",
3201
-
"windows_x86_64_gnu 0.53.0",
3202
-
"windows_x86_64_gnullvm 0.53.0",
3203
-
"windows_x86_64_msvc 0.53.0",
3204
-
]
3205
-
3206
-
[[package]]
3207
-
name = "windows-threading"
3208
-
version = "0.1.0"
3209
-
source = "registry+https://github.com/rust-lang/crates.io-index"
3210
-
checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6"
3211
-
dependencies = [
3212
-
"windows-link",
3126
+
"windows-link 0.2.1",
3127
+
"windows_aarch64_gnullvm 0.53.1",
3128
+
"windows_aarch64_msvc 0.53.1",
3129
+
"windows_i686_gnu 0.53.1",
3130
+
"windows_i686_gnullvm 0.53.1",
3131
+
"windows_i686_msvc 0.53.1",
3132
+
"windows_x86_64_gnu 0.53.1",
3133
+
"windows_x86_64_gnullvm 0.53.1",
3134
+
"windows_x86_64_msvc 0.53.1",
3213
3135
]
3214
3136
3215
3137
[[package]]
···
3226
3148
3227
3149
[[package]]
3228
3150
name = "windows_aarch64_gnullvm"
3229
-
version = "0.53.0"
3151
+
version = "0.53.1"
3230
3152
source = "registry+https://github.com/rust-lang/crates.io-index"
3231
-
checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764"
3153
+
checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53"
3232
3154
3233
3155
[[package]]
3234
3156
name = "windows_aarch64_msvc"
···
3244
3166
3245
3167
[[package]]
3246
3168
name = "windows_aarch64_msvc"
3247
-
version = "0.53.0"
3169
+
version = "0.53.1"
3248
3170
source = "registry+https://github.com/rust-lang/crates.io-index"
3249
-
checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c"
3171
+
checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006"
3250
3172
3251
3173
[[package]]
3252
3174
name = "windows_i686_gnu"
···
3262
3184
3263
3185
[[package]]
3264
3186
name = "windows_i686_gnu"
3265
-
version = "0.53.0"
3187
+
version = "0.53.1"
3266
3188
source = "registry+https://github.com/rust-lang/crates.io-index"
3267
-
checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3"
3189
+
checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3"
3268
3190
3269
3191
[[package]]
3270
3192
name = "windows_i686_gnullvm"
···
3274
3196
3275
3197
[[package]]
3276
3198
name = "windows_i686_gnullvm"
3277
-
version = "0.53.0"
3199
+
version = "0.53.1"
3278
3200
source = "registry+https://github.com/rust-lang/crates.io-index"
3279
-
checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11"
3201
+
checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c"
3280
3202
3281
3203
[[package]]
3282
3204
name = "windows_i686_msvc"
···
3292
3214
3293
3215
[[package]]
3294
3216
name = "windows_i686_msvc"
3295
-
version = "0.53.0"
3217
+
version = "0.53.1"
3296
3218
source = "registry+https://github.com/rust-lang/crates.io-index"
3297
-
checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d"
3219
+
checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2"
3298
3220
3299
3221
[[package]]
3300
3222
name = "windows_x86_64_gnu"
···
3310
3232
3311
3233
[[package]]
3312
3234
name = "windows_x86_64_gnu"
3313
-
version = "0.53.0"
3235
+
version = "0.53.1"
3314
3236
source = "registry+https://github.com/rust-lang/crates.io-index"
3315
-
checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba"
3237
+
checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499"
3316
3238
3317
3239
[[package]]
3318
3240
name = "windows_x86_64_gnullvm"
···
3328
3250
3329
3251
[[package]]
3330
3252
name = "windows_x86_64_gnullvm"
3331
-
version = "0.53.0"
3253
+
version = "0.53.1"
3332
3254
source = "registry+https://github.com/rust-lang/crates.io-index"
3333
-
checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57"
3255
+
checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1"
3334
3256
3335
3257
[[package]]
3336
3258
name = "windows_x86_64_msvc"
···
3346
3268
3347
3269
[[package]]
3348
3270
name = "windows_x86_64_msvc"
3349
-
version = "0.53.0"
3271
+
version = "0.53.1"
3350
3272
source = "registry+https://github.com/rust-lang/crates.io-index"
3351
-
checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
3273
+
checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650"
3352
3274
3353
3275
[[package]]
3354
3276
name = "winreg"
···
3361
3283
]
3362
3284
3363
3285
[[package]]
3364
-
name = "wit-bindgen-rt"
3365
-
version = "0.39.0"
3286
+
name = "wit-bindgen"
3287
+
version = "0.46.0"
3366
3288
source = "registry+https://github.com/rust-lang/crates.io-index"
3367
-
checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1"
3368
-
dependencies = [
3369
-
"bitflags",
3370
-
]
3289
+
checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59"
3371
3290
3372
3291
[[package]]
3373
3292
name = "writeable"
3374
-
version = "0.6.1"
3293
+
version = "0.6.2"
3375
3294
source = "registry+https://github.com/rust-lang/crates.io-index"
3376
-
checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"
3295
+
checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9"
3377
3296
3378
3297
[[package]]
3379
3298
name = "yoke"
3380
-
version = "0.8.0"
3299
+
version = "0.8.1"
3381
3300
source = "registry+https://github.com/rust-lang/crates.io-index"
3382
-
checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc"
3301
+
checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954"
3383
3302
dependencies = [
3384
-
"serde",
3385
3303
"stable_deref_trait",
3386
3304
"yoke-derive",
3387
3305
"zerofrom",
···
3389
3307
3390
3308
[[package]]
3391
3309
name = "yoke-derive"
3392
-
version = "0.8.0"
3310
+
version = "0.8.1"
3393
3311
source = "registry+https://github.com/rust-lang/crates.io-index"
3394
-
checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6"
3312
+
checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d"
3395
3313
dependencies = [
3396
3314
"proc-macro2",
3397
3315
"quote",
3398
-
"syn",
3316
+
"syn 2.0.109",
3399
3317
"synstructure",
3400
3318
]
3401
3319
3402
3320
[[package]]
3403
3321
name = "zerocopy"
3404
-
version = "0.8.25"
3322
+
version = "0.8.27"
3405
3323
source = "registry+https://github.com/rust-lang/crates.io-index"
3406
-
checksum = "a1702d9583232ddb9174e01bb7c15a2ab8fb1bc6f227aa1233858c351a3ba0cb"
3324
+
checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c"
3407
3325
dependencies = [
3408
3326
"zerocopy-derive",
3409
3327
]
3410
3328
3411
3329
[[package]]
3412
3330
name = "zerocopy-derive"
3413
-
version = "0.8.25"
3331
+
version = "0.8.27"
3414
3332
source = "registry+https://github.com/rust-lang/crates.io-index"
3415
-
checksum = "28a6e20d751156648aa063f3800b706ee209a32c0b4d9f24be3d980b01be55ef"
3333
+
checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831"
3416
3334
dependencies = [
3417
3335
"proc-macro2",
3418
3336
"quote",
3419
-
"syn",
3337
+
"syn 2.0.109",
3420
3338
]
3421
3339
3422
3340
[[package]]
···
3436
3354
dependencies = [
3437
3355
"proc-macro2",
3438
3356
"quote",
3439
-
"syn",
3357
+
"syn 2.0.109",
3440
3358
"synstructure",
3441
3359
]
3442
3360
3443
3361
[[package]]
3444
3362
name = "zeroize"
3445
-
version = "1.8.1"
3363
+
version = "1.8.2"
3446
3364
source = "registry+https://github.com/rust-lang/crates.io-index"
3447
-
checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde"
3365
+
checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0"
3448
3366
dependencies = [
3449
3367
"zeroize_derive",
3450
3368
]
···
3457
3375
dependencies = [
3458
3376
"proc-macro2",
3459
3377
"quote",
3460
-
"syn",
3378
+
"syn 2.0.109",
3461
3379
]
3462
3380
3463
3381
[[package]]
3464
3382
name = "zerotrie"
3465
-
version = "0.2.2"
3383
+
version = "0.2.3"
3466
3384
source = "registry+https://github.com/rust-lang/crates.io-index"
3467
-
checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595"
3385
+
checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851"
3468
3386
dependencies = [
3469
3387
"displaydoc",
3470
3388
"yoke",
···
3473
3391
3474
3392
[[package]]
3475
3393
name = "zerovec"
3476
-
version = "0.11.2"
3394
+
version = "0.11.5"
3477
3395
source = "registry+https://github.com/rust-lang/crates.io-index"
3478
-
checksum = "4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428"
3396
+
checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002"
3479
3397
dependencies = [
3480
3398
"yoke",
3481
3399
"zerofrom",
···
3484
3402
3485
3403
[[package]]
3486
3404
name = "zerovec-derive"
3487
-
version = "0.11.1"
3405
+
version = "0.11.2"
3488
3406
source = "registry+https://github.com/rust-lang/crates.io-index"
3489
-
checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f"
3407
+
checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3"
3490
3408
dependencies = [
3491
3409
"proc-macro2",
3492
3410
"quote",
3493
-
"syn",
3411
+
"syn 2.0.109",
3494
3412
]
3495
3413
3496
3414
[[package]]
···
3513
3431
3514
3432
[[package]]
3515
3433
name = "zstd-sys"
3516
-
version = "2.0.15+zstd.1.5.7"
3434
+
version = "2.0.16+zstd.1.5.7"
3517
3435
source = "registry+https://github.com/rust-lang/crates.io-index"
3518
-
checksum = "eb81183ddd97d0c74cedf1d50d85c8d08c1b8b68ee863bdee9e706eedba1a237"
3436
+
checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748"
3519
3437
dependencies = [
3520
3438
"cc",
3521
3439
"pkg-config",
+26
-20
Cargo.toml
···
1
1
[workspace]
2
2
members = [
3
3
"crates/atproto-client",
4
+
"crates/atproto-extras",
4
5
"crates/atproto-identity",
5
6
"crates/atproto-jetstream",
6
7
"crates/atproto-oauth-aip",
7
8
"crates/atproto-oauth-axum",
8
9
"crates/atproto-oauth",
9
10
"crates/atproto-record",
11
+
"crates/atproto-tap",
10
12
"crates/atproto-xrpcs-helloworld",
11
13
"crates/atproto-xrpcs",
12
14
"crates/atproto-lexicon",
···
24
26
categories = ["command-line-utilities", "web-programming"]
25
27
26
28
[workspace.dependencies]
29
+
atproto-attestation = { version = "0.13.0", path = "crates/atproto-attestation" }
27
30
atproto-client = { version = "0.13.0", path = "crates/atproto-client" }
31
+
atproto-extras = { version = "0.13.0", path = "crates/atproto-extras" }
28
32
atproto-identity = { version = "0.13.0", path = "crates/atproto-identity" }
33
+
atproto-jetstream = { version = "0.13.0", path = "crates/atproto-jetstream" }
29
34
atproto-oauth = { version = "0.13.0", path = "crates/atproto-oauth" }
30
-
atproto-oauth-axum = { version = "0.13.0", path = "crates/atproto-oauth-axum" }
31
35
atproto-oauth-aip = { version = "0.13.0", path = "crates/atproto-oauth-aip" }
36
+
atproto-oauth-axum = { version = "0.13.0", path = "crates/atproto-oauth-axum" }
32
37
atproto-record = { version = "0.13.0", path = "crates/atproto-record" }
38
+
atproto-tap = { version = "0.13.0", path = "crates/atproto-tap" }
33
39
atproto-xrpcs = { version = "0.13.0", path = "crates/atproto-xrpcs" }
34
-
atproto-jetstream = { version = "0.13.0", path = "crates/atproto-jetstream" }
35
-
atproto-attestation = { version = "0.13.0", path = "crates/atproto-attestation" }
36
40
41
+
ammonia = "4.0"
37
42
anyhow = "1.0"
38
-
async-trait = "0.1.88"
39
-
base64 = "0.22.1"
40
-
chrono = {version = "0.4.41", default-features = false, features = ["std", "now"]}
43
+
async-trait = "0.1"
44
+
base64 = "0.22"
45
+
chrono = {version = "0.4", default-features = false, features = ["std", "now"]}
41
46
clap = { version = "4.5", features = ["derive", "env"] }
42
-
ecdsa = { version = "0.16.9", features = ["std"] }
43
-
elliptic-curve = { version = "0.13.8", features = ["jwk", "serde"] }
47
+
ecdsa = { version = "0.16", features = ["std"] }
48
+
elliptic-curve = { version = "0.13", features = ["jwk", "serde"] }
44
49
futures = "0.3"
45
50
hickory-resolver = { version = "0.25" }
46
-
http = "1.3.1"
47
-
k256 = "0.13.4"
51
+
http = "1.3"
52
+
k256 = "0.13"
48
53
lru = "0.12"
49
-
multibase = "0.9.1"
50
-
p256 = "0.13.2"
51
-
p384 = "0.13.0"
54
+
multibase = "0.9"
55
+
p256 = "0.13"
56
+
p384 = "0.13"
52
57
rand = "0.8"
58
+
regex = "1.11"
53
59
reqwest = { version = "0.12", default-features = false, features = ["charset", "http2", "system-proxy", "json", "rustls-tls"] }
54
-
reqwest-chain = "1.0.0"
55
-
reqwest-middleware = { version = "0.4.2", features = ["json", "multipart"]}
60
+
reqwest-chain = "1.0"
61
+
reqwest-middleware = { version = "0.4", features = ["json", "multipart"]}
56
62
rpassword = "7.3"
57
63
secrecy = { version = "0.10", features = ["serde"] }
58
64
serde = { version = "1.0", features = ["derive"] }
59
-
serde_ipld_dagcbor = "0.6.3"
60
-
serde_json = "1.0"
61
-
sha2 = "0.10.9"
65
+
serde_ipld_dagcbor = "0.6"
66
+
serde_json = { version = "1.0", features = ["unbounded_depth"] }
67
+
sha2 = "0.10"
62
68
thiserror = "2.0"
63
69
tokio = { version = "1.41", features = ["macros", "rt", "rt-multi-thread"] }
64
70
tokio-websockets = { version = "0.11.4", features = ["client", "rustls-native-roots", "rand", "ring"] }
65
71
tokio-util = "0.7"
66
72
tracing = { version = "0.1", features = ["async-await"] }
67
-
ulid = "1.2.1"
73
+
ulid = "1.2"
68
74
zstd = "0.13"
69
75
url = "2.5"
70
76
urlencoding = "2.1"
71
77
72
-
zeroize = { version = "1.8.1", features = ["zeroize_derive"] }
78
+
zeroize = { version = "1.8", features = ["zeroize_derive"] }
73
79
74
80
[workspace.lints.rust]
75
81
unsafe_code = "forbid"
+16
-8
README.md
···
88
88
89
89
```rust
90
90
use atproto_identity::key::{identify_key, to_public};
91
-
use atproto_attestation::{create_inline_attestation, verify_all_signatures, VerificationStatus};
91
+
use atproto_attestation::{
92
+
create_inline_attestation, verify_all_signatures, VerificationStatus,
93
+
input::{AnyInput, PhantomSignature}
94
+
};
92
95
use serde_json::json;
93
96
94
97
#[tokio::main]
···
96
99
let private_key = identify_key("did:key:zQ3shNzMp4oaaQ1gQRzCxMGXFrSW3NEM1M9T6KCY9eA7HhyEA")?;
97
100
let public_key = to_public(&private_key)?;
98
101
let key_reference = format!("{}", &public_key);
102
+
let repository_did = "did:plc:repo123";
99
103
100
104
let record = json!({
101
105
"$type": "app.bsky.feed.post",
···
110
114
"issuedAt": "2024-01-01T00:00:00.000Z"
111
115
});
112
116
113
-
let signed_record =
114
-
create_inline_attestation(&record, &sig_metadata, &private_key)?;
117
+
let signed_record = create_inline_attestation::<PhantomSignature, PhantomSignature>(
118
+
AnyInput::Json(record),
119
+
AnyInput::Json(sig_metadata),
120
+
repository_did,
121
+
&private_key
122
+
)?;
115
123
116
-
let reports = verify_all_signatures(&signed_record, None).await?;
124
+
let reports = verify_all_signatures(&signed_record, repository_did, None).await?;
117
125
assert!(reports.iter().all(|report| matches!(report.status, VerificationStatus::Valid { .. })));
118
126
119
127
Ok(())
···
123
131
### XRPC Service
124
132
125
133
```rust
126
-
use atproto_xrpcs::authorization::ResolvingAuthorization;
134
+
use atproto_xrpcs::authorization::Authorization;
127
135
use axum::{Json, Router, extract::Query, routing::get};
128
136
use serde::Deserialize;
129
137
use serde_json::json;
···
135
143
136
144
async fn handle_hello(
137
145
params: Query<HelloParams>,
138
-
authorization: Option<ResolvingAuthorization>,
146
+
authorization: Option<Authorization>,
139
147
) -> Json<serde_json::Value> {
140
148
let subject = params.subject.as_deref().unwrap_or("World");
141
-
149
+
142
150
let message = if let Some(auth) = authorization {
143
151
format!("Hello, authenticated {}! (caller: {})", subject, auth.subject())
144
152
} else {
145
153
format!("Hello, {}!", subject)
146
154
};
147
-
155
+
148
156
Json(json!({ "message": message }))
149
157
}
150
158
+2
-1
crates/atproto-attestation/Cargo.toml
···
34
34
anyhow.workspace = true
35
35
base64.workspace = true
36
36
serde.workspace = true
37
-
serde_json.workspace = true
37
+
serde_json = {workspace = true, features = ["preserve_order"]}
38
38
serde_ipld_dagcbor.workspace = true
39
39
sha2.workspace = true
40
40
thiserror.workspace = true
···
52
52
53
53
[dev-dependencies]
54
54
async-trait = "0.1"
55
+
chrono = { workspace = true }
55
56
tokio = { workspace = true, features = ["macros", "rt"] }
56
57
57
58
[features]
+180
-203
crates/atproto-attestation/README.md
···
1
1
# atproto-attestation
2
2
3
-
Utilities for preparing, signing, and verifying AT Protocol record attestations using the CID-first workflow.
3
+
Utilities for creating and verifying AT Protocol record attestations using the CID-first workflow.
4
4
5
5
## Overview
6
6
7
7
A Rust library implementing the CID-first attestation specification for AT Protocol records. This crate provides cryptographic signature creation and verification for records, supporting both inline attestations (signatures embedded directly in records) and remote attestations (separate proof records with strongRef references).
8
8
9
-
The attestation workflow ensures deterministic signing payloads by:
10
-
1. Preparing records with `$sig` metadata
9
+
The attestation workflow ensures deterministic signing payloads and prevents replay attacks by:
10
+
1. Automatically preparing records with `$sig` metadata containing `$type` and `repository` fields
11
11
2. Generating content identifiers (CIDs) using DAG-CBOR serialization
12
-
3. Signing CID bytes with elliptic curve cryptography
13
-
4. Embedding or referencing signatures in records
14
-
5. Verifying signatures against resolved public keys
12
+
3. Signing CID bytes with elliptic curve cryptography (for inline attestations)
13
+
4. Normalizing signatures to low-S form to prevent malleability attacks
14
+
5. Embedding signatures or creating proof records with strongRef references
15
+
16
+
**Critical Security Feature**: The `repository` field in `$sig` metadata binds attestations to specific repositories, preventing replay attacks where an attacker might attempt to clone records from one repository into their own.
15
17
16
18
## Features
17
19
···
19
21
- **Remote attestations**: Create separate proof records with CID-based strongRef references
20
22
- **CID-first workflow**: Deterministic signing based on content identifiers
21
23
- **Multi-curve support**: Full support for P-256, P-384, and K-256 elliptic curves
22
-
- **Signature normalization**: Automatic low-S normalization for ECDSA signatures
23
-
- **Key resolution**: Resolve verification keys from DID documents or did:key identifiers
24
-
- **Flexible verification**: Verify individual signatures or all signatures in a record
25
-
- **Structured reporting**: Detailed verification reports with success/failure status
24
+
- **Signature normalization**: Automatic low-S normalization for ECDSA signatures to prevent malleability
25
+
- **Flexible input types**: Accept records as JSON strings, JSON values, or typed lexicons
26
+
- **Repository binding**: Automatic prevention of replay attacks
26
27
27
28
## CLI Tools
28
29
···
38
39
Inline attestations embed the signature bytes directly in the record:
39
40
40
41
```rust
41
-
use atproto_identity::key::{identify_key, to_public};
42
-
use atproto_attestation::create_inline_attestation;
42
+
use atproto_identity::key::{generate_key, to_public, KeyType};
43
+
use atproto_attestation::{create_inline_attestation, input::{AnyInput, PhantomSignature}};
43
44
use serde_json::json;
44
45
45
-
#[tokio::main]
46
-
async fn main() -> anyhow::Result<()> {
47
-
// Parse the signing key from a did:key
48
-
let private_key = identify_key("did:key:zQ3sh...")?;
46
+
fn main() -> anyhow::Result<()> {
47
+
// Generate a signing key
48
+
let private_key = generate_key(KeyType::K256Private)?;
49
49
let public_key = to_public(&private_key)?;
50
50
let key_reference = format!("{}", &public_key);
51
51
···
56
56
"createdAt": "2024-01-01T00:00:00.000Z"
57
57
});
58
58
59
-
// Attestation metadata (required fields: $type, key)
59
+
// Repository housing this record (for replay attack prevention)
60
+
let repository_did = "did:plc:repo123";
61
+
62
+
// Attestation metadata (required: $type and key for inline attestations)
63
+
// Note: repository field is automatically added during CID generation
60
64
let sig_metadata = json!({
61
65
"$type": "com.example.inlineSignature",
62
66
"key": &key_reference,
···
64
68
"issuedAt": "2024-01-01T00:00:00.000Z"
65
69
});
66
70
67
-
// Create inline attestation
68
-
let signed_record = create_inline_attestation(&record, &sig_metadata, &private_key)?;
71
+
// Create inline attestation (repository_did is bound into the CID)
72
+
// Signature is automatically normalized to low-S form
73
+
let signed_record = create_inline_attestation::<PhantomSignature, PhantomSignature>(
74
+
AnyInput::Json(record),
75
+
AnyInput::Json(sig_metadata),
76
+
repository_did,
77
+
&private_key
78
+
)?;
69
79
70
80
println!("{}", serde_json::to_string_pretty(&signed_record)?);
71
81
···
86
96
"key": "did:key:zQ3sh...",
87
97
"issuer": "did:plc:issuer123",
88
98
"issuedAt": "2024-01-01T00:00:00.000Z",
99
+
"cid": "bafyrei...",
89
100
"signature": {
90
-
"$bytes": "base64-encoded-signature-bytes"
101
+
"$bytes": "base64-encoded-normalized-signature-bytes"
91
102
}
92
103
}
93
104
]
···
99
110
Remote attestations create a separate proof record that must be stored in a repository:
100
111
101
112
```rust
102
-
use atproto_attestation::{create_remote_attestation, create_remote_attestation_reference};
113
+
use atproto_attestation::{create_remote_attestation, input::{AnyInput, PhantomSignature}};
103
114
use serde_json::json;
104
115
105
-
let record = json!({
106
-
"$type": "app.bsky.feed.post",
107
-
"text": "Hello world!"
108
-
});
109
-
110
-
let metadata = json!({
111
-
"$type": "com.example.attestation",
112
-
"issuer": "did:plc:issuer123",
113
-
"purpose": "verification"
114
-
});
115
-
116
-
// Create the proof record (contains the CID)
117
-
let proof_record = create_remote_attestation(&record, &metadata)?;
118
-
119
-
// Create the source record with strongRef
120
-
let repository_did = "did:plc:repo123";
121
-
let attested_record = create_remote_attestation_reference(
122
-
&record,
123
-
&proof_record,
124
-
repository_did
125
-
)?;
126
-
127
-
// The proof_record should be stored in the repository
128
-
// The attested_record contains the strongRef reference
129
-
```
130
-
131
-
### Verifying Signatures
116
+
fn main() -> anyhow::Result<()> {
117
+
let record = json!({
118
+
"$type": "app.bsky.feed.post",
119
+
"text": "Hello world!"
120
+
});
132
121
133
-
Verify signatures embedded in records:
122
+
// Repository housing the original record (for replay attack prevention)
123
+
let repository_did = "did:plc:repo123";
134
124
135
-
```rust
136
-
use atproto_attestation::{verify_all_signatures, VerificationStatus};
125
+
// DID of the entity creating the attestation (will store the proof record)
126
+
let attestor_did = "did:plc:attestor456";
137
127
138
-
#[tokio::main]
139
-
async fn main() -> anyhow::Result<()> {
140
-
// Signed record with signatures array
141
-
let signed_record = /* ... */;
128
+
let metadata = json!({
129
+
"$type": "com.example.attestation",
130
+
"issuer": "did:plc:issuer123",
131
+
"purpose": "verification"
132
+
});
142
133
143
-
// Verify all signatures (remote attestations will be unverified)
144
-
let reports = verify_all_signatures(&signed_record, None).await?;
134
+
// Create both the attested record and proof record in one call
135
+
// Returns: (attested_record_with_strongRef, proof_record)
136
+
let (attested_record, proof_record) = create_remote_attestation::<PhantomSignature, PhantomSignature>(
137
+
AnyInput::Json(record),
138
+
AnyInput::Json(metadata),
139
+
repository_did, // Repository housing the original record
140
+
attestor_did // Repository that will store the proof record
141
+
)?;
145
142
146
-
for report in reports {
147
-
match report.status {
148
-
VerificationStatus::Valid { cid } => {
149
-
println!("✓ Signature {} is valid (CID: {})", report.index, cid);
150
-
}
151
-
VerificationStatus::Invalid { error } => {
152
-
println!("✗ Signature {} is invalid: {}", report.index, error);
153
-
}
154
-
VerificationStatus::Unverified { reason } => {
155
-
println!("? Signature {} unverified: {}", report.index, reason);
156
-
}
157
-
}
158
-
}
143
+
// The proof_record should be stored in the attestor's repository
144
+
// The attested_record contains the strongRef reference
145
+
println!("Proof record:\n{}", serde_json::to_string_pretty(&proof_record)?);
146
+
println!("Attested record:\n{}", serde_json::to_string_pretty(&attested_record)?);
159
147
160
148
Ok(())
161
149
}
162
150
```
163
151
164
-
### Verifying with Custom Key Resolver
152
+
### Verifying Signatures
165
153
166
-
For signatures that reference DID document keys (not did:key), provide a key resolver:
154
+
Verify all signatures in a record:
167
155
168
156
```rust
169
-
use atproto_attestation::verify_all_signatures;
157
+
use atproto_attestation::{verify_record, input::AnyInput};
170
158
use atproto_identity::key::IdentityDocumentKeyResolver;
171
-
use atproto_identity::resolve::{HickoryDnsResolver, InnerIdentityResolver};
172
-
use std::sync::Arc;
159
+
use atproto_client::record_resolver::HttpRecordResolver;
173
160
174
161
#[tokio::main]
175
162
async fn main() -> anyhow::Result<()> {
176
-
let http_client = reqwest::Client::new();
177
-
let dns_resolver = HickoryDnsResolver::create_resolver(&[]);
178
-
179
-
// Create identity and key resolvers
180
-
let identity_resolver = Arc::new(InnerIdentityResolver {
181
-
http_client: http_client.clone(),
182
-
dns_resolver: Arc::new(dns_resolver),
183
-
plc_hostname: "plc.directory".to_string(),
184
-
});
185
-
let key_resolver = IdentityDocumentKeyResolver::new(identity_resolver);
186
-
163
+
// Signed record with signatures array
187
164
let signed_record = /* ... */;
188
165
189
-
// Verify with key resolver for DID document keys
190
-
let reports = verify_all_signatures(&signed_record, Some(&key_resolver)).await?;
191
-
192
-
Ok(())
193
-
}
194
-
```
195
-
196
-
### Verifying Remote Attestations
166
+
// The repository DID where this record is stored
167
+
// CRITICAL: This must match the repository used during signing to prevent replay attacks
168
+
let repository_did = "did:plc:repo123";
197
169
198
-
To verify remote attestations (strongRef), use `verify_all_signatures_with_resolver` and provide a `RecordResolver` that can fetch proof records:
170
+
// Create resolvers for key and record fetching
171
+
let key_resolver = /* ... */; // IdentityDocumentKeyResolver
172
+
let record_resolver = HttpRecordResolver::new(/* ... */);
199
173
200
-
```rust
201
-
use atproto_attestation::verify_all_signatures_with_resolver;
202
-
use atproto_client::record_resolver::RecordResolver;
203
-
use atproto_identity::resolve::{HickoryDnsResolver, InnerIdentityResolver};
204
-
use atproto_identity::traits::IdentityResolver;
205
-
use std::sync::Arc;
174
+
// Verify all signatures with repository validation
175
+
verify_record(
176
+
AnyInput::Json(signed_record),
177
+
repository_did,
178
+
key_resolver,
179
+
record_resolver
180
+
).await?;
206
181
207
-
// Custom record resolver that resolves DIDs to find PDS endpoints
208
-
struct MyRecordResolver {
209
-
http_client: reqwest::Client,
210
-
identity_resolver: InnerIdentityResolver,
211
-
}
212
-
213
-
#[async_trait::async_trait]
214
-
impl RecordResolver for MyRecordResolver {
215
-
async fn resolve<T>(&self, aturi: &str) -> anyhow::Result<T>
216
-
where
217
-
T: serde::de::DeserializeOwned + Send,
218
-
{
219
-
// Parse AT-URI, resolve DID to PDS, fetch record
220
-
// See atproto-attestation-verify.rs for full implementation
221
-
todo!()
222
-
}
223
-
}
224
-
225
-
#[tokio::main]
226
-
async fn main() -> anyhow::Result<()> {
227
-
let http_client = reqwest::Client::new();
228
-
let dns_resolver = HickoryDnsResolver::create_resolver(&[]);
229
-
230
-
let identity_resolver = InnerIdentityResolver {
231
-
http_client: http_client.clone(),
232
-
dns_resolver: Arc::new(dns_resolver),
233
-
plc_hostname: "plc.directory".to_string(),
234
-
};
235
-
236
-
let record_resolver = MyRecordResolver {
237
-
http_client,
238
-
identity_resolver,
239
-
};
240
-
241
-
let signed_record = /* ... */;
242
-
243
-
// Verify all signatures including remote attestations
244
-
let reports = verify_all_signatures_with_resolver(&signed_record, None, Some(&record_resolver)).await?;
182
+
println!("✓ All signatures verified successfully");
245
183
246
184
Ok(())
247
185
}
248
186
```
249
187
250
-
### Manual CID Generation
251
-
252
-
For advanced use cases, manually generate CIDs:
253
-
254
-
```rust
255
-
use atproto_attestation::{prepare_signing_record, create_cid};
256
-
use serde_json::json;
257
-
258
-
let record = json!({
259
-
"$type": "app.bsky.feed.post",
260
-
"text": "Manual CID generation"
261
-
});
262
-
263
-
let metadata = json!({
264
-
"$type": "com.example.signature",
265
-
"key": "did:key:z..."
266
-
});
267
-
268
-
// Prepare the signing record (adds $sig, removes signatures)
269
-
let signing_record = prepare_signing_record(&record, &metadata)?;
270
-
271
-
// Generate the CID
272
-
let cid = create_cid(&signing_record)?;
273
-
println!("CID: {}", cid);
274
-
```
275
-
276
188
## Command Line Usage
277
189
278
190
### Signing Records
···
313
225
metadata.json
314
226
315
227
# This outputs TWO JSON objects:
316
-
# 1. Proof record (store this in the repository)
228
+
# 1. Proof record (store this in the attestor's repository)
317
229
# 2. Source record with strongRef attestation
318
230
```
319
231
320
232
### Verifying Signatures
321
233
322
-
#### Verify All Signatures in a Record
323
-
324
234
```bash
325
235
# Verify all signatures in a record from file
326
236
cargo run --package atproto-attestation --features clap,tokio --bin atproto-attestation-verify -- \
327
-
./signed_record.json
328
-
329
-
# Verify all signatures from AT-URI (fetches from PDS)
330
-
cargo run --package atproto-attestation --features clap,tokio --bin atproto-attestation-verify -- \
331
-
at://did:plc:abc123/app.bsky.feed.post/3k2k4j5h6g
237
+
./signed_record.json \
238
+
did:plc:repo123
332
239
333
240
# Verify from stdin
334
-
cat signed_record.json | cargo run --package atproto-attestation --features clap,tokio --bin atproto-attestation-verify -- -
241
+
cat signed_record.json | cargo run --package atproto-attestation --features clap,tokio --bin atproto-attestation-verify -- \
242
+
- \
243
+
did:plc:repo123
335
244
336
245
# Verify from inline JSON
337
246
cargo run --package atproto-attestation --features clap,tokio --bin atproto-attestation-verify -- \
338
-
'{"$type":"app.bsky.feed.post","text":"Hello","signatures":[...]}'
247
+
'{"$type":"app.bsky.feed.post","text":"Hello","signatures":[...]}' \
248
+
did:plc:repo123
339
249
340
-
# Output shows each signature status:
341
-
# ✓ Signature 0 valid (key: did:key:zQ3sh...pb3) [CID: bafyrei...]
342
-
# ? Signature 1 unverified: Remote attestations require fetching the proof record via strongRef.
343
-
#
344
-
# Summary: 2 total, 1 valid
250
+
# Verify specific attestation against record
251
+
cargo run --package atproto-attestation --features clap,tokio --bin atproto-attestation-verify -- \
252
+
./record.json \
253
+
did:plc:repo123 \
254
+
./attestation.json
345
255
```
346
256
347
-
#### Verify Specific Attestation Against Record
257
+
## Public API
258
+
259
+
The crate exposes the following public functions:
260
+
261
+
### Attestation Creation
262
+
263
+
- **`create_inline_attestation`**: Create a signed record with embedded signature
264
+
- Automatically normalizes signatures to low-S form
265
+
- Binds attestation to repository DID
266
+
- Returns signed record with `signatures` array
267
+
268
+
- **`create_remote_attestation`**: Create separate proof record and strongRef
269
+
- Returns tuple of (attested_record, proof_record)
270
+
- Proof record must be stored in attestor's repository
271
+
272
+
### CID Generation
273
+
274
+
- **`create_cid`**: Generate CID for a record with `$sig` metadata
275
+
- **`create_dagbor_cid`**: Generate CID for any serializable data
276
+
- **`create_attestation_cid`**: High-level CID generation with automatic `$sig` preparation
348
277
349
-
```bash
350
-
# Verify a specific attestation record (both from files)
351
-
cargo run --package atproto-attestation --features clap,tokio --bin atproto-attestation-verify -- \
352
-
./record.json \
353
-
./attestation.json
278
+
### Signature Operations
354
279
355
-
# Verify attestation from AT-URI against local record
356
-
cargo run --package atproto-attestation --features clap,tokio --bin atproto-attestation-verify -- \
357
-
./record.json \
358
-
at://did:plc:xyz/com.example.attestation/abc123
280
+
- **`normalize_signature`**: Normalize raw signature bytes to low-S form
281
+
- Prevents signature malleability attacks
282
+
- Supports P-256, P-384, and K-256 curves
359
283
360
-
# On success, outputs:
361
-
# OK
362
-
# CID: bafyrei...
363
-
```
284
+
### Verification
285
+
286
+
- **`verify_record`**: Verify all signatures in a record
287
+
- Validates repository binding
288
+
- Supports both inline and remote attestations
289
+
- Requires key and record resolvers
290
+
291
+
### Input Types
292
+
293
+
- **`AnyInput`**: Flexible input enum supporting:
294
+
- `String`: JSON string to parse
295
+
- `Json`: serde_json::Value
296
+
- `TypedLexicon`: Strongly-typed lexicon records
364
297
365
298
## Attestation Specification
366
299
···
368
301
369
302
1. **Deterministic signing**: Records are serialized to DAG-CBOR with `$sig` metadata, producing consistent CIDs
370
303
2. **Content addressing**: Signatures are over CID bytes, not the full record
371
-
3. **Flexible metadata**: Custom fields in `$sig` are preserved and included in the CID calculation
372
-
4. **Signature normalization**: ECDSA signatures are normalized to low-S form
373
-
5. **Multiple attestations**: Records can have multiple signatures in the `signatures` array
304
+
3. **Repository binding**: Every attestation is bound to a specific repository DID to prevent replay attacks
305
+
4. **Signature normalization**: ECDSA signatures are normalized to low-S form to prevent malleability
306
+
5. **Flexible metadata**: Custom fields in `$sig` are preserved and included in the CID calculation
307
+
6. **Multiple attestations**: Records can have multiple signatures in the `signatures` array
374
308
375
309
### Signature Structure
376
310
···
380
314
"$type": "com.example.signature",
381
315
"key": "did:key:z...",
382
316
"issuer": "did:plc:...",
317
+
"cid": "bafyrei...",
383
318
"signature": {
384
-
"$bytes": "base64-signature"
319
+
"$bytes": "base64-normalized-signature"
385
320
}
386
321
}
387
322
```
···
405
340
- `SignatureCreationFailed`: Key signing operation failed
406
341
- `SignatureValidationFailed`: Signature verification failed
407
342
- `SignatureNotNormalized`: ECDSA signature not in low-S form
343
+
- `SignatureLengthInvalid`: Signature bytes have incorrect length
408
344
- `KeyResolutionFailed`: Could not resolve verification key
409
345
- `UnsupportedKeyType`: Key type not supported for signing/verification
346
+
- `RemoteAttestationFetchFailed`: Failed to fetch remote proof record
410
347
411
348
## Security Considerations
412
349
413
-
- **Key management**: Private keys should be protected and never logged or transmitted
414
-
- **Signature normalization**: All signatures are normalized to low-S form to prevent malleability
415
-
- **CID verification**: Always verify signatures against the reconstructed CID, not the record content
416
-
- **Key resolution**: Use trusted key resolvers to prevent key substitution attacks
417
-
- **Timestamp validation**: Check `issuedAt` and `expiry` fields if present in metadata
350
+
### Repository Binding and Replay Attack Prevention
351
+
352
+
The most critical security feature of this attestation framework is **repository binding**. Every attestation includes the repository DID in the `$sig` metadata during CID generation, which:
353
+
354
+
- **Prevents replay attacks**: An attacker cannot copy a signed record from one repository to another because the signature is bound to the original repository DID
355
+
- **Ensures context integrity**: Attestations are only valid within their intended repository context
356
+
- **Automatic enforcement**: The library automatically adds the repository field during CID generation
357
+
358
+
**Important**: Always verify signatures with the correct repository DID. Verifying with a different repository DID will (correctly) fail validation, as this would indicate a potential replay attack.
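
To make the binding concrete, here is a rough, illustrative sketch of the payload that gets serialized to DAG-CBOR and hashed. The field names mirror the inline-attestation example above; the crate injects `repository` automatically during CID generation, so this payload is never built by hand:

```rust
use serde_json::{Value, json};

/// Illustrative only: the crate assembles this signing payload itself.
fn example_signing_payload() -> Value {
    json!({
        "$type": "app.bsky.feed.post",
        "text": "Hello world!",
        "$sig": {
            "$type": "com.example.inlineSignature",
            "key": "did:key:z...",
            // Injected automatically; binds the attestation to one repository.
            "repository": "did:plc:repo123"
        }
    })
}
```

Because `repository` is part of the hashed payload, the same record signed for a different repository produces a different CID, and therefore a different, non-transferable signature.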
359
+
360
+
### Signature Normalization
361
+
362
+
All ECDSA signatures are automatically normalized to low-S form to prevent signature malleability attacks (a minimal sketch of the technique follows this list):
363
+
364
+
- The library enforces low-S normalization during signature creation
365
+
- Verification accepts only normalized signatures
366
+
- This prevents attackers from creating alternate valid signatures for the same content
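
The snippet below is a minimal sketch of the general low-S technique using the `k256` crate (already a workspace dependency); it is not this crate's internal code, which exposes the same idea through `normalize_signature`:

```rust
use k256::ecdsa::{Signature, SigningKey, signature::Signer};

/// Sign `message` and force the resulting ECDSA signature into low-S form.
fn sign_low_s(key: &SigningKey, message: &[u8]) -> Signature {
    let sig: Signature = key.sign(message);
    // `normalize_s` returns Some(_) only when S was in the upper half of the
    // curve order; otherwise the signature is already in low-S form.
    sig.normalize_s().unwrap_or(sig)
}
```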
367
+
368
+
### Key Management Best Practices
369
+
370
+
- **Private keys**: Never log, transmit, or store private keys in plaintext
371
+
- **Key rotation**: Plan for key rotation by using verification method references that can be updated in DID documents
372
+
- **Key types**: The library supports P-256, P-384, and K-256 elliptic curves
373
+
- **did:key**: For testing and simple use cases, did:key identifiers provide self-contained key references
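For quick experiments, a throwaway key pair can be generated and referenced directly. A sketch (it assumes the public key's string form is a `did:key:` identifier, as the examples elsewhere in this change use):

```rust
use atproto_identity::key::{KeyType, generate_key, identify_key, to_public};

fn key_setup() -> Result<(), Box<dyn std::error::Error>> {
    // Generate a fresh K-256 signing key (P-256 and P-384 work the same way).
    let private_key = generate_key(KeyType::K256Private)?;
    let public_key = to_public(&private_key)?;

    // The public key's string form is what goes into the `key` field of the
    // attestation metadata; verifiers resolve it back into key data.
    let key_reference = format!("{}", public_key);
    let _resolved = identify_key(&key_reference)?;
    Ok(())
}
```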
374
+
375
+
### CID Verification
376
+
377
+
- **Always verify against CIDs**: Signatures are over CID bytes, not the original record content
378
+
- **Deterministic generation**: The same record with the same `$sig` metadata always produces the same CID
379
+
- **Content integrity**: Any modification to the record will produce a different CID and invalidate signatures
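A sketch of the determinism guarantee (placeholder record and DIDs; `create_attestation_cid` is the helper this change uses internally before signing):

```rust
use atproto_attestation::cid::create_attestation_cid;
use atproto_attestation::input::AnyInput;
use serde_json::json;

fn cid_determinism() -> Result<(), Box<dyn std::error::Error>> {
    let record = json!({"$type": "app.bsky.feed.post", "text": "Hello!"});
    let metadata = json!({"$type": "com.example.signature"});
    let repo = "did:plc:repo123";

    // Same record + metadata + repository => same CID.
    let a = create_attestation_cid(AnyInput::Serialize(record.clone()), AnyInput::Serialize(metadata.clone()), repo)?;
    let b = create_attestation_cid(AnyInput::Serialize(record.clone()), AnyInput::Serialize(metadata.clone()), repo)?;
    assert_eq!(a, b);

    // Any edit to the record yields a different CID, so signatures made over
    // the old CID no longer verify against the edited content.
    let edited = json!({"$type": "app.bsky.feed.post", "text": "Hello?"});
    let c = create_attestation_cid(AnyInput::Serialize(edited), AnyInput::Serialize(metadata), repo)?;
    assert_ne!(a, c);
    Ok(())
}
```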
380
+
381
+
### Metadata Validation
382
+
383
+
When creating attestations:
384
+
385
+
- The `$type` field is always required in metadata to scope the attestation
386
+
- The `repository` field is automatically added and must not be manually set
387
+
- Custom metadata fields are preserved and included in CID calculation
388
+
- The `cid` field is automatically added to inline attestation metadata
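A sketch of these rules in practice (the `purpose` field is a hypothetical custom field):

```rust
use atproto_attestation::{create_inline_attestation, input::AnyInput};
use atproto_identity::key::{KeyType, generate_key, to_public};
use serde_json::json;

fn metadata_rules() -> Result<(), Box<dyn std::error::Error>> {
    let private_key = generate_key(KeyType::K256Private)?;
    let public_key = to_public(&private_key)?;

    // `$type` is required; custom fields ride along and are covered by the CID.
    // Do not set `repository`, `cid`, or `signature` yourself.
    let metadata = json!({
        "$type": "com.example.signature",
        "key": format!("{}", public_key),
        "purpose": "moderation-review"
    });
    let record = json!({"$type": "app.example.record", "body": "Sign me"});

    let signed = create_inline_attestation(
        AnyInput::Serialize(record),
        AnyInput::Serialize(metadata),
        "did:plc:repo123",
        &private_key,
    )?;

    let sig = &signed["signatures"][0];
    assert_eq!(sig["purpose"], "moderation-review"); // custom field preserved
    assert!(sig.get("cid").is_some());                // added automatically
    assert!(sig.get("repository").is_none());         // bound into the CID, not stored
    Ok(())
}
```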
389
+
390
+
### Remote Attestation Considerations
391
+
392
+
- **Proof record storage**: Store proof records in the attestor's repository with appropriate access controls
393
+
- **CID matching**: Verify that the CID in the proof record matches the computed CID of the attested content
394
+
- **Record resolution**: Use trusted record resolvers when fetching remote proof records
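A sketch of the two-sided flow (placeholder DIDs; the AT-URI record key is illustrative):

```rust
use atproto_attestation::{append_remote_attestation, create_remote_attestation, input::AnyInput};
use serde_json::json;

fn remote_flow() -> Result<(), Box<dyn std::error::Error>> {
    let record = json!({"$type": "app.example.record", "body": "remote attestation"});
    let metadata = json!({"$type": "com.example.attestation"});

    // Produce the proof record (stored in the attestor's repository) and the
    // attested record carrying a strongRef to it.
    let (_attested_record, proof_record) = create_remote_attestation(
        AnyInput::Serialize(record.clone()),
        AnyInput::Serialize(metadata),
        "did:plc:sourceRepo",
        "did:plc:attestorRepo",
    )?;

    // Later, a holder of the bare record can validate the stored proof record and
    // attach the strongRef themselves; the CID check fails if the proof was made
    // for a different record or repository.
    let reattested = append_remote_attestation(
        AnyInput::Serialize(record),
        AnyInput::Serialize(proof_record),
        "did:plc:sourceRepo",
        "at://did:plc:attestorRepo/com.example.attestation/placeholder-rkey",
    )?;
    assert!(reattested["signatures"].is_array());
    Ok(())
}
```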
418
395
419
396
## License
420
397
+787
crates/atproto-attestation/src/attestation.rs
+787
crates/atproto-attestation/src/attestation.rs
···
1
+
//! Core attestation creation functions.
2
+
//!
3
+
//! This module provides functions for creating inline and remote attestations
4
+
//! and attaching attestation references.
5
+
6
+
use crate::cid::{create_attestation_cid, create_dagbor_cid};
7
+
use crate::errors::AttestationError;
8
+
pub use crate::input::AnyInput;
9
+
use crate::signature::normalize_signature;
10
+
use crate::utils::BASE64;
11
+
use atproto_identity::key::{KeyData, KeyResolver, sign, validate};
12
+
use atproto_record::lexicon::com::atproto::repo::STRONG_REF_NSID;
13
+
use atproto_record::tid::Tid;
14
+
use base64::Engine;
15
+
use serde::Serialize;
16
+
use serde_json::{Value, json, Map};
17
+
use std::convert::TryInto;
18
+
19
+
/// Helper function to extract and validate signatures array from a record
20
+
fn extract_signatures(record_obj: &Map<String, Value>) -> Result<Vec<Value>, AttestationError> {
21
+
match record_obj.get("signatures") {
22
+
Some(value) => value
23
+
.as_array()
24
+
.ok_or(AttestationError::SignaturesFieldInvalid)
25
+
.cloned(),
26
+
None => Ok(vec![]),
27
+
}
28
+
}
29
+
30
+
/// Helper function to append a signature to a record and return the modified record
31
+
fn append_signature_to_record(
32
+
mut record_obj: Map<String, Value>,
33
+
signature: Value,
34
+
) -> Result<Value, AttestationError> {
35
+
let mut signatures = extract_signatures(&record_obj)?;
36
+
signatures.push(signature);
37
+
38
+
record_obj.insert(
39
+
"signatures".to_string(),
40
+
Value::Array(signatures),
41
+
);
42
+
43
+
Ok(Value::Object(record_obj))
44
+
}
45
+
46
+
/// Creates a cryptographic signature for a record with attestation metadata.
47
+
///
48
+
/// This is a low-level function that generates just the signature bytes without
49
+
/// embedding them in a record structure. It's useful when you need to create
50
+
/// signatures independently or for custom attestation workflows.
51
+
///
52
+
/// The signature is created over a content CID that binds together:
53
+
/// - The record content
54
+
/// - The attestation metadata
55
+
/// - The repository DID (to prevent replay attacks)
56
+
///
57
+
/// # Arguments
58
+
///
59
+
/// * `record_input` - The record to sign (as AnyInput: String, Json, or TypedLexicon)
60
+
/// * `attestation_input` - The attestation metadata (as AnyInput)
61
+
/// * `repository` - The repository DID where this record will be stored
62
+
/// * `private_key_data` - The private key to use for signing
63
+
///
64
+
/// # Returns
65
+
///
66
+
/// A byte vector containing the normalized ECDSA signature that can be verified
67
+
/// against the same content CID.
68
+
///
69
+
/// # Errors
70
+
///
71
+
/// Returns an error if:
72
+
/// - CID generation fails
73
+
/// - Signature creation fails
74
+
/// - Signature normalization fails
75
+
///
76
+
/// # Example
77
+
///
78
+
/// ```rust
79
+
/// use atproto_attestation::{create_signature, input::AnyInput};
80
+
/// use atproto_identity::key::{KeyType, generate_key};
81
+
/// use serde_json::json;
82
+
///
83
+
/// # fn example() -> Result<(), Box<dyn std::error::Error>> {
84
+
/// let private_key = generate_key(KeyType::K256Private)?;
85
+
///
86
+
/// let record = json!({"$type": "app.bsky.feed.post", "text": "Hello!"});
87
+
/// let metadata = json!({"$type": "com.example.signature"});
88
+
///
89
+
/// let signature_bytes = create_signature(
90
+
/// AnyInput::Serialize(record),
91
+
/// AnyInput::Serialize(metadata),
92
+
/// "did:plc:repo123",
93
+
/// &private_key
94
+
/// )?;
95
+
///
96
+
/// // signature_bytes can now be base64-encoded or used as needed
97
+
/// # Ok(())
98
+
/// # }
99
+
/// ```
100
+
pub fn create_signature<R, M>(
101
+
record_input: AnyInput<R>,
102
+
attestation_input: AnyInput<M>,
103
+
repository: &str,
104
+
private_key_data: &KeyData,
105
+
) -> Result<Vec<u8>, AttestationError>
106
+
where
107
+
R: Serialize + Clone,
108
+
M: Serialize + Clone,
109
+
{
110
+
// Step 1: Create a content CID from record + attestation + repository
111
+
let content_cid = create_attestation_cid(record_input, attestation_input, repository)?;
112
+
113
+
// Step 2: Sign the CID bytes
114
+
let raw_signature = sign(private_key_data, &content_cid.to_bytes())
115
+
.map_err(|error| AttestationError::SignatureCreationFailed { error })?;
116
+
117
+
// Step 3: Normalize the signature to ensure consistent format
118
+
normalize_signature(raw_signature, private_key_data.key_type())
119
+
}
120
+
121
+
/// Creates a remote attestation with both the attested record and proof record.
122
+
///
123
+
/// This is the recommended way to create remote attestations. It generates both:
124
+
/// 1. The attested record with a strongRef in the signatures array
125
+
/// 2. The proof record containing the CID to be stored in the attestation repository
126
+
///
127
+
/// The CID is generated with the repository DID included in the `$sig` metadata
128
+
/// to bind the attestation to a specific repository and prevent replay attacks.
129
+
///
130
+
/// # Arguments
131
+
///
132
+
/// * `record_input` - The record to attest (as AnyInput: String, Json, or TypedLexicon)
133
+
/// * `metadata_input` - The attestation metadata (must include `$type`)
134
+
/// * `repository` - The DID of the repository housing the original record
135
+
/// * `attestation_repository` - The DID of the repository that will store the proof record
136
+
///
137
+
/// # Returns
138
+
///
139
+
/// A tuple containing:
140
+
/// * `(attested_record, proof_record)` - Both records needed for remote attestation
141
+
///
142
+
/// # Errors
143
+
///
144
+
/// Returns an error if:
145
+
/// - The record or metadata are not valid JSON objects
146
+
/// - The metadata is missing the required `$type` field
147
+
/// - CID generation fails
148
+
///
149
+
/// # Example
150
+
///
151
+
/// ```rust
152
+
/// use atproto_attestation::{create_remote_attestation, input::AnyInput};
153
+
/// use serde_json::json;
154
+
///
155
+
/// # fn example() -> Result<(), Box<dyn std::error::Error>> {
156
+
/// let record = json!({"$type": "app.bsky.feed.post", "text": "Hello!"});
157
+
/// let metadata = json!({"$type": "com.example.attestation"});
158
+
///
159
+
/// let (attested_record, proof_record) = create_remote_attestation(
160
+
/// AnyInput::Serialize(record),
161
+
/// AnyInput::Serialize(metadata),
162
+
/// "did:plc:repo123", // Source repository
163
+
/// "did:plc:attestor456" // Attestation repository
164
+
/// )?;
165
+
/// # Ok(())
166
+
/// # }
167
+
/// ```
168
+
pub fn create_remote_attestation<
169
+
R: Serialize + Clone,
170
+
M: Serialize + Clone,
171
+
>(
172
+
record_input: AnyInput<R>,
173
+
metadata_input: AnyInput<M>,
174
+
repository: &str,
175
+
attestation_repository: &str,
176
+
) -> Result<(Value, Value), AttestationError> {
177
+
// Step 1: Create a content CID
178
+
let content_cid =
179
+
create_attestation_cid(record_input.clone(), metadata_input.clone(), repository)?;
180
+
181
+
let record_obj: Map<String, Value> = record_input
182
+
.try_into()
183
+
.map_err(|_| AttestationError::RecordMustBeObject)?;
184
+
185
+
// Step 2: Create the remote attestation record
186
+
let (remote_attestation_record, remote_attestation_type) = {
187
+
let mut metadata_obj: Map<String, Value> = metadata_input
188
+
.try_into()
189
+
.map_err(|_| AttestationError::MetadataMustBeObject)?;
190
+
191
+
// Extract the type from metadata before modifying it
192
+
let remote_type = metadata_obj
193
+
.get("$type")
194
+
.and_then(Value::as_str)
195
+
.ok_or(AttestationError::MetadataMissingType)?
196
+
.to_string();
197
+
198
+
metadata_obj.insert("cid".to_string(), Value::String(content_cid.to_string()));
199
+
(serde_json::Value::Object(metadata_obj), remote_type)
200
+
};
201
+
202
+
// Step 3: Create the remote attestation reference (type, AT-URI, and CID)
203
+
let remote_attestation_record_key = Tid::new();
204
+
let remote_attestation_cid = create_dagbor_cid(&remote_attestation_record)?;
205
+
206
+
let attestation_reference = json!({
207
+
"$type": STRONG_REF_NSID,
208
+
"uri": format!("at://{attestation_repository}/{remote_attestation_type}/{remote_attestation_record_key}"),
209
+
"cid": remote_attestation_cid.to_string()
210
+
});
211
+
212
+
// Step 4: Append the attestation reference to the record "signatures" array
213
+
let attested_record = append_signature_to_record(record_obj, attestation_reference)?;
214
+
215
+
Ok((attested_record, remote_attestation_record))
216
+
}
217
+
218
+
/// Creates an inline attestation with signature embedded in the record.
219
+
///
220
+
/// This is the v2 API that supports flexible input types (String, Json, TypedLexicon)
221
+
/// and provides a more streamlined interface for creating inline attestations.
222
+
///
223
+
/// The CID is generated with the repository DID included in the `$sig` metadata
224
+
/// to bind the attestation to a specific repository and prevent replay attacks.
225
+
///
226
+
/// # Arguments
227
+
///
228
+
/// * `record_input` - The record to sign (as AnyInput: String, Json, or TypedLexicon)
229
+
/// * `metadata_input` - The attestation metadata (must include `$type` and `key`)
230
+
/// * `repository` - The DID of the repository that will house this record
231
+
/// * `private_key_data` - The private key to use for signing
232
+
///
233
+
/// # Returns
234
+
///
235
+
/// The record with an inline attestation embedded in the signatures array
236
+
///
237
+
/// # Errors
238
+
///
239
+
/// Returns an error if:
240
+
/// - The record or metadata are not valid JSON objects
241
+
/// - The metadata is missing required fields
242
+
/// - Signature creation fails
243
+
/// - CID generation fails
244
+
///
245
+
/// # Example
246
+
///
247
+
/// ```rust
248
+
/// use atproto_attestation::{create_inline_attestation, input::AnyInput};
249
+
/// use atproto_identity::key::{KeyType, generate_key, to_public};
250
+
/// use serde_json::json;
251
+
///
252
+
/// # fn example() -> Result<(), Box<dyn std::error::Error>> {
253
+
/// let private_key = generate_key(KeyType::K256Private)?;
254
+
/// let public_key = to_public(&private_key)?;
255
+
///
256
+
/// let record = json!({"$type": "app.bsky.feed.post", "text": "Hello!"});
257
+
/// let metadata = json!({
258
+
/// "$type": "com.example.signature",
259
+
/// "key": format!("{}", public_key)
260
+
/// });
261
+
///
262
+
/// let signed_record = create_inline_attestation(
263
+
/// AnyInput::Serialize(record),
264
+
/// AnyInput::Serialize(metadata),
265
+
/// "did:plc:repo123",
266
+
/// &private_key
267
+
/// )?;
268
+
/// # Ok(())
269
+
/// # }
270
+
/// ```
271
+
pub fn create_inline_attestation<
272
+
R: Serialize + Clone,
273
+
M: Serialize + Clone,
274
+
>(
275
+
record_input: AnyInput<R>,
276
+
metadata_input: AnyInput<M>,
277
+
repository: &str,
278
+
private_key_data: &KeyData,
279
+
) -> Result<Value, AttestationError> {
280
+
// Step 1: Create a content CID
281
+
let content_cid =
282
+
create_attestation_cid(record_input.clone(), metadata_input.clone(), repository)?;
283
+
284
+
let record_obj: Map<String, Value> = record_input
285
+
.try_into()
286
+
.map_err(|_| AttestationError::RecordMustBeObject)?;
287
+
288
+
// Step 2: Create the inline attestation record
289
+
let inline_attestation_record = {
290
+
let mut metadata_obj: Map<String, Value> = metadata_input
291
+
.try_into()
292
+
.map_err(|_| AttestationError::MetadataMustBeObject)?;
293
+
294
+
metadata_obj.insert("cid".to_string(), Value::String(content_cid.to_string()));
295
+
296
+
let raw_signature = sign(private_key_data, &content_cid.to_bytes())
297
+
.map_err(|error| AttestationError::SignatureCreationFailed { error })?;
298
+
let signature_bytes = normalize_signature(raw_signature, private_key_data.key_type())?;
299
+
300
+
metadata_obj.insert(
301
+
"signature".to_string(),
302
+
json!({"$bytes": BASE64.encode(signature_bytes)}),
303
+
);
304
+
305
+
serde_json::Value::Object(metadata_obj)
306
+
};
307
+
308
+
// Step 3: Append the inline attestation to the record's "signatures" array
309
+
append_signature_to_record(record_obj, inline_attestation_record)
310
+
}
311
+
312
+
/// Validates an existing proof record and appends a strongRef to it in the record's signatures array.
313
+
///
314
+
/// This function validates that an existing proof record (attestation metadata with CID)
315
+
/// is valid for the given record and repository, then creates and appends a strongRef to it.
316
+
///
317
+
/// Unlike `create_remote_attestation` which creates a new proof record, this function validates
318
+
/// an existing proof record that was already created and stored in an attestor's repository.
319
+
///
320
+
/// # Security
321
+
///
322
+
/// - **Repository binding validation**: Ensures the attestation was created for the specified repository DID
323
+
/// - **CID verification**: Validates the proof record's CID matches the computed CID
324
+
/// - **Content validation**: Ensures the proof record content matches what should be attested
325
+
///
326
+
/// # Workflow
327
+
///
328
+
/// 1. Compute the content CID from record + metadata + repository (same as attestation creation)
329
+
/// 2. Extract the claimed CID from the proof record metadata
330
+
/// 3. Verify the claimed CID matches the computed CID
331
+
/// 4. Extract the proof record's storage CID (DAG-CBOR CID of the full proof record)
332
+
/// 5. Create a strongRef with the AT-URI and proof record CID
333
+
/// 6. Append the strongRef to the record's signatures array
334
+
///
335
+
/// # Arguments
336
+
///
337
+
/// * `record_input` - The record to append the attestation to (as AnyInput)
338
+
/// * `metadata_input` - The proof record metadata (must include `$type`, `cid`, and attestation fields)
339
+
/// * `repository` - The repository DID where the source record is stored (for replay attack prevention)
340
+
/// * `attestation_uri` - The AT-URI where the proof record is stored (e.g., "at://did:plc:attestor/com.example.attestation/abc123")
341
+
///
342
+
/// # Returns
343
+
///
344
+
/// The modified record with the strongRef appended to its `signatures` array
345
+
///
346
+
/// # Errors
347
+
///
348
+
/// Returns an error if:
349
+
/// - The record or metadata are not valid JSON objects
350
+
/// - The metadata is missing the `cid` field
351
+
/// - The computed CID doesn't match the claimed CID in the metadata
352
+
/// - The metadata is missing required attestation fields
353
+
///
354
+
/// # Type Parameters
355
+
///
356
+
/// * `R` - The record type (must implement Serialize + LexiconType + PartialEq + Clone)
357
+
/// * `A` - The attestation type (must implement Serialize + LexiconType + PartialEq + Clone)
358
+
///
359
+
/// # Example
360
+
///
361
+
/// ```ignore
362
+
/// use atproto_attestation::{append_remote_attestation, input::AnyInput};
363
+
/// use serde_json::json;
364
+
///
365
+
/// let record = json!({
366
+
/// "$type": "app.bsky.feed.post",
367
+
/// "text": "Hello world!"
368
+
/// });
369
+
///
370
+
/// // This is the proof record that was previously created and stored
371
+
/// let proof_metadata = json!({
372
+
/// "$type": "com.example.attestation",
373
+
/// "issuer": "did:plc:issuer",
374
+
/// "cid": "bafyrei...", // Content CID computed from record+metadata+repository
375
+
/// // ... other attestation fields
376
+
/// });
377
+
///
378
+
/// let repository_did = "did:plc:repo123";
379
+
/// let attestation_uri = "at://did:plc:attestor456/com.example.attestation/abc123";
380
+
///
381
+
/// let signed_record = append_remote_attestation(
382
+
/// AnyInput::Serialize(record),
383
+
/// AnyInput::Serialize(proof_metadata),
384
+
/// repository_did,
385
+
/// attestation_uri
386
+
/// )?;
387
+
/// ```
388
+
pub fn append_remote_attestation<R, A>(
389
+
record_input: AnyInput<R>,
390
+
metadata_input: AnyInput<A>,
391
+
repository: &str,
392
+
attestation_uri: &str,
393
+
) -> Result<Value, AttestationError>
394
+
where
395
+
R: Serialize + Clone,
396
+
A: Serialize + Clone,
397
+
{
398
+
// Step 1: Compute the content CID (same as create_remote_attestation)
399
+
let content_cid =
400
+
create_attestation_cid(record_input.clone(), metadata_input.clone(), repository)?;
401
+
402
+
// Step 2: Convert metadata to JSON and extract the claimed CID
403
+
let metadata_obj: Map<String, Value> = metadata_input
404
+
.try_into()
405
+
.map_err(|_| AttestationError::MetadataMustBeObject)?;
406
+
407
+
let claimed_cid = metadata_obj
408
+
.get("cid")
409
+
.and_then(Value::as_str)
410
+
.filter(|value| !value.is_empty())
411
+
.ok_or(AttestationError::SignatureMissingField {
412
+
field: "cid".to_string(),
413
+
})?;
414
+
415
+
// Step 3: Verify the claimed CID matches the computed content CID
416
+
if content_cid.to_string() != claimed_cid {
417
+
return Err(AttestationError::RemoteAttestationCidMismatch {
418
+
expected: claimed_cid.to_string(),
419
+
actual: content_cid.to_string(),
420
+
});
421
+
}
422
+
423
+
// Step 4: Compute the proof record's DAG-CBOR CID
424
+
let proof_record_cid = create_dagbor_cid(&metadata_obj)?;
425
+
426
+
// Step 5: Create the strongRef
427
+
let strongref = json!({
428
+
"$type": STRONG_REF_NSID,
429
+
"uri": attestation_uri,
430
+
"cid": proof_record_cid.to_string()
431
+
});
432
+
433
+
// Step 6: Convert record to JSON object and append the strongRef
434
+
let record_obj: Map<String, Value> = record_input
435
+
.try_into()
436
+
.map_err(|_| AttestationError::RecordMustBeObject)?;
437
+
438
+
append_signature_to_record(record_obj, strongref)
439
+
}
440
+
441
+
/// Validates an inline attestation and appends it to a record's signatures array.
442
+
///
443
+
/// Inline attestations contain cryptographic signatures embedded directly in the record.
444
+
/// This function validates the attestation signature against the record and repository,
445
+
/// then appends it if validation succeeds.
446
+
///
447
+
/// # Security
448
+
///
449
+
/// - **Repository binding validation**: Ensures the attestation was created for the specified repository DID
450
+
/// - **CID verification**: Validates the CID in the attestation matches the computed CID
451
+
/// - **Signature verification**: Cryptographically verifies the ECDSA signature
452
+
/// - **Key resolution**: Resolves and validates the verification key
453
+
///
454
+
/// # Arguments
455
+
///
456
+
/// * `record_input` - The record to append the attestation to (as AnyInput)
457
+
/// * `attestation_input` - The inline attestation to validate and append (as AnyInput)
458
+
/// * `repository` - The repository DID where this record is stored (for replay attack prevention)
459
+
/// * `key_resolver` - Resolver for looking up verification keys from DIDs
460
+
///
461
+
/// # Returns
462
+
///
463
+
/// The modified record with the validated attestation appended to its `signatures` array
464
+
///
465
+
/// # Errors
466
+
///
467
+
/// Returns an error if:
468
+
/// - The record or attestation are not valid JSON objects
469
+
/// - The attestation is missing required fields (`$type`, `key`, `cid`, `signature`)
470
+
/// - The attestation CID doesn't match the computed CID for the record
471
+
/// - The signature bytes are invalid or not base64-encoded
472
+
/// - Signature verification fails
473
+
/// - Key resolution fails
474
+
///
475
+
/// # Type Parameters
476
+
///
477
+
/// * `R` - The record type (must implement Serialize + LexiconType + PartialEq + Clone)
478
+
/// * `A` - The attestation type (must implement Serialize + LexiconType + PartialEq + Clone)
479
+
/// * `KR` - The key resolver type (must implement KeyResolver)
480
+
///
481
+
/// # Example
482
+
///
483
+
/// ```ignore
484
+
/// use atproto_attestation::{append_inline_attestation, input::AnyInput};
485
+
/// use serde_json::json;
486
+
///
487
+
/// let record = json!({
488
+
/// "$type": "app.bsky.feed.post",
489
+
/// "text": "Hello world!"
490
+
/// });
491
+
///
492
+
/// let attestation = json!({
493
+
/// "$type": "com.example.inlineSignature",
494
+
/// "key": "did:key:zQ3sh...",
495
+
/// "cid": "bafyrei...",
496
+
/// "signature": {"$bytes": "base64-signature-bytes"}
497
+
/// });
498
+
///
499
+
/// let repository_did = "did:plc:repo123";
500
+
/// let key_resolver = /* your KeyResolver implementation */;
501
+
///
502
+
/// let signed_record = append_inline_attestation(
503
+
/// AnyInput::Serialize(record),
504
+
/// AnyInput::Serialize(attestation),
505
+
/// repository_did,
506
+
/// key_resolver
507
+
/// ).await?;
508
+
/// ```
509
+
pub async fn append_inline_attestation<R, A, KR>(
510
+
record_input: AnyInput<R>,
511
+
attestation_input: AnyInput<A>,
512
+
repository: &str,
513
+
key_resolver: KR,
514
+
) -> Result<Value, AttestationError>
515
+
where
516
+
R: Serialize + Clone,
517
+
A: Serialize + Clone,
518
+
KR: KeyResolver,
519
+
{
520
+
// Step 1: Create a content CID
521
+
let content_cid =
522
+
create_attestation_cid(record_input.clone(), attestation_input.clone(), repository)?;
523
+
524
+
let record_obj: Map<String, Value> = record_input
525
+
.try_into()
526
+
.map_err(|_| AttestationError::RecordMustBeObject)?;
527
+
528
+
let attestation_obj: Map<String, Value> = attestation_input
529
+
.try_into()
530
+
.map_err(|_| AttestationError::MetadataMustBeObject)?;
531
+
532
+
let key = attestation_obj
533
+
.get("key")
534
+
.and_then(Value::as_str)
535
+
.filter(|value| !value.is_empty())
536
+
.ok_or(AttestationError::SignatureMissingField {
537
+
field: "key".to_string(),
538
+
})?;
539
+
let key_data =
540
+
key_resolver
541
+
.resolve(key)
542
+
.await
543
+
.map_err(|error| AttestationError::KeyResolutionFailed {
544
+
key: key.to_string(),
545
+
error,
546
+
})?;
547
+
548
+
let signature_bytes = attestation_obj
549
+
.get("signature")
550
+
.and_then(Value::as_object)
551
+
.and_then(|object| object.get("$bytes"))
552
+
.and_then(Value::as_str)
553
+
.ok_or(AttestationError::SignatureBytesFormatInvalid)?;
554
+
555
+
let signature_bytes = BASE64
556
+
.decode(signature_bytes)
557
+
.map_err(|error| AttestationError::SignatureDecodingFailed { error })?;
558
+
559
+
let computed_cid_bytes = content_cid.to_bytes();
560
+
561
+
validate(&key_data, &signature_bytes, &computed_cid_bytes)
562
+
.map_err(|error| AttestationError::SignatureValidationFailed { error })?;
563
+
564
+
// Step 6: Append the validated attestation to the signatures array
565
+
append_signature_to_record(record_obj, json!(attestation_obj))
566
+
}
567
+
568
+
#[cfg(test)]
569
+
mod tests {
570
+
use super::*;
571
+
use atproto_identity::key::{KeyType, generate_key, to_public};
572
+
use serde_json::json;
573
+
574
+
#[test]
575
+
fn create_remote_attestation_produces_both_records() -> Result<(), Box<dyn std::error::Error>> {
576
+
577
+
let record = json!({
578
+
"$type": "app.example.record",
579
+
"body": "remote attestation"
580
+
});
581
+
582
+
let metadata = json!({
583
+
"$type": "com.example.attestation"
584
+
});
585
+
586
+
let source_repository = "did:plc:test";
587
+
let attestation_repository = "did:plc:attestor";
588
+
589
+
let (attested_record, proof_record) =
590
+
create_remote_attestation(
591
+
AnyInput::Serialize(record.clone()),
592
+
AnyInput::Serialize(metadata),
593
+
source_repository,
594
+
attestation_repository,
595
+
)?;
596
+
597
+
// Verify proof record structure
598
+
let proof_object = proof_record.as_object().expect("proof should be an object");
599
+
assert_eq!(
600
+
proof_object.get("$type").and_then(Value::as_str),
601
+
Some("com.example.attestation")
602
+
);
603
+
assert!(
604
+
proof_object.get("cid").and_then(Value::as_str).is_some(),
605
+
"proof must contain a cid"
606
+
);
607
+
assert!(
608
+
proof_object.get("repository").is_none(),
609
+
"repository should not be in final proof record"
610
+
);
611
+
612
+
// Verify attested record has strongRef
613
+
let attested_object = attested_record
614
+
.as_object()
615
+
.expect("attested record should be an object");
616
+
let signatures = attested_object
617
+
.get("signatures")
618
+
.and_then(Value::as_array)
619
+
.expect("attested record should have signatures array");
620
+
assert_eq!(signatures.len(), 1, "should have one signature");
621
+
622
+
let signature = &signatures[0];
623
+
assert_eq!(
624
+
signature.get("$type").and_then(Value::as_str),
625
+
Some("com.atproto.repo.strongRef"),
626
+
"signature should be a strongRef"
627
+
);
628
+
assert!(
629
+
signature.get("uri").and_then(Value::as_str).is_some(),
630
+
"strongRef must contain a uri"
631
+
);
632
+
assert!(
633
+
signature.get("cid").and_then(Value::as_str).is_some(),
634
+
"strongRef must contain a cid"
635
+
);
636
+
637
+
Ok(())
638
+
}
639
+
640
+
#[tokio::test]
641
+
async fn create_inline_attestation_full_workflow() -> Result<(), Box<dyn std::error::Error>> {
642
+
let private_key = generate_key(KeyType::K256Private)?;
643
+
let public_key = to_public(&private_key)?;
644
+
let key_reference = format!("{}", &public_key);
645
+
let repository_did = "did:plc:testrepository123";
646
+
647
+
let base_record = json!({
648
+
"$type": "app.example.record",
649
+
"body": "Sign me"
650
+
});
651
+
652
+
let sig_metadata = json!({
653
+
"$type": "com.example.inlineSignature",
654
+
"key": key_reference,
655
+
"purpose": "unit-test"
656
+
});
657
+
658
+
let signed = create_inline_attestation(
659
+
AnyInput::Serialize(base_record),
660
+
AnyInput::Serialize(sig_metadata),
661
+
repository_did,
662
+
&private_key,
663
+
)?;
664
+
665
+
// Verify structure
666
+
let signatures = signed
667
+
.get("signatures")
668
+
.and_then(Value::as_array)
669
+
.expect("should have signatures array");
670
+
assert_eq!(signatures.len(), 1);
671
+
672
+
let sig = &signatures[0];
673
+
assert_eq!(
674
+
sig.get("$type").and_then(Value::as_str),
675
+
Some("com.example.inlineSignature")
676
+
);
677
+
assert!(sig.get("signature").is_some());
678
+
assert!(sig.get("key").is_some());
679
+
assert!(sig.get("repository").is_none()); // Should not be in final signature
680
+
681
+
Ok(())
682
+
}
683
+
684
+
#[test]
685
+
fn create_signature_returns_valid_bytes() -> Result<(), Box<dyn std::error::Error>> {
686
+
let private_key = generate_key(KeyType::K256Private)?;
687
+
let public_key = to_public(&private_key)?;
688
+
689
+
let record = json!({
690
+
"$type": "app.example.record",
691
+
"body": "Test signature creation"
692
+
});
693
+
694
+
let metadata = json!({
695
+
"$type": "com.example.signature",
696
+
"key": format!("{}", public_key)
697
+
});
698
+
699
+
let repository = "did:plc:test123";
700
+
701
+
// Create signature
702
+
let signature_bytes = create_signature(
703
+
AnyInput::Serialize(record.clone()),
704
+
AnyInput::Serialize(metadata.clone()),
705
+
repository,
706
+
&private_key,
707
+
)?;
708
+
709
+
// Verify signature is not empty
710
+
assert!(!signature_bytes.is_empty(), "Signature bytes should not be empty");
711
+
712
+
// Verify signature length is reasonable for ECDSA (typically 64-72 bytes for secp256k1)
713
+
assert!(
714
+
signature_bytes.len() >= 64 && signature_bytes.len() <= 73,
715
+
"Signature length should be between 64 and 73 bytes, got {}",
716
+
signature_bytes.len()
717
+
);
718
+
719
+
// Verify we can validate the signature
720
+
let content_cid = create_attestation_cid(
721
+
AnyInput::Serialize(record),
722
+
AnyInput::Serialize(metadata),
723
+
repository,
724
+
)?;
725
+
726
+
validate(&public_key, &signature_bytes, &content_cid.to_bytes())?;
727
+
728
+
Ok(())
729
+
}
730
+
731
+
#[test]
732
+
fn create_signature_different_inputs_produce_different_signatures() -> Result<(), Box<dyn std::error::Error>> {
733
+
let private_key = generate_key(KeyType::K256Private)?;
734
+
735
+
let record1 = json!({"$type": "app.example.record", "body": "First message"});
736
+
let record2 = json!({"$type": "app.example.record", "body": "Second message"});
737
+
let metadata = json!({"$type": "com.example.signature"});
738
+
let repository = "did:plc:test123";
739
+
740
+
let sig1 = create_signature(
741
+
AnyInput::Serialize(record1),
742
+
AnyInput::Serialize(metadata.clone()),
743
+
repository,
744
+
&private_key,
745
+
)?;
746
+
747
+
let sig2 = create_signature(
748
+
AnyInput::Serialize(record2),
749
+
AnyInput::Serialize(metadata),
750
+
repository,
751
+
&private_key,
752
+
)?;
753
+
754
+
assert_ne!(sig1, sig2, "Different records should produce different signatures");
755
+
756
+
Ok(())
757
+
}
758
+
759
+
#[test]
760
+
fn create_signature_different_repositories_produce_different_signatures() -> Result<(), Box<dyn std::error::Error>> {
761
+
let private_key = generate_key(KeyType::K256Private)?;
762
+
763
+
let record = json!({"$type": "app.example.record", "body": "Message"});
764
+
let metadata = json!({"$type": "com.example.signature"});
765
+
766
+
let sig1 = create_signature(
767
+
AnyInput::Serialize(record.clone()),
768
+
AnyInput::Serialize(metadata.clone()),
769
+
"did:plc:repo1",
770
+
&private_key,
771
+
)?;
772
+
773
+
let sig2 = create_signature(
774
+
AnyInput::Serialize(record),
775
+
AnyInput::Serialize(metadata),
776
+
"did:plc:repo2",
777
+
&private_key,
778
+
)?;
779
+
780
+
assert_ne!(
781
+
sig1, sig2,
782
+
"Different repository DIDs should produce different signatures"
783
+
);
784
+
785
+
Ok(())
786
+
}
787
+
}
+94
-38
crates/atproto-attestation/src/bin/atproto-attestation-sign.rs
+94
-38
crates/atproto-attestation/src/bin/atproto-attestation-sign.rs
···
8
8
//!
9
9
//! ### Remote Attestation
10
10
//! ```bash
11
-
//! atproto-attestation-sign remote <source_record> <repository_did> <metadata_record>
11
+
//! atproto-attestation-sign remote <source_repository_did> <source_record> <attestation_repository_did> <metadata_record>
12
12
//! ```
13
13
//!
14
14
//! ### Inline Attestation
15
15
//! ```bash
16
-
//! atproto-attestation-sign inline <source_record> <signing_key> <metadata_record>
16
+
//! atproto-attestation-sign inline <source_record> <repository_did> <signing_key> <metadata_record>
17
17
//! ```
18
18
//!
19
19
//! ## Arguments
20
20
//!
21
+
//! - `source_repository_did`: (Remote mode) DID of the repository housing the source record (prevents replay attacks)
21
22
//! - `source_record`: JSON string or path to JSON file containing the record being attested
22
-
//! - `repository_did`: (Remote mode) DID of the repository that will contain the remote attestation record
23
+
//! - `attestation_repository_did`: (Remote mode) DID of the repository where the attestation proof will be stored
24
+
//! - `repository_did`: (Inline mode) DID of the repository that will house the record (prevents replay attacks)
23
25
//! - `signing_key`: (Inline mode) Private key string (did:key format) used to sign the attestation
24
26
//! - `metadata_record`: JSON string or path to JSON file with attestation metadata used during CID creation
25
27
//!
···
28
30
//! ```bash
29
31
//! # Remote attestation - creates proof record and strongRef
30
32
//! atproto-attestation-sign remote \
33
+
//! did:plc:sourceRepo... \
31
34
//! record.json \
32
-
//! did:plc:xyz123... \
35
+
//! did:plc:attestationRepo... \
33
36
//! metadata.json
34
37
//!
35
38
//! # Inline attestation - embeds signature in record
36
39
//! atproto-attestation-sign inline \
37
40
//! record.json \
41
+
//! did:plc:xyz123... \
38
42
//! did:key:z42tv1pb3... \
39
43
//! '{"$type":"com.example.attestation","purpose":"demo"}'
40
44
//!
41
45
//! # Read from stdin
42
-
//! cat record.json | atproto-attestation-sign inline \
46
+
//! cat record.json | atproto-attestation-sign remote \
47
+
//! did:plc:sourceRepo... \
43
48
//! - \
44
-
//! did:key:z42tv1pb3... \
49
+
//! did:plc:attestationRepo... \
45
50
//! metadata.json
46
51
//! ```
47
52
48
53
use anyhow::{Context, Result, anyhow};
49
54
use atproto_attestation::{
50
-
create_inline_attestation, create_remote_attestation, create_remote_attestation_reference,
55
+
create_inline_attestation, create_remote_attestation,
56
+
input::AnyInput,
51
57
};
52
58
use atproto_identity::key::identify_key;
53
59
use clap::{Parser, Subcommand};
···
75
81
76
82
MODES:
77
83
remote Creates a separate proof record with strongRef reference
78
-
Syntax: remote <source_record> <repository_did> <metadata_record>
84
+
Syntax: remote <source_repository_did> <source_record> <attestation_repository_did> <metadata_record>
79
85
80
86
inline Embeds signature bytes directly in the record
81
-
Syntax: inline <source_record> <signing_key> <metadata_record>
87
+
Syntax: inline <source_record> <repository_did> <signing_key> <metadata_record>
82
88
83
89
ARGUMENTS:
84
-
source_record JSON string or file path to the record being attested
85
-
repository_did (Remote) DID of repository containing the attestation record
86
-
signing_key (Inline) Private key in did:key format for signing
87
-
metadata_record JSON string or file path with attestation metadata
90
+
source_repository_did (Remote) DID of repository housing the source record (for replay prevention)
91
+
source_record JSON string or file path to the record being attested
92
+
attestation_repository_did (Remote) DID of repository where attestation proof will be stored
93
+
repository_did (Inline) DID of repository that will house the record (for replay prevention)
94
+
signing_key (Inline) Private key in did:key format for signing
95
+
metadata_record JSON string or file path with attestation metadata
88
96
89
97
EXAMPLES:
90
98
# Remote attestation (creates proof record + strongRef):
91
99
atproto-attestation-sign remote \\
100
+
did:plc:sourceRepo... \\
92
101
record.json \\
93
-
did:plc:xyz123abc... \\
102
+
did:plc:attestationRepo... \\
94
103
metadata.json
95
104
96
105
# Inline attestation (embeds signature):
97
106
atproto-attestation-sign inline \\
98
107
record.json \\
108
+
did:plc:xyz123abc... \\
99
109
did:key:z42tv1pb3Dzog28Q1udyieg1YJP3x1Un5vraE1bttXeCDSpW \\
100
110
'{\"$type\":\"com.example.attestation\",\"purpose\":\"demo\"}'
101
111
102
112
# Read source record from stdin:
103
-
cat record.json | atproto-attestation-sign inline \\
113
+
cat record.json | atproto-attestation-sign remote \\
114
+
did:plc:sourceRepo... \\
104
115
- \\
105
-
did:key:z42tv1pb3... \\
116
+
did:plc:attestationRepo... \\
106
117
metadata.json
107
118
108
119
OUTPUT:
···
124
135
/// Create a remote attestation with separate proof record
125
136
///
126
137
/// Generates a proof record containing the CID and returns both the proof
127
-
/// record (to be stored in the repository) and the source record with a
128
-
/// strongRef attestation reference.
138
+
/// record (to be stored in the attestation repository) and the source record
139
+
/// with a strongRef attestation reference.
129
140
#[command(visible_alias = "r")]
130
141
Remote {
142
+
/// DID of the repository housing the source record (for replay attack prevention)
143
+
source_repository_did: String,
144
+
131
145
/// Source record JSON string or file path (use '-' for stdin)
132
146
source_record: String,
133
147
134
-
/// Repository DID that will contain the remote attestation record
135
-
repository_did: String,
148
+
/// DID of the repository where the attestation proof will be stored
149
+
attestation_repository_did: String,
136
150
137
151
/// Attestation metadata JSON string or file path
138
152
metadata_record: String,
···
147
161
/// Source record JSON string or file path (use '-' for stdin)
148
162
source_record: String,
149
163
164
+
/// Repository DID that will house the record (for replay attack prevention)
165
+
repository_did: String,
166
+
150
167
/// Private signing key in did:key format (e.g., did:key:z...)
151
168
signing_key: String,
152
169
···
161
178
162
179
match args.command {
163
180
Commands::Remote {
181
+
source_repository_did,
164
182
source_record,
165
-
repository_did,
183
+
attestation_repository_did,
166
184
metadata_record,
167
-
} => handle_remote_attestation(&source_record, &repository_did, &metadata_record)?,
185
+
} => handle_remote_attestation(
186
+
&source_record,
187
+
&source_repository_did,
188
+
&metadata_record,
189
+
&attestation_repository_did,
190
+
)?,
168
191
169
192
Commands::Inline {
170
193
source_record,
194
+
repository_did,
171
195
signing_key,
172
196
metadata_record,
173
-
} => handle_inline_attestation(&source_record, &signing_key, &metadata_record)?,
197
+
} => handle_inline_attestation(
198
+
&source_record,
199
+
&repository_did,
200
+
&signing_key,
201
+
&metadata_record,
202
+
)?,
174
203
}
175
204
176
205
Ok(())
···
180
209
///
181
210
/// Creates a proof record and appends a strongRef to the source record.
182
211
/// Outputs both the proof record and the updated source record.
212
+
///
213
+
/// - `source_repository_did`: Used for signature binding (prevents replay attacks)
214
+
/// - `attestation_repository_did`: Where the attestation proof record will be stored
183
215
fn handle_remote_attestation(
184
216
source_record: &str,
185
-
repository_did: &str,
217
+
source_repository_did: &str,
186
218
metadata_record: &str,
219
+
attestation_repository_did: &str,
187
220
) -> Result<()> {
188
221
// Load source record and metadata
189
222
let record_json = load_json_input(source_record)?;
···
198
231
return Err(anyhow!("Metadata record must be a JSON object"));
199
232
}
200
233
201
-
// Validate repository DID
202
-
if !repository_did.starts_with("did:") {
234
+
// Validate repository DIDs
235
+
if !source_repository_did.starts_with("did:") {
203
236
return Err(anyhow!(
204
-
"Repository DID must start with 'did:' prefix, got: {}",
205
-
repository_did
237
+
"Source repository DID must start with 'did:' prefix, got: {}",
238
+
source_repository_did
206
239
));
207
240
}
208
241
209
-
// Create the remote attestation proof record
210
-
let proof_record = create_remote_attestation(&record_json, &metadata_json)
211
-
.context("Failed to create remote attestation proof record")?;
242
+
if !attestation_repository_did.starts_with("did:") {
243
+
return Err(anyhow!(
244
+
"Attestation repository DID must start with 'did:' prefix, got: {}",
245
+
attestation_repository_did
246
+
));
247
+
}
212
248
213
-
// Create the source record with strongRef reference
214
-
let attested_record =
215
-
create_remote_attestation_reference(&record_json, &proof_record, repository_did)
216
-
.context("Failed to create remote attestation reference")?;
249
+
// Create the remote attestation using v2 API
250
+
// This creates both the attested record with strongRef and the proof record in one call
251
+
let (attested_record, proof_record) =
252
+
create_remote_attestation(
253
+
AnyInput::Serialize(record_json),
254
+
AnyInput::Serialize(metadata_json),
255
+
source_repository_did,
256
+
attestation_repository_did,
257
+
)
258
+
.context("Failed to create remote attestation")?;
217
259
218
260
// Output both records
219
261
println!("=== Proof Record (store in repository) ===");
···
231
273
/// Outputs the record with inline attestation.
232
274
fn handle_inline_attestation(
233
275
source_record: &str,
276
+
repository_did: &str,
234
277
signing_key: &str,
235
278
metadata_record: &str,
236
279
) -> Result<()> {
···
247
290
return Err(anyhow!("Metadata record must be a JSON object"));
248
291
}
249
292
293
+
// Validate repository DID
294
+
if !repository_did.starts_with("did:") {
295
+
return Err(anyhow!(
296
+
"Repository DID must start with 'did:' prefix, got: {}",
297
+
repository_did
298
+
));
299
+
}
300
+
250
301
// Parse the signing key
251
302
let key_data = identify_key(signing_key)
252
303
.with_context(|| format!("Failed to parse signing key: {}", signing_key))?;
253
304
254
-
// Create inline attestation
255
-
let signed_record = create_inline_attestation(&record_json, &metadata_json, &key_data)
256
-
.context("Failed to create inline attestation")?;
305
+
// Create inline attestation with repository binding using v2 API
306
+
let signed_record = create_inline_attestation(
307
+
AnyInput::Serialize(record_json),
308
+
AnyInput::Serialize(metadata_json),
309
+
repository_did,
310
+
&key_data,
311
+
)
312
+
.context("Failed to create inline attestation")?;
257
313
258
314
// Output the signed record
259
315
println!("{}", serde_json::to_string_pretty(&signed_record)?);
+63
-140
crates/atproto-attestation/src/bin/atproto-attestation-verify.rs
+63
-140
crates/atproto-attestation/src/bin/atproto-attestation-verify.rs
···
8
8
//!
9
9
//! ### Verify all signatures in a record
10
10
//! ```bash
11
-
//! atproto-attestation-verify <record>
11
+
//! atproto-attestation-verify <record> <repository_did>
12
12
//! ```
13
13
//!
14
14
//! ### Verify a specific attestation against a record
15
15
//! ```bash
16
-
//! atproto-attestation-verify <record> <attestation>
16
+
//! atproto-attestation-verify <record> <repository_did> <attestation>
17
17
//! ```
18
18
//!
19
19
//! ## Parameter Formats
···
27
27
//!
28
28
//! ```bash
29
29
//! # Verify all signatures in a record from file
30
-
//! atproto-attestation-verify ./signed_post.json
30
+
//! atproto-attestation-verify ./signed_post.json did:plc:repo123
31
31
//!
32
32
//! # Verify all signatures in a record from AT-URI
33
-
//! atproto-attestation-verify at://did:plc:abc123/app.bsky.feed.post/3k2k4j5h6g
33
+
//! atproto-attestation-verify at://did:plc:abc123/app.bsky.feed.post/3k2k4j5h6g did:plc:abc123
34
34
//!
35
35
//! # Verify specific attestation against a record (both from files)
36
-
//! atproto-attestation-verify ./record.json ./attestation.json
36
+
//! atproto-attestation-verify ./record.json did:plc:repo123 ./attestation.json
37
37
//!
38
38
//! # Verify specific attestation (from AT-URI) against record (from file)
39
-
//! atproto-attestation-verify ./record.json at://did:plc:xyz/com.example.attestation/abc123
39
+
//! atproto-attestation-verify ./record.json did:plc:repo123 at://did:plc:xyz/com.example.attestation/abc123
40
40
//!
41
41
//! # Read record from stdin, verify all signatures
42
-
//! cat signed.json | atproto-attestation-verify -
42
+
//! cat signed.json | atproto-attestation-verify - did:plc:repo123
43
43
//!
44
44
//! # Verify inline JSON
45
-
//! atproto-attestation-verify '{"$type":"app.bsky.feed.post","text":"Hello","signatures":[...]}'
45
+
//! atproto-attestation-verify '{"$type":"app.bsky.feed.post","text":"Hello","signatures":[...]}' did:plc:repo123
46
46
//! ```
47
47
48
48
use anyhow::{Context, Result, anyhow};
49
-
use atproto_attestation::{VerificationStatus, verify_all_signatures_with_resolver};
49
+
use atproto_attestation::AnyInput;
50
+
use atproto_identity::key::{KeyData, KeyResolver, identify_key};
50
51
use clap::Parser;
51
52
use serde_json::Value;
52
53
use std::{
···
60
61
/// Validates attestation signatures by reconstructing signed content and checking
61
62
/// ECDSA signatures against embedded public keys. Supports verifying all signatures
62
63
/// in a record or validating a specific attestation record.
64
+
///
65
+
/// The repository DID parameter is now REQUIRED to prevent replay attacks where
66
+
/// attestations might be copied to different repositories.
63
67
#[derive(Parser)]
64
68
#[command(
65
69
name = "atproto-attestation-verify",
···
69
73
A command-line tool for verifying cryptographic signatures of AT Protocol records.
70
74
71
75
USAGE:
72
-
atproto-attestation-verify <record> Verify all signatures in record
73
-
atproto-attestation-verify <record> <attestation> Verify specific attestation
76
+
atproto-attestation-verify <record> <repository_did> Verify all signatures
74
77
75
78
PARAMETER FORMATS:
76
79
Each parameter accepts JSON strings, file paths, or AT-URIs:
···
81
84
82
85
EXAMPLES:
83
86
# Verify all signatures in a record:
84
-
atproto-attestation-verify ./signed_post.json
85
-
atproto-attestation-verify at://did:plc:abc/app.bsky.feed.post/123
87
+
atproto-attestation-verify ./signed_post.json did:plc:repo123
88
+
atproto-attestation-verify at://did:plc:abc/app.bsky.feed.post/123 did:plc:abc
86
89
87
90
# Verify specific attestation:
88
-
atproto-attestation-verify ./record.json ./attestation.json
89
-
atproto-attestation-verify ./record.json at://did:plc:xyz/com.example.attestation/abc
91
+
atproto-attestation-verify ./record.json did:plc:repo123 ./attestation.json
92
+
atproto-attestation-verify ./record.json did:plc:repo123 at://did:plc:xyz/com.example.attestation/abc
90
93
91
94
# Read from stdin:
92
-
cat signed.json | atproto-attestation-verify -
95
+
cat signed.json | atproto-attestation-verify - did:plc:repo123
93
96
94
97
OUTPUT:
95
98
Single record mode: Reports each signature with ✓ (valid), ✗ (invalid), or ? (unverified)
···
105
108
/// Record to verify - JSON string, file path, AT-URI, or '-' for stdin
106
109
record: String,
107
110
111
+
/// Repository DID that houses the record (required for replay attack prevention)
112
+
repository_did: String,
113
+
108
114
/// Optional attestation record to verify against the record - JSON string, file path, or AT-URI
109
115
attestation: Option<String>,
110
116
}
111
117
118
+
/// A key resolver that supports `did:key:` identifiers directly.
119
+
///
120
+
/// This resolver handles key references that are encoded as `did:key:` strings,
121
+
/// parsing them to extract the cryptographic key data. For other DID methods,
122
+
/// it returns an error since those would require fetching DID documents.
123
+
struct DidKeyResolver {}
124
+
125
+
#[async_trait::async_trait]
126
+
impl KeyResolver for DidKeyResolver {
127
+
async fn resolve(&self, subject: &str) -> Result<KeyData> {
128
+
if subject.starts_with("did:key:") {
129
+
identify_key(subject)
130
+
.map_err(|e| anyhow!("Failed to parse did:key '{}': {}", subject, e))
131
+
} else {
132
+
Err(anyhow!(
133
+
"Subject '{}' is not a did:key: identifier. Only did:key: subjects are supported by this resolver.",
134
+
subject
135
+
))
136
+
}
137
+
}
138
+
}
139
+
112
140
#[tokio::main]
113
141
async fn main() -> Result<()> {
114
142
let args = Args::parse();
···
122
150
return Err(anyhow!("Record must be a JSON object"));
123
151
}
124
152
153
+
// Validate repository DID
154
+
if !args.repository_did.starts_with("did:") {
155
+
return Err(anyhow!(
156
+
"Repository DID must start with 'did:' prefix, got: {}",
157
+
args.repository_did
158
+
));
159
+
}
160
+
125
161
// Determine verification mode
126
-
match args.attestation {
127
-
None => {
128
-
// Mode 1: Verify all signatures in the record
129
-
verify_all_mode(&record).await
130
-
}
131
-
Some(attestation_input) => {
132
-
// Mode 2: Verify specific attestation against record
133
-
let attestation = load_input(&attestation_input, false)
134
-
.await
135
-
.context("Failed to load attestation")?;
136
-
137
-
if !attestation.is_object() {
138
-
return Err(anyhow!("Attestation must be a JSON object"));
139
-
}
140
-
141
-
verify_attestation_mode(&record, &attestation).await
142
-
}
143
-
}
162
+
verify_all_mode(&record, &args.repository_did).await
144
163
}
145
164
146
165
/// Mode 1: Verify all signatures contained in the record.
···
149
168
/// - ✓ Valid signature
150
169
/// - ✗ Invalid signature
151
170
/// - ? Unverified (e.g., remote attestations requiring proof record fetch)
152
-
async fn verify_all_mode(record: &Value) -> Result<()> {
171
+
async fn verify_all_mode(record: &Value, repository_did: &str) -> Result<()> {
153
172
// Create an identity resolver for fetching remote attestations
154
173
use atproto_identity::resolve::{HickoryDnsResolver, InnerIdentityResolver};
155
174
use std::sync::Arc;
···
169
188
identity_resolver,
170
189
};
171
190
172
-
let reports = verify_all_signatures_with_resolver(record, None, Some(&record_resolver))
173
-
.await
174
-
.context("Failed to verify signatures")?;
191
+
let key_resolver = DidKeyResolver {};
175
192
176
-
if reports.is_empty() {
177
-
return Err(anyhow!("No signatures found in record"));
178
-
}
179
-
180
-
let mut all_valid = true;
181
-
let mut has_errors = false;
182
-
183
-
for report in &reports {
184
-
match &report.status {
185
-
VerificationStatus::Valid { cid } => {
186
-
let key_info = report
187
-
.key
188
-
.as_deref()
189
-
.map(|k| format!(" (key: {})", truncate_did(k)))
190
-
.unwrap_or_default();
191
-
println!(
192
-
"โ Signature {} valid{} [CID: {}]",
193
-
report.index, key_info, cid
194
-
);
195
-
}
196
-
VerificationStatus::Invalid { error } => {
197
-
println!("โ Signature {} invalid: {}", report.index, error);
198
-
all_valid = false;
199
-
has_errors = true;
200
-
}
201
-
VerificationStatus::Unverified { reason } => {
202
-
println!("? Signature {} unverified: {}", report.index, reason);
203
-
all_valid = false;
204
-
}
205
-
}
206
-
}
207
-
208
-
println!();
209
-
println!(
210
-
"Summary: {} total, {} valid",
211
-
reports.len(),
212
-
reports
213
-
.iter()
214
-
.filter(|r| matches!(r.status, VerificationStatus::Valid { .. }))
215
-
.count()
216
-
);
217
-
218
-
if has_errors {
219
-
Err(anyhow!("One or more signatures are invalid"))
220
-
} else if !all_valid {
221
-
Err(anyhow!("One or more signatures could not be verified"))
222
-
} else {
223
-
Ok(())
224
-
}
225
-
}
226
-
227
-
/// Mode 2: Verify a specific attestation record against the provided record.
228
-
///
229
-
/// The attestation should be a standalone attestation object (e.g., from a remote proof record)
230
-
/// that will be verified against the record's content.
231
-
async fn verify_attestation_mode(record: &Value, attestation: &Value) -> Result<()> {
232
-
// The attestation should have a CID field that we can use to verify
233
-
let attestation_obj = attestation
234
-
.as_object()
235
-
.ok_or_else(|| anyhow!("Attestation must be a JSON object"))?;
236
-
237
-
// Get the CID from the attestation
238
-
let cid_str = attestation_obj
239
-
.get("cid")
240
-
.and_then(Value::as_str)
241
-
.ok_or_else(|| anyhow!("Attestation must contain a 'cid' field"))?;
242
-
243
-
// Prepare the signing record with the attestation metadata
244
-
let mut signing_metadata = attestation_obj.clone();
245
-
signing_metadata.remove("cid");
246
-
signing_metadata.remove("signature");
247
-
248
-
let signing_record =
249
-
atproto_attestation::prepare_signing_record(record, &Value::Object(signing_metadata))
250
-
.context("Failed to prepare signing record")?;
251
-
252
-
// Generate the CID from the signing record
253
-
let computed_cid =
254
-
atproto_attestation::create_cid(&signing_record).context("Failed to generate CID")?;
255
-
256
-
// Compare CIDs
257
-
if computed_cid.to_string() != cid_str {
258
-
return Err(anyhow!(
259
-
"CID mismatch: attestation claims {}, but computed {}",
260
-
cid_str,
261
-
computed_cid
262
-
));
263
-
}
264
-
265
-
println!("OK");
266
-
println!("CID: {}", computed_cid);
267
-
268
-
Ok(())
193
+
atproto_attestation::verify_record(
194
+
AnyInput::Serialize(record.clone()),
195
+
repository_did,
196
+
key_resolver,
197
+
record_resolver,
198
+
)
199
+
.await
200
+
.context("Failed to verify signatures")
269
201
}
270
202
271
203
/// Load input from various sources: JSON string, file path, AT-URI, or stdin.
···
369
301
atproto_client::com::atproto::repo::GetRecordResponse::Error(error) => {
370
302
Err(anyhow!("Failed to fetch record: {}", error.error_message()))
371
303
}
372
-
}
373
-
}
374
-
375
-
/// Truncate a DID or did:key for display purposes.
376
-
fn truncate_did(did: &str) -> String {
377
-
if did.len() > 40 {
378
-
format!("{}...{}", &did[..20], &did[did.len() - 12..])
379
-
} else {
380
-
did.to_string()
381
304
}
382
305
}
383
306
+532
crates/atproto-attestation/src/cid.rs
+532
crates/atproto-attestation/src/cid.rs
···
1
+
//! CID (Content Identifier) generation for AT Protocol records.
2
+
//!
3
+
//! This module implements the CID-first attestation workflow, generating
4
+
//! deterministic content identifiers using DAG-CBOR serialization and SHA-256 hashing.
5
+
6
+
use crate::{errors::AttestationError, input::AnyInput};
7
+
#[cfg(test)]
8
+
use atproto_record::typed::LexiconType;
9
+
use cid::Cid;
10
+
use multihash::Multihash;
11
+
use serde::Serialize;
12
+
use serde_json::{Value, Map};
13
+
use sha2::{Digest, Sha256};
14
+
use std::convert::TryInto;
15
+
16
+
/// DAG-CBOR codec identifier used in AT Protocol CIDs.
17
+
///
18
+
/// This codec (0x71) indicates that the data is encoded using DAG-CBOR,
19
+
/// a deterministic subset of CBOR designed for content-addressable systems.
20
+
pub const DAG_CBOR_CODEC: u64 = 0x71;
21
+
22
+
/// SHA-256 multihash code used in AT Protocol CIDs.
23
+
///
24
+
/// This code (0x12) identifies SHA-256 as the hash function used to generate
25
+
/// the content identifier. SHA-256 provides 256-bit cryptographic security.
26
+
pub const MULTIHASH_SHA256: u64 = 0x12;
27
+
28
+
/// Create a CID from any serializable data using DAG-CBOR encoding.
29
+
///
30
+
/// This function generates a content identifier (CID) for arbitrary data by:
31
+
/// 1. Serializing the input to DAG-CBOR format
32
+
/// 2. Computing a SHA-256 hash of the serialized bytes
33
+
/// 3. Creating a CIDv1 with dag-cbor codec (0x71)
34
+
///
35
+
/// # Arguments
36
+
///
37
+
/// * `record` - The data to generate a CID for (must implement `Serialize`)
38
+
///
39
+
/// # Returns
40
+
///
41
+
/// The generated CID for the data using CIDv1 with dag-cbor codec (0x71) and sha2-256 hash
42
+
///
43
+
/// # Type Parameters
44
+
///
45
+
/// * `T` - Any type that implements `Serialize` and is compatible with DAG-CBOR encoding
46
+
///
47
+
/// # Errors
48
+
///
49
+
/// Returns an error if:
50
+
/// - DAG-CBOR serialization fails
51
+
/// - Multihash wrapping fails
52
+
///
53
+
/// # Example
54
+
///
55
+
/// ```rust
56
+
/// use atproto_attestation::cid::create_dagbor_cid;
57
+
/// use serde_json::json;
58
+
///
59
+
/// # fn example() -> Result<(), Box<dyn std::error::Error>> {
60
+
/// let data = json!({"text": "Hello, world!"});
61
+
/// let cid = create_dagbor_cid(&data)?;
62
+
/// assert_eq!(cid.codec(), 0x71); // dag-cbor codec
63
+
/// # Ok(())
64
+
/// # }
65
+
/// ```
66
+
pub fn create_dagbor_cid<T: Serialize>(record: &T) -> Result<Cid, AttestationError> {
67
+
let dag_cbor_bytes = serde_ipld_dagcbor::to_vec(record)?;
68
+
let digest = Sha256::digest(&dag_cbor_bytes);
69
+
let multihash = Multihash::wrap(MULTIHASH_SHA256, &digest)
70
+
.map_err(|error| AttestationError::MultihashWrapFailed { error })?;
71
+
72
+
Ok(Cid::new_v1(DAG_CBOR_CODEC, multihash))
73
+
}
74
+
75
+
/// Create a CID for an attestation with automatic `$sig` metadata preparation.
76
+
///
77
+
/// This is the high-level function used internally by attestation creation functions.
78
+
/// It handles the full workflow of preparing a signing record with `$sig` metadata
79
+
/// and generating the CID.
80
+
///
81
+
/// # Arguments
82
+
///
83
+
/// * `record_input` - The record to attest (as AnyInput: String, Json, or TypedLexicon)
84
+
/// * `metadata_input` - The attestation metadata (must include `$type`)
85
+
/// * `repository` - The repository DID to bind the attestation to (prevents replay attacks)
86
+
///
87
+
/// # Returns
88
+
///
89
+
/// The generated CID for the prepared attestation record
90
+
///
91
+
/// # Errors
92
+
///
93
+
/// Returns an error if:
94
+
/// - The record or metadata are not valid JSON objects
95
+
/// - The record is missing the required `$type` field
96
+
/// - The metadata is missing the required `$type` field
97
+
/// - DAG-CBOR serialization fails
98
+
pub fn create_attestation_cid<
99
+
R: Serialize + Clone,
100
+
M: Serialize + Clone,
101
+
>(
102
+
record_input: AnyInput<R>,
103
+
metadata_input: AnyInput<M>,
104
+
repository: &str,
105
+
) -> Result<Cid, AttestationError> {
106
+
let mut record_obj: Map<String, Value> = record_input
107
+
.try_into()
108
+
.map_err(|_| AttestationError::RecordMustBeObject)?;
109
+
110
+
if record_obj
111
+
.get("$type")
112
+
.and_then(Value::as_str)
113
+
.filter(|value| !value.is_empty())
114
+
.is_none()
115
+
{
116
+
return Err(AttestationError::RecordMissingType);
117
+
}
118
+
119
+
let mut metadata_obj: Map<String, Value> = metadata_input
120
+
.try_into()
121
+
.map_err(|_| AttestationError::MetadataMustBeObject)?;
122
+
123
+
if metadata_obj
124
+
.get("$type")
125
+
.and_then(Value::as_str)
126
+
.filter(|value| !value.is_empty())
127
+
.is_none()
128
+
{
129
+
return Err(AttestationError::MetadataMissingSigType);
130
+
}
131
+
132
+
record_obj.remove("signatures");
133
+
134
+
metadata_obj.remove("cid");
135
+
metadata_obj.remove("signature");
136
+
metadata_obj.insert(
137
+
"repository".to_string(),
138
+
Value::String(repository.to_string()),
139
+
);
140
+
141
+
record_obj.insert("$sig".to_string(), Value::Object(metadata_obj.clone()));
142
+
143
+
// Directly pass the Map<String, Value> - no need to wrap in Value::Object
144
+
create_dagbor_cid(&record_obj)
145
+
}
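A hedged usage sketch of this high-level helper: the lexicon names, the `did:key` value, and the `did:plc` repository DID below are placeholders. The only requirements taken from the documentation above are that both inputs are JSON objects with a non-empty `$type` and that a repository DID is supplied.

```rust
// Usage sketch: lexicon names and DIDs are placeholders.
use atproto_attestation::cid::create_attestation_cid;
use atproto_attestation::input::AnyInput;
use serde_json::json;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Both inputs must be JSON objects carrying a non-empty `$type`.
    let record = json!({"$type": "app.example.post", "text": "Hello!"});
    let metadata = json!({"$type": "com.example.sig", "key": "did:key:zExamplePlaceholder"});

    // The repository DID is folded into the `$sig` metadata before hashing,
    // so the same record attested under another repository yields a different CID.
    let cid = create_attestation_cid(
        AnyInput::Serialize(record),
        AnyInput::Serialize(metadata),
        "did:plc:examplerepo",
    )?;

    assert_eq!(cid.codec(), 0x71); // dag-cbor
    Ok(())
}
```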
146
+
147
+
/// Validates that a CID string is a valid DAG-CBOR CID for AT Protocol attestations.
148
+
///
149
+
/// This function performs strict validation to ensure the CID meets the exact
150
+
/// specifications required for AT Protocol attestations:
151
+
///
152
+
/// 1. **Valid format**: The string must be a parseable CID
153
+
/// 2. **Version**: Must be CIDv1 (not CIDv0)
154
+
/// 3. **Codec**: Must use DAG-CBOR codec (0x71)
155
+
/// 4. **Hash algorithm**: Must use SHA-256 (multihash code 0x12)
156
+
/// 5. **Hash length**: Must have exactly 32 bytes (SHA-256 standard)
157
+
///
158
+
/// These requirements ensure consistency and security across the AT Protocol
159
+
/// ecosystem, particularly for content addressing and attestation verification.
160
+
///
161
+
/// # Arguments
162
+
///
163
+
/// * `cid` - A string slice containing the CID to validate
164
+
///
165
+
/// # Returns
166
+
///
167
+
/// * `true` if the CID is a valid DAG-CBOR CID with SHA-256 hash
168
+
/// * `false` if the CID is invalid or fails any of the requirements above
169
+
///
170
+
/// # Examples
171
+
///
172
+
/// ```rust
173
+
/// use atproto_attestation::cid::validate_dagcbor_cid;
174
+
///
175
+
/// // Valid AT Protocol CID (CIDv1, DAG-CBOR, SHA-256)
176
+
/// let valid_cid = "bafyreigw5bqvbz6m3c3zjpqhxwl4njlnbbnw5xvptbx6dzfxjqcde6lt3y";
177
+
/// assert!(validate_dagcbor_cid(valid_cid));
178
+
///
179
+
/// // Invalid: Empty string
180
+
/// assert!(!validate_dagcbor_cid(""));
181
+
///
182
+
/// // Invalid: Not a CID
183
+
/// assert!(!validate_dagcbor_cid("not-a-cid"));
184
+
///
185
+
/// // Invalid: CIDv0 (starts with Qm)
186
+
/// let cid_v0 = "QmYwAPJzv5CZsnA625ub3XtLxT3Tz5Lno5Wqv9eKewWKjE";
187
+
/// assert!(!validate_dagcbor_cid(cid_v0));
188
+
/// ```
189
+
pub fn validate_dagcbor_cid(cid: &str) -> bool {
190
+
if cid.is_empty() {
191
+
return false;
192
+
}
193
+
194
+
// Parse the CID using the cid crate for proper validation
195
+
let parsed_cid = match Cid::try_from(cid) {
196
+
Ok(value) => value,
197
+
Err(_) => return false,
198
+
};
199
+
200
+
// Verify it's CIDv1 (version 1)
201
+
if parsed_cid.version() != cid::Version::V1 {
202
+
return false;
203
+
}
204
+
205
+
// Verify it uses DAG-CBOR codec (0x71)
206
+
if parsed_cid.codec() != DAG_CBOR_CODEC {
207
+
return false;
208
+
}
209
+
210
+
// Get the multihash and verify it uses SHA-256
211
+
let multihash = parsed_cid.hash();
212
+
213
+
// SHA-256 code is 0x12
214
+
if multihash.code() != MULTIHASH_SHA256 {
215
+
return false;
216
+
}
217
+
218
+
// Verify the hash digest is 32 bytes (SHA-256 standard)
219
+
if multihash.digest().len() != 32 {
220
+
return false;
221
+
}
222
+
223
+
true
224
+
}
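A small round-trip sketch tying the validator to `create_dagbor_cid` above; the CIDv0 string is the same example used in the doc comment.

```rust
// Round-trip sketch: a CID produced by create_dagbor_cid passes the strict
// checks above, while a CIDv0 string (from the doc example) does not.
use atproto_attestation::cid::{create_dagbor_cid, validate_dagcbor_cid};
use serde_json::json;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let cid = create_dagbor_cid(&json!({"text": "round trip"}))?;
    assert!(validate_dagcbor_cid(&cid.to_string()));

    // CIDv0 ("Qm..." base58) is rejected because it is not CIDv1.
    assert!(!validate_dagcbor_cid("QmYwAPJzv5CZsnA625ub3XtLxT3Tz5Lno5Wqv9eKewWKjE"));
    Ok(())
}
```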
225
+
226
+
#[cfg(test)]
227
+
mod tests {
228
+
use super::*;
229
+
use atproto_record::typed::TypedLexicon;
230
+
use serde::Deserialize;
231
+
232
+
233
+
#[tokio::test]
234
+
async fn test_create_attestation_cid() -> Result<(), AttestationError> {
235
+
use atproto_record::datetime::format as datetime_format;
236
+
use chrono::{DateTime, Utc};
237
+
238
+
// Define test record type with createdAt and text fields
239
+
#[derive(Serialize, Deserialize, PartialEq, Clone)]
240
+
#[cfg_attr(debug_assertions, derive(Debug))]
241
+
struct TestRecord {
242
+
#[serde(rename = "createdAt", with = "datetime_format")]
243
+
created_at: DateTime<Utc>,
244
+
text: String,
245
+
}
246
+
247
+
impl LexiconType for TestRecord {
248
+
fn lexicon_type() -> &'static str {
249
+
"com.example.testrecord"
250
+
}
251
+
}
252
+
253
+
// Define test metadata type
254
+
#[derive(Serialize, Deserialize, PartialEq, Clone)]
255
+
#[cfg_attr(debug_assertions, derive(Debug))]
256
+
struct TestMetadata {
257
+
#[serde(rename = "createdAt", with = "datetime_format")]
258
+
created_at: DateTime<Utc>,
259
+
purpose: String,
260
+
}
261
+
262
+
impl LexiconType for TestMetadata {
263
+
fn lexicon_type() -> &'static str {
264
+
"com.example.testmetadata"
265
+
}
266
+
}
267
+
268
+
// Create test data
269
+
let created_at = DateTime::parse_from_rfc3339("2025-01-15T14:00:00.000Z")
270
+
.unwrap()
271
+
.with_timezone(&Utc);
272
+
273
+
let record = TestRecord {
274
+
created_at,
275
+
text: "Hello, AT Protocol!".to_string(),
276
+
};
277
+
278
+
let metadata_created_at = DateTime::parse_from_rfc3339("2025-01-15T14:05:00.000Z")
279
+
.unwrap()
280
+
.with_timezone(&Utc);
281
+
282
+
let metadata = TestMetadata {
283
+
created_at: metadata_created_at,
284
+
purpose: "attestation".to_string(),
285
+
};
286
+
287
+
let repository = "did:plc:test123";
288
+
289
+
// Create typed lexicons
290
+
let typed_record = TypedLexicon::new(record);
291
+
let typed_metadata = TypedLexicon::new(metadata);
292
+
293
+
// Call the function
294
+
let cid = create_attestation_cid(
295
+
AnyInput::Serialize(typed_record),
296
+
AnyInput::Serialize(typed_metadata),
297
+
repository,
298
+
)?;
299
+
300
+
// Verify CID properties
301
+
assert_eq!(cid.codec(), 0x71, "CID should use dag-cbor codec");
302
+
assert_eq!(cid.hash().code(), 0x12, "CID should use sha2-256 hash");
303
+
assert_eq!(
304
+
cid.hash().digest().len(),
305
+
32,
306
+
"Hash digest should be 32 bytes"
307
+
);
308
+
assert_eq!(cid.to_bytes().len(), 36, "CID should be 36 bytes total");
309
+
310
+
Ok(())
311
+
}
312
+
313
+
#[tokio::test]
314
+
async fn test_create_attestation_cid_deterministic() -> Result<(), AttestationError> {
315
+
use atproto_record::datetime::format as datetime_format;
316
+
use chrono::{DateTime, Utc};
317
+
318
+
// Define simple test types
319
+
#[derive(Serialize, Deserialize, PartialEq, Clone)]
320
+
struct SimpleRecord {
321
+
#[serde(rename = "createdAt", with = "datetime_format")]
322
+
created_at: DateTime<Utc>,
323
+
text: String,
324
+
}
325
+
326
+
impl LexiconType for SimpleRecord {
327
+
fn lexicon_type() -> &'static str {
328
+
"com.example.simple"
329
+
}
330
+
}
331
+
332
+
#[derive(Serialize, Deserialize, PartialEq, Clone)]
333
+
struct SimpleMetadata {
334
+
#[serde(rename = "createdAt", with = "datetime_format")]
335
+
created_at: DateTime<Utc>,
336
+
}
337
+
338
+
impl LexiconType for SimpleMetadata {
339
+
fn lexicon_type() -> &'static str {
340
+
"com.example.meta"
341
+
}
342
+
}
343
+
344
+
let created_at = DateTime::parse_from_rfc3339("2025-01-01T00:00:00.000Z")
345
+
.unwrap()
346
+
.with_timezone(&Utc);
347
+
348
+
let record1 = SimpleRecord {
349
+
created_at,
350
+
text: "test".to_string(),
351
+
};
352
+
let record2 = SimpleRecord {
353
+
created_at,
354
+
text: "test".to_string(),
355
+
};
356
+
357
+
let metadata1 = SimpleMetadata { created_at };
358
+
let metadata2 = SimpleMetadata { created_at };
359
+
360
+
let repository = "did:plc:same";
361
+
362
+
// Create CIDs for identical records
363
+
let cid1 = create_attestation_cid(
364
+
AnyInput::Serialize(TypedLexicon::new(record1)),
365
+
AnyInput::Serialize(TypedLexicon::new(metadata1)),
366
+
repository,
367
+
)?;
368
+
369
+
let cid2 = create_attestation_cid(
370
+
AnyInput::Serialize(TypedLexicon::new(record2)),
371
+
AnyInput::Serialize(TypedLexicon::new(metadata2)),
372
+
repository,
373
+
)?;
374
+
375
+
// Verify determinism: identical inputs produce identical CIDs
376
+
assert_eq!(
377
+
cid1, cid2,
378
+
"Identical records should produce identical CIDs"
379
+
);
380
+
381
+
Ok(())
382
+
}
383
+
384
+
#[tokio::test]
385
+
async fn test_create_attestation_cid_different_repositories() -> Result<(), AttestationError> {
386
+
use atproto_record::datetime::format as datetime_format;
387
+
use chrono::{DateTime, Utc};
388
+
389
+
#[derive(Serialize, Deserialize, PartialEq, Clone)]
390
+
struct RepoRecord {
391
+
#[serde(rename = "createdAt", with = "datetime_format")]
392
+
created_at: DateTime<Utc>,
393
+
text: String,
394
+
}
395
+
396
+
impl LexiconType for RepoRecord {
397
+
fn lexicon_type() -> &'static str {
398
+
"com.example.repo"
399
+
}
400
+
}
401
+
402
+
#[derive(Serialize, Deserialize, PartialEq, Clone)]
403
+
struct RepoMetadata {
404
+
#[serde(rename = "createdAt", with = "datetime_format")]
405
+
created_at: DateTime<Utc>,
406
+
}
407
+
408
+
impl LexiconType for RepoMetadata {
409
+
fn lexicon_type() -> &'static str {
410
+
"com.example.repometa"
411
+
}
412
+
}
413
+
414
+
let created_at = DateTime::parse_from_rfc3339("2025-01-01T12:00:00.000Z")
415
+
.unwrap()
416
+
.with_timezone(&Utc);
417
+
418
+
let record = RepoRecord {
419
+
created_at,
420
+
text: "content".to_string(),
421
+
};
422
+
let metadata = RepoMetadata { created_at };
423
+
424
+
// Same record and metadata, different repositories
425
+
let cid1 = create_attestation_cid(
426
+
AnyInput::Serialize(TypedLexicon::new(record.clone())),
427
+
AnyInput::Serialize(TypedLexicon::new(metadata.clone())),
428
+
"did:plc:repo1",
429
+
)?;
430
+
431
+
let cid2 = create_attestation_cid(
432
+
AnyInput::Serialize(TypedLexicon::new(record)),
433
+
AnyInput::Serialize(TypedLexicon::new(metadata)),
434
+
"did:plc:repo2",
435
+
)?;
436
+
437
+
// Different repositories should produce different CIDs (prevents replay attacks)
438
+
assert_ne!(
439
+
cid1, cid2,
440
+
"Different repository DIDs should produce different CIDs"
441
+
);
442
+
443
+
Ok(())
444
+
}
445
+
446
+
#[test]
447
+
fn test_validate_dagcbor_cid() {
448
+
// Test valid CID (generated from our own create_dagbor_cid function)
449
+
let valid_data = serde_json::json!({"test": "data"});
450
+
let valid_cid = create_dagbor_cid(&valid_data).unwrap();
451
+
let valid_cid_str = valid_cid.to_string();
452
+
assert!(validate_dagcbor_cid(&valid_cid_str), "Valid CID should pass validation");
453
+
454
+
// Test empty string
455
+
assert!(!validate_dagcbor_cid(""), "Empty string should fail validation");
456
+
457
+
// Test invalid CID string
458
+
assert!(!validate_dagcbor_cid("not-a-cid"), "Invalid string should fail validation");
459
+
assert!(!validate_dagcbor_cid("abc123"), "Invalid string should fail validation");
460
+
461
+
// Test CIDv0 (starts with Qm, uses different format)
462
+
let cid_v0 = "QmYwAPJzv5CZsnA625ub3XtLxT3Tz5Lno5Wqv9eKewWKjE";
463
+
assert!(!validate_dagcbor_cid(cid_v0), "CIDv0 should fail validation");
464
+
465
+
// Test valid CID base32 format but wrong codec (not DAG-CBOR)
466
+
// This is a valid CID but uses raw codec (0x55) instead of DAG-CBOR (0x71)
467
+
let wrong_codec = "bafkreigw5bqvbz6m3c3zjpqhxwl4njlnbbnw5xvptbx6dzfxjqcde6lt3y";
468
+
assert!(!validate_dagcbor_cid(wrong_codec), "CID with wrong codec should fail");
469
+
470
+
// Test that our constants match what we're checking
471
+
assert_eq!(DAG_CBOR_CODEC, 0x71, "DAG-CBOR codec constant should be 0x71");
472
+
assert_eq!(MULTIHASH_SHA256, 0x12, "SHA-256 multihash code should be 0x12");
473
+
}
474
+
475
+
#[tokio::test]
476
+
async fn phantom_data_test() -> Result<(), AttestationError> {
477
+
let repository = "did:web:example.com";
478
+
479
+
#[derive(Serialize, Deserialize, PartialEq, Clone)]
480
+
struct FooRecord {
481
+
text: String,
482
+
}
483
+
484
+
impl LexiconType for FooRecord {
485
+
fn lexicon_type() -> &'static str {
486
+
"com.example.foo"
487
+
}
488
+
}
489
+
490
+
#[derive(Serialize, Deserialize, PartialEq, Clone)]
491
+
struct BarRecord {
492
+
text: String,
493
+
}
494
+
495
+
impl LexiconType for BarRecord {
496
+
fn lexicon_type() -> &'static str {
497
+
"com.example.bar"
498
+
}
499
+
}
500
+
501
+
let foo = FooRecord {
502
+
text: "foo".to_string(),
503
+
};
504
+
let typed_foo = TypedLexicon::new(foo);
505
+
506
+
let bar = BarRecord {
507
+
text: "bar".to_string(),
508
+
};
509
+
let typed_bar = TypedLexicon::new(bar);
510
+
511
+
let cid1 = create_attestation_cid(
512
+
AnyInput::Serialize(typed_foo.clone()),
513
+
AnyInput::Serialize(typed_bar.clone()),
514
+
repository,
515
+
)?;
516
+
517
+
let value_bar = serde_json::to_value(typed_bar).expect("bar serde_json::Value conversion");
518
+
519
+
let cid2 = create_attestation_cid(
520
+
AnyInput::Serialize(typed_foo),
521
+
AnyInput::Serialize(value_bar),
522
+
repository,
523
+
)?;
524
+
525
+
assert_eq!(
526
+
cid1, cid2,
527
+
"Different repository DIDs should produce different CIDs"
528
+
);
529
+
530
+
Ok(())
531
+
}
532
+
}
+8
crates/atproto-attestation/src/errors.rs
+8
crates/atproto-attestation/src/errors.rs
···
12
12
#[error("error-atproto-attestation-1 Record must be a JSON object")]
13
13
RecordMustBeObject,
14
14
15
+
/// Error when the record omits the `$type` discriminator.
16
+
#[error("error-atproto-attestation-1 Record must include a string `$type` field")]
17
+
RecordMissingType,
18
+
15
19
/// Error when attestation metadata is not a JSON object.
16
20
#[error("error-atproto-attestation-2 Attestation metadata must be a JSON object")]
17
21
MetadataMustBeObject,
···
92
96
/// Error when `$sig` metadata omits the `$type` discriminator.
93
97
#[error("error-atproto-attestation-16 `$sig` metadata must include a string `$type` field")]
94
98
SigMetadataMissingType,
99
+
100
+
/// Error when metadata omits the `$type` discriminator.
101
+
#[error("error-atproto-attestation-18 Metadata must include a string `$type` field")]
102
+
MetadataMissingType,
95
103
96
104
/// Error when a key resolver is required but not provided.
97
105
#[error("error-atproto-attestation-17 Key resolver required to resolve key reference: {key}")]
+384
crates/atproto-attestation/src/input.rs
+384
crates/atproto-attestation/src/input.rs
···
1
+
//! Input types for attestation functions supporting multiple input formats.
2
+
3
+
use serde::Serialize;
4
+
use serde_json::{Map, Value};
5
+
use std::convert::TryFrom;
6
+
use std::str::FromStr;
7
+
use thiserror::Error;
8
+
9
+
/// Flexible input type for attestation functions.
10
+
///
11
+
/// Allows records and metadata to be passed either as JSON strings or as any serde-serializable type.
12
+
#[derive(Clone)]
13
+
pub enum AnyInput<S: Serialize + Clone> {
14
+
/// JSON string representation
15
+
String(String),
16
+
/// Serializable types
17
+
Serialize(S),
18
+
}
19
+
20
+
/// Error types for AnyInput parsing and transformation operations.
21
+
///
22
+
/// This enum provides specific error types for various failure modes when working
23
+
/// with `AnyInput`, including JSON parsing errors, type conversion errors, and
24
+
/// serialization failures.
25
+
#[derive(Debug, Error)]
26
+
pub enum AnyInputError {
27
+
/// Error when parsing JSON from a string fails.
28
+
#[error("Failed to parse JSON from string: {0}")]
29
+
JsonParseError(#[from] serde_json::Error),
30
+
31
+
/// Error when the value is not a JSON object.
32
+
#[error("Expected JSON object, but got {value_type}")]
33
+
NotAnObject {
34
+
/// The actual type of the value.
35
+
value_type: String,
36
+
},
37
+
38
+
/// Error when the string contains invalid JSON.
39
+
#[error("Invalid JSON string: {message}")]
40
+
InvalidJson {
41
+
/// Error message describing what's wrong with the JSON.
42
+
message: String,
43
+
},
44
+
}
45
+
46
+
impl AnyInputError {
47
+
/// Creates a new `NotAnObject` error with the actual type information.
48
+
pub fn not_an_object(value: &Value) -> Self {
49
+
let value_type = match value {
50
+
Value::Null => "null".to_string(),
51
+
Value::Bool(_) => "boolean".to_string(),
52
+
Value::Number(_) => "number".to_string(),
53
+
Value::String(_) => "string".to_string(),
54
+
Value::Array(_) => "array".to_string(),
55
+
Value::Object(_) => "object".to_string(), // Should not happen
56
+
};
57
+
58
+
AnyInputError::NotAnObject { value_type }
59
+
}
60
+
}
61
+
62
+
/// Implementation of `FromStr` for `AnyInput` that deserializes JSON strings.
63
+
///
64
+
/// This allows parsing JSON strings directly into `AnyInput<serde_json::Value>` using
65
+
/// the standard `FromStr` trait. The string is deserialized using `serde_json::from_str`
66
+
/// and wrapped in `AnyInput::Serialize`.
67
+
///
68
+
/// # Errors
69
+
///
70
+
/// Returns `AnyInputError::JsonParseError` if the string contains invalid JSON.
71
+
///
72
+
/// # Example
73
+
///
74
+
/// ```
75
+
/// use atproto_attestation::input::AnyInput;
76
+
/// use std::str::FromStr;
77
+
///
78
+
/// let input: AnyInput<serde_json::Value> = r#"{"type": "post", "text": "Hello"}"#.parse().unwrap();
79
+
/// ```
80
+
impl FromStr for AnyInput<serde_json::Value> {
81
+
type Err = AnyInputError;
82
+
83
+
fn from_str(s: &str) -> Result<Self, Self::Err> {
84
+
let value = serde_json::from_str(s)?;
85
+
Ok(AnyInput::Serialize(value))
86
+
}
87
+
}
88
+
89
+
impl<S: Serialize + Clone> From<S> for AnyInput<S> {
90
+
fn from(value: S) -> Self {
91
+
AnyInput::Serialize(value)
92
+
}
93
+
}
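A short sketch of the construction paths shown so far, plus the object conversion supplied by the `TryFrom` impl that follows; the record content is an arbitrary placeholder.

```rust
// Sketch: build AnyInput via FromStr and From, then convert to a JSON object map.
use atproto_attestation::input::AnyInput;
use serde_json::{json, Map, Value};
use std::convert::TryInto;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // From a JSON string via FromStr...
    let from_str: AnyInput<Value> = r#"{"$type": "app.example.post"}"#.parse()?;
    // ...or from any serializable value via From.
    let from_value: AnyInput<Value> = json!({"$type": "app.example.post"}).into();

    // Both convert into a JSON object map; non-object values are rejected.
    let a: Map<String, Value> = from_str.try_into()?;
    let b: Map<String, Value> = from_value.try_into()?;
    assert_eq!(a, b);
    Ok(())
}
```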
94
+
95
+
/// Implementation of `TryFrom` for converting `AnyInput` into a JSON object map.
96
+
///
97
+
/// This allows converting any `AnyInput` into a `serde_json::Map<String, Value>`, which
98
+
/// represents a JSON object. Both string and serializable inputs are converted to JSON
99
+
/// objects, with appropriate error handling for non-object values.
100
+
///
101
+
/// # Example
102
+
///
103
+
/// ```
104
+
/// use atproto_attestation::input::AnyInput;
105
+
/// use serde_json::{json, Map, Value};
106
+
/// use std::convert::TryInto;
107
+
///
108
+
/// let input = AnyInput::Serialize(json!({"type": "post", "text": "Hello"}));
109
+
/// let map: Map<String, Value> = input.try_into().unwrap();
110
+
/// assert_eq!(map.get("type").unwrap(), "post");
111
+
/// ```
112
+
impl<S: Serialize + Clone> TryFrom<AnyInput<S>> for Map<String, Value> {
113
+
type Error = AnyInputError;
114
+
115
+
fn try_from(input: AnyInput<S>) -> Result<Self, Self::Error> {
116
+
match input {
117
+
AnyInput::String(value) => {
118
+
// Parse string as JSON
119
+
let json_value = serde_json::from_str::<Value>(&value)?;
120
+
121
+
// Extract as object
122
+
json_value
123
+
.as_object()
124
+
.cloned()
125
+
.ok_or_else(|| AnyInputError::not_an_object(&json_value))
126
+
}
127
+
AnyInput::Serialize(value) => {
128
+
// Convert to JSON value
129
+
let json_value = serde_json::to_value(value)?;
130
+
131
+
// Extract as object
132
+
json_value
133
+
.as_object()
134
+
.cloned()
135
+
.ok_or_else(|| AnyInputError::not_an_object(&json_value))
136
+
}
137
+
}
138
+
}
139
+
}
140
+
141
+
/// Default phantom type for AnyInput when no specific lexicon type is needed.
142
+
///
143
+
/// This type serves as the default generic parameter for `AnyInput`, allowing
144
+
/// for simpler usage when working with untyped JSON values.
145
+
#[derive(Serialize, PartialEq, Clone)]
146
+
pub struct PhantomSignature {}
147
+
148
+
#[cfg(test)]
149
+
mod tests {
150
+
use super::*;
151
+
152
+
#[test]
153
+
fn test_from_str_valid_json() {
154
+
let json_str = r#"{"type": "post", "text": "Hello", "count": 42}"#;
155
+
let result: Result<AnyInput<serde_json::Value>, _> = json_str.parse();
156
+
157
+
assert!(result.is_ok());
158
+
159
+
let input = result.unwrap();
160
+
match input {
161
+
AnyInput::Serialize(value) => {
162
+
assert_eq!(value["type"], "post");
163
+
assert_eq!(value["text"], "Hello");
164
+
assert_eq!(value["count"], 42);
165
+
}
166
+
_ => panic!("Expected AnyInput::Serialize variant"),
167
+
}
168
+
}
169
+
170
+
#[test]
171
+
fn test_from_str_invalid_json() {
172
+
let invalid_json = r#"{"type": "post", "text": "Hello" invalid json"#;
173
+
let result: Result<AnyInput<serde_json::Value>, _> = invalid_json.parse();
174
+
175
+
assert!(result.is_err());
176
+
}
177
+
178
+
#[test]
179
+
fn test_from_str_array() {
180
+
let json_array = r#"[1, 2, 3, "four"]"#;
181
+
let result: Result<AnyInput<serde_json::Value>, _> = json_array.parse();
182
+
183
+
assert!(result.is_ok());
184
+
185
+
let input = result.unwrap();
186
+
match input {
187
+
AnyInput::Serialize(value) => {
188
+
assert!(value.is_array());
189
+
let array = value.as_array().unwrap();
190
+
assert_eq!(array.len(), 4);
191
+
assert_eq!(array[0], 1);
192
+
assert_eq!(array[3], "four");
193
+
}
194
+
_ => panic!("Expected AnyInput::Serialize variant"),
195
+
}
196
+
}
197
+
198
+
#[test]
199
+
fn test_from_str_null() {
200
+
let null_str = "null";
201
+
let result: Result<AnyInput<serde_json::Value>, _> = null_str.parse();
202
+
203
+
assert!(result.is_ok());
204
+
205
+
let input = result.unwrap();
206
+
match input {
207
+
AnyInput::Serialize(value) => {
208
+
assert!(value.is_null());
209
+
}
210
+
_ => panic!("Expected AnyInput::Serialize variant"),
211
+
}
212
+
}
213
+
214
+
#[test]
215
+
fn test_from_str_with_use() {
216
+
// Test using the parse method directly with type inference
217
+
let input: AnyInput<serde_json::Value> = r#"{"$type": "app.bsky.feed.post"}"#
218
+
.parse()
219
+
.expect("Failed to parse JSON");
220
+
221
+
match input {
222
+
AnyInput::Serialize(value) => {
223
+
assert_eq!(value["$type"], "app.bsky.feed.post");
224
+
}
225
+
_ => panic!("Expected AnyInput::Serialize variant"),
226
+
}
227
+
}
228
+
229
+
#[test]
230
+
fn test_try_into_from_string() {
231
+
use std::convert::TryInto;
232
+
233
+
let input = AnyInput::<Value>::String(r#"{"type": "post", "text": "Hello"}"#.to_string());
234
+
let result: Result<Map<String, Value>, _> = input.try_into();
235
+
236
+
assert!(result.is_ok());
237
+
let map = result.unwrap();
238
+
assert_eq!(map.get("type").unwrap(), "post");
239
+
assert_eq!(map.get("text").unwrap(), "Hello");
240
+
}
241
+
242
+
#[test]
243
+
fn test_try_into_from_serialize() {
244
+
use serde_json::json;
245
+
use std::convert::TryInto;
246
+
247
+
let input = AnyInput::Serialize(json!({"$type": "app.bsky.feed.post", "count": 42}));
248
+
let result: Result<Map<String, Value>, _> = input.try_into();
249
+
250
+
assert!(result.is_ok());
251
+
let map = result.unwrap();
252
+
assert_eq!(map.get("$type").unwrap(), "app.bsky.feed.post");
253
+
assert_eq!(map.get("count").unwrap(), 42);
254
+
}
255
+
256
+
#[test]
257
+
fn test_try_into_string_not_object() {
258
+
use std::convert::TryInto;
259
+
260
+
let input = AnyInput::<Value>::String(r#"["array", "not", "object"]"#.to_string());
261
+
let result: Result<Map<String, Value>, AnyInputError> = input.try_into();
262
+
263
+
assert!(result.is_err());
264
+
match result.unwrap_err() {
265
+
AnyInputError::NotAnObject { value_type } => {
266
+
assert_eq!(value_type, "array");
267
+
}
268
+
_ => panic!("Expected NotAnObject error"),
269
+
}
270
+
}
271
+
272
+
#[test]
273
+
fn test_try_into_serialize_not_object() {
274
+
use serde_json::json;
275
+
use std::convert::TryInto;
276
+
277
+
let input = AnyInput::Serialize(json!([1, 2, 3]));
278
+
let result: Result<Map<String, Value>, AnyInputError> = input.try_into();
279
+
280
+
assert!(result.is_err());
281
+
match result.unwrap_err() {
282
+
AnyInputError::NotAnObject { value_type } => {
283
+
assert_eq!(value_type, "array");
284
+
}
285
+
_ => panic!("Expected NotAnObject error"),
286
+
}
287
+
}
288
+
289
+
#[test]
290
+
fn test_try_into_invalid_json_string() {
291
+
use std::convert::TryInto;
292
+
293
+
let input = AnyInput::<Value>::String("not valid json".to_string());
294
+
let result: Result<Map<String, Value>, AnyInputError> = input.try_into();
295
+
296
+
assert!(result.is_err());
297
+
match result.unwrap_err() {
298
+
AnyInputError::JsonParseError(_) => {}
299
+
_ => panic!("Expected JsonParseError"),
300
+
}
301
+
}
302
+
303
+
#[test]
304
+
fn test_try_into_null() {
305
+
use serde_json::json;
306
+
use std::convert::TryInto;
307
+
308
+
let input = AnyInput::Serialize(json!(null));
309
+
let result: Result<Map<String, Value>, AnyInputError> = input.try_into();
310
+
311
+
assert!(result.is_err());
312
+
match result.unwrap_err() {
313
+
AnyInputError::NotAnObject { value_type } => {
314
+
assert_eq!(value_type, "null");
315
+
}
316
+
_ => panic!("Expected NotAnObject error"),
317
+
}
318
+
}
319
+
320
+
#[test]
321
+
fn test_any_input_error_not_an_object() {
322
+
use serde_json::json;
323
+
324
+
// Test null
325
+
let err = AnyInputError::not_an_object(&json!(null));
326
+
match err {
327
+
AnyInputError::NotAnObject { value_type } => {
328
+
assert_eq!(value_type, "null");
329
+
}
330
+
_ => panic!("Expected NotAnObject error"),
331
+
}
332
+
333
+
// Test boolean
334
+
let err = AnyInputError::not_an_object(&json!(true));
335
+
match err {
336
+
AnyInputError::NotAnObject { value_type } => {
337
+
assert_eq!(value_type, "boolean");
338
+
}
339
+
_ => panic!("Expected NotAnObject error"),
340
+
}
341
+
342
+
// Test number
343
+
let err = AnyInputError::not_an_object(&json!(42));
344
+
match err {
345
+
AnyInputError::NotAnObject { value_type } => {
346
+
assert_eq!(value_type, "number");
347
+
}
348
+
_ => panic!("Expected NotAnObject error"),
349
+
}
350
+
351
+
// Test string
352
+
let err = AnyInputError::not_an_object(&json!("hello"));
353
+
match err {
354
+
AnyInputError::NotAnObject { value_type } => {
355
+
assert_eq!(value_type, "string");
356
+
}
357
+
_ => panic!("Expected NotAnObject error"),
358
+
}
359
+
360
+
// Test array
361
+
let err = AnyInputError::not_an_object(&json!([1, 2, 3]));
362
+
match err {
363
+
AnyInputError::NotAnObject { value_type } => {
364
+
assert_eq!(value_type, "array");
365
+
}
366
+
_ => panic!("Expected NotAnObject error"),
367
+
}
368
+
}
369
+
370
+
#[test]
371
+
fn test_error_display() {
372
+
use serde_json::json;
373
+
374
+
// Test NotAnObject error display
375
+
let err = AnyInputError::not_an_object(&json!(42));
376
+
assert_eq!(err.to_string(), "Expected JSON object, but got number");
377
+
378
+
// Test InvalidJson display
379
+
let err = AnyInputError::InvalidJson {
380
+
message: "unexpected token".to_string()
381
+
};
382
+
assert_eq!(err.to_string(), "Invalid JSON string: unexpected token");
383
+
}
384
+
}
+56
-1003
crates/atproto-attestation/src/lib.rs
+56
-1003
crates/atproto-attestation/src/lib.rs
···
1
1
//! AT Protocol record attestation utilities based on the CID-first specification.
2
2
//!
3
-
//! This crate implements helpers for constructing deterministic signing payloads,
4
-
//! creating inline and remote attestation references, and verifying signatures
5
-
//! against DID verification methods. It follows the requirements documented in
6
-
//! `bluesky-attestation-tee/documentation/spec/attestation.md`.
3
+
//! This crate implements helpers for creating inline and remote attestations
4
+
//! and verifying signatures against DID verification methods. It follows the
5
+
//! requirements documented in `bluesky-attestation-tee/documentation/spec/attestation.md`.
6
+
//!
7
+
//! ## Inline Attestations
8
+
//!
9
+
//! Use `create_inline_attestation` to create a signed record with an embedded signature:
7
10
//!
8
-
//! The workflow for inline attestations is:
9
-
//! 1. Prepare a signing record with [`prepare_signing_record`].
10
-
//! 2. Generate the content identifier using [`create_cid`].
11
-
//! 3. Sign the CID bytes externally and embed the attestation with
12
-
//! [`create_inline_attestation_reference`].
13
-
//! 4. Verify signatures with [`verify_signature`] or [`verify_all_signatures`].
11
+
//! ```no_run
12
+
//! use atproto_attestation::{create_inline_attestation, AnyInput};
13
+
//! use atproto_identity::key::{generate_key, KeyType};
14
+
//! use serde_json::json;
15
+
//!
16
+
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
17
+
//! let key = generate_key(KeyType::P256Private)?;
18
+
//! let record = json!({"$type": "app.example.post", "text": "Hello!"});
19
+
//! let metadata = json!({"$type": "com.example.sig", "key": "did:key:..."});
20
+
//!
21
+
//! let signed = create_inline_attestation(
22
+
//! AnyInput::Serialize(record),
23
+
//! AnyInput::Serialize(metadata),
24
+
//! "did:plc:repository",
25
+
//! &key
26
+
//! )?;
27
+
//! # Ok(())
28
+
//! # }
29
+
//! ```
30
+
//!
31
+
//! ## Remote Attestations
14
32
//!
15
-
//! Remote attestations follow the same `$sig` preparation process but store the
16
-
//! generated CID in a proof record and reference it with
17
-
//! [`create_remote_attestation_reference`].
33
+
//! Use `create_remote_attestation` to generate both the proof record and the
34
+
//! attested record with strongRef in a single call.
18
35
19
36
#![forbid(unsafe_code)]
20
37
#![warn(missing_docs)]
21
38
39
+
// Public modules
40
+
pub mod cid;
22
41
pub mod errors;
23
-
24
-
use atproto_record::tid::Tid;
25
-
pub use errors::AttestationError;
26
-
27
-
use atproto_identity::key::{KeyData, KeyResolver, KeyType, identify_key, sign, validate};
28
-
use base64::{
29
-
Engine,
30
-
alphabet::STANDARD as STANDARD_ALPHABET,
31
-
engine::{
32
-
DecodePaddingMode,
33
-
general_purpose::{GeneralPurpose, GeneralPurposeConfig},
34
-
},
35
-
};
36
-
use cid::Cid;
37
-
use elliptic_curve::scalar::IsHigh;
38
-
use k256::ecdsa::Signature as K256Signature;
39
-
use multihash::Multihash;
40
-
use p256::ecdsa::Signature as P256Signature;
41
-
use serde_json::{Map, Value, json};
42
-
use sha2::{Digest, Sha256};
43
-
44
-
// Base64 engine that accepts both padded and unpadded input for maximum compatibility
45
-
// with various AT Protocol implementations. Uses standard encoding with padding for output,
46
-
// but accepts any padding format for decoding.
47
-
const BASE64: GeneralPurpose = GeneralPurpose::new(
48
-
&STANDARD_ALPHABET,
49
-
GeneralPurposeConfig::new()
50
-
.with_encode_padding(true)
51
-
.with_decode_padding_mode(DecodePaddingMode::Indifferent),
52
-
);
53
-
54
-
const STRONG_REF_TYPE: &str = "com.atproto.repo.strongRef";
55
-
56
-
/// Resolver trait for retrieving remote attestation records by AT URI.
57
-
///
58
-
/// Kind of attestation represented within the `signatures` array.
59
-
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
60
-
pub enum AttestationKind {
61
-
/// Inline attestation containing signature bytes.
62
-
Inline,
63
-
/// Remote attestation referencing a proof record via strongRef.
64
-
Remote,
65
-
}
66
-
67
-
/// Result of verifying a single attestation entry.
68
-
#[derive(Debug)]
69
-
pub enum VerificationStatus {
70
-
/// Signature is valid for the reconstructed signing payload.
71
-
Valid {
72
-
/// CID produced for the reconstructed record.
73
-
cid: Cid,
74
-
},
75
-
/// Signature verification or metadata validation failed.
76
-
Invalid {
77
-
/// Failure reason.
78
-
error: AttestationError,
79
-
},
80
-
/// Attestation cannot be verified locally (e.g., remote references).
81
-
Unverified {
82
-
/// Explanation for why verification was skipped.
83
-
reason: String,
84
-
},
85
-
}
86
-
87
-
/// Structured verification report for a single attestation entry.
88
-
#[derive(Debug)]
89
-
pub struct VerificationReport {
90
-
/// Zero-based index of the signature in the record's `signatures` array.
91
-
pub index: usize,
92
-
/// Detected attestation kind.
93
-
pub kind: AttestationKind,
94
-
/// `$type` discriminator from the attestation entry, if present.
95
-
pub signature_type: Option<String>,
96
-
/// Key reference for inline signatures (if available).
97
-
pub key: Option<String>,
98
-
/// Verification outcome.
99
-
pub status: VerificationStatus,
100
-
}
101
-
102
-
/// Create a deterministic CID for a record prepared with [`prepare_signing_record`].
103
-
///
104
-
/// The record **must** contain a `$sig` object with at least a `$type` string
105
-
/// to scope the signature. The returned CID uses the blessed parameters:
106
-
/// CIDv1, dag-cbor codec (0x71), and sha2-256 multihash.
107
-
pub fn create_cid(record: &Value) -> Result<Cid, AttestationError> {
108
-
let record_object = record
109
-
.as_object()
110
-
.ok_or(AttestationError::RecordMustBeObject)?;
111
-
112
-
let sig_value = record_object
113
-
.get("$sig")
114
-
.ok_or(AttestationError::SigMetadataMissing)?;
115
-
116
-
let sig_object = sig_value
117
-
.as_object()
118
-
.ok_or(AttestationError::SigMetadataNotObject)?;
119
-
120
-
if !sig_object
121
-
.get("$type")
122
-
.and_then(Value::as_str)
123
-
.filter(|value| !value.is_empty())
124
-
.is_some()
125
-
{
126
-
return Err(AttestationError::SigMetadataMissingType);
127
-
}
128
-
129
-
let dag_cbor_bytes = serde_ipld_dagcbor::to_vec(record)?;
130
-
let digest = Sha256::digest(&dag_cbor_bytes);
131
-
let multihash = Multihash::wrap(0x12, &digest)
132
-
.map_err(|error| AttestationError::MultihashWrapFailed { error })?;
133
-
134
-
Ok(Cid::new_v1(0x71, multihash))
135
-
}
136
-
137
-
fn create_plain_cid(record: &Value) -> Result<Cid, AttestationError> {
138
-
let dag_cbor_bytes = serde_ipld_dagcbor::to_vec(record)?;
139
-
let digest = Sha256::digest(&dag_cbor_bytes);
140
-
let multihash = Multihash::wrap(0x12, &digest)
141
-
.map_err(|error| AttestationError::MultihashWrapFailed { error })?;
142
-
143
-
Ok(Cid::new_v1(0x71, multihash))
144
-
}
145
-
146
-
/// Prepare a record for signing by removing attestation artifacts and adding `$sig`.
147
-
///
148
-
/// - Removes any existing `signatures`, `sigs`, and `$sig` fields.
149
-
/// - Inserts the provided `attestation` metadata as the new `$sig` object.
150
-
/// - Ensures the metadata contains a string `$type` discriminator.
151
-
pub fn prepare_signing_record(
152
-
record: &Value,
153
-
attestation: &Value,
154
-
) -> Result<Value, AttestationError> {
155
-
let mut prepared = record
156
-
.as_object()
157
-
.cloned()
158
-
.ok_or(AttestationError::RecordMustBeObject)?;
159
-
160
-
let mut sig_metadata = attestation
161
-
.as_object()
162
-
.cloned()
163
-
.ok_or(AttestationError::MetadataMustBeObject)?;
164
-
165
-
if !sig_metadata
166
-
.get("$type")
167
-
.and_then(Value::as_str)
168
-
.filter(|value| !value.is_empty())
169
-
.is_some()
170
-
{
171
-
return Err(AttestationError::MetadataMissingSigType);
172
-
}
173
-
174
-
sig_metadata.remove("signature");
175
-
sig_metadata.remove("cid");
176
-
177
-
prepared.remove("signatures");
178
-
prepared.remove("sigs");
179
-
prepared.remove("$sig");
180
-
prepared.insert("$sig".to_string(), Value::Object(sig_metadata));
181
-
182
-
Ok(Value::Object(prepared))
183
-
}
184
-
185
-
/// Creates an inline attestation by signing the prepared record with the provided key.
186
-
pub fn create_inline_attestation(
187
-
record: &Value,
188
-
attestation_metadata: &Value,
189
-
signing_key: &KeyData,
190
-
) -> Result<Value, AttestationError> {
191
-
let signing_record = prepare_signing_record(record, attestation_metadata)?;
192
-
let cid = create_cid(&signing_record)?;
193
-
194
-
let raw_signature = sign(signing_key, &cid.to_bytes())
195
-
.map_err(|error| AttestationError::SignatureCreationFailed { error })?;
196
-
let signature_bytes = normalize_signature(raw_signature, signing_key.key_type())?;
197
-
198
-
let mut inline_object = attestation_metadata
199
-
.as_object()
200
-
.cloned()
201
-
.ok_or(AttestationError::MetadataMustBeObject)?;
202
-
203
-
inline_object.remove("signature");
204
-
inline_object.remove("cid");
205
-
inline_object.insert(
206
-
"signature".to_string(),
207
-
json!({"$bytes": BASE64.encode(signature_bytes)}),
208
-
);
209
-
210
-
create_inline_attestation_reference(record, &Value::Object(inline_object))
211
-
}
212
-
213
-
/// Creates a remote attestation by generating a proof record and strongRef entry.
214
-
///
215
-
/// Returns a tuple containing:
216
-
/// - Remote proof record containing the CID for storage in a repository.
217
-
pub fn create_remote_attestation(
218
-
record: &Value,
219
-
attestation_metadata: &Value,
220
-
) -> Result<Value, AttestationError> {
221
-
let metadata = attestation_metadata
222
-
.as_object()
223
-
.cloned()
224
-
.ok_or(AttestationError::MetadataMustBeObject)?;
225
-
226
-
let metadata_value = Value::Object(metadata.clone());
227
-
let signing_record = prepare_signing_record(record, &metadata_value)?;
228
-
let cid = create_cid(&signing_record)?;
229
-
230
-
let mut remote_attestation = metadata.clone();
231
-
remote_attestation.insert("cid".to_string(), Value::String(cid.to_string()));
232
-
233
-
Ok(Value::Object(remote_attestation))
234
-
}
235
-
236
-
/// Normalize raw signature bytes to the required low-S form.
237
-
///
238
-
/// This helper ensures signatures produced by signing APIs comply with the
239
-
/// specification requirements before embedding them in attestation objects.
240
-
pub fn normalize_signature(
241
-
signature: Vec<u8>,
242
-
key_type: &KeyType,
243
-
) -> Result<Vec<u8>, AttestationError> {
244
-
match key_type {
245
-
KeyType::P256Private | KeyType::P256Public => normalize_p256(signature),
246
-
KeyType::K256Private | KeyType::K256Public => normalize_k256(signature),
247
-
other => Err(AttestationError::UnsupportedKeyType {
248
-
key_type: other.clone(),
249
-
}),
250
-
}
251
-
}
42
+
pub mod input;
252
43
253
-
/// Attach a remote attestation entry (strongRef) to the record.
254
-
///
255
-
/// The `attestation` value must be an object containing:
256
-
/// - `$type`: `"com.atproto.repo.strongRef"`
257
-
/// - `cid`: base32 CID string referencing the remote proof record
258
-
/// - Optional `uri`: AT URI for the remote record
259
-
pub fn create_remote_attestation_reference(
260
-
record: &Value,
261
-
attestation: &Value,
262
-
did: &str,
263
-
) -> Result<Value, AttestationError> {
264
-
let mut result = record
265
-
.as_object()
266
-
.cloned()
267
-
.ok_or(AttestationError::RecordMustBeObject)?;
44
+
// Internal modules
45
+
mod attestation;
46
+
mod signature;
47
+
mod utils;
48
+
mod verification;
268
49
269
-
let attestation = attestation
270
-
.as_object()
271
-
.cloned()
272
-
.ok_or(AttestationError::MetadataMustBeObject)?;
50
+
// Re-export error type
51
+
pub use errors::AttestationError;
273
52
274
-
let remote_object_type = attestation
275
-
.get("$type")
276
-
.and_then(Value::as_str)
277
-
.filter(|value| !value.is_empty())
278
-
.ok_or(AttestationError::RemoteAttestationMissingCid)?;
53
+
// Re-export CID generation functions
54
+
pub use cid::create_dagbor_cid;
279
55
280
-
let tid = Tid::new();
56
+
// Re-export signature normalization
57
+
pub use signature::normalize_signature;
281
58
282
-
let attestion_cid = create_plain_cid(&serde_json::Value::Object(attestation.clone()))?;
59
+
// Re-export attestation functions
60
+
pub use attestation::{
61
+
append_inline_attestation, append_remote_attestation, create_inline_attestation,
62
+
create_remote_attestation, create_signature,
63
+
};
283
64
284
-
let remote_object = json!({
285
-
"$type": STRONG_REF_TYPE,
286
-
"uri": format!("at://{did}/{remote_object_type}/{tid}"),
287
-
"cid": attestion_cid.to_string()
288
-
});
65
+
// Re-export input types
66
+
pub use input::{AnyInput, AnyInputError};
289
67
290
-
let mut signatures = extract_signatures_vec(&mut result)?;
291
-
signatures.push(remote_object);
292
-
result.insert("signatures".to_string(), Value::Array(signatures));
68
+
// Re-export verification functions
69
+
pub use verification::verify_record;
293
70
294
-
Ok(Value::Object(result))
295
-
}
296
-
297
-
/// Attach an inline attestation entry containing signature bytes.
71
+
/// Resolver trait for retrieving remote attestation records by AT URI.
298
72
///
299
-
/// The `attestation` value must be an object containing:
300
-
/// - `$type`: union discriminator (must NOT be `com.atproto.repo.strongRef`)
301
-
/// - `key`: verification method reference used to sign
302
-
/// - `signature`: object with `$bytes` base64 signature
303
-
/// Additional custom fields are preserved for `$sig` metadata.
304
-
pub fn create_inline_attestation_reference(
305
-
record: &Value,
306
-
attestation: &Value,
307
-
) -> Result<Value, AttestationError> {
308
-
let mut result = record
309
-
.as_object()
310
-
.cloned()
311
-
.ok_or(AttestationError::RecordMustBeObject)?;
312
-
313
-
let inline_object = attestation
314
-
.as_object()
315
-
.cloned()
316
-
.ok_or(AttestationError::MetadataMustBeObject)?;
317
-
318
-
let signature_type = inline_object
319
-
.get("$type")
320
-
.and_then(Value::as_str)
321
-
.ok_or_else(|| AttestationError::MetadataMissingField {
322
-
field: "$type".to_string(),
323
-
})?;
324
-
325
-
if signature_type == STRONG_REF_TYPE {
326
-
return Err(AttestationError::InlineAttestationTypeInvalid);
327
-
}
328
-
329
-
inline_object
330
-
.get("key")
331
-
.and_then(Value::as_str)
332
-
.filter(|value| !value.is_empty())
333
-
.ok_or_else(|| AttestationError::SignatureMissingField {
334
-
field: "key".to_string(),
335
-
})?;
336
-
337
-
let signature_bytes = inline_object
338
-
.get("signature")
339
-
.and_then(Value::as_object)
340
-
.and_then(|object| object.get("$bytes"))
341
-
.and_then(Value::as_str)
342
-
.filter(|value| !value.is_empty())
343
-
.ok_or(AttestationError::SignatureBytesFormatInvalid)?;
344
-
345
-
// Ensure the signature bytes decode cleanly to catch malformed input early.
346
-
let _ = BASE64
347
-
.decode(signature_bytes)
348
-
.map_err(|error| AttestationError::SignatureDecodingFailed { error })?;
349
-
350
-
let mut signatures = extract_signatures_vec(&mut result)?;
351
-
signatures.push(Value::Object(inline_object));
352
-
result.insert("signatures".to_string(), Value::Array(signatures));
353
-
result.remove("$sig");
354
-
355
-
Ok(Value::Object(result))
356
-
}
357
-
358
-
/// Verify a single attestation entry at the specified index without a record resolver.
359
-
///
360
-
/// Inline signatures are reconstructed into `$sig` metadata, a CID is generated,
361
-
/// and the signature bytes are validated against the resolved public key.
362
-
/// Remote attestations will be reported as unverified.
363
-
///
364
-
/// This is a convenience function for the common case where no record resolver is needed.
365
-
/// For verifying remote attestations, use [`verify_signature_with_resolver`].
366
-
pub async fn verify_signature(
367
-
record: &Value,
368
-
index: usize,
369
-
key_resolver: Option<&dyn KeyResolver>,
370
-
) -> Result<VerificationReport, AttestationError> {
371
-
verify_signature_with_resolver::<atproto_client::record_resolver::HttpRecordResolver>(
372
-
record,
373
-
index,
374
-
key_resolver,
375
-
None,
376
-
)
377
-
.await
378
-
}
379
-
380
-
/// Verify a single attestation entry at the specified index with optional record resolver.
381
-
///
382
-
/// Inline signatures are reconstructed into `$sig` metadata, a CID is generated,
383
-
/// and the signature bytes are validated against the resolved public key.
384
-
/// Remote attestations can be verified if a `record_resolver` is provided to fetch
385
-
/// the proof record via AT-URI. Without a record resolver, remote attestations are
386
-
/// reported as unverified.
387
-
pub async fn verify_signature_with_resolver<R>(
388
-
record: &Value,
389
-
index: usize,
390
-
key_resolver: Option<&dyn KeyResolver>,
391
-
record_resolver: Option<&R>,
392
-
) -> Result<VerificationReport, AttestationError>
393
-
where
394
-
R: atproto_client::record_resolver::RecordResolver,
395
-
{
396
-
let signatures_array = extract_signatures_array(record)?;
397
-
let signature_entry = signatures_array
398
-
.get(index)
399
-
.ok_or(AttestationError::SignatureIndexOutOfBounds { index })?;
400
-
401
-
let signature_map =
402
-
signature_entry
403
-
.as_object()
404
-
.ok_or_else(|| AttestationError::SignatureMissingField {
405
-
field: "object".to_string(),
406
-
})?;
407
-
408
-
let signature_type = signature_map
409
-
.get("$type")
410
-
.and_then(Value::as_str)
411
-
.map(ToOwned::to_owned);
412
-
413
-
let report_kind = match signature_type.as_deref() {
414
-
Some(STRONG_REF_TYPE) => AttestationKind::Remote,
415
-
_ => AttestationKind::Inline,
416
-
};
417
-
418
-
let key_reference = signature_map
419
-
.get("key")
420
-
.and_then(Value::as_str)
421
-
.map(ToOwned::to_owned);
422
-
423
-
let status = match report_kind {
424
-
AttestationKind::Remote => {
425
-
match record_resolver {
426
-
Some(resolver) => {
427
-
match verify_remote_attestation(record, signature_map, resolver).await {
428
-
Ok(cid) => VerificationStatus::Valid { cid },
429
-
Err(error) => VerificationStatus::Invalid { error },
430
-
}
431
-
}
432
-
None => VerificationStatus::Unverified {
433
-
reason: "Remote attestations require a record resolver to fetch the proof record via strongRef.".to_string(),
434
-
},
435
-
}
436
-
}
437
-
AttestationKind::Inline => {
438
-
match verify_inline_attestation(record, signature_map, key_resolver).await {
439
-
Ok(cid) => VerificationStatus::Valid { cid },
440
-
Err(error) => VerificationStatus::Invalid { error },
441
-
}
442
-
}
443
-
};
444
-
445
-
Ok(VerificationReport {
446
-
index,
447
-
kind: report_kind,
448
-
signature_type,
449
-
key: key_reference,
450
-
status,
451
-
})
452
-
}
453
-
454
-
/// Verify all attestation entries attached to the record without a record resolver.
455
-
///
456
-
/// Returns a report per signature. Structural issues with the record (for
457
-
/// example, a missing `signatures` array) are returned as an error.
458
-
///
459
-
/// Remote attestations will be reported as unverified. For verifying remote
460
-
/// attestations, use [`verify_all_signatures_with_resolver`].
461
-
pub async fn verify_all_signatures(
462
-
record: &Value,
463
-
key_resolver: Option<&dyn KeyResolver>,
464
-
) -> Result<Vec<VerificationReport>, AttestationError> {
465
-
verify_all_signatures_with_resolver::<atproto_client::record_resolver::HttpRecordResolver>(
466
-
record,
467
-
key_resolver,
468
-
None,
469
-
)
470
-
.await
471
-
}
472
-
473
-
/// Verify all attestation entries attached to the record with optional record resolver.
474
-
///
475
-
/// Returns a report per signature. Structural issues with the record (for
476
-
/// example, a missing `signatures` array) are returned as an error.
477
-
///
478
-
/// If a `record_resolver` is provided, remote attestations will be fetched and verified.
479
-
/// Otherwise, remote attestations will be reported as unverified.
480
-
pub async fn verify_all_signatures_with_resolver<R>(
481
-
record: &Value,
482
-
key_resolver: Option<&dyn KeyResolver>,
483
-
record_resolver: Option<&R>,
484
-
) -> Result<Vec<VerificationReport>, AttestationError>
485
-
where
486
-
R: atproto_client::record_resolver::RecordResolver,
487
-
{
488
-
let signatures_array = extract_signatures_array(record)?;
489
-
let mut reports = Vec::with_capacity(signatures_array.len());
490
-
491
-
for index in 0..signatures_array.len() {
492
-
reports.push(
493
-
verify_signature_with_resolver(record, index, key_resolver, record_resolver).await?,
494
-
);
495
-
}
496
-
497
-
Ok(reports)
498
-
}
499
-
500
-
async fn verify_remote_attestation<R>(
501
-
record: &Value,
502
-
signature_object: &Map<String, Value>,
503
-
record_resolver: &R,
504
-
) -> Result<Cid, AttestationError>
505
-
where
506
-
R: atproto_client::record_resolver::RecordResolver,
507
-
{
508
-
// Extract the strongRef URI and CID
509
-
let uri = signature_object
510
-
.get("uri")
511
-
.and_then(Value::as_str)
512
-
.ok_or_else(|| AttestationError::SignatureMissingField {
513
-
field: "uri".to_string(),
514
-
})?;
515
-
516
-
let expected_cid_str = signature_object
517
-
.get("cid")
518
-
.and_then(Value::as_str)
519
-
.ok_or_else(|| AttestationError::SignatureMissingField {
520
-
field: "cid".to_string(),
521
-
})?;
522
-
523
-
// Fetch the proof record from the URI
524
-
let proof_record: Value = record_resolver.resolve(uri).await.map_err(|error| {
525
-
AttestationError::RemoteAttestationFetchFailed {
526
-
uri: uri.to_string(),
527
-
error,
528
-
}
529
-
})?;
530
-
531
-
// Verify the proof record CID matches
532
-
let proof_cid = create_plain_cid(&proof_record)?;
533
-
if proof_cid.to_string() != expected_cid_str {
534
-
return Err(AttestationError::RemoteAttestationCidMismatch {
535
-
expected: expected_cid_str.to_string(),
536
-
actual: proof_cid.to_string(),
537
-
});
538
-
}
539
-
540
-
// Extract the CID from the proof record
541
-
let attestation_cid_str = proof_record
542
-
.get("cid")
543
-
.and_then(Value::as_str)
544
-
.ok_or_else(|| AttestationError::SignatureMissingField {
545
-
field: "cid".to_string(),
546
-
})?;
547
-
548
-
// Parse the attestation CID
549
-
let attestation_cid =
550
-
attestation_cid_str
551
-
.parse::<Cid>()
552
-
.map_err(|_| AttestationError::InvalidCid {
553
-
cid: attestation_cid_str.to_string(),
554
-
})?;
555
-
556
-
// Prepare the signing record using the proof record as metadata (without the CID field)
557
-
let mut proof_metadata = proof_record
558
-
.as_object()
559
-
.cloned()
560
-
.ok_or(AttestationError::RecordMustBeObject)?;
561
-
proof_metadata.remove("cid");
562
-
563
-
let signing_record = prepare_signing_record(record, &Value::Object(proof_metadata))?;
564
-
let computed_cid = create_cid(&signing_record)?;
565
-
566
-
// Verify the CID matches
567
-
if computed_cid != attestation_cid {
568
-
return Err(AttestationError::RemoteAttestationCidMismatch {
569
-
expected: attestation_cid.to_string(),
570
-
actual: computed_cid.to_string(),
571
-
});
572
-
}
573
-
574
-
Ok(computed_cid)
575
-
}
576
-
577
-
async fn verify_inline_attestation(
578
-
record: &Value,
579
-
signature_object: &Map<String, Value>,
580
-
key_resolver: Option<&dyn KeyResolver>,
581
-
) -> Result<Cid, AttestationError> {
582
-
let key_reference = signature_object
583
-
.get("key")
584
-
.and_then(Value::as_str)
585
-
.ok_or_else(|| AttestationError::SignatureMissingField {
586
-
field: "key".to_string(),
587
-
})?;
588
-
589
-
let key_data = resolve_key_reference(key_reference, key_resolver).await?;
590
-
591
-
let signature_bytes = signature_object
592
-
.get("signature")
593
-
.and_then(Value::as_object)
594
-
.and_then(|object| object.get("$bytes"))
595
-
.and_then(Value::as_str)
596
-
.ok_or(AttestationError::SignatureBytesFormatInvalid)?;
597
-
598
-
let signature_bytes = BASE64
599
-
.decode(signature_bytes)
600
-
.map_err(|error| AttestationError::SignatureDecodingFailed { error })?;
601
-
602
-
ensure_normalized_signature(&key_data, &signature_bytes)?;
603
-
604
-
let mut sig_metadata = signature_object.clone();
605
-
sig_metadata.remove("signature");
606
-
607
-
let signing_record = prepare_signing_record(record, &Value::Object(sig_metadata))?;
608
-
let cid = create_cid(&signing_record)?;
609
-
let cid_bytes = cid.to_bytes();
610
-
611
-
validate(&key_data, &signature_bytes, &cid_bytes)
612
-
.map_err(|error| AttestationError::SignatureValidationFailed { error })?;
613
-
614
-
Ok(cid)
615
-
}
616
-
617
-
async fn resolve_key_reference(
618
-
key_reference: &str,
619
-
key_resolver: Option<&dyn KeyResolver>,
620
-
) -> Result<KeyData, AttestationError> {
621
-
if let Some(base) = key_reference.split('#').next() {
622
-
if let Ok(key_data) = identify_key(base) {
623
-
return Ok(key_data);
624
-
}
625
-
}
626
-
627
-
if let Ok(key_data) = identify_key(key_reference) {
628
-
return Ok(key_data);
629
-
}
630
-
631
-
let resolver = key_resolver.ok_or_else(|| AttestationError::KeyResolverRequired {
632
-
key: key_reference.to_string(),
633
-
})?;
634
-
635
-
resolver
636
-
.resolve(key_reference)
637
-
.await
638
-
.map_err(|error| AttestationError::KeyResolutionFailed {
639
-
key: key_reference.to_string(),
640
-
error,
641
-
})
642
-
}
643
-
644
-
fn normalize_p256(signature: Vec<u8>) -> Result<Vec<u8>, AttestationError> {
645
-
if signature.len() != 64 {
646
-
return Err(AttestationError::SignatureLengthInvalid {
647
-
expected: 64,
648
-
actual: signature.len(),
649
-
});
650
-
}
651
-
652
-
let parsed = P256Signature::from_slice(&signature).map_err(|_| {
653
-
AttestationError::SignatureLengthInvalid {
654
-
expected: 64,
655
-
actual: signature.len(),
656
-
}
657
-
})?;
658
-
659
-
let normalized = parsed.normalize_s().unwrap_or(parsed);
660
-
661
-
Ok(normalized.to_vec())
662
-
}
663
-
664
-
fn normalize_k256(signature: Vec<u8>) -> Result<Vec<u8>, AttestationError> {
665
-
if signature.len() != 64 {
666
-
return Err(AttestationError::SignatureLengthInvalid {
667
-
expected: 64,
668
-
actual: signature.len(),
669
-
});
670
-
}
671
-
672
-
let parsed = K256Signature::from_slice(&signature).map_err(|_| {
673
-
AttestationError::SignatureLengthInvalid {
674
-
expected: 64,
675
-
actual: signature.len(),
676
-
}
677
-
})?;
678
-
679
-
let normalized = parsed.normalize_s().unwrap_or(parsed);
680
-
681
-
Ok(normalized.to_vec())
682
-
}
683
-
684
-
fn ensure_normalized_signature(
685
-
key_data: &KeyData,
686
-
signature: &[u8],
687
-
) -> Result<(), AttestationError> {
688
-
match key_data.key_type() {
689
-
KeyType::P256Private | KeyType::P256Public => {
690
-
if signature.len() != 64 {
691
-
return Err(AttestationError::SignatureLengthInvalid {
692
-
expected: 64,
693
-
actual: signature.len(),
694
-
});
695
-
}
696
-
697
-
let parsed = P256Signature::from_slice(signature).map_err(|_| {
698
-
AttestationError::SignatureLengthInvalid {
699
-
expected: 64,
700
-
actual: signature.len(),
701
-
}
702
-
})?;
703
-
704
-
if bool::from(parsed.s().is_high()) {
705
-
return Err(AttestationError::SignatureNotNormalized);
706
-
}
707
-
}
708
-
KeyType::K256Private | KeyType::K256Public => {
709
-
if signature.len() != 64 {
710
-
return Err(AttestationError::SignatureLengthInvalid {
711
-
expected: 64,
712
-
actual: signature.len(),
713
-
});
714
-
}
715
-
716
-
let parsed = K256Signature::from_slice(signature).map_err(|_| {
717
-
AttestationError::SignatureLengthInvalid {
718
-
expected: 64,
719
-
actual: signature.len(),
720
-
}
721
-
})?;
722
-
723
-
if bool::from(parsed.s().is_high()) {
724
-
return Err(AttestationError::SignatureNotNormalized);
725
-
}
726
-
}
727
-
other => {
728
-
return Err(AttestationError::UnsupportedKeyType {
729
-
key_type: other.clone(),
730
-
});
731
-
}
732
-
}
733
-
734
-
Ok(())
735
-
}
736
-
737
-
fn extract_signatures_array(record: &Value) -> Result<&Vec<Value>, AttestationError> {
738
-
let signatures = record.get("signatures");
739
-
740
-
match signatures {
741
-
Some(value) => value
742
-
.as_array()
743
-
.ok_or(AttestationError::SignaturesFieldInvalid),
744
-
None => Err(AttestationError::SignaturesArrayMissing),
745
-
}
746
-
}
747
-
748
-
fn extract_signatures_vec(record: &mut Map<String, Value>) -> Result<Vec<Value>, AttestationError> {
749
-
let existing = record.remove("signatures");
750
-
751
-
match existing {
752
-
Some(Value::Array(array)) => Ok(array),
753
-
Some(_) => Err(AttestationError::SignaturesFieldInvalid),
754
-
None => Ok(Vec::new()),
755
-
}
756
-
}
757
-
758
-
#[cfg(test)]
759
-
mod tests {
760
-
use super::*;
761
-
use atproto_identity::key::{IdentityDocumentKeyResolver, KeyType, generate_key, to_public};
762
-
use atproto_identity::model::{Document, DocumentBuilder, VerificationMethod};
763
-
use atproto_identity::resolve::IdentityResolver;
764
-
use serde_json::json;
765
-
use std::sync::Arc;
766
-
767
-
struct StaticResolver {
768
-
document: Document,
769
-
}
770
-
771
-
#[async_trait::async_trait]
772
-
impl IdentityResolver for StaticResolver {
773
-
async fn resolve(&self, _subject: &str) -> anyhow::Result<Document> {
774
-
Ok(self.document.clone())
775
-
}
776
-
}
777
-
778
-
#[test]
779
-
fn prepare_signing_record_removes_signatures() -> Result<(), AttestationError> {
780
-
let record = json!({
781
-
"$type": "app.bsky.feed.post",
782
-
"text": "hello",
783
-
"signatures": [
784
-
{"$type": "example.sig", "signature": {"$bytes": "dGVzdA=="}, "key": "did:key:zabc"}
785
-
]
786
-
});
787
-
788
-
let metadata = json!({
789
-
"$type": "com.example.inlineSignature",
790
-
"key": "did:key:zabc",
791
-
"purpose": "demo",
792
-
"signature": {"$bytes": "trim"},
793
-
"cid": "bafyignored"
794
-
});
795
-
796
-
let prepared = prepare_signing_record(&record, &metadata)?;
797
-
let object = prepared.as_object().unwrap();
798
-
assert!(object.get("signatures").is_none());
799
-
assert!(object.get("sigs").is_none());
800
-
assert!(object.get("$sig").is_some());
801
-
802
-
let sig_object = object.get("$sig").unwrap().as_object().unwrap();
803
-
assert_eq!(
804
-
sig_object.get("$type").and_then(Value::as_str),
805
-
Some("com.example.inlineSignature")
806
-
);
807
-
assert_eq!(
808
-
sig_object.get("purpose").and_then(Value::as_str),
809
-
Some("demo")
810
-
);
811
-
assert!(sig_object.get("signature").is_none());
812
-
assert!(sig_object.get("cid").is_none());
813
-
814
-
Ok(())
815
-
}
816
-
817
-
#[test]
818
-
fn create_cid_produces_expected_codec_and_length() -> Result<(), AttestationError> {
819
-
let prepared = json!({
820
-
"$type": "app.example.record",
821
-
"text": "cid demo",
822
-
"$sig": {
823
-
"$type": "com.example.inlineSignature",
824
-
"key": "did:key:zabc"
825
-
}
826
-
});
827
-
828
-
let cid = create_cid(&prepared)?;
829
-
assert_eq!(cid.codec(), 0x71);
830
-
assert_eq!(cid.hash().code(), 0x12);
831
-
assert_eq!(cid.hash().digest().len(), 32);
832
-
assert_eq!(cid.to_bytes().len(), 36);
833
-
834
-
Ok(())
835
-
}
836
-
837
-
#[test]
838
-
fn create_inline_attestation_appends_signature() -> Result<(), AttestationError> {
839
-
let record = json!({
840
-
"$type": "app.example.record",
841
-
"body": "Important content"
842
-
});
843
-
844
-
let inline = json!({
845
-
"$type": "com.example.inlineSignature",
846
-
"key": "did:key:zabc",
847
-
"signature": {"$bytes": "ZHVtbXk="}
848
-
});
849
-
850
-
let updated = create_inline_attestation_reference(&record, &inline)?;
851
-
let signatures = updated
852
-
.get("signatures")
853
-
.and_then(Value::as_array)
854
-
.expect("signatures array should exist");
855
-
assert_eq!(signatures.len(), 1);
856
-
assert_eq!(
857
-
signatures[0].get("$type").and_then(Value::as_str),
858
-
Some("com.example.inlineSignature")
859
-
);
860
-
861
-
Ok(())
862
-
}
863
-
864
-
#[test]
865
-
fn create_remote_attestation_produces_reference_and_proof()
866
-
-> Result<(), Box<dyn std::error::Error>> {
867
-
let record = json!({
868
-
"$type": "app.example.record",
869
-
"body": "remote attestation"
870
-
});
871
-
872
-
let metadata = json!({
873
-
"$type": "com.example.inlineSignature"
874
-
});
875
-
876
-
let proof_record = create_remote_attestation(&record, &metadata)?;
877
-
878
-
let proof_object = proof_record
879
-
.as_object()
880
-
.expect("reference should be an object");
881
-
assert_eq!(
882
-
proof_object.get("$type").and_then(Value::as_str),
883
-
Some("com.example.inlineSignature")
884
-
);
885
-
assert!(
886
-
proof_object.get("cid").and_then(Value::as_str).is_some(),
887
-
"proof must contain a cid"
888
-
);
889
-
890
-
Ok(())
891
-
}
892
-
893
-
#[tokio::test]
894
-
async fn verify_inline_signature_with_did_key() -> Result<(), Box<dyn std::error::Error>> {
895
-
let private_key = generate_key(KeyType::K256Private)?;
896
-
let public_key = to_public(&private_key)?;
897
-
let key_reference = format!("{}", &public_key);
898
-
899
-
let base_record = json!({
900
-
"$type": "app.example.record",
901
-
"body": "Sign me"
902
-
});
903
-
904
-
let sig_metadata = json!({
905
-
"$type": "com.example.inlineSignature",
906
-
"key": key_reference,
907
-
"purpose": "unit-test"
908
-
});
909
-
910
-
let signed = create_inline_attestation(&base_record, &sig_metadata, &private_key)?;
911
-
912
-
let report = verify_signature(&signed, 0, None).await?;
913
-
match report.status {
914
-
VerificationStatus::Valid { .. } => {}
915
-
other => panic!("expected valid signature, got {:?}", other),
916
-
}
917
-
918
-
Ok(())
919
-
}
920
-
921
-
#[tokio::test]
922
-
async fn verify_inline_signature_with_resolver() -> Result<(), Box<dyn std::error::Error>> {
923
-
let private_key = generate_key(KeyType::P256Private)?;
924
-
let public_key = to_public(&private_key)?;
925
-
let key_multibase = format!("{}", &public_key);
926
-
let key_reference = "did:plc:resolvertest#atproto".to_string();
927
-
928
-
let document = DocumentBuilder::new()
929
-
.id("did:plc:resolvertest")
930
-
.add_verification_method(VerificationMethod::Multikey {
931
-
id: key_reference.clone(),
932
-
controller: "did:plc:resolvertest".to_string(),
933
-
public_key_multibase: key_multibase
934
-
.strip_prefix("did:key:")
935
-
.unwrap_or(&key_multibase)
936
-
.to_string(),
937
-
extra: std::collections::HashMap::new(),
938
-
})
939
-
.build()
940
-
.unwrap();
941
-
942
-
let identity_resolver = Arc::new(StaticResolver { document });
943
-
let key_resolver = IdentityDocumentKeyResolver::new(identity_resolver.clone());
944
-
945
-
let base_record = json!({
946
-
"$type": "app.example.record",
947
-
"body": "resolver test"
948
-
});
949
-
950
-
let sig_metadata = json!({
951
-
"$type": "com.example.inlineSignature",
952
-
"key": key_reference,
953
-
"scope": "resolver"
954
-
});
955
-
956
-
let signed = create_inline_attestation(&base_record, &sig_metadata, &private_key)?;
957
-
958
-
let report = verify_signature(&signed, 0, Some(&key_resolver)).await?;
959
-
match report.status {
960
-
VerificationStatus::Valid { .. } => {}
961
-
other => panic!("expected valid signature, got {:?}", other),
962
-
}
963
-
964
-
Ok(())
965
-
}
966
-
967
-
#[tokio::test]
968
-
async fn verify_all_signatures_reports_remote() -> Result<(), Box<dyn std::error::Error>> {
969
-
let record = json!({
970
-
"$type": "app.example.record",
971
-
"signatures": [
972
-
{
973
-
"$type": STRONG_REF_TYPE,
974
-
"cid": "bafyreid473y2gjzvzgjwdj3vpbk2bdzodf5hvbgxncjc62xmy3zsmb3pxq",
975
-
"uri": "at://did:plc:example/com.example.attestation/abc123"
976
-
}
977
-
]
978
-
});
979
-
980
-
let reports = verify_all_signatures(&record, None).await?;
981
-
assert_eq!(reports.len(), 1);
982
-
match &reports[0].status {
983
-
VerificationStatus::Unverified { reason } => {
984
-
assert!(reason.contains("Remote attestations"));
985
-
}
986
-
other => panic!("expected unverified status, got {:?}", other),
987
-
}
988
-
989
-
Ok(())
990
-
}
991
-
992
-
#[tokio::test]
993
-
async fn verify_detects_tampering() -> Result<(), Box<dyn std::error::Error>> {
994
-
let private_key = generate_key(KeyType::K256Private)?;
995
-
let public_key = to_public(&private_key)?;
996
-
let key_reference = format!("{}", &public_key);
997
-
998
-
let base_record = json!({
999
-
"$type": "app.example.record",
1000
-
"body": "original"
1001
-
});
1002
-
1003
-
let sig_metadata = json!({
1004
-
"$type": "com.example.inlineSignature",
1005
-
"key": key_reference
1006
-
});
1007
-
1008
-
let mut signed = create_inline_attestation(&base_record, &sig_metadata, &private_key)?;
1009
-
if let Some(object) = signed.as_object_mut() {
1010
-
object.insert("body".to_string(), json!("tampered"));
1011
-
}
1012
-
1013
-
let report = verify_signature(&signed, 0, None).await?;
1014
-
match report.status {
1015
-
VerificationStatus::Invalid { .. } => {}
1016
-
other => panic!("expected invalid signature, got {:?}", other),
1017
-
}
1018
-
1019
-
Ok(())
1020
-
}
1021
-
}
73
+
/// This trait is re-exported from atproto_client for convenience.
74
+
pub use atproto_client::record_resolver::RecordResolver;
+98 crates/atproto-attestation/src/signature.rs
···
//! ECDSA signature normalization.
//!
//! This module handles signature normalization to the low-S form required by
//! the AT Protocol attestation specification, preventing signature malleability attacks.

use crate::errors::AttestationError;
use atproto_identity::key::KeyType;
use k256::ecdsa::Signature as K256Signature;
use p256::ecdsa::Signature as P256Signature;

/// Normalize raw signature bytes to the required low-S form.
///
/// This helper ensures signatures produced by signing APIs comply with the
/// specification requirements before embedding them in attestation objects.
///
/// # Arguments
///
/// * `signature` - The raw signature bytes to normalize
/// * `key_type` - The type of key used to create the signature
///
/// # Returns
///
/// The normalized signature bytes in low-S form
///
/// # Errors
///
/// Returns an error if:
/// - The signature length is invalid for the key type
/// - The key type is not supported
pub fn normalize_signature(
    signature: Vec<u8>,
    key_type: &KeyType,
) -> Result<Vec<u8>, AttestationError> {
    match key_type {
        KeyType::P256Private | KeyType::P256Public => normalize_p256(signature),
        KeyType::K256Private | KeyType::K256Public => normalize_k256(signature),
        other => Err(AttestationError::UnsupportedKeyType {
            key_type: (*other).clone(),
        }),
    }
}

/// Normalize a P-256 signature to low-S form.
fn normalize_p256(signature: Vec<u8>) -> Result<Vec<u8>, AttestationError> {
    if signature.len() != 64 {
        return Err(AttestationError::SignatureLengthInvalid {
            expected: 64,
            actual: signature.len(),
        });
    }

    let parsed = P256Signature::from_slice(&signature).map_err(|_| {
        AttestationError::SignatureLengthInvalid {
            expected: 64,
            actual: signature.len(),
        }
    })?;

    let normalized = parsed.normalize_s().unwrap_or(parsed);

    Ok(normalized.to_vec())
}

/// Normalize a K-256 signature to low-S form.
fn normalize_k256(signature: Vec<u8>) -> Result<Vec<u8>, AttestationError> {
    if signature.len() != 64 {
        return Err(AttestationError::SignatureLengthInvalid {
            expected: 64,
            actual: signature.len(),
        });
    }

    let parsed = K256Signature::from_slice(&signature).map_err(|_| {
        AttestationError::SignatureLengthInvalid {
            expected: 64,
            actual: signature.len(),
        }
    })?;

    let normalized = parsed.normalize_s().unwrap_or(parsed);

    Ok(normalized.to_vec())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn reject_invalid_signature_length() {
        let short_signature = vec![0u8; 32];
        let result = normalize_p256(short_signature);
        assert!(matches!(
            result,
            Err(AttestationError::SignatureLengthInvalid { expected: 64, .. })
        ));
    }
}
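To ground the low-S rule the module enforces: for ECDSA, (r, s) and (r, n - s) are both valid signatures for the same message, so only the variant with s in the low half of the curve order is accepted. A minimal caller-side sketch, assuming the crate exposes the `signature` and `errors` modules publicly (module visibility is not shown in this diff):

```rust
use atproto_attestation::errors::AttestationError;
use atproto_attestation::signature::normalize_signature;
use atproto_identity::key::KeyType;

/// Bring a freshly produced 64-byte r||s signature into low-S form before it
/// is base64-encoded and embedded in an attestation object.
fn to_low_s(raw: Vec<u8>, key_type: &KeyType) -> Result<Vec<u8>, AttestationError> {
    let low_s = normalize_signature(raw, key_type)?;
    // Normalization never changes the length; only the s half may be rewritten.
    assert_eq!(low_s.len(), 64);
    Ok(low_s)
}
```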
+22 crates/atproto-attestation/src/utils.rs
···
//! Utility functions and constants for attestation operations.
//!
//! This module provides common utilities used throughout the attestation framework,
//! including base64 encoding/decoding with flexible padding support.

use base64::{
    alphabet::STANDARD as STANDARD_ALPHABET,
    engine::{
        DecodePaddingMode,
        general_purpose::{GeneralPurpose, GeneralPurposeConfig},
    },
};

/// Base64 engine that accepts both padded and unpadded input for maximum compatibility
/// with various AT Protocol implementations. Uses standard encoding with padding for output,
/// but accepts any padding format for decoding.
pub(crate) const BASE64: GeneralPurpose = GeneralPurpose::new(
    &STANDARD_ALPHABET,
    GeneralPurposeConfig::new()
        .with_encode_padding(true)
        .with_decode_padding_mode(DecodePaddingMode::Indifferent),
);
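Because the constant is `pub(crate)`, here is a standalone sketch of the same engine configuration built directly against the `base64` crate, showing the behaviour the comment describes: decoding tolerates missing padding while encoding always emits it. Nothing in this snippet is specific to the repository.

```rust
use base64::{
    Engine,
    alphabet::STANDARD as STANDARD_ALPHABET,
    engine::{
        DecodePaddingMode,
        general_purpose::{GeneralPurpose, GeneralPurposeConfig},
    },
};

fn main() {
    let engine = GeneralPurpose::new(
        &STANDARD_ALPHABET,
        GeneralPurposeConfig::new()
            .with_encode_padding(true)
            .with_decode_padding_mode(DecodePaddingMode::Indifferent),
    );

    // Both padded and unpadded forms of the same value decode successfully.
    assert_eq!(engine.decode("dGVzdA==").unwrap(), b"test".to_vec());
    assert_eq!(engine.decode("dGVzdA").unwrap(), b"test".to_vec());

    // Encoding always emits padding.
    assert_eq!(engine.encode(b"test"), "dGVzdA==");
}
```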
+160 crates/atproto-attestation/src/verification.rs
···
1
+
//! Signature verification for AT Protocol attestations.
2
+
//!
3
+
//! This module provides verification functions for AT Protocol record attestations.
4
+
5
+
use crate::cid::create_attestation_cid;
6
+
use crate::errors::AttestationError;
7
+
use crate::input::AnyInput;
8
+
use crate::utils::BASE64;
9
+
use atproto_identity::key::{KeyResolver, validate};
10
+
use atproto_record::lexicon::com::atproto::repo::STRONG_REF_NSID;
11
+
use base64::Engine;
12
+
use serde::Serialize;
13
+
use serde_json::{Value, Map};
14
+
use std::convert::TryInto;
15
+
16
+
/// Helper function to extract and validate signatures array from a record
17
+
fn extract_signatures(record_object: &Map<String, Value>) -> Result<Vec<Value>, AttestationError> {
18
+
match record_object.get("signatures") {
19
+
Some(value) => value
20
+
.as_array()
21
+
.ok_or(AttestationError::SignaturesFieldInvalid)
22
+
.cloned(),
23
+
None => Ok(vec![]),
24
+
}
25
+
}
26
+
27
+
/// Verify all signatures in a record with flexible input types.
28
+
///
29
+
/// This is a high-level verification function that accepts records in multiple formats
30
+
/// (String, Json, or TypedLexicon) and verifies all signatures with custom resolvers.
31
+
///
32
+
/// # Arguments
33
+
///
34
+
/// * `verify_input` - The record to verify (as AnyInput: String, Json, or TypedLexicon)
35
+
/// * `repository` - The repository DID to validate against (prevents replay attacks)
36
+
/// * `key_resolver` - Resolver for looking up verification keys from DIDs
37
+
/// * `record_resolver` - Resolver for fetching remote attestation proof records
38
+
///
39
+
/// # Returns
40
+
///
41
+
/// Returns `Ok(())` if all signatures are valid, or an error if any verification fails.
42
+
///
43
+
/// # Errors
44
+
///
45
+
/// Returns an error if:
46
+
/// - The input is not a valid record object
47
+
/// - Any signature verification fails
48
+
/// - Key or record resolution fails
49
+
///
50
+
/// # Type Parameters
51
+
///
52
+
/// * `R` - The record type (must implement Serialize + LexiconType + PartialEq + Clone)
53
+
/// * `RR` - The record resolver type (must implement RecordResolver)
54
+
/// * `KR` - The key resolver type (must implement KeyResolver)
55
+
pub async fn verify_record<R, RR, KR>(
56
+
verify_input: AnyInput<R>,
57
+
repository: &str,
58
+
key_resolver: KR,
59
+
record_resolver: RR,
60
+
) -> Result<(), AttestationError>
61
+
where
62
+
R: Serialize + Clone,
63
+
RR: atproto_client::record_resolver::RecordResolver,
64
+
KR: KeyResolver,
65
+
{
66
+
let record_object: Map<String, Value> = verify_input
67
+
.clone()
68
+
.try_into()
69
+
.map_err(|_| AttestationError::RecordMustBeObject)?;
70
+
71
+
let signatures = extract_signatures(&record_object)?;
72
+
73
+
if signatures.is_empty() {
74
+
return Ok(());
75
+
}
76
+
77
+
for signature in signatures {
78
+
let signature_reference_type = signature
79
+
.get("$type")
80
+
.and_then(Value::as_str)
81
+
.filter(|value| !value.is_empty())
82
+
.ok_or(AttestationError::SigMetadataMissingType)?;
83
+
84
+
let metadata = if signature_reference_type == STRONG_REF_NSID {
85
+
let aturi = signature
86
+
.get("uri")
87
+
.and_then(Value::as_str)
88
+
.filter(|value| !value.is_empty())
89
+
.ok_or(AttestationError::SignatureMissingField {
90
+
field: "uri".to_string(),
91
+
})?;
92
+
93
+
record_resolver
94
+
.resolve::<serde_json::Value>(aturi)
95
+
.await
96
+
.map_err(|error| AttestationError::RemoteAttestationFetchFailed {
97
+
uri: aturi.to_string(),
98
+
error,
99
+
})?
100
+
} else {
101
+
signature.clone()
102
+
};
103
+
104
+
let computed_cid = create_attestation_cid(
105
+
verify_input.clone(),
106
+
AnyInput::Serialize(metadata.clone()),
107
+
repository,
108
+
)?;
109
+
110
+
if signature_reference_type == STRONG_REF_NSID {
111
+
let attestation_cid = metadata
112
+
.get("cid")
113
+
.and_then(Value::as_str)
114
+
.filter(|value| !value.is_empty())
115
+
.ok_or(AttestationError::SignatureMissingField {
116
+
field: "cid".to_string(),
117
+
})?;
118
+
119
+
if computed_cid.to_string() != attestation_cid {
120
+
return Err(AttestationError::RemoteAttestationCidMismatch {
121
+
expected: attestation_cid.to_string(),
122
+
actual: computed_cid.to_string(),
123
+
});
124
+
}
125
+
continue;
126
+
}
127
+
128
+
let key = metadata
129
+
.get("key")
130
+
.and_then(Value::as_str)
131
+
.filter(|value| !value.is_empty())
132
+
.ok_or(AttestationError::SignatureMissingField {
133
+
field: "key".to_string(),
134
+
})?;
135
+
let key_data = key_resolver.resolve(key).await.map_err(|error| {
136
+
AttestationError::KeyResolutionFailed {
137
+
key: key.to_string(),
138
+
error,
139
+
}
140
+
})?;
141
+
142
+
let signature_bytes = metadata
143
+
.get("signature")
144
+
.and_then(Value::as_object)
145
+
.and_then(|object| object.get("$bytes"))
146
+
.and_then(Value::as_str)
147
+
.ok_or(AttestationError::SignatureBytesFormatInvalid)?;
148
+
149
+
let signature_bytes = BASE64
150
+
.decode(signature_bytes)
151
+
.map_err(|error| AttestationError::SignatureDecodingFailed { error })?;
152
+
153
+
let computed_cid_bytes = computed_cid.to_bytes();
154
+
155
+
validate(&key_data, &signature_bytes, &computed_cid_bytes)
156
+
.map_err(|error| AttestationError::SignatureValidationFailed { error })?;
157
+
}
158
+
159
+
Ok(())
160
+
}
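A hedged wiring sketch for callers of `verify_record`, assuming the crate exposes `verification`, `input`, and `errors` publicly (module paths inferred from the `use` statements above) and that `serde_json::Value` is an acceptable record type for the `Serialize + Clone` bound; constructing the two resolvers (HTTP client, DNS, PLC host) is left to the application:

```rust
use atproto_attestation::errors::AttestationError;
use atproto_attestation::input::AnyInput;
use atproto_attestation::verification::verify_record;
use atproto_identity::key::KeyResolver;

/// Verify every signature carried by `record`, binding the check to the
/// repository DID so an attestation cannot be replayed onto another repo.
async fn check_record<KR, RR>(
    record: serde_json::Value,
    repository_did: &str,
    key_resolver: KR,
    record_resolver: RR,
) -> Result<(), AttestationError>
where
    KR: KeyResolver,
    RR: atproto_client::record_resolver::RecordResolver,
{
    // Records without a "signatures" array verify trivially, matching the
    // early return in verify_record above.
    verify_record(
        AnyInput::Serialize(record),
        repository_did,
        key_resolver,
        record_resolver,
    )
    .await
}
```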
+6 crates/atproto-client/Cargo.toml
+165 crates/atproto-client/src/bin/atproto-client-put-record.rs
···
1
+
//! AT Protocol client tool for writing records to a repository.
2
+
//!
3
+
//! This binary tool creates or updates records in an AT Protocol repository
4
+
//! using app password authentication. It resolves the subject to a DID,
5
+
//! creates a session, and writes the record using the putRecord XRPC method.
6
+
//!
7
+
//! # Usage
8
+
//!
9
+
//! ```text
10
+
//! ATPROTO_PASSWORD=<password> atproto-client-put-record <subject> <record_key> <record_json>
11
+
//! ```
12
+
//!
13
+
//! # Environment Variables
14
+
//!
15
+
//! - `ATPROTO_PASSWORD` - Required. App password for authentication.
16
+
//! - `CERTIFICATE_BUNDLES` - Custom CA certificate bundles.
17
+
//! - `USER_AGENT` - Custom user agent string.
18
+
//! - `DNS_NAMESERVERS` - Custom DNS nameservers.
19
+
//! - `PLC_HOSTNAME` - Override PLC hostname (default: plc.directory).
20
+
21
+
use anyhow::Result;
22
+
use atproto_client::{
23
+
client::{AppPasswordAuth, Auth},
24
+
com::atproto::{
25
+
repo::{put_record, PutRecordRequest, PutRecordResponse},
26
+
server::create_session,
27
+
},
28
+
errors::CliError,
29
+
};
30
+
use atproto_identity::{
31
+
config::{CertificateBundles, DnsNameservers, default_env, optional_env, version},
32
+
plc,
33
+
resolve::{HickoryDnsResolver, resolve_subject},
34
+
web,
35
+
};
36
+
use std::env;
37
+
38
+
fn print_usage() {
39
+
eprintln!("Usage: atproto-client-put-record <subject> <record_key> <record_json>");
40
+
eprintln!();
41
+
eprintln!("Arguments:");
42
+
eprintln!(" <subject> Handle or DID of the repository owner");
43
+
eprintln!(" <record_key> Record key (rkey) for the record");
44
+
eprintln!(" <record_json> JSON record data (must include $type field)");
45
+
eprintln!();
46
+
eprintln!("Environment Variables:");
47
+
eprintln!(" ATPROTO_PASSWORD Required. App password for authentication.");
48
+
eprintln!(" CERTIFICATE_BUNDLES Custom CA certificate bundles.");
49
+
eprintln!(" USER_AGENT Custom user agent string.");
50
+
eprintln!(" DNS_NAMESERVERS Custom DNS nameservers.");
51
+
eprintln!(" PLC_HOSTNAME Override PLC hostname (default: plc.directory).");
52
+
}
53
+
54
+
#[tokio::main]
55
+
async fn main() -> Result<()> {
56
+
let args: Vec<String> = env::args().collect();
57
+
58
+
if args.len() != 4 {
59
+
print_usage();
60
+
std::process::exit(1);
61
+
}
62
+
63
+
let subject = &args[1];
64
+
let record_key = &args[2];
65
+
let record_json = &args[3];
66
+
67
+
// Get password from environment variable
68
+
let password = env::var("ATPROTO_PASSWORD").map_err(|_| {
69
+
anyhow::anyhow!("ATPROTO_PASSWORD environment variable is required")
70
+
})?;
71
+
72
+
// Set up HTTP client configuration
73
+
let certificate_bundles: CertificateBundles = optional_env("CERTIFICATE_BUNDLES").try_into()?;
74
+
let default_user_agent = format!(
75
+
"atproto-identity-rs ({}; +https://tangled.sh/@smokesignal.events/atproto-identity-rs)",
76
+
version()?
77
+
);
78
+
let user_agent = default_env("USER_AGENT", &default_user_agent);
79
+
let dns_nameservers: DnsNameservers = optional_env("DNS_NAMESERVERS").try_into()?;
80
+
let plc_hostname = default_env("PLC_HOSTNAME", "plc.directory");
81
+
82
+
let mut client_builder = reqwest::Client::builder();
83
+
for ca_certificate in certificate_bundles.as_ref() {
84
+
let cert = std::fs::read(ca_certificate)?;
85
+
let cert = reqwest::Certificate::from_pem(&cert)?;
86
+
client_builder = client_builder.add_root_certificate(cert);
87
+
}
88
+
89
+
client_builder = client_builder.user_agent(user_agent);
90
+
let http_client = client_builder.build()?;
91
+
92
+
let dns_resolver = HickoryDnsResolver::create_resolver(dns_nameservers.as_ref());
93
+
94
+
// Parse the record JSON
95
+
let record: serde_json::Value = serde_json::from_str(record_json).map_err(|err| {
96
+
tracing::error!(error = ?err, "Failed to parse record JSON");
97
+
anyhow::anyhow!("Failed to parse record JSON: {}", err)
98
+
})?;
99
+
100
+
// Extract collection from $type field
101
+
let collection = record
102
+
.get("$type")
103
+
.and_then(|v| v.as_str())
104
+
.ok_or_else(|| anyhow::anyhow!("Record must contain a $type field for the collection"))?
105
+
.to_string();
106
+
107
+
// Resolve subject to DID
108
+
let did = resolve_subject(&http_client, &dns_resolver, subject).await?;
109
+
110
+
// Get DID document to find PDS endpoint
111
+
let document = if did.starts_with("did:plc:") {
112
+
plc::query(&http_client, &plc_hostname, &did).await?
113
+
} else if did.starts_with("did:web:") {
114
+
web::query(&http_client, &did).await?
115
+
} else {
116
+
anyhow::bail!("Unsupported DID method: {}", did);
117
+
};
118
+
119
+
// Get PDS endpoint from the DID document
120
+
let pds_endpoints = document.pds_endpoints();
121
+
let pds_endpoint = pds_endpoints
122
+
.first()
123
+
.ok_or_else(|| CliError::NoPdsEndpointFound { did: did.clone() })?;
124
+
125
+
// Create session
126
+
let session = create_session(&http_client, pds_endpoint, &did, &password, None).await?;
127
+
128
+
// Set up app password authentication
129
+
let auth = Auth::AppPassword(AppPasswordAuth {
130
+
access_token: session.access_jwt.clone(),
131
+
});
132
+
133
+
// Create put record request
134
+
let put_request = PutRecordRequest {
135
+
repo: session.did.clone(),
136
+
collection,
137
+
record_key: record_key.clone(),
138
+
validate: true,
139
+
record,
140
+
swap_commit: None,
141
+
swap_record: None,
142
+
};
143
+
144
+
// Execute put record
145
+
let response = put_record(&http_client, &auth, pds_endpoint, put_request).await?;
146
+
147
+
match response {
148
+
PutRecordResponse::StrongRef { uri, cid, .. } => {
149
+
println!(
150
+
"{}",
151
+
serde_json::to_string_pretty(&serde_json::json!({
152
+
"uri": uri,
153
+
"cid": cid
154
+
}))?
155
+
);
156
+
}
157
+
PutRecordResponse::Error(err) => {
158
+
let error_message = err.error_message();
159
+
tracing::error!(error = %error_message, "putRecord failed");
160
+
anyhow::bail!("putRecord failed: {}", error_message);
161
+
}
162
+
}
163
+
164
+
Ok(())
165
+
}
+31 -5 crates/atproto-client/src/record_resolver.rs
···
1
1
//! Helpers for resolving AT Protocol records referenced by URI.
2
2
3
3
use std::str::FromStr;
4
+
use std::sync::Arc;
4
5
5
6
use anyhow::{Result, anyhow, bail};
6
7
use async_trait::async_trait;
8
+
use atproto_identity::traits::IdentityResolver;
7
9
use atproto_record::aturi::ATURI;
8
10
9
11
use crate::{
···
24
26
}
25
27
26
28
/// Resolver that fetches records using public XRPC endpoints.
29
+
///
30
+
/// Uses an identity resolver to dynamically determine the PDS endpoint for each record.
27
31
#[derive(Clone)]
28
32
pub struct HttpRecordResolver {
29
33
http_client: reqwest::Client,
30
-
base_url: String,
34
+
identity_resolver: Arc<dyn IdentityResolver>,
31
35
}
32
36
33
37
impl HttpRecordResolver {
34
-
/// Create a new resolver using the provided HTTP client and PDS base URL.
35
-
pub fn new(http_client: reqwest::Client, base_url: impl Into<String>) -> Self {
38
+
/// Create a new resolver using the provided HTTP client and identity resolver.
39
+
///
40
+
/// The identity resolver is used to dynamically determine the PDS endpoint for each record
41
+
/// based on the authority (DID or handle) in the AT URI.
42
+
pub fn new(
43
+
http_client: reqwest::Client,
44
+
identity_resolver: Arc<dyn IdentityResolver>,
45
+
) -> Self {
36
46
Self {
37
47
http_client,
38
-
base_url: base_url.into(),
48
+
identity_resolver,
39
49
}
40
50
}
41
51
}
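A construction sketch under the new constructor signature; it assumes `InnerIdentityResolver` (used later in this diff by the parse-facets binary) implements the `IdentityResolver` trait expected here, which this diff does not show directly:

```rust
use std::sync::Arc;

use atproto_client::record_resolver::HttpRecordResolver;
use atproto_identity::resolve::{HickoryDnsResolver, InnerIdentityResolver};

fn build_record_resolver() -> HttpRecordResolver {
    let http_client = reqwest::Client::new();
    let dns_resolver = HickoryDnsResolver::create_resolver(&[]);

    // Shared identity resolver; the record resolver consults it per AT URI to
    // discover the owning repository's PDS endpoint instead of a fixed base URL.
    let identity_resolver = Arc::new(InnerIdentityResolver {
        http_client: http_client.clone(),
        dns_resolver: Arc::new(dns_resolver),
        plc_hostname: "plc.directory".to_string(),
    });

    HttpRecordResolver::new(http_client, identity_resolver)
}
```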
···
47
57
T: serde::de::DeserializeOwned + Send,
48
58
{
49
59
let parsed = ATURI::from_str(aturi).map_err(|error| anyhow!(error))?;
60
+
61
+
// Resolve the authority (DID or handle) to get the DID document
62
+
let document = self
63
+
.identity_resolver
64
+
.resolve(&parsed.authority)
65
+
.await
66
+
.map_err(|error| {
67
+
anyhow!("Failed to resolve identity for {}: {}", parsed.authority, error)
68
+
})?;
69
+
70
+
// Extract PDS endpoint from the DID document
71
+
let pds_endpoints = document.pds_endpoints();
72
+
let base_url = pds_endpoints
73
+
.first()
74
+
.ok_or_else(|| anyhow!("No PDS endpoint found for {}", parsed.authority))?;
75
+
50
76
let auth = Auth::None;
51
77
52
78
let response = get_record(
53
79
&self.http_client,
54
80
&auth,
55
-
&self.base_url,
81
+
base_url,
56
82
&parsed.authority,
57
83
&parsed.collection,
58
84
&parsed.record_key,
+43 crates/atproto-extras/Cargo.toml
···
[package]
name = "atproto-extras"
version = "0.13.0"
description = "AT Protocol extras - facet parsing and rich text utilities"
readme = "README.md"
homepage = "https://tangled.sh/@smokesignal.events/atproto-identity-rs"
documentation = "https://docs.rs/atproto-extras"

edition.workspace = true
rust-version.workspace = true
authors.workspace = true
repository.workspace = true
license.workspace = true
keywords.workspace = true
categories.workspace = true

[dependencies]
atproto-identity.workspace = true
atproto-record.workspace = true

anyhow.workspace = true
async-trait.workspace = true
clap = { workspace = true, optional = true }
regex.workspace = true
reqwest = { workspace = true, optional = true }
serde_json = { workspace = true, optional = true }
tokio = { workspace = true, optional = true }

[dev-dependencies]
tokio = { workspace = true, features = ["macros", "rt"] }

[features]
default = ["hickory-dns"]
hickory-dns = ["atproto-identity/hickory-dns"]
clap = ["dep:clap"]
cli = ["dep:clap", "dep:serde_json", "dep:tokio", "dep:reqwest"]

[[bin]]
name = "atproto-extras-parse-facets"
required-features = ["clap", "cli", "hickory-dns"]

[lints]
workspace = true
+128 crates/atproto-extras/README.md
···
1
+
# atproto-extras
2
+
3
+
Extra utilities for AT Protocol applications, including rich text facet parsing.
4
+
5
+
## Features
6
+
7
+
- **Facet Parsing**: Extract mentions (`@handle`), URLs, and hashtags (`#tag`) from plain text with correct UTF-8 byte offset calculation
8
+
- **Identity Integration**: Resolve mention handles to DIDs during parsing
9
+
10
+
## Installation
11
+
12
+
Add to your `Cargo.toml`:
13
+
14
+
```toml
15
+
[dependencies]
16
+
atproto-extras = "0.13"
17
+
```
18
+
19
+
## Usage
20
+
21
+
### Parsing Text for Facets
22
+
23
+
```rust
24
+
use atproto_extras::{parse_urls, parse_tags};
25
+
use atproto_record::lexicon::app::bsky::richtext::facet::FacetFeature;
26
+
27
+
let text = "Check out https://example.com #rust";
28
+
29
+
// Parse URLs and tags - returns Vec<Facet> directly
30
+
let url_facets = parse_urls(text);
31
+
let tag_facets = parse_tags(text);
32
+
33
+
// Each facet includes byte positions and typed features
34
+
for facet in url_facets {
35
+
if let Some(FacetFeature::Link(link)) = facet.features.first() {
36
+
println!("URL at bytes {}..{}: {}",
37
+
facet.index.byte_start, facet.index.byte_end, link.uri);
38
+
}
39
+
}
40
+
41
+
for facet in tag_facets {
42
+
if let Some(FacetFeature::Tag(tag)) = facet.features.first() {
43
+
println!("Tag at bytes {}..{}: #{}",
44
+
facet.index.byte_start, facet.index.byte_end, tag.tag);
45
+
}
46
+
}
47
+
```
48
+
49
+
### Parsing Mentions
50
+
51
+
Mention parsing requires an `IdentityResolver` to convert handles to DIDs:
52
+
53
+
```rust
54
+
use atproto_extras::{parse_mentions, FacetLimits};
55
+
use atproto_record::lexicon::app::bsky::richtext::facet::FacetFeature;
56
+
57
+
let text = "Hello @alice.bsky.social!";
58
+
let limits = FacetLimits::default();
59
+
60
+
// Requires an async context and IdentityResolver
61
+
let facets = parse_mentions(text, &resolver, &limits).await;
62
+
63
+
for facet in facets {
64
+
if let Some(FacetFeature::Mention(mention)) = facet.features.first() {
65
+
println!("Mention at bytes {}..{} resolved to {}",
66
+
facet.index.byte_start, facet.index.byte_end, mention.did);
67
+
}
68
+
}
69
+
```
70
+
71
+
Mentions that cannot be resolved to a valid DID are automatically skipped. Mentions appearing within URLs are also excluded.
72
+
73
+
### Creating AT Protocol Facets
74
+
75
+
```rust
76
+
use atproto_extras::{parse_facets_from_text, FacetLimits};
77
+
78
+
let text = "Hello @alice.bsky.social! Check https://rust-lang.org #rust";
79
+
let limits = FacetLimits::default();
80
+
81
+
// Requires an async context and IdentityResolver
82
+
let facets = parse_facets_from_text(text, &resolver, &limits).await;
83
+
84
+
if let Some(facets) = facets {
85
+
for facet in &facets {
86
+
println!("Facet at {}..{}", facet.index.byte_start, facet.index.byte_end);
87
+
}
88
+
}
89
+
```
90
+
91
+
## Byte Offset Handling
92
+
93
+
AT Protocol facets use UTF-8 byte offsets, not character indices. This is critical for correct handling of multi-byte characters like emojis or non-ASCII text.
94
+
95
+
```rust
96
+
use atproto_extras::parse_urls;
97
+
98
+
// Text with emojis (multi-byte UTF-8 characters)
99
+
let text = "🚀 Check https://example.com 🚀";
100
+
101
+
let facets = parse_urls(text);
102
+
// Byte positions correctly account for the 4-byte emoji
103
+
assert_eq!(facets[0].index.byte_start, 11); // After "🚀 Check " (4 + 1 + 6 = 11 bytes)
104
+
```
105
+
106
+
## Facet Limits
107
+
108
+
Use `FacetLimits` to control the maximum number of facets processed:
109
+
110
+
```rust
111
+
use atproto_extras::FacetLimits;
112
+
113
+
// Default limits
114
+
let limits = FacetLimits::default();
115
+
// mentions_max: 5, tags_max: 5, links_max: 5, max: 10
116
+
117
+
// Custom limits
118
+
let custom = FacetLimits {
119
+
mentions_max: 10,
120
+
tags_max: 10,
121
+
links_max: 10,
122
+
max: 20,
123
+
};
124
+
```
125
+
126
+
## License
127
+
128
+
MIT
+176 crates/atproto-extras/src/bin/atproto-extras-parse-facets.rs
···
1
+
//! Command-line tool for generating AT Protocol facet arrays from text.
2
+
//!
3
+
//! This tool parses a string and outputs the facet array in JSON format.
4
+
//! Facets include mentions (@handle), URLs (https://...), and hashtags (#tag).
5
+
//!
6
+
//! By default, mentions are detected but output with placeholder DIDs. Use
7
+
//! `--resolve-mentions` to resolve handles to actual DIDs (requires network access).
8
+
//!
9
+
//! # Usage
10
+
//!
11
+
//! ```bash
12
+
//! # Parse facets without resolving mentions
13
+
//! cargo run --features clap,serde_json,tokio,hickory-dns --bin atproto-extras-parse-facets -- "Check out https://example.com and #rust"
14
+
//!
15
+
//! # Resolve mentions to DIDs
16
+
//! cargo run --features clap,serde_json,tokio,hickory-dns --bin atproto-extras-parse-facets -- --resolve-mentions "Hello @bsky.app!"
17
+
//! ```
18
+
19
+
use atproto_extras::{FacetLimits, parse_mentions, parse_tags, parse_urls};
20
+
use atproto_identity::resolve::{HickoryDnsResolver, InnerIdentityResolver};
21
+
use atproto_record::lexicon::app::bsky::richtext::facet::{
22
+
ByteSlice, Facet, FacetFeature, Mention,
23
+
};
24
+
use clap::Parser;
25
+
use regex::bytes::Regex;
26
+
use std::sync::Arc;
27
+
28
+
/// Parse text and output AT Protocol facets as JSON.
29
+
#[derive(Parser)]
30
+
#[command(
31
+
name = "atproto-extras-parse-facets",
32
+
version,
33
+
about = "Parse text and output AT Protocol facets as JSON",
34
+
long_about = "This tool parses a string for mentions, URLs, and hashtags,\n\
35
+
then outputs the corresponding AT Protocol facet array in JSON format.\n\n\
36
+
By default, mentions are detected but output with placeholder DIDs.\n\
37
+
Use --resolve-mentions to resolve handles to actual DIDs (requires network)."
38
+
)]
39
+
struct Args {
40
+
/// The text to parse for facets
41
+
text: String,
42
+
43
+
/// Resolve mention handles to DIDs (requires network access)
44
+
#[arg(long)]
45
+
resolve_mentions: bool,
46
+
47
+
/// Show debug information on stderr
48
+
#[arg(long, short = 'd')]
49
+
debug: bool,
50
+
}
51
+
52
+
/// Parse mention spans from text without resolution (returns placeholder DIDs).
53
+
fn parse_mention_spans(text: &str) -> Vec<Facet> {
54
+
let mut facets = Vec::new();
55
+
56
+
// Get URL ranges to exclude mentions within URLs
57
+
let url_facets = parse_urls(text);
58
+
59
+
// Same regex pattern as parse_mentions
60
+
let mention_regex = Regex::new(
61
+
r"(?:^|[^\w])(@([a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)",
62
+
)
63
+
.expect("Invalid mention regex");
64
+
65
+
let text_bytes = text.as_bytes();
66
+
67
+
for capture in mention_regex.captures_iter(text_bytes) {
68
+
if let Some(mention_match) = capture.get(1) {
69
+
let start = mention_match.start();
70
+
let end = mention_match.end();
71
+
72
+
// Check if this mention overlaps with any URL
73
+
let overlaps_url = url_facets.iter().any(|facet| {
74
+
(start >= facet.index.byte_start && start < facet.index.byte_end)
75
+
|| (end > facet.index.byte_start && end <= facet.index.byte_end)
76
+
});
77
+
78
+
if !overlaps_url {
79
+
let handle = std::str::from_utf8(&mention_match.as_bytes()[1..])
80
+
.unwrap_or_default()
81
+
.to_string();
82
+
83
+
facets.push(Facet {
84
+
index: ByteSlice {
85
+
byte_start: start,
86
+
byte_end: end,
87
+
},
88
+
features: vec![FacetFeature::Mention(Mention {
89
+
did: format!("did:plc:<unresolved:{}>", handle),
90
+
})],
91
+
});
92
+
}
93
+
}
94
+
}
95
+
96
+
facets
97
+
}
98
+
99
+
#[tokio::main]
100
+
async fn main() {
101
+
let args = Args::parse();
102
+
let text = &args.text;
103
+
let mut facets: Vec<Facet> = Vec::new();
104
+
let limits = FacetLimits::default();
105
+
106
+
// Parse mentions (either resolved or with placeholders)
107
+
if args.resolve_mentions {
108
+
let http_client = reqwest::Client::new();
109
+
let dns_resolver = HickoryDnsResolver::create_resolver(&[]);
110
+
let resolver = InnerIdentityResolver {
111
+
http_client,
112
+
dns_resolver: Arc::new(dns_resolver),
113
+
plc_hostname: "plc.directory".to_string(),
114
+
};
115
+
let mention_facets = parse_mentions(text, &resolver, &limits).await;
116
+
facets.extend(mention_facets);
117
+
} else {
118
+
let mention_facets = parse_mention_spans(text);
119
+
facets.extend(mention_facets);
120
+
}
121
+
122
+
// Parse URLs
123
+
let url_facets = parse_urls(text);
124
+
facets.extend(url_facets);
125
+
126
+
// Parse hashtags
127
+
let tag_facets = parse_tags(text);
128
+
facets.extend(tag_facets);
129
+
130
+
// Sort facets by byte_start for consistent output
131
+
facets.sort_by_key(|f| f.index.byte_start);
132
+
133
+
// Output as JSON
134
+
if facets.is_empty() {
135
+
println!("null");
136
+
} else {
137
+
match serde_json::to_string_pretty(&facets) {
138
+
Ok(json) => println!("{}", json),
139
+
Err(e) => {
140
+
eprintln!(
141
+
"error-atproto-extras-parse-facets-1 Error serializing facets: {}",
142
+
e
143
+
);
144
+
std::process::exit(1);
145
+
}
146
+
}
147
+
}
148
+
149
+
// Show debug info if requested
150
+
if args.debug {
151
+
eprintln!();
152
+
eprintln!("--- Debug Info ---");
153
+
eprintln!("Input text: {:?}", text);
154
+
eprintln!("Text length: {} bytes", text.len());
155
+
eprintln!("Facets found: {}", facets.len());
156
+
eprintln!("Mentions resolved: {}", args.resolve_mentions);
157
+
158
+
// Show byte slice verification
159
+
let text_bytes = text.as_bytes();
160
+
for (i, facet) in facets.iter().enumerate() {
161
+
let start = facet.index.byte_start;
162
+
let end = facet.index.byte_end;
163
+
let slice_text =
164
+
std::str::from_utf8(&text_bytes[start..end]).unwrap_or("<invalid utf8>");
165
+
let feature_type = match &facet.features[0] {
166
+
FacetFeature::Mention(_) => "mention",
167
+
FacetFeature::Link(_) => "link",
168
+
FacetFeature::Tag(_) => "tag",
169
+
};
170
+
eprintln!(
171
+
" [{}] {} @ bytes {}..{}: {:?}",
172
+
i, feature_type, start, end, slice_text
173
+
);
174
+
}
175
+
}
176
+
}
+942 crates/atproto-extras/src/facets.rs
···
1
+
//! Rich text facet parsing for AT Protocol.
2
+
//!
3
+
//! This module provides functionality for extracting semantic annotations (facets)
4
+
//! from plain text. Facets include mentions, links (URLs), and hashtags.
5
+
//!
6
+
//! # Overview
7
+
//!
8
+
//! AT Protocol rich text uses "facets" to annotate specific byte ranges within text with
9
+
//! semantic meaning. This module handles:
10
+
//!
11
+
//! - **Parsing**: Extract mentions, URLs, and hashtags from plain text
12
+
//! - **Facet Creation**: Build proper AT Protocol facet structures with resolved DIDs
13
+
//!
14
+
//! # Byte Offset Calculation
15
+
//!
16
+
//! This implementation correctly uses UTF-8 byte offsets as required by AT Protocol.
17
+
//! The facets use "inclusive start and exclusive end" byte ranges. All parsing is done
18
+
//! using `regex::bytes::Regex` which operates on byte slices and returns byte positions,
19
+
//! ensuring correct handling of multi-byte UTF-8 characters (emojis, CJK, accented chars).
20
+
//!
21
+
//! # Example
22
+
//!
23
+
//! ```ignore
24
+
//! use atproto_extras::facets::{parse_urls, parse_tags, FacetLimits};
25
+
//! use atproto_record::lexicon::app::bsky::richtext::facet::FacetFeature;
26
+
//!
27
+
//! let text = "Check out https://example.com #rust";
28
+
//!
29
+
//! // Parse URLs and tags as Facet objects
30
+
//! let url_facets = parse_urls(text);
31
+
//! let tag_facets = parse_tags(text);
32
+
//!
33
+
//! // Access facet data directly
34
+
//! for facet in url_facets {
35
+
//! if let Some(FacetFeature::Link(link)) = facet.features.first() {
36
+
//! println!("URL at bytes {}..{}: {}",
37
+
//! facet.index.byte_start, facet.index.byte_end, link.uri);
38
+
//! }
39
+
//! }
40
+
//! ```
41
+
42
+
use atproto_identity::resolve::IdentityResolver;
43
+
use atproto_record::lexicon::app::bsky::richtext::facet::{
44
+
ByteSlice, Facet, FacetFeature, Link, Mention, Tag,
45
+
};
46
+
use regex::bytes::Regex;
47
+
48
+
/// Configuration for facet parsing limits.
49
+
///
50
+
/// These limits protect against abuse by capping the number of facets
51
+
/// that will be processed. This is important for both performance and
52
+
/// security when handling user-generated content.
53
+
///
54
+
/// # Example
55
+
///
56
+
/// ```
57
+
/// use atproto_extras::FacetLimits;
58
+
///
59
+
/// // Use defaults
60
+
/// let limits = FacetLimits::default();
61
+
///
62
+
/// // Or customize
63
+
/// let custom = FacetLimits {
64
+
/// mentions_max: 10,
65
+
/// tags_max: 10,
66
+
/// links_max: 10,
67
+
/// max: 20,
68
+
/// };
69
+
/// ```
70
+
#[derive(Debug, Clone, Copy)]
71
+
pub struct FacetLimits {
72
+
/// Maximum number of mention facets to process (default: 5)
73
+
pub mentions_max: usize,
74
+
/// Maximum number of tag facets to process (default: 5)
75
+
pub tags_max: usize,
76
+
/// Maximum number of link facets to process (default: 5)
77
+
pub links_max: usize,
78
+
/// Maximum total number of facets to process (default: 10)
79
+
pub max: usize,
80
+
}
81
+
82
+
impl Default for FacetLimits {
83
+
fn default() -> Self {
84
+
Self {
85
+
mentions_max: 5,
86
+
tags_max: 5,
87
+
links_max: 5,
88
+
max: 10,
89
+
}
90
+
}
91
+
}
92
+
93
+
/// Parse mentions from text and return them as Facet objects with resolved DIDs.
94
+
///
95
+
/// This function extracts AT Protocol handle mentions (e.g., `@alice.bsky.social`)
96
+
/// from text, resolves each handle to a DID using the provided identity resolver,
97
+
/// and returns AT Protocol Facet objects with Mention features.
98
+
///
99
+
/// Mentions that cannot be resolved to a valid DID are skipped. Mentions that
100
+
/// appear within URLs are also excluded to avoid false positives.
101
+
///
102
+
/// # Arguments
103
+
///
104
+
/// * `text` - The text to parse for mentions
105
+
/// * `identity_resolver` - Resolver for converting handles to DIDs
106
+
/// * `limits` - Configuration for maximum mentions to process
107
+
///
108
+
/// # Returns
109
+
///
110
+
/// A vector of Facet objects for successfully resolved mentions.
111
+
///
112
+
/// # Example
113
+
///
114
+
/// ```ignore
115
+
/// use atproto_extras::{parse_mentions, FacetLimits};
116
+
/// use atproto_record::lexicon::app::bsky::richtext::facet::FacetFeature;
117
+
///
118
+
/// let text = "Hello @alice.bsky.social!";
119
+
/// let limits = FacetLimits::default();
120
+
///
121
+
/// // Requires an async context and identity resolver
122
+
/// let facets = parse_mentions(text, &resolver, &limits).await;
123
+
///
124
+
/// for facet in facets {
125
+
/// if let Some(FacetFeature::Mention(mention)) = facet.features.first() {
126
+
/// println!("Mention {} resolved to {}",
127
+
/// &text[facet.index.byte_start..facet.index.byte_end],
128
+
/// mention.did);
129
+
/// }
130
+
/// }
131
+
/// ```
132
+
pub async fn parse_mentions(
133
+
text: &str,
134
+
identity_resolver: &dyn IdentityResolver,
135
+
limits: &FacetLimits,
136
+
) -> Vec<Facet> {
137
+
let mut facets = Vec::new();
138
+
139
+
// First, parse all URLs to exclude mention matches within them
140
+
let url_facets = parse_urls(text);
141
+
142
+
// Regex based on: https://atproto.com/specs/handle#handle-identifier-syntax
143
+
// Pattern: [$|\W](@([a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)
144
+
let mention_regex = Regex::new(
145
+
r"(?:^|[^\w])(@([a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)",
146
+
)
147
+
.unwrap();
148
+
149
+
let text_bytes = text.as_bytes();
150
+
let mut mention_count = 0;
151
+
152
+
for capture in mention_regex.captures_iter(text_bytes) {
153
+
if mention_count >= limits.mentions_max {
154
+
break;
155
+
}
156
+
157
+
if let Some(mention_match) = capture.get(1) {
158
+
let start = mention_match.start();
159
+
let end = mention_match.end();
160
+
161
+
// Check if this mention overlaps with any URL
162
+
let overlaps_url = url_facets.iter().any(|facet| {
163
+
// Check if mention is within or overlaps the URL span
164
+
(start >= facet.index.byte_start && start < facet.index.byte_end)
165
+
|| (end > facet.index.byte_start && end <= facet.index.byte_end)
166
+
});
167
+
168
+
// Only process the mention if it doesn't overlap with a URL
169
+
if !overlaps_url {
170
+
let handle = std::str::from_utf8(&mention_match.as_bytes()[1..])
171
+
.unwrap_or_default()
172
+
.to_string();
173
+
174
+
// Try to resolve the handle to a DID
175
+
// First try with at:// prefix, then without
176
+
let at_uri = format!("at://{}", handle);
177
+
let did_result = match identity_resolver.resolve(&at_uri).await {
178
+
Ok(doc) => Ok(doc),
179
+
Err(_) => identity_resolver.resolve(&handle).await,
180
+
};
181
+
182
+
// Only add the mention facet if we successfully resolved the DID
183
+
if let Ok(did_doc) = did_result {
184
+
facets.push(Facet {
185
+
index: ByteSlice {
186
+
byte_start: start,
187
+
byte_end: end,
188
+
},
189
+
features: vec![FacetFeature::Mention(Mention {
190
+
did: did_doc.id.to_string(),
191
+
})],
192
+
});
193
+
mention_count += 1;
194
+
}
195
+
}
196
+
}
197
+
}
198
+
199
+
facets
200
+
}
201
+
202
+
/// Parse URLs from text and return them as Facet objects.
203
+
///
204
+
/// This function extracts HTTP and HTTPS URLs from text with correct
205
+
/// byte position tracking for UTF-8 text, returning AT Protocol Facet objects
206
+
/// with Link features.
207
+
///
208
+
/// # Supported URL Patterns
209
+
///
210
+
/// - HTTP URLs: `http://example.com`
211
+
/// - HTTPS URLs: `https://example.com`
212
+
/// - URLs with paths, query strings, and fragments
213
+
/// - URLs with subdomains: `https://www.example.com`
214
+
///
215
+
/// # Example
216
+
///
217
+
/// ```
218
+
/// use atproto_extras::parse_urls;
219
+
/// use atproto_record::lexicon::app::bsky::richtext::facet::FacetFeature;
220
+
///
221
+
/// let text = "Visit https://example.com/path?query=1 for more info";
222
+
/// let facets = parse_urls(text);
223
+
///
224
+
/// assert_eq!(facets.len(), 1);
225
+
/// assert_eq!(facets[0].index.byte_start, 6);
226
+
/// assert_eq!(facets[0].index.byte_end, 38);
227
+
/// if let Some(FacetFeature::Link(link)) = facets[0].features.first() {
228
+
/// assert_eq!(link.uri, "https://example.com/path?query=1");
229
+
/// }
230
+
/// ```
231
+
///
232
+
/// # Multi-byte Character Handling
233
+
///
234
+
/// Byte positions are correctly calculated even with emojis and other
235
+
/// multi-byte UTF-8 characters:
236
+
///
237
+
/// ```
238
+
/// use atproto_extras::parse_urls;
239
+
/// use atproto_record::lexicon::app::bsky::richtext::facet::FacetFeature;
240
+
///
241
+
/// let text = "Check out https://example.com now!";
242
+
/// let facets = parse_urls(text);
243
+
/// let text_bytes = text.as_bytes();
244
+
///
245
+
/// // The byte slice matches the URL
246
+
/// let url_bytes = &text_bytes[facets[0].index.byte_start..facets[0].index.byte_end];
247
+
/// assert_eq!(std::str::from_utf8(url_bytes).unwrap(), "https://example.com");
248
+
/// ```
249
+
pub fn parse_urls(text: &str) -> Vec<Facet> {
250
+
let mut facets = Vec::new();
251
+
252
+
// Partial/naive URL regex based on: https://stackoverflow.com/a/3809435
253
+
// Pattern: [$|\W](https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]+\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*[-a-zA-Z0-9@%_\+~#//=])?)
254
+
// Modified to use + instead of {1,6} to support longer TLDs and multi-level subdomains
255
+
let url_regex = Regex::new(
256
+
r"(?:^|[^\w])(https?://(?:www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]+\b(?:[-a-zA-Z0-9()@:%_\+.~#?&//=]*[-a-zA-Z0-9@%_\+~#//=])?)"
257
+
).unwrap();
258
+
259
+
let text_bytes = text.as_bytes();
260
+
for capture in url_regex.captures_iter(text_bytes) {
261
+
if let Some(url_match) = capture.get(1) {
262
+
let url = std::str::from_utf8(url_match.as_bytes())
263
+
.unwrap_or_default()
264
+
.to_string();
265
+
266
+
facets.push(Facet {
267
+
index: ByteSlice {
268
+
byte_start: url_match.start(),
269
+
byte_end: url_match.end(),
270
+
},
271
+
features: vec![FacetFeature::Link(Link { uri: url })],
272
+
});
273
+
}
274
+
}
275
+
276
+
facets
277
+
}
278
+
279
+
/// Parse hashtags from text and return them as Facet objects.
280
+
///
281
+
/// This function extracts hashtags (e.g., `#rust`, `#ATProto`) from text,
282
+
/// returning AT Protocol Facet objects with Tag features.
283
+
/// It supports both standard `#` and full-width `＃` (U+FF03) hash symbols.
284
+
///
285
+
/// # Tag Syntax
286
+
///
287
+
/// - Tags must start with `#` or `＃` (full-width)
288
+
/// - Tag content follows word character rules (`\w`)
289
+
/// - Purely numeric tags (e.g., `#123`) are excluded
290
+
///
291
+
/// # Example
292
+
///
293
+
/// ```
294
+
/// use atproto_extras::parse_tags;
295
+
/// use atproto_record::lexicon::app::bsky::richtext::facet::FacetFeature;
296
+
///
297
+
/// let text = "Learning #rust and #golang today! #100DaysOfCode";
298
+
/// let facets = parse_tags(text);
299
+
///
300
+
/// assert_eq!(facets.len(), 3);
301
+
/// if let Some(FacetFeature::Tag(tag)) = facets[0].features.first() {
302
+
/// assert_eq!(tag.tag, "rust");
303
+
/// }
304
+
/// if let Some(FacetFeature::Tag(tag)) = facets[1].features.first() {
305
+
/// assert_eq!(tag.tag, "golang");
306
+
/// }
307
+
/// if let Some(FacetFeature::Tag(tag)) = facets[2].features.first() {
308
+
/// assert_eq!(tag.tag, "100DaysOfCode");
309
+
/// }
310
+
/// ```
311
+
///
312
+
/// # Numeric Tags
313
+
///
314
+
/// Purely numeric tags are excluded:
315
+
///
316
+
/// ```
317
+
/// use atproto_extras::parse_tags;
318
+
///
319
+
/// let text = "Item #42 is special";
320
+
/// let facets = parse_tags(text);
321
+
///
322
+
/// // #42 is not extracted because it's purely numeric
323
+
/// assert_eq!(facets.len(), 0);
324
+
/// ```
325
+
pub fn parse_tags(text: &str) -> Vec<Facet> {
326
+
let mut facets = Vec::new();
327
+
328
+
// Regex based on: https://github.com/bluesky-social/atproto/blob/d91988fe79030b61b556dd6f16a46f0c3b9d0b44/packages/api/src/rich-text/util.ts
329
+
// Simplified for Rust - matches hashtags at word boundaries
330
+
// Pattern matches: start of string or non-word char, then # or ＃, then tag content
331
+
let tag_regex = Regex::new(r"(?:^|[^\w])([#\xEF\xBC\x83])([\w]+(?:[\w]*)*)").unwrap();
332
+
333
+
let text_bytes = text.as_bytes();
334
+
335
+
// Work with bytes for proper position tracking
336
+
for capture in tag_regex.captures_iter(text_bytes) {
337
+
if let (Some(full_match), Some(hash_match), Some(tag_match)) =
338
+
(capture.get(0), capture.get(1), capture.get(2))
339
+
{
340
+
// Calculate the absolute byte position of the hash symbol
341
+
// The full match includes the preceding character (if any)
342
+
// so we need to adjust for that
343
+
let match_start = full_match.start();
344
+
let hash_offset = hash_match.start() - full_match.start();
345
+
let start = match_start + hash_offset;
346
+
let end = match_start + hash_offset + hash_match.len() + tag_match.len();
347
+
348
+
// Extract just the tag text (without the hash symbol)
349
+
let tag = std::str::from_utf8(tag_match.as_bytes()).unwrap_or_default();
350
+
351
+
// Only include tags that are not purely numeric
352
+
if !tag.chars().all(|c| c.is_ascii_digit()) {
353
+
facets.push(Facet {
354
+
index: ByteSlice {
355
+
byte_start: start,
356
+
byte_end: end,
357
+
},
358
+
features: vec![FacetFeature::Tag(Tag {
359
+
tag: tag.to_string(),
360
+
})],
361
+
});
362
+
}
363
+
}
364
+
}
365
+
366
+
facets
367
+
}
368
+
369
+
/// Parse facets from text and return a vector of Facet objects.
370
+
///
371
+
/// This function extracts mentions, URLs, and hashtags from the provided text
372
+
/// and creates AT Protocol facets with proper byte indices.
373
+
///
374
+
/// Mentions are resolved to actual DIDs using the provided identity resolver.
375
+
/// If a handle cannot be resolved to a DID, the mention facet is skipped.
376
+
///
377
+
/// # Arguments
378
+
///
379
+
/// * `text` - The text to extract facets from
380
+
/// * `identity_resolver` - Resolver for converting handles to DIDs
381
+
/// * `limits` - Configuration for maximum facets per type and total
382
+
///
383
+
/// # Returns
384
+
///
385
+
/// Optional vector of facets. Returns `None` if no facets were found.
386
+
///
387
+
/// # Example
388
+
///
389
+
/// ```ignore
390
+
/// use atproto_extras::{parse_facets_from_text, FacetLimits};
391
+
///
392
+
/// let text = "Hello @alice.bsky.social! Check #rust at https://rust-lang.org";
393
+
/// let limits = FacetLimits::default();
394
+
///
395
+
/// // Requires an async context and identity resolver
396
+
/// let facets = parse_facets_from_text(text, &resolver, &limits).await;
397
+
///
398
+
/// if let Some(facets) = facets {
399
+
/// for facet in &facets {
400
+
/// println!("Facet at {}..{}", facet.index.byte_start, facet.index.byte_end);
401
+
/// }
402
+
/// }
403
+
/// ```
404
+
///
405
+
/// # Mention Resolution
406
+
///
407
+
/// Mentions are only included if the handle resolves to a valid DID:
408
+
///
409
+
/// ```ignore
410
+
/// let text = "@valid.handle.com and @invalid.handle.xyz";
411
+
/// let facets = parse_facets_from_text(text, &resolver, &limits).await;
412
+
///
413
+
/// // Only @valid.handle.com appears as a facet if @invalid.handle.xyz
414
+
/// // cannot be resolved to a DID
415
+
/// ```
416
+
pub async fn parse_facets_from_text(
417
+
text: &str,
418
+
identity_resolver: &dyn IdentityResolver,
419
+
limits: &FacetLimits,
420
+
) -> Option<Vec<Facet>> {
421
+
let mut facets = Vec::new();
422
+
423
+
// Parse mentions (already limited by mentions_max in parse_mentions)
424
+
let mention_facets = parse_mentions(text, identity_resolver, limits).await;
425
+
facets.extend(mention_facets);
426
+
427
+
// Parse URLs (limited by links_max)
428
+
let url_facets = parse_urls(text);
429
+
for (idx, facet) in url_facets.into_iter().enumerate() {
430
+
if idx >= limits.links_max {
431
+
break;
432
+
}
433
+
facets.push(facet);
434
+
}
435
+
436
+
// Parse hashtags (limited by tags_max)
437
+
let tag_facets = parse_tags(text);
438
+
for (idx, facet) in tag_facets.into_iter().enumerate() {
439
+
if idx >= limits.tags_max {
440
+
break;
441
+
}
442
+
facets.push(facet);
443
+
}
444
+
445
+
// Apply global facet limit (truncate if exceeds max)
446
+
if facets.len() > limits.max {
447
+
facets.truncate(limits.max);
448
+
}
449
+
450
+
// Only return facets if we found any
451
+
if !facets.is_empty() {
452
+
Some(facets)
453
+
} else {
454
+
None
455
+
}
456
+
}
457
+
458
+
#[cfg(test)]
459
+
mod tests {
460
+
use async_trait::async_trait;
461
+
use atproto_identity::model::Document;
462
+
use std::collections::HashMap;
463
+
464
+
use super::*;
465
+
466
+
/// Mock identity resolver for testing
467
+
struct MockIdentityResolver {
468
+
handles_to_dids: HashMap<String, String>,
469
+
}
470
+
471
+
impl MockIdentityResolver {
472
+
fn new() -> Self {
473
+
let mut handles_to_dids = HashMap::new();
474
+
handles_to_dids.insert(
475
+
"alice.bsky.social".to_string(),
476
+
"did:plc:alice123".to_string(),
477
+
);
478
+
handles_to_dids.insert(
479
+
"at://alice.bsky.social".to_string(),
480
+
"did:plc:alice123".to_string(),
481
+
);
482
+
Self { handles_to_dids }
483
+
}
484
+
485
+
fn add_identity(&mut self, handle: &str, did: &str) {
486
+
self.handles_to_dids
487
+
.insert(handle.to_string(), did.to_string());
488
+
self.handles_to_dids
489
+
.insert(format!("at://{}", handle), did.to_string());
490
+
}
491
+
}
492
+
493
+
#[async_trait]
494
+
impl IdentityResolver for MockIdentityResolver {
495
+
async fn resolve(&self, handle: &str) -> anyhow::Result<Document> {
496
+
let handle_key = handle.to_string();
497
+
498
+
if let Some(did) = self.handles_to_dids.get(&handle_key) {
499
+
Ok(Document {
500
+
context: vec![],
501
+
id: did.clone(),
502
+
also_known_as: vec![format!("at://{}", handle_key.trim_start_matches("at://"))],
503
+
verification_method: vec![],
504
+
service: vec![],
505
+
extra: HashMap::new(),
506
+
})
507
+
} else {
508
+
Err(anyhow::anyhow!("Handle not found"))
509
+
}
510
+
}
511
+
}
512
+
513
+
#[tokio::test]
514
+
async fn test_parse_facets_from_text_comprehensive() {
515
+
let mut resolver = MockIdentityResolver::new();
516
+
resolver.add_identity("bob.test.com", "did:plc:bob456");
517
+
518
+
let limits = FacetLimits::default();
519
+
let text = "Join @alice.bsky.social and @bob.test.com at https://example.com #rust #golang";
520
+
let facets = parse_facets_from_text(text, &resolver, &limits).await;
521
+
522
+
assert!(facets.is_some());
523
+
let facets = facets.unwrap();
524
+
assert_eq!(facets.len(), 5); // 2 mentions, 1 URL, 2 hashtags
525
+
526
+
// Check first mention
527
+
assert_eq!(facets[0].index.byte_start, 5);
528
+
assert_eq!(facets[0].index.byte_end, 23);
529
+
if let FacetFeature::Mention(ref mention) = facets[0].features[0] {
530
+
assert_eq!(mention.did, "did:plc:alice123");
531
+
} else {
532
+
panic!("Expected Mention feature");
533
+
}
534
+
535
+
// Check second mention
536
+
assert_eq!(facets[1].index.byte_start, 28);
537
+
assert_eq!(facets[1].index.byte_end, 41);
538
+
if let FacetFeature::Mention(mention) = &facets[1].features[0] {
539
+
assert_eq!(mention.did, "did:plc:bob456");
540
+
} else {
541
+
panic!("Expected Mention feature");
542
+
}
543
+
544
+
// Check URL
545
+
assert_eq!(facets[2].index.byte_start, 45);
546
+
assert_eq!(facets[2].index.byte_end, 64);
547
+
if let FacetFeature::Link(link) = &facets[2].features[0] {
548
+
assert_eq!(link.uri, "https://example.com");
549
+
} else {
550
+
panic!("Expected Link feature");
551
+
}
552
+
553
+
// Check first hashtag
554
+
assert_eq!(facets[3].index.byte_start, 65);
555
+
assert_eq!(facets[3].index.byte_end, 70);
556
+
if let FacetFeature::Tag(tag) = &facets[3].features[0] {
557
+
assert_eq!(tag.tag, "rust");
558
+
} else {
559
+
panic!("Expected Tag feature");
560
+
}
561
+
562
+
// Check second hashtag
563
+
assert_eq!(facets[4].index.byte_start, 71);
564
+
assert_eq!(facets[4].index.byte_end, 78);
565
+
if let FacetFeature::Tag(tag) = &facets[4].features[0] {
566
+
assert_eq!(tag.tag, "golang");
567
+
} else {
568
+
panic!("Expected Tag feature");
569
+
}
570
+
}
571
+
572
+
#[tokio::test]
573
+
async fn test_parse_facets_from_text_with_unresolvable_mention() {
574
+
let resolver = MockIdentityResolver::new();
575
+
let limits = FacetLimits::default();
576
+
577
+
// Only alice.bsky.social is in the resolver, not unknown.handle.com
578
+
let text = "Contact @unknown.handle.com for details #rust";
579
+
let facets = parse_facets_from_text(text, &resolver, &limits).await;
580
+
581
+
assert!(facets.is_some());
582
+
let facets = facets.unwrap();
583
+
// Should only have 1 facet (the hashtag) since the mention couldn't be resolved
584
+
assert_eq!(facets.len(), 1);
585
+
586
+
// Check that it's the hashtag facet
587
+
if let FacetFeature::Tag(tag) = &facets[0].features[0] {
588
+
assert_eq!(tag.tag, "rust");
589
+
} else {
590
+
panic!("Expected Tag feature");
591
+
}
592
+
}
593
+
594
+
#[tokio::test]
595
+
async fn test_parse_facets_from_text_empty() {
596
+
let resolver = MockIdentityResolver::new();
597
+
let limits = FacetLimits::default();
598
+
let text = "No mentions, URLs, or hashtags here";
599
+
let facets = parse_facets_from_text(text, &resolver, &limits).await;
600
+
assert!(facets.is_none());
601
+
}
602
+
603
+
#[tokio::test]
604
+
async fn test_parse_facets_from_text_url_with_at_mention() {
605
+
let resolver = MockIdentityResolver::new();
606
+
let limits = FacetLimits::default();
607
+
608
+
// URLs with @ should not create mention facets
609
+
let text = "Tangled https://tangled.org/@smokesignal.events";
610
+
let facets = parse_facets_from_text(text, &resolver, &limits).await;
611
+
612
+
assert!(facets.is_some());
613
+
let facets = facets.unwrap();
614
+
615
+
// Should have exactly 1 facet (the URL), not 2 (URL + mention)
616
+
assert_eq!(
617
+
facets.len(),
618
+
1,
619
+
"Expected 1 facet (URL only), got {}",
620
+
facets.len()
621
+
);
622
+
623
+
// Verify it's a link facet, not a mention
624
+
if let FacetFeature::Link(link) = &facets[0].features[0] {
625
+
assert_eq!(link.uri, "https://tangled.org/@smokesignal.events");
626
+
} else {
627
+
panic!("Expected Link feature, got Mention or Tag instead");
628
+
}
629
+
}
630
+
631
+
#[tokio::test]
632
+
async fn test_parse_facets_with_mention_limit() {
633
+
let mut resolver = MockIdentityResolver::new();
634
+
resolver.add_identity("bob.test.com", "did:plc:bob456");
635
+
resolver.add_identity("charlie.test.com", "did:plc:charlie789");
636
+
637
+
// Limit to 2 mentions
638
+
let limits = FacetLimits {
639
+
mentions_max: 2,
640
+
tags_max: 5,
641
+
links_max: 5,
642
+
max: 10,
643
+
};
644
+
645
+
let text = "Join @alice.bsky.social @bob.test.com @charlie.test.com";
646
+
let facets = parse_facets_from_text(text, &resolver, &limits).await;
647
+
648
+
assert!(facets.is_some());
649
+
let facets = facets.unwrap();
650
+
// Should only have 2 mentions (alice and bob), charlie should be skipped
651
+
assert_eq!(facets.len(), 2);
652
+
653
+
// Verify they're both mentions
654
+
for facet in &facets {
655
+
assert!(matches!(facet.features[0], FacetFeature::Mention(_)));
656
+
}
657
+
}
658
+
659
+
#[tokio::test]
660
+
async fn test_parse_facets_with_global_limit() {
661
+
let mut resolver = MockIdentityResolver::new();
662
+
resolver.add_identity("bob.test.com", "did:plc:bob456");
663
+
664
+
// Very restrictive global limit
665
+
let limits = FacetLimits {
666
+
mentions_max: 5,
667
+
tags_max: 5,
668
+
links_max: 5,
669
+
max: 3, // Only allow 3 total facets
670
+
};
671
+
672
+
let text =
673
+
"Join @alice.bsky.social @bob.test.com at https://example.com #rust #golang #python";
674
+
let facets = parse_facets_from_text(text, &resolver, &limits).await;
675
+
676
+
assert!(facets.is_some());
677
+
let facets = facets.unwrap();
678
+
// Should be truncated to 3 facets total
679
+
assert_eq!(facets.len(), 3);
680
+
}
681
+
682
+
#[test]
683
+
fn test_parse_urls_multiple_links() {
684
+
let text = "IETF124 is happening in Montreal, Nov 1st to 7th https://www.ietf.org/meeting/124/\n\nWe're confirmed for two days of ATProto community sessions on Monday, Nov 3rd & Tuesday, Mov 4th at ECTO Co-Op. Many of us will also be participating in the free-to-attend IETF hackathon on Sunday, Nov 2nd.\n\nLatest updates and attendees in the forum https://discourse.atprotocol.community/t/update-on-timing-and-plan-for-montreal/164";
685
+
686
+
let facets = parse_urls(text);
687
+
688
+
// Should find both URLs
689
+
assert_eq!(
690
+
facets.len(),
691
+
2,
692
+
"Expected 2 URLs but found {}",
693
+
facets.len()
694
+
);
695
+
696
+
// Check first URL
697
+
if let Some(FacetFeature::Link(link)) = facets[0].features.first() {
698
+
assert_eq!(link.uri, "https://www.ietf.org/meeting/124/");
699
+
} else {
700
+
panic!("Expected Link feature");
701
+
}
702
+
703
+
// Check second URL
704
+
if let Some(FacetFeature::Link(link)) = facets[1].features.first() {
705
+
assert_eq!(
706
+
link.uri,
707
+
"https://discourse.atprotocol.community/t/update-on-timing-and-plan-for-montreal/164"
708
+
);
709
+
} else {
710
+
panic!("Expected Link feature");
711
+
}
712
+
}
713
+
714
+
#[test]
715
+
fn test_parse_urls_with_html_entity() {
716
+
// Test with the HTML entity &amp; in the text
717
+
let text = "IETF124 is happening in Montreal, Nov 1st to 7th https://www.ietf.org/meeting/124/\n\nWe're confirmed for two days of ATProto community sessions on Monday, Nov 3rd & Tuesday, Mov 4th at ECTO Co-Op. Many of us will also be participating in the free-to-attend IETF hackathon on Sunday, Nov 2nd.\n\nLatest updates and attendees in the forum https://discourse.atprotocol.community/t/update-on-timing-and-plan-for-montreal/164";
718
+
719
+
let facets = parse_urls(text);
720
+
721
+
// Should find both URLs
722
+
assert_eq!(
723
+
facets.len(),
724
+
2,
725
+
"Expected 2 URLs but found {}",
726
+
facets.len()
727
+
);
728
+
729
+
// Check first URL
730
+
if let Some(FacetFeature::Link(link)) = facets[0].features.first() {
731
+
assert_eq!(link.uri, "https://www.ietf.org/meeting/124/");
732
+
} else {
733
+
panic!("Expected Link feature");
734
+
}
735
+
736
+
// Check second URL
737
+
if let Some(FacetFeature::Link(link)) = facets[1].features.first() {
738
+
assert_eq!(
739
+
link.uri,
740
+
"https://discourse.atprotocol.community/t/update-on-timing-and-plan-for-montreal/164"
741
+
);
742
+
} else {
743
+
panic!("Expected Link feature");
744
+
}
745
+
}
746
+
747
+
#[test]
748
+
fn test_byte_offset_with_html_entities() {
749
+
// This test demonstrates that HTML entity escaping shifts byte positions.
750
+
// The byte positions shift:
751
+
// In original: '&' is at byte 8 (1 byte)
752
+
// In escaped: '&' starts at byte 8 (5 bytes)
753
+
// This causes facet byte offsets to be misaligned if text is escaped before rendering.
754
+
755
+
// If we have a URL after the ampersand in the original:
756
+
let original_with_url = "Nov 3rd & Tuesday https://example.com";
757
+
let escaped_with_url = "Nov 3rd & Tuesday https://example.com";
758
+
759
+
// Parse URLs from both versions
760
+
let original_facets = parse_urls(original_with_url);
761
+
let escaped_facets = parse_urls(escaped_with_url);
762
+
763
+
// Both should find the URL, but at different byte positions
764
+
assert_eq!(original_facets.len(), 1);
765
+
assert_eq!(escaped_facets.len(), 1);
766
+
767
+
// The byte positions will be different
768
+
assert_eq!(original_facets[0].index.byte_start, 18); // After "Nov 3rd & Tuesday "
769
+
assert_eq!(escaped_facets[0].index.byte_start, 22); // After "Nov 3rd &amp; Tuesday " (4 extra bytes for &amp;)
770
+
}
771
+
772
+
#[test]
773
+
fn test_parse_urls_from_atproto_record_text() {
774
+
// Test parsing URLs from real AT Protocol record description text.
775
+
// This demonstrates the correct byte positions that should be used for facets.
776
+
let text = "Dev, Power Users, and Generally inquisitive folks get a completely unprofessionally amateur interview. Just a yap sesh where chat is part of the call!\n\nโจthe danielโจ & I will be on a Zoom call and I will stream out to https://stream.place/psingletary.com\n\nSubscribe to the publications! https://atprotocalls.leaflet.pub/";
777
+
778
+
let facets = parse_urls(text);
779
+
780
+
assert_eq!(facets.len(), 2, "Should find 2 URLs");
781
+
782
+
// First URL: https://stream.place/psingletary.com
783
+
assert_eq!(facets[0].index.byte_start, 221);
784
+
assert_eq!(facets[0].index.byte_end, 257);
785
+
if let Some(FacetFeature::Link(link)) = facets[0].features.first() {
786
+
assert_eq!(link.uri, "https://stream.place/psingletary.com");
787
+
}
788
+
789
+
// Second URL: https://atprotocalls.leaflet.pub/
790
+
assert_eq!(facets[1].index.byte_start, 290);
791
+
assert_eq!(facets[1].index.byte_end, 323);
792
+
if let Some(FacetFeature::Link(link)) = facets[1].features.first() {
793
+
assert_eq!(link.uri, "https://atprotocalls.leaflet.pub/");
794
+
}
795
+
796
+
// Verify the byte slices match the expected text
797
+
let text_bytes = text.as_bytes();
798
+
assert_eq!(
799
+
std::str::from_utf8(&text_bytes[221..257]).unwrap(),
800
+
"https://stream.place/psingletary.com"
801
+
);
802
+
assert_eq!(
803
+
std::str::from_utf8(&text_bytes[290..323]).unwrap(),
804
+
"https://atprotocalls.leaflet.pub/"
805
+
);
806
+
}
807
+
808
+
#[tokio::test]
809
+
async fn test_parse_mentions_basic() {
810
+
let resolver = MockIdentityResolver::new();
811
+
let limits = FacetLimits::default();
812
+
let text = "Hello @alice.bsky.social!";
813
+
let facets = parse_mentions(text, &resolver, &limits).await;
814
+
815
+
assert_eq!(facets.len(), 1);
816
+
assert_eq!(facets[0].index.byte_start, 6);
817
+
assert_eq!(facets[0].index.byte_end, 24);
818
+
if let Some(FacetFeature::Mention(mention)) = facets[0].features.first() {
819
+
assert_eq!(mention.did, "did:plc:alice123");
820
+
} else {
821
+
panic!("Expected Mention feature");
822
+
}
823
+
}
824
+
825
+
#[tokio::test]
826
+
async fn test_parse_mentions_multiple() {
827
+
let mut resolver = MockIdentityResolver::new();
828
+
resolver.add_identity("bob.example.com", "did:plc:bob456");
829
+
let limits = FacetLimits::default();
830
+
let text = "CC @alice.bsky.social and @bob.example.com";
831
+
let facets = parse_mentions(text, &resolver, &limits).await;
832
+
833
+
assert_eq!(facets.len(), 2);
834
+
if let Some(FacetFeature::Mention(mention)) = facets[0].features.first() {
835
+
assert_eq!(mention.did, "did:plc:alice123");
836
+
}
837
+
if let Some(FacetFeature::Mention(mention)) = facets[1].features.first() {
838
+
assert_eq!(mention.did, "did:plc:bob456");
839
+
}
840
+
}
841
+
842
+
#[tokio::test]
843
+
async fn test_parse_mentions_unresolvable() {
844
+
let resolver = MockIdentityResolver::new();
845
+
let limits = FacetLimits::default();
846
+
// unknown.handle.com is not in the resolver
847
+
let text = "Hello @unknown.handle.com!";
848
+
let facets = parse_mentions(text, &resolver, &limits).await;
849
+
850
+
// Should be empty since the handle can't be resolved
851
+
assert_eq!(facets.len(), 0);
852
+
}
853
+
854
+
#[tokio::test]
855
+
async fn test_parse_mentions_in_url_excluded() {
856
+
let resolver = MockIdentityResolver::new();
857
+
let limits = FacetLimits::default();
858
+
// The @smokesignal.events is inside a URL and should not be parsed as a mention
859
+
let text = "Check https://tangled.org/@smokesignal.events";
860
+
let facets = parse_mentions(text, &resolver, &limits).await;
861
+
862
+
// Should be empty since the mention is inside a URL
863
+
assert_eq!(facets.len(), 0);
864
+
}
865
+
866
+
#[test]
867
+
fn test_parse_tags_basic() {
868
+
let text = "Learning #rust today!";
869
+
let facets = parse_tags(text);
870
+
871
+
assert_eq!(facets.len(), 1);
872
+
assert_eq!(facets[0].index.byte_start, 9);
873
+
assert_eq!(facets[0].index.byte_end, 14);
874
+
if let Some(FacetFeature::Tag(tag)) = facets[0].features.first() {
875
+
assert_eq!(tag.tag, "rust");
876
+
} else {
877
+
panic!("Expected Tag feature");
878
+
}
879
+
}
880
+
881
+
#[test]
882
+
fn test_parse_tags_multiple() {
883
+
let text = "#rust #golang #python are great!";
884
+
let facets = parse_tags(text);
885
+
886
+
assert_eq!(facets.len(), 3);
887
+
if let Some(FacetFeature::Tag(tag)) = facets[0].features.first() {
888
+
assert_eq!(tag.tag, "rust");
889
+
}
890
+
if let Some(FacetFeature::Tag(tag)) = facets[1].features.first() {
891
+
assert_eq!(tag.tag, "golang");
892
+
}
893
+
if let Some(FacetFeature::Tag(tag)) = facets[2].features.first() {
894
+
assert_eq!(tag.tag, "python");
895
+
}
896
+
}
897
+
898
+
#[test]
899
+
fn test_parse_tags_excludes_numeric() {
900
+
let text = "Item #42 is special #test123";
901
+
let facets = parse_tags(text);
902
+
903
+
// #42 should be excluded (purely numeric), #test123 should be included
904
+
assert_eq!(facets.len(), 1);
905
+
if let Some(FacetFeature::Tag(tag)) = facets[0].features.first() {
906
+
assert_eq!(tag.tag, "test123");
907
+
}
908
+
}
909
+
910
+
#[test]
911
+
fn test_parse_urls_basic() {
912
+
let text = "Visit https://example.com today!";
913
+
let facets = parse_urls(text);
914
+
915
+
assert_eq!(facets.len(), 1);
916
+
assert_eq!(facets[0].index.byte_start, 6);
917
+
assert_eq!(facets[0].index.byte_end, 25);
918
+
if let Some(FacetFeature::Link(link)) = facets[0].features.first() {
919
+
assert_eq!(link.uri, "https://example.com");
920
+
}
921
+
}
922
+
923
+
#[test]
924
+
fn test_parse_urls_with_path() {
925
+
let text = "Check https://example.com/path/to/page?query=1#section";
926
+
let facets = parse_urls(text);
927
+
928
+
assert_eq!(facets.len(), 1);
929
+
if let Some(FacetFeature::Link(link)) = facets[0].features.first() {
930
+
assert_eq!(link.uri, "https://example.com/path/to/page?query=1#section");
931
+
}
932
+
}
933
+
934
+
#[test]
935
+
fn test_facet_limits_default() {
936
+
let limits = FacetLimits::default();
937
+
assert_eq!(limits.mentions_max, 5);
938
+
assert_eq!(limits.tags_max, 5);
939
+
assert_eq!(limits.links_max, 5);
940
+
assert_eq!(limits.max, 10);
941
+
}
942
+
}
+50
crates/atproto-extras/src/lib.rs
···
1
+
//! Extra utilities for AT Protocol applications.
2
+
//!
3
+
//! This crate provides additional utilities that complement the core AT Protocol
4
+
//! identity and record crates. Currently, it focuses on rich text facet parsing.
5
+
//!
6
+
//! ## Features
7
+
//!
8
+
//! - **Facet Parsing**: Extract mentions, URLs, and hashtags from plain text
9
+
//! with correct UTF-8 byte offset calculation
10
+
//! - **Identity Integration**: Resolve mention handles to DIDs during parsing
11
+
//!
12
+
//! ## Example
13
+
//!
14
+
//! ```ignore
15
+
//! use atproto_extras::{parse_facets_from_text, FacetLimits};
16
+
//!
17
+
//! // Parse facets from text (requires an IdentityResolver)
18
+
//! let text = "Hello @alice.bsky.social! Check out https://example.com #rust";
19
+
//! let limits = FacetLimits::default();
20
+
//! let facets = parse_facets_from_text(text, &resolver, &limits).await;
21
+
//! ```
22
+
//!
23
+
//! ## Byte Offset Calculation
24
+
//!
25
+
//! This implementation correctly uses UTF-8 byte offsets as required by AT Protocol.
26
+
//! The facets use "inclusive start and exclusive end" byte ranges. All parsing is done
27
+
//! using `regex::bytes::Regex` which operates on byte slices and returns byte positions,
28
+
//! ensuring correct handling of multi-byte UTF-8 characters (emojis, CJK, accented chars).
29
+
30
+
#![forbid(unsafe_code)]
31
+
#![warn(missing_docs)]
32
+
33
+
/// Rich text facet parsing for AT Protocol.
34
+
///
35
+
/// This module provides functionality for extracting semantic annotations (facets)
36
+
/// from plain text. Facets include:
37
+
///
38
+
/// - **Mentions**: User handles prefixed with `@` (e.g., `@alice.bsky.social`)
39
+
/// - **Links**: HTTP/HTTPS URLs
40
+
/// - **Tags**: Hashtags prefixed with `#` or `＃` (e.g., `#rust`)
41
+
///
42
+
/// ## Byte Offsets
43
+
///
44
+
/// All facet indices use UTF-8 byte offsets, not character indices. This is
45
+
/// critical for correct handling of multi-byte characters like emojis or
46
+
/// non-ASCII text.
47
+
pub mod facets;
48
+
49
+
/// Re-export commonly used types for convenience.
50
+
pub use facets::{FacetLimits, parse_facets_from_text, parse_mentions, parse_tags, parse_urls};
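The byte-offset convention documented above (inclusive start, exclusive end, UTF-8 bytes rather than character indices) is easy to get wrong once the text contains multi-byte characters. A minimal standalone sketch, not tied to this crate's API, showing how byte offsets diverge from character positions:

```rust
fn main() {
    // "✨" is one character but occupies 3 bytes in UTF-8.
    let text = "✨ #rust";

    // The '#' is the 3rd character, but its byte offset is 4
    // (3 bytes for the emoji plus 1 for the space). Facets must
    // store the byte offset, not the character index.
    let byte_start = text.find('#').expect("hashtag present");
    let byte_end = byte_start + "#rust".len();

    assert_eq!(byte_start, 4);
    assert_eq!(byte_end, 9);

    // Slicing by the byte range recovers the annotated substring,
    // matching the "inclusive start, exclusive end" convention.
    assert_eq!(&text[byte_start..byte_end], "#rust");
}
```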
+19
-1
crates/atproto-identity/src/model.rs
···
70
70
/// The DID identifier (e.g., "did:plc:abc123").
71
71
pub id: String,
72
72
/// Alternative identifiers like handles and domains.
73
+
#[serde(default)]
73
74
pub also_known_as: Vec<String>,
74
75
/// Available services for this identity.
76
+
#[serde(default)]
75
77
pub service: Vec<Service>,
76
78
77
79
/// Cryptographic verification methods.
78
-
#[serde(alias = "verificationMethod")]
80
+
#[serde(alias = "verificationMethod", default)]
79
81
pub verification_method: Vec<VerificationMethod>,
80
82
81
83
/// Additional document properties not explicitly defined.
···
402
404
let document = document.unwrap();
403
405
assert_eq!(document.id, "did:plc:cbkjy5n7bk3ax2wplmtjofq2");
404
406
}
407
+
}
408
+
409
+
#[test]
410
+
fn test_deserialize_service_did_document() {
411
+
// DID document from api.bsky.app - a service DID without alsoKnownAs
412
+
let document = serde_json::from_str::<Document>(
413
+
r##"{"@context":["https://www.w3.org/ns/did/v1","https://w3id.org/security/multikey/v1"],"id":"did:web:api.bsky.app","verificationMethod":[{"id":"did:web:api.bsky.app#atproto","type":"Multikey","controller":"did:web:api.bsky.app","publicKeyMultibase":"zQ3shpRzb2NDriwCSSsce6EqGxG23kVktHZc57C3NEcuNy1jg"}],"service":[{"id":"#bsky_notif","type":"BskyNotificationService","serviceEndpoint":"https://api.bsky.app"},{"id":"#bsky_appview","type":"BskyAppView","serviceEndpoint":"https://api.bsky.app"}]}"##,
414
+
);
415
+
assert!(document.is_ok(), "Failed to parse: {:?}", document.err());
416
+
417
+
let document = document.unwrap();
418
+
assert_eq!(document.id, "did:web:api.bsky.app");
419
+
assert!(document.also_known_as.is_empty());
420
+
assert_eq!(document.service.len(), 2);
421
+
assert_eq!(document.service[0].id, "#bsky_notif");
422
+
assert_eq!(document.service[1].id, "#bsky_appview");
405
423
}
406
424
}
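The `#[serde(default)]` attributes added above make the array-valued fields optional during deserialization, which is what lets the service DID document without `alsoKnownAs` in the new test parse. A small self-contained sketch of the same serde behavior, using a hypothetical struct (not the crate's `Document`) and assuming `serde`/`serde_json` with the derive feature:

```rust
use serde::Deserialize;

// Hypothetical stand-in for a DID document: fields marked with
// #[serde(default)] fall back to an empty Vec when the key is absent.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct MiniDocument {
    id: String,
    #[serde(default)]
    also_known_as: Vec<String>,
    #[serde(default)]
    service: Vec<String>,
}

fn main() {
    // No alsoKnownAs or service keys at all.
    let doc: MiniDocument =
        serde_json::from_str(r#"{"id":"did:web:api.example.com"}"#).unwrap();
    assert_eq!(doc.id, "did:web:api.example.com");
    assert!(doc.also_known_as.is_empty());
    assert!(doc.service.is_empty());
}
```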
+75
-24
crates/atproto-jetstream/src/consumer.rs
···
2
2
//!
3
3
//! WebSocket event consumption with background processing and
4
4
//! customizable event handler dispatch.
5
+
//!
6
+
//! ## Memory Efficiency
7
+
//!
8
+
//! This module is optimized for high-throughput event processing with minimal allocations:
9
+
//!
10
+
//! - **Arc-based event sharing**: Events are wrapped in `Arc` and shared across all handlers,
11
+
//! avoiding expensive clones of event data structures.
12
+
//! - **Zero-copy handler IDs**: Handler identifiers use string slices to avoid allocations
13
+
//! during registration and dispatch.
14
+
//! - **Optimized query building**: WebSocket query strings are built with pre-allocated
15
+
//! capacity to minimize reallocations.
16
+
//!
17
+
//! ## Usage
18
+
//!
19
+
//! Implement the `EventHandler` trait to process events:
20
+
//!
21
+
//! ```rust
22
+
//! use atproto_jetstream::{EventHandler, JetstreamEvent};
23
+
//! use async_trait::async_trait;
24
+
//! use std::sync::Arc;
25
+
//! use anyhow::Result;
26
+
//!
27
+
//! struct MyHandler;
28
+
//!
29
+
//! #[async_trait]
30
+
//! impl EventHandler for MyHandler {
31
+
//! async fn handle_event(&self, event: Arc<JetstreamEvent>) -> Result<()> {
32
+
//! // Process event without cloning
33
+
//! Ok(())
34
+
//! }
35
+
//!
36
+
//! fn handler_id(&self) -> &str {
37
+
//! "my-handler"
38
+
//! }
39
+
//! }
40
+
//! ```
5
41
6
42
use crate::errors::ConsumerError;
7
43
use anyhow::Result;
···
133
169
#[async_trait]
134
170
pub trait EventHandler: Send + Sync {
135
171
/// Handle a received event
136
-
async fn handle_event(&self, event: JetstreamEvent) -> Result<()>;
172
+
///
173
+
/// Events are wrapped in Arc to enable efficient sharing across multiple handlers
174
+
/// without cloning the entire event data structure.
175
+
async fn handle_event(&self, event: Arc<JetstreamEvent>) -> Result<()>;
137
176
138
177
/// Get the handler's identifier
139
-
fn handler_id(&self) -> String;
178
+
///
179
+
/// Returns a string slice to avoid unnecessary allocations.
180
+
fn handler_id(&self) -> &str;
140
181
}
141
182
142
183
#[cfg_attr(debug_assertions, derive(Debug))]
···
167
208
pub struct Consumer {
168
209
config: ConsumerTaskConfig,
169
210
handlers: Arc<RwLock<HashMap<String, Arc<dyn EventHandler>>>>,
170
-
event_sender: Arc<RwLock<Option<broadcast::Sender<JetstreamEvent>>>>,
211
+
event_sender: Arc<RwLock<Option<broadcast::Sender<Arc<JetstreamEvent>>>>>,
171
212
}
172
213
173
214
impl Consumer {
···
185
226
let handler_id = handler.handler_id();
186
227
let mut handlers = self.handlers.write().await;
187
228
188
-
if handlers.contains_key(&handler_id) {
229
+
if handlers.contains_key(handler_id) {
189
230
return Err(ConsumerError::HandlerRegistrationFailed(format!(
190
231
"Handler with ID '{}' already registered",
191
232
handler_id
···
193
234
.into());
194
235
}
195
236
196
-
handlers.insert(handler_id.clone(), handler);
237
+
handlers.insert(handler_id.to_string(), handler);
197
238
Ok(())
198
239
}
199
240
···
205
246
}
206
247
207
248
/// Get a broadcast receiver for events
208
-
pub async fn get_event_receiver(&self) -> Result<broadcast::Receiver<JetstreamEvent>> {
249
+
///
250
+
/// Events are wrapped in Arc to enable efficient sharing without cloning.
251
+
pub async fn get_event_receiver(&self) -> Result<broadcast::Receiver<Arc<JetstreamEvent>>> {
209
252
let sender_guard = self.event_sender.read().await;
210
253
match sender_guard.as_ref() {
211
254
Some(sender) => Ok(sender.subscribe()),
···
249
292
tracing::info!("Starting Jetstream consumer");
250
293
251
294
// Build WebSocket URL with query parameters
252
-
let mut query_params = vec![];
295
+
// Pre-allocate capacity to avoid reallocations during string building
296
+
let capacity = 50 // Base parameters
297
+
+ self.config.collections.len() * 30 // Estimate per collection
298
+
+ self.config.dids.len() * 60; // Estimate per DID
299
+
let mut query_string = String::with_capacity(capacity);
253
300
254
301
// Add compression parameter
255
-
query_params.push(format!("compress={}", self.config.compression));
302
+
query_string.push_str("compress=");
303
+
query_string.push_str(if self.config.compression { "true" } else { "false" });
256
304
257
305
// Add requireHello parameter
258
-
query_params.push(format!("requireHello={}", self.config.require_hello));
306
+
query_string.push_str("&requireHello=");
307
+
query_string.push_str(if self.config.require_hello { "true" } else { "false" });
259
308
260
309
// Add wantedCollections if specified (each collection as a separate query parameter)
261
310
if !self.config.collections.is_empty() && !self.config.require_hello {
262
311
for collection in &self.config.collections {
263
-
query_params.push(format!(
264
-
"wantedCollections={}",
265
-
urlencoding::encode(collection)
266
-
));
312
+
query_string.push_str("&wantedCollections=");
313
+
query_string.push_str(&urlencoding::encode(collection));
267
314
}
268
315
}
269
316
270
317
// Add wantedDids if specified (each DID as a separate query parameter)
271
318
if !self.config.dids.is_empty() && !self.config.require_hello {
272
319
for did in &self.config.dids {
273
-
query_params.push(format!("wantedDids={}", urlencoding::encode(did)));
320
+
query_string.push_str("&wantedDids=");
321
+
query_string.push_str(&urlencoding::encode(did));
274
322
}
275
323
}
276
324
277
325
// Add maxMessageSizeBytes if specified
278
326
if let Some(max_size) = self.config.max_message_size_bytes {
279
-
query_params.push(format!("maxMessageSizeBytes={}", max_size));
327
+
use std::fmt::Write;
328
+
write!(&mut query_string, "&maxMessageSizeBytes={}", max_size).unwrap();
280
329
}
281
330
282
331
// Add cursor if specified
283
332
if let Some(cursor) = self.config.cursor {
284
-
query_params.push(format!("cursor={}", cursor));
333
+
use std::fmt::Write;
334
+
write!(&mut query_string, "&cursor={}", cursor).unwrap();
285
335
}
286
-
287
-
let query_string = query_params.join("&");
288
336
let ws_url = Uri::from_str(&format!(
289
337
"wss://{}/subscribe?{}",
290
338
self.config.jetstream_hostname, query_string
···
335
383
break;
336
384
},
337
385
() = &mut sleeper => {
338
-
// consumer_control_insert(&self.pool, &self.config.jetstream_hostname, time_usec).await?;
339
-
340
386
sleeper.as_mut().reset(Instant::now() + interval);
341
387
},
342
388
item = client.next() => {
···
404
450
}
405
451
406
452
/// Dispatch event to all registered handlers
453
+
///
454
+
/// Wraps the event in Arc once and shares it across all handlers,
455
+
/// avoiding expensive clones of the event data structure.
407
456
async fn dispatch_to_handlers(&self, event: JetstreamEvent) -> Result<()> {
408
457
let handlers = self.handlers.read().await;
458
+
let event = Arc::new(event);
409
459
410
460
for (handler_id, handler) in handlers.iter() {
411
461
let handler_span = tracing::debug_span!("handler_dispatch", handler_id = %handler_id);
462
+
let event_ref = Arc::clone(&event);
412
463
async {
413
-
if let Err(err) = handler.handle_event(event.clone()).await {
464
+
if let Err(err) = handler.handle_event(event_ref).await {
414
465
tracing::error!(
415
466
error = ?err,
416
467
handler_id = %handler_id,
···
440
491
441
492
#[async_trait]
442
493
impl EventHandler for LoggingHandler {
443
-
async fn handle_event(&self, _event: JetstreamEvent) -> Result<()> {
494
+
async fn handle_event(&self, _event: Arc<JetstreamEvent>) -> Result<()> {
444
495
Ok(())
445
496
}
446
497
447
-
fn handler_id(&self) -> String {
448
-
self.id.clone()
498
+
fn handler_id(&self) -> &str {
499
+
&self.id
449
500
}
450
501
}
451
502
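The dispatch change above wraps each `JetstreamEvent` in an `Arc` once and hands every handler a cheap pointer clone instead of a copy of the event. A minimal sketch of that sharing pattern in isolation; the `Event` type and `handle` function here are hypothetical, not the crate's API:

```rust
use std::sync::Arc;

// Hypothetical event payload; imagine it being large and expensive to clone.
struct Event {
    payload: String,
}

// Hypothetical handler that only borrows the shared event.
fn handle(id: &str, event: &Event) {
    println!("{} saw: {}", id, event.payload);
}

fn main() {
    let event = Arc::new(Event {
        payload: "commit on app.bsky.feed.post".to_string(),
    });

    // Each "handler" gets its own Arc clone: a reference-count bump,
    // not a copy of the payload.
    for id in ["logger", "indexer", "notifier"] {
        let shared = Arc::clone(&event);
        handle(id, &shared);
    }

    // After the clones are dropped there is still exactly one allocation.
    assert_eq!(Arc::strong_count(&event), 1);
}
```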
+374
-5
crates/atproto-oauth/src/scopes.rs
···
38
38
Atproto,
39
39
/// Transition scope for migration operations
40
40
Transition(TransitionScope),
41
+
/// Include scope for referencing permission sets by NSID
42
+
Include(IncludeScope),
41
43
/// OpenID Connect scope - required for OpenID Connect authentication
42
44
OpenId,
43
45
/// Profile scope - access to user profile information
···
91
93
Generic,
92
94
/// Email transition operations
93
95
Email,
96
+
}
97
+
98
+
/// Include scope for referencing permission sets by NSID
99
+
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
100
+
pub struct IncludeScope {
101
+
/// The permission set NSID (e.g., "app.example.authFull")
102
+
pub nsid: String,
103
+
/// Optional audience DID for inherited RPC permissions
104
+
pub aud: Option<String>,
94
105
}
95
106
96
107
/// Blob scope with mime type constraints
···
310
321
"rpc",
311
322
"atproto",
312
323
"transition",
324
+
"include",
313
325
"openid",
314
326
"profile",
315
327
"email",
···
349
361
"rpc" => Self::parse_rpc(suffix),
350
362
"atproto" => Self::parse_atproto(suffix),
351
363
"transition" => Self::parse_transition(suffix),
364
+
"include" => Self::parse_include(suffix),
352
365
"openid" => Self::parse_openid(suffix),
353
366
"profile" => Self::parse_profile(suffix),
354
367
"email" => Self::parse_email(suffix),
···
573
586
Ok(Scope::Transition(scope))
574
587
}
575
588
589
+
fn parse_include(suffix: Option<&str>) -> Result<Self, ParseError> {
590
+
let (nsid, params) = match suffix {
591
+
Some(s) => {
592
+
if let Some(pos) = s.find('?') {
593
+
(&s[..pos], Some(&s[pos + 1..]))
594
+
} else {
595
+
(s, None)
596
+
}
597
+
}
598
+
None => return Err(ParseError::MissingResource),
599
+
};
600
+
601
+
if nsid.is_empty() {
602
+
return Err(ParseError::MissingResource);
603
+
}
604
+
605
+
let aud = if let Some(params) = params {
606
+
let parsed_params = parse_query_string(params);
607
+
parsed_params
608
+
.get("aud")
609
+
.and_then(|v| v.first())
610
+
.map(|s| url_decode(s))
611
+
} else {
612
+
None
613
+
};
614
+
615
+
Ok(Scope::Include(IncludeScope {
616
+
nsid: nsid.to_string(),
617
+
aud,
618
+
}))
619
+
}
620
+
576
621
fn parse_openid(suffix: Option<&str>) -> Result<Self, ParseError> {
577
622
if suffix.is_some() {
578
623
return Err(ParseError::InvalidResource(
···
677
722
if let Some(lxm) = scope.lxm.iter().next() {
678
723
match lxm {
679
724
RpcLexicon::All => "rpc:*".to_string(),
680
-
RpcLexicon::Nsid(nsid) => format!("rpc:{}", nsid),
725
+
RpcLexicon::Nsid(nsid) => format!("rpc:{}?aud=*", nsid),
726
+
}
727
+
} else {
728
+
"rpc:*".to_string()
729
+
}
730
+
} else if scope.lxm.len() == 1 && scope.aud.len() == 1 {
731
+
// Single lxm and single aud (aud is not All, handled above)
732
+
if let (Some(lxm), Some(aud)) =
733
+
(scope.lxm.iter().next(), scope.aud.iter().next())
734
+
{
735
+
match (lxm, aud) {
736
+
(RpcLexicon::Nsid(nsid), RpcAudience::Did(did)) => {
737
+
format!("rpc:{}?aud={}", nsid, did)
738
+
}
739
+
(RpcLexicon::All, RpcAudience::Did(did)) => {
740
+
format!("rpc:*?aud={}", did)
741
+
}
742
+
_ => "rpc:*".to_string(),
681
743
}
682
744
} else {
683
745
"rpc:*".to_string()
···
713
775
TransitionScope::Generic => "transition:generic".to_string(),
714
776
TransitionScope::Email => "transition:email".to_string(),
715
777
},
778
+
Scope::Include(scope) => {
779
+
if let Some(ref aud) = scope.aud {
780
+
format!("include:{}?aud={}", scope.nsid, url_encode(aud))
781
+
} else {
782
+
format!("include:{}", scope.nsid)
783
+
}
784
+
}
716
785
Scope::OpenId => "openid".to_string(),
717
786
Scope::Profile => "profile".to_string(),
718
787
Scope::Email => "email".to_string(),
···
732
801
// Other scopes don't grant transition scopes
733
802
(_, Scope::Transition(_)) => false,
734
803
(Scope::Transition(_), _) => false,
804
+
// Include scopes only grant themselves (exact match including aud)
805
+
(Scope::Include(a), Scope::Include(b)) => a == b,
806
+
// Other scopes don't grant include scopes
807
+
(_, Scope::Include(_)) => false,
808
+
(Scope::Include(_), _) => false,
735
809
// OpenID Connect scopes only grant themselves
736
810
(Scope::OpenId, Scope::OpenId) => true,
737
811
(Scope::OpenId, _) => false,
···
873
947
params
874
948
}
875
949
950
+
/// Decode a percent-encoded string
951
+
fn url_decode(s: &str) -> String {
952
+
let mut result = String::with_capacity(s.len());
953
+
let mut chars = s.chars().peekable();
954
+
955
+
while let Some(c) = chars.next() {
956
+
if c == '%' {
957
+
let hex: String = chars.by_ref().take(2).collect();
958
+
if hex.len() == 2 {
959
+
if let Ok(byte) = u8::from_str_radix(&hex, 16) {
960
+
result.push(byte as char);
961
+
continue;
962
+
}
963
+
}
964
+
result.push('%');
965
+
result.push_str(&hex);
966
+
} else {
967
+
result.push(c);
968
+
}
969
+
}
970
+
971
+
result
972
+
}
973
+
974
+
/// Encode a string for use in a URL query parameter
975
+
fn url_encode(s: &str) -> String {
976
+
let mut result = String::with_capacity(s.len() * 3);
977
+
978
+
for c in s.chars() {
979
+
match c {
980
+
'A'..='Z' | 'a'..='z' | '0'..='9' | '-' | '_' | '.' | '~' | ':' => {
981
+
result.push(c);
982
+
}
983
+
_ => {
984
+
for byte in c.to_string().as_bytes() {
985
+
result.push_str(&format!("%{:02X}", byte));
986
+
}
987
+
}
988
+
}
989
+
}
990
+
991
+
result
992
+
}
993
+
876
994
/// Error type for scope parsing
877
995
#[derive(Debug, Clone, PartialEq, Eq)]
878
996
pub enum ParseError {
···
1056
1174
("repo:foo.bar", "repo:foo.bar"),
1057
1175
("repo:foo.bar?action=create", "repo:foo.bar?action=create"),
1058
1176
("rpc:*", "rpc:*"),
1177
+
("rpc:com.example.service", "rpc:com.example.service?aud=*"),
1178
+
(
1179
+
"rpc:com.example.service?aud=did:example:123",
1180
+
"rpc:com.example.service?aud=did:example:123",
1181
+
),
1059
1182
];
1060
1183
1061
1184
for (input, expected) in tests {
···
1677
1800
1678
1801
// Test with complex scopes including query parameters
1679
1802
let scopes = vec![
1680
-
Scope::parse("rpc:com.example.service?aud=did:example:123&lxm=com.example.method")
1681
-
.unwrap(),
1803
+
Scope::parse("rpc:com.example.service?aud=did:example:123").unwrap(),
1682
1804
Scope::parse("repo:foo.bar?action=create&action=update").unwrap(),
1683
1805
Scope::parse("blob:image/*?accept=image/png&accept=image/jpeg").unwrap(),
1684
1806
];
1685
1807
let result = Scope::serialize_multiple(&scopes);
1686
1808
// The result should be sorted alphabetically
1687
-
// Note: RPC scope with query params is serialized as "rpc?aud=...&lxm=..."
1809
+
// Single lxm + single aud is serialized as "rpc:[lxm]?aud=[aud]"
1688
1810
assert!(result.starts_with("blob:"));
1689
1811
assert!(result.contains(" repo:"));
1690
-
assert!(result.contains("rpc?aud=did:example:123&lxm=com.example.service"));
1812
+
assert!(result.contains("rpc:com.example.service?aud=did:example:123"));
1691
1813
1692
1814
// Test with transition scopes
1693
1815
let scopes = vec![
···
1835
1957
assert!(!result.contains(&Scope::parse("account:email").unwrap()));
1836
1958
assert!(result.contains(&Scope::parse("account:email?action=manage").unwrap()));
1837
1959
assert!(result.contains(&Scope::parse("account:repo").unwrap()));
1960
+
}
1961
+
1962
+
#[test]
1963
+
fn test_repo_nsid_with_wildcard_suffix() {
1964
+
// Test parsing "repo:app.bsky.feed.*" - the asterisk is treated as a literal part of the NSID,
1965
+
// not as a wildcard pattern. Only "repo:*" has special wildcard behavior for ALL collections.
1966
+
let scope = Scope::parse("repo:app.bsky.feed.*").unwrap();
1967
+
1968
+
// Verify it parses as a specific NSID, not as a wildcard
1969
+
assert_eq!(
1970
+
scope,
1971
+
Scope::Repo(RepoScope {
1972
+
collection: RepoCollection::Nsid("app.bsky.feed.*".to_string()),
1973
+
actions: {
1974
+
let mut actions = BTreeSet::new();
1975
+
actions.insert(RepoAction::Create);
1976
+
actions.insert(RepoAction::Update);
1977
+
actions.insert(RepoAction::Delete);
1978
+
actions
1979
+
}
1980
+
})
1981
+
);
1982
+
1983
+
// Verify normalization preserves the literal NSID
1984
+
assert_eq!(scope.to_string_normalized(), "repo:app.bsky.feed.*");
1985
+
1986
+
// Test that it does NOT grant access to "app.bsky.feed.post"
1987
+
// (because "app.bsky.feed.*" is a literal NSID, not a pattern)
1988
+
let specific_feed = Scope::parse("repo:app.bsky.feed.post").unwrap();
1989
+
assert!(!scope.grants(&specific_feed));
1990
+
1991
+
// Test that only "repo:*" grants access to "app.bsky.feed.*"
1992
+
let repo_all = Scope::parse("repo:*").unwrap();
1993
+
assert!(repo_all.grants(&scope));
1994
+
1995
+
// Test that "repo:app.bsky.feed.*" only grants itself
1996
+
assert!(scope.grants(&scope));
1997
+
1998
+
// Test with actions
1999
+
let scope_with_create = Scope::parse("repo:app.bsky.feed.*?action=create").unwrap();
2000
+
assert_eq!(
2001
+
scope_with_create,
2002
+
Scope::Repo(RepoScope {
2003
+
collection: RepoCollection::Nsid("app.bsky.feed.*".to_string()),
2004
+
actions: {
2005
+
let mut actions = BTreeSet::new();
2006
+
actions.insert(RepoAction::Create);
2007
+
actions
2008
+
}
2009
+
})
2010
+
);
2011
+
2012
+
// The full scope (with all actions) grants the create-only scope
2013
+
assert!(scope.grants(&scope_with_create));
2014
+
// But the create-only scope does NOT grant the full scope
2015
+
assert!(!scope_with_create.grants(&scope));
2016
+
2017
+
// Test parsing multiple scopes with NSID wildcards
2018
+
let scopes = Scope::parse_multiple("repo:app.bsky.feed.* repo:app.bsky.graph.* repo:*").unwrap();
2019
+
assert_eq!(scopes.len(), 3);
2020
+
2021
+
// Test that parse_multiple_reduced properly reduces when "repo:*" is present
2022
+
let reduced = Scope::parse_multiple_reduced("repo:app.bsky.feed.* repo:app.bsky.graph.* repo:*").unwrap();
2023
+
assert_eq!(reduced.len(), 1);
2024
+
assert_eq!(reduced[0], repo_all);
2025
+
}
2026
+
2027
+
#[test]
2028
+
fn test_include_scope_parsing() {
2029
+
// Test basic include scope
2030
+
let scope = Scope::parse("include:app.example.authFull").unwrap();
2031
+
assert_eq!(
2032
+
scope,
2033
+
Scope::Include(IncludeScope {
2034
+
nsid: "app.example.authFull".to_string(),
2035
+
aud: None,
2036
+
})
2037
+
);
2038
+
2039
+
// Test include scope with audience
2040
+
let scope = Scope::parse("include:app.example.authFull?aud=did:web:api.example.com").unwrap();
2041
+
assert_eq!(
2042
+
scope,
2043
+
Scope::Include(IncludeScope {
2044
+
nsid: "app.example.authFull".to_string(),
2045
+
aud: Some("did:web:api.example.com".to_string()),
2046
+
})
2047
+
);
2048
+
2049
+
// Test include scope with URL-encoded audience (with fragment)
2050
+
let scope = Scope::parse("include:app.example.authFull?aud=did:web:api.example.com%23svc_chat").unwrap();
2051
+
assert_eq!(
2052
+
scope,
2053
+
Scope::Include(IncludeScope {
2054
+
nsid: "app.example.authFull".to_string(),
2055
+
aud: Some("did:web:api.example.com#svc_chat".to_string()),
2056
+
})
2057
+
);
2058
+
2059
+
// Test missing NSID
2060
+
assert!(matches!(
2061
+
Scope::parse("include"),
2062
+
Err(ParseError::MissingResource)
2063
+
));
2064
+
2065
+
// Test empty NSID with query params
2066
+
assert!(matches!(
2067
+
Scope::parse("include:?aud=did:example:123"),
2068
+
Err(ParseError::MissingResource)
2069
+
));
2070
+
}
2071
+
2072
+
#[test]
2073
+
fn test_include_scope_normalization() {
2074
+
// Test normalization without audience
2075
+
let scope = Scope::parse("include:com.example.authBasic").unwrap();
2076
+
assert_eq!(scope.to_string_normalized(), "include:com.example.authBasic");
2077
+
2078
+
// Test normalization with audience (no special chars)
2079
+
let scope = Scope::parse("include:com.example.authBasic?aud=did:plc:xyz123").unwrap();
2080
+
assert_eq!(
2081
+
scope.to_string_normalized(),
2082
+
"include:com.example.authBasic?aud=did:plc:xyz123"
2083
+
);
2084
+
2085
+
// Test normalization with URL encoding (fragment needs encoding)
2086
+
let scope = Scope::parse("include:app.example.authFull?aud=did:web:api.example.com%23svc_chat").unwrap();
2087
+
let normalized = scope.to_string_normalized();
2088
+
assert_eq!(
2089
+
normalized,
2090
+
"include:app.example.authFull?aud=did:web:api.example.com%23svc_chat"
2091
+
);
2092
+
}
2093
+
2094
+
#[test]
2095
+
fn test_include_scope_grants() {
2096
+
let include1 = Scope::parse("include:app.example.authFull").unwrap();
2097
+
let include2 = Scope::parse("include:app.example.authBasic").unwrap();
2098
+
let include1_with_aud = Scope::parse("include:app.example.authFull?aud=did:plc:xyz").unwrap();
2099
+
let account = Scope::parse("account:email").unwrap();
2100
+
2101
+
// Include scopes only grant themselves (exact match)
2102
+
assert!(include1.grants(&include1));
2103
+
assert!(!include1.grants(&include2));
2104
+
assert!(!include1.grants(&include1_with_aud)); // Different because aud differs
2105
+
assert!(include1_with_aud.grants(&include1_with_aud));
2106
+
2107
+
// Include scopes don't grant other scope types
2108
+
assert!(!include1.grants(&account));
2109
+
assert!(!account.grants(&include1));
2110
+
2111
+
// Include scopes don't grant atproto or transition
2112
+
let atproto = Scope::parse("atproto").unwrap();
2113
+
let transition = Scope::parse("transition:generic").unwrap();
2114
+
assert!(!include1.grants(&atproto));
2115
+
assert!(!include1.grants(&transition));
2116
+
assert!(!atproto.grants(&include1));
2117
+
assert!(!transition.grants(&include1));
2118
+
}
2119
+
2120
+
#[test]
2121
+
fn test_parse_multiple_with_include() {
2122
+
let scopes = Scope::parse_multiple("atproto include:app.example.auth repo:*").unwrap();
2123
+
assert_eq!(scopes.len(), 3);
2124
+
assert_eq!(scopes[0], Scope::Atproto);
2125
+
assert!(matches!(scopes[1], Scope::Include(_)));
2126
+
assert!(matches!(scopes[2], Scope::Repo(_)));
2127
+
2128
+
// Test with URL-encoded audience
2129
+
let scopes = Scope::parse_multiple(
2130
+
"include:app.example.auth?aud=did:web:api.example.com%23svc account:email"
2131
+
).unwrap();
2132
+
assert_eq!(scopes.len(), 2);
2133
+
if let Scope::Include(inc) = &scopes[0] {
2134
+
assert_eq!(inc.nsid, "app.example.auth");
2135
+
assert_eq!(inc.aud, Some("did:web:api.example.com#svc".to_string()));
2136
+
} else {
2137
+
panic!("Expected Include scope");
2138
+
}
2139
+
}
2140
+
2141
+
#[test]
2142
+
fn test_parse_multiple_reduced_with_include() {
2143
+
// Include scopes don't reduce each other (each is distinct)
2144
+
let scopes = Scope::parse_multiple_reduced(
2145
+
"include:app.example.auth include:app.example.other include:app.example.auth"
2146
+
).unwrap();
2147
+
assert_eq!(scopes.len(), 2); // Duplicates are removed
2148
+
assert!(scopes.contains(&Scope::Include(IncludeScope {
2149
+
nsid: "app.example.auth".to_string(),
2150
+
aud: None,
2151
+
})));
2152
+
assert!(scopes.contains(&Scope::Include(IncludeScope {
2153
+
nsid: "app.example.other".to_string(),
2154
+
aud: None,
2155
+
})));
2156
+
2157
+
// Include scopes with different audiences are not duplicates
2158
+
let scopes = Scope::parse_multiple_reduced(
2159
+
"include:app.example.auth include:app.example.auth?aud=did:plc:xyz"
2160
+
).unwrap();
2161
+
assert_eq!(scopes.len(), 2);
2162
+
}
2163
+
2164
+
#[test]
2165
+
fn test_serialize_multiple_with_include() {
2166
+
let scopes = vec![
2167
+
Scope::parse("repo:*").unwrap(),
2168
+
Scope::parse("include:app.example.authFull").unwrap(),
2169
+
Scope::Atproto,
2170
+
];
2171
+
let result = Scope::serialize_multiple(&scopes);
2172
+
assert_eq!(result, "atproto include:app.example.authFull repo:*");
2173
+
2174
+
// Test with URL-encoded audience
2175
+
let scopes = vec![
2176
+
Scope::Include(IncludeScope {
2177
+
nsid: "app.example.auth".to_string(),
2178
+
aud: Some("did:web:api.example.com#svc".to_string()),
2179
+
}),
2180
+
];
2181
+
let result = Scope::serialize_multiple(&scopes);
2182
+
assert_eq!(result, "include:app.example.auth?aud=did:web:api.example.com%23svc");
2183
+
}
2184
+
2185
+
#[test]
2186
+
fn test_remove_scope_with_include() {
2187
+
let scopes = vec![
2188
+
Scope::Atproto,
2189
+
Scope::parse("include:app.example.auth").unwrap(),
2190
+
Scope::parse("account:email").unwrap(),
2191
+
];
2192
+
let to_remove = Scope::parse("include:app.example.auth").unwrap();
2193
+
let result = Scope::remove_scope(&scopes, &to_remove);
2194
+
assert_eq!(result.len(), 2);
2195
+
assert!(!result.contains(&to_remove));
2196
+
assert!(result.contains(&Scope::Atproto));
2197
+
}
2198
+
2199
+
#[test]
2200
+
fn test_include_scope_roundtrip() {
2201
+
// Test that parse and serialize are inverses
2202
+
let original = "include:com.example.authBasicFeatures?aud=did:web:api.example.com%23svc_appview";
2203
+
let scope = Scope::parse(original).unwrap();
2204
+
let serialized = scope.to_string_normalized();
2205
+
let reparsed = Scope::parse(&serialized).unwrap();
2206
+
assert_eq!(scope, reparsed);
1838
2207
}
1839
2208
}
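The `include` scope added above carries an optional `aud` query parameter whose DID may contain a `#` service fragment, which is why it is percent-encoded as `%23` when serializing and decoded when parsing. A small standalone sketch of that round trip; the encode/decode helpers below mirror the idea but are not the crate's `url_encode`/`url_decode`:

```rust
// Encode only the '#' fragment separator, the character the include-scope
// tests above exercise; a real encoder covers more characters.
fn encode_fragment(did: &str) -> String {
    did.replace('#', "%23")
}

fn decode_fragment(encoded: &str) -> String {
    encoded.replace("%23", "#")
}

fn main() {
    let aud = "did:web:api.example.com#svc_chat";

    // Serialized form as it would appear in an OAuth scope string.
    let scope = format!("include:app.example.authFull?aud={}", encode_fragment(aud));
    assert_eq!(
        scope,
        "include:app.example.authFull?aud=did:web:api.example.com%23svc_chat"
    );

    // Parsing recovers the original audience DID with its fragment intact.
    let encoded_aud = scope.split("aud=").nth(1).unwrap();
    assert_eq!(decode_fragment(encoded_aud), aud);
}
```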
+205
crates/atproto-record/src/lexicon/app_bsky_richtext_facet.rs
···
1
+
//! AT Protocol rich text facet types.
2
+
//!
3
+
//! This module provides types for annotating rich text content with semantic
4
+
//! meaning, based on the `app.bsky.richtext.facet` lexicon. Facets enable
5
+
//! mentions, links, hashtags, and other structured metadata to be attached
6
+
//! to specific byte ranges within text content.
7
+
//!
8
+
//! # Overview
9
+
//!
10
+
//! Facets consist of:
11
+
//! - A byte range (start/end indices in UTF-8 encoded text)
12
+
//! - One or more features (mention, link, tag) that apply to that range
13
+
//!
14
+
//! # Example
15
+
//!
16
+
//! ```ignore
17
+
//! use atproto_record::lexicon::app::bsky::richtext::facet::{Facet, ByteSlice, FacetFeature, Mention};
18
+
//!
19
+
//! // Create a mention facet for "@alice.bsky.social"
20
+
//! let facet = Facet {
21
+
//! index: ByteSlice { byte_start: 0, byte_end: 19 },
22
+
//! features: vec![
23
+
//! FacetFeature::Mention(Mention {
24
+
//! did: "did:plc:alice123".to_string(),
25
+
//! })
26
+
//! ],
27
+
//! };
28
+
//! ```
29
+
30
+
use serde::{Deserialize, Serialize};
31
+
32
+
/// Byte range specification for facet features.
33
+
///
34
+
/// Specifies the sub-string range a facet feature applies to using
35
+
/// zero-indexed byte offsets in UTF-8 encoded text. Start index is
36
+
/// inclusive, end index is exclusive.
37
+
///
38
+
/// # Example
39
+
///
40
+
/// ```ignore
41
+
/// use atproto_record::lexicon::app::bsky::richtext::facet::ByteSlice;
42
+
///
43
+
/// // Represents bytes 0-5 of the text
44
+
/// let slice = ByteSlice {
45
+
/// byte_start: 0,
46
+
/// byte_end: 5,
47
+
/// };
48
+
/// ```
49
+
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
50
+
#[serde(rename_all = "camelCase")]
51
+
pub struct ByteSlice {
52
+
/// Starting byte index (inclusive)
53
+
pub byte_start: usize,
54
+
55
+
/// Ending byte index (exclusive)
56
+
pub byte_end: usize,
57
+
}
58
+
59
+
/// Mention facet feature for referencing another account.
60
+
///
61
+
/// The text content typically displays a handle with '@' prefix (e.g., "@alice.bsky.social"),
62
+
/// but the facet reference must use the account's DID for stable identification.
63
+
///
64
+
/// # Example
65
+
///
66
+
/// ```ignore
67
+
/// use atproto_record::lexicon::app::bsky::richtext::facet::Mention;
68
+
///
69
+
/// let mention = Mention {
70
+
/// did: "did:plc:alice123".to_string(),
71
+
/// };
72
+
/// ```
73
+
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
74
+
pub struct Mention {
75
+
/// DID of the mentioned account
76
+
pub did: String,
77
+
}
78
+
79
+
/// Link facet feature for URL references.
80
+
///
81
+
/// The text content may be simplified or truncated for display purposes,
82
+
/// but the facet reference should contain the complete, valid URL.
83
+
///
84
+
/// # Example
85
+
///
86
+
/// ```ignore
87
+
/// use atproto_record::lexicon::app::bsky::richtext::facet::Link;
88
+
///
89
+
/// let link = Link {
90
+
/// uri: "https://example.com/full/path".to_string(),
91
+
/// };
92
+
/// ```
93
+
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
94
+
pub struct Link {
95
+
/// Complete URI/URL for the link
96
+
pub uri: String,
97
+
}
98
+
99
+
/// Tag facet feature for hashtags.
100
+
///
101
+
/// The text content typically includes a '#' prefix for display,
102
+
/// but the facet reference should contain only the tag text without the prefix.
103
+
///
104
+
/// # Example
105
+
///
106
+
/// ```ignore
107
+
/// use atproto_record::lexicon::app::bsky::richtext::facet::Tag;
108
+
///
109
+
/// // For text "#atproto", store just "atproto"
110
+
/// let tag = Tag {
111
+
/// tag: "atproto".to_string(),
112
+
/// };
113
+
/// ```
114
+
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
115
+
pub struct Tag {
116
+
/// Tag text without '#' prefix
117
+
pub tag: String,
118
+
}
119
+
120
+
/// Discriminated union of facet feature types.
121
+
///
122
+
/// Represents the different types of semantic annotations that can be
123
+
/// applied to text ranges. Each variant corresponds to a specific lexicon
124
+
/// type in the `app.bsky.richtext.facet` namespace.
125
+
///
126
+
/// # Example
127
+
///
128
+
/// ```ignore
129
+
/// use atproto_record::lexicon::app::bsky::richtext::facet::{FacetFeature, Mention, Link, Tag};
130
+
///
131
+
/// // Create different feature types
132
+
/// let mention = FacetFeature::Mention(Mention {
133
+
/// did: "did:plc:alice123".to_string(),
134
+
/// });
135
+
///
136
+
/// let link = FacetFeature::Link(Link {
137
+
/// uri: "https://example.com".to_string(),
138
+
/// });
139
+
///
140
+
/// let tag = FacetFeature::Tag(Tag {
141
+
/// tag: "rust".to_string(),
142
+
/// });
143
+
/// ```
144
+
#[derive(Serialize, Deserialize, Clone, PartialEq)]
145
+
#[cfg_attr(debug_assertions, derive(Debug))]
146
+
#[serde(tag = "$type")]
147
+
pub enum FacetFeature {
148
+
/// Account mention feature
149
+
#[serde(rename = "app.bsky.richtext.facet#mention")]
150
+
Mention(Mention),
151
+
152
+
/// URL link feature
153
+
#[serde(rename = "app.bsky.richtext.facet#link")]
154
+
Link(Link),
155
+
156
+
/// Hashtag feature
157
+
#[serde(rename = "app.bsky.richtext.facet#tag")]
158
+
Tag(Tag),
159
+
}
160
+
161
+
/// Rich text facet annotation.
162
+
///
163
+
/// Associates one or more semantic features with a specific byte range
164
+
/// within text content. Multiple features can apply to the same range
165
+
/// (e.g., a URL that is also a hashtag).
166
+
///
167
+
/// # Example
168
+
///
169
+
/// ```ignore
170
+
/// use atproto_record::lexicon::app::bsky::richtext::facet::{
171
+
/// Facet, ByteSlice, FacetFeature, Mention, Link
172
+
/// };
173
+
///
174
+
/// // Annotate "@alice.bsky.social" at bytes 0-19
175
+
/// let facet = Facet {
176
+
/// index: ByteSlice { byte_start: 0, byte_end: 19 },
177
+
/// features: vec![
178
+
/// FacetFeature::Mention(Mention {
179
+
/// did: "did:plc:alice123".to_string(),
180
+
/// }),
181
+
/// ],
182
+
/// };
183
+
///
184
+
/// // Multiple features for the same range
185
+
/// let multi_facet = Facet {
186
+
/// index: ByteSlice { byte_start: 20, byte_end: 35 },
187
+
/// features: vec![
188
+
/// FacetFeature::Link(Link {
189
+
/// uri: "https://example.com".to_string(),
190
+
/// }),
191
+
/// FacetFeature::Tag(Tag {
192
+
/// tag: "example".to_string(),
193
+
/// }),
194
+
/// ],
195
+
/// };
196
+
/// ```
197
+
#[derive(Serialize, Deserialize, Clone, PartialEq)]
198
+
#[cfg_attr(debug_assertions, derive(Debug))]
199
+
pub struct Facet {
200
+
/// Byte range this facet applies to
201
+
pub index: ByteSlice,
202
+
203
+
/// Semantic features applied to this range
204
+
pub features: Vec<FacetFeature>,
205
+
}
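The `#[serde(tag = "$type")]` annotation on `FacetFeature` above produces the AT Protocol's internally tagged union encoding, where each feature object carries its lexicon NSID in a `$type` field. A rough sketch of the resulting JSON shape, assuming the `Facet`, `ByteSlice`, `FacetFeature`, and `Link` types above are in scope and `serde_json` is available:

```rust
fn main() {
    let facet = Facet {
        index: ByteSlice { byte_start: 6, byte_end: 25 },
        features: vec![FacetFeature::Link(Link {
            uri: "https://example.com".to_string(),
        })],
    };

    let json = serde_json::to_value(&facet).unwrap();

    // The enum variant is tagged with its lexicon NSID via `$type`,
    // and ByteSlice fields are camelCased by the serde attribute.
    assert_eq!(json["features"][0]["$type"], "app.bsky.richtext.facet#link");
    assert_eq!(json["features"][0]["uri"], "https://example.com");
    assert_eq!(json["index"]["byteStart"], 6);
    assert_eq!(json["index"]["byteEnd"], 25);
}
```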
+19
-68
crates/atproto-record/src/lexicon/community_lexicon_attestation.rs
···
30
30
///
31
31
/// // Inline signature
32
32
/// let inline = SignatureOrRef::Inline(create_typed_signature(
33
-
/// "did:plc:issuer".to_string(),
34
33
/// Bytes { bytes: b"signature".to_vec() },
35
34
/// ));
36
35
///
···
55
54
56
55
/// Cryptographic signature structure.
57
56
///
58
-
/// Represents a signature created by an issuer (identified by DID) over
59
-
/// some data. The signature can be used to verify authenticity, authorization,
60
-
/// or other properties of the signed content.
57
+
/// Represents a cryptographic signature over some data. The signature can be
58
+
/// used to verify authenticity, authorization, or other properties of the
59
+
/// signed content.
61
60
///
62
61
/// # Fields
63
62
///
64
-
/// - `issuer`: DID of the entity that created the signature
65
63
/// - `signature`: The actual signature bytes
66
64
/// - `extra`: Additional fields that may be present in the signature
67
65
///
···
73
71
/// use std::collections::HashMap;
74
72
///
75
73
/// let sig = Signature {
76
-
/// issuer: "did:plc:example".to_string(),
77
74
/// signature: Bytes { bytes: b"signature_bytes".to_vec() },
78
75
/// extra: HashMap::new(),
79
76
/// };
···
81
78
#[derive(Deserialize, Serialize, Clone, PartialEq)]
82
79
#[cfg_attr(debug_assertions, derive(Debug))]
83
80
pub struct Signature {
84
-
/// DID of the entity that created this signature
85
-
pub issuer: String,
86
-
87
81
/// The cryptographic signature bytes
88
82
pub signature: Bytes,
89
83
···
116
110
///
117
111
/// # Arguments
118
112
///
119
-
/// * `issuer` - DID of the signature issuer
120
113
/// * `signature` - The signature bytes
121
114
///
122
115
/// # Example
···
126
119
/// use atproto_record::lexicon::Bytes;
127
120
///
128
121
/// let sig = create_typed_signature(
129
-
/// "did:plc:issuer".to_string(),
130
122
/// Bytes { bytes: b"sig_data".to_vec() },
131
123
/// );
132
124
/// ```
133
-
pub fn create_typed_signature(issuer: String, signature: Bytes) -> TypedSignature {
125
+
pub fn create_typed_signature(signature: Bytes) -> TypedSignature {
134
126
TypedLexicon::new(Signature {
135
-
issuer,
136
127
signature,
137
128
extra: HashMap::new(),
138
129
})
···
150
141
let json_str = r#"{
151
142
"$type": "community.lexicon.attestation.signature",
152
143
"issuedAt": "2025-08-19T20:17:17.133Z",
153
-
"issuer": "did:web:acudo-dev.smokesignal.tools",
154
144
"signature": {
155
145
"$bytes": "mr9c0MCu3g6SXNQ25JFhzfX1ecYgK9k1Kf6OZI2p2AlQRoQu09dOE7J5uaeilIx/UFCjJErO89C/uBBb9ANmUA"
156
146
}
···
160
150
let typed_sig_result: Result<TypedSignature, _> = serde_json::from_str(json_str);
161
151
match &typed_sig_result {
162
152
Ok(sig) => {
163
-
println!("TypedSignature OK: issuer={}", sig.inner.issuer);
164
-
assert_eq!(sig.inner.issuer, "did:web:acudo-dev.smokesignal.tools");
153
+
println!("TypedSignature OK: signature bytes len={}", sig.inner.signature.bytes.len());
154
+
assert_eq!(sig.inner.signature.bytes.len(), 64);
165
155
}
166
156
Err(e) => {
167
157
eprintln!("TypedSignature deserialization error: {}", e);
···
172
162
let sig_or_ref_result: Result<SignatureOrRef, _> = serde_json::from_str(json_str);
173
163
match &sig_or_ref_result {
174
164
Ok(SignatureOrRef::Inline(sig)) => {
175
-
println!("SignatureOrRef OK (Inline): issuer={}", sig.inner.issuer);
176
-
assert_eq!(sig.inner.issuer, "did:web:acudo-dev.smokesignal.tools");
165
+
println!("SignatureOrRef OK (Inline): signature bytes len={}", sig.inner.signature.bytes.len());
166
+
assert_eq!(sig.inner.signature.bytes.len(), 64);
177
167
}
178
168
Ok(SignatureOrRef::Reference(_)) => {
179
169
panic!("Expected Inline signature, got Reference");
···
186
176
// Try without $type field
187
177
let json_no_type = r#"{
188
178
"issuedAt": "2025-08-19T20:17:17.133Z",
189
-
"issuer": "did:web:acudo-dev.smokesignal.tools",
190
179
"signature": {
191
180
"$bytes": "mr9c0MCu3g6SXNQ25JFhzfX1ecYgK9k1Kf6OZI2p2AlQRoQu09dOE7J5uaeilIx/UFCjJErO89C/uBBb9ANmUA"
192
181
}
···
195
184
let no_type_result: Result<Signature, _> = serde_json::from_str(json_no_type);
196
185
match &no_type_result {
197
186
Ok(sig) => {
198
-
println!("Signature (no type) OK: issuer={}", sig.issuer);
199
-
assert_eq!(sig.issuer, "did:web:acudo-dev.smokesignal.tools");
187
+
println!("Signature (no type) OK: signature bytes len={}", sig.signature.bytes.len());
200
188
assert_eq!(sig.signature.bytes.len(), 64);
201
189
202
190
// Now wrap it in TypedLexicon and try as SignatureOrRef
···
220
208
fn test_signature_deserialization() {
221
209
let json_str = r#"{
222
210
"$type": "community.lexicon.attestation.signature",
223
-
"issuer": "did:plc:test123",
224
211
"signature": {"$bytes": "dGVzdCBzaWduYXR1cmU="}
225
212
}"#;
226
213
227
214
let signature: Signature = serde_json::from_str(json_str).unwrap();
228
215
229
-
assert_eq!(signature.issuer, "did:plc:test123");
230
216
assert_eq!(signature.signature.bytes, b"test signature");
231
217
// The $type field will be captured in extra due to #[serde(flatten)]
232
218
assert_eq!(signature.extra.len(), 1);
···
237
223
fn test_signature_deserialization_with_extra_fields() {
238
224
let json_str = r#"{
239
225
"$type": "community.lexicon.attestation.signature",
240
-
"issuer": "did:plc:test123",
241
226
"signature": {"$bytes": "dGVzdCBzaWduYXR1cmU="},
242
227
"issuedAt": "2024-01-01T00:00:00.000Z",
243
228
"purpose": "verification"
···
245
230
246
231
let signature: Signature = serde_json::from_str(json_str).unwrap();
247
232
248
-
assert_eq!(signature.issuer, "did:plc:test123");
249
233
assert_eq!(signature.signature.bytes, b"test signature");
250
234
// 3 extra fields: $type, issuedAt, purpose
251
235
assert_eq!(signature.extra.len(), 3);
···
263
247
extra.insert("custom_field".to_string(), json!("custom_value"));
264
248
265
249
let signature = Signature {
266
-
issuer: "did:plc:serializer".to_string(),
267
250
signature: Bytes {
268
251
bytes: b"hello world".to_vec(),
269
252
},
···
274
257
275
258
// Without custom Serialize impl, $type is not automatically added
276
259
assert!(!json.as_object().unwrap().contains_key("$type"));
277
-
assert_eq!(json["issuer"], "did:plc:serializer");
278
260
// "hello world" base64 encoded is "aGVsbG8gd29ybGQ="
279
261
assert_eq!(json["signature"]["$bytes"], "aGVsbG8gd29ybGQ=");
280
262
assert_eq!(json["custom_field"], "custom_value");
···
283
265
#[test]
284
266
fn test_signature_round_trip() {
285
267
let original = Signature {
286
-
issuer: "did:plc:roundtrip".to_string(),
287
268
signature: Bytes {
288
269
bytes: b"round trip test".to_vec(),
289
270
},
···
296
277
// Deserialize back
297
278
let deserialized: Signature = serde_json::from_str(&json).unwrap();
298
279
299
-
assert_eq!(original.issuer, deserialized.issuer);
300
280
assert_eq!(original.signature.bytes, deserialized.signature.bytes);
301
281
// Without the custom Serialize impl, no $type is added
302
282
// so the round-trip preserves the empty extra map
···
317
297
extra.insert("tags".to_string(), json!(["tag1", "tag2", "tag3"]));
318
298
319
299
let signature = Signature {
320
-
issuer: "did:plc:complex".to_string(),
321
300
signature: Bytes {
322
301
bytes: vec![0xFF, 0xEE, 0xDD, 0xCC, 0xBB, 0xAA],
323
302
},
···
328
307
329
308
// Without custom Serialize impl, $type is not automatically added
330
309
assert!(!json.as_object().unwrap().contains_key("$type"));
331
-
assert_eq!(json["issuer"], "did:plc:complex");
332
310
assert_eq!(json["timestamp"], 1234567890);
333
311
assert_eq!(json["metadata"]["version"], "1.0");
334
312
assert_eq!(json["metadata"]["algorithm"], "ES256");
···
338
316
#[test]
339
317
fn test_empty_signature() {
340
318
let signature = Signature {
341
-
issuer: String::new(),
342
319
signature: Bytes { bytes: Vec::new() },
343
320
extra: HashMap::new(),
344
321
};
···
347
324
348
325
// Without custom Serialize impl, $type is not automatically added
349
326
assert!(!json.as_object().unwrap().contains_key("$type"));
350
-
assert_eq!(json["issuer"], "");
351
327
assert_eq!(json["signature"]["$bytes"], ""); // Empty bytes encode to empty string
352
328
}
353
329
···
356
332
// Test with plain Vec<Signature> for basic signature serialization
357
333
let signatures: Vec<Signature> = vec![
358
334
Signature {
359
-
issuer: "did:plc:first".to_string(),
360
335
signature: Bytes {
361
336
bytes: b"first".to_vec(),
362
337
},
363
338
extra: HashMap::new(),
364
339
},
365
340
Signature {
366
-
issuer: "did:plc:second".to_string(),
367
341
signature: Bytes {
368
342
bytes: b"second".to_vec(),
369
343
},
···
375
349
376
350
assert!(json.is_array());
377
351
assert_eq!(json.as_array().unwrap().len(), 2);
378
-
assert_eq!(json[0]["issuer"], "did:plc:first");
379
-
assert_eq!(json[1]["issuer"], "did:plc:second");
352
+
assert_eq!(json[0]["signature"]["$bytes"], "Zmlyc3Q="); // "first" in base64
353
+
assert_eq!(json[1]["signature"]["$bytes"], "c2Vjb25k"); // "second" in base64
380
354
}
381
355
382
356
#[test]
···
384
358
// Test the new Signatures type with inline signatures
385
359
let signatures: Signatures = vec![
386
360
SignatureOrRef::Inline(create_typed_signature(
387
-
"did:plc:first".to_string(),
388
361
Bytes {
389
362
bytes: b"first".to_vec(),
390
363
},
391
364
)),
392
365
SignatureOrRef::Inline(create_typed_signature(
393
-
"did:plc:second".to_string(),
394
366
Bytes {
395
367
bytes: b"second".to_vec(),
396
368
},
···
402
374
assert!(json.is_array());
403
375
assert_eq!(json.as_array().unwrap().len(), 2);
404
376
assert_eq!(json[0]["$type"], "community.lexicon.attestation.signature");
405
-
assert_eq!(json[0]["issuer"], "did:plc:first");
377
+
assert_eq!(json[0]["signature"]["$bytes"], "Zmlyc3Q="); // "first" in base64
406
378
assert_eq!(json[1]["$type"], "community.lexicon.attestation.signature");
407
-
assert_eq!(json[1]["issuer"], "did:plc:second");
379
+
assert_eq!(json[1]["signature"]["$bytes"], "c2Vjb25k"); // "second" in base64
408
380
}
409
381
410
382
#[test]
411
383
fn test_typed_signature_serialization() {
412
384
let typed_sig = create_typed_signature(
413
-
"did:plc:typed".to_string(),
414
385
Bytes {
415
386
bytes: b"typed signature".to_vec(),
416
387
},
···
419
390
let json = serde_json::to_value(&typed_sig).unwrap();
420
391
421
392
assert_eq!(json["$type"], "community.lexicon.attestation.signature");
422
-
assert_eq!(json["issuer"], "did:plc:typed");
423
393
// "typed signature" base64 encoded
424
394
assert_eq!(json["signature"]["$bytes"], "dHlwZWQgc2lnbmF0dXJl");
425
395
}
···
428
398
fn test_typed_signature_deserialization() {
429
399
let json = json!({
430
400
"$type": "community.lexicon.attestation.signature",
431
-
"issuer": "did:plc:typed",
432
401
"signature": {"$bytes": "dHlwZWQgc2lnbmF0dXJl"}
433
402
});
434
403
435
404
let typed_sig: TypedSignature = serde_json::from_value(json).unwrap();
436
405
437
-
assert_eq!(typed_sig.inner.issuer, "did:plc:typed");
438
406
assert_eq!(typed_sig.inner.signature.bytes, b"typed signature");
439
407
assert!(typed_sig.has_type_field());
440
408
assert!(typed_sig.validate().is_ok());
···
443
411
#[test]
444
412
fn test_typed_signature_without_type_field() {
445
413
let json = json!({
446
-
"issuer": "did:plc:notype",
447
414
"signature": {"$bytes": "bm8gdHlwZQ=="} // "no type" in base64
448
415
});
449
416
450
417
let typed_sig: TypedSignature = serde_json::from_value(json).unwrap();
451
418
452
-
assert_eq!(typed_sig.inner.issuer, "did:plc:notype");
453
419
assert_eq!(typed_sig.inner.signature.bytes, b"no type");
454
420
assert!(!typed_sig.has_type_field());
455
421
// Validation should still pass because type_required() returns false for Signature
···
459
425
#[test]
460
426
fn test_typed_signature_with_extra_fields() {
461
427
let mut sig = Signature {
462
-
issuer: "did:plc:extra".to_string(),
463
428
signature: Bytes {
464
429
bytes: b"extra test".to_vec(),
465
430
},
···
474
439
let json = serde_json::to_value(&typed_sig).unwrap();
475
440
476
441
assert_eq!(json["$type"], "community.lexicon.attestation.signature");
477
-
assert_eq!(json["issuer"], "did:plc:extra");
478
442
assert_eq!(json["customField"], "customValue");
479
443
assert_eq!(json["timestamp"], 1234567890);
480
444
}
···
482
446
#[test]
483
447
fn test_typed_signature_round_trip() {
484
448
let original = Signature {
485
-
issuer: "did:plc:roundtrip2".to_string(),
486
449
signature: Bytes {
487
450
bytes: b"round trip typed".to_vec(),
488
451
},
···
494
457
let json = serde_json::to_string(&typed).unwrap();
495
458
let deserialized: TypedSignature = serde_json::from_str(&json).unwrap();
496
459
497
-
assert_eq!(deserialized.inner.issuer, original.issuer);
498
460
assert_eq!(deserialized.inner.signature.bytes, original.signature.bytes);
499
461
assert!(deserialized.has_type_field());
500
462
}
···
503
465
fn test_typed_signatures_vec() {
504
466
let typed_sigs: Vec<TypedSignature> = vec![
505
467
create_typed_signature(
506
-
"did:plc:first".to_string(),
507
468
Bytes {
508
469
bytes: b"first".to_vec(),
509
470
},
510
471
),
511
472
create_typed_signature(
512
-
"did:plc:second".to_string(),
513
473
Bytes {
514
474
bytes: b"second".to_vec(),
515
475
},
···
520
480
521
481
assert!(json.is_array());
522
482
assert_eq!(json[0]["$type"], "community.lexicon.attestation.signature");
523
-
assert_eq!(json[0]["issuer"], "did:plc:first");
483
+
assert_eq!(json[0]["signature"]["$bytes"], "Zmlyc3Q="); // "first" in base64
524
484
assert_eq!(json[1]["$type"], "community.lexicon.attestation.signature");
525
-
assert_eq!(json[1]["issuer"], "did:plc:second");
485
+
assert_eq!(json[1]["signature"]["$bytes"], "c2Vjb25k"); // "second" in base64
526
486
}
527
487
528
488
#[test]
529
489
fn test_plain_vs_typed_signature() {
530
490
// Plain Signature doesn't include $type field
531
491
let plain_sig = Signature {
532
-
issuer: "did:plc:plain".to_string(),
533
492
signature: Bytes {
534
493
bytes: b"plain sig".to_vec(),
535
494
},
···
548
507
);
549
508
550
509
// Both have the same core data
551
-
assert_eq!(plain_json["issuer"], typed_json["issuer"]);
552
510
assert_eq!(plain_json["signature"], typed_json["signature"]);
553
511
}
554
512
···
556
514
fn test_signature_or_ref_inline() {
557
515
// Test inline signature
558
516
let inline_sig = create_typed_signature(
559
-
"did:plc:inline".to_string(),
560
517
Bytes {
561
518
bytes: b"inline signature".to_vec(),
562
519
},
···
567
524
// Serialize
568
525
let json = serde_json::to_value(&sig_or_ref).unwrap();
569
526
assert_eq!(json["$type"], "community.lexicon.attestation.signature");
570
-
assert_eq!(json["issuer"], "did:plc:inline");
571
527
assert_eq!(json["signature"]["$bytes"], "aW5saW5lIHNpZ25hdHVyZQ=="); // "inline signature" in base64
572
528
573
529
// Deserialize
574
530
let deserialized: SignatureOrRef = serde_json::from_value(json.clone()).unwrap();
575
531
match deserialized {
576
532
SignatureOrRef::Inline(sig) => {
577
-
assert_eq!(sig.inner.issuer, "did:plc:inline");
578
533
assert_eq!(sig.inner.signature.bytes, b"inline signature");
579
534
}
580
535
_ => panic!("Expected inline signature"),
···
621
576
let signatures: Signatures = vec![
622
577
// Inline signature
623
578
SignatureOrRef::Inline(create_typed_signature(
624
-
"did:plc:signer1".to_string(),
625
579
Bytes {
626
580
bytes: b"sig1".to_vec(),
627
581
},
···
633
587
})),
634
588
// Another inline signature
635
589
SignatureOrRef::Inline(create_typed_signature(
636
-
"did:plc:signer3".to_string(),
637
590
Bytes {
638
591
bytes: b"sig3".to_vec(),
639
592
},
···
648
601
649
602
// First element should be inline signature
650
603
assert_eq!(array[0]["$type"], "community.lexicon.attestation.signature");
651
-
assert_eq!(array[0]["issuer"], "did:plc:signer1");
604
+
assert_eq!(array[0]["signature"]["$bytes"], "c2lnMQ=="); // "sig1" in base64
652
605
653
606
// Second element should be reference
654
607
assert_eq!(array[1]["$type"], "com.atproto.repo.strongRef");
···
659
612
660
613
// Third element should be inline signature
661
614
assert_eq!(array[2]["$type"], "community.lexicon.attestation.signature");
662
-
assert_eq!(array[2]["issuer"], "did:plc:signer3");
615
+
assert_eq!(array[2]["signature"]["$bytes"], "c2lnMw=="); // "sig3" in base64
663
616
664
617
// Deserialize back
665
618
let deserialized: Signatures = serde_json::from_value(json).unwrap();
···
667
620
668
621
// Verify each element
669
622
match &deserialized[0] {
670
-
SignatureOrRef::Inline(sig) => assert_eq!(sig.inner.issuer, "did:plc:signer1"),
623
+
SignatureOrRef::Inline(sig) => assert_eq!(sig.inner.signature.bytes, b"sig1"),
671
624
_ => panic!("Expected inline signature at index 0"),
672
625
}
673
626
···
682
635
}
683
636
684
637
match &deserialized[2] {
685
-
SignatureOrRef::Inline(sig) => assert_eq!(sig.inner.issuer, "did:plc:signer3"),
638
+
SignatureOrRef::Inline(sig) => assert_eq!(sig.inner.signature.bytes, b"sig3"),
686
639
_ => panic!("Expected inline signature at index 2"),
687
640
}
688
641
}
···
694
647
// Inline signature JSON
695
648
let inline_json = r#"{
696
649
"$type": "community.lexicon.attestation.signature",
697
-
"issuer": "did:plc:testinline",
698
650
"signature": {"$bytes": "aGVsbG8="}
699
651
}"#;
700
652
701
653
let inline_deser: SignatureOrRef = serde_json::from_str(inline_json).unwrap();
702
654
match inline_deser {
703
655
SignatureOrRef::Inline(sig) => {
704
-
assert_eq!(sig.inner.issuer, "did:plc:testinline");
705
656
assert_eq!(sig.inner.signature.bytes, b"hello");
706
657
}
707
658
_ => panic!("Expected inline signature"),
+1
-2
crates/atproto-record/src/lexicon/community_lexicon_badge.rs
···
311
311
// The signature should be inline in this test
312
312
match sig_or_ref {
313
313
crate::lexicon::community_lexicon_attestation::SignatureOrRef::Inline(sig) => {
314
-
assert_eq!(sig.issuer, "did:plc:issuer");
315
314
// The bytes should match the decoded base64 value
316
315
// "dGVzdCBzaWduYXR1cmU=" decodes to "test signature"
317
-
assert_eq!(sig.signature.bytes, b"test signature".to_vec());
316
+
assert_eq!(sig.inner.signature.bytes, b"test signature".to_vec());
318
317
}
319
318
_ => panic!("Expected inline signature"),
320
319
}
+43
-9
crates/atproto-record/src/lexicon/community_lexicon_calendar_event.rs
···
10
10
11
11
use crate::datetime::format as datetime_format;
12
12
use crate::datetime::optional_format as optional_datetime_format;
13
+
use crate::lexicon::app::bsky::richtext::facet::Facet;
13
14
use crate::lexicon::TypedBlob;
14
15
use crate::lexicon::community::lexicon::location::Locations;
15
16
use crate::typed::{LexiconType, TypedLexicon};
16
17
17
-
/// The namespace identifier for events
18
+
/// Lexicon namespace identifier for calendar events.
19
+
///
20
+
/// Used as the `$type` field value for event records in the AT Protocol.
18
21
pub const NSID: &str = "community.lexicon.calendar.event";
19
22
20
23
/// Event status enumeration.
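A minimal sketch of what the `$type` discriminator described above looks like on a serialized record; the non-`$type` fields here are hypothetical placeholders, not the crate's actual Event schema (that struct is defined later in this file).

```rust
// Illustrative only: the `$type` value mirrors the NSID constant above.
// "name" and "createdAt" are hypothetical placeholder fields.
use serde_json::json;

fn main() {
    let record = json!({
        "$type": "community.lexicon.calendar.event",
        "name": "Example event",
        "createdAt": "2025-08-19T20:17:17.133Z"
    });
    assert_eq!(record["$type"], "community.lexicon.calendar.event");
}
```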
···
65
68
Hybrid,
66
69
}
67
70
68
-
/// The namespace identifier for named URIs
71
+
/// Lexicon namespace identifier for named URIs in calendar events.
72
+
///
73
+
/// Used as the `$type` field value for URI references associated with events.
69
74
pub const NAMED_URI_NSID: &str = "community.lexicon.calendar.event#uri";
70
75
71
76
/// Named URI structure.
···
89
94
}
90
95
}
91
96
92
-
/// Type alias for NamedUri with automatic $type field handling
97
+
/// Type alias for NamedUri with automatic $type field handling.
98
+
///
99
+
/// Wraps `NamedUri` in `TypedLexicon` to ensure proper serialization
100
+
/// and deserialization of the `$type` field.
93
101
pub type TypedNamedUri = TypedLexicon<NamedUri>;
94
102
95
-
/// The namespace identifier for event links
103
+
/// Lexicon namespace identifier for event links.
104
+
///
105
+
/// Used as the `$type` field value for event link references.
106
+
/// Note: This shares the same NSID as `NAMED_URI_NSID` for compatibility.
96
107
pub const EVENT_LINK_NSID: &str = "community.lexicon.calendar.event#uri";
97
108
98
109
/// Event link structure.
···
116
127
}
117
128
}
118
129
119
-
/// Type alias for EventLink with automatic $type field handling
130
+
/// Type alias for EventLink with automatic $type field handling.
131
+
///
132
+
/// Wraps `EventLink` in `TypedLexicon` to ensure proper serialization
133
+
/// and deserialization of the `$type` field.
120
134
pub type TypedEventLink = TypedLexicon<EventLink>;
121
135
122
-
/// A vector of typed event links
136
+
/// Collection of typed event links.
137
+
///
138
+
/// Represents multiple URI references associated with an event,
139
+
/// such as registration pages, live streams, or related content.
123
140
pub type EventLinks = Vec<TypedEventLink>;
124
141
125
142
/// Aspect ratio for media content.
···
134
151
pub height: u64,
135
152
}
136
153
137
-
/// The namespace identifier for media
154
+
/// Lexicon namespace identifier for event media.
155
+
///
156
+
/// Used as the `$type` field value for media attachments associated with events.
138
157
pub const MEDIA_NSID: &str = "community.lexicon.calendar.event#media";
139
158
140
159
/// Media structure for event-related visual content.
···
163
182
}
164
183
}
165
184
166
-
/// Type alias for Media with automatic $type field handling
185
+
/// Type alias for Media with automatic $type field handling.
186
+
///
187
+
/// Wraps `Media` in `TypedLexicon` to ensure proper serialization
188
+
/// and deserialization of the `$type` field.
167
189
pub type TypedMedia = TypedLexicon<Media>;
168
190
169
-
/// A vector of typed media items
191
+
/// Collection of typed media items.
192
+
///
193
+
/// Represents multiple media attachments for an event, such as banners,
194
+
/// posters, thumbnails, or promotional images.
170
195
pub type MediaList = Vec<TypedMedia>;
171
196
172
197
/// Calendar event structure.
···
248
273
#[serde(skip_serializing_if = "Vec::is_empty", default)]
249
274
pub media: MediaList,
250
275
276
+
/// Rich text facets for semantic annotations in description field.
277
+
///
278
+
/// Enables mentions, links, and hashtags to be embedded in the event
279
+
/// description text with proper semantic metadata.
280
+
#[serde(skip_serializing_if = "Option::is_none")]
281
+
pub facets: Option<Vec<Facet>>,
282
+
251
283
/// Extension fields for forward compatibility.
252
284
/// This catch-all allows unknown fields to be preserved and indexed
253
285
/// for potential future use without requiring re-indexing.
···
312
344
locations: vec![],
313
345
uris: vec![],
314
346
media: vec![],
347
+
facets: None,
315
348
extra: HashMap::new(),
316
349
};
317
350
···
466
499
locations: vec![],
467
500
uris: vec![TypedLexicon::new(event_link)],
468
501
media: vec![TypedLexicon::new(media)],
502
+
facets: None,
469
503
extra: HashMap::new(),
470
504
};
471
505
-3
crates/atproto-record/src/lexicon/community_lexicon_calendar_rsvp.rs
···
294
294
assert_eq!(typed_rsvp.inner.signatures.len(), 1);
295
295
match &typed_rsvp.inner.signatures[0] {
296
296
SignatureOrRef::Inline(sig) => {
297
-
assert_eq!(sig.inner.issuer, "did:plc:issuer");
298
297
assert_eq!(sig.inner.signature.bytes, b"test signature");
299
298
}
300
299
_ => panic!("Expected inline signature"),
···
364
363
assert_eq!(typed_rsvp.inner.signatures.len(), 1);
365
364
match &typed_rsvp.inner.signatures[0] {
366
365
SignatureOrRef::Inline(sig) => {
367
-
assert_eq!(sig.inner.issuer, "did:web:acudo-dev.smokesignal.tools");
368
-
369
366
// Verify the issuedAt field if present
370
367
if let Some(issued_at_value) = sig.inner.extra.get("issuedAt") {
371
368
assert_eq!(issued_at_value, "2025-08-19T20:17:17.133Z");
+22
crates/atproto-record/src/lexicon/mod.rs
···
37
37
mod community_lexicon_calendar_event;
38
38
mod community_lexicon_calendar_rsvp;
39
39
mod community_lexicon_location;
40
+
mod app_bsky_richtext_facet;
40
41
mod primatives;
41
42
43
+
// Re-export primitive types for convenience
42
44
pub use primatives::*;
45
+
46
+
/// Bluesky application namespace.
47
+
///
48
+
/// Contains lexicon types specific to the Bluesky application,
49
+
/// including rich text formatting and social features.
50
+
pub mod app {
51
+
/// Bluesky namespace.
52
+
pub mod bsky {
53
+
/// Rich text formatting types.
54
+
pub mod richtext {
55
+
/// Facet types for semantic text annotations.
56
+
///
57
+
/// Provides types for mentions, links, hashtags, and other
58
+
/// structured metadata that can be attached to text content.
59
+
pub mod facet {
60
+
pub use crate::lexicon::app_bsky_richtext_facet::*;
61
+
}
62
+
}
63
+
}
64
+
}
43
65
44
66
/// AT Protocol core types namespace
45
67
pub mod com {
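A minimal usage sketch of the new nested re-export, assuming the `lexicon` module is public at the crate root; `Facet` itself is defined in app_bsky_richtext_facet.rs, which is not shown here.

```rust
// Hypothetical downstream usage of the Bluesky-style module path added above.
use atproto_record::lexicon::app::bsky::richtext::facet::Facet;

fn count_facets(facets: Option<&Vec<Facet>>) -> usize {
    // None (field omitted) and an empty vec both count as zero facets.
    facets.map(|f| f.len()).unwrap_or(0)
}

fn main() {
    assert_eq!(count_facets(None), 0);
}
```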
+53
crates/atproto-tap/Cargo.toml
···
1
+
[package]
2
+
name = "atproto-tap"
3
+
version = "0.13.0"
4
+
description = "AT Protocol TAP (Trusted Attestation Protocol) service consumer"
5
+
readme = "README.md"
6
+
homepage = "https://tangled.sh/@smokesignal.events/atproto-identity-rs"
7
+
documentation = "https://docs.rs/atproto-tap"
8
+
9
+
edition.workspace = true
10
+
rust-version.workspace = true
11
+
authors.workspace = true
12
+
repository.workspace = true
13
+
license.workspace = true
14
+
keywords.workspace = true
15
+
categories.workspace = true
16
+
17
+
[dependencies]
18
+
tokio = { workspace = true, features = ["sync", "time"] }
19
+
tokio-stream = "0.1"
20
+
tokio-websockets = { workspace = true }
21
+
futures = { workspace = true }
22
+
reqwest = { workspace = true }
23
+
serde = { workspace = true }
24
+
serde_json = { workspace = true }
25
+
thiserror = { workspace = true }
26
+
tracing = { workspace = true }
27
+
http = { workspace = true }
28
+
base64 = { workspace = true }
29
+
atproto-identity.workspace = true
30
+
atproto-client = { workspace = true, optional = true }
31
+
32
+
# Memory efficiency
33
+
compact_str = { version = "0.8", features = ["serde"] }
34
+
itoa = "1.0"
35
+
36
+
# Optional for CLI
37
+
clap = { workspace = true, optional = true }
38
+
tracing-subscriber = { version = "0.3", features = ["env-filter"], optional = true }
39
+
40
+
[features]
41
+
default = []
42
+
clap = ["dep:clap", "dep:tracing-subscriber", "dep:atproto-client", "tokio/rt-multi-thread", "tokio/macros", "tokio/signal"]
43
+
44
+
[[bin]]
45
+
name = "atproto-tap-client"
46
+
required-features = ["clap"]
47
+
48
+
[[bin]]
49
+
name = "atproto-tap-extras"
50
+
required-features = ["clap"]
51
+
52
+
[lints]
53
+
workspace = true
+351
crates/atproto-tap/src/bin/atproto-tap-client.rs
···
1
+
//! Command-line client for TAP services.
2
+
//!
3
+
//! This tool provides commands for consuming TAP events and managing tracked repositories.
4
+
//!
5
+
//! # Usage
6
+
//!
7
+
//! ```bash
8
+
//! # Stream events from a TAP service
9
+
//! cargo run --features clap --bin atproto-tap-client -- localhost:2480 read
10
+
//!
11
+
//! # Stream with authentication and filters
12
+
//! cargo run --features clap --bin atproto-tap-client -- localhost:2480 -p secret read --live-only
13
+
//!
14
+
//! # Add repositories to track
15
+
//! cargo run --features clap --bin atproto-tap-client -- localhost:2480 -p secret repos add did:plc:xyz did:plc:abc
16
+
//!
17
+
//! # Remove repositories from tracking
18
+
//! cargo run --features clap --bin atproto-tap-client -- localhost:2480 -p secret repos remove did:plc:xyz
19
+
//!
20
+
//! # Resolve a DID to its DID document
21
+
//! cargo run --features clap --bin atproto-tap-client -- localhost:2480 resolve did:plc:xyz
22
+
//!
23
+
//! # Resolve a DID and only output the handle
24
+
//! cargo run --features clap --bin atproto-tap-client -- localhost:2480 resolve did:plc:xyz --handle-only
25
+
//!
26
+
//! # Get repository tracking info
27
+
//! cargo run --features clap --bin atproto-tap-client -- localhost:2480 info did:plc:xyz
28
+
//! ```
29
+
30
+
use atproto_tap::{TapClient, TapConfig, TapEvent, connect};
31
+
use clap::{Parser, Subcommand};
32
+
use std::time::Duration;
33
+
use tokio_stream::StreamExt;
34
+
35
+
/// TAP service client for consuming events and managing repositories.
36
+
#[derive(Parser)]
37
+
#[command(
38
+
name = "atproto-tap-client",
39
+
version,
40
+
about = "TAP service client for AT Protocol",
41
+
long_about = "Connect to a TAP service to stream repository/identity events or manage tracked repositories.\n\n\
42
+
Events are printed to stdout as JSON, one per line.\n\
43
+
Use Ctrl+C to gracefully stop the consumer."
44
+
)]
45
+
struct Args {
46
+
/// TAP service hostname (e.g., localhost:2480)
47
+
hostname: String,
48
+
49
+
/// Admin password for authentication
50
+
#[arg(short, long, global = true)]
51
+
password: Option<String>,
52
+
53
+
#[command(subcommand)]
54
+
command: Command,
55
+
}
56
+
57
+
#[derive(Subcommand)]
58
+
enum Command {
59
+
/// Connect to TAP and stream events as JSON
60
+
Read {
61
+
/// Disable acknowledgments
62
+
#[arg(long)]
63
+
no_acks: bool,
64
+
65
+
/// Maximum reconnection attempts (0 = unlimited)
66
+
#[arg(long, default_value = "0")]
67
+
max_reconnects: u32,
68
+
69
+
/// Print debug information to stderr
70
+
#[arg(short, long)]
71
+
debug: bool,
72
+
73
+
/// Filter to specific collections (comma-separated)
74
+
#[arg(long)]
75
+
collections: Option<String>,
76
+
77
+
/// Only show live events (skip backfill)
78
+
#[arg(long)]
79
+
live_only: bool,
80
+
},
81
+
82
+
/// Manage tracked repositories
83
+
Repos {
84
+
#[command(subcommand)]
85
+
action: ReposAction,
86
+
},
87
+
88
+
/// Resolve a DID to its DID document
89
+
Resolve {
90
+
/// DID to resolve (e.g., did:plc:xyz123)
91
+
did: String,
92
+
93
+
/// Only output the handle (instead of full DID document)
94
+
#[arg(long)]
95
+
handle_only: bool,
96
+
},
97
+
98
+
/// Get tracking info for a repository
99
+
Info {
100
+
/// DID to get info for (e.g., did:plc:xyz123)
101
+
did: String,
102
+
},
103
+
}
104
+
105
+
#[derive(Subcommand)]
106
+
enum ReposAction {
107
+
/// Add repositories to track
108
+
Add {
109
+
/// DIDs to add (e.g., did:plc:xyz123)
110
+
#[arg(required = true)]
111
+
dids: Vec<String>,
112
+
},
113
+
114
+
/// Remove repositories from tracking
115
+
Remove {
116
+
/// DIDs to remove
117
+
#[arg(required = true)]
118
+
dids: Vec<String>,
119
+
},
120
+
}
121
+
122
+
#[tokio::main]
123
+
async fn main() {
124
+
let args = Args::parse();
125
+
126
+
match args.command {
127
+
Command::Read {
128
+
no_acks,
129
+
max_reconnects,
130
+
debug,
131
+
collections,
132
+
live_only,
133
+
} => {
134
+
run_read(
135
+
&args.hostname,
136
+
args.password,
137
+
no_acks,
138
+
max_reconnects,
139
+
debug,
140
+
collections,
141
+
live_only,
142
+
)
143
+
.await;
144
+
}
145
+
Command::Repos { action } => {
146
+
run_repos(&args.hostname, args.password, action).await;
147
+
}
148
+
Command::Resolve { did, handle_only } => {
149
+
run_resolve(&args.hostname, args.password, &did, handle_only).await;
150
+
}
151
+
Command::Info { did } => {
152
+
run_info(&args.hostname, args.password, &did).await;
153
+
}
154
+
}
155
+
}
156
+
157
+
async fn run_read(
158
+
hostname: &str,
159
+
password: Option<String>,
160
+
no_acks: bool,
161
+
max_reconnects: u32,
162
+
debug: bool,
163
+
collections: Option<String>,
164
+
live_only: bool,
165
+
) {
166
+
// Initialize tracing if debug mode
167
+
if debug {
168
+
tracing_subscriber::fmt()
169
+
.with_env_filter("atproto_tap=debug")
170
+
.with_writer(std::io::stderr)
171
+
.init();
172
+
}
173
+
174
+
// Build configuration
175
+
let mut config_builder = TapConfig::builder()
176
+
.hostname(hostname)
177
+
.send_acks(!no_acks);
178
+
179
+
if let Some(password) = password {
180
+
config_builder = config_builder.admin_password(password);
181
+
}
182
+
183
+
if max_reconnects > 0 {
184
+
config_builder = config_builder.max_reconnect_attempts(Some(max_reconnects));
185
+
}
186
+
187
+
// Set reasonable defaults for CLI usage
188
+
config_builder = config_builder
189
+
.initial_reconnect_delay(Duration::from_secs(1))
190
+
.max_reconnect_delay(Duration::from_secs(30));
191
+
192
+
let config = config_builder.build();
193
+
194
+
eprintln!("Connecting to TAP service at {}...", hostname);
195
+
196
+
let mut stream = connect(config);
197
+
198
+
// Parse collection filters
199
+
let collection_filters: Vec<String> = collections
200
+
.map(|c| c.split(',').map(|s| s.trim().to_string()).collect())
201
+
.unwrap_or_default();
202
+
203
+
// Handle Ctrl+C
204
+
let ctrl_c = tokio::signal::ctrl_c();
205
+
tokio::pin!(ctrl_c);
206
+
207
+
loop {
208
+
tokio::select! {
209
+
Some(result) = stream.next() => {
210
+
match result {
211
+
Ok(event) => {
212
+
// Apply filters
213
+
let should_print = match event.as_ref() {
214
+
TapEvent::Record { record, .. } => {
215
+
// Filter by live flag
216
+
if live_only && !record.live {
217
+
false
218
+
}
219
+
// Filter by collection
220
+
else if !collection_filters.is_empty() {
221
+
collection_filters.iter().any(|c| record.collection.as_ref() == c)
222
+
} else {
223
+
true
224
+
}
225
+
}
226
+
TapEvent::Identity { .. } => !live_only, // Always show identity unless live_only
227
+
};
228
+
229
+
if should_print {
230
+
// Print as JSON to stdout
231
+
match serde_json::to_string(event.as_ref()) {
232
+
Ok(json) => println!("{}", json),
233
+
Err(e) => {
234
+
eprintln!("Failed to serialize event: {}", e);
235
+
}
236
+
}
237
+
}
238
+
}
239
+
Err(e) => {
240
+
eprintln!("Error: {}", e);
241
+
242
+
// Exit on fatal errors
243
+
if e.is_fatal() {
244
+
eprintln!("Fatal error, exiting");
245
+
std::process::exit(1);
246
+
}
247
+
}
248
+
}
249
+
}
250
+
_ = &mut ctrl_c => {
251
+
eprintln!("\nReceived Ctrl+C, shutting down...");
252
+
stream.close().await;
253
+
break;
254
+
}
255
+
}
256
+
}
257
+
258
+
eprintln!("Client stopped");
259
+
}
260
+
261
+
async fn run_repos(hostname: &str, password: Option<String>, action: ReposAction) {
262
+
let client = TapClient::new(hostname, password);
263
+
264
+
match action {
265
+
ReposAction::Add { dids } => {
266
+
let did_refs: Vec<&str> = dids.iter().map(|s| s.as_str()).collect();
267
+
268
+
match client.add_repos(&did_refs).await {
269
+
Ok(()) => {
270
+
eprintln!("Added {} repository(ies) to tracking", dids.len());
271
+
for did in &dids {
272
+
println!("{}", did);
273
+
}
274
+
}
275
+
Err(e) => {
276
+
eprintln!("Failed to add repositories: {}", e);
277
+
std::process::exit(1);
278
+
}
279
+
}
280
+
}
281
+
ReposAction::Remove { dids } => {
282
+
let did_refs: Vec<&str> = dids.iter().map(|s| s.as_str()).collect();
283
+
284
+
match client.remove_repos(&did_refs).await {
285
+
Ok(()) => {
286
+
eprintln!("Removed {} repository(ies) from tracking", dids.len());
287
+
for did in &dids {
288
+
println!("{}", did);
289
+
}
290
+
}
291
+
Err(e) => {
292
+
eprintln!("Failed to remove repositories: {}", e);
293
+
std::process::exit(1);
294
+
}
295
+
}
296
+
}
297
+
}
298
+
}
299
+
300
+
async fn run_resolve(hostname: &str, password: Option<String>, did: &str, handle_only: bool) {
301
+
let client = TapClient::new(hostname, password);
302
+
303
+
match client.resolve(did).await {
304
+
Ok(doc) => {
305
+
if handle_only {
306
+
// Use the handles() method from atproto_identity::model::Document
307
+
match doc.handles() {
308
+
Some(handle) => println!("{}", handle),
309
+
None => {
310
+
eprintln!("No handle found in DID document");
311
+
std::process::exit(1);
312
+
}
313
+
}
314
+
} else {
315
+
// Print full DID document as JSON
316
+
match serde_json::to_string_pretty(&doc) {
317
+
Ok(json) => println!("{}", json),
318
+
Err(e) => {
319
+
eprintln!("Failed to serialize DID document: {}", e);
320
+
std::process::exit(1);
321
+
}
322
+
}
323
+
}
324
+
}
325
+
Err(e) => {
326
+
eprintln!("Failed to resolve DID: {}", e);
327
+
std::process::exit(1);
328
+
}
329
+
}
330
+
}
331
+
332
+
async fn run_info(hostname: &str, password: Option<String>, did: &str) {
333
+
let client = TapClient::new(hostname, password);
334
+
335
+
match client.info(did).await {
336
+
Ok(info) => {
337
+
// Print as JSON for easy parsing
338
+
match serde_json::to_string_pretty(&info) {
339
+
Ok(json) => println!("{}", json),
340
+
Err(e) => {
341
+
eprintln!("Failed to serialize info: {}", e);
342
+
std::process::exit(1);
343
+
}
344
+
}
345
+
}
346
+
Err(e) => {
347
+
eprintln!("Failed to get repository info: {}", e);
348
+
std::process::exit(1);
349
+
}
350
+
}
351
+
}
+214
crates/atproto-tap/src/bin/atproto-tap-extras.rs
···
1
+
//! Additional TAP client utilities for AT Protocol.
2
+
//!
3
+
//! This tool provides extra commands for managing TAP tracked repositories
4
+
//! based on social graph data.
5
+
//!
6
+
//! # Usage
7
+
//!
8
+
//! ```bash
9
+
//! # Add all accounts followed by a DID to TAP tracking
10
+
//! cargo run --features clap --bin atproto-tap-extras -- localhost:2480 repos-add-followers did:plc:xyz
11
+
//!
12
+
//! # With authentication
13
+
//! cargo run --features clap --bin atproto-tap-extras -- localhost:2480 -p secret repos-add-followers did:plc:xyz
14
+
//! ```
15
+
16
+
use atproto_client::client::Auth;
17
+
use atproto_client::com::atproto::repo::{ListRecordsParams, list_records};
18
+
use atproto_identity::plc::query as plc_query;
19
+
use atproto_tap::TapClient;
20
+
use clap::{Parser, Subcommand};
21
+
use serde::Deserialize;
22
+
23
+
/// TAP extras utility for managing tracked repositories.
24
+
#[derive(Parser)]
25
+
#[command(
26
+
name = "atproto-tap-extras",
27
+
version,
28
+
about = "TAP extras utility for AT Protocol",
29
+
long_about = "Additional utilities for managing TAP tracked repositories based on social graph data."
30
+
)]
31
+
struct Args {
32
+
/// TAP service hostname (e.g., localhost:2480)
33
+
hostname: String,
34
+
35
+
/// Admin password for TAP authentication
36
+
#[arg(short, long, global = true)]
37
+
password: Option<String>,
38
+
39
+
/// PLC directory hostname for DID resolution
40
+
#[arg(long, default_value = "plc.directory", global = true)]
41
+
plc_hostname: String,
42
+
43
+
#[command(subcommand)]
44
+
command: Command,
45
+
}
46
+
47
+
#[derive(Subcommand)]
48
+
enum Command {
49
+
/// Add accounts followed by a DID to TAP tracking.
50
+
///
51
+
/// Fetches all app.bsky.graph.follow records from the specified DID's repository
52
+
/// and adds the followed DIDs to TAP for tracking.
53
+
ReposAddFollowers {
54
+
/// DID to read followers from (e.g., did:plc:xyz123)
55
+
did: String,
56
+
57
+
/// Batch size for adding repos to TAP
58
+
#[arg(long, default_value = "100")]
59
+
batch_size: usize,
60
+
61
+
/// Dry run - print DIDs without adding to TAP
62
+
#[arg(long)]
63
+
dry_run: bool,
64
+
},
65
+
}
66
+
67
+
/// Follow record structure from app.bsky.graph.follow.
68
+
#[derive(Debug, Deserialize)]
69
+
struct FollowRecord {
70
+
/// The DID of the account being followed.
71
+
subject: String,
72
+
}
73
+
74
+
#[tokio::main]
75
+
async fn main() {
76
+
let args = Args::parse();
77
+
78
+
match args.command {
79
+
Command::ReposAddFollowers {
80
+
did,
81
+
batch_size,
82
+
dry_run,
83
+
} => {
84
+
run_repos_add_followers(
85
+
&args.hostname,
86
+
args.password,
87
+
&args.plc_hostname,
88
+
&did,
89
+
batch_size,
90
+
dry_run,
91
+
)
92
+
.await;
93
+
}
94
+
}
95
+
}
96
+
97
+
async fn run_repos_add_followers(
98
+
tap_hostname: &str,
99
+
tap_password: Option<String>,
100
+
plc_hostname: &str,
101
+
did: &str,
102
+
batch_size: usize,
103
+
dry_run: bool,
104
+
) {
105
+
let http_client = reqwest::Client::new();
106
+
107
+
// Resolve the DID to get the PDS endpoint
108
+
eprintln!("Resolving DID: {}", did);
109
+
let document = match plc_query(&http_client, plc_hostname, did).await {
110
+
Ok(doc) => doc,
111
+
Err(e) => {
112
+
eprintln!("Failed to resolve DID: {}", e);
113
+
std::process::exit(1);
114
+
}
115
+
};
116
+
117
+
let pds_endpoints = document.pds_endpoints();
118
+
if pds_endpoints.is_empty() {
119
+
eprintln!("No PDS endpoint found in DID document");
120
+
std::process::exit(1);
121
+
}
122
+
let pds_url = pds_endpoints[0];
123
+
eprintln!("Using PDS: {}", pds_url);
124
+
125
+
// Collect all followed DIDs
126
+
let mut followed_dids: Vec<String> = Vec::new();
127
+
let mut cursor: Option<String> = None;
128
+
let collection = "app.bsky.graph.follow".to_string();
129
+
130
+
eprintln!("Fetching follow records...");
131
+
132
+
loop {
133
+
let params = if let Some(c) = cursor.take() {
134
+
ListRecordsParams::new().limit(100).cursor(c)
135
+
} else {
136
+
ListRecordsParams::new().limit(100)
137
+
};
138
+
139
+
let response = match list_records::<FollowRecord>(
140
+
&http_client,
141
+
&Auth::None,
142
+
pds_url,
143
+
did.to_string(),
144
+
collection.clone(),
145
+
params,
146
+
)
147
+
.await
148
+
{
149
+
Ok(resp) => resp,
150
+
Err(e) => {
151
+
eprintln!("Failed to list records: {}", e);
152
+
std::process::exit(1);
153
+
}
154
+
};
155
+
156
+
for record in &response.records {
157
+
followed_dids.push(record.value.subject.clone());
158
+
}
159
+
160
+
eprintln!(
161
+
" Fetched {} records (total: {})",
162
+
response.records.len(),
163
+
followed_dids.len()
164
+
);
165
+
166
+
match response.cursor {
167
+
Some(c) if !response.records.is_empty() => {
168
+
cursor = Some(c);
169
+
}
170
+
_ => break,
171
+
}
172
+
}
173
+
174
+
if followed_dids.is_empty() {
175
+
eprintln!("No follow records found");
176
+
return;
177
+
}
178
+
179
+
eprintln!("Found {} followed accounts", followed_dids.len());
180
+
181
+
if dry_run {
182
+
eprintln!("\nDry run - would add these DIDs to TAP:");
183
+
for did in &followed_dids {
184
+
println!("{}", did);
185
+
}
186
+
return;
187
+
}
188
+
189
+
// Add to TAP in batches
190
+
let tap_client = TapClient::new(tap_hostname, tap_password);
191
+
let mut added = 0;
192
+
193
+
for chunk in followed_dids.chunks(batch_size) {
194
+
let did_refs: Vec<&str> = chunk.iter().map(|s| s.as_str()).collect();
195
+
196
+
match tap_client.add_repos(&did_refs).await {
197
+
Ok(()) => {
198
+
added += chunk.len();
199
+
eprintln!("Added {} DIDs to TAP (total: {})", chunk.len(), added);
200
+
}
201
+
Err(e) => {
202
+
eprintln!("Failed to add repos to TAP: {}", e);
203
+
std::process::exit(1);
204
+
}
205
+
}
206
+
}
207
+
208
+
eprintln!("Successfully added {} DIDs to TAP", added);
209
+
210
+
// Print all added DIDs
211
+
for did in &followed_dids {
212
+
println!("{}", did);
213
+
}
214
+
}
+371
crates/atproto-tap/src/client.rs
···
1
+
//! HTTP client for TAP management API.
2
+
//!
3
+
//! This module provides [`TapClient`] for interacting with the TAP service's
4
+
//! HTTP management endpoints, including adding/removing tracked repositories.
5
+
6
+
use crate::errors::TapError;
7
+
use atproto_identity::model::Document;
8
+
use base64::Engine;
9
+
use base64::engine::general_purpose::STANDARD as BASE64;
10
+
use reqwest::header::{AUTHORIZATION, CONTENT_TYPE, HeaderMap, HeaderValue};
11
+
use serde::{Deserialize, Serialize};
12
+
13
+
/// HTTP client for TAP management API.
14
+
///
15
+
/// Provides methods for managing which repositories the TAP service tracks,
16
+
/// checking service health, and querying repository status.
17
+
///
18
+
/// # Example
19
+
///
20
+
/// ```ignore
21
+
/// use atproto_tap::TapClient;
22
+
///
23
+
/// let client = TapClient::new("localhost:2480", Some("admin_password".to_string()));
24
+
///
25
+
/// // Add repositories to track
26
+
/// client.add_repos(&["did:plc:xyz123", "did:plc:abc456"]).await?;
27
+
///
28
+
/// // Check health
29
+
/// if client.health().await? {
30
+
/// println!("TAP service is healthy");
31
+
/// }
32
+
/// ```
33
+
#[derive(Debug, Clone)]
34
+
pub struct TapClient {
35
+
http_client: reqwest::Client,
36
+
base_url: String,
37
+
auth_header: Option<HeaderValue>,
38
+
}
39
+
40
+
impl TapClient {
41
+
/// Create a new TAP management client.
42
+
///
43
+
/// # Arguments
44
+
///
45
+
/// * `hostname` - TAP service hostname (e.g., "localhost:2480")
46
+
/// * `admin_password` - Optional admin password for authentication
47
+
pub fn new(hostname: &str, admin_password: Option<String>) -> Self {
48
+
let auth_header = admin_password.map(|password| {
49
+
let credentials = format!("admin:{}", password);
50
+
let encoded = BASE64.encode(credentials.as_bytes());
51
+
HeaderValue::from_str(&format!("Basic {}", encoded))
52
+
.expect("Invalid auth header value")
53
+
});
54
+
55
+
Self {
56
+
http_client: reqwest::Client::new(),
57
+
base_url: format!("http://{}", hostname),
58
+
auth_header,
59
+
}
60
+
}
61
+
62
+
/// Create default headers for requests.
63
+
fn default_headers(&self) -> HeaderMap {
64
+
let mut headers = HeaderMap::new();
65
+
headers.insert(CONTENT_TYPE, HeaderValue::from_static("application/json"));
66
+
if let Some(auth) = &self.auth_header {
67
+
headers.insert(AUTHORIZATION, auth.clone());
68
+
}
69
+
headers
70
+
}
71
+
72
+
/// Add repositories to track.
73
+
///
74
+
/// Sends a POST request to `/repos/add` with the list of DIDs.
75
+
///
76
+
/// # Arguments
77
+
///
78
+
/// * `dids` - Slice of DID strings to track
79
+
///
80
+
/// # Example
81
+
///
82
+
/// ```ignore
83
+
/// client.add_repos(&[
84
+
/// "did:plc:z72i7hdynmk6r22z27h6tvur",
85
+
/// "did:plc:ewvi7nxzyoun6zhxrhs64oiz",
86
+
/// ]).await?;
87
+
/// ```
88
+
pub async fn add_repos(&self, dids: &[&str]) -> Result<(), TapError> {
89
+
let url = format!("{}/repos/add", self.base_url);
90
+
let body = AddReposRequest {
91
+
dids: dids.iter().map(|s| s.to_string()).collect(),
92
+
};
93
+
94
+
let response = self
95
+
.http_client
96
+
.post(&url)
97
+
.headers(self.default_headers())
98
+
.json(&body)
99
+
.send()
100
+
.await?;
101
+
102
+
if response.status().is_success() {
103
+
tracing::debug!(count = dids.len(), "Added repositories to TAP");
104
+
Ok(())
105
+
} else {
106
+
let status = response.status().as_u16();
107
+
let message = response.text().await.unwrap_or_default();
108
+
Err(TapError::HttpResponseError { status, message })
109
+
}
110
+
}
111
+
112
+
/// Remove repositories from tracking.
113
+
///
114
+
/// Sends a POST request to `/repos/remove` with the list of DIDs.
115
+
///
116
+
/// # Arguments
117
+
///
118
+
/// * `dids` - Slice of DID strings to stop tracking
119
+
pub async fn remove_repos(&self, dids: &[&str]) -> Result<(), TapError> {
120
+
let url = format!("{}/repos/remove", self.base_url);
121
+
let body = AddReposRequest {
122
+
dids: dids.iter().map(|s| s.to_string()).collect(),
123
+
};
124
+
125
+
let response = self
126
+
.http_client
127
+
.post(&url)
128
+
.headers(self.default_headers())
129
+
.json(&body)
130
+
.send()
131
+
.await?;
132
+
133
+
if response.status().is_success() {
134
+
tracing::debug!(count = dids.len(), "Removed repositories from TAP");
135
+
Ok(())
136
+
} else {
137
+
let status = response.status().as_u16();
138
+
let message = response.text().await.unwrap_or_default();
139
+
Err(TapError::HttpResponseError { status, message })
140
+
}
141
+
}
142
+
143
+
/// Check service health.
144
+
///
145
+
/// Sends a GET request to `/health`.
146
+
///
147
+
/// # Returns
148
+
///
149
+
/// `true` if the service is healthy, `false` otherwise.
150
+
pub async fn health(&self) -> Result<bool, TapError> {
151
+
let url = format!("{}/health", self.base_url);
152
+
153
+
let response = self
154
+
.http_client
155
+
.get(&url)
156
+
.headers(self.default_headers())
157
+
.send()
158
+
.await?;
159
+
160
+
Ok(response.status().is_success())
161
+
}
162
+
163
+
/// Resolve a DID to its DID document.
164
+
///
165
+
/// Sends a GET request to `/resolve/:did`.
166
+
///
167
+
/// # Arguments
168
+
///
169
+
/// * `did` - The DID to resolve
170
+
///
171
+
/// # Returns
172
+
///
173
+
/// The DID document for the identity.
174
+
pub async fn resolve(&self, did: &str) -> Result<Document, TapError> {
175
+
let url = format!("{}/resolve/{}", self.base_url, did);
176
+
177
+
let response = self
178
+
.http_client
179
+
.get(&url)
180
+
.headers(self.default_headers())
181
+
.send()
182
+
.await?;
183
+
184
+
if response.status().is_success() {
185
+
let doc: Document = response.json().await?;
186
+
Ok(doc)
187
+
} else {
188
+
let status = response.status().as_u16();
189
+
let message = response.text().await.unwrap_or_default();
190
+
Err(TapError::HttpResponseError { status, message })
191
+
}
192
+
}
193
+
194
+
/// Get info about a tracked repository.
195
+
///
196
+
/// Sends a GET request to `/info/:did`.
197
+
///
198
+
/// # Arguments
199
+
///
200
+
/// * `did` - The DID to get info for
201
+
///
202
+
/// # Returns
203
+
///
204
+
/// Repository tracking information.
205
+
pub async fn info(&self, did: &str) -> Result<RepoInfo, TapError> {
206
+
let url = format!("{}/info/{}", self.base_url, did);
207
+
208
+
let response = self
209
+
.http_client
210
+
.get(&url)
211
+
.headers(self.default_headers())
212
+
.send()
213
+
.await?;
214
+
215
+
if response.status().is_success() {
216
+
let info: RepoInfo = response.json().await?;
217
+
Ok(info)
218
+
} else {
219
+
let status = response.status().as_u16();
220
+
let message = response.text().await.unwrap_or_default();
221
+
Err(TapError::HttpResponseError { status, message })
222
+
}
223
+
}
224
+
}
225
+
226
+
/// Request body for adding/removing repositories.
227
+
#[derive(Debug, Serialize)]
228
+
struct AddReposRequest {
229
+
dids: Vec<String>,
230
+
}
231
+
232
+
/// Repository tracking information.
233
+
#[derive(Debug, Clone, Serialize, Deserialize)]
234
+
pub struct RepoInfo {
235
+
/// The repository DID.
236
+
pub did: Box<str>,
237
+
/// Current sync state.
238
+
pub state: RepoState,
239
+
/// The handle for the repository.
240
+
#[serde(default)]
241
+
pub handle: Option<Box<str>>,
242
+
/// Number of records in the repository.
243
+
#[serde(default)]
244
+
pub records: u64,
245
+
/// Current repository revision.
246
+
#[serde(default)]
247
+
pub rev: Option<Box<str>>,
248
+
/// Number of retries for syncing.
249
+
#[serde(default)]
250
+
pub retries: u32,
251
+
/// Error message if any.
252
+
#[serde(default)]
253
+
pub error: Option<Box<str>>,
254
+
/// Additional fields may be present depending on TAP version.
255
+
#[serde(flatten)]
256
+
pub extra: serde_json::Value,
257
+
}
258
+
259
+
/// Repository sync state.
260
+
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
261
+
#[serde(rename_all = "lowercase")]
262
+
pub enum RepoState {
263
+
/// Repository is active and synced.
264
+
Active,
265
+
/// Repository is currently syncing.
266
+
Syncing,
267
+
/// Repository is fully synced.
268
+
Synced,
269
+
/// Sync failed for this repository.
270
+
Failed,
271
+
/// Repository is queued for sync.
272
+
Queued,
273
+
/// Unknown state.
274
+
#[serde(other)]
275
+
Unknown,
276
+
}
277
+
278
+
/// Deprecated alias for RepoState.
279
+
#[deprecated(since = "0.13.0", note = "Use RepoState instead")]
280
+
pub type RepoStatus = RepoState;
281
+
282
+
impl std::fmt::Display for RepoState {
283
+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
284
+
match self {
285
+
RepoState::Active => write!(f, "active"),
286
+
RepoState::Syncing => write!(f, "syncing"),
287
+
RepoState::Synced => write!(f, "synced"),
288
+
RepoState::Failed => write!(f, "failed"),
289
+
RepoState::Queued => write!(f, "queued"),
290
+
RepoState::Unknown => write!(f, "unknown"),
291
+
}
292
+
}
293
+
}
294
+
295
+
#[cfg(test)]
296
+
mod tests {
297
+
use super::*;
298
+
299
+
#[test]
300
+
fn test_client_creation() {
301
+
let client = TapClient::new("localhost:2480", None);
302
+
assert_eq!(client.base_url, "http://localhost:2480");
303
+
assert!(client.auth_header.is_none());
304
+
305
+
let client = TapClient::new("localhost:2480", Some("secret".to_string()));
306
+
assert!(client.auth_header.is_some());
307
+
}
308
+
309
+
#[test]
310
+
fn test_repo_state_display() {
311
+
assert_eq!(RepoState::Active.to_string(), "active");
312
+
assert_eq!(RepoState::Syncing.to_string(), "syncing");
313
+
assert_eq!(RepoState::Synced.to_string(), "synced");
314
+
assert_eq!(RepoState::Failed.to_string(), "failed");
315
+
assert_eq!(RepoState::Queued.to_string(), "queued");
316
+
assert_eq!(RepoState::Unknown.to_string(), "unknown");
317
+
}
318
+
319
+
#[test]
320
+
fn test_repo_state_deserialize() {
321
+
let json = r#""active""#;
322
+
let state: RepoState = serde_json::from_str(json).unwrap();
323
+
assert_eq!(state, RepoState::Active);
324
+
325
+
let json = r#""syncing""#;
326
+
let state: RepoState = serde_json::from_str(json).unwrap();
327
+
assert_eq!(state, RepoState::Syncing);
328
+
329
+
let json = r#""some_new_state""#;
330
+
let state: RepoState = serde_json::from_str(json).unwrap();
331
+
assert_eq!(state, RepoState::Unknown);
332
+
}
333
+
334
+
#[test]
335
+
fn test_repo_info_deserialize() {
336
+
let json = r#"{"did":"did:plc:cbkjy5n7bk3ax2wplmtjofq2","error":"","handle":"ngerakines.me","records":21382,"retries":0,"rev":"3mam4aazabs2m","state":"active"}"#;
337
+
let info: RepoInfo = serde_json::from_str(json).unwrap();
338
+
assert_eq!(&*info.did, "did:plc:cbkjy5n7bk3ax2wplmtjofq2");
339
+
assert_eq!(info.state, RepoState::Active);
340
+
assert_eq!(info.handle.as_deref(), Some("ngerakines.me"));
341
+
assert_eq!(info.records, 21382);
342
+
assert_eq!(info.retries, 0);
343
+
assert_eq!(info.rev.as_deref(), Some("3mam4aazabs2m"));
344
+
// Empty string deserializes as Some("")
345
+
assert_eq!(info.error.as_deref(), Some(""));
346
+
}
347
+
348
+
#[test]
349
+
fn test_repo_info_deserialize_minimal() {
350
+
// Test with only required fields
351
+
let json = r#"{"did":"did:plc:test","state":"syncing"}"#;
352
+
let info: RepoInfo = serde_json::from_str(json).unwrap();
353
+
assert_eq!(&*info.did, "did:plc:test");
354
+
assert_eq!(info.state, RepoState::Syncing);
355
+
assert_eq!(info.handle, None);
356
+
assert_eq!(info.records, 0);
357
+
assert_eq!(info.retries, 0);
358
+
assert_eq!(info.rev, None);
359
+
assert_eq!(info.error, None);
360
+
}
361
+
362
+
#[test]
363
+
fn test_add_repos_request_serialize() {
364
+
let req = AddReposRequest {
365
+
dids: vec!["did:plc:xyz".to_string(), "did:plc:abc".to_string()],
366
+
};
367
+
let json = serde_json::to_string(&req).unwrap();
368
+
assert!(json.contains("dids"));
369
+
assert!(json.contains("did:plc:xyz"));
370
+
}
371
+
}
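A hedged usage sketch of the management client above, checking one repository's tracking state; it assumes a tokio runtime and that `RepoState` is reachable from the crate root like `TapClient` (otherwise import it from the client module).

```rust
use atproto_tap::{RepoState, TapClient};

#[tokio::main]
async fn main() {
    // Assumes a TAP service on localhost:2480; the DID is the one used in the docs above.
    let client = TapClient::new("localhost:2480", Some("admin_password".to_string()));

    match client.info("did:plc:z72i7hdynmk6r22z27h6tvur").await {
        Ok(info) => match info.state {
            RepoState::Active | RepoState::Synced => {
                println!("tracked: {} records at rev {:?}", info.records, info.rev)
            }
            RepoState::Failed => eprintln!("sync failed: {:?}", info.error),
            other => println!("state: {}", other),
        },
        Err(e) => eprintln!("info request failed: {}", e),
    }
}
```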
+220
crates/atproto-tap/src/config.rs
···
1
+
//! Configuration for TAP stream connections.
2
+
//!
3
+
//! This module provides the [`TapConfig`] struct for configuring TAP stream
4
+
//! connections, including hostname, authentication, and reconnection behavior.
5
+
6
+
use std::time::Duration;
7
+
8
+
/// Configuration for a TAP stream connection.
9
+
///
10
+
/// Use [`TapConfig::builder()`] for ergonomic construction with defaults.
11
+
///
12
+
/// # Example
13
+
///
14
+
/// ```
15
+
/// use atproto_tap::TapConfig;
16
+
/// use std::time::Duration;
17
+
///
18
+
/// let config = TapConfig::builder()
19
+
/// .hostname("localhost:2480")
20
+
/// .admin_password("secret")
21
+
/// .send_acks(true)
22
+
/// .max_reconnect_attempts(Some(10))
23
+
/// .build();
24
+
/// ```
25
+
#[derive(Debug, Clone)]
26
+
pub struct TapConfig {
27
+
/// TAP service hostname (e.g., "localhost:2480").
28
+
///
29
+
/// The WebSocket URL is constructed as `ws://{hostname}/channel`.
30
+
pub hostname: String,
31
+
32
+
/// Optional admin password for authentication.
33
+
///
34
+
/// If set, HTTP Basic Auth is used with username "admin".
35
+
pub admin_password: Option<String>,
36
+
37
+
/// Whether to send acknowledgments for received messages.
38
+
///
39
+
/// Default: `true`. Set to `false` if the TAP service has acks disabled.
40
+
pub send_acks: bool,
41
+
42
+
/// User-Agent header value for WebSocket connections.
43
+
pub user_agent: String,
44
+
45
+
/// Maximum reconnection attempts before giving up.
46
+
///
47
+
/// `None` means unlimited reconnection attempts (default).
48
+
pub max_reconnect_attempts: Option<u32>,
49
+
50
+
/// Initial delay before first reconnection attempt.
51
+
///
52
+
/// Default: 1 second.
53
+
pub initial_reconnect_delay: Duration,
54
+
55
+
/// Maximum delay between reconnection attempts.
56
+
///
57
+
/// Default: 60 seconds.
58
+
pub max_reconnect_delay: Duration,
59
+
60
+
/// Multiplier for exponential backoff between reconnections.
61
+
///
62
+
/// Default: 2.0 (doubles the delay each attempt).
63
+
pub reconnect_backoff_multiplier: f64,
64
+
}
65
+
66
+
impl Default for TapConfig {
67
+
fn default() -> Self {
68
+
Self {
69
+
hostname: "localhost:2480".to_string(),
70
+
admin_password: None,
71
+
send_acks: true,
72
+
user_agent: format!("atproto-tap/{}", env!("CARGO_PKG_VERSION")),
73
+
max_reconnect_attempts: None,
74
+
initial_reconnect_delay: Duration::from_secs(1),
75
+
max_reconnect_delay: Duration::from_secs(60),
76
+
reconnect_backoff_multiplier: 2.0,
77
+
}
78
+
}
79
+
}
80
+
81
+
impl TapConfig {
82
+
/// Create a new configuration builder with defaults.
83
+
pub fn builder() -> TapConfigBuilder {
84
+
TapConfigBuilder::default()
85
+
}
86
+
87
+
/// Create a minimal configuration for the given hostname.
88
+
pub fn new(hostname: impl Into<String>) -> Self {
89
+
Self {
90
+
hostname: hostname.into(),
91
+
..Default::default()
92
+
}
93
+
}
94
+
95
+
/// Returns the WebSocket URL for the TAP channel.
96
+
pub fn ws_url(&self) -> String {
97
+
format!("ws://{}/channel", self.hostname)
98
+
}
99
+
100
+
/// Returns the HTTP base URL for the TAP management API.
101
+
pub fn http_base_url(&self) -> String {
102
+
format!("http://{}", self.hostname)
103
+
}
104
+
}
105
+
106
+
/// Builder for [`TapConfig`].
107
+
#[derive(Debug, Clone, Default)]
108
+
pub struct TapConfigBuilder {
109
+
config: TapConfig,
110
+
}
111
+
112
+
impl TapConfigBuilder {
113
+
/// Set the TAP service hostname.
114
+
pub fn hostname(mut self, hostname: impl Into<String>) -> Self {
115
+
self.config.hostname = hostname.into();
116
+
self
117
+
}
118
+
119
+
/// Set the admin password for authentication.
120
+
pub fn admin_password(mut self, password: impl Into<String>) -> Self {
121
+
self.config.admin_password = Some(password.into());
122
+
self
123
+
}
124
+
125
+
/// Set whether to send acknowledgments.
126
+
pub fn send_acks(mut self, send_acks: bool) -> Self {
127
+
self.config.send_acks = send_acks;
128
+
self
129
+
}
130
+
131
+
/// Set the User-Agent header value.
132
+
pub fn user_agent(mut self, user_agent: impl Into<String>) -> Self {
133
+
self.config.user_agent = user_agent.into();
134
+
self
135
+
}
136
+
137
+
/// Set the maximum reconnection attempts.
138
+
///
139
+
/// `None` means unlimited attempts.
140
+
pub fn max_reconnect_attempts(mut self, max: Option<u32>) -> Self {
141
+
self.config.max_reconnect_attempts = max;
142
+
self
143
+
}
144
+
145
+
/// Set the initial reconnection delay.
146
+
pub fn initial_reconnect_delay(mut self, delay: Duration) -> Self {
147
+
self.config.initial_reconnect_delay = delay;
148
+
self
149
+
}
150
+
151
+
/// Set the maximum reconnection delay.
152
+
pub fn max_reconnect_delay(mut self, delay: Duration) -> Self {
153
+
self.config.max_reconnect_delay = delay;
154
+
self
155
+
}
156
+
157
+
/// Set the reconnection backoff multiplier.
158
+
pub fn reconnect_backoff_multiplier(mut self, multiplier: f64) -> Self {
159
+
self.config.reconnect_backoff_multiplier = multiplier;
160
+
self
161
+
}
162
+
163
+
/// Build the configuration.
164
+
pub fn build(self) -> TapConfig {
165
+
self.config
166
+
}
167
+
}
168
+
169
+
#[cfg(test)]
170
+
mod tests {
171
+
use super::*;
172
+
173
+
#[test]
174
+
fn test_default_config() {
175
+
let config = TapConfig::default();
176
+
assert_eq!(config.hostname, "localhost:2480");
177
+
assert!(config.admin_password.is_none());
178
+
assert!(config.send_acks);
179
+
assert!(config.max_reconnect_attempts.is_none());
180
+
assert_eq!(config.initial_reconnect_delay, Duration::from_secs(1));
181
+
assert_eq!(config.max_reconnect_delay, Duration::from_secs(60));
182
+
assert!((config.reconnect_backoff_multiplier - 2.0).abs() < f64::EPSILON);
183
+
}
184
+
185
+
#[test]
186
+
fn test_builder() {
187
+
let config = TapConfig::builder()
188
+
.hostname("tap.example.com:2480")
189
+
.admin_password("secret123")
190
+
.send_acks(false)
191
+
.max_reconnect_attempts(Some(5))
192
+
.initial_reconnect_delay(Duration::from_millis(500))
193
+
.max_reconnect_delay(Duration::from_secs(30))
194
+
.reconnect_backoff_multiplier(1.5)
195
+
.build();
196
+
197
+
assert_eq!(config.hostname, "tap.example.com:2480");
198
+
assert_eq!(config.admin_password, Some("secret123".to_string()));
199
+
assert!(!config.send_acks);
200
+
assert_eq!(config.max_reconnect_attempts, Some(5));
201
+
assert_eq!(config.initial_reconnect_delay, Duration::from_millis(500));
202
+
assert_eq!(config.max_reconnect_delay, Duration::from_secs(30));
203
+
assert!((config.reconnect_backoff_multiplier - 1.5).abs() < f64::EPSILON);
204
+
}
205
+
206
+
#[test]
207
+
fn test_ws_url() {
208
+
let config = TapConfig::new("localhost:2480");
209
+
assert_eq!(config.ws_url(), "ws://localhost:2480/channel");
210
+
211
+
let config = TapConfig::new("tap.example.com:8080");
212
+
assert_eq!(config.ws_url(), "ws://tap.example.com:8080/channel");
213
+
}
214
+
215
+
#[test]
216
+
fn test_http_base_url() {
217
+
let config = TapConfig::new("localhost:2480");
218
+
assert_eq!(config.http_base_url(), "http://localhost:2480");
219
+
}
220
+
}
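The three reconnect fields are consumed by the stream's reconnect loop, which is not part of this file; a minimal sketch, assuming plain exponential backoff with a cap, of how the delay for the nth attempt would typically be derived from them.

```rust
use std::time::Duration;
use atproto_tap::TapConfig;

// Hypothetical helper: delay = initial * multiplier^attempt, capped at the maximum.
fn reconnect_delay(config: &TapConfig, attempt: u32) -> Duration {
    let factor = config.reconnect_backoff_multiplier.powi(attempt as i32);
    config
        .initial_reconnect_delay
        .mul_f64(factor)
        .min(config.max_reconnect_delay)
}

fn main() {
    let config = TapConfig::new("localhost:2480");
    // With the defaults (1s initial, 2.0 multiplier, 60s cap): 1s, 2s, 4s, ..., capped at 60s.
    for attempt in 0..8 {
        println!("attempt {attempt}: {:?}", reconnect_delay(&config, attempt));
    }
}
```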
+168
crates/atproto-tap/src/connection.rs
···
1
+
//! WebSocket connection management for TAP streams.
2
+
//!
3
+
//! This module handles the low-level WebSocket connection to a TAP service,
4
+
//! including authentication and message sending/receiving.
5
+
6
+
use crate::config::TapConfig;
7
+
use crate::errors::TapError;
8
+
use base64::Engine;
9
+
use base64::engine::general_purpose::STANDARD as BASE64;
10
+
use futures::{SinkExt, StreamExt};
11
+
use http::Uri;
12
+
use std::str::FromStr;
13
+
use tokio_websockets::{ClientBuilder, Message, WebSocketStream};
14
+
use tokio_websockets::MaybeTlsStream;
15
+
use tokio::net::TcpStream;
16
+
17
+
/// WebSocket connection to a TAP service.
18
+
pub(crate) struct TapConnection {
19
+
/// The underlying WebSocket stream.
20
+
ws: WebSocketStream<MaybeTlsStream<TcpStream>>,
21
+
/// Pre-allocated buffer for acknowledgment messages.
22
+
ack_buffer: Vec<u8>,
23
+
}
24
+
25
+
impl TapConnection {
26
+
/// Establish a new WebSocket connection to the TAP service.
27
+
pub async fn connect(config: &TapConfig) -> Result<Self, TapError> {
28
+
let uri = Uri::from_str(&config.ws_url())
29
+
.map_err(|e| TapError::InvalidUrl(e.to_string()))?;
30
+
31
+
let mut builder = ClientBuilder::from_uri(uri);
32
+
33
+
// Add User-Agent header
34
+
builder = builder
35
+
.add_header(
36
+
http::header::USER_AGENT,
37
+
http::HeaderValue::from_str(&config.user_agent)
38
+
.map_err(|e| TapError::ConnectionFailed(format!("Invalid user agent: {}", e)))?,
39
+
)
40
+
.map_err(|e| TapError::ConnectionFailed(format!("Failed to add header: {}", e)))?;
41
+
42
+
// Add Basic Auth header if password is configured
43
+
if let Some(password) = &config.admin_password {
44
+
let credentials = format!("admin:{}", password);
45
+
let encoded = BASE64.encode(credentials.as_bytes());
46
+
let auth_value = format!("Basic {}", encoded);
47
+
48
+
builder = builder
49
+
.add_header(
50
+
http::header::AUTHORIZATION,
51
+
http::HeaderValue::from_str(&auth_value)
52
+
.map_err(|e| TapError::ConnectionFailed(format!("Invalid auth header: {}", e)))?,
53
+
)
54
+
.map_err(|e| TapError::ConnectionFailed(format!("Failed to add auth header: {}", e)))?;
55
+
}
56
+
57
+
// Connect
58
+
let (ws, _response) = builder
59
+
.connect()
60
+
.await
61
+
.map_err(|e| TapError::ConnectionFailed(e.to_string()))?;
62
+
63
+
tracing::debug!(hostname = %config.hostname, "Connected to TAP service");
64
+
65
+
Ok(Self {
66
+
ws,
67
+
ack_buffer: Vec::with_capacity(48), // {"type":"ack","id":18446744073709551615} is 40 bytes max
68
+
})
69
+
}
70
+
71
+
/// Receive the next message from the WebSocket.
72
+
///
73
+
/// Returns `None` if the connection was closed cleanly.
74
+
pub async fn recv(&mut self) -> Result<Option<String>, TapError> {
75
+
match self.ws.next().await {
76
+
Some(Ok(msg)) => {
77
+
if msg.is_text() {
78
+
msg.as_text()
79
+
.map(|s| Some(s.to_string()))
80
+
.ok_or_else(|| TapError::ParseError("Failed to get text from message".into()))
81
+
} else if msg.is_close() {
82
+
tracing::debug!("Received close frame from TAP service");
83
+
Ok(None)
84
+
} else {
85
+
// Ignore ping/pong and binary messages
86
+
tracing::trace!("Received non-text message, ignoring");
87
+
// Recurse to get the next text message
88
+
Box::pin(self.recv()).await
89
+
}
90
+
}
91
+
Some(Err(e)) => Err(TapError::ConnectionFailed(e.to_string())),
92
+
None => {
93
+
tracing::debug!("WebSocket stream ended");
94
+
Ok(None)
95
+
}
96
+
}
97
+
}
98
+
99
+
/// Send an acknowledgment for the given event ID.
100
+
///
101
+
/// Uses a pre-allocated buffer and itoa for allocation-free formatting.
102
+
/// Format: `{"type":"ack","id":12345}`
103
+
pub async fn send_ack(&mut self, id: u64) -> Result<(), TapError> {
104
+
self.ack_buffer.clear();
105
+
self.ack_buffer.extend_from_slice(b"{\"type\":\"ack\",\"id\":");
106
+
let mut itoa_buf = itoa::Buffer::new();
107
+
self.ack_buffer.extend_from_slice(itoa_buf.format(id).as_bytes());
108
+
self.ack_buffer.push(b'}');
109
+
110
+
// All bytes are ASCII so this is always valid UTF-8
111
+
let msg = std::str::from_utf8(&self.ack_buffer)
112
+
.expect("ack buffer contains only ASCII");
113
+
114
+
self.ws
115
+
.send(Message::text(msg.to_string()))
116
+
.await
117
+
.map_err(|e| TapError::AckFailed(e.to_string()))?;
118
+
119
+
// Flush to ensure the ack is sent immediately
120
+
self.ws
121
+
.flush()
122
+
.await
123
+
.map_err(|e| TapError::AckFailed(format!("Failed to flush ack: {}", e)))?;
124
+
125
+
tracing::trace!(id, "Sent ack");
126
+
Ok(())
127
+
}
128
+
129
+
/// Close the WebSocket connection gracefully.
130
+
pub async fn close(&mut self) -> Result<(), TapError> {
131
+
self.ws
132
+
.close()
133
+
.await
134
+
.map_err(|e| TapError::ConnectionFailed(format!("Failed to close: {}", e)))?;
135
+
Ok(())
136
+
}
137
+
}
138
+
139
+
#[cfg(test)]
140
+
mod tests {
141
+
#[test]
142
+
fn test_ack_buffer_format() {
143
+
// Test that our manual JSON formatting is correct
144
+
// Format: {"type":"ack","id":12345}
145
+
let mut buffer = Vec::with_capacity(64);
146
+
147
+
let id: u64 = 12345;
148
+
buffer.clear();
149
+
buffer.extend_from_slice(b"{\"type\":\"ack\",\"id\":");
150
+
let mut itoa_buf = itoa::Buffer::new();
151
+
buffer.extend_from_slice(itoa_buf.format(id).as_bytes());
152
+
buffer.push(b'}');
153
+
154
+
let result = std::str::from_utf8(&buffer).unwrap();
155
+
assert_eq!(result, r#"{"type":"ack","id":12345}"#);
156
+
157
+
// Test max u64
158
+
let id: u64 = u64::MAX;
159
+
buffer.clear();
160
+
buffer.extend_from_slice(b"{\"type\":\"ack\",\"id\":");
161
+
buffer.extend_from_slice(itoa_buf.format(id).as_bytes());
162
+
buffer.push(b'}');
163
+
164
+
let result = std::str::from_utf8(&buffer).unwrap();
165
+
assert_eq!(result, r#"{"type":"ack","id":18446744073709551615}"#);
166
+
assert!(buffer.len() <= 64); // Fits in our pre-allocated buffer
167
+
}
168
+
}
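
For reference, the Basic Auth value built in `TapConnection::connect` is just `base64("admin:<password>")` prefixed with `Basic `. A standalone sketch of that construction, using the same `base64` engine imported by this file:

```rust
use base64::Engine;
use base64::engine::general_purpose::STANDARD as BASE64;

// Same construction as in TapConnection::connect.
fn basic_auth_header(password: &str) -> String {
    format!("Basic {}", BASE64.encode(format!("admin:{}", password).as_bytes()))
}

fn main() {
    // base64("admin:secret123") == "YWRtaW46c2VjcmV0MTIz"
    assert_eq!(basic_auth_header("secret123"), "Basic YWRtaW46c2VjcmV0MTIz");
}
```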
+143
crates/atproto-tap/src/errors.rs
···
1
+
//! Error types for TAP operations.
2
+
//!
3
+
//! This module defines the error types returned by TAP stream and client operations.
4
+
5
+
use thiserror::Error;
6
+
7
+
/// Errors that can occur during TAP operations.
8
+
#[derive(Debug, Error)]
9
+
pub enum TapError {
10
+
/// WebSocket connection failed.
11
+
#[error("error-atproto-tap-connection-1 WebSocket connection failed: {0}")]
12
+
ConnectionFailed(String),
13
+
14
+
/// Connection was closed unexpectedly.
15
+
#[error("error-atproto-tap-connection-2 Connection closed unexpectedly")]
16
+
ConnectionClosed,
17
+
18
+
/// Maximum reconnection attempts exceeded.
19
+
#[error("error-atproto-tap-connection-3 Maximum reconnection attempts exceeded after {0} attempts")]
20
+
MaxReconnectAttemptsExceeded(u32),
21
+
22
+
/// Authentication failed.
23
+
#[error("error-atproto-tap-auth-1 Authentication failed: {0}")]
24
+
AuthenticationFailed(String),
25
+
26
+
/// Failed to parse a message from the server.
27
+
#[error("error-atproto-tap-parse-1 Failed to parse message: {0}")]
28
+
ParseError(String),
29
+
30
+
/// Failed to send an acknowledgment.
31
+
#[error("error-atproto-tap-ack-1 Failed to send acknowledgment: {0}")]
32
+
AckFailed(String),
33
+
34
+
/// HTTP request failed.
35
+
#[error("error-atproto-tap-http-1 HTTP request failed: {0}")]
36
+
HttpError(String),
37
+
38
+
/// HTTP response indicated an error.
39
+
#[error("error-atproto-tap-http-2 HTTP error response: {status} - {message}")]
40
+
HttpResponseError {
41
+
/// HTTP status code.
42
+
status: u16,
43
+
/// Error message from response.
44
+
message: String,
45
+
},
46
+
47
+
/// Invalid URL.
48
+
#[error("error-atproto-tap-url-1 Invalid URL: {0}")]
49
+
InvalidUrl(String),
50
+
51
+
/// I/O error.
52
+
#[error("error-atproto-tap-io-1 I/O error: {0}")]
53
+
IoError(#[from] std::io::Error),
54
+
55
+
/// JSON serialization/deserialization error.
56
+
#[error("error-atproto-tap-json-1 JSON error: {0}")]
57
+
JsonError(#[from] serde_json::Error),
58
+
59
+
/// Stream has been closed and cannot be used.
60
+
#[error("error-atproto-tap-stream-1 Stream is closed")]
61
+
StreamClosed,
62
+
63
+
/// Operation timed out.
64
+
#[error("error-atproto-tap-timeout-1 Operation timed out")]
65
+
Timeout,
66
+
}
67
+
68
+
impl TapError {
69
+
/// Returns true if this error indicates a connection issue that may be recoverable.
70
+
pub fn is_connection_error(&self) -> bool {
71
+
matches!(
72
+
self,
73
+
TapError::ConnectionFailed(_)
74
+
| TapError::ConnectionClosed
75
+
| TapError::IoError(_)
76
+
| TapError::Timeout
77
+
)
78
+
}
79
+
80
+
/// Returns true if this error is a parse error that doesn't affect connection state.
81
+
pub fn is_parse_error(&self) -> bool {
82
+
matches!(self, TapError::ParseError(_) | TapError::JsonError(_))
83
+
}
84
+
85
+
/// Returns true if this error is fatal and the stream should not attempt recovery.
86
+
pub fn is_fatal(&self) -> bool {
87
+
matches!(
88
+
self,
89
+
TapError::MaxReconnectAttemptsExceeded(_)
90
+
| TapError::AuthenticationFailed(_)
91
+
| TapError::StreamClosed
92
+
)
93
+
}
94
+
}
95
+
96
+
impl From<reqwest::Error> for TapError {
97
+
fn from(err: reqwest::Error) -> Self {
98
+
if err.is_timeout() {
99
+
TapError::Timeout
100
+
} else if err.is_connect() {
101
+
TapError::ConnectionFailed(err.to_string())
102
+
} else {
103
+
TapError::HttpError(err.to_string())
104
+
}
105
+
}
106
+
}
107
+
108
+
#[cfg(test)]
109
+
mod tests {
110
+
use super::*;
111
+
112
+
#[test]
113
+
fn test_error_classification() {
114
+
assert!(TapError::ConnectionFailed("test".into()).is_connection_error());
115
+
assert!(TapError::ConnectionClosed.is_connection_error());
116
+
assert!(TapError::Timeout.is_connection_error());
117
+
118
+
assert!(TapError::ParseError("test".into()).is_parse_error());
119
+
assert!(TapError::JsonError(serde_json::from_str::<()>("invalid").unwrap_err()).is_parse_error());
120
+
121
+
assert!(TapError::MaxReconnectAttemptsExceeded(5).is_fatal());
122
+
assert!(TapError::AuthenticationFailed("test".into()).is_fatal());
123
+
assert!(TapError::StreamClosed.is_fatal());
124
+
125
+
// Non-fatal errors
126
+
assert!(!TapError::ConnectionFailed("test".into()).is_fatal());
127
+
assert!(!TapError::ParseError("test".into()).is_fatal());
128
+
}
129
+
130
+
#[test]
131
+
fn test_error_display() {
132
+
let err = TapError::ConnectionFailed("refused".to_string());
133
+
assert!(err.to_string().contains("error-atproto-tap-connection-1"));
134
+
assert!(err.to_string().contains("refused"));
135
+
136
+
let err = TapError::HttpResponseError {
137
+
status: 404,
138
+
message: "Not Found".to_string(),
139
+
};
140
+
assert!(err.to_string().contains("404"));
141
+
assert!(err.to_string().contains("Not Found"));
142
+
}
143
+
}
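
The classification helpers (`is_connection_error`, `is_parse_error`, `is_fatal`) are intended to drive consumer-side handling. A minimal sketch of how a consumer of the stream might branch on them, using only the methods defined above:

```rust
use atproto_tap::TapError;

// Sketch: decide what to do with an error yielded by the stream.
enum NextStep {
    /// Skip the item; the connection is still healthy (parse errors).
    Skip,
    /// Keep polling; the stream reconnects on its own (connection errors).
    KeepPolling,
    /// Stop consuming; the stream will not recover (fatal errors).
    Stop,
}

fn classify(err: &TapError) -> NextStep {
    if err.is_fatal() {
        NextStep::Stop
    } else if err.is_parse_error() {
        NextStep::Skip
    } else {
        // Connection errors (and anything else) are retried by the stream.
        NextStep::KeepPolling
    }
}

fn main() {
    let step = classify(&TapError::ConnectionClosed);
    assert!(matches!(step, NextStep::KeepPolling));
}
```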
+488
crates/atproto-tap/src/events.rs
···
1
+
//! TAP event types for AT Protocol record and identity events.
2
+
//!
3
+
//! This module defines the event structures received from a TAP service.
4
+
//! Events are optimized for memory efficiency using:
5
+
//! - `CompactString` for small strings (SSO for ≤24 bytes)
6
+
//! - `Box<str>` for immutable strings (no capacity overhead)
7
+
//! - `serde_json::Value` for record payloads (allows lazy access)
8
+
9
+
use compact_str::CompactString;
10
+
use serde::de::{self, Deserializer, IgnoredAny, MapAccess, Visitor};
11
+
use serde::{Deserialize, Serialize, de::DeserializeOwned};
12
+
use std::fmt;
13
+
14
+
/// A TAP event received from the stream.
15
+
///
16
+
/// TAP delivers two types of events:
17
+
/// - `Record`: Repository record changes (create, update, delete)
18
+
/// - `Identity`: Identity/handle changes for accounts
19
+
#[derive(Debug, Clone, Serialize, Deserialize)]
20
+
#[serde(tag = "type", rename_all = "lowercase")]
21
+
pub enum TapEvent {
22
+
/// A repository record event (create, update, or delete).
23
+
Record {
24
+
/// Sequential event identifier.
25
+
id: u64,
26
+
/// The record event data.
27
+
record: RecordEvent,
28
+
},
29
+
/// An identity change event.
30
+
Identity {
31
+
/// Sequential event identifier.
32
+
id: u64,
33
+
/// The identity event data.
34
+
identity: IdentityEvent,
35
+
},
36
+
}
37
+
38
+
impl TapEvent {
39
+
/// Returns the event ID.
40
+
pub fn id(&self) -> u64 {
41
+
match self {
42
+
TapEvent::Record { id, .. } => *id,
43
+
TapEvent::Identity { id, .. } => *id,
44
+
}
45
+
}
46
+
}
47
+
48
+
/// Extract only the event ID from a JSON string without fully parsing it.
49
+
///
50
+
/// This is a fallback parser used when full `TapEvent` parsing fails (e.g., due to
51
+
/// deeply nested records hitting serde_json's recursion limit). It uses `IgnoredAny`
52
+
/// to efficiently skip over nested content without building data structures, allowing
53
+
/// us to extract the ID for acknowledgment even when full parsing fails.
54
+
///
55
+
/// # Example
56
+
///
57
+
/// ```
58
+
/// use atproto_tap::extract_event_id;
59
+
///
60
+
/// let json = r#"{"type":"record","id":12345,"record":{"deeply":"nested"}}"#;
61
+
/// assert_eq!(extract_event_id(json), Some(12345));
62
+
/// ```
63
+
pub fn extract_event_id(json: &str) -> Option<u64> {
64
+
let mut deserializer = serde_json::Deserializer::from_str(json);
65
+
deserializer.disable_recursion_limit();
66
+
EventIdOnly::deserialize(&mut deserializer).ok().map(|e| e.id)
67
+
}
68
+
69
+
/// Internal struct for extracting only the "id" field from a TAP event.
70
+
#[derive(Debug)]
71
+
struct EventIdOnly {
72
+
id: u64,
73
+
}
74
+
75
+
impl<'de> Deserialize<'de> for EventIdOnly {
76
+
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
77
+
where
78
+
D: Deserializer<'de>,
79
+
{
80
+
deserializer.deserialize_map(EventIdOnlyVisitor)
81
+
}
82
+
}
83
+
84
+
struct EventIdOnlyVisitor;
85
+
86
+
impl<'de> Visitor<'de> for EventIdOnlyVisitor {
87
+
type Value = EventIdOnly;
88
+
89
+
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
90
+
formatter.write_str("a map with an 'id' field")
91
+
}
92
+
93
+
fn visit_map<M>(self, mut map: M) -> Result<Self::Value, M::Error>
94
+
where
95
+
M: MapAccess<'de>,
96
+
{
97
+
let mut id: Option<u64> = None;
98
+
99
+
while let Some(key) = map.next_key::<&str>()? {
100
+
if key == "id" {
101
+
id = Some(map.next_value()?);
102
+
// Found what we need - skip the rest efficiently using IgnoredAny
103
+
// which handles deeply nested structures without recursion issues
104
+
while map.next_entry::<IgnoredAny, IgnoredAny>()?.is_some() {}
105
+
break;
106
+
} else {
107
+
// Skip this value without fully parsing it
108
+
map.next_value::<IgnoredAny>()?;
109
+
}
110
+
}
111
+
112
+
id.map(|id| EventIdOnly { id })
113
+
.ok_or_else(|| de::Error::missing_field("id"))
114
+
}
115
+
}
116
+
117
+
/// A repository record event from TAP.
118
+
///
119
+
/// Contains information about a record change in a user's repository,
120
+
/// including the action taken and the record data (for creates/updates).
121
+
#[derive(Debug, Clone, Serialize, Deserialize)]
122
+
pub struct RecordEvent {
123
+
/// True if from live firehose, false if from backfill/resync.
124
+
///
125
+
/// During initial sync or recovery, TAP delivers historical events
126
+
/// with `live: false`. Once caught up, live events have `live: true`.
127
+
pub live: bool,
128
+
129
+
/// Repository revision identifier.
130
+
///
131
+
/// Typically 13 characters, stored inline via CompactString SSO.
132
+
pub rev: CompactString,
133
+
134
+
/// Actor DID (e.g., "did:plc:xyz123").
135
+
pub did: Box<str>,
136
+
137
+
/// Collection NSID (e.g., "app.bsky.feed.post").
138
+
pub collection: Box<str>,
139
+
140
+
/// Record key within the collection.
141
+
///
142
+
/// Typically a TID (13 characters), stored inline via CompactString SSO.
143
+
pub rkey: CompactString,
144
+
145
+
/// The action performed on the record.
146
+
pub action: RecordAction,
147
+
148
+
/// Content identifier (CID) of the record.
149
+
///
150
+
/// Present for create and update actions, absent for delete.
151
+
#[serde(skip_serializing_if = "Option::is_none")]
152
+
pub cid: Option<CompactString>,
153
+
154
+
/// Record data as JSON value.
155
+
///
156
+
/// Present for create and update actions, absent for delete.
157
+
/// Use [`parse_record`](Self::parse_record) to deserialize on demand.
158
+
#[serde(skip_serializing_if = "Option::is_none")]
159
+
pub record: Option<serde_json::Value>,
160
+
}
161
+
162
+
impl RecordEvent {
163
+
/// Parse the record payload into a typed structure.
164
+
///
165
+
/// This method deserializes the raw JSON on demand, avoiding
166
+
/// unnecessary allocations when the record data isn't needed.
167
+
///
168
+
/// # Errors
169
+
///
170
+
/// Returns an error if the record is absent (delete events) or
171
+
/// if deserialization fails.
172
+
///
173
+
/// # Example
174
+
///
175
+
/// ```ignore
176
+
/// use serde::Deserialize;
177
+
///
178
+
/// #[derive(Deserialize)]
179
+
/// struct Post {
180
+
/// text: String,
181
+
/// #[serde(rename = "createdAt")]
182
+
/// created_at: String,
183
+
/// }
184
+
///
185
+
/// let post: Post = record_event.parse_record()?;
186
+
/// println!("Post text: {}", post.text);
187
+
/// ```
188
+
pub fn parse_record<T: DeserializeOwned>(&self) -> Result<T, serde_json::Error> {
189
+
match &self.record {
190
+
Some(value) => serde_json::from_value(value.clone()),
191
+
None => Err(serde::de::Error::custom("no record data (delete event)")),
192
+
}
193
+
}
194
+
195
+
/// Returns the record as a JSON Value reference, if present.
196
+
pub fn record_value(&self) -> Option<&serde_json::Value> {
197
+
self.record.as_ref()
198
+
}
199
+
200
+
/// Returns true if this is a delete event.
201
+
pub fn is_delete(&self) -> bool {
202
+
self.action == RecordAction::Delete
203
+
}
204
+
205
+
/// Returns the AT-URI for this record.
206
+
///
207
+
/// Format: `at://{did}/{collection}/{rkey}`
208
+
pub fn at_uri(&self) -> String {
209
+
format!("at://{}/{}/{}", self.did, self.collection, self.rkey)
210
+
}
211
+
}
212
+
213
+
/// The action performed on a record.
214
+
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
215
+
#[serde(rename_all = "lowercase")]
216
+
pub enum RecordAction {
217
+
/// A new record was created.
218
+
Create,
219
+
/// An existing record was updated.
220
+
Update,
221
+
/// A record was deleted.
222
+
Delete,
223
+
}
224
+
225
+
impl std::fmt::Display for RecordAction {
226
+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
227
+
match self {
228
+
RecordAction::Create => write!(f, "create"),
229
+
RecordAction::Update => write!(f, "update"),
230
+
RecordAction::Delete => write!(f, "delete"),
231
+
}
232
+
}
233
+
}
234
+
235
+
/// An identity change event from TAP.
236
+
///
237
+
/// Contains information about handle or account status changes.
238
+
#[derive(Debug, Clone, Serialize, Deserialize)]
239
+
pub struct IdentityEvent {
240
+
/// Actor DID.
241
+
pub did: Box<str>,
242
+
243
+
/// Current handle for the account.
244
+
pub handle: Box<str>,
245
+
246
+
/// Whether the account is currently active.
247
+
#[serde(default)]
248
+
pub is_active: bool,
249
+
250
+
/// Account status.
251
+
#[serde(default)]
252
+
pub status: IdentityStatus,
253
+
}
254
+
255
+
/// Account status in an identity event.
256
+
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default, Serialize, Deserialize)]
257
+
#[serde(rename_all = "lowercase")]
258
+
pub enum IdentityStatus {
259
+
/// Account is active and in good standing.
260
+
#[default]
261
+
Active,
262
+
/// Account has been deactivated by the user.
263
+
Deactivated,
264
+
/// Account has been suspended.
265
+
Suspended,
266
+
/// Account has been deleted.
267
+
Deleted,
268
+
/// Account has been taken down.
269
+
Takendown,
270
+
}
271
+
272
+
impl std::fmt::Display for IdentityStatus {
273
+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
274
+
match self {
275
+
IdentityStatus::Active => write!(f, "active"),
276
+
IdentityStatus::Deactivated => write!(f, "deactivated"),
277
+
IdentityStatus::Suspended => write!(f, "suspended"),
278
+
IdentityStatus::Deleted => write!(f, "deleted"),
279
+
IdentityStatus::Takendown => write!(f, "takendown"),
280
+
}
281
+
}
282
+
}
283
+
284
+
#[cfg(test)]
285
+
mod tests {
286
+
use super::*;
287
+
288
+
#[test]
289
+
fn test_parse_record_event() {
290
+
let json = r#"{
291
+
"id": 12345,
292
+
"type": "record",
293
+
"record": {
294
+
"live": true,
295
+
"rev": "3lyileto4q52k",
296
+
"did": "did:plc:z72i7hdynmk6r22z27h6tvur",
297
+
"collection": "app.bsky.feed.post",
298
+
"rkey": "3lyiletddxt2c",
299
+
"action": "create",
300
+
"cid": "bafyreigroo6vhxt62ufcndhaxzas6btq4jmniuz4egszbwuqgiyisqwqoy",
301
+
"record": {"$type": "app.bsky.feed.post", "text": "Hello world!", "createdAt": "2025-01-01T00:00:00Z"}
302
+
}
303
+
}"#;
304
+
305
+
let event: TapEvent = serde_json::from_str(json).expect("Failed to parse");
306
+
307
+
match event {
308
+
TapEvent::Record { id, record } => {
309
+
assert_eq!(id, 12345);
310
+
assert!(record.live);
311
+
assert_eq!(record.rev.as_str(), "3lyileto4q52k");
312
+
assert_eq!(&*record.did, "did:plc:z72i7hdynmk6r22z27h6tvur");
313
+
assert_eq!(&*record.collection, "app.bsky.feed.post");
314
+
assert_eq!(record.rkey.as_str(), "3lyiletddxt2c");
315
+
assert_eq!(record.action, RecordAction::Create);
316
+
assert!(record.cid.is_some());
317
+
assert!(record.record.is_some());
318
+
319
+
// Test lazy parsing
320
+
#[derive(Deserialize)]
321
+
struct Post {
322
+
text: String,
323
+
}
324
+
let post: Post = record.parse_record().expect("Failed to parse record");
325
+
assert_eq!(post.text, "Hello world!");
326
+
}
327
+
_ => panic!("Expected Record event"),
328
+
}
329
+
}
330
+
331
+
#[test]
332
+
fn test_parse_delete_event() {
333
+
let json = r#"{
334
+
"id": 12346,
335
+
"type": "record",
336
+
"record": {
337
+
"live": true,
338
+
"rev": "3lyileto4q52k",
339
+
"did": "did:plc:z72i7hdynmk6r22z27h6tvur",
340
+
"collection": "app.bsky.feed.post",
341
+
"rkey": "3lyiletddxt2c",
342
+
"action": "delete"
343
+
}
344
+
}"#;
345
+
346
+
let event: TapEvent = serde_json::from_str(json).expect("Failed to parse");
347
+
348
+
match event {
349
+
TapEvent::Record { id, record } => {
350
+
assert_eq!(id, 12346);
351
+
assert_eq!(record.action, RecordAction::Delete);
352
+
assert!(record.is_delete());
353
+
assert!(record.cid.is_none());
354
+
assert!(record.record.is_none());
355
+
}
356
+
_ => panic!("Expected Record event"),
357
+
}
358
+
}
359
+
360
+
#[test]
361
+
fn test_parse_identity_event() {
362
+
let json = r#"{
363
+
"id": 12347,
364
+
"type": "identity",
365
+
"identity": {
366
+
"did": "did:plc:z72i7hdynmk6r22z27h6tvur",
367
+
"handle": "user.bsky.social",
368
+
"is_active": true,
369
+
"status": "active"
370
+
}
371
+
}"#;
372
+
373
+
let event: TapEvent = serde_json::from_str(json).expect("Failed to parse");
374
+
375
+
match event {
376
+
TapEvent::Identity { id, identity } => {
377
+
assert_eq!(id, 12347);
378
+
assert_eq!(&*identity.did, "did:plc:z72i7hdynmk6r22z27h6tvur");
379
+
assert_eq!(&*identity.handle, "user.bsky.social");
380
+
assert!(identity.is_active);
381
+
assert_eq!(identity.status, IdentityStatus::Active);
382
+
}
383
+
_ => panic!("Expected Identity event"),
384
+
}
385
+
}
386
+
387
+
#[test]
388
+
fn test_record_action_display() {
389
+
assert_eq!(RecordAction::Create.to_string(), "create");
390
+
assert_eq!(RecordAction::Update.to_string(), "update");
391
+
assert_eq!(RecordAction::Delete.to_string(), "delete");
392
+
}
393
+
394
+
#[test]
395
+
fn test_identity_status_display() {
396
+
assert_eq!(IdentityStatus::Active.to_string(), "active");
397
+
assert_eq!(IdentityStatus::Deactivated.to_string(), "deactivated");
398
+
assert_eq!(IdentityStatus::Suspended.to_string(), "suspended");
399
+
assert_eq!(IdentityStatus::Deleted.to_string(), "deleted");
400
+
assert_eq!(IdentityStatus::Takendown.to_string(), "takendown");
401
+
}
402
+
403
+
#[test]
404
+
fn test_at_uri() {
405
+
let record = RecordEvent {
406
+
live: true,
407
+
rev: "3lyileto4q52k".into(),
408
+
did: "did:plc:xyz".into(),
409
+
collection: "app.bsky.feed.post".into(),
410
+
rkey: "abc123".into(),
411
+
action: RecordAction::Create,
412
+
cid: None,
413
+
record: None,
414
+
};
415
+
416
+
assert_eq!(record.at_uri(), "at://did:plc:xyz/app.bsky.feed.post/abc123");
417
+
}
418
+
419
+
#[test]
420
+
fn test_event_id() {
421
+
let record_event = TapEvent::Record {
422
+
id: 100,
423
+
record: RecordEvent {
424
+
live: true,
425
+
rev: "rev".into(),
426
+
did: "did".into(),
427
+
collection: "col".into(),
428
+
rkey: "rkey".into(),
429
+
action: RecordAction::Create,
430
+
cid: None,
431
+
record: None,
432
+
},
433
+
};
434
+
assert_eq!(record_event.id(), 100);
435
+
436
+
let identity_event = TapEvent::Identity {
437
+
id: 200,
438
+
identity: IdentityEvent {
439
+
did: "did".into(),
440
+
handle: "handle".into(),
441
+
is_active: true,
442
+
status: IdentityStatus::Active,
443
+
},
444
+
};
445
+
assert_eq!(identity_event.id(), 200);
446
+
}
447
+
448
+
#[test]
449
+
fn test_extract_event_id_simple() {
450
+
let json = r#"{"type":"record","id":12345,"record":{"deeply":"nested"}}"#;
451
+
assert_eq!(extract_event_id(json), Some(12345));
452
+
}
453
+
454
+
#[test]
455
+
fn test_extract_event_id_at_end() {
456
+
let json = r#"{"type":"record","record":{"deeply":"nested"},"id":99999}"#;
457
+
assert_eq!(extract_event_id(json), Some(99999));
458
+
}
459
+
460
+
#[test]
461
+
fn test_extract_event_id_missing() {
462
+
let json = r#"{"type":"record","record":{"deeply":"nested"}}"#;
463
+
assert_eq!(extract_event_id(json), None);
464
+
}
465
+
466
+
#[test]
467
+
fn test_extract_event_id_invalid_json() {
468
+
let json = r#"{"type":"record","id":123"#; // Truncated JSON
469
+
assert_eq!(extract_event_id(json), None);
470
+
}
471
+
472
+
#[test]
473
+
fn test_extract_event_id_deeply_nested() {
474
+
// Create a deeply nested JSON that would exceed serde_json's default recursion limit
475
+
let mut json = String::from(r#"{"id":42,"record":{"nested":"#);
476
+
for _ in 0..200 {
477
+
json.push_str("[");
478
+
}
479
+
json.push_str("1");
480
+
for _ in 0..200 {
481
+
json.push_str("]");
482
+
}
483
+
json.push_str("}}");
484
+
485
+
// extract_event_id should still work because it uses IgnoredAny with disabled recursion limit
486
+
assert_eq!(extract_event_id(&json), Some(42));
487
+
}
488
+
}
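
Putting the event types together: `parse_record` deserializes the raw payload only when needed, so a consumer can cheaply dispatch on action and collection first. A sketch using only types exported by this crate; the `Post` struct is illustrative and mirrors the one in the doc example above.

```rust
use atproto_tap::{RecordAction, RecordEvent, TapEvent};
use serde::Deserialize;

// Illustrative payload type; field names follow the doc example above.
#[derive(Deserialize)]
struct Post {
    text: String,
}

// Sketch: act on a single event, parsing the record payload on demand.
fn handle(event: &TapEvent) {
    match event {
        TapEvent::Record { id, record } => {
            if record.is_delete() {
                println!("#{id}: deleted {}", record.at_uri());
            } else if record.action == RecordAction::Create
                && &*record.collection == "app.bsky.feed.post"
            {
                match record.parse_record::<Post>() {
                    Ok(post) => println!("#{id}: new post: {}", post.text),
                    Err(err) => eprintln!("#{id}: unparseable post: {err}"),
                }
            }
        }
        TapEvent::Identity { id, identity } => {
            println!("#{id}: {} is now {}", identity.did, identity.handle);
        }
    }
}

fn main() {
    // Construct a minimal event the same way the events.rs tests do.
    let event = TapEvent::Record {
        id: 1,
        record: RecordEvent {
            live: true,
            rev: "3lyileto4q52k".into(),
            did: "did:plc:xyz".into(),
            collection: "app.bsky.feed.post".into(),
            rkey: "abc123".into(),
            action: RecordAction::Delete,
            cid: None,
            record: None,
        },
    };
    handle(&event);
}
```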
+119
crates/atproto-tap/src/lib.rs
···
1
+
//! TAP (Trusted Attestation Protocol) service consumer for AT Protocol.
2
+
//!
3
+
//! This crate provides a client for consuming events from a TAP service,
4
+
//! which delivers filtered, verified AT Protocol repository events.
5
+
//!
6
+
//! # Overview
7
+
//!
8
+
//! TAP is a single-tenant service that subscribes to an AT Protocol Relay and
9
+
//! outputs filtered, verified events. Key features include:
10
+
//!
11
+
//! - **Verified Events**: MST integrity checks and signature verification
12
+
//! - **Automatic Backfill**: Historical events delivered with `live: false`
13
+
//! - **Repository Filtering**: Track specific DIDs or collections
14
+
//! - **Acknowledgment Protocol**: At-least-once delivery semantics
15
+
//!
16
+
//! # Quick Start
17
+
//!
18
+
//! ```ignore
19
+
//! use atproto_tap::{connect_to, TapEvent};
20
+
//! use tokio_stream::StreamExt;
21
+
//!
22
+
//! #[tokio::main]
23
+
//! async fn main() {
24
+
//! let mut stream = connect_to("localhost:2480");
25
+
//!
26
+
//! while let Some(result) = stream.next().await {
27
+
//! match result {
28
+
//! Ok(event) => match event.as_ref() {
29
+
//! TapEvent::Record { record, .. } => {
30
+
//! println!("{} {} {}", record.action, record.collection, record.did);
31
+
//! }
32
+
//! TapEvent::Identity { identity, .. } => {
33
+
//! println!("Identity: {} = {}", identity.did, identity.handle);
34
+
//! }
35
+
//! },
36
+
//! Err(e) => eprintln!("Error: {}", e),
37
+
//! }
38
+
//! }
39
+
//! }
40
+
//! ```
41
+
//!
42
+
//! # Using with `tokio::select!`
43
+
//!
44
+
//! The stream integrates naturally with Tokio's select macro:
45
+
//!
46
+
//! ```ignore
47
+
//! use atproto_tap::{connect, TapConfig};
48
+
//! use tokio_stream::StreamExt;
49
+
//! use tokio::signal;
50
+
//!
51
+
//! #[tokio::main]
52
+
//! async fn main() {
53
+
//! let config = TapConfig::builder()
54
+
//! .hostname("localhost:2480")
55
+
//! .admin_password("secret")
56
+
//! .build();
57
+
//!
58
+
//! let mut stream = connect(config);
59
+
//!
60
+
//! loop {
61
+
//! tokio::select! {
62
+
//! Some(result) = stream.next() => {
63
+
//! // Process event
64
+
//! }
65
+
//! _ = signal::ctrl_c() => {
66
+
//! break;
67
+
//! }
68
+
//! }
69
+
//! }
70
+
//! }
71
+
//! ```
72
+
//!
73
+
//! # Management API
74
+
//!
75
+
//! Use [`TapClient`] to manage tracked repositories:
76
+
//!
77
+
//! ```ignore
78
+
//! use atproto_tap::TapClient;
79
+
//!
80
+
//! let client = TapClient::new("localhost:2480", Some("password".to_string()));
81
+
//!
82
+
//! // Add repositories to track
83
+
//! client.add_repos(&["did:plc:xyz123"]).await?;
84
+
//!
85
+
//! // Check service health
86
+
//! if client.health().await? {
87
+
//! println!("TAP service is healthy");
88
+
//! }
89
+
//! ```
90
+
//!
91
+
//! # Memory Efficiency
92
+
//!
93
+
//! This crate is optimized for high-throughput event processing:
94
+
//!
95
+
//! - **Arc-wrapped events**: Events are shared via `Arc` for zero-cost sharing
96
+
//! - **CompactString**: Small strings use inline storage (no heap allocation)
97
+
//! - **Box<str>**: Immutable strings without capacity overhead
98
+
//! - **`serde_json::Value`**: Record payloads are lazily parsed on demand
99
+
//! - **Pre-allocated buffers**: Ack messages avoid per-message allocations
100
+
101
+
#![forbid(unsafe_code)]
102
+
#![warn(missing_docs)]
103
+
104
+
mod client;
105
+
mod config;
106
+
mod connection;
107
+
mod errors;
108
+
mod events;
109
+
mod stream;
110
+
111
+
// Re-export public types
112
+
pub use atproto_identity::model::{Document, Service, VerificationMethod};
113
+
pub use client::{RepoInfo, RepoState, TapClient};
114
+
#[allow(deprecated)]
115
+
pub use client::RepoStatus;
116
+
pub use config::{TapConfig, TapConfigBuilder};
117
+
pub use errors::TapError;
118
+
pub use events::{IdentityEvent, IdentityStatus, RecordAction, RecordEvent, TapEvent, extract_event_id};
119
+
pub use stream::{TapStream, connect, connect_to};
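
The module docs show the stream and management APIs separately; a sketch combining them, using only the calls shown in those docs (`TapClient::new` / `add_repos` / `health` and `connect_to`). The hostname, password, and DID are placeholders.

```rust
use atproto_tap::{TapClient, TapEvent, connect_to};
use tokio_stream::StreamExt;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Register a repository to track via the management API.
    let client = TapClient::new("localhost:2480", Some("password".to_string()));
    client.add_repos(&["did:plc:xyz123"]).await?;

    if !client.health().await? {
        eprintln!("TAP service is not healthy");
    }

    // Consume the event stream; events arrive as Arc<TapEvent>.
    let mut stream = connect_to("localhost:2480");
    while let Some(result) = stream.next().await {
        match result {
            Ok(event) => match event.as_ref() {
                TapEvent::Record { record, .. } => {
                    println!("{} {}/{}", record.action, record.collection, record.rkey)
                }
                TapEvent::Identity { identity, .. } => {
                    println!("identity: {} -> {}", identity.did, identity.handle)
                }
            },
            Err(e) => eprintln!("stream error: {e}"),
        }
    }
    Ok(())
}
```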
+330
crates/atproto-tap/src/stream.rs
···
1
+
//! TAP event stream implementation.
2
+
//!
3
+
//! This module provides [`TapStream`], an async stream that yields TAP events
4
+
//! with automatic connection management and reconnection handling.
5
+
//!
6
+
//! # Design
7
+
//!
8
+
//! The stream encapsulates all connection logic, allowing consumers to simply
9
+
//! iterate over events using standard stream combinators or `tokio::select!`.
10
+
//!
11
+
//! Reconnection is handled automatically with exponential backoff. Parse errors
12
+
//! are yielded as `Err` items but don't affect connection state - only connection
13
+
//! errors trigger reconnection attempts.
14
+
15
+
use crate::config::TapConfig;
16
+
use crate::connection::TapConnection;
17
+
use crate::errors::TapError;
18
+
use crate::events::{TapEvent, extract_event_id};
19
+
use futures::Stream;
20
+
use std::pin::Pin;
21
+
use std::sync::Arc;
22
+
use std::task::{Context, Poll};
23
+
use std::time::Duration;
24
+
use tokio::sync::mpsc;
25
+
26
+
/// An async stream of TAP events with automatic reconnection.
27
+
///
28
+
/// `TapStream` implements [`Stream`] and yields `Result<Arc<TapEvent>, TapError>`.
29
+
/// Events are wrapped in `Arc` for efficient zero-cost sharing across consumers.
30
+
///
31
+
/// # Connection Management
32
+
///
33
+
/// The stream automatically:
34
+
/// - Connects on first poll
35
+
/// - Reconnects with exponential backoff on connection errors
36
+
/// - Sends acknowledgments after parsing each message (if enabled)
37
+
/// - Yields parse errors without affecting connection state
38
+
///
39
+
/// # Example
40
+
///
41
+
/// ```ignore
42
+
/// use atproto_tap::{TapConfig, TapStream};
43
+
/// use tokio_stream::StreamExt;
44
+
///
45
+
/// let config = TapConfig::builder()
46
+
/// .hostname("localhost:2480")
47
+
/// .build();
48
+
///
49
+
/// let mut stream = TapStream::new(config);
50
+
///
51
+
/// while let Some(result) = stream.next().await {
52
+
/// match result {
53
+
/// Ok(event) => println!("Event: {:?}", event),
54
+
/// Err(e) => eprintln!("Error: {}", e),
55
+
/// }
56
+
/// }
57
+
/// ```
58
+
pub struct TapStream {
59
+
/// Receiver for events from the background task.
60
+
receiver: mpsc::Receiver<Result<Arc<TapEvent>, TapError>>,
61
+
/// Handle to request stream closure.
62
+
close_sender: Option<mpsc::Sender<()>>,
63
+
/// Whether the stream has been closed.
64
+
closed: bool,
65
+
}
66
+
67
+
impl TapStream {
68
+
/// Create a new TAP stream with the given configuration.
69
+
///
70
+
/// The stream will start connecting immediately in a background task.
71
+
pub fn new(config: TapConfig) -> Self {
72
+
// Channel for events - buffer a few to handle bursts
73
+
let (event_tx, event_rx) = mpsc::channel(32);
74
+
// Channel for close signal
75
+
let (close_tx, close_rx) = mpsc::channel(1);
76
+
77
+
// Spawn background task to manage connection
78
+
tokio::spawn(connection_task(config, event_tx, close_rx));
79
+
80
+
Self {
81
+
receiver: event_rx,
82
+
close_sender: Some(close_tx),
83
+
closed: false,
84
+
}
85
+
}
86
+
87
+
/// Close the stream and release resources.
88
+
///
89
+
/// After calling this, the stream will yield `None` on the next poll.
90
+
pub async fn close(&mut self) {
91
+
if let Some(sender) = self.close_sender.take() {
92
+
// Signal the background task to close
93
+
let _ = sender.send(()).await;
94
+
}
95
+
self.closed = true;
96
+
}
97
+
98
+
/// Returns true if the stream is closed.
99
+
pub fn is_closed(&self) -> bool {
100
+
self.closed
101
+
}
102
+
}
103
+
104
+
impl Stream for TapStream {
105
+
type Item = Result<Arc<TapEvent>, TapError>;
106
+
107
+
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
108
+
if self.closed {
109
+
return Poll::Ready(None);
110
+
}
111
+
112
+
self.receiver.poll_recv(cx)
113
+
}
114
+
}
115
+
116
+
impl Drop for TapStream {
117
+
fn drop(&mut self) {
118
+
// Drop the close_sender to signal the background task
119
+
self.close_sender.take();
120
+
tracing::debug!("TapStream dropped");
121
+
}
122
+
}
123
+
124
+
/// Background task that manages the WebSocket connection.
125
+
async fn connection_task(
126
+
config: TapConfig,
127
+
event_tx: mpsc::Sender<Result<Arc<TapEvent>, TapError>>,
128
+
mut close_rx: mpsc::Receiver<()>,
129
+
) {
130
+
let mut current_reconnect_delay = config.initial_reconnect_delay;
131
+
let mut attempt: u32 = 0;
132
+
133
+
loop {
134
+
// Check for close signal
135
+
if close_rx.try_recv().is_ok() {
136
+
tracing::debug!("Connection task received close signal");
137
+
break;
138
+
}
139
+
140
+
// Try to connect
141
+
tracing::debug!(attempt, hostname = %config.hostname, "Connecting to TAP service");
142
+
let conn_result = TapConnection::connect(&config).await;
143
+
144
+
match conn_result {
145
+
Ok(mut conn) => {
146
+
tracing::info!(hostname = %config.hostname, "TAP stream connected");
147
+
// Reset reconnection state on successful connect
148
+
current_reconnect_delay = config.initial_reconnect_delay;
149
+
attempt = 0;
150
+
151
+
// Event loop for this connection
152
+
loop {
153
+
tokio::select! {
154
+
biased;
155
+
156
+
_ = close_rx.recv() => {
157
+
tracing::debug!("Connection task received close signal during receive");
158
+
let _ = conn.close().await;
159
+
return;
160
+
}
161
+
162
+
recv_result = conn.recv() => {
163
+
match recv_result {
164
+
Ok(Some(msg)) => {
165
+
// Parse the message
166
+
match serde_json::from_str::<TapEvent>(&msg) {
167
+
Ok(event) => {
168
+
let event_id = event.id();
169
+
170
+
// Send ack if enabled (before sending event to channel)
171
+
if config.send_acks
172
+
&& let Err(err) = conn.send_ack(event_id).await
173
+
{
174
+
tracing::warn!(error = %err, "Failed to send ack");
175
+
// Don't break connection for ack errors
176
+
}
177
+
178
+
// Send event to channel
179
+
let event = Arc::new(event);
180
+
if event_tx.send(Ok(event)).await.is_err() {
181
+
// Receiver dropped, exit task
182
+
tracing::debug!("Event receiver dropped, closing connection");
183
+
let _ = conn.close().await;
184
+
return;
185
+
}
186
+
}
187
+
Err(err) => {
188
+
// Parse errors don't affect connection
189
+
tracing::warn!(error = %err, "Failed to parse TAP message");
190
+
191
+
// Try to extract just the ID using fallback parser
192
+
// so we can still ack the message even if full parsing fails
193
+
if config.send_acks {
194
+
if let Some(event_id) = extract_event_id(&msg) {
195
+
tracing::debug!(event_id, "Extracted event ID via fallback parser");
196
+
if let Err(ack_err) = conn.send_ack(event_id).await {
197
+
tracing::warn!(error = %ack_err, "Failed to send ack for unparseable message");
198
+
}
199
+
} else {
200
+
tracing::warn!("Could not extract event ID from unparseable message");
201
+
}
202
+
}
203
+
204
+
if event_tx.send(Err(TapError::ParseError(err.to_string()))).await.is_err() {
205
+
tracing::debug!("Event receiver dropped, closing connection");
206
+
let _ = conn.close().await;
207
+
return;
208
+
}
209
+
}
210
+
}
211
+
}
212
+
Ok(None) => {
213
+
// Connection closed by server
214
+
tracing::debug!("TAP connection closed by server");
215
+
break;
216
+
}
217
+
Err(err) => {
218
+
// Connection error
219
+
tracing::warn!(error = %err, "TAP connection error");
220
+
break;
221
+
}
222
+
}
223
+
}
224
+
}
225
+
}
226
+
}
227
+
Err(err) => {
228
+
tracing::warn!(error = %err, attempt, "Failed to connect to TAP service");
229
+
}
230
+
}
231
+
232
+
// Increment attempt counter
233
+
attempt += 1;
234
+
235
+
// Check if we've exceeded max attempts
236
+
if let Some(max) = config.max_reconnect_attempts
237
+
&& attempt >= max
238
+
{
239
+
tracing::error!(attempts = attempt, "Max reconnection attempts exceeded");
240
+
let _ = event_tx
241
+
.send(Err(TapError::MaxReconnectAttemptsExceeded(attempt)))
242
+
.await;
243
+
break;
244
+
}
245
+
246
+
// Wait before reconnecting with exponential backoff
247
+
tracing::debug!(
248
+
delay_ms = current_reconnect_delay.as_millis(),
249
+
attempt,
250
+
"Waiting before reconnection"
251
+
);
252
+
253
+
tokio::select! {
254
+
_ = close_rx.recv() => {
255
+
tracing::debug!("Connection task received close signal during backoff");
256
+
return;
257
+
}
258
+
_ = tokio::time::sleep(current_reconnect_delay) => {
259
+
// Update delay for next attempt
260
+
current_reconnect_delay = Duration::from_secs_f64(
261
+
(current_reconnect_delay.as_secs_f64() * config.reconnect_backoff_multiplier)
262
+
.min(config.max_reconnect_delay.as_secs_f64()),
263
+
);
264
+
}
265
+
}
266
+
}
267
+
268
+
tracing::debug!("Connection task exiting");
269
+
}
270
+
271
+
/// Create a new TAP stream with the given configuration.
272
+
pub fn connect(config: TapConfig) -> TapStream {
273
+
TapStream::new(config)
274
+
}
275
+
276
+
/// Create a new TAP stream connected to the given hostname.
277
+
///
278
+
/// Uses default configuration values.
279
+
pub fn connect_to(hostname: &str) -> TapStream {
280
+
TapStream::new(TapConfig::new(hostname))
281
+
}
282
+
283
+
#[cfg(test)]
284
+
mod tests {
285
+
use super::*;
286
+
287
+
#[test]
288
+
fn test_stream_initial_state() {
289
+
// Note: This test doesn't actually poll the stream, just checks initial state
290
+
// Creating a TapStream requires a tokio runtime for the spawn
291
+
}
292
+
293
+
#[tokio::test]
294
+
async fn test_stream_close() {
295
+
let mut stream = TapStream::new(TapConfig::new("localhost:9999"));
296
+
assert!(!stream.is_closed());
297
+
stream.close().await;
298
+
assert!(stream.is_closed());
299
+
}
300
+
301
+
#[test]
302
+
fn test_connect_functions() {
303
+
// These just create configs, actual connection happens in background task
304
+
// We can't test without a runtime, so just verify the types compile
305
+
let _ = TapConfig::new("localhost:2480");
306
+
}
307
+
308
+
#[test]
309
+
fn test_reconnect_delay_calculation() {
310
+
// Test the delay calculation logic
311
+
let initial = Duration::from_secs(1);
312
+
let max = Duration::from_secs(10);
313
+
let multiplier = 2.0;
314
+
315
+
let mut delay = initial;
316
+
assert_eq!(delay, Duration::from_secs(1));
317
+
318
+
delay = Duration::from_secs_f64((delay.as_secs_f64() * multiplier).min(max.as_secs_f64()));
319
+
assert_eq!(delay, Duration::from_secs(2));
320
+
321
+
delay = Duration::from_secs_f64((delay.as_secs_f64() * multiplier).min(max.as_secs_f64()));
322
+
assert_eq!(delay, Duration::from_secs(4));
323
+
324
+
delay = Duration::from_secs_f64((delay.as_secs_f64() * multiplier).min(max.as_secs_f64()));
325
+
assert_eq!(delay, Duration::from_secs(8));
326
+
327
+
delay = Duration::from_secs_f64((delay.as_secs_f64() * multiplier).min(max.as_secs_f64()));
328
+
assert_eq!(delay, Duration::from_secs(10)); // Capped at max
329
+
}
330
+
}
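
The reconnect delay grows by `reconnect_backoff_multiplier` and is capped at `max_reconnect_delay`, exactly as `test_reconnect_delay_calculation` asserts. A small helper sketch isolating that update rule:

```rust
use std::time::Duration;

// Sketch of the backoff update used in connection_task: multiply the current
// delay by the configured multiplier and cap it at the configured maximum.
fn next_reconnect_delay(current: Duration, multiplier: f64, max: Duration) -> Duration {
    Duration::from_secs_f64((current.as_secs_f64() * multiplier).min(max.as_secs_f64()))
}

fn main() {
    let max = Duration::from_secs(10);
    let mut delay = Duration::from_secs(1);
    for _ in 0..5 {
        println!("{delay:?}");
        delay = next_reconnect_delay(delay, 2.0, max);
    }
    // Prints 1s, 2s, 4s, 8s, 10s: the same progression the test asserts.
}
```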
+13
-13
crates/atproto-xrpcs/README.md
···
23
23
### Basic XRPC Service
24
24
25
25
```rust
26
-
use atproto_xrpcs::authorization::ResolvingAuthorization;
26
+
use atproto_xrpcs::authorization::Authorization;
27
27
use axum::{Json, Router, extract::Query, routing::get};
28
28
use serde::Deserialize;
29
29
use serde_json::json;
···
35
35
36
36
async fn handle_hello(
37
37
params: Query<HelloParams>,
38
-
authorization: Option<ResolvingAuthorization>,
38
+
authorization: Option<Authorization>,
39
39
) -> Json<serde_json::Value> {
40
40
let name = params.name.as_deref().unwrap_or("World");
41
-
41
+
42
42
let message = if authorization.is_some() {
43
43
format!("Hello, authenticated {}!", name)
44
44
} else {
45
45
format!("Hello, {}!", name)
46
46
};
47
-
47
+
48
48
Json(json!({ "message": message }))
49
49
}
50
50
···
56
56
### JWT Authorization
57
57
58
58
```rust
59
-
use atproto_xrpcs::authorization::ResolvingAuthorization;
59
+
use atproto_xrpcs::authorization::Authorization;
60
60
61
61
async fn handle_secure_endpoint(
62
-
authorization: ResolvingAuthorization, // Required authorization
62
+
authorization: Authorization, // Required authorization
63
63
) -> Json<serde_json::Value> {
64
-
// The ResolvingAuthorization extractor automatically:
64
+
// The Authorization extractor automatically:
65
65
// 1. Validates the JWT token
66
-
// 2. Resolves the caller's DID document
66
+
// 2. Resolves the caller's DID document
67
67
// 3. Verifies the signature against the DID document
68
68
// 4. Provides access to caller identity information
69
-
69
+
70
70
let caller_did = authorization.subject();
71
71
Json(json!({"caller": caller_did, "status": "authenticated"}))
72
72
}
···
79
79
use axum::{response::IntoResponse, http::StatusCode};
80
80
81
81
async fn protected_handler(
82
-
authorization: Result<ResolvingAuthorization, AuthorizationError>,
82
+
authorization: Result<Authorization, AuthorizationError>,
83
83
) -> impl IntoResponse {
84
84
match authorization {
85
85
Ok(auth) => (StatusCode::OK, "Access granted").into_response(),
86
-
Err(AuthorizationError::InvalidJWTToken { .. }) => {
86
+
Err(AuthorizationError::InvalidJWTFormat) => {
87
87
(StatusCode::UNAUTHORIZED, "Invalid token").into_response()
88
88
}
89
-
Err(AuthorizationError::DIDDocumentResolutionFailed { .. }) => {
89
+
Err(AuthorizationError::SubjectResolutionFailed { .. }) => {
90
90
(StatusCode::FORBIDDEN, "Identity verification failed").into_response()
91
91
}
92
92
Err(_) => {
···
98
98
99
99
## Authorization Flow
100
100
101
-
The `ResolvingAuthorization` extractor implements:
101
+
The `Authorization` extractor implements:
102
102
103
103
1. JWT extraction from HTTP Authorization headers
104
104
2. Token validation (signature and claims structure)
+5
-49
crates/atproto-xrpcs/src/errors.rs
···
42
42
#[error("error-atproto-xrpcs-authorization-4 No issuer found in JWT claims")]
43
43
NoIssuerInClaims,
44
44
45
-
/// Occurs when DID document is not found for the issuer
46
-
#[error("error-atproto-xrpcs-authorization-5 DID document not found for issuer: {issuer}")]
47
-
DIDDocumentNotFound {
48
-
/// The issuer DID that was not found
49
-
issuer: String,
50
-
},
51
-
52
45
/// Occurs when no verification keys are found in DID document
53
-
#[error("error-atproto-xrpcs-authorization-6 No verification keys found in DID document")]
46
+
#[error("error-atproto-xrpcs-authorization-5 No verification keys found in DID document")]
54
47
NoVerificationKeys,
55
48
56
49
/// Occurs when JWT header cannot be base64 decoded
57
-
#[error("error-atproto-xrpcs-authorization-7 Failed to decode JWT header: {error}")]
50
+
#[error("error-atproto-xrpcs-authorization-6 Failed to decode JWT header: {error}")]
58
51
HeaderDecodeError {
59
52
/// The underlying base64 decode error
60
53
error: base64::DecodeError,
61
54
},
62
55
63
56
/// Occurs when JWT header cannot be parsed as JSON
64
-
#[error("error-atproto-xrpcs-authorization-8 Failed to parse JWT header: {error}")]
57
+
#[error("error-atproto-xrpcs-authorization-7 Failed to parse JWT header: {error}")]
65
58
HeaderParseError {
66
59
/// The underlying JSON parse error
67
60
error: serde_json::Error,
68
61
},
69
62
70
63
/// Occurs when JWT validation fails with all available keys
71
-
#[error("error-atproto-xrpcs-authorization-9 JWT validation failed with all available keys")]
64
+
#[error("error-atproto-xrpcs-authorization-8 JWT validation failed with all available keys")]
72
65
ValidationFailedAllKeys,
73
66
74
67
/// Occurs when subject resolution fails during DID document lookup
75
-
#[error("error-atproto-xrpcs-authorization-10 Subject resolution failed: {issuer} {error}")]
68
+
#[error("error-atproto-xrpcs-authorization-9 Subject resolution failed: {issuer} {error}")]
76
69
SubjectResolutionFailed {
77
70
/// The issuer that failed to resolve
78
71
issuer: String,
79
72
/// The underlying resolution error
80
-
error: anyhow::Error,
81
-
},
82
-
83
-
/// Occurs when DID document lookup fails after successful resolution
84
-
#[error(
85
-
"error-atproto-xrpcs-authorization-11 DID document not found for resolved issuer: {resolved_did}"
86
-
)]
87
-
ResolvedDIDDocumentNotFound {
88
-
/// The resolved DID that was not found in storage
89
-
resolved_did: String,
90
-
},
91
-
92
-
/// Occurs when PLC directory query fails
93
-
#[error("error-atproto-xrpcs-authorization-12 PLC directory query failed: {error}")]
94
-
PLCQueryFailed {
95
-
/// The underlying PLC query error
96
-
error: anyhow::Error,
97
-
},
98
-
99
-
/// Occurs when web DID query fails
100
-
#[error("error-atproto-xrpcs-authorization-13 Web DID query failed: {error}")]
101
-
WebDIDQueryFailed {
102
-
/// The underlying web DID query error
103
-
error: anyhow::Error,
104
-
},
105
-
106
-
/// Occurs when DID document storage operation fails
107
-
#[error("error-atproto-xrpcs-authorization-14 DID document storage failed: {error}")]
108
-
DocumentStorageFailed {
109
-
/// The underlying storage error
110
-
error: anyhow::Error,
111
-
},
112
-
113
-
/// Occurs when input parsing fails for resolved DID
114
-
#[error("error-atproto-xrpcs-authorization-15 Input parsing failed for resolved DID: {error}")]
115
-
InputParsingFailed {
116
-
/// The underlying parsing error
117
73
error: anyhow::Error,
118
74
},
119
75
}
+3
-13
crates/atproto-xrpcs-helloworld/src/main.rs
···
7
7
config::{CertificateBundles, DnsNameservers, default_env, optional_env, require_env, version},
8
8
key::{KeyData, KeyResolver, identify_key, to_public},
9
9
resolve::{HickoryDnsResolver, IdentityResolver, InnerIdentityResolver},
10
-
storage_lru::LruDidDocumentStorage,
11
-
traits::DidDocumentStorage,
12
10
};
13
-
use atproto_xrpcs::authorization::ResolvingAuthorization;
11
+
use atproto_xrpcs::authorization::Authorization;
14
12
use axum::{
15
13
Json, Router,
16
14
extract::{FromRef, Query, State},
···
21
19
use http::{HeaderMap, StatusCode};
22
20
use serde::Deserialize;
23
21
use serde_json::json;
24
-
use std::{collections::HashMap, num::NonZeroUsize, ops::Deref, sync::Arc};
22
+
use std::{collections::HashMap, ops::Deref, sync::Arc};
25
23
26
24
#[derive(Clone)]
27
25
pub struct SimpleKeyResolver {
···
61
59
62
60
pub struct InnerWebContext {
63
61
pub http_client: reqwest::Client,
64
-
pub document_storage: Arc<dyn DidDocumentStorage>,
65
62
pub key_resolver: Arc<dyn KeyResolver>,
66
63
pub service_document: ServiceDocument,
67
64
pub service_did: ServiceDID,
···
97
94
}
98
95
}
99
96
100
-
impl FromRef<WebContext> for Arc<dyn DidDocumentStorage> {
101
-
fn from_ref(context: &WebContext) -> Self {
102
-
context.0.document_storage.clone()
103
-
}
104
-
}
105
-
106
97
impl FromRef<WebContext> for Arc<dyn KeyResolver> {
107
98
fn from_ref(context: &WebContext) -> Self {
108
99
context.0.key_resolver.clone()
···
216
207
217
208
let web_context = WebContext(Arc::new(InnerWebContext {
218
209
http_client: http_client.clone(),
219
-
document_storage: Arc::new(LruDidDocumentStorage::new(NonZeroUsize::new(255).unwrap())),
220
210
key_resolver: Arc::new(SimpleKeyResolver {
221
211
keys: signing_key_storage,
222
212
}),
···
284
274
async fn handle_xrpc_hello_world(
285
275
parameters: Query<HelloParameters>,
286
276
headers: HeaderMap,
287
-
authorization: Option<ResolvingAuthorization>,
277
+
authorization: Option<Authorization>,
288
278
) -> Json<serde_json::Value> {
289
279
println!("headers {headers:?}");
290
280
let subject = parameters.subject.as_deref().unwrap_or("World");