+365
-257
Cargo.lock
···
125
125
checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"
126
126
127
127
[[package]]
128
-
name = "async-channel"
129
-
version = "1.9.0"
130
-
source = "registry+https://github.com/rust-lang/crates.io-index"
131
-
checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35"
132
-
dependencies = [
133
-
"concurrent-queue",
134
-
"event-listener 2.5.3",
135
-
"futures-core",
136
-
]
137
-
138
-
[[package]]
139
-
name = "async-channel"
140
-
version = "2.3.1"
141
-
source = "registry+https://github.com/rust-lang/crates.io-index"
142
-
checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a"
143
-
dependencies = [
144
-
"concurrent-queue",
145
-
"event-listener-strategy",
146
-
"futures-core",
147
-
"pin-project-lite",
148
-
]
149
-
150
-
[[package]]
151
128
name = "async-compression"
152
129
version = "0.4.22"
153
130
source = "registry+https://github.com/rust-lang/crates.io-index"
···
161
138
]
162
139
163
140
[[package]]
164
-
name = "async-executor"
165
-
version = "1.13.1"
166
-
source = "registry+https://github.com/rust-lang/crates.io-index"
167
-
checksum = "30ca9a001c1e8ba5149f91a74362376cc6bc5b919d92d988668657bd570bdcec"
168
-
dependencies = [
169
-
"async-task",
170
-
"concurrent-queue",
171
-
"fastrand",
172
-
"futures-lite",
173
-
"slab",
174
-
]
175
-
176
-
[[package]]
177
-
name = "async-global-executor"
178
-
version = "2.4.1"
179
-
source = "registry+https://github.com/rust-lang/crates.io-index"
180
-
checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c"
181
-
dependencies = [
182
-
"async-channel 2.3.1",
183
-
"async-executor",
184
-
"async-io",
185
-
"async-lock",
186
-
"blocking",
187
-
"futures-lite",
188
-
"once_cell",
189
-
]
190
-
191
-
[[package]]
192
-
name = "async-io"
193
-
version = "2.4.0"
194
-
source = "registry+https://github.com/rust-lang/crates.io-index"
195
-
checksum = "43a2b323ccce0a1d90b449fd71f2a06ca7faa7c54c2751f06c9bd851fc061059"
196
-
dependencies = [
197
-
"async-lock",
198
-
"cfg-if",
199
-
"concurrent-queue",
200
-
"futures-io",
201
-
"futures-lite",
202
-
"parking",
203
-
"polling",
204
-
"rustix",
205
-
"slab",
206
-
"tracing",
207
-
"windows-sys 0.59.0",
208
-
]
209
-
210
-
[[package]]
211
-
name = "async-lock"
212
-
version = "3.4.0"
213
-
source = "registry+https://github.com/rust-lang/crates.io-index"
214
-
checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18"
215
-
dependencies = [
216
-
"event-listener 5.4.0",
217
-
"event-listener-strategy",
218
-
"pin-project-lite",
219
-
]
220
-
221
-
[[package]]
222
141
name = "async-recursion"
223
142
version = "1.1.1"
224
143
source = "registry+https://github.com/rust-lang/crates.io-index"
···
228
147
"quote",
229
148
"syn",
230
149
]
231
-
232
-
[[package]]
233
-
name = "async-std"
234
-
version = "1.13.0"
235
-
source = "registry+https://github.com/rust-lang/crates.io-index"
236
-
checksum = "c634475f29802fde2b8f0b505b1bd00dfe4df7d4a000f0b36f7671197d5c3615"
237
-
dependencies = [
238
-
"async-channel 1.9.0",
239
-
"async-global-executor",
240
-
"async-io",
241
-
"async-lock",
242
-
"crossbeam-utils",
243
-
"futures-channel",
244
-
"futures-core",
245
-
"futures-io",
246
-
"futures-lite",
247
-
"gloo-timers",
248
-
"kv-log-macro",
249
-
"log",
250
-
"memchr",
251
-
"once_cell",
252
-
"pin-project-lite",
253
-
"pin-utils",
254
-
"slab",
255
-
"wasm-bindgen-futures",
256
-
]
257
-
258
-
[[package]]
259
-
name = "async-task"
260
-
version = "4.7.1"
261
-
source = "registry+https://github.com/rust-lang/crates.io-index"
262
-
checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de"
263
150
264
151
[[package]]
265
152
name = "async-trait"
···
399
286
]
400
287
401
288
[[package]]
289
+
name = "axum-tracing-opentelemetry"
290
+
version = "0.32.1"
291
+
source = "registry+https://github.com/rust-lang/crates.io-index"
292
+
checksum = "328c8ddd5ca871b2a5acb00be0b4f103aa62f5d6b6db4071ccf3b12b0629e7c1"
293
+
dependencies = [
294
+
"axum",
295
+
"futures-core",
296
+
"futures-util",
297
+
"http",
298
+
"opentelemetry",
299
+
"opentelemetry-semantic-conventions",
300
+
"pin-project-lite",
301
+
"tower",
302
+
"tracing",
303
+
"tracing-opentelemetry",
304
+
"tracing-opentelemetry-instrumentation-sdk",
305
+
]
306
+
307
+
[[package]]
402
308
name = "backtrace"
403
309
version = "0.3.74"
404
310
source = "registry+https://github.com/rust-lang/crates.io-index"
···
460
366
"proc-macro2",
461
367
"quote",
462
368
"regex",
463
-
"rustc-hash",
369
+
"rustc-hash 1.1.0",
464
370
"shlex",
465
371
"syn",
466
372
"which",
···
488
394
]
489
395
490
396
[[package]]
491
-
name = "blocking"
492
-
version = "1.6.1"
493
-
source = "registry+https://github.com/rust-lang/crates.io-index"
494
-
checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea"
495
-
dependencies = [
496
-
"async-channel 2.3.1",
497
-
"async-task",
498
-
"futures-io",
499
-
"futures-lite",
500
-
"piper",
501
-
]
502
-
503
-
[[package]]
504
397
name = "brotli"
505
398
version = "7.0.0"
506
399
source = "registry+https://github.com/rust-lang/crates.io-index"
···
589
482
version = "1.0.0"
590
483
source = "registry+https://github.com/rust-lang/crates.io-index"
591
484
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
485
+
486
+
[[package]]
487
+
name = "cfg_aliases"
488
+
version = "0.2.1"
489
+
source = "registry+https://github.com/rust-lang/crates.io-index"
490
+
checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
592
491
593
492
[[package]]
594
493
name = "chrono"
···
727
626
]
728
627
729
628
[[package]]
730
-
name = "concurrent-queue"
731
-
version = "2.5.0"
732
-
source = "registry+https://github.com/rust-lang/crates.io-index"
733
-
checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973"
734
-
dependencies = [
735
-
"crossbeam-utils",
736
-
]
737
-
738
-
[[package]]
739
629
name = "const-oid"
740
630
version = "0.9.6"
741
631
source = "registry+https://github.com/rust-lang/crates.io-index"
···
968
858
name = "dataloader"
969
859
version = "0.18.0"
970
860
dependencies = [
971
-
"async-std",
972
861
"futures",
973
862
"tokio",
974
863
]
···
1256
1145
]
1257
1146
1258
1147
[[package]]
1259
-
name = "event-listener"
1260
-
version = "2.5.3"
1261
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1262
-
checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0"
1263
-
1264
-
[[package]]
1265
-
name = "event-listener"
1266
-
version = "5.4.0"
1267
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1268
-
checksum = "3492acde4c3fc54c845eaab3eed8bd00c7a7d881f78bfc801e43a93dec1331ae"
1269
-
dependencies = [
1270
-
"concurrent-queue",
1271
-
"parking",
1272
-
"pin-project-lite",
1273
-
]
1274
-
1275
-
[[package]]
1276
-
name = "event-listener-strategy"
1277
-
version = "0.5.3"
1278
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1279
-
checksum = "3c3e4e0dd3673c1139bf041f3008816d9cf2946bbfac2945c09e523b8d7b05b2"
1280
-
dependencies = [
1281
-
"event-listener 5.4.0",
1282
-
"pin-project-lite",
1283
-
]
1284
-
1285
-
[[package]]
1286
1148
name = "eyre"
1287
1149
version = "0.6.12"
1288
1150
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1453
1315
checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"
1454
1316
1455
1317
[[package]]
1456
-
name = "futures-lite"
1457
-
version = "2.6.0"
1458
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1459
-
checksum = "f5edaec856126859abb19ed65f39e90fea3a9574b9707f13539acf4abf7eb532"
1460
-
dependencies = [
1461
-
"fastrand",
1462
-
"futures-core",
1463
-
"futures-io",
1464
-
"parking",
1465
-
"pin-project-lite",
1466
-
]
1467
-
1468
-
[[package]]
1469
1318
name = "futures-macro"
1470
1319
version = "0.3.31"
1471
1320
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1546
1395
checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
1547
1396
dependencies = [
1548
1397
"cfg-if",
1398
+
"js-sys",
1549
1399
"libc",
1550
1400
"r-efi",
1551
1401
"wasi 0.14.2+wasi-0.2.4",
1402
+
"wasm-bindgen",
1552
1403
]
1553
1404
1554
1405
[[package]]
···
1564
1415
checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2"
1565
1416
1566
1417
[[package]]
1567
-
name = "gloo-timers"
1568
-
version = "0.3.0"
1569
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1570
-
checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994"
1571
-
dependencies = [
1572
-
"futures-channel",
1573
-
"futures-core",
1574
-
"js-sys",
1575
-
"wasm-bindgen",
1576
-
]
1577
-
1578
-
[[package]]
1579
1418
name = "group"
1580
1419
version = "0.13.0"
1581
1420
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1665
1504
version = "0.3.9"
1666
1505
source = "registry+https://github.com/rust-lang/crates.io-index"
1667
1506
checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024"
1668
-
1669
-
[[package]]
1670
-
name = "hermit-abi"
1671
-
version = "0.4.0"
1672
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1673
-
checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc"
1674
1507
1675
1508
[[package]]
1676
1509
name = "hex"
···
2248
2081
]
2249
2082
2250
2083
[[package]]
2251
-
name = "kv-log-macro"
2252
-
version = "1.0.7"
2253
-
source = "registry+https://github.com/rust-lang/crates.io-index"
2254
-
checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f"
2255
-
dependencies = [
2256
-
"log",
2257
-
]
2258
-
2259
-
[[package]]
2260
2084
name = "lazy_static"
2261
2085
version = "1.5.0"
2262
2086
source = "registry+https://github.com/rust-lang/crates.io-index"
···
2361
2185
version = "0.4.25"
2362
2186
source = "registry+https://github.com/rust-lang/crates.io-index"
2363
2187
checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f"
2364
-
dependencies = [
2365
-
"value-bag",
2366
-
]
2367
2188
2368
2189
[[package]]
2369
2190
name = "lru-cache"
···
2373
2194
dependencies = [
2374
2195
"linked-hash-map",
2375
2196
]
2197
+
2198
+
[[package]]
2199
+
name = "lru-slab"
2200
+
version = "0.1.2"
2201
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2202
+
checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154"
2376
2203
2377
2204
[[package]]
2378
2205
name = "lz4-sys"
···
2389
2216
version = "0.1.0"
2390
2217
source = "registry+https://github.com/rust-lang/crates.io-index"
2391
2218
checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4"
2219
+
2220
+
[[package]]
2221
+
name = "matchers"
2222
+
version = "0.1.0"
2223
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2224
+
checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
2225
+
dependencies = [
2226
+
"regex-automata 0.1.10",
2227
+
]
2392
2228
2393
2229
[[package]]
2394
2230
name = "matchit"
···
2655
2491
source = "registry+https://github.com/rust-lang/crates.io-index"
2656
2492
checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
2657
2493
dependencies = [
2658
-
"hermit-abi 0.3.9",
2494
+
"hermit-abi",
2659
2495
"libc",
2660
2496
]
2661
2497
···
2719
2555
]
2720
2556
2721
2557
[[package]]
2558
+
name = "opentelemetry"
2559
+
version = "0.31.0"
2560
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2561
+
checksum = "b84bcd6ae87133e903af7ef497404dda70c60d0ea14895fc8a5e6722754fc2a0"
2562
+
dependencies = [
2563
+
"futures-core",
2564
+
"futures-sink",
2565
+
"js-sys",
2566
+
"pin-project-lite",
2567
+
"thiserror 2.0.12",
2568
+
"tracing",
2569
+
]
2570
+
2571
+
[[package]]
2572
+
name = "opentelemetry-http"
2573
+
version = "0.31.0"
2574
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2575
+
checksum = "d7a6d09a73194e6b66df7c8f1b680f156d916a1a942abf2de06823dd02b7855d"
2576
+
dependencies = [
2577
+
"async-trait",
2578
+
"bytes",
2579
+
"http",
2580
+
"opentelemetry",
2581
+
"reqwest",
2582
+
]
2583
+
2584
+
[[package]]
2585
+
name = "opentelemetry-otlp"
2586
+
version = "0.31.0"
2587
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2588
+
checksum = "7a2366db2dca4d2ad033cad11e6ee42844fd727007af5ad04a1730f4cb8163bf"
2589
+
dependencies = [
2590
+
"http",
2591
+
"opentelemetry",
2592
+
"opentelemetry-http",
2593
+
"opentelemetry-proto",
2594
+
"opentelemetry_sdk",
2595
+
"prost 0.14.1",
2596
+
"reqwest",
2597
+
"thiserror 2.0.12",
2598
+
"tracing",
2599
+
]
2600
+
2601
+
[[package]]
2602
+
name = "opentelemetry-proto"
2603
+
version = "0.31.0"
2604
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2605
+
checksum = "a7175df06de5eaee9909d4805a3d07e28bb752c34cab57fa9cff549da596b30f"
2606
+
dependencies = [
2607
+
"opentelemetry",
2608
+
"opentelemetry_sdk",
2609
+
"prost 0.14.1",
2610
+
"tonic 0.14.2",
2611
+
"tonic-prost",
2612
+
]
2613
+
2614
+
[[package]]
2615
+
name = "opentelemetry-semantic-conventions"
2616
+
version = "0.31.0"
2617
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2618
+
checksum = "e62e29dfe041afb8ed2a6c9737ab57db4907285d999ef8ad3a59092a36bdc846"
2619
+
2620
+
[[package]]
2621
+
name = "opentelemetry_sdk"
2622
+
version = "0.31.0"
2623
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2624
+
checksum = "e14ae4f5991976fd48df6d843de219ca6d31b01daaab2dad5af2badeded372bd"
2625
+
dependencies = [
2626
+
"futures-channel",
2627
+
"futures-executor",
2628
+
"futures-util",
2629
+
"opentelemetry",
2630
+
"percent-encoding",
2631
+
"rand 0.9.1",
2632
+
"thiserror 2.0.12",
2633
+
]
2634
+
2635
+
[[package]]
2722
2636
name = "overload"
2723
2637
version = "0.1.1"
2724
2638
source = "registry+https://github.com/rust-lang/crates.io-index"
···
2755
2669
"async-recursion",
2756
2670
"axum",
2757
2671
"axum-extra",
2672
+
"axum-tracing-opentelemetry",
2758
2673
"base64 0.22.1",
2759
2674
"chrono",
2760
2675
"dataloader",
···
2769
2684
"jsonwebtoken",
2770
2685
"lexica",
2771
2686
"multibase",
2687
+
"opentelemetry",
2688
+
"opentelemetry-otlp",
2689
+
"opentelemetry_sdk",
2772
2690
"parakeet-db",
2773
2691
"parakeet-index",
2774
2692
"redis",
···
2777
2695
"serde_ipld_dagcbor",
2778
2696
"serde_json",
2779
2697
"tokio",
2698
+
"tower",
2780
2699
"tower-http",
2781
2700
"tracing",
2701
+
"tracing-opentelemetry",
2782
2702
"tracing-subscriber",
2783
2703
]
2784
2704
···
2800
2720
"eyre",
2801
2721
"figment",
2802
2722
"itertools 0.14.0",
2803
-
"prost",
2723
+
"opentelemetry",
2724
+
"opentelemetry-otlp",
2725
+
"opentelemetry_sdk",
2726
+
"prost 0.13.5",
2804
2727
"rocksdb",
2805
2728
"serde",
2806
2729
"tokio",
2807
-
"tonic",
2730
+
"tonic 0.13.1",
2808
2731
"tonic-build",
2809
2732
"tonic-health",
2733
+
"tonic-tracing-opentelemetry",
2734
+
"tower",
2810
2735
"tracing",
2736
+
"tracing-opentelemetry",
2811
2737
"tracing-subscriber",
2812
2738
]
2813
2739
···
2824
2750
]
2825
2751
2826
2752
[[package]]
2827
-
name = "parking"
2828
-
version = "2.2.1"
2829
-
source = "registry+https://github.com/rust-lang/crates.io-index"
2830
-
checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba"
2831
-
2832
-
[[package]]
2833
2753
name = "parking_lot"
2834
2754
version = "0.11.2"
2835
2755
source = "registry+https://github.com/rust-lang/crates.io-index"
···
2992
2912
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
2993
2913
2994
2914
[[package]]
2995
-
name = "piper"
2996
-
version = "0.2.4"
2997
-
source = "registry+https://github.com/rust-lang/crates.io-index"
2998
-
checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066"
2999
-
dependencies = [
3000
-
"atomic-waker",
3001
-
"fastrand",
3002
-
"futures-io",
3003
-
]
3004
-
3005
-
[[package]]
3006
2915
name = "pkcs1"
3007
2916
version = "0.7.5"
3008
2917
source = "registry+https://github.com/rust-lang/crates.io-index"
···
3028
2937
version = "0.3.31"
3029
2938
source = "registry+https://github.com/rust-lang/crates.io-index"
3030
2939
checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2"
3031
-
3032
-
[[package]]
3033
-
name = "polling"
3034
-
version = "3.7.4"
3035
-
source = "registry+https://github.com/rust-lang/crates.io-index"
3036
-
checksum = "a604568c3202727d1507653cb121dbd627a58684eb09a820fd746bee38b4442f"
3037
-
dependencies = [
3038
-
"cfg-if",
3039
-
"concurrent-queue",
3040
-
"hermit-abi 0.4.0",
3041
-
"pin-project-lite",
3042
-
"rustix",
3043
-
"tracing",
3044
-
"windows-sys 0.59.0",
3045
-
]
3046
2940
3047
2941
[[package]]
3048
2942
name = "portable-atomic"
···
3145
3039
checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5"
3146
3040
dependencies = [
3147
3041
"bytes",
3148
-
"prost-derive",
3042
+
"prost-derive 0.13.5",
3043
+
]
3044
+
3045
+
[[package]]
3046
+
name = "prost"
3047
+
version = "0.14.1"
3048
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3049
+
checksum = "7231bd9b3d3d33c86b58adbac74b5ec0ad9f496b19d22801d773636feaa95f3d"
3050
+
dependencies = [
3051
+
"bytes",
3052
+
"prost-derive 0.14.1",
3149
3053
]
3150
3054
3151
3055
[[package]]
···
3161
3065
"once_cell",
3162
3066
"petgraph",
3163
3067
"prettyplease",
3164
-
"prost",
3068
+
"prost 0.13.5",
3165
3069
"prost-types",
3166
3070
"regex",
3167
3071
"syn",
···
3182
3086
]
3183
3087
3184
3088
[[package]]
3089
+
name = "prost-derive"
3090
+
version = "0.14.1"
3091
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3092
+
checksum = "9120690fafc389a67ba3803df527d0ec9cbbc9cc45e4cc20b332996dfb672425"
3093
+
dependencies = [
3094
+
"anyhow",
3095
+
"itertools 0.14.0",
3096
+
"proc-macro2",
3097
+
"quote",
3098
+
"syn",
3099
+
]
3100
+
3101
+
[[package]]
3185
3102
name = "prost-types"
3186
3103
version = "0.13.5"
3187
3104
source = "registry+https://github.com/rust-lang/crates.io-index"
3188
3105
checksum = "52c2c1bf36ddb1a1c396b3601a3cec27c2462e45f07c386894ec3ccf5332bd16"
3189
3106
dependencies = [
3190
-
"prost",
3107
+
"prost 0.13.5",
3191
3108
]
3192
3109
3193
3110
[[package]]
···
3210
3127
version = "1.2.3"
3211
3128
source = "registry+https://github.com/rust-lang/crates.io-index"
3212
3129
checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
3130
+
3131
+
[[package]]
3132
+
name = "quinn"
3133
+
version = "0.11.9"
3134
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3135
+
checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20"
3136
+
dependencies = [
3137
+
"bytes",
3138
+
"cfg_aliases",
3139
+
"pin-project-lite",
3140
+
"quinn-proto",
3141
+
"quinn-udp",
3142
+
"rustc-hash 2.1.1",
3143
+
"rustls",
3144
+
"socket2 0.6.0",
3145
+
"thiserror 2.0.12",
3146
+
"tokio",
3147
+
"tracing",
3148
+
"web-time",
3149
+
]
3150
+
3151
+
[[package]]
3152
+
name = "quinn-proto"
3153
+
version = "0.11.13"
3154
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3155
+
checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31"
3156
+
dependencies = [
3157
+
"bytes",
3158
+
"getrandom 0.3.3",
3159
+
"lru-slab",
3160
+
"rand 0.9.1",
3161
+
"ring",
3162
+
"rustc-hash 2.1.1",
3163
+
"rustls",
3164
+
"rustls-pki-types",
3165
+
"slab",
3166
+
"thiserror 2.0.12",
3167
+
"tinyvec",
3168
+
"tracing",
3169
+
"web-time",
3170
+
]
3171
+
3172
+
[[package]]
3173
+
name = "quinn-udp"
3174
+
version = "0.5.14"
3175
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3176
+
checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd"
3177
+
dependencies = [
3178
+
"cfg_aliases",
3179
+
"libc",
3180
+
"once_cell",
3181
+
"socket2 0.6.0",
3182
+
"tracing",
3183
+
"windows-sys 0.59.0",
3184
+
]
3213
3185
3214
3186
[[package]]
3215
3187
name = "quote"
···
3373
3345
dependencies = [
3374
3346
"aho-corasick",
3375
3347
"memchr",
3376
-
"regex-automata",
3377
-
"regex-syntax",
3348
+
"regex-automata 0.4.9",
3349
+
"regex-syntax 0.8.5",
3350
+
]
3351
+
3352
+
[[package]]
3353
+
name = "regex-automata"
3354
+
version = "0.1.10"
3355
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3356
+
checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
3357
+
dependencies = [
3358
+
"regex-syntax 0.6.29",
3378
3359
]
3379
3360
3380
3361
[[package]]
···
3385
3366
dependencies = [
3386
3367
"aho-corasick",
3387
3368
"memchr",
3388
-
"regex-syntax",
3369
+
"regex-syntax 0.8.5",
3389
3370
]
3371
+
3372
+
[[package]]
3373
+
name = "regex-syntax"
3374
+
version = "0.6.29"
3375
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3376
+
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
3390
3377
3391
3378
[[package]]
3392
3379
name = "regex-syntax"
···
3404
3391
"base64 0.22.1",
3405
3392
"bytes",
3406
3393
"encoding_rs",
3394
+
"futures-channel",
3407
3395
"futures-core",
3408
3396
"futures-util",
3409
3397
"h2",
···
3422
3410
"once_cell",
3423
3411
"percent-encoding",
3424
3412
"pin-project-lite",
3413
+
"quinn",
3414
+
"rustls",
3415
+
"rustls-native-certs",
3425
3416
"rustls-pemfile",
3417
+
"rustls-pki-types",
3426
3418
"serde",
3427
3419
"serde_json",
3428
3420
"serde_urlencoded",
···
3430
3422
"system-configuration",
3431
3423
"tokio",
3432
3424
"tokio-native-tls",
3425
+
"tokio-rustls",
3433
3426
"tokio-util",
3434
3427
"tower",
3435
3428
"tower-service",
···
3519
3512
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
3520
3513
3521
3514
[[package]]
3515
+
name = "rustc-hash"
3516
+
version = "2.1.1"
3517
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3518
+
checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
3519
+
3520
+
[[package]]
3522
3521
name = "rustc_version"
3523
3522
version = "0.4.1"
3524
3523
source = "registry+https://github.com/rust-lang/crates.io-index"
···
3548
3547
dependencies = [
3549
3548
"aws-lc-rs",
3550
3549
"once_cell",
3550
+
"ring",
3551
3551
"rustls-pki-types",
3552
3552
"rustls-webpki",
3553
3553
"subtle",
···
3580
3580
version = "1.11.0"
3581
3581
source = "registry+https://github.com/rust-lang/crates.io-index"
3582
3582
checksum = "917ce264624a4b4db1c364dcc35bfca9ded014d0a958cd47ad3e960e988ea51c"
3583
+
dependencies = [
3584
+
"web-time",
3585
+
]
3583
3586
3584
3587
[[package]]
3585
3588
name = "rustls-webpki"
···
4423
4426
"hyper-util",
4424
4427
"percent-encoding",
4425
4428
"pin-project",
4426
-
"prost",
4429
+
"prost 0.13.5",
4427
4430
"socket2 0.5.8",
4428
4431
"tokio",
4429
4432
"tokio-stream",
···
4434
4437
]
4435
4438
4436
4439
[[package]]
4440
+
name = "tonic"
4441
+
version = "0.14.2"
4442
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4443
+
checksum = "eb7613188ce9f7df5bfe185db26c5814347d110db17920415cf2fbcad85e7203"
4444
+
dependencies = [
4445
+
"async-trait",
4446
+
"base64 0.22.1",
4447
+
"bytes",
4448
+
"http",
4449
+
"http-body",
4450
+
"http-body-util",
4451
+
"percent-encoding",
4452
+
"pin-project",
4453
+
"sync_wrapper",
4454
+
"tokio-stream",
4455
+
"tower-layer",
4456
+
"tower-service",
4457
+
"tracing",
4458
+
]
4459
+
4460
+
[[package]]
4437
4461
name = "tonic-build"
4438
4462
version = "0.13.0"
4439
4463
source = "registry+https://github.com/rust-lang/crates.io-index"
···
4453
4477
source = "registry+https://github.com/rust-lang/crates.io-index"
4454
4478
checksum = "cb87334d340313fefa513b6e60794d44a86d5f039b523229c99c323e4e19ca4b"
4455
4479
dependencies = [
4456
-
"prost",
4480
+
"prost 0.13.5",
4457
4481
"tokio",
4458
4482
"tokio-stream",
4459
-
"tonic",
4483
+
"tonic 0.13.1",
4484
+
]
4485
+
4486
+
[[package]]
4487
+
name = "tonic-prost"
4488
+
version = "0.14.2"
4489
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4490
+
checksum = "66bd50ad6ce1252d87ef024b3d64fe4c3cf54a86fb9ef4c631fdd0ded7aeaa67"
4491
+
dependencies = [
4492
+
"bytes",
4493
+
"prost 0.14.1",
4494
+
"tonic 0.14.2",
4495
+
]
4496
+
4497
+
[[package]]
4498
+
name = "tonic-tracing-opentelemetry"
4499
+
version = "0.32.0"
4500
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4501
+
checksum = "31f57ac46b32b08989476b498239364c300b09d75928c1fa2e46cb489a41c8e3"
4502
+
dependencies = [
4503
+
"futures-core",
4504
+
"futures-util",
4505
+
"http",
4506
+
"http-body",
4507
+
"hyper",
4508
+
"opentelemetry",
4509
+
"pin-project-lite",
4510
+
"tonic 0.14.2",
4511
+
"tower",
4512
+
"tracing",
4513
+
"tracing-opentelemetry",
4514
+
"tracing-opentelemetry-instrumentation-sdk",
4460
4515
]
4461
4516
4462
4517
[[package]]
···
4551
4606
]
4552
4607
4553
4608
[[package]]
4609
+
name = "tracing-opentelemetry"
4610
+
version = "0.32.0"
4611
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4612
+
checksum = "1e6e5658463dd88089aba75c7791e1d3120633b1bfde22478b28f625a9bb1b8e"
4613
+
dependencies = [
4614
+
"js-sys",
4615
+
"opentelemetry",
4616
+
"opentelemetry_sdk",
4617
+
"rustversion",
4618
+
"smallvec",
4619
+
"thiserror 2.0.12",
4620
+
"tracing",
4621
+
"tracing-core",
4622
+
"tracing-log",
4623
+
"tracing-subscriber",
4624
+
"web-time",
4625
+
]
4626
+
4627
+
[[package]]
4628
+
name = "tracing-opentelemetry-instrumentation-sdk"
4629
+
version = "0.32.1"
4630
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4631
+
checksum = "7a1a4dcfb798af2cef9e47c30a14e13c108b4b40e057120401b2025ec622c416"
4632
+
dependencies = [
4633
+
"http",
4634
+
"opentelemetry",
4635
+
"opentelemetry-semantic-conventions",
4636
+
"tracing",
4637
+
"tracing-opentelemetry",
4638
+
]
4639
+
4640
+
[[package]]
4641
+
name = "tracing-serde"
4642
+
version = "0.2.0"
4643
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4644
+
checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1"
4645
+
dependencies = [
4646
+
"serde",
4647
+
"tracing-core",
4648
+
]
4649
+
4650
+
[[package]]
4554
4651
name = "tracing-subscriber"
4555
4652
version = "0.3.19"
4556
4653
source = "registry+https://github.com/rust-lang/crates.io-index"
4557
4654
checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
4558
4655
dependencies = [
4656
+
"matchers",
4559
4657
"nu-ansi-term",
4658
+
"once_cell",
4659
+
"regex",
4660
+
"serde",
4661
+
"serde_json",
4560
4662
"sharded-slab",
4561
4663
"smallvec",
4562
4664
"thread_local",
4665
+
"tracing",
4563
4666
"tracing-core",
4564
4667
"tracing-log",
4668
+
"tracing-serde",
4565
4669
]
4566
4670
4567
4671
[[package]]
···
4690
4794
checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
4691
4795
4692
4796
[[package]]
4693
-
name = "value-bag"
4694
-
version = "1.10.0"
4695
-
source = "registry+https://github.com/rust-lang/crates.io-index"
4696
-
checksum = "3ef4c4aa54d5d05a279399bfa921ec387b7aba77caf7a682ae8d86785b8fdad2"
4697
-
4698
-
[[package]]
4699
4797
name = "vcpkg"
4700
4798
version = "0.2.15"
4701
4799
source = "registry+https://github.com/rust-lang/crates.io-index"
···
4836
4934
version = "0.3.77"
4837
4935
source = "registry+https://github.com/rust-lang/crates.io-index"
4838
4936
checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2"
4937
+
dependencies = [
4938
+
"js-sys",
4939
+
"wasm-bindgen",
4940
+
]
4941
+
4942
+
[[package]]
4943
+
name = "web-time"
4944
+
version = "1.1.0"
4945
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4946
+
checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb"
4839
4947
dependencies = [
4840
4948
"js-sys",
4841
4949
"wasm-bindgen",
+1
-1
consumer/Cargo.toml
+3
-3
consumer/src/backfill/downloader.rs
···
84
84
85
85
// has the repo already been downloaded?
86
86
if rc.sismember(DL_DUP_KEY, &did).await.unwrap_or_default() {
87
-
tracing::warn!("skipping duplicate repo {did}");
87
+
tracing::info!("skipping duplicate repo {did}");
88
88
continue;
89
89
}
90
90
···
92
92
match db::actor_get_statuses(&mut conn, &did).await {
93
93
Ok(Some((_, state))) => {
94
94
if state == ActorSyncState::Synced || state == ActorSyncState::Processing {
95
-
tracing::warn!("skipping duplicate repo {did}");
95
+
tracing::info!("skipping duplicate repo {did}");
96
96
continue;
97
97
}
98
98
}
···
206
206
let _ = rc.zadd(BF_REM_KEY, &pds, rem).await;
207
207
let _ = rc.zadd(BF_RESET_KEY, &pds, reset).await;
208
208
}
209
-
Ok(_) => tracing::warn!(pds, "got response with no ratelimit headers."),
209
+
Ok(_) => tracing::debug!(pds, "got response with no ratelimit headers."),
210
210
Err(e) => {
211
211
tracing::error!(pds, did, "failed to download repo: {e}");
212
212
continue;
+6
-1
consumer/src/backfill/mod.rs
···
275
275
follows: Vec<(String, String, DateTime<Utc>)>,
276
276
list_items: Vec<(String, records::AppBskyGraphListItem)>,
277
277
verifications: Vec<(String, Cid, records::AppBskyGraphVerification)>,
278
+
threadgates: Vec<(String, Cid, records::AppBskyFeedThreadgate)>, // not COPY'd but needs to be kept until last.
278
279
records: Vec<(String, Cid)>,
279
280
}
280
281
281
282
impl CopyStore {
282
283
async fn submit(self, t: &mut Transaction<'_>, did: &str) -> Result<(), tokio_postgres::Error> {
283
284
db::copy::copy_likes(t, did, self.likes).await?;
284
-
db::copy::copy_posts(t, did, self.posts).await?;
285
285
db::copy::copy_reposts(t, did, self.reposts).await?;
286
286
db::copy::copy_blocks(t, did, self.blocks).await?;
287
287
db::copy::copy_follows(t, did, self.follows).await?;
288
288
db::copy::copy_list_items(t, self.list_items).await?;
289
289
db::copy::copy_verification(t, did, self.verifications).await?;
290
+
db::copy::copy_posts(t, did, self.posts).await?;
291
+
for (at_uri, cid, record) in self.threadgates {
292
+
db::threadgate_enforce_backfill(t, did, &record).await?;
293
+
db::threadgate_upsert(t, &at_uri, cid, record).await?;
294
+
}
290
295
db::copy::copy_records(t, did, self.records).await?;
291
296
292
297
Ok(())
+14
-5
consumer/src/backfill/repo.rs
···
4
4
};
5
5
use crate::indexer::records;
6
6
use crate::indexer::types::{AggregateDeltaStore, RecordTypes};
7
+
use crate::utils::at_uri_is_by;
7
8
use crate::{db, indexer};
8
9
use deadpool_postgres::Transaction;
9
10
use ipld_core::cid::Cid;
···
52
53
53
54
match block {
54
55
CarEntry::Commit(_) => {
55
-
tracing::warn!("got commit entry that was not in root")
56
+
tracing::debug!("got commit entry that was not in root")
56
57
}
57
58
CarEntry::Record(CarRecordEntry::Known(record)) => {
58
59
if let Some(path) = mst_nodes.remove(&cid) {
···
95
96
}
96
97
}
97
98
98
-
let commit = commit.unwrap();
99
+
let Some(commit) = commit else {
100
+
eyre::bail!("repo contained no commit?");
101
+
};
99
102
100
103
Ok((commit, deltas, copies))
101
104
}
···
144
147
db::maintain_self_labels(t, did, Some(cid), &at_uri, labels).await?;
145
148
}
146
149
if let Some(embed) = rec.embed.clone().and_then(|embed| embed.into_bsky()) {
147
-
db::post_embed_insert(t, &at_uri, embed, rec.created_at).await?;
150
+
db::post_embed_insert(t, &at_uri, embed, rec.created_at, true).await?;
148
151
}
149
152
150
153
deltas.incr(did, AggregateType::ProfilePost).await;
···
166
169
.reposts
167
170
.push((rkey.to_string(), rec.subject, rec.via, rec.created_at));
168
171
}
172
+
RecordTypes::AppBskyFeedThreadgate(record) => {
173
+
if !at_uri_is_by(&record.post, did) {
174
+
return Ok(());
175
+
}
176
+
177
+
copies.push_record(&at_uri, cid);
178
+
copies.threadgates.push((at_uri, cid, record));
179
+
}
169
180
RecordTypes::AppBskyGraphBlock(rec) => {
170
181
copies.push_record(&at_uri, cid);
171
182
copies
···
184
195
RecordTypes::AppBskyGraphListItem(rec) => {
185
196
let split_aturi = rec.list.rsplitn(4, '/').collect::<Vec<_>>();
186
197
if did != split_aturi[2] {
187
-
// it's also probably a bad idea to log *all* the attempts to do this...
188
-
tracing::warn!("tried to create a listitem on a list we don't control!");
189
198
return Ok(());
190
199
}
191
200
+8
consumer/src/config.rs
···
13
13
14
14
#[derive(Debug, Deserialize)]
15
15
pub struct Config {
16
+
#[serde(flatten)]
17
+
pub instruments: ConfigInstruments,
16
18
pub index_uri: String,
17
19
pub database: deadpool_postgres::Config,
18
20
pub redis_uri: String,
···
27
29
pub indexer: Option<IndexerConfig>,
28
30
/// Configuration items specific to backfill
29
31
pub backfill: Option<BackfillConfig>,
32
+
}
33
+
34
+
#[derive(Debug, Deserialize)]
35
+
pub struct ConfigInstruments {
36
+
#[serde(default)]
37
+
pub log_json: bool,
30
38
}
31
39
32
40
#[derive(Debug, Deserialize)]
+3
-3
consumer/src/db/actor.rs
···
69
69
)
70
70
.await?;
71
71
72
-
Ok(res.map(|v| (v.get(0), v.get(1))))
72
+
res.map(|v| Ok((v.try_get(0)?, v.try_get(1)?))).transpose()
73
73
}
74
74
75
75
pub async fn actor_get_repo_status<C: GenericClient>(
···
83
83
)
84
84
.await?;
85
85
86
-
Ok(res.map(|v| (v.get(0), v.get(1))))
86
+
res.map(|v| Ok((v.try_get(0)?, v.try_get(1)?))).transpose()
87
87
}
88
88
89
89
pub async fn actor_get_statuses<C: GenericClient>(
···
97
97
)
98
98
.await?;
99
99
100
-
Ok(res.map(|v| (v.get(0), v.get(1))))
100
+
res.map(|v| Ok((v.try_get(0)?, v.try_get(1)?))).transpose()
101
101
}
+10
-9
consumer/src/db/backfill.rs
···
51
51
)
52
52
.await?;
53
53
54
-
Ok(res
55
-
.into_iter()
56
-
.map(|row| BackfillRow {
57
-
repo: row.get(0),
58
-
repo_ver: row.get(1),
59
-
cid: row.get(2),
60
-
data: row.get(3),
61
-
indexed_at: row.get(4),
54
+
res.into_iter()
55
+
.map(|row| {
56
+
Ok(BackfillRow {
57
+
repo: row.try_get(0)?,
58
+
repo_ver: row.try_get(1)?,
59
+
cid: row.try_get(2)?,
60
+
data: row.try_get(3)?,
61
+
indexed_at: row.try_get(4)?,
62
+
})
62
63
})
63
-
.collect())
64
+
.collect()
64
65
}
65
66
66
67
pub async fn backfill_delete_rows<C: GenericClient>(conn: &mut C, repo: &str) -> PgExecResult {
+40
-10
consumer/src/db/copy.rs
···
1
1
use super::PgExecResult;
2
2
use crate::indexer::records;
3
-
use crate::utils::strongref_to_parts;
3
+
use crate::utils::{extract_mentions_and_tags, merge_tags, strongref_to_parts};
4
4
use chrono::prelude::*;
5
5
use deadpool_postgres::Transaction;
6
6
use futures::pin_mut;
7
7
use ipld_core::cid::Cid;
8
+
use lexica::StrongRef;
8
9
use tokio_postgres::binary_copy::BinaryCopyInWriter;
9
10
use tokio_postgres::types::Type;
10
-
use lexica::StrongRef;
11
11
12
12
// StrongRefs are used in both likes and reposts
13
13
const STRONGREF_TYPES: &[Type] = &[
···
19
19
Type::TEXT,
20
20
Type::TIMESTAMP,
21
21
];
22
-
type StrongRefRow = (
23
-
String,
24
-
StrongRef,
25
-
Option<StrongRef>,
26
-
DateTime<Utc>,
27
-
);
22
+
type StrongRefRow = (String, StrongRef, Option<StrongRef>, DateTime<Utc>);
28
23
29
24
// SubjectRefs are used in both blocks and follows
30
25
const SUBJECT_TYPES: &[Type] = &[Type::TEXT, Type::TEXT, Type::TEXT, Type::TIMESTAMP];
···
124
119
.await
125
120
}
126
121
127
-
const POST_STMT: &str = "COPY posts_tmp (at_uri, cid, did, record, content, facets, languages, tags, parent_uri, parent_cid, root_uri, root_cid, embed, embed_subtype, created_at) FROM STDIN (FORMAT binary)";
122
+
const POST_STMT: &str = "COPY posts_tmp (at_uri, cid, did, record, content, facets, languages, tags, parent_uri, parent_cid, root_uri, root_cid, embed, embed_subtype, mentions, created_at) FROM STDIN (FORMAT binary)";
128
123
const POST_TYPES: &[Type] = &[
129
124
Type::TEXT,
130
125
Type::TEXT,
···
140
135
Type::TEXT,
141
136
Type::TEXT,
142
137
Type::TEXT,
138
+
Type::TEXT_ARRAY,
143
139
Type::TIMESTAMP,
144
140
];
145
141
pub async fn copy_posts(
···
164
160
165
161
for (at_uri, cid, post) in data {
166
162
let record = serde_json::to_value(&post).unwrap();
163
+
let (mentions, tags) = post
164
+
.facets
165
+
.as_ref()
166
+
.map(|v| extract_mentions_and_tags(v))
167
+
.unzip();
167
168
let facets = post.facets.and_then(|v| serde_json::to_value(v).ok());
168
169
let embed = post.embed.as_ref().map(|v| v.as_str());
169
170
let embed_subtype = post.embed.as_ref().and_then(|v| v.subtype());
170
171
let (parent_uri, parent_cid) = strongref_to_parts(post.reply.as_ref().map(|v| &v.parent));
171
172
let (root_uri, root_cid) = strongref_to_parts(post.reply.as_ref().map(|v| &v.root));
173
+
174
+
let tags = merge_tags(tags, post.tags);
172
175
173
176
let writer = writer.as_mut();
174
177
writer
···
180
183
&post.text,
181
184
&facets,
182
185
&post.langs.unwrap_or_default(),
183
-
&post.tags.unwrap_or_default(),
186
+
&tags,
184
187
&parent_uri,
185
188
&parent_cid,
186
189
&root_uri,
187
190
&root_cid,
188
191
&embed,
189
192
&embed_subtype,
193
+
&mentions,
190
194
&post.created_at.naive_utc(),
191
195
])
192
196
.await?;
193
197
}
194
198
195
199
writer.finish().await?;
200
+
201
+
let threadgated: Vec<(String, String, DateTime<Utc>)> = conn
202
+
.query(
203
+
"SELECT root_uri, p.at_uri, p.created_at FROM posts_tmp p INNER JOIN threadgates t ON root_uri = post_uri WHERE t.allow IS NOT NULL",
204
+
&[],
205
+
)
206
+
.await?
207
+
.into_iter()
208
+
.map(|v| Ok((v.try_get(0)?, v.try_get(1)?, v.try_get(2)?))).collect::<Result<_, _>>()?;
209
+
210
+
for (root, post, created_at) in threadgated {
211
+
match super::post_enforce_threadgate(conn, &root, did, created_at, true).await {
212
+
Ok(true) => {
213
+
conn.execute(
214
+
"UPDATE posts_tmp SET violates_threadgate=TRUE WHERE at_uri=$1",
215
+
&[&post],
216
+
)
217
+
.await?;
218
+
}
219
+
Ok(false) => continue,
220
+
Err(e) => {
221
+
tracing::error!("failed to check threadgate enforcement: {e}");
222
+
continue;
223
+
}
224
+
}
225
+
}
196
226
197
227
conn.execute("INSERT INTO posts (SELECT * FROM posts_tmp)", &[])
198
228
.await
+213
consumer/src/db/gates.rs
···
1
+
use super::{PgExecResult, PgResult};
2
+
use crate::indexer::records::{
3
+
AppBskyFeedThreadgate, ThreadgateRule, THREADGATE_RULE_FOLLOWER, THREADGATE_RULE_FOLLOWING,
4
+
THREADGATE_RULE_LIST, THREADGATE_RULE_MENTION,
5
+
};
6
+
use chrono::prelude::*;
7
+
use chrono::{DateTime, Utc};
8
+
use deadpool_postgres::GenericClient;
9
+
use std::collections::HashSet;
10
+
11
+
pub async fn post_enforce_threadgate<C: GenericClient>(
12
+
conn: &mut C,
13
+
root: &str,
14
+
post_author: &str,
15
+
post_created_at: DateTime<Utc>,
16
+
is_backfill: bool,
17
+
) -> PgResult<bool> {
18
+
// check if the root and the current post are the same author
19
+
// strip "at://" then break into parts by '/'
20
+
let parts = root[5..].split('/').collect::<Vec<_>>();
21
+
let root_author = parts[0];
22
+
if root_author == post_author {
23
+
return Ok(false);
24
+
}
25
+
26
+
let tg_data = super::threadgate_get(conn, root).await?;
27
+
28
+
let Some((created_at, allow, allow_lists)) = tg_data else {
29
+
return Ok(false);
30
+
};
31
+
32
+
// when backfilling, there's no point continuing if the record is dated before the threadgate
33
+
if is_backfill && post_created_at < created_at {
34
+
return Ok(false);
35
+
}
36
+
37
+
if allow.is_empty() {
38
+
return Ok(true);
39
+
}
40
+
41
+
let allow: HashSet<String> = HashSet::from_iter(allow);
42
+
43
+
if allow.contains(THREADGATE_RULE_FOLLOWER) || allow.contains(THREADGATE_RULE_FOLLOWING) {
44
+
let profile_state: Option<(bool, bool)> = conn
45
+
.query_opt(
46
+
"SELECT following IS NOT NULL, followed IS NOT NULL FROM profile_states WHERE did=$1 AND subject=$2",
47
+
&[&root_author, &post_author],
48
+
)
49
+
.await?
50
+
.map(|v| Ok((v.try_get(0)?, v.try_get(1)?))).transpose()?;
51
+
52
+
if let Some((following, followed)) = profile_state {
53
+
if allow.contains(THREADGATE_RULE_FOLLOWER) && followed {
54
+
return Ok(false);
55
+
}
56
+
57
+
if allow.contains(THREADGATE_RULE_FOLLOWING) && following {
58
+
return Ok(false);
59
+
}
60
+
}
61
+
}
62
+
63
+
// check mentions
64
+
if allow.contains(THREADGATE_RULE_MENTION) {
65
+
let mentions: Vec<String> = conn
66
+
.query_opt("SELECT mentions FROM posts WHERE at_uri=$1", &[&root])
67
+
.await?
68
+
.and_then(|r| r.try_get::<_, Option<_>>(0).transpose())
69
+
.transpose()?
70
+
.unwrap_or_default();
71
+
72
+
if mentions.contains(&post_author.to_owned()) {
73
+
return Ok(false);
74
+
}
75
+
}
76
+
77
+
if allow.contains(THREADGATE_RULE_LIST) {
78
+
if allow_lists.is_empty() {
79
+
return Ok(true);
80
+
}
81
+
82
+
let count: i64 = conn
83
+
.query_one(
84
+
"SELECT count(*) FROM list_items WHERE list_uri=ANY($1) AND subject=$2",
85
+
&[&allow_lists, &post_author],
86
+
)
87
+
.await?
88
+
.try_get(0)?;
89
+
if count != 0 {
90
+
return Ok(false);
91
+
}
92
+
}
93
+
94
+
Ok(true)
95
+
}
96
+
97
+
pub async fn postgate_maintain_detaches<C: GenericClient>(
98
+
conn: &mut C,
99
+
post: &str,
100
+
detached: &[String],
101
+
disable_effective: Option<NaiveDateTime>,
102
+
) -> PgExecResult {
103
+
conn.execute(
104
+
"SELECT maintain_postgates($1, $2, $3)",
105
+
&[&post, &detached, &disable_effective],
106
+
)
107
+
.await
108
+
}
109
+
110
+
// variant of post_enforce_threadgate that runs when backfilling to clean up any posts already in DB
111
+
pub async fn threadgate_enforce_backfill<C: GenericClient>(
112
+
conn: &mut C,
113
+
root_author: &str,
114
+
threadgate: &AppBskyFeedThreadgate,
115
+
) -> PgExecResult {
116
+
// pull out allow - if it's None we can skip this gate.
117
+
let Some(allow) = threadgate.allow.as_ref() else {
118
+
return Ok(0);
119
+
};
120
+
121
+
let root = &threadgate.post;
122
+
123
+
if allow.is_empty() {
124
+
// blind update everything
125
+
return conn.execute(
126
+
"UPDATE posts SET violates_threadgate=TRUE WHERE root_uri=$1 AND did != $2 AND created_at >= $3",
127
+
&[&root, &root_author, &threadgate.created_at],
128
+
).await;
129
+
}
130
+
131
+
// pull the DIDs of authors under our root_uri, excluding the root author, whose posts are dated after created_at
132
+
// this is mutable because we'll remove ALLOWED dids
133
+
let mut dids: HashSet<String> = conn
134
+
.query(
135
+
"SELECT DISTINCT did FROM posts WHERE root_uri=$1 AND did != $2 AND created_at >= $3",
136
+
&[&root, &root_author, &threadgate.created_at],
137
+
)
138
+
.await?
139
+
.into_iter()
140
+
.map(|row| row.try_get(0))
141
+
.collect::<Result<_, _>>()?;
142
+
143
+
// this will be empty if there are no replies.
144
+
if dids.is_empty() {
145
+
return Ok(0);
146
+
}
147
+
148
+
let allowed_lists = allow
149
+
.iter()
150
+
.filter_map(|rule| match rule {
151
+
ThreadgateRule::List { list } => Some(list),
152
+
_ => None,
153
+
})
154
+
.collect::<Vec<_>>();
155
+
156
+
let allow: HashSet<_> = HashSet::from_iter(allow.iter().map(|v| v.as_str()));
157
+
158
+
if allow.contains(THREADGATE_RULE_FOLLOWER) && !dids.is_empty() {
159
+
let current_dids: Vec<_> = dids.iter().collect();
160
+
161
+
let res = conn.query(
162
+
"SELECT subject FROM profile_states WHERE did=$1 AND subject=ANY($2) AND followed IS NOT NULL",
163
+
&[&root_author, &current_dids]
164
+
).await?.into_iter().map(|row| row.try_get(0)).collect::<Result<HashSet<_>, _>>()?;
165
+
166
+
dids = &dids - &res;
167
+
}
168
+
169
+
if allow.contains(THREADGATE_RULE_FOLLOWING) && !dids.is_empty() {
170
+
let current_dids: Vec<_> = dids.iter().collect();
171
+
172
+
let res = conn.query(
173
+
"SELECT subject FROM profile_states WHERE did=$1 AND subject=ANY($2) AND following IS NOT NULL",
174
+
&[&root_author, &current_dids]
175
+
).await?.into_iter().map(|row| row.try_get(0)).collect::<Result<_, _>>()?;
176
+
177
+
dids = &dids - &res;
178
+
}
179
+
180
+
if allow.contains(THREADGATE_RULE_MENTION) && !dids.is_empty() {
181
+
let mentions: Vec<String> = conn
182
+
.query_opt("SELECT mentions FROM posts WHERE at_uri=$1", &[&root])
183
+
.await?
184
+
.and_then(|r| r.try_get::<_, Option<_>>(0).transpose())
185
+
.transpose()?
186
+
.unwrap_or_default();
187
+
188
+
dids = &dids - &HashSet::from_iter(mentions);
189
+
}
190
+
191
+
if allow.contains(THREADGATE_RULE_LIST) && !dids.is_empty() {
192
+
let current_dids: Vec<_> = dids.iter().collect();
193
+
194
+
let res = conn
195
+
.query(
196
+
"SELECT subject FROM list_items WHERE list_uri = ANY($1) AND subject = ANY($2)",
197
+
&[&allowed_lists, ¤t_dids],
198
+
)
199
+
.await?
200
+
.into_iter()
201
+
.map(|row| row.try_get(0))
202
+
.collect::<Result<_, _>>()?;
203
+
204
+
dids = &dids - &res;
205
+
}
206
+
207
+
let dids = dids.into_iter().collect::<Vec<_>>();
208
+
209
+
conn.execute(
210
+
"UPDATE posts SET violates_threadgate=TRUE WHERE root_uri = $1 AND did = ANY($2) AND created_at >= $3",
211
+
&[&threadgate.post, &dids, &threadgate.created_at]
212
+
).await
213
+
}
+2
consumer/src/db/mod.rs
+99
-49
consumer/src/db/record.rs
···
1
1
use super::{PgExecResult, PgOptResult, PgResult};
2
2
use crate::indexer::records::*;
3
-
use crate::utils::{blob_ref, strongref_to_parts};
3
+
use crate::utils::{blob_ref, extract_mentions_and_tags, merge_tags, strongref_to_parts};
4
4
use chrono::prelude::*;
5
5
use deadpool_postgres::GenericClient;
6
6
use ipld_core::cid::Cid;
7
7
use lexica::community_lexicon::bookmarks::Bookmark;
8
+
use std::collections::HashSet;
8
9
9
10
pub async fn record_upsert<C: GenericClient>(
10
11
conn: &mut C,
···
37
38
38
39
conn.execute(
39
40
include_str!("sql/bookmarks_upsert.sql"),
40
-
&[&repo, &rkey, &rec.subject, &rec_type, &rec.tags, &rec.created_at],
41
+
&[
42
+
&repo,
43
+
&rkey,
44
+
&rec.subject,
45
+
&rec_type,
46
+
&rec.tags,
47
+
&rec.created_at,
48
+
],
41
49
)
42
50
.await
43
51
}
···
119
127
],
120
128
)
121
129
.await
122
-
.map(|r| r.get::<_, i32>(0) == 0)
130
+
.and_then(|r| Ok(r.try_get::<_, i32>(0)? == 0))
123
131
}
124
132
125
133
pub async fn feedgen_delete<C: GenericClient>(conn: &mut C, at_uri: &str) -> PgExecResult {
···
151
159
)
152
160
.await?;
153
161
154
-
Ok(res.map(|v| v.get(0)))
162
+
res.map(|v| v.try_get(0)).transpose()
155
163
}
156
164
157
165
pub async fn labeler_upsert<C: GenericClient>(
···
216
224
)
217
225
.await?;
218
226
219
-
Ok(res.map(|v| v.get(0)))
227
+
res.map(|v| v.try_get(0)).transpose()
220
228
}
221
229
222
230
pub async fn list_upsert<C: GenericClient>(
···
247
255
],
248
256
)
249
257
.await
250
-
.map(|r| r.get::<_, i32>(0) == 0)
258
+
.and_then(|r| Ok(r.try_get::<_, i32>(0)? == 0))
251
259
}
252
260
253
261
pub async fn list_delete<C: GenericClient>(conn: &mut C, at_uri: &str) -> PgExecResult {
···
310
318
repo: &str,
311
319
cid: Cid,
312
320
rec: AppBskyFeedPost,
321
+
is_backfill: bool,
313
322
) -> PgExecResult {
314
323
let cid = cid.to_string();
315
324
let record = serde_json::to_value(&rec).unwrap();
325
+
let (mentions, tags) = rec
326
+
.facets
327
+
.as_ref()
328
+
.map(|v| extract_mentions_and_tags(v))
329
+
.unzip();
316
330
let facets = rec.facets.and_then(|v| serde_json::to_value(v).ok());
317
331
let (parent_uri, parent_cid) = strongref_to_parts(rec.reply.as_ref().map(|v| &v.parent));
318
332
let (root_uri, root_cid) = strongref_to_parts(rec.reply.as_ref().map(|v| &v.root));
319
333
let embed = rec.embed.as_ref().map(|v| v.as_str());
320
334
let embed_subtype = rec.embed.as_ref().and_then(|v| v.subtype());
321
335
336
+
// if there is a root, we need to check for the presence of a threadgate.
337
+
let violates_threadgate = match &root_uri {
338
+
Some(root) => {
339
+
super::post_enforce_threadgate(conn, root, repo, rec.created_at, is_backfill).await?
340
+
}
341
+
None => false,
342
+
};
343
+
344
+
let tags = merge_tags(tags, rec.tags);
345
+
322
346
let count = conn
323
347
.execute(
324
348
include_str!("sql/post_insert.sql"),
···
330
354
&rec.text,
331
355
&facets,
332
356
&rec.langs.unwrap_or_default(),
333
-
&rec.tags.unwrap_or_default(),
357
+
&tags,
334
358
&parent_uri,
335
359
&parent_cid,
336
360
&root_uri,
337
361
&root_cid,
338
362
&embed,
339
363
&embed_subtype,
364
+
&mentions,
365
+
&violates_threadgate,
340
366
&rec.created_at,
341
367
],
342
368
)
343
369
.await?;
344
370
345
371
if let Some(embed) = rec.embed.and_then(|embed| embed.into_bsky()) {
346
-
post_embed_insert(conn, at_uri, embed, rec.created_at).await?;
372
+
post_embed_insert(conn, at_uri, embed, rec.created_at, is_backfill).await?;
347
373
}
348
374
349
375
Ok(count)
···
365
391
)
366
392
.await?;
367
393
368
-
Ok(res.map(|row| (row.get(0), row.get(1))))
394
+
res.map(|row| Ok((row.try_get(0)?, row.try_get(1)?)))
395
+
.transpose()
369
396
}
370
397
371
398
pub async fn post_embed_insert<C: GenericClient>(
···
373
400
post: &str,
374
401
embed: AppBskyEmbed,
375
402
created_at: DateTime<Utc>,
403
+
is_backfill: bool,
376
404
) -> PgExecResult {
377
405
match embed {
378
406
AppBskyEmbed::Images(embed) => post_embed_image_insert(conn, post, embed).await,
379
407
AppBskyEmbed::Video(embed) => post_embed_video_insert(conn, post, embed).await,
380
408
AppBskyEmbed::External(embed) => post_embed_external_insert(conn, post, embed).await,
381
409
AppBskyEmbed::Record(embed) => {
382
-
post_embed_record_insert(conn, post, embed, created_at).await
410
+
post_embed_record_insert(conn, post, embed, created_at, is_backfill).await
383
411
}
384
412
AppBskyEmbed::RecordWithMedia(embed) => {
385
-
post_embed_record_insert(conn, post, embed.record, created_at).await?;
413
+
post_embed_record_insert(conn, post, embed.record, created_at, is_backfill).await?;
386
414
match *embed.media {
387
415
AppBskyEmbed::Images(embed) => post_embed_image_insert(conn, post, embed).await,
388
416
AppBskyEmbed::Video(embed) => post_embed_video_insert(conn, post, embed).await,
···
469
497
).await
470
498
}
471
499
500
+
const PG_DISABLE_RULE: &str = "app.bsky.feed.postgate#disableRule";
472
501
async fn post_embed_record_insert<C: GenericClient>(
473
502
conn: &mut C,
474
503
post: &str,
475
504
embed: AppBskyEmbedRecord,
476
505
post_created_at: DateTime<Utc>,
506
+
is_backfill: bool,
477
507
) -> PgExecResult {
478
508
// strip "at://" then break into parts by '/'
479
509
let parts = embed.record.uri[5..].split('/').collect::<Vec<_>>();
480
510
481
511
let detached = if parts[1] == "app.bsky.feed.post" {
482
-
let postgate_effective: Option<DateTime<Utc>> = conn
483
-
.query_opt(
484
-
"SELECT created_at FROM postgates WHERE post_uri=$1",
485
-
&[&post],
486
-
)
487
-
.await?
488
-
.map(|v| v.get(0));
512
+
let pg_data = postgate_get(conn, post).await?;
489
513
490
-
postgate_effective
491
-
.map(|v| Utc::now().min(post_created_at) > v)
492
-
.unwrap_or_default()
514
+
if let Some((effective, detached, rules)) = pg_data {
515
+
let detached: HashSet<String> = HashSet::from_iter(detached);
516
+
let rules: HashSet<String> = HashSet::from_iter(rules);
517
+
let compare_date = match is_backfill {
518
+
true => post_created_at,
519
+
false => Utc::now(),
520
+
};
521
+
522
+
detached.contains(post) || (rules.contains(PG_DISABLE_RULE) && compare_date > effective)
523
+
} else {
524
+
false
525
+
}
493
526
} else {
494
527
false
495
528
};
···
498
531
"INSERT INTO post_embed_record (post_uri, record_type, uri, cid, detached) VALUES ($1, $2, $3, $4, $5)",
499
532
&[&post, &parts[1], &embed.record.uri, &embed.record.cid.to_string(), &detached],
500
533
).await
534
+
}
535
+
536
+
async fn postgate_get<C: GenericClient>(
537
+
conn: &mut C,
538
+
post: &str,
539
+
) -> PgOptResult<(DateTime<Utc>, Vec<String>, Vec<String>)> {
540
+
conn.query_opt(
541
+
"SELECT created_at, detached, rules FROM postgates WHERE post_uri=$1",
542
+
&[&post],
543
+
)
544
+
.await?
545
+
.map(|v| Ok((v.try_get(0)?, v.try_get(1)?, v.try_get(2)?)))
546
+
.transpose()
501
547
}
502
548
503
549
pub async fn postgate_upsert<C: GenericClient>(
···
531
577
.await
532
578
}
533
579
534
-
pub async fn postgate_maintain_detaches<C: GenericClient>(
535
-
conn: &mut C,
536
-
post: &str,
537
-
detached: &[String],
538
-
disable_effective: Option<NaiveDateTime>,
539
-
) -> PgExecResult {
540
-
conn.execute(
541
-
"SELECT maintain_postgates($1, $2, $3)",
542
-
&[&post, &detached, &disable_effective],
543
-
)
544
-
.await
545
-
}
546
-
547
580
pub async fn profile_upsert<C: GenericClient>(
548
581
conn: &mut C,
549
582
repo: &str,
···
569
602
&pinned_cid,
570
603
&joined_sp_uri,
571
604
&joined_sp_cid,
605
+
&rec.pronouns,
606
+
&rec.website,
572
607
&rec.created_at.unwrap_or(Utc::now()).naive_utc(),
573
608
],
574
609
)
···
615
650
)
616
651
.await?;
617
652
618
-
Ok(res.map(|v| v.get(0)))
653
+
res.map(|v| v.try_get(0)).transpose()
619
654
}
620
655
621
656
pub async fn starter_pack_upsert<C: GenericClient>(
···
650
685
],
651
686
)
652
687
.await
653
-
.map(|r| r.get::<_, i32>(0) == 0)
688
+
.and_then(|r| Ok(r.try_get::<_, i32>(0)? == 0))
654
689
}
655
690
656
691
pub async fn starter_pack_delete<C: GenericClient>(conn: &mut C, at_uri: &str) -> PgExecResult {
···
691
726
.await
692
727
}
693
728
729
+
pub async fn threadgate_get<C: GenericClient>(
730
+
conn: &mut C,
731
+
post: &str,
732
+
) -> PgOptResult<(DateTime<Utc>, Vec<String>, Vec<String>)> {
733
+
conn
734
+
.query_opt(
735
+
"SELECT created_at, allow, allowed_lists FROM threadgates WHERE post_uri=$1 AND allow IS NOT NULL",
736
+
&[&post],
737
+
)
738
+
.await?
739
+
.map(|v| Ok((v.try_get(0)?, v.try_get(1)?, v.try_get(2)?))).transpose()
740
+
}
741
+
694
742
pub async fn threadgate_upsert<C: GenericClient>(
695
743
conn: &mut C,
696
744
at_uri: &str,
···
699
747
) -> PgExecResult {
700
748
let record = serde_json::to_value(&rec).unwrap();
701
749
702
-
let allowed_lists = rec
703
-
.allow
704
-
.iter()
705
-
.filter_map(|rule| match rule {
706
-
ThreadgateRule::List { list } => Some(list.clone()),
707
-
_ => None,
708
-
})
709
-
.collect::<Vec<_>>();
750
+
let allowed_lists = rec.allow.as_ref().map(|allow| {
751
+
allow
752
+
.iter()
753
+
.filter_map(|rule| match rule {
754
+
ThreadgateRule::List { list } => Some(list.clone()),
755
+
_ => None,
756
+
})
757
+
.collect::<Vec<_>>()
758
+
});
710
759
711
-
let allow = rec
712
-
.allow
713
-
.into_iter()
714
-
.map(|v| v.as_str().to_string())
715
-
.collect::<Vec<_>>();
760
+
let allow = rec.allow.map(|allow| {
761
+
allow
762
+
.into_iter()
763
+
.map(|v| v.as_str().to_string())
764
+
.collect::<Vec<_>>()
765
+
});
716
766
717
767
conn.execute(
718
768
include_str!("sql/threadgate_upsert.sql"),
+2
-2
consumer/src/db/sql/post_insert.sql
···
1
1
INSERT INTO posts (at_uri, did, cid, record, content, facets, languages, tags, parent_uri, parent_cid, root_uri,
2
-
root_cid, embed, embed_subtype, created_at)
3
-
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15)
2
+
root_cid, embed, embed_subtype, mentions, violates_threadgate, created_at)
3
+
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17)
4
4
ON CONFLICT DO NOTHING
+4
-2
consumer/src/db/sql/profile_upsert.sql
···
1
1
INSERT INTO profiles (did, cid, avatar_cid, banner_cid, display_name, description, pinned_uri, pinned_cid,
2
-
joined_sp_uri, joined_sp_cid, created_at)
3
-
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
2
+
joined_sp_uri, joined_sp_cid, pronouns, website, created_at)
3
+
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
4
4
ON CONFLICT (did) DO UPDATE SET cid=EXCLUDED.cid,
5
5
avatar_cid=EXCLUDED.avatar_cid,
6
6
banner_cid=EXCLUDED.banner_cid,
···
10
10
pinned_cid=EXCLUDED.pinned_cid,
11
11
joined_sp_uri=EXCLUDED.joined_sp_uri,
12
12
joined_sp_cid=EXCLUDED.joined_sp_cid,
13
+
pronouns=EXCLUDED.pronouns,
14
+
website=EXCLUDED.website,
13
15
indexed_at=NOW()
+1
-2
consumer/src/firehose/mod.rs
···
119
119
}
120
120
"#sync" => {
121
121
counter!("firehose_events.total", "event" => "sync").increment(1);
122
-
let event: AtpSyncEvent =
123
-
serde_ipld_dagcbor::from_reader(&mut reader)?;
122
+
let event: AtpSyncEvent = serde_ipld_dagcbor::from_reader(&mut reader)?;
124
123
125
124
// increment the seq
126
125
if self.seq < event.seq {
+4
-9
consumer/src/indexer/mod.rs
···
213
213
rc: &mut MultiplexedConnection,
214
214
sync: AtpSyncEvent,
215
215
) -> eyre::Result<()> {
216
-
let Some((sync_state, Some(current_rev))) = db::actor_get_repo_status(conn, &sync.did).await? else {
216
+
let Some((sync_state, Some(current_rev))) = db::actor_get_repo_status(conn, &sync.did).await?
217
+
else {
217
218
return Ok(());
218
219
};
219
220
···
624
625
});
625
626
626
627
let labels = record.labels.clone();
627
-
db::post_insert(conn, at_uri, repo, cid, record).await?;
628
+
db::post_insert(conn, at_uri, repo, cid, record, false).await?;
628
629
if let Some(labels) = labels {
629
630
db::maintain_self_labels(conn, repo, Some(cid), at_uri, labels).await?;
630
631
}
···
639
640
}
640
641
RecordTypes::AppBskyFeedPostgate(record) => {
641
642
if !at_uri_is_by(&record.post, repo) {
642
-
tracing::warn!("tried to create a postgate on a post we don't control!");
643
643
return Ok(());
644
644
}
645
645
···
669
669
}
670
670
RecordTypes::AppBskyFeedThreadgate(record) => {
671
671
if !at_uri_is_by(&record.post, repo) {
672
-
tracing::warn!("tried to create a threadgate on a post we don't control!");
673
672
return Ok(());
674
673
}
675
674
···
709
708
}
710
709
RecordTypes::AppBskyGraphListItem(record) => {
711
710
if !at_uri_is_by(&record.list, repo) {
712
-
// it's also probably a bad idea to log *all* the attempts to do this...
713
-
tracing::warn!("tried to create a listitem on a list we don't control!");
714
711
return Ok(());
715
712
}
716
713
···
866
863
redis::AsyncTypedCommands::del(rc, format!("profile#{repo}")).await?;
867
864
db::chat_decl_delete(conn, repo).await?
868
865
}
869
-
CollectionType::CommunityLexiconBookmark => {
870
-
db::bookmark_delete(conn, rkey, repo).await?
871
-
}
866
+
CollectionType::CommunityLexiconBookmark => db::bookmark_delete(conn, rkey, repo).await?,
872
867
_ => unreachable!(),
873
868
};
874
869
+14
-6
consumer/src/indexer/records.rs
···
23
23
pub labels: Option<SelfLabels>,
24
24
pub joined_via_starter_pack: Option<StrongRef>,
25
25
pub pinned_post: Option<StrongRef>,
26
+
#[serde_as(as = "utils::safe_string")]
27
+
pub pronouns: Option<String>,
28
+
#[serde_as(as = "utils::safe_string")]
29
+
pub website: Option<String>,
26
30
pub created_at: Option<DateTime<Utc>>,
27
31
}
28
32
···
263
267
pub struct AppBskyFeedThreadgate {
264
268
pub post: String,
265
269
pub created_at: DateTime<Utc>,
266
-
#[serde(default)]
267
-
pub allow: Vec<ThreadgateRule>,
270
+
pub allow: Option<Vec<ThreadgateRule>>,
268
271
#[serde(default)]
269
272
pub hidden_replies: Vec<String>,
270
273
}
274
+
275
+
pub const THREADGATE_RULE_MENTION: &str = "app.bsky.feed.threadgate#mentionRule";
276
+
pub const THREADGATE_RULE_FOLLOWER: &str = "app.bsky.feed.threadgate#followerRule";
277
+
pub const THREADGATE_RULE_FOLLOWING: &str = "app.bsky.feed.threadgate#followingRule";
278
+
pub const THREADGATE_RULE_LIST: &str = "app.bsky.feed.threadgate#listRule";
271
279
272
280
#[derive(Debug, Deserialize, Serialize)]
273
281
#[serde(tag = "$type")]
···
285
293
impl ThreadgateRule {
286
294
pub fn as_str(&self) -> &'static str {
287
295
match self {
288
-
ThreadgateRule::Mention => "app.bsky.feed.threadgate#mentionRule",
289
-
ThreadgateRule::Follower => "app.bsky.feed.threadgate#followerRule",
290
-
ThreadgateRule::Following => "app.bsky.feed.threadgate#followingRule",
291
-
ThreadgateRule::List { .. } => "app.bsky.feed.threadgate#listRule",
296
+
ThreadgateRule::Mention => THREADGATE_RULE_MENTION,
297
+
ThreadgateRule::Follower => THREADGATE_RULE_FOLLOWER,
298
+
ThreadgateRule::Following => THREADGATE_RULE_FOLLOWING,
299
+
ThreadgateRule::List { .. } => THREADGATE_RULE_LIST,
292
300
}
293
301
}
294
302
}
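Pulling the $type strings out into THREADGATE_RULE_* constants lets other modules (for example, whatever ends up computing violates_threadgate) compare against them without re-typing the NSIDs. A trimmed, self-contained sketch of the pattern; the enum below is a stand-in for the real ThreadgateRule, not a copy of it:

// Sketch: two of the four constants and a cut-down enum, just to show the shared-constant usage.
pub const THREADGATE_RULE_MENTION: &str = "app.bsky.feed.threadgate#mentionRule";
pub const THREADGATE_RULE_LIST: &str = "app.bsky.feed.threadgate#listRule";

enum ThreadgateRule {
    Mention,
    List { list: String },
}

impl ThreadgateRule {
    fn as_str(&self) -> &'static str {
        match self {
            ThreadgateRule::Mention => THREADGATE_RULE_MENTION,
            ThreadgateRule::List { .. } => THREADGATE_RULE_LIST,
        }
    }
}

fn main() {
    let rule = ThreadgateRule::List { list: "at://did:plc:example/app.bsky.graph.list/abc".into() };
    // Callers can match on the shared constant instead of a re-typed literal.
    assert_eq!(rule.as_str(), THREADGATE_RULE_LIST);
    assert_eq!(ThreadgateRule::Mention.as_str(), THREADGATE_RULE_MENTION);
}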
+1
-1
consumer/src/indexer/types.rs
···
42
42
#[serde(rename = "chat.bsky.actor.declaration")]
43
43
ChatBskyActorDeclaration(records::ChatBskyActorDeclaration),
44
44
#[serde(rename = "community.lexicon.bookmarks.bookmark")]
45
-
CommunityLexiconBookmark(lexica::community_lexicon::bookmarks::Bookmark)
45
+
CommunityLexiconBookmark(lexica::community_lexicon::bookmarks::Bookmark),
46
46
}
47
47
48
48
#[derive(Debug, PartialOrd, PartialEq, Deserialize, Serialize)]
+25
consumer/src/instrumentation.rs
···
1
+
use tracing::Subscriber;
2
+
use tracing_subscriber::filter::Filtered;
3
+
use tracing_subscriber::layer::SubscriberExt;
4
+
use tracing_subscriber::registry::LookupSpan;
5
+
use tracing_subscriber::util::SubscriberInitExt;
6
+
use tracing_subscriber::{EnvFilter, Layer};
7
+
8
+
pub fn init_instruments(cfg: &crate::config::ConfigInstruments) {
9
+
let log_layer = init_log(cfg.log_json);
10
+
11
+
tracing_subscriber::registry().with(log_layer).init();
12
+
}
13
+
14
+
fn init_log<S>(json: bool) -> Filtered<Box<dyn Layer<S> + Send + Sync>, EnvFilter, S>
15
+
where
16
+
S: Subscriber + for<'span> LookupSpan<'span>,
17
+
{
18
+
let stdout_filter = EnvFilter::from_default_env();
19
+
20
+
match json {
21
+
true => tracing_subscriber::fmt::layer().json().boxed(),
22
+
false => tracing_subscriber::fmt::layer().boxed(),
23
+
}
24
+
.with_filter(stdout_filter)
25
+
}
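init_log() leaves verbosity to RUST_LOG via EnvFilter::from_default_env() and only switches the output format on log_json; consumer/src/main.rs below swaps its old tracing_subscriber::fmt::init() for init_instruments(&conf.instruments). A self-contained sketch of the same layering, assuming tracing-subscriber is built with its env-filter and json features (as parakeet/Cargo.toml enables further down):

// Sketch of the layering init_log() performs; `json` stands in for cfg.log_json.
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use tracing_subscriber::{EnvFilter, Layer};

fn main() {
    let json = true; // stand-in for cfg.log_json

    let log_layer = match json {
        true => tracing_subscriber::fmt::layer().json().boxed(),
        false => tracing_subscriber::fmt::layer().boxed(),
    }
    // Run with e.g. RUST_LOG=info,consumer=debug to control what gets through.
    .with_filter(EnvFilter::from_default_env());

    tracing_subscriber::registry().with(log_layer).init();
    tracing::info!("emitted as one JSON object per line when json = true");
}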
+2
-1
consumer/src/main.rs
···
12
12
mod db;
13
13
mod firehose;
14
14
mod indexer;
15
+
mod instrumentation;
15
16
mod label_indexer;
16
17
mod utils;
17
18
18
19
#[tokio::main]
19
20
async fn main() -> eyre::Result<()> {
20
-
tracing_subscriber::fmt::init();
21
21
PrometheusBuilder::new().install()?;
22
22
23
23
let cli = cmd::parse();
24
24
let conf = config::load_config()?;
25
25
26
+
instrumentation::init_instruments(&conf.instruments);
26
27
let user_agent = build_ua(&conf.ua_contact);
27
28
28
29
let pool = conf.database.create_pool(Some(Runtime::Tokio1), NoTls)?;
+33
-4
consumer/src/utils.rs
···
1
+
use lexica::app_bsky::richtext::{Facet, FacetMain, FacetOuter};
2
+
use lexica::{Blob, StrongRef};
1
3
use serde::{Deserialize, Deserializer};
2
-
use lexica::{Blob, StrongRef};
3
4
4
5
// see https://deer.social/profile/did:plc:63y3oh7iakdueqhlj6trojbq/post/3ltuv4skhqs2h
5
6
pub fn safe_string<'de, D: Deserializer<'de>>(deserializer: D) -> Result<String, D::Error> {
···
12
13
blob.map(|blob| blob.cid.to_string())
13
14
}
14
15
15
-
pub fn strongref_to_parts(
16
-
strongref: Option<&StrongRef>,
17
-
) -> (Option<String>, Option<String>) {
16
+
pub fn strongref_to_parts(strongref: Option<&StrongRef>) -> (Option<String>, Option<String>) {
18
17
strongref
19
18
.map(|sr| (sr.uri.clone(), sr.cid.to_string()))
20
19
.unzip()
···
41
40
42
41
did == split_aturi[2]
43
42
}
43
+
44
+
pub fn extract_mentions_and_tags(from: &[FacetMain]) -> (Vec<String>, Vec<String>) {
45
+
let (mentions, tags) = from
46
+
.iter()
47
+
.flat_map(|v| {
48
+
v.features.iter().map(|facet| match facet {
49
+
FacetOuter::Bsky(Facet::Mention { did }) => (Some(did), None),
50
+
FacetOuter::Bsky(Facet::Tag { tag }) => (None, Some(tag)),
51
+
_ => (None, None),
52
+
})
53
+
})
54
+
.unzip::<_, _, Vec<_>, Vec<_>>();
55
+
56
+
let mentions = mentions.into_iter().flatten().cloned().collect();
57
+
let tags = tags.into_iter().flatten().cloned().collect();
58
+
59
+
(mentions, tags)
60
+
}
61
+
62
+
pub fn merge_tags<T>(t1: Option<Vec<T>>, t2: Option<Vec<T>>) -> Vec<T> {
63
+
match (t1, t2) {
64
+
(Some(t1), None) => t1,
65
+
(None, Some(t2)) => t2,
66
+
(Some(mut t1), Some(t2)) => {
67
+
t1.extend(t2);
68
+
t1
69
+
}
70
+
_ => Vec::default(),
71
+
}
72
+
}
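extract_mentions_and_tags() splits a post's facet features into mention DIDs and hashtag strings, and merge_tags() collapses two optional tag lists into one (presumably the facet-derived tags plus the record's own tags field). A small runnable sketch of merge_tags(); the body is copied from above so the asserts stand alone:

// merge_tags() reproduced verbatim from consumer/src/utils.rs so the asserts below run on their own.
fn merge_tags<T>(t1: Option<Vec<T>>, t2: Option<Vec<T>>) -> Vec<T> {
    match (t1, t2) {
        (Some(t1), None) => t1,
        (None, Some(t2)) => t2,
        (Some(mut t1), Some(t2)) => {
            t1.extend(t2);
            t1
        }
        _ => Vec::default(),
    }
}

fn main() {
    // Both sides present: concatenated in order.
    assert_eq!(merge_tags(Some(vec!["rust"]), Some(vec!["atproto"])), vec!["rust", "atproto"]);
    // One side missing: degrades to the other.
    assert_eq!(merge_tags(None, Some(vec!["atproto"])), vec!["atproto"]);
    // Both missing: empty Vec.
    assert_eq!(merge_tags::<&str>(None, None), Vec::<&str>::new());
}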
+2
-12
dataloader-rs/Cargo.toml
···
2
2
name = "dataloader"
3
3
version = "0.18.0"
4
4
edition = "2021"
5
-
authors = ["cksac <cs.cksac@gmail.com>", "Lily"]
5
+
authors = ["cksac <cs.cksac@gmail.com>", "Mia"]
6
6
description = "Rust implementation of Facebook's DataLoader using async-await."
7
7
keywords = ["batcher", "dataloader", "cache"]
8
8
categories = ["asynchronous", "caching"]
···
15
15
[badges]
16
16
travis-ci = { repository = "/cksac/dataloader-rs" }
17
17
18
-
[features]
19
-
default = ["runtime-async-std"]
20
-
runtime-async-std = [
21
-
"async-std",
22
-
]
23
-
runtime-tokio = [
24
-
"tokio"
25
-
]
26
-
27
18
[dependencies]
28
-
async-std = { version = "1", optional = true }
29
-
tokio = { version = "1", features = [ "sync", "rt" ], optional = true }
19
+
tokio = { version = "1", features = [ "sync", "rt" ] }
30
20
31
21
[dev-dependencies]
32
22
futures = "0.3"
-13
dataloader-rs/src/runtime.rs
···
1
-
// runtime-async-std
2
-
#[cfg(feature = "runtime-async-std")]
3
-
pub type Arc<T> = async_std::sync::Arc<T>;
4
-
5
-
#[cfg(feature = "runtime-async-std")]
6
-
pub type Mutex<T> = async_std::sync::Mutex<T>;
7
-
8
-
#[cfg(feature = "runtime-async-std")]
9
-
pub use async_std::task::yield_now;
10
-
11
1
// runtime-tokio
12
-
#[cfg(feature = "runtime-tokio")]
13
2
pub type Arc<T> = std::sync::Arc<T>;
14
3
15
-
#[cfg(feature = "runtime-tokio")]
16
4
pub type Mutex<T> = tokio::sync::Mutex<T>;
17
5
18
-
#[cfg(feature = "runtime-tokio")]
19
6
pub use tokio::task::yield_now;
+1
-2
justfile
···
12
12
13
13
@reset-redis:
14
14
echo "Resetting Redis lists..."
15
-
redis-cli DEL backfill_queue
16
-
redis-cli DEL backfill_processing
15
+
redis-cli DEL backfill_queue backfill_processing bf_downloaded
17
16
18
17
@reset-and-backfill *dids: reset-db reset-redis
19
18
for PARAMETER_VALUE in {{dids}}; do \
+36
-6
lexica/src/app_bsky/actor.rs
···
1
1
use crate::app_bsky::embed::External;
2
+
use crate::app_bsky::graph::ListViewBasic;
2
3
use crate::com_atproto::label::Label;
3
4
use chrono::prelude::*;
4
5
use serde::{Deserialize, Serialize};
5
6
use std::fmt::Display;
6
7
use std::str::FromStr;
8
+
9
+
#[derive(Clone, Default, Debug, Serialize)]
10
+
#[serde(rename_all = "camelCase")]
11
+
pub struct ProfileViewerState {
12
+
pub muted: bool,
13
+
#[serde(skip_serializing_if = "Option::is_none")]
14
+
pub muted_by_list: Option<ListViewBasic>,
15
+
pub blocked_by: bool,
16
+
#[serde(skip_serializing_if = "Option::is_none")]
17
+
pub blocking: Option<String>,
18
+
#[serde(skip_serializing_if = "Option::is_none")]
19
+
pub blocking_by_list: Option<ListViewBasic>,
20
+
#[serde(skip_serializing_if = "Option::is_none")]
21
+
pub following: Option<String>,
22
+
#[serde(skip_serializing_if = "Option::is_none")]
23
+
pub followed_by: Option<String>,
24
+
// #[serde(skip_serializing_if = "Option::is_none")]
25
+
// pub known_followers: Option<()>,
26
+
// #[serde(skip_serializing_if = "Option::is_none")]
27
+
// pub activity_subscriptions: Option<()>,
28
+
}
7
29
8
30
#[derive(Clone, Default, Debug, Serialize)]
9
31
#[serde(rename_all = "camelCase")]
···
130
152
pub avatar: Option<String>,
131
153
#[serde(skip_serializing_if = "Option::is_none")]
132
154
pub associated: Option<ProfileAssociated>,
133
-
// #[serde(skip_serializing_if = "Option::is_none")]
134
-
// pub viewer: Option<()>,
155
+
#[serde(skip_serializing_if = "Option::is_none")]
156
+
pub viewer: Option<ProfileViewerState>,
135
157
#[serde(skip_serializing_if = "Vec::is_empty")]
136
158
pub labels: Vec<Label>,
137
159
#[serde(skip_serializing_if = "Option::is_none")]
138
160
pub verification: Option<VerificationState>,
139
161
#[serde(skip_serializing_if = "Option::is_none")]
140
162
pub status: Option<StatusView>,
163
+
#[serde(skip_serializing_if = "Option::is_none")]
164
+
pub pronouns: Option<String>,
141
165
142
166
pub created_at: DateTime<Utc>,
143
167
}
···
156
180
pub avatar: Option<String>,
157
181
#[serde(skip_serializing_if = "Option::is_none")]
158
182
pub associated: Option<ProfileAssociated>,
159
-
// #[serde(skip_serializing_if = "Option::is_none")]
160
-
// pub viewer: Option<()>,
183
+
#[serde(skip_serializing_if = "Option::is_none")]
184
+
pub viewer: Option<ProfileViewerState>,
161
185
#[serde(skip_serializing_if = "Vec::is_empty")]
162
186
pub labels: Vec<Label>,
163
187
#[serde(skip_serializing_if = "Option::is_none")]
164
188
pub verification: Option<VerificationState>,
165
189
#[serde(skip_serializing_if = "Option::is_none")]
166
190
pub status: Option<StatusView>,
191
+
#[serde(skip_serializing_if = "Option::is_none")]
192
+
pub pronouns: Option<String>,
167
193
168
194
pub created_at: DateTime<Utc>,
169
195
pub indexed_at: NaiveDateTime,
···
189
215
pub associated: Option<ProfileAssociated>,
190
216
// #[serde(skip_serializing_if = "Option::is_none")]
191
217
// pub joined_via_starter_pack: Option<()>,
192
-
// #[serde(skip_serializing_if = "Option::is_none")]
193
-
// pub viewer: Option<()>,
218
+
#[serde(skip_serializing_if = "Option::is_none")]
219
+
pub viewer: Option<ProfileViewerState>,
194
220
#[serde(skip_serializing_if = "Vec::is_empty")]
195
221
pub labels: Vec<Label>,
196
222
// #[serde(skip_serializing_if = "Option::is_none")]
···
199
225
pub verification: Option<VerificationState>,
200
226
#[serde(skip_serializing_if = "Option::is_none")]
201
227
pub status: Option<StatusView>,
228
+
#[serde(skip_serializing_if = "Option::is_none")]
229
+
pub pronouns: Option<String>,
230
+
#[serde(skip_serializing_if = "Option::is_none")]
231
+
pub website: Option<String>,
202
232
203
233
pub created_at: DateTime<Utc>,
204
234
pub indexed_at: NaiveDateTime,
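Because every optional field on ProfileViewerState carries skip_serializing_if = "Option::is_none", a default value serialises down to just the two camelCased booleans. A sketch of that, assuming the lexica workspace crate and serde_json are available where this runs:

// Sketch: None fields vanish from the JSON, leaving only the camelCased booleans.
use lexica::app_bsky::actor::ProfileViewerState;

fn main() {
    let viewer = ProfileViewerState::default();
    let json = serde_json::to_string(&viewer).unwrap();
    assert_eq!(json, r#"{"muted":false,"blockedBy":false}"#);
}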
+29
-10
lexica/src/app_bsky/feed.rs
···
1
1
use super::RecordStats;
2
-
use crate::app_bsky::actor::{ProfileView, ProfileViewBasic};
2
+
use crate::app_bsky::actor::{ProfileView, ProfileViewBasic, ProfileViewerState};
3
3
use crate::app_bsky::embed::Embed;
4
4
use crate::app_bsky::graph::ListViewBasic;
5
5
use crate::app_bsky::richtext::FacetMain;
···
8
8
use serde::{Deserialize, Serialize};
9
9
use std::str::FromStr;
10
10
11
+
#[derive(Clone, Default, Debug, Serialize)]
12
+
#[serde(rename_all = "camelCase")]
13
+
pub struct PostViewerState {
14
+
#[serde(skip_serializing_if = "Option::is_none")]
15
+
pub repost: Option<String>,
16
+
#[serde(skip_serializing_if = "Option::is_none")]
17
+
pub like: Option<String>,
18
+
pub bookmarked: bool,
19
+
pub thread_muted: bool,
20
+
pub reply_disabled: bool,
21
+
pub embedding_disabled: bool,
22
+
pub pinned: bool,
23
+
}
24
+
11
25
#[derive(Clone, Debug, Serialize)]
12
26
#[serde(rename_all = "camelCase")]
13
27
pub struct PostView {
···
23
37
24
38
#[serde(skip_serializing_if = "Vec::is_empty")]
25
39
pub labels: Vec<Label>,
26
-
// #[serde(skip_serializing_if = "Option::is_none")]
27
-
// pub viewer: Option<()>,
40
+
#[serde(skip_serializing_if = "Option::is_none")]
41
+
pub viewer: Option<PostViewerState>,
28
42
#[serde(skip_serializing_if = "Option::is_none")]
29
43
pub threadgate: Option<ThreadgateView>,
30
44
···
123
137
124
138
#[derive(Clone, Debug, Serialize)]
125
139
pub struct BlockedAuthor {
126
-
pub uri: String,
127
-
// pub viewer: Option<()>,
140
+
pub did: String,
141
+
pub viewer: Option<ProfileViewerState>,
142
+
}
143
+
144
+
#[derive(Clone, Default, Debug, Serialize)]
145
+
#[serde(rename_all = "camelCase")]
146
+
pub struct GeneratorViewerState {
147
+
#[serde(skip_serializing_if = "Option::is_none")]
148
+
pub like: Option<String>,
128
149
}
129
150
130
151
#[derive(Clone, Debug, Serialize)]
···
148
169
pub accepts_interactions: bool,
149
170
#[serde(skip_serializing_if = "Vec::is_empty")]
150
171
pub labels: Vec<Label>,
151
-
// #[serde(skip_serializing_if = "Option::is_none")]
152
-
// pub viewer: Option<()>,
172
+
#[serde(skip_serializing_if = "Option::is_none")]
173
+
pub viewer: Option<GeneratorViewerState>,
153
174
#[serde(skip_serializing_if = "Option::is_none")]
154
175
pub content_mode: Option<GeneratorContentMode>,
155
176
···
219
240
#[serde(rename = "app.bsky.feed.defs#skeletonReasonPin")]
220
241
Pin {},
221
242
#[serde(rename = "app.bsky.feed.defs#skeletonReasonRepost")]
222
-
Repost {
223
-
repost: String,
224
-
},
243
+
Repost { repost: String },
225
244
}
+12
-4
lexica/src/app_bsky/graph.rs
···
6
6
use serde::{Deserialize, Serialize};
7
7
use std::str::FromStr;
8
8
9
+
#[derive(Clone, Default, Debug, Serialize)]
10
+
#[serde(rename_all = "camelCase")]
11
+
pub struct ListViewerState {
12
+
pub muted: bool,
13
+
#[serde(skip_serializing_if = "Option::is_none")]
14
+
pub blocked: Option<String>,
15
+
}
16
+
9
17
#[derive(Clone, Debug, Serialize)]
10
18
#[serde(rename_all = "camelCase")]
11
19
pub struct ListViewBasic {
···
18
26
pub avatar: Option<String>,
19
27
pub list_item_count: i64,
20
28
21
-
// #[serde(skip_serializing_if = "Option::is_none")]
22
-
// pub viewer: Option<()>,
29
+
#[serde(skip_serializing_if = "Option::is_none")]
30
+
pub viewer: Option<ListViewerState>,
23
31
#[serde(skip_serializing_if = "Vec::is_empty")]
24
32
pub labels: Vec<Label>,
25
33
···
44
52
pub avatar: Option<String>,
45
53
pub list_item_count: i64,
46
54
47
-
// #[serde(skip_serializing_if = "Option::is_none")]
48
-
// pub viewer: Option<()>,
55
+
#[serde(skip_serializing_if = "Option::is_none")]
56
+
pub viewer: Option<ListViewerState>,
49
57
#[serde(skip_serializing_if = "Vec::is_empty")]
50
58
pub labels: Vec<Label>,
51
59
+11
-4
lexica/src/app_bsky/labeler.rs
···
4
4
use chrono::prelude::*;
5
5
use serde::{Deserialize, Serialize};
6
6
7
+
#[derive(Clone, Default, Debug, Serialize)]
8
+
#[serde(rename_all = "camelCase")]
9
+
pub struct LabelerViewerState {
10
+
#[serde(skip_serializing_if = "Option::is_none")]
11
+
pub like: Option<String>,
12
+
}
13
+
7
14
#[derive(Clone, Debug, Serialize)]
8
15
#[serde(rename_all = "camelCase")]
9
16
pub struct LabelerView {
···
12
19
pub creator: ProfileView,
13
20
14
21
pub like_count: i64,
15
-
// #[serde(skip_serializing_if = "Option::is_none")]
16
-
// pub viewer: Option<()>,
22
+
#[serde(skip_serializing_if = "Option::is_none")]
23
+
pub viewer: Option<LabelerViewerState>,
17
24
#[serde(skip_serializing_if = "Vec::is_empty")]
18
25
pub labels: Vec<Label>,
19
26
pub indexed_at: DateTime<Utc>,
···
27
34
pub creator: ProfileView,
28
35
29
36
pub like_count: i64,
30
-
// #[serde(skip_serializing_if = "Option::is_none")]
31
-
// pub viewer: Option<()>,
37
+
#[serde(skip_serializing_if = "Option::is_none")]
38
+
pub viewer: Option<LabelerViewerState>,
32
39
#[serde(skip_serializing_if = "Vec::is_empty")]
33
40
pub labels: Vec<Label>,
34
41
pub policies: LabelerPolicy,
+1
lexica/src/app_bsky/mod.rs
+33
lexica/src/app_bsky/unspecced.rs
···
1
+
use crate::app_bsky::feed::{BlockedAuthor, PostView};
2
+
use serde::Serialize;
3
+
4
+
#[derive(Clone, Debug, Serialize)]
5
+
pub struct ThreadV2Item {
6
+
pub uri: String,
7
+
pub depth: i32,
8
+
pub value: ThreadV2ItemType,
9
+
}
10
+
11
+
#[derive(Clone, Debug, Serialize)]
12
+
#[serde(tag = "$type")]
13
+
pub enum ThreadV2ItemType {
14
+
#[serde(rename = "app.bsky.unspecced.defs#threadItemPost")]
15
+
Post(ThreadItemPost),
16
+
#[serde(rename = "app.bsky.unspecced.defs#threadItemNoUnauthenticated")]
17
+
NoUnauthenticated {},
18
+
#[serde(rename = "app.bsky.unspecced.defs#threadItemNotFound")]
19
+
NotFound {},
20
+
#[serde(rename = "app.bsky.unspecced.defs#threadItemBlocked")]
21
+
Blocked { author: BlockedAuthor },
22
+
}
23
+
24
+
#[derive(Clone, Debug, Serialize)]
25
+
#[serde(rename_all = "camelCase")]
26
+
pub struct ThreadItemPost {
27
+
pub post: PostView,
28
+
pub more_parents: bool,
29
+
pub more_replies: i32,
30
+
pub op_thread: bool,
31
+
pub hidden_by_threadgate: bool,
32
+
pub muted_by_viewer: bool,
33
+
}
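ThreadV2ItemType is internally tagged on "$type", so each variant serialises with its fully qualified NSID as the discriminator and nothing else for the empty variants. A sketch, assuming the module is exported as lexica::app_bsky::unspecced (the one-line mod.rs change above) and serde_json is available; the uri and depth values are made up:

// Sketch: serde writes the lexicon NSID into "$type"; identifiers below are invented.
use lexica::app_bsky::unspecced::{ThreadV2Item, ThreadV2ItemType};

fn main() {
    let item = ThreadV2Item {
        uri: "at://did:plc:example/app.bsky.feed.post/3xyz".to_string(),
        depth: 0,
        value: ThreadV2ItemType::NotFound {},
    };
    let json = serde_json::to_string(&item).unwrap();
    assert_eq!(
        json,
        r#"{"uri":"at://did:plc:example/app.bsky.feed.post/3xyz","depth":0,"value":{"$type":"app.bsky.unspecced.defs#threadItemNotFound"}}"#
    );
}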
+1
-1
lexica/src/community_lexicon/bookmarks.rs
+2
-2
lexica/src/utils.rs
+2
-2
migrations/2025-02-16-142357_posts/up.sql
+17
migrations/2025-09-17-190406_viewer-interactions/down.sql
···
1
+
drop trigger t_profile_state_ins on follows;
2
+
drop trigger t_profile_state_del on follows;
3
+
drop trigger t_profile_state_ins on blocks;
4
+
drop trigger t_profile_state_del on blocks;
5
+
drop trigger t_profile_state_ins on mutes;
6
+
drop trigger t_profile_state_del on mutes;
7
+
8
+
drop function f_profile_state_ins_follow;
9
+
drop function f_profile_state_del_follow;
10
+
drop function f_profile_state_ins_block;
11
+
drop function f_profile_state_del_block;
12
+
drop function f_profile_state_ins_mute;
13
+
drop function f_profile_state_del_mute;
14
+
15
+
drop view v_list_mutes_exp;
16
+
drop view v_list_block_exp;
17
+
drop table profile_states;
+146
migrations/2025-09-17-190406_viewer-interactions/up.sql
···
1
+
create table profile_states
2
+
(
3
+
did text not null,
4
+
subject text not null,
5
+
muting bool not null default false, -- subj muted by did
6
+
blocked bool not null default false, -- did blocked by subj
7
+
blocking text, -- subj blocked by did
8
+
following text, -- rkey of follow record (did->subj)
9
+
followed text, -- rkey of follow record (subj->did)
10
+
11
+
primary key (did, subject)
12
+
);
13
+
14
+
create index profilestates_did_index on profile_states using hash (did);
15
+
create index profilestates_sub_index on profile_states using hash (subject);
16
+
17
+
create view v_list_block_exp as
18
+
(
19
+
select lb.list_uri, did, li.subject
20
+
from list_blocks lb
21
+
inner join list_items li on lb.list_uri = li.list_uri
22
+
);
23
+
24
+
create view v_list_mutes_exp as
25
+
(
26
+
select lm.list_uri, did, li.subject
27
+
from list_mutes lm
28
+
inner join list_items li on lm.list_uri = li.list_uri
29
+
);
30
+
31
+
-- profile_states follow triggers
32
+
create function f_profile_state_ins_follow() returns trigger
33
+
language plpgsql as
34
+
$$
35
+
begin
36
+
insert into profile_states (did, subject, following)
37
+
VALUES (NEW.did, NEW.subject, NEW.rkey)
38
+
ON CONFLICT (did, subject) DO UPDATE SET following=excluded.following;
39
+
40
+
insert into profile_states (did, subject, followed)
41
+
VALUES (NEW.subject, NEW.did, NEW.rkey)
42
+
ON CONFLICT (did, subject) DO UPDATE SET followed=excluded.followed;
43
+
44
+
return NEW;
45
+
end;
46
+
$$;
47
+
48
+
create trigger t_profile_state_ins
49
+
before insert
50
+
on follows
51
+
for each row
52
+
execute procedure f_profile_state_ins_follow();
53
+
54
+
create function f_profile_state_del_follow() returns trigger
55
+
language plpgsql as
56
+
$$
57
+
begin
58
+
update profile_states set following = null where did = OLD.did and subject = OLD.subject;
59
+
update profile_states set followed = null where did = OLD.subject and subject = OLD.did;
60
+
61
+
return OLD;
62
+
end;
63
+
$$;
64
+
65
+
create trigger t_profile_state_del
66
+
before delete
67
+
on follows
68
+
for each row
69
+
execute procedure f_profile_state_del_follow();
70
+
71
+
-- profile_states block triggers
72
+
73
+
create function f_profile_state_ins_block() returns trigger
74
+
language plpgsql as
75
+
$$
76
+
begin
77
+
insert into profile_states (did, subject, blocking)
78
+
VALUES (NEW.did, NEW.subject, NEW.rkey)
79
+
ON CONFLICT (did, subject) DO UPDATE SET blocking=excluded.blocking;
80
+
81
+
insert into profile_states (did, subject, blocked)
82
+
VALUES (NEW.subject, NEW.did, TRUE)
83
+
ON CONFLICT (did, subject) DO UPDATE SET blocked=excluded.blocked;
84
+
85
+
return NEW;
86
+
end;
87
+
$$;
88
+
89
+
create trigger t_profile_state_ins
90
+
before insert
91
+
on blocks
92
+
for each row
93
+
execute procedure f_profile_state_ins_block();
94
+
95
+
create function f_profile_state_del_block() returns trigger
96
+
language plpgsql as
97
+
$$
98
+
begin
99
+
update profile_states set blocking = null where did = OLD.did and subject = OLD.subject;
100
+
update profile_states set blocked = FALSE where did = OLD.subject and subject = OLD.did;
101
+
102
+
return OLD;
103
+
end;
104
+
$$;
105
+
106
+
create trigger t_profile_state_del
107
+
before delete
108
+
on blocks
109
+
for each row
110
+
execute procedure f_profile_state_del_block();
111
+
112
+
-- profile_states mutes triggers
113
+
114
+
create function f_profile_state_ins_mute() returns trigger
115
+
language plpgsql as
116
+
$$
117
+
begin
118
+
insert into profile_states (did, subject, muting)
119
+
VALUES (NEW.did, NEW.subject, TRUE)
120
+
ON CONFLICT (did, subject) DO UPDATE SET muting=excluded.muting;
121
+
122
+
return NEW;
123
+
end;
124
+
$$;
125
+
126
+
create trigger t_profile_state_ins
127
+
before insert
128
+
on mutes
129
+
for each row
130
+
execute procedure f_profile_state_ins_mute();
131
+
132
+
create function f_profile_state_del_mute() returns trigger
133
+
language plpgsql as
134
+
$$
135
+
begin
136
+
update profile_states set muting = false where did = OLD.did and subject = OLD.subject;
137
+
138
+
return OLD;
139
+
end;
140
+
$$;
141
+
142
+
create trigger t_profile_state_del
143
+
before delete
144
+
on mutes
145
+
for each row
146
+
execute procedure f_profile_state_del_mute();
+3
migrations/2025-09-24-205239_profiles-4224/down.sql
+3
migrations/2025-09-24-205239_profiles-4224/up.sql
+15
migrations/2025-09-27-171241_post-tweaks/down.sql
···
1
+
alter table posts
2
+
drop column mentions,
3
+
drop column violates_threadgate;
4
+
5
+
drop trigger t_author_feed_ins_post on posts;
6
+
drop trigger t_author_feed_del_post on posts;
7
+
drop trigger t_author_feed_ins_repost on reposts;
8
+
drop trigger t_author_feed_del_repost on reposts;
9
+
10
+
drop function f_author_feed_ins_post;
11
+
drop function f_author_feed_del_post;
12
+
drop function f_author_feed_ins_repost;
13
+
drop function f_author_feed_del_repost;
14
+
15
+
drop table author_feeds;
+79
migrations/2025-09-27-171241_post-tweaks/up.sql
···
1
+
alter table posts
2
+
add column mentions text[],
3
+
add column violates_threadgate bool not null default false;
4
+
5
+
create table author_feeds
6
+
(
7
+
uri text primary key,
8
+
cid text not null,
9
+
post text not null,
10
+
did text not null,
11
+
typ text not null,
12
+
sort_at timestamptz not null
13
+
);
14
+
15
+
-- author_feeds post triggers
16
+
create function f_author_feed_ins_post() returns trigger
17
+
language plpgsql as
18
+
$$
19
+
begin
20
+
insert into author_feeds (uri, cid, post, did, typ, sort_at)
21
+
VALUES (NEW.at_uri, NEW.cid, NEW.at_uri, NEW.did, 'post', NEW.created_at)
22
+
on conflict do nothing;
23
+
return NEW;
24
+
end;
25
+
$$;
26
+
27
+
create trigger t_author_feed_ins_post
28
+
before insert
29
+
on posts
30
+
for each row
31
+
execute procedure f_author_feed_ins_post();
32
+
33
+
create function f_author_feed_del_post() returns trigger
34
+
language plpgsql as
35
+
$$
36
+
begin
37
+
delete from author_feeds where did = OLD.did and uri = OLD.at_uri and typ = 'post';
38
+
return OLD;
39
+
end;
40
+
$$;
41
+
42
+
create trigger t_author_feed_del_post
43
+
before delete
44
+
on posts
45
+
for each row
46
+
execute procedure f_author_feed_del_post();
47
+
48
+
-- author_feeds repost triggers
49
+
create function f_author_feed_ins_repost() returns trigger
50
+
language plpgsql as
51
+
$$
52
+
begin
53
+
insert into author_feeds (uri, cid, post, did, typ, sort_at)
54
+
VALUES ('at://' || NEW.did || '/app.bsky.feed.repost/' || NEW.rkey, NEW.post_cid, NEW.post, NEW.did, 'repost', NEW.created_at)
55
+
on conflict do nothing;
56
+
return NEW;
57
+
end;
58
+
$$;
59
+
60
+
create trigger t_author_feed_ins_repost
61
+
before insert
62
+
on reposts
63
+
for each row
64
+
execute procedure f_author_feed_ins_repost();
65
+
66
+
create function f_author_feed_del_repost() returns trigger
67
+
language plpgsql as
68
+
$$
69
+
begin
70
+
delete from author_feeds where did = OLD.did and post = OLD.post and typ = 'repost';
71
+
return OLD;
72
+
end;
73
+
$$;
74
+
75
+
create trigger t_author_feed_del_repost
76
+
before delete
77
+
on reposts
78
+
for each row
79
+
execute procedure f_author_feed_del_repost();
+9
-3
parakeet/Cargo.toml
···
6
6
[dependencies]
7
7
async-recursion = "1.1.1"
8
8
axum = { version = "0.8", features = ["json"] }
9
+
axum-tracing-opentelemetry = "0.32"
9
10
axum-extra = { version = "0.10.0", features = ["query", "typed-header"] }
10
11
base64 = "0.22"
11
12
chrono = { version = "0.4.39", features = ["serde"] }
12
-
dataloader = { path = "../dataloader-rs", default-features = false, features = ["runtime-tokio"] }
13
+
dataloader = { path = "../dataloader-rs" }
13
14
deadpool = { version = "0.12.1", features = ["managed"] }
14
15
did-resolver = { path = "../did-resolver" }
15
16
diesel = { version = "2.2.6", features = ["chrono", "serde_json"] }
···
21
22
jsonwebtoken = { git = "https://gitlab.com/parakeet-social/jsonwebtoken", branch = "es256k" }
22
23
lexica = { path = "../lexica" }
23
24
multibase = "0.9.1"
25
+
opentelemetry = "0.31.0"
26
+
opentelemetry-otlp = "0.31.0"
27
+
opentelemetry_sdk = "0.31.0"
24
28
parakeet-db = { path = "../parakeet-db" }
25
-
parakeet-index = { path = "../parakeet-index" }
29
+
parakeet-index = { path = "../parakeet-index", features = ["otel"] }
26
30
redis = { version = "0.32", features = ["tokio-native-tls-comp"] }
27
31
reqwest = { version = "0.12", features = ["json"] }
28
32
serde = { version = "1.0.217", features = ["derive"] }
29
33
serde_ipld_dagcbor = "0.6.1"
30
34
serde_json = "1.0.134"
31
35
tokio = { version = "1.42.0", features = ["full"] }
36
+
tower = "0.5"
32
37
tower-http = { version = "0.6.2", features = ["cors", "trace"] }
33
38
tracing = "0.1.40"
34
-
tracing-subscriber = "0.3.18"
39
+
tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json"] }
40
+
tracing-opentelemetry = "0.32"
+2
-2
parakeet/src/cache.rs
···
29
29
type Val = V;
30
30
31
31
async fn get(&mut self, key: &Self::Key) -> Option<Self::Val> {
32
-
let res: Option<Vec<u8>> = redis::AsyncCommands::get(&mut self.conn, &key).await.ok()?;
32
+
let res: Option<Vec<u8>> = redis::AsyncCommands::get(&mut self.conn, key).await.ok()?;
33
33
34
34
match serde_ipld_dagcbor::from_slice(&res?) {
35
35
Ok(v) => Some(v),
···
57
57
}
58
58
59
59
async fn remove(&mut self, key: &Self::Key) -> Option<Self::Val> {
60
-
let res: Option<Vec<u8>> = redis::AsyncCommands::get_del(&mut self.conn, &key)
60
+
let res: Option<Vec<u8>> = redis::AsyncCommands::get_del(&mut self.conn, key)
61
61
.await
62
62
.ok()?;
63
63
+10
parakeet/src/config.rs
···
13
13
14
14
#[derive(Debug, Deserialize)]
15
15
pub struct Config {
16
+
#[serde(flatten)]
17
+
pub instruments: ConfigInstruments,
16
18
pub index_uri: String,
17
19
pub database_url: String,
18
20
pub redis_uri: String,
···
27
29
pub did_allowlist: Option<Vec<String>>,
28
30
#[serde(default)]
29
31
pub migrate: bool,
32
+
}
33
+
34
+
#[derive(Debug, Deserialize)]
35
+
pub struct ConfigInstruments {
36
+
#[serde(default)]
37
+
pub otel_enable: bool,
38
+
#[serde(default)]
39
+
pub log_json: bool,
30
40
}
31
41
32
42
#[derive(Debug, Deserialize)]
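#[serde(flatten)] means otel_enable and log_json are read from the top level of parakeet's config source, next to index_uri and the other keys, rather than from a nested instruments section, and #[serde(default)] lets both be omitted. A sketch with hypothetical mirror structs (parakeet is a binary crate, so its real Config is not importable here):

// Sketch: hypothetical mirrors of Config / ConfigInstruments, trimmed to the fields that matter.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Config {
    #[serde(flatten)]
    instruments: ConfigInstruments,
    index_uri: String,
}

#[derive(Debug, Deserialize)]
struct ConfigInstruments {
    #[serde(default)]
    otel_enable: bool,
    #[serde(default)]
    log_json: bool,
}

fn main() {
    // log_json sits at the top level next to index_uri, not under an "instruments" key.
    let cfg: Config =
        serde_json::from_str(r#"{"index_uri": "http://localhost:9200", "log_json": true}"#).unwrap();
    assert_eq!(cfg.index_uri, "http://localhost:9200");
    assert!(cfg.instruments.log_json);
    assert!(!cfg.instruments.otel_enable); // omitted, so it falls back to the default
}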
+297
parakeet/src/db.rs
···
1
1
use diesel::prelude::*;
2
+
use diesel::sql_types::{Array, Bool, Integer, Nullable, Text};
2
3
use diesel_async::{AsyncPgConnection, RunQueryDsl};
4
+
use parakeet_db::models::TextArray;
3
5
use parakeet_db::{schema, types};
6
+
use tracing::instrument;
4
7
8
+
#[instrument(skip_all)]
5
9
pub async fn get_actor_status(
6
10
conn: &mut AsyncPgConnection,
7
11
did: &str,
···
13
17
.await
14
18
.optional()
15
19
}
20
+
21
+
#[derive(Clone, Debug, QueryableByName)]
22
+
#[diesel(check_for_backend(diesel::pg::Pg))]
23
+
pub struct ProfileStateRet {
24
+
#[diesel(sql_type = Text)]
25
+
pub did: String,
26
+
#[diesel(sql_type = Text)]
27
+
pub subject: String,
28
+
#[diesel(sql_type = Nullable<Bool>)]
29
+
pub muting: Option<bool>,
30
+
#[diesel(sql_type = Nullable<Bool>)]
31
+
pub blocked: Option<bool>,
32
+
#[diesel(sql_type = Nullable<Text>)]
33
+
pub blocking: Option<String>,
34
+
#[diesel(sql_type = Nullable<Text>)]
35
+
pub following: Option<String>,
36
+
#[diesel(sql_type = Nullable<Text>)]
37
+
pub followed: Option<String>,
38
+
#[diesel(sql_type = Nullable<Text>)]
39
+
pub list_block: Option<String>,
40
+
#[diesel(sql_type = Nullable<Text>)]
41
+
pub list_mute: Option<String>,
42
+
}
43
+
44
+
#[instrument(skip_all)]
45
+
pub async fn get_profile_state(
46
+
conn: &mut AsyncPgConnection,
47
+
did: &str,
48
+
sub: &str,
49
+
) -> QueryResult<Option<ProfileStateRet>> {
50
+
diesel::sql_query(include_str!("sql/profile_state.sql"))
51
+
.bind::<Text, _>(did)
52
+
.bind::<Array<Text>, _>(vec![sub])
53
+
.get_result::<ProfileStateRet>(conn)
54
+
.await
55
+
.optional()
56
+
}
57
+
58
+
#[instrument(skip_all)]
59
+
pub async fn get_profile_states(
60
+
conn: &mut AsyncPgConnection,
61
+
did: &str,
62
+
sub: &[String],
63
+
) -> QueryResult<Vec<ProfileStateRet>> {
64
+
diesel::sql_query(include_str!("sql/profile_state.sql"))
65
+
.bind::<Text, _>(did)
66
+
.bind::<Array<Text>, _>(sub)
67
+
.load::<ProfileStateRet>(conn)
68
+
.await
69
+
}
70
+
71
+
#[derive(Clone, Debug, QueryableByName)]
72
+
#[diesel(check_for_backend(diesel::pg::Pg))]
73
+
pub struct PostStateRet {
74
+
#[diesel(sql_type = diesel::sql_types::Text)]
75
+
pub at_uri: String,
76
+
#[diesel(sql_type = diesel::sql_types::Text)]
77
+
pub did: String,
78
+
#[diesel(sql_type = diesel::sql_types::Text)]
79
+
pub cid: String,
80
+
#[diesel(sql_type = diesel::sql_types::Nullable<diesel::sql_types::Text>)]
81
+
pub like_rkey: Option<String>,
82
+
#[diesel(sql_type = diesel::sql_types::Nullable<diesel::sql_types::Text>)]
83
+
pub repost_rkey: Option<String>,
84
+
#[diesel(sql_type = diesel::sql_types::Bool)]
85
+
pub bookmarked: bool,
86
+
// #[diesel(sql_type = diesel::sql_types::Bool)]
87
+
// pub muted: bool,
88
+
#[diesel(sql_type = diesel::sql_types::Bool)]
89
+
pub embed_disabled: bool,
90
+
#[diesel(sql_type = diesel::sql_types::Bool)]
91
+
pub pinned: bool,
92
+
}
93
+
94
+
#[instrument(skip_all)]
95
+
pub async fn get_post_state(
96
+
conn: &mut AsyncPgConnection,
97
+
did: &str,
98
+
subject: &str,
99
+
) -> QueryResult<Option<PostStateRet>> {
100
+
diesel::sql_query(include_str!("sql/post_state.sql"))
101
+
.bind::<Text, _>(did)
102
+
.bind::<Array<Text>, _>(vec![subject])
103
+
.get_result::<PostStateRet>(conn)
104
+
.await
105
+
.optional()
106
+
}
107
+
108
+
#[instrument(skip_all)]
109
+
pub async fn get_post_states(
110
+
conn: &mut AsyncPgConnection,
111
+
did: &str,
112
+
sub: &[String],
113
+
) -> QueryResult<Vec<PostStateRet>> {
114
+
diesel::sql_query(include_str!("sql/post_state.sql"))
115
+
.bind::<Text, _>(did)
116
+
.bind::<Array<Text>, _>(sub)
117
+
.load::<PostStateRet>(conn)
118
+
.await
119
+
}
120
+
121
+
#[derive(Clone, Debug, QueryableByName)]
122
+
#[diesel(check_for_backend(diesel::pg::Pg))]
123
+
pub struct ListStateRet {
124
+
#[diesel(sql_type = Text)]
125
+
pub at_uri: String,
126
+
#[diesel(sql_type = Bool)]
127
+
pub muted: bool,
128
+
#[diesel(sql_type = Nullable<Text>)]
129
+
pub block: Option<String>,
130
+
}
131
+
132
+
#[instrument(skip_all)]
133
+
pub async fn get_list_state(
134
+
conn: &mut AsyncPgConnection,
135
+
did: &str,
136
+
subject: &str,
137
+
) -> QueryResult<Option<ListStateRet>> {
138
+
diesel::sql_query(include_str!("sql/list_states.sql"))
139
+
.bind::<Text, _>(did)
140
+
.bind::<Array<Text>, _>(vec![subject])
141
+
.get_result::<ListStateRet>(conn)
142
+
.await
143
+
.optional()
144
+
}
145
+
146
+
#[instrument(skip_all)]
147
+
pub async fn get_list_states(
148
+
conn: &mut AsyncPgConnection,
149
+
did: &str,
150
+
sub: &[String],
151
+
) -> QueryResult<Vec<ListStateRet>> {
152
+
diesel::sql_query(include_str!("sql/list_states.sql"))
153
+
.bind::<Text, _>(did)
154
+
.bind::<Array<Text>, _>(sub)
155
+
.load::<ListStateRet>(conn)
156
+
.await
157
+
}
158
+
159
+
#[instrument(skip_all)]
160
+
pub async fn get_like_state(
161
+
conn: &mut AsyncPgConnection,
162
+
did: &str,
163
+
subject: &str,
164
+
) -> QueryResult<Option<(String, String)>> {
165
+
schema::likes::table
166
+
.select((schema::likes::did, schema::likes::rkey))
167
+
.filter(
168
+
schema::likes::did
169
+
.eq(did)
170
+
.and(schema::likes::subject.eq(subject)),
171
+
)
172
+
.get_result(conn)
173
+
.await
174
+
.optional()
175
+
}
176
+
177
+
#[instrument(skip_all)]
178
+
pub async fn get_like_states(
179
+
conn: &mut AsyncPgConnection,
180
+
did: &str,
181
+
sub: &[String],
182
+
) -> QueryResult<Vec<(String, String, String)>> {
183
+
schema::likes::table
184
+
.select((
185
+
schema::likes::subject,
186
+
schema::likes::did,
187
+
schema::likes::rkey,
188
+
))
189
+
.filter(
190
+
schema::likes::did
191
+
.eq(did)
192
+
.and(schema::likes::subject.eq_any(sub)),
193
+
)
194
+
.load(conn)
195
+
.await
196
+
}
197
+
198
+
#[instrument(skip_all)]
199
+
pub async fn get_pinned_post_uri(
200
+
conn: &mut AsyncPgConnection,
201
+
did: &str,
202
+
) -> QueryResult<Option<String>> {
203
+
schema::profiles::table
204
+
.select(schema::profiles::pinned_uri.assume_not_null())
205
+
.filter(
206
+
schema::profiles::did
207
+
.eq(did)
208
+
.and(schema::profiles::pinned_uri.is_not_null()),
209
+
)
210
+
.get_result(conn)
211
+
.await
212
+
.optional()
213
+
}
214
+
215
+
#[derive(Debug, QueryableByName)]
216
+
#[diesel(check_for_backend(diesel::pg::Pg))]
217
+
#[allow(unused)]
218
+
pub struct ThreadItem {
219
+
#[diesel(sql_type = Text)]
220
+
pub at_uri: String,
221
+
#[diesel(sql_type = Nullable<Text>)]
222
+
pub parent_uri: Option<String>,
223
+
#[diesel(sql_type = Nullable<Text>)]
224
+
pub root_uri: Option<String>,
225
+
#[diesel(sql_type = Integer)]
226
+
pub depth: i32,
227
+
}
228
+
229
+
#[instrument(skip_all)]
230
+
pub async fn get_thread_children(
231
+
conn: &mut AsyncPgConnection,
232
+
uri: &str,
233
+
depth: i32,
234
+
) -> QueryResult<Vec<ThreadItem>> {
235
+
diesel::sql_query(include_str!("sql/thread.sql"))
236
+
.bind::<Text, _>(uri)
237
+
.bind::<Integer, _>(depth)
238
+
.load(conn)
239
+
.await
240
+
}
241
+
242
+
#[instrument(skip_all)]
243
+
pub async fn get_thread_children_branching(
244
+
conn: &mut AsyncPgConnection,
245
+
uri: &str,
246
+
depth: i32,
247
+
branching_factor: i32,
248
+
) -> QueryResult<Vec<ThreadItem>> {
249
+
diesel::sql_query(include_str!("sql/thread_branching.sql"))
250
+
.bind::<Text, _>(uri)
251
+
.bind::<Integer, _>(depth)
252
+
.bind::<Integer, _>(branching_factor)
253
+
.load(conn)
254
+
.await
255
+
}
256
+
257
+
#[derive(Debug, QueryableByName)]
258
+
#[diesel(check_for_backend(diesel::pg::Pg))]
259
+
pub struct HiddenThreadChildItem {
260
+
#[diesel(sql_type = Text)]
261
+
pub at_uri: String,
262
+
}
263
+
264
+
#[instrument(skip_all)]
265
+
pub async fn get_thread_children_hidden(
266
+
conn: &mut AsyncPgConnection,
267
+
uri: &str,
268
+
root: &str,
269
+
) -> QueryResult<Vec<HiddenThreadChildItem>> {
270
+
diesel::sql_query(include_str!("sql/thread_v2_hidden_children.sql"))
271
+
.bind::<Text, _>(uri)
272
+
.bind::<Text, _>(root)
273
+
.load(conn)
274
+
.await
275
+
}
276
+
277
+
#[instrument(skip_all)]
278
+
pub async fn get_thread_parents(
279
+
conn: &mut AsyncPgConnection,
280
+
uri: &str,
281
+
height: i32,
282
+
) -> QueryResult<Vec<ThreadItem>> {
283
+
diesel::sql_query(include_str!("sql/thread_parent.sql"))
284
+
.bind::<Text, _>(uri)
285
+
.bind::<Integer, _>(height)
286
+
.load(conn)
287
+
.await
288
+
}
289
+
290
+
#[instrument(skip_all)]
291
+
pub async fn get_root_post(conn: &mut AsyncPgConnection, uri: &str) -> QueryResult<Option<String>> {
292
+
schema::posts::table
293
+
.select(schema::posts::root_uri)
294
+
.find(&uri)
295
+
.get_result(conn)
296
+
.await
297
+
.optional()
298
+
.map(|v| v.flatten())
299
+
}
300
+
301
+
#[instrument(skip_all)]
302
+
pub async fn get_threadgate_hiddens(
303
+
conn: &mut AsyncPgConnection,
304
+
uri: &str,
305
+
) -> QueryResult<Option<TextArray>> {
306
+
schema::threadgates::table
307
+
.select(schema::threadgates::hidden_replies)
308
+
.find(&uri)
309
+
.get_result(conn)
310
+
.await
311
+
.optional()
312
+
}
+3
parakeet/src/hydration/embed.rs
···
8
8
use lexica::app_bsky::feed::PostView;
9
9
use parakeet_db::models;
10
10
use std::collections::HashMap;
11
+
use tracing::instrument;
11
12
12
13
fn build_aspect_ratio(height: Option<i32>, width: Option<i32>) -> Option<AspectRatio> {
13
14
height
···
176
177
out
177
178
}
178
179
180
+
#[instrument(skip_all)]
179
181
pub async fn hydrate_embed(&self, post: String) -> Option<Embed> {
180
182
let (embed, author) = self.loaders.embed.load(post).await?;
181
183
···
195
197
}
196
198
}
197
199
200
+
#[instrument(skip_all)]
198
201
pub async fn hydrate_embeds(&self, posts: Vec<String>) -> HashMap<String, Embed> {
199
202
let embeds = self.loaders.embed.load_many(posts).await;
200
203
+46
-3
parakeet/src/hydration/feedgen.rs
···
1
1
use crate::hydration::map_labels;
2
2
use crate::xrpc::cdn::BskyCdn;
3
3
use lexica::app_bsky::actor::ProfileView;
4
-
use lexica::app_bsky::feed::{GeneratorContentMode, GeneratorView};
4
+
use lexica::app_bsky::feed::{GeneratorContentMode, GeneratorView, GeneratorViewerState};
5
5
use parakeet_db::models;
6
6
use std::collections::HashMap;
7
7
use std::str::FromStr;
8
+
use tracing::instrument;
9
+
10
+
fn build_viewer((did, rkey): (String, String)) -> GeneratorViewerState {
11
+
GeneratorViewerState {
12
+
like: Some(format!("at://{did}/app.bsky.feed.like/{rkey}")),
13
+
}
14
+
}
8
15
9
16
fn build_feedgen(
10
17
feedgen: models::FeedGen,
11
18
creator: ProfileView,
12
19
labels: Vec<models::Label>,
13
20
likes: Option<i32>,
21
+
viewer: Option<GeneratorViewerState>,
14
22
cdn: &BskyCdn,
15
23
) -> GeneratorView {
16
24
let content_mode = feedgen
···
35
43
like_count: likes.unwrap_or_default() as i64,
36
44
accepts_interactions: feedgen.accepts_interactions,
37
45
labels: map_labels(labels),
46
+
viewer,
38
47
content_mode,
39
48
indexed_at: feedgen.created_at,
40
49
}
41
50
}
42
51
43
52
impl super::StatefulHydrator<'_> {
53
+
#[instrument(skip_all)]
44
54
pub async fn hydrate_feedgen(&self, feedgen: String) -> Option<GeneratorView> {
45
55
let labels = self.get_label(&feedgen).await;
56
+
let viewer = self.get_feedgen_viewer_state(&feedgen).await;
46
57
let likes = self.loaders.like.load(feedgen.clone()).await;
47
58
let feedgen = self.loaders.feedgen.load(feedgen).await?;
48
59
let profile = self.hydrate_profile(feedgen.owner.clone()).await?;
49
60
50
-
Some(build_feedgen(feedgen, profile, labels, likes, &self.cdn))
61
+
Some(build_feedgen(
62
+
feedgen, profile, labels, likes, viewer, &self.cdn,
63
+
))
51
64
}
52
65
66
+
#[instrument(skip_all)]
53
67
pub async fn hydrate_feedgens(&self, feedgens: Vec<String>) -> HashMap<String, GeneratorView> {
54
68
let labels = self.get_label_many(&feedgens).await;
69
+
let viewers = self.get_feedgen_viewer_states(&feedgens).await;
55
70
let mut likes = self.loaders.like.load_many(feedgens.clone()).await;
56
71
let feedgens = self.loaders.feedgen.load_many(feedgens).await;
57
72
···
66
81
.into_iter()
67
82
.filter_map(|(uri, feedgen)| {
68
83
let creator = creators.get(&feedgen.owner).cloned()?;
84
+
let viewer = viewers.get(&uri).cloned();
69
85
let labels = labels.get(&uri).cloned().unwrap_or_default();
70
86
let likes = likes.remove(&uri);
71
87
72
88
Some((
73
89
uri,
74
-
build_feedgen(feedgen, creator, labels, likes, &self.cdn),
90
+
build_feedgen(feedgen, creator, labels, likes, viewer, &self.cdn),
75
91
))
76
92
})
77
93
.collect()
94
+
}
95
+
96
+
#[instrument(skip_all)]
97
+
async fn get_feedgen_viewer_state(&self, subject: &str) -> Option<GeneratorViewerState> {
98
+
if let Some(viewer) = &self.current_actor {
99
+
let data = self.loaders.like_state.get(viewer, subject).await?;
100
+
101
+
Some(build_viewer(data))
102
+
} else {
103
+
None
104
+
}
105
+
}
106
+
107
+
#[instrument(skip_all)]
108
+
async fn get_feedgen_viewer_states(
109
+
&self,
110
+
subjects: &[String],
111
+
) -> HashMap<String, GeneratorViewerState> {
112
+
if let Some(viewer) = &self.current_actor {
113
+
let data = self.loaders.like_state.get_many(viewer, subjects).await;
114
+
115
+
data.into_iter()
116
+
.map(|(k, state)| (k, build_viewer(state)))
117
+
.collect()
118
+
} else {
119
+
HashMap::new()
120
+
}
78
121
}
79
122
}
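build_viewer() reassembles the viewer's like-record AT URI from the (did, rkey) pair returned by the like_state loader; the labeler hydrator below follows the same shape. A trivial runnable sketch of that reconstruction, with made-up identifiers:

// Sketch of the like-URI reconstruction; the did and rkey values are invented.
fn main() {
    let (did, rkey) = ("did:plc:example".to_string(), "3kabc123".to_string());
    let like = format!("at://{did}/app.bsky.feed.like/{rkey}");
    assert_eq!(like, "at://did:plc:example/app.bsky.feed.like/3kabc123");
}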
+66
-14
parakeet/src/hydration/labeler.rs
···
1
1
use crate::hydration::{map_labels, StatefulHydrator};
2
2
use lexica::app_bsky::actor::ProfileView;
3
-
use lexica::app_bsky::labeler::{LabelerPolicy, LabelerView, LabelerViewDetailed};
3
+
use lexica::app_bsky::labeler::{
4
+
LabelerPolicy, LabelerView, LabelerViewDetailed, LabelerViewerState,
5
+
};
4
6
use lexica::com_atproto::label::{Blurs, LabelValueDefinition, Severity};
5
7
use lexica::com_atproto::moderation::{ReasonType, SubjectType};
6
8
use parakeet_db::models;
7
9
use std::collections::HashMap;
8
10
use std::str::FromStr;
11
+
use tracing::instrument;
12
+
13
+
fn build_viewer((did, rkey): (String, String)) -> LabelerViewerState {
14
+
LabelerViewerState {
15
+
like: Some(format!("at://{did}/app.bsky.feed.like/{rkey}")),
16
+
}
17
+
}
9
18
10
19
fn build_view(
11
20
labeler: models::LabelerService,
12
21
creator: ProfileView,
13
22
labels: Vec<models::Label>,
23
+
viewer: Option<LabelerViewerState>,
14
24
likes: Option<i32>,
15
25
) -> LabelerView {
16
26
LabelerView {
···
18
28
cid: labeler.cid,
19
29
creator,
20
30
like_count: likes.unwrap_or_default() as i64,
31
+
viewer,
21
32
labels: map_labels(labels),
22
33
indexed_at: labeler.indexed_at.and_utc(),
23
34
}
···
28
39
defs: Vec<models::LabelDefinition>,
29
40
creator: ProfileView,
30
41
labels: Vec<models::Label>,
42
+
viewer: Option<LabelerViewerState>,
31
43
likes: Option<i32>,
32
44
) -> LabelerViewDetailed {
33
45
let reason_types = labeler.reasons.map(|v| {
34
-
v.into_iter()
35
-
.flatten()
36
-
.filter_map(|v| ReasonType::from_str(&v).ok())
46
+
v.iter()
47
+
.filter_map(|v| ReasonType::from_str(v).ok())
37
48
.collect()
38
49
});
39
50
···
63
74
})
64
75
.collect();
65
76
let subject_types = labeler.subject_types.map(|v| {
66
-
v.into_iter()
67
-
.flatten()
68
-
.filter_map(|v| SubjectType::from_str(&v).ok())
77
+
v.iter()
78
+
.filter_map(|v| SubjectType::from_str(v).ok())
69
79
.collect()
70
80
});
71
-
let subject_collections = labeler
72
-
.subject_collections
73
-
.map(|v| v.into_iter().flatten().collect());
81
+
let subject_collections = labeler.subject_collections.map(Vec::from);
74
82
75
83
LabelerViewDetailed {
76
84
uri: format!("at://{}/app.bsky.labeler.service/self", labeler.did),
77
85
cid: labeler.cid,
78
86
creator,
79
87
like_count: likes.unwrap_or_default() as i64,
88
+
viewer,
80
89
policies: LabelerPolicy {
81
90
label_values,
82
91
label_value_definitions,
···
90
99
}
91
100
92
101
impl StatefulHydrator<'_> {
102
+
#[instrument(skip_all)]
93
103
pub async fn hydrate_labeler(&self, labeler: String) -> Option<LabelerView> {
94
104
let labels = self.get_label(&labeler).await;
105
+
let viewer = self.get_labeler_viewer_state(&labeler).await;
95
106
let likes = self.loaders.like.load(make_labeler_uri(&labeler)).await;
96
107
let (labeler, _) = self.loaders.labeler.load(labeler).await?;
97
108
let creator = self.hydrate_profile(labeler.did.clone()).await?;
98
109
99
-
Some(build_view(labeler, creator, labels, likes))
110
+
Some(build_view(labeler, creator, labels, viewer, likes))
100
111
}
101
112
113
+
#[instrument(skip_all)]
102
114
pub async fn hydrate_labelers(&self, labelers: Vec<String>) -> HashMap<String, LabelerView> {
103
115
let labels = self.get_label_many(&labelers).await;
104
116
let labelers = self.loaders.labeler.load_many(labelers).await;
···
107
119
.values()
108
120
.map(|(labeler, _)| (labeler.did.clone(), make_labeler_uri(&labeler.did)))
109
121
.unzip::<_, _, Vec<_>, Vec<_>>();
122
+
let viewers = self.get_labeler_viewer_states(&uris).await;
110
123
let creators = self.hydrate_profiles(creators).await;
111
124
let mut likes = self.loaders.like.load_many(uris.clone()).await;
112
125
···
116
129
let creator = creators.get(&labeler.did).cloned()?;
117
130
let labels = labels.get(&k).cloned().unwrap_or_default();
118
131
let likes = likes.remove(&make_labeler_uri(&labeler.did));
132
+
let viewer = viewers.get(&make_labeler_uri(&k)).cloned();
119
133
120
-
Some((k, build_view(labeler, creator, labels, likes)))
134
+
Some((k, build_view(labeler, creator, labels, viewer, likes)))
121
135
})
122
136
.collect()
123
137
}
124
138
139
+
#[instrument(skip_all)]
125
140
pub async fn hydrate_labeler_detailed(&self, labeler: String) -> Option<LabelerViewDetailed> {
126
141
let labels = self.get_label(&labeler).await;
142
+
let viewer = self.get_labeler_viewer_state(&labeler).await;
127
143
let likes = self.loaders.like.load(make_labeler_uri(&labeler)).await;
128
144
let (labeler, defs) = self.loaders.labeler.load(labeler).await?;
129
145
let creator = self.hydrate_profile(labeler.did.clone()).await?;
130
146
131
-
Some(build_view_detailed(labeler, defs, creator, labels, likes))
147
+
Some(build_view_detailed(
148
+
labeler, defs, creator, labels, viewer, likes,
149
+
))
132
150
}
133
151
152
+
#[instrument(skip_all)]
134
153
pub async fn hydrate_labelers_detailed(
135
154
&self,
136
155
labelers: Vec<String>,
···
142
161
.values()
143
162
.map(|(labeler, _)| (labeler.did.clone(), make_labeler_uri(&labeler.did)))
144
163
.unzip::<_, _, Vec<_>, Vec<_>>();
164
+
let viewers = self.get_labeler_viewer_states(&uris).await;
145
165
let creators = self.hydrate_profiles(creators).await;
146
166
let mut likes = self.loaders.like.load_many(uris.clone()).await;
147
167
···
151
171
let creator = creators.get(&labeler.did).cloned()?;
152
172
let labels = labels.get(&k).cloned().unwrap_or_default();
153
173
let likes = likes.remove(&make_labeler_uri(&labeler.did));
174
+
let viewer = viewers.get(&make_labeler_uri(&k)).cloned();
154
175
155
-
let view = build_view_detailed(labeler, defs, creator, labels, likes);
176
+
let view = build_view_detailed(labeler, defs, creator, labels, viewer, likes);
156
177
157
178
Some((k, view))
158
179
})
159
180
.collect()
181
+
}
182
+
183
+
#[instrument(skip_all)]
184
+
async fn get_labeler_viewer_state(&self, subject: &str) -> Option<LabelerViewerState> {
185
+
if let Some(viewer) = &self.current_actor {
186
+
let data = self
187
+
.loaders
188
+
.like_state
189
+
.get(&make_labeler_uri(viewer), subject)
190
+
.await?;
191
+
192
+
Some(build_viewer(data))
193
+
} else {
194
+
None
195
+
}
196
+
}
197
+
198
+
#[instrument(skip_all)]
199
+
async fn get_labeler_viewer_states(
200
+
&self,
201
+
subjects: &[String],
202
+
) -> HashMap<String, LabelerViewerState> {
203
+
if let Some(viewer) = &self.current_actor {
204
+
let data = self.loaders.like_state.get_many(viewer, subjects).await;
205
+
206
+
data.into_iter()
207
+
.map(|(k, state)| (k, build_viewer(state)))
208
+
.collect()
209
+
} else {
210
+
HashMap::new()
211
+
}
160
212
}
161
213
}
162
214
+64
-5
parakeet/src/hydration/list.rs
···
1
+
use crate::db::ListStateRet;
1
2
use crate::hydration::{map_labels, StatefulHydrator};
2
3
use crate::xrpc::cdn::BskyCdn;
3
4
use lexica::app_bsky::actor::ProfileView;
4
-
use lexica::app_bsky::graph::{ListPurpose, ListView, ListViewBasic};
5
+
use lexica::app_bsky::graph::{ListPurpose, ListView, ListViewBasic, ListViewerState};
5
6
use parakeet_db::models;
6
7
use std::collections::HashMap;
7
8
use std::str::FromStr;
9
+
use tracing::instrument;
10
+
11
+
fn build_viewer(data: ListStateRet) -> ListViewerState {
12
+
ListViewerState {
13
+
muted: data.muted,
14
+
blocked: data.block,
15
+
}
16
+
}
8
17
9
18
fn build_basic(
10
19
list: models::List,
11
20
list_item_count: i64,
12
21
labels: Vec<models::Label>,
22
+
viewer: Option<ListViewerState>,
13
23
cdn: &BskyCdn,
14
24
) -> Option<ListViewBasic> {
15
25
let purpose = ListPurpose::from_str(&list.list_type).ok()?;
···
22
32
purpose,
23
33
avatar,
24
34
list_item_count,
35
+
viewer,
25
36
labels: map_labels(labels),
26
37
indexed_at: list.created_at,
27
38
})
···
32
43
list_item_count: i64,
33
44
creator: ProfileView,
34
45
labels: Vec<models::Label>,
46
+
viewer: Option<ListViewerState>,
35
47
cdn: &BskyCdn,
36
48
) -> Option<ListView> {
37
49
let purpose = ListPurpose::from_str(&list.list_type).ok()?;
···
51
63
description_facets,
52
64
avatar,
53
65
list_item_count,
66
+
viewer,
54
67
labels: map_labels(labels),
55
68
indexed_at: list.created_at,
56
69
})
57
70
}
58
71
59
72
impl StatefulHydrator<'_> {
73
+
#[instrument(skip_all)]
60
74
pub async fn hydrate_list_basic(&self, list: String) -> Option<ListViewBasic> {
61
75
let labels = self.get_label(&list).await;
76
+
let viewer = self.get_list_viewer_state(&list).await;
62
77
let (list, count) = self.loaders.list.load(list).await?;
63
78
64
-
build_basic(list, count, labels, &self.cdn)
79
+
build_basic(list, count, labels, viewer, &self.cdn)
65
80
}
66
81
82
+
#[instrument(skip_all)]
67
83
pub async fn hydrate_lists_basic(&self, lists: Vec<String>) -> HashMap<String, ListViewBasic> {
84
+
if lists.is_empty() {
85
+
return HashMap::new();
86
+
}
87
+
68
88
let labels = self.get_label_many(&lists).await;
89
+
let viewers = self.get_list_viewer_states(&lists).await;
69
90
let lists = self.loaders.list.load_many(lists).await;
70
91
71
92
lists
72
93
.into_iter()
73
94
.filter_map(|(uri, (list, count))| {
74
95
let labels = labels.get(&uri).cloned().unwrap_or_default();
96
+
let viewer = viewers.get(&uri).cloned();
75
97
76
-
build_basic(list, count, labels, &self.cdn).map(|v| (uri, v))
98
+
build_basic(list, count, labels, viewer, &self.cdn).map(|v| (uri, v))
77
99
})
78
100
.collect()
79
101
}
80
102
103
+
#[instrument(skip_all)]
81
104
pub async fn hydrate_list(&self, list: String) -> Option<ListView> {
82
105
let labels = self.get_label(&list).await;
106
+
let viewer = self.get_list_viewer_state(&list).await;
83
107
let (list, count) = self.loaders.list.load(list).await?;
84
108
let profile = self.hydrate_profile(list.owner.clone()).await?;
85
109
86
-
build_listview(list, count, profile, labels, &self.cdn)
110
+
build_listview(list, count, profile, labels, viewer, &self.cdn)
87
111
}
88
112
113
+
#[instrument(skip_all)]
89
114
pub async fn hydrate_lists(&self, lists: Vec<String>) -> HashMap<String, ListView> {
115
+
if lists.is_empty() {
116
+
return HashMap::new();
117
+
}
118
+
90
119
let labels = self.get_label_many(&lists).await;
120
+
let viewers = self.get_list_viewer_states(&lists).await;
91
121
let lists = self.loaders.list.load_many(lists).await;
92
122
93
123
let creators = lists.values().map(|(list, _)| list.owner.clone()).collect();
···
97
127
.into_iter()
98
128
.filter_map(|(uri, (list, count))| {
99
129
let creator = creators.get(&list.owner)?;
130
+
let viewer = viewers.get(&uri).cloned();
100
131
let labels = labels.get(&uri).cloned().unwrap_or_default();
101
132
102
-
build_listview(list, count, creator.to_owned(), labels, &self.cdn).map(|v| (uri, v))
133
+
build_listview(list, count, creator.to_owned(), labels, viewer, &self.cdn)
134
+
.map(|v| (uri, v))
103
135
})
104
136
.collect()
137
+
}
138
+
139
+
#[instrument(skip_all)]
140
+
async fn get_list_viewer_state(&self, subject: &str) -> Option<ListViewerState> {
141
+
if let Some(viewer) = &self.current_actor {
142
+
let data = self.loaders.list_state.get(viewer, subject).await?;
143
+
144
+
Some(build_viewer(data))
145
+
} else {
146
+
None
147
+
}
148
+
}
149
+
150
+
#[instrument(skip_all)]
151
+
async fn get_list_viewer_states(
152
+
&self,
153
+
subjects: &[String],
154
+
) -> HashMap<String, ListViewerState> {
155
+
if let Some(viewer) = &self.current_actor {
156
+
let data = self.loaders.list_state.get_many(viewer, subjects).await;
157
+
158
+
data.into_iter()
159
+
.map(|(k, state)| (k, build_viewer(state)))
160
+
.collect()
161
+
} else {
162
+
HashMap::new()
163
+
}
105
164
}
106
165
}
+4
parakeet/src/hydration/mod.rs
···
63
63
}
64
64
}
65
65
66
+
#[tracing::instrument(skip_all)]
66
67
async fn get_label(&self, uri: &str) -> Vec<parakeet_db::models::Label> {
67
68
self.loaders.label.load(uri, self.accept_labelers).await
68
69
}
69
70
71
+
#[tracing::instrument(skip_all)]
70
72
async fn get_profile_label(&self, did: &str) -> Vec<parakeet_db::models::Label> {
71
73
let uris = &[
72
74
did.to_string(),
···
80
82
.collect()
81
83
}
82
84
85
+
#[tracing::instrument(skip_all)]
83
86
async fn get_label_many(
84
87
&self,
85
88
uris: &[String],
···
90
93
.await
91
94
}
92
95
96
+
#[tracing::instrument(skip_all)]
93
97
async fn get_profile_label_many(
94
98
&self,
95
99
uris: &[String],
+248
-80
parakeet/src/hydration/posts.rs
···
1
+
use crate::db::PostStateRet;
1
2
use crate::hydration::{map_labels, StatefulHydrator};
2
3
use lexica::app_bsky::actor::ProfileViewBasic;
3
4
use lexica::app_bsky::embed::Embed;
4
-
use lexica::app_bsky::feed::{FeedViewPost, PostView, ReplyRef, ReplyRefPost, ThreadgateView};
5
+
use lexica::app_bsky::feed::{
6
+
BlockedAuthor, FeedReasonRepost, FeedViewPost, FeedViewPostReason, PostView, PostViewerState,
7
+
ReplyRef, ReplyRefPost, ThreadgateView,
8
+
};
5
9
use lexica::app_bsky::graph::ListViewBasic;
6
10
use lexica::app_bsky::RecordStats;
7
11
use parakeet_db::models;
8
12
use parakeet_index::PostStats;
9
13
use std::collections::HashMap;
14
+
use tracing::instrument;
15
+
16
+
fn build_viewer(did: &str, data: PostStateRet) -> PostViewerState {
17
+
let is_me = did == data.did;
18
+
19
+
let repost = data
20
+
.repost_rkey
21
+
.map(|rkey| format!("at://{did}/app.bsky.feed.repost/{rkey}"));
22
+
let like = data
23
+
.like_rkey
24
+
.map(|rkey| format!("at://{did}/app.bsky.feed.like/{rkey}"));
25
+
26
+
PostViewerState {
27
+
repost,
28
+
like,
29
+
bookmarked: data.bookmarked,
30
+
thread_muted: false, // todo when we have thread mutes
31
+
reply_disabled: false,
32
+
embedding_disabled: data.embed_disabled && !is_me, // poster can always bypass embed disabled.
33
+
pinned: data.pinned,
34
+
}
35
+
}
36
+
37
+
type HydratePostsRet = (
38
+
models::Post,
39
+
ProfileViewBasic,
40
+
Vec<models::Label>,
41
+
Option<Embed>,
42
+
Option<ThreadgateView>,
43
+
Option<PostViewerState>,
44
+
Option<PostStats>,
45
+
);
10
46
11
47
fn build_postview(
12
-
post: models::Post,
13
-
author: ProfileViewBasic,
14
-
labels: Vec<models::Label>,
15
-
embed: Option<Embed>,
16
-
threadgate: Option<ThreadgateView>,
17
-
stats: Option<PostStats>,
48
+
(post, author, labels, embed, threadgate, viewer, stats): HydratePostsRet,
18
49
) -> PostView {
19
50
let stats = stats
20
51
.map(|stats| RecordStats {
···
33
64
embed,
34
65
stats,
35
66
labels: map_labels(labels),
67
+
viewer,
36
68
threadgate,
37
69
indexed_at: post.created_at,
38
70
}
···
51
83
}
52
84
53
85
impl StatefulHydrator<'_> {
86
+
#[instrument(skip_all)]
54
87
async fn hydrate_threadgate(
55
88
&self,
56
89
threadgate: Option<models::Threadgate>,
57
90
) -> Option<ThreadgateView> {
58
91
let threadgate = threadgate?;
59
92
60
-
let lists = threadgate
61
-
.allowed_lists
62
-
.iter()
63
-
.flatten()
64
-
.cloned()
65
-
.collect::<Vec<_>>();
93
+
let lists = match threadgate.allowed_lists.as_ref() {
94
+
Some(allowed_lists) => allowed_lists.clone().into(),
95
+
None => Vec::new(),
96
+
};
66
97
let lists = self.hydrate_lists_basic(lists).await;
67
98
68
99
Some(build_threadgate_view(
···
71
102
))
72
103
}
73
104
105
+
#[instrument(skip_all)]
74
106
async fn hydrate_threadgates(
75
107
&self,
76
108
threadgates: Vec<models::Threadgate>,
77
109
) -> HashMap<String, ThreadgateView> {
78
110
let lists = threadgates.iter().fold(Vec::new(), |mut acc, c| {
79
-
acc.extend(c.allowed_lists.iter().flatten().cloned());
111
+
if let Some(lists) = &c.allowed_lists {
112
+
acc.extend(lists.clone().0);
113
+
}
80
114
acc
81
115
});
82
116
let lists = self.hydrate_lists_basic(lists).await;
···
84
118
threadgates
85
119
.into_iter()
86
120
.map(|threadgate| {
87
-
let this_lists = threadgate
88
-
.allowed_lists
89
-
.iter()
90
-
.filter_map(|v| v.clone().and_then(|v| lists.get(&v).cloned()))
91
-
.collect();
121
+
let this_lists = match &threadgate.allowed_lists {
122
+
Some(allowed_lists) => allowed_lists
123
+
.iter()
124
+
.filter_map(|v| lists.get(v).cloned())
125
+
.collect(),
126
+
None => Vec::new(),
127
+
};
92
128
93
129
(
94
130
threadgate.at_uri.clone(),
···
98
134
.collect()
99
135
}
100
136
137
+
#[instrument(skip_all)]
101
138
pub async fn hydrate_post(&self, post: String) -> Option<PostView> {
102
139
let stats = self.loaders.post_stats.load(post.clone()).await;
103
140
let (post, threadgate) = self.loaders.posts.load(post).await?;
141
+
let viewer = self.get_post_viewer_state(&post.at_uri).await;
104
142
let embed = self.hydrate_embed(post.at_uri.clone()).await;
105
143
let author = self.hydrate_profile_basic(post.did.clone()).await?;
106
144
let threadgate = self.hydrate_threadgate(threadgate).await;
107
145
let labels = self.get_label(&post.at_uri).await;
108
146
109
-
Some(build_postview(
110
-
post, author, labels, embed, threadgate, stats,
111
-
))
147
+
Some(build_postview((
148
+
post, author, labels, embed, threadgate, viewer, stats,
149
+
)))
112
150
}
113
151
114
-
pub async fn hydrate_posts(&self, posts: Vec<String>) -> HashMap<String, PostView> {
152
+
#[instrument(skip_all)]
153
+
async fn hydrate_posts_inner(&self, posts: Vec<String>) -> HashMap<String, HydratePostsRet> {
115
154
let stats = self.loaders.post_stats.load_many(posts.clone()).await;
116
155
let posts = self.loaders.posts.load_many(posts).await;
117
156
···
121
160
.unzip::<_, _, Vec<_>, Vec<_>>();
122
161
let authors = self.hydrate_profiles_basic(authors).await;
123
162
124
-
let post_labels = self.get_label_many(&post_uris).await;
163
+
let mut post_labels = self.get_label_many(&post_uris).await;
164
+
let mut viewer_data = self.get_post_viewer_states(&post_uris).await;
125
165
126
166
let threadgates = posts
127
167
.values()
···
129
169
.collect();
130
170
let threadgates = self.hydrate_threadgates(threadgates).await;
131
171
132
-
let embeds = self.hydrate_embeds(post_uris).await;
172
+
let mut embeds = self.hydrate_embeds(post_uris).await;
133
173
134
174
posts
135
175
.into_iter()
136
176
.filter_map(|(uri, (post, threadgate))| {
137
-
let author = authors.get(&post.did)?;
138
-
let embed = embeds.get(&uri).cloned();
177
+
let author = authors.get(&post.did)?.clone();
178
+
let embed = embeds.remove(&uri);
139
179
let threadgate = threadgate.and_then(|tg| threadgates.get(&tg.at_uri).cloned());
140
-
let labels = post_labels.get(&uri).cloned().unwrap_or_default();
180
+
let labels = post_labels.remove(&uri).unwrap_or_default();
141
181
let stats = stats.get(&uri).cloned();
182
+
let viewer = viewer_data.remove(&uri);
142
183
143
184
Some((
144
185
uri,
145
-
build_postview(post, author.to_owned(), labels, embed, threadgate, stats),
186
+
(post, author, labels, embed, threadgate, viewer, stats),
146
187
))
147
188
})
148
189
.collect()
149
190
}
150
191
151
-
pub async fn hydrate_feed_posts(&self, posts: Vec<String>) -> HashMap<String, FeedViewPost> {
152
-
let stats = self.loaders.post_stats.load_many(posts.clone()).await;
153
-
let posts = self.loaders.posts.load_many(posts).await;
154
-
155
-
let (authors, post_uris) = posts
156
-
.values()
157
-
.map(|(post, _)| (post.did.clone(), post.at_uri.clone()))
158
-
.unzip::<_, _, Vec<_>, Vec<_>>();
159
-
let authors = self.hydrate_profiles_basic(authors).await;
160
-
161
-
let post_labels = self.get_label_many(&post_uris).await;
192
+
#[instrument(skip_all)]
193
+
pub async fn hydrate_posts(&self, posts: Vec<String>) -> HashMap<String, PostView> {
194
+
self.hydrate_posts_inner(posts)
195
+
.await
196
+
.into_iter()
197
+
.map(|(uri, data)| (uri, build_postview(data)))
198
+
.collect()
199
+
}
162
200
163
-
let embeds = self.hydrate_embeds(post_uris).await;
201
+
#[instrument(skip_all)]
202
+
pub async fn hydrate_feed_posts(
203
+
&self,
204
+
posts: Vec<RawFeedItem>,
205
+
author_threads_only: bool,
206
+
) -> Vec<FeedViewPost> {
207
+
let post_uris = posts
208
+
.iter()
209
+
.map(|item| item.post_uri().to_string())
210
+
.collect::<Vec<_>>();
211
+
let mut posts_hyd = self.hydrate_posts_inner(post_uris).await;
164
212
165
-
let reply_refs = posts
213
+
// we shouldn't show the parent when the post violates a threadgate.
214
+
let reply_refs = posts_hyd
166
215
.values()
167
-
.flat_map(|(post, _)| [post.parent_uri.clone(), post.root_uri.clone()])
216
+
.filter(|(post, ..)| !post.violates_threadgate)
217
+
.flat_map(|(post, ..)| [post.parent_uri.clone(), post.root_uri.clone()])
168
218
.flatten()
169
219
.collect::<Vec<_>>();
170
-
171
220
let reply_posts = self.hydrate_posts(reply_refs).await;
172
221
222
+
let repost_profiles = posts
223
+
.iter()
224
+
.filter_map(|item| item.repost_by())
225
+
.collect::<Vec<_>>();
226
+
let profiles_hydrated = self.hydrate_profiles_basic(repost_profiles).await;
227
+
173
228
posts
174
229
.into_iter()
175
-
.filter_map(|(post_uri, (post, _))| {
176
-
let author = authors.get(&post.did)?;
230
+
.filter_map(|item| {
231
+
let post = posts_hyd.remove(item.post_uri())?;
232
+
let context = item.context();
177
233
178
-
let root = post.root_uri.as_ref().and_then(|uri| reply_posts.get(uri));
179
-
let parent = post
180
-
.parent_uri
181
-
.as_ref()
182
-
.and_then(|uri| reply_posts.get(uri));
234
+
let reply = if let RawFeedItem::Post { .. } = item {
235
+
let root_uri = post.0.root_uri.as_ref();
236
+
let parent_uri = post.0.parent_uri.as_ref();
183
237
184
-
let reply = if post.parent_uri.is_some() && post.root_uri.is_some() {
185
-
Some(ReplyRef {
186
-
root: root.cloned().map(ReplyRefPost::Post).unwrap_or(
187
-
ReplyRefPost::NotFound {
188
-
uri: post.root_uri.as_ref().unwrap().clone(),
189
-
not_found: true,
190
-
},
191
-
),
192
-
parent: parent.cloned().map(ReplyRefPost::Post).unwrap_or(
193
-
ReplyRefPost::NotFound {
194
-
uri: post.parent_uri.as_ref().unwrap().clone(),
195
-
not_found: true,
196
-
},
197
-
),
198
-
grandparent_author: None,
199
-
})
238
+
let (root, parent) = if author_threads_only {
239
+
if root_uri.is_some() && parent_uri.is_some() {
240
+
let root = root_uri.and_then(|uri| posts_hyd.get(uri))?;
241
+
let parent = parent_uri.and_then(|uri| posts_hyd.get(uri))?;
242
+
243
+
let root = build_postview(root.clone());
244
+
let parent = build_postview(parent.clone());
245
+
246
+
(Some(root), Some(parent))
247
+
} else {
248
+
(None, None)
249
+
}
250
+
} else {
251
+
let root = root_uri.and_then(|uri| reply_posts.get(uri)).cloned();
252
+
let parent = parent_uri.and_then(|uri| reply_posts.get(uri)).cloned();
253
+
254
+
(root, parent)
255
+
};
256
+
257
+
if root_uri.is_some() || parent_uri.is_some() {
258
+
Some(ReplyRef {
259
+
root: root.map(postview_to_replyref).unwrap_or(
260
+
ReplyRefPost::NotFound {
261
+
uri: root_uri.unwrap().to_owned(),
262
+
not_found: true,
263
+
},
264
+
),
265
+
parent: parent.map(postview_to_replyref).unwrap_or(
266
+
ReplyRefPost::NotFound {
267
+
uri: parent_uri.unwrap().to_owned(),
268
+
not_found: true,
269
+
},
270
+
),
271
+
grandparent_author: None,
272
+
})
273
+
} else {
274
+
None
275
+
}
200
276
} else {
201
277
None
202
278
};
203
279
204
-
let embed = embeds.get(&post_uri).cloned();
205
-
let labels = post_labels.get(&post_uri).cloned().unwrap_or_default();
206
-
let stats = stats.get(&post_uri).cloned();
207
-
let post = build_postview(post, author.to_owned(), labels, embed, None, stats);
280
+
let reason = match item {
281
+
RawFeedItem::Repost { uri, by, at, .. } => {
282
+
Some(FeedViewPostReason::Repost(FeedReasonRepost {
283
+
by: profiles_hydrated.get(&by).cloned()?,
284
+
uri: Some(uri),
285
+
cid: None,
286
+
indexed_at: at,
287
+
}))
288
+
}
289
+
RawFeedItem::Pin { .. } => Some(FeedViewPostReason::Pin),
290
+
_ => None,
291
+
};
292
+
293
+
let post = build_postview(post);
208
294
209
-
Some((
210
-
post_uri,
211
-
FeedViewPost {
212
-
post,
213
-
reply,
214
-
reason: None,
215
-
feed_context: None,
216
-
},
217
-
))
295
+
Some(FeedViewPost {
296
+
post,
297
+
reply,
298
+
reason,
299
+
feed_context: context,
300
+
})
218
301
})
219
302
.collect()
303
+
}
304
+
305
+
#[instrument(skip_all)]
306
+
async fn get_post_viewer_state(&self, subject: &str) -> Option<PostViewerState> {
307
+
if let Some(viewer) = &self.current_actor {
308
+
let data = self.loaders.post_state.get(viewer, subject).await?;
309
+
310
+
Some(build_viewer(viewer, data))
311
+
} else {
312
+
None
313
+
}
314
+
}
315
+
316
+
#[instrument(skip_all)]
317
+
async fn get_post_viewer_states(
318
+
&self,
319
+
subjects: &[String],
320
+
) -> HashMap<String, PostViewerState> {
321
+
if let Some(viewer) = &self.current_actor {
322
+
let data = self.loaders.post_state.get_many(viewer, subjects).await;
323
+
324
+
data.into_iter()
325
+
.map(|(k, state)| (k, build_viewer(viewer, state)))
326
+
.collect()
327
+
} else {
328
+
HashMap::new()
329
+
}
330
+
}
331
+
}
332
+
333
+
fn postview_to_replyref(post: PostView) -> ReplyRefPost {
334
+
match &post.author.viewer {
335
+
Some(v) if v.blocked_by || v.blocking.is_some() => ReplyRefPost::Blocked {
336
+
uri: post.uri,
337
+
blocked: true,
338
+
author: BlockedAuthor {
339
+
did: post.author.did.clone(),
340
+
viewer: post.author.viewer,
341
+
},
342
+
},
343
+
_ => ReplyRefPost::Post(post),
344
+
}
345
+
}
346
+
347
+
#[derive(Debug)]
348
+
pub enum RawFeedItem {
349
+
Pin {
350
+
uri: String,
351
+
context: Option<String>,
352
+
},
353
+
Post {
354
+
uri: String,
355
+
context: Option<String>,
356
+
},
357
+
Repost {
358
+
uri: String,
359
+
post: String,
360
+
by: String,
361
+
at: chrono::DateTime<chrono::Utc>,
362
+
context: Option<String>,
363
+
},
364
+
}
365
+
366
+
impl RawFeedItem {
367
+
fn post_uri(&self) -> &str {
368
+
match self {
369
+
RawFeedItem::Pin { uri, .. } => uri,
370
+
RawFeedItem::Post { uri, .. } => uri,
371
+
RawFeedItem::Repost { post, .. } => post,
372
+
}
373
+
}
374
+
375
+
fn repost_by(&self) -> Option<String> {
376
+
match self {
377
+
RawFeedItem::Repost { by, .. } => Some(by.clone()),
378
+
_ => None,
379
+
}
380
+
}
381
+
382
+
fn context(&self) -> Option<String> {
383
+
match self {
384
+
RawFeedItem::Pin { context, .. } => context.clone(),
385
+
RawFeedItem::Post { context, .. } => context.clone(),
386
+
RawFeedItem::Repost { context, .. } => context.clone(),
387
+
}
220
388
}
221
389
}
+126 -5 parakeet/src/hydration/profile.rs
···
+use crate::db::ProfileStateRet;
 use crate::hydration::map_labels;
 use crate::loaders::ProfileLoaderRet;
 use crate::xrpc::cdn::BskyCdn;
···
 use chrono::TimeDelta;
 use lexica::app_bsky::actor::*;
 use lexica::app_bsky::embed::External;
+use lexica::app_bsky::graph::ListViewBasic;
 use parakeet_db::models;
 use parakeet_index::ProfileStats;
 use std::collections::HashMap;
 use std::str::FromStr;
 use std::sync::OnceLock;
+use tracing::instrument;

 pub static TRUSTED_VERIFIERS: OnceLock<Vec<String>> = OnceLock::new();

···
         })
     } else {
         None
+    }
+}
+
+fn build_viewer(
+    data: ProfileStateRet,
+    list_mute: Option<ListViewBasic>,
+    list_block: Option<ListViewBasic>,
+) -> ProfileViewerState {
+    let following = data
+        .following
+        .map(|rkey| format!("at://{}/app.bsky.graph.follow/{rkey}", data.did));
+    let followed_by = data
+        .followed
+        .map(|rkey| format!("at://{}/app.bsky.graph.follow/{rkey}", data.subject));
+
+    let blocking = data.list_block.or(data
+        .blocking
+        .map(|rkey| format!("at://{}/app.bsky.graph.block/{rkey}", data.did)));
+
+    ProfileViewerState {
+        muted: data.muting.unwrap_or_default(),
+        muted_by_list: list_mute,
+        blocked_by: data.blocked.unwrap_or_default(), // TODO: this doesn't factor for blocklists atm
+        blocking,
+        blocking_by_list: list_block,
+        following,
+        followed_by,
     }
 }

···
     stats: Option<ProfileStats>,
     labels: Vec<models::Label>,
     verifications: Option<Vec<models::VerificationEntry>>,
+    viewer: Option<ProfileViewerState>,
     cdn: &BskyCdn,
 ) -> ProfileViewBasic {
     let associated = build_associated(chat_decl, is_labeler, stats, notif_decl);
···
         display_name: profile.display_name,
         avatar,
         associated,
+        viewer,
         labels: map_labels(labels),
         verification,
         status,
+        pronouns: profile.pronouns,
         created_at: profile.created_at.and_utc(),
     }
 }
···
     stats: Option<ProfileStats>,
     labels: Vec<models::Label>,
     verifications: Option<Vec<models::VerificationEntry>>,
+    viewer: Option<ProfileViewerState>,
     cdn: &BskyCdn,
 ) -> ProfileView {
     let associated = build_associated(chat_decl, is_labeler, stats, notif_decl);
···
         description: profile.description,
         avatar,
         associated,
+        viewer,
         labels: map_labels(labels),
         verification,
         status,
+        pronouns: profile.pronouns,
         created_at: profile.created_at.and_utc(),
         indexed_at: profile.indexed_at,
     }
···
     stats: Option<ProfileStats>,
     labels: Vec<models::Label>,
     verifications: Option<Vec<models::VerificationEntry>>,
+    viewer: Option<ProfileViewerState>,
     cdn: &BskyCdn,
 ) -> ProfileViewDetailed {
     let associated = build_associated(chat_decl, is_labeler, stats, notif_decl);
···
         followers_count: stats.map(|v| v.followers as i64).unwrap_or_default(),
         follows_count: stats.map(|v| v.following as i64).unwrap_or_default(),
         associated,
+        viewer,
         labels: map_labels(labels),
         verification,
         status,
+        pronouns: profile.pronouns,
+        website: profile.website,
         created_at: profile.created_at.and_utc(),
         indexed_at: profile.indexed_at,
     }
 }

 impl super::StatefulHydrator<'_> {
+    #[instrument(skip_all)]
     pub async fn hydrate_profile_basic(&self, did: String) -> Option<ProfileViewBasic> {
         let labels = self.get_profile_label(&did).await;
+        let viewer = self.get_profile_viewer_state(&did).await;
         let verif = self.loaders.verification.load(did.clone()).await;
         let stats = self.loaders.profile_stats.load(did.clone()).await;
         let profile_info = self.loaders.profile.load(did).await?;

-        Some(build_basic(profile_info, stats, labels, verif, &self.cdn))
+        Some(build_basic(
+            profile_info,
+            stats,
+            labels,
+            verif,
+            viewer,
+            &self.cdn,
+        ))
     }

+    #[instrument(skip_all)]
     pub async fn hydrate_profiles_basic(
         &self,
         dids: Vec<String>,
     ) -> HashMap<String, ProfileViewBasic> {
         let labels = self.get_profile_label_many(&dids).await;
+        let viewers = self.get_profile_viewer_states(&dids).await;
         let verif = self.loaders.verification.load_many(dids.clone()).await;
         let stats = self.loaders.profile_stats.load_many(dids.clone()).await;
         let profiles = self.loaders.profile.load_many(dids).await;
···
             .map(|(k, profile_info)| {
                 let labels = labels.get(&k).cloned().unwrap_or_default();
                 let verif = verif.get(&k).cloned();
+                let viewer = viewers.get(&k).cloned();
                 let stats = stats.get(&k).cloned();

-                let v = build_basic(profile_info, stats, labels, verif, &self.cdn);
+                let v = build_basic(profile_info, stats, labels, verif, viewer, &self.cdn);
                 (k, v)
             })
             .collect()
     }

+    #[instrument(skip_all)]
     pub async fn hydrate_profile(&self, did: String) -> Option<ProfileView> {
         let labels = self.get_profile_label(&did).await;
+        let viewer = self.get_profile_viewer_state(&did).await;
         let verif = self.loaders.verification.load(did.clone()).await;
         let stats = self.loaders.profile_stats.load(did.clone()).await;
         let profile_info = self.loaders.profile.load(did).await?;

-        Some(build_profile(profile_info, stats, labels, verif, &self.cdn))
+        Some(build_profile(
+            profile_info,
+            stats,
+            labels,
+            verif,
+            viewer,
+            &self.cdn,
+        ))
     }

+    #[instrument(skip_all)]
     pub async fn hydrate_profiles(&self, dids: Vec<String>) -> HashMap<String, ProfileView> {
         let labels = self.get_profile_label_many(&dids).await;
+        let viewers = self.get_profile_viewer_states(&dids).await;
         let verif = self.loaders.verification.load_many(dids.clone()).await;
         let stats = self.loaders.profile_stats.load_many(dids.clone()).await;
         let profiles = self.loaders.profile.load_many(dids).await;
···
             .map(|(k, profile_info)| {
                 let labels = labels.get(&k).cloned().unwrap_or_default();
                 let verif = verif.get(&k).cloned();
+                let viewer = viewers.get(&k).cloned();
                 let stats = stats.get(&k).cloned();

-                let v = build_profile(profile_info, stats, labels, verif, &self.cdn);
+                let v = build_profile(profile_info, stats, labels, verif, viewer, &self.cdn);
                 (k, v)
             })
             .collect()
     }

+    #[instrument(skip_all)]
     pub async fn hydrate_profile_detailed(&self, did: String) -> Option<ProfileViewDetailed> {
         let labels = self.get_profile_label(&did).await;
+        let viewer = self.get_profile_viewer_state(&did).await;
         let verif = self.loaders.verification.load(did.clone()).await;
         let stats = self.loaders.profile_stats.load(did.clone()).await;
         let profile_info = self.loaders.profile.load(did).await?;
···
             stats,
             labels,
             verif,
+            viewer,
             &self.cdn,
         ))
     }

+    #[instrument(skip_all)]
     pub async fn hydrate_profiles_detailed(
         &self,
         dids: Vec<String>,
     ) -> HashMap<String, ProfileViewDetailed> {
         let labels = self.get_profile_label_many(&dids).await;
+        let viewers = self.get_profile_viewer_states(&dids).await;
         let verif = self.loaders.verification.load_many(dids.clone()).await;
         let stats = self.loaders.profile_stats.load_many(dids.clone()).await;
         let profiles = self.loaders.profile.load_many(dids).await;
···
             .map(|(k, profile_info)| {
                 let labels = labels.get(&k).cloned().unwrap_or_default();
                 let verif = verif.get(&k).cloned();
+                let viewer = viewers.get(&k).cloned();
                 let stats = stats.get(&k).cloned();

-                let v = build_detailed(profile_info, stats, labels, verif, &self.cdn);
+                let v = build_detailed(profile_info, stats, labels, verif, viewer, &self.cdn);
                 (k, v)
             })
             .collect()
+    }
+
+    #[instrument(skip_all)]
+    async fn get_profile_viewer_state(&self, subject: &str) -> Option<ProfileViewerState> {
+        if let Some(viewer) = &self.current_actor {
+            let data = self.loaders.profile_state.get(viewer, subject).await?;
+
+            let list_block = match &data.list_block {
+                Some(uri) => self.hydrate_list_basic(uri.clone()).await,
+                None => None,
+            };
+            let list_mute = match &data.list_mute {
+                Some(uri) => self.hydrate_list_basic(uri.clone()).await,
+                None => None,
+            };
+
+            Some(build_viewer(data, list_mute, list_block))
+        } else {
+            None
+        }
+    }
+
+    #[instrument(skip_all)]
+    async fn get_profile_viewer_states(
+        &self,
+        dids: &[String],
+    ) -> HashMap<String, ProfileViewerState> {
+        if let Some(viewer) = &self.current_actor {
+            let data = self.loaders.profile_state.get_many(viewer, dids).await;
+            let lists = data
+                .values()
+                .flat_map(|v| [&v.list_block, &v.list_mute])
+                .flatten()
+                .cloned()
+                .collect();
+            let lists = self.hydrate_lists_basic(lists).await;
+
+            data.into_iter()
+                .map(|(k, state)| {
+                    let list_mute = state.list_mute.as_ref().and_then(|v| lists.get(v).cloned());
+                    let list_block = state
+                        .list_block
+                        .as_ref()
+                        .and_then(|v| lists.get(v).cloned());
+
+                    (k, build_viewer(state, list_mute, list_block))
+                })
+                .collect()
+        } else {
+            HashMap::new()
+        }
     }
 }
+11 -9 parakeet/src/hydration/starter_packs.rs
···
 use lexica::app_bsky::graph::{ListViewBasic, StarterPackView, StarterPackViewBasic};
 use parakeet_db::models;
 use std::collections::HashMap;
+use tracing::instrument;

 fn build_basic(
     starter_pack: models::StaterPack,
···
 }

 impl StatefulHydrator<'_> {
+    #[instrument(skip_all)]
     pub async fn hydrate_starterpack_basic(&self, pack: String) -> Option<StarterPackViewBasic> {
         let labels = self.get_label(&pack).await;
         let sp = self.loaders.starterpacks.load(pack).await?;
···
         Some(build_basic(sp, creator, labels, list_item_count))
     }

+    #[instrument(skip_all)]
     pub async fn hydrate_starterpacks_basic(
         &self,
         packs: Vec<String>,
···
             .collect()
     }

+    #[instrument(skip_all)]
     pub async fn hydrate_starterpack(&self, pack: String) -> Option<StarterPackView> {
         let labels = self.get_label(&pack).await;
         let sp = self.loaders.starterpacks.load(pack).await?;
···
         let creator = self.hydrate_profile_basic(sp.owner.clone()).await?;
         let list = self.hydrate_list_basic(sp.list.clone()).await;

-        let feeds = sp
-            .feeds
-            .clone()
-            .unwrap_or_default()
-            .into_iter()
-            .flatten()
+        let feeds = sp.feeds.clone().unwrap_or_default();
+        let feeds = self
+            .hydrate_feedgens(feeds.into())
+            .await
+            .into_values()
             .collect();
-        let feeds = self.hydrate_feedgens(feeds).await.into_values().collect();

         Some(build_spview(sp, creator, labels, list, feeds))
     }

+    #[instrument(skip_all)]
     pub async fn hydrate_starterpacks(
         &self,
         packs: Vec<String>,
···
         let feeds = packs
             .values()
             .filter_map(|pack| pack.feeds.clone())
-            .flat_map(|feeds| feeds.into_iter().flatten())
+            .flat_map(Vec::from)
             .collect();

         let creators = self.hydrate_profiles_basic(creators).await;
···
             let list = lists.get(&pack.list).cloned();
             let feeds = pack.feeds.as_ref().map(|v| {
                 v.iter()
-                    .flatten()
                     .filter_map(|feed| feeds.get(feed).cloned())
                     .collect()
             });
+57 parakeet/src/instrumentation.rs
···
+use opentelemetry::trace::TracerProvider;
+use opentelemetry_otlp::{Protocol, SpanExporter, WithExportConfig};
+use opentelemetry_sdk::trace::{Sampler, SdkTracer, SdkTracerProvider};
+use tracing::Subscriber;
+use tracing_opentelemetry::OpenTelemetryLayer;
+use tracing_subscriber::filter::Filtered;
+use tracing_subscriber::layer::SubscriberExt;
+use tracing_subscriber::registry::LookupSpan;
+use tracing_subscriber::util::SubscriberInitExt;
+use tracing_subscriber::{EnvFilter, Layer};
+
+pub fn init_instruments(cfg: &crate::config::ConfigInstruments) {
+    let otel_layer = cfg.otel_enable.then(init_otel);
+    let log_layer = init_log(cfg.log_json);
+
+    tracing_subscriber::registry()
+        .with(log_layer)
+        .with(otel_layer)
+        .init();
+}
+
+fn init_otel<S>() -> Filtered<OpenTelemetryLayer<S, SdkTracer>, EnvFilter, S>
+where
+    S: Subscriber + for<'span> LookupSpan<'span>,
+{
+    let span_exporter = SpanExporter::builder()
+        .with_http()
+        .with_protocol(Protocol::HttpBinary)
+        .build()
+        .unwrap();
+
+    let tracer_provider = SdkTracerProvider::builder()
+        .with_batch_exporter(span_exporter)
+        .with_sampler(Sampler::AlwaysOn)
+        .build();
+
+    opentelemetry::global::set_tracer_provider(tracer_provider.clone());
+
+    let tracer = tracer_provider.tracer("parakeet");
+    let otel_filter = EnvFilter::new("info,otel::tracing=trace,tower_http=off");
+
+    OpenTelemetryLayer::new(tracer).with_filter(otel_filter)
+}
+
+fn init_log<S>(json: bool) -> Filtered<Box<dyn Layer<S> + Send + Sync>, EnvFilter, S>
+where
+    S: Subscriber + for<'span> LookupSpan<'span>,
+{
+    let stdout_filter =
+        EnvFilter::from_default_env().add_directive("otel::tracing=off".parse().unwrap());
+
+    match json {
+        true => tracing_subscriber::fmt::layer().json().boxed(),
+        false => tracing_subscriber::fmt::layer().boxed(),
+    }
+    .with_filter(stdout_filter)
+}
+162 -5 parakeet/src/loaders.rs
···
 use crate::cache::PrefixedLoaderCache;
+use crate::db;
 use crate::xrpc::extract::LabelConfigItem;
 use dataloader::async_cached::Loader;
 use dataloader::non_cached::Loader as NonCachedLoader;
 use dataloader::BatchFn;
+use diesel::dsl::sql;
 use diesel::prelude::*;
 use diesel_async::pooled_connection::deadpool::Pool;
 use diesel_async::{AsyncPgConnection, RunQueryDsl};
···
 use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
 use std::str::FromStr;
+use tracing::instrument;

 type CachingLoader<K, V, L> = Loader<K, V, L, PrefixedLoaderCache<V>>;

···
     pub label: LabelLoader,
     pub labeler: CachingLoader<String, LabelServiceLoaderRet, LabelServiceLoader>,
     pub list: CachingLoader<String, ListLoaderRet, ListLoader>,
+    pub list_state: ListStateLoader,
     pub like: NonCachedLoader<String, i32, LikeLoader>,
+    pub like_state: LikeRecordLoader,
     pub posts: CachingLoader<String, PostLoaderRet, PostLoader>,
     pub post_stats: NonCachedLoader<String, parakeet_index::PostStats, PostStatsLoader>,
+    pub post_state: PostStateLoader,
     pub profile: CachingLoader<String, ProfileLoaderRet, ProfileLoader>,
     pub profile_stats: NonCachedLoader<String, parakeet_index::ProfileStats, ProfileStatsLoader>,
+    pub profile_state: ProfileStateLoader,
     pub starterpacks: CachingLoader<String, StarterPackLoaderRet, StarterPackLoader>,
     pub verification: CachingLoader<String, Vec<models::VerificationEntry>, VerificationLoader>,
 }
···
 ) -> Dataloaders {
     Dataloaders {
         embed: new_plc_loader(EmbedLoader(pool.clone()), &rc, "embed", 3600),
-        feedgen: new_plc_loader(FeedGenLoader(pool.clone(), idxc.clone()), &rc, "feedgen", 600),
+        feedgen: new_plc_loader(FeedGenLoader(pool.clone()), &rc, "feedgen", 600),
         handle: new_plc_loader(HandleLoader(pool.clone()), &rc, "handle", 60),
         label: LabelLoader(pool.clone()), // CARE: never cache this.
-        labeler: new_plc_loader(LabelServiceLoader(pool.clone(), idxc.clone()), &rc, "labeler", 600),
+        labeler: new_plc_loader(LabelServiceLoader(pool.clone()), &rc, "labeler", 600),
         like: NonCachedLoader::new(LikeLoader(idxc.clone())),
+        like_state: LikeRecordLoader(pool.clone()),
         list: new_plc_loader(ListLoader(pool.clone()), &rc, "list", 600),
+        list_state: ListStateLoader(pool.clone()),
         posts: new_plc_loader(PostLoader(pool.clone()), &rc, "post", 3600),
         post_stats: NonCachedLoader::new(PostStatsLoader(idxc.clone())),
+        post_state: PostStateLoader(pool.clone()),
         profile: new_plc_loader(ProfileLoader(pool.clone()), &rc, "profile", 3600),
         profile_stats: NonCachedLoader::new(ProfileStatsLoader(idxc.clone())),
+        profile_state: ProfileStateLoader(pool.clone()),
         starterpacks: new_plc_loader(StarterPackLoader(pool.clone()), &rc, "starterpacks", 600),
         verification: new_plc_loader(VerificationLoader(pool.clone()), &rc, "verification", 60),
     }
···

 pub struct LikeLoader(parakeet_index::Client);
 impl BatchFn<String, i32> for LikeLoader {
+    #[instrument(name = "LikeLoader", skip_all)]
     async fn load(&mut self, keys: &[String]) -> HashMap<String, i32> {
         let res = self
             .0
···
     }
 }

+pub struct LikeRecordLoader(Pool<AsyncPgConnection>);
+impl LikeRecordLoader {
+    #[instrument(name = "LikeRecordLoader::get", skip_all)]
+    pub async fn get(&self, did: &str, subject: &str) -> Option<(String, String)> {
+        let mut conn = self.0.get().await.unwrap();
+
+        db::get_like_state(&mut conn, did, subject)
+            .await
+            .unwrap_or_else(|e| {
+                tracing::error!("like state load failed: {e}");
+                None
+            })
+    }
+
+    #[instrument(name = "LikeRecordLoader::get_many", skip_all)]
+    pub async fn get_many(
+        &self,
+        did: &str,
+        subjects: &[String],
+    ) -> HashMap<String, (String, String)> {
+        let mut conn = self.0.get().await.unwrap();
+
+        match db::get_like_states(&mut conn, did, subjects).await {
+            Ok(res) => {
+                HashMap::from_iter(res.into_iter().map(|(sub, did, rkey)| (sub, (did, rkey))))
+            }
+            Err(e) => {
+                tracing::error!("like state load failed: {e}");
+                HashMap::new()
+            }
+        }
+    }
+}
+
 pub struct HandleLoader(Pool<AsyncPgConnection>);
 impl BatchFn<String, String> for HandleLoader {
+    #[instrument(name = "HandleLoader", skip_all)]
     async fn load(&mut self, keys: &[String]) -> HashMap<String, String> {
         let mut conn = self.0.get().await.unwrap();

···
     Option<ProfileAllowSubscriptions>,
 );
 impl BatchFn<String, ProfileLoaderRet> for ProfileLoader {
+    #[instrument(name = "ProfileLoader", skip_all)]
     async fn load(&mut self, keys: &[String]) -> HashMap<String, ProfileLoaderRet> {
         let mut conn = self.0.get().await.unwrap();

···

 pub struct ProfileStatsLoader(parakeet_index::Client);
 impl BatchFn<String, parakeet_index::ProfileStats> for ProfileStatsLoader {
+    #[instrument(name = "ProfileStatsLoader", skip_all)]
     async fn load(&mut self, keys: &[String]) -> HashMap<String, parakeet_index::ProfileStats> {
         let stats_req = parakeet_index::GetStatsManyReq {
             uris: keys.to_vec(),
···
     }
 }

+pub struct ProfileStateLoader(Pool<AsyncPgConnection>);
+impl ProfileStateLoader {
+    #[instrument(name = "ProfileStateLoader::get", skip_all)]
+    pub async fn get(&self, did: &str, subject: &str) -> Option<db::ProfileStateRet> {
+        let mut conn = self.0.get().await.unwrap();
+
+        db::get_profile_state(&mut conn, did, subject)
+            .await
+            .unwrap_or_else(|e| {
+                tracing::error!("profile state load failed: {e}");
+                None
+            })
+    }
+
+    #[instrument(name = "ProfileStateLoader::get_many", skip_all)]
+    pub async fn get_many(
+        &self,
+        did: &str,
+        subjects: &[String],
+    ) -> HashMap<String, db::ProfileStateRet> {
+        let mut conn = self.0.get().await.unwrap();
+
+        match db::get_profile_states(&mut conn, did, subjects).await {
+            Ok(res) => HashMap::from_iter(res.into_iter().map(|v| (v.subject.clone(), v))),
+            Err(e) => {
+                tracing::error!("profile state load failed: {e}");
+                HashMap::new()
+            }
+        }
+    }
+}
+
 pub struct ListLoader(Pool<AsyncPgConnection>);
 type ListLoaderRet = (models::List, i64);
 impl BatchFn<String, ListLoaderRet> for ListLoader {
+    #[instrument(name = "ListLoaderRet", skip_all)]
     async fn load(&mut self, keys: &[String]) -> HashMap<String, ListLoaderRet> {
         let mut conn = self.0.get().await.unwrap();

···
     }
 }

-pub struct FeedGenLoader(Pool<AsyncPgConnection>, parakeet_index::Client);
+pub struct ListStateLoader(Pool<AsyncPgConnection>);
+impl ListStateLoader {
+    #[instrument(name = "ListStateLoader::get", skip_all)]
+    pub async fn get(&self, did: &str, subject: &str) -> Option<db::ListStateRet> {
+        let mut conn = self.0.get().await.unwrap();
+
+        db::get_list_state(&mut conn, did, subject)
+            .await
+            .unwrap_or_else(|e| {
+                tracing::error!("list state load failed: {e}");
+                None
+            })
+    }
+
+    #[instrument(name = "ListStateLoader::get_many", skip_all)]
+    pub async fn get_many(
+        &self,
+        did: &str,
+        subjects: &[String],
+    ) -> HashMap<String, db::ListStateRet> {
+        let mut conn = self.0.get().await.unwrap();
+
+        match db::get_list_states(&mut conn, did, subjects).await {
+            Ok(res) => HashMap::from_iter(res.into_iter().map(|v| (v.at_uri.clone(), v))),
+            Err(e) => {
+                tracing::error!("list state load failed: {e}");
+                HashMap::new()
+            }
+        }
+    }
+}
+
+pub struct FeedGenLoader(Pool<AsyncPgConnection>);
 impl BatchFn<String, models::FeedGen> for FeedGenLoader {
+    #[instrument(name = "FeedGenLoader", skip_all)]
     async fn load(&mut self, keys: &[String]) -> HashMap<String, models::FeedGen> {
         let mut conn = self.0.get().await.unwrap();

···
 pub struct PostLoader(Pool<AsyncPgConnection>);
 type PostLoaderRet = (models::Post, Option<models::Threadgate>);
 impl BatchFn<String, PostLoaderRet> for PostLoader {
+    #[instrument(name = "PostLoader", skip_all)]
     async fn load(&mut self, keys: &[String]) -> HashMap<String, PostLoaderRet> {
         let mut conn = self.0.get().await.unwrap();

         let res = schema::posts::table
-            .left_join(schema::threadgates::table)
+            .left_join(schema::threadgates::table.on(
+                schema::threadgates::post_uri.eq(sql("coalesce(posts.root_uri, posts.at_uri)")),
+            ))
             .select((
                 models::Post::as_select(),
                 Option::<models::Threadgate>::as_select(),
···

 pub struct PostStatsLoader(parakeet_index::Client);
 impl BatchFn<String, parakeet_index::PostStats> for PostStatsLoader {
+    #[instrument(name = "PostStatsLoader", skip_all)]
     async fn load(&mut self, keys: &[String]) -> HashMap<String, parakeet_index::PostStats> {
         let stats_req = parakeet_index::GetStatsManyReq {
             uris: keys.to_vec(),
···
     }
 }

+pub struct PostStateLoader(Pool<AsyncPgConnection>);
+impl PostStateLoader {
+    #[instrument(name = "PostStateLoader::get", skip_all)]
+    pub async fn get(&self, did: &str, subject: &str) -> Option<db::PostStateRet> {
+        let mut conn = self.0.get().await.unwrap();
+
+        db::get_post_state(&mut conn, did, subject)
+            .await
+            .unwrap_or_else(|e| {
+                tracing::error!("post state load failed: {e}");
+                None
+            })
+    }
+
+    #[instrument(name = "PostStateLoader::get_many", skip_all)]
+    pub async fn get_many(
+        &self,
+        did: &str,
+        subjects: &[String],
+    ) -> HashMap<String, db::PostStateRet> {
+        let mut conn = self.0.get().await.unwrap();
+
+        match db::get_post_states(&mut conn, did, subjects).await {
+            Ok(res) => HashMap::from_iter(res.into_iter().map(|v| (v.at_uri.clone(), v))),
+            Err(e) => {
+                tracing::error!("post state load failed: {e}");
+                HashMap::new()
+            }
+        }
+    }
+}
+
 pub struct EmbedLoader(Pool<AsyncPgConnection>);
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub enum EmbedLoaderRet {
···
     RecordWithMedia(models::PostEmbedRecord, Box<EmbedLoaderRet>),
 }
 impl BatchFn<String, (EmbedLoaderRet, String)> for EmbedLoader {
+    #[instrument(name = "EmbedLoader", skip_all)]
     async fn load(&mut self, keys: &[String]) -> HashMap<String, (EmbedLoaderRet, String)> {
         let mut conn = self.0.get().await.unwrap();

···
 pub struct StarterPackLoader(Pool<AsyncPgConnection>);
 type StarterPackLoaderRet = models::StaterPack;
 impl BatchFn<String, StarterPackLoaderRet> for StarterPackLoader {
+    #[instrument(name = "StarterPackLoader", skip_all)]
     async fn load(&mut self, keys: &[String]) -> HashMap<String, StarterPackLoaderRet> {
         let mut conn = self.0.get().await.unwrap();

···
     }
 }

-pub struct LabelServiceLoader(Pool<AsyncPgConnection>, parakeet_index::Client);
+pub struct LabelServiceLoader(Pool<AsyncPgConnection>);
 type LabelServiceLoaderRet = (models::LabelerService, Vec<models::LabelDefinition>);
 impl BatchFn<String, LabelServiceLoaderRet> for LabelServiceLoader {
+    #[instrument(name = "LabelServiceLoader", skip_all)]
     async fn load(&mut self, keys: &[String]) -> HashMap<String, LabelServiceLoaderRet> {
         let mut conn = self.0.get().await.unwrap();

···
 // but it should live here anyway
 pub struct LabelLoader(Pool<AsyncPgConnection>);
 impl LabelLoader {
+    #[instrument(name = "LabelLoader::load", skip_all)]
     pub async fn load(&self, uri: &str, services: &[LabelConfigItem]) -> Vec<models::Label> {
         let mut conn = self.0.get().await.unwrap();

···
         })
     }

+    #[instrument(name = "LabelLoader::load_many", skip_all)]
     pub async fn load_many(
         &self,
         uris: &[String],
···

 pub struct VerificationLoader(Pool<AsyncPgConnection>);
 impl BatchFn<String, Vec<models::VerificationEntry>> for VerificationLoader {
+    #[instrument(name = "VerificationLoader", skip_all)]
     async fn load(&mut self, keys: &[String]) -> HashMap<String, Vec<models::VerificationEntry>> {
         let mut conn = self.0.get().await.unwrap();

+14 -5 parakeet/src/main.rs
···
+use axum_tracing_opentelemetry::middleware::{OtelAxumLayer, OtelInResponseLayer};
 use diesel_async::async_connection_wrapper::AsyncConnectionWrapper;
 use diesel_async::pooled_connection::deadpool::Pool;
 use diesel_async::pooled_connection::AsyncDieselConnectionManager;
···
 mod config;
 mod db;
 mod hydration;
+mod instrumentation;
 mod loaders;
 mod xrpc;

···

 #[tokio::main]
 async fn main() -> eyre::Result<()> {
-    tracing_subscriber::fmt::init();
-
     let conf = config::load_config()?;

+    instrumentation::init_instruments(&conf.instruments);
+
     let db_mgr = AsyncDieselConnectionManager::<AsyncPgConnection>::new(&conf.database_url);
     let pool = Pool::builder(db_mgr).build()?;

···
     let redis_client = redis::Client::open(conf.redis_uri)?;
     let redis_mp = redis_client.get_multiplexed_tokio_connection().await?;

-    let index_client = parakeet_index::Client::connect(conf.index_uri).await?;
+    let index_client = parakeet_index::connect_with_otel(conf.index_uri)
+        .await
+        .map_err(|e| eyre::eyre!(e))?;

     let dataloaders = Arc::new(loaders::Dataloaders::new(
         pool.clone(),
···

     let did_doc = did_web_doc(&conf.service);

+    let mw = tower::ServiceBuilder::new()
+        .option_layer(conf.instruments.otel_enable.then(OtelInResponseLayer::default))
+        .option_layer(conf.instruments.otel_enable.then(OtelAxumLayer::default))
+        .layer(TraceLayer::new_for_http())
+        .layer(cors);
+
     let app = axum::Router::new()
         .nest("/xrpc", xrpc::xrpc_routes())
         .route(
             "/.well-known/did.json",
             axum::routing::get(async || axum::Json(did_doc)),
         )
-        .layer(TraceLayer::new_for_http())
-        .layer(cors)
+        .layer(mw)
         .with_state(GlobalState {
             pool,
             redis_mp,
+5 parakeet/src/sql/list_states.sql
···
+select l.at_uri, lb.at_uri as block, lm.did is not null as muted
+from lists l
+         left join list_blocks lb on l.at_uri = lb.list_uri and lb.did = $1
+         left join list_mutes lm on l.at_uri = lm.list_uri and lm.did = $1
+where l.at_uri = any ($2) and (lm.did is not null or lb.at_uri is not null)
+16 parakeet/src/sql/post_state.sql
···
+select bq.*, coalesce(bq.at_uri = pinned_uri, false) as pinned
+from (select p.at_uri,
+             p.did,
+             p.cid,
+             l.rkey as like_rkey,
+             r.rkey as repost_rkey,
+             b.did is not null as bookmarked,
+             coalesce(pg.rules && ARRAY ['app.bsky.feed.postgate#disableRule'], false) as embed_disabled
+      from posts p
+               left join likes l on l.subject = p.at_uri and l.did = $1
+               left join reposts r on r.post = p.at_uri and r.did = $1
+               left join bookmarks b on b.subject = p.at_uri and b.did = $1
+               left join postgates pg on pg.post_uri = p.at_uri
+      where p.at_uri = any ($2)
+        and (l.rkey is not null or r.rkey is not null or b.did is not null or pg.rules is not null)) bq,
+     (select pinned_uri, pinned_cid from profiles where did = $1) pp;
+20 parakeet/src/sql/profile_state.sql
···
+with vlb as (select * from v_list_block_exp where did = $1 and subject = any ($2)),
+     vlm as (select * from v_list_mutes_exp where did = $1 and subject = any ($2)),
+     ps as (select * from profile_states where did = $1 and subject = any ($2)),
+     vlb2 as (select subject as did, did as subject, list_uri is not null as blocked
+              from v_list_block_exp
+              where did = any ($2)
+                and subject = $1)
+select distinct on (did, subject) did,
+                                  subject,
+                                  muting,
+                                  ps.blocked or vlb2.blocked as blocked,
+                                  blocking,
+                                  following,
+                                  followed,
+                                  vlb.list_uri as list_block,
+                                  vlm.list_uri as list_mute
+from ps
+         full join vlb using (did, subject)
+         full join vlm using (did, subject)
+         full join vlb2 using (did, subject);
+3 -3 parakeet/src/sql/thread.sql
···
-with recursive thread as (select at_uri, parent_uri, root_uri, 0 as depth
+with recursive thread as (select at_uri, parent_uri, root_uri, 1 as depth
                           from posts
-                          where parent_uri = $1
+                          where parent_uri = $1 and violates_threadgate=FALSE
                           union all
                           select p.at_uri, p.parent_uri, p.root_uri, thread.depth + 1
                           from posts p
                                    join thread on p.parent_uri = thread.at_uri
-                          where thread.depth <= $2)
+                          where thread.depth <= $2 and p.violates_threadgate=FALSE)
 select *
 from thread
 order by depth desc;
+13 parakeet/src/sql/thread_branching.sql
···
+with recursive thread as (select at_uri, parent_uri, root_uri, 1 as depth
+                          from posts
+                          where parent_uri = $1
+                            and violates_threadgate = FALSE
+                          union all
+                          (select p.at_uri, p.parent_uri, p.root_uri, thread.depth + 1
+                           from posts p
+                                    join thread on p.parent_uri = thread.at_uri
+                           where thread.depth <= $2
+                             and violates_threadgate = FALSE
+                           LIMIT $3))
+select *
+from thread;
+4 -2 parakeet/src/sql/thread_parent.sql
···
 with recursive parents as (select at_uri, cid, parent_uri, root_uri, 0 as depth
                            from posts
-                           where at_uri = (select parent_uri from posts where at_uri = $1)
+                           where
+                               at_uri = (select parent_uri from posts where at_uri = $1 and violates_threadgate = FALSE)
                            union all
                            select p.at_uri, p.cid, p.parent_uri, p.root_uri, parents.depth + 1
                            from posts p
                                     join parents on p.at_uri = parents.parent_uri
-                           where parents.depth <= $2)
+                           where parents.depth <= $2
+                             and p.violates_threadgate = FALSE)
 select *
 from parents
 order by depth desc;
+21 -6 parakeet/src/xrpc/app_bsky/bookmark.rs
···
 use diesel::prelude::*;
 use diesel_async::RunQueryDsl;
 use lexica::app_bsky::bookmark::{BookmarkView, BookmarkViewItem};
+use lexica::app_bsky::feed::{BlockedAuthor, PostView};
+use lexica::StrongRef;
 use parakeet_db::{models, schema};
 use serde::{Deserialize, Serialize};
-use lexica::StrongRef;

 const BSKY_ALLOWED_TYPES: &[&str] = &["app.bsky.feed.post"];

···
         rkey: None,
         subject: &form.uri,
         subject_cid: Some(form.cid),
-        subject_type: &parts[1],
+        subject_type: parts[1],
         tags: vec![],
     };

···
             // otherwise just ditch. we should have one.
             let cid = bookmark.subject_cid.or(maybe_cid)?;

-            let item = maybe_item.map(BookmarkViewItem::Post).unwrap_or(
-                BookmarkViewItem::NotFound {
+            let item = maybe_item
+                .map(postview_to_bvi)
+                .unwrap_or(BookmarkViewItem::NotFound {
                     uri: bookmark.subject.clone(),
                     not_found: true,
-                },
-            );
+                });

             let subject = StrongRef::new_from_str(bookmark.subject, &cid).ok()?;

···

     Ok(Json(GetBookmarksRes { cursor, bookmarks }))
 }
+
+fn postview_to_bvi(post: PostView) -> BookmarkViewItem {
+    match &post.author.viewer {
+        Some(v) if v.blocked_by || v.blocking.is_some() => BookmarkViewItem::Blocked {
+            uri: post.uri,
+            blocked: true,
+            author: BlockedAuthor {
+                did: post.author.did.clone(),
+                viewer: post.author.viewer,
+            },
+        },
+        _ => BookmarkViewItem::Post(post),
+    }
+}
+7 -8 parakeet/src/xrpc/app_bsky/feed/likes.rs
···
+use crate::hydration::posts::RawFeedItem;
 use crate::hydration::StatefulHydrator;
 use crate::xrpc::error::{Error, XrpcResult};
 use crate::xrpc::extract::{AtpAcceptLabelers, AtpAuth};
···
         .last()
         .map(|(last, _)| last.timestamp_millis().to_string());

-    let at_uris = results
+    let raw_feed = results
         .iter()
-        .map(|(_, uri)| uri.clone())
+        .map(|(_, uri)| RawFeedItem::Post {
+            uri: uri.clone(),
+            context: None,
+        })
         .collect::<Vec<_>>();

-    let mut posts = hyd.hydrate_feed_posts(at_uris).await;
-
-    let feed: Vec<_> = results
-        .into_iter()
-        .filter_map(|(_, uri)| posts.remove(&uri))
-        .collect();
+    let feed = hyd.hydrate_feed_posts(raw_feed, false).await;

     Ok(Json(FeedRes { cursor, feed }))
 }
+157
-122
parakeet/src/xrpc/app_bsky/feed/posts.rs
+157
-122
parakeet/src/xrpc/app_bsky/feed/posts.rs
···
1
+
use crate::hydration::posts::RawFeedItem;
1
2
use crate::hydration::StatefulHydrator;
2
3
use crate::xrpc::app_bsky::graph::lists::ListWithCursorQuery;
3
4
use crate::xrpc::error::{Error, XrpcResult};
···
16
17
use diesel_async::{AsyncPgConnection, RunQueryDsl};
17
18
use lexica::app_bsky::actor::ProfileView;
18
19
use lexica::app_bsky::feed::{
19
-
FeedReasonRepost, FeedSkeletonResponse, FeedViewPost, FeedViewPostReason, PostView,
20
-
SkeletonReason, ThreadViewPost, ThreadViewPostType, ThreadgateView,
20
+
BlockedAuthor, FeedSkeletonResponse, FeedViewPost, PostView, SkeletonReason, ThreadViewPost,
21
+
ThreadViewPostType, ThreadgateView,
21
22
};
22
-
use parakeet_db::schema;
23
+
use parakeet_db::{models, schema};
23
24
use reqwest::Url;
24
25
use serde::{Deserialize, Serialize};
25
26
use std::collections::HashMap;
27
+
use tracing::instrument;
26
28
27
29
const FEEDGEN_SERVICE_ID: &str = "#bsky_fg";
28
30
···
113
115
114
116
let hyd = StatefulHydrator::new(&state.dataloaders, &state.cdn, &labelers, maybe_auth);
115
117
116
-
let at_uris = skeleton.feed.iter().map(|v| v.post.clone()).collect();
117
118
let repost_skeleton = skeleton
118
119
.feed
119
120
.iter()
···
122
123
_ => None,
123
124
})
124
125
.collect::<Vec<_>>();
125
-
126
-
let mut posts = hyd.hydrate_feed_posts(at_uris).await;
127
-
let mut repost_data = get_skeleton_repost_data(&mut conn, &hyd, repost_skeleton).await;
126
+
let mut repost_data = get_skeleton_repost_data(&mut conn, repost_skeleton).await;
128
127
129
-
let feed = skeleton
128
+
let raw_feed = skeleton
130
129
.feed
131
130
.into_iter()
132
-
.filter_map(|item| {
133
-
let mut post = posts.remove(&item.post)?;
134
-
let reason = match item.reason {
135
-
Some(SkeletonReason::Repost { repost }) => {
136
-
repost_data.remove(&repost).map(FeedViewPostReason::Repost)
137
-
}
138
-
Some(SkeletonReason::Pin {}) => Some(FeedViewPostReason::Pin),
139
-
_ => None,
140
-
};
141
-
142
-
post.reason = reason;
143
-
post.feed_context = item.feed_context;
144
-
145
-
Some(post)
131
+
.filter_map(|v| match v.reason {
132
+
Some(SkeletonReason::Repost { repost }) => {
133
+
repost_data
134
+
.remove_entry(&repost)
135
+
.map(|(uri, (by, at))| RawFeedItem::Repost {
136
+
uri,
137
+
post: v.post,
138
+
by,
139
+
at: at.and_utc(),
140
+
context: v.feed_context,
141
+
})
142
+
}
143
+
Some(SkeletonReason::Pin {}) => Some(RawFeedItem::Pin {
144
+
uri: v.post,
145
+
context: v.feed_context,
146
+
}),
147
+
None => Some(RawFeedItem::Post {
148
+
uri: v.post,
149
+
context: v.feed_context,
150
+
}),
146
151
})
147
152
.collect();
148
153
154
+
let feed = hyd.hydrate_feed_posts(raw_feed, false).await;
155
+
149
156
Ok(Json(FeedRes {
150
157
cursor: skeleton.cursor,
151
158
feed,
152
159
}))
153
160
}
154
161
155
-
#[derive(Debug, Deserialize)]
162
+
#[derive(Debug, Default, Eq, PartialEq, Deserialize)]
156
163
#[serde(rename_all = "snake_case")]
164
+
#[allow(clippy::enum_variant_names)]
157
165
pub enum GetAuthorFeedFilter {
166
+
#[default]
158
167
PostsWithReplies,
159
168
PostsNoReplies,
160
169
PostsWithMedia,
161
170
PostsAndAuthorThreads,
162
171
PostsWithVideo,
163
-
}
164
-
165
-
impl Default for GetAuthorFeedFilter {
166
-
fn default() -> Self {
167
-
Self::PostsWithReplies
168
-
}
169
172
}
170
173
171
174
#[derive(Debug, Deserialize)]
···
187
190
Query(query): Query<GetAuthorFeedQuery>,
188
191
) -> XrpcResult<Json<FeedRes>> {
189
192
let mut conn = state.pool.get().await?;
190
-
let hyd = StatefulHydrator::new(&state.dataloaders, &state.cdn, &labelers, maybe_auth);
191
193
192
194
let did = get_actor_did(&state.dataloaders, query.actor.clone()).await?;
193
195
194
196
check_actor_status(&mut conn, &did).await?;
195
197
198
+
// check if we block the actor or if they block us
199
+
if let Some(auth) = &maybe_auth {
200
+
if let Some(psr) = crate::db::get_profile_state(&mut conn, &auth.0, &did).await? {
201
+
if psr.blocked.unwrap_or_default() {
202
+
// they block us
203
+
return Err(Error::new(StatusCode::BAD_REQUEST, "BlockedByActor", None));
204
+
} else if psr.blocking.is_some() {
205
+
// we block them
206
+
return Err(Error::new(StatusCode::BAD_REQUEST, "BlockedActor", None));
207
+
}
208
+
}
209
+
}
210
+
211
+
let hyd = StatefulHydrator::new(&state.dataloaders, &state.cdn, &labelers, maybe_auth);
212
+
213
+
let pin = match query.include_pins && query.cursor.is_none() {
214
+
false => None,
215
+
true => crate::db::get_pinned_post_uri(&mut conn, &did).await?,
216
+
};
217
+
196
218
let limit = query.limit.unwrap_or(50).clamp(1, 100);
197
219
198
-
let mut posts_query = schema::posts::table
199
-
.select((schema::posts::created_at, schema::posts::at_uri))
200
-
.filter(schema::posts::did.eq(did))
220
+
let mut posts_query = schema::author_feeds::table
221
+
.select(models::AuthorFeedItem::as_select())
222
+
.left_join(schema::posts::table.on(schema::posts::at_uri.eq(schema::author_feeds::post)))
223
+
.filter(schema::author_feeds::did.eq(&did))
201
224
.into_boxed();
202
225
203
226
if let Some(cursor) = datetime_cursor(query.cursor.as_ref()) {
204
-
posts_query = posts_query.filter(schema::posts::created_at.lt(cursor));
227
+
posts_query = posts_query.filter(schema::author_feeds::sort_at.lt(cursor));
205
228
}
206
229
230
+
let author_threads_only = query.filter == GetAuthorFeedFilter::PostsAndAuthorThreads;
207
231
posts_query = match query.filter {
208
-
GetAuthorFeedFilter::PostsWithReplies => posts_query,
232
+
GetAuthorFeedFilter::PostsWithReplies => {
233
+
posts_query.filter(schema::author_feeds::typ.eq("post"))
234
+
}
209
235
GetAuthorFeedFilter::PostsNoReplies => {
210
236
posts_query.filter(schema::posts::parent_uri.is_null())
211
237
}
212
-
GetAuthorFeedFilter::PostsWithMedia => posts_query.filter(embed_type_filter(&[
213
-
"app.bsky.embed.video",
214
-
"app.bsky.embed.images",
215
-
])),
238
+
GetAuthorFeedFilter::PostsWithMedia => posts_query.filter(
239
+
embed_type_filter(&["app.bsky.embed.video", "app.bsky.embed.images"])
240
+
.and(schema::author_feeds::typ.eq("post")),
241
+
),
216
242
GetAuthorFeedFilter::PostsAndAuthorThreads => posts_query.filter(
217
243
(schema::posts::parent_uri
218
-
.like(format!("at://{}/%", &query.actor))
244
+
.like(format!("at://{did}/%"))
219
245
.or(schema::posts::parent_uri.is_null()))
220
246
.and(
221
247
schema::posts::root_uri
222
-
.like(format!("at://{}/%", &query.actor))
248
+
.like(format!("at://{did}/%"))
223
249
.or(schema::posts::root_uri.is_null()),
224
250
),
225
251
),
226
-
GetAuthorFeedFilter::PostsWithVideo => {
227
-
posts_query.filter(embed_type_filter(&["app.bsky.embed.video"]))
228
-
}
252
+
GetAuthorFeedFilter::PostsWithVideo => posts_query.filter(
253
+
embed_type_filter(&["app.bsky.embed.video"]).and(schema::author_feeds::typ.eq("post")),
254
+
),
229
255
};
230
256
231
257
let results = posts_query
232
-
.order(schema::posts::created_at.desc())
258
+
.order(schema::author_feeds::sort_at.desc())
233
259
.limit(limit as i64)
234
-
.load::<(chrono::DateTime<chrono::Utc>, String)>(&mut conn)
260
+
.load(&mut conn)
235
261
.await?;
236
262
237
263
let cursor = results
238
264
.last()
239
-
.map(|(last, _)| last.timestamp_millis().to_string());
265
+
.map(|item| item.sort_at.timestamp_millis().to_string());
240
266
241
-
let at_uris = results
242
-
.iter()
243
-
.map(|(_, uri)| uri.clone())
267
+
let mut raw_feed = results
268
+
.into_iter()
269
+
.filter_map(|item| match &*item.typ {
270
+
"post" => Some(RawFeedItem::Post {
271
+
uri: item.post,
272
+
context: None,
273
+
}),
274
+
"repost" => Some(RawFeedItem::Repost {
275
+
uri: item.uri,
276
+
post: item.post,
277
+
by: item.did,
278
+
at: item.sort_at,
279
+
context: None,
280
+
}),
281
+
_ => None,
282
+
})
244
283
.collect::<Vec<_>>();
245
284
246
-
let mut posts = hyd.hydrate_feed_posts(at_uris).await;
285
+
if let Some(post) = pin {
286
+
raw_feed.insert(
287
+
0,
288
+
RawFeedItem::Pin {
289
+
uri: post,
290
+
context: None,
291
+
},
292
+
);
293
+
}
247
294
248
-
let feed = results
249
-
.into_iter()
250
-
.filter_map(|(_, uri)| posts.remove(&uri))
251
-
.collect();
295
+
let feed = hyd.hydrate_feed_posts(raw_feed, author_threads_only).await;
252
296
253
297
Ok(Json(FeedRes { cursor, feed }))
254
298
}
···
         .last()
         .map(|(last, _)| last.timestamp_millis().to_string());

-    let at_uris = results
+    let raw_feed = results
         .iter()
-        .map(|(_, uri)| uri.clone())
+        .map(|(_, uri)| RawFeedItem::Post {
+            uri: uri.clone(),
+            context: None,
+        })
         .collect::<Vec<_>>();

-    let mut posts = hyd.hydrate_feed_posts(at_uris).await;
-
-    let feed = results
-        .into_iter()
-        .filter_map(|(_, uri)| posts.remove(&uri))
-        .collect();
+    let feed = hyd.hydrate_feed_posts(raw_feed, false).await;

     Ok(Json(FeedRes { cursor, feed }))
 }
···
     pub threadgate: Option<ThreadgateView>,
 }

-#[derive(Debug, QueryableByName)]
-#[diesel(check_for_backend(diesel::pg::Pg))]
-struct ThreadItem {
-    #[diesel(sql_type = diesel::sql_types::Text)]
-    at_uri: String,
-    #[diesel(sql_type = diesel::sql_types::Nullable<diesel::sql_types::Text>)]
-    parent_uri: Option<String>,
-    // #[diesel(sql_type = diesel::sql_types::Nullable<diesel::sql_types::Text>)]
-    // root_uri: Option<String>,
-    #[diesel(sql_type = diesel::sql_types::Integer)]
-    depth: i32,
-}
-
 pub async fn get_post_thread(
     State(state): State<GlobalState>,
     AtpAcceptLabelers(labelers): AtpAcceptLabelers,
···
     let depth = query.depth.unwrap_or(6).clamp(0, 1000);
     let parent_height = query.parent_height.unwrap_or(80).clamp(0, 1000);

-    let replies = diesel::sql_query(include_str!("../../../sql/thread.sql"))
-        .bind::<diesel::sql_types::Text, _>(&uri)
-        .bind::<diesel::sql_types::Integer, _>(depth as i32)
-        .load::<ThreadItem>(&mut conn)
-        .await?;
+    let root = hyd
+        .hydrate_post(uri.clone())
+        .await
+        .ok_or(Error::not_found())?;
+    let threadgate = root.threadgate.clone();
+
+    if let Some(viewer) = &root.author.viewer {
+        if viewer.blocked_by || viewer.blocking.is_some() {
+            return Ok(Json(GetPostThreadRes {
+                thread: ThreadViewPostType::Blocked {
+                    uri,
+                    blocked: true,
+                    author: BlockedAuthor {
+                        did: root.author.did,
+                        viewer: root.author.viewer,
+                    },
+                },
+                threadgate,
+            }));
+        }
+    }

-    let parents = diesel::sql_query(include_str!("../../../sql/thread_parent.sql"))
-        .bind::<diesel::sql_types::Text, _>(&uri)
-        .bind::<diesel::sql_types::Integer, _>(parent_height as i32)
-        .load::<ThreadItem>(&mut conn)
-        .await?;
+    let replies = crate::db::get_thread_children(&mut conn, &uri, depth as i32).await?;
+    let parents = crate::db::get_thread_parents(&mut conn, &uri, parent_height as i32).await?;

     let reply_uris = replies.iter().map(|item| item.at_uri.clone()).collect();
     let parent_uris = parents.iter().map(|item| item.at_uri.clone()).collect();

-    let root = hyd
-        .hydrate_post(uri.clone())
-        .await
-        .ok_or(Error::not_found())?;
     let mut replies_hydrated = hyd.hydrate_posts(reply_uris).await;
     let mut parents_hydrated = hyd.hydrate_posts(parent_uris).await;

···
             continue;
         };

-        entry.push(ThreadViewPostType::Post(Box::new(ThreadViewPost {
-            post,
-            parent: None,
-            replies: this_post_replies,
-        })));
+        entry.push(postview_to_tvpt(post, None, this_post_replies));
     }

     let mut root_parent = None;
···

         let parent = parents_hydrated
             .remove(&parent.at_uri)
-            .map(|post| {
-                ThreadViewPostType::Post(Box::new(ThreadViewPost {
-                    post,
-                    parent: p2,
-                    replies: vec![],
-                }))
-            })
+            .map(|post| postview_to_tvpt(post, p2, Vec::default()))
             .unwrap_or(ThreadViewPostType::NotFound {
                 uri: parent.at_uri.clone(),
                 not_found: true,
···
     }

     let replies = tmpbuf.remove(&root.uri).unwrap_or_default();
-
-    let threadgate = root.threadgate.clone();

     Ok(Json(GetPostThreadRes {
         threadgate,
···
         .or(schema::posts::embed_subtype.eq_any(filter))
 }

+#[instrument(skip_all)]
 async fn get_feed_skeleton(
     feed: &str,
     service: &str,
···
     }
 }

-async fn get_skeleton_repost_data<'a>(
+#[instrument(skip_all)]
+async fn get_skeleton_repost_data(
     conn: &mut AsyncPgConnection,
-    hyd: &StatefulHydrator<'a>,
     reposts: Vec<String>,
-) -> HashMap<String, FeedReasonRepost> {
+) -> HashMap<String, (String, NaiveDateTime)> {
     let Ok(repost_data) = schema::records::table
         .select((
             schema::records::at_uri,
···
         return HashMap::new();
     };

-    let profiles = repost_data.iter().map(|(_, did, _)| did.clone()).collect();
-    let profiles = hyd.hydrate_profiles_basic(profiles).await;
-
     repost_data
         .into_iter()
-        .filter_map(|(uri, did, indexed_at)| {
-            let by = profiles.get(&did).cloned()?;
+        .map(|(uri, did, at)| (uri, (did, at)))
+        .collect()
+}

-            let repost = FeedReasonRepost {
-                by,
-                uri: Some(uri.clone()),
-                cid: None, // okay, we do have this, but the app doesn't seem to be bothered about not setting it.
-                indexed_at: indexed_at.and_utc(),
-            };
-
-            Some((uri, repost))
-        })
-        .collect()
+fn postview_to_tvpt(
+    post: PostView,
+    parent: Option<ThreadViewPostType>,
+    replies: Vec<ThreadViewPostType>,
+) -> ThreadViewPostType {
+    match &post.author.viewer {
+        Some(v) if v.blocked_by || v.blocking.is_some() => ThreadViewPostType::Blocked {
+            uri: post.uri.clone(),
+            blocked: true,
+            author: BlockedAuthor {
+                did: post.author.did,
+                viewer: post.author.viewer,
+            },
+        },
+        _ => ThreadViewPostType::Post(Box::new(ThreadViewPost {
+            post,
+            parent,
+            replies,
+        })),
+    }
 }
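
Side note (illustration, not part of the patch): the author_feeds rows above are folded into the feed purely by their typ column, and anything that is neither "post" nor "repost" falls out of the filter_map. A minimal standalone sketch of that dispatch, using a hypothetical FeedItem stand-in rather than the crate's RawFeedItem:

// Standalone sketch: unknown `typ` values are silently dropped, mirroring the
// filter_map in the handler above. FeedItem/classify are illustrative names only.
#[derive(Debug, PartialEq)]
enum FeedItem {
    Post { uri: String },
    Repost { uri: String, post: String },
}

fn classify(typ: &str, uri: &str, post: &str) -> Option<FeedItem> {
    match typ {
        "post" => Some(FeedItem::Post { uri: post.to_string() }),
        "repost" => Some(FeedItem::Repost { uri: uri.to_string(), post: post.to_string() }),
        _ => None, // any other row type is skipped
    }
}

fn main() {
    assert_eq!(classify("like", "at://a", "at://b"), None);
    assert!(matches!(classify("post", "at://a", "at://b"), Some(FeedItem::Post { .. })));
}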
+1 -1  parakeet/src/xrpc/app_bsky/graph/relations.rs

+4 -1  parakeet/src/xrpc/app_bsky/mod.rs
···
 mod feed;
 mod graph;
 mod labeler;
+mod unspecced;

 #[rustfmt::skip]
 pub fn routes() -> Router<crate::GlobalState> {
···
         // TODO: app.bsky.notification.putActivitySubscriptions
         // TODO: app.bsky.notification.putPreferences
         // TODO: app.bsky.notification.putPreferencesV2
+        .route("/app.bsky.unspecced.getPostThreadV2", get(unspecced::thread_v2::get_post_thread_v2))
+        .route("/app.bsky.unspecced.getPostThreadOtherV2", get(unspecced::thread_v2::get_post_thread_other_v2))
 }

 async fn not_implemented() -> axum::http::StatusCode {
     axum::http::StatusCode::NOT_IMPLEMENTED
-}
+}
+1  parakeet/src/xrpc/app_bsky/unspecced/mod.rs
···
+pub mod thread_v2;
+379  parakeet/src/xrpc/app_bsky/unspecced/thread_v2.rs
···
+use crate::db::ThreadItem;
+use crate::hydration::StatefulHydrator;
+use crate::xrpc::error::{Error, XrpcResult};
+use crate::xrpc::extract::{AtpAcceptLabelers, AtpAuth};
+use crate::xrpc::normalise_at_uri;
+use crate::GlobalState;
+use axum::extract::{Query, State};
+use axum::Json;
+use itertools::Itertools;
+use lexica::app_bsky::feed::{BlockedAuthor, PostView, ThreadgateView};
+use lexica::app_bsky::unspecced::{ThreadItemPost, ThreadV2Item, ThreadV2ItemType};
+use serde::{Deserialize, Serialize};
+use std::cmp::Ordering;
+use std::collections::{HashMap, HashSet};
+
+const THREAD_PARENTS: usize = 50;
+const DEFAULT_BRANCHING: u32 = 10;
+const DEFAULT_DEPTH: u32 = 6;
+
+#[derive(Copy, Clone, Debug, Default, Deserialize)]
+#[serde(rename_all = "lowercase")]
+pub enum PostThreadSort {
+    Newest,
+    #[default]
+    Oldest,
+    Top,
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct GetPostThreadV2Req {
+    pub anchor: String,
+    pub above: Option<bool>,
+    pub below: Option<u32>,
+    pub branching_factor: Option<u32>,
+    #[serde(default)]
+    pub sort: PostThreadSort,
+}
+
+#[derive(Debug, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct GetPostThreadV2Res {
+    pub thread: Vec<ThreadV2Item>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub threadgate: Option<ThreadgateView>,
+    pub has_other_replies: bool,
+}
+
+pub async fn get_post_thread_v2(
+    State(state): State<GlobalState>,
+    AtpAcceptLabelers(labelers): AtpAcceptLabelers,
+    maybe_auth: Option<AtpAuth>,
+    Query(query): Query<GetPostThreadV2Req>,
+) -> XrpcResult<Json<GetPostThreadV2Res>> {
+    let mut conn = state.pool.get().await?;
+    let maybe_did = maybe_auth.clone().map(|v| v.0);
+    let hyd = StatefulHydrator::new(&state.dataloaders, &state.cdn, &labelers, maybe_auth);
+
+    let uri = normalise_at_uri(&state.dataloaders, &query.anchor).await?;
+    let depth = query.below.unwrap_or(DEFAULT_DEPTH).clamp(0, 20) as i32;
+    let branching_factor = query
+        .branching_factor
+        .unwrap_or(DEFAULT_BRANCHING)
+        .clamp(0, 100) as i32;
+
+    let anchor = hyd
+        .hydrate_post(uri.clone())
+        .await
+        .ok_or(Error::not_found())?;
+
+    if let Some(v) = &anchor.author.viewer {
+        if v.blocked_by || v.blocking.is_some() {
+            let block = ThreadV2ItemType::Blocked {
+                author: BlockedAuthor {
+                    did: anchor.author.did,
+                    viewer: anchor.author.viewer,
+                },
+            };
+
+            return Ok(Json(GetPostThreadV2Res {
+                thread: vec![ThreadV2Item {
+                    uri,
+                    depth: 0,
+                    value: block,
+                }],
+                threadgate: anchor.threadgate,
+                has_other_replies: false,
+            }));
+        }
+    }
+
+    // get the root post URI (if there is one) and return its author's DID.
+    let root_uri = crate::db::get_root_post(&mut conn, &uri)
+        .await?
+        .unwrap_or(uri.clone());
+    let root_did = root_uri[5..].split('/').collect::<Vec<_>>()[0];
+
+    let replies =
+        crate::db::get_thread_children_branching(&mut conn, &uri, depth, branching_factor + 1)
+            .await?;
+    let reply_uris = replies
+        .iter()
+        .map(|item| item.at_uri.clone())
+        .collect::<Vec<_>>();
+
+    // bluesky seems to use -50 atm. we get 1 extra to know if to set more_parents.
+    let parents = match query.above.unwrap_or(true) {
+        true => crate::db::get_thread_parents(&mut conn, &uri, THREAD_PARENTS as i32 + 1).await?,
+        false => vec![],
+    };
+    let parent_uris = parents
+        .iter()
+        .map(|item| item.at_uri.clone())
+        .collect::<Vec<_>>();
+
+    let (mut replies_hyd, mut parents_hyd) = tokio::join!(
+        hyd.hydrate_posts(reply_uris),
+        hyd.hydrate_posts(parent_uris),
+    );
+
+    let threadgate = anchor.threadgate.clone();
+    let hidden: HashSet<_, std::hash::RandomState> = match &threadgate {
+        Some(tg) => crate::db::get_threadgate_hiddens(&mut conn, &tg.uri).await?,
+        None => None,
+    }
+    .map(|hiddens| HashSet::from_iter(Vec::from(hiddens)))
+    .unwrap_or_default();
+
+    let root_has_more = parents.len() > THREAD_PARENTS;
+    let mut is_op_thread = true;
+
+    let mut thread = Vec::with_capacity(1 + replies.len() + parents.len());
+
+    thread.extend(
+        parents
+            .into_iter()
+            .tail(THREAD_PARENTS)
+            .enumerate()
+            .map(|(idx, item)| {
+                let value = parents_hyd
+                    .remove(&item.at_uri)
+                    .map(|post| {
+                        if let Some(v) = &post.author.viewer {
+                            if v.blocked_by || v.blocking.is_some() {
+                                return ThreadV2ItemType::Blocked {
+                                    author: BlockedAuthor {
+                                        did: post.author.did,
+                                        viewer: post.author.viewer,
+                                    },
+                                };
+                            }
+                        }
+
+                        let op_thread = (is_op_thread
+                            || item.root_uri.is_none() && item.parent_uri.is_none())
+                            && post.author.did == root_did;
+
+                        ThreadV2ItemType::Post(ThreadItemPost {
+                            post,
+                            more_parents: idx == 0 && root_has_more,
+                            more_replies: 0,
+                            op_thread,
+                            hidden_by_threadgate: false,
+                            muted_by_viewer: false,
+                        })
+                    })
+                    .unwrap_or(ThreadV2ItemType::NotFound {});
+
+                ThreadV2Item {
+                    uri: item.at_uri,
+                    depth: -item.depth - 1,
+                    value,
+                }
+            }),
+    );
+
+    is_op_thread = is_op_thread && anchor.author.did == root_did;
+    thread.push(ThreadV2Item {
+        uri: uri.clone(),
+        depth: 0,
+        value: ThreadV2ItemType::Post(ThreadItemPost {
+            post: anchor,
+            more_parents: false,
+            more_replies: 0,
+            op_thread: is_op_thread,
+            hidden_by_threadgate: false,
+            muted_by_viewer: false,
+        }),
+    });
+
+    let mut replies_grouped = replies
+        .into_iter()
+        .into_group_map_by(|item| item.parent_uri.clone().unwrap_or_default());
+
+    // start with the anchor
+    let (children, has_other_replies) = build_thread_children(
+        &mut replies_grouped,
+        &mut replies_hyd,
+        &hidden,
+        &uri,
+        is_op_thread,
+        1,
+        &BuildThreadChildrenOpts {
+            root_did,
+            sort: query.sort,
+            maybe_did: &maybe_did,
+            max_depth: depth,
+        },
+    );
+    thread.extend(children);
+
+    Ok(Json(GetPostThreadV2Res {
+        thread,
+        threadgate,
+        has_other_replies,
+    }))
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct GetPostThreadOtherV2Req {
+    pub anchor: String,
+}
+
+#[derive(Debug, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct GetPostThreadOtherV2Res {
+    pub thread: Vec<ThreadV2Item>,
+}
+
+pub async fn get_post_thread_other_v2(
+    State(state): State<GlobalState>,
+    AtpAcceptLabelers(labelers): AtpAcceptLabelers,
+    maybe_auth: Option<AtpAuth>,
+    Query(query): Query<GetPostThreadOtherV2Req>,
+) -> XrpcResult<Json<GetPostThreadOtherV2Res>> {
+    let mut conn = state.pool.get().await?;
+    let hyd = StatefulHydrator::new(&state.dataloaders, &state.cdn, &labelers, maybe_auth);
+
+    let uri = normalise_at_uri(&state.dataloaders, &query.anchor).await?;
+
+    let root = crate::db::get_root_post(&mut conn, &uri)
+        .await?
+        .unwrap_or(uri.clone());
+
+    // this only returns immediate children (depth==1) where hiddenByThreadgate=TRUE
+    let replies = crate::db::get_thread_children_hidden(&mut conn, &uri, &root).await?;
+    let reply_uris = replies
+        .into_iter()
+        .map(|item| item.at_uri)
+        .collect::<Vec<_>>();
+    let thread = hyd
+        .hydrate_posts(reply_uris)
+        .await
+        .into_iter()
+        .filter(|(_, post)| matches!(&post.author.viewer, Some(viewer) if viewer.blocked_by || viewer.blocking.is_some()))
+        .map(|(uri, post)| {
+            let post = ThreadItemPost {
+                post,
+                more_parents: false,
+                more_replies: 0,
+                op_thread: false,
+                hidden_by_threadgate: true,
+                muted_by_viewer: false,
+            };
+
+            ThreadV2Item {
+                uri,
+                depth: 1,
+                value: ThreadV2ItemType::Post(post),
+            }
+        })
+        .collect();
+
+    Ok(Json(GetPostThreadOtherV2Res { thread }))
+}
+
+#[derive(Debug)]
+struct BuildThreadChildrenOpts<'a> {
+    root_did: &'a str,
+    sort: PostThreadSort,
+    maybe_did: &'a Option<String>,
+    max_depth: i32,
+}
+
+fn build_thread_children(
+    grouped_replies: &mut HashMap<String, Vec<ThreadItem>>,
+    replies_hyd: &mut HashMap<String, PostView>,
+    hidden: &HashSet<String>,
+    parent: &str,
+    is_op_thread: bool,
+    depth: i32,
+    opts: &BuildThreadChildrenOpts,
+) -> (Vec<ThreadV2Item>, bool) {
+    let mut has_other_replies = false;
+
+    let Some(replies) = grouped_replies.remove(parent) else {
+        return (Vec::default(), has_other_replies);
+    };
+
+    let replies = replies
+        .into_iter()
+        .filter_map(|item| replies_hyd.remove(&item.at_uri))
+        .sorted_by(sort_replies(&opts.sort));
+
+    let mut out = Vec::new();
+
+    for post in replies {
+        let reply_count = grouped_replies
+            .get(&post.uri)
+            .map(|v| v.len())
+            .unwrap_or_default();
+        let at_max = depth == opts.max_depth;
+        let more_replies = if at_max { reply_count } else { 0 };
+        let op_thread = is_op_thread && post.author.did == opts.root_did;
+
+        // shouldn't push to the thread if there's a block relation. Bsky doesn't push a type of Blocked for replies...
+        if let Some(v) = &post.author.viewer {
+            if v.blocked_by || v.blocking.is_some() {
+                continue;
+            }
+        }
+
+        // check if the post is hidden AND we're NOT the author (hidden posts still show for their author)
+        if hidden.contains(&post.uri) && !did_is_cur(opts.maybe_did, &post.author.did) {
+            // post is hidden - do not ~pass go~ push to the thread.
+            if depth == 1 {
+                has_other_replies = true;
+            }
+            continue;
+        }
+
+        let uri = post.uri.clone();
+        out.push(ThreadV2Item {
+            uri: post.uri.clone(),
+            depth,
+            value: ThreadV2ItemType::Post(ThreadItemPost {
+                post,
+                more_parents: false,
+                more_replies: more_replies as i32,
+                op_thread,
+                hidden_by_threadgate: false,
+                muted_by_viewer: false,
+            }),
+        });
+
+        if !at_max {
+            // we don't care about has_other_replies when recursing
+            let (children, _) = build_thread_children(
+                grouped_replies,
+                replies_hyd,
+                hidden,
+                &uri,
+                op_thread,
+                depth + 1,
+                opts,
+            );
+
+            out.extend(children);
+        }
+    }
+
+    (out, has_other_replies)
+}
+
+fn sort_replies(sort: &PostThreadSort) -> impl Fn(&PostView, &PostView) -> Ordering + use<'_> {
+    move |a: &PostView, b: &PostView| match sort {
+        PostThreadSort::Newest => b.indexed_at.cmp(&a.indexed_at),
+        PostThreadSort::Oldest => a.indexed_at.cmp(&b.indexed_at),
+        PostThreadSort::Top => b.stats.like_count.cmp(&a.stats.like_count),
+    }
+}
+
+fn did_is_cur(cur: &Option<String>, did: &String) -> bool {
+    match cur {
+        Some(cur) => did == cur,
+        None => false,
+    }
+}
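
Side note (illustration, not part of the patch): sort_replies above orders sibling replies by indexed_at for newest/oldest and by like count for top. A minimal standalone sketch of the same comparator over plain tuples, assuming (indexed_at as unix millis, like_count) as stand-ins for the PostView fields:

use std::cmp::Ordering;

#[derive(Copy, Clone)]
enum Sort { Newest, Oldest, Top }

// Stand-in comparator: tuple.0 plays indexed_at, tuple.1 plays like_count.
fn cmp(sort: Sort, a: &(i64, u64), b: &(i64, u64)) -> Ordering {
    match sort {
        Sort::Newest => b.0.cmp(&a.0),
        Sort::Oldest => a.0.cmp(&b.0),
        Sort::Top => b.1.cmp(&a.1),
    }
}

fn main() {
    let mut replies = vec![(10, 5), (30, 1), (20, 9)];
    replies.sort_by(|a, b| cmp(Sort::Top, a, b));
    assert_eq!(replies[0], (20, 9)); // most-liked reply first under "top"
    replies.sort_by(|a, b| cmp(Sort::Newest, a, b));
    assert_eq!(replies[0], (30, 1)); // most recently indexed first under "newest"
}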
+1 -1  parakeet/src/xrpc/community_lexicon/bookmarks.rs

+3  parakeet/src/xrpc/jwt.rs
···
 use std::collections::HashMap;
 use std::sync::{Arc, LazyLock};
 use tokio::sync::RwLock;
+use tracing::instrument;

 static DUMMY_KEY: LazyLock<DecodingKey> = LazyLock::new(|| DecodingKey::from_secret(&[]));
 static NO_VERIFY: LazyLock<Validation> = LazyLock::new(|| {
···
         }
     }

+    #[instrument(skip_all)]
     pub async fn resolve_and_verify_jwt(&self, token: &str, aud: Option<&str>) -> Option<Claims> {
         // first we need to decode without verifying, to get iss.
         let unsafe_data = jsonwebtoken::decode::<Claims>(token, &DUMMY_KEY, &NO_VERIFY).ok()?;
···
         self.verify_jwt_multibase_with_alg(token, &multibase_key, unsafe_data.header.alg, aud)
     }

+    #[instrument(skip_all)]
     async fn resolve_key(&self, did: &str) -> Option<String> {
         tracing::trace!("resolving multikey for {did}");
         let did_doc = self.resolver.resolve_did(did).await.ok()??;
+77 -14  parakeet-db/src/models.rs
···
     pub joined_sp_uri: Option<String>,
     pub joined_sp_cid: Option<String>,

+    pub pronouns: Option<String>,
+    pub website: Option<String>,
+
     pub created_at: NaiveDateTime,
     pub indexed_at: NaiveDateTime,
 }
···

     pub content: String,
     pub facets: Option<serde_json::Value>,
-    pub languages: Vec<Option<String>>,
-    pub tags: Vec<Option<String>>,
+    pub languages: not_null_vec::TextArray,
+    pub tags: not_null_vec::TextArray,

     pub parent_uri: Option<String>,
     pub parent_cid: Option<String>,
···

     pub embed: Option<String>,
     pub embed_subtype: Option<String>,
+
+    pub mentions: Option<not_null_vec::TextArray>,
+    pub violates_threadgate: bool,

     pub created_at: DateTime<Utc>,
     pub indexed_at: NaiveDateTime,
···
     pub cid: String,
     pub post_uri: String,

-    pub detached: Vec<Option<String>>,
-    pub rules: Vec<Option<String>>,
+    pub detached: not_null_vec::TextArray,
+    pub rules: not_null_vec::TextArray,

     pub created_at: DateTime<Utc>,
     pub indexed_at: NaiveDateTime,
···
     pub cid: String,
     pub post_uri: String,

-    pub hidden_replies: Vec<Option<String>>,
-    pub allow: Vec<Option<String>>,
-    pub allowed_lists: Vec<Option<String>>,
+    pub hidden_replies: not_null_vec::TextArray,
+    pub allow: Option<not_null_vec::TextArray>,
+    pub allowed_lists: Option<not_null_vec::TextArray>,

     pub record: serde_json::Value,

270
276
pub description: Option<String>,
271
277
pub description_facets: Option<serde_json::Value>,
272
278
pub list: String,
273
-
pub feeds: Option<Vec<Option<String>>>,
279
+
pub feeds: Option<not_null_vec::TextArray>,
274
280
275
281
pub created_at: DateTime<Utc>,
276
282
pub indexed_at: NaiveDateTime,
···
     pub did: String,
     pub cid: String,

-    pub reasons: Option<Vec<Option<String>>>,
-    pub subject_types: Option<Vec<Option<String>>>,
-    pub subject_collections: Option<Vec<Option<String>>>,
+    pub reasons: Option<not_null_vec::TextArray>,
+    pub subject_types: Option<not_null_vec::TextArray>,
+    pub subject_collections: Option<not_null_vec::TextArray>,

     pub created_at: NaiveDateTime,
     pub indexed_at: NaiveDateTime,
 }

-#[derive(Clone, Debug, Serialize, Deserialize, Queryable, Selectable, Identifiable, Associations)]
+#[derive(
+    Clone, Debug, Serialize, Deserialize, Queryable, Selectable, Identifiable, Associations,
+)]
 #[diesel(table_name = crate::schema::labeler_defs)]
 #[diesel(belongs_to(LabelerService, foreign_key = labeler))]
 #[diesel(check_for_backend(diesel::pg::Pg))]
···
     pub subject: String,
     pub subject_cid: Option<String>,
     pub subject_type: String,
-    pub tags: Vec<Option<String>>,
+    pub tags: not_null_vec::TextArray,
     pub created_at: DateTime<Utc>,
 }

···
     pub subject_cid: Option<String>,
     pub subject_type: &'a str,
     pub tags: Vec<String>,
-}
+}
+
+#[derive(Debug, Queryable, Selectable, Identifiable)]
+#[diesel(table_name = crate::schema::author_feeds)]
+#[diesel(primary_key(uri))]
+#[diesel(check_for_backend(diesel::pg::Pg))]
+pub struct AuthorFeedItem {
+    pub uri: String,
+    pub cid: String,
+    pub post: String,
+    pub did: String,
+    pub typ: String,
+    pub sort_at: DateTime<Utc>,
+}
+
+pub use not_null_vec::TextArray;
+mod not_null_vec {
+    use diesel::deserialize::FromSql;
+    use diesel::pg::Pg;
+    use diesel::sql_types::{Array, Nullable, Text};
+    use diesel::{deserialize, FromSqlRow};
+    use serde::{Deserialize, Serialize};
+    use std::ops::{Deref, DerefMut};
+
+    #[derive(Clone, Debug, Default, Serialize, Deserialize, FromSqlRow)]
+    #[diesel(sql_type = Array<Nullable<Text>>)]
+    pub struct TextArray(pub Vec<String>);
+
+    impl FromSql<Array<Nullable<Text>>, Pg> for TextArray {
+        fn from_sql(bytes: diesel::pg::PgValue<'_>) -> deserialize::Result<Self> {
+            let vec_with_nulls =
+                <Vec<Option<String>> as FromSql<Array<Nullable<Text>>, Pg>>::from_sql(bytes)?;
+            Ok(TextArray(vec_with_nulls.into_iter().flatten().collect()))
+        }
+    }
+
+    impl Deref for TextArray {
+        type Target = Vec<String>;
+
+        fn deref(&self) -> &Self::Target {
+            &self.0
+        }
+    }
+
+    impl DerefMut for TextArray {
+        fn deref_mut(&mut self) -> &mut Self::Target {
+            &mut self.0
+        }
+    }
+
+    impl From<TextArray> for Vec<String> {
+        fn from(v: TextArray) -> Vec<String> {
+            v.0
+        }
+    }
+}
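
Side note (illustration, not part of the patch): TextArray exists because Postgres text[] columns are element-wise nullable, so diesel maps them to Vec<Option<String>>; the wrapper flattens the NULLs away once at the deserialisation edge. A minimal sketch of that flattening, with the diesel plumbing omitted:

// Standalone sketch (no diesel): the essence of TextArray::from_sql is dropping
// any NULL entries from a nullable text[] and keeping a plain Vec<String>.
fn flatten_text_array(raw: Vec<Option<String>>) -> Vec<String> {
    raw.into_iter().flatten().collect()
}

fn main() {
    let raw = vec![Some("a".into()), None, Some("b".into())];
    assert_eq!(flatten_text_array(raw), vec!["a".to_string(), "b".to_string()]);
}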
+31 -2  parakeet-db/src/schema.rs
···
 }

 diesel::table! {
+    author_feeds (uri) {
+        uri -> Text,
+        cid -> Text,
+        post -> Text,
+        did -> Text,
+        typ -> Text,
+        sort_at -> Timestamptz,
+    }
+}
+
+diesel::table! {
     backfill (repo, repo_ver) {
         repo -> Text,
         repo_ver -> Text,
···
         embed_subtype -> Nullable<Text>,
         created_at -> Timestamptz,
         indexed_at -> Timestamp,
+        mentions -> Nullable<Array<Nullable<Text>>>,
+        violates_threadgate -> Bool,
+    }
+}
+
+diesel::table! {
+    profile_states (did, subject) {
+        did -> Text,
+        subject -> Text,
+        muting -> Bool,
+        blocked -> Bool,
+        blocking -> Nullable<Text>,
+        following -> Nullable<Text>,
+        followed -> Nullable<Text>,
     }
 }

···
         joined_sp_cid -> Nullable<Text>,
         created_at -> Timestamp,
         indexed_at -> Timestamp,
+        pronouns -> Nullable<Text>,
+        website -> Nullable<Text>,
     }
 }

···
         cid -> Text,
         post_uri -> Text,
         hidden_replies -> Array<Nullable<Text>>,
-        allow -> Array<Nullable<Text>>,
-        allowed_lists -> Array<Nullable<Text>>,
+        allow -> Nullable<Array<Nullable<Text>>>,
+        allowed_lists -> Nullable<Array<Nullable<Text>>>,
         record -> Jsonb,
         created_at -> Timestamptz,
         indexed_at -> Timestamp,
···

 diesel::allow_tables_to_appear_in_same_query!(
     actors,
+    author_feeds,
     backfill,
     backfill_jobs,
     blocks,
···
     post_embed_video_captions,
     postgates,
     posts,
+    profile_states,
     profiles,
     records,
     reposts,
+1 -1  parakeet-db/src/types.rs

+24 -2  parakeet-index/Cargo.toml
···
 [dependencies]
 tonic = "0.13.0"
 prost = "0.13.5"
+tonic-tracing-opentelemetry = { version = "0.32", optional = true }
+tower = { version = "0.5", optional = true }

 eyre = { version = "0.6.12", optional = true }
 figment = { version = "0.10.19", features = ["env", "toml"], optional = true }
 itertools = { version = "0.14.0", optional = true }
+opentelemetry = { version = "0.31.0", optional = true }
+opentelemetry-otlp = { version = "0.31.0", features = ["reqwest-rustls"], optional = true }
+opentelemetry_sdk = { version = "0.31.0", optional = true }
 rocksdb = { version = "0.23", default-features = false, features = ["lz4", "bindgen-runtime"], optional = true }
 serde = { version = "1.0.217", features = ["derive"], optional = true }
 tokio = { version = "1.42.0", features = ["full"], optional = true }
 tonic-health = { version = "0.13.0", optional = true }
 tracing = { version = "0.1.40", optional = true }
-tracing-subscriber = { version = "0.3.18", optional = true }
+tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json"], optional = true }
+tracing-opentelemetry = { version = "0.32", optional = true }

 [build-dependencies]
 tonic-build = "0.13.0"

 [features]
-server = ["dep:eyre", "dep:figment", "dep:itertools", "dep:rocksdb", "dep:serde", "dep:tokio", "dep:tonic-health", "dep:tracing", "dep:tracing-subscriber"]
+otel = ["dep:tonic-tracing-opentelemetry", "dep:tower"]
+server = [
+    "dep:eyre",
+    "dep:figment",
+    "dep:itertools",
+    "dep:opentelemetry",
+    "dep:opentelemetry-otlp",
+    "dep:opentelemetry_sdk",
+    "dep:rocksdb",
+    "dep:serde",
+    "dep:tokio",
+    "dep:tonic-health",
+    "otel",
+    "dep:tracing",
+    "dep:tracing-subscriber",
+    "dep:tracing-opentelemetry"
+]
+20 -1  parakeet-index/src/lib.rs
···
+use tonic::transport::Channel;
+
 #[allow(clippy::all)]
 pub mod index {
     tonic::include_proto!("parakeet");
 }

 pub use index::*;
-pub type Client = index_client::IndexClient<tonic::transport::Channel>;
+#[cfg(not(feature = "otel"))]
+pub type Client = index_client::IndexClient<Channel>;
+#[cfg(feature = "otel")]
+pub type Client = index_client::IndexClient<
+    tonic_tracing_opentelemetry::middleware::client::OtelGrpcService<Channel>,
+>;

 #[cfg(feature = "server")]
 pub mod server;
+
+#[cfg(feature = "otel")]
+pub async fn connect_with_otel(
+    uri: String,
+) -> Result<Client, Box<dyn std::error::Error + Send + Sync>> {
+    let channel = Channel::from_shared(uri)?.connect().await?;
+    let channel = tower::ServiceBuilder::new()
+        .layer(tonic_tracing_opentelemetry::middleware::client::OtelGrpcLayer)
+        .service(channel);
+
+    Ok(index_client::IndexClient::new(channel))
+}
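
Side note (illustration, not part of the patch): with the otel feature enabled the Client alias changes type, so callers go through connect_with_otel instead of the plain tonic constructor. A hedged sketch of what a caller might look like; make_client is a hypothetical helper, and the non-otel branch assumes the tonic-generated IndexClient::connect constructor:

// Hypothetical caller: choose the instrumented client only when the crate is
// built with the `otel` feature.
#[cfg(feature = "otel")]
async fn make_client(
    uri: String,
) -> Result<parakeet_index::Client, Box<dyn std::error::Error + Send + Sync>> {
    parakeet_index::connect_with_otel(uri).await
}

#[cfg(not(feature = "otel"))]
async fn make_client(
    uri: String,
) -> Result<parakeet_index::Client, Box<dyn std::error::Error + Send + Sync>> {
    // tonic-generated clients expose `connect` when built with transport support
    Ok(parakeet_index::index_client::IndexClient::connect(uri).await?)
}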
+9 -3  parakeet-index/src/main.rs
···
 use parakeet_index::index_server::IndexServer;
 use parakeet_index::server::service::Service;
-use parakeet_index::server::{GlobalState, config};
+use parakeet_index::server::{GlobalState, config, instrumentation};
 use std::sync::Arc;
 use tonic::transport::Server;
+use tonic_tracing_opentelemetry::middleware::server::OtelGrpcLayer;

 #[tokio::main]
 async fn main() -> eyre::Result<()> {
-    tracing_subscriber::fmt::init();
-
     let conf = config::load_config()?;

+    instrumentation::init_instruments(&conf.instruments);
+
     let db_root = conf.index_db_path.parse()?;
     let addr = std::net::SocketAddr::new(conf.server.bind_address.parse()?, conf.server.port);
     let state = Arc::new(GlobalState::new(db_root)?);
···
     reporter.set_serving::<IndexServer<Service>>().await;

     let service = Service::new(state.clone());
+
+    let mw = tower::ServiceBuilder::new()
+        .option_layer(conf.instruments.otel_enable.then(OtelGrpcLayer::default));
+
     Server::builder()
+        .layer(mw)
         .add_service(health_service)
         .add_service(IndexServer::new(service))
         .serve(addr)
+10  parakeet-index/src/server/config.rs
···

 #[derive(Debug, Deserialize)]
 pub struct Config {
+    #[serde(flatten)]
+    pub instruments: ConfigInstruments,
     pub database_url: String,
     pub index_db_path: String,
     #[serde(default)]
     pub server: ConfigServer,
+}
+
+#[derive(Debug, Deserialize)]
+pub struct ConfigInstruments {
+    #[serde(default)]
+    pub otel_enable: bool,
+    #[serde(default)]
+    pub log_json: bool,
 }

 #[derive(Debug, Deserialize)]
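
Side note (illustration, not part of the patch): because of #[serde(flatten)], otel_enable and log_json are read from the top level of the config source (or environment) rather than from a nested instruments table. A trimmed-down sketch of that behaviour, using serde_json as a stand-in for figment and abbreviating the mirrored structs (database_url etc. omitted); assumes serde with derive and serde_json are available:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct ConfigInstruments {
    #[serde(default)]
    otel_enable: bool,
    #[serde(default)]
    log_json: bool,
}

#[derive(Debug, Deserialize)]
struct Config {
    // flatten lifts the instrumentation switches to the top level of the input
    #[serde(flatten)]
    instruments: ConfigInstruments,
    index_db_path: String,
}

fn main() {
    let cfg: Config =
        serde_json::from_str(r#"{ "index_db_path": "/tmp/index", "otel_enable": true }"#).unwrap();
    assert!(cfg.instruments.otel_enable);
    assert!(!cfg.instruments.log_json); // missing field falls back to its default
}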
+57  parakeet-index/src/server/instrumentation.rs
···
+use opentelemetry::trace::TracerProvider;
+use opentelemetry_otlp::{Protocol, SpanExporter, WithExportConfig};
+use opentelemetry_sdk::trace::{Sampler, SdkTracer, SdkTracerProvider};
+use tracing::Subscriber;
+use tracing_opentelemetry::OpenTelemetryLayer;
+use tracing_subscriber::filter::Filtered;
+use tracing_subscriber::layer::SubscriberExt;
+use tracing_subscriber::registry::LookupSpan;
+use tracing_subscriber::util::SubscriberInitExt;
+use tracing_subscriber::{EnvFilter, Layer};
+
+pub fn init_instruments(cfg: &super::config::ConfigInstruments) {
+    let otel_layer = cfg.otel_enable.then(init_otel);
+    let log_layer = init_log(cfg.log_json);
+
+    tracing_subscriber::registry()
+        .with(log_layer)
+        .with(otel_layer)
+        .init();
+}
+
+fn init_otel<S>() -> Filtered<OpenTelemetryLayer<S, SdkTracer>, EnvFilter, S>
+where
+    S: Subscriber + for<'span> LookupSpan<'span>,
+{
+    let span_exporter = SpanExporter::builder()
+        .with_http()
+        .with_protocol(Protocol::HttpBinary)
+        .build()
+        .unwrap();
+
+    let tracer_provider = SdkTracerProvider::builder()
+        .with_batch_exporter(span_exporter)
+        .with_sampler(Sampler::AlwaysOn)
+        .build();
+
+    opentelemetry::global::set_tracer_provider(tracer_provider.clone());
+
+    let tracer = tracer_provider.tracer("parakeet");
+    let otel_filter = EnvFilter::new("info,otel::tracing=trace");
+
+    OpenTelemetryLayer::new(tracer).with_filter(otel_filter)
+}
+
+fn init_log<S>(json: bool) -> Filtered<Box<dyn Layer<S> + Send + Sync>, EnvFilter, S>
+where
+    S: Subscriber + for<'span> LookupSpan<'span>,
+{
+    let stdout_filter =
+        EnvFilter::from_default_env().add_directive("otel::tracing=off".parse().unwrap());
+
+    match json {
+        true => tracing_subscriber::fmt::layer().json().boxed(),
+        false => tracing_subscriber::fmt::layer().boxed(),
+    }
+    .with_filter(stdout_filter)
+}