+1
-245
Cargo.lock
+1
-245
Cargo.lock
···
125
125
checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"
126
126
127
127
[[package]]
128
-
name = "async-channel"
129
-
version = "1.9.0"
130
-
source = "registry+https://github.com/rust-lang/crates.io-index"
131
-
checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35"
132
-
dependencies = [
133
-
"concurrent-queue",
134
-
"event-listener 2.5.3",
135
-
"futures-core",
136
-
]
137
-
138
-
[[package]]
139
-
name = "async-channel"
140
-
version = "2.3.1"
141
-
source = "registry+https://github.com/rust-lang/crates.io-index"
142
-
checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a"
143
-
dependencies = [
144
-
"concurrent-queue",
145
-
"event-listener-strategy",
146
-
"futures-core",
147
-
"pin-project-lite",
148
-
]
149
-
150
-
[[package]]
151
128
name = "async-compression"
152
129
version = "0.4.22"
153
130
source = "registry+https://github.com/rust-lang/crates.io-index"
···
161
138
]
162
139
163
140
[[package]]
164
-
name = "async-executor"
165
-
version = "1.13.1"
166
-
source = "registry+https://github.com/rust-lang/crates.io-index"
167
-
checksum = "30ca9a001c1e8ba5149f91a74362376cc6bc5b919d92d988668657bd570bdcec"
168
-
dependencies = [
169
-
"async-task",
170
-
"concurrent-queue",
171
-
"fastrand",
172
-
"futures-lite",
173
-
"slab",
174
-
]
175
-
176
-
[[package]]
177
-
name = "async-global-executor"
178
-
version = "2.4.1"
179
-
source = "registry+https://github.com/rust-lang/crates.io-index"
180
-
checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c"
181
-
dependencies = [
182
-
"async-channel 2.3.1",
183
-
"async-executor",
184
-
"async-io",
185
-
"async-lock",
186
-
"blocking",
187
-
"futures-lite",
188
-
"once_cell",
189
-
]
190
-
191
-
[[package]]
192
-
name = "async-io"
193
-
version = "2.4.0"
194
-
source = "registry+https://github.com/rust-lang/crates.io-index"
195
-
checksum = "43a2b323ccce0a1d90b449fd71f2a06ca7faa7c54c2751f06c9bd851fc061059"
196
-
dependencies = [
197
-
"async-lock",
198
-
"cfg-if",
199
-
"concurrent-queue",
200
-
"futures-io",
201
-
"futures-lite",
202
-
"parking",
203
-
"polling",
204
-
"rustix",
205
-
"slab",
206
-
"tracing",
207
-
"windows-sys 0.59.0",
208
-
]
209
-
210
-
[[package]]
211
-
name = "async-lock"
212
-
version = "3.4.0"
213
-
source = "registry+https://github.com/rust-lang/crates.io-index"
214
-
checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18"
215
-
dependencies = [
216
-
"event-listener 5.4.0",
217
-
"event-listener-strategy",
218
-
"pin-project-lite",
219
-
]
220
-
221
-
[[package]]
222
141
name = "async-recursion"
223
142
version = "1.1.1"
224
143
source = "registry+https://github.com/rust-lang/crates.io-index"
···
228
147
"quote",
229
148
"syn",
230
149
]
231
-
232
-
[[package]]
233
-
name = "async-std"
234
-
version = "1.13.0"
235
-
source = "registry+https://github.com/rust-lang/crates.io-index"
236
-
checksum = "c634475f29802fde2b8f0b505b1bd00dfe4df7d4a000f0b36f7671197d5c3615"
237
-
dependencies = [
238
-
"async-channel 1.9.0",
239
-
"async-global-executor",
240
-
"async-io",
241
-
"async-lock",
242
-
"crossbeam-utils",
243
-
"futures-channel",
244
-
"futures-core",
245
-
"futures-io",
246
-
"futures-lite",
247
-
"gloo-timers",
248
-
"kv-log-macro",
249
-
"log",
250
-
"memchr",
251
-
"once_cell",
252
-
"pin-project-lite",
253
-
"pin-utils",
254
-
"slab",
255
-
"wasm-bindgen-futures",
256
-
]
257
-
258
-
[[package]]
259
-
name = "async-task"
260
-
version = "4.7.1"
261
-
source = "registry+https://github.com/rust-lang/crates.io-index"
262
-
checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de"
263
150
264
151
[[package]]
265
152
name = "async-trait"
···
488
375
]
489
376
490
377
[[package]]
491
-
name = "blocking"
492
-
version = "1.6.1"
493
-
source = "registry+https://github.com/rust-lang/crates.io-index"
494
-
checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea"
495
-
dependencies = [
496
-
"async-channel 2.3.1",
497
-
"async-task",
498
-
"futures-io",
499
-
"futures-lite",
500
-
"piper",
501
-
]
502
-
503
-
[[package]]
504
378
name = "brotli"
505
379
version = "7.0.0"
506
380
source = "registry+https://github.com/rust-lang/crates.io-index"
···
727
601
]
728
602
729
603
[[package]]
730
-
name = "concurrent-queue"
731
-
version = "2.5.0"
732
-
source = "registry+https://github.com/rust-lang/crates.io-index"
733
-
checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973"
734
-
dependencies = [
735
-
"crossbeam-utils",
736
-
]
737
-
738
-
[[package]]
739
604
name = "const-oid"
740
605
version = "0.9.6"
741
606
source = "registry+https://github.com/rust-lang/crates.io-index"
···
968
833
name = "dataloader"
969
834
version = "0.18.0"
970
835
dependencies = [
971
-
"async-std",
972
836
"futures",
973
837
"tokio",
974
838
]
···
1256
1120
]
1257
1121
1258
1122
[[package]]
1259
-
name = "event-listener"
1260
-
version = "2.5.3"
1261
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1262
-
checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0"
1263
-
1264
-
[[package]]
1265
-
name = "event-listener"
1266
-
version = "5.4.0"
1267
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1268
-
checksum = "3492acde4c3fc54c845eaab3eed8bd00c7a7d881f78bfc801e43a93dec1331ae"
1269
-
dependencies = [
1270
-
"concurrent-queue",
1271
-
"parking",
1272
-
"pin-project-lite",
1273
-
]
1274
-
1275
-
[[package]]
1276
-
name = "event-listener-strategy"
1277
-
version = "0.5.3"
1278
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1279
-
checksum = "3c3e4e0dd3673c1139bf041f3008816d9cf2946bbfac2945c09e523b8d7b05b2"
1280
-
dependencies = [
1281
-
"event-listener 5.4.0",
1282
-
"pin-project-lite",
1283
-
]
1284
-
1285
-
[[package]]
1286
1123
name = "eyre"
1287
1124
version = "0.6.12"
1288
1125
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1453
1290
checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"
1454
1291
1455
1292
[[package]]
1456
-
name = "futures-lite"
1457
-
version = "2.6.0"
1458
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1459
-
checksum = "f5edaec856126859abb19ed65f39e90fea3a9574b9707f13539acf4abf7eb532"
1460
-
dependencies = [
1461
-
"fastrand",
1462
-
"futures-core",
1463
-
"futures-io",
1464
-
"parking",
1465
-
"pin-project-lite",
1466
-
]
1467
-
1468
-
[[package]]
1469
1293
name = "futures-macro"
1470
1294
version = "0.3.31"
1471
1295
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1564
1388
checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2"
1565
1389
1566
1390
[[package]]
1567
-
name = "gloo-timers"
1568
-
version = "0.3.0"
1569
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1570
-
checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994"
1571
-
dependencies = [
1572
-
"futures-channel",
1573
-
"futures-core",
1574
-
"js-sys",
1575
-
"wasm-bindgen",
1576
-
]
1577
-
1578
-
[[package]]
1579
1391
name = "group"
1580
1392
version = "0.13.0"
1581
1393
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1665
1477
version = "0.3.9"
1666
1478
source = "registry+https://github.com/rust-lang/crates.io-index"
1667
1479
checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024"
1668
-
1669
-
[[package]]
1670
-
name = "hermit-abi"
1671
-
version = "0.4.0"
1672
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1673
-
checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc"
1674
1480
1675
1481
[[package]]
1676
1482
name = "hex"
···
2248
2054
]
2249
2055
2250
2056
[[package]]
2251
-
name = "kv-log-macro"
2252
-
version = "1.0.7"
2253
-
source = "registry+https://github.com/rust-lang/crates.io-index"
2254
-
checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f"
2255
-
dependencies = [
2256
-
"log",
2257
-
]
2258
-
2259
-
[[package]]
2260
2057
name = "lazy_static"
2261
2058
version = "1.5.0"
2262
2059
source = "registry+https://github.com/rust-lang/crates.io-index"
···
2361
2158
version = "0.4.25"
2362
2159
source = "registry+https://github.com/rust-lang/crates.io-index"
2363
2160
checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f"
2364
-
dependencies = [
2365
-
"value-bag",
2366
-
]
2367
2161
2368
2162
[[package]]
2369
2163
name = "lru-cache"
···
2655
2449
source = "registry+https://github.com/rust-lang/crates.io-index"
2656
2450
checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
2657
2451
dependencies = [
2658
-
"hermit-abi 0.3.9",
2452
+
"hermit-abi",
2659
2453
"libc",
2660
2454
]
2661
2455
···
2824
2618
]
2825
2619
2826
2620
[[package]]
2827
-
name = "parking"
2828
-
version = "2.2.1"
2829
-
source = "registry+https://github.com/rust-lang/crates.io-index"
2830
-
checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba"
2831
-
2832
-
[[package]]
2833
2621
name = "parking_lot"
2834
2622
version = "0.11.2"
2835
2623
source = "registry+https://github.com/rust-lang/crates.io-index"
···
2992
2780
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
2993
2781
2994
2782
[[package]]
2995
-
name = "piper"
2996
-
version = "0.2.4"
2997
-
source = "registry+https://github.com/rust-lang/crates.io-index"
2998
-
checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066"
2999
-
dependencies = [
3000
-
"atomic-waker",
3001
-
"fastrand",
3002
-
"futures-io",
3003
-
]
3004
-
3005
-
[[package]]
3006
2783
name = "pkcs1"
3007
2784
version = "0.7.5"
3008
2785
source = "registry+https://github.com/rust-lang/crates.io-index"
···
3028
2805
version = "0.3.31"
3029
2806
source = "registry+https://github.com/rust-lang/crates.io-index"
3030
2807
checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2"
3031
-
3032
-
[[package]]
3033
-
name = "polling"
3034
-
version = "3.7.4"
3035
-
source = "registry+https://github.com/rust-lang/crates.io-index"
3036
-
checksum = "a604568c3202727d1507653cb121dbd627a58684eb09a820fd746bee38b4442f"
3037
-
dependencies = [
3038
-
"cfg-if",
3039
-
"concurrent-queue",
3040
-
"hermit-abi 0.4.0",
3041
-
"pin-project-lite",
3042
-
"rustix",
3043
-
"tracing",
3044
-
"windows-sys 0.59.0",
3045
-
]
3046
2808
3047
2809
[[package]]
3048
2810
name = "portable-atomic"
···
4688
4450
version = "0.1.1"
4689
4451
source = "registry+https://github.com/rust-lang/crates.io-index"
4690
4452
checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
4691
-
4692
-
[[package]]
4693
-
name = "value-bag"
4694
-
version = "1.10.0"
4695
-
source = "registry+https://github.com/rust-lang/crates.io-index"
4696
-
checksum = "3ef4c4aa54d5d05a279399bfa921ec387b7aba77caf7a682ae8d86785b8fdad2"
4697
4453
4698
4454
[[package]]
4699
4455
name = "vcpkg"
+6
-1
consumer/src/backfill/mod.rs
+6
-1
consumer/src/backfill/mod.rs
···
275
275
follows: Vec<(String, String, DateTime<Utc>)>,
276
276
list_items: Vec<(String, records::AppBskyGraphListItem)>,
277
277
verifications: Vec<(String, Cid, records::AppBskyGraphVerification)>,
278
+
threadgates: Vec<(String, Cid, records::AppBskyFeedThreadgate)>, // not COPY'd but needs to be kept until last.
278
279
records: Vec<(String, Cid)>,
279
280
}
280
281
281
282
impl CopyStore {
282
283
async fn submit(self, t: &mut Transaction<'_>, did: &str) -> Result<(), tokio_postgres::Error> {
283
284
db::copy::copy_likes(t, did, self.likes).await?;
284
-
db::copy::copy_posts(t, did, self.posts).await?;
285
285
db::copy::copy_reposts(t, did, self.reposts).await?;
286
286
db::copy::copy_blocks(t, did, self.blocks).await?;
287
287
db::copy::copy_follows(t, did, self.follows).await?;
288
288
db::copy::copy_list_items(t, self.list_items).await?;
289
289
db::copy::copy_verification(t, did, self.verifications).await?;
290
+
db::copy::copy_posts(t, did, self.posts).await?;
291
+
for (at_uri, cid, record) in self.threadgates {
292
+
db::threadgate_enforce_backfill(t, did, &record).await?;
293
+
db::threadgate_upsert(t, &at_uri, cid, record).await?;
294
+
}
290
295
db::copy::copy_records(t, did, self.records).await?;
291
296
292
297
Ok(())
+11
-1
consumer/src/backfill/repo.rs
+11
-1
consumer/src/backfill/repo.rs
···
4
4
};
5
5
use crate::indexer::records;
6
6
use crate::indexer::types::{AggregateDeltaStore, RecordTypes};
7
+
use crate::utils::at_uri_is_by;
7
8
use crate::{db, indexer};
8
9
use deadpool_postgres::Transaction;
9
10
use ipld_core::cid::Cid;
···
144
145
db::maintain_self_labels(t, did, Some(cid), &at_uri, labels).await?;
145
146
}
146
147
if let Some(embed) = rec.embed.clone().and_then(|embed| embed.into_bsky()) {
147
-
db::post_embed_insert(t, &at_uri, embed, rec.created_at).await?;
148
+
db::post_embed_insert(t, &at_uri, embed, rec.created_at, true).await?;
148
149
}
149
150
150
151
deltas.incr(did, AggregateType::ProfilePost).await;
···
165
166
copies
166
167
.reposts
167
168
.push((rkey.to_string(), rec.subject, rec.via, rec.created_at));
169
+
}
170
+
RecordTypes::AppBskyFeedThreadgate(record) => {
171
+
if !at_uri_is_by(&record.post, did) {
172
+
tracing::warn!("tried to create a threadgate on a post we don't control!");
173
+
return Ok(());
174
+
}
175
+
176
+
copies.push_record(&at_uri, cid);
177
+
copies.threadgates.push((at_uri, cid, record));
168
178
}
169
179
RecordTypes::AppBskyGraphBlock(rec) => {
170
180
copies.push_record(&at_uri, cid);
+40
-10
consumer/src/db/copy.rs
+40
-10
consumer/src/db/copy.rs
···
1
1
use super::PgExecResult;
2
2
use crate::indexer::records;
3
-
use crate::utils::strongref_to_parts;
3
+
use crate::utils::{extract_mentions_and_tags, merge_tags, strongref_to_parts};
4
4
use chrono::prelude::*;
5
5
use deadpool_postgres::Transaction;
6
6
use futures::pin_mut;
7
7
use ipld_core::cid::Cid;
8
+
use lexica::StrongRef;
8
9
use tokio_postgres::binary_copy::BinaryCopyInWriter;
9
10
use tokio_postgres::types::Type;
10
-
use lexica::StrongRef;
11
11
12
12
// StrongRefs are used in both likes and reposts
13
13
const STRONGREF_TYPES: &[Type] = &[
···
19
19
Type::TEXT,
20
20
Type::TIMESTAMP,
21
21
];
22
-
type StrongRefRow = (
23
-
String,
24
-
StrongRef,
25
-
Option<StrongRef>,
26
-
DateTime<Utc>,
27
-
);
22
+
type StrongRefRow = (String, StrongRef, Option<StrongRef>, DateTime<Utc>);
28
23
29
24
// SubjectRefs are used in both blocks and follows
30
25
const SUBJECT_TYPES: &[Type] = &[Type::TEXT, Type::TEXT, Type::TEXT, Type::TIMESTAMP];
···
124
119
.await
125
120
}
126
121
127
-
const POST_STMT: &str = "COPY posts_tmp (at_uri, cid, did, record, content, facets, languages, tags, parent_uri, parent_cid, root_uri, root_cid, embed, embed_subtype, created_at) FROM STDIN (FORMAT binary)";
122
+
const POST_STMT: &str = "COPY posts_tmp (at_uri, cid, did, record, content, facets, languages, tags, parent_uri, parent_cid, root_uri, root_cid, embed, embed_subtype, mentions, created_at) FROM STDIN (FORMAT binary)";
128
123
const POST_TYPES: &[Type] = &[
129
124
Type::TEXT,
130
125
Type::TEXT,
···
140
135
Type::TEXT,
141
136
Type::TEXT,
142
137
Type::TEXT,
138
+
Type::TEXT_ARRAY,
143
139
Type::TIMESTAMP,
144
140
];
145
141
pub async fn copy_posts(
···
164
160
165
161
for (at_uri, cid, post) in data {
166
162
let record = serde_json::to_value(&post).unwrap();
163
+
let (mentions, tags) = post
164
+
.facets
165
+
.as_ref()
166
+
.map(|v| extract_mentions_and_tags(v))
167
+
.unzip();
167
168
let facets = post.facets.and_then(|v| serde_json::to_value(v).ok());
168
169
let embed = post.embed.as_ref().map(|v| v.as_str());
169
170
let embed_subtype = post.embed.as_ref().and_then(|v| v.subtype());
170
171
let (parent_uri, parent_cid) = strongref_to_parts(post.reply.as_ref().map(|v| &v.parent));
171
172
let (root_uri, root_cid) = strongref_to_parts(post.reply.as_ref().map(|v| &v.root));
173
+
174
+
let tags = merge_tags(tags, post.tags);
172
175
173
176
let writer = writer.as_mut();
174
177
writer
···
180
183
&post.text,
181
184
&facets,
182
185
&post.langs.unwrap_or_default(),
183
-
&post.tags.unwrap_or_default(),
186
+
&tags,
184
187
&parent_uri,
185
188
&parent_cid,
186
189
&root_uri,
187
190
&root_cid,
188
191
&embed,
189
192
&embed_subtype,
193
+
&mentions,
190
194
&post.created_at.naive_utc(),
191
195
])
192
196
.await?;
193
197
}
194
198
195
199
writer.finish().await?;
200
+
201
+
let threadgated: Vec<(String, String, DateTime<Utc>)> = conn
202
+
.query(
203
+
"SELECT root_uri, p.at_uri, p.created_at FROM posts_tmp p INNER JOIN threadgates t ON root_uri = post_uri WHERE t.allow IS NOT NULL",
204
+
&[],
205
+
)
206
+
.await?
207
+
.into_iter()
208
+
.map(|v| (v.get(0), v.get(1), v.get(2))).collect();
209
+
210
+
for (root, post, created_at) in threadgated {
211
+
match super::post_enforce_threadgate(conn, &root, did, created_at, true).await {
212
+
Ok(true) => {
213
+
conn.execute(
214
+
"UPDATE posts_tmp SET violates_threadgate=TRUE WHERE at_uri=$1",
215
+
&[&post],
216
+
)
217
+
.await?;
218
+
}
219
+
Ok(false) => continue,
220
+
Err(e) => {
221
+
tracing::error!("failed to check threadgate enforcement: {e}");
222
+
continue;
223
+
}
224
+
}
225
+
}
196
226
197
227
conn.execute("INSERT INTO posts (SELECT * FROM posts_tmp)", &[])
198
228
.await
+208
consumer/src/db/gates.rs
+208
consumer/src/db/gates.rs
···
1
+
use super::{PgExecResult, PgResult};
2
+
use crate::indexer::records::{
3
+
AppBskyFeedThreadgate, ThreadgateRule, THREADGATE_RULE_FOLLOWER, THREADGATE_RULE_FOLLOWING,
4
+
THREADGATE_RULE_LIST, THREADGATE_RULE_MENTION,
5
+
};
6
+
use chrono::prelude::*;
7
+
use chrono::{DateTime, Utc};
8
+
use deadpool_postgres::GenericClient;
9
+
use std::collections::HashSet;
10
+
11
+
pub async fn post_enforce_threadgate<C: GenericClient>(
12
+
conn: &mut C,
13
+
root: &str,
14
+
post_author: &str,
15
+
post_created_at: DateTime<Utc>,
16
+
is_backfill: bool,
17
+
) -> PgResult<bool> {
18
+
// check if the root and the current post are the same author
19
+
// strip "at://" then break into parts by '/'
20
+
let parts = root[5..].split('/').collect::<Vec<_>>();
21
+
let root_author = parts[0];
22
+
if root_author == post_author {
23
+
return Ok(false);
24
+
}
25
+
26
+
let tg_data = super::threadgate_get(conn, root).await?;
27
+
28
+
let Some((created_at, allow, allow_lists)) = tg_data else {
29
+
return Ok(false);
30
+
};
31
+
32
+
// when backfilling, there's no point continuing if the record is dated before the threadgate
33
+
if is_backfill && post_created_at < created_at {
34
+
return Ok(false);
35
+
}
36
+
37
+
if allow.is_empty() {
38
+
return Ok(true);
39
+
}
40
+
41
+
let allow: HashSet<String> = HashSet::from_iter(allow);
42
+
43
+
if allow.contains(THREADGATE_RULE_FOLLOWER) || allow.contains(THREADGATE_RULE_FOLLOWING) {
44
+
let profile_state: Option<(bool, bool)> = conn
45
+
.query_opt(
46
+
"SELECT following IS NOT NULL, followed IS NOT NULL FROM profile_states WHERE did=$1 AND subject=$2",
47
+
&[&root_author, &post_author],
48
+
)
49
+
.await?
50
+
.map(|v| (v.get(0), v.get(1)));
51
+
52
+
if let Some((following, followed)) = profile_state {
53
+
if allow.contains(THREADGATE_RULE_FOLLOWER) && followed {
54
+
return Ok(false);
55
+
}
56
+
57
+
if allow.contains(THREADGATE_RULE_FOLLOWING) && following {
58
+
return Ok(false);
59
+
}
60
+
}
61
+
}
62
+
63
+
// check mentions
64
+
if allow.contains(THREADGATE_RULE_MENTION) {
65
+
let mentions: Vec<String> = conn
66
+
.query_opt("SELECT mentions FROM posts WHERE at_uri=$1", &[&root])
67
+
.await?
68
+
.map(|r| r.get(0))
69
+
.unwrap_or_default();
70
+
71
+
if mentions.contains(&post_author.to_owned()) {
72
+
return Ok(false);
73
+
}
74
+
}
75
+
76
+
if allow.contains(THREADGATE_RULE_LIST) {
77
+
if allow_lists.is_empty() {
78
+
return Ok(true);
79
+
}
80
+
81
+
let count: i64 = conn
82
+
.query_one(
83
+
"SELECT count(*) FROM list_items WHERE list_uri=ANY($1) AND subject=$2",
84
+
&[&allow_lists, &post_author],
85
+
)
86
+
.await?
87
+
.get(0);
88
+
if count != 0 {
89
+
return Ok(false);
90
+
}
91
+
}
92
+
93
+
Ok(true)
94
+
}
95
+
96
+
pub async fn postgate_maintain_detaches<C: GenericClient>(
97
+
conn: &mut C,
98
+
post: &str,
99
+
detached: &[String],
100
+
disable_effective: Option<NaiveDateTime>,
101
+
) -> PgExecResult {
102
+
conn.execute(
103
+
"SELECT maintain_postgates($1, $2, $3)",
104
+
&[&post, &detached, &disable_effective],
105
+
)
106
+
.await
107
+
}
108
+
109
+
// variant of post_enforce_threadgate that runs when backfilling to clean up any posts already in DB
110
+
pub async fn threadgate_enforce_backfill<C: GenericClient>(
111
+
conn: &mut C,
112
+
root_author: &str,
113
+
threadgate: &AppBskyFeedThreadgate,
114
+
) -> PgExecResult {
115
+
// pull out allow - if it's None we can skip this gate.
116
+
let Some(allow) = threadgate.allow.as_ref() else {
117
+
return Ok(0);
118
+
};
119
+
120
+
let root = &threadgate.post;
121
+
122
+
if allow.is_empty() {
123
+
// blind update everything
124
+
return conn.execute(
125
+
"UPDATE posts SET violates_threadgate=TRUE WHERE root_uri=$1 AND did != $2 AND created_at >= $3",
126
+
&[&root, &root_author, &threadgate.created_at],
127
+
).await;
128
+
}
129
+
130
+
// pull authors with our root_uri where the author is not the root author and are dated after created_at
131
+
// this is mutable because we'll remove ALLOWED dids
132
+
let mut dids: HashSet<String> = conn
133
+
.query(
134
+
"SELECT DISTINCT did FROM posts WHERE root_uri=$1 AND did != $2 AND created_at >= $3",
135
+
&[&root, &root_author, &threadgate.created_at],
136
+
)
137
+
.await?
138
+
.into_iter()
139
+
.map(|row| row.get(0))
140
+
.collect();
141
+
142
+
// this will be empty if there are no replies.
143
+
if dids.is_empty() {
144
+
return Ok(0);
145
+
}
146
+
147
+
let allowed_lists = allow
148
+
.iter()
149
+
.filter_map(|rule| match rule {
150
+
ThreadgateRule::List { list } => Some(list),
151
+
_ => None,
152
+
})
153
+
.collect::<Vec<_>>();
154
+
155
+
let allow: HashSet<_> = HashSet::from_iter(allow.into_iter().map(|v| v.as_str()));
156
+
157
+
if allow.contains(THREADGATE_RULE_FOLLOWER) && !dids.is_empty() {
158
+
let current_dids: Vec<_> = dids.iter().collect();
159
+
160
+
let res = conn.query(
161
+
"SELECT subject FROM profile_states WHERE did=$1 AND subject=ANY($2) AND followed IS NOT NULL",
162
+
&[&root_author, ¤t_dids]
163
+
).await?;
164
+
165
+
dids = &dids - &HashSet::from_iter(res.into_iter().map(|r| r.get(0)));
166
+
}
167
+
168
+
if allow.contains(THREADGATE_RULE_FOLLOWING) && !dids.is_empty() {
169
+
let current_dids: Vec<_> = dids.iter().collect();
170
+
171
+
let res = conn.query(
172
+
"SELECT subject FROM profile_states WHERE did=$1 AND subject=ANY($2) AND following IS NOT NULL",
173
+
&[&root_author, ¤t_dids]
174
+
).await?;
175
+
176
+
dids = &dids - &HashSet::from_iter(res.into_iter().map(|r| r.get(0)));
177
+
}
178
+
179
+
if allow.contains(THREADGATE_RULE_MENTION) && !dids.is_empty() {
180
+
let mentions: Vec<String> = conn
181
+
.query_opt("SELECT mentions FROM posts WHERE at_uri=$1", &[&root])
182
+
.await?
183
+
.map(|r| r.get(0))
184
+
.unwrap_or_default();
185
+
186
+
dids = &dids - &HashSet::from_iter(mentions);
187
+
}
188
+
189
+
if allow.contains(THREADGATE_RULE_LIST) && !dids.is_empty() {
190
+
let current_dids: Vec<_> = dids.iter().collect();
191
+
192
+
let res = conn
193
+
.query(
194
+
"SELECT subject FROM list_items WHERE list_uri = ANY($1) AND subject = ANY($2)",
195
+
&[&allowed_lists, ¤t_dids],
196
+
)
197
+
.await?;
198
+
199
+
dids = &dids - &HashSet::from_iter(res.into_iter().map(|r| r.get(0)));
200
+
}
201
+
202
+
let dids = dids.into_iter().collect::<Vec<_>>();
203
+
204
+
conn.execute(
205
+
"UPDATE posts SET violates_threadgate=TRUE WHERE root_uri = $1 AND did = ANY($2) AND created_at >= $3",
206
+
&[&threadgate.post, &dids, &threadgate.created_at]
207
+
).await
208
+
}
+2
consumer/src/db/mod.rs
+2
consumer/src/db/mod.rs
+95
-42
consumer/src/db/record.rs
+95
-42
consumer/src/db/record.rs
···
1
1
use super::{PgExecResult, PgOptResult, PgResult};
2
2
use crate::indexer::records::*;
3
-
use crate::utils::{blob_ref, strongref_to_parts};
3
+
use crate::utils::{blob_ref, extract_mentions_and_tags, merge_tags, strongref_to_parts};
4
4
use chrono::prelude::*;
5
5
use deadpool_postgres::GenericClient;
6
6
use ipld_core::cid::Cid;
7
7
use lexica::community_lexicon::bookmarks::Bookmark;
8
+
use std::collections::HashSet;
8
9
9
10
pub async fn record_upsert<C: GenericClient>(
10
11
conn: &mut C,
···
37
38
38
39
conn.execute(
39
40
include_str!("sql/bookmarks_upsert.sql"),
40
-
&[&repo, &rkey, &rec.subject, &rec_type, &rec.tags, &rec.created_at],
41
+
&[
42
+
&repo,
43
+
&rkey,
44
+
&rec.subject,
45
+
&rec_type,
46
+
&rec.tags,
47
+
&rec.created_at,
48
+
],
41
49
)
42
50
.await
43
51
}
···
310
318
repo: &str,
311
319
cid: Cid,
312
320
rec: AppBskyFeedPost,
321
+
is_backfill: bool,
313
322
) -> PgExecResult {
314
323
let cid = cid.to_string();
315
324
let record = serde_json::to_value(&rec).unwrap();
325
+
let (mentions, tags) = rec
326
+
.facets
327
+
.as_ref()
328
+
.map(|v| extract_mentions_and_tags(v))
329
+
.unzip();
316
330
let facets = rec.facets.and_then(|v| serde_json::to_value(v).ok());
317
331
let (parent_uri, parent_cid) = strongref_to_parts(rec.reply.as_ref().map(|v| &v.parent));
318
332
let (root_uri, root_cid) = strongref_to_parts(rec.reply.as_ref().map(|v| &v.root));
319
333
let embed = rec.embed.as_ref().map(|v| v.as_str());
320
334
let embed_subtype = rec.embed.as_ref().and_then(|v| v.subtype());
321
335
336
+
// if there is a root, we need to check for the presence of a threadgate.
337
+
let violates_threadgate = match &root_uri {
338
+
Some(root) => {
339
+
super::post_enforce_threadgate(conn, root, repo, rec.created_at, is_backfill).await?
340
+
}
341
+
None => false,
342
+
};
343
+
344
+
let tags = merge_tags(tags, rec.tags);
345
+
322
346
let count = conn
323
347
.execute(
324
348
include_str!("sql/post_insert.sql"),
···
330
354
&rec.text,
331
355
&facets,
332
356
&rec.langs.unwrap_or_default(),
333
-
&rec.tags.unwrap_or_default(),
357
+
&tags,
334
358
&parent_uri,
335
359
&parent_cid,
336
360
&root_uri,
337
361
&root_cid,
338
362
&embed,
339
363
&embed_subtype,
364
+
&mentions,
365
+
&violates_threadgate,
340
366
&rec.created_at,
341
367
],
342
368
)
343
369
.await?;
344
370
345
371
if let Some(embed) = rec.embed.and_then(|embed| embed.into_bsky()) {
346
-
post_embed_insert(conn, at_uri, embed, rec.created_at).await?;
372
+
post_embed_insert(conn, at_uri, embed, rec.created_at, is_backfill).await?;
347
373
}
348
374
349
375
Ok(count)
···
373
399
post: &str,
374
400
embed: AppBskyEmbed,
375
401
created_at: DateTime<Utc>,
402
+
is_backfill: bool,
376
403
) -> PgExecResult {
377
404
match embed {
378
405
AppBskyEmbed::Images(embed) => post_embed_image_insert(conn, post, embed).await,
379
406
AppBskyEmbed::Video(embed) => post_embed_video_insert(conn, post, embed).await,
380
407
AppBskyEmbed::External(embed) => post_embed_external_insert(conn, post, embed).await,
381
408
AppBskyEmbed::Record(embed) => {
382
-
post_embed_record_insert(conn, post, embed, created_at).await
409
+
post_embed_record_insert(conn, post, embed, created_at, is_backfill).await
383
410
}
384
411
AppBskyEmbed::RecordWithMedia(embed) => {
385
-
post_embed_record_insert(conn, post, embed.record, created_at).await?;
412
+
post_embed_record_insert(conn, post, embed.record, created_at, is_backfill).await?;
386
413
match *embed.media {
387
414
AppBskyEmbed::Images(embed) => post_embed_image_insert(conn, post, embed).await,
388
415
AppBskyEmbed::Video(embed) => post_embed_video_insert(conn, post, embed).await,
···
469
496
).await
470
497
}
471
498
499
+
const PG_DISABLE_RULE: &str = "app.bsky.feed.postgate#disableRule";
472
500
async fn post_embed_record_insert<C: GenericClient>(
473
501
conn: &mut C,
474
502
post: &str,
475
503
embed: AppBskyEmbedRecord,
476
504
post_created_at: DateTime<Utc>,
505
+
is_backfill: bool,
477
506
) -> PgExecResult {
478
507
// strip "at://" then break into parts by '/'
479
508
let parts = embed.record.uri[5..].split('/').collect::<Vec<_>>();
480
509
481
510
let detached = if parts[1] == "app.bsky.feed.post" {
482
-
let postgate_effective: Option<DateTime<Utc>> = conn
483
-
.query_opt(
484
-
"SELECT created_at FROM postgates WHERE post_uri=$1",
485
-
&[&post],
486
-
)
487
-
.await?
488
-
.map(|v| v.get(0));
511
+
let pg_data = postgate_get(conn, post).await?;
489
512
490
-
postgate_effective
491
-
.map(|v| Utc::now().min(post_created_at) > v)
492
-
.unwrap_or_default()
513
+
if let Some((effective, detached, rules)) = pg_data {
514
+
let detached: HashSet<String> = HashSet::from_iter(detached);
515
+
let rules: HashSet<String> = HashSet::from_iter(rules);
516
+
let compare_date = match is_backfill {
517
+
true => post_created_at,
518
+
false => Utc::now(),
519
+
};
520
+
521
+
detached.contains(post) || (rules.contains(PG_DISABLE_RULE) && compare_date > effective)
522
+
} else {
523
+
false
524
+
}
493
525
} else {
494
526
false
495
527
};
···
500
532
).await
501
533
}
502
534
535
+
async fn postgate_get<C: GenericClient>(
536
+
conn: &mut C,
537
+
post: &str,
538
+
) -> PgOptResult<(DateTime<Utc>, Vec<String>, Vec<String>)> {
539
+
let res = conn
540
+
.query_opt(
541
+
"SELECT created_at, detached, rules FROM postgates WHERE post_uri=$1",
542
+
&[&post],
543
+
)
544
+
.await?
545
+
.map(|v| (v.get(0), v.get(1), v.get(2)));
546
+
547
+
Ok(res)
548
+
}
549
+
503
550
pub async fn postgate_upsert<C: GenericClient>(
504
551
conn: &mut C,
505
552
at_uri: &str,
···
531
578
.await
532
579
}
533
580
534
-
pub async fn postgate_maintain_detaches<C: GenericClient>(
535
-
conn: &mut C,
536
-
post: &str,
537
-
detached: &[String],
538
-
disable_effective: Option<NaiveDateTime>,
539
-
) -> PgExecResult {
540
-
conn.execute(
541
-
"SELECT maintain_postgates($1, $2, $3)",
542
-
&[&post, &detached, &disable_effective],
543
-
)
544
-
.await
545
-
}
546
-
547
581
pub async fn profile_upsert<C: GenericClient>(
548
582
conn: &mut C,
549
583
repo: &str,
···
569
603
&pinned_cid,
570
604
&joined_sp_uri,
571
605
&joined_sp_cid,
606
+
&rec.pronouns,
607
+
&rec.website,
572
608
&rec.created_at.unwrap_or(Utc::now()).naive_utc(),
573
609
],
574
610
)
···
691
727
.await
692
728
}
693
729
730
+
pub async fn threadgate_get<C: GenericClient>(
731
+
conn: &mut C,
732
+
post: &str,
733
+
) -> PgOptResult<(DateTime<Utc>, Vec<String>, Vec<String>)> {
734
+
let res = conn
735
+
.query_opt(
736
+
"SELECT created_at, allow, allowed_lists FROM threadgates WHERE post_uri=$1 AND allow IS NOT NULL",
737
+
&[&post],
738
+
)
739
+
.await?
740
+
.map(|v| (v.get(0), v.get(1), v.get(2)));
741
+
742
+
Ok(res)
743
+
}
744
+
694
745
pub async fn threadgate_upsert<C: GenericClient>(
695
746
conn: &mut C,
696
747
at_uri: &str,
···
699
750
) -> PgExecResult {
700
751
let record = serde_json::to_value(&rec).unwrap();
701
752
702
-
let allowed_lists = rec
703
-
.allow
704
-
.iter()
705
-
.filter_map(|rule| match rule {
706
-
ThreadgateRule::List { list } => Some(list.clone()),
707
-
_ => None,
708
-
})
709
-
.collect::<Vec<_>>();
753
+
let allowed_lists = rec.allow.as_ref().map(|allow| {
754
+
allow
755
+
.iter()
756
+
.filter_map(|rule| match rule {
757
+
ThreadgateRule::List { list } => Some(list.clone()),
758
+
_ => None,
759
+
})
760
+
.collect::<Vec<_>>()
761
+
});
710
762
711
-
let allow = rec
712
-
.allow
713
-
.into_iter()
714
-
.map(|v| v.as_str().to_string())
715
-
.collect::<Vec<_>>();
763
+
let allow = rec.allow.map(|allow| {
764
+
allow
765
+
.into_iter()
766
+
.map(|v| v.as_str().to_string())
767
+
.collect::<Vec<_>>()
768
+
});
716
769
717
770
conn.execute(
718
771
include_str!("sql/threadgate_upsert.sql"),
+2
-2
consumer/src/db/sql/post_insert.sql
+2
-2
consumer/src/db/sql/post_insert.sql
···
1
1
INSERT INTO posts (at_uri, did, cid, record, content, facets, languages, tags, parent_uri, parent_cid, root_uri,
2
-
root_cid, embed, embed_subtype, created_at)
3
-
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15)
2
+
root_cid, embed, embed_subtype, mentions, violates_threadgate, created_at)
3
+
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17)
4
4
ON CONFLICT DO NOTHING
+4
-2
consumer/src/db/sql/profile_upsert.sql
+4
-2
consumer/src/db/sql/profile_upsert.sql
···
1
1
INSERT INTO profiles (did, cid, avatar_cid, banner_cid, display_name, description, pinned_uri, pinned_cid,
2
-
joined_sp_uri, joined_sp_cid, created_at)
3
-
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
2
+
joined_sp_uri, joined_sp_cid, pronouns, website, created_at)
3
+
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
4
4
ON CONFLICT (did) DO UPDATE SET cid=EXCLUDED.cid,
5
5
avatar_cid=EXCLUDED.avatar_cid,
6
6
banner_cid=EXCLUDED.banner_cid,
···
10
10
pinned_cid=EXCLUDED.pinned_cid,
11
11
joined_sp_uri=EXCLUDED.joined_sp_uri,
12
12
joined_sp_cid=EXCLUDED.joined_sp_cid,
13
+
pronouns=EXCLUDED.pronouns,
14
+
website=EXCLUDED.website,
13
15
indexed_at=NOW()
+14
consumer/src/firehose/mod.rs
+14
consumer/src/firehose/mod.rs
···
117
117
118
118
FirehoseEvent::Label(event)
119
119
}
120
+
"#sync" => {
121
+
counter!("firehose_events.total", "event" => "sync").increment(1);
122
+
let event: AtpSyncEvent = serde_ipld_dagcbor::from_reader(&mut reader)?;
123
+
124
+
// increment the seq
125
+
if self.seq < event.seq {
126
+
self.seq = event.seq;
127
+
} else {
128
+
tracing::error!("Event sequence was not greater than previous seq, exiting. {} <= {}", event.seq, self.seq);
129
+
return Ok(FirehoseOutput::Close);
130
+
}
131
+
132
+
FirehoseEvent::Sync(event)
133
+
}
120
134
_ => {
121
135
tracing::warn!("unknown event type {ty}");
122
136
return Ok(FirehoseOutput::Continue);
+23
consumer/src/firehose/types.rs
+23
consumer/src/firehose/types.rs
···
31
31
Account(AtpAccountEvent),
32
32
Commit(AtpCommitEvent),
33
33
Label(AtpLabelEvent),
34
+
Sync(AtpSyncEvent),
34
35
}
35
36
36
37
#[derive(Debug, Deserialize)]
···
48
49
Suspended,
49
50
Deleted,
50
51
Deactivated,
52
+
Throttled,
53
+
Desynchronized,
51
54
}
52
55
53
56
impl AtpAccountStatus {
···
57
60
AtpAccountStatus::Suspended => "suspended",
58
61
AtpAccountStatus::Deleted => "deleted",
59
62
AtpAccountStatus::Deactivated => "deactivated",
63
+
AtpAccountStatus::Throttled => "throttled",
64
+
AtpAccountStatus::Desynchronized => "desynchronized",
60
65
}
61
66
}
62
67
}
···
68
73
AtpAccountStatus::Suspended => parakeet_db::types::ActorStatus::Suspended,
69
74
AtpAccountStatus::Deleted => parakeet_db::types::ActorStatus::Deleted,
70
75
AtpAccountStatus::Deactivated => parakeet_db::types::ActorStatus::Deactivated,
76
+
AtpAccountStatus::Throttled | AtpAccountStatus::Desynchronized => {
77
+
parakeet_db::types::ActorStatus::Active
78
+
}
71
79
}
72
80
}
73
81
}
···
90
98
pub since: Option<String>,
91
99
pub commit: Cid,
92
100
#[serde(rename = "tooBig")]
101
+
#[deprecated]
93
102
pub too_big: bool,
94
103
#[serde(default)]
95
104
pub blocks: ByteBuf,
96
105
#[serde(default)]
97
106
pub ops: Vec<CommitOp>,
98
107
#[serde(default)]
108
+
#[deprecated]
99
109
pub blobs: Vec<Cid>,
110
+
#[serde(rename = "prevData")]
111
+
pub prev_data: Option<Cid>,
100
112
}
101
113
102
114
#[derive(Debug, Deserialize)]
103
115
pub struct CommitOp {
104
116
pub action: String,
105
117
pub cid: Option<Cid>,
118
+
pub prev: Option<Cid>,
106
119
pub path: String,
107
120
}
108
121
···
124
137
pub seq: u64,
125
138
pub labels: Vec<AtpLabel>,
126
139
}
140
+
141
+
#[derive(Debug, Deserialize)]
142
+
pub struct AtpSyncEvent {
143
+
pub seq: u64,
144
+
pub did: String,
145
+
pub time: DateTime<Utc>,
146
+
pub rev: String,
147
+
#[serde(default)]
148
+
pub blocks: ByteBuf,
149
+
}
+42
-13
consumer/src/indexer/mod.rs
+42
-13
consumer/src/indexer/mod.rs
···
1
1
use crate::config::HistoryMode;
2
2
use crate::db;
3
3
use crate::firehose::{
4
-
AtpAccountEvent, AtpCommitEvent, AtpIdentityEvent, CommitOp, FirehoseConsumer, FirehoseEvent,
5
-
FirehoseOutput,
4
+
AtpAccountEvent, AtpCommitEvent, AtpIdentityEvent, AtpSyncEvent, CommitOp, FirehoseConsumer,
5
+
FirehoseEvent, FirehoseOutput,
6
6
};
7
7
use crate::indexer::types::{
8
8
AggregateDeltaStore, BackfillItem, BackfillItemInner, CollectionType, RecordTypes,
9
9
};
10
+
use crate::utils::at_uri_is_by;
10
11
use deadpool_postgres::{Object, Pool, Transaction};
11
12
use did_resolver::Resolver;
12
13
use foldhash::quality::RandomState;
···
107
108
FirehoseEvent::Commit(commit) => {
108
109
index_commit(&mut state, &mut conn, &mut rc, commit).await
109
110
}
111
+
FirehoseEvent::Sync(sync) => {
112
+
process_sync(&state, &mut conn, &mut rc, sync).await
113
+
}
110
114
FirehoseEvent::Label(_) => unreachable!(),
111
115
};
112
116
···
188
192
FirehoseEvent::Identity(identity) => self.hasher.hash_one(&identity.did) % threads,
189
193
FirehoseEvent::Account(account) => self.hasher.hash_one(&account.did) % threads,
190
194
FirehoseEvent::Commit(commit) => self.hasher.hash_one(&commit.repo) % threads,
195
+
FirehoseEvent::Sync(sync) => self.hasher.hash_one(&sync.did) % threads,
191
196
FirehoseEvent::Label(_) => {
192
197
// We handle all labels through direct connections to labelers
193
198
tracing::warn!("got #labels from the relay");
···
201
206
}
202
207
}
203
208
209
+
#[instrument(skip_all, fields(seq = sync.seq, repo = sync.did))]
210
+
async fn process_sync(
211
+
state: &RelayIndexerState,
212
+
conn: &mut Object,
213
+
rc: &mut MultiplexedConnection,
214
+
sync: AtpSyncEvent,
215
+
) -> eyre::Result<()> {
216
+
let Some((sync_state, Some(current_rev))) = db::actor_get_repo_status(conn, &sync.did).await?
217
+
else {
218
+
return Ok(());
219
+
};
220
+
221
+
// don't care if we're not synced. also no point if !do_backfill bc we might not have a worker
222
+
if sync_state == ActorSyncState::Synced && state.do_backfill && sync.rev > current_rev {
223
+
tracing::debug!("triggering backfill due to #sync");
224
+
rc.rpush::<_, _, i32>("backfill_queue", sync.did).await?;
225
+
}
226
+
227
+
Ok(())
228
+
}
229
+
204
230
#[instrument(skip_all, fields(seq = identity.seq, repo = identity.did))]
205
231
async fn index_identity(
206
232
state: &RelayIndexerState,
···
527
553
rkey: &str,
528
554
) -> eyre::Result<()> {
529
555
match record {
530
-
RecordTypes::AppBskyActorProfile(record) => {
556
+
RecordTypes::AppBskyActorProfile(mut record) => {
531
557
if rkey == "self" {
532
558
let labels = record.labels.clone();
559
+
560
+
// don't allow pinned posts that aren't by us.
561
+
if let Some(pinned) = &record.pinned_post {
562
+
if !at_uri_is_by(&pinned.uri, repo) {
563
+
record.pinned_post = None;
564
+
}
565
+
}
566
+
533
567
db::profile_upsert(conn, repo, cid, record).await?;
534
568
535
569
if let Some(labels) = labels {
···
591
625
});
592
626
593
627
let labels = record.labels.clone();
594
-
db::post_insert(conn, at_uri, repo, cid, record).await?;
628
+
db::post_insert(conn, at_uri, repo, cid, record, false).await?;
595
629
if let Some(labels) = labels {
596
630
db::maintain_self_labels(conn, repo, Some(cid), at_uri, labels).await?;
597
631
}
···
605
639
}
606
640
}
607
641
RecordTypes::AppBskyFeedPostgate(record) => {
608
-
let split_aturi = record.post.rsplitn(4, '/').collect::<Vec<_>>();
609
-
if repo != split_aturi[2] {
642
+
if !at_uri_is_by(&record.post, repo) {
610
643
tracing::warn!("tried to create a postgate on a post we don't control!");
611
644
return Ok(());
612
645
}
···
636
669
db::repost_insert(conn, rkey, repo, record).await?;
637
670
}
638
671
RecordTypes::AppBskyFeedThreadgate(record) => {
639
-
let split_aturi = record.post.rsplitn(4, '/').collect::<Vec<_>>();
640
-
if repo != split_aturi[2] {
672
+
if !at_uri_is_by(&record.post, repo) {
641
673
tracing::warn!("tried to create a threadgate on a post we don't control!");
642
674
return Ok(());
643
675
}
···
677
709
db::list_block_insert(conn, at_uri, repo, record).await?;
678
710
}
679
711
RecordTypes::AppBskyGraphListItem(record) => {
680
-
let split_aturi = record.list.rsplitn(4, '/').collect::<Vec<_>>();
681
-
if repo != split_aturi[2] {
712
+
if !at_uri_is_by(&record.list, repo) {
682
713
// it's also probably a bad idea to log *all* the attempts to do this...
683
714
tracing::warn!("tried to create a listitem on a list we don't control!");
684
715
return Ok(());
···
836
867
redis::AsyncTypedCommands::del(rc, format!("profile#{repo}")).await?;
837
868
db::chat_decl_delete(conn, repo).await?
838
869
}
839
-
CollectionType::CommunityLexiconBookmark => {
840
-
db::bookmark_delete(conn, rkey, repo).await?
841
-
}
870
+
CollectionType::CommunityLexiconBookmark => db::bookmark_delete(conn, rkey, repo).await?,
842
871
_ => unreachable!(),
843
872
};
844
873
+14
-6
consumer/src/indexer/records.rs
+14
-6
consumer/src/indexer/records.rs
···
23
23
pub labels: Option<SelfLabels>,
24
24
pub joined_via_starter_pack: Option<StrongRef>,
25
25
pub pinned_post: Option<StrongRef>,
26
+
#[serde_as(as = "utils::safe_string")]
27
+
pub pronouns: Option<String>,
28
+
#[serde_as(as = "utils::safe_string")]
29
+
pub website: Option<String>,
26
30
pub created_at: Option<DateTime<Utc>>,
27
31
}
28
32
···
263
267
pub struct AppBskyFeedThreadgate {
264
268
pub post: String,
265
269
pub created_at: DateTime<Utc>,
266
-
#[serde(default)]
267
-
pub allow: Vec<ThreadgateRule>,
270
+
pub allow: Option<Vec<ThreadgateRule>>,
268
271
#[serde(default)]
269
272
pub hidden_replies: Vec<String>,
270
273
}
274
+
275
+
pub const THREADGATE_RULE_MENTION: &str = "app.bsky.feed.threadgate#mentionRule";
276
+
pub const THREADGATE_RULE_FOLLOWER: &str = "app.bsky.feed.threadgate#followerRule";
277
+
pub const THREADGATE_RULE_FOLLOWING: &str = "app.bsky.feed.threadgate#followingRule";
278
+
pub const THREADGATE_RULE_LIST: &str = "app.bsky.feed.threadgate#listRule";
271
279
272
280
#[derive(Debug, Deserialize, Serialize)]
273
281
#[serde(tag = "$type")]
···
285
293
impl ThreadgateRule {
286
294
pub fn as_str(&self) -> &'static str {
287
295
match self {
288
-
ThreadgateRule::Mention => "app.bsky.feed.threadgate#mentionRule",
289
-
ThreadgateRule::Follower => "app.bsky.feed.threadgate#followerRule",
290
-
ThreadgateRule::Following => "app.bsky.feed.threadgate#followingRule",
291
-
ThreadgateRule::List { .. } => "app.bsky.feed.threadgate#listRule",
296
+
ThreadgateRule::Mention => THREADGATE_RULE_MENTION,
297
+
ThreadgateRule::Follower => THREADGATE_RULE_FOLLOWER,
298
+
ThreadgateRule::Following => THREADGATE_RULE_FOLLOWING,
299
+
ThreadgateRule::List { .. } => THREADGATE_RULE_LIST,
292
300
}
293
301
}
294
302
}
+1
-1
consumer/src/indexer/types.rs
+1
-1
consumer/src/indexer/types.rs
···
42
42
#[serde(rename = "chat.bsky.actor.declaration")]
43
43
ChatBskyActorDeclaration(records::ChatBskyActorDeclaration),
44
44
#[serde(rename = "community.lexicon.bookmarks.bookmark")]
45
-
CommunityLexiconBookmark(lexica::community_lexicon::bookmarks::Bookmark)
45
+
CommunityLexiconBookmark(lexica::community_lexicon::bookmarks::Bookmark),
46
46
}
47
47
48
48
#[derive(Debug, PartialOrd, PartialEq, Deserialize, Serialize)]
+39
-4
consumer/src/utils.rs
+39
-4
consumer/src/utils.rs
···
1
+
use lexica::app_bsky::richtext::{Facet, FacetMain, FacetOuter};
2
+
use lexica::{Blob, StrongRef};
1
3
use serde::{Deserialize, Deserializer};
2
-
use lexica::{Blob, StrongRef};
3
4
4
5
// see https://deer.social/profile/did:plc:63y3oh7iakdueqhlj6trojbq/post/3ltuv4skhqs2h
5
6
pub fn safe_string<'de, D: Deserializer<'de>>(deserializer: D) -> Result<String, D::Error> {
···
12
13
blob.map(|blob| blob.cid.to_string())
13
14
}
14
15
15
-
pub fn strongref_to_parts(
16
-
strongref: Option<&StrongRef>,
17
-
) -> (Option<String>, Option<String>) {
16
+
pub fn strongref_to_parts(strongref: Option<&StrongRef>) -> (Option<String>, Option<String>) {
18
17
strongref
19
18
.map(|sr| (sr.uri.clone(), sr.cid.to_string()))
20
19
.unzip()
···
35
34
None
36
35
}
37
36
}
37
+
38
+
pub fn at_uri_is_by(uri: &str, did: &str) -> bool {
39
+
let split_aturi = uri.rsplitn(4, '/').collect::<Vec<_>>();
40
+
41
+
did == split_aturi[2]
42
+
}
43
+
44
+
pub fn extract_mentions_and_tags(from: &[FacetMain]) -> (Vec<String>, Vec<String>) {
45
+
let (mentions, tags) = from
46
+
.iter()
47
+
.flat_map(|v| {
48
+
v.features.iter().map(|facet| match facet {
49
+
FacetOuter::Bsky(Facet::Mention { did }) => (Some(did), None),
50
+
FacetOuter::Bsky(Facet::Tag { tag }) => (None, Some(tag)),
51
+
_ => (None, None),
52
+
})
53
+
})
54
+
.unzip::<_, _, Vec<_>, Vec<_>>();
55
+
56
+
let mentions = mentions.into_iter().flatten().cloned().collect();
57
+
let tags = tags.into_iter().flatten().cloned().collect();
58
+
59
+
(mentions, tags)
60
+
}
61
+
62
+
pub fn merge_tags<T>(t1: Option<Vec<T>>, t2: Option<Vec<T>>) -> Vec<T> {
63
+
match (t1, t2) {
64
+
(Some(t1), None) => t1,
65
+
(None, Some(t2)) => t2,
66
+
(Some(mut t1), Some(t2)) => {
67
+
t1.extend(t2);
68
+
t1
69
+
}
70
+
_ => Vec::default(),
71
+
}
72
+
}
+2
-12
dataloader-rs/Cargo.toml
+2
-12
dataloader-rs/Cargo.toml
···
2
2
name = "dataloader"
3
3
version = "0.18.0"
4
4
edition = "2021"
5
-
authors = ["cksac <cs.cksac@gmail.com>", "Lily"]
5
+
authors = ["cksac <cs.cksac@gmail.com>", "Mia"]
6
6
description = "Rust implementation of Facebook's DataLoader using async-await."
7
7
keywords = ["batcher", "dataloader", "cache"]
8
8
categories = ["asynchronous", "caching"]
···
15
15
[badges]
16
16
travis-ci = { repository = "/cksac/dataloader-rs" }
17
17
18
-
[features]
19
-
default = ["runtime-async-std"]
20
-
runtime-async-std = [
21
-
"async-std",
22
-
]
23
-
runtime-tokio = [
24
-
"tokio"
25
-
]
26
-
27
18
[dependencies]
28
-
async-std = { version = "1", optional = true }
29
-
tokio = { version = "1", features = [ "sync", "rt" ], optional = true }
19
+
tokio = { version = "1", features = [ "sync", "rt" ] }
30
20
31
21
[dev-dependencies]
32
22
futures = "0.3"
-13
dataloader-rs/src/runtime.rs
-13
dataloader-rs/src/runtime.rs
···
1
-
// runtime-async-std
2
-
#[cfg(feature = "runtime-async-std")]
3
-
pub type Arc<T> = async_std::sync::Arc<T>;
4
-
5
-
#[cfg(feature = "runtime-async-std")]
6
-
pub type Mutex<T> = async_std::sync::Mutex<T>;
7
-
8
-
#[cfg(feature = "runtime-async-std")]
9
-
pub use async_std::task::yield_now;
10
-
11
1
// runtime-tokio
12
-
#[cfg(feature = "runtime-tokio")]
13
2
pub type Arc<T> = std::sync::Arc<T>;
14
3
15
-
#[cfg(feature = "runtime-tokio")]
16
4
pub type Mutex<T> = tokio::sync::Mutex<T>;
17
5
18
-
#[cfg(feature = "runtime-tokio")]
19
6
pub use tokio::task::yield_now;
+1
-2
justfile
+1
-2
justfile
···
12
12
13
13
@reset-redis:
14
14
echo "Resetting Redis lists..."
15
-
redis-cli DEL backfill_queue
16
-
redis-cli DEL backfill_processing
15
+
redis-cli DEL backfill_queue backfill_processing bf_downloaded
17
16
18
17
@reset-and-backfill *dids: reset-db reset-redis
19
18
for PARAMETER_VALUE in {{dids}}; do \
+36
-6
lexica/src/app_bsky/actor.rs
+36
-6
lexica/src/app_bsky/actor.rs
···
1
1
use crate::app_bsky::embed::External;
2
+
use crate::app_bsky::graph::ListViewBasic;
2
3
use crate::com_atproto::label::Label;
3
4
use chrono::prelude::*;
4
5
use serde::{Deserialize, Serialize};
5
6
use std::fmt::Display;
6
7
use std::str::FromStr;
8
+
9
+
#[derive(Clone, Default, Debug, Serialize)]
10
+
#[serde(rename_all = "camelCase")]
11
+
pub struct ProfileViewerState {
12
+
pub muted: bool,
13
+
#[serde(skip_serializing_if = "Option::is_none")]
14
+
pub muted_by_list: Option<ListViewBasic>,
15
+
pub blocked_by: bool,
16
+
#[serde(skip_serializing_if = "Option::is_none")]
17
+
pub blocking: Option<String>,
18
+
#[serde(skip_serializing_if = "Option::is_none")]
19
+
pub blocking_by_list: Option<ListViewBasic>,
20
+
#[serde(skip_serializing_if = "Option::is_none")]
21
+
pub following: Option<String>,
22
+
#[serde(skip_serializing_if = "Option::is_none")]
23
+
pub followed_by: Option<String>,
24
+
// #[serde(skip_serializing_if = "Option::is_none")]
25
+
// pub known_followers: Option<()>,
26
+
// #[serde(skip_serializing_if = "Option::is_none")]
27
+
// pub activity_subscriptions: Option<()>,
28
+
}
7
29
8
30
#[derive(Clone, Default, Debug, Serialize)]
9
31
#[serde(rename_all = "camelCase")]
···
130
152
pub avatar: Option<String>,
131
153
#[serde(skip_serializing_if = "Option::is_none")]
132
154
pub associated: Option<ProfileAssociated>,
133
-
// #[serde(skip_serializing_if = "Option::is_none")]
134
-
// pub viewer: Option<()>,
155
+
#[serde(skip_serializing_if = "Option::is_none")]
156
+
pub viewer: Option<ProfileViewerState>,
135
157
#[serde(skip_serializing_if = "Vec::is_empty")]
136
158
pub labels: Vec<Label>,
137
159
#[serde(skip_serializing_if = "Option::is_none")]
138
160
pub verification: Option<VerificationState>,
139
161
#[serde(skip_serializing_if = "Option::is_none")]
140
162
pub status: Option<StatusView>,
163
+
#[serde(skip_serializing_if = "Option::is_none")]
164
+
pub pronouns: Option<String>,
141
165
142
166
pub created_at: DateTime<Utc>,
143
167
}
···
156
180
pub avatar: Option<String>,
157
181
#[serde(skip_serializing_if = "Option::is_none")]
158
182
pub associated: Option<ProfileAssociated>,
159
-
// #[serde(skip_serializing_if = "Option::is_none")]
160
-
// pub viewer: Option<()>,
183
+
#[serde(skip_serializing_if = "Option::is_none")]
184
+
pub viewer: Option<ProfileViewerState>,
161
185
#[serde(skip_serializing_if = "Vec::is_empty")]
162
186
pub labels: Vec<Label>,
163
187
#[serde(skip_serializing_if = "Option::is_none")]
164
188
pub verification: Option<VerificationState>,
165
189
#[serde(skip_serializing_if = "Option::is_none")]
166
190
pub status: Option<StatusView>,
191
+
#[serde(skip_serializing_if = "Option::is_none")]
192
+
pub pronouns: Option<String>,
167
193
168
194
pub created_at: DateTime<Utc>,
169
195
pub indexed_at: NaiveDateTime,
···
189
215
pub associated: Option<ProfileAssociated>,
190
216
// #[serde(skip_serializing_if = "Option::is_none")]
191
217
// pub joined_via_starter_pack: Option<()>,
192
-
// #[serde(skip_serializing_if = "Option::is_none")]
193
-
// pub viewer: Option<()>,
218
+
#[serde(skip_serializing_if = "Option::is_none")]
219
+
pub viewer: Option<ProfileViewerState>,
194
220
#[serde(skip_serializing_if = "Vec::is_empty")]
195
221
pub labels: Vec<Label>,
196
222
// #[serde(skip_serializing_if = "Option::is_none")]
···
199
225
pub verification: Option<VerificationState>,
200
226
#[serde(skip_serializing_if = "Option::is_none")]
201
227
pub status: Option<StatusView>,
228
+
#[serde(skip_serializing_if = "Option::is_none")]
229
+
pub pronouns: Option<String>,
230
+
#[serde(skip_serializing_if = "Option::is_none")]
231
+
pub website: Option<String>,
202
232
203
233
pub created_at: DateTime<Utc>,
204
234
pub indexed_at: NaiveDateTime,
+29
-10
lexica/src/app_bsky/feed.rs
+29
-10
lexica/src/app_bsky/feed.rs
···
1
1
use super::RecordStats;
2
-
use crate::app_bsky::actor::{ProfileView, ProfileViewBasic};
2
+
use crate::app_bsky::actor::{ProfileView, ProfileViewBasic, ProfileViewerState};
3
3
use crate::app_bsky::embed::Embed;
4
4
use crate::app_bsky::graph::ListViewBasic;
5
5
use crate::app_bsky::richtext::FacetMain;
···
8
8
use serde::{Deserialize, Serialize};
9
9
use std::str::FromStr;
10
10
11
+
#[derive(Clone, Default, Debug, Serialize)]
12
+
#[serde(rename_all = "camelCase")]
13
+
pub struct PostViewerState {
14
+
#[serde(skip_serializing_if = "Option::is_none")]
15
+
pub repost: Option<String>,
16
+
#[serde(skip_serializing_if = "Option::is_none")]
17
+
pub like: Option<String>,
18
+
pub bookmarked: bool,
19
+
pub thread_muted: bool,
20
+
pub reply_disabled: bool,
21
+
pub embedding_disabled: bool,
22
+
pub pinned: bool,
23
+
}
24
+
11
25
#[derive(Clone, Debug, Serialize)]
12
26
#[serde(rename_all = "camelCase")]
13
27
pub struct PostView {
···
23
37
24
38
#[serde(skip_serializing_if = "Vec::is_empty")]
25
39
pub labels: Vec<Label>,
26
-
// #[serde(skip_serializing_if = "Option::is_none")]
27
-
// pub viewer: Option<()>,
40
+
#[serde(skip_serializing_if = "Option::is_none")]
41
+
pub viewer: Option<PostViewerState>,
28
42
#[serde(skip_serializing_if = "Option::is_none")]
29
43
pub threadgate: Option<ThreadgateView>,
30
44
···
123
137
124
138
#[derive(Clone, Debug, Serialize)]
125
139
pub struct BlockedAuthor {
126
-
pub uri: String,
127
-
// pub viewer: Option<()>,
140
+
pub did: String,
141
+
pub viewer: Option<ProfileViewerState>,
142
+
}
143
+
144
+
#[derive(Clone, Default, Debug, Serialize)]
145
+
#[serde(rename_all = "camelCase")]
146
+
pub struct GeneratorViewerState {
147
+
#[serde(skip_serializing_if = "Option::is_none")]
148
+
pub like: Option<String>,
128
149
}
129
150
130
151
#[derive(Clone, Debug, Serialize)]
···
148
169
pub accepts_interactions: bool,
149
170
#[serde(skip_serializing_if = "Vec::is_empty")]
150
171
pub labels: Vec<Label>,
151
-
// #[serde(skip_serializing_if = "Option::is_none")]
152
-
// pub viewer: Option<()>,
172
+
#[serde(skip_serializing_if = "Option::is_none")]
173
+
pub viewer: Option<GeneratorViewerState>,
153
174
#[serde(skip_serializing_if = "Option::is_none")]
154
175
pub content_mode: Option<GeneratorContentMode>,
155
176
···
219
240
#[serde(rename = "app.bsky.feed.defs#skeletonReasonPin")]
220
241
Pin {},
221
242
#[serde(rename = "app.bsky.feed.defs#skeletonReasonRepost")]
222
-
Repost {
223
-
repost: String,
224
-
},
243
+
Repost { repost: String },
225
244
}
+12
-4
lexica/src/app_bsky/graph.rs
+12
-4
lexica/src/app_bsky/graph.rs
···
6
6
use serde::{Deserialize, Serialize};
7
7
use std::str::FromStr;
8
8
9
+
#[derive(Clone, Default, Debug, Serialize)]
10
+
#[serde(rename_all = "camelCase")]
11
+
pub struct ListViewerState {
12
+
pub muted: bool,
13
+
#[serde(skip_serializing_if = "Option::is_none")]
14
+
pub blocked: Option<String>,
15
+
}
16
+
9
17
#[derive(Clone, Debug, Serialize)]
10
18
#[serde(rename_all = "camelCase")]
11
19
pub struct ListViewBasic {
···
18
26
pub avatar: Option<String>,
19
27
pub list_item_count: i64,
20
28
21
-
// #[serde(skip_serializing_if = "Option::is_none")]
22
-
// pub viewer: Option<()>,
29
+
#[serde(skip_serializing_if = "Option::is_none")]
30
+
pub viewer: Option<ListViewerState>,
23
31
#[serde(skip_serializing_if = "Vec::is_empty")]
24
32
pub labels: Vec<Label>,
25
33
···
44
52
pub avatar: Option<String>,
45
53
pub list_item_count: i64,
46
54
47
-
// #[serde(skip_serializing_if = "Option::is_none")]
48
-
// pub viewer: Option<()>,
55
+
#[serde(skip_serializing_if = "Option::is_none")]
56
+
pub viewer: Option<ListViewerState>,
49
57
#[serde(skip_serializing_if = "Vec::is_empty")]
50
58
pub labels: Vec<Label>,
51
59
+11
-4
lexica/src/app_bsky/labeler.rs
+11
-4
lexica/src/app_bsky/labeler.rs
···
4
4
use chrono::prelude::*;
5
5
use serde::{Deserialize, Serialize};
6
6
7
+
#[derive(Clone, Default, Debug, Serialize)]
8
+
#[serde(rename_all = "camelCase")]
9
+
pub struct LabelerViewerState {
10
+
#[serde(skip_serializing_if = "Option::is_none")]
11
+
pub like: Option<String>,
12
+
}
13
+
7
14
#[derive(Clone, Debug, Serialize)]
8
15
#[serde(rename_all = "camelCase")]
9
16
pub struct LabelerView {
···
12
19
pub creator: ProfileView,
13
20
14
21
pub like_count: i64,
15
-
// #[serde(skip_serializing_if = "Option::is_none")]
16
-
// pub viewer: Option<()>,
22
+
#[serde(skip_serializing_if = "Option::is_none")]
23
+
pub viewer: Option<LabelerViewerState>,
17
24
#[serde(skip_serializing_if = "Vec::is_empty")]
18
25
pub labels: Vec<Label>,
19
26
pub indexed_at: DateTime<Utc>,
···
27
34
pub creator: ProfileView,
28
35
29
36
pub like_count: i64,
30
-
// #[serde(skip_serializing_if = "Option::is_none")]
31
-
// pub viewer: Option<()>,
37
+
#[serde(skip_serializing_if = "Option::is_none")]
38
+
pub viewer: Option<LabelerViewerState>,
32
39
#[serde(skip_serializing_if = "Vec::is_empty")]
33
40
pub labels: Vec<Label>,
34
41
pub policies: LabelerPolicy,
+1
-1
lexica/src/community_lexicon/bookmarks.rs
+1
-1
lexica/src/community_lexicon/bookmarks.rs
+2
-2
lexica/src/utils.rs
+2
-2
lexica/src/utils.rs
+2
-2
migrations/2025-02-16-142357_posts/up.sql
+2
-2
migrations/2025-02-16-142357_posts/up.sql
+17
migrations/2025-09-17-190406_viewer-interactions/down.sql
+17
migrations/2025-09-17-190406_viewer-interactions/down.sql
···
1
+
drop trigger t_profile_state_ins on follows;
2
+
drop trigger t_profile_state_del on follows;
3
+
drop trigger t_profile_state_ins on blocks;
4
+
drop trigger t_profile_state_del on blocks;
5
+
drop trigger t_profile_state_ins on mutes;
6
+
drop trigger t_profile_state_del on mutes;
7
+
8
+
drop function f_profile_state_ins_follow;
9
+
drop function f_profile_state_del_follow;
10
+
drop function f_profile_state_ins_block;
11
+
drop function f_profile_state_del_block;
12
+
drop function f_profile_state_ins_mute;
13
+
drop function f_profile_state_del_mute;
14
+
15
+
drop view v_list_mutes_exp;
16
+
drop view v_list_block_exp;
17
+
drop table profile_states;
+146 migrations/2025-09-17-190406_viewer-interactions/up.sql
···
create table profile_states
(
    did       text not null,
    subject   text not null,
    muting    bool not null default false, -- subj muted by did
    blocked   bool not null default false, -- did blocked by subj
    blocking  text,                        -- subj blocked by did
    following text,                        -- rkey of follow record (did->subj)
    followed  text,                        -- rkey of follow record (subj->did)

    primary key (did, subject)
);

create index profilestates_did_index on profile_states using hash (did);
create index profilestates_sub_index on profile_states using hash (subject);

create view v_list_block_exp as
(
    select lb.list_uri, did, li.subject
    from list_blocks lb
             inner join list_items li on lb.list_uri = li.list_uri
);

create view v_list_mutes_exp as
(
    select lm.list_uri, did, li.subject
    from list_mutes lm
             inner join list_items li on lm.list_uri = li.list_uri
);

-- profile_states follow triggers
create function f_profile_state_ins_follow() returns trigger
    language plpgsql as
$$
begin
    insert into profile_states (did, subject, following)
    VALUES (NEW.did, NEW.subject, NEW.rkey)
    ON CONFLICT (did, subject) DO UPDATE SET following=excluded.following;

    insert into profile_states (did, subject, followed)
    VALUES (NEW.subject, NEW.did, NEW.rkey)
    ON CONFLICT (did, subject) DO UPDATE SET followed=excluded.followed;

    return NEW;
end;
$$;

create trigger t_profile_state_ins
    before insert
    on follows
    for each row
execute procedure f_profile_state_ins_follow();

create function f_profile_state_del_follow() returns trigger
    language plpgsql as
$$
begin
    update profile_states set following = null where did = OLD.did and subject = OLD.subject;
    update profile_states set followed = null where did = OLD.subject and subject = OLD.did;

    return OLD;
end;
$$;

create trigger t_profile_state_del
    before delete
    on follows
    for each row
execute procedure f_profile_state_del_follow();

-- profile_states block triggers

create function f_profile_state_ins_block() returns trigger
    language plpgsql as
$$
begin
    insert into profile_states (did, subject, blocking)
    VALUES (NEW.did, NEW.subject, NEW.rkey)
    ON CONFLICT (did, subject) DO UPDATE SET blocking=excluded.blocking;

    insert into profile_states (did, subject, blocked)
    VALUES (NEW.subject, NEW.did, TRUE)
    ON CONFLICT (did, subject) DO UPDATE SET blocked=excluded.blocked;

    return NEW;
end;
$$;

create trigger t_profile_state_ins
    before insert
    on blocks
    for each row
execute procedure f_profile_state_ins_block();

create function f_profile_state_del_block() returns trigger
    language plpgsql as
$$
begin
    update profile_states set blocking = null where did = OLD.did and subject = OLD.subject;
    update profile_states set blocked = FALSE where did = OLD.subject and subject = OLD.did;

    return OLD;
end;
$$;

create trigger t_profile_state_del
    before delete
    on blocks
    for each row
execute procedure f_profile_state_del_block();

-- profile_states mutes triggers

create function f_profile_state_ins_mute() returns trigger
    language plpgsql as
$$
begin
    insert into profile_states (did, subject, muting)
    VALUES (NEW.did, NEW.subject, TRUE)
    ON CONFLICT (did, subject) DO UPDATE SET muting=excluded.muting;

    return NEW;
end;
$$;

create trigger t_profile_state_ins
    before insert
    on mutes
    for each row
execute procedure f_profile_state_ins_mute();

create function f_profile_state_del_mute() returns trigger
    language plpgsql as
$$
begin
    update profile_states set muting = false where did = OLD.did and subject = OLD.subject;

    return OLD;
end;
$$;

create trigger t_profile_state_del
    before delete
    on mutes
    for each row
execute procedure f_profile_state_del_mute();
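
A quick way to sanity-check the follow triggers above (not part of the migration; the DIDs, the rkey, and the assumption that follows only needs did/subject/rkey columns are illustrative): one inserted follow should upsert two profile_states rows, one for each direction of the relationship.

-- Hypothetical smoke test for f_profile_state_ins_follow (example data only).
begin;

insert into follows (did, subject, rkey)
values ('did:plc:alice', 'did:plc:bob', '3kexamplerkey');

-- Expect (alice -> bob) with "following" set and (bob -> alice) with "followed" set.
select did, subject, following, followed
from profile_states
where (did, subject) in (('did:plc:alice', 'did:plc:bob'), ('did:plc:bob', 'did:plc:alice'));

rollback;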
+3 migrations/2025-09-24-205239_profiles-4224/down.sql
+3 migrations/2025-09-24-205239_profiles-4224/up.sql
+15 migrations/2025-09-27-171241_post-tweaks/down.sql
···
alter table posts
    drop column mentions,
    drop column violates_threadgate;

drop trigger t_author_feed_ins_post on posts;
drop trigger t_author_feed_del_post on posts;
drop trigger t_author_feed_ins_repost on reposts;
drop trigger t_author_feed_del_repost on reposts;

drop function f_author_feed_ins_post;
drop function f_author_feed_del_post;
drop function f_author_feed_ins_repost;
drop function f_author_feed_del_repost;

drop table author_feeds;
+79 migrations/2025-09-27-171241_post-tweaks/up.sql
···
alter table posts
    add column mentions text[],
    add column violates_threadgate bool not null default false;

create table author_feeds
(
    uri     text primary key,
    cid     text not null,
    post    text not null,
    did     text not null,
    typ     text not null,
    sort_at timestamptz not null
);

-- author_feeds post triggers
create function f_author_feed_ins_post() returns trigger
    language plpgsql as
$$
begin
    insert into author_feeds (uri, cid, post, did, typ, sort_at)
    VALUES (NEW.at_uri, NEW.cid, NEW.at_uri, NEW.did, 'post', NEW.created_at)
    on conflict do nothing;
    return NEW;
end;
$$;

create trigger t_author_feed_ins_post
    before insert
    on posts
    for each row
execute procedure f_author_feed_ins_post();

create function f_author_feed_del_post() returns trigger
    language plpgsql as
$$
begin
    delete from author_feeds where did = OLD.did and post = OLD.at_uri and typ = 'post';
    return OLD;
end;
$$;

create trigger t_author_feed_del_post
    before delete
    on posts
    for each row
execute procedure f_author_feed_del_post();

-- author_feeds repost triggers
create function f_author_feed_ins_repost() returns trigger
    language plpgsql as
$$
begin
    insert into author_feeds (uri, cid, post, did, typ, sort_at)
    VALUES ('at://' || NEW.did || '/app.bsky.feed.repost/' || NEW.rkey, NEW.post_cid, NEW.post, NEW.did, 'repost', NEW.created_at)
    on conflict do nothing;
    return NEW;
end;
$$;

create trigger t_author_feed_ins_repost
    before insert
    on reposts
    for each row
execute procedure f_author_feed_ins_repost();

create function f_author_feed_del_repost() returns trigger
    language plpgsql as
$$
begin
    delete from author_feeds where did = OLD.did and post = OLD.post and typ = 'repost';
    return OLD;
end;
$$;

create trigger t_author_feed_del_repost
    before delete
    on reposts
    for each row
execute procedure f_author_feed_del_repost();
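
For context only (not part of the migration): with the table and triggers above in place, a chronological author feed can be read straight out of author_feeds. The DID below is an example value.

-- Hypothetical read path over author_feeds: newest 30 items (posts and reposts) for one author.
select uri, post, typ, sort_at
from author_feeds
where did = 'did:plc:alice'
order by sort_at desc
limit 30;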
+1 -1 parakeet/Cargo.toml
···
 axum-extra = { version = "0.10.0", features = ["query", "typed-header"] }
 base64 = "0.22"
 chrono = { version = "0.4.39", features = ["serde"] }
-dataloader = { path = "../dataloader-rs", default-features = false, features = ["runtime-tokio"] }
+dataloader = { path = "../dataloader-rs" }
 deadpool = { version = "0.12.1", features = ["managed"] }
 did-resolver = { path = "../did-resolver" }
 diesel = { version = "2.2.6", features = ["chrono", "serde_json"] }
+183 parakeet/src/db.rs
···
1
1
use diesel::prelude::*;
2
+
use diesel::sql_types::{Array, Bool, Nullable, Text};
2
3
use diesel_async::{AsyncPgConnection, RunQueryDsl};
3
4
use parakeet_db::{schema, types};
4
5
···
13
14
.await
14
15
.optional()
15
16
}
17
+
18
+
#[derive(Clone, Debug, QueryableByName)]
19
+
#[diesel(check_for_backend(diesel::pg::Pg))]
20
+
pub struct ProfileStateRet {
21
+
#[diesel(sql_type = Text)]
22
+
pub did: String,
23
+
#[diesel(sql_type = Text)]
24
+
pub subject: String,
25
+
#[diesel(sql_type = Nullable<Bool>)]
26
+
pub muting: Option<bool>,
27
+
#[diesel(sql_type = Nullable<Bool>)]
28
+
pub blocked: Option<bool>,
29
+
#[diesel(sql_type = Nullable<Text>)]
30
+
pub blocking: Option<String>,
31
+
#[diesel(sql_type = Nullable<Text>)]
32
+
pub following: Option<String>,
33
+
#[diesel(sql_type = Nullable<Text>)]
34
+
pub followed: Option<String>,
35
+
#[diesel(sql_type = Nullable<Text>)]
36
+
pub list_block: Option<String>,
37
+
#[diesel(sql_type = Nullable<Text>)]
38
+
pub list_mute: Option<String>,
39
+
}
40
+
pub async fn get_profile_state(
41
+
conn: &mut AsyncPgConnection,
42
+
did: &str,
43
+
sub: &str,
44
+
) -> QueryResult<Option<ProfileStateRet>> {
45
+
diesel::sql_query(include_str!("sql/profile_state.sql"))
46
+
.bind::<Text, _>(did)
47
+
.bind::<Array<Text>, _>(vec![sub])
48
+
.get_result::<ProfileStateRet>(conn)
49
+
.await
50
+
.optional()
51
+
}
52
+
pub async fn get_profile_states(
53
+
conn: &mut AsyncPgConnection,
54
+
did: &str,
55
+
sub: &[String],
56
+
) -> QueryResult<Vec<ProfileStateRet>> {
57
+
diesel::sql_query(include_str!("sql/profile_state.sql"))
58
+
.bind::<Text, _>(did)
59
+
.bind::<Array<Text>, _>(sub)
60
+
.load::<ProfileStateRet>(conn)
61
+
.await
62
+
}
63
+
64
+
#[derive(Clone, Debug, QueryableByName)]
65
+
#[diesel(check_for_backend(diesel::pg::Pg))]
66
+
pub struct PostStateRet {
67
+
#[diesel(sql_type = diesel::sql_types::Text)]
68
+
pub at_uri: String,
69
+
#[diesel(sql_type = diesel::sql_types::Text)]
70
+
pub did: String,
71
+
#[diesel(sql_type = diesel::sql_types::Text)]
72
+
pub cid: String,
73
+
#[diesel(sql_type = diesel::sql_types::Nullable<diesel::sql_types::Text>)]
74
+
pub like_rkey: Option<String>,
75
+
#[diesel(sql_type = diesel::sql_types::Nullable<diesel::sql_types::Text>)]
76
+
pub repost_rkey: Option<String>,
77
+
#[diesel(sql_type = diesel::sql_types::Bool)]
78
+
pub bookmarked: bool,
79
+
// #[diesel(sql_type = diesel::sql_types::Bool)]
80
+
// pub muted: bool,
81
+
#[diesel(sql_type = diesel::sql_types::Bool)]
82
+
pub embed_disabled: bool,
83
+
#[diesel(sql_type = diesel::sql_types::Bool)]
84
+
pub pinned: bool,
85
+
}
86
+
pub async fn get_post_state(
87
+
conn: &mut AsyncPgConnection,
88
+
did: &str,
89
+
subject: &str,
90
+
) -> QueryResult<Option<PostStateRet>> {
91
+
diesel::sql_query(include_str!("sql/post_state.sql"))
92
+
.bind::<Text, _>(did)
93
+
.bind::<Array<Text>, _>(vec![subject])
94
+
.get_result::<PostStateRet>(conn)
95
+
.await
96
+
.optional()
97
+
}
98
+
99
+
pub async fn get_post_states(
100
+
conn: &mut AsyncPgConnection,
101
+
did: &str,
102
+
sub: &[String],
103
+
) -> QueryResult<Vec<PostStateRet>> {
104
+
diesel::sql_query(include_str!("sql/post_state.sql"))
105
+
.bind::<Text, _>(did)
106
+
.bind::<Array<Text>, _>(sub)
107
+
.load::<PostStateRet>(conn)
108
+
.await
109
+
}
110
+
111
+
#[derive(Clone, Debug, QueryableByName)]
112
+
#[diesel(check_for_backend(diesel::pg::Pg))]
113
+
pub struct ListStateRet {
114
+
#[diesel(sql_type = Text)]
115
+
pub at_uri: String,
116
+
#[diesel(sql_type = Bool)]
117
+
pub muted: bool,
118
+
#[diesel(sql_type = Nullable<Text>)]
119
+
pub block: Option<String>,
120
+
}
121
+
122
+
pub async fn get_list_state(
123
+
conn: &mut AsyncPgConnection,
124
+
did: &str,
125
+
subject: &str,
126
+
) -> QueryResult<Option<ListStateRet>> {
127
+
diesel::sql_query(include_str!("sql/list_states.sql"))
128
+
.bind::<Text, _>(did)
129
+
.bind::<Array<Text>, _>(vec![subject])
130
+
.get_result::<ListStateRet>(conn)
131
+
.await
132
+
.optional()
133
+
}
134
+
135
+
pub async fn get_list_states(
136
+
conn: &mut AsyncPgConnection,
137
+
did: &str,
138
+
sub: &[String],
139
+
) -> QueryResult<Vec<ListStateRet>> {
140
+
diesel::sql_query(include_str!("sql/list_states.sql"))
141
+
.bind::<Text, _>(did)
142
+
.bind::<Array<Text>, _>(sub)
143
+
.load::<ListStateRet>(conn)
144
+
.await
145
+
}
146
+
147
+
pub async fn get_like_state(
148
+
conn: &mut AsyncPgConnection,
149
+
did: &str,
150
+
subject: &str,
151
+
) -> QueryResult<Option<(String, String)>> {
152
+
schema::likes::table
153
+
.select((schema::likes::did, schema::likes::rkey))
154
+
.filter(
155
+
schema::likes::did
156
+
.eq(did)
157
+
.and(schema::likes::subject.eq(subject)),
158
+
)
159
+
.get_result(conn)
160
+
.await
161
+
.optional()
162
+
}
163
+
164
+
pub async fn get_like_states(
165
+
conn: &mut AsyncPgConnection,
166
+
did: &str,
167
+
sub: &[String],
168
+
) -> QueryResult<Vec<(String, String, String)>> {
169
+
schema::likes::table
170
+
.select((
171
+
schema::likes::subject,
172
+
schema::likes::did,
173
+
schema::likes::rkey,
174
+
))
175
+
.filter(
176
+
schema::likes::did
177
+
.eq(did)
178
+
.and(schema::likes::subject.eq_any(sub)),
179
+
)
180
+
.load(conn)
181
+
.await
182
+
}
183
+
184
+
pub async fn get_pinned_post_uri(
185
+
conn: &mut AsyncPgConnection,
186
+
did: &str,
187
+
) -> QueryResult<Option<String>> {
188
+
schema::profiles::table
189
+
.select(schema::profiles::pinned_uri.assume_not_null())
190
+
.filter(
191
+
schema::profiles::did
192
+
.eq(did)
193
+
.and(schema::profiles::pinned_uri.is_not_null()),
194
+
)
195
+
.get_result(conn)
196
+
.await
197
+
.optional()
198
+
}
+41 -3 parakeet/src/hydration/feedgen.rs
···
1
1
use crate::hydration::map_labels;
2
2
use crate::xrpc::cdn::BskyCdn;
3
3
use lexica::app_bsky::actor::ProfileView;
4
-
use lexica::app_bsky::feed::{GeneratorContentMode, GeneratorView};
4
+
use lexica::app_bsky::feed::{GeneratorContentMode, GeneratorView, GeneratorViewerState};
5
5
use parakeet_db::models;
6
6
use std::collections::HashMap;
7
7
use std::str::FromStr;
8
8
9
+
fn build_viewer((did, rkey): (String, String)) -> GeneratorViewerState {
10
+
GeneratorViewerState {
11
+
like: Some(format!("at://{did}/app.bsky.feed.like/{rkey}")),
12
+
}
13
+
}
14
+
9
15
fn build_feedgen(
10
16
feedgen: models::FeedGen,
11
17
creator: ProfileView,
12
18
labels: Vec<models::Label>,
13
19
likes: Option<i32>,
20
+
viewer: Option<GeneratorViewerState>,
14
21
cdn: &BskyCdn,
15
22
) -> GeneratorView {
16
23
let content_mode = feedgen
···
35
42
like_count: likes.unwrap_or_default() as i64,
36
43
accepts_interactions: feedgen.accepts_interactions,
37
44
labels: map_labels(labels),
45
+
viewer,
38
46
content_mode,
39
47
indexed_at: feedgen.created_at,
40
48
}
···
43
51
impl super::StatefulHydrator<'_> {
44
52
pub async fn hydrate_feedgen(&self, feedgen: String) -> Option<GeneratorView> {
45
53
let labels = self.get_label(&feedgen).await;
54
+
let viewer = self.get_feedgen_viewer_state(&feedgen).await;
46
55
let likes = self.loaders.like.load(feedgen.clone()).await;
47
56
let feedgen = self.loaders.feedgen.load(feedgen).await?;
48
57
let profile = self.hydrate_profile(feedgen.owner.clone()).await?;
49
58
50
-
Some(build_feedgen(feedgen, profile, labels, likes, &self.cdn))
59
+
Some(build_feedgen(
60
+
feedgen, profile, labels, likes, viewer, &self.cdn,
61
+
))
51
62
}
52
63
53
64
pub async fn hydrate_feedgens(&self, feedgens: Vec<String>) -> HashMap<String, GeneratorView> {
54
65
let labels = self.get_label_many(&feedgens).await;
66
+
let viewers = self.get_feedgen_viewer_states(&feedgens).await;
55
67
let mut likes = self.loaders.like.load_many(feedgens.clone()).await;
56
68
let feedgens = self.loaders.feedgen.load_many(feedgens).await;
57
69
···
66
78
.into_iter()
67
79
.filter_map(|(uri, feedgen)| {
68
80
let creator = creators.get(&feedgen.owner).cloned()?;
81
+
let viewer = viewers.get(&uri).cloned();
69
82
let labels = labels.get(&uri).cloned().unwrap_or_default();
70
83
let likes = likes.remove(&uri);
71
84
72
85
Some((
73
86
uri,
74
-
build_feedgen(feedgen, creator, labels, likes, &self.cdn),
87
+
build_feedgen(feedgen, creator, labels, likes, viewer, &self.cdn),
75
88
))
76
89
})
77
90
.collect()
91
+
}
92
+
93
+
async fn get_feedgen_viewer_state(&self, subject: &str) -> Option<GeneratorViewerState> {
94
+
if let Some(viewer) = &self.current_actor {
95
+
let data = self.loaders.like_state.get(viewer, subject).await?;
96
+
97
+
Some(build_viewer(data))
98
+
} else {
99
+
None
100
+
}
101
+
}
102
+
103
+
async fn get_feedgen_viewer_states(
104
+
&self,
105
+
subjects: &[String],
106
+
) -> HashMap<String, GeneratorViewerState> {
107
+
if let Some(viewer) = &self.current_actor {
108
+
let data = self.loaders.like_state.get_many(viewer, subjects).await;
109
+
110
+
data.into_iter()
111
+
.map(|(k, state)| (k, build_viewer(state)))
112
+
.collect()
113
+
} else {
114
+
HashMap::new()
115
+
}
78
116
}
79
117
}
+54 -5 parakeet/src/hydration/labeler.rs
···
1
1
use crate::hydration::{map_labels, StatefulHydrator};
2
2
use lexica::app_bsky::actor::ProfileView;
3
-
use lexica::app_bsky::labeler::{LabelerPolicy, LabelerView, LabelerViewDetailed};
3
+
use lexica::app_bsky::labeler::{
4
+
LabelerPolicy, LabelerView, LabelerViewDetailed, LabelerViewerState,
5
+
};
4
6
use lexica::com_atproto::label::{Blurs, LabelValueDefinition, Severity};
5
7
use lexica::com_atproto::moderation::{ReasonType, SubjectType};
6
8
use parakeet_db::models;
7
9
use std::collections::HashMap;
8
10
use std::str::FromStr;
9
11
12
+
fn build_viewer((did, rkey): (String, String)) -> LabelerViewerState {
13
+
LabelerViewerState {
14
+
like: Some(format!("at://{did}/app.bsky.feed.like/{rkey}")),
15
+
}
16
+
}
17
+
10
18
fn build_view(
11
19
labeler: models::LabelerService,
12
20
creator: ProfileView,
13
21
labels: Vec<models::Label>,
22
+
viewer: Option<LabelerViewerState>,
14
23
likes: Option<i32>,
15
24
) -> LabelerView {
16
25
LabelerView {
···
18
27
cid: labeler.cid,
19
28
creator,
20
29
like_count: likes.unwrap_or_default() as i64,
30
+
viewer,
21
31
labels: map_labels(labels),
22
32
indexed_at: labeler.indexed_at.and_utc(),
23
33
}
···
28
38
defs: Vec<models::LabelDefinition>,
29
39
creator: ProfileView,
30
40
labels: Vec<models::Label>,
41
+
viewer: Option<LabelerViewerState>,
31
42
likes: Option<i32>,
32
43
) -> LabelerViewDetailed {
33
44
let reason_types = labeler.reasons.map(|v| {
···
77
88
cid: labeler.cid,
78
89
creator,
79
90
like_count: likes.unwrap_or_default() as i64,
91
+
viewer,
80
92
policies: LabelerPolicy {
81
93
label_values,
82
94
label_value_definitions,
···
92
104
impl StatefulHydrator<'_> {
93
105
pub async fn hydrate_labeler(&self, labeler: String) -> Option<LabelerView> {
94
106
let labels = self.get_label(&labeler).await;
107
+
let viewer = self.get_labeler_viewer_state(&labeler).await;
95
108
let likes = self.loaders.like.load(make_labeler_uri(&labeler)).await;
96
109
let (labeler, _) = self.loaders.labeler.load(labeler).await?;
97
110
let creator = self.hydrate_profile(labeler.did.clone()).await?;
98
111
99
-
Some(build_view(labeler, creator, labels, likes))
112
+
Some(build_view(labeler, creator, labels, viewer, likes))
100
113
}
101
114
102
115
pub async fn hydrate_labelers(&self, labelers: Vec<String>) -> HashMap<String, LabelerView> {
···
107
120
.values()
108
121
.map(|(labeler, _)| (labeler.did.clone(), make_labeler_uri(&labeler.did)))
109
122
.unzip::<_, _, Vec<_>, Vec<_>>();
123
+
let viewers = self.get_labeler_viewer_states(&uris).await;
110
124
let creators = self.hydrate_profiles(creators).await;
111
125
let mut likes = self.loaders.like.load_many(uris.clone()).await;
112
126
···
116
130
let creator = creators.get(&labeler.did).cloned()?;
117
131
let labels = labels.get(&k).cloned().unwrap_or_default();
118
132
let likes = likes.remove(&make_labeler_uri(&labeler.did));
133
+
let viewer = viewers.get(&make_labeler_uri(&k)).cloned();
119
134
120
-
Some((k, build_view(labeler, creator, labels, likes)))
135
+
Some((k, build_view(labeler, creator, labels, viewer, likes)))
121
136
})
122
137
.collect()
123
138
}
124
139
125
140
pub async fn hydrate_labeler_detailed(&self, labeler: String) -> Option<LabelerViewDetailed> {
126
141
let labels = self.get_label(&labeler).await;
142
+
let viewer = self.get_labeler_viewer_state(&labeler).await;
127
143
let likes = self.loaders.like.load(make_labeler_uri(&labeler)).await;
128
144
let (labeler, defs) = self.loaders.labeler.load(labeler).await?;
129
145
let creator = self.hydrate_profile(labeler.did.clone()).await?;
130
146
131
-
Some(build_view_detailed(labeler, defs, creator, labels, likes))
147
+
Some(build_view_detailed(
148
+
labeler, defs, creator, labels, viewer, likes,
149
+
))
132
150
}
133
151
134
152
pub async fn hydrate_labelers_detailed(
···
142
160
.values()
143
161
.map(|(labeler, _)| (labeler.did.clone(), make_labeler_uri(&labeler.did)))
144
162
.unzip::<_, _, Vec<_>, Vec<_>>();
163
+
let viewers = self.get_labeler_viewer_states(&uris).await;
145
164
let creators = self.hydrate_profiles(creators).await;
146
165
let mut likes = self.loaders.like.load_many(uris.clone()).await;
147
166
···
151
170
let creator = creators.get(&labeler.did).cloned()?;
152
171
let labels = labels.get(&k).cloned().unwrap_or_default();
153
172
let likes = likes.remove(&make_labeler_uri(&labeler.did));
173
+
let viewer = viewers.get(&make_labeler_uri(&k)).cloned();
154
174
155
-
let view = build_view_detailed(labeler, defs, creator, labels, likes);
175
+
let view = build_view_detailed(labeler, defs, creator, labels, viewer, likes);
156
176
157
177
Some((k, view))
158
178
})
159
179
.collect()
180
+
}
181
+
182
+
async fn get_labeler_viewer_state(&self, subject: &str) -> Option<LabelerViewerState> {
183
+
if let Some(viewer) = &self.current_actor {
184
+
let data = self
185
+
.loaders
186
+
.like_state
187
+
.get(&make_labeler_uri(viewer), subject)
188
+
.await?;
189
+
190
+
Some(build_viewer(data))
191
+
} else {
192
+
None
193
+
}
194
+
}
195
+
196
+
async fn get_labeler_viewer_states(
197
+
&self,
198
+
subjects: &[String],
199
+
) -> HashMap<String, LabelerViewerState> {
200
+
if let Some(viewer) = &self.current_actor {
201
+
let data = self.loaders.like_state.get_many(viewer, subjects).await;
202
+
203
+
data.into_iter()
204
+
.map(|(k, state)| (k, build_viewer(state)))
205
+
.collect()
206
+
} else {
207
+
HashMap::new()
208
+
}
160
209
}
161
210
}
162
211
+57 -5 parakeet/src/hydration/list.rs
···
1
+
use crate::db::ListStateRet;
1
2
use crate::hydration::{map_labels, StatefulHydrator};
2
3
use crate::xrpc::cdn::BskyCdn;
3
4
use lexica::app_bsky::actor::ProfileView;
4
-
use lexica::app_bsky::graph::{ListPurpose, ListView, ListViewBasic};
5
+
use lexica::app_bsky::graph::{ListPurpose, ListView, ListViewBasic, ListViewerState};
5
6
use parakeet_db::models;
6
7
use std::collections::HashMap;
7
8
use std::str::FromStr;
8
9
10
+
fn build_viewer(data: ListStateRet) -> ListViewerState {
11
+
ListViewerState {
12
+
muted: data.muted,
13
+
blocked: data.block,
14
+
}
15
+
}
16
+
9
17
fn build_basic(
10
18
list: models::List,
11
19
list_item_count: i64,
12
20
labels: Vec<models::Label>,
21
+
viewer: Option<ListViewerState>,
13
22
cdn: &BskyCdn,
14
23
) -> Option<ListViewBasic> {
15
24
let purpose = ListPurpose::from_str(&list.list_type).ok()?;
···
22
31
purpose,
23
32
avatar,
24
33
list_item_count,
34
+
viewer,
25
35
labels: map_labels(labels),
26
36
indexed_at: list.created_at,
27
37
})
···
32
42
list_item_count: i64,
33
43
creator: ProfileView,
34
44
labels: Vec<models::Label>,
45
+
viewer: Option<ListViewerState>,
35
46
cdn: &BskyCdn,
36
47
) -> Option<ListView> {
37
48
let purpose = ListPurpose::from_str(&list.list_type).ok()?;
···
51
62
description_facets,
52
63
avatar,
53
64
list_item_count,
65
+
viewer,
54
66
labels: map_labels(labels),
55
67
indexed_at: list.created_at,
56
68
})
···
59
71
impl StatefulHydrator<'_> {
60
72
pub async fn hydrate_list_basic(&self, list: String) -> Option<ListViewBasic> {
61
73
let labels = self.get_label(&list).await;
74
+
let viewer = self.get_list_viewer_state(&list).await;
62
75
let (list, count) = self.loaders.list.load(list).await?;
63
76
64
-
build_basic(list, count, labels, &self.cdn)
77
+
build_basic(list, count, labels, viewer, &self.cdn)
65
78
}
66
79
67
80
pub async fn hydrate_lists_basic(&self, lists: Vec<String>) -> HashMap<String, ListViewBasic> {
81
+
if lists.is_empty() {
82
+
return HashMap::new();
83
+
}
84
+
68
85
let labels = self.get_label_many(&lists).await;
86
+
let viewers = self.get_list_viewer_states(&lists).await;
69
87
let lists = self.loaders.list.load_many(lists).await;
70
88
71
89
lists
72
90
.into_iter()
73
91
.filter_map(|(uri, (list, count))| {
74
92
let labels = labels.get(&uri).cloned().unwrap_or_default();
93
+
let viewer = viewers.get(&uri).cloned();
75
94
76
-
build_basic(list, count, labels, &self.cdn).map(|v| (uri, v))
95
+
build_basic(list, count, labels, viewer, &self.cdn).map(|v| (uri, v))
77
96
})
78
97
.collect()
79
98
}
80
99
81
100
pub async fn hydrate_list(&self, list: String) -> Option<ListView> {
82
101
let labels = self.get_label(&list).await;
102
+
let viewer = self.get_list_viewer_state(&list).await;
83
103
let (list, count) = self.loaders.list.load(list).await?;
84
104
let profile = self.hydrate_profile(list.owner.clone()).await?;
85
105
86
-
build_listview(list, count, profile, labels, &self.cdn)
106
+
build_listview(list, count, profile, labels, viewer, &self.cdn)
87
107
}
88
108
89
109
pub async fn hydrate_lists(&self, lists: Vec<String>) -> HashMap<String, ListView> {
110
+
if lists.is_empty() {
111
+
return HashMap::new();
112
+
}
113
+
90
114
let labels = self.get_label_many(&lists).await;
115
+
let viewers = self.get_list_viewer_states(&lists).await;
91
116
let lists = self.loaders.list.load_many(lists).await;
92
117
93
118
let creators = lists.values().map(|(list, _)| list.owner.clone()).collect();
···
97
122
.into_iter()
98
123
.filter_map(|(uri, (list, count))| {
99
124
let creator = creators.get(&list.owner)?;
125
+
let viewer = viewers.get(&uri).cloned();
100
126
let labels = labels.get(&uri).cloned().unwrap_or_default();
101
127
102
-
build_listview(list, count, creator.to_owned(), labels, &self.cdn).map(|v| (uri, v))
128
+
build_listview(list, count, creator.to_owned(), labels, viewer, &self.cdn)
129
+
.map(|v| (uri, v))
103
130
})
104
131
.collect()
132
+
}
133
+
134
+
async fn get_list_viewer_state(&self, subject: &str) -> Option<ListViewerState> {
135
+
if let Some(viewer) = &self.current_actor {
136
+
let data = self.loaders.list_state.get(viewer, subject).await?;
137
+
138
+
Some(build_viewer(data))
139
+
} else {
140
+
None
141
+
}
142
+
}
143
+
144
+
async fn get_list_viewer_states(
145
+
&self,
146
+
subjects: &[String],
147
+
) -> HashMap<String, ListViewerState> {
148
+
if let Some(viewer) = &self.current_actor {
149
+
let data = self.loaders.list_state.get_many(viewer, subjects).await;
150
+
151
+
data.into_iter()
152
+
.map(|(k, state)| (k, build_viewer(state)))
153
+
.collect()
154
+
} else {
155
+
HashMap::new()
156
+
}
105
157
}
106
158
}
+239 -80 parakeet/src/hydration/posts.rs
···
1
+
use crate::db::PostStateRet;
1
2
use crate::hydration::{map_labels, StatefulHydrator};
2
3
use lexica::app_bsky::actor::ProfileViewBasic;
3
4
use lexica::app_bsky::embed::Embed;
4
-
use lexica::app_bsky::feed::{FeedViewPost, PostView, ReplyRef, ReplyRefPost, ThreadgateView};
5
+
use lexica::app_bsky::feed::{
6
+
BlockedAuthor, FeedReasonRepost, FeedViewPost, FeedViewPostReason, PostView, PostViewerState,
7
+
ReplyRef, ReplyRefPost, ThreadgateView,
8
+
};
5
9
use lexica::app_bsky::graph::ListViewBasic;
6
10
use lexica::app_bsky::RecordStats;
7
11
use parakeet_db::models;
8
12
use parakeet_index::PostStats;
9
13
use std::collections::HashMap;
10
14
15
+
fn build_viewer(did: &str, data: PostStateRet) -> PostViewerState {
16
+
let is_me = did == data.did;
17
+
18
+
let repost = data
19
+
.repost_rkey
20
+
.map(|rkey| format!("at://{did}/app.bsky.feed.repost/{rkey}"));
21
+
let like = data
22
+
.like_rkey
23
+
.map(|rkey| format!("at://{did}/app.bsky.feed.like/{rkey}"));
24
+
25
+
PostViewerState {
26
+
repost,
27
+
like,
28
+
bookmarked: data.bookmarked,
29
+
thread_muted: false, // todo when we have thread mutes
30
+
reply_disabled: false,
31
+
embedding_disabled: data.embed_disabled && !is_me, // poster can always bypass embed disabled.
32
+
pinned: data.pinned,
33
+
}
34
+
}
35
+
36
+
type HydratePostsRet = (
37
+
models::Post,
38
+
ProfileViewBasic,
39
+
Vec<models::Label>,
40
+
Option<Embed>,
41
+
Option<ThreadgateView>,
42
+
Option<PostViewerState>,
43
+
Option<PostStats>,
44
+
);
45
+
11
46
fn build_postview(
12
-
post: models::Post,
13
-
author: ProfileViewBasic,
14
-
labels: Vec<models::Label>,
15
-
embed: Option<Embed>,
16
-
threadgate: Option<ThreadgateView>,
17
-
stats: Option<PostStats>,
47
+
(post, author, labels, embed, threadgate, viewer, stats): HydratePostsRet,
18
48
) -> PostView {
19
49
let stats = stats
20
50
.map(|stats| RecordStats {
···
33
63
embed,
34
64
stats,
35
65
labels: map_labels(labels),
66
+
viewer,
36
67
threadgate,
37
68
indexed_at: post.created_at,
38
69
}
···
57
88
) -> Option<ThreadgateView> {
58
89
let threadgate = threadgate?;
59
90
60
-
let lists = threadgate
61
-
.allowed_lists
62
-
.iter()
63
-
.flatten()
64
-
.cloned()
65
-
.collect::<Vec<_>>();
91
+
let lists = match threadgate.allowed_lists.as_ref() {
92
+
Some(allowed_lists) => allowed_lists.iter().flatten().cloned().collect(),
93
+
None => Vec::new(),
94
+
};
66
95
let lists = self.hydrate_lists_basic(lists).await;
67
96
68
97
Some(build_threadgate_view(
···
76
105
threadgates: Vec<models::Threadgate>,
77
106
) -> HashMap<String, ThreadgateView> {
78
107
let lists = threadgates.iter().fold(Vec::new(), |mut acc, c| {
79
-
acc.extend(c.allowed_lists.iter().flatten().cloned());
108
+
if let Some(lists) = &c.allowed_lists {
109
+
acc.extend(lists.iter().flatten().cloned());
110
+
}
80
111
acc
81
112
});
82
113
let lists = self.hydrate_lists_basic(lists).await;
···
84
115
threadgates
85
116
.into_iter()
86
117
.map(|threadgate| {
87
-
let this_lists = threadgate
88
-
.allowed_lists
89
-
.iter()
90
-
.filter_map(|v| v.clone().and_then(|v| lists.get(&v).cloned()))
91
-
.collect();
118
+
let this_lists = match &threadgate.allowed_lists {
119
+
Some(allowed_lists) => allowed_lists
120
+
.iter()
121
+
.filter_map(|v| v.clone().and_then(|v| lists.get(&v).cloned()))
122
+
.collect(),
123
+
None => Vec::new(),
124
+
};
92
125
93
126
(
94
127
threadgate.at_uri.clone(),
···
101
134
pub async fn hydrate_post(&self, post: String) -> Option<PostView> {
102
135
let stats = self.loaders.post_stats.load(post.clone()).await;
103
136
let (post, threadgate) = self.loaders.posts.load(post).await?;
137
+
let viewer = self.get_post_viewer_state(&post.at_uri).await;
104
138
let embed = self.hydrate_embed(post.at_uri.clone()).await;
105
139
let author = self.hydrate_profile_basic(post.did.clone()).await?;
106
140
let threadgate = self.hydrate_threadgate(threadgate).await;
107
141
let labels = self.get_label(&post.at_uri).await;
108
142
109
-
Some(build_postview(
110
-
post, author, labels, embed, threadgate, stats,
111
-
))
143
+
Some(build_postview((
144
+
post, author, labels, embed, threadgate, viewer, stats,
145
+
)))
112
146
}
113
147
114
-
pub async fn hydrate_posts(&self, posts: Vec<String>) -> HashMap<String, PostView> {
148
+
async fn hydrate_posts_inner(&self, posts: Vec<String>) -> HashMap<String, HydratePostsRet> {
115
149
let stats = self.loaders.post_stats.load_many(posts.clone()).await;
116
150
let posts = self.loaders.posts.load_many(posts).await;
117
151
···
121
155
.unzip::<_, _, Vec<_>, Vec<_>>();
122
156
let authors = self.hydrate_profiles_basic(authors).await;
123
157
124
-
let post_labels = self.get_label_many(&post_uris).await;
158
+
let mut post_labels = self.get_label_many(&post_uris).await;
159
+
let mut viewer_data = self.get_post_viewer_states(&post_uris).await;
125
160
126
161
let threadgates = posts
127
162
.values()
···
129
164
.collect();
130
165
let threadgates = self.hydrate_threadgates(threadgates).await;
131
166
132
-
let embeds = self.hydrate_embeds(post_uris).await;
167
+
let mut embeds = self.hydrate_embeds(post_uris).await;
133
168
134
169
posts
135
170
.into_iter()
136
171
.filter_map(|(uri, (post, threadgate))| {
137
-
let author = authors.get(&post.did)?;
138
-
let embed = embeds.get(&uri).cloned();
172
+
let author = authors.get(&post.did)?.clone();
173
+
let embed = embeds.remove(&uri);
139
174
let threadgate = threadgate.and_then(|tg| threadgates.get(&tg.at_uri).cloned());
140
-
let labels = post_labels.get(&uri).cloned().unwrap_or_default();
175
+
let labels = post_labels.remove(&uri).unwrap_or_default();
141
176
let stats = stats.get(&uri).cloned();
177
+
let viewer = viewer_data.remove(&uri);
142
178
143
179
Some((
144
180
uri,
145
-
build_postview(post, author.to_owned(), labels, embed, threadgate, stats),
181
+
(post, author, labels, embed, threadgate, viewer, stats),
146
182
))
147
183
})
148
184
.collect()
149
185
}
150
186
151
-
pub async fn hydrate_feed_posts(&self, posts: Vec<String>) -> HashMap<String, FeedViewPost> {
152
-
let stats = self.loaders.post_stats.load_many(posts.clone()).await;
153
-
let posts = self.loaders.posts.load_many(posts).await;
154
-
155
-
let (authors, post_uris) = posts
156
-
.values()
157
-
.map(|(post, _)| (post.did.clone(), post.at_uri.clone()))
158
-
.unzip::<_, _, Vec<_>, Vec<_>>();
159
-
let authors = self.hydrate_profiles_basic(authors).await;
160
-
161
-
let post_labels = self.get_label_many(&post_uris).await;
187
+
pub async fn hydrate_posts(&self, posts: Vec<String>) -> HashMap<String, PostView> {
188
+
self.hydrate_posts_inner(posts)
189
+
.await
190
+
.into_iter()
191
+
.map(|(uri, data)| (uri, build_postview(data)))
192
+
.collect()
193
+
}
162
194
163
-
let embeds = self.hydrate_embeds(post_uris).await;
195
+
pub async fn hydrate_feed_posts(
196
+
&self,
197
+
posts: Vec<RawFeedItem>,
198
+
author_threads_only: bool,
199
+
) -> Vec<FeedViewPost> {
200
+
let post_uris = posts
201
+
.iter()
202
+
.map(|item| item.post_uri().to_string())
203
+
.collect::<Vec<_>>();
204
+
let mut posts_hyd = self.hydrate_posts_inner(post_uris).await;
164
205
165
-
let reply_refs = posts
206
+
// we shouldn't show the parent when the post violates a threadgate.
207
+
let reply_refs = posts_hyd
166
208
.values()
167
-
.flat_map(|(post, _)| [post.parent_uri.clone(), post.root_uri.clone()])
209
+
.filter(|(post, ..)| !post.violates_threadgate)
210
+
.flat_map(|(post, ..)| [post.parent_uri.clone(), post.root_uri.clone()])
168
211
.flatten()
169
212
.collect::<Vec<_>>();
170
-
171
213
let reply_posts = self.hydrate_posts(reply_refs).await;
172
214
215
+
let repost_profiles = posts
216
+
.iter()
217
+
.filter_map(|item| item.repost_by())
218
+
.collect::<Vec<_>>();
219
+
let profiles_hydrated = self.hydrate_profiles_basic(repost_profiles).await;
220
+
173
221
posts
174
222
.into_iter()
175
-
.filter_map(|(post_uri, (post, _))| {
176
-
let author = authors.get(&post.did)?;
223
+
.filter_map(|item| {
224
+
let post = posts_hyd.remove(item.post_uri())?;
225
+
let context = item.context();
226
+
227
+
let reply = if let RawFeedItem::Post { .. } = item {
228
+
let root_uri = post.0.root_uri.as_ref();
229
+
let parent_uri = post.0.parent_uri.as_ref();
230
+
231
+
let (root, parent) = if author_threads_only {
232
+
if root_uri.is_some() && parent_uri.is_some() {
233
+
let root = root_uri.and_then(|uri| posts_hyd.get(uri))?;
234
+
let parent = parent_uri.and_then(|uri| posts_hyd.get(uri))?;
235
+
236
+
let root = build_postview(root.clone());
237
+
let parent = build_postview(parent.clone());
238
+
239
+
(Some(root), Some(parent))
240
+
} else {
241
+
(None, None)
242
+
}
243
+
} else {
244
+
let root = root_uri.and_then(|uri| reply_posts.get(uri)).cloned();
245
+
let parent = parent_uri.and_then(|uri| reply_posts.get(uri)).cloned();
177
246
178
-
let root = post.root_uri.as_ref().and_then(|uri| reply_posts.get(uri));
179
-
let parent = post
180
-
.parent_uri
181
-
.as_ref()
182
-
.and_then(|uri| reply_posts.get(uri));
247
+
(root, parent)
248
+
};
183
249
184
-
let reply = if post.parent_uri.is_some() && post.root_uri.is_some() {
185
-
Some(ReplyRef {
186
-
root: root.cloned().map(ReplyRefPost::Post).unwrap_or(
187
-
ReplyRefPost::NotFound {
188
-
uri: post.root_uri.as_ref().unwrap().clone(),
189
-
not_found: true,
190
-
},
191
-
),
192
-
parent: parent.cloned().map(ReplyRefPost::Post).unwrap_or(
193
-
ReplyRefPost::NotFound {
194
-
uri: post.parent_uri.as_ref().unwrap().clone(),
195
-
not_found: true,
196
-
},
197
-
),
198
-
grandparent_author: None,
199
-
})
250
+
if root_uri.is_some() || parent_uri.is_some() {
251
+
Some(ReplyRef {
252
+
root: root.map(postview_to_replyref).unwrap_or(
253
+
ReplyRefPost::NotFound {
254
+
uri: root_uri.unwrap().to_owned(),
255
+
not_found: true,
256
+
},
257
+
),
258
+
parent: parent.map(postview_to_replyref).unwrap_or(
259
+
ReplyRefPost::NotFound {
260
+
uri: parent_uri.unwrap().to_owned(),
261
+
not_found: true,
262
+
},
263
+
),
264
+
grandparent_author: None,
265
+
})
266
+
} else {
267
+
None
268
+
}
200
269
} else {
201
270
None
202
271
};
203
272
204
-
let embed = embeds.get(&post_uri).cloned();
205
-
let labels = post_labels.get(&post_uri).cloned().unwrap_or_default();
206
-
let stats = stats.get(&post_uri).cloned();
207
-
let post = build_postview(post, author.to_owned(), labels, embed, None, stats);
273
+
let reason = match item {
274
+
RawFeedItem::Repost { uri, by, at, .. } => {
275
+
Some(FeedViewPostReason::Repost(FeedReasonRepost {
276
+
by: profiles_hydrated.get(&by).cloned()?,
277
+
uri: Some(uri),
278
+
cid: None,
279
+
indexed_at: at,
280
+
}))
281
+
}
282
+
RawFeedItem::Pin { .. } => Some(FeedViewPostReason::Pin),
283
+
_ => None,
284
+
};
208
285
209
-
Some((
210
-
post_uri,
211
-
FeedViewPost {
212
-
post,
213
-
reply,
214
-
reason: None,
215
-
feed_context: None,
216
-
},
217
-
))
286
+
let post = build_postview(post);
287
+
288
+
Some(FeedViewPost {
289
+
post,
290
+
reply,
291
+
reason,
292
+
feed_context: context,
293
+
})
218
294
})
219
295
.collect()
296
+
}
297
+
298
+
async fn get_post_viewer_state(&self, subject: &str) -> Option<PostViewerState> {
299
+
if let Some(viewer) = &self.current_actor {
300
+
let data = self.loaders.post_state.get(viewer, subject).await?;
301
+
302
+
Some(build_viewer(viewer, data))
303
+
} else {
304
+
None
305
+
}
306
+
}
307
+
308
+
async fn get_post_viewer_states(
309
+
&self,
310
+
subjects: &[String],
311
+
) -> HashMap<String, PostViewerState> {
312
+
if let Some(viewer) = &self.current_actor {
313
+
let data = self.loaders.post_state.get_many(viewer, subjects).await;
314
+
315
+
data.into_iter()
316
+
.map(|(k, state)| (k, build_viewer(viewer, state)))
317
+
.collect()
318
+
} else {
319
+
HashMap::new()
320
+
}
321
+
}
322
+
}
323
+
324
+
fn postview_to_replyref(post: PostView) -> ReplyRefPost {
325
+
match &post.author.viewer {
326
+
Some(v) if v.blocked_by || v.blocking.is_some() => ReplyRefPost::Blocked {
327
+
uri: post.uri,
328
+
blocked: true,
329
+
author: BlockedAuthor {
330
+
did: post.author.did.clone(),
331
+
viewer: post.author.viewer,
332
+
},
333
+
},
334
+
_ => ReplyRefPost::Post(post),
335
+
}
336
+
}
337
+
338
+
#[derive(Debug)]
339
+
pub enum RawFeedItem {
340
+
Pin {
341
+
uri: String,
342
+
context: Option<String>,
343
+
},
344
+
Post {
345
+
uri: String,
346
+
context: Option<String>,
347
+
},
348
+
Repost {
349
+
uri: String,
350
+
post: String,
351
+
by: String,
352
+
at: chrono::DateTime<chrono::Utc>,
353
+
context: Option<String>,
354
+
},
355
+
}
356
+
357
+
impl RawFeedItem {
358
+
fn post_uri(&self) -> &str {
359
+
match self {
360
+
RawFeedItem::Pin { uri, .. } => uri,
361
+
RawFeedItem::Post { uri, .. } => uri,
362
+
RawFeedItem::Repost { post, .. } => post,
363
+
}
364
+
}
365
+
366
+
fn repost_by(&self) -> Option<String> {
367
+
match self {
368
+
RawFeedItem::Repost { by, .. } => Some(by.clone()),
369
+
_ => None,
370
+
}
371
+
}
372
+
373
+
fn context(&self) -> Option<String> {
374
+
match self {
375
+
RawFeedItem::Pin { context, .. } => context.clone(),
376
+
RawFeedItem::Post { context, .. } => context.clone(),
377
+
RawFeedItem::Repost { context, .. } => context.clone(),
378
+
}
220
379
}
221
380
}
+115 -5 parakeet/src/hydration/profile.rs
···
1
+
use crate::db::ProfileStateRet;
1
2
use crate::hydration::map_labels;
2
3
use crate::loaders::ProfileLoaderRet;
3
4
use crate::xrpc::cdn::BskyCdn;
···
5
6
use chrono::TimeDelta;
6
7
use lexica::app_bsky::actor::*;
7
8
use lexica::app_bsky::embed::External;
9
+
use lexica::app_bsky::graph::ListViewBasic;
8
10
use parakeet_db::models;
9
11
use parakeet_index::ProfileStats;
10
12
use std::collections::HashMap;
···
34
36
})
35
37
} else {
36
38
None
39
+
}
40
+
}
41
+
42
+
fn build_viewer(
43
+
data: ProfileStateRet,
44
+
list_mute: Option<ListViewBasic>,
45
+
list_block: Option<ListViewBasic>,
46
+
) -> ProfileViewerState {
47
+
let following = data
48
+
.following
49
+
.map(|rkey| format!("at://{}/app.bsky.graph.follow/{rkey}", data.did));
50
+
let followed_by = data
51
+
.followed
52
+
.map(|rkey| format!("at://{}/app.bsky.graph.follow/{rkey}", data.subject));
53
+
54
+
let blocking = data.list_block.or(data.blocking);
55
+
56
+
ProfileViewerState {
57
+
muted: data.muting.unwrap_or_default(),
58
+
muted_by_list: list_mute,
59
+
blocked_by: data.blocked.unwrap_or_default(), // TODO: this doesn't factor for blocklists atm
60
+
blocking,
61
+
blocking_by_list: list_block,
62
+
following,
63
+
followed_by,
37
64
}
38
65
}
39
66
···
156
183
stats: Option<ProfileStats>,
157
184
labels: Vec<models::Label>,
158
185
verifications: Option<Vec<models::VerificationEntry>>,
186
+
viewer: Option<ProfileViewerState>,
159
187
cdn: &BskyCdn,
160
188
) -> ProfileViewBasic {
161
189
let associated = build_associated(chat_decl, is_labeler, stats, notif_decl);
···
169
197
display_name: profile.display_name,
170
198
avatar,
171
199
associated,
200
+
viewer,
172
201
labels: map_labels(labels),
173
202
verification,
174
203
status,
204
+
pronouns: profile.pronouns,
175
205
created_at: profile.created_at.and_utc(),
176
206
}
177
207
}
···
181
211
stats: Option<ProfileStats>,
182
212
labels: Vec<models::Label>,
183
213
verifications: Option<Vec<models::VerificationEntry>>,
214
+
viewer: Option<ProfileViewerState>,
184
215
cdn: &BskyCdn,
185
216
) -> ProfileView {
186
217
let associated = build_associated(chat_decl, is_labeler, stats, notif_decl);
···
195
226
description: profile.description,
196
227
avatar,
197
228
associated,
229
+
viewer,
198
230
labels: map_labels(labels),
199
231
verification,
200
232
status,
233
+
pronouns: profile.pronouns,
201
234
created_at: profile.created_at.and_utc(),
202
235
indexed_at: profile.indexed_at,
203
236
}
···
208
241
stats: Option<ProfileStats>,
209
242
labels: Vec<models::Label>,
210
243
verifications: Option<Vec<models::VerificationEntry>>,
244
+
viewer: Option<ProfileViewerState>,
211
245
cdn: &BskyCdn,
212
246
) -> ProfileViewDetailed {
213
247
let associated = build_associated(chat_decl, is_labeler, stats, notif_decl);
···
226
260
followers_count: stats.map(|v| v.followers as i64).unwrap_or_default(),
227
261
follows_count: stats.map(|v| v.following as i64).unwrap_or_default(),
228
262
associated,
263
+
viewer,
229
264
labels: map_labels(labels),
230
265
verification,
231
266
status,
267
+
pronouns: profile.pronouns,
268
+
website: profile.website,
232
269
created_at: profile.created_at.and_utc(),
233
270
indexed_at: profile.indexed_at,
234
271
}
···
237
274
impl super::StatefulHydrator<'_> {
238
275
pub async fn hydrate_profile_basic(&self, did: String) -> Option<ProfileViewBasic> {
239
276
let labels = self.get_profile_label(&did).await;
277
+
let viewer = self.get_profile_viewer_state(&did).await;
240
278
let verif = self.loaders.verification.load(did.clone()).await;
241
279
let stats = self.loaders.profile_stats.load(did.clone()).await;
242
280
let profile_info = self.loaders.profile.load(did).await?;
243
281
244
-
Some(build_basic(profile_info, stats, labels, verif, &self.cdn))
282
+
Some(build_basic(
283
+
profile_info,
284
+
stats,
285
+
labels,
286
+
verif,
287
+
viewer,
288
+
&self.cdn,
289
+
))
245
290
}
246
291
247
292
pub async fn hydrate_profiles_basic(
···
249
294
dids: Vec<String>,
250
295
) -> HashMap<String, ProfileViewBasic> {
251
296
let labels = self.get_profile_label_many(&dids).await;
297
+
let viewers = self.get_profile_viewer_states(&dids).await;
252
298
let verif = self.loaders.verification.load_many(dids.clone()).await;
253
299
let stats = self.loaders.profile_stats.load_many(dids.clone()).await;
254
300
let profiles = self.loaders.profile.load_many(dids).await;
···
258
304
.map(|(k, profile_info)| {
259
305
let labels = labels.get(&k).cloned().unwrap_or_default();
260
306
let verif = verif.get(&k).cloned();
307
+
let viewer = viewers.get(&k).cloned();
261
308
let stats = stats.get(&k).cloned();
262
309
263
-
let v = build_basic(profile_info, stats, labels, verif, &self.cdn);
310
+
let v = build_basic(profile_info, stats, labels, verif, viewer, &self.cdn);
264
311
(k, v)
265
312
})
266
313
.collect()
···
268
315
269
316
pub async fn hydrate_profile(&self, did: String) -> Option<ProfileView> {
270
317
let labels = self.get_profile_label(&did).await;
318
+
let viewer = self.get_profile_viewer_state(&did).await;
271
319
let verif = self.loaders.verification.load(did.clone()).await;
272
320
let stats = self.loaders.profile_stats.load(did.clone()).await;
273
321
let profile_info = self.loaders.profile.load(did).await?;
274
322
275
-
Some(build_profile(profile_info, stats, labels, verif, &self.cdn))
323
+
Some(build_profile(
324
+
profile_info,
325
+
stats,
326
+
labels,
327
+
verif,
328
+
viewer,
329
+
&self.cdn,
330
+
))
276
331
}
277
332
278
333
pub async fn hydrate_profiles(&self, dids: Vec<String>) -> HashMap<String, ProfileView> {
279
334
let labels = self.get_profile_label_many(&dids).await;
335
+
let viewers = self.get_profile_viewer_states(&dids).await;
280
336
let verif = self.loaders.verification.load_many(dids.clone()).await;
281
337
let stats = self.loaders.profile_stats.load_many(dids.clone()).await;
282
338
let profiles = self.loaders.profile.load_many(dids).await;
···
286
342
.map(|(k, profile_info)| {
287
343
let labels = labels.get(&k).cloned().unwrap_or_default();
288
344
let verif = verif.get(&k).cloned();
345
+
let viewer = viewers.get(&k).cloned();
289
346
let stats = stats.get(&k).cloned();
290
347
291
-
let v = build_profile(profile_info, stats, labels, verif, &self.cdn);
348
+
let v = build_profile(profile_info, stats, labels, verif, viewer, &self.cdn);
292
349
(k, v)
293
350
})
294
351
.collect()
···
296
353
297
354
pub async fn hydrate_profile_detailed(&self, did: String) -> Option<ProfileViewDetailed> {
298
355
let labels = self.get_profile_label(&did).await;
356
+
let viewer = self.get_profile_viewer_state(&did).await;
299
357
let verif = self.loaders.verification.load(did.clone()).await;
300
358
let stats = self.loaders.profile_stats.load(did.clone()).await;
301
359
let profile_info = self.loaders.profile.load(did).await?;
···
305
363
stats,
306
364
labels,
307
365
verif,
366
+
viewer,
308
367
&self.cdn,
309
368
))
310
369
}
···
314
373
dids: Vec<String>,
315
374
) -> HashMap<String, ProfileViewDetailed> {
316
375
let labels = self.get_profile_label_many(&dids).await;
376
+
let viewers = self.get_profile_viewer_states(&dids).await;
317
377
let verif = self.loaders.verification.load_many(dids.clone()).await;
318
378
let stats = self.loaders.profile_stats.load_many(dids.clone()).await;
319
379
let profiles = self.loaders.profile.load_many(dids).await;
···
323
383
.map(|(k, profile_info)| {
324
384
let labels = labels.get(&k).cloned().unwrap_or_default();
325
385
let verif = verif.get(&k).cloned();
386
+
let viewer = viewers.get(&k).cloned();
326
387
let stats = stats.get(&k).cloned();
327
388
328
-
let v = build_detailed(profile_info, stats, labels, verif, &self.cdn);
389
+
let v = build_detailed(profile_info, stats, labels, verif, viewer, &self.cdn);
329
390
(k, v)
330
391
})
331
392
.collect()
393
+
}
394
+
395
+
async fn get_profile_viewer_state(&self, subject: &str) -> Option<ProfileViewerState> {
396
+
if let Some(viewer) = &self.current_actor {
397
+
let data = self.loaders.profile_state.get(viewer, subject).await?;
398
+
399
+
let list_block = match &data.list_block {
400
+
Some(uri) => self.hydrate_list_basic(uri.clone()).await,
401
+
None => None,
402
+
};
403
+
let list_mute = match &data.list_mute {
404
+
Some(uri) => self.hydrate_list_basic(uri.clone()).await,
405
+
None => None,
406
+
};
407
+
408
+
Some(build_viewer(data, list_mute, list_block))
409
+
} else {
410
+
None
411
+
}
412
+
}
413
+
414
+
async fn get_profile_viewer_states(
415
+
&self,
416
+
dids: &[String],
417
+
) -> HashMap<String, ProfileViewerState> {
418
+
if let Some(viewer) = &self.current_actor {
419
+
let data = self.loaders.profile_state.get_many(viewer, dids).await;
420
+
let lists = data
421
+
.values()
422
+
.flat_map(|v| [&v.list_block, &v.list_mute])
423
+
.flatten()
424
+
.cloned()
425
+
.collect();
426
+
let lists = self.hydrate_lists_basic(lists).await;
427
+
428
+
data.into_iter()
429
+
.map(|(k, state)| {
430
+
let list_mute = state.list_mute.as_ref().and_then(|v| lists.get(v).cloned());
431
+
let list_block = state
432
+
.list_block
433
+
.as_ref()
434
+
.and_then(|v| lists.get(v).cloned());
435
+
436
+
(k, build_viewer(state, list_mute, list_block))
437
+
})
438
+
.collect()
439
+
} else {
440
+
HashMap::new()
441
+
}
332
442
}
333
443
}
+131 parakeet/src/loaders.rs
···
1
1
use crate::cache::PrefixedLoaderCache;
2
+
use crate::db;
2
3
use crate::xrpc::extract::LabelConfigItem;
3
4
use dataloader::async_cached::Loader;
4
5
use dataloader::non_cached::Loader as NonCachedLoader;
···
39
40
pub label: LabelLoader,
40
41
pub labeler: CachingLoader<String, LabelServiceLoaderRet, LabelServiceLoader>,
41
42
pub list: CachingLoader<String, ListLoaderRet, ListLoader>,
43
+
pub list_state: ListStateLoader,
42
44
pub like: NonCachedLoader<String, i32, LikeLoader>,
45
+
pub like_state: LikeRecordLoader,
43
46
pub posts: CachingLoader<String, PostLoaderRet, PostLoader>,
44
47
pub post_stats: NonCachedLoader<String, parakeet_index::PostStats, PostStatsLoader>,
48
+
pub post_state: PostStateLoader,
45
49
pub profile: CachingLoader<String, ProfileLoaderRet, ProfileLoader>,
46
50
pub profile_stats: NonCachedLoader<String, parakeet_index::ProfileStats, ProfileStatsLoader>,
51
+
pub profile_state: ProfileStateLoader,
47
52
pub starterpacks: CachingLoader<String, StarterPackLoaderRet, StarterPackLoader>,
48
53
pub verification: CachingLoader<String, Vec<models::VerificationEntry>, VerificationLoader>,
49
54
}
···
62
67
label: LabelLoader(pool.clone()), // CARE: never cache this.
63
68
labeler: new_plc_loader(LabelServiceLoader(pool.clone(), idxc.clone()), &rc, "labeler", 600),
64
69
like: NonCachedLoader::new(LikeLoader(idxc.clone())),
70
+
like_state: LikeRecordLoader(pool.clone()),
65
71
list: new_plc_loader(ListLoader(pool.clone()), &rc, "list", 600),
72
+
list_state: ListStateLoader(pool.clone()),
66
73
posts: new_plc_loader(PostLoader(pool.clone()), &rc, "post", 3600),
67
74
post_stats: NonCachedLoader::new(PostStatsLoader(idxc.clone())),
75
+
post_state: PostStateLoader(pool.clone()),
68
76
profile: new_plc_loader(ProfileLoader(pool.clone()), &rc, "profile", 3600),
69
77
profile_stats: NonCachedLoader::new(ProfileStatsLoader(idxc.clone())),
78
+
profile_state: ProfileStateLoader(pool.clone()),
70
79
starterpacks: new_plc_loader(StarterPackLoader(pool.clone()), &rc, "starterpacks", 600),
71
80
verification: new_plc_loader(VerificationLoader(pool.clone()), &rc, "verification", 60),
72
81
}
···
95
104
}
96
105
}
97
106
107
+
pub struct LikeRecordLoader(Pool<AsyncPgConnection>);
108
+
impl LikeRecordLoader {
109
+
pub async fn get(&self, did: &str, subject: &str) -> Option<(String, String)> {
110
+
let mut conn = self.0.get().await.unwrap();
111
+
112
+
db::get_like_state(&mut conn, did, subject)
113
+
.await
114
+
.unwrap_or_else(|e| {
115
+
tracing::error!("like state load failed: {e}");
116
+
None
117
+
})
118
+
}
119
+
120
+
pub async fn get_many(
121
+
&self,
122
+
did: &str,
123
+
subjects: &[String],
124
+
) -> HashMap<String, (String, String)> {
125
+
let mut conn = self.0.get().await.unwrap();
126
+
127
+
match db::get_like_states(&mut conn, did, subjects).await {
128
+
Ok(res) => {
129
+
HashMap::from_iter(res.into_iter().map(|(sub, did, rkey)| (sub, (did, rkey))))
130
+
}
131
+
Err(e) => {
132
+
tracing::error!("like state load failed: {e}");
133
+
HashMap::new()
134
+
}
135
+
}
136
+
}
137
+
}
138
+
98
139
pub struct HandleLoader(Pool<AsyncPgConnection>);
99
140
impl BatchFn<String, String> for HandleLoader {
100
141
async fn load(&mut self, keys: &[String]) -> HashMap<String, String> {
···
204
245
}
205
246
}
206
247
248
+
pub struct ProfileStateLoader(Pool<AsyncPgConnection>);
249
+
impl ProfileStateLoader {
250
+
pub async fn get(&self, did: &str, subject: &str) -> Option<db::ProfileStateRet> {
251
+
let mut conn = self.0.get().await.unwrap();
252
+
253
+
db::get_profile_state(&mut conn, did, subject)
254
+
.await
255
+
.unwrap_or_else(|e| {
256
+
tracing::error!("profile state load failed: {e}");
257
+
None
258
+
})
259
+
}
260
+
261
+
pub async fn get_many(
262
+
&self,
263
+
did: &str,
264
+
subjects: &[String],
265
+
) -> HashMap<String, db::ProfileStateRet> {
266
+
let mut conn = self.0.get().await.unwrap();
267
+
268
+
match db::get_profile_states(&mut conn, did, subjects).await {
269
+
Ok(res) => HashMap::from_iter(res.into_iter().map(|v| (v.subject.clone(), v))),
270
+
Err(e) => {
271
+
tracing::error!("profile state load failed: {e}");
272
+
HashMap::new()
273
+
}
274
+
}
275
+
}
276
+
}
277
+
207
278
pub struct ListLoader(Pool<AsyncPgConnection>);
208
279
type ListLoaderRet = (models::List, i64);
209
280
impl BatchFn<String, ListLoaderRet> for ListLoader {
···
236
307
}
237
308
}
238
309
310
+
pub struct ListStateLoader(Pool<AsyncPgConnection>);
311
+
impl ListStateLoader {
312
+
pub async fn get(&self, did: &str, subject: &str) -> Option<db::ListStateRet> {
313
+
let mut conn = self.0.get().await.unwrap();
314
+
315
+
db::get_list_state(&mut conn, did, subject)
316
+
.await
317
+
.unwrap_or_else(|e| {
318
+
tracing::error!("list state load failed: {e}");
319
+
None
320
+
})
321
+
}
322
+
323
+
pub async fn get_many(
324
+
&self,
325
+
did: &str,
326
+
subjects: &[String],
327
+
) -> HashMap<String, db::ListStateRet> {
328
+
let mut conn = self.0.get().await.unwrap();
329
+
330
+
match db::get_list_states(&mut conn, did, subjects).await {
331
+
Ok(res) => HashMap::from_iter(res.into_iter().map(|v| (v.at_uri.clone(), v))),
332
+
Err(e) => {
333
+
tracing::error!("list state load failed: {e}");
334
+
HashMap::new()
335
+
}
336
+
}
337
+
}
338
+
}
339
+
239
340
pub struct FeedGenLoader(Pool<AsyncPgConnection>, parakeet_index::Client);
240
341
impl BatchFn<String, models::FeedGen> for FeedGenLoader {
241
342
async fn load(&mut self, keys: &[String]) -> HashMap<String, models::FeedGen> {
···
302
403
.unwrap()
303
404
.into_inner()
304
405
.entries
406
+
}
407
+
}
408
+
409
+
pub struct PostStateLoader(Pool<AsyncPgConnection>);
410
+
impl PostStateLoader {
411
+
pub async fn get(&self, did: &str, subject: &str) -> Option<db::PostStateRet> {
412
+
let mut conn = self.0.get().await.unwrap();
413
+
414
+
db::get_post_state(&mut conn, did, subject)
415
+
.await
416
+
.unwrap_or_else(|e| {
417
+
tracing::error!("post state load failed: {e}");
418
+
None
419
+
})
420
+
}
421
+
422
+
pub async fn get_many(
423
+
&self,
424
+
did: &str,
425
+
subjects: &[String],
426
+
) -> HashMap<String, db::PostStateRet> {
427
+
let mut conn = self.0.get().await.unwrap();
428
+
429
+
match db::get_post_states(&mut conn, did, subjects).await {
430
+
Ok(res) => HashMap::from_iter(res.into_iter().map(|v| (v.at_uri.clone(), v))),
431
+
Err(e) => {
432
+
tracing::error!("post state load failed: {e}");
433
+
HashMap::new()
434
+
}
435
+
}
305
436
}
306
437
}
307
438
+5 parakeet/src/sql/list_states.sql
···
select l.at_uri, lb.at_uri as block, lm.did is not null as muted
from lists l
         left join list_blocks lb on l.at_uri = lb.list_uri and lb.did = $1
         left join list_mutes lm on l.at_uri = lm.list_uri and lm.did = $1
where l.at_uri = any ($2) and (lm.did is not null or lb.at_uri is not null)
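
To make the two bind parameters concrete ($1 is the requesting viewer's DID, $2 the list URIs being hydrated), the same query run by hand might look like this; the DID and list URI are invented examples.

-- Hypothetical manual run of list_states.sql with literal values in place of $1 and $2.
select l.at_uri, lb.at_uri as block, lm.did is not null as muted
from lists l
         left join list_blocks lb on l.at_uri = lb.list_uri and lb.did = 'did:plc:viewer'
         left join list_mutes lm on l.at_uri = lm.list_uri and lm.did = 'did:plc:viewer'
where l.at_uri = any (array ['at://did:plc:alice/app.bsky.graph.list/3kexamplelist'])
  and (lm.did is not null or lb.at_uri is not null);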
+16 parakeet/src/sql/post_state.sql
···
select bq.*, coalesce(bq.at_uri = pinned_uri, false) as pinned
from (select p.at_uri,
             p.did,
             p.cid,
             l.rkey as like_rkey,
             r.rkey as repost_rkey,
             b.did is not null as bookmarked,
             coalesce(pg.rules && ARRAY ['app.bsky.feed.postgate#disableRule'], false) as embed_disabled
      from posts p
               left join likes l on l.subject = p.at_uri and l.did = $1
               left join reposts r on r.post = p.at_uri and r.did = $1
               left join bookmarks b on b.subject = p.at_uri and b.did = $1
               left join postgates pg on pg.post_uri = p.at_uri
      where p.at_uri = any ($2)
        and (l.rkey is not null or r.rkey is not null or b.did is not null or pg.rules is not null)) bq,
     (select pinned_uri, pinned_cid from profiles where did = $1) pp;
parakeet/src/sql/profile_state.sql (+20)
+with vlb as (select * from v_list_block_exp where did = $1 and subject = any ($2)),
+     vlm as (select * from v_list_mutes_exp where did = $1 and subject = any ($2)),
+     ps as (select * from profile_states where did = $1 and subject = any ($2)),
+     vlb2 as (select subject as did, did as subject, list_uri is not null as blocked
+              from v_list_block_exp
+              where did = any ($2)
+                and subject = $1)
+select distinct on (did, subject) did,
+       subject,
+       muting,
+       ps.blocked or vlb2.blocked as blocked,
+       blocking,
+       following,
+       followed,
+       vlb.list_uri as list_block,
+       vlm.list_uri as list_mute
+from ps
+    full join vlb using (did, subject)
+    full join vlm using (did, subject)
+    full join vlb2 using (did, subject);
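All three new queries (list_states.sql, post_state.sql, profile_state.sql) take the viewer's DID as $1 and an array of subjects as $2. A sketch of how db::get_profile_states might execute this file, by analogy with the diesel::sql_query(include_str!(...)) pattern used for thread.sql further down in this diff; the row struct, the field subset, and the include path are illustrative assumptions, not code from the PR:

    use diesel::sql_types::{Array, Bool, Nullable, Text};
    use diesel_async::{AsyncPgConnection, RunQueryDsl};

    // Trimmed-down row type; the real ProfileStateRet presumably carries the
    // remaining columns (muting, following, followed, list_block, list_mute).
    #[derive(diesel::QueryableByName)]
    pub struct ProfileStateRow {
        #[diesel(sql_type = Text)]
        pub subject: String,
        #[diesel(sql_type = Nullable<Bool>)]
        pub blocked: Option<bool>,
        #[diesel(sql_type = Nullable<Text>)]
        pub blocking: Option<String>,
    }

    pub async fn get_profile_states(
        conn: &mut AsyncPgConnection,
        viewer_did: &str,
        subjects: &[String],
    ) -> diesel::QueryResult<Vec<ProfileStateRow>> {
        // Path is illustrative; it depends on where the helper module lives.
        diesel::sql_query(include_str!("sql/profile_state.sql"))
            .bind::<Text, _>(viewer_did)
            .bind::<Array<Text>, _>(subjects)
            .load(conn)
            .await
    }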
parakeet/src/sql/thread.sql (+2 -2)
···
 with recursive thread as (select at_uri, parent_uri, root_uri, 0 as depth
     from posts
-    where parent_uri = $1
+    where parent_uri = $1 and violates_threadgate=FALSE
     union all
     select p.at_uri, p.parent_uri, p.root_uri, thread.depth + 1
     from posts p
         join thread on p.parent_uri = thread.at_uri
-    where thread.depth <= $2)
+    where thread.depth <= $2 and p.violates_threadgate=FALSE)
 select *
 from thread
 order by depth desc;
parakeet/src/sql/thread_parent.sql (+4 -2)
···
 with recursive parents as (select at_uri, cid, parent_uri, root_uri, 0 as depth
     from posts
-    where at_uri = (select parent_uri from posts where at_uri = $1)
+    where
+        at_uri = (select parent_uri from posts where at_uri = $1 and violates_threadgate = FALSE)
     union all
     select p.at_uri, p.cid, p.parent_uri, p.root_uri, parents.depth + 1
     from posts p
         join parents on p.at_uri = parents.parent_uri
-    where parents.depth <= $2
+    where parents.depth <= $2
+      and p.violates_threadgate = FALSE)
 select *
 from parents
 order by depth desc;
parakeet/src/xrpc/app_bsky/bookmark.rs (+20 -5)
···
 use diesel::prelude::*;
 use diesel_async::RunQueryDsl;
 use lexica::app_bsky::bookmark::{BookmarkView, BookmarkViewItem};
+use lexica::app_bsky::feed::{BlockedAuthor, PostView};
+use lexica::StrongRef;
 use parakeet_db::{models, schema};
 use serde::{Deserialize, Serialize};
-use lexica::StrongRef;

 const BSKY_ALLOWED_TYPES: &[&str] = &["app.bsky.feed.post"];

···
         // otherwise just ditch. we should have one.
         let cid = bookmark.subject_cid.or(maybe_cid)?;

-        let item = maybe_item.map(BookmarkViewItem::Post).unwrap_or(
-            BookmarkViewItem::NotFound {
+        let item = maybe_item
+            .map(postview_to_bvi)
+            .unwrap_or(BookmarkViewItem::NotFound {
                 uri: bookmark.subject.clone(),
                 not_found: true,
-            },
-        );
+            });

         let subject = StrongRef::new_from_str(bookmark.subject, &cid).ok()?;

···

     Ok(Json(GetBookmarksRes { cursor, bookmarks }))
 }
+
+fn postview_to_bvi(post: PostView) -> BookmarkViewItem {
+    match &post.author.viewer {
+        Some(v) if v.blocked_by || v.blocking.is_some() => BookmarkViewItem::Blocked {
+            uri: post.uri,
+            blocked: true,
+            author: BlockedAuthor {
+                did: post.author.did.clone(),
+                viewer: post.author.viewer,
+            },
+        },
+        _ => BookmarkViewItem::Post(post),
+    }
+}
parakeet/src/xrpc/app_bsky/feed/likes.rs (+7 -8)
···
+use crate::hydration::posts::RawFeedItem;
 use crate::hydration::StatefulHydrator;
 use crate::xrpc::error::{Error, XrpcResult};
 use crate::xrpc::extract::{AtpAcceptLabelers, AtpAuth};
···
         .last()
         .map(|(last, _)| last.timestamp_millis().to_string());

-    let at_uris = results
+    let raw_feed = results
         .iter()
-        .map(|(_, uri)| uri.clone())
+        .map(|(_, uri)| RawFeedItem::Post {
+            uri: uri.clone(),
+            context: None,
+        })
         .collect::<Vec<_>>();

-    let mut posts = hyd.hydrate_feed_posts(at_uris).await;
-
-    let feed: Vec<_> = results
-        .into_iter()
-        .filter_map(|(_, uri)| posts.remove(&uri))
-        .collect();
+    let feed = hyd.hydrate_feed_posts(raw_feed, false).await;

     Ok(Json(FeedRes { cursor, feed }))
 }
parakeet/src/xrpc/app_bsky/feed/posts.rs (+152 -99)
···
+use crate::hydration::posts::RawFeedItem;
 use crate::hydration::StatefulHydrator;
 use crate::xrpc::app_bsky::graph::lists::ListWithCursorQuery;
 use crate::xrpc::error::{Error, XrpcResult};
···
 use diesel_async::{AsyncPgConnection, RunQueryDsl};
 use lexica::app_bsky::actor::ProfileView;
 use lexica::app_bsky::feed::{
-    FeedReasonRepost, FeedSkeletonResponse, FeedViewPost, FeedViewPostReason, PostView,
-    SkeletonReason, ThreadViewPost, ThreadViewPostType, ThreadgateView,
+    BlockedAuthor, FeedSkeletonResponse, FeedViewPost, PostView, SkeletonReason, ThreadViewPost,
+    ThreadViewPostType, ThreadgateView,
 };
-use parakeet_db::schema;
+use parakeet_db::{models, schema};
 use reqwest::Url;
 use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
···

     let hyd = StatefulHydrator::new(&state.dataloaders, &state.cdn, &labelers, maybe_auth);

-    let at_uris = skeleton.feed.iter().map(|v| v.post.clone()).collect();
     let repost_skeleton = skeleton
         .feed
         .iter()
···
             _ => None,
         })
         .collect::<Vec<_>>();
-
-    let mut posts = hyd.hydrate_feed_posts(at_uris).await;
-    let mut repost_data = get_skeleton_repost_data(&mut conn, &hyd, repost_skeleton).await;
+    let mut repost_data = get_skeleton_repost_data(&mut conn, repost_skeleton).await;

-    let feed = skeleton
+    let raw_feed = skeleton
         .feed
         .into_iter()
-        .filter_map(|item| {
-            let mut post = posts.remove(&item.post)?;
-            let reason = match item.reason {
-                Some(SkeletonReason::Repost { repost }) => {
-                    repost_data.remove(&repost).map(FeedViewPostReason::Repost)
-                }
-                Some(SkeletonReason::Pin {}) => Some(FeedViewPostReason::Pin),
-                _ => None,
-            };
-
-            post.reason = reason;
-            post.feed_context = item.feed_context;
-
-            Some(post)
+        .filter_map(|v| match v.reason {
+            Some(SkeletonReason::Repost { repost }) => {
+                repost_data
+                    .remove_entry(&repost)
+                    .map(|(uri, (by, at))| RawFeedItem::Repost {
+                        uri,
+                        post: v.post,
+                        by,
+                        at: at.and_utc(),
+                        context: v.feed_context,
+                    })
+            }
+            Some(SkeletonReason::Pin {}) => Some(RawFeedItem::Pin {
+                uri: v.post,
+                context: v.feed_context,
+            }),
+            None => Some(RawFeedItem::Post {
+                uri: v.post,
+                context: v.feed_context,
+            }),
         })
         .collect();

+    let feed = hyd.hydrate_feed_posts(raw_feed, false).await;
+
     Ok(Json(FeedRes {
         cursor: skeleton.cursor,
         feed,
     }))
 }

-#[derive(Debug, Deserialize)]
+#[derive(Debug, Default, Eq, PartialEq, Deserialize)]
 #[serde(rename_all = "snake_case")]
 pub enum GetAuthorFeedFilter {
+    #[default]
     PostsWithReplies,
     PostsNoReplies,
     PostsWithMedia,
···
     PostsWithVideo,
 }

-impl Default for GetAuthorFeedFilter {
-    fn default() -> Self {
-        Self::PostsWithReplies
-    }
-}
-
 #[derive(Debug, Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub struct GetAuthorFeedQuery {
···
     Query(query): Query<GetAuthorFeedQuery>,
 ) -> XrpcResult<Json<FeedRes>> {
     let mut conn = state.pool.get().await?;
-    let hyd = StatefulHydrator::new(&state.dataloaders, &state.cdn, &labelers, maybe_auth);

     let did = get_actor_did(&state.dataloaders, query.actor.clone()).await?;

     check_actor_status(&mut conn, &did).await?;

+    // check if we block the actor or if they block us
+    if let Some(auth) = &maybe_auth {
+        if let Some(psr) = crate::db::get_profile_state(&mut conn, &auth.0, &did).await? {
+            if psr.blocked.unwrap_or_default() {
+                // they block us
+                return Err(Error::new(StatusCode::BAD_REQUEST, "BlockedByActor", None))
+            } else if psr.blocking.is_some() {
+                // we block them
+                return Err(Error::new(StatusCode::BAD_REQUEST, "BlockedActor", None))
+            }
+        }
+    }
+
+    let hyd = StatefulHydrator::new(&state.dataloaders, &state.cdn, &labelers, maybe_auth);
+
+    let pin = match query.include_pins && query.cursor.is_none() {
+        false => None,
+        true => crate::db::get_pinned_post_uri(&mut conn, &did).await?,
+    };
+
     let limit = query.limit.unwrap_or(50).clamp(1, 100);

-    let mut posts_query = schema::posts::table
-        .select((schema::posts::created_at, schema::posts::at_uri))
-        .filter(schema::posts::did.eq(did))
+    let mut posts_query = schema::author_feeds::table
+        .select(models::AuthorFeedItem::as_select())
+        .left_join(schema::posts::table.on(schema::posts::at_uri.eq(schema::author_feeds::post)))
+        .filter(schema::author_feeds::did.eq(&did))
         .into_boxed();

     if let Some(cursor) = datetime_cursor(query.cursor.as_ref()) {
-        posts_query = posts_query.filter(schema::posts::created_at.lt(cursor));
+        posts_query = posts_query.filter(schema::author_feeds::sort_at.lt(cursor));
     }

+    let author_threads_only = query.filter == GetAuthorFeedFilter::PostsAndAuthorThreads;
     posts_query = match query.filter {
-        GetAuthorFeedFilter::PostsWithReplies => posts_query,
+        GetAuthorFeedFilter::PostsWithReplies => {
+            posts_query.filter(schema::author_feeds::typ.eq("post"))
+        }
         GetAuthorFeedFilter::PostsNoReplies => {
             posts_query.filter(schema::posts::parent_uri.is_null())
         }
-        GetAuthorFeedFilter::PostsWithMedia => posts_query.filter(embed_type_filter(&[
-            "app.bsky.embed.video",
-            "app.bsky.embed.images",
-        ])),
+        GetAuthorFeedFilter::PostsWithMedia => posts_query.filter(
+            embed_type_filter(&["app.bsky.embed.video", "app.bsky.embed.images"])
+                .and(schema::author_feeds::typ.eq("post")),
+        ),
         GetAuthorFeedFilter::PostsAndAuthorThreads => posts_query.filter(
             (schema::posts::parent_uri
-                .like(format!("at://{}/%", &query.actor))
+                .like(format!("at://{did}/%"))
                 .or(schema::posts::parent_uri.is_null()))
             .and(
                 schema::posts::root_uri
-                    .like(format!("at://{}/%", &query.actor))
+                    .like(format!("at://{did}/%"))
                     .or(schema::posts::root_uri.is_null()),
             ),
         ),
-        GetAuthorFeedFilter::PostsWithVideo => {
-            posts_query.filter(embed_type_filter(&["app.bsky.embed.video"]))
-        }
+        GetAuthorFeedFilter::PostsWithVideo => posts_query.filter(
+            embed_type_filter(&["app.bsky.embed.video"]).and(schema::author_feeds::typ.eq("post")),
+        ),
     };

     let results = posts_query
-        .order(schema::posts::created_at.desc())
+        .order(schema::author_feeds::sort_at.desc())
         .limit(limit as i64)
-        .load::<(chrono::DateTime<chrono::Utc>, String)>(&mut conn)
+        .load(&mut conn)
         .await?;

     let cursor = results
         .last()
-        .map(|(last, _)| last.timestamp_millis().to_string());
+        .map(|item| item.sort_at.timestamp_millis().to_string());

-    let at_uris = results
-        .iter()
-        .map(|(_, uri)| uri.clone())
+    let mut raw_feed = results
+        .into_iter()
+        .filter_map(|item| match &*item.typ {
+            "post" => Some(RawFeedItem::Post {
+                uri: item.post,
+                context: None,
+            }),
+            "repost" => Some(RawFeedItem::Repost {
+                uri: item.uri,
+                post: item.post,
+                by: item.did,
+                at: item.sort_at,
+                context: None,
+            }),
+            _ => None,
+        })
         .collect::<Vec<_>>();

-    let mut posts = hyd.hydrate_feed_posts(at_uris).await;
+    if let Some(post) = pin {
+        raw_feed.insert(
+            0,
+            RawFeedItem::Pin {
+                uri: post,
+                context: None,
+            },
+        );
+    }

-    let feed = results
-        .into_iter()
-        .filter_map(|(_, uri)| posts.remove(&uri))
-        .collect();
+    let feed = hyd.hydrate_feed_posts(raw_feed, author_threads_only).await;

     Ok(Json(FeedRes { cursor, feed }))
 }
···
         .last()
         .map(|(last, _)| last.timestamp_millis().to_string());

-    let at_uris = results
+    let raw_feed = results
         .iter()
-        .map(|(_, uri)| uri.clone())
+        .map(|(_, uri)| RawFeedItem::Post {
+            uri: uri.clone(),
+            context: None,
+        })
         .collect::<Vec<_>>();

-    let mut posts = hyd.hydrate_feed_posts(at_uris).await;
-
-    let feed = results
-        .into_iter()
-        .filter_map(|(_, uri)| posts.remove(&uri))
-        .collect();
+    let feed = hyd.hydrate_feed_posts(raw_feed, false).await;

     Ok(Json(FeedRes { cursor, feed }))
 }
···
     let depth = query.depth.unwrap_or(6).clamp(0, 1000);
     let parent_height = query.parent_height.unwrap_or(80).clamp(0, 1000);

+    let root = hyd
+        .hydrate_post(uri.clone())
+        .await
+        .ok_or(Error::not_found())?;
+    let threadgate = root.threadgate.clone();
+
+    if let Some(viewer) = &root.author.viewer {
+        if viewer.blocked_by || viewer.blocking.is_some() {
+            return Ok(Json(GetPostThreadRes {
+                thread: ThreadViewPostType::Blocked {
+                    uri,
+                    blocked: true,
+                    author: BlockedAuthor {
+                        did: root.author.did,
+                        viewer: root.author.viewer,
+                    },
+                },
+                threadgate,
+            }));
+        }
+    }
+
     let replies = diesel::sql_query(include_str!("../../../sql/thread.sql"))
         .bind::<diesel::sql_types::Text, _>(&uri)
         .bind::<diesel::sql_types::Integer, _>(depth as i32)
···
     let reply_uris = replies.iter().map(|item| item.at_uri.clone()).collect();
     let parent_uris = parents.iter().map(|item| item.at_uri.clone()).collect();

-    let root = hyd
-        .hydrate_post(uri.clone())
-        .await
-        .ok_or(Error::not_found())?;
     let mut replies_hydrated = hyd.hydrate_posts(reply_uris).await;
     let mut parents_hydrated = hyd.hydrate_posts(parent_uris).await;

···
             continue;
         };

-        entry.push(ThreadViewPostType::Post(Box::new(ThreadViewPost {
-            post,
-            parent: None,
-            replies: this_post_replies,
-        })));
+        entry.push(postview_to_tvpt(post, None, this_post_replies));
     }

     let mut root_parent = None;
···

         let parent = parents_hydrated
             .remove(&parent.at_uri)
-            .map(|post| {
-                ThreadViewPostType::Post(Box::new(ThreadViewPost {
-                    post,
-                    parent: p2,
-                    replies: vec![],
-                }))
-            })
+            .map(|post| postview_to_tvpt(post, p2, Vec::default()))
             .unwrap_or(ThreadViewPostType::NotFound {
                 uri: parent.at_uri.clone(),
                 not_found: true,
···
     }

     let replies = tmpbuf.remove(&root.uri).unwrap_or_default();
-
-    let threadgate = root.threadgate.clone();

     Ok(Json(GetPostThreadRes {
         threadgate,
···
     }
 }

-async fn get_skeleton_repost_data<'a>(
+async fn get_skeleton_repost_data(
     conn: &mut AsyncPgConnection,
-    hyd: &StatefulHydrator<'a>,
     reposts: Vec<String>,
-) -> HashMap<String, FeedReasonRepost> {
+) -> HashMap<String, (String, NaiveDateTime)> {
     let Ok(repost_data) = schema::records::table
         .select((
             schema::records::at_uri,
···
         return HashMap::new();
     };

-    let profiles = repost_data.iter().map(|(_, did, _)| did.clone()).collect();
-    let profiles = hyd.hydrate_profiles_basic(profiles).await;
-
     repost_data
         .into_iter()
-        .filter_map(|(uri, did, indexed_at)| {
-            let by = profiles.get(&did).cloned()?;
+        .map(|(uri, did, at)| (uri, (did, at)))
+        .collect()
+}

-            let repost = FeedReasonRepost {
-                by,
-                uri: Some(uri.clone()),
-                cid: None, // okay, we do have this, but the app doesn't seem to be bothered about not setting it.
-                indexed_at: indexed_at.and_utc(),
-            };
-
-            Some((uri, repost))
-        })
-        .collect()
+fn postview_to_tvpt(
+    post: PostView,
+    parent: Option<ThreadViewPostType>,
+    replies: Vec<ThreadViewPostType>,
+) -> ThreadViewPostType {
+    match &post.author.viewer {
+        Some(v) if v.blocked_by || v.blocking.is_some() => ThreadViewPostType::Blocked {
+            uri: post.uri.clone(),
+            blocked: true,
+            author: BlockedAuthor {
+                did: post.author.did,
+                viewer: post.author.viewer,
+            },
+        },
+        _ => ThreadViewPostType::Post(Box::new(ThreadViewPost {
+            post,
+            parent,
+            replies,
+        })),
+    }
 }
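The feed endpoints now build a Vec<RawFeedItem> and hand it to hydrate_feed_posts(raw_feed, author_threads_only) instead of hydrating bare URIs and stitching repost reasons on afterwards. The enum itself lives in crate::hydration::posts and is not shown in this diff; inferred from the constructor sites above, it presumably looks roughly like this (field types are assumptions):

    use chrono::{DateTime, Utc};

    // Inferred shape of crate::hydration::posts::RawFeedItem; not part of this diff.
    #[derive(Debug, Clone)]
    pub enum RawFeedItem {
        // A plain skeleton entry: the post URI plus optional feed context.
        Post {
            uri: String,
            context: Option<String>,
        },
        // A repost: the repost record's URI, the reposted post, who reposted it and when.
        Repost {
            uri: String,
            post: String,
            by: String,
            at: DateTime<Utc>,
            context: Option<String>,
        },
        // The author's pinned post, injected at the top of get_author_feed.
        Pin {
            uri: String,
            context: Option<String>,
        },
    }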
parakeet/src/xrpc/app_bsky/graph/relations.rs (+1 -1)
parakeet/src/xrpc/app_bsky/mod.rs (+6)
···
 #[rustfmt::skip]
 pub fn routes() -> Router<crate::GlobalState> {
     Router::new()
+        .route("/app.bsky.actor.getPreferences", get(not_implemented))
+        .route("/app.bsky.actor.putPreferences", post(not_implemented))
         .route("/app.bsky.actor.getProfile", get(actor::get_profile))
         .route("/app.bsky.actor.getProfiles", get(actor::get_profiles))
         // TODO: app.bsky.actor.getSuggestions (recs)
···
         // TODO: app.bsky.notification.putPreferences
         // TODO: app.bsky.notification.putPreferencesV2
 }
+
+async fn not_implemented() -> axum::http::StatusCode {
+    axum::http::StatusCode::NOT_IMPLEMENTED
+}
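The two preference routes are stubbed with a handler that only returns a status code; axum accepts this because StatusCode implements IntoResponse. A standalone sketch of the same pattern (paths copied from the diff, but shown here without the crate's GlobalState or the rest of the router):

    use axum::http::StatusCode;
    use axum::routing::{get, post};
    use axum::Router;

    // Placeholder handler: returning a bare StatusCode is enough for a stub route.
    async fn not_implemented() -> StatusCode {
        StatusCode::NOT_IMPLEMENTED
    }

    // Minimal router demonstrating the stubbed preference endpoints.
    fn preference_stubs() -> Router {
        Router::new()
            .route("/app.bsky.actor.getPreferences", get(not_implemented))
            .route("/app.bsky.actor.putPreferences", post(not_implemented))
    }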
parakeet-db/src/models.rs (+25 -4)
···
     pub joined_sp_uri: Option<String>,
     pub joined_sp_cid: Option<String>,

+    pub pronouns: Option<String>,
+    pub website: Option<String>,
+
     pub created_at: NaiveDateTime,
     pub indexed_at: NaiveDateTime,
 }
···
     pub embed: Option<String>,
     pub embed_subtype: Option<String>,

+    pub mentions: Option<Vec<Option<String>>>,
+    pub violates_threadgate: bool,
+
     pub created_at: DateTime<Utc>,
     pub indexed_at: NaiveDateTime,
 }
···
     pub post_uri: String,

     pub hidden_replies: Vec<Option<String>>,
-    pub allow: Vec<Option<String>>,
-    pub allowed_lists: Vec<Option<String>>,
+    pub allow: Option<Vec<Option<String>>>,
+    pub allowed_lists: Option<Vec<Option<String>>>,

     pub record: serde_json::Value,

···
     pub indexed_at: NaiveDateTime,
 }

-#[derive(Clone, Debug, Serialize, Deserialize, Queryable, Selectable, Identifiable, Associations)]
+#[derive(
+    Clone, Debug, Serialize, Deserialize, Queryable, Selectable, Identifiable, Associations,
+)]
 #[diesel(table_name = crate::schema::labeler_defs)]
 #[diesel(belongs_to(LabelerService, foreign_key = labeler))]
 #[diesel(check_for_backend(diesel::pg::Pg))]
···
     pub subject_cid: Option<String>,
     pub subject_type: &'a str,
     pub tags: Vec<String>,
-}
+}
+
+#[derive(Debug, Queryable, Selectable, Identifiable)]
+#[diesel(table_name = crate::schema::author_feeds)]
+#[diesel(primary_key(uri))]
+#[diesel(check_for_backend(diesel::pg::Pg))]
+pub struct AuthorFeedItem {
+    pub uri: String,
+    pub cid: String,
+    pub post: String,
+    pub did: String,
+    pub typ: String,
+    pub sort_at: DateTime<Utc>,
+}
parakeet-db/src/schema.rs (+31 -2)
···
 }

 diesel::table! {
+    author_feeds (uri) {
+        uri -> Text,
+        cid -> Text,
+        post -> Text,
+        did -> Text,
+        typ -> Text,
+        sort_at -> Timestamptz,
+    }
+}
+
+diesel::table! {
     backfill (repo, repo_ver) {
         repo -> Text,
         repo_ver -> Text,
···
         embed_subtype -> Nullable<Text>,
         created_at -> Timestamptz,
         indexed_at -> Timestamp,
+        mentions -> Nullable<Array<Nullable<Text>>>,
+        violates_threadgate -> Bool,
+    }
+}
+
+diesel::table! {
+    profile_states (did, subject) {
+        did -> Text,
+        subject -> Text,
+        muting -> Bool,
+        blocked -> Bool,
+        blocking -> Nullable<Text>,
+        following -> Nullable<Text>,
+        followed -> Nullable<Text>,
     }
 }
···
         joined_sp_cid -> Nullable<Text>,
         created_at -> Timestamp,
         indexed_at -> Timestamp,
+        pronouns -> Nullable<Text>,
+        website -> Nullable<Text>,
     }
 }
···
         cid -> Text,
         post_uri -> Text,
         hidden_replies -> Array<Nullable<Text>>,
-        allow -> Array<Nullable<Text>>,
-        allowed_lists -> Array<Nullable<Text>>,
+        allow -> Nullable<Array<Nullable<Text>>>,
+        allowed_lists -> Nullable<Array<Nullable<Text>>>,
         record -> Jsonb,
         created_at -> Timestamptz,
         indexed_at -> Timestamp,
···

 diesel::allow_tables_to_appear_in_same_query!(
     actors,
+    author_feeds,
     backfill,
     backfill_jobs,
     blocks,
···
     post_embed_video_captions,
     postgates,
     posts,
+    profile_states,
     profiles,
     records,
     reposts,