Alternative ATProto PDS implementation

implement actor_store
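This change moves the actor_store readers (blob, record, preference) from holding a single deadpool_diesel::Connection<SqliteConnection> to holding a deadpool_diesel::Pool, checking a connection out with .get().await? before each .interact(...) call, while SqlRepoReader keeps one pre-checked-out deadpool_diesel::sqlite::Object. The sketch below illustrates the pool-plus-interact pattern the diff relies on; it is a minimal example only, assuming the same deadpool-diesel and Diesel APIs used in the diff, with a placeholder database URL, pool size, and table.

use deadpool_diesel::{Manager, Pool, Runtime};
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;

// The same pool type the readers now store.
type SqlitePool = Pool<Manager<SqliteConnection>, deadpool_diesel::sqlite::Object>;

async fn demo(database_url: &str) -> anyhow::Result<()> {
    // Build a small pool of SQLite connections (URL and pool size are illustrative).
    let manager = Manager::new(database_url, Runtime::Tokio1);
    let pool: SqlitePool = Pool::builder(manager).max_size(4).build()?;

    // Check a connection out of the pool, then run blocking Diesel work on it.
    let rows_affected = pool
        .get()
        .await? // pool / checkout error
        .interact(|conn| {
            diesel::sql_query("CREATE TABLE IF NOT EXISTS demo (id INTEGER)").execute(conn)?;
            diesel::sql_query("INSERT INTO demo (id) VALUES (1)").execute(conn)
        })
        .await
        .expect("interact closure panicked")?; // InteractError first, then the Diesel result
    println!("rows affected: {rows_affected}");
    Ok(())
}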

Changed files (+149 -32)
src/actor_store/blob.rs (+44 -3)
···
    /// DID of the actor
    pub did: String,
    /// Database connection
-   pub db: deadpool_diesel::Connection<SqliteConnection>,
+   pub db: deadpool_diesel::Pool<
+       deadpool_diesel::Manager<SqliteConnection>,
+       deadpool_diesel::sqlite::Object,
+   >,
}

impl BlobReader {
    /// Create a new blob reader
-   pub fn new(blobstore: BlobStoreSql, db: deadpool_diesel::Connection<SqliteConnection>) -> Self {
+   pub fn new(
+       blobstore: BlobStoreSql,
+       db: deadpool_diesel::Pool<
+           deadpool_diesel::Manager<SqliteConnection>,
+           deadpool_diesel::sqlite::Object,
+       >,
+   ) -> Self {
        BlobReader {
            did: blobstore.did.clone(),
            blobstore,
···
        let did = self.did.clone();
        let found = self
            .db
+           .get()
+           .await?
            .interact(move |conn| {
                BlobSchema::blob
                    .filter(BlobSchema::did.eq(did))
···
        let did = self.did.clone();
        let res = self
            .db
+           .get()
+           .await?
            .interact(move |conn| {
                let results = RecordBlobSchema::record_blob
                    .filter(RecordBlobSchema::blobCid.eq(cid.to_string()))
···
        use rsky_pds::schema::pds::blob::dsl as BlobSchema;

        let did = self.did.clone();
-       self.db.interact(move |conn| {
+       self.db.get().await?.interact(move |conn| {
            let BlobMetadata {
                temp_key,
                size,
···
        let uris_clone = uris.clone();
        let deleted_repo_blobs: Vec<models::RecordBlob> = self
            .db
+           .get()
+           .await?
            .interact(move |conn| {
                RecordBlobSchema::record_blob
                    .filter(RecordBlobSchema::recordUri.eq_any(&uris_clone))
···
        // Now perform the delete
        let uris_clone = uris.clone();
        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                delete(RecordBlobSchema::record_blob)
                    .filter(RecordBlobSchema::recordUri.eq_any(uris_clone))
···
        let did_clone = self.did.clone();
        let duplicated_cids: Vec<String> = self
            .db
+           .get()
+           .await?
            .interact(move |conn| {
                RecordBlobSchema::record_blob
                    .filter(RecordBlobSchema::blobCid.eq_any(cids_clone))
···
        let cids = cids_to_delete.clone();
        let did_clone = self.did.clone();
        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                delete(BlobSchema::blob)
                    .filter(BlobSchema::cid.eq_any(cids))
···

        let found = self
            .db
+           .get()
+           .await?
            .interact(move |conn| {
                BlobSchema::blob
                    .filter(
···
            .await?;
        }
        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                update(BlobSchema::blob)
                    .filter(BlobSchema::tempKey.eq(found.temp_key))
···
        let did = self.did.clone();

        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                insert_into(RecordBlobSchema::record_blob)
                    .values((
···

        let did = self.did.clone();
        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                let res = BlobSchema::blob
                    .filter(BlobSchema::did.eq(&did))
···

        let did = self.did.clone();
        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                let res: i64 = RecordBlobSchema::record_blob
                    .filter(RecordBlobSchema::did.eq(&did))
···

        let did = self.did.clone();
        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                let ListMissingBlobsOpts { cursor, limit } = opts;
···
            builder = builder.filter(RecordBlobSchema::blobCid.gt(cursor));
        }
        self.db
+           .get()
+           .await?
            .interact(move |conn| builder.load(conn))
            .await
            .expect("Failed to list blobs")?
···
            builder = builder.filter(RecordBlobSchema::blobCid.gt(cursor));
        }
        self.db
+           .get()
+           .await?
            .interact(move |conn| builder.load(conn))
            .await
            .expect("Failed to list blobs")?
···
        use rsky_pds::schema::pds::blob::dsl as BlobSchema;

        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                let res = BlobSchema::blob
                    .filter(BlobSchema::cid.eq(cid.to_string()))
···
        let did_clone = self.did.clone();

        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                update(BlobSchema::blob)
                    .filter(BlobSchema::cid.eq(blob_cid))
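Each call site above now has two fallible layers: .get().await? can fail while checking a connection out of the pool, and the interact(...) future returns a join-style error (handled in this diff with expect) that wraps the actual Diesel result. As an illustration only, not part of the diff, a small helper reusing the SqlitePool alias from the earlier sketch could surface both layers through anyhow:

// Hypothetical helper, sketch only: run a Diesel closure on a pooled connection
// and propagate the pool error, the interact error, and the Diesel error.
async fn with_conn<T, F>(pool: &SqlitePool, f: F) -> anyhow::Result<T>
where
    F: FnOnce(&mut SqliteConnection) -> diesel::QueryResult<T> + Send + 'static,
    T: Send + 'static,
{
    let result = pool
        .get()
        .await? // pool / checkout error
        .interact(f)
        .await
        .map_err(|e| anyhow::anyhow!("database interact failed: {e}"))?; // closure panicked or was aborted
    Ok(result?) // the Diesel error itself
}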
src/actor_store/mod.rs (+17 -15)
···
    pub fn new(
        did: String,
        blobstore: BlobStoreSql,
-       db: deadpool_diesel::Connection<SqliteConnection>,
+       db: deadpool_diesel::Pool<
+           deadpool_diesel::Manager<SqliteConnection>,
+           deadpool_diesel::sqlite::Object,
+       >,
+       conn: deadpool_diesel::sqlite::Object,
    ) -> Self {
        ActorStore {
-           storage: Arc::new(RwLock::new(SqlRepoReader::new(
-               did.clone(),
-               None,
-               db.clone(),
-           ))),
+           storage: Arc::new(RwLock::new(SqlRepoReader::new(did.clone(), None, conn))),
            record: RecordReader::new(did.clone(), db.clone()),
            pref: PreferenceReader::new(did.clone(), db.clone()),
            did,
-           blob: BlobReader::new(blobstore, db.clone()), // Unlike TS impl, just use blob reader vs generator
+           blob: BlobReader::new(blobstore, db.clone()),
        }
    }

···
    pub async fn destroy(&mut self) -> Result<()> {
        let did: String = self.did.clone();
        let storage_guard = self.storage.read().await;
-       let db: deadpool_diesel::Connection<SqliteConnection> = storage_guard.db.clone();
        use rsky_pds::schema::pds::blob::dsl as BlobSchema;

-       let blob_rows: Vec<String> = db
-           .run(move |conn| {
+       let blob_rows: Vec<String> = storage_guard
+           .db
+           .interact(move |conn| {
                BlobSchema::blob
                    .filter(BlobSchema::did.eq(did))
                    .select(BlobSchema::cid)
                    .get_results(conn)
            })
-           .await?;
+           .await
+           .expect("Failed to get blob rows")?;
        let cids = blob_rows
            .into_iter()
            .map(|row| Ok(Cid::from_str(&row)?))
···
        }
        let did: String = self.did.clone();
        let storage_guard = self.storage.read().await;
-       let db: deadpool_diesel::Connection<SqliteConnection> = storage_guard.db.clone();
        use rsky_pds::schema::pds::record::dsl as RecordSchema;

        let cid_strs: Vec<String> = cids.into_iter().map(|c| c.to_string()).collect();
        let touched_uri_strs: Vec<String> = touched_uris.iter().map(|t| t.to_string()).collect();
-       let res: Vec<String> = db
-           .run(move |conn| {
+       let res: Vec<String> = storage_guard
+           .db
+           .interact(move |conn| {
                RecordSchema::record
                    .filter(RecordSchema::did.eq(did))
                    .filter(RecordSchema::cid.eq_any(cid_strs))
···
                    .select(RecordSchema::cid)
                    .get_results(conn)
            })
-           .await?;
+           .await
+           .expect("Failed to get duplicate record cids")?;
        res.into_iter()
            .map(|row| Cid::from_str(&row).map_err(|error| anyhow::Error::new(error)))
            .collect::<Result<Vec<Cid>>>()
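ActorStore::new now takes both the shared pool, which is cloned into the record, preference, and blob readers, and a single pre-checked-out connection that is handed to SqlRepoReader; the caller-side wiring is shown in the src/endpoints/repo/apply_writes.rs hunk at the end of this diff.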
src/actor_store/preference.rs (+14 -3)
···

pub struct PreferenceReader {
    pub did: String,
-   pub db: deadpool_diesel::Connection<SqliteConnection>,
+   pub db: deadpool_diesel::Pool<
+       deadpool_diesel::Manager<SqliteConnection>,
+       deadpool_diesel::sqlite::Object,
+   >,
}

impl PreferenceReader {
-   pub fn new(did: String, db: deadpool_diesel::Connection<SqliteConnection>) -> Self {
+   pub fn new(
+       did: String,
+       db: deadpool_diesel::Pool<
+           deadpool_diesel::Manager<SqliteConnection>,
+           deadpool_diesel::sqlite::Object,
+       >,
+   ) -> Self {
        PreferenceReader { did, db }
    }
···

        let did = self.did.clone();
        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                let prefs_res = AccountPrefSchema::account_pref
                    .filter(AccountPrefSchema::did.eq(&did))
···
        scope: AuthScope,
    ) -> Result<()> {
        let did = self.did.clone();
-       self.db
+       self.db.get().await?
            .interact(move |conn| {
                match values
                    .iter()
src/actor_store/record.rs (+37 -2)
···
/// Combined handler for record operations with both read and write capabilities.
pub(crate) struct RecordReader {
    /// Database connection.
-   pub db: deadpool_diesel::Connection<SqliteConnection>,
+   pub db: deadpool_diesel::Pool<
+       deadpool_diesel::Manager<SqliteConnection>,
+       deadpool_diesel::sqlite::Object,
+   >,
    /// DID of the actor.
    pub did: String,
}

impl RecordReader {
    /// Create a new record handler.
-   pub(crate) fn new(did: String, db: deadpool_diesel::Connection<SqliteConnection>) -> Self {
+   pub(crate) fn new(
+       did: String,
+       db: deadpool_diesel::Pool<
+           deadpool_diesel::Manager<SqliteConnection>,
+           deadpool_diesel::sqlite::Object,
+       >,
+   ) -> Self {
        Self { did, db }
    }
···

        let other_did = self.did.clone();
        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                let res: i64 = record.filter(did.eq(&other_did)).count().get_result(conn)?;
                Ok(res)
···

        let other_did = self.did.clone();
        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                let collections = record
                    .filter(did.eq(&other_did))
···
        }
        let res: Vec<(Record, RepoBlock)> = self
            .db
+           .get()
+           .await?
            .interact(move |conn| builder.load(conn))
            .await
            .expect("Failed to load records")?;
···
        }
        let record: Option<(Record, RepoBlock)> = self
            .db
+           .get()
+           .await?
            .interact(move |conn| builder.first(conn).optional())
            .await
            .expect("Failed to load record")?;
···
        }
        let record_uri = self
            .db
+           .get()
+           .await?
            .interact(move |conn| builder.first::<String>(conn).optional())
            .await
            .expect("Failed to check record")?;
···

        let res = self
            .db
+           .get()
+           .await?
            .interact(move |conn| {
                RecordSchema::record
                    .select(RecordSchema::takedownRef)
···

        let res = self
            .db
+           .get()
+           .await?
            .interact(move |conn| {
                RecordSchema::record
                    .select(RecordSchema::cid)
···

        let res = self
            .db
+           .get()
+           .await?
            .interact(move |conn| {
                RecordSchema::record
                    .inner_join(
···
        // Track current version of record
        let (record, uri) = self
            .db
+           .get()
+           .await?
            .interact(move |conn| {
                insert_into(RecordSchema::record)
                    .values(row)
···
        use rsky_pds::schema::pds::record::dsl as RecordSchema;
        let uri = uri.to_string();
        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                delete(RecordSchema::record)
                    .filter(RecordSchema::uri.eq(&uri))
···
        use rsky_pds::schema::pds::backlink::dsl as BacklinkSchema;
        let uri = uri.to_string();
        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                delete(BacklinkSchema::backlink)
                    .filter(BacklinkSchema::uri.eq(uri))
···
        } else {
            use rsky_pds::schema::pds::backlink::dsl as BacklinkSchema;
            self.db
+               .get()
+               .await?
                .interact(move |conn| {
                    insert_or_ignore_into(BacklinkSchema::backlink)
                        .values(&backlinks)
···
        let uri_string = uri.to_string();

        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                update(RecordSchema::record)
                    .filter(RecordSchema::uri.eq(uri_string))
src/actor_store/sql_blob.rs (+29 -3)
···
/// SQL-based implementation of blob storage
pub struct BlobStoreSql {
    /// Database connection for metadata
-   pub db: deadpool_diesel::Connection<SqliteConnection>,
+   pub db: deadpool_diesel::Pool<
+       deadpool_diesel::Manager<SqliteConnection>,
+       deadpool_diesel::sqlite::Object,
+   >,
    /// DID of the actor
    pub did: String,
}
···

impl BlobStoreSql {
    /// Create a new SQL-based blob store for the given DID
-   pub fn new(did: String, db: deadpool_diesel::Connection<SqliteConnection>) -> Self {
+   pub fn new(
+       did: String,
+       db: deadpool_diesel::Pool<
+           deadpool_diesel::Manager<SqliteConnection>,
+           deadpool_diesel::sqlite::Object,
+       >,
+   ) -> Self {
        BlobStoreSql { db, did }
    }

    // /// Create a factory function for blob stores
    // pub fn creator(
-   //     db: deadpool_diesel::Connection<SqliteConnection>,
+   //     db: deadpool_diesel::Pool<
+   //         deadpool_diesel::Manager<SqliteConnection>,
+   //         deadpool_diesel::sqlite::Object,
+   //     >,
    // ) -> Box<dyn Fn(String) -> BlobStoreSql> {
    //     let db_clone = db.clone();
    //     Box::new(move |did: String| BlobStoreSql::new(did, db_clone.clone()))
···

        // Store directly in the database
        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                let data_clone = bytes.clone();
                let entry = BlobEntry {
···

        // Update the quarantine flag in the database
        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                diesel::update(blobs::table)
                    .filter(blobs::cid.eq(&cid_str))
···

        // Update the quarantine flag in the database
        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                diesel::update(blobs::table)
                    .filter(blobs::cid.eq(&cid_str))
···
        // Get the blob data from the database
        let blob_data = self
            .db
+           .get()
+           .await?
            .interact(move |conn| {
                blobs
                    .filter(self::blobs::cid.eq(&cid_str))
···

        // Delete from database
        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                diesel::delete(blobs)
                    .filter(self::blobs::cid.eq(&blob_cid))
···

        // Delete all blobs in one operation
        self.db
+           .get()
+           .await?
            .interact(move |conn| {
                diesel::delete(blobs)
                    .filter(self::blobs::cid.eq_any(cid_strings))
···

        let exists = self
            .db
+           .get()
+           .await?
            .interact(move |conn| {
                diesel::select(diesel::dsl::exists(
                    blobs
src/actor_store/sql_repo.rs (+2 -6)
···

pub struct SqlRepoReader {
    pub cache: Arc<RwLock<BlockMap>>,
-   pub db: deadpool_diesel::Connection<SqliteConnection>,
+   pub db: deadpool_diesel::sqlite::Object,
    pub root: Option<Cid>,
    pub rev: Option<String>,
    pub now: String,
···

// Basically handles getting ipld blocks from db
impl SqlRepoReader {
-   pub fn new(
-       did: String,
-       now: Option<String>,
-       db: deadpool_diesel::Connection<SqliteConnection>,
-   ) -> Self {
+   pub fn new(did: String, now: Option<String>, db: deadpool_diesel::sqlite::Object) -> Self {
        let now = now.unwrap_or_else(rsky_common::now);
        SqlRepoReader {
            cache: Arc::new(RwLock::new(BlockMap::new())),
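SqlRepoReader is the one component that now owns a deadpool_diesel::sqlite::Object, i.e. a connection already checked out of the pool, rather than the pool itself, so all of its queries reuse a single connection for the reader's lifetime. A minimal construction sketch, assuming a pool of the type shown earlier and a did string (both placeholders):

// Sketch only: check one connection out up front and hand it to the repo reader.
let conn: deadpool_diesel::sqlite::Object = pool.get().await?;
let repo_reader = SqlRepoReader::new(did.clone(), None, conn);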
src/endpoints/repo/apply_writes.rs (+6 -0)
···
    let actor_db = db_actors
        .get(did)
        .ok_or_else(|| anyhow!("Actor DB not found"))?;
+   let conn = actor_db
+       .repo
+       .get()
+       .await
+       .context("Failed to get actor db connection")?;
    let mut actor_store = ActorStore::new(
        did.clone(),
        BlobStoreSql::new(did.clone(), actor_db.blob),
        actor_db.repo,
+       conn,
    );

    let commit = actor_store
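With this wiring, the endpoint checks a dedicated connection out of actor_db.repo for the repo reader, converting the pool error with .context(...), while the pool itself is still passed to ActorStore::new for the blob, record, and preference readers.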