Alternative ATProto PDS implementation

prototype sequencer use

Changed files: +74 -54
Cargo.lock  -22
···
 dependencies = [
  "anyhow",
  "argon2",
- "async-trait",
  "atrium-api 0.25.3",
  "atrium-crypto",
  "atrium-repo",
- "atrium-xrpc",
- "atrium-xrpc-client",
  "axum",
  "azure_core",
  "azure_identity",
···
  "futures",
  "hex",
  "http-cache-reqwest",
- "ipld-core",
- "k256",
- "lazy_static",
  "memmap2",
  "metrics",
  "metrics-exporter-prometheus",
- "multihash 0.19.3",
- "r2d2",
  "rand 0.8.5",
- "regex",
  "reqwest 0.12.15",
  "reqwest-middleware",
  "rsky-common",
···
  "rsky-syntax",
  "secp256k1",
  "serde",
- "serde_bytes",
  "serde_ipld_dagcbor",
- "serde_ipld_dagjson",
  "serde_json",
  "sha2",
  "thiserror 2.0.12",
···
  "ipld-core",
  "scopeguard",
  "serde",
-]
-
-[[package]]
-name = "serde_ipld_dagjson"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3359b47ba7f4a306ef5984665e10539e212e97217afa489437d533208eecda36"
-dependencies = [
- "ipld-core",
- "serde",
- "serde_json",
 ]

 [[package]]
Cargo.toml  +14 -11
···
+# cargo-features = ["codegen-backend"]
+
 [package]
 name = "bluepds"
 version = "0.0.0"
···
 
 [profile.dev.package."*"]
 opt-level = 3
+# codegen-backend = "cranelift"
 
 [profile.dev]
 opt-level = 1
+# codegen-backend = "cranelift"
 
 [profile.release]
 opt-level = "s" # Slightly slows compile times, great improvements to file size and runtime performance.
···
 # expect_used = "deny"
 
 [dependencies]
-multihash = "0.19.3"
+# multihash = "0.19.3"
 diesel = { version = "2.1.5", features = [
     "chrono",
     "sqlite",
···
     "returning_clauses_for_sqlite_3_35",
 ] }
 diesel_migrations = { version = "2.1.0" }
-r2d2 = "0.8.10"
+# r2d2 = "0.8.10"
 
 atrium-repo = "0.1"
 atrium-api = "0.25"
 # atrium-common = { version = "0.1.2", path = "atrium-common" }
 atrium-crypto = "0.1"
 # atrium-identity = { version = "0.1.4", path = "atrium-identity" }
-atrium-xrpc = "0.12"
-atrium-xrpc-client = "0.5"
+# atrium-xrpc = "0.12"
+# atrium-xrpc-client = "0.5"
 # bsky-sdk = { version = "0.1.19", path = "bsky-sdk" }
 rsky-syntax = { git = "https://github.com/blacksky-algorithms/rsky.git" }
 rsky-repo = { git = "https://github.com/blacksky-algorithms/rsky.git" }
···
 # async-stream = "0.3"
 
 # DAG-CBOR codec
-ipld-core = "0.4.2"
+# ipld-core = "0.4.2"
 serde_ipld_dagcbor = { version = "0.6.2", default-features = false, features = [
     "std",
 ] }
-serde_ipld_dagjson = "0.2.0"
+# serde_ipld_dagjson = "0.2.0"
 cidv10 = { version = "0.10.1", package = "cid" }
 
 # Parsing and validation
···
 hex = "0.4.3"
 # langtag = "0.3"
 # multibase = "0.9.1"
-regex = "1.11.1"
+# regex = "1.11.1"
 serde = { version = "1.0.218", features = ["derive"] }
-serde_bytes = "0.11.17"
+# serde_bytes = "0.11.17"
 # serde_html_form = "0.2.6"
 serde_json = "1.0.139"
 # unsigned-varint = "0.8"
···
 # elliptic-curve = "0.13.6"
 # jose-jwa = "0.1.2"
 # jose-jwk = { version = "0.1.2", default-features = false }
-k256 = "0.13.4"
+# k256 = "0.13.4"
 # p256 = { version = "0.13.2", default-features = false }
 rand = "0.8.5"
 sha2 = "0.10.8"
···
 url = "2.5.4"
 uuid = { version = "1.14.0", features = ["v4"] }
 urlencoding = "2.1.3"
-async-trait = "0.1.88"
-lazy_static = "1.5.0"
+# lazy_static = "1.5.0"
 secp256k1 = "0.28.2"
 dotenvy = "0.15.7"
 deadpool-diesel = { version = "0.6.1", features = [
src/account_manager/mod.rs  +5
···
 use std::collections::BTreeMap;
 use std::env;
 use std::time::SystemTime;
+use tokio::sync::RwLock;
 
 pub(crate) mod helpers {
     pub mod account;
···
         email_token::create_email_token(did, purpose, &self.db).await
     }
 }
+
+pub struct SharedAccountManager {
+    pub account_manager: RwLock<AccountManagerCreator>,
+}
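The new `SharedAccountManager` simply wraps an `AccountManagerCreator` behind a `tokio::sync::RwLock` so a single instance can be shared across request handlers. Below is a minimal usage sketch, not part of the PR: `AccountManagerCreator` is stubbed out so the snippet compiles on its own, and the `lookup_account` handler is purely hypothetical.

```rust
use std::sync::Arc;
use tokio::sync::RwLock;

// Stub standing in for the real `AccountManagerCreator` from `src/account_manager`,
// so this sketch is self-contained.
pub struct AccountManagerCreator;

// Mirrors the struct added in this diff.
pub struct SharedAccountManager {
    pub account_manager: RwLock<AccountManagerCreator>,
}

// Hypothetical handler-side usage: clone the `Arc`, then hold the lock only for
// the duration of a call so concurrent readers are not blocked.
async fn lookup_account(shared: Arc<SharedAccountManager>) {
    let _manager = shared.account_manager.read().await;
    // ... call account-manager helpers through the guard here ...
}

#[tokio::main]
async fn main() {
    let shared = Arc::new(SharedAccountManager {
        account_manager: RwLock::new(AccountManagerCreator),
    });
    lookup_account(shared).await;
}
```

Because this is tokio's async `RwLock`, the guard may be held across `.await` points inside a handler without blocking the executor thread.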
src/lib.rs  +55 -21
···
 #[cfg(test)]
 mod tests;
 
+use account_manager::{AccountManager, SharedAccountManager};
 use anyhow::{Context as _, anyhow};
 use atrium_api::types::string::Did;
 use atrium_crypto::keypair::{Export as _, Secp256k1Keypair};
···
 use firehose::FirehoseProducer;
 use http_cache_reqwest::{CacheMode, HttpCacheOptions, MokaManager};
 use rand::Rng as _;
+use rsky_pds::{crawlers::Crawlers, sequencer::Sequencer};
 use serde::{Deserialize, Serialize};
 use service_proxy::service_proxy;
 use std::{
···
     str::FromStr as _,
     sync::Arc,
 };
-use tokio::net::TcpListener;
+use tokio::{net::TcpListener, sync::RwLock};
 use tower_http::{cors::CorsLayer, trace::TraceLayer};
 use tracing::{info, warn};
 use uuid::Uuid;
···
 pub type Result<T> = std::result::Result<T, Error>;
 /// The reqwest client type with middleware.
 pub type Client = reqwest_middleware::ClientWithMiddleware;
-/// The Azure credential type.
-pub type Cred = Arc<dyn TokenCredential>;
+
+/// The Shared Sequencer which requests crawls from upstream relays and emits events to the firehose.
+pub struct SharedSequencer {
+    /// The sequencer instance.
+    pub sequencer: RwLock<Sequencer>,
+}
 
 #[expect(
     clippy::arbitrary_source_item_ordering,
···
     pub verbosity: Verbosity<InfoLevel>,
 }
 
+/// The actor pools for the database connections.
 pub struct ActorPools {
+    /// The database connection pool for the actor's repository.
     pub repo: Pool,
+    /// The database connection pool for the actor's blobs.
     pub blob: Pool,
 }
 
···
 pub struct AppState {
     /// The application configuration.
     pub config: AppConfig,
-    /// The Azure credential.
-    pub cred: Cred,
     /// The main database connection pool. Used for common PDS data, like invite codes.
     pub db: Pool,
     /// Actor-specific database connection pools. Hashed by DID.
···
     /// The simple HTTP client.
     pub simple_client: reqwest::Client,
     /// The firehose producer.
-    pub firehose: FirehoseProducer,
+    pub sequencer: Arc<SharedSequencer>,
+    /// The account manager.
+    pub account_manager: Arc<SharedAccountManager>,
 
     /// The signing key.
     pub signing_key: SigningKey,
···
     // conn.run_pending_migrations(MIGRATIONS)
     //     .expect("should be able to run migrations");
 
-    let (_fh, fhp) = firehose::spawn(client.clone(), config.clone());
+    let hostname = config.host_name.clone();
+    let crawlers: Vec<String> = config
+        .firehose
+        .relays
+        .iter()
+        .map(|s| s.to_string())
+        .collect();
+    let sequencer = Arc::new(SharedSequencer {
+        sequencer: RwLock::new(Sequencer::new(
+            Crawlers::new(hostname, crawlers.clone()),
+            None,
+        )),
+    });
+    let account_manager = SharedAccountManager {
+        account_manager: RwLock::new(AccountManager::creator()),
+    };
 
     let addr = config
         .listen_address
···
         .layer(CorsLayer::permissive())
         .layer(TraceLayer::new_for_http())
         .with_state(AppState {
-            cred,
             config: config.clone(),
             db: pool.clone(),
             db_actors: actor_pools.clone(),
             client: client.clone(),
             simple_client,
-            firehose: fhp,
+            sequencer: sequencer.clone(),
+            account_manager: Arc::new(account_manager),
             signing_key: skey,
             rotation_key: rkey,
         });
···
 
     let result = conn.interact(move |conn| {
         diesel::sql_query(
-            "SELECT (SELECT COUNT(*) FROM accounts) + (SELECT COUNT(*) FROM invites) AS total_count",
+            "SELECT (SELECT COUNT(*) FROM account) + (SELECT COUNT(*) FROM invite_code) AS total_count",
         )
         .get_result::<TotalCount>(conn)
     })
···
     if c == 0 {
         let uuid = Uuid::new_v4().to_string();
 
+        use crate::models::pds as models;
+        use crate::schema::pds::invite_code::dsl as InviteCode;
         let uuid_clone = uuid.clone();
-        _ = conn
-            .interact(move |conn| {
-                diesel::sql_query(
-                    "INSERT INTO invites (id, did, count, created_at) VALUES (?, NULL, 1, datetime('now'))",
-                )
-                .bind::<diesel::sql_types::Text, _>(uuid_clone)
-                .execute(conn)
-                .context("failed to create new invite code")
-                .expect("should be able to create invite code")
+        drop(
+            conn.interact(move |conn| {
+                diesel::insert_into(InviteCode::invite_code)
+                    .values(models::InviteCode {
+                        code: uuid_clone,
+                        available_uses: 1,
+                        disabled: 0,
+                        for_account: "None".to_owned(),
+                        created_by: "None".to_owned(),
+                        created_at: "None".to_owned(),
+                    })
+                    .execute(conn)
+                    .context("failed to create new invite code")
             })
             .await
-            .expect("should be able to create invite code");
+            .expect("should be able to create invite code"),
+        );
 
         // N.B: This is a sensitive message, so we're bypassing `tracing` here and
         // logging it directly to console.
···
     });
 
     // Now that the app is live, request a crawl from upstream relays.
-    firehose::reconnect_relays(&client, &config).await;
+    let mut background_sequencer = sequencer.sequencer.write().await.clone();
+    drop(tokio::spawn(
+        async move { background_sequencer.start().await },
+    ));
 
     serve
         .await
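The startup change replaces the old `firehose::spawn` / `firehose::reconnect_relays` pair with rsky's `Sequencer`: it is built from the configured relay list, stored in `AppState` as an `Arc<SharedSequencer>`, and a clone of it is started on a background task once the server is listening. Below is a minimal sketch of that clone-then-spawn pattern, not part of the PR: the `Sequencer` here is a stand-in stub (the real one comes from `rsky_pds::sequencer` and is constructed with `Crawlers::new(...)` as shown above), and only the cloneability and async `start()` loop used in the diff are assumed.

```rust
use std::sync::Arc;
use tokio::sync::RwLock;

// Stand-in for `rsky_pds::sequencer::Sequencer`: cloneable, with a long-running
// async `start()` loop, which is all this pattern relies on.
#[derive(Clone)]
struct Sequencer;

impl Sequencer {
    async fn start(&mut self) {
        // ... request crawls from relays and emit firehose events ...
    }
}

// Mirrors the `SharedSequencer` added in this diff.
pub struct SharedSequencer {
    pub sequencer: RwLock<Sequencer>,
}

#[tokio::main]
async fn main() {
    let shared = Arc::new(SharedSequencer {
        sequencer: RwLock::new(Sequencer),
    });

    // Clone the sequencer out of the lock before spawning, so the long-running
    // `start()` loop never holds the `RwLock` that request handlers also use.
    let mut background = shared.sequencer.write().await.clone();
    drop(tokio::spawn(async move { background.start().await }));

    // ... server setup would go here, with `shared` stored in the app state ...
}
```

Note that the background task runs on a clone of the sequencer, so this pattern assumes the sequencer's internals are shared between clones (e.g. reference-counted) rather than owned per clone.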