Rust AppView - highly experimental!
fork

Configure Feed

Select the types of activity you want to include in your feed.

fix: cursors, list URIs

+38 -27
+8 -5
parakeet/src/entities/core/list.rs
··· 419 419 list_rkey: &str, 420 420 cursor: Option<&chrono::DateTime<chrono::Utc>>, 421 421 limit: u8, 422 - ) -> eyre::Result<Vec<(i32, chrono::DateTime<chrono::Utc>)>> { 422 + ) -> eyre::Result<Vec<(i32, i32, i64, chrono::DateTime<chrono::Utc>)>> { 423 423 let mut conn = self.db_pool.get().await?; 424 424 425 425 use diesel::sql_types::{BigInt, Integer, Text}; ··· 428 428 #[derive(diesel::QueryableByName)] 429 429 struct ItemRow { 430 430 #[diesel(sql_type = Integer)] 431 - subject_actor_id: i32, 431 + actor_id: i32, 432 432 #[diesel(sql_type = BigInt)] 433 433 rkey: i64, 434 + #[diesel(sql_type = Integer)] 435 + subject_actor_id: i32, 434 436 } 435 437 436 438 // Simple query - just get the data we need 437 439 let results: Vec<ItemRow> = diesel::sql_query( 438 440 r#" 439 441 SELECT 440 - subject_actor_id, 441 - rkey 442 + actor_id, 443 + rkey, 444 + subject_actor_id 442 445 FROM list_items 443 446 WHERE list_owner_actor_id = $1 444 447 AND list_rkey = $2 ··· 466 469 } 467 470 } 468 471 469 - processed.push((row.subject_actor_id, created_at)); 472 + processed.push((row.actor_id, row.subject_actor_id, row.rkey, created_at)); 470 473 471 474 if processed.len() >= limit as usize { 472 475 break;
+2 -13
parakeet/src/entities/core/post.rs
··· 480 480 481 481 // Extract text from content if available 482 482 let text = if let Some(ref content_bytes) = data.post.content { 483 - // Content is zstd compressed 483 + // Content is zstd compressed plain text 484 484 // Create a codec and decompress the content 485 485 let codec = parakeet_db::compression::PostContentCodec::new(); 486 - if let Ok(decompressed_str) = codec.decompress(content_bytes.as_slice()) { 487 - if let Ok(record) = serde_json::from_str::<serde_json::Value>(&decompressed_str) { 488 - record.get("text") 489 - .and_then(|t| t.as_str()) 490 - .map(|s| s.to_string()) 491 - .unwrap_or_default() 492 - } else { 493 - String::new() 494 - } 495 - } else { 496 - String::new() 497 - } 486 + codec.decompress(content_bytes.as_slice()).unwrap_or_default() 498 487 } else { 499 488 String::new() 500 489 };
+1 -1
parakeet/src/xrpc/app_bsky/feed/feedgen.rs
··· 144 144 State(state): State<GlobalState>, 145 145 AtpAcceptLabelers(_labelers): AtpAcceptLabelers, 146 146 maybe_auth: Option<AtpAuth>, 147 - Query(query): Query<GetFeedGeneratorsQuery>, 147 + axum_extra::extract::Query(query): axum_extra::extract::Query<GetFeedGeneratorsQuery>, 148 148 ) -> XrpcResult<Json<GetFeedGeneratorsRes>> { 149 149 // Get viewer DID if authenticated 150 150 let viewer_did = maybe_auth.as_ref().map(|auth| auth.0.clone());
+2 -2
parakeet/src/xrpc/app_bsky/feed/get_timeline.rs
··· 116 116 let cursor = if has_next && timeline_items.len() > limit as usize { 117 117 let last_rkey = timeline_items[limit as usize - 1].0; 118 118 let timestamp = parakeet_db::tid_util::tid_to_datetime(last_rkey); 119 - Some(timestamp.timestamp_millis().to_string()) 119 + Some(timestamp.to_rfc3339()) 120 120 } else { 121 121 None 122 122 }; ··· 263 263 let cursor = if has_next && posts_to_return.len() == limit as usize { 264 264 let last_rkey = posts_to_return[posts_to_return.len() - 1].1; 265 265 let timestamp = parakeet_db::tid_util::tid_to_datetime(last_rkey); 266 - Some(timestamp.timestamp_millis().to_string()) 266 + Some(timestamp.to_rfc3339()) 267 267 } else { 268 268 None 269 269 };
+24 -5
parakeet/src/xrpc/app_bsky/graph/lists.rs
··· 128 128 // Calculate next cursor 129 129 let cursor = item_results 130 130 .last() 131 - .map(|item| item.1.to_rfc3339()); 131 + .map(|item| item.3.to_rfc3339()); 132 132 133 133 // Get profiles for all subject actor IDs 134 134 let subject_ids: Vec<i32> = item_results 135 135 .iter() 136 - .map(|item| item.0) 136 + .map(|item| item.1) // subject_actor_id 137 + .collect(); 138 + 139 + // Also get DIDs for list item actors 140 + let item_actor_ids: Vec<i32> = item_results 141 + .iter() 142 + .map(|item| item.0) // actor_id 137 143 .collect(); 138 144 139 145 // Get the actual actor profiles 140 146 let profiles = state.profile_entity.get_profile_views(&subject_ids).await; 141 147 148 + // Get DIDs for list item actors 149 + let mut item_actor_dids = std::collections::HashMap::new(); 150 + for actor_id in &item_actor_ids { 151 + if let Ok(did) = state.profile_entity.get_did_by_id(*actor_id).await { 152 + item_actor_dids.insert(*actor_id, did); 153 + } 154 + } 155 + 142 156 // Map actor_id to ProfileView 143 157 let mut actor_views = std::collections::HashMap::new(); 144 158 for (idx, profile) in profiles.into_iter().enumerate() { ··· 150 164 // Build ListItemViews 151 165 let items: Vec<ListItemView> = item_results 152 166 .into_iter() 153 - .filter_map(|(subject_actor_id, _created_at)| { 167 + .filter_map(|(actor_id, subject_actor_id, rkey, _created_at)| { 154 168 let subject = actor_views.get(&subject_actor_id).cloned()?; 169 + let item_did = item_actor_dids.get(&actor_id)?; 155 170 156 - // TODO: Construct proper item URI 157 - let item_uri = format!("at://unknown/app.bsky.graph.listitem/unknown"); 171 + // Construct proper item URI 172 + let item_uri = format!( 173 + "at://{}/app.bsky.graph.listitem/{}", 174 + item_did, 175 + parakeet_db::tid_util::encode_tid(rkey) 176 + ); 158 177 159 178 Some(ListItemView { 160 179 uri: item_uri,
+1 -1
parakeet/src/xrpc/app_bsky/mod.rs
··· 57 57 .route("/app.bsky.bookmark.getBookmarks", get(bookmark::get_bookmarks)) 58 58 .route("/app.bsky.feed.getActorFeeds", get(feed::feedgen::get_actor_feeds)) 59 59 .route("/app.bsky.feed.getActorLikes", get(feed::likes::get_actor_likes)) 60 - .route("/app.bsky.feed.getAuthorFeed", get(feed::posts::get_author_feed)) 60 + .route("/app.bsky.feed.getAuthorFeed", get(feed::get_timeline::get_author_feed)) 61 61 .route("/app.bsky.feed.getFeed", get(feed::posts::get_feed)) 62 62 .route("/app.bsky.feed.getFeedGenerator", get(feed::feedgen::get_feed_generator)) 63 63 .route("/app.bsky.feed.getFeedGenerators", get(feed::feedgen::get_feed_generators))