personal activity index (bluesky, leaflet, substack)
pai.desertthunder.dev
use pai_core::{CorsConfig, Item, ListFilter, SourceKind};
use serde::{Deserialize, Serialize};
use wasm_bindgen::JsValue;
use worker::*;

#[derive(Serialize, Deserialize)]
struct ApiDocumentation {
    name: String,
    version: String,
    description: String,
    endpoints: Vec<Endpoint>,
    sources: Sources,
    scheduled_sync: ScheduledSync,
}

#[derive(Serialize, Deserialize)]
struct Endpoint {
    method: String,
    path: String,
    url: Option<String>,
    description: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    parameters: Option<Vec<Parameter>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    examples: Option<Vec<String>>,
    response: serde_json::Value,
}

#[derive(Serialize, Deserialize)]
struct Parameter {
    name: String,
    r#type: String,
    required: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    default: Option<serde_json::Value>,
    description: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    values: Option<Vec<String>>,
}

#[derive(Serialize, Deserialize)]
struct Sources {
    substack: String,
    bluesky: String,
    leaflet: String,
    bearblog: String,
}

#[derive(Serialize, Deserialize)]
struct ScheduledSync {
    description: String,
    schedule: String,
}

#[derive(Deserialize)]
struct SyncConfig {
    substack: Option<SubstackConfig>,
    bluesky: Option<BlueskyConfig>,
    leaflet: Vec<LeafletConfig>,
    bearblog: Vec<BearBlogConfig>,
}

#[derive(Deserialize)]
struct SubstackConfig {
    base_url: String,
}

#[derive(Deserialize)]
struct BlueskyConfig {
    handle: String,
}

#[derive(Deserialize)]
struct LeafletConfig {
    id: String,
    base_url: String,
}

#[derive(Deserialize)]
struct BearBlogConfig {
    id: String,
    base_url: String,
}

#[derive(Deserialize)]
struct FeedParams {
    source_kind: Option<SourceKind>,
    source_id: Option<String>,
    limit: Option<usize>,
    since: Option<String>,
    q: Option<String>,
}

#[derive(Serialize)]
struct FeedResponse {
    items: Vec<Item>,
}

#[derive(Serialize)]
struct StatusResponse {
    status: &'static str,
    version: &'static str,
    total_items: usize,
    sources: std::collections::HashMap<String, usize>,
}
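/// HTTP entry point for the worker: handles CORS preflight and origin checks,
/// then routes requests to the docs, feed, item, sync, and status endpoints.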
#[event(fetch)]
async fn fetch(req: Request, env: Env, _ctx: Context) -> Result<Response> {
    let cors_config = load_cors_config(&env);

    if req.method() == Method::Options {
        return handle_preflight(&req, &cors_config);
    }

    if !is_cors_authorized(&req, &cors_config) {
        return Response::error("Forbidden", 403);
    }

    let origin = req.headers().get("Origin").ok().flatten();

    let router = Router::new();
    let mut response = router
        .get_async("/", |req, _ctx| async move {
            let url = req
                .url()
                .map_err(|e| Error::RustError(format!("Failed to get URL: {e}")))?;
            let base_url = url.origin().unicode_serialization();

            let docs_template = include_str!("../api-docs.json");
            let mut docs: ApiDocumentation = serde_json::from_str(docs_template)
                .map_err(|e| Error::RustError(format!("Failed to parse API docs: {e}")))?;

            docs.version = env!("CARGO_PKG_VERSION").to_string();

            for endpoint in &mut docs.endpoints {
                endpoint.url = Some(format!("{}{}", base_url, endpoint.path));

                if endpoint.path == "/api/feed" {
                    endpoint.examples = Some(vec![
                        format!("{}/api/feed", base_url),
                        format!("{}/api/feed?source_kind=bluesky&limit=10", base_url),
                        format!("{}/api/feed?q=rust&limit=5", base_url),
                    ]);
                }
            }

            Response::from_json(&docs)
        })
        .get_async("/api/feed", |req, ctx| async move { handle_feed(req, ctx).await })
        .get_async("/api/item/:id", |_req, ctx| async move {
            let id = ctx
                .param("id")
                .ok_or_else(|| Error::RustError("Missing id parameter".into()))?;
            handle_item(id, &ctx).await
        })
        .post_async("/api/sync", |_req, ctx| async move {
            match run_sync(&ctx.env).await {
                Ok(_) => Response::from_json(&serde_json::json!({
                    "status": "success",
                    "message": "Sync completed successfully"
                })),
                Err(e) => Response::error(format!("Sync failed: {e}"), 500),
            }
        })
        .get_async("/status", |_req, ctx| async move {
            let db = ctx.env.d1("DB")?;

            let total_result = db
                .prepare("SELECT COUNT(*) as count FROM items")
                .first::<serde_json::Value>(None)
                .await?;

            let total_items = total_result.and_then(|v| v.get("count")?.as_u64()).unwrap_or(0) as usize;

            let sources_result = db
                .prepare("SELECT source_kind, COUNT(*) as count FROM items GROUP BY source_kind")
                .all()
                .await?;

            let mut sources = std::collections::HashMap::new();
            if let Ok(results) = sources_result.results::<serde_json::Value>() {
                for result in results {
                    if let (Some(kind), Some(count)) = (
                        result.get("source_kind").and_then(|v| v.as_str()),
                        result.get("count").and_then(|v| v.as_u64()),
                    ) {
                        sources.insert(kind.to_string(), count as usize);
                    }
                }
            }

            let status = StatusResponse { status: "ok", version: env!("CARGO_PKG_VERSION"), total_items, sources };
            Response::from_json(&status)
        })
        .run(req, env)
        .await?;

    if let Some(origin_str) = origin {
        response.headers_mut().set("Access-Control-Allow-Origin", &origin_str)?;
        response.headers_mut().set("Access-Control-Allow-Credentials", "true")?;
    }

    Ok(response)
}
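/// Cron entry point: runs the same sync as POST /api/sync and logs any failure.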
#[event(scheduled)]
async fn scheduled(_event: ScheduledEvent, env: Env, _ctx: ScheduleContext) {
    if let Err(e) = run_sync(&env).await {
        console_error!("Scheduled sync failed: {}", e);
    }
}
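/// GET /api/feed: parses query parameters into a `ListFilter` (default limit 20)
/// and returns the matching items as JSON.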
async fn handle_feed(req: Request, ctx: RouteContext<()>) -> Result<Response> {
    let url = req.url()?;
    let params: FeedParams = serde_urlencoded::from_str(url.query().unwrap_or(""))
        .map_err(|e| Error::RustError(format!("Invalid query parameters: {e}")))?;

    let filter = ListFilter {
        source_kind: params.source_kind,
        source_id: params.source_id,
        limit: Some(params.limit.unwrap_or(20)),
        since: params.since,
        query: params.q,
    };

    let db = ctx.env.d1("DB")?;
    let items = query_items(&db, &filter).await?;

    let response = FeedResponse { items };
    Response::from_json(&response)
}
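/// GET /api/item/:id: looks up a single item by id, returning 404 if absent.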
async fn handle_item(id: &str, ctx: &RouteContext<()>) -> Result<Response> {
    let db = ctx.env.d1("DB")?;
    let stmt = db.prepare("SELECT * FROM items WHERE id = ?1").bind(&[id.into()])?;

    let result = stmt.first::<Item>(None).await?;

    match result {
        Some(item) => Response::from_json(&item),
        None => Response::error("Item not found", 404),
    }
}
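/// Builds and runs a parameterized SELECT against the `items` table,
/// appending a WHERE clause and binding for each populated filter field.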
async fn query_items(db: &D1Database, filter: &ListFilter) -> Result<Vec<Item>> {
    let mut query = String::from(
        "SELECT id, source_kind, source_id, author, title, summary, url, content_html, published_at, created_at FROM items WHERE 1=1"
    );
    let mut bindings = vec![];

    if let Some(kind) = filter.source_kind {
        query.push_str(" AND source_kind = ?");
        bindings.push(kind.to_string().into());
    }

    if let Some(ref source_id) = filter.source_id {
        query.push_str(" AND source_id = ?");
        bindings.push(source_id.clone().into());
    }

    if let Some(ref since) = filter.since {
        query.push_str(" AND published_at >= ?");
        bindings.push(since.clone().into());
    }

    if let Some(ref q) = filter.query {
        query.push_str(" AND (title LIKE ? OR summary LIKE ?)");
        let pattern = format!("%{q}%");
        bindings.push(pattern.clone().into());
        bindings.push(pattern.into());
    }

    query.push_str(" ORDER BY published_at DESC");

    if let Some(limit) = filter.limit {
        query.push_str(" LIMIT ?");
        bindings.push((limit as f64).into());
    }

    let stmt = if bindings.is_empty() { db.prepare(&query) } else { db.prepare(&query).bind(&bindings)? };

    let results = stmt.all().await?;
    let items: Vec<Item> = results.results()?;

    Ok(items)
}
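/// Syncs every configured source into D1, logging per-source results and
/// continuing past individual failures.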
async fn run_sync(env: &Env) -> Result<()> {
    let config = load_sync_config(env)?;

    let db = env.d1("DB")?;
    let mut synced = 0;

    if let Some(substack_config) = config.substack {
        match sync_substack(&substack_config, &db).await {
            Ok(count) => {
                console_log!("Synced {} items from Substack", count);
                synced += count;
            }
            Err(e) => console_error!("Substack sync failed: {}", e),
        }
    }

    if let Some(bluesky_config) = config.bluesky {
        match sync_bluesky(&bluesky_config, &db).await {
            Ok(count) => {
                console_log!("Synced {} items from Bluesky", count);
                synced += count;
            }
            Err(e) => console_error!("Bluesky sync failed: {}", e),
        }
    }

    for leaflet_config in config.leaflet {
        match sync_leaflet(&leaflet_config, &db).await {
            Ok(count) => {
                console_log!("Synced {} items from Leaflet ({})", count, leaflet_config.id);
                synced += count;
            }
            Err(e) => console_error!("Leaflet sync failed for {}: {}", leaflet_config.id, e),
        }
    }

    for bearblog_config in config.bearblog {
        match sync_bearblog(&bearblog_config, &db).await {
            Ok(count) => {
                console_log!("Synced {} items from BearBlog ({})", count, bearblog_config.id);
                synced += count;
            }
            Err(e) => console_error!("BearBlog sync failed for {}: {}", bearblog_config.id, e),
        }
    }

    console_log!("Sync completed: {} total items", synced);
    Ok(())
}
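/// Reads source configuration from environment variables. LEAFLET_URLS and
/// BEARBLOG_URLS are comma-separated `id:base_url` pairs; entries without a
/// colon are skipped.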
fn load_sync_config(env: &Env) -> Result<SyncConfig> {
    let substack = env
        .var("SUBSTACK_URL")
        .ok()
        .map(|url| SubstackConfig { base_url: url.to_string() });

    let bluesky = env
        .var("BLUESKY_HANDLE")
        .ok()
        .map(|handle| BlueskyConfig { handle: handle.to_string() });

    let leaflet = if let Ok(urls) = env.var("LEAFLET_URLS") {
        urls.to_string()
            .split(',')
            .filter_map(|entry| {
                let parts: Vec<&str> = entry.trim().splitn(2, ':').collect();
                if parts.len() == 2 {
                    Some(LeafletConfig { id: parts[0].to_string(), base_url: parts[1].to_string() })
                } else {
                    None
                }
            })
            .collect()
    } else {
        Vec::new()
    };

    let bearblog = if let Ok(urls) = env.var("BEARBLOG_URLS") {
        urls.to_string()
            .split(',')
            .filter_map(|entry| {
                let parts: Vec<&str> = entry.trim().splitn(2, ':').collect();
                if parts.len() == 2 {
                    Some(BearBlogConfig { id: parts[0].to_string(), base_url: parts[1].to_string() })
                } else {
                    None
                }
            })
            .collect()
    } else {
        Vec::new()
    };

    Ok(SyncConfig { substack, bluesky, leaflet, bearblog })
}
/// Load CORS configuration from environment variables
fn load_cors_config(env: &Env) -> CorsConfig {
    let allowed_origins = env
        .var("CORS_ALLOWED_ORIGINS")
        .ok()
        .map(|origins| origins.to_string().split(',').map(|s| s.trim().to_string()).collect())
        .unwrap_or_default();

    let dev_key = env.var("CORS_DEV_KEY").ok().map(|k| k.to_string());

    CorsConfig { allowed_origins, dev_key }
}

/// Check if request is authorized for CORS
fn is_cors_authorized(req: &Request, cors_config: &CorsConfig) -> bool {
    if let Ok(Some(key)) = req.headers().get("X-Local-Dev-Key") {
        if cors_config.is_dev_key_valid(Some(&key)) {
            return true;
        }
    }

    if let Ok(Some(origin_str)) = req.headers().get("Origin") {
        return cors_config.is_origin_allowed(&origin_str);
    }

    true
}

/// Handle preflight OPTIONS requests
fn handle_preflight(req: &Request, cors_config: &CorsConfig) -> Result<Response> {
    if !is_cors_authorized(req, cors_config) {
        return Response::error("Forbidden", 403);
    }

    let mut response = Response::empty()?;
    let response_headers = response.headers_mut();

    if let Ok(Some(origin)) = req.headers().get("Origin") {
        response_headers.set("Access-Control-Allow-Origin", &origin)?;
    }

    response_headers.set("Access-Control-Allow-Methods", "GET, POST, OPTIONS")?;
    response_headers.set("Access-Control-Allow-Headers", "Content-Type, X-Local-Dev-Key")?;
    response_headers.set("Access-Control-Max-Age", "3600")?;

    Ok(response)
}
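/// Fetches the Substack RSS feed at `{base_url}/feed` and upserts each entry
/// into the `items` table.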
async fn sync_substack(config: &SubstackConfig, db: &D1Database) -> Result<usize> {
    let feed_url = format!("{}/feed", config.base_url);

    let mut req = Request::new(&feed_url, Method::Get)?;
    req.headers_mut()?.set("User-Agent", "pai-worker/0.1.0")?;

    let mut resp = Fetch::Request(req).send().await?;
    let body = resp.text().await?;

    let channel =
        rss::Channel::read_from(body.as_bytes()).map_err(|e| Error::RustError(format!("Failed to parse RSS: {e}")))?;

    let source_id = normalize_source_id(&config.base_url);
    let mut count = 0;

    for item in channel.items() {
        let id = item.guid().map(|g| g.value()).unwrap_or(item.link().unwrap_or(""));
        let url = item.link().unwrap_or(id);
        let title = item.title();
        let summary = item.description();
        let author = item.author();
        let content_html = item.content();

        let published_at = item
            .pub_date()
            .and_then(|s| chrono::DateTime::parse_from_rfc2822(s).ok())
            .map(|dt| dt.to_rfc3339())
            .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());

        let created_at = chrono::Utc::now().to_rfc3339();

        let stmt = db.prepare(
            "INSERT OR REPLACE INTO items (id, source_kind, source_id, author, title, summary, url, content_html, published_at, created_at)
             VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10)"
        );

        stmt.bind(&[
            id.into(),
            "substack".into(),
            source_id.clone().into(),
            author.map(|s| s.into()).unwrap_or(JsValue::NULL),
            title.map(|s| s.into()).unwrap_or(JsValue::NULL),
            summary.map(|s| s.into()).unwrap_or(JsValue::NULL),
            url.into(),
            content_html.map(|s| s.into()).unwrap_or(JsValue::NULL),
            published_at.into(),
            created_at.into(),
        ])?
        .run()
        .await?;

        count += 1;
    }

    Ok(count)
}
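/// Fetches the author feed from the public Bluesky API, skipping reposts
/// (feed entries carrying a `reason` field), and upserts each post into `items`.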
async fn sync_bluesky(config: &BlueskyConfig, db: &D1Database) -> Result<usize> {
    let api_url = format!(
        "https://public.api.bsky.app/xrpc/app.bsky.feed.getAuthorFeed?actor={}&limit=50",
        config.handle
    );

    let mut req = Request::new(&api_url, Method::Get)?;
    req.headers_mut()?.set("User-Agent", "pai-worker/0.1.0")?;

    let mut resp = Fetch::Request(req).send().await?;
    let json: serde_json::Value = resp.json().await?;

    let feed = json["feed"]
        .as_array()
        .ok_or_else(|| Error::RustError("Invalid Bluesky response".into()))?;

    let mut count = 0;

    for item in feed {
        let post = &item["post"];

        if item.get("reason").is_some() {
            continue;
        }

        let uri = post["uri"]
            .as_str()
            .ok_or_else(|| Error::RustError("Missing URI".into()))?;
        let record = &post["record"];
        let text = record["text"].as_str().unwrap_or("");

        let post_id = uri.split('/').next_back().unwrap_or("");
        let url = format!("https://bsky.app/profile/{}/post/{}", config.handle, post_id);

        let title = if text.len() > 100 { format!("{}...", &text[..97]) } else { text.to_string() };

        let published_at = record["createdAt"].as_str().unwrap_or("").to_string();
        let created_at = chrono::Utc::now().to_rfc3339();

        let stmt = db.prepare(
            "INSERT OR REPLACE INTO items (id, source_kind, source_id, author, title, summary, url, content_html, published_at, created_at)
             VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10)"
        );

        stmt.bind(&[
            uri.into(),
            "bluesky".into(),
            config.handle.clone().into(),
            config.handle.clone().into(),
            title.into(),
            text.into(),
            url.into(),
            JsValue::NULL,
            published_at.into(),
            created_at.into(),
        ])?
        .run()
        .await?;

        count += 1;
    }

    Ok(count)
}
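/// Fetches the Leaflet RSS feed at `{base_url}/rss` and upserts each entry
/// into the `items` table.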
async fn sync_leaflet(config: &LeafletConfig, db: &D1Database) -> Result<usize> {
    let feed_url = format!("{}/rss", config.base_url.trim_end_matches('/'));

    let mut req = Request::new(&feed_url, Method::Get)?;
    req.headers_mut()?.set("User-Agent", "pai-worker/0.1.0")?;

    let mut resp = Fetch::Request(req).send().await?;
    let body = resp.text().await?;

    let channel =
        rss::Channel::read_from(body.as_bytes()).map_err(|e| Error::RustError(format!("Failed to parse RSS: {e}")))?;

    let mut count = 0;

    for item in channel.items() {
        let id = item.guid().map(|g| g.value()).unwrap_or(item.link().unwrap_or(""));
        let url = item.link().unwrap_or(id);
        let title = item.title();
        let summary = item.description();
        let author = item.author();
        let content_html = item.content();

        let published_at = item
            .pub_date()
            .and_then(|s| chrono::DateTime::parse_from_rfc2822(s).ok())
            .map(|dt| dt.to_rfc3339())
            .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());

        let created_at = chrono::Utc::now().to_rfc3339();

        let stmt = db.prepare(
            "INSERT OR REPLACE INTO items (id, source_kind, source_id, author, title, summary, url, content_html, published_at, created_at)
             VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10)"
        );

        stmt.bind(&[
            id.into(),
            "leaflet".into(),
            config.id.clone().into(),
            author.map(|s| s.into()).unwrap_or(JsValue::NULL),
            title.map(|s| s.into()).unwrap_or(JsValue::NULL),
            summary.map(|s| s.into()).unwrap_or(JsValue::NULL),
            url.into(),
            content_html.map(|s| s.into()).unwrap_or(JsValue::NULL),
            published_at.into(),
            created_at.into(),
        ])?
        .run()
        .await?;

        count += 1;
    }

    Ok(count)
}
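/// Fetches the Bear Blog RSS feed at `{base_url}/feed/?type=rss` and upserts
/// each entry into the `items` table.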
async fn sync_bearblog(config: &BearBlogConfig, db: &D1Database) -> Result<usize> {
    let feed_url = format!("{}/feed/?type=rss", config.base_url.trim_end_matches('/'));

    let mut req = Request::new(&feed_url, Method::Get)?;
    req.headers_mut()?.set("User-Agent", "pai-worker/0.1.0")?;

    let mut resp = Fetch::Request(req).send().await?;
    let body = resp.text().await?;

    let channel =
        rss::Channel::read_from(body.as_bytes()).map_err(|e| Error::RustError(format!("Failed to parse RSS: {e}")))?;

    let mut count = 0;

    for item in channel.items() {
        let id = item.guid().map(|g| g.value()).unwrap_or(item.link().unwrap_or(""));
        let url = item.link().unwrap_or(id);
        let title = item.title();
        let summary = item.description();
        let author = item.author();
        let content_html = item.content();

        let published_at = item
            .pub_date()
            .and_then(|s| chrono::DateTime::parse_from_rfc2822(s).ok())
            .map(|dt| dt.to_rfc3339())
            .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());

        let created_at = chrono::Utc::now().to_rfc3339();

        let stmt = db.prepare(
            "INSERT OR REPLACE INTO items (id, source_kind, source_id, author, title, summary, url, content_html, published_at, created_at)
             VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10)"
        );

        stmt.bind(&[
            id.into(),
            "bearblog".into(),
            config.id.clone().into(),
            author.map(|s| s.into()).unwrap_or(JsValue::NULL),
            title.map(|s| s.into()).unwrap_or(JsValue::NULL),
            summary.map(|s| s.into()).unwrap_or(JsValue::NULL),
            url.into(),
            content_html.map(|s| s.into()).unwrap_or(JsValue::NULL),
            published_at.into(),
            created_at.into(),
        ])?
        .run()
        .await?;

        count += 1;
    }

    Ok(count)
}
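/// Strips the scheme and any trailing slash from a base URL, e.g.
/// "https://example.com/" -> "example.com".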
fn normalize_source_id(base_url: &str) -> String {
    base_url
        .trim_start_matches("https://")
        .trim_start_matches("http://")
        .trim_end_matches('/')
        .to_string()
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_api_docs_json_is_valid() {
        let docs_str = include_str!("../api-docs.json");
        let result = serde_json::from_str::<ApiDocumentation>(docs_str);
        assert!(result.is_ok(), "API docs JSON should be valid: {:?}", result.err());

        let docs = result.unwrap();
        assert_eq!(docs.name, "Personal Activity Index API");
        assert!(!docs.description.is_empty());
        assert!(!docs.endpoints.is_empty());
    }

    #[test]
    fn test_api_docs_has_all_endpoints() {
        let docs_str = include_str!("../api-docs.json");
        let docs: ApiDocumentation = serde_json::from_str(docs_str).unwrap();

        let paths: Vec<&str> = docs.endpoints.iter().map(|e| e.path.as_str()).collect();

        assert!(paths.contains(&"/"));
        assert!(paths.contains(&"/status"));
        assert!(paths.contains(&"/api/feed"));
        assert!(paths.contains(&"/api/item/:id"));
        assert!(paths.contains(&"/api/sync"));
    }

    #[test]
    fn test_api_docs_feed_endpoint_parameters() {
        let docs_str = include_str!("../api-docs.json");
        let docs: ApiDocumentation = serde_json::from_str(docs_str).unwrap();

        let feed_endpoint = docs.endpoints.iter().find(|e| e.path == "/api/feed").unwrap();
        let params = feed_endpoint.parameters.as_ref().unwrap();
        let param_names: Vec<&str> = params.iter().map(|p| p.name.as_str()).collect();

        assert!(param_names.contains(&"source_kind"));
        assert!(param_names.contains(&"source_id"));
        assert!(param_names.contains(&"limit"));
        assert!(param_names.contains(&"since"));
        assert!(param_names.contains(&"q"));
    }

    #[test]
    fn test_api_docs_has_source_descriptions() {
        let docs_str = include_str!("../api-docs.json");
        let docs: ApiDocumentation = serde_json::from_str(docs_str).unwrap();

        assert!(!docs.sources.substack.is_empty());
        assert!(!docs.sources.bluesky.is_empty());
        assert!(!docs.sources.leaflet.is_empty());
        assert!(!docs.sources.bearblog.is_empty());
    }

    #[test]
    fn test_api_docs_url_generation() {
        let docs_str = include_str!("../api-docs.json");
        let mut docs: ApiDocumentation = serde_json::from_str(docs_str).unwrap();

        let base_url = "https://example.workers.dev";
        for endpoint in &mut docs.endpoints {
            endpoint.url = Some(format!("{}{}", base_url, endpoint.path));
        }

        let root = docs.endpoints.iter().find(|e| e.path == "/").unwrap();
        assert_eq!(root.url.as_ref().unwrap(), "https://example.workers.dev/");

        let feed = docs.endpoints.iter().find(|e| e.path == "/api/feed").unwrap();
        assert_eq!(feed.url.as_ref().unwrap(), "https://example.workers.dev/api/feed");
    }

    #[test]
    fn test_normalize_source_id_https() {
        assert_eq!(
            normalize_source_id("https://patternmatched.substack.com"),
            "patternmatched.substack.com"
        );
    }

    #[test]
    fn test_normalize_source_id_http() {
        assert_eq!(normalize_source_id("http://example.com/"), "example.com");
    }

    #[test]
    fn test_normalize_source_id_trailing_slash() {
        assert_eq!(normalize_source_id("https://test.leaflet.pub/"), "test.leaflet.pub");
    }

    #[test]
    fn test_normalize_source_id_no_protocol() {
        assert_eq!(normalize_source_id("example.com"), "example.com");
    }

    #[test]
    fn test_bluesky_title_truncation_short() {
        let text = "Short post";
        let title = if text.len() > 100 { format!("{}...", &text[..97]) } else { text.to_string() };
        assert_eq!(title, "Short post");
    }

    #[test]
    fn test_bluesky_title_truncation_long() {
        let text = "a".repeat(150);
        let title = if text.len() > 100 { format!("{}...", &text[..97]) } else { text.to_string() };
        assert_eq!(title.len(), 100);
        assert!(title.ends_with("..."));
    }

    #[test]
    fn test_bluesky_title_truncation_boundary() {
        let text = "a".repeat(100);
        let title = if text.len() > 100 { format!("{}...", &text[..97]) } else { text.to_string() };
        assert_eq!(title, text);
    }

    #[test]
    fn test_bluesky_post_id_extraction() {
        let uri = "at://did:plc:abc123/app.bsky.feed.post/3ld7xyqnvqk2a";
        let post_id = uri.split('/').next_back().unwrap_or("");
        assert_eq!(post_id, "3ld7xyqnvqk2a");
    }

    #[test]
    fn test_bluesky_url_construction() {
        let handle = "desertthunder.dev";
        let post_id = "3ld7xyqnvqk2a";
        let url = format!("https://bsky.app/profile/{handle}/post/{post_id}");
        assert_eq!(url, "https://bsky.app/profile/desertthunder.dev/post/3ld7xyqnvqk2a");
    }

    #[test]
    fn test_leaflet_config_parsing() {
        let entry = "desertthunder:https://desertthunder.leaflet.pub";
        let parts: Vec<&str> = entry.trim().splitn(2, ':').collect();
        assert_eq!(parts.len(), 2);
        assert_eq!(parts[0], "desertthunder");
        assert_eq!(parts[1], "https://desertthunder.leaflet.pub");
    }

    #[test]
    fn test_leaflet_config_parsing_invalid() {
        let entry = "invalid-entry-no-colon";
        let parts: Vec<&str> = entry.trim().splitn(2, ':').collect();
        assert_ne!(parts.len(), 2);
    }

    #[test]
    fn test_leaflet_config_parsing_multiple() {
        let urls = "id1:https://pub1.leaflet.pub,id2:https://pub2.leaflet.pub";
        let configs: Vec<_> = urls
            .split(',')
            .filter_map(|entry| {
                let parts: Vec<&str> = entry.trim().splitn(2, ':').collect();
                if parts.len() == 2 {
                    Some((parts[0].to_string(), parts[1].to_string()))
                } else {
                    None
                }
            })
            .collect();

        assert_eq!(configs.len(), 2);
        assert_eq!(configs[0].0, "id1");
        assert_eq!(configs[0].1, "https://pub1.leaflet.pub");
        assert_eq!(configs[1].0, "id2");
        assert_eq!(configs[1].1, "https://pub2.leaflet.pub");
    }

    #[test]
    fn test_substack_feed_url_construction() {
        let base_url = "https://patternmatched.substack.com";
        let feed_url = format!("{base_url}/feed");
        assert_eq!(feed_url, "https://patternmatched.substack.com/feed");
    }

    #[test]
    fn test_bluesky_api_url_construction() {
        let handle = "desertthunder.dev";
        let api_url = format!("https://public.api.bsky.app/xrpc/app.bsky.feed.getAuthorFeed?actor={handle}&limit=50");
        assert_eq!(
            api_url,
            "https://public.api.bsky.app/xrpc/app.bsky.feed.getAuthorFeed?actor=desertthunder.dev&limit=50"
        );
    }

    #[test]
    fn test_leaflet_feed_url_construction() {
        let base_url = "https://desertthunder.leaflet.pub";
        let feed_url = format!("{}/rss", base_url.trim_end_matches('/'));
        assert_eq!(feed_url, "https://desertthunder.leaflet.pub/rss");
    }
    #[test]
    fn test_bearblog_feed_url_construction() {
        let base_url = "https://desertthunder.bearblog.dev";
        let feed_url = format!("{}/feed/?type=rss", base_url.trim_end_matches('/'));
        assert_eq!(feed_url, "https://desertthunder.bearblog.dev/feed/?type=rss");
    }

    #[test]
    fn test_bearblog_config_parsing() {
        let entry = "desertthunder:https://desertthunder.bearblog.dev";
        let parts: Vec<&str> = entry.trim().splitn(2, ':').collect();
        assert_eq!(parts.len(), 2);
        assert_eq!(parts[0], "desertthunder");
        assert_eq!(parts[1], "https://desertthunder.bearblog.dev");
    }

    #[test]
    fn test_bearblog_config_parsing_multiple() {
        let urls = "id1:https://blog1.bearblog.dev,id2:https://blog2.bearblog.dev";
        let configs: Vec<_> = urls
            .split(',')
            .filter_map(|entry| {
                let parts: Vec<&str> = entry.trim().splitn(2, ':').collect();
                if parts.len() == 2 {
                    Some((parts[0].to_string(), parts[1].to_string()))
                } else {
                    None
                }
            })
            .collect();

        assert_eq!(configs.len(), 2);
        assert_eq!(configs[0].0, "id1");
        assert_eq!(configs[0].1, "https://blog1.bearblog.dev");
        assert_eq!(configs[1].0, "id2");
        assert_eq!(configs[1].1, "https://blog2.bearblog.dev");
    }
}