//! Personal activity index (bluesky, leaflet, substack).
//! pai.desertthunder.dev
//! rss
//! bluesky
1mod app;
2mod paths;
3
4use app::{Cli, Commands, ExportOpts};
5use chrono::{DateTime, Duration, Utc};
6use clap::Parser;
7use owo_colors::OwoColorize;
8use pai_core::{Config, Item, ListFilter, PaiError, SourceKind};
9use pai_server::SqliteStorage;
10use rss::{Channel, ChannelBuilder, ItemBuilder};
11use std::fs;
12use std::io::{self, Write};
13use std::path::{Path, PathBuf};
14use std::str::FromStr;
15
// Column widths (in characters) for the `pai list` table rendering.
const PUBLISHED_WIDTH: usize = 19;
const KIND_WIDTH: usize = 9;
const SOURCE_WIDTH: usize = 24;
const TITLE_WIDTH: usize = 60;
// Rendered manpage embedded at compile time; `PAI_MAN_PAGE` is a compile-time
// env var pointing at the generated file (presumably set by a build script — confirm).
const MAN_PAGE: &str = include_str!(env!("PAI_MAN_PAGE"));
21
22fn main() {
23 let cli = Cli::parse();
24
25 let result = match cli.command {
26 Commands::Sync { all, kind, source_id } => handle_sync(cli.config_dir, cli.db_path, all, kind, source_id),
27 Commands::List { kind, source_id, limit, since, query } => {
28 handle_list(cli.db_path, kind, source_id, limit, since, query)
29 }
30 Commands::Export(opts) => handle_export(cli.db_path, opts),
31 Commands::Serve { address } => handle_serve(cli.db_path, address),
32 Commands::DbCheck => handle_db_check(cli.db_path),
33 Commands::Init { force } => handle_init(cli.config_dir, force),
34 Commands::Man { output, install, install_dir } => handle_man(output, install, install_dir),
35 Commands::CfInit { output_dir, dry_run } => handle_cf_init(output_dir, dry_run),
36 };
37
38 if let Err(e) = result {
39 eprintln!("{} {}", "Error:".red().bold(), e);
40 std::process::exit(1);
41 }
42}
43
44fn handle_sync(
45 config_dir: Option<PathBuf>, db_path: Option<PathBuf>, _all: bool, kind: Option<SourceKind>,
46 source_id: Option<String>,
47) -> Result<(), PaiError> {
48 let db_path = paths::resolve_db_path(db_path)?;
49 let config_dir = paths::resolve_config_dir(config_dir)?;
50
51 let storage = SqliteStorage::new(db_path)?;
52
53 let config_path = config_dir.join("config.toml");
54 let config = if config_path.exists() {
55 Config::from_file(&config_path)?
56 } else {
57 println!(
58 "{} No config file found, using default configuration",
59 "Warning:".yellow()
60 );
61 Config::default()
62 };
63
64 let count = pai_core::sync_all_sources(&config, &storage, kind, source_id.as_deref())?;
65
66 if count == 0 {
67 println!("{} No sources synced (check your config or filters)", "Info:".cyan());
68 } else {
69 println!("{} Synced {}", "Success:".green(), format!("{count} source(s)").bold());
70 }
71
72 Ok(())
73}
74
75fn handle_list(
76 db_path: Option<PathBuf>, kind: Option<SourceKind>, source_id: Option<String>, limit: usize, since: Option<String>,
77 query: Option<String>,
78) -> Result<(), PaiError> {
79 let db_path = paths::resolve_db_path(db_path)?;
80 let storage = SqliteStorage::new(db_path)?;
81
82 let since = normalize_since_input(since)?;
83 let limit = ensure_positive_limit(limit)?;
84 let source_id = normalize_optional_string(source_id);
85 let query = normalize_optional_string(query);
86
87 let filter = ListFilter { source_kind: kind, source_id, limit: Some(limit), since, query };
88
89 let items = pai_core::Storage::list_items(&storage, &filter)?;
90
91 if items.is_empty() {
92 println!("{}", "No items found".yellow());
93 return Ok(());
94 }
95
96 println!("{} {}", "Found".cyan(), format!("{} item(s)", items.len()).bold());
97 println!();
98 render_items_table(&items)?;
99
100 Ok(())
101}
102
103fn handle_export(db_path: Option<PathBuf>, opts: ExportOpts) -> Result<(), PaiError> {
104 let db_path = paths::resolve_db_path(db_path)?;
105 let storage = SqliteStorage::new(db_path)?;
106
107 let ExportOpts { kind, source_id, limit, since, query, format, output } = opts;
108 let since = normalize_since_input(since)?;
109 let limit = ensure_optional_limit(limit)?;
110 let source_id = normalize_optional_string(source_id);
111 let query = normalize_optional_string(query);
112
113 let filter = ListFilter { source_kind: kind, source_id, limit, since, query };
114 let items = pai_core::Storage::list_items(&storage, &filter)?;
115
116 let export_format = ExportFormat::from_str(&format)?;
117 let mut writer = create_output_writer(output.as_ref())?;
118 export_items(&items, export_format, writer.as_mut())?;
119
120 match output {
121 Some(path) => println!(
122 "{} Exported {} item(s) to {}",
123 "Success:".green(),
124 items.len(),
125 path.display()
126 ),
127 None => println!("{} Exported {} item(s) to stdout", "Success:".green(), items.len()),
128 }
129
130 Ok(())
131}
132
133fn handle_serve(db_path: Option<PathBuf>, address: String) -> Result<(), PaiError> {
134 let db_path = paths::resolve_db_path(db_path)?;
135 let config_path = paths::resolve_config_dir(None)?.join("config.toml");
136 let config = if config_path.exists() { Config::from_file(&config_path)? } else { Config::default() };
137 pai_server::serve(config, db_path, &address)
138}
139
140fn handle_db_check(db_path: Option<PathBuf>) -> Result<(), PaiError> {
141 let db_path = paths::resolve_db_path(db_path)?;
142 let storage = SqliteStorage::new(db_path)?;
143
144 println!("{}", "Verifying database schema...".cyan());
145 storage.verify_schema()?;
146 println!("{} {}\n", "Schema verification:".green(), "OK".bold());
147
148 println!("{}", "Database statistics:".cyan().bold());
149 let total = storage.count_items()?;
150 println!(" {}: {}", "Total items".bright_black(), total.to_string().bold());
151
152 let stats = storage.get_stats()?;
153 if !stats.is_empty() {
154 println!("\n{}", "Items by source:".cyan().bold());
155 for (source_kind, count) in stats {
156 println!(" {}: {}", source_kind.bright_black(), count.to_string().bold());
157 }
158 }
159
160 Ok(())
161}
162
163fn handle_init(config_dir: Option<PathBuf>, force: bool) -> Result<(), PaiError> {
164 let config_dir = paths::resolve_config_dir(config_dir)?;
165 let config_path = config_dir.join("config.toml");
166
167 if config_path.exists() && !force {
168 println!(
169 "{} Config file already exists at {}",
170 "Error:".red().bold(),
171 config_path.display()
172 );
173 println!("{} Use {} to overwrite", "Hint:".yellow(), "pai init -f".bold());
174 return Err(PaiError::Config("Config file already exists".to_string()));
175 }
176
177 fs::create_dir_all(&config_dir).map_err(|e| PaiError::Config(format!("Failed to create config directory: {e}")))?;
178
179 let default_config = include_str!("../../config.example.toml");
180 fs::write(&config_path, default_config)
181 .map_err(|e| PaiError::Config(format!("Failed to write config file: {e}")))?;
182
183 println!("{} Created configuration file", "Success:".green().bold());
184 println!(
185 " {}: {}",
186 "Location".bright_black(),
187 config_path.display().to_string().bold()
188 );
189 println!();
190 println!("{}", "Next steps:".cyan().bold());
191 println!(" 1. Edit the config file to add your sources:");
192 println!(" {}", format!("$EDITOR {}", config_path.display()).bright_black());
193 println!(" 2. Run sync to fetch content:");
194 println!(" {}", "pai sync".bright_black());
195 println!(" 3. List your items:");
196 println!(" {}", "pai list -n 10".bright_black());
197
198 Ok(())
199}
200
201fn handle_man(output: Option<PathBuf>, install: bool, install_dir: Option<PathBuf>) -> Result<(), PaiError> {
202 if install && output.is_some() {
203 return Err(PaiError::InvalidArgument(
204 "Use either --install or -o/--output when generating manpages".to_string(),
205 ));
206 }
207
208 let target = if install { Some(resolve_man_install_path(install_dir)?) } else { output };
209
210 let mut writer = create_output_writer(target.as_ref())?;
211 writer.write_all(MAN_PAGE.as_bytes()).map_err(PaiError::Io)?;
212 writer.flush().map_err(PaiError::Io)?;
213
214 if let Some(path) = target {
215 if install {
216 println!("{} Installed manpage to {}", "Success:".green(), path.display());
217 if let Some(root) = man_root_for(&path) {
218 println!(
219 "{} Ensure {} is on your MANPATH, then run {}",
220 "Hint:".yellow(),
221 root.display(),
222 "man pai".bright_black()
223 );
224 } else {
225 println!(
226 "{} Run man pai after adding the install dir to MANPATH.",
227 "Hint:".yellow()
228 );
229 }
230 } else {
231 println!("{} Wrote manpage to {}", "Success:".green(), path.display());
232 }
233 }
234
235 Ok(())
236}
237
238fn resolve_man_install_path(custom_dir: Option<PathBuf>) -> Result<PathBuf, PaiError> {
239 let base = if let Some(dir) = custom_dir { dir } else { find_writable_man_dir()? };
240
241 let install_dir = if base.file_name().map(|os| os == "man1").unwrap_or(false) { base } else { base.join("man1") };
242
243 fs::create_dir_all(&install_dir).map_err(|e| {
244 PaiError::Io(io::Error::new(
245 e.kind(),
246 format!("Failed to create man directory {}: {}", install_dir.display(), e),
247 ))
248 })?;
249
250 Ok(install_dir.join("pai.1"))
251}
252
253fn find_writable_man_dir() -> Result<PathBuf, PaiError> {
254 let candidates = [
255 dirs::data_local_dir().map(|d| d.join("man")),
256 dirs::home_dir().map(|d| d.join(".local/share/man")),
257 Some(PathBuf::from("/usr/local/share/man")),
258 Some(PathBuf::from("/opt/homebrew/share/man")),
259 Some(PathBuf::from("/usr/local/Homebrew/share/man")),
260 ];
261
262 for candidate in candidates.iter().flatten() {
263 if candidate.exists() {
264 let test_file = candidate.join(".pai-write-test");
265 if fs::write(&test_file, b"test").is_ok() {
266 let _ = fs::remove_file(&test_file);
267 return Ok(candidate.clone());
268 }
269 } else if let Some(parent) = candidate.parent() {
270 if parent.exists() {
271 let test_dir = candidate.join("man1");
272 if fs::create_dir_all(&test_dir).is_ok() {
273 let _ = fs::remove_dir_all(&test_dir);
274 return Ok(candidate.clone());
275 }
276 }
277 }
278 }
279
280 if let Some(data_dir) = dirs::data_local_dir() {
281 return Ok(data_dir.join("man"));
282 }
283
284 Err(PaiError::Config(
285 "Unable to find a writable man page directory. Use --install-dir to specify one.".to_string(),
286 ))
287}
288
/// The man root sits two levels above the page: `<root>/man1/pai.1` -> `<root>`.
fn man_root_for(path: &Path) -> Option<&Path> {
    let section_dir = path.parent()?;
    section_dir.parent()
}
292
/// Scaffold Cloudflare Worker deployment files (`wrangler.example.toml`,
/// `schema.sql`, `README.md`) into `output_dir` (default: current directory).
///
/// With `dry_run`, only lists what would be created. Existing files are never
/// overwritten — they are skipped with a warning.
fn handle_cf_init(output_dir: Option<PathBuf>, dry_run: bool) -> Result<(), PaiError> {
    let target_dir = output_dir.unwrap_or_else(|| PathBuf::from("."));

    // Templates embedded at compile time from the repository's worker/ directory.
    let wrangler_template = include_str!("../../worker/wrangler.example.toml");
    let schema_sql = include_str!("../../worker/schema.sql");

    let readme_content = r#"# Cloudflare Worker Deployment

## Quick Start

1. **Create D1 Database:**
```sh
wrangler d1 create personal-activity-db
```

2. **Copy the configuration:**
```sh
cp wrangler.example.toml wrangler.toml
```

3. **Update `wrangler.toml`:**
- Replace `{DATABASE_ID}` with the ID from step 1
- Adjust `name` and `database_name` if desired

4. **Initialize the database schema:**
```sh
wrangler d1 execute personal-activity-db --file=schema.sql
```

5. **Build the worker:**
```sh
cd ..
cargo install worker-build
worker-build --release -p pai-worker
```

6. **Deploy:**
```sh
cd worker
wrangler deploy
```

## Testing Locally

Run the worker locally with:
```sh
wrangler dev
```

## Scheduled Syncs

The worker is configured with a cron trigger (see `wrangler.toml`). The default schedule runs every hour.
To modify the schedule, edit the `crons` array in `wrangler.toml`.

## API Endpoints

- `GET /api/feed` - List items with optional filters
- `GET /api/item/:id` - Get a single item by ID
- `GET /status` - Health check

## Environment Variables

Configure in `wrangler.toml` under `[vars]`:
- `LOG_LEVEL` - Set logging verbosity (optional)
"#;

    let files = vec![
        ("wrangler.example.toml", wrangler_template),
        ("schema.sql", schema_sql),
        ("README.md", readme_content),
    ];

    if dry_run {
        // Report what would be written, then bail out before touching disk.
        println!("{} Dry run - showing files that would be created:\n", "Info:".cyan());
        for (filename, content) in &files {
            let path = target_dir.join(filename);
            println!(" {} {}", "Would create:".bright_black(), path.display());
            println!(" {} bytes", content.len());
        }
        println!("\n{} Run without --dry-run to create these files", "Hint:".yellow());
        return Ok(());
    }

    fs::create_dir_all(&target_dir)?;

    for (filename, content) in &files {
        let path = target_dir.join(filename);
        // Never clobber user-edited files; skip with a warning instead.
        if path.exists() {
            println!("{} {} already exists, skipping", "Warning:".yellow(), filename);
            continue;
        }
        fs::write(&path, content)?;
        println!("{} Created {}", "Success:".green(), path.display());
    }

    println!("\n{} Cloudflare Worker scaffolding created!", "Success:".green().bold());
    println!("\n{} Next steps:", "Info:".cyan());
    println!(" 1. cd {}", target_dir.display());
    println!(" 2. Read README.md for deployment instructions");
    println!(" 3. wrangler d1 create personal-activity-db");
    println!(" 4. Update wrangler.example.toml with your database ID");

    Ok(())
}
397
398fn normalize_since_input(since: Option<String>) -> Result<Option<String>, PaiError> {
399 normalize_since_with_now(since, Utc::now())
400}
401
402fn normalize_since_with_now(since: Option<String>, now: DateTime<Utc>) -> Result<Option<String>, PaiError> {
403 let value = match since {
404 Some(raw) => {
405 let trimmed = raw.trim();
406 if trimmed.is_empty() {
407 return Ok(None);
408 }
409 trimmed.to_string()
410 }
411 None => return Ok(None),
412 };
413
414 if let Some(duration) = parse_relative_duration(&value) {
415 let instant = now - duration;
416 return Ok(Some(instant.to_rfc3339()));
417 }
418
419 if let Ok(dt) = DateTime::parse_from_rfc3339(&value) {
420 return Ok(Some(dt.with_timezone(&Utc).to_rfc3339()));
421 }
422
423 if let Ok(dt) = DateTime::parse_from_rfc2822(&value) {
424 return Ok(Some(dt.with_timezone(&Utc).to_rfc3339()));
425 }
426
427 let msg = format!(
428 "Invalid since value '{value}'. Use ISO 8601 (e.g. 2024-01-01T00:00:00Z) or relative forms like 7d/24h/60m."
429 );
430 Err(PaiError::InvalidArgument(msg))
431}
432
433fn parse_relative_duration(input: &str) -> Option<Duration> {
434 if input.len() < 2 {
435 return None;
436 }
437
438 let unit = input.chars().last()?.to_ascii_lowercase();
439 let magnitude: i64 = input[..input.len() - 1].parse().ok()?;
440
441 match unit {
442 'm' => Some(Duration::minutes(magnitude)),
443 'h' => Some(Duration::hours(magnitude)),
444 'd' => Some(Duration::days(magnitude)),
445 'w' => Some(Duration::weeks(magnitude)),
446 _ => None,
447 }
448}
449
450fn ensure_positive_limit(limit: usize) -> Result<usize, PaiError> {
451 if limit == 0 {
452 return Err(PaiError::InvalidArgument("Limit must be greater than zero".to_string()));
453 }
454 Ok(limit)
455}
456
457fn ensure_optional_limit(limit: Option<usize>) -> Result<Option<usize>, PaiError> {
458 match limit {
459 Some(value) => Ok(Some(ensure_positive_limit(value)?)),
460 None => Ok(None),
461 }
462}
463
/// Trim an optional string, mapping missing or blank input to `None`.
fn normalize_optional_string(value: Option<String>) -> Option<String> {
    let owned = value?;
    let trimmed = owned.trim();
    if trimmed.is_empty() {
        return None;
    }
    Some(trimmed.to_string())
}
474
/// Output formats accepted by `pai export`.
enum ExportFormat {
    // Pretty-printed JSON array of items.
    Json,
    // One compact JSON object per line (newline-delimited JSON).
    Ndjson,
    // RSS 2.0 XML channel.
    Rss,
}
480
481impl FromStr for ExportFormat {
482 type Err = PaiError;
483
484 fn from_str(s: &str) -> Result<Self, Self::Err> {
485 match s.to_ascii_lowercase().as_str() {
486 "json" => Ok(Self::Json),
487 "ndjson" => Ok(Self::Ndjson),
488 "rss" => Ok(Self::Rss),
489 other => Err(PaiError::InvalidArgument(format!(
490 "Unsupported export format '{other}'. Expected json, ndjson, or rss."
491 ))),
492 }
493 }
494}
495
496fn create_output_writer(path: Option<&PathBuf>) -> Result<Box<dyn Write>, PaiError> {
497 if let Some(path) = path {
498 if let Some(parent) = path.parent() {
499 if !parent.as_os_str().is_empty() {
500 fs::create_dir_all(parent)?;
501 }
502 }
503 let file = fs::File::create(path)?;
504 Ok(Box::new(file))
505 } else {
506 Ok(Box::new(io::stdout()))
507 }
508}
509
510fn export_items(items: &[Item], format: ExportFormat, writer: &mut dyn Write) -> Result<(), PaiError> {
511 match format {
512 ExportFormat::Json => write_json(items, writer)?,
513 ExportFormat::Ndjson => write_ndjson(items, writer)?,
514 ExportFormat::Rss => write_rss(items, writer)?,
515 }
516
517 writer.flush().map_err(PaiError::Io)
518}
519
520fn write_json(items: &[Item], writer: &mut dyn Write) -> Result<(), PaiError> {
521 serde_json::to_writer_pretty(&mut *writer, items)
522 .map_err(|e| PaiError::Parse(format!("Failed to serialize JSON export: {e}")))?;
523 writer.write_all(b"\n").map_err(PaiError::Io)
524}
525
526fn write_ndjson(items: &[Item], writer: &mut dyn Write) -> Result<(), PaiError> {
527 for item in items {
528 serde_json::to_writer(&mut *writer, item)
529 .map_err(|e| PaiError::Parse(format!("Failed to serialize JSON export: {e}")))?;
530 writer.write_all(b"\n").map_err(PaiError::Io)?;
531 }
532 Ok(())
533}
534
535fn write_rss(items: &[Item], writer: &mut dyn Write) -> Result<(), PaiError> {
536 let channel = build_rss_channel(items)?;
537 let rss_string = channel.to_string();
538 writer.write_all(rss_string.as_bytes()).map_err(PaiError::Io)?;
539 writer.write_all(b"\n").map_err(PaiError::Io)
540}
541
542fn build_rss_channel(items: &[Item]) -> Result<Channel, PaiError> {
543 const TITLE: &str = "Personal Activity Index";
544 const LINK: &str = "https://personal-activity-index.local/";
545 const DESCRIPTION: &str = "Aggregated feed exported by the Personal Activity Index CLI.";
546
547 let rss_items: Vec<rss::Item> = items
548 .iter()
549 .map(|item| {
550 let title = item
551 .title
552 .as_deref()
553 .or(item.summary.as_deref())
554 .unwrap_or(&item.url)
555 .to_string();
556 let description = item
557 .summary
558 .as_deref()
559 .or(item.content_html.as_deref())
560 .unwrap_or("")
561 .to_string();
562 let author = item.author.as_deref().unwrap_or("Unknown").to_string();
563 let pub_date = format_rss_date(&item.published_at);
564
565 ItemBuilder::default()
566 .title(Some(title))
567 .link(Some(item.url.clone()))
568 .guid(Some(
569 rss::GuidBuilder::default().value(&item.id).permalink(false).build(),
570 ))
571 .pub_date(Some(pub_date))
572 .author(Some(author))
573 .description(Some(description))
574 .categories(vec![rss::CategoryBuilder::default()
575 .name(item.source_kind.to_string())
576 .build()])
577 .build()
578 })
579 .collect();
580
581 let channel = ChannelBuilder::default()
582 .title(TITLE)
583 .link(LINK)
584 .description(DESCRIPTION)
585 .items(rss_items)
586 .build();
587
588 Ok(channel)
589}
590
591fn format_rss_date(value: &str) -> String {
592 if let Ok(dt) = DateTime::parse_from_rfc3339(value) {
593 dt.to_rfc2822()
594 } else if let Ok(dt) = DateTime::parse_from_rfc2822(value) {
595 dt.to_rfc2822()
596 } else {
597 value.to_string()
598 }
599}
600
601fn format_published_display(value: &str) -> String {
602 if let Ok(dt) = DateTime::parse_from_rfc3339(value) {
603 dt.with_timezone(&Utc).format("%Y-%m-%d %H:%M").to_string()
604 } else if let Ok(dt) = DateTime::parse_from_rfc2822(value) {
605 dt.with_timezone(&Utc).format("%Y-%m-%d %H:%M").to_string()
606 } else {
607 value.to_string()
608 }
609}
610
/// Truncate `value` to at most `max_chars` characters (not bytes), appending
/// `...` when there is room for it (i.e. `max_chars > 3`).
fn truncate_for_column(value: &str, max_chars: usize) -> String {
    if value.chars().count() <= max_chars {
        return value.to_string();
    }

    // Too narrow for an ellipsis: hard cut.
    if max_chars <= 3 {
        return value.chars().take(max_chars).collect();
    }

    let head: String = value.chars().take(max_chars - 3).collect();
    format!("{head}...")
}
628
629fn render_items_table(items: &[Item]) -> Result<(), PaiError> {
630 let mut stdout = io::stdout();
631 write_items_table(items, &mut stdout).map_err(PaiError::Io)
632}
633
634fn write_items_table<W: Write>(items: &[Item], writer: &mut W) -> io::Result<()> {
635 let header = format!(
636 "| {published:<pub_width$} | {kind:<kind_width$} | {source:<source_width$} | {title:<title_width$} |",
637 published = "Published",
638 kind = "Kind",
639 source = "Source",
640 title = "Title",
641 pub_width = PUBLISHED_WIDTH,
642 kind_width = KIND_WIDTH,
643 source_width = SOURCE_WIDTH,
644 title_width = TITLE_WIDTH,
645 );
646 let separator = "-".repeat(header.len());
647
648 writeln!(writer, "{separator}")?;
649 writeln!(writer, "{header}")?;
650 writeln!(writer, "{}", separator.clone())?;
651
652 for item in items {
653 let published = truncate_for_column(&format_published_display(&item.published_at), PUBLISHED_WIDTH);
654 let kind = truncate_for_column(&item.source_kind.to_string(), KIND_WIDTH);
655 let source = truncate_for_column(&item.source_id, SOURCE_WIDTH);
656 let title_text = item.title.as_deref().or(item.summary.as_deref()).unwrap_or(&item.url);
657 let title = truncate_for_column(title_text, TITLE_WIDTH);
658
659 let row = format!(
660 "| {published:<PUBLISHED_WIDTH$} | {kind:<KIND_WIDTH$} | {source:<SOURCE_WIDTH$} | {title:<TITLE_WIDTH$} |",
661 );
662 writeln!(writer, "{row}")?;
663 }
664
665 writeln!(writer, "{separator}")
666}
667
// Unit tests for input normalization, export serialization, and table rendering.
#[cfg(test)]
mod tests {
    use super::*;
    use chrono::TimeZone;

    /// Build a representative `Item` used across the serialization tests.
    fn sample_item() -> Item {
        Item {
            id: "sample-id".to_string(),
            source_kind: SourceKind::Substack,
            source_id: "patternmatched.substack.com".to_string(),
            author: Some("Pattern Matched".to_string()),
            title: Some("Test entry".to_string()),
            summary: Some("Summary".to_string()),
            url: "https://patternmatched.substack.com/p/test".to_string(),
            content_html: None,
            published_at: "2024-01-01T00:00:00Z".to_string(),
            created_at: "2024-01-01T00:00:00Z".to_string(),
        }
    }

    // RFC 3339 input is normalized to UTC with an explicit +00:00 offset.
    #[test]
    fn normalize_since_accepts_iso8601() {
        let now = Utc.with_ymd_and_hms(2024, 1, 10, 0, 0, 0).unwrap();
        let since = normalize_since_with_now(Some("2024-01-01T00:00:00Z".to_string()), now).unwrap();
        assert_eq!(since.unwrap(), "2024-01-01T00:00:00+00:00");
    }

    // Relative forms like "3d" subtract from the injected `now`.
    #[test]
    fn normalize_since_accepts_relative_days() {
        let now = Utc.with_ymd_and_hms(2024, 1, 10, 0, 0, 0).unwrap();
        let since = normalize_since_with_now(Some("3d".to_string()), now).unwrap();
        assert_eq!(since.unwrap(), "2024-01-07T00:00:00+00:00");
    }

    // Zero limits are invalid in both the required and optional variants.
    #[test]
    fn ensure_positive_limit_rejects_zero() {
        assert!(ensure_positive_limit(0).is_err());
        assert!(ensure_optional_limit(Some(0)).is_err());
    }

    // Format names parse case-insensitively; unknown names are rejected.
    #[test]
    fn export_format_parsing() {
        assert!(matches!(ExportFormat::from_str("json").unwrap(), ExportFormat::Json));
        assert!(matches!(
            ExportFormat::from_str("NDJSON").unwrap(),
            ExportFormat::Ndjson
        ));
        assert!(matches!(ExportFormat::from_str("rss").unwrap(), ExportFormat::Rss));
        assert!(ExportFormat::from_str("invalid").is_err());
    }

    // JSON export emits a top-level array containing the item.
    #[test]
    fn json_export_serializes_items() {
        let mut buffer = Vec::new();
        export_items(&[sample_item()], ExportFormat::Json, &mut buffer).unwrap();
        let output = String::from_utf8(buffer).unwrap();
        assert!(output.trim_start().starts_with('['));
        assert!(output.contains("sample-id"));
    }

    // NDJSON export puts each item on its own line.
    #[test]
    fn ndjson_export_serializes_items() {
        let mut buffer = Vec::new();
        export_items(&[sample_item()], ExportFormat::Ndjson, &mut buffer).unwrap();
        let output = String::from_utf8(buffer).unwrap();
        assert!(output.lines().next().unwrap().contains("sample-id"));
    }

    // The RSS channel includes an <item> element with the item's GUID.
    #[test]
    fn rss_export_contains_items() {
        let channel = build_rss_channel(&[sample_item()]).unwrap();
        let feed = channel.to_string();
        assert!(feed.contains("<rss"));
        assert!(feed.contains("<item>"));
        assert!(feed.contains("sample-id"));
    }

    // Table output contains the header row and the (truncated) source column.
    #[test]
    fn table_writer_emits_rows() {
        let mut buffer = Vec::new();
        write_items_table(&[sample_item()], &mut buffer).unwrap();
        let output = String::from_utf8(buffer).unwrap();
        assert!(output.contains("Published"));
        assert!(output.contains("patternmatched"));
    }

    // Truncation reserves three characters for the ellipsis.
    #[test]
    fn truncate_column_adds_ellipsis() {
        let truncated = truncate_for_column("abcdefghijklmnopqrstuvwxyz", 8);
        assert_eq!(truncated, "abcde...");
    }

    // Sanity check on the embedded manpage content.
    #[test]
    fn manpage_contains_name_section() {
        assert!(MAN_PAGE.contains("NAME"));
        assert!(MAN_PAGE.contains("pai"));
    }
}