A tool to help manage forked repos with their own history
use anyhow::Result;
use std::collections::HashSet;

use crate::config::{self, Config};
use crate::git::{self, FileContent};
use crate::patch::{self, PatchEntry};

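/// Regenerate the patch set from the diff between the upstream branch and the
/// forkme branch: text changes become patches, binary changes are stored
/// directly, deletions get markers, stale entries are removed, and the lock
/// file is updated to the current upstream commit. When `ignore_uncommitted`
/// is set, files with uncommitted local changes are skipped and listed at the end.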
pub fn run(ignore_uncommitted: bool) -> Result<()> {
    let config = Config::load()?;
    let repo = git::open_repo()?;
    git::ensure_on_forkme_branch(&repo)?;

    let changes = git::get_changes_from_upstream(&repo, &config.upstream.branch)?;

    if changes.is_empty() {
        println!("No changes from upstream. Patches are up to date.");
        return Ok(());
    }

    // Track which files have been processed
    let mut processed_files: HashSet<String> = HashSet::new();
    let mut skipped_files: Vec<String> = Vec::new();

    // Generate and save patches/binaries
    for change in &changes {
        // Check if file has uncommitted changes and should be skipped
        if ignore_uncommitted && git::has_uncommitted_changes(&repo, &change.path)? {
            skipped_files.push(change.path.clone());
            continue;
        }

        // First, remove any existing entries for this file (clean slate)
        patch::delete_all_for_file(&change.path)?;

        match (&change.old_content, &change.new_content) {
            // File deleted
            (Some(_), None) => {
                patch::save_deleted_marker(&change.path)?;
                processed_files.insert(change.path.clone());
                println!(" deleted {}", change.path);
            }

            // File added or modified
            (old, Some(new_content)) => {
                let is_new = old.is_none();

                match new_content {
                    FileContent::Binary(bytes) => {
                        // Save binary file directly
                        patch::save_binary(&change.path, bytes)?;
                        processed_files.insert(change.path.clone());
                        let status = if is_new {
                            "added (binary)"
                        } else {
                            "modified (binary)"
                        };
                        println!(" {} {}", status, change.path);
                    }
                    FileContent::Text(new_text) => {
                        // Generate text patch
                        let old_text = old.as_ref().and_then(|c| c.as_text()).unwrap_or("");
                        let patch_content = patch::generate_patch(Some(old_text), Some(new_text));

                        // Skip patches that contain no hunks, i.e. nothing
                        // beyond the header lines (shouldn't happen, but just in case)
                        if patch_content.lines().count() <= 2 {
                            continue;
                        }

                        patch::save_patch(&change.path, &patch_content)?;
                        processed_files.insert(change.path.clone());
                        let status = if is_new { "added" } else { "modified" };
                        println!(" {} {}", status, change.path);
                    }
                }
            }

            // No content (shouldn't happen)
            (None, None) => continue,
        }
    }

    // Remove entries for files that are no longer modified
    let existing_entries = patch::list_all_entries()?;
    for entry in existing_entries {
        let file_path = entry.file_path();
        if !processed_files.contains(file_path) {
            patch::delete_all_for_file(file_path)?;
            let suffix = match entry {
                PatchEntry::TextPatch(_) => ".patch",
                PatchEntry::Binary(_) => " (binary)",
                PatchEntry::Deleted(_) => ".deleted",
            };
            println!(" removed {}{}", file_path, suffix);
        }
    }

    // Clean up empty directories
    patch::cleanup_empty_dirs()?;

    // Update the lock file with current upstream commit
    let upstream_sha = git::get_upstream_commit_sha(&repo, &config.upstream.branch)?;
    config::save_lock(&upstream_sha)?;

    println!("\nSynced {} files.", processed_files.len());
    println!("Updated forkme.lock to {}", &upstream_sha[..12]);

    if !skipped_files.is_empty() {
        println!(
            "Skipped {} file(s) with uncommitted changes:",
            skipped_files.len()
        );
        for file in &skipped_files {
            println!(" {}", file);
        }
    }

    Ok(())
}