just playing with tangled
at diffedit3 — 2949 lines, 113 kB
// Copyright 2022 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use core::fmt;
use std::borrow::Cow;
use std::collections::{BTreeMap, HashSet};
use std::env::{self, ArgsOs, VarError};
use std::ffi::OsString;
use std::fmt::Debug;
use std::io::{self, Write as _};
use std::path::{Path, PathBuf};
use std::process::ExitCode;
use std::rc::Rc;
use std::str::FromStr;
use std::sync::Arc;
use std::time::SystemTime;
use std::{fs, str};

use clap::builder::{
    MapValueParser, NonEmptyStringValueParser, TypedValueParser, ValueParserFactory,
};
use clap::error::{ContextKind, ContextValue};
use clap::{ArgAction, ArgMatches, Command, FromArgMatches};
use indexmap::{IndexMap, IndexSet};
use itertools::Itertools;
use jj_lib::backend::{ChangeId, CommitId, MergedTreeId, TreeValue};
use jj_lib::commit::Commit;
use jj_lib::fileset::{FilesetExpression, FilesetParseContext};
use jj_lib::git_backend::GitBackend;
use jj_lib::gitignore::{GitIgnoreError, GitIgnoreFile};
use jj_lib::hex_util::to_reverse_hex;
use jj_lib::id_prefix::IdPrefixContext;
use jj_lib::matchers::Matcher;
use jj_lib::merge::MergedTreeValue;
use jj_lib::merged_tree::MergedTree;
use jj_lib::object_id::ObjectId;
use jj_lib::op_store::{OpStoreError, OperationId, RefTarget, WorkspaceId};
use jj_lib::op_walk::OpsetEvaluationError;
use jj_lib::operation::Operation;
use jj_lib::repo::{
    merge_factories_map, CheckOutCommitError, EditCommitError, MutableRepo, ReadonlyRepo, Repo,
    RepoLoader, StoreFactories, StoreLoadError,
};
use jj_lib::repo_path::{FsPathParseError, RepoPath, RepoPathBuf};
use jj_lib::revset::{
    RevsetAliasesMap, RevsetExpression, RevsetExtensions, RevsetFilterPredicate, RevsetIteratorExt,
    RevsetModifier, RevsetParseContext, RevsetWorkspaceContext, SymbolResolverExtension,
};
use jj_lib::rewrite::restore_tree;
use jj_lib::settings::{ConfigResultExt as _, UserSettings};
use jj_lib::signing::SignInitError;
use jj_lib::str_util::StringPattern;
use jj_lib::transaction::Transaction;
use jj_lib::view::View;
use jj_lib::working_copy::{
    CheckoutStats, LockedWorkingCopy, SnapshotOptions, WorkingCopy, WorkingCopyFactory,
};
use jj_lib::workspace::{
    default_working_copy_factories, LockedWorkspace, WorkingCopyFactories, Workspace,
    WorkspaceLoadError, WorkspaceLoader,
};
use jj_lib::{dag_walk, file_util, fileset, git, op_heads_store, op_walk, revset};
use once_cell::unsync::OnceCell;
use tracing::instrument;
use tracing_chrome::ChromeLayerBuilder;
use tracing_subscriber::prelude::*;

use crate::command_error::{
    cli_error, config_error_with_message, handle_command_result, internal_error,
    internal_error_with_message, user_error, user_error_with_hint, user_error_with_message,
    CommandError,
};
use crate::commit_templater::{CommitTemplateLanguage, CommitTemplateLanguageExtension};
use crate::config::{
    new_config_path, AnnotatedValue, CommandNameAndArgs, ConfigSource, LayeredConfigs,
};
use crate::formatter::{FormatRecorder, Formatter, PlainTextFormatter};
use crate::git_util::{
    is_colocated_git_workspace, print_failed_git_export, print_git_import_stats,
};
use crate::merge_tools::{DiffEditor, MergeEditor, MergeToolConfigError};
use crate::operation_templater::OperationTemplateLanguageExtension;
use crate::revset_util::RevsetExpressionEvaluator;
use crate::template_builder::TemplateLanguage;
use crate::template_parser::TemplateAliasesMap;
use crate::templater::{PropertyPlaceholder, TemplateRenderer};
use crate::ui::{ColorChoice, Ui};
use crate::{revset_util, template_builder, text_util};

#[derive(Clone)]
struct ChromeTracingFlushGuard {
    _inner: Option<Rc<tracing_chrome::FlushGuard>>,
}

impl Debug for ChromeTracingFlushGuard {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let Self { _inner } = self;
        f.debug_struct("ChromeTracingFlushGuard")
            .finish_non_exhaustive()
    }
}

/// Handle to initialize or change tracing subscription.
#[derive(Clone, Debug)]
pub struct TracingSubscription {
    reload_log_filter: tracing_subscriber::reload::Handle<
        tracing_subscriber::EnvFilter,
        tracing_subscriber::Registry,
    >,
    _chrome_tracing_flush_guard: ChromeTracingFlushGuard,
}

impl TracingSubscription {
    /// Initializes tracing with the default configuration. This should be
    /// called as early as possible.
    pub fn init() -> Self {
        let filter = tracing_subscriber::EnvFilter::builder()
            .with_default_directive(tracing::metadata::LevelFilter::ERROR.into())
            .from_env_lossy();
        let (filter, reload_log_filter) = tracing_subscriber::reload::Layer::new(filter);

        let (chrome_tracing_layer, chrome_tracing_flush_guard) = match std::env::var("JJ_TRACE") {
            Ok(filename) => {
                let filename = if filename.is_empty() {
                    format!(
                        "jj-trace-{}.json",
                        SystemTime::now()
                            .duration_since(SystemTime::UNIX_EPOCH)
                            .unwrap()
                            .as_secs(),
                    )
                } else {
                    filename
                };
                let include_args = std::env::var("JJ_TRACE_INCLUDE_ARGS").is_ok();
                let (layer, guard) = ChromeLayerBuilder::new()
                    .file(filename)
                    .include_args(include_args)
                    .build();
                (
                    Some(layer),
                    ChromeTracingFlushGuard {
                        _inner: Some(Rc::new(guard)),
                    },
                )
            }
            Err(_) => (None, ChromeTracingFlushGuard { _inner: None }),
        };

        tracing_subscriber::registry()
            .with(
                tracing_subscriber::fmt::Layer::default()
                    .with_writer(std::io::stderr)
                    .with_filter(filter),
            )
            .with(chrome_tracing_layer)
            .init();
        TracingSubscription {
            reload_log_filter,
            _chrome_tracing_flush_guard: chrome_tracing_flush_guard,
        }
    }

    pub fn enable_debug_logging(&self) -> Result<(), CommandError> {
        self.reload_log_filter
            .modify(|filter| {
                *filter = tracing_subscriber::EnvFilter::builder()
                    .with_default_directive(tracing::metadata::LevelFilter::DEBUG.into())
                    .from_env_lossy()
            })
            .map_err(|err| internal_error_with_message("failed to enable debug logging", err))?;
        tracing::info!("debug logging enabled");
        Ok(())
    }
}

pub struct CommandHelper {
    app: Command,
    cwd: PathBuf,
    string_args: Vec<String>,
    matches: ArgMatches,
    global_args: GlobalArgs,
    settings: UserSettings,
    layered_configs: LayeredConfigs,
    revset_extensions: Arc<RevsetExtensions>,
    commit_template_extensions: Vec<Arc<dyn CommitTemplateLanguageExtension>>,
    operation_template_extensions: Vec<Arc<dyn OperationTemplateLanguageExtension>>,
    maybe_workspace_loader: Result<WorkspaceLoader, CommandError>,
    store_factories: StoreFactories,
    working_copy_factories: WorkingCopyFactories,
}

impl CommandHelper {
    pub fn app(&self) -> &Command {
        &self.app
    }

    /// Canonical form of the current working directory path.
    ///
    /// A loaded `Workspace::workspace_root()` also returns a canonical path, so
    /// relative paths can be easily computed from these paths.
    pub fn cwd(&self) -> &Path {
        &self.cwd
    }

    pub fn string_args(&self) -> &Vec<String> {
        &self.string_args
    }

    pub fn matches(&self) -> &ArgMatches {
        &self.matches
    }

    pub fn global_args(&self) -> &GlobalArgs {
        &self.global_args
    }

    pub fn settings(&self) -> &UserSettings {
        &self.settings
    }

    pub fn resolved_config_values(
        &self,
        prefix: &[&str],
    ) -> Result<Vec<AnnotatedValue>, crate::config::ConfigError> {
        self.layered_configs.resolved_config_values(prefix)
    }

    pub fn revset_extensions(&self) -> &Arc<RevsetExtensions> {
        &self.revset_extensions
    }

    /// Loads template aliases from the configs.
    ///
    /// For most commands that depend on a loaded repo, you should use
    /// `WorkspaceCommandHelper::template_aliases_map()` instead.
    fn load_template_aliases(&self, ui: &Ui) -> Result<TemplateAliasesMap, CommandError> {
        load_template_aliases(ui, &self.layered_configs)
    }

    /// Parses template of the given language into evaluation tree.
    ///
    /// This function also loads template aliases from the settings. Use
    /// `WorkspaceCommandHelper::parse_template()` if you've already
    /// instantiated the workspace helper.
    pub fn parse_template<'a, C: Clone + 'a, L: TemplateLanguage<'a> + ?Sized>(
        &self,
        ui: &Ui,
        language: &L,
        template_text: &str,
        wrap_self: impl Fn(PropertyPlaceholder<C>) -> L::Property,
    ) -> Result<TemplateRenderer<'a, C>, CommandError> {
        let aliases = self.load_template_aliases(ui)?;
        Ok(template_builder::parse(
            language,
            template_text,
            &aliases,
            wrap_self,
        )?)
    }

    pub fn operation_template_extensions(&self) -> &[Arc<dyn OperationTemplateLanguageExtension>] {
        &self.operation_template_extensions
    }

    pub fn workspace_loader(&self) -> Result<&WorkspaceLoader, CommandError> {
        self.maybe_workspace_loader.as_ref().map_err(Clone::clone)
    }

    /// Loads workspace and repo, then snapshots the working copy if allowed.
    #[instrument(skip(self, ui))]
    pub fn workspace_helper(&self, ui: &mut Ui) -> Result<WorkspaceCommandHelper, CommandError> {
        let mut workspace_command = self.workspace_helper_no_snapshot(ui)?;
        workspace_command.maybe_snapshot(ui)?;
        Ok(workspace_command)
    }

    /// Loads workspace and repo, but never snapshots the working copy. Most
    /// commands should use `workspace_helper()` instead.
    #[instrument(skip(self, ui))]
    pub fn workspace_helper_no_snapshot(
        &self,
        ui: &mut Ui,
    ) -> Result<WorkspaceCommandHelper, CommandError> {
        let workspace = self.load_workspace()?;
        let op_head = self.resolve_operation(ui, workspace.repo_loader())?;
        let repo = workspace.repo_loader().load_at(&op_head)?;
        self.for_loaded_repo(ui, workspace, repo)
    }

    pub fn get_working_copy_factory(&self) -> Result<&dyn WorkingCopyFactory, CommandError> {
        let loader = self.workspace_loader()?;

        // We convert StoreLoadError -> WorkspaceLoadError -> CommandError
        let factory: Result<_, WorkspaceLoadError> = loader
            .get_working_copy_factory(&self.working_copy_factories)
            .map_err(|e| e.into());
        let factory = factory
            .map_err(|err| map_workspace_load_error(err, self.global_args.repository.as_deref()))?;
        Ok(factory)
    }

    #[instrument(skip_all)]
    pub fn load_workspace(&self) -> Result<Workspace, CommandError> {
        let loader = self.workspace_loader()?;
        loader
            .load(
                &self.settings,
                &self.store_factories,
                &self.working_copy_factories,
            )
            .map_err(|err| map_workspace_load_error(err, self.global_args.repository.as_deref()))
    }

    #[instrument(skip_all)]
    pub fn resolve_operation(
        &self,
        ui: &mut Ui,
        repo_loader: &RepoLoader,
    ) -> Result<Operation, CommandError> {
        if self.global_args.at_operation == "@" {
            op_heads_store::resolve_op_heads(
                repo_loader.op_heads_store().as_ref(),
                repo_loader.op_store(),
                |op_heads| {
                    writeln!(
                        ui.status(),
                        "Concurrent modification detected, resolving automatically.",
                    )?;
                    let base_repo = repo_loader.load_at(&op_heads[0])?;
                    // TODO: It may be helpful to print each operation we're merging here
                    let mut tx =
                        start_repo_transaction(&base_repo, &self.settings, &self.string_args);
                    for other_op_head in op_heads.into_iter().skip(1) {
                        tx.merge_operation(other_op_head)?;
                        let num_rebased = tx.mut_repo().rebase_descendants(&self.settings)?;
                        if num_rebased > 0 {
                            writeln!(
                                ui.status(),
                                "Rebased {num_rebased} descendant commits onto commits rewritten \
                                 by other operation"
                            )?;
                        }
                    }
                    Ok(tx
                        .write("resolve concurrent operations")
                        .leave_unpublished()
                        .operation()
                        .clone())
                },
            )
        } else {
            let operation =
                op_walk::resolve_op_for_load(repo_loader, &self.global_args.at_operation)?;
            Ok(operation)
        }
    }

    #[instrument(skip_all)]
    pub fn for_loaded_repo(
        &self,
        ui: &mut Ui,
        workspace: Workspace,
        repo: Arc<ReadonlyRepo>,
    ) -> Result<WorkspaceCommandHelper, CommandError> {
        WorkspaceCommandHelper::new(ui, self, workspace, repo)
    }
}

/// A ReadonlyRepo along with user-config-dependent derived data. The derived
/// data is lazily loaded.
struct ReadonlyUserRepo {
    repo: Arc<ReadonlyRepo>,
    id_prefix_context: OnceCell<IdPrefixContext>,
}

impl ReadonlyUserRepo {
    fn new(repo: Arc<ReadonlyRepo>) -> Self {
        Self {
            repo,
            id_prefix_context: OnceCell::new(),
        }
    }

    pub fn git_backend(&self) -> Option<&GitBackend> {
        self.repo.store().backend_impl().downcast_ref()
    }
}

/// A branch that should be advanced to satisfy the "advance-branches" feature.
/// This is a helper for `WorkspaceCommandTransaction`. It provides a type-safe
/// way to separate the work of checking whether a branch can be advanced and
/// actually advancing it. Advancing the branch never fails, but can't be done
/// until the new `CommitId` is available. Splitting the work in this way also
/// allows us to identify eligible branches without actually moving them and
/// return config errors to the user early.
pub struct AdvanceableBranch {
    name: String,
    old_commit_id: CommitId,
}

/// Helper for parsing and evaluating settings for the advance-branches feature.
/// Settings are configured in the jj config.toml as lists of [`StringPattern`]s
/// for enabled and disabled branches. Example:
/// ```toml
/// [experimental-advance-branches]
/// # Enable the feature for all branches except "main".
/// enabled-branches = ["glob:*"]
/// disabled-branches = ["main"]
/// ```
struct AdvanceBranchesSettings {
    enabled_branches: Vec<StringPattern>,
    disabled_branches: Vec<StringPattern>,
}

impl AdvanceBranchesSettings {
    fn from_config(config: &config::Config) -> Result<Self, CommandError> {
        let get_setting = |setting_key| {
            let setting = format!("experimental-advance-branches.{setting_key}");
            match config.get::<Vec<String>>(&setting).optional()? {
                Some(patterns) => patterns
                    .into_iter()
                    .map(|s| {
                        StringPattern::parse(&s).map_err(|e| {
                            config_error_with_message(
                                format!("Error parsing '{s}' for {setting}"),
                                e,
                            )
                        })
                    })
                    .collect(),
                None => Ok(Vec::new()),
            }
        };
        Ok(Self {
            enabled_branches: get_setting("enabled-branches")?,
            disabled_branches: get_setting("disabled-branches")?,
        })
    }

    /// Returns true if the advance-branches feature is enabled for
    /// `branch_name`.
    fn branch_is_eligible(&self, branch_name: &str) -> bool {
        if self
            .disabled_branches
            .iter()
            .any(|d| d.matches(branch_name))
        {
            return false;
        }
        self.enabled_branches.iter().any(|e| e.matches(branch_name))
    }

    /// Returns true if the config includes at least one "enabled-branches"
    /// pattern.
    fn feature_enabled(&self) -> bool {
        !self.enabled_branches.is_empty()
    }
}

/// Provides utilities for writing a command that works on a [`Workspace`]
/// (which most commands do).
pub struct WorkspaceCommandHelper {
    cwd: PathBuf,
    string_args: Vec<String>,
    global_args: GlobalArgs,
    settings: UserSettings,
    workspace: Workspace,
    user_repo: ReadonlyUserRepo,
    revset_extensions: Arc<RevsetExtensions>,
    // TODO: Parsed template can be cached if it doesn't capture 'repo lifetime
    commit_summary_template_text: String,
    commit_template_extensions: Vec<Arc<dyn CommitTemplateLanguageExtension>>,
    revset_aliases_map: RevsetAliasesMap,
    template_aliases_map: TemplateAliasesMap,
    may_update_working_copy: bool,
    working_copy_shared_with_git: bool,
}

impl WorkspaceCommandHelper {
    #[instrument(skip_all)]
    pub fn new(
        ui: &mut Ui,
        command: &CommandHelper,
        workspace: Workspace,
        repo: Arc<ReadonlyRepo>,
    ) -> Result<Self, CommandError> {
        let settings = command.settings.clone();
        let commit_summary_template_text =
            settings.config().get_string("templates.commit_summary")?;
        let revset_aliases_map = revset_util::load_revset_aliases(ui, &command.layered_configs)?;
        let template_aliases_map = command.load_template_aliases(ui)?;
        let loaded_at_head = command.global_args.at_operation == "@";
        let may_update_working_copy = loaded_at_head && !command.global_args.ignore_working_copy;
        let working_copy_shared_with_git = is_colocated_git_workspace(&workspace, &repo);
        let helper = Self {
            cwd: command.cwd.clone(),
            string_args: command.string_args.clone(),
            global_args: command.global_args.clone(),
            settings,
            workspace,
            user_repo: ReadonlyUserRepo::new(repo),
            revset_extensions: command.revset_extensions.clone(),
            commit_summary_template_text,
            commit_template_extensions: command.commit_template_extensions.clone(),
            revset_aliases_map,
            template_aliases_map,
            may_update_working_copy,
            working_copy_shared_with_git,
        };
        // Parse commit_summary template (and short-prefixes revset) early to
        // report error before starting mutable operation.
        helper.parse_commit_template(&helper.commit_summary_template_text)?;
        Ok(helper)
    }

    pub fn git_backend(&self) -> Option<&GitBackend> {
        self.user_repo.git_backend()
    }

    pub fn check_working_copy_writable(&self) -> Result<(), CommandError> {
        if self.may_update_working_copy {
            Ok(())
        } else {
            let hint = if self.global_args.ignore_working_copy {
                "Don't use --ignore-working-copy."
            } else {
                "Don't use --at-op."
            };
            Err(user_error_with_hint(
                "This command must be able to update the working copy.",
                hint,
            ))
        }
    }

    /// Snapshot the working copy if allowed, and import Git refs if the working
    /// copy is colocated with Git.
    #[instrument(skip_all)]
    pub fn maybe_snapshot(&mut self, ui: &mut Ui) -> Result<(), CommandError> {
        if self.may_update_working_copy {
            if self.working_copy_shared_with_git {
                self.import_git_head(ui)?;
            }
            // Because the Git refs (except HEAD) aren't imported yet, the ref
            // pointing to the new working-copy commit might not be exported.
            // In that situation, the ref would be conflicted anyway, so export
            // failure is okay.
            self.snapshot_working_copy(ui)?;
            // import_git_refs() can rebase the working-copy commit.
            if self.working_copy_shared_with_git {
                self.import_git_refs(ui)?;
            }
        }
        Ok(())
    }

    /// Imports new HEAD from the colocated Git repo.
    ///
    /// If the Git HEAD has changed, this function abandons our old checkout and
    /// checks out the new Git HEAD. The working-copy state will be reset to
    /// point to the new Git HEAD. The working-copy contents won't be updated.
    #[instrument(skip_all)]
    fn import_git_head(&mut self, ui: &mut Ui) -> Result<(), CommandError> {
        assert!(self.may_update_working_copy);
        let mut tx = self.start_transaction();
        git::import_head(tx.mut_repo())?;
        if !tx.mut_repo().has_changes() {
            return Ok(());
        }

        // TODO: There are various ways to get duplicated working-copy
        // commits. Some of them could be mitigated by checking the working-copy
        // operation id after acquiring the lock, but that isn't enough.
        //
        // - moved HEAD was observed by multiple jj processes, and new working-copy
        //   commits are created concurrently.
        // - new HEAD was exported by jj, but the operation isn't committed yet.
        // - new HEAD was exported by jj, but the new working-copy commit isn't checked
        //   out yet.

        let mut tx = tx.into_inner();
        let old_git_head = self.repo().view().git_head().clone();
        let new_git_head = tx.mut_repo().view().git_head().clone();
        if let Some(new_git_head_id) = new_git_head.as_normal() {
            let workspace_id = self.workspace_id().to_owned();
            if let Some(old_wc_commit_id) = self.repo().view().get_wc_commit_id(&workspace_id) {
                tx.mut_repo()
                    .record_abandoned_commit(old_wc_commit_id.clone());
            }
            let new_git_head_commit = tx.mut_repo().store().get_commit(new_git_head_id)?;
            tx.mut_repo()
                .check_out(workspace_id, &self.settings, &new_git_head_commit)?;
            let mut locked_ws = self.workspace.start_working_copy_mutation()?;
            // The working copy was presumably updated by the git command that updated
            // HEAD, so we just need to reset our working copy
            // state to it without updating working copy files.
            locked_ws.locked_wc().reset(&new_git_head_commit)?;
            tx.mut_repo().rebase_descendants(&self.settings)?;
            self.user_repo = ReadonlyUserRepo::new(tx.commit("import git head"));
            locked_ws.finish(self.user_repo.repo.op_id().clone())?;
            if old_git_head.is_present() {
                writeln!(
                    ui.status(),
                    "Reset the working copy parent to the new Git HEAD."
                )?;
            } else {
                // Don't print verbose message on initial checkout.
            }
        } else {
            // Unlikely, but the HEAD ref got deleted by git?
            self.finish_transaction(ui, tx, "import git head")?;
        }
        Ok(())
    }

    /// Imports branches and tags from the underlying Git repo, abandons old
    /// branches.
    ///
    /// If the working-copy branch is rebased, and if update is allowed, the new
    /// working-copy commit will be checked out.
    ///
    /// This function does not import the Git HEAD, but the HEAD may be reset to
    /// the working copy parent if the repository is colocated.
    #[instrument(skip_all)]
    fn import_git_refs(&mut self, ui: &mut Ui) -> Result<(), CommandError> {
        let git_settings = self.settings.git_settings();
        let mut tx = self.start_transaction();
        // Automated import shouldn't fail because of reserved remote name.
        let stats = git::import_some_refs(tx.mut_repo(), &git_settings, |ref_name| {
            !git::is_reserved_git_remote_ref(ref_name)
        })?;
        if !tx.mut_repo().has_changes() {
            return Ok(());
        }

        print_git_import_stats(ui, tx.repo(), &stats, false)?;
        let mut tx = tx.into_inner();
        // Rebase here to show slightly different status message.
        let num_rebased = tx.mut_repo().rebase_descendants(&self.settings)?;
        if num_rebased > 0 {
            writeln!(
                ui.status(),
                "Rebased {num_rebased} descendant commits off of commits rewritten from git"
            )?;
        }
        self.finish_transaction(ui, tx, "import git refs")?;
        writeln!(
            ui.status(),
            "Done importing changes from the underlying Git repo."
        )?;
        Ok(())
    }

    pub fn repo(&self) -> &Arc<ReadonlyRepo> {
        &self.user_repo.repo
    }

    pub fn working_copy(&self) -> &dyn WorkingCopy {
        self.workspace.working_copy()
    }

    pub fn unchecked_start_working_copy_mutation(
        &mut self,
    ) -> Result<(LockedWorkspace, Commit), CommandError> {
        self.check_working_copy_writable()?;
        let wc_commit = if let Some(wc_commit_id) = self.get_wc_commit_id() {
            self.repo().store().get_commit(wc_commit_id)?
        } else {
            return Err(user_error("Nothing checked out in this workspace"));
        };

        let locked_ws = self.workspace.start_working_copy_mutation()?;

        Ok((locked_ws, wc_commit))
    }

    pub fn start_working_copy_mutation(
        &mut self,
    ) -> Result<(LockedWorkspace, Commit), CommandError> {
        let (mut locked_ws, wc_commit) = self.unchecked_start_working_copy_mutation()?;
        if wc_commit.tree_id() != locked_ws.locked_wc().old_tree_id() {
            return Err(user_error("Concurrent working copy operation. Try again."));
        }
        Ok((locked_ws, wc_commit))
    }

    pub fn workspace_root(&self) -> &PathBuf {
        self.workspace.workspace_root()
    }

    pub fn workspace_id(&self) -> &WorkspaceId {
        self.workspace.workspace_id()
    }

    pub fn get_wc_commit_id(&self) -> Option<&CommitId> {
        self.repo().view().get_wc_commit_id(self.workspace_id())
    }

    pub fn working_copy_shared_with_git(&self) -> bool {
        self.working_copy_shared_with_git
    }

    pub fn format_file_path(&self, file: &RepoPath) -> String {
        file_util::relative_path(&self.cwd, &file.to_fs_path(self.workspace_root()))
            .to_str()
            .unwrap()
            .to_owned()
    }

    /// Parses a path relative to cwd into a RepoPath, which is relative to the
    /// workspace root.
    pub fn parse_file_path(&self, input: &str) -> Result<RepoPathBuf, FsPathParseError> {
        RepoPathBuf::parse_fs_path(&self.cwd, self.workspace_root(), input)
    }

    /// Parses the given strings as file patterns.
    pub fn parse_file_patterns(
        &self,
        values: &[String],
    ) -> Result<FilesetExpression, CommandError> {
        // TODO: This function might be superseded by parse_union_filesets(),
        // but it would be weird if parse_union_*() had a special case for the
        // empty arguments.
        if values.is_empty() {
            Ok(FilesetExpression::all())
        } else if self.settings.config().get_bool("ui.allow-filesets")? {
            self.parse_union_filesets(values)
        } else {
            let expressions = values
                .iter()
                .map(|v| self.parse_file_path(v))
                .map_ok(FilesetExpression::prefix_path)
                .try_collect()?;
            Ok(FilesetExpression::union_all(expressions))
        }
    }

    /// Parses the given fileset expressions and concatenates them all.
    pub fn parse_union_filesets(
        &self,
        file_args: &[String], // TODO: introduce FileArg newtype?
    ) -> Result<FilesetExpression, CommandError> {
        let ctx = self.fileset_parse_context();
        let expressions: Vec<_> = file_args
            .iter()
            .map(|arg| fileset::parse_maybe_bare(arg, &ctx))
            .try_collect()?;
        Ok(FilesetExpression::union_all(expressions))
    }

    pub(crate) fn fileset_parse_context(&self) -> FilesetParseContext<'_> {
        FilesetParseContext {
            cwd: &self.cwd,
            workspace_root: self.workspace.workspace_root(),
        }
    }

    #[instrument(skip_all)]
    pub fn base_ignores(&self) -> Result<Arc<GitIgnoreFile>, GitIgnoreError> {
        fn get_excludes_file_path(config: &gix::config::File) -> Option<PathBuf> {
            // TODO: maybe use path_by_key() and interpolate(), which can process non-utf-8
            // path on Unix.
            if let Some(value) = config.string_by_key("core.excludesFile") {
                str::from_utf8(&value)
                    .ok()
                    .map(crate::git_util::expand_git_path)
            } else {
                xdg_config_home().ok().map(|x| x.join("git").join("ignore"))
            }
        }

        fn xdg_config_home() -> Result<PathBuf, VarError> {
            if let Ok(x) = std::env::var("XDG_CONFIG_HOME") {
                if !x.is_empty() {
                    return Ok(PathBuf::from(x));
                }
            }
            std::env::var("HOME").map(|x| Path::new(&x).join(".config"))
        }

        let mut git_ignores = GitIgnoreFile::empty();
        if let Some(git_backend) = self.git_backend() {
            let git_repo = git_backend.git_repo();
            if let Some(excludes_file_path) = get_excludes_file_path(&git_repo.config_snapshot()) {
                git_ignores = git_ignores.chain_with_file("", excludes_file_path)?;
            }
            git_ignores = git_ignores
                .chain_with_file("", git_backend.git_repo_path().join("info").join("exclude"))?;
        } else if let Ok(git_config) = gix::config::File::from_globals() {
            if let Some(excludes_file_path) = get_excludes_file_path(&git_config) {
                git_ignores = git_ignores.chain_with_file("", excludes_file_path)?;
            }
        }
        Ok(git_ignores)
    }

    /// Loads diff editor from the settings.
    ///
    /// If the `tool_name` isn't specified, the default editor will be returned.
    pub fn diff_editor(
        &self,
        ui: &Ui,
        tool_name: Option<&str>,
    ) -> Result<DiffEditor, CommandError> {
        let base_ignores = self.base_ignores()?;
        if let Some(name) = tool_name {
            Ok(DiffEditor::with_name(name, &self.settings, base_ignores)?)
        } else {
            Ok(DiffEditor::from_settings(ui, &self.settings, base_ignores)?)
        }
    }

    /// Conditionally loads diff editor from the settings.
    ///
    /// If the `tool_name` is specified, interactive session is implied.
    pub fn diff_selector(
        &self,
        ui: &Ui,
        tool_name: Option<&str>,
        force_interactive: bool,
    ) -> Result<DiffSelector, CommandError> {
        if tool_name.is_some() || force_interactive {
            Ok(DiffSelector::Interactive(self.diff_editor(ui, tool_name)?))
        } else {
            Ok(DiffSelector::NonInteractive)
        }
    }

    /// Loads 3-way merge editor from the settings.
    ///
    /// If the `tool_name` isn't specified, the default editor will be returned.
    pub fn merge_editor(
        &self,
        ui: &Ui,
        tool_name: Option<&str>,
    ) -> Result<MergeEditor, MergeToolConfigError> {
        if let Some(name) = tool_name {
            MergeEditor::with_name(name, &self.settings)
        } else {
            MergeEditor::from_settings(ui, &self.settings)
        }
    }

    pub fn resolve_single_op(&self, op_str: &str) -> Result<Operation, OpsetEvaluationError> {
        op_walk::resolve_op_with_repo(self.repo(), op_str)
    }

    /// Resolve a revset to a single revision. Return an error if the revset is
    /// empty or has multiple revisions.
    pub fn resolve_single_rev(&self, revision_arg: &RevisionArg) -> Result<Commit, CommandError> {
        let expression = self.parse_revset(revision_arg)?;
        let should_hint_about_all_prefix = false;
        revset_util::evaluate_revset_to_single_commit(
            revision_arg.as_ref(),
            &expression,
            || self.commit_summary_template(),
            should_hint_about_all_prefix,
        )
    }

    /// Evaluates revset expressions to non-empty set of commits. The returned
    /// set preserves the order of the input expressions.
    ///
    /// If an input expression is prefixed with `all:`, it may be evaluated to
    /// any number of revisions (including 0.)
    pub fn resolve_some_revsets_default_single(
        &self,
        revision_args: &[RevisionArg],
    ) -> Result<IndexSet<Commit>, CommandError> {
        let mut all_commits = IndexSet::new();
        for revision_arg in revision_args {
            let (expression, modifier) = self.parse_revset_with_modifier(revision_arg)?;
            let all = match modifier {
                Some(RevsetModifier::All) => true,
                None => self
                    .settings
                    .config()
                    .get_bool("ui.always-allow-large-revsets")?,
            };
            if all {
                for commit in expression.evaluate_to_commits()? {
                    all_commits.insert(commit?);
                }
            } else {
                let should_hint_about_all_prefix = true;
                let commit = revset_util::evaluate_revset_to_single_commit(
                    revision_arg.as_ref(),
                    &expression,
                    || self.commit_summary_template(),
                    should_hint_about_all_prefix,
                )?;
                let commit_hash = short_commit_hash(commit.id());
                if !all_commits.insert(commit) {
                    return Err(user_error(format!(
                        r#"More than one revset resolved to revision {commit_hash}"#,
                    )));
                }
            }
        }
        if all_commits.is_empty() {
            Err(user_error("Empty revision set"))
        } else {
            Ok(all_commits)
        }
    }

    pub fn parse_revset(
        &self,
        revision_arg: &RevisionArg,
    ) -> Result<RevsetExpressionEvaluator<'_>, CommandError> {
        let expression = revset::parse(revision_arg.as_ref(), &self.revset_parse_context())?;
        self.attach_revset_evaluator(expression)
    }

    // TODO: maybe better to parse all: prefix even if it is the default? It
    // shouldn't be allowed in aliases, though.
    fn parse_revset_with_modifier(
        &self,
        revision_arg: &RevisionArg,
    ) -> Result<(RevsetExpressionEvaluator<'_>, Option<RevsetModifier>), CommandError> {
        let context = self.revset_parse_context();
        let (expression, modifier) = revset::parse_with_modifier(revision_arg.as_ref(), &context)?;
        Ok((self.attach_revset_evaluator(expression)?, modifier))
    }

    /// Parses the given revset expressions and concatenates them all.
    pub fn parse_union_revsets(
        &self,
        revision_args: &[RevisionArg],
    ) -> Result<RevsetExpressionEvaluator<'_>, CommandError> {
        let context = self.revset_parse_context();
        let expressions: Vec<_> = revision_args
            .iter()
            .map(|arg| revset::parse(arg.as_ref(), &context))
            .try_collect()?;
        let expression = RevsetExpression::union_all(&expressions);
        self.attach_revset_evaluator(expression)
    }

    pub fn attach_revset_evaluator(
        &self,
        expression: Rc<RevsetExpression>,
    ) -> Result<RevsetExpressionEvaluator<'_>, CommandError> {
        Ok(RevsetExpressionEvaluator::new(
            self.repo().as_ref(),
            self.revset_extensions.clone(),
            self.id_prefix_context()?,
            expression,
        ))
    }

    pub(crate) fn revset_parse_context(&self) -> RevsetParseContext {
        let workspace_context = RevsetWorkspaceContext {
            cwd: &self.cwd,
            workspace_id: self.workspace_id(),
            workspace_root: self.workspace.workspace_root(),
        };
        RevsetParseContext {
            aliases_map: &self.revset_aliases_map,
            user_email: self.settings.user_email(),
            extensions: &self.revset_extensions,
            workspace: Some(workspace_context),
        }
    }

    pub fn id_prefix_context(&self) -> Result<&IdPrefixContext, CommandError> {
        self.user_repo.id_prefix_context.get_or_try_init(|| {
            let mut context: IdPrefixContext = IdPrefixContext::new(self.revset_extensions.clone());
            let revset_string: String = self
                .settings
                .config()
                .get_string("revsets.short-prefixes")
                .unwrap_or_else(|_| self.settings.default_revset());
            if !revset_string.is_empty() {
                let disambiguation_revset =
                    revset::parse(&revset_string, &self.revset_parse_context()).map_err(|err| {
                        config_error_with_message("Invalid `revsets.short-prefixes`", err)
                    })?;
                context = context.disambiguate_within(revset::optimize(disambiguation_revset));
            }
            Ok(context)
        })
    }

    pub fn template_aliases_map(&self) -> &TemplateAliasesMap {
        &self.template_aliases_map
    }

    /// Parses template of the given language into evaluation tree.
    ///
    /// `wrap_self` specifies the type of the top-level property, which should
    /// be one of the `L::wrap_*()` functions.
    pub fn parse_template<'a, C: Clone + 'a, L: TemplateLanguage<'a> + ?Sized>(
        &self,
        language: &L,
        template_text: &str,
        wrap_self: impl Fn(PropertyPlaceholder<C>) -> L::Property,
    ) -> Result<TemplateRenderer<'a, C>, CommandError> {
        let aliases = &self.template_aliases_map;
        Ok(template_builder::parse(
            language,
            template_text,
            aliases,
            wrap_self,
        )?)
    }

    /// Parses commit template into evaluation tree.
    pub fn parse_commit_template(
        &self,
        template_text: &str,
    ) -> Result<TemplateRenderer<'_, Commit>, CommandError> {
        let language = self.commit_template_language()?;
        self.parse_template(
            &language,
            template_text,
            CommitTemplateLanguage::wrap_commit,
        )
    }

    /// Creates commit template language environment for this workspace.
    pub fn commit_template_language(&self) -> Result<CommitTemplateLanguage<'_>, CommandError> {
        Ok(CommitTemplateLanguage::new(
            self.repo().as_ref(),
            self.workspace_id(),
            self.revset_parse_context(),
            self.id_prefix_context()?,
            &self.commit_template_extensions,
        ))
    }

    /// Template for one-line summary of a commit.
    pub fn commit_summary_template(&self) -> TemplateRenderer<'_, Commit> {
        self.parse_commit_template(&self.commit_summary_template_text)
            .expect("parse error should be confined by WorkspaceCommandHelper::new()")
    }

    /// Returns one-line summary of the given `commit`.
    ///
    /// Use `write_commit_summary()` to get colorized output. Use
    /// `commit_summary_template()` if you have many commits to process.
    pub fn format_commit_summary(&self, commit: &Commit) -> String {
        let mut output = Vec::new();
        self.write_commit_summary(&mut PlainTextFormatter::new(&mut output), commit)
            .expect("write() to PlainTextFormatter should never fail");
        String::from_utf8(output).expect("template output should be utf-8 bytes")
    }

    /// Writes one-line summary of the given `commit`.
    ///
    /// Use `commit_summary_template()` if you have many commits to process.
    #[instrument(skip_all)]
    pub fn write_commit_summary(
        &self,
        formatter: &mut dyn Formatter,
        commit: &Commit,
    ) -> std::io::Result<()> {
        self.commit_summary_template().format(commit, formatter)
    }

    pub fn check_rewritable<'a>(
        &self,
        commits: impl IntoIterator<Item = &'a CommitId>,
    ) -> Result<(), CommandError> {
        if self.global_args.ignore_immutable {
            let root_id = self.repo().store().root_commit_id();
            return if commits.into_iter().contains(root_id) {
                Err(user_error(format!(
                    "The root commit {} is immutable",
                    short_commit_hash(root_id),
                )))
            } else {
                Ok(())
            };
        }
        let to_rewrite_revset =
            RevsetExpression::commits(commits.into_iter().cloned().collect_vec());
        let immutable = revset_util::parse_immutable_expression(&self.revset_parse_context())
            .map_err(|e| {
                config_error_with_message("Invalid `revset-aliases.immutable_heads()`", e)
            })?;
        let mut expression = self.attach_revset_evaluator(immutable)?;
        expression.intersect_with(&to_rewrite_revset);

        let mut commit_id_iter = expression.evaluate_to_commit_ids().map_err(|e| {
            config_error_with_message("Invalid `revset-aliases.immutable_heads()`", e)
        })?;

        if let Some(commit_id) = commit_id_iter.next() {
            let error = if &commit_id == self.repo().store().root_commit_id() {
                user_error(format!(
                    "The root commit {} is immutable",
                    short_commit_hash(&commit_id),
                ))
            } else {
                user_error_with_hint(
                    format!("Commit {} is immutable", short_commit_hash(&commit_id)),
                    "Pass `--ignore-immutable` or configure the set of immutable commits via \
                     `revset-aliases.immutable_heads()`.",
                )
            };
            return Err(error);
        }

        Ok(())
    }

    #[instrument(skip_all)]
    fn snapshot_working_copy(&mut self, ui: &mut Ui) -> Result<(), CommandError> {
        let workspace_id = self.workspace_id().to_owned();
        let get_wc_commit = |repo: &ReadonlyRepo| -> Result<Option<_>, _> {
            repo.view()
                .get_wc_commit_id(&workspace_id)
                .map(|id| repo.store().get_commit(id))
                .transpose()
        };
        let repo = self.repo().clone();
        let Some(wc_commit) = get_wc_commit(&repo)? else {
            // If the workspace has been deleted, it's unclear what to do, so we just skip
            // committing the working copy.
            return Ok(());
        };
        let base_ignores = self.base_ignores()?;

        // Compare working-copy tree and operation with repo's, and reload as needed.
        let mut locked_ws = self.workspace.start_working_copy_mutation()?;
        let old_op_id = locked_ws.locked_wc().old_operation_id().clone();
        let (repo, wc_commit) =
            match check_stale_working_copy(locked_ws.locked_wc(), &wc_commit, &repo) {
                Ok(WorkingCopyFreshness::Fresh) => (repo, wc_commit),
                Ok(WorkingCopyFreshness::Updated(wc_operation)) => {
                    let repo = repo.reload_at(&wc_operation)?;
                    let wc_commit = if let Some(wc_commit) = get_wc_commit(&repo)? {
                        wc_commit
                    } else {
                        return Ok(()); // The workspace has been deleted (see above)
                    };
                    (repo, wc_commit)
                }
                Ok(WorkingCopyFreshness::WorkingCopyStale) => {
                    return Err(user_error_with_hint(
                        format!(
                            "The working copy is stale (not updated since operation {}).",
                            short_operation_hash(&old_op_id)
                        ),
                        "Run `jj workspace update-stale` to update it.
See https://github.com/martinvonz/jj/blob/main/docs/working-copy.md#stale-working-copy \
                         for more information.",
                    ));
                }
                Ok(WorkingCopyFreshness::SiblingOperation) => {
                    return Err(internal_error(format!(
                        "The repo was loaded at operation {}, which seems to be a sibling of the \
                         working copy's operation {}",
                        short_operation_hash(repo.op_id()),
                        short_operation_hash(&old_op_id)
                    )));
                }
                Err(OpStoreError::ObjectNotFound { .. }) => {
                    return Err(user_error_with_hint(
                        "Could not read working copy's operation.",
                        "Run `jj workspace update-stale` to recover.
See https://github.com/martinvonz/jj/blob/main/docs/working-copy.md#stale-working-copy \
                         for more information.",
                    ))
                }
                Err(e) => return Err(e.into()),
            };
        self.user_repo = ReadonlyUserRepo::new(repo);
        let progress = crate::progress::snapshot_progress(ui);
        let new_tree_id = locked_ws.locked_wc().snapshot(SnapshotOptions {
            base_ignores,
            fsmonitor_kind: self.settings.fsmonitor_kind()?,
            progress: progress.as_ref().map(|x| x as _),
            max_new_file_size: self.settings.max_new_file_size()?,
        })?;
        drop(progress);
        if new_tree_id != *wc_commit.tree_id() {
            let mut tx =
                start_repo_transaction(&self.user_repo.repo, &self.settings, &self.string_args);
            tx.set_is_snapshot(true);
            let mut_repo = tx.mut_repo();
            let commit = mut_repo
                .rewrite_commit(&self.settings, &wc_commit)
                .set_tree_id(new_tree_id)
                .write()?;
            mut_repo.set_wc_commit(workspace_id, commit.id().clone())?;

            // Rebase descendants
            let num_rebased = mut_repo.rebase_descendants(&self.settings)?;
            if num_rebased > 0 {
                writeln!(
                    ui.status(),
                    "Rebased {num_rebased} descendant commits onto updated working copy"
                )?;
            }

            if self.working_copy_shared_with_git {
                let failed_branches = git::export_refs(mut_repo)?;
                print_failed_git_export(ui, &failed_branches)?;
            }

            self.user_repo = ReadonlyUserRepo::new(tx.commit("snapshot working copy"));
        }
        locked_ws.finish(self.user_repo.repo.op_id().clone())?;
        Ok(())
    }

    fn update_working_copy(
        &mut self,
        ui: &mut Ui,
        maybe_old_commit: Option<&Commit>,
        new_commit: &Commit,
    ) -> Result<(), CommandError> {
        assert!(self.may_update_working_copy);
        let stats = update_working_copy(
            &self.user_repo.repo,
            &mut self.workspace,
            maybe_old_commit,
            new_commit,
        )?;
        if Some(new_commit) != maybe_old_commit {
            if let Some(mut formatter) = ui.status_formatter() {
                let template = self.commit_summary_template();
                write!(formatter, "Working copy now at: ")?;
                formatter.with_label("working_copy", |fmt| template.format(new_commit, fmt))?;
                writeln!(formatter)?;
                for parent in new_commit.parents() {
                    //                "Working copy now at: "
                    write!(formatter, "Parent commit      : ")?;
                    template.format(&parent, formatter.as_mut())?;
                    writeln!(formatter)?;
                }
            }
        }
        if let Some(stats) = stats {
            print_checkout_stats(ui, stats, new_commit)?;
        }
        if Some(new_commit) != maybe_old_commit {
            if let Some(mut formatter) = ui.status_formatter() {
                let conflicts = new_commit.tree()?.conflicts().collect_vec();
                if !conflicts.is_empty() {
                    writeln!(formatter, "There are unresolved conflicts at these paths:")?;
                    print_conflicted_paths(&conflicts, formatter.as_mut(), self)?;
                }
            }
        }
        Ok(())
    }

    pub fn start_transaction(&mut self) -> WorkspaceCommandTransaction {
        let tx = start_repo_transaction(self.repo(), &self.settings, &self.string_args);
        WorkspaceCommandTransaction { helper: self, tx }
    }

    fn finish_transaction(
        &mut self,
        ui: &mut Ui,
        mut tx: Transaction,
        description: impl Into<String>,
    ) -> Result<(), CommandError> {
        if !tx.mut_repo().has_changes() {
            writeln!(ui.status(), "Nothing changed.")?;
            return Ok(());
        }
        let num_rebased = tx.mut_repo().rebase_descendants(&self.settings)?;
        if num_rebased > 0 {
            writeln!(ui.status(), "Rebased {num_rebased} descendant commits")?;
        }

        let old_repo = tx.base_repo().clone();

        let maybe_old_wc_commit = old_repo
            .view()
            .get_wc_commit_id(self.workspace_id())
            .map(|commit_id| tx.base_repo().store().get_commit(commit_id))
            .transpose()?;
        let maybe_new_wc_commit = tx
            .repo()
            .view()
            .get_wc_commit_id(self.workspace_id())
            .map(|commit_id| tx.repo().store().get_commit(commit_id))
            .transpose()?;
        if self.working_copy_shared_with_git {
            let git_repo = self.git_backend().unwrap().open_git_repo()?;
            if let Some(wc_commit) = &maybe_new_wc_commit {
                git::reset_head(tx.mut_repo(), &git_repo, wc_commit)?;
            }
            let failed_branches = git::export_refs(tx.mut_repo())?;
            print_failed_git_export(ui, &failed_branches)?;
        }
        self.user_repo = ReadonlyUserRepo::new(tx.commit(description));
        self.report_repo_changes(ui, &old_repo)?;

        if self.may_update_working_copy {
            if let Some(new_commit) = &maybe_new_wc_commit {
                self.update_working_copy(ui, maybe_old_wc_commit.as_ref(), new_commit)?;
            } else {
                // It seems the workspace was deleted, so we shouldn't try to
                // update it.
            }
        }
        let settings = &self.settings;
        if settings.user_name().is_empty() || settings.user_email().is_empty() {
            writeln!(
                ui.warning_default(),
                r#"Name and email not configured. Until configured, your commits will be created with the empty identity, and can't be pushed to remotes. To configure, run:
  jj config set --user user.name "Some One"
  jj config set --user user.email "someone@example.com""#
            )?;
        }
        Ok(())
    }

    /// Inform the user about important changes to the repo since the previous
    /// operation (when `old_repo` was loaded).
    fn report_repo_changes(
        &self,
        ui: &mut Ui,
        old_repo: &Arc<ReadonlyRepo>,
    ) -> Result<(), CommandError> {
        let Some(mut fmt) = ui.status_formatter() else {
            return Ok(());
        };
        let old_view = old_repo.view();
        let new_repo = self.repo().as_ref();
        let new_view = new_repo.view();
        let old_heads = RevsetExpression::commits(old_view.heads().iter().cloned().collect());
        let new_heads = RevsetExpression::commits(new_view.heads().iter().cloned().collect());
        // Filter the revsets by conflicts instead of reading all commits and doing the
        // filtering here. That way, we can afford to evaluate the revset even if there
        // are millions of commits added to the repo, assuming the revset engine can
        // efficiently skip non-conflicting commits. Filter out empty commits mostly so
        // `jj new <conflicted commit>` doesn't result in a message about new conflicts.
        let conflicts = RevsetExpression::filter(RevsetFilterPredicate::HasConflict).intersection(
            &RevsetExpression::filter(RevsetFilterPredicate::File(FilesetExpression::all())),
        );
        let removed_conflicts_expr = new_heads.range(&old_heads).intersection(&conflicts);
        let added_conflicts_expr = old_heads.range(&new_heads).intersection(&conflicts);

        let get_commits = |expr: Rc<RevsetExpression>| -> Result<Vec<Commit>, CommandError> {
            let commits = expr
                .evaluate_programmatic(new_repo)?
                .iter()
                .commits(new_repo.store())
                .try_collect()?;
            Ok(commits)
        };
        let removed_conflict_commits = get_commits(removed_conflicts_expr)?;
        let added_conflict_commits = get_commits(added_conflicts_expr)?;

        fn commits_by_change_id(commits: &[Commit]) -> IndexMap<&ChangeId, Vec<&Commit>> {
            let mut result: IndexMap<&ChangeId, Vec<&Commit>> = IndexMap::new();
            for commit in commits {
                result.entry(commit.change_id()).or_default().push(commit);
            }
            result
        }
        let removed_conflicts_by_change_id = commits_by_change_id(&removed_conflict_commits);
        let added_conflicts_by_change_id = commits_by_change_id(&added_conflict_commits);
        let mut resolved_conflicts_by_change_id = removed_conflicts_by_change_id.clone();
        resolved_conflicts_by_change_id
            .retain(|change_id, _commits| !added_conflicts_by_change_id.contains_key(change_id));
        let mut new_conflicts_by_change_id = added_conflicts_by_change_id.clone();
        new_conflicts_by_change_id
            .retain(|change_id, _commits| !removed_conflicts_by_change_id.contains_key(change_id));

        // TODO: Also report new divergence and maybe resolved divergence
        let template = self.commit_summary_template();
        if !resolved_conflicts_by_change_id.is_empty() {
            writeln!(
                fmt,
                "Existing conflicts were resolved or abandoned from these commits:"
            )?;
            for (_, old_commits) in &resolved_conflicts_by_change_id {
                // TODO: Report which ones were resolved and which ones were abandoned. However,
                // that involves resolving the change_id among the visible commits in the new
                // repo, which isn't currently supported by the revset engine.
                for commit in old_commits {
                    write!(fmt, "  ")?;
                    template.format(commit, fmt.as_mut())?;
                    writeln!(fmt)?;
                }
            }
        }
        if !new_conflicts_by_change_id.is_empty() {
            writeln!(fmt, "New conflicts appeared in these commits:")?;
            for (_, new_commits) in &new_conflicts_by_change_id {
                for commit in new_commits {
                    write!(fmt, "  ")?;
                    template.format(commit, fmt.as_mut())?;
                    writeln!(fmt)?;
                }
            }
        }

        // Hint that the user might want to `jj new` to the first conflict commit to
        // resolve conflicts. Only show the hints if there were any new or resolved
        // conflicts, and only if there are still some conflicts.
        if !(added_conflict_commits.is_empty()
            || resolved_conflicts_by_change_id.is_empty() && new_conflicts_by_change_id.is_empty())
        {
            // If the user just resolved some conflict and squashed them in, there won't be
            // any new conflicts. Clarify to them that there are still some other conflicts
            // to resolve. (We don't mention conflicts in commits that weren't affected by
            // the operation, however.)
            if new_conflicts_by_change_id.is_empty() {
                writeln!(
                    fmt,
                    "There are still unresolved conflicts in rebased descendants.",
                )?;
            }

            self.report_repo_conflicts(
                fmt.as_mut(),
                new_repo,
                added_conflict_commits
                    .iter()
                    .map(|commit| commit.id().clone())
                    .collect(),
            )?;
        }

        Ok(())
    }

    pub fn report_repo_conflicts(
        &self,
        fmt: &mut dyn Formatter,
        repo: &ReadonlyRepo,
        conflicted_commits: Vec<CommitId>,
    ) -> Result<(), CommandError> {
        let only_one_conflicted_commit = conflicted_commits.len() == 1;
        let root_conflicts_revset = RevsetExpression::commits(conflicted_commits)
            .roots()
            .evaluate_programmatic(repo)?;

        let root_conflict_change_ids: Vec<_> = root_conflicts_revset
            .iter()
            .commits(repo.store())
            .map(|maybe_commit| maybe_commit.map(|c| c.change_id().clone()))
            .try_collect()?;

        if !root_conflict_change_ids.is_empty() {
            fmt.push_label("hint")?;
            if only_one_conflicted_commit {
                writeln!(fmt, "To resolve the conflicts, start by updating to it:",)?;
            } else if root_conflict_change_ids.len() == 1 {
                writeln!(
                    fmt,
                    "To resolve the conflicts, start by updating to the first one:",
                )?;
            } else {
                writeln!(
                    fmt,
                    "To resolve the conflicts, start by updating to one of the first ones:",
                )?;
            }
            for change_id in root_conflict_change_ids {
                writeln!(fmt, "  jj new {}", short_change_hash(&change_id))?;
            }
            writeln!(
                fmt,
                r#"Then use `jj resolve`, or edit the conflict markers in the file directly.
Once the conflicts are resolved, you may want to inspect the result with `jj diff`.
Then run `jj squash` to move the resolution into the conflicted commit."#,
            )?;
            fmt.pop_label()?;
        }
        Ok(())
    }

    /// Identifies branches which are eligible to be moved automatically during
    /// `jj commit` and `jj new`. Whether a branch is eligible is determined by
    /// its target and the user and repo config for "advance-branches".
    ///
    /// Returns a Vec of branches in `repo` that point to any of the `from`
    /// commits and that are eligible to advance. The `from` commits are
    /// typically the parents of the target commit of `jj commit` or `jj new`.
    ///
    /// Branches are not moved until
    /// `WorkspaceCommandTransaction::advance_branches()` is called with the
    /// `AdvanceableBranch`s returned by this function.
    ///
    /// Returns an empty `std::Vec` if no branches are eligible to advance.
    pub fn get_advanceable_branches<'a>(
        &self,
        from: impl IntoIterator<Item = &'a CommitId>,
    ) -> Result<Vec<AdvanceableBranch>, CommandError> {
        let ab_settings = AdvanceBranchesSettings::from_config(self.settings.config())?;
        if !ab_settings.feature_enabled() {
            // Return early if we know that there's no work to do.
            return Ok(Vec::new());
        }

        let mut advanceable_branches = Vec::new();
        for from_commit in from {
            for (name, _) in self.repo().view().local_branches_for_commit(from_commit) {
                if ab_settings.branch_is_eligible(name) {
                    advanceable_branches.push(AdvanceableBranch {
                        name: name.to_owned(),
                        old_commit_id: from_commit.clone(),
                    });
                }
            }
        }

        Ok(advanceable_branches)
    }
}

/// A [`Transaction`] tied to a particular workspace.
/// `WorkspaceCommandTransaction`s are created with
/// [`WorkspaceCommandHelper::start_transaction`] and committed with
/// [`WorkspaceCommandTransaction::finish`]. The inner `Transaction` can also be
/// extracted using [`WorkspaceCommandTransaction::into_inner`] in situations
/// where finer-grained control over the `Transaction` is necessary.
#[must_use]
pub struct WorkspaceCommandTransaction<'a> {
    helper: &'a mut WorkspaceCommandHelper,
    tx: Transaction,
}

impl WorkspaceCommandTransaction<'_> {
    /// Workspace helper that may use the base repo.
    pub fn base_workspace_helper(&self) -> &WorkspaceCommandHelper {
        self.helper
    }

    pub fn base_repo(&self) -> &Arc<ReadonlyRepo> {
        self.tx.base_repo()
    }

    pub fn repo(&self) -> &MutableRepo {
        self.tx.repo()
    }

    pub fn mut_repo(&mut self) -> &mut MutableRepo {
        self.tx.mut_repo()
    }

    pub fn check_out(&mut self, commit: &Commit) -> Result<Commit, CheckOutCommitError> {
        let workspace_id = self.helper.workspace_id().to_owned();
        let settings = &self.helper.settings;
        self.tx.mut_repo().check_out(workspace_id, settings, commit)
    }

    pub fn edit(&mut self, commit: &Commit) -> Result<(), EditCommitError> {
        let workspace_id = self.helper.workspace_id().to_owned();
        self.tx.mut_repo().edit(workspace_id, commit)
    }

    pub fn format_commit_summary(&self, commit: &Commit) -> String {
        let mut output = Vec::new();
        self.write_commit_summary(&mut PlainTextFormatter::new(&mut output), commit)
            .expect("write() to PlainTextFormatter should never fail");
        String::from_utf8(output).expect("template output should be utf-8 bytes")
    }

    pub fn write_commit_summary(
        &self,
        formatter: &mut dyn Formatter,
        commit: &Commit,
    ) -> std::io::Result<()> {
        // TODO: Use the disambiguation revset
        let id_prefix_context = IdPrefixContext::new(self.helper.revset_extensions.clone());
        let language = CommitTemplateLanguage::new(
            self.tx.repo(),
            self.helper.workspace_id(),
            self.helper.revset_parse_context(),
            &id_prefix_context,
            &self.helper.commit_template_extensions,
        );
        let template = self
            .helper
            .parse_template(
                &language,
                &self.helper.commit_summary_template_text,
                CommitTemplateLanguage::wrap_commit,
            )
            .expect("parse error should be confined by WorkspaceCommandHelper::new()");
        template.format(commit, formatter)
    }

    pub fn finish(self, ui: &mut Ui, description: impl Into<String>) -> Result<(), CommandError> {
        self.helper.finish_transaction(ui, self.tx, description)
    }

    /// Returns the wrapped [`Transaction`] for circumstances where
    /// finer-grained control is needed. The caller becomes responsible for
    /// finishing the `Transaction`, including rebasing descendants and updating
    /// the working copy, if applicable.
    pub fn into_inner(self) -> Transaction {
        self.tx
    }

    /// Moves each branch in `branches` from an old commit it's associated with
    /// (configured by `get_advanceable_branches`) to the `move_to` commit. If
    /// the branch is conflicted before the update, it will remain conflicted
    /// after the update, but the conflict will involve the `move_to` commit
    /// instead of the old commit.
    pub fn advance_branches(&mut self, branches: Vec<AdvanceableBranch>, move_to: &CommitId) {
        for branch in branches {
            // This removes the old commit ID from the branch's RefTarget and
            // replaces it with the `move_to` ID.
            self.mut_repo().merge_local_branch(
                &branch.name,
                &RefTarget::normal(branch.old_commit_id),
                &RefTarget::normal(move_to.clone()),
            );
        }
    }
}

fn find_workspace_dir(cwd: &Path) -> &Path {
    cwd.ancestors()
        .find(|path| path.join(".jj").is_dir())
        .unwrap_or(cwd)
}

fn map_workspace_load_error(err: WorkspaceLoadError, workspace_path: Option<&str>) -> CommandError {
    match err {
        WorkspaceLoadError::NoWorkspaceHere(wc_path) => {
            // Prefer user-specified workspace_path_str instead of absolute wc_path.
            let workspace_path_str = workspace_path.unwrap_or(".");
            let message = format!(r#"There is no jj repo in "{workspace_path_str}""#);
            let git_dir = wc_path.join(".git");
            if git_dir.is_dir() {
                user_error_with_hint(
                    message,
                    "It looks like this is a git repo. You can create a jj repo backed by it by \
                     running this:
jj git init --colocate",
                )
            } else {
                user_error(message)
            }
        }
        WorkspaceLoadError::RepoDoesNotExist(repo_dir) => user_error(format!(
            "The repository directory at {} is missing. Was it moved?",
            repo_dir.display(),
        )),
        WorkspaceLoadError::StoreLoadError(err @ StoreLoadError::UnsupportedType { .. }) => {
            internal_error_with_message(
                "This version of the jj binary doesn't support this type of repo",
                err,
            )
        }
        WorkspaceLoadError::StoreLoadError(
            err @ (StoreLoadError::ReadError { .. } | StoreLoadError::Backend(_)),
} | StoreLoadError::Backend(_)), 1664 ) => internal_error_with_message("The repository appears broken or inaccessible", err), 1665 WorkspaceLoadError::StoreLoadError(StoreLoadError::Signing( 1666 err @ SignInitError::UnknownBackend(_), 1667 )) => user_error(err), 1668 WorkspaceLoadError::StoreLoadError(err) => internal_error(err), 1669 WorkspaceLoadError::WorkingCopyState(err) => internal_error(err), 1670 WorkspaceLoadError::NonUnicodePath | WorkspaceLoadError::Path(_) => user_error(err), 1671 } 1672} 1673 1674pub fn start_repo_transaction( 1675 repo: &Arc<ReadonlyRepo>, 1676 settings: &UserSettings, 1677 string_args: &[String], 1678) -> Transaction { 1679 let mut tx = repo.start_transaction(settings); 1680 // TODO: Either do better shell-escaping here or store the values in some list 1681 // type (which we currently don't have). 1682 let shell_escape = |arg: &String| { 1683 if arg.as_bytes().iter().all(|b| { 1684 matches!(b, 1685 b'A'..=b'Z' 1686 | b'a'..=b'z' 1687 | b'0'..=b'9' 1688 | b',' 1689 | b'-' 1690 | b'.' 1691 | b'/' 1692 | b':' 1693 | b'@' 1694 | b'_' 1695 ) 1696 }) { 1697 arg.clone() 1698 } else { 1699 format!("'{}'", arg.replace('\'', "\\'")) 1700 } 1701 }; 1702 let mut quoted_strings = vec!["jj".to_string()]; 1703 quoted_strings.extend(string_args.iter().skip(1).map(shell_escape)); 1704 tx.set_tag("args".to_string(), quoted_strings.join(" ")); 1705 tx 1706} 1707 1708/// Whether the working copy is stale or not. 1709#[derive(Clone, Debug, Eq, PartialEq)] 1710pub enum WorkingCopyFreshness { 1711 /// The working copy isn't stale, and no need to reload the repo. 1712 Fresh, 1713 /// The working copy was updated since we loaded the repo. The repo must be 1714 /// reloaded at the working copy's operation. 1715 Updated(Box<Operation>), 1716 /// The working copy is behind the latest operation. 1717 WorkingCopyStale, 1718 /// The working copy is a sibling of the latest operation. 1719 SiblingOperation, 1720} 1721 1722#[instrument(skip_all)] 1723pub fn check_stale_working_copy( 1724 locked_wc: &dyn LockedWorkingCopy, 1725 wc_commit: &Commit, 1726 repo: &ReadonlyRepo, 1727) -> Result<WorkingCopyFreshness, OpStoreError> { 1728 // Check if the working copy's tree matches the repo's view 1729 let wc_tree_id = locked_wc.old_tree_id(); 1730 if wc_commit.tree_id() == wc_tree_id { 1731 // The working copy isn't stale, and no need to reload the repo. 1732 Ok(WorkingCopyFreshness::Fresh) 1733 } else { 1734 let wc_operation_data = repo 1735 .op_store() 1736 .read_operation(locked_wc.old_operation_id())?; 1737 let wc_operation = Operation::new( 1738 repo.op_store().clone(), 1739 locked_wc.old_operation_id().clone(), 1740 wc_operation_data, 1741 ); 1742 let repo_operation = repo.operation(); 1743 let ancestor_op = dag_walk::closest_common_node_ok( 1744 [Ok(wc_operation.clone())], 1745 [Ok(repo_operation.clone())], 1746 |op: &Operation| op.id().clone(), 1747 |op: &Operation| op.parents().collect_vec(), 1748 )? 1749 .expect("unrelated operations"); 1750 if ancestor_op.id() == repo_operation.id() { 1751 // The working copy was updated since we loaded the repo. The repo must be 1752 // reloaded at the working copy's operation. 1753 Ok(WorkingCopyFreshness::Updated(Box::new(wc_operation))) 1754 } else if ancestor_op.id() == wc_operation.id() { 1755 // The working copy was not updated when some repo operation committed, 1756 // meaning that it's stale compared to the repo view. 
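            // (If the common ancestor is neither operation, the working copy
            // and the repo view diverged from each other; that case falls
            // through to `SiblingOperation` below.)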
1757 Ok(WorkingCopyFreshness::WorkingCopyStale) 1758 } else { 1759 Ok(WorkingCopyFreshness::SiblingOperation) 1760 } 1761 } 1762} 1763 1764#[instrument(skip_all)] 1765pub fn print_conflicted_paths( 1766 conflicts: &[(RepoPathBuf, MergedTreeValue)], 1767 formatter: &mut dyn Formatter, 1768 workspace_command: &WorkspaceCommandHelper, 1769) -> Result<(), CommandError> { 1770 let formatted_paths = conflicts 1771 .iter() 1772 .map(|(path, _conflict)| workspace_command.format_file_path(path)) 1773 .collect_vec(); 1774 let max_path_len = formatted_paths.iter().map(|p| p.len()).max().unwrap_or(0); 1775 let formatted_paths = formatted_paths 1776 .into_iter() 1777 .map(|p| format!("{:width$}", p, width = max_path_len.min(32) + 3)); 1778 1779 for ((_, conflict), formatted_path) in std::iter::zip(conflicts.iter(), formatted_paths) { 1780 let sides = conflict.num_sides(); 1781 let n_adds = conflict.adds().flatten().count(); 1782 let deletions = sides - n_adds; 1783 1784 let mut seen_objects = BTreeMap::new(); // Sort for consistency and easier testing 1785 if deletions > 0 { 1786 seen_objects.insert( 1787 format!( 1788 // Starting with a number sorts this first 1789 "{deletions} deletion{}", 1790 if deletions > 1 { "s" } else { "" } 1791 ), 1792 "normal", // Deletions don't interfere with `jj resolve` or diff display 1793 ); 1794 } 1795 // TODO: We might decide it's OK for `jj resolve` to ignore special files in the 1796 // `removes` of a conflict (see e.g. https://github.com/martinvonz/jj/pull/978). In 1797 // that case, `conflict.removes` should be removed below. 1798 for term in itertools::chain(conflict.removes(), conflict.adds()).flatten() { 1799 seen_objects.insert( 1800 match term { 1801 TreeValue::File { 1802 executable: false, .. 1803 } => continue, 1804 TreeValue::File { 1805 executable: true, .. 1806 } => "an executable", 1807 TreeValue::Symlink(_) => "a symlink", 1808 TreeValue::Tree(_) => "a directory", 1809 TreeValue::GitSubmodule(_) => "a git submodule", 1810 TreeValue::Conflict(_) => "another conflict (you found a bug!)", 1811 } 1812 .to_string(), 1813 "difficult", 1814 ); 1815 } 1816 1817 write!(formatter, "{formatted_path} ")?; 1818 formatter.with_label("conflict_description", |formatter| { 1819 let print_pair = |formatter: &mut dyn Formatter, (text, label): &(String, &str)| { 1820 write!(formatter.labeled(label), "{text}") 1821 }; 1822 print_pair( 1823 formatter, 1824 &( 1825 format!("{sides}-sided"), 1826 if sides > 2 { "difficult" } else { "normal" }, 1827 ), 1828 )?; 1829 write!(formatter, " conflict")?; 1830 1831 if !seen_objects.is_empty() { 1832 write!(formatter, " including ")?; 1833 let seen_objects = seen_objects.into_iter().collect_vec(); 1834 match &seen_objects[..] 
{ 1835 [] => unreachable!(), 1836 [only] => print_pair(formatter, only)?, 1837 [first, middle @ .., last] => { 1838 print_pair(formatter, first)?; 1839 for pair in middle { 1840 write!(formatter, ", ")?; 1841 print_pair(formatter, pair)?; 1842 } 1843 write!(formatter, " and ")?; 1844 print_pair(formatter, last)?; 1845 } 1846 }; 1847 } 1848 Ok(()) 1849 })?; 1850 writeln!(formatter)?; 1851 } 1852 Ok(()) 1853} 1854 1855pub fn print_checkout_stats( 1856 ui: &mut Ui, 1857 stats: CheckoutStats, 1858 new_commit: &Commit, 1859) -> Result<(), std::io::Error> { 1860 if stats.added_files > 0 || stats.updated_files > 0 || stats.removed_files > 0 { 1861 writeln!( 1862 ui.status(), 1863 "Added {} files, modified {} files, removed {} files", 1864 stats.added_files, 1865 stats.updated_files, 1866 stats.removed_files 1867 )?; 1868 } 1869 if stats.skipped_files != 0 { 1870 writeln!( 1871 ui.warning_default(), 1872 "{} of those updates were skipped because there were conflicting changes in the \ 1873 working copy.", 1874 stats.skipped_files 1875 )?; 1876 if let Some(mut writer) = ui.hint_default() { 1877 writeln!( 1878 writer, 1879 "Inspect the changes compared to the intended target with `jj diff --from {}`. 1880Discard the conflicting changes with `jj restore --from {}`.", 1881 short_commit_hash(new_commit.id()), 1882 short_commit_hash(new_commit.id()) 1883 )?; 1884 } 1885 } 1886 Ok(()) 1887} 1888 1889/// Prints warning about explicit paths that don't match any of the tree 1890/// entries. 1891pub fn print_unmatched_explicit_paths<'a>( 1892 ui: &Ui, 1893 workspace_command: &WorkspaceCommandHelper, 1894 expression: &FilesetExpression, 1895 trees: impl IntoIterator<Item = &'a MergedTree>, 1896) -> io::Result<()> { 1897 let mut explicit_paths = expression.explicit_paths().collect_vec(); 1898 for tree in trees { 1899 explicit_paths.retain(|&path| tree.path_value(path).is_absent()); 1900 if explicit_paths.is_empty() { 1901 return Ok(()); 1902 } 1903 } 1904 let ui_paths = explicit_paths 1905 .iter() 1906 .map(|&path| workspace_command.format_file_path(path)) 1907 .join(", "); 1908 writeln!( 1909 ui.warning_default(), 1910 "No matching entries for paths: {ui_paths}" 1911 )?; 1912 Ok(()) 1913} 1914 1915pub fn print_trackable_remote_branches(ui: &Ui, view: &View) -> io::Result<()> { 1916 let remote_branch_names = view 1917 .branches() 1918 .filter(|(_, branch_target)| branch_target.local_target.is_present()) 1919 .flat_map(|(name, branch_target)| { 1920 branch_target 1921 .remote_refs 1922 .into_iter() 1923 .filter(|&(_, remote_ref)| !remote_ref.is_tracking()) 1924 .map(move |(remote, _)| format!("{name}@{remote}")) 1925 }) 1926 .collect_vec(); 1927 if remote_branch_names.is_empty() { 1928 return Ok(()); 1929 } 1930 1931 if let Some(mut writer) = ui.hint_default() { 1932 writeln!( 1933 writer, 1934 "The following remote branches aren't associated with the existing local branches:" 1935 )?; 1936 } 1937 if let Some(mut formatter) = ui.status_formatter() { 1938 for full_name in &remote_branch_names { 1939 write!(formatter, " ")?; 1940 writeln!(formatter.labeled("branch"), "{full_name}")?; 1941 } 1942 } 1943 if let Some(mut writer) = ui.hint_default() { 1944 writeln!( 1945 writer, 1946 "Run `jj branch track {names}` to keep local branches updated on future pulls.", 1947 names = remote_branch_names.join(" "), 1948 )?; 1949 } 1950 Ok(()) 1951} 1952 1953pub fn update_working_copy( 1954 repo: &Arc<ReadonlyRepo>, 1955 workspace: &mut Workspace, 1956 old_commit: Option<&Commit>, 1957 new_commit: &Commit, 1958) -> 
Result<Option<CheckoutStats>, CommandError> { 1959 let old_tree_id = old_commit.map(|commit| commit.tree_id().clone()); 1960 let stats = if Some(new_commit.tree_id()) != old_tree_id.as_ref() { 1961 // TODO: CheckoutError::ConcurrentCheckout should probably just result in a 1962 // warning for most commands (but be an error for the checkout command) 1963 let stats = workspace 1964 .check_out(repo.op_id().clone(), old_tree_id.as_ref(), new_commit) 1965 .map_err(|err| { 1966 internal_error_with_message( 1967 format!("Failed to check out commit {}", new_commit.id().hex()), 1968 err, 1969 ) 1970 })?; 1971 Some(stats) 1972 } else { 1973 // Record new operation id which represents the latest working-copy state 1974 let locked_ws = workspace.start_working_copy_mutation()?; 1975 locked_ws.finish(repo.op_id().clone())?; 1976 None 1977 }; 1978 Ok(stats) 1979} 1980 1981fn load_template_aliases( 1982 ui: &Ui, 1983 layered_configs: &LayeredConfigs, 1984) -> Result<TemplateAliasesMap, CommandError> { 1985 const TABLE_KEY: &str = "template-aliases"; 1986 let mut aliases_map = TemplateAliasesMap::new(); 1987 // Load from all config layers in order. 'f(x)' in default layer should be 1988 // overridden by 'f(a)' in user. 1989 for (_, config) in layered_configs.sources() { 1990 let table = if let Some(table) = config.get_table(TABLE_KEY).optional()? { 1991 table 1992 } else { 1993 continue; 1994 }; 1995 for (decl, value) in table.into_iter().sorted_by(|a, b| a.0.cmp(&b.0)) { 1996 let r = value 1997 .into_string() 1998 .map_err(|e| e.to_string()) 1999 .and_then(|v| aliases_map.insert(&decl, v).map_err(|e| e.to_string())); 2000 if let Err(s) = r { 2001 writeln!( 2002 ui.warning_default(), 2003 r#"Failed to load "{TABLE_KEY}.{decl}": {s}"# 2004 )?; 2005 } 2006 } 2007 } 2008 Ok(aliases_map) 2009} 2010 2011/// Helper to reformat content of log-like commands. 2012#[derive(Clone, Debug)] 2013pub enum LogContentFormat { 2014 NoWrap, 2015 Wrap { term_width: usize }, 2016} 2017 2018impl LogContentFormat { 2019 pub fn new(ui: &Ui, settings: &UserSettings) -> Result<Self, config::ConfigError> { 2020 if settings.config().get_bool("ui.log-word-wrap")? { 2021 let term_width = usize::from(ui.term_width().unwrap_or(80)); 2022 Ok(LogContentFormat::Wrap { term_width }) 2023 } else { 2024 Ok(LogContentFormat::NoWrap) 2025 } 2026 } 2027 2028 pub fn write( 2029 &self, 2030 formatter: &mut dyn Formatter, 2031 content_fn: impl FnOnce(&mut dyn Formatter) -> std::io::Result<()>, 2032 ) -> std::io::Result<()> { 2033 self.write_graph_text(formatter, content_fn, || 0) 2034 } 2035 2036 pub fn write_graph_text( 2037 &self, 2038 formatter: &mut dyn Formatter, 2039 content_fn: impl FnOnce(&mut dyn Formatter) -> std::io::Result<()>, 2040 graph_width_fn: impl FnOnce() -> usize, 2041 ) -> std::io::Result<()> { 2042 match self { 2043 LogContentFormat::NoWrap => content_fn(formatter), 2044 LogContentFormat::Wrap { term_width } => { 2045 let mut recorder = FormatRecorder::new(); 2046 content_fn(&mut recorder)?; 2047 text_util::write_wrapped( 2048 formatter, 2049 &recorder, 2050 term_width.saturating_sub(graph_width_fn()), 2051 )?; 2052 Ok(()) 2053 } 2054 } 2055 } 2056} 2057 2058// TODO: Use a proper TOML library to serialize instead. 2059pub fn serialize_config_value(value: &config::Value) -> String { 2060 match &value.kind { 2061 config::ValueKind::Table(table) => format!( 2062 "{{{}}}", 2063 // TODO: Remove sorting when config crate maintains deterministic ordering. 
2064 table 2065 .iter() 2066 .sorted_by_key(|(k, _)| *k) 2067 .map(|(k, v)| format!("{k}={}", serialize_config_value(v))) 2068 .join(", ") 2069 ), 2070 config::ValueKind::Array(vals) => { 2071 format!("[{}]", vals.iter().map(serialize_config_value).join(", ")) 2072 } 2073 config::ValueKind::String(val) => format!("{val:?}"), 2074 _ => value.to_string(), 2075 } 2076} 2077 2078pub fn write_config_value_to_file( 2079 key: &str, 2080 value_str: &str, 2081 path: &Path, 2082) -> Result<(), CommandError> { 2083 // Read config 2084 let config_toml = std::fs::read_to_string(path).or_else(|err| { 2085 match err.kind() { 2086 // If config doesn't exist yet, read as empty and we'll write one. 2087 std::io::ErrorKind::NotFound => Ok("".to_string()), 2088 _ => Err(user_error_with_message( 2089 format!("Failed to read file {path}", path = path.display()), 2090 err, 2091 )), 2092 } 2093 })?; 2094 let mut doc = toml_edit::Document::from_str(&config_toml).map_err(|err| { 2095 user_error_with_message( 2096 format!("Failed to parse file {path}", path = path.display()), 2097 err, 2098 ) 2099 })?; 2100 2101 // Apply config value 2102 // Interpret value as string if it can't be parsed as a TOML value. 2103 // TODO(#531): Infer types based on schema (w/ --type arg to override). 2104 let item = match toml_edit::Value::from_str(value_str) { 2105 Ok(value) => toml_edit::value(value), 2106 _ => toml_edit::value(value_str), 2107 }; 2108 let mut target_table = doc.as_table_mut(); 2109 let mut key_parts_iter = key.split('.'); 2110 // Note: split guarantees at least one item. 2111 let last_key_part = key_parts_iter.next_back().unwrap(); 2112 for key_part in key_parts_iter { 2113 target_table = target_table 2114 .entry(key_part) 2115 .or_insert_with(|| toml_edit::Item::Table(toml_edit::Table::new())) 2116 .as_table_mut() 2117 .ok_or_else(|| { 2118 user_error(format!( 2119 "Failed to set {key}: would overwrite non-table value with parent table" 2120 )) 2121 })?; 2122 } 2123 // Error out if overwriting non-scalar value for key (table or array) with 2124 // scalar. 2125 match target_table.get(last_key_part) { 2126 None | Some(toml_edit::Item::None | toml_edit::Item::Value(_)) => {} 2127 Some(toml_edit::Item::Table(_) | toml_edit::Item::ArrayOfTables(_)) => { 2128 return Err(user_error(format!( 2129 "Failed to set {key}: would overwrite entire table" 2130 ))); 2131 } 2132 } 2133 target_table[last_key_part] = item; 2134 2135 // Write config back 2136 std::fs::write(path, doc.to_string()).map_err(|err| { 2137 user_error_with_message( 2138 format!("Failed to write file {path}", path = path.display()), 2139 err, 2140 ) 2141 }) 2142} 2143 2144pub fn get_new_config_file_path( 2145 config_source: &ConfigSource, 2146 command: &CommandHelper, 2147) -> Result<PathBuf, CommandError> { 2148 let edit_path = match config_source { 2149 // TODO(#531): Special-case for editors that can't handle viewing directories? 2150 ConfigSource::User => { 2151 new_config_path()?.ok_or_else(|| user_error("No repo config path found to edit"))? 
2152 } 2153 ConfigSource::Repo => command.workspace_loader()?.repo_path().join("config.toml"), 2154 _ => { 2155 return Err(user_error(format!( 2156 "Can't get path for config source {config_source:?}" 2157 ))); 2158 } 2159 }; 2160 Ok(edit_path) 2161} 2162 2163pub fn run_ui_editor(settings: &UserSettings, edit_path: &PathBuf) -> Result<(), CommandError> { 2164 let editor: CommandNameAndArgs = settings 2165 .config() 2166 .get("ui.editor") 2167 .map_err(|err| config_error_with_message("Invalid `ui.editor`", err))?; 2168 let exit_status = editor.to_command().arg(edit_path).status().map_err(|err| { 2169 user_error_with_message( 2170 format!( 2171 // The executable couldn't be found or run; command-line arguments are not relevant 2172 "Failed to run editor '{name}'", 2173 name = editor.split_name(), 2174 ), 2175 err, 2176 ) 2177 })?; 2178 if !exit_status.success() { 2179 return Err(user_error(format!( 2180 "Editor '{editor}' exited with an error" 2181 ))); 2182 } 2183 2184 Ok(()) 2185} 2186 2187pub fn edit_temp_file( 2188 error_name: &str, 2189 tempfile_suffix: &str, 2190 dir: &Path, 2191 content: &str, 2192 settings: &UserSettings, 2193) -> Result<String, CommandError> { 2194 let path = (|| -> Result<_, io::Error> { 2195 let mut file = tempfile::Builder::new() 2196 .prefix("editor-") 2197 .suffix(tempfile_suffix) 2198 .tempfile_in(dir)?; 2199 file.write_all(content.as_bytes())?; 2200 let (_, path) = file.keep().map_err(|e| e.error)?; 2201 Ok(path) 2202 })() 2203 .map_err(|e| { 2204 user_error_with_message( 2205 format!( 2206 r#"Failed to create {} file in "{}""#, 2207 error_name, 2208 dir.display(), 2209 ), 2210 e, 2211 ) 2212 })?; 2213 2214 run_ui_editor(settings, &path)?; 2215 2216 let edited = fs::read_to_string(&path).map_err(|e| { 2217 user_error_with_message( 2218 format!(r#"Failed to read {} file "{}""#, error_name, path.display()), 2219 e, 2220 ) 2221 })?; 2222 2223 // Delete the file only if everything went well. 2224 // TODO: Tell the user the name of the file we left behind. 2225 std::fs::remove_file(path).ok(); 2226 2227 Ok(edited) 2228} 2229 2230pub fn short_commit_hash(commit_id: &CommitId) -> String { 2231 commit_id.hex()[0..12].to_string() 2232} 2233 2234pub fn short_change_hash(change_id: &ChangeId) -> String { 2235 // TODO: We could avoid the unwrap() and make this more efficient by converting 2236 // straight from binary. 2237 to_reverse_hex(&change_id.hex()[0..12]).unwrap() 2238} 2239 2240pub fn short_operation_hash(operation_id: &OperationId) -> String { 2241 operation_id.hex()[0..12].to_string() 2242} 2243 2244/// Wrapper around a `DiffEditor` to conditionally start interactive session. 2245#[derive(Clone, Debug)] 2246pub enum DiffSelector { 2247 NonInteractive, 2248 Interactive(DiffEditor), 2249} 2250 2251impl DiffSelector { 2252 pub fn is_interactive(&self) -> bool { 2253 matches!(self, DiffSelector::Interactive(_)) 2254 } 2255 2256 /// Restores diffs from the `right_tree` to the `left_tree` by using an 2257 /// interactive editor if enabled. 2258 pub fn select( 2259 &self, 2260 left_tree: &MergedTree, 2261 right_tree: &MergedTree, 2262 matcher: &dyn Matcher, 2263 instructions: Option<&str>, 2264 ) -> Result<MergedTreeId, CommandError> { 2265 match self { 2266 DiffSelector::NonInteractive => Ok(restore_tree(right_tree, left_tree, matcher)?), 2267 DiffSelector::Interactive(editor) => { 2268 Ok(editor.edit(left_tree, right_tree, matcher, instructions)?) 
2269 } 2270 } 2271 } 2272} 2273 2274#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] 2275pub struct RemoteBranchName { 2276 pub branch: String, 2277 pub remote: String, 2278} 2279 2280impl fmt::Display for RemoteBranchName { 2281 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 2282 let RemoteBranchName { branch, remote } = self; 2283 write!(f, "{branch}@{remote}") 2284 } 2285} 2286 2287#[derive(Clone, Debug)] 2288pub struct RemoteBranchNamePattern { 2289 pub branch: StringPattern, 2290 pub remote: StringPattern, 2291} 2292 2293impl FromStr for RemoteBranchNamePattern { 2294 type Err = String; 2295 2296 fn from_str(src: &str) -> Result<Self, Self::Err> { 2297 // The kind prefix applies to both branch and remote fragments. It's 2298 // weird that unanchored patterns like substring:branch@remote is split 2299 // into two, but I can't think of a better syntax. 2300 // TODO: should we disable substring pattern? what if we added regex? 2301 let (maybe_kind, pat) = src 2302 .split_once(':') 2303 .map_or((None, src), |(kind, pat)| (Some(kind), pat)); 2304 let to_pattern = |pat: &str| { 2305 if let Some(kind) = maybe_kind { 2306 StringPattern::from_str_kind(pat, kind).map_err(|err| err.to_string()) 2307 } else { 2308 Ok(StringPattern::exact(pat)) 2309 } 2310 }; 2311 // TODO: maybe reuse revset parser to handle branch/remote name containing @ 2312 let (branch, remote) = pat 2313 .rsplit_once('@') 2314 .ok_or_else(|| "remote branch must be specified in branch@remote form".to_owned())?; 2315 Ok(RemoteBranchNamePattern { 2316 branch: to_pattern(branch)?, 2317 remote: to_pattern(remote)?, 2318 }) 2319 } 2320} 2321 2322impl RemoteBranchNamePattern { 2323 pub fn is_exact(&self) -> bool { 2324 self.branch.is_exact() && self.remote.is_exact() 2325 } 2326} 2327 2328impl fmt::Display for RemoteBranchNamePattern { 2329 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 2330 let RemoteBranchNamePattern { branch, remote } = self; 2331 write!(f, "{branch}@{remote}") 2332 } 2333} 2334 2335/// Jujutsu (An experimental VCS) 2336/// 2337/// To get started, see the tutorial at https://github.com/martinvonz/jj/blob/main/docs/tutorial.md. 2338#[allow(rustdoc::bare_urls)] 2339#[derive(clap::Parser, Clone, Debug)] 2340#[command(name = "jj")] 2341pub struct Args { 2342 #[command(flatten)] 2343 pub global_args: GlobalArgs, 2344} 2345 2346#[derive(clap::Args, Clone, Debug)] 2347#[command(next_help_heading = "Global Options")] 2348pub struct GlobalArgs { 2349 /// Path to repository to operate on 2350 /// 2351 /// By default, Jujutsu searches for the closest .jj/ directory in an 2352 /// ancestor of the current working directory. 2353 #[arg(long, short = 'R', global = true, value_hint = clap::ValueHint::DirPath)] 2354 pub repository: Option<String>, 2355 /// Don't snapshot the working copy, and don't update it 2356 /// 2357 /// By default, Jujutsu snapshots the working copy at the beginning of every 2358 /// command. The working copy is also updated at the end of the command, 2359 /// if the command modified the working-copy commit (`@`). If you want 2360 /// to avoid snapshotting the working copy and instead see a possibly 2361 /// stale working copy commit, you can use `--ignore-working-copy`. 2362 /// This may be useful e.g. in a command prompt, especially if you have 2363 /// another process that commits the working copy. 2364 /// 2365 /// Loading the repository at a specific operation with `--at-operation` 2366 /// implies `--ignore-working-copy`. 
    #[arg(long, global = true)]
    pub ignore_working_copy: bool,
    /// Allow rewriting immutable commits
    ///
    /// By default, Jujutsu prevents rewriting commits in the configured set of
    /// immutable commits. This option disables that check and lets you rewrite
    /// any commit but the root commit.
    ///
    /// This option only affects the check. It does not affect the
    /// `immutable_heads()` revset or the `immutable` template keyword.
    #[arg(long, global = true)]
    pub ignore_immutable: bool,
    /// Operation to load the repo at
    ///
    /// Operation to load the repo at. By default, Jujutsu loads the repo at the
    /// most recent operation. You can use `--at-op=<operation ID>` to see what
    /// the repo looked like at an earlier operation. For example, `jj
    /// --at-op=<operation ID> st` will show you what `jj st` would have
    /// shown you when the given operation had just finished.
    ///
    /// Use `jj op log` to find the operation ID you want. Any unambiguous
    /// prefix of the operation ID is enough.
    ///
    /// When loading the repo at an earlier operation, the working copy will be
    /// ignored, as if `--ignore-working-copy` had been specified.
    ///
    /// It is possible to run mutating commands when loading the repo at an
    /// earlier operation. Doing that is equivalent to having run concurrent
    /// commands starting at the earlier operation. There's rarely a reason to
    /// do that, but it is possible.
    #[arg(long, visible_alias = "at-op", global = true, default_value = "@")]
    pub at_operation: String,
    /// Enable debug logging
    #[arg(long, global = true)]
    pub debug: bool,

    #[command(flatten)]
    pub early_args: EarlyArgs,
}

#[derive(clap::Args, Clone, Debug)]
pub struct EarlyArgs {
    /// When to colorize output (always, never, auto)
    #[arg(long, value_name = "WHEN", global = true)]
    pub color: Option<ColorChoice>,
    /// Silence non-primary command output
    ///
    /// For example, `jj files` will still list files, but it won't tell you if
    /// the working copy was snapshotted or if descendants were rebased.
    ///
    /// Warnings and errors will still be printed.
    #[arg(long, global = true, action = ArgAction::SetTrue)]
    // Parsing with ignore_errors will crash if this is bool, so use
    // Option<bool>.
    pub quiet: Option<bool>,
    /// Disable the pager
    #[arg(long, value_name = "WHEN", global = true, action = ArgAction::SetTrue)]
    // Parsing with ignore_errors will crash if this is bool, so use
    // Option<bool>.
    pub no_pager: Option<bool>,
    /// Additional configuration options (can be repeated)
    // TODO: Introduce a `--config` option with simpler syntax for simple
    // cases, designed so that `--config ui.color=auto` works
    #[arg(long, value_name = "TOML", global = true)]
    pub config_toml: Vec<String>,
}

/// Wrapper around a revset expression argument.
///
/// An empty string is rejected early by the CLI value parser, but it's still
/// allowed to construct an empty `RevisionArg` from a config value, for
/// example. An empty expression will be rejected by the revset parser.
#[derive(Clone, Debug)]
pub struct RevisionArg(Cow<'static, str>);

impl RevisionArg {
    /// The working-copy symbol, which is the default for most commands.
2444 pub const AT: Self = RevisionArg(Cow::Borrowed("@")); 2445} 2446 2447impl From<String> for RevisionArg { 2448 fn from(s: String) -> Self { 2449 RevisionArg(s.into()) 2450 } 2451} 2452 2453impl AsRef<str> for RevisionArg { 2454 fn as_ref(&self) -> &str { 2455 &self.0 2456 } 2457} 2458 2459impl fmt::Display for RevisionArg { 2460 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 2461 write!(f, "{}", self.0) 2462 } 2463} 2464 2465impl ValueParserFactory for RevisionArg { 2466 type Parser = MapValueParser<NonEmptyStringValueParser, fn(String) -> RevisionArg>; 2467 2468 fn value_parser() -> Self::Parser { 2469 NonEmptyStringValueParser::new().map(RevisionArg::from) 2470 } 2471} 2472 2473fn get_string_or_array( 2474 config: &config::Config, 2475 key: &str, 2476) -> Result<Vec<String>, config::ConfigError> { 2477 config 2478 .get(key) 2479 .map(|string| vec![string]) 2480 .or_else(|_| config.get::<Vec<String>>(key)) 2481} 2482 2483fn resolve_default_command( 2484 ui: &Ui, 2485 config: &config::Config, 2486 app: &Command, 2487 mut string_args: Vec<String>, 2488) -> Result<Vec<String>, CommandError> { 2489 const PRIORITY_FLAGS: &[&str] = &["help", "--help", "-h", "--version", "-V"]; 2490 2491 let has_priority_flag = string_args 2492 .iter() 2493 .any(|arg| PRIORITY_FLAGS.contains(&arg.as_str())); 2494 if has_priority_flag { 2495 return Ok(string_args); 2496 } 2497 2498 let app_clone = app 2499 .clone() 2500 .allow_external_subcommands(true) 2501 .ignore_errors(true); 2502 let matches = app_clone.try_get_matches_from(&string_args).ok(); 2503 2504 if let Some(matches) = matches { 2505 if matches.subcommand_name().is_none() { 2506 let args = get_string_or_array(config, "ui.default-command").optional()?; 2507 if args.is_none() { 2508 if let Some(mut writer) = ui.hint_default() { 2509 writeln!(writer, "Use `jj -h` for a list of available commands.")?; 2510 writeln!( 2511 writer, 2512 "Run `jj config set --user ui.default-command log` to disable this \ 2513 message." 2514 )?; 2515 } 2516 } 2517 let default_command = args.unwrap_or_else(|| vec!["log".to_string()]); 2518 2519 // Insert the default command directly after the path to the binary. 2520 string_args.splice(1..1, default_command); 2521 } 2522 } 2523 Ok(string_args) 2524} 2525 2526fn resolve_aliases( 2527 config: &config::Config, 2528 app: &Command, 2529 mut string_args: Vec<String>, 2530) -> Result<Vec<String>, CommandError> { 2531 let mut aliases_map = config.get_table("aliases")?; 2532 if let Ok(alias_map) = config.get_table("alias") { 2533 for (alias, definition) in alias_map { 2534 if aliases_map.insert(alias.clone(), definition).is_some() { 2535 return Err(user_error_with_hint( 2536 format!(r#"Alias "{alias}" is defined in both [aliases] and [alias]"#), 2537 "[aliases] is the preferred section for aliases. 
Please remove the alias from \ 2538 [alias].", 2539 )); 2540 } 2541 } 2542 } 2543 let mut resolved_aliases = HashSet::new(); 2544 let mut real_commands = HashSet::new(); 2545 for command in app.get_subcommands() { 2546 real_commands.insert(command.get_name().to_string()); 2547 for alias in command.get_all_aliases() { 2548 real_commands.insert(alias.to_string()); 2549 } 2550 } 2551 loop { 2552 let app_clone = app.clone().allow_external_subcommands(true); 2553 let matches = app_clone.try_get_matches_from(&string_args).ok(); 2554 if let Some((command_name, submatches)) = matches.as_ref().and_then(|m| m.subcommand()) { 2555 if !real_commands.contains(command_name) { 2556 let alias_name = command_name.to_string(); 2557 let alias_args = submatches 2558 .get_many::<OsString>("") 2559 .unwrap_or_default() 2560 .map(|arg| arg.to_str().unwrap().to_string()) 2561 .collect_vec(); 2562 if resolved_aliases.contains(&alias_name) { 2563 return Err(user_error(format!( 2564 r#"Recursive alias definition involving "{alias_name}""# 2565 ))); 2566 } 2567 if let Some(value) = aliases_map.remove(&alias_name) { 2568 if let Ok(alias_definition) = value.try_deserialize::<Vec<String>>() { 2569 assert!(string_args.ends_with(&alias_args)); 2570 string_args.truncate(string_args.len() - 1 - alias_args.len()); 2571 string_args.extend(alias_definition); 2572 string_args.extend_from_slice(&alias_args); 2573 resolved_aliases.insert(alias_name.clone()); 2574 continue; 2575 } else { 2576 return Err(user_error(format!( 2577 r#"Alias definition for "{alias_name}" must be a string list"# 2578 ))); 2579 } 2580 } else { 2581 // Not a real command and not an alias, so return what we've resolved so far 2582 return Ok(string_args); 2583 } 2584 } 2585 } 2586 // No more alias commands, or hit unknown option 2587 return Ok(string_args); 2588 } 2589} 2590 2591/// Parse args that must be interpreted early, e.g. before printing help. 
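///
/// Early arguments are lowered to config overrides before the full command
/// line is parsed: `--color <choice>` becomes `ui.color="<choice>"`,
/// `--quiet` becomes `ui.quiet=true`, and `--no-pager` becomes
/// `ui.paginate="never"`. They are merged with any `--config-toml` values and
/// applied to the `Ui` right away.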
2592fn handle_early_args( 2593 ui: &mut Ui, 2594 app: &Command, 2595 args: &[String], 2596 layered_configs: &mut LayeredConfigs, 2597) -> Result<(), CommandError> { 2598 // ignore_errors() bypasses errors like missing subcommand 2599 let early_matches = app 2600 .clone() 2601 .disable_version_flag(true) 2602 .disable_help_flag(true) 2603 .disable_help_subcommand(true) 2604 .ignore_errors(true) 2605 .try_get_matches_from(args)?; 2606 let mut args: EarlyArgs = EarlyArgs::from_arg_matches(&early_matches).unwrap(); 2607 2608 if let Some(choice) = args.color { 2609 args.config_toml.push(format!(r#"ui.color="{choice}""#)); 2610 } 2611 if args.quiet.unwrap_or_default() { 2612 args.config_toml.push(r#"ui.quiet=true"#.to_string()); 2613 } 2614 if args.no_pager.unwrap_or_default() { 2615 args.config_toml.push(r#"ui.paginate="never""#.to_owned()); 2616 } 2617 if !args.config_toml.is_empty() { 2618 layered_configs.parse_config_args(&args.config_toml)?; 2619 ui.reset(&layered_configs.merge())?; 2620 } 2621 Ok(()) 2622} 2623 2624pub fn expand_args( 2625 ui: &Ui, 2626 app: &Command, 2627 args_os: ArgsOs, 2628 config: &config::Config, 2629) -> Result<Vec<String>, CommandError> { 2630 let mut string_args: Vec<String> = vec![]; 2631 for arg_os in args_os { 2632 if let Some(string_arg) = arg_os.to_str() { 2633 string_args.push(string_arg.to_owned()); 2634 } else { 2635 return Err(cli_error("Non-utf8 argument")); 2636 } 2637 } 2638 2639 let string_args = resolve_default_command(ui, config, app, string_args)?; 2640 resolve_aliases(config, app, string_args) 2641} 2642 2643pub fn parse_args( 2644 ui: &mut Ui, 2645 app: &Command, 2646 tracing_subscription: &TracingSubscription, 2647 string_args: &[String], 2648 layered_configs: &mut LayeredConfigs, 2649) -> Result<(ArgMatches, Args), CommandError> { 2650 handle_early_args(ui, app, string_args, layered_configs)?; 2651 let matches = app 2652 .clone() 2653 .arg_required_else_help(true) 2654 .subcommand_required(true) 2655 .try_get_matches_from(string_args)?; 2656 2657 let args: Args = Args::from_arg_matches(&matches).unwrap(); 2658 if args.global_args.debug { 2659 // TODO: set up debug logging as early as possible 2660 tracing_subscription.enable_debug_logging()?; 2661 } 2662 2663 Ok((matches, args)) 2664} 2665 2666pub fn format_template<C: Clone>(ui: &Ui, arg: &C, template: &TemplateRenderer<C>) -> String { 2667 let mut output = vec![]; 2668 template 2669 .format(arg, ui.new_formatter(&mut output).as_mut()) 2670 .expect("write() to vec backed formatter should never fail"); 2671 String::from_utf8(output).expect("template output should be utf-8 bytes") 2672} 2673 2674/// CLI command builder and runner. 2675#[must_use] 2676pub struct CliRunner { 2677 tracing_subscription: TracingSubscription, 2678 app: Command, 2679 extra_configs: Vec<config::Config>, 2680 store_factories: StoreFactories, 2681 working_copy_factories: WorkingCopyFactories, 2682 revset_extensions: RevsetExtensions, 2683 commit_template_extensions: Vec<Arc<dyn CommitTemplateLanguageExtension>>, 2684 operation_template_extensions: Vec<Arc<dyn OperationTemplateLanguageExtension>>, 2685 dispatch_fn: CliDispatchFn, 2686 start_hook_fns: Vec<CliDispatchFn>, 2687 process_global_args_fns: Vec<ProcessGlobalArgsFn>, 2688} 2689 2690type CliDispatchFn = Box<dyn FnOnce(&mut Ui, &CommandHelper) -> Result<(), CommandError>>; 2691 2692type ProcessGlobalArgsFn = Box<dyn FnOnce(&mut Ui, &ArgMatches) -> Result<(), CommandError>>; 2693 2694impl CliRunner { 2695 /// Initializes CLI environment and returns a builder. 
This should be called 2696 /// as early as possible. 2697 pub fn init() -> Self { 2698 let tracing_subscription = TracingSubscription::init(); 2699 crate::cleanup_guard::init(); 2700 CliRunner { 2701 tracing_subscription, 2702 app: crate::commands::default_app(), 2703 extra_configs: vec![], 2704 store_factories: StoreFactories::default(), 2705 working_copy_factories: default_working_copy_factories(), 2706 revset_extensions: Default::default(), 2707 commit_template_extensions: vec![], 2708 operation_template_extensions: vec![], 2709 dispatch_fn: Box::new(crate::commands::run_command), 2710 start_hook_fns: vec![], 2711 process_global_args_fns: vec![], 2712 } 2713 } 2714 2715 /// Set the version to be displayed by `jj version`. 2716 pub fn version(mut self, version: &str) -> Self { 2717 self.app = self.app.version(version.to_string()); 2718 self 2719 } 2720 2721 /// Adds default configs in addition to the normal defaults. 2722 pub fn add_extra_config(mut self, extra_configs: config::Config) -> Self { 2723 self.extra_configs.push(extra_configs); 2724 self 2725 } 2726 2727 /// Adds `StoreFactories` to be used. 2728 pub fn add_store_factories(mut self, store_factories: StoreFactories) -> Self { 2729 self.store_factories.merge(store_factories); 2730 self 2731 } 2732 2733 /// Adds working copy factories to be used. 2734 pub fn add_working_copy_factories( 2735 mut self, 2736 working_copy_factories: WorkingCopyFactories, 2737 ) -> Self { 2738 merge_factories_map(&mut self.working_copy_factories, working_copy_factories); 2739 self 2740 } 2741 2742 pub fn add_symbol_resolver_extension( 2743 mut self, 2744 symbol_resolver: Box<dyn SymbolResolverExtension>, 2745 ) -> Self { 2746 self.revset_extensions.add_symbol_resolver(symbol_resolver); 2747 self 2748 } 2749 2750 pub fn add_commit_template_extension( 2751 mut self, 2752 commit_template_extension: Box<dyn CommitTemplateLanguageExtension>, 2753 ) -> Self { 2754 self.commit_template_extensions 2755 .push(commit_template_extension.into()); 2756 self 2757 } 2758 2759 pub fn add_operation_template_extension( 2760 mut self, 2761 operation_template_extension: Box<dyn OperationTemplateLanguageExtension>, 2762 ) -> Self { 2763 self.operation_template_extensions 2764 .push(operation_template_extension.into()); 2765 self 2766 } 2767 2768 pub fn add_start_hook(mut self, start_hook_fn: CliDispatchFn) -> Self { 2769 self.start_hook_fns.push(start_hook_fn); 2770 self 2771 } 2772 2773 /// Registers new subcommands in addition to the default ones. 2774 pub fn add_subcommand<C, F>(mut self, custom_dispatch_fn: F) -> Self 2775 where 2776 C: clap::Subcommand, 2777 F: FnOnce(&mut Ui, &CommandHelper, C) -> Result<(), CommandError> + 'static, 2778 { 2779 let old_dispatch_fn = self.dispatch_fn; 2780 let new_dispatch_fn = 2781 move |ui: &mut Ui, command_helper: &CommandHelper| match C::from_arg_matches( 2782 command_helper.matches(), 2783 ) { 2784 Ok(command) => custom_dispatch_fn(ui, command_helper, command), 2785 Err(_) => old_dispatch_fn(ui, command_helper), 2786 }; 2787 self.app = C::augment_subcommands(self.app); 2788 self.dispatch_fn = Box::new(new_dispatch_fn); 2789 self 2790 } 2791 2792 /// Registers new global arguments in addition to the default ones. 
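    ///
    /// A minimal sketch of the expected shape; `CustomGlobalArgs` and
    /// `--my-flag` are hypothetical names used only for illustration:
    ///
    /// ```ignore
    /// #[derive(clap::Args, Clone, Debug)]
    /// struct CustomGlobalArgs {
    ///     /// Hypothetical global flag
    ///     #[arg(long, global = true)]
    ///     my_flag: bool,
    /// }
    ///
    /// let runner = CliRunner::init().add_global_args(
    ///     |_ui: &mut Ui, args: CustomGlobalArgs| {
    ///         if args.my_flag {
    ///             // react to the flag before the command is dispatched
    ///         }
    ///         Ok(())
    ///     },
    /// );
    /// ```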
2793 pub fn add_global_args<A, F>(mut self, process_before: F) -> Self 2794 where 2795 A: clap::Args, 2796 F: FnOnce(&mut Ui, A) -> Result<(), CommandError> + 'static, 2797 { 2798 let process_global_args_fn = move |ui: &mut Ui, matches: &ArgMatches| { 2799 let custom_args = A::from_arg_matches(matches).unwrap(); 2800 process_before(ui, custom_args) 2801 }; 2802 self.app = A::augment_args(self.app); 2803 self.process_global_args_fns 2804 .push(Box::new(process_global_args_fn)); 2805 self 2806 } 2807 2808 #[instrument(skip_all)] 2809 fn run_internal( 2810 self, 2811 ui: &mut Ui, 2812 mut layered_configs: LayeredConfigs, 2813 ) -> Result<(), CommandError> { 2814 // `cwd` is canonicalized for consistency with `Workspace::workspace_root()` and 2815 // to easily compute relative paths between them. 2816 let cwd = env::current_dir() 2817 .and_then(|cwd| cwd.canonicalize()) 2818 .map_err(|_| { 2819 user_error_with_hint( 2820 "Could not determine current directory", 2821 "Did you update to a commit where the directory doesn't exist?", 2822 ) 2823 })?; 2824 // Use cwd-relative workspace configs to resolve default command and 2825 // aliases. WorkspaceLoader::init() won't do any heavy lifting other 2826 // than the path resolution. 2827 let maybe_cwd_workspace_loader = WorkspaceLoader::init(find_workspace_dir(&cwd)) 2828 .map_err(|err| map_workspace_load_error(err, None)); 2829 layered_configs.read_user_config()?; 2830 if let Ok(loader) = &maybe_cwd_workspace_loader { 2831 layered_configs.read_repo_config(loader.repo_path())?; 2832 } 2833 let config = layered_configs.merge(); 2834 ui.reset(&config)?; 2835 2836 let string_args = expand_args(ui, &self.app, env::args_os(), &config)?; 2837 let (matches, args) = parse_args( 2838 ui, 2839 &self.app, 2840 &self.tracing_subscription, 2841 &string_args, 2842 &mut layered_configs, 2843 ) 2844 .map_err(|err| map_clap_cli_error(err, ui, &layered_configs))?; 2845 for process_global_args_fn in self.process_global_args_fns { 2846 process_global_args_fn(ui, &matches)?; 2847 } 2848 2849 let maybe_workspace_loader = if let Some(path) = &args.global_args.repository { 2850 // Invalid -R path is an error. No need to proceed. 2851 let loader = WorkspaceLoader::init(&cwd.join(path)) 2852 .map_err(|err| map_workspace_load_error(err, Some(path)))?; 2853 layered_configs.read_repo_config(loader.repo_path())?; 2854 Ok(loader) 2855 } else { 2856 maybe_cwd_workspace_loader 2857 }; 2858 2859 // Apply workspace configs and --config-toml arguments. 2860 let config = layered_configs.merge(); 2861 ui.reset(&config)?; 2862 2863 // If -R is specified, check if the expanded arguments differ. Aliases 2864 // can also be injected by --config-toml, but that's obviously wrong. 
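        // The first expansion used the cwd-based workspace config, which can't
        // have seen the -R repo's settings, so expand again with the final
        // config and warn if that would have produced different arguments.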
2865 if args.global_args.repository.is_some() { 2866 let new_string_args = expand_args(ui, &self.app, env::args_os(), &config).ok(); 2867 if new_string_args.as_ref() != Some(&string_args) { 2868 writeln!( 2869 ui.warning_default(), 2870 "Command aliases cannot be loaded from -R/--repository path" 2871 )?; 2872 } 2873 } 2874 2875 let settings = UserSettings::from_config(config); 2876 let command_helper = CommandHelper { 2877 app: self.app, 2878 cwd, 2879 string_args, 2880 matches, 2881 global_args: args.global_args, 2882 settings, 2883 layered_configs, 2884 revset_extensions: self.revset_extensions.into(), 2885 commit_template_extensions: self.commit_template_extensions, 2886 operation_template_extensions: self.operation_template_extensions, 2887 maybe_workspace_loader, 2888 store_factories: self.store_factories, 2889 working_copy_factories: self.working_copy_factories, 2890 }; 2891 for start_hook_fn in self.start_hook_fns { 2892 start_hook_fn(ui, &command_helper)?; 2893 } 2894 (self.dispatch_fn)(ui, &command_helper) 2895 } 2896 2897 #[must_use] 2898 #[instrument(skip(self))] 2899 pub fn run(mut self) -> ExitCode { 2900 let builder = config::Config::builder().add_source(crate::config::default_config()); 2901 let config = self 2902 .extra_configs 2903 .drain(..) 2904 .fold(builder, |builder, config| builder.add_source(config)) 2905 .build() 2906 .unwrap(); 2907 let layered_configs = LayeredConfigs::from_environment(config); 2908 let mut ui = Ui::with_config(&layered_configs.merge()) 2909 .expect("default config should be valid, env vars are stringly typed"); 2910 let result = self.run_internal(&mut ui, layered_configs); 2911 let exit_code = handle_command_result(&mut ui, result); 2912 ui.finalize_pager(); 2913 exit_code 2914 } 2915} 2916 2917fn map_clap_cli_error( 2918 mut cmd_err: CommandError, 2919 ui: &Ui, 2920 layered_configs: &LayeredConfigs, 2921) -> CommandError { 2922 let Some(err) = cmd_err.error.downcast_ref::<clap::Error>() else { 2923 return cmd_err; 2924 }; 2925 if let (Some(ContextValue::String(arg)), Some(ContextValue::String(value))) = ( 2926 err.get(ContextKind::InvalidArg), 2927 err.get(ContextKind::InvalidValue), 2928 ) { 2929 if arg.as_str() == "--template <TEMPLATE>" && value.is_empty() { 2930 // Suppress the error, it's less important than the original error. 2931 if let Ok(template_aliases) = load_template_aliases(ui, layered_configs) { 2932 cmd_err.add_hint(format_template_aliases_hint(&template_aliases)); 2933 } 2934 } 2935 } 2936 cmd_err 2937} 2938 2939fn format_template_aliases_hint(template_aliases: &TemplateAliasesMap) -> String { 2940 let mut hint = String::from("The following template aliases are defined:\n"); 2941 hint.push_str( 2942 &template_aliases 2943 .symbol_names() 2944 .sorted_unstable() 2945 .map(|name| format!("- {name}")) 2946 .join("\n"), 2947 ); 2948 hint 2949}
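
// A rough sketch of how a binary crate is expected to drive this module; the
// actual entry point lives in the binary crate and may differ:
//
//     fn main() -> std::process::ExitCode {
//         CliRunner::init()
//             .version(env!("CARGO_PKG_VERSION"))
//             .run()
//     }
//
// `CliRunner::init()` must run first so that tracing and the cleanup guard are
// installed; `run()` then loads the configs, parses the arguments, and
// dispatches to the selected command.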