// just playing with tangled
1// Copyright 2020 The Jujutsu Authors
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7// https://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
15use std::collections::HashMap;
16use std::env;
17use std::fs;
18use std::fs::OpenOptions;
19use std::io::Read as _;
20use std::io::Write as _;
21use std::path::Path;
22use std::path::PathBuf;
23use std::sync::Arc;
24
25use itertools::Itertools as _;
26use jj_lib::backend;
27use jj_lib::backend::Backend;
28use jj_lib::backend::BackendInitError;
29use jj_lib::backend::ChangeId;
30use jj_lib::backend::CommitId;
31use jj_lib::backend::FileId;
32use jj_lib::backend::MergedTreeId;
33use jj_lib::backend::MillisSinceEpoch;
34use jj_lib::backend::Signature;
35use jj_lib::backend::Timestamp;
36use jj_lib::backend::TreeValue;
37use jj_lib::commit::Commit;
38use jj_lib::commit_builder::CommitBuilder;
39use jj_lib::config::ConfigLayer;
40use jj_lib::config::ConfigSource;
41use jj_lib::config::StackedConfig;
42use jj_lib::git_backend::GitBackend;
43use jj_lib::merged_tree::MergedTree;
44use jj_lib::object_id::ObjectId as _;
45use jj_lib::repo::MutableRepo;
46use jj_lib::repo::ReadonlyRepo;
47use jj_lib::repo::Repo;
48use jj_lib::repo::RepoLoader;
49use jj_lib::repo::StoreFactories;
50use jj_lib::repo_path::RepoPath;
51use jj_lib::repo_path::RepoPathBuf;
52use jj_lib::rewrite::RebaseOptions;
53use jj_lib::rewrite::RebasedCommit;
54use jj_lib::secret_backend::SecretBackend;
55use jj_lib::settings::UserSettings;
56use jj_lib::signing::Signer;
57use jj_lib::simple_backend::SimpleBackend;
58use jj_lib::store::Store;
59use jj_lib::transaction::Transaction;
60use jj_lib::tree::Tree;
61use jj_lib::tree_builder::TreeBuilder;
62use jj_lib::working_copy::SnapshotError;
63use jj_lib::working_copy::SnapshotOptions;
64use jj_lib::working_copy::SnapshotStats;
65use jj_lib::workspace::Workspace;
66use pollster::FutureExt as _;
67use tempfile::TempDir;
68
69use crate::test_backend::TestBackendFactory;
70
71pub mod git;
72pub mod test_backend;
73
74// TODO: Consider figuring out a way to make `GitBackend` and `git(1)` calls in
75// tests ignore external configuration and removing this function. This is
76// somewhat tricky because `gix` looks at system and user configuration, and
77// `GitBackend` also calls into `git(1)` for things like garbage collection.
/// Makes Git access hermetic for tests: prevents git2, gitoxide, and `git(1)`
/// subprocesses from reading the user's or system's Git configuration, and
/// pins `init.defaultBranch` to "master" for consistent branch naming.
pub fn hermetic_git() {
    #[cfg(feature = "git2")]
    {
        // libgit2 respects init.defaultBranch (and possibly other config
        // variables) in the user's config files. Disable access to them to make
        // our tests hermetic.
        //
        // set_search_path is unsafe because it cannot guarantee thread safety (as
        // its documentation states). For the same reason, we wrap these invocations
        // in `call_once`.
        use std::sync::Once;
        static CONFIGURE_GIT2: Once = Once::new();
        CONFIGURE_GIT2.call_once(|| unsafe {
            git2::opts::set_search_path(git2::ConfigLevel::System, "").unwrap();
            git2::opts::set_search_path(git2::ConfigLevel::Global, "").unwrap();
            git2::opts::set_search_path(git2::ConfigLevel::XDG, "").unwrap();
            git2::opts::set_search_path(git2::ConfigLevel::ProgramData, "").unwrap();
        });
    }

    // Prevent GitBackend from loading user and system configurations. For
    // gitoxide API use in tests, Config::isolated() is probably better.
    env::set_var("GIT_CONFIG_SYSTEM", "/dev/null");
    env::set_var("GIT_CONFIG_GLOBAL", "/dev/null");
    // gitoxide uses "main" as the default branch name, whereas git and libgit2
    // uses "master".
    // GIT_CONFIG_KEY_0/VALUE_0/COUNT inject a single config entry via the
    // environment, overriding any on-disk configuration.
    env::set_var("GIT_CONFIG_KEY_0", "init.defaultBranch");
    env::set_var("GIT_CONFIG_VALUE_0", "master");
    env::set_var("GIT_CONFIG_COUNT", "1");
}
108
109pub fn new_temp_dir() -> TempDir {
110 hermetic_git();
111 tempfile::Builder::new()
112 .prefix("jj-test-")
113 .tempdir()
114 .unwrap()
115}
116
117/// Returns new low-level config object that includes fake user configuration
118/// needed to run basic operations.
119pub fn base_user_config() -> StackedConfig {
120 let config_text = r#"
121 user.name = "Test User"
122 user.email = "test.user@example.com"
123 operation.username = "test-username"
124 operation.hostname = "host.example.com"
125 debug.randomness-seed = 42
126 "#;
127 let mut config = StackedConfig::with_defaults();
128 config.add_layer(ConfigLayer::parse(ConfigSource::User, config_text).unwrap());
129 config
130}
131
132/// Returns new immutable settings object that includes fake user configuration
133/// needed to run basic operations.
134pub fn user_settings() -> UserSettings {
135 UserSettings::from_config(base_user_config()).unwrap()
136}
137
/// Panics if the `CI` environment variable is set to a non-empty value.
///
/// Most CI environments set this variable automatically. See e.g.
/// <https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/store-information-in-variables#default-environment-variables>
#[track_caller]
pub fn ensure_running_outside_ci(reason: &str) {
    // An unset or empty `CI` variable means we are not in a CI environment.
    let running_in_ci = matches!(std::env::var("CI"), Ok(value) if !value.is_empty());
    assert!(!running_in_ci, "Running in CI, {reason}.");
}
147
/// Shared test fixture: a temporary directory for all on-disk state plus a
/// factory for the custom "test" backend.
#[derive(Debug)]
pub struct TestEnvironment {
    // Root directory for all test data; cleaned up when dropped.
    temp_dir: TempDir,
    // Factory used to init/load "test" backends for stores created in this
    // environment.
    test_backend_factory: TestBackendFactory,
}
153
154impl TestEnvironment {
155 pub fn init() -> Self {
156 TestEnvironment {
157 temp_dir: new_temp_dir(),
158 test_backend_factory: TestBackendFactory::default(),
159 }
160 }
161
162 pub fn root(&self) -> &Path {
163 self.temp_dir.path()
164 }
165
166 pub fn default_store_factories(&self) -> StoreFactories {
167 let mut factories = StoreFactories::default();
168 factories.add_backend("test", {
169 let factory = self.test_backend_factory.clone();
170 Box::new(move |_settings, store_path| Ok(Box::new(factory.load(store_path))))
171 });
172 factories.add_backend(
173 SecretBackend::name(),
174 Box::new(|settings, store_path| {
175 Ok(Box::new(SecretBackend::load(settings, store_path)?))
176 }),
177 );
178 factories
179 }
180
181 pub fn load_repo_at_head(
182 &self,
183 settings: &UserSettings,
184 repo_path: &Path,
185 ) -> Arc<ReadonlyRepo> {
186 RepoLoader::init_from_file_system(settings, repo_path, &self.default_store_factories())
187 .unwrap()
188 .load_at_head()
189 .unwrap()
190 }
191}
192
/// Test fixture that owns a repo initialized inside a temporary directory.
pub struct TestRepo {
    // Owns the temporary directory the repo lives in.
    pub env: TestEnvironment,
    pub repo: Arc<ReadonlyRepo>,
    // Path of the repo directory (`<temp dir>/repo`).
    repo_path: PathBuf,
}
198
/// Which commit backend a test repo/workspace should be initialized with.
#[derive(PartialEq, Eq, Copy, Clone)]
pub enum TestRepoBackend {
    /// `GitBackend` (initialized via `GitBackend::init_internal`).
    Git,
    /// `SimpleBackend`.
    Simple,
    /// The in-memory test backend from the `test_backend` module.
    Test,
}
205
206impl TestRepoBackend {
207 fn init_backend(
208 &self,
209 env: &TestEnvironment,
210 settings: &UserSettings,
211 store_path: &Path,
212 ) -> Result<Box<dyn Backend>, BackendInitError> {
213 match self {
214 TestRepoBackend::Git => Ok(Box::new(GitBackend::init_internal(settings, store_path)?)),
215 TestRepoBackend::Simple => Ok(Box::new(SimpleBackend::init(store_path))),
216 TestRepoBackend::Test => Ok(Box::new(env.test_backend_factory.init(store_path))),
217 }
218 }
219}
220
221impl TestRepo {
222 pub fn init() -> Self {
223 Self::init_with_backend(TestRepoBackend::Test)
224 }
225
226 pub fn init_with_backend(backend: TestRepoBackend) -> Self {
227 Self::init_with_backend_and_settings(backend, &user_settings())
228 }
229
230 pub fn init_with_settings(settings: &UserSettings) -> Self {
231 Self::init_with_backend_and_settings(TestRepoBackend::Test, settings)
232 }
233
234 pub fn init_with_backend_and_settings(
235 backend: TestRepoBackend,
236 settings: &UserSettings,
237 ) -> Self {
238 let env = TestEnvironment::init();
239
240 let repo_dir = env.root().join("repo");
241 fs::create_dir(&repo_dir).unwrap();
242
243 let repo = ReadonlyRepo::init(
244 settings,
245 &repo_dir,
246 &|settings, store_path| backend.init_backend(&env, settings, store_path),
247 Signer::from_settings(settings).unwrap(),
248 ReadonlyRepo::default_op_store_initializer(),
249 ReadonlyRepo::default_op_heads_store_initializer(),
250 ReadonlyRepo::default_index_store_initializer(),
251 ReadonlyRepo::default_submodule_store_initializer(),
252 )
253 .unwrap();
254
255 Self {
256 env,
257 repo,
258 repo_path: repo_dir,
259 }
260 }
261
262 pub fn repo_path(&self) -> &Path {
263 &self.repo_path
264 }
265}
266
/// Test fixture that owns a workspace (repo plus working copy) inside a
/// temporary directory.
pub struct TestWorkspace {
    // Owns the temporary directory the workspace lives in.
    pub env: TestEnvironment,
    pub workspace: Workspace,
    pub repo: Arc<ReadonlyRepo>,
}
272
impl TestWorkspace {
    /// Initializes a workspace using the default test backend and settings.
    pub fn init() -> Self {
        Self::init_with_backend(TestRepoBackend::Test)
    }

    /// Initializes a workspace using `backend` and default test settings.
    pub fn init_with_backend(backend: TestRepoBackend) -> Self {
        Self::init_with_backend_and_settings(backend, &user_settings())
    }

    /// Initializes a workspace using the default test backend and the given
    /// `settings`.
    pub fn init_with_settings(settings: &UserSettings) -> Self {
        Self::init_with_backend_and_settings(TestRepoBackend::Test, settings)
    }

    /// Initializes a workspace using `backend` and `settings`, with a signer
    /// derived from `settings`.
    pub fn init_with_backend_and_settings(
        backend: TestRepoBackend,
        settings: &UserSettings,
    ) -> Self {
        let signer = Signer::from_settings(settings).unwrap();
        Self::init_with_backend_and_signer(backend, signer, settings)
    }

    /// Initializes a workspace at `<temp dir>/repo` using the given backend,
    /// signer, and settings.
    pub fn init_with_backend_and_signer(
        backend: TestRepoBackend,
        signer: Signer,
        settings: &UserSettings,
    ) -> Self {
        let env = TestEnvironment::init();

        let workspace_root = env.root().join("repo");
        fs::create_dir(&workspace_root).unwrap();

        let (workspace, repo) = Workspace::init_with_backend(
            settings,
            &workspace_root,
            &|settings, store_path| backend.init_backend(&env, settings, store_path),
            signer,
        )
        .unwrap();

        Self {
            env,
            workspace,
            repo,
        }
    }

    /// Root directory of the workspace's working copy.
    ///
    /// NOTE(review): the returned path deliberately ends with `repo/..` and is
    /// not canonicalized.
    pub fn root_dir(&self) -> PathBuf {
        self.env.root().join("repo").join("..")
    }

    /// Path of the workspace's repo directory.
    pub fn repo_path(&self) -> &Path {
        self.workspace.repo_path()
    }

    /// Snapshots the working copy and returns the tree. Updates the working
    /// copy state on disk, but does not update the working-copy commit (no
    /// new operation).
    pub fn snapshot_with_options(
        &mut self,
        options: &SnapshotOptions,
    ) -> Result<(MergedTree, SnapshotStats), SnapshotError> {
        let mut locked_ws = self.workspace.start_working_copy_mutation().unwrap();
        let (tree_id, stats) = locked_ws.locked_wc().snapshot(options)?;
        // arbitrary operation id
        locked_ws.finish(self.repo.op_id().clone()).unwrap();
        Ok((self.repo.store().get_root_tree(&tree_id).unwrap(), stats))
    }

    /// Like `snapshot_with_options()` but with default options, discarding the
    /// snapshot stats.
    pub fn snapshot(&mut self) -> Result<MergedTree, SnapshotError> {
        let (tree_id, _stats) = self.snapshot_with_options(&SnapshotOptions::empty_for_test())?;
        Ok(tree_id)
    }
}
347
348pub fn commit_transactions(txs: Vec<Transaction>) -> Arc<ReadonlyRepo> {
349 let repo_loader = txs[0].base_repo().loader().clone();
350 let mut op_ids = vec![];
351 for tx in txs {
352 op_ids.push(tx.commit("test").unwrap().op_id().clone());
353 std::thread::sleep(std::time::Duration::from_millis(1));
354 }
355 let repo = repo_loader.load_at_head().unwrap();
356 // Test the setup. The assumption here is that the parent order matches the
357 // order in which they were merged (which currently matches the transaction
358 // commit order), so we want to know make sure they appear in a certain
359 // order, so the caller can decide the order by passing them to this
360 // function in a certain order.
361 assert_eq!(*repo.operation().parent_ids(), op_ids);
362 repo
363}
364
365pub fn read_file(store: &Store, path: &RepoPath, id: &FileId) -> Vec<u8> {
366 let mut reader = store.read_file(path, id).unwrap();
367 let mut content = vec![];
368 reader.read_to_end(&mut content).unwrap();
369 content
370}
371
372pub fn write_file(store: &Store, path: &RepoPath, contents: &str) -> FileId {
373 store
374 .write_file(path, &mut contents.as_bytes())
375 .block_on()
376 .unwrap()
377}
378
379pub fn write_normal_file(
380 tree_builder: &mut TreeBuilder,
381 path: &RepoPath,
382 contents: &str,
383) -> FileId {
384 let id = write_file(tree_builder.store(), path, contents);
385 tree_builder.set(
386 path.to_owned(),
387 TreeValue::File {
388 id: id.clone(),
389 executable: false,
390 },
391 );
392 id
393}
394
395pub fn write_executable_file(tree_builder: &mut TreeBuilder, path: &RepoPath, contents: &str) {
396 let id = write_file(tree_builder.store(), path, contents);
397 tree_builder.set(
398 path.to_owned(),
399 TreeValue::File {
400 id,
401 executable: true,
402 },
403 );
404}
405
406pub fn write_symlink(tree_builder: &mut TreeBuilder, path: &RepoPath, target: &str) {
407 let id = tree_builder
408 .store()
409 .write_symlink(path, target)
410 .block_on()
411 .unwrap();
412 tree_builder.set(path.to_owned(), TreeValue::Symlink(id));
413}
414
415pub fn create_single_tree(repo: &Arc<ReadonlyRepo>, path_contents: &[(&RepoPath, &str)]) -> Tree {
416 let store = repo.store();
417 let mut tree_builder = store.tree_builder(store.empty_tree_id().clone());
418 for (path, contents) in path_contents {
419 write_normal_file(&mut tree_builder, path, contents);
420 }
421 let id = tree_builder.write_tree().unwrap();
422 store.get_tree(RepoPathBuf::root(), &id).unwrap()
423}
424
425pub fn create_tree(repo: &Arc<ReadonlyRepo>, path_contents: &[(&RepoPath, &str)]) -> MergedTree {
426 MergedTree::resolved(create_single_tree(repo, path_contents))
427}
428
429#[must_use]
430pub fn create_random_tree(repo: &Arc<ReadonlyRepo>) -> MergedTreeId {
431 let number = rand::random::<u32>();
432 let path = RepoPathBuf::from_internal_string(format!("file{number}"));
433 create_tree(repo, &[(&path, "contents")]).id()
434}
435
436pub fn create_random_commit(mut_repo: &mut MutableRepo) -> CommitBuilder<'_> {
437 let tree_id = create_random_tree(mut_repo.base_repo());
438 let number = rand::random::<u32>();
439 mut_repo
440 .new_commit(vec![mut_repo.store().root_commit_id().clone()], tree_id)
441 .set_description(format!("random commit {number}"))
442}
443
444pub fn commit_with_tree(store: &Arc<Store>, tree_id: MergedTreeId) -> Commit {
445 let signature = Signature {
446 name: "Some One".to_string(),
447 email: "someone@example.com".to_string(),
448 timestamp: Timestamp {
449 timestamp: MillisSinceEpoch(0),
450 tz_offset: 0,
451 },
452 };
453 let commit = backend::Commit {
454 parents: vec![store.root_commit_id().clone()],
455 predecessors: vec![],
456 root_tree: tree_id,
457 change_id: ChangeId::from_hex("abcd"),
458 description: "description".to_string(),
459 author: signature.clone(),
460 committer: signature,
461 secure_sig: None,
462 };
463 store.write_commit(commit, None).block_on().unwrap()
464}
465
/// Renders a human-readable listing of `tree_id` for test assertions: one
/// header line joining the merged tree ids with `&`, then one line per file,
/// symlink, or Git-submodule entry.
///
/// Panics (via `unimplemented!`) on conflicted or otherwise unexpected tree
/// entries.
pub fn dump_tree(store: &Arc<Store>, tree_id: &MergedTreeId) -> String {
    use std::fmt::Write as _;
    let mut buf = String::new();
    writeln!(
        &mut buf,
        "tree {}",
        tree_id
            .to_merge()
            .iter()
            .map(|tree_id| tree_id.hex())
            .join("&")
    )
    .unwrap();
    let tree = store.get_root_tree(tree_id).unwrap();
    for (path, result) in tree.entries() {
        match result.unwrap().into_resolved() {
            Ok(Some(TreeValue::File { id, executable: _ })) => {
                let file_buf = read_file(store, &path, &id);
                // Lossy conversion keeps the dump usable for non-UTF-8 files.
                let file_contents = String::from_utf8_lossy(&file_buf);
                writeln!(&mut buf, " file {path:?} ({id}): {file_contents:?}").unwrap();
            }
            Ok(Some(TreeValue::Symlink(id))) => {
                writeln!(&mut buf, " symlink {path:?} ({id})").unwrap();
            }
            Ok(Some(TreeValue::GitSubmodule(id))) => {
                writeln!(&mut buf, " submodule {path:?} ({id})").unwrap();
            }
            entry => {
                // Unresolved conflicts and other entry kinds are not expected
                // in the tests that use this helper.
                unimplemented!("dumping tree entry {entry:?}");
            }
        }
    }
    buf
}
500
501pub fn write_random_commit(mut_repo: &mut MutableRepo) -> Commit {
502 create_random_commit(mut_repo).write().unwrap()
503}
504
505pub fn write_working_copy_file(workspace_root: &Path, path: &RepoPath, contents: &str) {
506 let path = path.to_fs_path(workspace_root).unwrap();
507 if let Some(parent) = path.parent() {
508 fs::create_dir_all(parent).unwrap();
509 }
510 let mut file = OpenOptions::new()
511 .write(true)
512 .create(true)
513 .truncate(true)
514 .open(path)
515 .unwrap();
516 file.write_all(contents.as_bytes()).unwrap();
517}
518
/// Convenience helper for building graphs of random commits in tests.
pub struct CommitGraphBuilder<'repo> {
    // The mutable repo the commits are written into.
    mut_repo: &'repo mut MutableRepo,
}
522
523impl<'repo> CommitGraphBuilder<'repo> {
524 pub fn new(mut_repo: &'repo mut MutableRepo) -> Self {
525 CommitGraphBuilder { mut_repo }
526 }
527
528 pub fn initial_commit(&mut self) -> Commit {
529 write_random_commit(self.mut_repo)
530 }
531
532 pub fn commit_with_parents(&mut self, parents: &[&Commit]) -> Commit {
533 let parent_ids = parents
534 .iter()
535 .map(|commit| commit.id().clone())
536 .collect_vec();
537 create_random_commit(self.mut_repo)
538 .set_parents(parent_ids)
539 .write()
540 .unwrap()
541 }
542}
543
544/// Rebase descendants of the rewritten commits. Returns map of original commit
545/// ID to rebased (or abandoned parent) commit ID.
546pub fn rebase_descendants_with_options_return_map(
547 repo: &mut MutableRepo,
548 options: &RebaseOptions,
549) -> HashMap<CommitId, CommitId> {
550 let mut rebased: HashMap<CommitId, CommitId> = HashMap::new();
551 repo.rebase_descendants_with_options(options, |old_commit, rebased_commit| {
552 let old_commit_id = old_commit.id().clone();
553 let new_commit_id = match rebased_commit {
554 RebasedCommit::Rewritten(new_commit) => new_commit.id().clone(),
555 RebasedCommit::Abandoned { parent_id } => parent_id,
556 };
557 rebased.insert(old_commit_id, new_commit_id);
558 })
559 .unwrap();
560 rebased
561}
562
563fn assert_in_rebased_map(
564 repo: &impl Repo,
565 rebased: &HashMap<CommitId, CommitId>,
566 expected_old_commit: &Commit,
567) -> Commit {
568 let new_commit_id = rebased.get(expected_old_commit.id()).unwrap_or_else(|| {
569 panic!(
570 "Expected commit to have been rebased: {}",
571 expected_old_commit.id().hex()
572 )
573 });
574 let new_commit = repo.store().get_commit(new_commit_id).unwrap().clone();
575 new_commit
576}
577
578pub fn assert_rebased_onto(
579 repo: &impl Repo,
580 rebased: &HashMap<CommitId, CommitId>,
581 expected_old_commit: &Commit,
582 expected_new_parent_ids: &[&CommitId],
583) -> Commit {
584 let new_commit = assert_in_rebased_map(repo, rebased, expected_old_commit);
585 assert_eq!(
586 new_commit.parent_ids().to_vec(),
587 expected_new_parent_ids
588 .iter()
589 .map(|x| (*x).clone())
590 .collect_vec()
591 );
592 assert_eq!(new_commit.change_id(), expected_old_commit.change_id());
593 new_commit
594}
595
596/// Maps children of an abandoned commit to a new rebase target.
597///
598/// If `expected_old_commit` was abandoned, the `rebased` map indicates the
599/// commit the children of `expected_old_commit` should be rebased to, which
600/// would have a different change id. This happens when the EmptyBehavior in
601/// RebaseOptions is not the default; because of the details of the
602/// implementation this returned parent commit is always singular.
603pub fn assert_abandoned_with_parent(
604 repo: &impl Repo,
605 rebased: &HashMap<CommitId, CommitId>,
606 expected_old_commit: &Commit,
607 expected_new_parent_id: &CommitId,
608) -> Commit {
609 let new_parent_commit = assert_in_rebased_map(repo, rebased, expected_old_commit);
610 assert_eq!(new_parent_commit.id(), expected_new_parent_id);
611 assert_ne!(
612 new_parent_commit.change_id(),
613 expected_old_commit.change_id()
614 );
615 new_parent_commit
616}
617
/// Asserts that every `*.rs` file in `test_dir` other than `runner.rs` is
/// declared as a `mod` in `runner.rs`, so no test file is silently skipped.
pub fn assert_no_forgotten_test_files(test_dir: &Path) {
    let runner_path = test_dir.join("runner.rs");
    let runner = fs::read_to_string(&runner_path).unwrap();
    for entry in fs::read_dir(test_dir).unwrap() {
        let path = entry.unwrap().path();
        // Only Rust sources other than the runner itself need a declaration.
        let is_rs = path.extension().is_some_and(|ext| ext == "rs");
        if !is_rs {
            continue;
        }
        let name = path.file_stem().unwrap();
        if name == "runner" {
            continue;
        }
        let search = format!("mod {};", name.to_str().unwrap());
        assert!(
            runner.contains(&search),
            "missing `{search}` declaration in {}",
            runner_path.display()
        );
    }
}