1{lib, stdenv, which, coreutils, perl, fetchurl, makeWrapper, diffutils , writeScriptBin, bzip2}:
2
3# quick usage:
4# storeBackup.pl --sourceDir /home/user --backupDir /tmp/my_backup_destination
# It's slow the first time because it compresses all files bigger than 1k (default setting)
# The backup tool keeps track of which files got compressed
7
8# btrfs warning: you may run out of hardlinks soon
9
# known impurity: test cases seem to be using /tmp/storeBackup.lock ..
11
# Stub `mount` executable: a script consisting of only a shebang, i.e. a no-op.
# It is put on PATH in installPhase before the sanity tests run — presumably the
# test cases invoke `mount`, which is unavailable in the build sandbox (TODO confirm).
let dummyMount = writeScriptBin "mount" "#!${stdenv.shell}";
in
14
stdenv.mkDerivation rec {

  version = "3.5";

  pname = "store-backup";

  enableParallelBuilding = true;

  nativeBuildInputs = [ makeWrapper ];
  buildInputs = [ perl ];

  src = fetchurl {
    url = "https://download.savannah.gnu.org/releases/storebackup/storeBackup-${version}.tar.bz2";
    sha256 = "0y4gzssc93x6y93mjsxm5b5cdh68d7ffa43jf6np7s7c99xxxz78";
  };

  # The tarball ships ready-to-run Perl scripts, so there is no build phase:
  # we only install, patch hard-coded paths, wrap the programs, and run a
  # backup/restore sanity test against the freshly installed tools.
  installPhase = ''
    mkdir -p $out/scripts
    mv * $out
    mv $out/_ATTENTION_ $out/doc
    mv $out/{correct.sh,cron-storebackup} $out/scripts

    # Replace hard-coded /bin paths and the env-based perl shebang with store
    # paths; DB_File is required at runtime, hence perl.withPackages.
    find $out -name "*.pl" | xargs sed -i \
      -e 's@/bin/pwd@${coreutils}/bin/pwd@' \
      -e 's@/bin/sync@${coreutils}/bin/sync@' \
      -e '1 s@/usr/bin/env perl@${perl.withPackages (p: [ p.DBFile ])}/bin/perl@'

    for p in $out/bin/*
    do wrapProgram "$p" --prefix PATH ":" "${lib.makeBinPath [ which bzip2 ]}"
    done

    patchShebangs $out
    # do a dummy test ensuring this works

    # dummyMount provides a no-op `mount` stub (unavailable in the sandbox)
    PATH=$PATH:${dummyMount}/bin


    { # simple sanity test, test backup/restore of simple store paths

      mkdir backup

      # backupRestore <test name> <source dir>:
      # back up <source dir>, restore it, and diff restore against source;
      # also exercise the check tools on both a matching and a wrong source.
      backupRestore(){
        source="$2"
        echo =========
        echo RUNNING TEST "$1" source: "$source"
        mkdir restored

        $out/bin/storeBackup.pl --sourceDir "$source" --backupDir backup
        # backups are named by timestamp, so the lexically last one is newest
        latestBackup=backup/default/$(ls -1 backup/default | sort | tail -n 1)
        $out/bin/storeBackupRecover.pl -b "$latestBackup" -t restored -r /
        ${diffutils}/bin/diff -r "$source" restored

        # storeBackupCheckSource should return 0
        $out/bin/storeBackupCheckSource.pl -s "$source" -b "$latestBackup"
        # storeBackupCheckSource should return not 0 when using different source
        ! $out/bin/storeBackupCheckSource.pl -s $TMP -b "$latestBackup"

        # storeBackupCheckBackup should return 0
        $out/bin/storeBackupCheckBackup.pl -c "$latestBackup"

        # restored store paths are read-only; make writable before cleanup
        chmod -R +w restored
        rm -fr restored
      }

      testDir=$TMP/testDir

      mkdir $testDir
      echo X > $testDir/X
      ln -s ./X $testDir/Y

      backupRestore 'test 1: backup, restore' $testDir

      # test huge blocks, according to docs files bigger than 100MB get split
      # into pieces
      dd if=/dev/urandom bs=100M of=block-1 count=1
      dd if=/dev/urandom bs=100M of=block-2 count=1
      cat block-1 block-2 > $testDir/block
      backupRestore 'test 1 with huge block' $testDir

      cat block-2 block-1 > $testDir/block
      backupRestore 'test 1 with huge block reversed' $testDir

      backupRestore 'test 2: backup, restore' $out
      backupRestore 'test 3: backup, restore' $out
      backupRestore 'test 4: backup diffutils to same backup locations, restore' ${diffutils}
    }
  '';

  meta = {
    # nixpkgs convention: descriptions must not start with an article ("A"/"The")
    description = "Backup suite that stores files on other disks";
    homepage = "https://savannah.nongnu.org/projects/storebackup";
    license = lib.licenses.gpl3Plus;
    maintainers = [ lib.maintainers.marcweber ];
    platforms = lib.platforms.linux;
  };
}