nixpkgs mirror (for testing)
github.com/NixOS/nixpkgs
nix
# Dependencies, alphabetized per common nixpkgs style.
{
  bzip2,
  coreutils,
  diffutils,
  fetchurl,
  lib,
  makeWrapper,
  perl,
  stdenv,
  which,
  writeScriptBin,
}:
13
14# quick usage:
15# storeBackup.pl --sourceDir /home/user --backupDir /tmp/my_backup_destination
# It's slow the first time because it compresses all files bigger than 1k (default setting).
# The backup tool keeps track of which files have been compressed.
18
19# btrfs warning: you may run out of hardlinks soon
20
# known impurity: test cases seem to be using /tmp/storeBackup.lock ...
22
let
  # No-op `mount` stub: the sanity tests run in installPhase expect a
  # `mount` binary on PATH, so we provide a script that is only a shebang
  # line and does nothing.
  dummyMount = writeScriptBin "mount" "#!${stdenv.shell}";
in
26
stdenv.mkDerivation (finalAttrs: {

  version = "3.5.2";

  pname = "store-backup";

  enableParallelBuilding = true;

  nativeBuildInputs = [ makeWrapper ];
  # perl is needed to run the shipped *.pl scripts during the sanity tests
  # below (their shebangs are rewritten to a perl-with-DB_File via sed).
  buildInputs = [ perl ];

  src = fetchurl {
    url = "mirror://savannah/storebackup/storeBackup-${finalAttrs.version}.tar.bz2";
    hash = "sha256-Ki1DT2zypFFiiMVd9Y8eSX7T+yr8moWMoALmAexjqWU=";
  };

  patches = [
    # https://www.openwall.com/lists/oss-security/2020/01/20/3
    ./CVE-2020-7040.patch
  ];

  # The tarball ships ready-to-run Perl scripts, so there is no build phase.
  # installPhase: (1) copies everything into $out and sorts a few files into
  # doc/ and scripts/, (2) rewrites hard-coded /bin paths and the
  # /usr/bin/env shebang to store paths (DB_File is a runtime Perl dep),
  # (3) wraps the bin/ entry points so `which` and `bzip2` are on PATH,
  # then (4) runs backup/restore round-trips against a small test tree,
  # >100MB files (which storeBackup splits into blocks), and store paths,
  # diffing each restore against its source. The dummyMount stub satisfies
  # the tools' expectation of a `mount` binary on PATH.
  installPhase = ''
    mkdir -p $out/scripts
    mv * $out
    mv $out/_ATTENTION_ $out/doc
    mv $out/{correct.sh,cron-storebackup} $out/scripts

    find $out -name "*.pl" | xargs sed -i \
      -e 's@/bin/pwd@${coreutils}/bin/pwd@' \
      -e 's@/bin/sync@${coreutils}/bin/sync@' \
      -e '1 s@/usr/bin/env perl@${perl.withPackages (p: [ p.DBFile ])}/bin/perl@'

    for p in $out/bin/*
    do wrapProgram "$p" --prefix PATH ":" "${
      lib.makeBinPath [
        which
        bzip2
      ]
    }"
    done

    patchShebangs $out
    # do a dummy test ensuring this works

    PATH=$PATH:${dummyMount}/bin

    export USER=test
    export HOME=$(mktemp -d)
    { # simple sanity test, test backup/restore of simple store paths

      mkdir backup

      backupRestore(){
        source="$2"
        echo =========
        echo RUNNING TEST "$1" source: "$source"
        mkdir restored

        $out/bin/storeBackup.pl --sourceDir "$source" --backupDir backup
        latestBackup=backup/default/$(ls -1 backup/default | sort | tail -n 1)
        $out/bin/storeBackupRecover.pl -b "$latestBackup" -t restored -r /
        ${diffutils}/bin/diff -r "$source" restored

        # storeBackupCheckSource should return 0
        $out/bin/storeBackupCheckSource.pl -s "$source" -b "$latestBackup"
        # storeBackupCheckSource should return not 0 when using different source
        ! $out/bin/storeBackupCheckSource.pl -s $TMP -b "$latestBackup"

        # storeBackupCheckBackup should return 0
        $out/bin/storeBackupCheckBackup.pl -c "$latestBackup"

        chmod -R +w restored
        rm -fr restored
      }

      testDir=$TMP/testDir

      mkdir $testDir
      echo X > $testDir/X
      ln -s ./X $testDir/Y

      backupRestore 'test 1: backup, restore' $testDir

      # test huge blocks, according to docs files bigger than 100MB get split
      # into pieces
      dd if=/dev/urandom bs=100M of=block-1 count=1
      dd if=/dev/urandom bs=100M of=block-2 count=1
      cat block-1 block-2 > $testDir/block
      backupRestore 'test 1 with huge block' $testDir

      cat block-2 block-1 > $testDir/block
      backupRestore 'test 1 with huge block reversed' $testDir

      backupRestore 'test 2: backup, restore' $out
      backupRestore 'test 3: backup, restore' $out
      backupRestore 'test 4: backup diffutils to same backup locations, restore' ${diffutils}
    }
  '';

  meta = {
    description = "Backup suite that stores files on other disks";
    homepage = "https://savannah.nongnu.org/projects/storebackup";
    license = lib.licenses.gpl3Plus;
    maintainers = [ lib.maintainers.marcweber ];
    platforms = lib.platforms.linux;
  };
})