# storeBackup.pl --sourceDir /home/user --backupDir /tmp/my_backup_destination
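# To restore, point storeBackupRecover.pl at one of the time-stamped backups it
# created (same flags the self-tests below use; the <date> component is illustrative):
# storeBackupRecover.pl -b /tmp/my_backup_destination/default/<date> -t /tmp/restored -r /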
# It's slow the first time because it compresses all files bigger than 1k (default setting)
# The tool keeps track of which files have already been compressed
# btrfs warning: storeBackup deduplicates via hard links, so you may run out of hard links soon
# known impurity: the test cases seem to be using /tmp/storeBackup.lock ..
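# the scripts apparently invoke `mount` at runtime; the build sandbox has no
# real mount, so a no-op stub keeps the self-tests below happy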
dummyMount = writeScriptBin "mount" "#!${stdenv.shell}";
stdenv.mkDerivation rec {
pname = "store-backup";
enableParallelBuilding = true;
nativeBuildInputs = [ makeWrapper ];
buildInputs = [ perl ];
url = "https://download.savannah.gnu.org/releases/storebackup/storeBackup-${version}.tar.bz2";
hash = "sha256-Ki1DT2zypFFiiMVd9Y8eSX7T+yr8moWMoALmAexjqWU=";
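# Fix for CVE-2020-7040: the fixed /tmp/storeBackup.lock path allows symlink attacks.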
# https://www.openwall.com/lists/oss-security/2020/01/20/3
mv $out/_ATTENTION_ $out/doc
mv $out/{correct.sh,cron-storebackup} $out/scripts
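# hard-code absolute paths to coreutils and to a Perl that bundles DB_File
# (needed by the scripts) into all *.pl files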
find $out -name "*.pl" | xargs sed -i \
  -e 's@/bin/pwd@${coreutils}/bin/pwd@' \
  -e 's@/bin/sync@${coreutils}/bin/sync@' \
  -e '1 s@/usr/bin/env perl@${perl.withPackages (p: [ p.DBFile ])}/bin/perl@'
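# wrap every installed program so its runtime tools are found on PATH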
do wrapProgram "$p" --prefix PATH ":" "${
# run a dummy test to make sure the installed tools actually work
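# the no-op `mount` stub defined above has to be reachable during the tests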
PATH=$PATH:${dummyMount}/bin
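# give the tests a writable, throwaway $HOME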
export HOME=$(mktemp -d)
{ # simple sanity test: back up and restore some simple store paths
echo RUNNING TEST "$1" source: "$source"
$out/bin/storeBackup.pl --sourceDir "$source" --backupDir backup
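# each run lands in a time-stamped directory under backup/default, so a
# lexical sort finds the most recent backup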
latestBackup=backup/default/$(ls -1 backup/default | sort | tail -n 1)
$out/bin/storeBackupRecover.pl -b "$latestBackup" -t restored -r /
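# the restored tree must match the source exactly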
${diffutils}/bin/diff -r "$source" restored
# storeBackupCheckSource should return 0
$out/bin/storeBackupCheckSource.pl -s "$source" -b "$latestBackup"
# storeBackupCheckSource should return non-zero when given a different source
! $out/bin/storeBackupCheckSource.pl -s "$TMP" -b "$latestBackup"
# storeBackupCheckBackup should return 0
$out/bin/storeBackupCheckBackup.pl -c "$latestBackup"
backupRestore 'test 1: backup, restore' $testDir
# test huge blocks; according to the docs, files bigger than 100MB get split into pieces
dd if=/dev/urandom bs=100M of=block-1 count=1
dd if=/dev/urandom bs=100M of=block-2 count=1
cat block-1 block-2 > $testDir/block
backupRestore 'test 1 with huge block' $testDir
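# swap the two halves: both 100MB blocks already exist in the backup, so this
# presumably exercises block-level deduplication on a changed file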
cat block-2 block-1 > $testDir/block
backupRestore 'test 1 with huge block reversed' $testDir
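# back up the same tree twice; the second run should mostly hard-link against the first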
backupRestore 'test 2: backup, restore' $out
backupRestore 'test 3: backup, restore' $out
backupRestore 'test 4: backup diffutils to the same backup location, restore' ${diffutils}
description = "Backup suite that stores files on other disks";
homepage = "https://savannah.nongnu.org/projects/storebackup";
license = lib.licenses.gpl3Plus;
maintainers = [ lib.maintainers.marcweber ];
platforms = lib.platforms.linux;