# HG changeset patch
# User Heiko Schlittermann (ZARAFA.ctq.de) <hs@schlittermann.de>
# Date 1303943118 -7200
# Node ID 40533435cfd305d620ab5789abd3aacc6b266733
# Parent  ce823daf21412238f8cf4d890e7f94a31d4883a7
cleaning old dumps (based on the number of dumps) seems to work

diff -r ce823daf2141 -r 40533435cfd3 bin/ftbackup
--- a/bin/ftbackup	Wed Apr 27 17:01:00 2011 +0200
+++ b/bin/ftbackup	Thu Apr 28 00:25:18 2011 +0200
@@ -182,12 +182,6 @@
         }
     }
 
-    # now check, which of the old backups can be purged
-    # The config KEEP tells us how many full dumps we need to
-    # keep. The pre-dump cleaning should keep this number
-    # and after successfull dump we need to cleanup again
-    #$last[0] = [ sort { $a->{stamp} <=> $b->{stamp} } @{$last[0]} ];
-
     # for safety we check if there is really a full dump not older than xxx days
     if ($dev->{level} > 0) {
         if (!@last) {
@@ -494,10 +488,22 @@
     return @devs;
 }
 
+sub human_number($) {
+    my $n = shift;
+    my @units = ("", qw(K M G T));
+    while (length($n) > 3) {
+        $n = int($n / 1024);
+        shift @units;
+    }
+    return "$n $units[0]";
+}
+
+
 sub get_estimate($$) {
     my ($dev, $level) = @_;
-    warn "% estimating $dev->{rdev} at level $level\n";
+    print STDERR "% estimating $dev->{rdev} at level $level: ";
     chomp(my $_ = `dump -S -$level $dev->{rdev}`);
+    print STDERR human_number($_) . "Byte\n";
     return $_;
 }
 
@@ -552,10 +558,14 @@
     # ones.
     # if we found level 0 dumps, we remove all level 1+ dumps older than
     # the oldest level 0 dump we'll remove
-    @{$dumps[0]} = reverse sort { $a->{stamp} <=> $b->{stamp} } @{$dumps[0]};
-    my @unlink = @{$dumps[0]}[$keep..$#{$dumps[0]}];
-    push @unlink => grep { $_->{stamp} <= $unlink[0]->{stamp} } @{@dumps[1..$#dumps]}
-        if @unlink;
+    @{$dumps[0]} = sort { $a->{stamp} <=> $b->{stamp} } @{$dumps[0]};
+    my @unlink = splice(@{$dumps[0]}, -1, @{$dumps[0]} - $keep);
+
+    if ($dumps[1]) {
+        if (!@{$dumps[0]}) { push @unlink, @{@dumps[1..$#dumps]} }
+        else { push @unlink => grep { $_->{stamp} <= $dumps[0][0]{stamp} } @{@dumps[1..$#dumps]} }
+    }
+
+    ### @unlink
 
    foreach (@unlink) {
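
A quick sanity check of the new human_number() helper, as a standalone Perl
snippet (the driver loop and sample values are mine, not part of ftbackup);
it shows the repeated integer /1024 scaling behind the estimate line that
get_estimate() now prints:

    #!/usr/bin/perl
    # standalone check; human_number() copied verbatim from the patch above
    use strict;
    use warnings;

    sub human_number($) {
        my $n = shift;
        my @units = ("", qw(K M G T));
        while (length($n) > 3) {    # more than 3 digits left: scale down one unit
            $n = int($n / 1024);
            shift @units;
        }
        return "$n $units[0]";
    }

    # hypothetical sample values
    printf "%12d => %sByte\n", $_, human_number($_)
        for 999, 1234, 10_000_000, 5 * 1024**3;
    # 999 => 999 Byte, 1234 => 1 KByte, 10000000 => 9 MByte, 5368709120 => 5 GByte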
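
The purge rule of the last hunk, restated as a self-contained sketch that can
be tried on fake data. The @dumps layout is assumed from context (one array
per dump level, each entry a hash with a numeric stamp); note two deliberate
differences from the hunk: the sketch splices from offset 0 to drop the
oldest full dumps, where the hunk starts at offset -1, and it flattens the
higher levels with map, where the hunk writes @{@dumps[1..$#dumps]}:

    #!/usr/bin/perl
    # sketch only: keep the newest $keep full dumps, then purge every
    # incremental that is not newer than the oldest full dump we keep
    use strict;
    use warnings;

    my $keep  = 2;
    my @dumps = (
        [ map { { stamp => $_ } } 100, 300, 200 ],    # level 0 (full)
        [ map { { stamp => $_ } } 150, 250, 350 ],    # level 1 (incremental)
    );

    # oldest first, then cut everything below the newest $keep full dumps
    @{ $dumps[0] } = sort { $a->{stamp} <=> $b->{stamp} } @{ $dumps[0] };
    my @unlink = splice(@{ $dumps[0] }, 0, @{ $dumps[0] } - $keep);

    # incrementals at or before the oldest surviving full dump have no
    # full dump left to restore against, so they go as well
    push @unlink, grep { $_->{stamp} <= $dumps[0][0]{stamp} }
        map { @$_ } @dumps[ 1 .. $#dumps ]
        if @{ $dumps[0] };

    print join(" ", map { $_->{stamp} } @unlink), "\n";    # prints: 100 150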