--- trunk/bin/BackupPC_incPartsUpdate	2005/10/13 17:11:59	194
+++ trunk/bin/BackupPC_incPartsUpdate	2005/10/24 16:41:13	228
@@ -6,11 +6,19 @@
 use DBI;
 use BackupPC::Lib;
 use BackupPC::View;
+use BackupPC::Attrib qw/:all/;
 use Data::Dumper;
 use Time::HiRes qw/time/;
 use POSIX qw/strftime/;
 use BackupPC::SearchLib;
 use Cwd qw/abs_path/;
+use File::Which;
+use Archive::Tar::Streamed;
+use Algorithm::Diff;
+use Getopt::Std;
+
+# kludge: minimum .tar.gz size
+my $MIN_TAR_SIZE = 80;
 
 my $path = abs_path($0);
 $path =~ s#/[^/]+$#/#;
@@ -18,7 +26,17 @@
 
 die "can't find $tarIncCreate: $!\n" unless (-x $tarIncCreate);
 
-my $debug = 0;
+my $bin;
+foreach my $c (qw/gzip split/) {
+	$bin->{$c} = which($c) || die "$0 needs $c, install it\n";
+}
+
+my %opt;
+getopts("cd", \%opt );
+
+my $debug = $opt{d};
+my $check = $opt{c} && print STDERR "NOTICE: tar archive check forced\n";
+
 $|=1;
 
 my $start_t = time();
@@ -55,6 +73,88 @@
 	return strftime($t_fmt,localtime());
 }
 
+sub tar_join($) {
+	my $filename = shift;
+
+	my $in = my $out = $filename;
+	$out .= '.tmp';
+
+	# FIXME I should really order parts manually!
+	system("cat $in/part* > $out && rm -Rf $in && mv $out $in") == 0 or die "can't join $in: $?";
+
+}
+
+sub tar_check($$$$) {
+	my ($host,$share,$num,$filename) = @_;
+
+	if ($debug) {
+		print STDERR " {{ CHECK: ${host}:${share}#${num} and $filename";
+	} else {
+		print " check";
+	}
+
+	if (-d $filename) {
+		print STDERR ", joining";
+		tar_join($filename);
+	}
+
+	print STDERR ", opening" if ($debug);
+	open(my $fh, "gzip -cd $filename |") or die "can't open $filename: $!";
+	binmode($fh);
+	my $tar = Archive::Tar::Streamed->new($fh);
+
+	print STDERR ", tar" if ($debug);
+	my @tar_files;
+	while(my $entry = $tar->next) {
+		push @tar_files, $entry->name;
+	}
+	@tar_files = sort @tar_files;
+	print STDERR " ",($#tar_files + 1), " files" if ($debug);
+
+	print STDERR ", database" if ($debug);
+
+	my $sth = $dbh->prepare(qq{
+		SELECT path,type
+		FROM files
+		JOIN shares on shares.id = shareid
+		JOIN hosts on hosts.id = shares.hostid
+		WHERE hosts.name = ? and shares.name = ? and backupnum = ?
+	});
+	$sth->execute($host, $share, $num);
+	my @db_files;
+	while( my $row = $sth->fetchrow_hashref ) {
+
+		my $path = $row->{'path'} || die "no path?";
+		$path =~ s#^/#./#;
+		$path .= '/' if ($row->{'type'} == BPC_FTYPE_DIR);
+		push @db_files, $path;
+	}
+
+	print STDERR " ",($#db_files + 1), " files, diff" if ($debug);
+
+	@db_files = sort @db_files;
+
+	my $same = 1;
+	if ($#tar_files != $#db_files) {
+		$same = 0;
+		print STDERR " NUMBER" if ($debug);
+	} else {
+		my $diff = Algorithm::Diff->new(\@tar_files, \@db_files);
+		while ( $diff->Next() ) {
+			next if $diff->Same();
+			$same = 0;
+			print "< $_\n" for $diff->Items(1);
+			print "> $_\n" for $diff->Items(2);
+		}
+	}
+
+	print " ",($same ? 'ok' : 'DIFFERENT');
+	print STDERR " }} " if ($debug);
+
+	return $same;
+}
+
+
 #----- main
 
 my $sth = $dbh->prepare( qq{
@@ -64,7 +164,8 @@
 		hosts.name as host,
 		shares.name as share,
 		backups.num as num,
-		inc_size
+		inc_size,
+		parts
 	from backups
 	join shares on backups.hostid = shares.hostid
 		and shares.id = backups.shareid
@@ -74,48 +175,73 @@
 	}
 );
 
-$sth->execute();
-
-my $sth_inc_size = $dbh->prepare(qq{ update backups set inc_size = ? where id = ? });
+my $sth_inc_size = $dbh->prepare(qq{ update backups set inc_size = ?, parts = ? where id = ? });
 my $sth_inc_deleted = $dbh->prepare(qq{ update backups set inc_deleted = ? where id = ?
 });
 
 %BackupPC::SearchLib::Conf = %Conf;
 
+$sth->execute();
+my $num_backups = $sth->rows;
+my $curr_backup = 1;
+
 while (my $row = $sth->fetchrow_hashref) {
 	my $tar_file = BackupPC::SearchLib::getGzipName($row->{'host'}, $row->{'share'}, $row->{'num'});
 
 	# this will return -1 if file doesn't exist
 	my $size = BackupPC::SearchLib::get_tgz_size_by_name($tar_file);
 
-	print curr_time, " ", $row->{'host'}, ":", $row->{'share'}, " #", $row->{'num'}, " -> $tar_file";
+	print curr_time, " $curr_backup/$num_backups ", $row->{'host'}, ":", $row->{'share'}, " #", $row->{'num'}, " -> $tar_file";
+	$curr_backup++;
 
 	my $t = time();
 
 	# re-create archive?
-	if ($row->{'inc_size'} == -1 || $size == -1 || $row->{'inc_size'} != $size) {
-		my $cmd = qq{$tarIncCreate -h "$row->{'host'}" -s "$row->{'share'}" -n $row->{'num'} | gzip -9 > $tar_dir/$tar_file};
+	if ($row->{'inc_size'} == -1 || $size == -1 ||
+		$row->{'inc_size'} != $size ||
+		$check && ! tar_check($row->{'host'}, $row->{'share'}, $row->{'num'}, "$tar_dir/$tar_file")
+	) {
+		my $cmd = qq{rm -Rf $tar_dir/$tar_file && $tarIncCreate -h "$row->{'host'}" -s "$row->{'share'}" -n $row->{'num'} | $bin->{'gzip'} $Conf{GzipLevel} > ${tar_dir}/${tar_file}.tmp};
 
 		print STDERR "## $cmd\n" if ($debug);
 		system($cmd) == 0 or die "failed: $?";
-
+
+		rename("${tar_dir}/${tar_file}.tmp", "$tar_dir/$tar_file") or die "can't rename $tar_dir/$tar_file: $!";
+
 		$size = (stat( "$tar_dir/$tar_file" ))[7];
 	}
 
-	if ($size > 45) {
+	if ($size > $MIN_TAR_SIZE) {
 		my $max_size = $Conf{'MaxArchiveSize'} || die "problem with MaxArchieSize parametar";
+		$max_size *= 1024;	# convert to bytes
+
+		# maximum file size on ISO image is 4Gb
+		# this will require Linux kernel 2.6.8 or newer
+		my $max_iso_file_size = 2**32 - 2048;
+		if ( $max_size > $max_iso_file_size ) {
+			$max_size = $max_iso_file_size;
+		}
+
+		my $parts = int( ($size + $max_size - 1) / $max_size );
+
+		if (-d "$tar_dir/$tar_file" && $parts != $row->{'parts'}) {
+			print " join";
+			tar_join("$tar_dir/$tar_file");
+		}
 
 		if ($size > $max_size && ! -d "$tar_dir/$tar_file") {
-			print " split";
+			print " split/$parts";
			my $in = my $out = "$tar_dir/$tar_file";
 			$out .= '.tmp';
 			rename $in, $out || die "can't rename $in: $!";
 			mkdir $in || die "can't mkdir $in: $!";
-			system("split -d -b $max_size $out $in/part") == 0 or die "can't split $out: $!";
+
+			my $suffix_len = length("$parts");
+			system("$bin->{'split'} -d -b $max_size -a $suffix_len $out $in/part") == 0 or die "can't split $out: $?";
 			unlink $out || die "can't unlink $out: $!";
 		}
 
-		$sth_inc_size->execute($size, $row->{'backup_id'});
+		$sth_inc_size->execute($size, $parts, $row->{'backup_id'});
 		$sth_inc_deleted->execute(0, $row->{'backup_id'});
 
 		printf(" %1.2f MB", ($size / 1024 / 1024));
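
When the script is run with the new -c switch (and -d for extra debug output), tar_check() joins any split archive, lists the tar members with Archive::Tar::Streamed, and compares that list against the files table using Algorithm::Diff. Below is a self-contained illustration of just the comparison step; the two file lists are made up for the example and do not come from any real backup.

    #!/usr/bin/perl
    use strict;
    use warnings;
    use Algorithm::Diff;

    # hypothetical sorted lists standing in for the tar contents and the database rows
    my @tar_files = sort qw( ./etc/ ./etc/passwd ./etc/shadow );
    my @db_files  = sort qw( ./etc/ ./etc/group  ./etc/passwd );

    my $same = 1;
    my $diff = Algorithm::Diff->new(\@tar_files, \@db_files);
    while ( $diff->Next() ) {
        next if $diff->Same();
        $same = 0;
        print "< $_\n" for $diff->Items(1);   # only in the tar archive
        print "> $_\n" for $diff->Items(2);   # only in the database
    }
    print $same ? "ok\n" : "DIFFERENT\n";     # prints the differing entries, then DIFFERENT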
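
The main loop converts MaxArchiveSize from KB to bytes, caps it at the ISO 9660 per-file limit of just under 4 GB, and derives the number of split parts by ceiling division. A small worked example with assumed sizes (a 9 GB archive and a 2 GB MaxArchiveSize, neither taken from the patch):

    #!/usr/bin/perl
    use strict;
    use warnings;

    my $size     = 9 * 2**30;   # assumed archive size: 9 GB in bytes
    my $max_size = 2 * 2**20;   # assumed MaxArchiveSize: 2 GB, configured in KB
    $max_size *= 1024;          # convert to bytes, as the script does

    # cap at the ISO 9660 per-file limit: 4 GB minus one 2048-byte sector
    # (note: 2**32 in Perl; ^ would be bitwise XOR)
    my $max_iso_file_size = 2**32 - 2048;
    $max_size = $max_iso_file_size if $max_size > $max_iso_file_size;

    # ceiling division: how many part files split(1) will produce
    my $parts = int( ($size + $max_size - 1) / $max_size );
    printf "%d parts of at most %d bytes\n", $parts, $max_size;   # 5 parts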
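
The FIXME in tar_join() points out that the shell glob in "cat $in/part*" is not ordered explicitly. A minimal sketch of one way to do the join in Perl with an explicit sort, assuming the fixed-width numeric suffixes produced by "split -d -a $suffix_len"; the helper name tar_join_ordered and the 64 KB read buffer are illustrative, not part of the patch:

    # sketch only: sort the part files before concatenating them,
    # instead of relying on the shell's glob expansion order
    sub tar_join_ordered {
        my $dir = shift;
        my $out = "$dir.tmp";

        # fixed-width numeric suffixes (split -d -a) sort correctly as strings
        my @parts = sort glob("$dir/part*");
        die "no parts in $dir\n" unless @parts;

        open(my $ofh, '>', $out) or die "can't write $out: $!";
        binmode($ofh);
        foreach my $part (@parts) {
            open(my $ifh, '<', $part) or die "can't read $part: $!";
            binmode($ifh);
            my $buf;
            print {$ofh} $buf while read($ifh, $buf, 65536);
            close($ifh);
        }
        close($ofh) or die "can't close $out: $!";

        # replace the directory of parts with the joined archive, as tar_join() does
        system("rm -Rf $dir") == 0 or die "can't remove $dir: $?";
        rename($out, $dir) or die "can't rename $out: $!";
    }

Because split is invoked with -a $suffix_len, every suffix has the same width, so a plain string sort keeps part00 through partNN in numeric order.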