--- trunk/bin/BackupPC_incPartsUpdate	2005/12/12 20:59:53	256
+++ trunk/bin/BackupPC_incPartsUpdate	2005/12/13 00:10:47	264
@@ -15,6 +15,7 @@
 use Archive::Tar::Streamed;
 use Algorithm::Diff;
 use Getopt::Std;
+use File::Slurp;
 
 my $bpc = BackupPC::Lib->new || die "can't create BackupPC::Lib";
 my %Conf = $bpc->Conf();
@@ -69,9 +70,74 @@
 	return strftime($t_fmt,localtime());
 }
 
+my $hsn_cache;
+
+# resolve (host, share, backup number) to backups.id, memoized in $hsn_cache
+sub get_backup_id($$$) {
+	my ($host, $share, $num) = @_;
+
+	my $key = "$host $share $num";
+	return $hsn_cache->{$key} if ($hsn_cache->{$key});
+
+	my $sth = $dbh->prepare(qq{
+		SELECT
+			backups.id
+		FROM backups
+		INNER JOIN shares ON backups.shareID = shares.ID
+		INNER JOIN hosts  ON backups.hostID  = hosts.ID
+		WHERE hosts.name = ? AND shares.name = ? AND backups.num = ?
+	});
+	$sth->execute($host, $share, $num);
+	my ($id) = $sth->fetchrow_array;
+
+	$hsn_cache->{$key} = $id;
+
+	print STDERR "# $host $share $num == $id\n" if ($opt{d});
+
+	return $id;
+}
+
 sub tar_check($$$$) {
 	my ($host,$share,$num,$filename) = @_;
 
+	# compare freshly computed part metadata against the stored backup_parts
+	# row; drop a stale row and insert the recomputed values (named subs are
+	# package-scoped in Perl even when nested, and check_part uses only its
+	# arguments and globals, so nesting it here is purely organizational)
+	sub check_part {
+		my ($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items) = @_;
+		my $backup_id = get_backup_id($host, $share, $num);
+		my $sth_md5 = $dbh->prepare(qq{
+			select
+				id, tar_size, size, md5, items
+			from backup_parts
+			where backup_id = ? and part_nr = ?
+		});
+
+		$sth_md5->execute($backup_id, $part_nr);
+
+		if (my $row = $sth_md5->fetchrow_hashref) {
+			return if (
+				$row->{tar_size} == $tar_size &&
+				$row->{size} == $size &&
+				$row->{md5} eq $md5 &&
+				$row->{items} == $items
+			);
+			print STDERR "# deleting invalid row $row->{id}\n" if ($opt{d});
+			$dbh->do("delete from backup_parts where id = ?", undef, $row->{id});
+		}
+		print STDERR "# inserting new backup_part row\n" if ($opt{d});
+		my $sth_insert = $dbh->prepare(qq{
+			insert into backup_parts (
+				backup_id,
+				part_nr,
+				tar_size,
+				size,
+				md5,
+				items
+			) values (?,?,?,?,?,?)
+		});
+
+		$sth_insert->execute($backup_id, $part_nr, $tar_size, $size, $md5, $items);
+		$dbh->commit;
+	}
+
 	if ($debug) {
 		print STDERR " {{ CHECK: ${host}:${share}#${num} and $filename";
 	} else {
@@ -94,6 +160,8 @@
 	my $same = 1;
 	my @tar_files;
 
+	my $backup_part;
+
 	print " reading";
 
 	foreach my $tarfilename (@tar_parts) {
@@ -101,33 +169,44 @@
 		print STDERR " $tarfilename" if ($debug);
 
 		my $path = "$tar_dir/$tarfilename";
-		my $md5 = $path;
-		$md5 =~ s/\.tar\.gz$/.md5/ || die "can't create md5 filename from $md5";
-		if (! -e $md5) {
+		my $md5_path = $path;
+		$md5_path =~ s/\.tar\.gz$/.md5/ || die "can't create md5 filename from $md5_path";
+		if (! -e $md5_path) {
 			print ", creating md5";
-			system( $bin->{md5sum} . " $path > $md5") == 0 or die "can't create md5 $path: $!";
+			system( $bin->{md5sum} . " $path > $md5_path") == 0 or die "can't create md5 $path: $!";
" $path > $md5_path") == 0 or die "can't create md5 $path: $!"; } + my $md5 = read_file( $md5_path ) || die "can't read md5sum file $md5_path: $!"; + + my $part_nr = 1; + $part_nr = $1 if ($tarfilename =~ m#/(\d+)\.tar\.gz#); + + my $size = (stat( "$tar_dir/$tarfilename" ))[7] || die "can't stat $tar_dir/$tarfilename"; + open(my $fh, "gzip -cd $tar_dir/$tarfilename |") or die "can't open $tar_dir/$tarfilename: $!"; binmode($fh); my $tar = Archive::Tar::Streamed->new($fh); - my $total_size = 0; + my $tar_size = 0; + my $items = 0; while(my $entry = $tar->next) { push @tar_files, $entry->name; - $total_size += $entry->size; + $items++; + $tar_size += $entry->size; } - if ($total_size > $Conf{MaxArchiveFileSize}) { - print STDERR " part too big $total_size > $Conf{MaxArchiveFileSize} }}" if ($debug); + if ($tar_size > $Conf{MaxArchiveFileSize}) { + print STDERR " part too big $tar_size > $Conf{MaxArchiveFileSize} }}" if ($debug); $same = 0; last; - } elsif ($total_size > $Conf{MaxArchiveSize}) { - print STDERR " part bigger than media $total_size > $Conf{MaxArchiveSize} }}" if ($debug); + } elsif ($size > $Conf{MaxArchiveSize}) { + print STDERR " part bigger than media $size > $Conf{MaxArchiveSize} }}" if ($debug); $same = 0; last; } + + check_part($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items); } # short-cut and exit;