#!/usr/local/bin/perl -w

use strict;
use lib "__INSTALLDIR__/lib";

use DBI;
use BackupPC::Lib;
use BackupPC::View;
use BackupPC::Attrib qw/:all/;
use Data::Dumper;
use Time::HiRes qw/time/;
use POSIX qw/strftime/;
use Cwd qw/abs_path/;
use File::Which;
use Archive::Tar::Streamed;
use Algorithm::Diff;
use Getopt::Std;
use File::Slurp;

my $bpc = BackupPC::Lib->new || die "can't create BackupPC::Lib";
my %Conf = $bpc->Conf();

use BackupPC::SearchLib;
%BackupPC::SearchLib::Conf = %Conf;

# Locate BackupPC_tarIncCreate in the same directory as this script.
my $path = abs_path($0);
$path =~ s#/[^/]+$#/#;
# plain concatenation -- the original ".=" also clobbered $path as a side effect
my $tarIncCreate = $path . 'BackupPC_tarIncCreate';

die "can't find $tarIncCreate: $!\n" unless (-x $tarIncCreate);

# External helper binaries required at runtime.
my $bin;
foreach my $c (qw/gzip md5sum/) {
	$bin->{$c} = which($c) || die "$0 needs $c, install it\n";
}

# -c : force tar archive check, -d : debug output
my %opt;
getopts("cd", \%opt );

my $debug = $opt{d};
my $check = $opt{c} && print STDERR "NOTICE: tar archive check forced\n";

$| = 1;		# unbuffered progress output

my $start_t = time();

my $t_fmt = '%Y-%m-%d %H:%M:%S';

my $dsn  = $Conf{SearchDSN} || die "Need SearchDSN in config.pl\n";
my $user = $Conf{SearchUser} || '';

my $dbh = DBI->connect($dsn, $user, "", { RaiseError => 1, AutoCommit => 0 });

my $tar_dir = $Conf{InstallDir} . '/' . $Conf{GzipTempDir};

die "problem with $tar_dir, check GzipTempDir in configuration\n" unless (-d $tar_dir && -w $tar_dir);

#---- subs ----
# Format a duration (in seconds) as MM:SS, prefixed with "Nh" when the
# duration reaches a full hour. Returns immediately (undef) for 0/undef.
sub fmt_time {
	my $t = shift || return;
	my $out = "";
	# gmtime on a small epoch value decomposes a duration into s/m/h;
	# NOTE(review): durations of 24h or more wrap -- days are discarded
	my ($ss, $mm, $hh) = gmtime($t);
	$out .= "${hh}h" if ($hh);
	$out .= sprintf("%02d:%02d", $mm, $ss);
	return $out;
}

# Current local time formatted with the file-level $t_fmt pattern
# ('%Y-%m-%d %H:%M:%S'); used to timestamp progress lines.
sub curr_time {
	return strftime($t_fmt, localtime());
}

# Memoization cache for get_backup_id: "host share num" => backups.id
my $hsn_cache;

# Resolve a (host, share, backup number) triple to its backups.id.
# Returns undef when no matching row exists. Results (including misses)
# are cached so repeated lookups don't hit the database again.
sub get_backup_id($$$) {
	my ($host, $share, $num) = @_;

	my $key = "$host $share $num";
	# 'exists' (not truthiness) so cached undef/0 also short-circuits
	return $hsn_cache->{$key} if (exists $hsn_cache->{$key});

	my $sth = $dbh->prepare(qq{
		SELECT
			backups.id
		FROM backups
		INNER JOIN shares ON backups.shareID=shares.ID
		INNER JOIN hosts  ON backups.hostID = hosts.ID
		where hosts.name = ? and shares.name = ? and backups.num = ?
	});
	$sth->execute($host, $share, $num);
	my ($id) = $sth->fetchrow_array;

	$hsn_cache->{$key} = $id;

	print STDERR "# $host $share $num == $id\n" if ($opt{d});

	return $id;
}

# Verify the on-disk tar archive(s) for one backup against the database.
#
# $filename is the archive name relative to $tar_dir; a directory of
# N.tar.gz files for multi-part archives, otherwise a single .tar.gz.
# For each part it (re)creates the .md5 sidecar if missing, streams the
# gzipped tar to count entries and total size, and records the part in
# backup_parts. It then diffs the tar's file list against the files
# table. Returns true when archive and database agree, false otherwise.
sub tar_check($$$$) {
	my ($host,$share,$num,$filename) = @_;

	# Insert (or refresh, when the stored row is stale) the backup_parts
	# row describing one archive part.
	sub check_part {
		my ($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items) = @_;
		my $backup_id = get_backup_id($host, $share, $num);
		my $sth_md5 = $dbh->prepare(qq{
			select
				id, tar_size, size, md5, items
			from backup_parts
			where backup_id = ? and part_nr = ?
		});

		$sth_md5->execute($backup_id, $part_nr);

		if (my $row = $sth_md5->fetchrow_hashref) {
			# existing row already matches -- nothing to do
			return if (
				$row->{tar_size} == $tar_size &&
				$row->{size} == $size &&
				$row->{md5} eq $md5 &&
				$row->{items} == $items
			);
			print STDERR "# deleting invalid row $row->{id}\n" if ($opt{d});
			# bind the id instead of interpolating it into the SQL
			$dbh->do(qq{ delete from backup_parts where id = ? }, undef, $row->{id});
		}
		print STDERR "# inserting new backup_part row\n";
		my $sth_insert = $dbh->prepare(qq{
			insert into backup_parts (
				backup_id,
				part_nr,
				tar_size,
				size,
				md5,
				items
			) values (?,?,?,?,?,?)
		});

		$sth_insert->execute($backup_id, $part_nr, $tar_size, $size, $md5, $items);
		$dbh->commit;
	}

	if ($debug) {
		print STDERR " {{ CHECK: ${host}:${share}#${num} and $filename";
	} else {
		print " check";
	}

	my @tar_parts;

	if (-d "$tar_dir/$filename") {
		# multi-part archive: one <part_nr>.tar.gz per part in the directory;
		# prefix each entry with the archive directory name
		print STDERR " multi-part" if ($opt{d});
		opendir(my $dir, "$tar_dir/$filename") || die "can't readdir $tar_dir/$filename: $!";
		@tar_parts = map { my $p = $_; $p =~ s#^#$filename/#; $p } grep { !/^\./ && !/md5/ && -f "$tar_dir/$filename/$_" } readdir($dir);
		closedir($dir);
	} else {
		push @tar_parts, "$filename.tar.gz";
	}

	print " [parts: ", join(", ", @tar_parts), "]" if ($opt{d});

	my $same = 1;
	my @tar_files;

	my $backup_part;

	print " reading";

	foreach my $tarfilename (@tar_parts) {

		print STDERR " $tarfilename" if ($debug);

		my $path = "$tar_dir/$tarfilename";
		my $md5_path = $path;
		$md5_path =~ s/\.tar\.gz$/.md5/ || die "can't create md5 filename from $md5_path";
		if (! -e $md5_path) {
			print ", creating md5";
			# NOTE(review): $path/$md5_path are shell-interpolated; they come
			# from configuration and archive naming, but quoting would be safer
			system( $bin->{md5sum} . " $path > $md5_path") == 0 or die "can't create md5 $path: $!";
		}

		my $md5 = read_file( $md5_path ) || die "can't read md5sum file $md5_path: $!";

		# part number is taken from the "<dir>/<N>.tar.gz" name; single-part
		# archives default to part 1
		my $part_nr = 1;
		$part_nr = $1 if ($tarfilename =~ m#/(\d+)\.tar\.gz#);

		my $size = (stat( "$tar_dir/$tarfilename" ))[7] || die "can't stat $tar_dir/$tarfilename";

		open(my $fh, "gzip -cd $tar_dir/$tarfilename |") or die "can't open $tar_dir/$tarfilename: $!";
		binmode($fh);
		my $tar = Archive::Tar::Streamed->new($fh);

		# walk every entry, collecting names and the uncompressed payload size
		my $tar_size = 0;
		my $items = 0;

		while (my $entry = $tar->next) {
			push @tar_files, $entry->name;
			$items++;
			$tar_size += $entry->size;
		}

		if ($tar_size > $Conf{MaxArchiveFileSize}) {
			print STDERR " part too big $tar_size > $Conf{MaxArchiveFileSize} }}" if ($debug);
			$same = 0;
			last;
		} elsif ($size > $Conf{MaxArchiveSize}) {
			print STDERR " part bigger than media $size > $Conf{MaxArchiveSize} }}" if ($debug);
			$same = 0;
			last;
		}

		check_part($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items);
	}

	# short-cut and exit;
	return $same unless($same);

	@tar_files = sort @tar_files;
	print STDERR " ", ($#tar_files + 1), " files" if ($debug);

	print STDERR ", database" if ($debug);

	my $sth = $dbh->prepare(qq{
		SELECT path,type
		FROM files
		JOIN shares on shares.id = shareid
		JOIN hosts on hosts.id = shares.hostid
		WHERE hosts.name = ? and shares.name = ? and backupnum = ?
	});
	$sth->execute($host, $share, $num);
	my @db_files;
	while (my $row = $sth->fetchrow_hashref) {

		# normalize to the tar convention: leading "./", trailing "/" on dirs
		my $path = $row->{'path'} || die "no path?";
		$path =~ s#^/#./#;
		$path .= '/' if ($row->{'type'} == BPC_FTYPE_DIR);
		push @db_files, $path;
	}

	print STDERR " ", ($#db_files + 1), " files, diff" if ($debug);

	@db_files = sort @db_files;

	if ($#tar_files != $#db_files) {
		$same = 0;
		print STDERR " NUMBER" if ($debug);
	} else {
		my $diff = Algorithm::Diff->new(\@tar_files, \@db_files);
		while ( $diff->Next() ) {
			next if $diff->Same();
			$same = 0;
			print "< $_\n" for $diff->Items(1);	# only in tar
			print "> $_\n" for $diff->Items(2);	# only in database
		}
	}

	print " ", ($same ? 'ok' : 'DIFFERENT');
	print STDERR " }} " if ($debug);

	return $same;
}

#----- main

# Every backup whose increments haven't been deleted, oldest first.
my $sth = $dbh->prepare( qq{
	select
		backups.id as backup_id,
		hosts.name as host,
		shares.name as share,
		backups.num as num,
		inc_size,
		parts
	from backups
	join shares on backups.hostid = shares.hostid
		and shares.id = backups.shareid
	join hosts on shares.hostid = hosts.id
	where not inc_deleted
	order by backups.date
} );

$sth->execute();
my $num_backups = $sth->rows;
my $curr_backup = 1;

while (my $row = $sth->fetchrow_hashref) {
	my $tar_file = BackupPC::SearchLib::getGzipName($row->{'host'}, $row->{'share'}, $row->{'num'});

	# this will return -1 if file doesn't exist
	my $size = BackupPC::SearchLib::get_tgz_size_by_name($tar_file);

	print "# size: $size backup.size: ", $row->{inc_size}, "\n" if ($opt{d});

	# archive exists and matches the recorded size: verify it when -c was
	# given, otherwise leave it alone
	if ( $row->{'inc_size'} != -1 && $size != -1 && $row->{'inc_size'} == $size) {
		if ($check) {
			tar_check($row->{'host'}, $row->{'share'}, $row->{'num'}, $tar_file) && next;
		} else {
			next;
		}
	}

	print curr_time, " $curr_backup/$num_backups ", $row->{'host'}, ":", $row->{'share'}, " #", $row->{'num'}, " -> $tar_file";
	$curr_backup++;

	my $t = time();

	# re-create archive?
	my $cmd = qq{ $tarIncCreate -h "$row->{'host'}" -s "$row->{'share'}" -n $row->{'num'} -f };
	print STDERR "## $cmd\n" if ($debug);

	if (system($cmd) != 0) {
		print STDERR " FAILED";
	}

	print ", dur: ", fmt_time(time() - $t), "\n";

	$dbh->commit;

}

undef $sth;
$dbh->disconnect;