/[BackupPC]/trunk/bin/BackupPC_incPartsUpdate
This is a repository of my old source code, which isn't updated any more. Go to git.rot13.org for current projects!

Contents of /trunk/bin/BackupPC_incPartsUpdate

Parent Directory | Revision Log


Revision 280
Wed Dec 14 10:40:10 2005 UTC (18 years, 5 months ago) by dpavlin
File size: 8223 bytes
 r11683@llin:  dpavlin | 2005-12-14 13:40:02 +0100
 fix tars without files

#!/usr/local/bin/perl -w

use strict;
use lib "__INSTALLDIR__/lib";

use DBI;
use BackupPC::Lib;
use BackupPC::View;
use BackupPC::Attrib qw/:all/;
use Data::Dumper;
use Time::HiRes qw/time/;
use POSIX qw/strftime/;
use Cwd qw/abs_path/;
use File::Which;
use Archive::Tar::Streamed;
use Algorithm::Diff;
use Getopt::Std;
use File::Slurp;

my $bpc = BackupPC::Lib->new || die "can't create BackupPC::Lib";
my %Conf = $bpc->Conf();

use BackupPC::SearchLib;
%BackupPC::SearchLib::Conf = %Conf;

my $path = abs_path($0);
$path =~ s#/[^/]+$#/#;
my $tarIncCreate = $path .= 'BackupPC_tarIncCreate';

die "can't find $tarIncCreate: $!\n" unless (-x $tarIncCreate);

my $bin;
foreach my $c (qw/gzip md5sum/) {
    $bin->{$c} = which($c) || die "$0 needs $c, install it\n";
}

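# command-line options:
#   -c  force a check of existing tar archives
#   -d  enable debug output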
my %opt;
getopts("cd", \%opt );

my $debug = $opt{d};
my $check = $opt{c} && print STDERR "NOTICE: tar archive check forced\n";

$|=1;

my $start_t = time();

my $t_fmt = '%Y-%m-%d %H:%M:%S';

my $dsn = $Conf{SearchDSN} || die "Need SearchDSN in config.pl\n";
my $user = $Conf{SearchUser} || '';

my $dbh = DBI->connect($dsn, $user, "", { RaiseError => 1, AutoCommit => 0 });

my $tar_dir = $Conf{InstallDir}.'/'.$Conf{GzipTempDir};

die "problem with $tar_dir, check GzipTempDir in configuration\n" unless (-d $tar_dir && -w $tar_dir);

#---- subs ----

sub fmt_time {
    my $t = shift || return;
    my $out = "";
    my ($ss,$mm,$hh) = gmtime($t);
    $out .= "${hh}h" if ($hh);
    $out .= sprintf("%02d:%02d", $mm,$ss);
    return $out;
}

sub curr_time {
    return strftime($t_fmt,localtime());
}

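# cache of backup IDs keyed by "host share num", filled by get_backup_id()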
my $hsn_cache;

sub get_backup_id($$$) {
    my ($host, $share, $num) = @_;

    my $key = "$host $share $num";
    return $hsn_cache->{$key} if ($hsn_cache->{$key});

    my $sth = $dbh->prepare(qq{
        SELECT
            backups.id
        FROM backups
        INNER JOIN shares ON backups.shareID=shares.ID
        INNER JOIN hosts  ON backups.hostID = hosts.ID
        where hosts.name = ? and shares.name = ? and backups.num = ?
    });
    $sth->execute($host, $share, $num);
    my ($id) = $sth->fetchrow_array;

    $hsn_cache->{$key} = $id;

    print STDERR "# $host $share $num == $id\n" if ($opt{d});

    return $id;
}

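# tar_check: verify the on-disk tar part(s) of one backup (size limits, item
# count, md5 sum) against the backup_parts table and compare the list of
# archived files with the files table; returns true if everything matches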
sub tar_check($$$$) {
    my ($host,$share,$num,$filename) = @_;

    my $t = time();
    print curr_time, " check $host:$share#$num -> $filename";

    sub check_part {
        my ($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items) = @_;
        my $backup_id = get_backup_id($host, $share, $num);
        my $sth_md5 = $dbh->prepare(qq{
            select
                id, tar_size, size, md5, items
            from backup_parts
            where backup_id = ? and part_nr = ?
        });

        $sth_md5->execute($backup_id, $part_nr);

        if (my $row = $sth_md5->fetchrow_hashref) {
            return if (
                $row->{tar_size} >= $tar_size &&
                $row->{size} == $size &&
                $row->{md5} eq $md5 &&
                $row->{items} == $items
            );
            print ", deleting invalid backup_parts $row->{id}";
            $dbh->do(qq{ delete from backup_parts where id = $row->{id} });
        }
        print ", inserting new";
        my $sth_insert = $dbh->prepare(qq{
            insert into backup_parts (
                backup_id,
                part_nr,
                tar_size,
                size,
                md5,
                items
            ) values (?,?,?,?,?,?)
        });

        $sth_insert->execute($backup_id, $part_nr, $tar_size, $size, $md5, $items);
        $dbh->commit;
    }

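    # build the list of tar parts: either all numbered parts inside a
    # per-backup directory (multi-part archive) or a single .tar.gz file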
    my @tar_parts;

    if (-d "$tar_dir/$filename") {
        print ", multi-part";
        opendir(my $dir, "$tar_dir/$filename") || die "can't readdir $tar_dir/$filename: $!";
        @tar_parts = map { my $p = $_; $p =~ s#^#${filename}/#; $p } grep { !/^\./ && !/md5/ && -f "$tar_dir/$filename/$_" } readdir($dir);
        closedir($dir);
    } else {
        push @tar_parts, "${filename}.tar.gz";
    }

    print " [parts: ",join(", ", @tar_parts),"]" if ($opt{d});

    my $same = 1;
    my @tar_files;

    my $backup_part;

    print " reading" if ($opt{d});

    foreach my $tarfilename (@tar_parts) {

        print "\n\t- $tarfilename";

        my $size = (stat( "$tar_dir/$tarfilename" ))[7] || die "can't stat $tar_dir/$tarfilename";

        if ($size > $Conf{MaxArchiveSize}) {
            print ", part bigger than media $size > $Conf{MaxArchiveSize}\n";
            return 0;
        }

        print ", $size bytes";

        my $path = "$tar_dir/$tarfilename";

        open(my $fh, "gzip -cd $tar_dir/$tarfilename |") or die "can't open $tar_dir/$tarfilename: $!";
        binmode($fh);
        my $tar = Archive::Tar::Streamed->new($fh);

        my $tar_size = 0;
        my $items = 0;

        while(my $entry = $tar->next) {
            push @tar_files, $entry->name;
            $items++;
            $tar_size += $entry->size;

            if ($tar_size > $Conf{MaxArchiveFileSize}) {
                print ", part $tarfilename is too big $tar_size > $Conf{MaxArchiveFileSize}\n";
                return 0;
            }

        }

        print ", $items items";

        if ($tar_size == 0 && $items == 0) {
            print ", EMPTY tar\n";

            my $backup_id = get_backup_id($host, $share, $num);

            my $sth_inc_deleted = $dbh->prepare(qq{
                update backups set
                    inc_deleted = true
                where id = ?
            });
            $sth_inc_deleted->execute($backup_id);

            $dbh->commit;

            return 1;
        }

        # fix tar_size for tars without any files
        $tar_size ||= 512 * $items;

        #
        # check if md5 exists, and if not, create one
        #

        my $md5_path = $path;
        $md5_path =~ s/\.tar\.gz$/.md5/ || die "can't create md5 filename from $md5_path";
        if (! -e $md5_path || -z $md5_path) {
            print ", creating md5";
            system( $bin->{md5sum} . " $path > $md5_path") == 0 or die "can't create md5 $path: $!";
        } else {
            ## FIXME check if existing md5 is valid
        }

        my $md5 = read_file( $md5_path ) || die "can't read md5sum file $md5_path: $!";
        $md5 =~ s#\s.*$##;

        # extract part number from filename
        my $part_nr = 1;
        $part_nr = $1 if ($tarfilename =~ m#/(\d+)\.tar\.gz#);

        #
        # finally, check if backup_parts table in database is valid
        #

        check_part($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items);
    }

    # short-cut and exit;
    return $same unless($same);

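    # compare the sorted list of files found in the tar parts with the
    # file list recorded in the database for this backup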
    @tar_files = sort @tar_files;
    print "\n\t",($#tar_files + 1), " tar files";

    my $sth = $dbh->prepare(qq{
        SELECT path,type
        FROM files
        JOIN shares on shares.id = shareid
        JOIN hosts on hosts.id = shares.hostid
        WHERE hosts.name = ? and shares.name = ? and backupnum = ?
    });
    $sth->execute($host, $share, $num);
    my @db_files;
    while( my $row = $sth->fetchrow_hashref ) {

        my $path = $row->{'path'} || die "no path?";
        $path =~ s#^/#./#;
        $path .= '/' if ($row->{'type'} == BPC_FTYPE_DIR);
        push @db_files, $path;
    }

    print " ",($#db_files + 1), " database files, diff";

    @db_files = sort @db_files;

    if ($#tar_files != $#db_files) {
        $same = 0;
        print " NUMBER";
    } else {
        my $diff = Algorithm::Diff->new(\@tar_files, \@db_files);
        while ( $diff->Next() ) {
            next if $diff->Same();
            $same = 0;
            print "< $_\n" for $diff->Items(1);
            print "> $_\n" for $diff->Items(2);
        }
    }

    print " ",($same ? 'ok' : 'DIFFERENT'),
        ", dur: ",fmt_time(time() - $t), "\n";

    return $same;
}

#----- main

my $sth = $dbh->prepare( qq{

    select
        backups.id as backup_id,
        hosts.name as host,
        shares.name as share,
        backups.num as num,
        inc_size,
        parts
    from backups
        join shares on backups.hostid = shares.hostid
            and shares.id = backups.shareid
        join hosts on shares.hostid = hosts.id
    where not inc_deleted
    order by backups.date

} );

$sth->execute();
my $num_backups = $sth->rows;
my $curr_backup = 0;    # incremented before each backup is processed

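# walk over all backups whose increments have not been deleted and re-run
# BackupPC_tarIncCreate whenever the archive is missing, out of date, or
# fails the forced check (-c)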
while (my $row = $sth->fetchrow_hashref) {

    $curr_backup++;

    my $tar_file = BackupPC::SearchLib::getGzipName($row->{'host'}, $row->{'share'}, $row->{'num'});

    # this will return -1 if file doesn't exist
    my $size = BackupPC::SearchLib::get_tgz_size_by_name($tar_file);

    print "# size: $size backup.size: ", $row->{inc_size},"\n" if ($opt{d});

    if ( $row->{'inc_size'} != -1 && $size != -1 && $row->{'inc_size'} >= $size) {
        if ($check) {
            tar_check($row->{'host'}, $row->{'share'}, $row->{'num'}, $tar_file) && next;
        } else {
            next;
        }
    }

    print curr_time, " creating $curr_backup/$num_backups ", $row->{'host'}, ":", $row->{'share'}, " #", $row->{'num'}, " -> $tar_file";

    my $t = time();

    # re-create archive?
    my $cmd = qq{ $tarIncCreate -h "$row->{'host'}" -s "$row->{'share'}" -n $row->{'num'} -f };
    print STDERR "## $cmd\n" if ($debug);

    if (system($cmd) != 0) {
        print STDERR " FAILED";
    }

    print ", dur: ",fmt_time(time() - $t), "\n";

    $dbh->commit;

}

undef $sth;
$dbh->disconnect;

Properties

Name            Value
svn:executable  *
