This is a repository of my old source code, which isn't updated any more. Go to git.rot13.org for current projects!

Annotation of /trunk/bin/BackupPC_tarIncCreate



Revision 234
Tue Nov 8 20:24:45 2005 UTC by dpavlin
File size: 21183 byte(s)
 r8745@llin:  dpavlin | 2005-11-08 21:24:32 +0100
 Rewrote creation of tar archives. gzip is now called directly from
 BackupPC_tarIncCreate. It uses multiple pipes to create the .tar.gz and md5sum
 on the fly, and supports MaxArchiveFileSize as the maximum size of the UNCOMPRESSED
 archive (to facilitate decompression on filesystems with limited file size).
 
 For that, there are two split implementations:
 - one which splits multiple files (each smaller than MaxArchiveFileSize) across
   multiple tar archives
 - the other, which splits individual files (larger than MaxArchiveFileSize) across
   multiple tar archives (the directory is named after the file, and the pieces
   inside it are named by part number)
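
As a rough illustration of the pipeline described in the log message above, the following minimal Perl sketch opens the same kind of gzip/tee/md5sum pipe that new_tar_part() builds further down. The output basename and the gzip level are made up for the example; the real script resolves the binaries with File::Which, takes the level from $Conf{GzipLevel}, and derives the name from BackupPC::SearchLib::getGzipName().

    #!/usr/bin/perl -w
    use strict;

    # hypothetical output basename; the real script uses getGzipName() plus a part number
    my $file = '/tmp/example-part-1';

    # compress the tar stream with gzip, write the compressed data to disk with tee,
    # and feed the same compressed data to md5sum for an on-the-fly checksum
    my $cmd = '| gzip -9 '
            . '| tee ' . $file . '.tar.gz '
            . '| md5sum - > ' . $file . '.md5';

    open(my $fh, $cmd) or die "can't open $cmd: $!";
    binmode($fh);
    print $fh "tar records would be written here";   # TarWrite() does this in the real script
    close($fh) or die "can't close pipe: $!";

Roughly speaking, once the uncompressed data written into such a pipe would exceed MaxArchiveFileSize, the script closes the current pipe and opens a new one for the next part.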

1 dpavlin 234 #!/usr/bin/perl -w
2 dpavlin 100 #============================================================= -*-perl-*-
3     #
4 dpavlin 109 # BackupPC_tarIncCreate: create a tar archive of an existing incremental dump
5     #
6 dpavlin 100 #
7     # DESCRIPTION
8     #
9 dpavlin 112 # Usage: BackupPC_tarIncCreate [options]
10 dpavlin 100 #
11     # Flags:
12     # Required options:
13     #
14     # -h host Host from which the tar archive is created.
15     # -n dumpNum Dump number from which the tar archive is created.
16     # A negative number means relative to the end (eg -1
17     # means the most recent dump, -2 2nd most recent etc).
18     # -s shareName Share name from which the tar archive is created.
19     #
20     # Other options:
21     # -t print summary totals
22     # -r pathRemove path prefix that will be replaced with pathAdd
23     # -p pathAdd new path prefix
24     # -b BLOCKS BLOCKS x 512 bytes per record (default 20; same as tar)
25     # -w writeBufSz write buffer size (default 1MB)
26     #
27     # The -h, -n and -s options specify which dump is used to generate
28     # the tar archive. The -r and -p options can be used to relocate
29     # the paths in the tar archive so extracted files can be placed
30     # in a location different from their original location.
31     #
32     # AUTHOR
33     # Craig Barratt <cbarratt@users.sourceforge.net>
34 dpavlin 112 # Ivan Klaric <iklaric@gmail.com>
35     # Dobrica Pavlinusic <dpavlin@rot13.org>
36 dpavlin 100 #
37     # COPYRIGHT
38     # Copyright (C) 2001-2003 Craig Barratt
39     #
40     # This program is free software; you can redistribute it and/or modify
41     # it under the terms of the GNU General Public License as published by
42     # the Free Software Foundation; either version 2 of the License, or
43     # (at your option) any later version.
44     #
45     # This program is distributed in the hope that it will be useful,
46     # but WITHOUT ANY WARRANTY; without even the implied warranty of
47     # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
48     # GNU General Public License for more details.
49     #
50     # You should have received a copy of the GNU General Public License
51     # along with this program; if not, write to the Free Software
52     # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
53     #
54     #========================================================================
55     #
56     # Version 2.1.0, released 20 Jun 2004.
57     #
58     # See http://backuppc.sourceforge.net.
59     #
60     #========================================================================
61    
62     use strict;
63     no utf8;
64     use lib "__INSTALLDIR__/lib";
65     use File::Path;
66     use Getopt::Std;
67     use DBI;
68     use BackupPC::Lib;
69     use BackupPC::Attrib qw(:all);
70     use BackupPC::FileZIO;
71     use BackupPC::View;
72     use BackupPC::SearchLib;
73 dpavlin 112 use Time::HiRes qw/time/;
74     use POSIX qw/strftime/;
75 dpavlin 234 use File::Which;
76     use File::Path;
77 dpavlin 112 use Data::Dumper; ### FIXME
78 dpavlin 100
79     die("BackupPC::Lib->new failed\n") if ( !(my $bpc = BackupPC::Lib->new) );
80     my $TopDir = $bpc->TopDir();
81     my $BinDir = $bpc->BinDir();
82     my %Conf = $bpc->Conf();
83 dpavlin 234 %BackupPC::SearchLib::Conf = %Conf;
84 dpavlin 100 my %opts;
85 dpavlin 112 my $in_backup_increment;
86 dpavlin 100
87 dpavlin 112
88 dpavlin 234 if ( !getopts("th:n:p:r:s:b:w:vd", \%opts) ) {
89 dpavlin 100 print STDERR <<EOF;
90 dpavlin 112 usage: $0 [options]
91 dpavlin 100 Required options:
92     -h host host from which the tar archive is created
93     -n dumpNum dump number from which the tar archive is created
94     A negative number means relative to the end (eg -1
95     means the most recent dump, -2 2nd most recent etc).
96     -s shareName share name from which the tar archive is created
97    
98     Other options:
99     -t print summary totals
100     -r pathRemove path prefix that will be replaced with pathAdd
101     -p pathAdd new path prefix
102     -b BLOCKS BLOCKS x 512 bytes per record (default 20; same as tar)
103     -w writeBufSz write buffer size (default 1048576 = 1MB)
104 dpavlin 156 -v verbose output
105 dpavlin 234 -d debug output
106 dpavlin 100 EOF
107     exit(1);
108     }
109    
110     if ( $opts{h} !~ /^([\w\.\s-]+)$/ ) {
111     print(STDERR "$0: bad host name '$opts{h}'\n");
112     exit(1);
113     }
114     my $Host = $opts{h};
115    
116     if ( $opts{n} !~ /^(-?\d+)$/ ) {
117     print(STDERR "$0: bad dump number '$opts{n}'\n");
118     exit(1);
119     }
120     my $Num = $opts{n};
121    
122 dpavlin 234 my $bin;
123     foreach my $c (qw/gzip md5sum tee/) {
124     $bin->{$c} = which($c) || die "$0 needs $c, install it\n";
125     }
126    
127 dpavlin 100 my @Backups = $bpc->BackupInfoRead($Host);
128     my $FileCnt = 0;
129     my $ByteCnt = 0;
130     my $DirCnt = 0;
131     my $SpecialCnt = 0;
132     my $ErrorCnt = 0;
133 dpavlin 234 my $current_tar_size = 0;
134 dpavlin 100
135     my $i;
136     $Num = $Backups[@Backups + $Num]{num} if ( -@Backups <= $Num && $Num < 0 );
137     for ( $i = 0 ; $i < @Backups ; $i++ ) {
138     last if ( $Backups[$i]{num} == $Num );
139     }
140     if ( $i >= @Backups ) {
141     print(STDERR "$0: bad backup number $Num for host $Host\n");
142     exit(1);
143     }
144    
145     my $PathRemove = $1 if ( $opts{r} =~ /(.+)/ );
146     my $PathAdd = $1 if ( $opts{p} =~ /(.+)/ );
147     if ( $opts{s} !~ /^([\w\s\.\/\$-]+)$/ && $opts{s} ne "*" ) {
148     print(STDERR "$0: bad share name '$opts{s}'\n");
149     exit(1);
150     }
151     our $ShareName = $opts{s};
152     our $view = BackupPC::View->new($bpc, $Host, \@Backups);
153    
154     #
155     # This constant and the line of code below that uses it are borrowed
156     # from Archive::Tar. Thanks to Calle Dybedahl and Stephen Zander.
157     # See www.cpan.org.
158     #
159     # Archive::Tar is Copyright 1997 Calle Dybedahl. All rights reserved.
160     # Copyright 1998 Stephen Zander. All rights reserved.
161     #
162     my $tar_pack_header
163     = 'a100 a8 a8 a8 a12 a12 A8 a1 a100 a6 a2 a32 a32 a8 a8 a155 x12';
164     my $tar_header_length = 512;
165    
166     my $BufSize = $opts{w} || 1048576; # 1MB or 2^20
167     my $WriteBuf = "";
168     my $WriteBufSz = ($opts{b} || 20) * $tar_header_length;
169    
170     my(%UidCache, %GidCache);
171     my(%HardLinkExtraFiles, @HardLinks);
172    
173     #
174     # Write out all the requested files/directories
175     #
176 dpavlin 112
177 dpavlin 234 my $max_file_size = $Conf{'MaxArchiveFileSize'} || die "problem with MaxArchiveFileSize parameter";
178     $max_file_size *= 1024;
179    
180     my $tar_dir = $Conf{InstallDir}.'/'.$Conf{GzipTempDir};
181     die "problem with $tar_dir, check GzipTempDir in configuration\n" unless (-d $tar_dir && -w $tar_dir);
182    
183     my $tar_file = BackupPC::SearchLib::getGzipName($Host, $ShareName, $Num) || die "can't getGzipName($Host, $ShareName, $Num)";
184    
185     my $tar_path = $tar_dir . '/' . $tar_file . '.tmp';
186     $tar_path =~ s#//#/#g;
187    
188     print STDERR "working dir: $tar_dir, max uncompressed size $max_file_size bytes, tar $tar_file\n" if ($opts{d});
189    
190     my $fh;
191     my $part = 0;
192     my $no_files = 0;
193    
194     sub new_tar_part {
195     if ($fh) {
196     return if ($current_tar_size == 0);
197    
198     print STDERR "# closing part $part\n" if ($opts{d});
199    
200     # finish tar archive
201     my $data = "\0" x ($tar_header_length * 2);
202     TarWrite($fh, \$data);
203     TarWrite($fh, undef);
204    
205     close($fh) || die "can't close archive part $part: $!";
206     }
207    
208     $part++;
209    
210     # if this is first part, create directory
211    
212     if ($part == 1) {
213     if (-d $tar_path) {
214     print STDERR "# deleting existing $tar_path\n" if ($opts{d});
215     rmtree($tar_path);
216     }
217     mkdir($tar_path) || die "can't create directory $tar_path: $!";
218     }
219    
220     my $file = $tar_path . '/' . $part;
221    
222     #
223     # create a complex pipe which will pass the output through gzip
224     # for compression, write the file to disk using tee,
225     # and pipe the same output to md5sum to create a checksum
226     #
227    
228     my $cmd = '| ' . $bin->{'gzip'} . ' ' . $Conf{GzipLevel} . ' ' .
229     '| ' . $bin->{'tee'} . ' ' . $file . '.tar.gz' . ' ' .
230     '| ' . $bin->{'md5sum'} . ' - > ' . $file . '.md5';
231    
232     print STDERR "## $cmd\n" if ($opts{d});
233    
234     open($fh, $cmd) or die "can't open $cmd: $!";
235     binmode($fh);
236     $current_tar_size = 0;
237     }
238    
239     new_tar_part();
240    
241 dpavlin 112 if (seedCache($Host, $ShareName, $Num)) {
242     archiveWrite($fh, '/');
243     archiveWriteHardLinks($fh);
244 dpavlin 100 } else {
245 dpavlin 170 print STDERR "NOTE: no files found for $Host:$ShareName, increment $Num\n" if ($opts{v});
246 dpavlin 234 $no_files = 1;
247 dpavlin 100 }
248    
249     #
250     # Finish with two null 512 byte headers, and then round out a full
251     # block.
252     #
253     my $data = "\0" x ($tar_header_length * 2);
254     TarWrite($fh, \$data);
255     TarWrite($fh, undef);
256    
257 dpavlin 234 if (! close($fh)) {
258     rmtree($tar_path);
259     die "can't close archive\n";
260     }
261    
262     # remove temporary files if there are no files
263     if ($no_files) {
264     rmtree($tar_path);
265     } elsif ($part == 1) {
266     warn "FIXME: if there is only one part move to parent directory and rename";
267     }
268    
269 dpavlin 100 #
270     # print out totals if requested
271     #
272     if ( $opts{t} ) {
273     print STDERR "Done: $FileCnt files, $ByteCnt bytes, $DirCnt dirs,",
274     " $SpecialCnt specials, $ErrorCnt errors\n";
275     }
276     if ( $ErrorCnt && !$FileCnt && !$DirCnt ) {
277     #
278     # Got errors, with no files or directories; exit with non-zero
279     # status
280     #
281 dpavlin 234 cleanup();
282 dpavlin 100 exit(1);
283     }
284 dpavlin 234
285 dpavlin 100 exit(0);
286    
287     ###########################################################################
288     # Subroutines
289     ###########################################################################
290    
291     sub archiveWrite
292     {
293     my($fh, $dir, $tarPathOverride) = @_;
294    
295     if ( $dir =~ m{(^|/)\.\.(/|$)} ) {
296     print(STDERR "$0: bad directory '$dir'\n");
297     $ErrorCnt++;
298     return;
299     }
300     $dir = "/" if ( $dir eq "." );
301     #print(STDERR "calling find with $Num, $ShareName, $dir\n");
302    
303     if ( $view->find($Num, $ShareName, $dir, 0, \&TarWriteFile,
304     $fh, $tarPathOverride) < 0 ) {
305     print(STDERR "$0: bad share or directory '$ShareName/$dir'\n");
306     $ErrorCnt++;
307     return;
308     }
309     }
310    
311     #
312     # Write out any hardlinks (if any)
313     #
314     sub archiveWriteHardLinks
315     {
316     my($fh) = @_;
317     foreach my $hdr ( @HardLinks ) {
318     $hdr->{size} = 0;
319     if ( defined($PathRemove)
320     && substr($hdr->{linkname}, 0, length($PathRemove)+1)
321     eq ".$PathRemove" ) {
322     substr($hdr->{linkname}, 0, length($PathRemove)+1) = ".$PathAdd";
323     }
324     TarWriteFileInfo($fh, $hdr);
325     }
326     @HardLinks = ();
327     %HardLinkExtraFiles = ();
328     }
329    
330     sub UidLookup
331     {
332     my($uid) = @_;
333    
334     $UidCache{$uid} = (getpwuid($uid))[0] if ( !exists($UidCache{$uid}) );
335     return $UidCache{$uid};
336     }
337    
338     sub GidLookup
339     {
340     my($gid) = @_;
341    
342     $GidCache{$gid} = (getgrgid($gid))[0] if ( !exists($GidCache{$gid}) );
343     return $GidCache{$gid};
344     }
345    
346     sub TarWrite
347     {
348     my($fh, $dataRef) = @_;
349    
350 dpavlin 234
351 dpavlin 100 if ( !defined($dataRef) ) {
352     #
353     # do flush by padding to a full $WriteBufSz
354     #
355     my $data = "\0" x ($WriteBufSz - length($WriteBuf));
356     $dataRef = \$data;
357     }
358 dpavlin 234
359     # poor man's tell :-)
360     $current_tar_size += length($$dataRef);
361    
362 dpavlin 100 if ( length($WriteBuf) + length($$dataRef) < $WriteBufSz ) {
363     #
364     # just buffer and return
365     #
366     $WriteBuf .= $$dataRef;
367     return;
368     }
369     my $done = $WriteBufSz - length($WriteBuf);
370     if ( syswrite($fh, $WriteBuf . substr($$dataRef, 0, $done))
371     != $WriteBufSz ) {
372     print(STDERR "Unable to write to output file ($!)\n");
373     exit(1);
374     }
375     while ( $done + $WriteBufSz <= length($$dataRef) ) {
376     if ( syswrite($fh, substr($$dataRef, $done, $WriteBufSz))
377     != $WriteBufSz ) {
378     print(STDERR "Unable to write to output file ($!)\n");
379     exit(1);
380     }
381     $done += $WriteBufSz;
382     }
383     $WriteBuf = substr($$dataRef, $done);
384     }
385    
386     sub TarWritePad
387     {
388     my($fh, $size) = @_;
389    
390     if ( $size % $tar_header_length ) {
391     my $data = "\0" x ($tar_header_length - ($size % $tar_header_length));
392     TarWrite($fh, \$data);
393     }
394     }
395    
396     sub TarWriteHeader
397     {
398     my($fh, $hdr) = @_;
399    
400     $hdr->{uname} = UidLookup($hdr->{uid}) if ( !defined($hdr->{uname}) );
401     $hdr->{gname} = GidLookup($hdr->{gid}) if ( !defined($hdr->{gname}) );
402     my $devmajor = defined($hdr->{devmajor}) ? sprintf("%07o", $hdr->{devmajor})
403     : "";
404     my $devminor = defined($hdr->{devminor}) ? sprintf("%07o", $hdr->{devminor})
405     : "";
406     my $sizeStr;
407     if ( $hdr->{size} >= 2 * 65536 * 65536 ) {
408     #
409     # GNU extension for files >= 8GB: send size in big-endian binary
410     #
411     $sizeStr = pack("c4 N N", 0x80, 0, 0, 0,
412     $hdr->{size} / (65536 * 65536),
413     $hdr->{size} % (65536 * 65536));
414     } elsif ( $hdr->{size} >= 1 * 65536 * 65536 ) {
415     #
416     # sprintf octal only handles up to 2^32 - 1
417     #
418     $sizeStr = sprintf("%03o", $hdr->{size} / (1 << 24))
419     . sprintf("%08o", $hdr->{size} % (1 << 24));
420     } else {
421     $sizeStr = sprintf("%011o", $hdr->{size});
422     }
423     my $data = pack($tar_pack_header,
424     substr($hdr->{name}, 0, 99),
425     sprintf("%07o", $hdr->{mode}),
426     sprintf("%07o", $hdr->{uid}),
427     sprintf("%07o", $hdr->{gid}),
428     $sizeStr,
429     sprintf("%011o", $hdr->{mtime}),
430     "", #checksum field - space padded by pack("A8")
431     $hdr->{type},
432     substr($hdr->{linkname}, 0, 99),
433     $hdr->{magic} || 'ustar ',
434     $hdr->{version} || ' ',
435     $hdr->{uname},
436     $hdr->{gname},
437     $devmajor,
438     $devminor,
439     "" # prefix is empty
440     );
441     substr($data, 148, 7) = sprintf("%06o\0", unpack("%16C*",$data));
442     TarWrite($fh, \$data);
443     }
444    
445     sub TarWriteFileInfo
446     {
447     my($fh, $hdr) = @_;
448    
449     #
450     # Handle long link names (symbolic links)
451     #
452     if ( length($hdr->{linkname}) > 99 ) {
453     my %h;
454     my $data = $hdr->{linkname} . "\0";
455     $h{name} = "././\@LongLink";
456     $h{type} = "K";
457     $h{size} = length($data);
458     TarWriteHeader($fh, \%h);
459     TarWrite($fh, \$data);
460     TarWritePad($fh, length($data));
461     }
462     #
463     # Handle long file names
464     #
465     if ( length($hdr->{name}) > 99 ) {
466     my %h;
467     my $data = $hdr->{name} . "\0";
468     $h{name} = "././\@LongLink";
469     $h{type} = "L";
470     $h{size} = length($data);
471     TarWriteHeader($fh, \%h);
472     TarWrite($fh, \$data);
473     TarWritePad($fh, length($data));
474     }
475     TarWriteHeader($fh, $hdr);
476     }
477    
478     #
479 dpavlin 112 # seed cache of files in this increment
480 dpavlin 100 #
481 dpavlin 112 sub seedCache($$$) {
482     my ($host, $share, $dumpNo) = @_;
483    
484 dpavlin 100 my $dsn = $Conf{SearchDSN};
485     my $db_user = $Conf{SearchUser} || '';
486    
487 dpavlin 156 print STDERR curr_time(), "getting files for $host:$share increment $dumpNo..." if ($opts{v});
488 dpavlin 112 my $sql = q{
489 dpavlin 234 SELECT path,size
490 dpavlin 112 FROM files
491     JOIN shares on shares.id = shareid
492     JOIN hosts on hosts.id = shares.hostid
493     WHERE hosts.name = ? and shares.name = ? and backupnum = ?
494     };
495 dpavlin 100
496 dpavlin 112 my $dbh = DBI->connect($dsn, $db_user, "", { RaiseError => 1, AutoCommit => 1} );
497     my $sth = $dbh->prepare($sql);
498     $sth->execute($host, $share, $dumpNo);
499     my $count = $sth->rows;
500 dpavlin 156 print STDERR " found $count items\n" if ($opts{v});
501 dpavlin 112 while (my $row = $sth->fetchrow_arrayref) {
502 dpavlin 156 #print STDERR "+ ", $row->[0],"\n";
503 dpavlin 234 $in_backup_increment->{ $row->[0] } = $row->[1];
504 dpavlin 100 }
505 dpavlin 112
506     $sth->finish();
507     $dbh->disconnect();
508 dpavlin 100
509 dpavlin 112 return $count;
510 dpavlin 100 }
511    
512 dpavlin 234 #
513     # calculate overhead for one file in the tar archive
514     #
515     sub tar_overhead($) {
516     my $name = shift || '';
517    
518     # header, padding of file and two null blocks at end
519     my $len = 4 * $tar_header_length;
520    
521     # if the filename is longer than 99 chars, add blocks for the
522     # long filename header
523     if ( length($name) > 99 ) {
524     $len += int( ( length($name) + $tar_header_length ) / $tar_header_length ) * $tar_header_length;
525     }
526    
527     return $len;
528     }
529    
530 dpavlin 100 my $Attr;
531     my $AttrDir;
532    
533     sub TarWriteFile
534     {
535     my($hdr, $fh, $tarPathOverride) = @_;
536    
537     my $tarPath = $hdr->{relPath};
538     $tarPath = $tarPathOverride if ( defined($tarPathOverride) );
539    
540     $tarPath =~ s{//+}{/}g;
541 dpavlin 112
542 dpavlin 234 #print STDERR "? $tarPath\n" if ($opts{d});
543     my $size = $in_backup_increment->{$tarPath};
544     return unless (defined($size));
545 dpavlin 112
546     # would adding this file push the current part over MaxArchiveFileSize?
547    
548     if ( ($current_tar_size + tar_overhead($tarPath) + $size) > $max_file_size ) {
549     print STDERR "# tar file $current_tar_size + " . tar_overhead($tarPath) . " + $size > $max_file_size, splitting\n" if ($opts{d});
550     new_tar_part();
551     }
552    
553     print STDERR "A $tarPath [$size] tell: $current_tar_size\n" if ($opts{d});
554    
555 dpavlin 100 if ( defined($PathRemove)
556     && substr($tarPath, 0, length($PathRemove)) eq $PathRemove ) {
557     substr($tarPath, 0, length($PathRemove)) = $PathAdd;
558     }
559     $tarPath = "./" . $tarPath if ( $tarPath !~ /^\.\// );
560     $tarPath =~ s{//+}{/}g;
561     $hdr->{name} = $tarPath;
562    
563     if ( $hdr->{type} == BPC_FTYPE_DIR ) {
564     #
565     # Directory: just write the header
566     #
567     $hdr->{name} .= "/" if ( $hdr->{name} !~ m{/$} );
568 dpavlin 112 TarWriteFileInfo($fh, $hdr);
569     $DirCnt++;
570 dpavlin 100 } elsif ( $hdr->{type} == BPC_FTYPE_FILE ) {
571     #
572     # Regular file: write the header and file
573     #
574     my $f = BackupPC::FileZIO->open($hdr->{fullPath}, 0, $hdr->{compress});
575     if ( !defined($f) ) {
576     print(STDERR "Unable to open file $hdr->{fullPath}\n");
577     $ErrorCnt++;
578     return;
579     }
580 dpavlin 234 # do we need to split file?
581     if ($hdr->{size} < $max_file_size) {
582     TarWriteFileInfo($fh, $hdr);
583     my($data, $size);
584     while ( $f->read(\$data, $BufSize) > 0 ) {
585     TarWrite($fh, \$data);
586     $size += length($data);
587     }
588     $f->close;
589     TarWritePad($fh, $size);
590 dpavlin 100 $FileCnt++;
591     $ByteCnt += $size;
592 dpavlin 234 } else {
593     my $full_size = $hdr->{size};
594     my $orig_name = $hdr->{name};
595     my $max_part_size = $max_file_size - tar_overhead($hdr->{name});
596    
597     my $parts = int(($full_size + $max_part_size - 1) / $max_part_size);
598     print STDERR "# splitting $orig_name [$full_size bytes] into $parts parts\n" if ($opts{d});
599     foreach my $subpart ( 1 .. $parts ) {
600     new_tar_part();
601     if ($subpart < $parts) {
602     $hdr->{size} = $max_part_size;
603     } else {
604     $hdr->{size} = $full_size % $max_part_size;
605     }
606     $hdr->{name} = $orig_name . '/' . $subpart;
607     print STDERR "## creating part $subpart ",$hdr->{name}, " [", $hdr->{size}," bytes]\n";
608    
609     TarWriteFileInfo($fh, $hdr);
610     my($data, $size);
611     # read as many full $BufSize blocks as fit into this part
612     for ( 1 .. int($hdr->{size} / $BufSize) ) {
613     my $r_size = $f->read(\$data, $BufSize);
614     die "expected $BufSize bytes read, got $r_size bytes!" if ($r_size != $BufSize);
615     TarWrite($fh, \$data);
616     $size += length($data);
617     }
618    
619     my $size_left = $hdr->{size} % $BufSize;
620     my $r_size = $f->read(\$data, $size_left);
621     die "expected $size_left bytes last read, got $r_size bytes!" if ($r_size != $size_left);
622    
623     TarWrite($fh, \$data);
624     $size += length($data);
625     TarWritePad($fh, $size);
626     }
627     $f->close;
628     $FileCnt++;
629     $ByteCnt += $full_size;
630     new_tar_part();
631     }
632 dpavlin 100 } elsif ( $hdr->{type} == BPC_FTYPE_HARDLINK ) {
633     #
634     # Hardlink file: either write a hardlink or the complete file
635 dpavlin 234 # depending upon whether the linked-to file will be written
636     # to the archive.
637 dpavlin 100 #
638 dpavlin 234 # Start by reading the contents of the link.
639     #
640 dpavlin 100 my $f = BackupPC::FileZIO->open($hdr->{fullPath}, 0, $hdr->{compress});
641     if ( !defined($f) ) {
642     print(STDERR "Unable to open file $hdr->{fullPath}\n");
643     $ErrorCnt++;
644     return;
645     }
646     my $data;
647     while ( $f->read(\$data, $BufSize) > 0 ) {
648     $hdr->{linkname} .= $data;
649     }
650 dpavlin 234 $f->close;
651     my $done = 0;
652     my $name = $hdr->{linkname};
653     $name =~ s{^\./}{/};
654     if ( $HardLinkExtraFiles{$name} ) {
655     #
656     # Target file will be or was written, so just remember
657     # the hardlink so we can dump it later.
658     #
659     push(@HardLinks, $hdr);
660     $SpecialCnt++;
661     } else {
662     #
663     # Have to dump the original file. Just call the top-level
664     # routine, so that we save the hassle of dealing with
665     # mangling, merging and attributes.
666     #
667     $HardLinkExtraFiles{$hdr->{linkname}} = 1;
668     archiveWrite($fh, $hdr->{linkname}, $hdr->{name});
669     }
670 dpavlin 100 } elsif ( $hdr->{type} == BPC_FTYPE_SYMLINK ) {
671     #
672     # Symbolic link: read the symbolic link contents into the header
673     # and write the header.
674     #
675     my $f = BackupPC::FileZIO->open($hdr->{fullPath}, 0, $hdr->{compress});
676     if ( !defined($f) ) {
677     print(STDERR "Unable to open symlink file $hdr->{fullPath}\n");
678     $ErrorCnt++;
679     return;
680     }
681     my $data;
682     while ( $f->read(\$data, $BufSize) > 0 ) {
683     $hdr->{linkname} .= $data;
684     }
685     $f->close;
686     $hdr->{size} = 0;
687     TarWriteFileInfo($fh, $hdr);
688     $SpecialCnt++;
689     } elsif ( $hdr->{type} == BPC_FTYPE_CHARDEV
690     || $hdr->{type} == BPC_FTYPE_BLOCKDEV
691     || $hdr->{type} == BPC_FTYPE_FIFO ) {
692     #
693     # Special files: for char and block special we read the
694     # major and minor numbers from a plain file.
695     #
696     if ( $hdr->{type} != BPC_FTYPE_FIFO ) {
697     my $f = BackupPC::FileZIO->open($hdr->{fullPath}, 0,
698     $hdr->{compress});
699     my $data;
700     if ( !defined($f) || $f->read(\$data, $BufSize) < 0 ) {
701     print(STDERR "Unable to open/read char/block special file"
702     . " $hdr->{fullPath}\n");
703     $f->close if ( defined($f) );
704     $ErrorCnt++;
705     return;
706     }
707     $f->close;
708     if ( $data =~ /(\d+),(\d+)/ ) {
709     $hdr->{devmajor} = $1;
710     $hdr->{devminor} = $2;
711     }
712     }
713     $hdr->{size} = 0;
714     TarWriteFileInfo($fh, $hdr);
715     $SpecialCnt++;
716     } else {
717     print(STDERR "Got unknown type $hdr->{type} for $hdr->{name}\n");
718     $ErrorCnt++;
719     }
720     }
721 dpavlin 112
722     my $t_fmt = '%Y-%m-%d %H:%M:%S';
723     sub curr_time {
724     return strftime($t_fmt,localtime());
725     }
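
For orientation, a run that produces two parts leaves roughly this layout on disk (the archive name comes from BackupPC::SearchLib::getGzipName(), so the directory name shown here is illustrative):

    $Conf{InstallDir}/$Conf{GzipTempDir}/<getGzipName result>.tmp/
        1.tar.gz   first part; its uncompressed tar data is capped by MaxArchiveFileSize
        1.md5      md5sum of 1.tar.gz, produced on the fly by the md5sum pipe
        2.tar.gz   second part (only present if splitting was needed)
        2.md5      md5sum of 2.tar.gz

If no files are found for the increment, the whole .tmp directory is removed; handling of the single-part case (moving and renaming it) is still marked FIXME in the code above.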

Properties

svn:executable = *
