2 #============================================================= -*-perl-*-
4 # BackupPC_tarPCCopy: create a tar archive of the PC directory
5 # for copying the entire PC data directory. The archive will
6 # contain hardlinks to the pool directory, which should be copied
7 # before BackupPC_tarPCCopy is run.
11 # Usage: BackupPC_tarPCCopy [options] files/directories...
14 # -c don't cache inode data (reduces memory usage at the
15 # expense of longer run time)
18 # Craig Barratt <cbarratt@users.sourceforge.net>
21 # Copyright (C) 2005 Craig Barratt
23 # This program is free software; you can redistribute it and/or modify
24 # it under the terms of the GNU General Public License as published by
25 # the Free Software Foundation; either version 2 of the License, or
26 # (at your option) any later version.
28 # This program is distributed in the hope that it will be useful,
29 # but WITHOUT ANY WARRANTY; without even the implied warranty of
30 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
31 # GNU General Public License for more details.
33 # You should have received a copy of the GNU General Public License
34 # along with this program; if not, write to the Free Software
35 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
37 #========================================================================
39 # Version 3.0.0beta1, released 30 Jul 2006.
41 # See http://backuppc.sourceforge.net.
43 #========================================================================
# --- Script setup: library path, modules, option parsing, globals ---------
# NOTE(review): this is a sampled excerpt; several original lines (other
# module imports, the usage heredoc wrapper, the "my $tar_pack_header"
# declaration) are not visible here.
47 use lib "/usr/local/BackupPC/lib";
53 use BackupPC::Attrib qw(:all);
54 use BackupPC::FileZIO;
# S_IFMT masks the file-type bits out of a stat() mode word.
57 use constant S_IFMT => 0170000; # type of file
# Initialize the BackupPC library; abort at once if the server config
# cannot be loaded.
59 die("BackupPC::Lib->new failed\n") if ( !(my $bpc = BackupPC::Lib->new) );
60 my $TopDir = $bpc->TopDir();
61 my $BinDir = $bpc->BinDir();
62 my %Conf = $bpc->Conf();
# -c disables the inode cache (less memory at the cost of run time);
# at least one path argument is required.
66 if ( !getopts("c", \%opts) || @ARGV < 1 ) {
68 usage: $0 [options] files/directories...
70 -c don't cache inode data (reduces memory usage at the
71 expense of longer run time)
# pack() template for one 512-byte ustar/GNU tar header (assigned to
# $tar_pack_header on a line not visible in this excerpt).
77 # This constant and the line of code below that uses it are borrowed
78 # from Archive::Tar. Thanks to Calle Dybedahl and Stephen Zander.
81 # Archive::Tar is Copyright 1997 Calle Dybedahl. All rights reserved.
82 # Copyright 1998 Stephen Zander. All rights reserved.
85 = 'a100 a8 a8 a8 a12 a12 A8 a1 a100 a6 a2 a32 a32 a8 a8 a155 x12';
86 my $tar_header_length = 512;
# Read buffer is 1MB; writes are batched in multiples of the tar header
# size (default 20 blocks = 10KB). NOTE(review): $opts{b} is referenced
# but "b" is not in the getopts() spec above -- confirm against the
# full original source.
88 my $BufSize = 1048576; # 1MB or 2^20
90 my $WriteBufSz = ($opts{b} || 20) * $tar_header_length;
# Memoized uid/gid -> name lookups (see UidLookup/GidLookup below).
92 my(%UidCache, %GidCache);
# Per-client walk state, refreshed whenever the traversal crosses into
# a new host directory or backup number.
94 my($ClientName, $ClientBackups, $ClientBkupNum, $ClientDirAttr, $ClientDir);
101 my $ClientBkupCompress = 1;
102 my $ClientBkupMangle = 1;
# --- Main loop: archive each requested path, then emit the tar trailer ----
# NOTE(review): excerpt -- the enclosing loop braces and the declarations
# of $fh, $argCnt, $argMax, $DirCnt, $FileCnt, $HLinkCnt, $ErrorCnt are
# not visible here.
107 # Write out all the requested files/directories
116 my $path = shift(@ARGV);
# Every argument must live under $TopDir: the hardlink names emitted
# later are relative "../"-style paths into the pool, which only make
# sense when extraction happens inside the same tree.
118 if ( $path !~ m{^\Q$TopDir/\E} ) {
119 print STDERR "Argument $path must be an absolute path starting with $TopDir\n";
123 print STDERR "Argument $path does not exist\n";
# Depth-first walk; archiveFile() writes one tar entry per file/dir.
127 find({wanted => sub { archiveFile($fh) } }, $path);
130 # To avoid using too much memory for the inode cache,
131 # remove it after each top-level directory is done.
138 print STDERR "Done $path ($argCnt of $argMax): $DirCnt dirs,"
139 . " $FileCnt files, $HLinkCnt hardlinks\n";
# tar end-of-archive marker: two zero-filled 512-byte blocks, then a
# final TarWrite(undef) to pad and flush the write buffer.
150 # Finish with two null 512 byte headers, and then round out a full
153 my $data = "\0" x ($tar_header_length * 2);
154 TarWrite($fh, \$data);
155 TarWrite($fh, undef);
# Exit status reflects whether any warnings/errors were counted.
159 # Got errors so exit with a non-zero status
161 print STDERR "Got $ErrorCnt warnings/errors\n";
166 ###########################################################################
168 ###########################################################################
# archiveFile: File::Find "wanted" callback. Builds a $hdr describing the
# current file, derives the owning client/backup from its path, and hands
# it to TarWriteFile(). NOTE(review): excerpt -- the "sub archiveFile"
# line, the stat() that fills @s, and several interior lines/braces are
# not visible here.
178 # We just handle directories and files; no symlinks or
179 # char/block special files.
181 $hdr->{type} = -d _ ? BPC_FTYPE_DIR
182 : -f _ ? BPC_FTYPE_FILE
# @s holds stat() results: inode/nlink/size plus rdev split into
# major/minor for device entries.
184 $hdr->{fullPath} = $File::Find::name;
185 $hdr->{inode} = $s[1];
186 $hdr->{nlink} = $s[3];
187 $hdr->{size} = $s[7];
188 $hdr->{devmajor} = $s[6] >> 8;
189 $hdr->{devminor} = $s[6] & 0xff;
192 $hdr->{mode} = $s[2];
193 $hdr->{mtime} = $s[9];
194 $hdr->{compress} = 1;
# relPath is the path below $TopDir/pc (i.e. "host/backupNum/...").
196 if ( $hdr->{fullPath} !~ m{\Q$TopDir\E/pc/(.*)} ) {
197 print STDERR "Can't extract TopDir ($TopDir) from"
198 . " $hdr->{fullPath}\n";
202 $hdr->{relPath} = $1;
203 if ( $hdr->{relPath} =~ m{(.*)/(.*)} ) {
206 $hdr->{name} = $hdr->{relPath};
# First path component is the client (host) name; reload the backup
# index when the walk enters a different client.
209 if ( $hdr->{relPath} =~ m{(.*?)/} ) {
211 if ( $ClientName ne $clientName ) {
212 $ClientName = $clientName;
213 $ClientBackups = [ $bpc->BackupInfoRead($ClientName) ];
214 #print STDERR "Setting Client to $ClientName\n";
# Second component is the backup number; look up that backup's
# compress/mangle settings when it changes.
216 if ( $hdr->{relPath} =~ m{(.*?)/(\d+)/}
217 || $hdr->{relPath} =~ m{(.*?)/(\d+)$} ) {
219 if ( $ClientBkupNum != $backupNum ) {
221 $ClientBkupNum = $backupNum;
222 # print STDERR "Setting ClientBkupNum to $ClientBkupNum\n";
223 for ( $i = 0 ; $i < @$ClientBackups ; $i++ ) {
224 if ( $ClientBackups->[$i]{num} == $ClientBkupNum ) {
225 $ClientBkupCompress = $ClientBackups->[$i]{compress};
226 $ClientBkupMangle = $ClientBackups->[$i]{mangle};
227 # print STDERR "Setting $ClientBkupNum compress to $ClientBkupCompress, mangle to $ClientBkupMangle\n";
232 $hdr->{compress} = $ClientBkupCompress;
# For multi-linked (pooled) backup files whose names carry the "f"
# mangle prefix, fetch the real (uncompressed) size from the per-dir
# attrib file; it's needed later for the pool MD5 digest.
233 if ( $hdr->{type} == BPC_FTYPE_FILE && $hdr->{nlink} > 1
234 && $hdr->{name} =~ /^f/ ) {
235 (my $dir = $hdr->{fullPath}) =~ s{(.*)/.*}{$1};
# Re-read the attrib file only when the directory changes.
236 if ( $ClientDir ne $dir ) {
238 $ClientDirAttr = BackupPC::Attrib->new(
239 { compress => $ClientBkupCompress }
241 if ( -f $ClientDirAttr->fileName($dir)
242 && !$ClientDirAttr->read($dir) ) {
243 print STDERR "Can't read attrib file in $dir\n";
# Attrib entries are keyed by the unmangled file name.
247 my $name = $hdr->{name};
248 $name = $bpc->fileNameUnmangle($name) if ( $ClientBkupMangle );
249 my $attr = $ClientDirAttr->get($name);
250 $hdr->{realSize} = $attr->{size} if ( defined($attr) );
251 #print STDERR "$hdr->{fullPath} has real size $hdr->{realSize}\n";
# Fallback path (presumably non-pooled entries): stored size is the
# real size and no compression applies -- NOTE(review): enclosing
# condition not visible in this excerpt.
255 $hdr->{compress} = 0;
256 $hdr->{realSize} = $hdr->{size};
259 #print STDERR "$File::Find::name\n";
261 TarWriteFile($hdr, $fh);
# Memoized uid->name and gid->name lookups (bodies of UidLookup and
# GidLookup; their "sub" declaration lines are not visible in this
# excerpt). A failed lookup caches undef, avoiding repeated misses.
268 $UidCache{$uid} = (getpwuid($uid))[0] if ( !exists($UidCache{$uid}) );
269 return $UidCache{$uid};
276 $GidCache{$gid} = (getgrgid($gid))[0] if ( !exists($GidCache{$gid}) );
277 return $GidCache{$gid};
# TarWrite body: buffered output. Data accumulates in $WriteBuf and is
# written with syswrite() in exact $WriteBufSz chunks; calling with an
# undef data ref zero-pads the buffer to a full chunk and flushes it
# (the final-flush path). NOTE(review): excerpt -- the "sub TarWrite"
# line, error-count increments, and some braces are not visible.
282 my($fh, $dataRef) = @_;
284 if ( !defined($dataRef) ) {
286 # do flush by padding to a full $WriteBufSz
288 my $data = "\0" x ($WriteBufSz - length($WriteBuf));
# Fast path: data fits in the buffer -- just append and return.
291 if ( length($WriteBuf) + length($$dataRef) < $WriteBufSz ) {
293 # just buffer and return
295 $WriteBuf .= $$dataRef;
# Write the buffered bytes plus enough of the new data to make one
# full chunk, then any further full chunks directly from the data.
298 my $done = $WriteBufSz - length($WriteBuf);
299 if ( (my $n = syswrite($fh, $WriteBuf . substr($$dataRef, 0, $done)))
301 print(STDERR "Unable to write to output file ($!) ($n vs $WriteBufSz)\n");
304 while ( $done + $WriteBufSz <= length($$dataRef) ) {
305 if ( (my $n = syswrite($fh, substr($$dataRef, $done, $WriteBufSz)))
307 print(STDERR "Unable to write to output file ($!) ($n v $WriteBufSz)\n");
310 $done += $WriteBufSz;
# Remainder stays buffered for the next call.
312 $WriteBuf = substr($$dataRef, $done);
# TarWritePad body: pad the stream with NULs so $size bytes of file data
# end on a 512-byte tar block boundary (no-op when already aligned).
319 if ( $size % $tar_header_length ) {
320 my $data = "\0" x ($tar_header_length - ($size % $tar_header_length));
321 TarWrite($fh, \$data);
# TarWriteHeader body: pack one 512-byte ustar/GNU header from $hdr and
# write it. NOTE(review): excerpt -- the "sub" line, the mtime/type/
# prefix fields of the pack() argument list, and the size-field
# assignment branches' braces are not visible.
329 $hdr->{uname} = UidLookup($hdr->{uid}) if ( !defined($hdr->{uname}) );
330 $hdr->{gname} = GidLookup($hdr->{gid}) if ( !defined($hdr->{gname}) );
# Device numbers are octal strings when defined, empty otherwise.
331 my $devmajor = defined($hdr->{devmajor}) ? sprintf("%07o", $hdr->{devmajor})
333 my $devminor = defined($hdr->{devminor}) ? sprintf("%07o", $hdr->{devminor})
# The 12-char octal size field can't represent >= 8GB; use the GNU
# base-256 extension (high bit set in the first byte, big-endian value).
336 if ( $hdr->{size} >= 2 * 65536 * 65536 ) {
338 # GNU extension for files >= 8GB: send size in big-endian binary
340 $sizeStr = pack("c4 N N", 0x80, 0, 0, 0,
341 $hdr->{size} / (65536 * 65536),
342 $hdr->{size} % (65536 * 65536));
343 } elsif ( $hdr->{size} >= 1 * 65536 * 65536 ) {
# 4GB..8GB: build the 11-digit octal string in two pieces because
# sprintf %o only handles values below 2^32.
345 # sprintf octal only handles up to 2^32 - 1
347 $sizeStr = sprintf("%03o", $hdr->{size} / (1 << 24))
348 . sprintf("%08o", $hdr->{size} % (1 << 24));
350 $sizeStr = sprintf("%011o", $hdr->{size});
# Assemble the header per $tar_pack_header; names/linknames longer than
# 99 chars are truncated here (TarWriteFileInfo emits @LongLink first).
352 my $data = pack($tar_pack_header,
353 substr($hdr->{name}, 0, 99),
354 sprintf("%07o", $hdr->{mode}),
355 sprintf("%07o", $hdr->{uid}),
356 sprintf("%07o", $hdr->{gid}),
358 sprintf("%011o", $hdr->{mtime}),
359 "", #checksum field - space padded by pack("A8")
361 substr($hdr->{linkname}, 0, 99),
362 $hdr->{magic} || 'ustar ',
363 $hdr->{version} || ' ',
# Checksum is the unsigned byte sum of the header (with the checksum
# field itself space-filled), spliced in at offset 148.
370 substr($data, 148, 7) = sprintf("%06o\0", unpack("%16C*",$data));
371 TarWrite($fh, \$data);
# TarWriteFileInfo body: write the header for one entry, preceded by GNU
# "././@LongLink" pseudo-entries carrying any link name or file name
# longer than the 99 chars a ustar header holds. NOTE(review): excerpt --
# the "sub" line and the %h setup lines (copying $hdr, setting the
# longlink type code) are not visible.
379 # Handle long link names (symbolic links)
381 if ( length($hdr->{linkname}) > 99 ) {
383 my $data = $hdr->{linkname} . "\0";
384 $h{name} = "././\@LongLink";
386 $h{size} = length($data);
387 TarWriteHeader($fh, \%h);
388 TarWrite($fh, \$data);
389 TarWritePad($fh, length($data));
392 # Handle long file names
394 if ( length($hdr->{name}) > 99 ) {
396 my $data = $hdr->{name} . "\0";
397 $h{name} = "././\@LongLink";
399 $h{size} = length($data);
400 TarWriteHeader($fh, \%h);
401 TarWrite($fh, \$data);
402 TarWritePad($fh, length($data));
# Finally the real header for the entry itself.
404 TarWriteHeader($fh, $hdr);
# TarWriteFile body: emit one tar entry for $hdr. Directories get a bare
# header; pooled files with nlink > 1 are emitted as tar hardlinks into
# the pool when the matching pool file can be found by MD5 digest;
# everything else is copied in full. NOTE(review): excerpt -- the "sub"
# line, the pool-probe "for" header, counter increments, and many closing
# braces are not visible.
414 my $tarPath = $hdr->{relPath};
# Normalize to "./host/num/..." form for the archive member name.
416 $tarPath =~ s{//+}{/}g;
417 $tarPath = "./" . $tarPath if ( $tarPath !~ /^\.\// );
418 $tarPath =~ s{//+}{/}g;
419 $hdr->{name} = $tarPath;
421 if ( $hdr->{type} == BPC_FTYPE_DIR ) {
423 # Directory: just write the header
425 $hdr->{name} .= "/" if ( $hdr->{name} !~ m{/$} );
426 TarWriteFileInfo($fh, $hdr);
428 } elsif ( $hdr->{type} == BPC_FTYPE_FILE ) {
430 # Regular file: write the header and file
432 my($data, $dataMD5, $size, $linkName);
# Multi-linked file: first check the inode cache, else probe the pool.
434 if ( $hdr->{type} == BPC_FTYPE_FILE && $hdr->{nlink} > 1 ) {
435 if ( defined($Inode2Path{$hdr->{inode}}) ) {
436 $linkName = $Inode2Path{$hdr->{inode}};
437 #print STDERR "Got cache hit for $linkName\n";
439 my $f = BackupPC::FileZIO->open($hdr->{fullPath}, 0,
441 if ( !defined($f) ) {
442 print(STDERR "Unable to open file $hdr->{fullPath}\n");
447 # Try to find the hardlink it points to by computing
448 # the pool file digest.
# The pool digest needs the first 1MB of uncompressed data plus the
# real (uncompressed) size; compute the size by reading the rest if
# the attrib file didn't provide it.
450 $f->read(\$dataMD5, $BufSize);
451 if ( !defined($hdr->{realSize}) ) {
453 # Need to get the real size
455 $size = length($dataMD5);
456 while ( $f->read(\$data, $BufSize) > 0 ) {
457 $size += length($data);
459 $hdr->{realSize} = $size;
462 my $md5 = Digest::MD5->new;
463 my $len = length($dataMD5);
464 $hdr->{realSize} = $len if ( $hdr->{type} != BPC_FTYPE_FILE );
# Sanity check: a file under 1MB should have been read in one gulp.
465 if ( $hdr->{realSize} < 1048576
466 && length($dataMD5) != $hdr->{realSize} ) {
467 print(STDERR "File $hdr->{fullPath} has bad size"
468 . " (expect $hdr->{realSize}, got $len)\n");
470 my $digest = $bpc->Buffer2MD5($md5, $hdr->{realSize},
472 my $path = $bpc->MD52Path($digest, $hdr->{compress});
# Probe the pool chain ($path, $path_0, $path_1, ...) for the file
# whose inode matches ours -- that's the pool copy we hardlink to.
475 # print(STDERR "Looking up $hdr->{fullPath} at $path\n");
477 my $testPath = $path;
478 $testPath .= "_$i" if ( $i >= 0 );
479 last if ( !-f $testPath );
480 my $inode = (stat(_))[1];
481 if ( $inode == $hdr->{inode} ) {
483 # Found it! Just emit a tar hardlink
# Link target is relative ("..") so the archive extracts
# correctly inside a copied TopDir tree.
485 $testPath =~ s{\Q$TopDir\E}{..};
486 $linkName = $testPath;
# With a link target in hand, emit a hardlink entry instead of data.
493 if ( defined($linkName) ) {
494 $hdr->{type} = BPC_FTYPE_HARDLINK;
495 $hdr->{linkname} = $linkName;
496 TarWriteFileInfo($fh, $hdr);
498 #print STDERR "$hdr->{relPath} matches $testPath\n";
499 if ( !$opts{c} && $hdr->{nlink} > 2 ) {
501 # add it to the cache if there are more
502 # than 2 links (pool + current file),
503 # since there are more to go
505 $Inode2Path{$hdr->{inode}} = $linkName;
# Pool miss: fall through and copy the file contents verbatim.
510 print STDERR "Can't find $hdr->{relPath} in pool, will copy file\n";
# Full-copy path: stream the (raw, compress=0) file into the archive,
# truncating or NUL-padding if the on-disk size disagrees with $hdr.
514 my $f = BackupPC::FileZIO->open($hdr->{fullPath}, 0, 0);
515 if ( !defined($f) ) {
516 print(STDERR "Unable to open file $hdr->{fullPath}\n");
520 TarWriteFileInfo($fh, $hdr);
521 while ( $f->read(\$data, $BufSize) > 0 ) {
522 if ( $size + length($data) > $hdr->{size} ) {
523 print(STDERR "Error: truncating $hdr->{fullPath} to"
524 . " $hdr->{size} bytes\n");
525 $data = substr($data, 0, $hdr->{size} - $size);
528 TarWrite($fh, \$data);
529 $size += length($data);
532 if ( $size != $hdr->{size} ) {
533 print(STDERR "Error: padding $hdr->{fullPath} to $hdr->{size}"
534 . " bytes from $size bytes\n");
536 while ( $size < $hdr->{size} ) {
537 my $len = $hdr->{size} - $size;
538 $len = $BufSize if ( $len > $BufSize );
540 TarWrite($fh, \$data);
# Round the data section up to a 512-byte boundary.
544 TarWritePad($fh, $size);
# Anything that is neither a dir nor a regular file is unsupported.
548 print(STDERR "Got unknown type $hdr->{type} for $hdr->{name}\n");