-#!/bin/perl -T
+#!/usr/bin/perl
#============================================================= -*-perl-*-
#
# BackupPC_tarCreate: create a tar archive of an existing dump
#
# DESCRIPTION
#
-# Usage: BackupPC_tarCreate [-t] [-h host] [-n dumpNum] [-s shareName]
-# [-r pathRemove] [-p pathAdd] files/directories...
+# Usage: BackupPC_tarCreate [options] files/directories...
#
# Flags:
# Required options:
#
-# -h host host from which the tar archive is created
-# -n dumpNum dump number from which the tar archive is created
-# -s shareName share name from which the tar archive is created
+# -h host Host from which the tar archive is created.
+# -n dumpNum Dump number from which the tar archive is created.
+# A negative number means relative to the end (eg -1
+# means the most recent dump, -2 2nd most recent etc).
+# -s shareName Share name from which the tar archive is created.
#
# Other options:
# -t print summary totals
# -r pathRemove path prefix that will be replaced with pathAdd
# -p pathAdd new path prefix
+# -b BLOCKS output write buffer size in 512-byte blocks (default 20; same as tar)
+# -w readBufSz buffer size for reading files (default 1048576 = 1MB)
+# -e charset charset for encoding file names (default: value of
+# $Conf{ClientCharset} when backup was done)
+# -l just print a file listing; don't generate an archive
+# -L just print a detailed file listing; don't generate an archive
#
# The -h, -n and -s options specify which dump is used to generate
# the tar archive. The -r and -p options can be used to relocate
# Craig Barratt <cbarratt@users.sourceforge.net>
#
# COPYRIGHT
-# Copyright (C) 2001-2003 Craig Barratt
+# Copyright (C) 2001-2009 Craig Barratt
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
#
#========================================================================
#
-# Version 2.1.0_CVS, released 3 Jul 2003.
+# Version 3.2.0, released 31 Jul 2010.
#
# See http://backuppc.sourceforge.net.
#
use lib "/usr/local/BackupPC/lib";
use File::Path;
use Getopt::Std;
+use Encode qw/from_to/;
use BackupPC::Lib;
use BackupPC::Attrib qw(:all);
use BackupPC::FileZIO;
use BackupPC::View;
die("BackupPC::Lib->new failed\n") if ( !(my $bpc = BackupPC::Lib->new) );
-my $TopDir = $bpc->TopDir();
-my $BinDir = $bpc->BinDir();
-my %Conf = $bpc->Conf();
my %opts;
+# Parse command-line flags.  New in this version: -L/-l (listings),
+# -e (charset), -b/-w (buffer sizes) and -i (incremental dump); print
+# the usage summary and exit non-zero on an unknown flag or when no
+# files/directories are given.
-if ( !getopts("th:n:p:r:s:", \%opts) || @ARGV < 1 ) {
- print(STDERR "usage: $0 [-t] [-h host] [-n dumpNum] [-s shareName]"
- . " [-r pathRemove] [-p pathAdd]"
- . " files/directories...\n");
+if ( !getopts("Llte:h:n:p:r:s:b:w:i", \%opts) || @ARGV < 1 ) {
+ print STDERR <<EOF;
+usage: $0 [options] files/directories...
+ Required options:
+ -h host host from which the tar archive is created
+ -n dumpNum dump number from which the tar archive is created
+ A negative number means relative to the end (eg -1
+ means the most recent dump, -2 2nd most recent etc).
+ -s shareName share name from which the tar archive is created
+
+ Other options:
+ -t print summary totals
+ -r pathRemove path prefix that will be replaced with pathAdd
+ -p pathAdd new path prefix
+ -b BLOCKS output write buffer size in 512-byte blocks (default 20; same as tar)
+ -w readBufSz buffer size for reading files (default 1048576 = 1MB)
+ -e charset charset for encoding file names (default: value of
+ \$Conf{ClientCharset} when backup was done)
+ -l just print a file listing; don't generate an archive
+ -L just print a detailed file listing; don't generate an archive
+ -i create incremental tar dump with just new files
+EOF
exit(1);
}
+# Validate -h (host): allow word chars, dots, spaces and dashes, and
+# additionally reject any ".." path component so a crafted host name
+# cannot escape the per-host directory tree.
-if ( $opts{h} !~ /^([\w\.\s-]+)$/ ) {
+if ( $opts{h} !~ /^([\w\.\s-]+)$/
+ || $opts{h} =~ m{(^|/)\.\.(/|$)} ) {
print(STDERR "$0: bad host name '$opts{h}'\n");
exit(1);
}
my $Host = $opts{h};
+# Validate -n (dump number); a leading "-" is now accepted so a
+# negative number can select a dump relative to the most recent one.
-if ( $opts{n} !~ /^(\d+)$/ ) {
+if ( $opts{n} !~ /^(-?\d+)$/ ) {
print(STDERR "$0: bad dump number '$opts{n}'\n");
exit(1);
}
my $ErrorCnt = 0;
my $i;
+# Map a negative $Num (eg: -1 == most recent dump) onto the real backup
+# number, provided it is within range of the available backups.
+$Num = $Backups[@Backups + $Num]{num} if ( -@Backups <= $Num && $Num < 0 );
for ( $i = 0 ; $i < @Backups ; $i++ ) {
last if ( $Backups[$i]{num} == $Num );
}
exit(1);
}
+# File names are stored internally in utf8; default the output charset
+# to the one recorded when the backup was taken, overridable with -e.
+my $Charset = $Backups[$i]{charset};
+$Charset = $opts{e} if ( $opts{e} ne "" );
+
+# -r/-p: untaint the optional path prefixes used to relocate file
+# names inside the generated archive.
my $PathRemove = $1 if ( $opts{r} =~ /(.+)/ );
my $PathAdd = $1 if ( $opts{p} =~ /(.+)/ );
+# Share names may now contain arbitrary characters; only ".." path
+# components are rejected (the old whitelist regex was dropped).
-if ( $opts{s} !~ /^([\w\s\.\/\$-]+)$/ ) {
+if ( $opts{s} =~ m{(^|/)\.\.(/|$)} ) {
print(STDERR "$0: bad share name '$opts{s}'\n");
exit(1);
}
-my $ShareName = $opts{s};
+
+# XXX ASA Search extension
+my $view_opts;
+
+# With $Conf{TarCreateIncremental} or -i, restrict the view to files
+# that are new in this increment only.
+my %Conf = $bpc->Conf;
+if ( $Conf{TarCreateIncremental} || $opts{i} ) {
+ warn "# incremental dump";
+ $view_opts = { only_increment => 1 };
+}
+
+# Package globals ("our") so archiveWrite() and the share-iteration
+# code below can reuse one BackupPC::View instance (previously a new
+# view was built on every archiveWrite() call).
+our $ShareName = $opts{s};
+our $view = BackupPC::View->new($bpc, $Host, \@Backups, $view_opts);
#
# This constant and the line of code below that uses it are borrowed
= 'a100 a8 a8 a8 a12 a12 A8 a1 a100 a6 a2 a32 a32 a8 a8 a155 x12';
my $tar_header_length = 512;
-my $BufSize = 1048576; # 1MB or 2^20
+my $BufSize = $opts{w} || 1048576; # 1MB or 2^20
my $WriteBuf = "";
-my $WriteBufSz = 20 * $tar_header_length;
+my $WriteBufSz = ($opts{b} || 20) * $tar_header_length;
my(%UidCache, %GidCache);
my(%HardLinkExtraFiles, @HardLinks);
#
binmode(STDOUT);
my $fh = *STDOUT;
-foreach my $dir ( @ARGV ) {
- archiveWrite($fh, $dir);
-}
-
-#
-# Write out any hardlinks (if any)
-#
-foreach my $hdr ( @HardLinks ) {
- $hdr->{size} = 0;
- if ( defined($PathRemove)
- && substr($hdr->{linkname}, 0, length($PathRemove)+1)
- eq ".$PathRemove" ) {
- substr($hdr->{linkname}, 0, length($PathRemove)+1) = ".$PathAdd";
+# With -s "*" iterate over every share in the dump, prefixing each
+# share's paths with "/<share>/" so entries stay unique across shares;
+# hardlink state is flushed per share via archiveWriteHardLinks().
+if ( $ShareName eq "*" ) {
+ my $PathRemoveOrig = $PathRemove;
+ my $PathAddOrig = $PathAdd;
+ foreach $ShareName ( $view->shareList($Num) ) {
+ #print(STDERR "Doing share ($ShareName)\n");
+ $PathRemove = "/" if ( !defined($PathRemoveOrig) );
+ ($PathAdd = "/$ShareName/$PathAddOrig") =~ s{//+}{/}g;
+ foreach my $dir ( @ARGV ) {
+ archiveWrite($fh, $dir);
+ }
+ archiveWriteHardLinks($fh);
+ }
+} else {
+ foreach my $dir ( @ARGV ) {
+ archiveWrite($fh, $dir);
}
- TarWriteFileInfo($fh, $hdr);
+ archiveWriteHardLinks($fh);
}
-#
-# Finish with two null 512 byte headers, and then round out a full
-# block.
-#
-my $data = "\0" x ($tar_header_length * 2);
-TarWrite($fh, \$data);
-TarWrite($fh, undef);
+# The tar end-of-archive trailer is skipped in listing-only modes
+# (-l / -L), since no archive data is being produced.
+if ( !$opts{l} && !$opts{L} ) {
+ #
+ # Finish with two null 512 byte headers, and then round out a full
+ # block.
+ #
+ my $data = "\0" x ($tar_header_length * 2);
+ TarWrite($fh, \$data);
+ TarWrite($fh, undef);
+}
#
# print out totals if requested
print STDERR "Done: $FileCnt files, $ByteCnt bytes, $DirCnt dirs,",
" $SpecialCnt specials, $ErrorCnt errors\n";
}
+if ( $ErrorCnt && !$FileCnt && !$DirCnt ) {
+ #
+ # Got errors, with no files or directories; exit with non-zero
+ # status
+ #
+ exit(1);
+}
exit(0);
###########################################################################
{
my($fh, $dir, $tarPathOverride) = @_;
- my $view = BackupPC::View->new($bpc, $Host, \@Backups);
-
if ( $dir =~ m{(^|/)\.\.(/|$)} ) {
print(STDERR "$0: bad directory '$dir'\n");
$ErrorCnt++;
return;
}
- $view->find($Num, $ShareName, $dir, 0, \&TarWriteFile,
- $fh, $tarPathOverride);
+ $dir = "/" if ( $dir eq "." );
+ #print(STDERR "calling find with $Num, $ShareName, $dir\n");
+ if ( $view->find($Num, $ShareName, $dir, 0, \&TarWriteFile,
+ $fh, $tarPathOverride) < 0 ) {
+ print(STDERR "$0: bad share or directory '$ShareName/$dir'\n");
+ $ErrorCnt++;
+ return;
+ }
+}
+
+#
+# Write out any hardlinks (if any)
+#
+sub archiveWriteHardLinks
+{
+ my($fh) = @_;
+ foreach my $hdr ( @HardLinks ) {
+ # A hardlink entry carries no data; its tar header's linkname
+ # points at the previously-emitted file.
+ $hdr->{size} = 0;
+ my $name = $hdr->{linkname};
+ $name =~ s{^\./}{/};
+ # If the link target was archived under an override path, rewrite
+ # the linkname to the name that was actually written.
+ if ( defined($HardLinkExtraFiles{$name}) ) {
+ $hdr->{linkname} = $HardLinkExtraFiles{$name};
+ }
+ # Apply the -r/-p prefix relocation to the link target too.
+ if ( defined($PathRemove)
+ && substr($hdr->{linkname}, 0, length($PathRemove)+1)
+ eq ".$PathRemove" ) {
+ substr($hdr->{linkname}, 0, length($PathRemove)+1) = ".$PathAdd";
+ }
+ TarWriteFileInfo($fh, $hdr);
+ }
+ # Reset per-share state so the next share (with -s "*") starts clean.
+ @HardLinks = ();
+ %HardLinkExtraFiles = ();
}
sub UidLookup
: "";
my $devminor = defined($hdr->{devminor}) ? sprintf("%07o", $hdr->{devminor})
: "";
+ # Build the 12-byte tar header size field.  Sizes below 4GB fit the
+ # standard 11-digit octal form; 4GB..8GB is split into two octal
+ # sprintf calls (32-bit-safe); >= 8GB uses the GNU binary extension
+ # (0x80 flag byte followed by the size in big-endian binary).
+ my $sizeStr;
+ if ( $hdr->{size} >= 2 * 65536 * 65536 ) {
+ #
+ # GNU extension for files >= 8GB: send size in big-endian binary
+ #
+ $sizeStr = pack("c4 N N", 0x80, 0, 0, 0,
+ $hdr->{size} / (65536 * 65536),
+ $hdr->{size} % (65536 * 65536));
+ } elsif ( $hdr->{size} >= 1 * 65536 * 65536 ) {
+ #
+ # sprintf octal only handles up to 2^32 - 1
+ #
+ $sizeStr = sprintf("%03o", $hdr->{size} / (1 << 24))
+ . sprintf("%08o", $hdr->{size} % (1 << 24));
+ } else {
+ $sizeStr = sprintf("%011o", $hdr->{size});
+ }
my $data = pack($tar_pack_header,
substr($hdr->{name}, 0, 99),
sprintf("%07o", $hdr->{mode}),
sprintf("%07o", $hdr->{uid}),
sprintf("%07o", $hdr->{gid}),
- sprintf("%011o", $hdr->{size}),
+ $sizeStr,
sprintf("%011o", $hdr->{mtime}),
"", #checksum field - space padded by pack("A8")
$hdr->{type},
{
my($fh, $hdr) = @_;
+ #
+ # Convert path names to requested (eg: client) charset
+ #
+ if ( $Charset ne "" ) {
+ from_to($hdr->{name}, "utf8", $Charset);
+ from_to($hdr->{linkname}, "utf8", $Charset);
+ }
+
+ if ( $opts{l} ) {
+ print($hdr->{name} . "\n");
+ return;
+ } elsif ( $opts{L} ) {
+ my $owner = "$hdr->{uid}/$hdr->{gid}";
+
+ my $name = $hdr->{name};
+
+ if ( $hdr->{type} == BPC_FTYPE_SYMLINK
+ || $hdr->{type} == BPC_FTYPE_HARDLINK ) {
+ $name .= " -> $hdr->{linkname}";
+ }
+ $name =~ s/\n/\\n/g;
+
+ printf("%6o %9s %11.0f %s\n",
+ $hdr->{mode},
+ $owner,
+ $hdr->{size},
+ $name);
+ return;
+ }
+
#
# Handle long link names (symbolic links)
#
TarWrite($fh, \$data);
TarWritePad($fh, length($data));
}
+
#
# Handle long file names
#
my $tarPath = $hdr->{relPath};
$tarPath = $tarPathOverride if ( defined($tarPathOverride) );
+ $tarPath =~ s{//+}{/}g;
if ( defined($PathRemove)
&& substr($tarPath, 0, length($PathRemove)) eq $PathRemove ) {
substr($tarPath, 0, length($PathRemove)) = $PathAdd;
TarWriteFileInfo($fh, $hdr);
$DirCnt++;
} elsif ( $hdr->{type} == BPC_FTYPE_FILE ) {
+ my($data, $size);
#
# Regular file: write the header and file
#
return;
}
TarWriteFileInfo($fh, $hdr);
- my($data, $size);
- while ( $f->read(\$data, $BufSize) > 0 ) {
- TarWrite($fh, \$data);
- $size += length($data);
+ if ( $opts{l} || $opts{L} ) {
+ $size = $hdr->{size};
+ } else {
+ while ( $f->read(\$data, $BufSize) > 0 ) {
+ if ( $size + length($data) > $hdr->{size} ) {
+ print(STDERR "Error: truncating $hdr->{fullPath} to"
+ . " $hdr->{size} bytes\n");
+ $data = substr($data, 0, $hdr->{size} - $size);
+ $ErrorCnt++;
+ }
+ TarWrite($fh, \$data);
+ $size += length($data);
+ }
+ $f->close;
+ if ( $size != $hdr->{size} ) {
+ print(STDERR "Error: padding $hdr->{fullPath} to $hdr->{size}"
+ . " bytes from $size bytes\n");
+ $ErrorCnt++;
+ while ( $size < $hdr->{size} ) {
+ my $len = $hdr->{size} - $size;
+ $len = $BufSize if ( $len > $BufSize );
+ $data = "\0" x $len;
+ TarWrite($fh, \$data);
+ $size += $len;
+ }
+ }
+ TarWritePad($fh, $size);
}
- $f->close;
- TarWritePad($fh, $size);
$FileCnt++;
$ByteCnt += $size;
} elsif ( $hdr->{type} == BPC_FTYPE_HARDLINK ) {
my $done = 0;
my $name = $hdr->{linkname};
$name =~ s{^\./}{/};
- if ( $HardLinkExtraFiles{$name} ) {
+ if ( defined($HardLinkExtraFiles{$name}) ) {
$done = 1;
} else {
foreach my $arg ( @ARGV ) {
+ $arg = "/" if ( $arg eq "." );
$arg =~ s{^\./+}{/};
$arg =~ s{/+$}{};
- $done = 1 if ( $name eq $arg || $name =~ /^\Q$arg\// );
+ $done = 1 if ( $name eq $arg || $name =~ /^\Q$arg\// || $arg eq "" );
}
}
if ( $done ) {
# routine, so that we save the hassle of dealing with
# mangling, merging and attributes.
#
- $HardLinkExtraFiles{$hdr->{linkname}} = 1;
- archiveWrite($fh, $hdr->{linkname}, $hdr->{name});
+ my $name = $hdr->{linkname};
+ $name =~ s{^\./}{/};
+ $HardLinkExtraFiles{$name} = $hdr->{name};
+ archiveWrite($fh, $name, $hdr->{name});
}
} elsif ( $hdr->{type} == BPC_FTYPE_SYMLINK ) {
#
$hdr->{size} = 0;
TarWriteFileInfo($fh, $hdr);
$SpecialCnt++;
+ } elsif ( $hdr->{type} == BPC_FTYPE_SOCKET
+ || $hdr->{type} == BPC_FTYPE_UNKNOWN ) {
+ #
+ # ignore these two file types - these are dynamic file types created
+ # by applications as needed
+ #
} else {
print(STDERR "Got unknown type $hdr->{type} for $hdr->{name}\n");
$ErrorCnt++;