use Archive::Tar::Streamed;
use Algorithm::Diff;
use Getopt::Std;
+use File::Slurp;
my $bpc = BackupPC::Lib->new || die "can't create BackupPC::Lib";
my %Conf = $bpc->Conf();
use BackupPC::SearchLib;
%BackupPC::SearchLib::Conf = %Conf;
-# cludge: minimum .tar.gz size
-my $MIN_TAR_SIZE = 80;
-
my $path = abs_path($0);
$path =~ s#/[^/]+$#/#;
my $tarIncCreate = $path .= 'BackupPC_tarIncCreate';
die "can't find $tarIncCreate: $!\n" unless (-x $tarIncCreate);
my $bin;
-foreach my $c (qw/gzip/) {
+foreach my $c (qw/gzip md5sum/) {
$bin->{$c} = which($c) || die "$0 needs $c, install it\n";
}
return strftime($t_fmt,localtime());
}
+my $hsn_cache;
+
+sub get_backup_id($$$) {
+ my ($host, $share, $num) = @_;
+
+ my $key = "$host $share $num";
+ return $hsn_cache->{$key} if ($hsn_cache->{$key});
+
+ my $sth = $dbh->prepare(qq{
+ SELECT
+ backups.id
+ FROM backups
+ INNER JOIN shares ON backups.shareID=shares.ID
+ INNER JOIN hosts ON backups.hostID = hosts.ID
+ where hosts.name = ? and shares.name = ? and backups.num = ?
+ });
+ $sth->execute($host, $share, $num);
+ my ($id) = $sth->fetchrow_array;
+
+ $hsn_cache->{"$host $share $num"} = $id;
+
+ print STDERR "# $host $share $num == $id\n" if ($opt{d});
+
+ return $id;
+}
+
+
sub tar_check($$$$) {
my ($host,$share,$num,$filename) = @_;
- return 1; # FIXME
+ sub check_part {
+ my ($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items) = @_;
+ my $backup_id = get_backup_id($host, $share, $num);
+ my $sth_md5 = $dbh->prepare(qq{
+ select
+ id, tar_size, size, md5, items
+ from backup_parts
+ where backup_id = ? and part_nr = ?
+ });
+
+ $sth_md5->execute($backup_id, $part_nr);
+
+ if (my $row = $sth_md5->fetchrow_hashref) {
+ return if (
+ $row->{tar_size} >= $tar_size &&
+ $row->{size} == $size &&
+ $row->{md5} eq $md5 &&
+ $row->{items} == $items
+ );
+ print STDERR "# deleting invalid row $row->{id}\n" if ($opt{d});
+ $dbh->do(qq{ delete from backup_parts where id = $row->{id} });
+ }
+ print STDERR "# inserting new backup_part row\n";
+ my $sth_insert = $dbh->prepare(qq{
+ insert into backup_parts (
+ backup_id,
+ part_nr,
+ tar_size,
+ size,
+ md5,
+ items
+ ) values (?,?,?,?,?,?)
+ });
+
+ $sth_insert->execute($backup_id, $part_nr, $tar_size, $size, $md5, $items);
+ $dbh->commit;
+ }
if ($debug) {
print STDERR " {{ CHECK: ${host}:${share}#${num} and $filename";
print " check";
}
- if (-d $filename) {
- print STDERR ", joining";
- tar_join($filename);
+ my @tar_parts;
+
+ if (-d "$tar_dir/$filename") {
+ print STDERR " multi-part" if ($opt{d});
+ opendir(my $dir, "$tar_dir/$filename") || die "can't readdir $tar_dir/$filename: $!";
+ @tar_parts = map { my $p = $_; $p =~ s#^#$filename/#; $p } grep { !/^\./ && !/md5/ && -f "$tar_dir/$filename/$_" } readdir($dir);
+ closedir($dir);
+ } else {
+ push @tar_parts, "$filename.tar.gz";
}
- print STDERR ", opening" if ($debug);
- open(my $fh, "gzip -cd $filename |") or die "can't open $filename: $!";
- binmode($fh);
- my $tar = Archive::Tar::Streamed->new($fh);
+ print " [parts: ",join(", ", @tar_parts),"]" if ($opt{d});
- print STDERR ", tar" if ($debug);
+ my $same = 1;
my @tar_files;
- while(my $entry = $tar->next) {
- push @tar_files, $entry->name;
+
+ my $backup_part;
+
+ print " reading";
+
+ foreach my $tarfilename (@tar_parts) {
+
+ print STDERR " $tarfilename" if ($debug);
+
+ my $path = "$tar_dir/$tarfilename";
+ my $md5_path = $path;
+ $md5_path =~ s/\.tar\.gz$/.md5/ || die "can't create md5 filename from $md5_path";
+ if (! -e $md5_path || -z $md5_path) {
+ print ", creating md5";
+ system( $bin->{md5sum} . " $path > $md5_path") == 0 or die "can't create md5 $path: $!";
+ }
+
+ my $md5 = read_file( $md5_path ) || die "can't read md5sum file $md5_path: $!";
+ $md5 =~ s#\s.*$##;
+
+ my $part_nr = 1;
+ $part_nr = $1 if ($tarfilename =~ m#/(\d+)\.tar\.gz#);
+
+ my $size = (stat( "$tar_dir/$tarfilename" ))[7] || die "can't stat $tar_dir/$tarfilename";
+
+ open(my $fh, "gzip -cd $tar_dir/$tarfilename |") or die "can't open $tar_dir/$tarfilename: $!";
+ binmode($fh);
+ my $tar = Archive::Tar::Streamed->new($fh);
+
+ my $tar_size = 0;
+ my $items = 0;
+
+ while(my $entry = $tar->next) {
+ push @tar_files, $entry->name;
+ $items++;
+ $tar_size += $entry->size;
+ }
+
+ if ($tar_size > $Conf{MaxArchiveFileSize}) {
+ print STDERR " part too big $tar_size > $Conf{MaxArchiveFileSize} }}" if ($debug);
+ $same = 0;
+ last;
+ } elsif ($size > $Conf{MaxArchiveSize}) {
+ print STDERR " part bigger than media $size > $Conf{MaxArchiveSize} }}" if ($debug);
+ $same = 0;
+ last;
+ }
+
+ check_part($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items);
}
+
+ # short-cut and exit;
+ return $same unless($same);
+
@tar_files = sort @tar_files;
print STDERR " ",($#tar_files + 1), " files" if ($debug);
@db_files = sort @db_files;
- my $same = 1;
if ($#tar_files != $#db_files) {
$same = 0;
print STDERR " NUMBER" if ($debug);
print "# size: $size backup.size: ", $row->{inc_size},"\n" if ($opt{d});
- if ( $row->{'inc_size'} != -1 && $size != -1 && $row->{'inc_size'} == $size && ( $check && tar_check($row->{'host'}, $row->{'share'}, $row->{'num'}, "$tar_dir/$tar_file") || 1) ) {
- next;
+ if ( $row->{'inc_size'} != -1 && $size != -1 && $row->{'inc_size'} >= $size) {
+ if ($check) {
+ tar_check($row->{'host'}, $row->{'share'}, $row->{'num'}, $tar_file) && next;
+ } else {
+ next;
+ }
}
print curr_time, " $curr_backup/$num_backups ", $row->{'host'}, ":", $row->{'share'}, " #", $row->{'num'}, " -> $tar_file";
my $t = time();
# re-create archive?
- my $cmd = qq{ $tarIncCreate -h "$row->{'host'}" -s "$row->{'share'}" -n $row->{'num'} };
+ my $cmd = qq{ $tarIncCreate -h "$row->{'host'}" -s "$row->{'share'}" -n $row->{'num'} -f };
print STDERR "## $cmd\n" if ($debug);
if (system($cmd) != 0) {