use BackupPC::SearchLib;
%BackupPC::SearchLib::Conf = %Conf;
-# cludge: minimum .tar.gz size
-my $MIN_TAR_SIZE = 80;
-
my $path = abs_path($0);
$path =~ s#/[^/]+$#/#;
my $tarIncCreate = $path .= 'BackupPC_tarIncCreate';
die "can't find $tarIncCreate: $!\n" unless (-x $tarIncCreate);
my $bin;
-foreach my $c (qw/gzip split/) {
+foreach my $c (qw/gzip md5sum/) {
$bin->{$c} = which($c) || die "$0 needs $c, install it\n";
}
return strftime($t_fmt,localtime());
}
-sub tar_join($) {
- my $filename = shift;
-
- my $in = my $out = $filename;
- $out .= '.tmp';
-
- # FIXME I should really order parts manually!
- system("cat $in/part* > $out && rm -Rf $in && mv $out $in") == 0 or die "can't join $in: $?";
-
-}
-
sub tar_check($$$$) {
my ($host,$share,$num,$filename) = @_;
print " check";
}
- if (-d $filename) {
- print STDERR ", joining";
- tar_join($filename);
+ my @tar_parts;
+
+ if (-d "$tar_dir/$filename") {
+ print STDERR " multi-part" if ($opt{d});
+ opendir(my $dir, "$tar_dir/$filename") || die "can't readdir $tar_dir/$filename: $!";
@tar_parts = map { my $p = $_; $p =~ s#^#${filename}/#; $p } grep { !/^\./ && !/md5/ && -f "$tar_dir/$filename/$_" } readdir($dir);
+ closedir($dir);
+ } else {
+ push @tar_parts, "${filename}.tar.gz";
}
- print STDERR ", opening" if ($debug);
- open(my $fh, "gzip -cd $filename |") or die "can't open $filename: $!";
- binmode($fh);
- my $tar = Archive::Tar::Streamed->new($fh);
+ print " [parts: ",join(", ", @tar_parts),"]" if ($opt{d});
- print STDERR ", tar" if ($debug);
+ my $same = 1;
my @tar_files;
- while(my $entry = $tar->next) {
- push @tar_files, $entry->name;
+
+ print " reading";
+
+ foreach my $tarfilename (@tar_parts) {
+
+ print STDERR " $tarfilename" if ($debug);
+
+ my $path = "$tar_dir/$tarfilename";
+ my $md5 = $path;
+ $md5 =~ s/\.tar\.gz$/.md5/ || die "can't create md5 filename from $md5";
+ if (! -e $md5) {
+ print ", creating md5";
+ system( $bin->{md5sum} . " $path > $md5") == 0 or die "can't create md5 $path: $!";
+ }
+
+ open(my $fh, "gzip -cd $tar_dir/$tarfilename |") or die "can't open $tar_dir/$tarfilename: $!";
+ binmode($fh);
+ my $tar = Archive::Tar::Streamed->new($fh);
+
+ my $total_size = 0;
+
+ while(my $entry = $tar->next) {
+ push @tar_files, $entry->name;
+ $total_size += $entry->size;
+ }
+
+ if ($total_size > $Conf{MaxArchiveFileSize}) {
+ print STDERR " part too big $total_size > $Conf{MaxArchiveFileSize} }}" if ($debug);
+ $same = 0;
+ last;
+ } elsif ($total_size > $Conf{MaxArchiveSize}) {
+ print STDERR " part bigger than media $total_size > $Conf{MaxArchiveSize} }}" if ($debug);
+ $same = 0;
+ last;
+ }
}
+
+ # short-cut and exit;
+ return $same unless($same);
+
@tar_files = sort @tar_files;
print STDERR " ",($#tar_files + 1), " files" if ($debug);
@db_files = sort @db_files;
- my $same = 1;
if ($#tar_files != $#db_files) {
$same = 0;
print STDERR " NUMBER" if ($debug);
} );
-my $sth_inc_size = $dbh->prepare(qq{ update backups set inc_size = ?, parts = ? where id = ? });
-my $sth_inc_deleted = $dbh->prepare(qq{ update backups set inc_deleted = ? where id = ? });
-
-
$sth->execute();
my $num_backups = $sth->rows;
my $curr_backup = 1;
# this will return -1 if file doesn't exist
my $size = BackupPC::SearchLib::get_tgz_size_by_name($tar_file);
+ print "# size: $size backup.size: ", $row->{inc_size},"\n" if ($opt{d});
+
+ if ( $row->{'inc_size'} != -1 && $size != -1 && $row->{'inc_size'} == $size) {
+ if ($check) {
+ tar_check($row->{'host'}, $row->{'share'}, $row->{'num'}, $tar_file) && next;
+ } else {
+ next;
+ }
+ }
+
print curr_time, " $curr_backup/$num_backups ", $row->{'host'}, ":", $row->{'share'}, " #", $row->{'num'}, " -> $tar_file";
$curr_backup++;
my $t = time();
# re-create archive?
- if ($row->{'inc_size'} == -1 || $size == -1 ||
- $row->{'inc_size'} != $size ||
- $check && ! tar_check($row->{'host'}, $row->{'share'}, $row->{'num'}, "$tar_dir/$tar_file")
- ) {
- my $cmd = qq{rm -Rf $tar_dir/$tar_file && $tarIncCreate -h "$row->{'host'}" -s "$row->{'share'}" -n $row->{'num'} | $bin->{'gzip'} $Conf{GzipLevel} > ${tar_dir}/${tar_file}.tmp};
- print STDERR "## $cmd\n" if ($debug);
-
- system($cmd) == 0 or die "failed: $?";
+ my $cmd = qq{ $tarIncCreate -h "$row->{'host'}" -s "$row->{'share'}" -n $row->{'num'} -f };
+ print STDERR "## $cmd\n" if ($debug);
- rename("${tar_dir}/${tar_file}.tmp", "$tar_dir/$tar_file") or die "can't rename $tar_dir/$tar_file: $!";
-
- $size = (stat( "$tar_dir/$tar_file" ))[7];
+ if (system($cmd) != 0) {
+ print STDERR " FAILED";
}
- if ($size > $MIN_TAR_SIZE) {
-
- my $max_size = $Conf{'MaxArchiveSize'} || die "problem with MaxArchiveSize parametar";
- $max_size *= 1024; # convert to bytes
-
- my $max_file_size = $Conf{'MaxArchiveFileSize'} || die "problem with MaxArchiveFileSize parametar";
- $max_file_size *= 1024; # bytes
-
- if ($max_file_size > $max_size) {
- warn "MaxArchiveFileSize ($max_file_size) is bigger than MaxArchiveSize ($max_size)\n";
- $max_file_size = $max_size;
- }
-
- # maximum file size on ISO image is 4Gb
- # this will require Linux kernel 2.6.8 or newer
- if ( $max_size > $max_file_size ) {
- $max_size = $max_file_size;
- }
-
- my $parts = int( ($size + $max_size - 1) / $max_size );
-
- if (-d "$tar_dir/$tar_file" && $parts != $row->{'parts'}) {
- print " join";
- tar_join("$tar_dir/$tar_file");
- }
-
- if ($size > $max_size && ! -d "$tar_dir/$tar_file") {
- print " split/$parts";
- my $in = my $out = "$tar_dir/$tar_file";
- $out .= '.tmp';
- rename $in, $out || die "can't rename $in: $!";
- mkdir $in || die "can't mkdir $in: $!";
-
- my $suffix_len = length("$parts");
- system("$bin->{'split'} -d -b $max_size -a $suffix_len $out $in/part") == 0 or die "can't split $out: $?";
- unlink $out || die "can't unlink $out: $!";
- }
-
- $sth_inc_size->execute($size, $parts, $row->{'backup_id'});
- $sth_inc_deleted->execute(0, $row->{'backup_id'});
-
- printf(" %1.2f MB", ($size / 1024 / 1024));
-
- } else {
- $sth_inc_deleted->execute(1, $row->{'backup_id'});
- unlink "$tar_dir/$tar_file" || die "can't delete $tar_dir/$tar_file: $!\n";
- print " EMPTY";
- }
print ", dur: ",fmt_time(time() - $t), "\n";
$dbh->commit;