use Data::Dumper;
use Time::HiRes qw/time/;
use POSIX qw/strftime/;
-use BackupPC::SearchLib;
use Cwd qw/abs_path/;
use File::Which;
use Archive::Tar::Streamed;
use Algorithm::Diff;
use Getopt::Std;
+use File::Slurp;
+use File::Pid;
+
+=head1 NAME
+
+BackupPC_incPartsUpdate
+
+=head1 DESCRIPTION
+
+Creates C<.tar.gz> increments on disk by calling C<BackupPC_tarIncCreate>.
+
+The following options are supported (all of them optional; see the example below):
+
+=over 4
+
+=item -h hostname
+
+Update parts for just a single C<hostname>
+
+=item -c
+
+Force a check of tar archives which already exist on disk
+
+=item -d
+
+Turn on debugging output
+
+=back
+
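+For example, to force a check of existing archives for a single host
+with debugging output (C<somehost> is a placeholder host name):
+
+    BackupPC_incPartsUpdate -h somehost -c -d
+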
+=cut
-# cludge: minimum .tar.gz size
-my $MIN_TAR_SIZE = 80;
+my %opt;
+getopts("cdh:", \%opt );
+
+my $debug = $opt{d};
+my $check = $opt{c} && print STDERR "NOTICE: tar archive check forced\n";
+
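+# derive a unique pidfile name from our own absolute path, so that two
+# copies of this script installed at different paths don't lock each other out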
+my $pid_path = abs_path($0);
+$pid_path =~ s/\W+/_/g;
+
+my $pidfile = File::Pid->new({
+	file => "/tmp/$pid_path",
+});
+
+if (my $pid = $pidfile->running ) {
+ die "$0 already running: $pid\n";
+} elsif ($pidfile->pid ne $$) {
+ $pidfile->remove;
+	$pidfile = File::Pid->new;
+}
+
+print STDERR "$0 using pid ",$pidfile->pid," file ",$pidfile->file,"\n";
+$pidfile->write;
+
+my $bpc = BackupPC::Lib->new || die "can't create BackupPC::Lib";
+my %Conf = $bpc->Conf();
+
+use BackupPC::SearchLib;
+%BackupPC::SearchLib::Conf = %Conf;
my $path = abs_path($0);
$path =~ s#/[^/]+$#/#;
die "can't find $tarIncCreate: $!\n" unless (-x $tarIncCreate);
my $bin;
-foreach my $c (qw/gzip split/) {
+foreach my $c (qw/gzip md5sum/) {
$bin->{$c} = which($c) || die "$0 needs $c, install it\n";
}
-my %opt;
-getopts("cd", \%opt );
-
-my $debug = $opt{d};
-my $check = $opt{c} && print STDERR "NOTICE: tar archive check forced\n";
-
$|=1;
my $start_t = time();
my $t_fmt = '%Y-%m-%d %H:%M:%S';
-my $hosts;
-my $bpc = BackupPC::Lib->new || die;
-my %Conf = $bpc->Conf();
-my $TopDir = $bpc->TopDir();
-my $beenThere = {};
-
my $dsn = $Conf{SearchDSN} || die "Need SearchDSN in config.pl\n";
my $user = $Conf{SearchUser} || '';
return strftime($t_fmt,localtime());
}
-sub tar_join($) {
- my $filename = shift;
+my $hsn_cache;
- my $in = my $out = $filename;
- $out .= '.tmp';
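+# resolve (host, share, backup number) to the backups.id primary key;
+# results are memoized in $hsn_cache since the same triplet repeats often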
+sub get_backup_id($$$) {
+ my ($host, $share, $num) = @_;
- # FIXME I should really order parts manually!
- system("cat $in/part* > $out && rm -Rf $in && mv $out $in") == 0 or die "can't join $in: $?";
+ my $key = "$host $share $num";
+ return $hsn_cache->{$key} if ($hsn_cache->{$key});
+ my $sth = $dbh->prepare(qq{
+ SELECT
+ backups.id
+ FROM backups
+ INNER JOIN shares ON backups.shareID=shares.ID
+ INNER JOIN hosts ON backups.hostID = hosts.ID
+ WHERE hosts.name = ? and shares.name = ? and backups.num = ?
+ });
+ $sth->execute($host, $share, $num);
+ my ($id) = $sth->fetchrow_array;
+
+ $hsn_cache->{"$host $share $num"} = $id;
+
+ print STDERR "# $host $share $num == $id\n" if ($opt{d});
+
+ return $id;
+}
+
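+# flag a backup as having no increment on disk; the main query below
+# filters on "not inc_deleted", so it won't be considered again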
+sub backup_inc_deleted($) {
+ my $backup_id = shift;
+ my $sth_inc_deleted = $dbh->prepare(qq{
+ update backups set
+ inc_deleted = true
+ where id = ?
+ });
+ $sth_inc_deleted->execute($backup_id);
}
sub tar_check($$$$) {
my ($host,$share,$num,$filename) = @_;
- if ($debug) {
- print STDERR " {{ CHECK: ${host}:${share}#${num} and $filename";
- } else {
- print " check";
+ my $t = time();
+ print curr_time, " check $host:$share#$num -> $filename";
+
+ # depending on expected returned value this is used like:
+ # my $uncompress_size = get_gzip_size('/full/path/to.gz');
+ # my ($compress_size, $uncompress_size) = get_gzip_size('/path.gz');
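+	# NB: gzip stores the uncompressed size modulo 2^32 in the archive
+	# trailer, so the value reported by gzip -l wraps for data over 4 GB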
+ sub get_gzip_size($) {
+ my $filename = shift;
+ die "file $filename problem: $!" unless (-r $filename);
+ open(my $gzip, $bin->{gzip}." -l $filename |") || die "can't gzip -l $filename: $!";
+ my $line = <$gzip>;
+ chomp($line);
+ $line = <$gzip> if ($line =~ /^\s+compressed/);
+
+		if ($line =~ m/^\s+(\d+)\s+(\d+)\s+\d+\.\d+/) {
+			# return (compressed, uncompressed) sizes in list
+			# context, as the usage comment above promises
+			if (wantarray) {
+				return ( $1, $2 );
+			} else {
+				return $2;
+			}
+ } else {
+ die "can't find size in line: $line";
+ }
+ }
+
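+	# insert or refresh the backup_parts row for one archive part: keep
+	# the existing row when all stored values still match, otherwise
+	# delete it and insert fresh data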
+ sub check_part {
+ my ($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items) = @_;
+ my $backup_id = get_backup_id($host, $share, $num);
+ my $sth_md5 = $dbh->prepare(qq{
+ select
+ id, tar_size, size, md5, items
+ from backup_parts
+ where backup_id = ? and part_nr = ?
+ });
+
+ $sth_md5->execute($backup_id, $part_nr);
+
+ if (my $row = $sth_md5->fetchrow_hashref) {
+ return if (
+ $row->{tar_size} >= $tar_size &&
+ $row->{size} == $size &&
+ $row->{md5} eq $md5 &&
+ $row->{items} == $items
+ );
+ print ", deleting invalid backup_parts $row->{id}";
+ $dbh->do(qq{ delete from backup_parts where id = $row->{id} });
+ }
+ print ", inserting new";
+ my $sth_insert = $dbh->prepare(qq{
+ insert into backup_parts (
+ backup_id,
+ part_nr,
+ tar_size,
+ size,
+ md5,
+ items
+ ) values (?,?,?,?,?,?)
+ });
+
+ $sth_insert->execute($backup_id, $part_nr, $tar_size, $size, $md5, $items);
+ $dbh->commit;
}
- if (-d $filename) {
- print STDERR ", joining";
- tar_join($filename);
+ my @tar_parts;
+
+ if (-d "$tar_dir/$filename") {
+ print ", multi-part";
+ opendir(my $dir, "$tar_dir/$filename") || die "can't readdir $tar_dir/$filename: $!";
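+		# collect the part files, skipping dotfiles and .md5 checksums,
+		# and prefix each with the archive directory name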
+		@tar_parts = map { "$filename/$_" }
+			grep { !/^\./ && !/md5/ && -f "$tar_dir/$filename/$_" }
+			readdir($dir);
+ closedir($dir);
+ } else {
+ push @tar_parts, "${filename}.tar.gz";
}
- print STDERR ", opening" if ($debug);
- open(my $fh, "gzip -cd $filename |") or die "can't open $filename: $!";
- binmode($fh);
- my $tar = Archive::Tar::Streamed->new($fh);
+ print " [parts: ",join(", ", @tar_parts),"]" if ($opt{d});
- print STDERR ", tar" if ($debug);
+ my $same = 1;
my @tar_files;
- while(my $entry = $tar->next) {
- push @tar_files, $entry->name;
+
+ my $backup_part;
+
+ print " reading" if ($opt{d});
+
+ foreach my $tarfilename (@tar_parts) {
+
+ print "\n\t- $tarfilename";
+
+ my $path = "$tar_dir/$tarfilename";
+
+ my $size = (stat( $path ))[7] || die "can't stat $path: $!";
+
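+		# every part has to fit on the target media; returning 0 here
+		# makes the forced-check caller re-create the archive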
+ if ($size > $Conf{MaxArchiveSize}) {
+ print ", part bigger than media $size > $Conf{MaxArchiveSize}\n";
+ return 0;
+ }
+
+ print ", $size bytes";
+
+
+ open(my $fh, "gzip -cd $path |") or die "can't open $path: $!";
+ binmode($fh);
+ my $tar = Archive::Tar::Streamed->new($fh);
+
+ my $tar_size_inarc = 0;
+ my $items = 0;
+
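+		# stream through the tar, counting entries and summing member
+		# sizes so we can sanity-check the part against configured limits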
+ while(my $entry = $tar->next) {
+ push @tar_files, $entry->name;
+ $items++;
+ $tar_size_inarc += $entry->size;
+
+ if ($tar_size_inarc > $Conf{MaxArchiveFileSize}) {
+ print ", part $tarfilename is too big $tar_size_inarc > $Conf{MaxArchiveFileSize}\n";
+ return 0;
+ }
+
+ }
+
+ close($fh);
+
+ print ", $items items";
+
+ if ($tar_size_inarc == 0 && $items == 0) {
+ print ", EMPTY tar\n";
+
+ my $backup_id = get_backup_id($host, $share, $num);
+ backup_inc_deleted( $backup_id );
+
+ $dbh->commit;
+
+ return 1;
+ }
+
+ my $tar_size = get_gzip_size( $path );
+
+		# the real tar is bigger than the sum of its members because of headers and padding
+ if ($tar_size_inarc > $tar_size) {
+ print ", size of files in tar ($tar_size_inarc) bigger than whole tar ($tar_size)!\n";
+ return 0;
+ }
+
+ #
+ # check if md5 exists, and if not, create one
+ #
+
+ my $md5_path = $path;
+ $md5_path =~ s/\.tar\.gz$/.md5/ || die "can't create md5 filename from $md5_path";
+ if (! -e $md5_path || -z $md5_path) {
+ print ", creating md5";
+			system( $bin->{md5sum} . " $path > $md5_path") == 0 or die "can't create md5 $path: $?";
+ } else {
+ ## FIXME check if existing md5 is valid
+ }
+
+ my $md5 = read_file( $md5_path ) || die "can't read md5sum file $md5_path: $!";
+ $md5 =~ s#\s.*$##;
+
+ # extract part number from filename
+ my $part_nr = 1;
+ $part_nr = $1 if ($tarfilename =~ m#/(\d+)\.tar\.gz#);
+
+ #
+ # finally, check if backup_parts table in database is valid
+ #
+
+ check_part($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items);
}
- @tar_files = sort @tar_files;
- print STDERR " ",($#tar_files + 1), " files" if ($debug);
- print STDERR ", database" if ($debug);
+	# short-circuit: no point diffing file lists when parts already differ
+	return $same unless($same);
+
+ @tar_files = sort @tar_files;
+ print "\n\t",($#tar_files + 1), " tar files";
my $sth = $dbh->prepare(qq{
SELECT path,type
push @db_files, $path;
}
- print STDERR " ",($#db_files + 1), " files, diff" if ($debug);
+ print " ",($#db_files + 1), " database files, diff";
@db_files = sort @db_files;
- my $same = 1;
if ($#tar_files != $#db_files) {
$same = 0;
- print STDERR " NUMBER" if ($debug);
+ print " NUMBER";
} else {
my $diff = Algorithm::Diff->new(\@tar_files, \@db_files);
while ( $diff->Next() ) {
}
}
- print " ",($same ? 'ok' : 'DIFFERENT');
- print STDERR " }} " if ($debug);
+ print " ",($same ? 'ok' : 'DIFFERENT'),
+ ", dur: ",fmt_time(time() - $t), "\n";
return $same;
}
hosts.name as host,
shares.name as share,
backups.num as num,
+ backups.date,
inc_size,
- parts
+ parts,
+ count(backup_parts.backup_id) as backup_parts
from backups
join shares on backups.hostid = shares.hostid
and shares.id = backups.shareid
join hosts on shares.hostid = hosts.id
-where not inc_deleted
+ full outer join backup_parts on backups.id = backup_parts.backup_id
+where not inc_deleted and backups.size > 0
+group by backups.id, hosts.name, shares.name, backups.num, backups.date, inc_size, parts, backup_parts.backup_id
order by backups.date
} );
-my $sth_inc_size = $dbh->prepare(qq{ update backups set inc_size = ?, parts = ? where id = ? });
-my $sth_inc_deleted = $dbh->prepare(qq{ update backups set inc_deleted = ? where id = ? });
-
-%BackupPC::SearchLib::Conf = %Conf;
-
$sth->execute();
my $num_backups = $sth->rows;
my $curr_backup = 1;
-while (my $row = $sth->fetchrow_hashref) {
- my $tar_file = BackupPC::SearchLib::getGzipName($row->{'host'}, $row->{'share'}, $row->{'num'});
-
- # this will return -1 if file doesn't exist
- my $size = BackupPC::SearchLib::get_tgz_size_by_name($tar_file);
-
- print curr_time, " $curr_backup/$num_backups ", $row->{'host'}, ":", $row->{'share'}, " #", $row->{'num'}, " -> $tar_file";
- $curr_backup++;
-
- my $t = time();
-
- # re-create archive?
- if ($row->{'inc_size'} == -1 || $size == -1 ||
- $row->{'inc_size'} != $size ||
- $check && ! tar_check($row->{'host'}, $row->{'share'}, $row->{'num'}, "$tar_dir/$tar_file")
- ) {
- my $cmd = qq{rm -Rf $tar_dir/$tar_file && $tarIncCreate -h "$row->{'host'}" -s "$row->{'share'}" -n $row->{'num'} | $bin->{'gzip'} $Conf{GzipLevel} > ${tar_dir}/${tar_file}.tmp};
- print STDERR "## $cmd\n" if ($debug);
-
- system($cmd) == 0 or die "failed: $?";
+if ($opt{h}) {
+ warn "making increments just for host $opt{h}\n";
+}
- rename("${tar_dir}/${tar_file}.tmp", "$tar_dir/$tar_file") or die "can't rename $tar_dir/$tar_file: $!";
+while (my $row = $sth->fetchrow_hashref) {
- $size = (stat( "$tar_dir/$tar_file" ))[7];
+ if ($opt{h} && $row->{host} ne $opt{h}) {
+ warn "skipped $row->{host}\n" if ($debug);
+ next;
}
- if ($size > $MIN_TAR_SIZE) {
+ $curr_backup++;
- my $max_size = $Conf{'MaxArchiveSize'} || die "problem with MaxArchieSize parametar";
- $max_size *= 1024; # convert to bytes
+ my $tar_file = BackupPC::SearchLib::getGzipName($row->{'host'}, $row->{'share'}, $row->{'num'});
- # maximum file size on ISO image is 4Gb
- # this will require Linux kernel 2.6.8 or newer
- my $max_iso_file_size = 2^32 - 2048;
- if ( $max_size > $max_iso_file_size ) {
- $max_size = $max_iso_file_size;
- }
+ # this will return -1 if file doesn't exist
+ my $size = BackupPC::SearchLib::get_tgz_size_by_name($tar_file);
- my $parts = int( ($size + $max_size - 1) / $max_size );
+ print "# size: $size backup.size: ", $row->{inc_size},"\n" if ($opt{d});
- if (-d "$tar_dir/$tar_file" && $parts != $row->{'parts'}) {
- print " join";
- tar_join("$tar_dir/$tar_file");
+ if ( $row->{'inc_size'} != -1 && $size != -1 && $row->{'inc_size'} >= $size && $row->{parts} == $row->{backup_parts}) {
+ if ($check) {
+ tar_check($row->{'host'}, $row->{'share'}, $row->{'num'}, $tar_file) && next;
+ } else {
+ next;
}
+ }
- if ($size > $max_size && ! -d "$tar_dir/$tar_file") {
- print " split/$parts";
- my $in = my $out = "$tar_dir/$tar_file";
- $out .= '.tmp';
- rename $in, $out || die "can't rename $in: $!";
- mkdir $in || die "can't mkdir $in: $!";
-
- my $suffix_len = length("$parts");
- system("$bin->{'split'} -d -b $max_size -a $suffix_len $out $in/part") == 0 or die "can't split $out: $?";
- unlink $out || die "can't unlink $out: $!";
- }
+ print curr_time, " creating $curr_backup/$num_backups ", $row->{host}, ":", $row->{share}, " #", $row->{num},
+ " ", strftime('%Y-%m-%d', localtime($row->{date})), " -> $tar_file";
- $sth_inc_size->execute($size, $parts, $row->{'backup_id'});
- $sth_inc_deleted->execute(0, $row->{'backup_id'});
+ my $t = time();
- printf(" %1.2f MB", ($size / 1024 / 1024));
+	# (re-)create the archive by calling BackupPC_tarIncCreate
+ my $cmd = qq[ $tarIncCreate -h "$row->{host}" -s "$row->{share}" -n $row->{num} -f ];
+ print STDERR "## $cmd\n" if ($debug);
- } else {
- $sth_inc_deleted->execute(1, $row->{'backup_id'});
- unlink "$tar_dir/$tar_file" || die "can't delete $tar_dir/$tar_file: $!\n";
- print " EMPTY";
+ if (system($cmd) != 0) {
+ print STDERR " FAILED, marking this backup deleted";
+ backup_inc_deleted( $row->{backup_id} );
}
+
print ", dur: ",fmt_time(time() - $t), "\n";
$dbh->commit;