X-Git-Url: http://git.rot13.org/?a=blobdiff_plain;f=bin%2FBackupPC_incPartsUpdate;h=7f7313cd4f2c3fae097851e258eb0438ccee8b7f;hb=c2bfdee61fbf96c5fd286b2fe810b138128b5877;hp=7e04e08127cc5ccc0e96dbdb37d8e8b26fcdfdd5;hpb=d9b531a0b6e791215ac3d2a0393d866bff8e3db2;p=BackupPC.git diff --git a/bin/BackupPC_incPartsUpdate b/bin/BackupPC_incPartsUpdate index 7e04e08..7f7313c 100755 --- a/bin/BackupPC_incPartsUpdate +++ b/bin/BackupPC_incPartsUpdate @@ -6,12 +6,40 @@ use lib "__INSTALLDIR__/lib"; use DBI; use BackupPC::Lib; use BackupPC::View; +use BackupPC::Attrib qw/:all/; use Data::Dumper; use Time::HiRes qw/time/; use POSIX qw/strftime/; -use BackupPC::SearchLib; use Cwd qw/abs_path/; use File::Which; +use Archive::Tar::Streamed; +use Algorithm::Diff; +use Getopt::Std; +use File::Slurp; +use File::Pid; + +my $pid_path = abs_path($0); +$pid_path =~ s/\W+/_/g; + +my $pidfile = new File::Pid({ + file => "/tmp/$pid_path", +}); + +if (my $pid = $pidfile->running ) { + die "$0 already running: $pid\n"; +} elsif ($pidfile->pid ne $$) { + $pidfile->remove; + $pidfile = new File::Pid; +} + +print STDERR "$0 using pid ",$pidfile->pid," file ",$pidfile->file,"\n"; +$pidfile->write; + +my $bpc = BackupPC::Lib->new || die "can't create BackupPC::Lib"; +my %Conf = $bpc->Conf(); + +use BackupPC::SearchLib; +%BackupPC::SearchLib::Conf = %Conf; my $path = abs_path($0); $path =~ s#/[^/]+$#/#; @@ -20,24 +48,22 @@ my $tarIncCreate = $path .= 'BackupPC_tarIncCreate'; die "can't find $tarIncCreate: $!\n" unless (-x $tarIncCreate); my $bin; -foreach my $c (qw/gzip split/) { +foreach my $c (qw/gzip md5sum/) { $bin->{$c} = which($c) || die "$0 needs $c, install it\n"; } +my %opt; +getopts("cd", \%opt ); + +my $debug = $opt{d}; +my $check = $opt{c} && print STDERR "NOTICE: tar archive check forced\n"; -my $debug = 0; $|=1; my $start_t = time(); my $t_fmt = '%Y-%m-%d %H:%M:%S'; -my $hosts; -my $bpc = BackupPC::Lib->new || die; -my %Conf = $bpc->Conf(); -my $TopDir = $bpc->TopDir(); -my $beenThere = 
{}; - my $dsn = $Conf{SearchDSN} || die "Need SearchDSN in config.pl\n"; my $user = $Conf{SearchUser} || ''; @@ -62,6 +88,259 @@ sub curr_time { return strftime($t_fmt,localtime()); } +my $hsn_cache; + +sub get_backup_id($$$) { + my ($host, $share, $num) = @_; + + my $key = "$host $share $num"; + return $hsn_cache->{$key} if ($hsn_cache->{$key}); + + my $sth = $dbh->prepare(qq{ + SELECT + backups.id + FROM backups + INNER JOIN shares ON backups.shareID=shares.ID + INNER JOIN hosts ON backups.hostID = hosts.ID + where hosts.name = ? and shares.name = ? and backups.num = ? + }); + $sth->execute($host, $share, $num); + my ($id) = $sth->fetchrow_array; + + $hsn_cache->{"$host $share $num"} = $id; + + print STDERR "# $host $share $num == $id\n" if ($opt{d}); + + return $id; +} + + +sub tar_check($$$$) { + my ($host,$share,$num,$filename) = @_; + + my $t = time(); + print curr_time, " check $host:$share#$num -> $filename"; + + # depending on expected returned value this is used like: + # my $uncompress_size = get_gzip_size('/full/path/to.gz'); + # my ($compress_size, $uncompress_size) = get_gzip_size('/path.gz'); + sub get_gzip_size($) { + my $filename = shift; + die "file $filename problem: $!" unless (-r $filename); + open(my $gzip, $bin->{gzip}." -l $filename |") || die "can't gzip -l $filename: $!"; + my $line = <$gzip>; + chomp($line); + $line = <$gzip> if ($line =~ /^\s+compressed/); + + my ($comp, $uncomp) = (0,0); + + if ($line =~ m/^\s+(\d+)\s+(\d+)\s+\d+\.\d+/) { + if (wantarray) { + return [ $1, $2 ]; + } else { + return $2; + } + } else { + die "can't find size in line: $line"; + } + } + + sub check_part { + my ($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items) = @_; + my $backup_id = get_backup_id($host, $share, $num); + my $sth_md5 = $dbh->prepare(qq{ + select + id, tar_size, size, md5, items + from backup_parts + where backup_id = ? and part_nr = ? 
+ });
+
+ $sth_md5->execute($backup_id, $part_nr);
+
+ if (my $row = $sth_md5->fetchrow_hashref) {
+ return if (
+ $row->{tar_size} >= $tar_size &&
+ $row->{size} == $size &&
+ $row->{md5} eq $md5 &&
+ $row->{items} == $items
+ );
+ print ", deleting invalid backup_parts $row->{id}";
+ $dbh->do(qq{ delete from backup_parts where id = $row->{id} });
+ }
+ print ", inserting new";
+ my $sth_insert = $dbh->prepare(qq{
+ insert into backup_parts (
+ backup_id,
+ part_nr,
+ tar_size,
+ size,
+ md5,
+ items
+ ) values (?,?,?,?,?,?)
+ });
+
+ $sth_insert->execute($backup_id, $part_nr, $tar_size, $size, $md5, $items);
+ $dbh->commit;
+ }
+
+ my @tar_parts;
+
+ if (-d "$tar_dir/$filename") {
+ print ", multi-part";
+ opendir(my $dir, "$tar_dir/$filename") || die "can't readdir $tar_dir/$filename: $!";
+ @tar_parts = map { my $p = $_; $p =~ s#^#$filename/#; $p } grep { !/^\./ && !/md5/ && -f "$tar_dir/$filename/$_" } readdir($dir);
+ closedir($dir);
+ } else {
+ push @tar_parts, "$filename.tar.gz";
+ }
+
+ print " [parts: ",join(", ", @tar_parts),"]" if ($opt{d});
+
+ my $same = 1;
+ my @tar_files;
+
+ my $backup_part;
+
+ print " reading" if ($opt{d});
+
+ foreach my $tarfilename (@tar_parts) {
+
+ print "\n\t- $tarfilename";
+
+ my $path = "$tar_dir/$tarfilename";
+
+ my $size = (stat( $path ))[7] || die "can't stat $path: $!";
+
+ if ($size > $Conf{MaxArchiveSize}) {
+ print ", part bigger than media $size > $Conf{MaxArchiveSize}\n";
+ return 0;
+ }
+
+ print ", $size bytes";
+
+
+ open(my $fh, "gzip -cd $path |") or die "can't open $path: $!";
+ binmode($fh);
+ my $tar = Archive::Tar::Streamed->new($fh);
+
+ my $tar_size_inarc = 0;
+ my $items = 0;
+
+ while(my $entry = $tar->next) {
+ push @tar_files, $entry->name;
+ $items++;
+ $tar_size_inarc += $entry->size;
+
+ if ($tar_size_inarc > $Conf{MaxArchiveFileSize}) {
+ print ", part $tarfilename is too big $tar_size_inarc > $Conf{MaxArchiveFileSize}\n";
+ return 0;
+ }
+
+ }
+
+ close($fh);
+
+ print ", $items items"; 
+ + if ($tar_size_inarc == 0 && $items == 0) { + print ", EMPTY tar\n"; + + my $backup_id = get_backup_id($host, $share, $num); + + my $sth_inc_deleted = $dbh->prepare(qq{ + update backups set + inc_deleted = true + where id = ? + }); + $sth_inc_deleted->execute($backup_id); + + $dbh->commit; + + return 1; + } + + my $tar_size = get_gzip_size( $path ); + + # real tar size is bigger because of padding + if ($tar_size_inarc > $tar_size) { + print ", size of files in tar ($tar_size_inarc) bigger than whole tar ($tar_size)!\n"; + return 0; + } + + # + # check if md5 exists, and if not, create one + # + + my $md5_path = $path; + $md5_path =~ s/\.tar\.gz$/.md5/ || die "can't create md5 filename from $md5_path"; + if (! -e $md5_path || -z $md5_path) { + print ", creating md5"; + system( $bin->{md5sum} . " $path > $md5_path") == 0 or die "can't create md5 $path: $!"; + } else { + ## FIXME check if existing md5 is valid + } + + my $md5 = read_file( $md5_path ) || die "can't read md5sum file $md5_path: $!"; + $md5 =~ s#\s.*$##; + + # extract part number from filename + my $part_nr = 1; + $part_nr = $1 if ($tarfilename =~ m#/(\d+)\.tar\.gz#); + + # + # finally, check if backup_parts table in database is valid + # + + check_part($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items); + } + + # short-cut and exit; + return $same unless($same); + + @tar_files = sort @tar_files; + print "\n\t",($#tar_files + 1), " tar files"; + + my $sth = $dbh->prepare(qq{ + SELECT path,type + FROM files + JOIN shares on shares.id = shareid + JOIN hosts on hosts.id = shares.hostid + WHERE hosts.name = ? and shares.name = ? and backupnum = ? 
+ }); + $sth->execute($host, $share, $num); + my @db_files; + while( my $row = $sth->fetchrow_hashref ) { + + my $path = $row->{'path'} || die "no path?"; + $path =~ s#^/#./#; + $path .= '/' if ($row->{'type'} == BPC_FTYPE_DIR); + push @db_files, $path; + } + + print " ",($#db_files + 1), " database files, diff"; + + @db_files = sort @db_files; + + if ($#tar_files != $#db_files) { + $same = 0; + print " NUMBER"; + } else { + my $diff = Algorithm::Diff->new(\@tar_files, \@db_files); + while ( $diff->Next() ) { + next if $diff->Same(); + $same = 0; + print "< $_\n" for $diff->Items(1); + print "> $_\n" for $diff->Items(2); + } + } + + print " ",($same ? 'ok' : 'DIFFERENT'), + ", dur: ",fmt_time(time() - $t), "\n"; + + return $same; +} + + #----- main my $sth = $dbh->prepare( qq{ @@ -83,73 +362,40 @@ order by backups.date } ); $sth->execute(); +my $num_backups = $sth->rows; +my $curr_backup = 1; -my $sth_inc_size = $dbh->prepare(qq{ update backups set inc_size = ?, parts = ? where id = ? }); -my $sth_inc_deleted = $dbh->prepare(qq{ update backups set inc_deleted = ? where id = ? }); +while (my $row = $sth->fetchrow_hashref) { -%BackupPC::SearchLib::Conf = %Conf; + $curr_backup++; -while (my $row = $sth->fetchrow_hashref) { my $tar_file = BackupPC::SearchLib::getGzipName($row->{'host'}, $row->{'share'}, $row->{'num'}); # this will return -1 if file doesn't exist my $size = BackupPC::SearchLib::get_tgz_size_by_name($tar_file); - print curr_time, " ", $row->{'host'}, ":", $row->{'share'}, " #", $row->{'num'}, " -> $tar_file"; - - my $t = time(); - - # re-create archive? 
- if ($row->{'inc_size'} == -1 || $size == -1 || $row->{'inc_size'} != $size) { - my $cmd = qq{rm -Rf $tar_dir/$tar_file && $tarIncCreate -h "$row->{'host'}" -s "$row->{'share'}" -n $row->{'num'} | $bin->{'gzip'} $Conf{GzipLevel} > ${tar_dir}/${tar_file}.tmp}; - print STDERR "## $cmd\n" if ($debug); - - system($cmd) == 0 or die "failed: $?"; - - rename "${tar_dir}/${$tar_file}.tmp", "$tar_dir/$tar_file" or die "can't rename $tar_dir/$tar_file: $!"; - - $size = (stat( "$tar_dir/$tar_file" ))[7]; - } + print "# size: $size backup.size: ", $row->{inc_size},"\n" if ($opt{d}); - if ($size > 45) { - - my $max_size = $Conf{'MaxArchiveSize'} || die "problem with MaxArchieSize parametar"; - $max_size *= 1024; # convert to bytes - - my $parts = int( ($size + $max_size - 1) / $max_size ); - - if (-d "$tar_dir/$tar_file" && $parts != $row->{'parts'}) { - print " join"; - - my $in = my $out = "$tar_dir/$tar_file"; - $out .= '.tmp'; - - # FIXME I should really order parts manually! - system("cat $in/part* > $out && rm -Rf $in && mv $out $in") == 0 or die "can't join $in: $?"; + if ( $row->{'inc_size'} != -1 && $size != -1 && $row->{'inc_size'} >= $size) { + if ($check) { + tar_check($row->{'host'}, $row->{'share'}, $row->{'num'}, $tar_file) && next; + } else { + next; } + } - if ($size > $max_size && ! 
-d "$tar_dir/$tar_file") { - print " split/$parts"; - my $in = my $out = "$tar_dir/$tar_file"; - $out .= '.tmp'; - rename $in, $out || die "can't rename $in: $!"; - mkdir $in || die "can't mkdir $in: $!"; - - my $suffix_len = length("$parts"); - system("$bin->{'split'} -d -b $max_size -a $suffix_len $out $in/part") == 0 or die "can't split $out: $?"; - unlink $out || die "can't unlink $out: $!"; - } + print curr_time, " creating $curr_backup/$num_backups ", $row->{'host'}, ":", $row->{'share'}, " #", $row->{'num'}, " -> $tar_file"; - $sth_inc_size->execute($size, $parts, $row->{'backup_id'}); - $sth_inc_deleted->execute(0, $row->{'backup_id'}); + my $t = time(); - printf(" %1.2f MB", ($size / 1024 / 1024)); + # re-create archive? + my $cmd = qq{ $tarIncCreate -h "$row->{'host'}" -s "$row->{'share'}" -n $row->{'num'} -f }; + print STDERR "## $cmd\n" if ($debug); - } else { - $sth_inc_deleted->execute(1, $row->{'backup_id'}); - unlink "$tar_dir/$tar_file" || die "can't delete $tar_dir/$tar_file: $!\n"; - print " EMPTY"; + if (system($cmd) != 0) { + print STDERR " FAILED"; } + print ", dur: ",fmt_time(time() - $t), "\n"; $dbh->commit;