X-Git-Url: http://git.rot13.org/?a=blobdiff_plain;f=bin%2FBackupPC_incPartsUpdate;h=7f7313cd4f2c3fae097851e258eb0438ccee8b7f;hb=c2bfdee61fbf96c5fd286b2fe810b138128b5877;hp=1ea28c0619519dc91d515a126eeda5a62e9a63f3;hpb=7d287a12ce0a6552b1b79dd512ee864bc570affd;p=BackupPC.git

diff --git a/bin/BackupPC_incPartsUpdate b/bin/BackupPC_incPartsUpdate
index 1ea28c0..7f7313c 100755
--- a/bin/BackupPC_incPartsUpdate
+++ b/bin/BackupPC_incPartsUpdate
@@ -16,6 +16,24 @@ use Archive::Tar::Streamed;
 use Algorithm::Diff;
 use Getopt::Std;
 use File::Slurp;
+use File::Pid;
+
+my $pid_path = abs_path($0);
+$pid_path =~ s/\W+/_/g;
+
+my $pidfile = new File::Pid({
+	file => "/tmp/$pid_path",
+});
+
+if (my $pid = $pidfile->running ) {
+	die "$0 already running: $pid\n";
+} elsif ($pidfile->pid ne $$) {
+	$pidfile->remove;
+	$pidfile = new File::Pid;
+}
+
+print STDERR "$0 using pid ",$pidfile->pid," file ",$pidfile->file,"\n";
+$pidfile->write;

 my $bpc = BackupPC::Lib->new || die "can't create BackupPC::Lib";
 my %Conf = $bpc->Conf();
@@ -100,6 +118,33 @@ sub get_backup_id($$$) {
 sub tar_check($$$$) {
 	my ($host,$share,$num,$filename) = @_;

+	my $t = time();
+	print curr_time, " check $host:$share#$num -> $filename";
+
+	# depending on expected returned value this is used like:
+	# my $uncompress_size = get_gzip_size('/full/path/to.gz');
+	# my ($compress_size, $uncompress_size) = get_gzip_size('/path.gz');
+	sub get_gzip_size($) {
+		my $filename = shift;
+		die "file $filename problem: $!" unless (-r $filename);
+		open(my $gzip, $bin->{gzip}." -l $filename |") || die "can't gzip -l $filename: $!";
+		my $line = <$gzip>;
+		chomp($line);
+		$line = <$gzip> if ($line =~ /^\s+compressed/);
+
+		my ($comp, $uncomp) = (0,0);
+
+		if ($line =~ m/^\s+(\d+)\s+(\d+)\s+\d+\.\d+/) {
+			if (wantarray) {
+				return ( $1, $2 );
+			} else {
+				return $2;
+			}
+		} else {
+			die "can't find size in line: $line";
+		}
+	}
+
 	sub check_part {
 		my ($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items) = @_;
 		my $backup_id = get_backup_id($host, $share, $num);
@@ -119,10 +164,10 @@ sub tar_check($$$$) {
 				$row->{md5} eq $md5 &&
 				$row->{items} == $items
 			);
-			print STDERR "# deleting invalid row $row->{id}\n" if ($opt{d});
+			print ", deleting invalid backup_parts $row->{id}";
 			$dbh->do(qq{ delete from backup_parts where id = $row->{id} });
 		}
-		print STDERR "# inserting new backup_part row\n";
+		print ", inserting new";
 		my $sth_insert = $dbh->prepare(qq{
 			insert into backup_parts (
 				backup_id,
@@ -138,16 +183,10 @@ sub tar_check($$$$) {
 		$dbh->commit;
 	}

-	if ($debug) {
-		print STDERR " {{ CHECK: ${host}:${share}#${num} and $filename";
-	} else {
-		print " check";
-	}
-
 	my @tar_parts;

 	if (-d "$tar_dir/$filename") {
-		print STDERR " multi-part" if ($opt{d});
+		print ", multi-part";
 		opendir(my $dir, "$tar_dir/$filename") || die "can't readdir $tar_dir/$filename: $!";
 		@tar_parts = map { my $p = $_; $p =~ s#^#${filename}/#; $p } grep { !/^\./ && !/md5/ && -f "$tar_dir/$filename/$_" } readdir($dir);
 		closedir($dir);
@@ -162,51 +201,96 @@ sub tar_check($$$$) {

 	my $backup_part;

-	print " reading";
+	print " reading" if ($opt{d});

 	foreach my $tarfilename (@tar_parts) {

-		print STDERR " $tarfilename" if ($debug);
+		print "\n\t- $tarfilename";

 		my $path = "$tar_dir/$tarfilename";

-		my $md5_path = $path;
-		$md5_path =~ s/\.tar\.gz$/.md5/ || die "can't create md5 filename from $md5_path";
-		if (! -e $md5_path || -z $md5_path) {
-			print ", creating md5";
-			system( $bin->{md5sum} . " $path > $md5_path") == 0 or die "can't create md5 $path: $!";
-		}
-		my $md5 = read_file( $md5_path ) || die "can't read md5sum file $md5_path: $!";
-		$md5 =~ s#\s.*$##;
+		my $size = (stat( $path ))[7] || die "can't stat $path: $!";

-		my $part_nr = 1;
-		$part_nr = $1 if ($tarfilename =~ m#/(\d+)\.tar\.gz#);
+		if ($size > $Conf{MaxArchiveSize}) {
+			print ", part bigger than media $size > $Conf{MaxArchiveSize}\n";
+			return 0;
+		}
+
+		print ", $size bytes";

-		my $size = (stat( "$tar_dir/$tarfilename" ))[7] || die "can't stat $tar_dir/$tarfilename";
-		open(my $fh, "gzip -cd $tar_dir/$tarfilename |") or die "can't open $tar_dir/$tarfilename: $!";
+		open(my $fh, "gzip -cd $path |") or die "can't open $path: $!";
 		binmode($fh);
 		my $tar = Archive::Tar::Streamed->new($fh);

-		my $tar_size = 0;
+		my $tar_size_inarc = 0;
 		my $items = 0;

 		while(my $entry = $tar->next) {
 			push @tar_files, $entry->name;
 			$items++;
-			$tar_size += $entry->size;
+			$tar_size_inarc += $entry->size;
+
+			if ($tar_size_inarc > $Conf{MaxArchiveFileSize}) {
+				print ", part $tarfilename is too big $tar_size_inarc > $Conf{MaxArchiveFileSize}\n";
+				return 0;
+			}
+
 		}

-			if ($tar_size > $Conf{MaxArchiveFileSize}) {
-				print STDERR " part too big $tar_size > $Conf{MaxArchiveFileSize} }}" if ($debug);
-				$same = 0;
-				last;
-			} elsif ($size > $Conf{MaxArchiveSize}) {
-				print STDERR " part bigger than media $size > $Conf{MaxArchiveSize} }}" if ($debug);
-				$same = 0;
-				last;
+		close($fh);
+
+		print ", $items items";
+
+		if ($tar_size_inarc == 0 && $items == 0) {
+			print ", EMPTY tar\n";
+
+			my $backup_id = get_backup_id($host, $share, $num);
+
+			my $sth_inc_deleted = $dbh->prepare(qq{
+				update backups set
+					inc_deleted = true
+				where id = ?
+			});
+			$sth_inc_deleted->execute($backup_id);
+
+			$dbh->commit;
+
+			return 1;
+		}
+
+		my $tar_size = get_gzip_size( $path );
+
+		# real tar size is bigger because of padding
+		if ($tar_size_inarc > $tar_size) {
+			print ", size of files in tar ($tar_size_inarc) bigger than whole tar ($tar_size)!\n";
+			return 0;
 		}
+
+		#
+		# check if md5 exists, and if not, create one
+		#
+
+		my $md5_path = $path;
+		$md5_path =~ s/\.tar\.gz$/.md5/ || die "can't create md5 filename from $md5_path";
+		if (! -e $md5_path || -z $md5_path) {
+			print ", creating md5";
+			system( $bin->{md5sum} . " $path > $md5_path") == 0 or die "can't create md5 $path: $!";
+		} else {
+			## FIXME check if existing md5 is valid
+		}
+
+		my $md5 = read_file( $md5_path ) || die "can't read md5sum file $md5_path: $!";
+		$md5 =~ s#\s.*$##;
+
+		# extract part number from filename
+		my $part_nr = 1;
+		$part_nr = $1 if ($tarfilename =~ m#/(\d+)\.tar\.gz#);
+
+		#
+		# finally, check if backup_parts table in database is valid
+		#
+
 		check_part($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items);
 	}
@@ -214,9 +298,7 @@ sub tar_check($$$$) {
 	return $same unless($same);

 	@tar_files = sort @tar_files;
-	print STDERR " ",($#tar_files + 1), " files" if ($debug);
-
-	print STDERR ", database" if ($debug);
+	print "\n\t",($#tar_files + 1), " tar files";

 	my $sth = $dbh->prepare(qq{
 		SELECT path,type
@@ -235,13 +317,13 @@ sub tar_check($$$$) {
 		push @db_files, $path;
 	}

-	print STDERR " ",($#db_files + 1), " files, diff" if ($debug);
+	print " ",($#db_files + 1), " database files, diff";

 	@db_files = sort @db_files;

 	if ($#tar_files != $#db_files) {
 		$same = 0;
-		print STDERR " NUMBER" if ($debug);
+		print " NUMBER";
 	} else {
 		my $diff = Algorithm::Diff->new(\@tar_files, \@db_files);
 		while ( $diff->Next() ) {
@@ -252,8 +334,8 @@ sub tar_check($$$$) {
 		}
 	}

-	print " ",($same ? 'ok' : 'DIFFERENT');
-	print STDERR " }} " if ($debug);
+	print " ",($same ? 'ok' : 'DIFFERENT'),
+		", dur: ",fmt_time(time() - $t), "\n";

 	return $same;
 }
@@ -284,6 +366,9 @@ my $num_backups = $sth->rows;
 my $curr_backup = 1;

 while (my $row = $sth->fetchrow_hashref) {
+
+	$curr_backup++;
+
 	my $tar_file = BackupPC::SearchLib::getGzipName($row->{'host'}, $row->{'share'}, $row->{'num'});

 	# this will return -1 if file doesn't exist
@@ -299,8 +384,7 @@ while (my $row = $sth->fetchrow_hashref) {
 		}
 	}

-	print curr_time, " $curr_backup/$num_backups ", $row->{'host'}, ":", $row->{'share'}, " #", $row->{'num'}, " -> $tar_file";
-	$curr_backup++;
+	print curr_time, " creating $curr_backup/$num_backups ", $row->{'host'}, ":", $row->{'share'}, " #", $row->{'num'}, " -> $tar_file";

 	my $t = time();
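
A note on the get_gzip_size() helper added in the second hunk: it parses the output of "gzip -l", skips the header row, and is used to return the uncompressed size in scalar context or a (compressed, uncompressed) pair in list context. The following standalone sketch shows the same parsing outside the script; the helper name gzip_sizes(), the reliance on gzip from PATH, and the sample file path are illustrative assumptions, not part of the patch.

#!/usr/bin/perl
# Sketch only: parse "gzip -l" output for compressed/uncompressed sizes.
use strict;
use warnings;

sub gzip_sizes {
	my $filename = shift;
	die "can't read $filename: $!" unless -r $filename;

	# gzip -l prints a header row, then one data row:
	#          compressed        uncompressed  ratio uncompressed_name
	open(my $gzip, '-|', 'gzip', '-l', $filename)
		or die "can't run gzip -l $filename: $!";
	my $line = <$gzip>;
	defined $line or die "no output from gzip -l $filename";
	chomp $line;
	$line = <$gzip> if $line =~ /^\s*compressed/;	# skip the header row
	close $gzip;

	if ($line =~ m/^\s*(\d+)\s+(\d+)\s+\d+\.\d+/) {
		# list context: (compressed, uncompressed); scalar context: uncompressed
		return wantarray ? ($1, $2) : $2;
	}
	die "can't find size in line: $line";
}

my ($comp, $uncomp) = gzip_sizes('/tmp/example.tar.gz');
print "compressed: $comp, uncompressed: $uncomp bytes\n";

The three-argument, list form of open runs gzip without going through a shell, so the filename needs no quoting; the patch itself uses the two-argument pipe form with the configured $bin->{gzip} path.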