X-Git-Url: http://git.rot13.org/?p=BackupPC.git;a=blobdiff_plain;f=bin%2FBackupPC_ASA_PostArchive_Update;h=afccb6215eca9e0213fa2ddd58cb6ec34eb7ed39;hp=442c027221e86c56efdd79e5bbed94f1ec69297d;hb=5b0e5232f2207b308b77e9e1c9d01335563a6e31;hpb=485515f3a375c43115b1b04ab06b4048b6a64aaf diff --git a/bin/BackupPC_ASA_PostArchive_Update b/bin/BackupPC_ASA_PostArchive_Update index 442c027..afccb62 100755 --- a/bin/BackupPC_ASA_PostArchive_Update +++ b/bin/BackupPC_ASA_PostArchive_Update @@ -7,84 +7,58 @@ use DBI; use BackupPC::Lib; use BackupPC::View; use BackupPC::Attrib qw/:all/; -use Data::Dumper; +use Data::Dump qw(dump); use Time::HiRes qw/time/; use POSIX qw/strftime/; use Cwd qw/abs_path/; -use File::Which; use Archive::Tar::Streamed; use Algorithm::Diff; use Getopt::Std; use File::Slurp; -use File::Pid; =head1 NAME -BackupPC_incPartsUpdate +BackupPC_ASA_PostArchive_Update =head1 DESCRIPTION -Create C<.tar.gz> increments on disk calling C. - -Following options are supported (but all are optional): - -=over 4 - -=item -h hostname - -Update parts for just single C - -=item -c - -Force check for tar archives which exist on disk - -=item -d - -Turn debugging output - -=back + # /etc/BackupPC/pc/dvd_tar.pl =cut -my %opt; -getopts("cdh:", \%opt ); +# FIXME +my $debug = $ENV{DEBUG} || 1; +my $check = $ENV{CHECK} || 1; -my $debug = $opt{d}; -my $check = $opt{c} && print STDERR "NOTICE: tar archive check forced\n"; -my $pid_path = abs_path($0); -$pid_path =~ s/\W+/_/g; +my $bpc = BackupPC::Lib->new || die "can't create BackupPC::Lib"; +my %Conf = $bpc->Conf(); +warn "## ARGV=",dump @ARGV; -my $pidfile = new File::Pid({ - file => "/tmp/$pid_path", -}); -if (my $pid = $pidfile->running ) { - die "$0 already running: $pid\n"; -} elsif ($pidfile->pid ne $$) { - $pidfile->remove; - $pidfile = new File::Pid; +my $args; +my $name; +foreach ( @ARGV ) { + my $v = $_; + if ( m/(\w+)=(.+)/ ) { + $name = $1; + $v = $2; + } + if ( $name =~ m/List/ ) { + push @{ $args->{$name} }, $v; + } else { + $args->{$name} = $v; + } } -print STDERR "$0 using pid ",$pidfile->pid," file ",$pidfile->file,"\n"; -$pidfile->write; +warn "args = ",dump($args); -my $bpc = BackupPC::Lib->new || die "can't create BackupPC::Lib"; -my %Conf = $bpc->Conf(); use BackupPC::Search; %BackupPC::Search::Conf = %Conf; my $path = abs_path($0); -$path =~ s#/[^/]+$#/#; -my $tarIncCreate = $path .= 'BackupPC_tarIncCreate'; - -die "can't find $tarIncCreate: $!\n" unless (-x $tarIncCreate); - -my $bin; -foreach my $c (qw/gzip md5sum/) { - $bin->{$c} = which($c) || die "$0 needs $c, install it\n"; -} +$path =~ s{/[^/]+$}{/}; # FIXME remove? 
$|=1; @@ -92,16 +66,19 @@ my $start_t = time(); my $t_fmt = '%Y-%m-%d %H:%M:%S'; +warn "## Conf = ",dump( \%Conf ); + my $dsn = $Conf{SearchDSN} || die "Need SearchDSN in config.pl\n"; my $user = $Conf{SearchUser} || ''; -my $dbh = DBI->connect($dsn, $user, "", { RaiseError => 1, AutoCommit => 0 }); +my $dbh = DBI->connect($Conf{SearchDSN}, $Conf{SearchUser}, "", { RaiseError => 1, AutoCommit => 0 }); -my $tar_dir = $Conf{GzipTempDir}; +#---- subs ---- -die "problem with $tar_dir, check GzipTempDir in configuration\n" unless (-d $tar_dir && -w $tar_dir); -#---- subs ---- +sub curr_time { + return strftime($t_fmt,localtime()); +} sub fmt_time { my $t = shift || return; @@ -112,16 +89,12 @@ sub fmt_time { return $out; } -sub curr_time { - return strftime($t_fmt,localtime()); -} - my $hsn_cache; -sub get_backup_id($$$) { - my ($host, $share, $num) = @_; +sub get_backup_id($$) { + my ($host, $num) = @_; - my $key = "$host $share $num"; + my $key = "$host $num"; return $hsn_cache->{$key} if ($hsn_cache->{$key}); my $sth = $dbh->prepare(qq{ @@ -130,14 +103,14 @@ sub get_backup_id($$$) { FROM backups INNER JOIN shares ON backups.shareID=shares.ID INNER JOIN hosts ON backups.hostID = hosts.ID - WHERE hosts.name = ? and shares.name = ? and backups.num = ? + WHERE hosts.name = ? and backups.num = ? }); - $sth->execute($host, $share, $num); + $sth->execute($host, $num); my ($id) = $sth->fetchrow_array; - $hsn_cache->{"$host $share $num"} = $id; + $hsn_cache->{"$host $num"} = $id; - print STDERR "# $host $share $num == $id\n" if ($opt{d}); + print STDERR "# $host $num == $id\n" if $debug; return $id; } @@ -152,11 +125,47 @@ sub backup_inc_deleted($) { $sth_inc_deleted->execute($backup_id); } -sub tar_check($$$$) { - my ($host,$share,$num,$filename) = @_; +sub system_ok { + warn "## system_ok @_\n"; + system(@_) == 0 || die "system @_:$!"; +} + +my $sth_inc_size = $dbh->prepare(qq{ + update backups set + inc_size = ?, + parts = ?, + inc_deleted = false + where id = ? +}); + +sub check_archive { + my ($host,$num) = @_; my $t = time(); - print curr_time, " check $host:$share#$num -> $filename"; + + my @tar_parts = + sort map { s/^\Q$Conf{ArchiveDest}\E\/*//; $_ } + glob "$Conf{ArchiveDest}/$host.$num.*" + ; + + return unless @tar_parts; + + print curr_time, " check $host $num"; + + my $md5_path = "$Conf{ArchiveDest}/$host.$num.md5"; + unlink $md5_path if -s $md5_path == 0; # fix empty + + if ( ! -e $md5_path ) { + system_ok "cd $Conf{ArchiveDest} && /usr/bin/md5sum $host.$num.* > $md5_path"; + } else { + system_ok "cd $Conf{ArchiveDest} && /usr/bin/md5sum -c $md5_path" if $check; + } + + my $md5sum; + foreach ( split(/\n/, read_file "$Conf{ArchiveDest}/$host.$num.md5" ) ) { + my ( $md5, $path ) = split(/\s+/,$_); + $md5sum->{$path} = $md5; + } # depending on expected returned value this is used like: # my $uncompress_size = get_gzip_size('/full/path/to.gz'); @@ -164,7 +173,12 @@ sub tar_check($$$$) { sub get_gzip_size($) { my $filename = shift; die "file $filename problem: $!" unless (-r $filename); - open(my $gzip, $bin->{gzip}." -l $filename |") || die "can't gzip -l $filename: $!"; + + if ( $filename !~ m/\.gz$/ ) { + return -s $filename; + } + + open(my $gzip, $Conf{GzipPath}." 
-l $filename |") || die "can't gzip -l $filename: $!"; my $line = <$gzip>; chomp($line); $line = <$gzip> if ($line =~ /^\s+compressed/); @@ -183,8 +197,8 @@ sub tar_check($$$$) { } sub check_part { - my ($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items) = @_; - my $backup_id = get_backup_id($host, $share, $num); + my ($host, $num, $part_nr, $tar_size, $size, $md5, $items) = @_; + my $backup_id = get_backup_id($host, $num); my $sth_md5 = $dbh->prepare(qq{ select id, tar_size, size, md5, items @@ -220,31 +234,25 @@ sub tar_check($$$$) { $dbh->commit; } - my @tar_parts; - - if (-d "$tar_dir/$filename") { - print ", multi-part"; - opendir(my $dir, "$tar_dir/$filename") || die "can't readdir $tar_dir/$filename: $!"; - @tar_parts = map { my $p = $_; $p =~ s#^#${filename}/#; $p } grep { !/^\./ && !/md5/ && -f "$tar_dir/$filename/$_" } readdir($dir); - closedir($dir); - } else { - push @tar_parts, "${filename}.tar.gz"; - } - - print " [parts: ",join(", ", @tar_parts),"]" if ($opt{d}); + print " [parts: ",join(", ", @tar_parts),"]" if $debug; - my $same = 1; my @tar_files; my $backup_part; - print " reading" if ($opt{d}); + print " reading" if $debug; + + my $part_nr = 0; + my $inc_size = 0; + + foreach my $filename (@tar_parts) { - foreach my $tarfilename (@tar_parts) { + next if $filename eq "$host.$num.md5"; - print "\n\t- $tarfilename"; + print "\n\t- $filename"; - my $path = "$tar_dir/$tarfilename"; + my $path = "$Conf{ArchiveDest}/$filename"; + $path =~ s{//+}{/}g; my $size = (stat( $path ))[7] || die "can't stat $path: $!"; @@ -255,6 +263,7 @@ sub tar_check($$$$) { print ", $size bytes"; +=for later open(my $fh, "gzip -cd $path |") or die "can't open $path: $!"; binmode($fh); @@ -269,7 +278,7 @@ sub tar_check($$$$) { $tar_size_inarc += $entry->size; if ($tar_size_inarc > $Conf{MaxArchiveFileSize}) { - print ", part $tarfilename is too big $tar_size_inarc > $Conf{MaxArchiveFileSize}\n"; + print ", part $filename is too big $tar_size_inarc > $Conf{MaxArchiveFileSize}\n"; return 0; } @@ -290,43 +299,31 @@ sub tar_check($$$$) { return 1; } - my $tar_size = get_gzip_size( $path ); +=cut - # real tar size is bigger because of padding - if ($tar_size_inarc > $tar_size) { - print ", size of files in tar ($tar_size_inarc) bigger than whole tar ($tar_size)!\n"; - return 0; - } + # FIXME + my $tar_size = get_gzip_size( $path ); # - # check if md5 exists, and if not, create one + # finally, check if backup_parts table in database is valid # - my $md5_path = $path; - $md5_path =~ s/\.tar\.gz$/.md5/ || die "can't create md5 filename from $md5_path"; - if (! -e $md5_path || -z $md5_path) { - print ", creating md5"; - system( $bin->{md5sum} . 
" $path > $md5_path") == 0 or die "can't create md5 $path: $!"; - } else { - ## FIXME check if existing md5 is valid - } - - my $md5 = read_file( $md5_path ) || die "can't read md5sum file $md5_path: $!"; - $md5 =~ s#\s.*$##; - - # extract part number from filename - my $part_nr = 1; - $part_nr = $1 if ($tarfilename =~ m#/(\d+)\.tar\.gz#); + my $md5 = $md5sum->{$filename} || die "no md5sum for $filename in ",dump($md5sum); + my $items = 1; + $part_nr++; - # - # finally, check if backup_parts table in database is valid - # + check_part($host, $num, $part_nr, $tar_size, $size, $md5, $items); - check_part($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items); + # round increment size to 2k block size + $inc_size += int(($size + 2048) / 2048); } - # short-cut and exit; - return $same unless($same); + $sth_inc_size->execute( + $inc_size, + $part_nr, + get_backup_id($host, $num), + ); + $dbh->commit; @tar_files = sort @tar_files; print "\n\t",($#tar_files + 1), " tar files"; @@ -336,9 +333,9 @@ sub tar_check($$$$) { FROM files JOIN shares on shares.id = shareid JOIN hosts on hosts.id = shares.hostid - WHERE hosts.name = ? and shares.name = ? and backupnum = ? + WHERE hosts.name = ? and backupnum = ? }); - $sth->execute($host, $share, $num); + $sth->execute($host, $num); my @db_files; while( my $row = $sth->fetchrow_hashref ) { @@ -352,6 +349,8 @@ sub tar_check($$$$) { @db_files = sort @db_files; + my $same = 1; + if ($#tar_files != $#db_files) { $same = 0; print " NUMBER"; @@ -374,6 +373,17 @@ sub tar_check($$$$) { #----- main +foreach ( 0 .. $#{ $args->{HostList} } ) { + + my $host = $args->{'HostList'}->[$_]; + my $num = $args->{'BackupList'}->[$_]; + + check_archive $host => $num; + +} + +exit; + my $sth = $dbh->prepare( qq{ select @@ -400,17 +410,8 @@ $sth->execute(); my $num_backups = $sth->rows; my $curr_backup = 1; -if ($opt{h}) { - warn "making increments just for host $opt{h}\n"; -} - while (my $row = $sth->fetchrow_hashref) { - if ($opt{h} && $row->{host} ne $opt{h}) { - warn "skipped $row->{host}\n" if ($debug); - next; - } - $curr_backup++; my $tar_file = BackupPC::Search::getGzipName($row->{'host'}, $row->{'share'}, $row->{'num'}); @@ -418,7 +419,7 @@ while (my $row = $sth->fetchrow_hashref) { # this will return -1 if file doesn't exist my $size = BackupPC::Search::get_tgz_size_by_name($tar_file); - print "# host: ".$row->{host}.", share: ".$row->{'share'}.", backup_num:".$row->{num}." size: $size backup.size: ", $row->{inc_size},"\n" if ($opt{d}); + print "# host: ".$row->{host}.", share: ".$row->{'share'}.", backup_num:".$row->{num}." size: $size backup.size: ", $row->{inc_size},"\n" if $debug; if ( $row->{'inc_size'} != -1 && $size != -1 && $row->{'inc_size'} >= $size && $row->{parts} == $row->{backup_parts}) { if ($check) { @@ -433,6 +434,7 @@ while (my $row = $sth->fetchrow_hashref) { my $t = time(); +=for later # re-create archive? my $cmd = qq[ $tarIncCreate -h "$row->{host}" -s "$row->{share}" -n $row->{num} -f ]; print STDERR "## $cmd\n" if ($debug); @@ -441,6 +443,7 @@ while (my $row = $sth->fetchrow_hashref) { print STDERR " FAILED, marking this backup deleted"; backup_inc_deleted( $row->{backup_id} ); } +=cut print ", dur: ",fmt_time(time() - $t), "\n";