use BackupPC::Lib;
use BackupPC::View;
use BackupPC::Attrib qw/:all/;
-#use Data::Dumper;
use Data::Dump qw(dump);
use Time::HiRes qw/time/;
use POSIX qw/strftime/;
use Cwd qw/abs_path/;
-#use Archive::Tar::Streamed;
-#use Algorithm::Diff;
+use Archive::Tar::Streamed;
+use Algorithm::Diff;
use Getopt::Std;
use File::Slurp;
=cut
+# FIXME
+my $debug = $ENV{DEBUG} || 1;
+my $check = $ENV{CHECK} || 1;
+
+
+my $bpc = BackupPC::Lib->new || die "can't create BackupPC::Lib";
+my %Conf = $bpc->Conf();
warn "## ARGV=",dump @ARGV;
+
my $args;
my $name;
foreach ( @ARGV ) {
warn "args = ",dump($args);
-# FIXME
-my $debug = $ENV{DEBUG} || 1;
-my $check = 0;
-
-
-my $bpc = BackupPC::Lib->new || die "can't create BackupPC::Lib";
-my %Conf = $bpc->Conf();
use BackupPC::Search;
%BackupPC::Search::Conf = %Conf;
my $t_fmt = '%Y-%m-%d %H:%M:%S';
-my $dsn = $Conf{SearchDSN} || die "Need SearchDSN in config.pl\n";
-my $user = $Conf{SearchUser} || '';
+#warn "## Conf = ",dump( \%Conf );
my $dbh = DBI->connect($Conf{SearchDSN}, $Conf{SearchUser}, "", { RaiseError => 1, AutoCommit => 0 });
return $out;
}
-=for later
-
my $hsn_cache;
-sub get_backup_id($$$) {
- my ($host, $share, $num) = @_;
+sub get_backup_id($$) {
+ my ($host, $num) = @_;
- my $key = "$host $share $num";
+ my $key = "$host $num";
return $hsn_cache->{$key} if ($hsn_cache->{$key});
my $sth = $dbh->prepare(qq{
FROM backups
INNER JOIN shares ON backups.shareID=shares.ID
INNER JOIN hosts ON backups.hostID = hosts.ID
- WHERE hosts.name = ? and shares.name = ? and backups.num = ?
+ WHERE hosts.name = ? and backups.num = ?
});
- $sth->execute($host, $share, $num);
+ $sth->execute($host, $num);
my ($id) = $sth->fetchrow_array;
- $hsn_cache->{"$host $share $num"} = $id;
+ $hsn_cache->{"$host $num"} = $id;
- print STDERR "# $host $share $num == $id\n" if ($opt{d});
+ print STDERR "# $host $num == $id\n" if $debug;
return $id;
}
$sth_inc_deleted->execute($backup_id);
}
-sub tar_check($$$$) {
- my ($host,$share,$num,$filename) = @_;
+sub system_ok {
+ warn "## system_ok @_\n";
+ system(@_) == 0 || die "system @_:$!";
+}
+
+my $sth_inc_size = $dbh->prepare(qq{
+ update backups set
+ inc_size = ?,
+ parts = ?,
+ inc_deleted = false
+ where id = ?
+});
+
+sub check_archive {
+ my ($host,$num) = @_;
my $t = time();
- print curr_time, " check $host:$share#$num -> $filename";
+
+ my @tar_parts =
+ sort map { s/^\Q$Conf{ArchiveDest}\E\/*//; $_ }
+ glob "$Conf{ArchiveDest}/$host.$num.*"
+ ;
+
+ return unless @tar_parts;
+
+ print curr_time, " check $host $num";
+
+ my $md5_path = "$Conf{ArchiveDest}/$host.$num.md5";
+ unlink $md5_path if -s $md5_path == 0; # fix empty
+
+ if ( ! -e $md5_path ) {
+ system_ok "cd $Conf{ArchiveDest} && /usr/bin/md5sum $host.$num.* > $md5_path";
+ } else {
+ system_ok "cd $Conf{ArchiveDest} && /usr/bin/md5sum -c $md5_path" if $check;
+ }
+
+ my $md5sum;
+ foreach ( split(/\n/, read_file "$Conf{ArchiveDest}/$host.$num.md5" ) ) {
+ my ( $md5, $path ) = split(/\s+/,$_);
+ $md5sum->{$path} = $md5;
+ }
# depending on expected returned value this is used like:
# my $uncompress_size = get_gzip_size('/full/path/to.gz');
sub get_gzip_size($) {
my $filename = shift;
die "file $filename problem: $!" unless (-r $filename);
- open(my $gzip, $bin->{gzip}." -l $filename |") || die "can't gzip -l $filename: $!";
+
+ if ( $filename !~ m/\.gz$/ ) {
+ return -s $filename;
+ }
+
+ open(my $gzip, $Conf{GzipPath}." -l $filename |") || die "can't gzip -l $filename: $!";
+ local $/ = undef;
my $line = <$gzip>;
- chomp($line);
- $line = <$gzip> if ($line =~ /^\s+compressed/);
+ close($gzip);
my ($comp, $uncomp) = (0,0);
- if ($line =~ m/^\s+(\d+)\s+(\d+)\s+\d+\.\d+/) {
+ if ($line =~ m/\s+(\d+)\s+(\d+)\s+\d+\.\d+/s) {
if (wantarray) {
return [ $1, $2 ];
} else {
return $2;
}
} else {
- die "can't find size in line: $line";
+ warn "ERROR can't parse: $line";
+ return -s $filename;
}
}
sub check_part {
- my ($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items) = @_;
- my $backup_id = get_backup_id($host, $share, $num);
+ my ($host, $num, $part_nr, $tar_size, $size, $md5, $items) = @_;
+ my $backup_id = get_backup_id($host, $num);
my $sth_md5 = $dbh->prepare(qq{
select
id, tar_size, size, md5, items
$dbh->commit;
}
- my @tar_parts;
-
- if (-d "$tar_dir/$filename") {
- print ", multi-part";
- opendir(my $dir, "$tar_dir/$filename") || die "can't readdir $tar_dir/$filename: $!";
- @tar_parts = map { my $p = $_; $p =~ s#^#$filename/#; $p } grep { !/^\./ && !/md5/ && -f "$tar_dir/$filename/$_" } readdir($dir);
- closedir($dir);
- } else {
- push @tar_parts, "$filename.tar.gz";
- }
-
- print " [parts: ",join(", ", @tar_parts),"]" if ($opt{d});
+ print " [parts: ",join(", ", @tar_parts),"]" if $debug;
- my $same = 1;
my @tar_files;
my $backup_part;
- print " reading" if ($opt{d});
+ print " reading" if $debug;
+
+ my $part_nr = 0;
+ my $inc_size = 0;
- foreach my $tarfilename (@tar_parts) {
+ foreach my $filename (@tar_parts) {
- print "\n\t- $tarfilename";
+ next if $filename eq "$host.$num.md5";
- my $path = "$tar_dir/$tarfilename";
+ print "\n\t- $filename";
+
+ my $path = "$Conf{ArchiveDest}/$filename";
+ $path =~ s{//+}{/}g;
my $size = (stat( $path ))[7] || die "can't stat $path: $!";
print ", $size bytes";
+=for later
open(my $fh, "gzip -cd $path |") or die "can't open $path: $!";
binmode($fh);
$tar_size_inarc += $entry->size;
if ($tar_size_inarc > $Conf{MaxArchiveFileSize}) {
- print ", part $tarfilename is too big $tar_size_inarc > $Conf{MaxArchiveFileSize}\n";
+ print ", part $filename is too big $tar_size_inarc > $Conf{MaxArchiveFileSize}\n";
return 0;
}
return 1;
}
- my $tar_size = get_gzip_size( $path );
+=cut
- # real tar size is bigger because of padding
- if ($tar_size_inarc > $tar_size) {
- print ", size of files in tar ($tar_size_inarc) bigger than whole tar ($tar_size)!\n";
- return 0;
- }
+ # FIXME
+ my $tar_size = get_gzip_size( $path );
#
- # check if md5 exists, and if not, create one
+ # finally, check if backup_parts table in database is valid
#
- my $md5_path = $path;
- $md5_path =~ s/\.tar\.gz$/.md5/ || die "can't create md5 filename from $md5_path";
- if (! -e $md5_path || -z $md5_path) {
- print ", creating md5";
- system( $bin->{md5sum} . " $path > $md5_path") == 0 or die "can't create md5 $path: $!";
- } else {
- ## FIXME check if existing md5 is valid
- }
-
- my $md5 = read_file( $md5_path ) || die "can't read md5sum file $md5_path: $!";
- $md5 =~ s#\s.*$##;
+ my $md5 = $md5sum->{$filename} || die "no md5sum for $filename in ",dump($md5sum);
+ my $items = 1;
+ $part_nr++;
- # extract part number from filename
- my $part_nr = 1;
- $part_nr = $1 if ($tarfilename =~ m#/(\d+)\.tar\.gz#);
+ check_part($host, $num, $part_nr, $tar_size, $size, $md5, $items);
- #
- # finally, check if backup_parts table in database is valid
- #
-
- check_part($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items);
+ # round increment size to 2k block size
+ $inc_size += int(($size + 2048) / 2048);
}
- # short-cut and exit;
- return $same unless($same);
+ $sth_inc_size->execute(
+ $inc_size,
+ $part_nr,
+ get_backup_id($host, $num),
+ );
+ $dbh->commit;
@tar_files = sort @tar_files;
print "\n\t",($#tar_files + 1), " tar files";
FROM files
JOIN shares on shares.id = shareid
JOIN hosts on hosts.id = shares.hostid
- WHERE hosts.name = ? and shares.name = ? and backupnum = ?
+ WHERE hosts.name = ? and backupnum = ?
});
- $sth->execute($host, $share, $num);
+ $sth->execute($host, $num);
my @db_files;
while( my $row = $sth->fetchrow_hashref ) {
@db_files = sort @db_files;
+ my $same = 1;
+
if ($#tar_files != $#db_files) {
$same = 0;
print " NUMBER";
return $same;
}
-=cut
#----- main
+foreach ( 0 .. $#{ $args->{HostList} } ) {
+
+ my $host = $args->{'HostList'}->[$_];
+ my $num = $args->{'BackupList'}->[$_];
+
+ check_archive $host => $num;
+
+}
+
+exit;
+
my $sth = $dbh->prepare( qq{
select
while (my $row = $sth->fetchrow_hashref) {
- my $found;
- foreach ( 0 .. $#{ $args->{HostList} } ) {
- if ( $args->{'HostList'}->[$_] eq $row->{host}
- && $args->{'BackupList'}->[$_] eq $row->{num} ) {
- $found = 1;
- }
- }
-
- if ( ! $found ) {
- warn "skipped ",dump($row);
- next;
- }
-
$curr_backup++;
my $tar_file = BackupPC::Search::getGzipName($row->{'host'}, $row->{'share'}, $row->{'num'});