move this to post-archive action
author	Dobrica Pavlinusic <dpavlin@rot13.org>
Wed, 26 Jan 2011 16:57:20 +0000 (16:57 +0000)
committer	Dobrica Pavlinusic <dpavlin@rot13.org>
Wed, 26 Jan 2011 16:57:20 +0000 (16:57 +0000)
bin/BackupPC_ASA_ArchiveStart
bin/BackupPC_ASA_PostArchive_Update [new file with mode: 0755]
bin/BackupPC_incPartsUpdate [deleted file]

index c527fa4..bd294e4 100755 (executable)
@@ -15,6 +15,7 @@
 #
 # AUTHOR
 #   Craig Barratt  <cbarratt@users.sourceforge.net>
+#   Dobrica Pavlinusic <dpavlin@rot13.org>
 #
 # COPYRIGHT
 #   Copyright (C) 2007-2009  Craig Barratt
@@ -47,6 +48,8 @@ use lib "/usr/local/BackupPC/lib";
 use Getopt::Std;
 use BackupPC::Lib;
 
+use DBI;
+
 die("BackupPC::Lib->new failed\n") if ( !(my $bpc = BackupPC::Lib->new) );
 
 my %opts;
diff --git a/bin/BackupPC_ASA_PostArchive_Update b/bin/BackupPC_ASA_PostArchive_Update
new file mode 100755 (executable)
index 0000000..442c027
--- /dev/null
@@ -0,0 +1,452 @@
+#!/usr/local/bin/perl -w
+
+use strict;
+use lib "/usr/local/BackupPC/lib";
+
+use DBI;
+use BackupPC::Lib;
+use BackupPC::View;
+use BackupPC::Attrib qw/:all/;
+use Data::Dumper;
+use Time::HiRes qw/time/;
+use POSIX qw/strftime/;
+use Cwd qw/abs_path/;
+use File::Which;
+use Archive::Tar::Streamed;
+use Algorithm::Diff;
+use Getopt::Std;
+use File::Slurp;
+use File::Pid;
+
+=head1 NAME
+
+BackupPC_ASA_PostArchive_Update
+
+=head1 DESCRIPTION
+
+Create C<.tar.gz> increments on disk calling C<BackupPC_tarIncCreate>.
+
+Following options are supported (but all are optional):
+
+=over 4
+
+=item -h hostname
+
+Update parts for just single C<hostname>
+
+=item -c
+
+Force check for tar archives which exist on disk
+
+=item -d
+
+Turn on debugging output
+
+=back
+
+=cut
+
+my %opt;
+getopts("cdh:", \%opt );
+
+my $debug = $opt{d};
+my $check = $opt{c} && print STDERR "NOTICE: tar archive check forced\n";
+
+my $pid_path = abs_path($0);
+$pid_path =~ s/\W+/_/g;
+
+my $pidfile = new File::Pid({
+       file => "/tmp/$pid_path",
+});
+
+if (my $pid = $pidfile->running ) {
+       die "$0 already running: $pid\n";
+} elsif ($pidfile->pid ne $$) {
+       $pidfile->remove;
+       $pidfile = new File::Pid;
+}
+
+print STDERR "$0 using pid ",$pidfile->pid," file ",$pidfile->file,"\n";
+$pidfile->write;
+
+my $bpc = BackupPC::Lib->new || die "can't create BackupPC::Lib";
+my %Conf = $bpc->Conf();
+
+use BackupPC::Search;
+%BackupPC::Search::Conf = %Conf;
+
+my $path = abs_path($0);
+$path =~ s#/[^/]+$#/#;
+my $tarIncCreate = $path .= 'BackupPC_tarIncCreate';
+
+die "can't find $tarIncCreate: $!\n" unless (-x $tarIncCreate);
+
+my $bin;
+foreach my $c (qw/gzip md5sum/) {
+       $bin->{$c} = which($c) || die "$0 needs $c, install it\n";
+}
+
+$|=1;
+
+my $start_t = time();
+
+my $t_fmt = '%Y-%m-%d %H:%M:%S';
+
+my $dsn = $Conf{SearchDSN} || die "Need SearchDSN in config.pl\n";
+my $user = $Conf{SearchUser} || '';
+
+my $dbh = DBI->connect($dsn, $user, "", { RaiseError => 1, AutoCommit => 0 });
+
+my $tar_dir = $Conf{GzipTempDir};
+
+die "problem with $tar_dir, check GzipTempDir in configuration\n" unless (-d $tar_dir && -w $tar_dir);
+
+#---- subs ----
+
+sub fmt_time {
+       my $t = shift || return;
+       my $out = "";
+       my ($ss,$mm,$hh) = gmtime($t);
+       $out .= "${hh}h" if ($hh);
+       $out .= sprintf("%02d:%02d", $mm,$ss);
+       return $out;
+}
+
+sub curr_time {
+       return strftime($t_fmt,localtime());
+}
+
+my $hsn_cache;
+
+sub get_backup_id($$$) {
+       my ($host, $share, $num) = @_;
+
+       my $key = "$host $share $num";
+       return $hsn_cache->{$key} if ($hsn_cache->{$key});
+
+       my $sth = $dbh->prepare(qq{
+               SELECT 
+                       backups.id
+               FROM backups 
+               INNER JOIN shares       ON backups.shareID=shares.ID
+               INNER JOIN hosts        ON backups.hostID = hosts.ID
+               WHERE hosts.name = ? and shares.name = ? and backups.num = ?
+       });
+       $sth->execute($host, $share, $num);
+       my ($id) = $sth->fetchrow_array;
+
+       $hsn_cache->{"$host $share $num"} = $id;
+
+       print STDERR "# $host $share $num == $id\n" if ($opt{d});
+
+       return $id;
+}
+
+sub backup_inc_deleted($) {
+       my $backup_id = shift;
+       my $sth_inc_deleted = $dbh->prepare(qq{
+               update backups set
+                       inc_deleted = true
+               where id = ?
+       });
+       $sth_inc_deleted->execute($backup_id);
+}
+
+sub tar_check($$$$) {
+       my ($host,$share,$num,$filename) = @_;
+
+       my $t = time();
+       print curr_time, " check $host:$share#$num -> $filename";
+
+       # depending on expected returned value this is used like:
+       # my $uncompress_size = get_gzip_size('/full/path/to.gz');
+       # my ($compress_size, $uncompress_size) = get_gzip_size('/path.gz');
+       sub get_gzip_size($) {
+               my $filename = shift;
+               die "file $filename problem: $!" unless (-r $filename);
+               open(my $gzip, $bin->{gzip}." -l $filename |") || die "can't gzip -l $filename: $!";
+               my $line = <$gzip>;
+               chomp($line);
+               $line = <$gzip> if ($line =~ /^\s+compressed/);
+
+               my ($comp, $uncomp) = (0,0);
+
+               if ($line =~ m/^\s+(\d+)\s+(\d+)\s+\d+\.\d+/) {
+                       if (wantarray) {
+                               return [ $1, $2 ];
+                       } else {
+                               return $2;
+                       }
+               } else {
+                       die "can't find size in line: $line";
+               }
+       }
+
+       sub check_part {
+               my ($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items) = @_;
+               my $backup_id = get_backup_id($host, $share, $num);
+               my $sth_md5 = $dbh->prepare(qq{
+                       select
+                               id, tar_size, size, md5, items
+                       from backup_parts
+                       where backup_id = ? and part_nr = ?
+               });
+
+               $sth_md5->execute($backup_id, $part_nr);
+
+               if (my $row = $sth_md5->fetchrow_hashref) {
+                       return if (
+                               $row->{tar_size} >= $tar_size &&
+                               $row->{size} == $size &&
+                               $row->{md5} eq $md5 &&
+                               $row->{items} == $items
+                       );
+                       print ", deleting invalid backup_parts $row->{id}";
+                       $dbh->do(qq{ delete from backup_parts where id = $row->{id} });
+               }
+               print ", inserting new";
+               my $sth_insert = $dbh->prepare(qq{
+                       insert into backup_parts (
+                               backup_id,
+                               part_nr,
+                               tar_size,
+                               size,
+                               md5,
+                               items
+                       ) values (?,?,?,?,?,?)
+               });
+
+               $sth_insert->execute($backup_id, $part_nr, $tar_size, $size, $md5, $items);
+               $dbh->commit;
+       }
+
+       my @tar_parts;
+
+       if (-d "$tar_dir/$filename") {
+               print ", multi-part";
+               opendir(my $dir, "$tar_dir/$filename") || die "can't readdir $tar_dir/$filename: $!";
+		@tar_parts = map { my $p = $_; $p =~ s#^#$filename/#; $p } grep { !/^\./ && !/md5/ && -f "$tar_dir/$filename/$_" } readdir($dir);
+               closedir($dir);
+       } else {
+		push @tar_parts, "$filename.tar.gz";
+       }
+
+       print " [parts: ",join(", ", @tar_parts),"]" if ($opt{d});
+
+       my $same = 1;
+       my @tar_files;
+
+       my $backup_part;
+
+       print " reading" if ($opt{d});
+
+       foreach my $tarfilename (@tar_parts) {
+
+               print "\n\t- $tarfilename";
+
+               my $path = "$tar_dir/$tarfilename";
+
+               my $size = (stat( $path ))[7] || die "can't stat $path: $!";
+
+               if ($size > $Conf{MaxArchiveSize}) {
+                       print ", part bigger than media $size > $Conf{MaxArchiveSize}\n";
+                       return 0;
+               }
+
+               print ", $size bytes";
+
+
+               open(my $fh, "gzip -cd $path |") or die "can't open $path: $!";
+               binmode($fh);
+               my $tar = Archive::Tar::Streamed->new($fh);
+
+               my $tar_size_inarc = 0;
+               my $items = 0;
+
+               while(my $entry = $tar->next) {
+                       push @tar_files, $entry->name;
+                       $items++;
+                       $tar_size_inarc += $entry->size;
+
+                       if ($tar_size_inarc > $Conf{MaxArchiveFileSize}) {
+                               print ", part $tarfilename is too big $tar_size_inarc > $Conf{MaxArchiveFileSize}\n";
+                               return 0;
+                       }
+
+               }
+
+               close($fh);
+
+               print ", $items items";
+
+               if ($tar_size_inarc == 0 && $items == 0) {
+                       print ", EMPTY tar\n";
+
+                       my $backup_id = get_backup_id($host, $share, $num);
+                       backup_inc_deleted( $backup_id );
+
+                       $dbh->commit;
+
+                       return 1;
+               }
+
+               my $tar_size = get_gzip_size( $path );
+
+               # real tar size is bigger because of padding    
+               if ($tar_size_inarc > $tar_size) {
+                       print ", size of files in tar ($tar_size_inarc) bigger than whole tar ($tar_size)!\n";
+                       return 0;
+               }
+
+               #
+               # check if md5 exists, and if not, create one
+               #
+
+               my $md5_path = $path;
+               $md5_path =~ s/\.tar\.gz$/.md5/ || die "can't create md5 filename from $md5_path";
+               if (! -e $md5_path || -z $md5_path) {
+                       print ", creating md5";
+                       system( $bin->{md5sum} . " $path > $md5_path") == 0 or die "can't create md5 $path: $!";
+               } else {
+                       ## FIXME check if existing md5 is valid
+               }
+
+               my $md5 = read_file( $md5_path ) || die "can't read md5sum file $md5_path: $!";
+               $md5 =~ s#\s.*$##;
+
+               # extract part number from filename
+               my $part_nr = 1;
+               $part_nr = $1 if ($tarfilename =~ m#/(\d+)\.tar\.gz#);
+
+               #
+               # finally, check if backup_parts table in database is valid
+               #
+
+               check_part($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items);
+       }
+
+       # short-cut and exit;
+       return $same unless($same);
+
+       @tar_files = sort @tar_files;
+       print "\n\t",($#tar_files + 1), " tar files";
+
+       my $sth = $dbh->prepare(qq{
+               SELECT path,type
+               FROM files
+               JOIN shares on shares.id = shareid
+               JOIN hosts on hosts.id = shares.hostid
+               WHERE hosts.name = ? and shares.name = ? and backupnum = ?
+       });
+       $sth->execute($host, $share, $num);
+       my @db_files;
+       while( my $row = $sth->fetchrow_hashref ) {
+
+               my $path = $row->{'path'} || die "no path?";
+               $path =~ s#^/#./#;
+               $path .= '/' if ($row->{'type'} == BPC_FTYPE_DIR);
+               push @db_files, $path;
+       }
+
+       print " ",($#db_files + 1), " database files, diff";
+
+       @db_files = sort @db_files;
+
+       if ($#tar_files != $#db_files) {
+               $same = 0;
+               print " NUMBER";
+       } else {
+               my $diff = Algorithm::Diff->new(\@tar_files, \@db_files);
+               while ( $diff->Next() ) {
+                       next if $diff->Same();
+                       $same = 0;
+                       print "< $_\n" for $diff->Items(1);
+                       print "> $_\n" for $diff->Items(2);
+               }
+       }
+
+       print " ",($same ? 'ok' : 'DIFFERENT'),
+               ", dur: ",fmt_time(time() - $t), "\n";
+
+       return $same;
+}
+
+
+#----- main
+
+my $sth = $dbh->prepare( qq{
+       
+select
+       backups.id as backup_id,
+       hosts.name as host,
+       shares.name as share,
+       backups.num as num,
+       backups.date,
+       inc_size,
+       parts,
+       count(backup_parts.backup_id) as backup_parts
+from backups
+       join shares on backups.hostid = shares.hostid
+               and shares.id = backups.shareid
+       join hosts on shares.hostid = hosts.id
+       full outer join backup_parts on backups.id = backup_parts.backup_id
+where not inc_deleted and backups.size > 0
+group by backups.id, hosts.name, shares.name, backups.num, backups.date, inc_size, parts, backup_parts.backup_id
+order by backups.date
+
+} );
+
+$sth->execute();
+my $num_backups = $sth->rows;
+my $curr_backup = 1;
+
+if ($opt{h}) {
+       warn "making increments just for host $opt{h}\n";
+}
+
+while (my $row = $sth->fetchrow_hashref) {
+
+       if ($opt{h} && $row->{host} ne $opt{h}) {
+               warn "skipped $row->{host}\n" if ($debug);
+               next;
+       }
+
+       $curr_backup++;
+
+       my $tar_file = BackupPC::Search::getGzipName($row->{'host'}, $row->{'share'}, $row->{'num'});
+
+       # this will return -1 if file doesn't exist
+       my $size = BackupPC::Search::get_tgz_size_by_name($tar_file);
+
+       print "# host: ".$row->{host}.", share: ".$row->{'share'}.", backup_num:".$row->{num}." size: $size backup.size: ", $row->{inc_size},"\n" if ($opt{d});
+
+       if ( $row->{'inc_size'} != -1 && $size != -1 && $row->{'inc_size'} >= $size && $row->{parts} == $row->{backup_parts}) {
+               if ($check) {
+                       tar_check($row->{'host'}, $row->{'share'}, $row->{'num'}, $tar_file) && next;
+               } else {
+                       next;
+               }
+       }
+
+       print curr_time, " creating $curr_backup/$num_backups ", $row->{host}, ":", $row->{share}, " #", $row->{num},
+               " ", strftime('%Y-%m-%d', localtime($row->{date})), " -> $tar_file";
+
+       my $t = time();
+
+       # re-create archive?
+       my $cmd = qq[ $tarIncCreate -h "$row->{host}" -s "$row->{share}" -n $row->{num} -f ];
+       print STDERR "## $cmd\n" if ($debug);
+
+       if (system($cmd) != 0) {
+               print STDERR " FAILED, marking this backup deleted";
+               backup_inc_deleted( $row->{backup_id} );
+       }
+
+       print ", dur: ",fmt_time(time() - $t), "\n";
+
+       $dbh->commit;
+
+}
+
+undef $sth;
+$dbh->disconnect;
diff --git a/bin/BackupPC_incPartsUpdate b/bin/BackupPC_incPartsUpdate
deleted file mode 100755 (executable)
index 442c027..0000000
+++ /dev/null
@@ -1,452 +0,0 @@
-#!/usr/local/bin/perl -w
-
-use strict;
-use lib "/usr/local/BackupPC/lib";
-
-use DBI;
-use BackupPC::Lib;
-use BackupPC::View;
-use BackupPC::Attrib qw/:all/;
-use Data::Dumper;
-use Time::HiRes qw/time/;
-use POSIX qw/strftime/;
-use Cwd qw/abs_path/;
-use File::Which;
-use Archive::Tar::Streamed;
-use Algorithm::Diff;
-use Getopt::Std;
-use File::Slurp;
-use File::Pid;
-
-=head1 NAME
-
-BackupPC_incPartsUpdate
-
-=head1 DESCRIPTION
-
-Create C<.tar.gz> increments on disk calling C<BackupPC_tarIncCreate>.
-
-Following options are supported (but all are optional):
-
-=over 4
-
-=item -h hostname
-
-Update parts for just single C<hostname>
-
-=item -c
-
-Force check for tar archives which exist on disk
-
-=item -d
-
-Turn debugging output
-
-=back
-
-=cut
-
-my %opt;
-getopts("cdh:", \%opt );
-
-my $debug = $opt{d};
-my $check = $opt{c} && print STDERR "NOTICE: tar archive check forced\n";
-
-my $pid_path = abs_path($0);
-$pid_path =~ s/\W+/_/g;
-
-my $pidfile = new File::Pid({
-       file => "/tmp/$pid_path",
-});
-
-if (my $pid = $pidfile->running ) {
-       die "$0 already running: $pid\n";
-} elsif ($pidfile->pid ne $$) {
-       $pidfile->remove;
-       $pidfile = new File::Pid;
-}
-
-print STDERR "$0 using pid ",$pidfile->pid," file ",$pidfile->file,"\n";
-$pidfile->write;
-
-my $bpc = BackupPC::Lib->new || die "can't create BackupPC::Lib";
-my %Conf = $bpc->Conf();
-
-use BackupPC::Search;
-%BackupPC::Search::Conf = %Conf;
-
-my $path = abs_path($0);
-$path =~ s#/[^/]+$#/#;
-my $tarIncCreate = $path .= 'BackupPC_tarIncCreate';
-
-die "can't find $tarIncCreate: $!\n" unless (-x $tarIncCreate);
-
-my $bin;
-foreach my $c (qw/gzip md5sum/) {
-       $bin->{$c} = which($c) || die "$0 needs $c, install it\n";
-}
-
-$|=1;
-
-my $start_t = time();
-
-my $t_fmt = '%Y-%m-%d %H:%M:%S';
-
-my $dsn = $Conf{SearchDSN} || die "Need SearchDSN in config.pl\n";
-my $user = $Conf{SearchUser} || '';
-
-my $dbh = DBI->connect($dsn, $user, "", { RaiseError => 1, AutoCommit => 0 });
-
-my $tar_dir = $Conf{GzipTempDir};
-
-die "problem with $tar_dir, check GzipTempDir in configuration\n" unless (-d $tar_dir && -w $tar_dir);
-
-#---- subs ----
-
-sub fmt_time {
-       my $t = shift || return;
-       my $out = "";
-       my ($ss,$mm,$hh) = gmtime($t);
-       $out .= "${hh}h" if ($hh);
-       $out .= sprintf("%02d:%02d", $mm,$ss);
-       return $out;
-}
-
-sub curr_time {
-       return strftime($t_fmt,localtime());
-}
-
-my $hsn_cache;
-
-sub get_backup_id($$$) {
-       my ($host, $share, $num) = @_;
-
-       my $key = "$host $share $num";
-       return $hsn_cache->{$key} if ($hsn_cache->{$key});
-
-       my $sth = $dbh->prepare(qq{
-               SELECT 
-                       backups.id
-               FROM backups 
-               INNER JOIN shares       ON backups.shareID=shares.ID
-               INNER JOIN hosts        ON backups.hostID = hosts.ID
-               WHERE hosts.name = ? and shares.name = ? and backups.num = ?
-       });
-       $sth->execute($host, $share, $num);
-       my ($id) = $sth->fetchrow_array;
-
-       $hsn_cache->{"$host $share $num"} = $id;
-
-       print STDERR "# $host $share $num == $id\n" if ($opt{d});
-
-       return $id;
-}
-
-sub backup_inc_deleted($) {
-       my $backup_id = shift;
-       my $sth_inc_deleted = $dbh->prepare(qq{
-               update backups set
-                       inc_deleted = true
-               where id = ?
-       });
-       $sth_inc_deleted->execute($backup_id);
-}
-
-sub tar_check($$$$) {
-       my ($host,$share,$num,$filename) = @_;
-
-       my $t = time();
-       print curr_time, " check $host:$share#$num -> $filename";
-
-       # depending on expected returned value this is used like:
-       # my $uncompress_size = get_gzip_size('/full/path/to.gz');
-       # my ($compress_size, $uncompress_size) = get_gzip_size('/path.gz');
-       sub get_gzip_size($) {
-               my $filename = shift;
-               die "file $filename problem: $!" unless (-r $filename);
-               open(my $gzip, $bin->{gzip}." -l $filename |") || die "can't gzip -l $filename: $!";
-               my $line = <$gzip>;
-               chomp($line);
-               $line = <$gzip> if ($line =~ /^\s+compressed/);
-
-               my ($comp, $uncomp) = (0,0);
-
-               if ($line =~ m/^\s+(\d+)\s+(\d+)\s+\d+\.\d+/) {
-                       if (wantarray) {
-                               return [ $1, $2 ];
-                       } else {
-                               return $2;
-                       }
-               } else {
-                       die "can't find size in line: $line";
-               }
-       }
-
-       sub check_part {
-               my ($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items) = @_;
-               my $backup_id = get_backup_id($host, $share, $num);
-               my $sth_md5 = $dbh->prepare(qq{
-                       select
-                               id, tar_size, size, md5, items
-                       from backup_parts
-                       where backup_id = ? and part_nr = ?
-               });
-
-               $sth_md5->execute($backup_id, $part_nr);
-
-               if (my $row = $sth_md5->fetchrow_hashref) {
-                       return if (
-                               $row->{tar_size} >= $tar_size &&
-                               $row->{size} == $size &&
-                               $row->{md5} eq $md5 &&
-                               $row->{items} == $items
-                       );
-                       print ", deleting invalid backup_parts $row->{id}";
-                       $dbh->do(qq{ delete from backup_parts where id = $row->{id} });
-               }
-               print ", inserting new";
-               my $sth_insert = $dbh->prepare(qq{
-                       insert into backup_parts (
-                               backup_id,
-                               part_nr,
-                               tar_size,
-                               size,
-                               md5,
-                               items
-                       ) values (?,?,?,?,?,?)
-               });
-
-               $sth_insert->execute($backup_id, $part_nr, $tar_size, $size, $md5, $items);
-               $dbh->commit;
-       }
-
-       my @tar_parts;
-
-       if (-d "$tar_dir/$filename") {
-               print ", multi-part";
-               opendir(my $dir, "$tar_dir/$filename") || die "can't readdir $tar_dir/$filename: $!";
-		@tar_parts = map { my $p = $_; $p =~ s#^#$filename/#; $p } grep { !/^\./ && !/md5/ && -f "$tar_dir/$filename/$_" } readdir($dir);
-               closedir($dir);
-       } else {
-		push @tar_parts, "$filename.tar.gz";
-       }
-
-       print " [parts: ",join(", ", @tar_parts),"]" if ($opt{d});
-
-       my $same = 1;
-       my @tar_files;
-
-       my $backup_part;
-
-       print " reading" if ($opt{d});
-
-       foreach my $tarfilename (@tar_parts) {
-
-               print "\n\t- $tarfilename";
-
-               my $path = "$tar_dir/$tarfilename";
-
-               my $size = (stat( $path ))[7] || die "can't stat $path: $!";
-
-               if ($size > $Conf{MaxArchiveSize}) {
-                       print ", part bigger than media $size > $Conf{MaxArchiveSize}\n";
-                       return 0;
-               }
-
-               print ", $size bytes";
-
-
-               open(my $fh, "gzip -cd $path |") or die "can't open $path: $!";
-               binmode($fh);
-               my $tar = Archive::Tar::Streamed->new($fh);
-
-               my $tar_size_inarc = 0;
-               my $items = 0;
-
-               while(my $entry = $tar->next) {
-                       push @tar_files, $entry->name;
-                       $items++;
-                       $tar_size_inarc += $entry->size;
-
-                       if ($tar_size_inarc > $Conf{MaxArchiveFileSize}) {
-                               print ", part $tarfilename is too big $tar_size_inarc > $Conf{MaxArchiveFileSize}\n";
-                               return 0;
-                       }
-
-               }
-
-               close($fh);
-
-               print ", $items items";
-
-               if ($tar_size_inarc == 0 && $items == 0) {
-                       print ", EMPTY tar\n";
-
-                       my $backup_id = get_backup_id($host, $share, $num);
-                       backup_inc_deleted( $backup_id );
-
-                       $dbh->commit;
-
-                       return 1;
-               }
-
-               my $tar_size = get_gzip_size( $path );
-
-               # real tar size is bigger because of padding    
-               if ($tar_size_inarc > $tar_size) {
-                       print ", size of files in tar ($tar_size_inarc) bigger than whole tar ($tar_size)!\n";
-                       return 0;
-               }
-
-               #
-               # check if md5 exists, and if not, create one
-               #
-
-               my $md5_path = $path;
-               $md5_path =~ s/\.tar\.gz$/.md5/ || die "can't create md5 filename from $md5_path";
-               if (! -e $md5_path || -z $md5_path) {
-                       print ", creating md5";
-                       system( $bin->{md5sum} . " $path > $md5_path") == 0 or die "can't create md5 $path: $!";
-               } else {
-                       ## FIXME check if existing md5 is valid
-               }
-
-               my $md5 = read_file( $md5_path ) || die "can't read md5sum file $md5_path: $!";
-               $md5 =~ s#\s.*$##;
-
-               # extract part number from filename
-               my $part_nr = 1;
-               $part_nr = $1 if ($tarfilename =~ m#/(\d+)\.tar\.gz#);
-
-               #
-               # finally, check if backup_parts table in database is valid
-               #
-
-               check_part($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items);
-       }
-
-       # short-cut and exit;
-       return $same unless($same);
-
-       @tar_files = sort @tar_files;
-       print "\n\t",($#tar_files + 1), " tar files";
-
-       my $sth = $dbh->prepare(qq{
-               SELECT path,type
-               FROM files
-               JOIN shares on shares.id = shareid
-               JOIN hosts on hosts.id = shares.hostid
-               WHERE hosts.name = ? and shares.name = ? and backupnum = ?
-       });
-       $sth->execute($host, $share, $num);
-       my @db_files;
-       while( my $row = $sth->fetchrow_hashref ) {
-
-               my $path = $row->{'path'} || die "no path?";
-               $path =~ s#^/#./#;
-               $path .= '/' if ($row->{'type'} == BPC_FTYPE_DIR);
-               push @db_files, $path;
-       }
-
-       print " ",($#db_files + 1), " database files, diff";
-
-       @db_files = sort @db_files;
-
-       if ($#tar_files != $#db_files) {
-               $same = 0;
-               print " NUMBER";
-       } else {
-               my $diff = Algorithm::Diff->new(\@tar_files, \@db_files);
-               while ( $diff->Next() ) {
-                       next if $diff->Same();
-                       $same = 0;
-                       print "< $_\n" for $diff->Items(1);
-                       print "> $_\n" for $diff->Items(2);
-               }
-       }
-
-       print " ",($same ? 'ok' : 'DIFFERENT'),
-               ", dur: ",fmt_time(time() - $t), "\n";
-
-       return $same;
-}
-
-
-#----- main
-
-my $sth = $dbh->prepare( qq{
-       
-select
-       backups.id as backup_id,
-       hosts.name as host,
-       shares.name as share,
-       backups.num as num,
-       backups.date,
-       inc_size,
-       parts,
-       count(backup_parts.backup_id) as backup_parts
-from backups
-       join shares on backups.hostid = shares.hostid
-               and shares.id = backups.shareid
-       join hosts on shares.hostid = hosts.id
-       full outer join backup_parts on backups.id = backup_parts.backup_id
-where not inc_deleted and backups.size > 0
-group by backups.id, hosts.name, shares.name, backups.num, backups.date, inc_size, parts, backup_parts.backup_id
-order by backups.date
-
-} );
-
-$sth->execute();
-my $num_backups = $sth->rows;
-my $curr_backup = 1;
-
-if ($opt{h}) {
-       warn "making increments just for host $opt{h}\n";
-}
-
-while (my $row = $sth->fetchrow_hashref) {
-
-       if ($opt{h} && $row->{host} ne $opt{h}) {
-               warn "skipped $row->{host}\n" if ($debug);
-               next;
-       }
-
-       $curr_backup++;
-
-       my $tar_file = BackupPC::Search::getGzipName($row->{'host'}, $row->{'share'}, $row->{'num'});
-
-       # this will return -1 if file doesn't exist
-       my $size = BackupPC::Search::get_tgz_size_by_name($tar_file);
-
-       print "# host: ".$row->{host}.", share: ".$row->{'share'}.", backup_num:".$row->{num}." size: $size backup.size: ", $row->{inc_size},"\n" if ($opt{d});
-
-       if ( $row->{'inc_size'} != -1 && $size != -1 && $row->{'inc_size'} >= $size && $row->{parts} == $row->{backup_parts}) {
-               if ($check) {
-                       tar_check($row->{'host'}, $row->{'share'}, $row->{'num'}, $tar_file) && next;
-               } else {
-                       next;
-               }
-       }
-
-       print curr_time, " creating $curr_backup/$num_backups ", $row->{host}, ":", $row->{share}, " #", $row->{num},
-               " ", strftime('%Y-%m-%d', localtime($row->{date})), " -> $tar_file";
-
-       my $t = time();
-
-       # re-create archive?
-       my $cmd = qq[ $tarIncCreate -h "$row->{host}" -s "$row->{share}" -n $row->{num} -f ];
-       print STDERR "## $cmd\n" if ($debug);
-
-       if (system($cmd) != 0) {
-               print STDERR " FAILED, marking this backup deleted";
-               backup_inc_deleted( $row->{backup_id} );
-       }
-
-       print ", dur: ",fmt_time(time() - $t), "\n";
-
-       $dbh->commit;
-
-}
-
-undef $sth;
-$dbh->disconnect;