1 #!/usr/local/bin/perl -w
4 use lib "/usr/local/BackupPC/lib";
9 use BackupPC::Attrib qw/:all/;
10 use Data::Dump qw(dump);
11 use Time::HiRes qw/time/;
12 use POSIX qw/strftime/;
14 use Archive::Tar::Streamed;
17 use Getopt::Long::Descriptive;
21 BackupPC_ASA_PostArchive_Update
25 # /etc/BackupPC/pc/_search_archive.pl
# Bootstrap: connect to the local BackupPC installation and load the
# per-host configuration of the virtual "_search_archive" host.
my $bpc = BackupPC::Lib->new || die "can't create BackupPC::Lib";
$bpc->ConfigRead('_search_archive'); # read our configuration
my %Conf = $bpc->Conf();

# Expose the merged configuration to BackupPC::Search helpers
# (package global used by getGzipName / get_tgz_size_by_name below).
%BackupPC::Search::Conf = %Conf;

# Command-line options (Getopt::Long::Descriptive); --host/--num are
# repeatable and pair up positionally in the main loop below.
my ($opt,$usage) = describe_options(
[ 'host|h=s@', "import just host(s)" ],
[ 'num|n=s@', "import just backup number(s)" ],
[ 'ok=n', "xferOK", { default => 0 } ],
[ 'check|c', "check archives on disk and sync", { default => 1 } ],
[ 'debug|d', "debug", { default => 1 } ],
[ 'help', "show help" ],

print($usage->text), exit if $opt->help;
# Timestamp format used for progress output lines.
my $t_fmt = '%Y-%m-%d %H:%M:%S';

#warn "## Conf = ",dump( \%Conf );

# Single shared DB handle: errors raise exceptions (RaiseError) and
# transactions are committed explicitly (AutoCommit => 0).
my $dbh = DBI->connect($Conf{SearchDSN}, $Conf{SearchUser}, "", { RaiseError => 1, AutoCommit => 0 });

# presumably the body of curr_time() — formats "now" with $t_fmt
# (sub header not visible in this chunk).
return strftime($t_fmt,localtime());

# presumably the body of fmt_time($seconds) — formats a duration as
# [<H>h]MM:SS; returns immediately for a false/zero duration.
my $t = shift || return;

my ($ss,$mm,$hh) = gmtime($t);
$out .= "${hh}h" if ($hh);
$out .= sprintf("%02d:%02d", $mm,$ss);
# get_backup_id($host, $num) -> backups.id
# Look up the database id of a backup by host name and backup number,
# memoising results in $hsn_cache. Dies unless exactly one row matches.
sub get_backup_id($$) {
my ($host, $num) = @_;

my $key = "$host $num";
return $hsn_cache->{$key} if ($hsn_cache->{$key});

# all backup parts will be attached to first share in backups
my $sth = $dbh->prepare(qq{
INNER JOIN shares ON backups.shareID=shares.ID
INNER JOIN hosts ON backups.hostID = hosts.ID
WHERE hosts.name = ? and backups.num = ?
$sth->execute($host, $num);
# NOTE(review): $sth->rows after a SELECT is driver-dependent per DBI
# docs — works with DBD::Pg, unreliable elsewhere; verify the driver.
die "can't find backup $host:$num" unless $sth->rows == 1;
my ($id) = $sth->fetchrow_array;

$hsn_cache->{$key} = $id;

print STDERR "# $key == $id\n" if $opt->debug;
# backup_inc_deleted($backup_id)
# Mark one backup's increment as deleted in the database (the UPDATE
# statement text is not visible in this chunk).
sub backup_inc_deleted($) {
my $backup_id = shift;
my $sth_inc_deleted = $dbh->prepare(qq{
$sth_inc_deleted->execute($backup_id);
# presumably the body of system_ok(@cmd) — run an external command and
# die unless it exits 0 (sub header not visible in this chunk).
warn "## system_ok @_\n";
# NOTE(review): after a system() failure $? (child status) is the
# informative value; $! is only set when the fork/exec itself failed.
system(@_) == 0 || die "system @_:$!";

# Prepared once and reused by check_archive: update a backup row's
# increment size and parts count (SQL text not visible in this chunk).
my $sth_inc_size = $dbh->prepare(qq{
# presumably the body of read_only($full) — strip write/execute bits,
# keeping only the read permissions already present on the file.
my $perm = (stat $full)[2] & 0444;
warn sprintf("chmod %03o %s\n",$perm,$full);
# chmod returns the number of files changed; low-precedence "or" makes
# the die actually fire on failure. The original "chmod $perm, $full
# || die $!" bound "||" to $full alone, so a failed chmod was silently
# ignored.
chmod($perm, $full) or die "chmod $full: $!";
# presumably the start of check_archive($host, $num) — verify the
# on-disk archive parts for one backup against the database
# (sub header not visible in this chunk).
my ($host,$num) = @_;
warn "# check_archive $host $num";

# All archive parts for this backup share the "host.num." prefix.
my $glob = "$Conf{ArchiveDest}/$host.$num.*";

# Strip the destination directory so part names are relative paths.
my @tar_parts = sort map { s/^\Q$Conf{ArchiveDest}\E\/*//; $_ } glob $glob ;

if ( ! @tar_parts ) {
warn "ERROR: no files for $glob";

print curr_time, " check $host $num\n";

# Count the files recorded in the database for this backup number.
my $sth = $dbh->prepare(qq{
JOIN shares on shares.id = shareid
JOIN hosts on hosts.id = shares.hostid
WHERE hosts.name = ? and backupnum = ?
$sth->execute($host, $num);
my ($files) = $sth->fetchrow_array;

# No files in the database: the increment is empty, so the on-disk
# parts are stale and get removed.
warn "EMPTY INCREMENT, cleanup ",dump( @tar_parts );
foreach my $path ( @tar_parts ) {
my $full = "$Conf{ArchiveDest}/$path";
# unlink returns the number of files deleted; low-precedence "or" makes
# the die actually fire on failure. The original "unlink $full || die"
# bound "||" to $full alone, so unlink failures were silently ignored.
unlink($full) or die "can't remove $full: $!";
# Per-backup checksum file covering all archive parts of this backup.
my $md5_path = "$Conf{ArchiveDest}/$host.$num.md5";
unlink $md5_path if -e $md5_path && -s $md5_path == 0; # fix empty

$read_protect = 0;

# (Re)create the checksum file when it is missing.
if ( ! -e $md5_path ) {
system_ok "cd $Conf{ArchiveDest} && /usr/bin/md5sum $host.$num.* > $md5_path";

# With --check, verify every part against the stored checksums.
system_ok "cd $Conf{ArchiveDest} && /usr/bin/md5sum -c $md5_path" if $opt->check;

# Parse md5sum output ("<md5>  <filename>") into $md5sum->{file} = md5.
foreach ( split(/\n/, read_file "$Conf{ArchiveDest}/$host.$num.md5" ) ) {
my ( $md5, $path ) = split(/\s+/,$_);
$md5sum->{$path} = $md5;
read_only "$Conf{ArchiveDest}/$path" if $read_protect;

# depending on expected returned value this is used like:
# my $uncompress_size = get_gzip_size('/full/path/to.gz');
# my ($compress_size, $uncompress_size) = get_gzip_size('/path.gz');
sub get_gzip_size($) {
my $filename = shift;
die "file $filename problem: $!" unless (-r $filename);

if ( $filename !~ m/\.gz$/ ) {

# NOTE(review): interpolating $filename into a piped open is shell-
# injection prone for filenames with metacharacters; list-form
# open(..., '-|', $cmd, @args) would be safer.
open(my $gzip, $Conf{GzipPath}." -l $filename |") || die "can't gzip -l $filename: $!";

my ($comp, $uncomp) = (0,0);
# gzip -l columns: compressed, uncompressed, ratio, name
if ($line =~ m/\s+(\d+)\s+(\d+)\s+\d+\.\d+/s) {
warn "ERROR can't parse: $line";
# presumably the body of check_part(...) — ensure the backup_parts row
# for one archive part matches the values measured on disk; delete a
# mismatching row and insert fresh values (sub header not visible).
my ($host, $num, $part_nr, $tar_size, $size, $md5, $items, $filename) = @_;
my $backup_id = get_backup_id($host, $num);
my $sth_md5 = $dbh->prepare(qq{
id, tar_size, size, md5, items, filename
where backup_id = ? and part_nr = ? and filename = ?

$sth_md5->execute($backup_id, $part_nr, $filename);

if (my $row = $sth_md5->fetchrow_hashref) {
# the stored row is considered valid when all of these hold
$row->{tar_size} >= $tar_size &&
$row->{size} == $size &&
$row->{md5} eq $md5 &&
$row->{items} == $items
# stored row disagrees with the on-disk part: replace it
print ", deleting invalid backup_parts $row->{id}";
$dbh->do(qq{ delete from backup_parts where id = $row->{id} });
print ", inserting new";
my $sth_insert = $dbh->prepare(qq{
insert into backup_parts (
) values (?,?,?,?,?,?,?)

$sth_insert->execute($backup_id, $part_nr, $tar_size, $size, $md5, $items, $filename);
print " [parts: ",join(", ", @tar_parts),"]" if $opt->debug;

print " reading" if $opt->debug;

# Walk each archive part, collecting entry names and payload sizes
# straight from the (streamed) tar.
foreach my $filename (@tar_parts) {

next if $filename eq "$host.$num.md5"; # skip the checksum file itself

print "\n\t- $filename";

my $path = "$Conf{ArchiveDest}/$filename";

# NOTE(review): "|| die" here also dies for a legitimately empty file
# (size 0 is false) — presumably parts are never empty; verify.
my $size = (stat( $path ))[7] || die "can't stat $path: $!";

# A single compressed part must fit on the configured archive medium.
if ($size > $Conf{ArchiveMediaSize}) {
print ", part bigger than media $size > $Conf{ArchiveMediaSize}\n";

print ", $size bytes";

# Stream-decompress the part and iterate its tar entries.
open(my $fh, "gzip -cd $path |") or die "can't open $path: $!";

my $tar = Archive::Tar::Streamed->new($fh);

my $tar_size_inarc = 0;

while(my $entry = $tar->next) {
push @tar_files, $entry->name;
$tar_size_inarc += $entry->size;

# the uncompressed payload must also fit on the medium
if ($tar_size_inarc > $Conf{ArchiveMediaSize}) {
print ", part $filename is too big $tar_size_inarc > $Conf{ArchiveMediaSize}\n";

print ", $items items";

# A part with no payload and no entries means the increment is empty.
if ($tar_size_inarc == 0 && $items == 0) {
print ", EMPTY tar\n";
# get_backup_id takes exactly ($host, $num) — its ($$) prototype — but
# the original call passed a third $share argument (not defined in this
# scope), which is rejected at compile time by the prototype.
my $backup_id = get_backup_id($host, $num);
backup_inc_deleted( $backup_id );

# compressed/uncompressed sizes of this part as reported by gzip -l
my $tar_size = get_gzip_size( $path );

# finally, check if backup_parts table in database is valid
my $md5 = $md5sum->{$filename} || die "no md5sum for $filename in ",dump($md5sum);

check_part($host, $num, $part_nr, $tar_size, $size, $md5, $items, $filename);

# round increment size to 2k block size
# NOTE(review): "+ 2048" adds a full extra block when $size is already
# a multiple of 2048; "+ 2047" would round up exactly — confirm intent.
$inc_size += int((($size + 2048) / 2048 ) * 2048);

$sth_inc_size->execute(
get_backup_id($host, $num),

# Compare the file list collected from the tar parts against the
# files recorded in the database for this backup.
@tar_files = sort @tar_files;
print "\n\t",($#tar_files + 1), " tar files";

my $sth = $dbh->prepare(qq{
JOIN shares on shares.id = shareid
JOIN hosts on hosts.id = shares.hostid
WHERE hosts.name = ? and backupnum = ?
$sth->execute($host, $num);

while( my $row = $sth->fetchrow_hashref ) {

my $path = $row->{'path'} || die "no path?";

# directories appear with a trailing slash in the tar listing
$path .= '/' if ($row->{'type'} == BPC_FTYPE_DIR);
push @db_files, $path;

print " ",($#db_files + 1), " database files, diff";

@db_files = sort @db_files;

# Element counts differ: emit a line-by-line diff of both listings.
if ($#tar_files != $#db_files) {

my $diff = Algorithm::Diff->new(\@tar_files, \@db_files);
while ( $diff->Next() ) {
next if $diff->Same();

print "< $_\n" for $diff->Items(1); # only in the tar
print "> $_\n" for $diff->Items(2); # only in the database

print " ",($same ? 'ok' : 'DIFFERENT'),
", dur: ",fmt_time(time() - $t), "\n";
# Per-host mode: without --host there is nothing to do.
exit unless $opt->host;

# --host and --num pair up positionally.
foreach ( 0 .. $#{ $opt->host } ) {

my $host = lc $opt->host->[$_];
my $num = $opt->num->[$_];

# transfer failed: remove any partial archive files for this backup
warn "ERROR $host $num running cleanup";
foreach my $path ( glob "$Conf{ArchiveDest}/$host.$num.*" ) {
# unlink returns a count; low-precedence "or" makes the die actually
# fire on failure (with "||" it bound to $path alone and never fired).
unlink($path) or die $!;
check_archive $host => $num;

# Report/sync mode: enumerate all non-deleted, non-empty backups with
# their recorded part counts, oldest first.
my $sth = $dbh->prepare( qq{
backups.id as backup_id,
shares.name as share,
count(backup_parts.backup_id) as backup_parts
join shares on backups.hostid = shares.hostid
and shares.id = backups.shareid
join hosts on shares.hostid = hosts.id
full outer join backup_parts on backups.id = backup_parts.backup_id
where not inc_deleted and backups.size > 0
group by backups.id, hosts.name, shares.name, backups.num, backups.date, inc_size, parts, backup_parts.backup_id
order by backups.date

# NOTE(review): $sth->rows for a SELECT is driver-dependent per DBI
# docs — reliable with DBD::Pg, not in general.
my $num_backups = $sth->rows;
# For each candidate backup: skip it when its archive tar already
# exists and is complete, otherwise (re)create the tar.
while (my $row = $sth->fetchrow_hashref) {

# canonical tar file name for this host/share/backup number
my $tar_file = BackupPC::Search::getGzipName($row->{'host'}, $row->{'share'}, $row->{'num'});

# this will return -1 if file doesn't exist
my $size = BackupPC::Search::get_tgz_size_by_name($tar_file);

print "# host: ".$row->{host}.", share: ".$row->{'share'}.", backup_num:".$row->{num}." size: $size backup.size: ", $row->{inc_size},"\n" if $opt->debug;

# Archive exists, is at least the recorded increment size, and every
# expected part is present — tar_check then verifies the tar itself.
if ( $row->{'inc_size'} != -1 && $size != -1 && $row->{'inc_size'} >= $size && $row->{parts} == $row->{backup_parts}) {

tar_check($row->{'host'}, $row->{'share'}, $row->{'num'}, $tar_file) && next;

print curr_time, " creating $curr_backup/$num_backups ", $row->{host}, ":", $row->{share}, " #", $row->{num},
" ", strftime('%Y-%m-%d', localtime($row->{date})), " -> $tar_file";

# NOTE(review): interpolating host/share names into a shell command is
# injection-prone if they contain metacharacters; list-form system()
# would be safer.
my $cmd = qq[ $tarIncCreate -h "$row->{host}" -s "$row->{share}" -n $row->{num} -f ];
print STDERR "## $cmd\n" if ($opt->debug);

# Creation failed: mark the increment deleted so it is retried/ignored.
if (system($cmd) != 0) {
print STDERR " FAILED, marking this backup deleted";
backup_inc_deleted( $row->{backup_id} );

print ", dur: ",fmt_time(time() - $t), "\n";