1 #!/usr/local/bin/perl -w
4 use lib "/usr/local/BackupPC/lib";
9 use BackupPC::Attrib qw/:all/;
10 use Data::Dump qw(dump);
11 use Time::HiRes qw/time/;
12 use POSIX qw/strftime/;
14 use Archive::Tar::Streamed;
21 BackupPC_ASA_PostArchive_Update
25 # /etc/BackupPC/pc/dvd_tar.pl
30 my $debug = $ENV{DEBUG} || 1;
31 my $check = $ENV{CHECK} || 1;
# NOTE(review): both flags default *on* — DEBUG=0 / CHECK=0 in the environment
# cannot disable them because 0 is false and || falls back to 1; use the
# defined-or // operator if an explicit 0 should be honoured (perl >= 5.10).
34 my $bpc = BackupPC::Lib->new || die "can't create BackupPC::Lib";
35 my %Conf = $bpc->Conf();
36 warn "## ARGV=",dump @ARGV;
# Parse name=value pairs from @ARGV into $args (the surrounding loop header
# is elided from this excerpt); names matching /List/ accumulate their values
# into array refs so repeated HostList=/BackupList= arguments stack up.
43 if ( m/(\w+)=(.+)/ ) {
47 if ( $name =~ m/List/ ) {
48 push @{ $args->{$name} }, $v;
54 warn "args = ",dump($args);
# Expose the parsed BackupPC configuration to the BackupPC::Search package.
58 %BackupPC::Search::Conf = %Conf;
60 my $path = abs_path($0);
61 $path =~ s{/[^/]+$}{/}; # FIXME remove?
67 my $t_fmt = '%Y-%m-%d %H:%M:%S';
69 warn "## Conf = ",dump( \%Conf );
71 my $dsn = $Conf{SearchDSN} || die "Need SearchDSN in config.pl\n";
72 my $user = $Conf{SearchUser} || '';
# NOTE(review): the connect below reads $Conf{...} directly, so $dsn/$user
# above serve only as existence checks / leftovers — confirm before removing.
74 my $dbh = DBI->connect($Conf{SearchDSN}, $Conf{SearchUser}, "", { RaiseError => 1, AutoCommit => 0 });
# curr_time: current local timestamp rendered with $t_fmt
# (the sub header is elided from this excerpt).
80 return strftime($t_fmt,localtime());
# fmt_time: format a duration in seconds as [Hh]MM:SS; returns empty-handed
# for 0/undef input. The "my $out" declaration is elided from this excerpt.
# NOTE(review): uses gmtime on a *duration*, so it only renders correctly for
# durations under 24h — presumably acceptable here; confirm.
84 my $t = shift || return;
86 my ($ss,$mm,$hh) = gmtime($t);
87 $out .= "${hh}h" if ($hh);
88 $out .= sprintf("%02d:%02d", $mm,$ss);
# Resolve the database id of backup ($host, $num), memoised in $hsn_cache
# so repeated lookups for the same backup hit the DB only once.
# NOTE(review): the ($$) prototype only affects parsing, not validation.
94 sub get_backup_id($$) {
95 my ($host, $num) = @_;
97 my $key = "$host $num";
98 return $hsn_cache->{$key} if ($hsn_cache->{$key});
# SELECT text and the closing of qq{ } are elided from this excerpt; the
# visible joins resolve host name + backup number to backups.id.
100 my $sth = $dbh->prepare(qq{
104 INNER JOIN shares ON backups.shareID=shares.ID
105 INNER JOIN hosts ON backups.hostID = hosts.ID
106 WHERE hosts.name = ? and backups.num = ?
108 $sth->execute($host, $num);
109 my ($id) = $sth->fetchrow_array;
# Cache under the same "$host $num" key used for the lookup above.
111 $hsn_cache->{"$host $num"} = $id;
113 print STDERR "# $host $num == $id\n" if $debug;
# Mark backup $backup_id as having its incremental archive deleted
# (the UPDATE text inside qq{ } is elided from this excerpt).
118 sub backup_inc_deleted($) {
119 my $backup_id = shift;
120 my $sth_inc_deleted = $dbh->prepare(qq{
125 $sth_inc_deleted->execute($backup_id);
# system_ok: run an external command, logging it first, and die unless it
# exits 0 (the sub header is elided from this excerpt).
129 warn "## system_ok @_\n";
# NOTE(review): $! is not meaningful after a failed system() — the child's
# exit status lives in $? (see perlvar); the die message can mislead here.
130 system(@_) == 0 || die "system @_:$!";
# check_archive, first section (sub header elided): verify on-disk tar parts
# and their md5 manifest for backup $host.$num under $Conf{ArchiveDest}.
134 my ($host,$num) = @_;
# Collect part filenames relative to ArchiveDest (the "my @tar_parts ="
# opening of this statement is elided from this excerpt).
139 sort map { s/^\Q$Conf{ArchiveDest}\E\/*//; $_ }
140 glob "$Conf{ArchiveDest}/$host.$num.*"
143 return unless @tar_parts;
145 print curr_time, " check $host $num";
147 my $md5_path = "$Conf{ArchiveDest}/$host.$num.md5";
148 unlink $md5_path if -s $md5_path == 0; # fix empty
# (Re)create the manifest when missing, then verify it when $check is set.
# NOTE(review): $host/$num are interpolated into a shell command — safe only
# if host names are sanitised upstream; a list-form invocation would avoid
# the shell entirely. Confirm where host names originate.
150 if ( ! -e $md5_path ) {
151 system_ok "cd $Conf{ArchiveDest} && /usr/bin/md5sum $host.$num.* > $md5_path";
153 system_ok "cd $Conf{ArchiveDest} && /usr/bin/md5sum -c $md5_path" if $check;
# Load the manifest into $md5sum->{filename} = digest for later check_part calls.
157 foreach ( split(/\n/, read_file "$Conf{ArchiveDest}/$host.$num.md5" ) ) {
158 my ( $md5, $path ) = split(/\s+/,$_);
159 $md5sum->{$path} = $md5;
162 # depending on expected returned value this is used like:
163 # my $uncompress_size = get_gzip_size('/full/path/to.gz');
164 # my ($compress_size, $uncompress_size) = get_gzip_size('/path.gz');
165 sub get_gzip_size($) {
166 my $filename = shift;
# NOTE(review): $! may be stale here — a failed -r does not reliably set it.
167 die "file $filename problem: $!" unless (-r $filename);
# Non-.gz files are handled in the branch elided from this excerpt.
169 if ( $filename !~ m/\.gz$/ ) {
# Parse `gzip -l` output for compressed/uncompressed byte counts.
# NOTE(review): 2-arg pipe open with an interpolated path — a filename
# containing shell metacharacters would be interpreted by the shell; prefer
# open(my $gzip, '-|', $Conf{GzipPath}, '-l', $filename).
173 open(my $gzip, $Conf{GzipPath}." -l $filename |") || die "can't gzip -l $filename: $!";
# Skip the header row ("compressed  uncompressed  ratio ...") if present.
176 $line = <$gzip> if ($line =~ /^\s+compressed/);
178 my ($comp, $uncomp) = (0,0);
# Expect "<compressed> <uncompressed> <ratio>%" columns; die otherwise.
180 if ($line =~ m/^\s+(\d+)\s+(\d+)\s+\d+\.\d+/) {
187 die "can't find size in line: $line";
# check_part (sub header elided): reconcile one on-disk tar part with its
# backup_parts row — keep a matching row, otherwise delete and re-insert.
192 my ($host, $num, $part_nr, $tar_size, $size, $md5, $items) = @_;
193 my $backup_id = get_backup_id($host, $num);
# Fetch stored metadata for this (backup, part); the SELECT's opening line
# and the qq{ } closing are elided from this excerpt.
194 my $sth_md5 = $dbh->prepare(qq{
196 id, tar_size, size, md5, items
198 where backup_id = ? and part_nr = ?
201 $sth_md5->execute($backup_id, $part_nr);
203 if (my $row = $sth_md5->fetchrow_hashref) {
# A stored row is valid only when all four attributes agree (the enclosing
# condition/return lines are elided from this excerpt).
# NOTE(review): tar_size uses >= rather than == — presumably a deliberate
# tolerance for re-created parts; confirm against the part-writer code.
205 $row->{tar_size} >= $tar_size &&
206 $row->{size} == $size &&
207 $row->{md5} eq $md5 &&
208 $row->{items} == $items
# Stale row: remove it, then fall through to insert fresh metadata below.
# NOTE(review): $row->{id} comes from our own SELECT, but interpolating it
# into SQL is still fragile — a placeholder would be safer.
210 print ", deleting invalid backup_parts $row->{id}";
211 $dbh->do(qq{ delete from backup_parts where id = $row->{id} });
213 print ", inserting new";
214 my $sth_insert = $dbh->prepare(qq{
215 insert into backup_parts (
222 ) values (?,?,?,?,?,?)
225 $sth_insert->execute($backup_id, $part_nr, $tar_size, $size, $md5, $items);
# check_archive, part-scan section: walk every on-disk part, enforce size
# limits, count tar members, and cross-check metadata via check_part.
229 print " [parts: ",join(", ", @tar_parts),"]" if $debug;
236 print " reading" if $debug;
240 foreach my $filename (@tar_parts) {
# The md5 manifest itself is not a tar part — skip it.
242 next if $filename eq "$host.$num.md5";
244 print "\n\t- $filename";
246 my $path = "$Conf{ArchiveDest}/$filename";
# NOTE(review): a legitimately 0-byte part also triggers this die (0 is
# false), and $! is not set by a *successful* stat — message can mislead.
249 my $size = (stat( $path ))[7] || die "can't stat $path: $!";
251 if ($size > $Conf{MaxArchiveSize}) {
252 print ", part bigger than media $size > $Conf{MaxArchiveSize}\n";
256 print ", $size bytes";
# Stream-decompress the part and walk the tar stream, accumulating member
# names (for the later DB diff) and total payload bytes.
# NOTE(review): 2-arg pipe open with interpolated $path — prefer the list
# form open(my $fh, '-|', 'gzip', '-cd', $path) to bypass the shell.
260 open(my $fh, "gzip -cd $path |") or die "can't open $path: $!";
262 my $tar = Archive::Tar::Streamed->new($fh);
264 my $tar_size_inarc = 0;
267 while(my $entry = $tar->next) {
268 push @tar_files, $entry->name;
270 $tar_size_inarc += $entry->size;
272 if ($tar_size_inarc > $Conf{MaxArchiveFileSize}) {
273 print ", part $filename is too big $tar_size_inarc > $Conf{MaxArchiveFileSize}\n";
281 print ", $items items";
# An empty tar means the incremental dump produced nothing — mark it deleted.
283 if ($tar_size_inarc == 0 && $items == 0) {
284 print ", EMPTY tar\n";
# NOTE(review): get_backup_id takes ($host, $num) — passing $share as a
# middle argument looks like a bug (and breaks the ($$) prototype); confirm
# and drop $share if so.
286 my $backup_id = get_backup_id($host, $share, $num);
287 backup_inc_deleted( $backup_id );
297 my $tar_size = get_gzip_size( $path );
300 # finally, check if backup_parts table in database is valid
303 my $md5 = $md5sum->{$filename} || die "no md5sum for $filename in ",dump($md5sum);
306 check_part($host, $num, $part_nr, $tar_size, $size, $md5, $items);
311 # short-cut and exit;
312 return $same unless($same);
# check_archive, comparison section: diff the tar member list against the
# file paths recorded in the database for this backup.
314 @tar_files = sort @tar_files;
315 print "\n\t",($#tar_files + 1), " tar files";
# SELECT text and qq{ } closing are elided; the visible joins select file
# paths for this host + backup number.
317 my $sth = $dbh->prepare(qq{
320 JOIN shares on shares.id = shareid
321 JOIN hosts on hosts.id = shares.hostid
322 WHERE hosts.name = ? and backupnum = ?
324 $sth->execute($host, $num);
326 while( my $row = $sth->fetchrow_hashref ) {
328 my $path = $row->{'path'} || die "no path?";
# Directory entries get a trailing slash to match how tar names them.
330 $path .= '/' if ($row->{'type'} == BPC_FTYPE_DIR);
331 push @db_files, $path;
334 print " ",($#db_files + 1), " database files, diff";
336 @db_files = sort @db_files;
338 if ($#tar_files != $#db_files) {
# Print a diff of the two sorted lists for operator inspection:
# "<" = only in tar, ">" = only in database.
342 my $diff = Algorithm::Diff->new(\@tar_files, \@db_files);
343 while ( $diff->Next() ) {
344 next if $diff->Same();
346 print "< $_\n" for $diff->Items(1);
347 print "> $_\n" for $diff->Items(2);
351 print " ",($same ? 'ok' : 'DIFFERENT'),
352 ", dur: ",fmt_time(time() - $t), "\n";
# Main pass 1: verify each (host, backup-number) pair supplied via the
# parallel HostList=/BackupList= arguments.
# NOTE(review): assumes BackupList has at least as many entries as HostList —
# a short BackupList yields undef $num; confirm the caller guarantees parity.
360 foreach ( 0 .. $#{ $args->{HostList} } ) {
362 my $host = $args->{'HostList'}->[$_];
363 my $num = $args->{'BackupList'}->[$_];
365 check_archive $host => $num;
# Main pass 2: find backups whose archive is missing or stale and (re)create
# it. SELECT column list and qq{ } closing are elided from this excerpt; the
# full outer join also surfaces backups with no backup_parts rows at all.
371 my $sth = $dbh->prepare( qq{
374 backups.id as backup_id,
376 shares.name as share,
381 count(backup_parts.backup_id) as backup_parts
383 join shares on backups.hostid = shares.hostid
384 and shares.id = backups.shareid
385 join hosts on shares.hostid = hosts.id
386 full outer join backup_parts on backups.id = backup_parts.backup_id
387 where not inc_deleted and backups.size > 0
388 group by backups.id, hosts.name, shares.name, backups.num, backups.date, inc_size, parts, backup_parts.backup_id
389 order by backups.date
# NOTE(review): DBI documents $sth->rows as unreliable for SELECTs before all
# rows are fetched — this count is only advisory unless the driver guarantees it.
394 my $num_backups = $sth->rows;
397 while (my $row = $sth->fetchrow_hashref) {
401 my $tar_file = BackupPC::Search::getGzipName($row->{'host'}, $row->{'share'}, $row->{'num'});
403 # this will return -1 if file doesn't exist
404 my $size = BackupPC::Search::get_tgz_size_by_name($tar_file);
406 print "# host: ".$row->{host}.", share: ".$row->{'share'}.", backup_num:".$row->{num}." size: $size backup.size: ", $row->{inc_size},"\n" if $debug;
# Skip (after a verification pass) backups whose on-disk archive already
# covers the recorded incremental size and has all parts present.
# NOTE(review): tar_check is not defined in this excerpt — possibly an older
# name for check_archive; confirm it exists.
408 if ( $row->{'inc_size'} != -1 && $size != -1 && $row->{'inc_size'} >= $size && $row->{parts} == $row->{backup_parts}) {
410 tar_check($row->{'host'}, $row->{'share'}, $row->{'num'}, $tar_file) && next;
416 print curr_time, " creating $curr_backup/$num_backups ", $row->{host}, ":", $row->{share}, " #", $row->{num},
417 " ", strftime('%Y-%m-%d', localtime($row->{date})), " -> $tar_file";
# Build and run the external incremental-tar creator.
# NOTE(review): host/share are shell-interpolated; the "" quoting does not
# protect against embedded double quotes — prefer the system() list form.
423 my $cmd = qq[ $tarIncCreate -h "$row->{host}" -s "$row->{share}" -n $row->{num} -f ];
424 print STDERR "## $cmd\n" if ($debug);
# Non-zero exit: give up on this backup and mark it deleted in the DB.
426 if (system($cmd) != 0) {
427 print STDERR " FAILED, marking this backup deleted";
428 backup_inc_deleted( $row->{backup_id} );
432 print ", dur: ",fmt_time(time() - $t), "\n";