#!/usr/local/bin/perl -w
use lib "/usr/local/BackupPC/lib";

use DBI;
use BackupPC::Lib;
use BackupPC::Search;
use BackupPC::Attrib qw/:all/;
use Data::Dump qw(dump);
use Time::HiRes qw/time/;
use POSIX qw/strftime/;
use Cwd qw/abs_path/;
use File::Slurp;
use Algorithm::Diff;
use Archive::Tar::Streamed;
# BackupPC_ASA_PostArchive_Update
#
# run after an archive job to update the search database with the created
# archive parts; typically invoked from an archive command script such as
# /etc/BackupPC/pc/dvd_tar.pl
warn "## ARGV=",dump @ARGV;

my $args;
my $name;
foreach ( @ARGV ) {
    my $v = $_;
    if ( m/(\w+)=(.+)/ ) {
        ( $name, $v ) = ( $1, $2 );
    }
    if ( $name =~ m/List/ ) {
        push @{ $args->{$name} }, $v;
    } else {
        $args->{$name} = $v;
    }
}

warn "args = ",dump($args);
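# e.g. "HostList=localhost BackupList=3" produces
# $args = { HostList => [ 'localhost' ], BackupList => [ '3' ] }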
my $debug = $ENV{DEBUG} || 1;

my $bpc = BackupPC::Lib->new || die "can't create BackupPC::Lib";
my %Conf = $bpc->Conf();
%BackupPC::Search::Conf = %Conf;

my $path = abs_path($0);
$path =~ s{/[^/]+$}{/}; # FIXME remove?

my $t_fmt = '%Y-%m-%d %H:%M:%S';

my $dsn  = $Conf{SearchDSN}  || die "Need SearchDSN in config.pl\n";
my $user = $Conf{SearchUser} || '';

my $dbh = DBI->connect($dsn, $user, "", { RaiseError => 1, AutoCommit => 0 });
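# the code below also relies on $tar_dir (directory holding the archive tars),
# $bin (hash with paths to external binaries such as gzip and md5sum) and
# $tarIncCreate (command which creates the archive tar for one backup),
# all expected to be set up from %Conf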
sub curr_time {
    return strftime($t_fmt,localtime());
}

sub fmt_time($) {
    my $t = shift || return;
    my $out = "";
    my ($ss,$mm,$hh) = gmtime($t);
    $out .= "${hh}h" if ($hh);
    $out .= sprintf("%02d:%02d", $mm,$ss);
    return $out;
}
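# map one (host, share, backup number) triplet to its backups.id primary key,
# caching results in $hsn_cache so repeated lookups skip the database
my $hsn_cache;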
sub get_backup_id($$$) {
    my ($host, $share, $num) = @_;

    my $key = "$host $share $num";
    return $hsn_cache->{$key} if ($hsn_cache->{$key});

    my $sth = $dbh->prepare(qq{
        SELECT backups.id
        FROM backups
        INNER JOIN shares ON backups.shareID=shares.ID
        INNER JOIN hosts ON backups.hostID = hosts.ID
        WHERE hosts.name = ? and shares.name = ? and backups.num = ?
    });
    $sth->execute($host, $share, $num);
    my ($id) = $sth->fetchrow_array;

    $hsn_cache->{$key} = $id;   # reuse the key computed above

    print STDERR "# $host $share $num == $id\n" if ($debug);

    return $id;
}
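# mark a backup's increment as deleted so the main loop below skips it
# (used when archive creation fails or the resulting tar is empty)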
sub backup_inc_deleted($) {
    my $backup_id = shift;
    my $sth_inc_deleted = $dbh->prepare(qq{
        update backups set inc_deleted = true where id = ?
    });
    $sth_inc_deleted->execute($backup_id);
}
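# verify an archived backup: per-part size, md5 and item checks, refresh the
# backup_parts table and compare the tar contents against the files table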
sub tar_check($$$$) {
    my ($host,$share,$num,$filename) = @_;

    my $t = time();
    print curr_time, " check $host:$share#$num -> $filename";
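    # `gzip -l` prints a header line followed by the sizes, roughly:
    #          compressed        uncompressed  ratio uncompressed_name
    #                1234               56789  97.8% foo.tar
    # (the sample numbers are made up); get_gzip_size() parses the data line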
    # depending on expected returned value this is used like:
    # my $uncompress_size = get_gzip_size('/full/path/to.gz');
    # my ($compress_size, $uncompress_size) = get_gzip_size('/path.gz');
    sub get_gzip_size($) {
        my $filename = shift;
        die "file $filename problem: $!" unless (-r $filename);

        open(my $gzip, $bin->{gzip}." -l $filename |") || die "can't gzip -l $filename: $!";
        my $line = <$gzip>;
        $line = <$gzip> if ($line =~ /^\s+compressed/);   # skip the gzip -l header line
        chomp $line;
        close $gzip;

        my ($comp, $uncomp) = (0,0);

        if ($line =~ m/^\s+(\d+)\s+(\d+)\s+\d+\.\d+/) {
            ($comp, $uncomp) = ($1, $2);
            return wantarray ? ($comp, $uncomp) : $uncomp;
        } else {
            die "can't find size in line: $line";
        }
    }
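    # make sure the backup_parts row for (backup_id, part_nr) matches what is
    # on disk: a matching row is kept, a stale one is deleted and re-inserted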
    sub check_part {
        my ($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items) = @_;
        my $backup_id = get_backup_id($host, $share, $num);

        my $sth_md5 = $dbh->prepare(qq{
            select
                id, tar_size, size, md5, items
            from backup_parts
            where backup_id = ? and part_nr = ?
        });
        $sth_md5->execute($backup_id, $part_nr);

        if (my $row = $sth_md5->fetchrow_hashref) {
            # an existing row which already matches the part on disk is kept as-is
            return if (
                $row->{tar_size} >= $tar_size &&
                $row->{size} == $size &&
                $row->{md5} eq $md5 &&
                $row->{items} == $items
            );
            print ", deleting invalid backup_parts $row->{id}";
            $dbh->do(qq{ delete from backup_parts where id = $row->{id} });
        }

        print ", inserting new";
        my $sth_insert = $dbh->prepare(qq{
            insert into backup_parts (
                backup_id, part_nr, tar_size, size, md5, items
            ) values (?,?,?,?,?,?)
        });

        $sth_insert->execute($backup_id, $part_nr, $tar_size, $size, $md5, $items);
        $dbh->commit;
    }
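    # a backup is archived either as a single <name>.tar.gz or, when it had to
    # be split to fit $Conf{MaxArchiveSize}, as a <name>/ directory of parts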
    my @tar_parts;

    if (-d "$tar_dir/$filename") {
        print ", multi-part";
        opendir(my $dir, "$tar_dir/$filename") || die "can't readdir $tar_dir/$filename: $!";
        @tar_parts = map { my $p = $_; $p =~ s#^#${filename}/#; $p } grep { !/^\./ && !/md5/ && -f "$tar_dir/$filename/$_" } readdir($dir);
        closedir($dir);
    } else {
        push @tar_parts, "${filename}.tar.gz";
    }

    print " [parts: ",join(", ", @tar_parts),"]" if ($debug);
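    # read every part with Archive::Tar::Streamed, collecting member names into
    # @tar_files along with the number of items and the total size of members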
    my @tar_files;
    my $same = 1;

    print " reading" if ($debug);

    foreach my $tarfilename (@tar_parts) {

        print "\n\t- $tarfilename";
        my $path = "$tar_dir/$tarfilename";
        my $size = (stat( $path ))[7] || die "can't stat $path: $!";

        if ($size > $Conf{MaxArchiveSize}) {
            print ", part bigger than media $size > $Conf{MaxArchiveSize}\n";
            return 0;
        }

        print ", $size bytes";

        open(my $fh, "gzip -cd $path |") or die "can't open $path: $!";
        my $tar = Archive::Tar::Streamed->new($fh);

        my $tar_size_inarc = 0;
        my $items = 0;

        while(my $entry = $tar->next) {
            push @tar_files, $entry->name;
            $items++;
            $tar_size_inarc += $entry->size;

            if ($tar_size_inarc > $Conf{MaxArchiveFileSize}) {
                print ", part $tarfilename is too big $tar_size_inarc > $Conf{MaxArchiveFileSize}\n";
                return 0;
            }
        }
        close($fh);

        print ", $items items";

        if ($tar_size_inarc == 0 && $items == 0) {
            print ", EMPTY tar\n";

            my $backup_id = get_backup_id($host, $share, $num);
            backup_inc_deleted( $backup_id );

            $dbh->commit;
            return 1;
        }
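        # compare the total size of members against the uncompressed size
        # reported in the gzip trailer (via get_gzip_size)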
        my $tar_size = get_gzip_size( $path );

        # real tar size is bigger because of padding
        if ($tar_size_inarc > $tar_size) {
            print ", size of files in tar ($tar_size_inarc) bigger than whole tar ($tar_size)!\n";
            return 0;
        }
        # check if md5 exists, and if not, create one
        my $md5_path = $path;
        $md5_path =~ s/\.tar\.gz$/.md5/ || die "can't create md5 filename from $md5_path";
        if (! -e $md5_path || -z $md5_path) {
            print ", creating md5";
            system( $bin->{md5sum} . " $path > $md5_path") == 0 or die "can't create md5 $path: $!";
        } else {
            ## FIXME check if existing md5 is valid
        }

        my $md5 = read_file( $md5_path ) || die "can't read md5sum file $md5_path: $!";
        # extract part number from filename
        my $part_nr = 1;
        $part_nr = $1 if ($tarfilename =~ m#/(\d+)\.tar\.gz#);

        # finally, check if backup_parts table in database is valid
        check_part($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items);
    }
    # short-cut and exit;
    return $same unless($same);

    @tar_files = sort @tar_files;
    print "\n\t",($#tar_files + 1), " tar files";
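    # fetch the file names recorded in the search database for this backup
    # so they can be compared against what is actually in the tar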
    my $sth = $dbh->prepare(qq{
        SELECT path, type
        FROM files
        JOIN shares on shares.id = shareid
        JOIN hosts on hosts.id = shares.hostid
        WHERE hosts.name = ? and shares.name = ? and backupnum = ?
    });
    $sth->execute($host, $share, $num);

    my @db_files;
    while( my $row = $sth->fetchrow_hashref ) {
        my $path = $row->{'path'} || die "no path?";
        $path .= '/' if ($row->{'type'} == BPC_FTYPE_DIR);
        push @db_files, $path;
    }

    print " ",($#db_files + 1), " database files, diff";

    @db_files = sort @db_files;
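    # diff the two sorted name lists; any hunk which is not identical marks
    # this backup as DIFFERENT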
    if ($#tar_files != $#db_files) {
        $same = 0;   # number of files in tar and in database differ
    }

    my $diff = Algorithm::Diff->new(\@tar_files, \@db_files);
    while ( $diff->Next() ) {
        next if $diff->Same();
        $same = 0;
        print "< $_\n" for $diff->Items(1);   # only in the tar
        print "> $_\n" for $diff->Items(2);   # only in the database
    }

    print " ",($same ? 'ok' : 'DIFFERENT'),
        ", dur: ",fmt_time(time() - $t), "\n";

    return $same;
}
my $sth = $dbh->prepare( qq{
    select
        backups.id as backup_id,
        hosts.name as host,
        shares.name as share,
        backups.num as num,
        backups.date,
        inc_size,
        parts,
        count(backup_parts.backup_id) as backup_parts
    from backups
    join shares on backups.hostid = shares.hostid
        and shares.id = backups.shareid
    join hosts on shares.hostid = hosts.id
    full outer join backup_parts on backups.id = backup_parts.backup_id
    where not inc_deleted and backups.size > 0
    group by backups.id, hosts.name, shares.name, backups.num, backups.date, inc_size, parts, backup_parts.backup_id
    order by backups.date
});

$sth->execute();
my $num_backups = $sth->rows;
my $curr_backup = 0;
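# walk the candidate backups; the HostList and BackupList arguments (parallel
# arrays) select which backups belong to the archive run that just finished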
while (my $row = $sth->fetchrow_hashref) {

    my $found = 0;
    foreach ( 0 .. $#{ $args->{HostList} } ) {
        if ( $args->{'HostList'}->[$_] eq $row->{host}
            && $args->{'BackupList'}->[$_] eq $row->{num} ) {
            $found = 1;
            last;
        }
    }
    if ( ! $found ) {
        warn "skipped ",dump($row);
        next;
    }
    $curr_backup++;

    my $tar_file = BackupPC::Search::getGzipName($row->{'host'}, $row->{'share'}, $row->{'num'});

    # this will return -1 if file doesn't exist
    my $size = BackupPC::Search::get_tgz_size_by_name($tar_file);

    print "# host: ".$row->{host}.", share: ".$row->{'share'}.", backup_num:".$row->{num}." size: $size backup.size: ", $row->{inc_size},"\n" if $debug;

    # if the archive already exists and covers the whole backup, just verify it
    if ( $row->{'inc_size'} != -1 && $size != -1 && $row->{'inc_size'} >= $size && $row->{parts} == $row->{backup_parts}) {
        tar_check($row->{'host'}, $row->{'share'}, $row->{'num'}, $tar_file) && next;
    }

    print curr_time, " creating $curr_backup/$num_backups ", $row->{host}, ":", $row->{share}, " #", $row->{num},
        " ", strftime('%Y-%m-%d', localtime($row->{date})), " -> $tar_file";
    my $t = time();

    my $cmd = qq[ $tarIncCreate -h "$row->{host}" -s "$row->{share}" -n $row->{num} -f ];
    print STDERR "## $cmd\n" if ($debug);

    if (system($cmd) != 0) {
        print STDERR " FAILED, marking this backup deleted";
        backup_inc_deleted( $row->{backup_id} );
    }

    print ", dur: ",fmt_time(time() - $t), "\n";

    $dbh->commit;
}