read protect tar dumps after md5sum calculation
#!/usr/local/bin/perl -w

use strict;
use lib "/usr/local/BackupPC/lib";

use DBI;
use BackupPC::Lib;
use BackupPC::View;
use BackupPC::Attrib qw/:all/;
use Data::Dump qw(dump);
use Time::HiRes qw/time/;
use POSIX qw/strftime/;
use Cwd qw/abs_path/;
use Archive::Tar::Streamed;
use Algorithm::Diff;
use File::Slurp;
use Getopt::Long::Descriptive;

=head1 NAME

BackupPC_ASA_PostArchive_Update

=head1 DESCRIPTION

Update the BackupPC search database after an archive run: for each --host/--num
pair, locate the tar parts written to $Conf{ArchiveDest}, create or verify
their md5 sums, and refresh the corresponding backup_parts and backups rows.

Configuration is read from:

	# /etc/BackupPC/pc/_search_archive.pl

=cut

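# A minimal sketch of the configuration values this script expects to find in
# _search_archive.pl; the names come from the code below, the values are only
# illustrative assumptions for a typical site:
#
#	$Conf{SearchDSN}        = 'dbi:Pg:dbname=backuppc';  # assumed DSN
#	$Conf{SearchUser}       = 'backuppc';                # assumed DB user
#	$Conf{ArchiveDest}      = '/data/BackupPC/archive';  # where tar parts land
#	$Conf{ArchiveMediaSize} = 4_600_000_000;             # max bytes per part
#	$Conf{GzipPath}         = '/bin/gzip';               # used for gzip -l
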
my $bpc = BackupPC::Lib->new || die "can't create BackupPC::Lib";
$bpc->ConfigRead('_search_archive'); # read our configuration
my %Conf = $bpc->Conf();

use BackupPC::Search;
%BackupPC::Search::Conf = %Conf;

my ($opt,$usage) = describe_options(
	"%c %o",
	[ 'host|h=s@',  "import just host(s)" ],
	[ 'num|n=s@',   "import just backup number(s)" ],
	[ 'check|c',    "check archives on disk and sync", { default => 1 } ],
	[ 'debug|d',    "debug", { default => 1 } ],
	[ 'help',       "show help" ],
);

print($usage->text), exit if $opt->help;

$|=1;

my $start_t = time();

my $t_fmt = '%Y-%m-%d %H:%M:%S';

#warn "## Conf = ",dump( \%Conf );

my $dbh = DBI->connect($Conf{SearchDSN}, $Conf{SearchUser}, "", { RaiseError => 1, AutoCommit => 0 });

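# AutoCommit is off: changes made below become visible to other clients only
# at the explicit $dbh->commit calls after each batch of updates.
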
#---- subs ----


sub curr_time {
	return strftime($t_fmt,localtime());
}

sub fmt_time {
	my $t = shift || return;
	my $out = "";
	my ($ss,$mm,$hh) = gmtime($t);
	$out .= "${hh}h" if ($hh);
	$out .= sprintf("%02d:%02d", $mm,$ss);
	return $out;
}

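# cache of "host num" -> backups.id, filled lazily by get_backup_id() below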
my $hsn_cache;

sub get_backup_id($$) {
	my ($host, $num) = @_;

	my $key = "$host $num";
	return $hsn_cache->{$key} if ($hsn_cache->{$key});

	my $sth = $dbh->prepare(qq{
		SELECT
			backups.id
		FROM backups
		INNER JOIN shares	ON backups.shareID=shares.ID
		INNER JOIN hosts	ON backups.hostID = hosts.ID
		WHERE hosts.name = ? and backups.num = ?
	});
	$sth->execute($host, $num);
	my ($id) = $sth->fetchrow_array;

	$hsn_cache->{$key} = $id;

	print STDERR "# $host $num == $id\n" if $opt->debug;

	return $id;
}

sub backup_inc_deleted($) {
	my $backup_id = shift;
	my $sth_inc_deleted = $dbh->prepare(qq{
		update backups set
			inc_deleted = true
		where id = ?
	});
	$sth_inc_deleted->execute($backup_id);
}

sub system_ok {
	warn "## system_ok @_\n";
	system(@_) == 0 or die "system @_ failed: exit status $?";
}

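# after all parts of one backup are verified, record its total (rounded) size
# and number of parts and clear the inc_deleted flag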
my $sth_inc_size = $dbh->prepare(qq{
	update backups set
		inc_size = ?,
		parts = ?,
		inc_deleted = false
	where id = ?
});

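# check_archive: for one host and backup number, find the archive parts on
# disk, create or verify the md5 sum file, sync the backup_parts table with
# what is actually on disk, and finally compare the archived file list with
# the files table in the search database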
sub check_archive {
	my ($host,$num) = @_;
	warn "# check_archive $host $num";

	my $t = time();

	my $glob = "$Conf{ArchiveDest}/$host.$num.*";

	my @tar_parts = sort map { s/^\Q$Conf{ArchiveDest}\E\/*//; $_ } glob $glob;

	if ( ! @tar_parts ) {
		warn "ERROR: no files for $glob";
		return;
	}

	print curr_time, " check $host $num";

	my $md5_path = "$Conf{ArchiveDest}/$host.$num.md5";
	unlink $md5_path if -e $md5_path && ! -s $md5_path; # remove empty md5 file so it gets regenerated

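	# a missing md5 file means these parts were just archived; after summing
	# them, strip write bits from each part (chmod to at most 0444) so that
	# finished dumps are protected from accidental modification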
	my $read_protect = 0;

	if ( ! -e $md5_path ) {
		system_ok "cd $Conf{ArchiveDest} && /usr/bin/md5sum $host.$num.* > $md5_path";
		$read_protect = 1;
	} else {
		system_ok "cd $Conf{ArchiveDest} && /usr/bin/md5sum -c $md5_path" if $opt->check;
	}

	my $md5sum;
	foreach ( split(/\n/, read_file $md5_path ) ) {
		my ( $md5, $path ) = split(/\s+/,$_);
		$md5sum->{$path} = $md5;
		if ( $read_protect ) {
			my $full = "$Conf{ArchiveDest}/$path";
			my $perm = (stat $full)[2] & 0444;
			warn sprintf("chmod %03o %s\n",$perm,$full);
			chmod $perm, $full;
		}
	}

	# depending on the calling context this is used like:
	# my $uncompress_size = get_gzip_size('/full/path/to.gz');
	# my ($compress_size, $uncompress_size) = get_gzip_size('/path.gz');
	sub get_gzip_size($) {
		my $filename = shift;
		die "file $filename problem: $!" unless (-r $filename);

		if ( $filename !~ m/\.gz$/ ) {
			return -s $filename;
		}

		open(my $gzip, $Conf{GzipPath}." -l $filename |") || die "can't gzip -l $filename: $!";
		local $/ = undef;
		my $line = <$gzip>;
		close($gzip);

		my ($comp, $uncomp) = (0,0);

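		# typical `gzip -l` output parsed by the regex below (header line,
		# then one data line):
		#          compressed        uncompressed  ratio uncompressed_name
		#             1234567             7654321  83.9% host.0.tar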
		if ($line =~ m/\s+(\d+)\s+(\d+)\s+\d+\.\d+/s) {
			if (wantarray) {
				return ( $1, $2 );	# list context: (compressed, uncompressed)
			} else {
				return $2;		# scalar context: uncompressed size
			}
		} else {
			warn "ERROR can't parse: $line";
			return -s $filename;
		}
	}

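	# check_part: keep backup_parts in sync with what is on disk; if a row for
	# this (backup_id, part_nr, filename) already matches the observed sizes,
	# md5 and item count it is left alone, otherwise it is deleted and re-inserted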
	sub check_part {
		my ($host, $num, $part_nr, $tar_size, $size, $md5, $items, $filename) = @_;
		my $backup_id = get_backup_id($host, $num);
		my $sth_md5 = $dbh->prepare(qq{
			select
				id, tar_size, size, md5, items, filename
			from backup_parts
			where backup_id = ? and part_nr = ? and filename = ?
		});

		$sth_md5->execute($backup_id, $part_nr, $filename);

		if (my $row = $sth_md5->fetchrow_hashref) {
			return if (
				$row->{tar_size} >= $tar_size &&
				$row->{size} == $size &&
				$row->{md5} eq $md5 &&
				$row->{items} == $items
			);
			print ", deleting invalid backup_parts $row->{id}";
			$dbh->do(qq{ delete from backup_parts where id = $row->{id} });
		}
		print ", inserting new";
		my $sth_insert = $dbh->prepare(qq{
			insert into backup_parts (
				backup_id,
				part_nr,
				tar_size,
				size,
				md5,
				items,
				filename
			) values (?,?,?,?,?,?,?)
		});

		$sth_insert->execute($backup_id, $part_nr, $tar_size, $size, $md5, $items, $filename);
		$dbh->commit;
	}

	print " [parts: ",join(", ", @tar_parts),"]" if $opt->debug;

	my @tar_files;

	my $backup_part;

	print " reading" if $opt->debug;

	my $part_nr = 0;
	my $inc_size = 0;

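	# for every tar part on disk: enforce ArchiveMediaSize, get its size
	# (uncompressed for .gz parts), sync backup_parts, and add the size
	# rounded up to 2k blocks into this backup's inc_size total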
	foreach my $filename (@tar_parts) {

		next if $filename eq "$host.$num.md5";

		print "\n\t- $filename";

		my $path = "$Conf{ArchiveDest}/$filename";
		$path =~ s{//+}{/}g;

		my $size = (stat( $path ))[7] || die "can't stat $path: $!";

		if ($size > $Conf{ArchiveMediaSize}) {
			print ", part bigger than media $size > $Conf{ArchiveMediaSize}\n";
			return 0;
		}

		print ", $size bytes";

=for later

		open(my $fh, "gzip -cd $path |") or die "can't open $path: $!";
		binmode($fh);
		my $tar = Archive::Tar::Streamed->new($fh);

		my $tar_size_inarc = 0;
		my $items = 0;

		while(my $entry = $tar->next) {
			push @tar_files, $entry->name;
			$items++;
			$tar_size_inarc += $entry->size;

			if ($tar_size_inarc > $Conf{ArchiveMediaSize}) {
				print ", part $filename is too big $tar_size_inarc > $Conf{ArchiveMediaSize}\n";
				return 0;
			}

		}

		close($fh);

		print ", $items items";

		if ($tar_size_inarc == 0 && $items == 0) {
			print ", EMPTY tar\n";

			my $backup_id = get_backup_id($host, $num);
			backup_inc_deleted( $backup_id );

			$dbh->commit;

			return 1;
		}

=cut

		# FIXME
		my $tar_size = get_gzip_size( $path );

		#
		# finally, check if backup_parts table in database is valid
		#

		my $md5 = $md5sum->{$filename} || die "no md5sum for $filename in ",dump($md5sum);
		my $items = 1;
		$part_nr++;

		check_part($host, $num, $part_nr, $tar_size, $size, $md5, $items, $filename);

		# round increment size up to the next 2k block
		$inc_size += int( ($size + 2047) / 2048 ) * 2048;
	}

	$sth_inc_size->execute(
		$inc_size,
		$part_nr,
		get_backup_id($host, $num),
	);
	$dbh->commit;

	@tar_files = sort @tar_files;
	print "\n\t",($#tar_files + 1), " tar files";

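	# cross-check against the search database: the files table lists every
	# path that should be in this backup; note that @tar_files is only
	# populated by the streamed-tar block above (=for later), so unless that
	# block is enabled the comparison runs against an empty list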
	my $sth = $dbh->prepare(qq{
		SELECT path,type
		FROM files
		JOIN shares on shares.id = shareid
		JOIN hosts on hosts.id = shares.hostid
		WHERE hosts.name = ? and backupnum = ?
	});
	$sth->execute($host, $num);
	my @db_files;
	while( my $row = $sth->fetchrow_hashref ) {

		my $path = $row->{'path'} || die "no path?";
		$path =~ s#^/#./#;
		$path .= '/' if ($row->{'type'} == BPC_FTYPE_DIR);
		push @db_files, $path;
	}

	print " ",($#db_files + 1), " database files, diff";

	@db_files = sort @db_files;

	my $same = 1;

	if ($#tar_files != $#db_files) {
		$same = 0;
		print " NUMBER";
	} else {
		my $diff = Algorithm::Diff->new(\@tar_files, \@db_files);
		while ( $diff->Next() ) {
			next if $diff->Same();
			$same = 0;
			print "< $_\n" for $diff->Items(1);
			print "> $_\n" for $diff->Items(2);
		}
	}

	print " ",($same ? 'ok' : 'DIFFERENT'),
		", dur: ",fmt_time(time() - $t), "\n";

	return $same;
}

#----- main

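# --host and --num are repeatable options that are paired up positionally,
# so an invocation looks like (hosts and numbers here are only illustrative):
#   BackupPC_ASA_PostArchive_Update --host foo --num 3 --host bar --num 7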
foreach ( 0 .. $#{ $opt->host || [] } ) {

	my $host = $opt->host->[$_];
	my $num  = $opt->num->[$_];

	check_archive $host => $num;

}

exit; # FIXME

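# NOTE: everything below the exit above is unreachable legacy code, kept for
# reference; it walked all backups recorded in the database and, in the
# disabled =for later block, re-created missing archives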
my $sth = $dbh->prepare( qq{

select
	backups.id as backup_id,
	hosts.name as host,
	shares.name as share,
	backups.num as num,
	backups.date,
	inc_size,
	parts,
	count(backup_parts.backup_id) as backup_parts
from backups
	join shares on backups.hostid = shares.hostid
		and shares.id = backups.shareid
	join hosts on shares.hostid = hosts.id
	full outer join backup_parts on backups.id = backup_parts.backup_id
where not inc_deleted and backups.size > 0
group by backups.id, hosts.name, shares.name, backups.num, backups.date, inc_size, parts, backup_parts.backup_id
order by backups.date

} );

$sth->execute();
my $num_backups = $sth->rows;
my $curr_backup = 1;

while (my $row = $sth->fetchrow_hashref) {

	$curr_backup++;

	my $tar_file = BackupPC::Search::getGzipName($row->{'host'}, $row->{'share'}, $row->{'num'});

	# this will return -1 if file doesn't exist
	my $size = BackupPC::Search::get_tgz_size_by_name($tar_file);

	print "# host: ".$row->{host}.", share: ".$row->{'share'}.", backup_num:".$row->{num}." size: $size backup.size: ", $row->{inc_size},"\n" if $opt->debug;

	if ( $row->{'inc_size'} != -1 && $size != -1 && $row->{'inc_size'} >= $size && $row->{parts} == $row->{backup_parts}) {
		if ($opt->check) {
			tar_check($row->{'host'}, $row->{'share'}, $row->{'num'}, $tar_file) && next;
		} else {
			next;
		}
	}

	print curr_time, " creating $curr_backup/$num_backups ", $row->{host}, ":", $row->{share}, " #", $row->{num},
		" ", strftime('%Y-%m-%d', localtime($row->{date})), " -> $tar_file";

	my $t = time();

=for later
	# re-create archive?
	my $cmd = qq[ $tarIncCreate -h "$row->{host}" -s "$row->{share}" -n $row->{num} -f ];
	print STDERR "## $cmd\n" if ($opt->debug);

	if (system($cmd) != 0) {
		print STDERR " FAILED, marking this backup deleted";
		backup_inc_deleted( $row->{backup_id} );
	}
=cut

	print ", dur: ",fmt_time(time() - $t), "\n";

	$dbh->commit;

}

undef $sth;
$dbh->disconnect;