added dvd_nr to archive_backup_parts
[BackupPC.git] / bin / BackupPC_ASA_PostArchive_Update
1 #!/usr/local/bin/perl -w
2
3 use strict;
4 use lib "/usr/local/BackupPC/lib";
5
6 use DBI;
7 use BackupPC::Lib;
8 use BackupPC::View;
9 use BackupPC::Attrib qw/:all/;
10 use Data::Dump qw(dump);
11 use Time::HiRes qw/time/;
12 use POSIX qw/strftime/;
13 use Cwd qw/abs_path/;
14 use Archive::Tar::Streamed;
15 use Algorithm::Diff;
16 use File::Slurp;
17 use Getopt::Long::Descriptive;
18
19 =head1 NAME
20
21 BackupPC_ASA_PostArchive_Update
22
=head1 DESCRIPTION

Post-archive hook: verifies the archive parts written to the archive
destination against their md5 sums and updates the BackupPC search
database to match.  Reads its configuration from:

        # /etc/BackupPC/pc/_search_archive.pl

=cut
28
29
30 my $bpc = BackupPC::Lib->new || die "can't create BackupPC::Lib";
31 $bpc->ConfigRead('_search_archive'); # read our configuration
32 my %Conf = $bpc->Conf();
33
34 use BackupPC::Search;
35 %BackupPC::Search::Conf = %Conf;
36
37 my ($opt,$usage) = describe_options(
38 "%c %o",
39 [ 'host|h=s@',  "import just host(s)" ],
40 [ 'num|n=s@',   "import just backup number(s)" ],
41 [ 'check|c',    "check archives on disk and sync", { default => 1 } ],
42 [ 'debug|d',    "debug", { default => 1 } ],
43 [ 'help',       "show help" ],
44 );
45
46 print($usage->text), exit if $opt->help;
47
48 $|=1;
49
50 my $start_t = time();
51
52 my $t_fmt = '%Y-%m-%d %H:%M:%S';
53
54 #warn "## Conf = ",dump( \%Conf );
55
56 my $dbh = DBI->connect($Conf{SearchDSN}, $Conf{SearchUser}, "", { RaiseError => 1, AutoCommit => 0 });
57
58 #---- subs ----
59
60
61 sub curr_time {
62         return strftime($t_fmt,localtime());
63 }
64
65 sub fmt_time {
66         my $t = shift || return;
67         my $out = "";
68         my ($ss,$mm,$hh) = gmtime($t);
69         $out .= "${hh}h" if ($hh);
70         $out .= sprintf("%02d:%02d", $mm,$ss);
71         return $out;
72 }
73
74 my $hsn_cache;
75
76 sub get_backup_id($$) {
77         my ($host, $num) = @_;
78
79         my $key = "$host $num";
80         return $hsn_cache->{$key} if ($hsn_cache->{$key});
81
82         my $sth = $dbh->prepare(qq{
83                 SELECT 
84                         backups.id
85                 FROM backups 
86                 INNER JOIN shares       ON backups.shareID=shares.ID
87                 INNER JOIN hosts        ON backups.hostID = hosts.ID
88                 WHERE hosts.name = ? and backups.num = ?
89         });
90         $sth->execute($host, $num);
91         my ($id) = $sth->fetchrow_array;
92
93         $hsn_cache->{"$host $num"} = $id;
94
95         print STDERR "# $host $num == $id\n" if $opt->debug;
96
97         return $id;
98 }
99
100 sub backup_inc_deleted($) {
101         my $backup_id = shift;
102         my $sth_inc_deleted = $dbh->prepare(qq{
103                 update backups set
104                         inc_deleted = true
105                 where id = ?
106         });
107         $sth_inc_deleted->execute($backup_id);
108 }
109
110 sub system_ok {
111         warn "## system_ok @_\n";
112         system(@_) == 0 || die "system @_:$!";
113 }
114
115 my $sth_inc_size = $dbh->prepare(qq{
116         update backups set
117                 inc_size = ?,
118                 parts = ?,
119                 inc_deleted = false
120         where id = ?
121 });
122
123 sub check_archive {
124         my ($host,$num) = @_;
125         warn "# check_archive $host $num";
126
127         my $t = time();
128
129         my $glob = "$Conf{ArchiveDest}/$host.$num.*";
130
131         my @tar_parts = sort map { s/^\Q$Conf{ArchiveDest}\E\/*//; $_ } glob $glob ;
132
133         if ( ! @tar_parts ) {
134                 warn "ERROR: no files for $glob";
135                 return;
136         }
137
138         print curr_time, " check $host $num";
139
140         my $md5_path = "$Conf{ArchiveDest}/$host.$num.md5";
141         unlink $md5_path if -s $md5_path == 0; # fix empty
142
143         if ( ! -e $md5_path ) {
144                 system_ok "cd $Conf{ArchiveDest} && /usr/bin/md5sum $host.$num.* > $md5_path";
145         } else {
146                 system_ok "cd $Conf{ArchiveDest} && /usr/bin/md5sum -c $md5_path" if $opt->check;
147         }
148
149         my $md5sum;
150         foreach ( split(/\n/, read_file "$Conf{ArchiveDest}/$host.$num.md5" ) ) {
151                 my ( $md5, $path ) = split(/\s+/,$_);
152                 $md5sum->{$path} = $md5;
153         }
154
155         # depending on expected returned value this is used like:
156         # my $uncompress_size = get_gzip_size('/full/path/to.gz');
157         # my ($compress_size, $uncompress_size) = get_gzip_size('/path.gz');
158         sub get_gzip_size($) {
159                 my $filename = shift;
160                 die "file $filename problem: $!" unless (-r $filename);
161
162                 if ( $filename !~ m/\.gz$/ ) {
163                         return -s $filename;
164                 }
165
166                 open(my $gzip, $Conf{GzipPath}." -l $filename |") || die "can't gzip -l $filename: $!";
167                 local $/ = undef;
168                 my $line = <$gzip>;
169                 close($gzip);
170
171                 my ($comp, $uncomp) = (0,0);
172
173                 if ($line =~ m/\s+(\d+)\s+(\d+)\s+\d+\.\d+/s) {
174                         if (wantarray) {
175                                 return [ $1, $2 ];
176                         } else {
177                                 return $2;
178                         }
179                 } else {
180                         warn "ERROR can't parse: $line";
181                         return -s $filename;
182                 }
183         }
184
185         sub check_part {
186                 my ($host, $num, $part_nr, $tar_size, $size, $md5, $items, $filename) = @_;
187                 my $backup_id = get_backup_id($host, $num);
188                 my $sth_md5 = $dbh->prepare(qq{
189                         select
190                                 id, tar_size, size, md5, items, filename
191                         from backup_parts
192                         where backup_id = ? and part_nr = ? and filename = ?
193                 });
194
195                 $sth_md5->execute($backup_id, $part_nr, $filename);
196
197                 if (my $row = $sth_md5->fetchrow_hashref) {
198                         return if (
199                                 $row->{tar_size} >= $tar_size &&
200                                 $row->{size} == $size &&
201                                 $row->{md5} eq $md5 &&
202                                 $row->{items} == $items
203                         );
204                         print ", deleting invalid backup_parts $row->{id}";
205                         $dbh->do(qq{ delete from backup_parts where id = $row->{id} });
206                 }
207                 print ", inserting new";
208                 my $sth_insert = $dbh->prepare(qq{
209                         insert into backup_parts (
210                                 backup_id,
211                                 part_nr,
212                                 tar_size,
213                                 size,
214                                 md5,
215                                 items,
216                                 filename
217                         ) values (?,?,?,?,?,?,?)
218                 });
219
220                 $sth_insert->execute($backup_id, $part_nr, $tar_size, $size, $md5, $items, $filename);
221                 $dbh->commit;
222         }
223
224         print " [parts: ",join(", ", @tar_parts),"]" if $opt->debug;
225
226         my @tar_files;
227
228         my $backup_part;
229
230         print " reading" if $opt->debug;
231
232         my $part_nr = 0;
233         my $inc_size = 0;
234
235         foreach my $filename (@tar_parts) {
236
237                 next if $filename eq "$host.$num.md5";
238
239                 print "\n\t- $filename";
240
241                 my $path = "$Conf{ArchiveDest}/$filename";
242                 $path =~ s{//+}{/}g;
243
244                 my $size = (stat( $path ))[7] || die "can't stat $path: $!";
245
246                 if ($size > $Conf{ArchiveMediaSize}) {
247                         print ", part bigger than media $size > $Conf{ArchiveMediaSize}\n";
248                         return 0;
249                 }
250
251                 print ", $size bytes";
252
253 =for later
254
255                 open(my $fh, "gzip -cd $path |") or die "can't open $path: $!";
256                 binmode($fh);
257                 my $tar = Archive::Tar::Streamed->new($fh);
258
259                 my $tar_size_inarc = 0;
260                 my $items = 0;
261
262                 while(my $entry = $tar->next) {
263                         push @tar_files, $entry->name;
264                         $items++;
265                         $tar_size_inarc += $entry->size;
266
267                         if ($tar_size_inarc > $Conf{ArchiveChunkSize}) {
268                                 print ", part $filename is too big $tar_size_inarc > $Conf{ArchiveChunkSize}\n";
269                                 return 0;
270                         }
271
272                 }
273
274                 close($fh);
275
276                 print ", $items items";
277
278                 if ($tar_size_inarc == 0 && $items == 0) {
279                         print ", EMPTY tar\n";
280
281                         my $backup_id = get_backup_id($host, $share, $num);
282                         backup_inc_deleted( $backup_id );
283
284                         $dbh->commit;
285
286                         return 1;
287                 }
288
289 =cut
290
291                 # FIXME
292                 my $tar_size = get_gzip_size( $path );
293
294                 #
295                 # finally, check if backup_parts table in database is valid
296                 #
297
298                 my $md5 = $md5sum->{$filename} || die "no md5sum for $filename in ",dump($md5sum);
299                 my $items = 1;
300                 $part_nr++;
301
302                 check_part($host, $num, $part_nr, $tar_size, $size, $md5, $items, $filename);
303
304                 # round increment size to 2k block size
305                 $inc_size += int((($size + 2048) / 2048 ) * 2048);
306         }
307
308         $sth_inc_size->execute(
309                 $inc_size,
310                 $part_nr,
311                 get_backup_id($host, $num),
312         );
313         $dbh->commit;
314
315         @tar_files = sort @tar_files;
316         print "\n\t",($#tar_files + 1), " tar files";
317
318         my $sth = $dbh->prepare(qq{
319                 SELECT path,type
320                 FROM files
321                 JOIN shares on shares.id = shareid
322                 JOIN hosts on hosts.id = shares.hostid
323                 WHERE hosts.name = ? and backupnum = ?
324         });
325         $sth->execute($host, $num);
326         my @db_files;
327         while( my $row = $sth->fetchrow_hashref ) {
328
329                 my $path = $row->{'path'} || die "no path?";
330                 $path =~ s#^/#./#;
331                 $path .= '/' if ($row->{'type'} == BPC_FTYPE_DIR);
332                 push @db_files, $path;
333         }
334
335         print " ",($#db_files + 1), " database files, diff";
336
337         @db_files = sort @db_files;
338
339         my $same = 1;
340
341         if ($#tar_files != $#db_files) {
342                 $same = 0;
343                 print " NUMBER";
344         } else {
345                 my $diff = Algorithm::Diff->new(\@tar_files, \@db_files);
346                 while ( $diff->Next() ) {
347                         next if $diff->Same();
348                         $same = 0;
349                         print "< $_\n" for $diff->Items(1);
350                         print "> $_\n" for $diff->Items(2);
351                 }
352         }
353
354         print " ",($same ? 'ok' : 'DIFFERENT'),
355                 ", dur: ",fmt_time(time() - $t), "\n";
356
357         return $same;
358 }
359
360
361 #----- main
362
363 foreach ( 0 .. $#{ $opt->host } ) {
364
365         my $host = $opt->host->[$_];
366         my $num  = $opt->num->[$_];
367
368         check_archive $host => $num;
369
370 }
371
372 exit; # FIXME
373
374 my $sth = $dbh->prepare( qq{
375         
376 select
377         backups.id as backup_id,
378         hosts.name as host,
379         shares.name as share,
380         backups.num as num,
381         backups.date,
382         inc_size,
383         parts,
384         count(backup_parts.backup_id) as backup_parts
385 from backups
386         join shares on backups.hostid = shares.hostid
387                 and shares.id = backups.shareid
388         join hosts on shares.hostid = hosts.id
389         full outer join backup_parts on backups.id = backup_parts.backup_id
390 where not inc_deleted and backups.size > 0
391 group by backups.id, hosts.name, shares.name, backups.num, backups.date, inc_size, parts, backup_parts.backup_id
392 order by backups.date
393
394 } );
395
396 $sth->execute();
397 my $num_backups = $sth->rows;
398 my $curr_backup = 1;
399
400 while (my $row = $sth->fetchrow_hashref) {
401
402         $curr_backup++;
403
404         my $tar_file = BackupPC::Search::getGzipName($row->{'host'}, $row->{'share'}, $row->{'num'});
405
406         # this will return -1 if file doesn't exist
407         my $size = BackupPC::Search::get_tgz_size_by_name($tar_file);
408
409         print "# host: ".$row->{host}.", share: ".$row->{'share'}.", backup_num:".$row->{num}." size: $size backup.size: ", $row->{inc_size},"\n" if $opt->debug;
410
411         if ( $row->{'inc_size'} != -1 && $size != -1 && $row->{'inc_size'} >= $size && $row->{parts} == $row->{backup_parts}) {
412                 if ($opt->check) {
413                         tar_check($row->{'host'}, $row->{'share'}, $row->{'num'}, $tar_file) && next;
414                 } else {
415                         next;
416                 }
417         }
418
419         print curr_time, " creating $curr_backup/$num_backups ", $row->{host}, ":", $row->{share}, " #", $row->{num},
420                 " ", strftime('%Y-%m-%d', localtime($row->{date})), " -> $tar_file";
421
422         my $t = time();
423
424 =for later
425         # re-create archive?
426         my $cmd = qq[ $tarIncCreate -h "$row->{host}" -s "$row->{share}" -n $row->{num} -f ];
427         print STDERR "## $cmd\n" if ($opt->debug);
428
429         if (system($cmd) != 0) {
430                 print STDERR " FAILED, marking this backup deleted";
431                 backup_inc_deleted( $row->{backup_id} );
432         }
433 =cut
434
435         print ", dur: ",fmt_time(time() - $t), "\n";
436
437         $dbh->commit;
438
439 }
440
441 undef $sth;
442 $dbh->disconnect;