1 #!/usr/local/bin/perl -w
4 use lib "__INSTALLDIR__/lib";
9 use BackupPC::Attrib qw/:all/;
11 use Time::HiRes qw/time/;
12 use POSIX qw/strftime/;
15 use Archive::Tar::Streamed;
# --- Single-instance guard -------------------------------------------------
# Build a pid-file name from this script's absolute path (non-word chars
# flattened to '_') and refuse to start if another copy is already running.
# NOTE(review): this listing is elided; lines are missing between statements.
21 my $pid_path = abs_path($0);
22 $pid_path =~ s/\W+/_/g;
24 my $pidfile = new File::Pid({
25 file => "/tmp/$pid_path",
28 if (my $pid = $pidfile->running ) {
29 die "$0 already running: $pid\n";
30 } elsif ($pidfile->pid ne $$) {
# Stale/foreign pid file: fall back to a default File::Pid object.
32 $pidfile = new File::Pid;
35 print STDERR "$0 using pid ",$pidfile->pid," file ",$pidfile->file,"\n";
# --- BackupPC configuration ------------------------------------------------
38 my $bpc = BackupPC::Lib->new || die "can't create BackupPC::Lib";
39 my %Conf = $bpc->Conf();
# Share the loaded config with the search library (package-global hash).
41 use BackupPC::SearchLib;
42 %BackupPC::SearchLib::Conf = %Conf;
# Locate the companion BackupPC_tarIncCreate binary next to this script.
44 my $path = abs_path($0);
45 $path =~ s#/[^/]+$#/#;
46 my $tarIncCreate = $path .= 'BackupPC_tarIncCreate';
48 die "can't find $tarIncCreate: $!\n" unless (-x $tarIncCreate);
# Required external tools; abort early if either is not on PATH.
51 foreach my $c (qw/gzip md5sum/) {
52 $bin->{$c} = which($c) || die "$0 needs $c, install it\n";
# Options: -c force tar archive check, -d debug output.
56 getopts("cd", \%opt );
59 my $check = $opt{c} && print STDERR "NOTICE: tar archive check forced\n";
# Timestamp format used by curr_time() below.
65 my $t_fmt = '%Y-%m-%d %H:%M:%S';
# --- Database and working directory ----------------------------------------
67 my $dsn = $Conf{SearchDSN} || die "Need SearchDSN in config.pl\n";
68 my $user = $Conf{SearchUser} || '';
# AutoCommit off: changes below are committed explicitly (commit not visible
# in this listing — presumably elsewhere in the file).
70 my $dbh = DBI->connect($dsn, $user, "", { RaiseError => 1, AutoCommit => 0 });
# Directory holding the generated .tar.gz archives; must exist and be writable.
72 my $tar_dir = $Conf{InstallDir}.'/'.$Conf{GzipTempDir};
74 die "problem with $tar_dir, check GzipTempDir in configuration\n" unless (-d $tar_dir && -w $tar_dir);
# NOTE(review): fragments of two time helpers; the enclosing `sub` lines are
# not visible in this listing (presumably fmt_time($seconds) producing a
# human-readable "HHh MM:SS" duration, and curr_time() returning a formatted
# "now" timestamp via $t_fmt — TODO confirm against the full file).
79 my $t = shift || return;
# Duration is treated as seconds-since-epoch-zero: gmtime(0 + $t) yields
# sec/min/hour components of the elapsed time.
81 my ($ss,$mm,$hh) = gmtime($t);
# Hours component is included only when non-zero.
82 $out .= "${hh}h" if ($hh);
83 $out .= sprintf("%02d:%02d", $mm,$ss);
# curr_time() body: current local time formatted with $t_fmt.
88 return strftime($t_fmt,localtime());
# Resolve (host name, share name, backup number) to the backups.id primary
# key, memoizing results in the package-level $hsn_cache hash so repeated
# lookups for the same backup hit the database only once.
# NOTE(review): prototype ($$$) is used as documentation here, not validation.
93 sub get_backup_id($$$) {
94 my ($host, $share, $num) = @_;
96 my $key = "$host $share $num";
97 return $hsn_cache->{$key} if ($hsn_cache->{$key});
# Cache miss: query the id via the hosts/shares join.
99 my $sth = $dbh->prepare(qq{
103 INNER JOIN shares ON backups.shareID=shares.ID
104 INNER JOIN hosts ON backups.hostID = hosts.ID
105 where hosts.name = ? and shares.name = ? and backups.num = ?
107 $sth->execute($host, $share, $num);
108 my ($id) = $sth->fetchrow_array;
# Store under the same composite key used above.
110 $hsn_cache->{"$host $share $num"} = $id;
112 print STDERR "# $host $share $num == $id\n" if ($opt{d});
# Verify the on-disk tar archive(s) for one backup against the database:
# enumerate the tar part files (single .tar.gz or a directory of parts),
# stream-read each part to collect file names/sizes, ensure an md5 file
# exists, sync the backup_parts table (via nested check_part), and finally
# diff the tar file list against the files table. Returns truthy when the
# archive matches the database ($same), printing progress along the way.
118 sub tar_check($$$$) {
119 my ($host,$share,$num,$filename) = @_;
122 print curr_time, " check $host:$share#$num -> $filename";
124 # depending on expected returned value this is used like:
125 # my $uncompress_size = get_gzip_size('/full/path/to.gz');
126 # my ($compress_size, $uncompress_size) = get_gzip_size('/path.gz');
# Nested helper: parse `gzip -l` output for compressed/uncompressed sizes.
# NOTE(review): two-arg piped open with an interpolated path is shell-unsafe;
# prefer list-form open(my $fh, '-|', $bin->{gzip}, '-l', $filename).
127 sub get_gzip_size($) {
128 my $filename = shift;
129 die "file $filename problem: $!" unless (-r $filename);
130 open(my $gzip, $bin->{gzip}." -l $filename |") || die "can't gzip -l $filename: $!";
# Skip the "compressed uncompressed ratio ..." header line.
# NOTE(review): $line is used here before any visible assignment — the
# initial read from $gzip appears to be on a line elided from this listing.
133 $line = <$gzip> if ($line =~ /^\s+compressed/);
135 my ($comp, $uncomp) = (0,0);
# gzip -l data line: "<compressed> <uncompressed> <ratio>%".
137 if ($line =~ m/^\s+(\d+)\s+(\d+)\s+\d+\.\d+/) {
144 die "can't find size in line: $line";
# Nested helper check_part: upsert one row of the backup_parts table.
# NOTE(review): the `sub check_part` header line is elided from this listing.
149 my ($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items) = @_;
150 my $backup_id = get_backup_id($host, $share, $num);
151 my $sth_md5 = $dbh->prepare(qq{
153 id, tar_size, size, md5, items
155 where backup_id = ? and part_nr = ?
158 $sth_md5->execute($backup_id, $part_nr);
# Existing row: keep it only if all recorded values still match
# (tar_size may legitimately be >= the recomputed value — padding).
160 if (my $row = $sth_md5->fetchrow_hashref) {
162 $row->{tar_size} >= $tar_size &&
163 $row->{size} == $size &&
164 $row->{md5} eq $md5 &&
165 $row->{items} == $items
167 print ", deleting invalid backup_parts $row->{id}";
# id comes from our own query, so interpolation is safe here,
# though a placeholder would be more consistent.
168 $dbh->do(qq{ delete from backup_parts where id = $row->{id} });
170 print ", inserting new";
171 my $sth_insert = $dbh->prepare(qq{
172 insert into backup_parts (
179 ) values (?,?,?,?,?,?)
182 $sth_insert->execute($backup_id, $part_nr, $tar_size, $size, $md5, $items);
# --- Collect the list of tar part files for this backup --------------------
# A directory means a multi-part archive (one N.tar.gz per part).
188 if (-d "$tar_dir/$filename") {
189 print ", multi-part";
190 opendir(my $dir, "$tar_dir/$filename") || die "can't readdir $tar_dir/$filename: $!";
# FIXME(review): '$(unknown)' below looks like a corrupted substitution in
# this listing — the prefix prepended here is presumably "$filename/"; confirm
# against the original source before relying on this line.
191 @tar_parts = map { my $p = $_; $p =~ s#^#$(unknown)/#; $p } grep { !/^\./ && !/md5/ && -f "$tar_dir/$filename/$_" } readdir($dir);
# Single-part archive: just "<name>.tar.gz".
# FIXME(review): '$(unknown)' is presumably "$filename" — same corruption.
194 push @tar_parts, "$(unknown).tar.gz";
197 print " [parts: ",join(", ", @tar_parts),"]" if ($opt{d});
204 print " reading" if ($opt{d});
# --- Stream-read every part, validating sizes and collecting file names ----
206 foreach my $tarfilename (@tar_parts) {
208 print "\n\t- $tarfilename";
210 my $path = "$tar_dir/$tarfilename";
# stat[7] = size in bytes; a zero-byte part also dies here (0 is false).
212 my $size = (stat( $path ))[7] || die "can't stat $path: $!";
214 if ($size > $Conf{MaxArchiveSize}) {
215 print ", part bigger than media $size > $Conf{MaxArchiveSize}\n";
219 print ", $size bytes";
# NOTE(review): two-arg piped open with interpolated $path — shell-unsafe;
# also uses bare "gzip" rather than the resolved $bin->{gzip}.
222 open(my $fh, "gzip -cd $path |") or die "can't open $path: $!";
224 my $tar = Archive::Tar::Streamed->new($fh);
226 my $tar_size_inarc = 0;
# Walk tar entries, accumulating names and total member size.
229 while(my $entry = $tar->next) {
230 push @tar_files, $entry->name;
232 $tar_size_inarc += $entry->size;
234 if ($tar_size_inarc > $Conf{MaxArchiveFileSize}) {
235 print ", part $tarfilename is too big $tar_size_inarc > $Conf{MaxArchiveFileSize}\n";
243 print ", $items items";
# An empty tar means the increment was never materialized: mark the
# backup so it will be re-created by the main loop.
245 if ($tar_size_inarc == 0 && $items == 0) {
246 print ", EMPTY tar\n";
248 my $backup_id = get_backup_id($host, $share, $num);
250 my $sth_inc_deleted = $dbh->prepare(qq{
255 $sth_inc_deleted->execute($backup_id);
# Sanity check: members can't exceed the uncompressed tar size.
262 my $tar_size = get_gzip_size( $path );
264 # real tar size is bigger because of padding
265 if ($tar_size_inarc > $tar_size) {
266 print ", size of files in tar ($tar_size_inarc) bigger than whole tar ($tar_size)!\n";
271 # check if md5 exists, and if not, create one
274 my $md5_path = $path;
275 $md5_path =~ s/\.tar\.gz$/.md5/ || die "can't create md5 filename from $md5_path";
276 if (! -e $md5_path || -z $md5_path) {
277 print ", creating md5";
# NOTE(review): shell-interpolated system() — paths with spaces or shell
# metacharacters would break this; consider list-form with a redirect handle.
278 system( $bin->{md5sum} . " $path > $md5_path") == 0 or die "can't create md5 $path: $!";
280 ## FIXME check if existing md5 is valid
283 my $md5 = read_file( $md5_path ) || die "can't read md5sum file $md5_path: $!";
286 # extract part number from filename
# Multi-part files are named "<dir>/<N>.tar.gz"; capture N.
288 $part_nr = $1 if ($tarfilename =~ m#/(\d+)\.tar\.gz#);
291 # finally, check if backup_parts table in database is valid
294 check_part($host, $share, $num, $part_nr, $tar_size, $size, $md5, $items);
297 # short-cut and exit;
# NOTE(review): returns when $same is FALSE (early exit on failure) —
# the comment's "short-cut" refers to skipping the full file-list diff.
298 return $same unless($same);
# --- Compare tar contents against the files table --------------------------
300 @tar_files = sort @tar_files;
301 print "\n\t",($#tar_files + 1), " tar files";
303 my $sth = $dbh->prepare(qq{
306 JOIN shares on shares.id = shareid
307 JOIN hosts on hosts.id = shares.hostid
308 WHERE hosts.name = ? and shares.name = ? and backupnum = ?
310 $sth->execute($host, $share, $num);
312 while( my $row = $sth->fetchrow_hashref ) {
314 my $path = $row->{'path'} || die "no path?";
# Directories carry a trailing slash in the tar listing; match that.
316 $path .= '/' if ($row->{'type'} == BPC_FTYPE_DIR);
317 push @db_files, $path;
320 print " ",($#db_files + 1), " database files, diff";
322 @db_files = sort @db_files;
# Counts differ => definitely not the same; show a unified-ish diff.
324 if ($#tar_files != $#db_files) {
328 my $diff = Algorithm::Diff->new(\@tar_files, \@db_files);
329 while ( $diff->Next() ) {
330 next if $diff->Same();
# '<' lines only in the tar, '>' lines only in the database.
332 print "< $_\n" for $diff->Items(1);
333 print "> $_\n" for $diff->Items(2);
337 print " ",($same ? 'ok' : 'DIFFERENT'),
338 ", dur: ",fmt_time(time() - $t), "\n";
# --- Main loop -------------------------------------------------------------
# Select every backup whose increment has not been deleted, oldest first,
# and for each one either verify its existing tar archive (tar_check) or
# (re)create it by shelling out to BackupPC_tarIncCreate.
346 my $sth = $dbh->prepare( qq{
349 backups.id as backup_id,
351 shares.name as share,
356 join shares on backups.hostid = shares.hostid
357 and shares.id = backups.shareid
358 join hosts on shares.hostid = hosts.id
359 where not inc_deleted
360 order by backups.date
# Total row count, used for "creating X/Y" progress output below.
365 my $num_backups = $sth->rows;
368 while (my $row = $sth->fetchrow_hashref) {
# Canonical archive name for this host/share/backup-number triple.
372 my $tar_file = BackupPC::SearchLib::getGzipName($row->{'host'}, $row->{'share'}, $row->{'num'});
374 # this will return -1 if file doesn't exist
375 my $size = BackupPC::SearchLib::get_tgz_size_by_name($tar_file);
377 print "# size: $size backup.size: ", $row->{inc_size},"\n" if ($opt{d});
# Archive exists and recorded inc_size covers it: just verify and move on
# (tar_check returning truthy means the archive matches the database).
379 if ( $row->{'inc_size'} != -1 && $size != -1 && $row->{'inc_size'} >= $size) {
381 tar_check($row->{'host'}, $row->{'share'}, $row->{'num'}, $tar_file) && next;
387 print curr_time, " creating $curr_backup/$num_backups ", $row->{'host'}, ":", $row->{'share'}, " #", $row->{'num'}, " -> $tar_file";
# NOTE(review): host/share are shell-interpolated into the command line;
# names containing quotes or metacharacters would break or inject — the
# list form system($tarIncCreate, '-h', ..., ...) would be safer.
392 my $cmd = qq{ $tarIncCreate -h "$row->{'host'}" -s "$row->{'share'}" -n $row->{'num'} -f };
393 print STDERR "## $cmd\n" if ($debug);
395 if (system($cmd) != 0) {
396 print STDERR " FAILED";
399 print ", dur: ",fmt_time(time() - $t), "\n";