"%c %o",
[ 'host|h=s@', "import just host(s)" ],
[ 'num|n=s@', "import just backup number(s)" ],
+[ 'ok=n', "xferOK", { default => 0 } ],
[ 'check|c', "check archives on disk and sync", { default => 1 } ],
[ 'debug|d', "debug", { default => 1 } ],
[ 'help', "show help" ],
my $key = "$host $num";
return $hsn_cache->{$key} if ($hsn_cache->{$key});
+ # all backup parts will be attached to first share in backups
my $sth = $dbh->prepare(qq{
SELECT
- backups.id
+ min(backups.id)
FROM backups
INNER JOIN shares ON backups.shareID=shares.ID
INNER JOIN hosts ON backups.hostID = hosts.ID
WHERE hosts.name = ? and backups.num = ?
});
$sth->execute($host, $num);
+ die "can't find backup $host:$num" unless $sth->rows == 1;
my ($id) = $sth->fetchrow_array;
- $hsn_cache->{"$host $num"} = $id;
+ $hsn_cache->{$key} = $id;
- print STDERR "# $host $num == $id\n" if $opt->debug;
+ print STDERR "# $key == $id\n" if $opt->debug;
return $id;
}
where id = ?
});
+# Make an archive file read-only on disk: keep only the read bits of its
+# current mode (mode & 0444) and log the change to STDERR.
+sub read_only {
+	my $full = shift;
+	my $perm = (stat $full)[2] & 0444;
+	warn sprintf("chmod %03o %s\n",$perm,$full);
+	# low-precedence 'or' so the die checks chmod's return value;
+	# '|| die' bound to $full, silently ignoring chmod failures
+	chmod $perm, $full or die "chmod $full: $!";
+}
+
sub check_archive {
my ($host,$num) = @_;
warn "# check_archive $host $num";
return;
}
- print curr_time, " check $host $num";
+ print curr_time, " check $host $num\n";
+
+	# count files recorded for this host/backupnum; an increment with no
+	# files is empty, so its on-disk tar parts are garbage to remove
+	my $sth = $dbh->prepare(qq{
+		SELECT count(*)
+		FROM files
+		JOIN shares on shares.id = shareid
+		JOIN hosts on hosts.id = shares.hostid
+		WHERE hosts.name = ? and backupnum = ?
+	});
+	$sth->execute($host, $num);
+	my ($files) = $sth->fetchrow_array;
+
+	if ( $files == 0 ) {
+		warn "EMPTY INCREMENT, cleanup ",dump( @tar_parts );
+		foreach my $path ( @tar_parts ) {
+			my $full = "$Conf{ArchiveDest}/$path";
+			warn "rm $full\n";
+			# 'or' (not '||') so the die applies to unlink's return value,
+			# not to the always-true $full string
+			unlink $full or die "can't remove $full: $!";
+		}
+		return;
+	}
my $md5_path = "$Conf{ArchiveDest}/$host.$num.md5";
- unlink $md5_path if -s $md5_path == 0; # fix empty
+ unlink $md5_path if -e $md5_path && -s $md5_path == 0; # fix empty
+
+ my $read_protect = 0;
if ( ! -e $md5_path ) {
system_ok "cd $Conf{ArchiveDest} && /usr/bin/md5sum $host.$num.* > $md5_path";
+ read_only $md5_path;
+ $read_protect = 1;
} else {
system_ok "cd $Conf{ArchiveDest} && /usr/bin/md5sum -c $md5_path" if $opt->check;
}
foreach ( split(/\n/, read_file "$Conf{ArchiveDest}/$host.$num.md5" ) ) {
my ( $md5, $path ) = split(/\s+/,$_);
$md5sum->{$path} = $md5;
+ read_only "$Conf{ArchiveDest}/$path" if $read_protect;
}
# depending on expected returned value this is used like:
}
sub check_part {
- my ($host, $num, $part_nr, $tar_size, $size, $md5, $items) = @_;
+ my ($host, $num, $part_nr, $tar_size, $size, $md5, $items, $filename) = @_;
my $backup_id = get_backup_id($host, $num);
my $sth_md5 = $dbh->prepare(qq{
select
- id, tar_size, size, md5, items
+ id, tar_size, size, md5, items, filename
from backup_parts
- where backup_id = ? and part_nr = ?
+ where backup_id = ? and part_nr = ? and filename = ?
});
- $sth_md5->execute($backup_id, $part_nr);
+ $sth_md5->execute($backup_id, $part_nr, $filename);
if (my $row = $sth_md5->fetchrow_hashref) {
return if (
tar_size,
size,
md5,
- items
- ) values (?,?,?,?,?,?)
+ items,
+ filename
+ ) values (?,?,?,?,?,?,?)
});
- $sth_insert->execute($backup_id, $part_nr, $tar_size, $size, $md5, $items);
- $dbh->commit;
+ $sth_insert->execute($backup_id, $part_nr, $tar_size, $size, $md5, $items, $filename);
}
print " [parts: ",join(", ", @tar_parts),"]" if $opt->debug;
$items++;
$tar_size_inarc += $entry->size;
- if ($tar_size_inarc > $Conf{ArchiveChunkSize}) {
- print ", part $filename is too big $tar_size_inarc > $Conf{ArchiveChunkSize}\n";
+ if ($tar_size_inarc > $Conf{ArchiveMediaSize}) {
+ print ", part $filename is too big $tar_size_inarc > $Conf{ArchiveMediaSize}\n";
return 0;
}
my $items = 1;
$part_nr++;
- check_part($host, $num, $part_nr, $tar_size, $size, $md5, $items);
+ check_part($host, $num, $part_nr, $tar_size, $size, $md5, $items, $filename);
# round increment size to 2k block size
$inc_size += int((($size + 2048) / 2048 ) * 2048);
$part_nr,
get_backup_id($host, $num),
);
+
+ warn "## commit\n";
$dbh->commit;
+ return;
+
+=for removed
+
@tar_files = sort @tar_files;
print "\n\t",($#tar_files + 1), " tar files";
print " ",($same ? 'ok' : 'DIFFERENT'),
", dur: ",fmt_time(time() - $t), "\n";
+ $dbh->commit;
+
return $same;
+=cut
}
#----- main
+# nothing to do without --host; avoids iterating an undef arrayref below
+exit unless $opt->host;
+
foreach ( 0 .. $#{ $opt->host } ) {
-	my $host = $opt->host->[$_];
+	my $host = lc $opt->host->[$_];
	my $num = $opt->num->[$_];
-	check_archive $host => $num;
+	# without --ok the transfer failed: remove this backup's archive
+	# files instead of checking them
+	if ( ! $opt->ok ) {
+		warn "ERROR $host $num running cleanup";
+		foreach my $path ( glob "$Conf{ArchiveDest}/$host.$num.*" ) {
+			warn "# rm $path";
+			# 'or' (not '||') so the die sees unlink's return value,
+			# not the always-true $path string
+			unlink $path or die "can't remove $path: $!";
+		}
+	} else {
+		check_archive $host => $num;
+	}
}
print ", dur: ",fmt_time(time() - $t), "\n";
- $dbh->commit;
-
}
undef $sth;