#!/usr/local/bin/perl -w

use strict;
use lib "__INSTALLDIR__/lib";

use DBI;
use BackupPC::Lib;
use BackupPC::View;
use Data::Dumper;
use Time::HiRes qw/time/;
use POSIX qw/strftime/;
use BackupPC::SearchLib;
use Cwd qw/abs_path/;

my $path = abs_path($0);
$path =~ s#/[^/]+$#/#;
my $tarIncCreate = $path . 'BackupPC_tarIncCreate';

die "can't find $tarIncCreate: $!\n" unless (-x $tarIncCreate);

my $debug = 0;
$|=1;

my $start_t = time();

my $t_fmt = '%Y-%m-%d %H:%M:%S';

my $hosts;
my $bpc = BackupPC::Lib->new || die;
my %Conf = $bpc->Conf();
my $TopDir = $bpc->TopDir();
my $beenThere = {};

my $dsn = $Conf{SearchDSN} || die "Need SearchDSN in config.pl\n";
my $user = $Conf{SearchUser} || '';

my $dbh = DBI->connect($dsn, $user, "", { RaiseError => 1, AutoCommit => 0 });

my $tar_dir = $Conf{InstallDir} . '/' . $Conf{GzipTempDir};

die "problem with $tar_dir, check GzipTempDir in configuration\n"
	unless (-d $tar_dir && -w $tar_dir);

#---- subs ----

# format an elapsed number of seconds as [Hh]MM:SS
sub fmt_time {
	my $t = shift || return;
	my $out = "";
	my ($ss,$mm,$hh) = gmtime($t);
	$out .= "${hh}h" if ($hh);
	$out .= sprintf("%02d:%02d", $mm, $ss);
	return $out;
}

sub curr_time {
	return strftime($t_fmt, localtime());
}

#---- main ----

my $sth = $dbh->prepare(qq{
	select
		backups.id	as backup_id,
		hosts.name	as host,
		shares.name	as share,
		backups.num	as num,
		inc_size,
		parts
	from backups
	join shares	on backups.hostid = shares.hostid
			and shares.id = backups.shareid
	join hosts	on shares.hostid = hosts.id
	where not inc_deleted
	order by backups.date
});

$sth->execute();

my $sth_inc_size = $dbh->prepare(qq{
	update backups set
		inc_size = ?,
		parts = ?
	where id = ?
});
my $sth_inc_deleted = $dbh->prepare(qq{
	update backups set
		inc_deleted = ?
	where id = ?
});

%BackupPC::SearchLib::Conf = %Conf;

while (my $row = $sth->fetchrow_hashref) {

	my $tar_file = BackupPC::SearchLib::getGzipName($row->{'host'}, $row->{'share'}, $row->{'num'});

	# this will return -1 if file doesn't exist
	my $size = BackupPC::SearchLib::get_tgz_size_by_name($tar_file);

	print curr_time, " ", $row->{'host'}, ":", $row->{'share'}, " #", $row->{'num'}, " -> $tar_file";

	my $t = time();

	# re-create the archive if the size stored in the database doesn't
	# match the size on disk (or either one is missing)
	if ($row->{'inc_size'} == -1 || $size == -1 || $row->{'inc_size'} != $size) {
		my $cmd = qq{rm -Rf $tar_dir/$tar_file && $tarIncCreate -h "$row->{'host'}" -s "$row->{'share'}" -n $row->{'num'} | gzip -9 > $tar_dir/$tar_file};
		print STDERR "## $cmd\n" if ($debug);

		system($cmd) == 0 or die "failed: $?";

		$size = (stat("$tar_dir/$tar_file"))[7];
	}

	# archives at or below 45 bytes are treated as empty
	if ($size > 45) {
		my $max_size = $Conf{'MaxArchiveSize'} || die "problem with MaxArchiveSize parameter";
		$max_size *= 1024;	# convert to bytes

		my $parts = int(($size + $max_size - 1) / $max_size);

		# archive was split into parts earlier, but the number of parts
		# has changed: join the parts back into a single file
		if (-d "$tar_dir/$tar_file" && $parts != $row->{'parts'}) {
			print " join";

			my $in = my $out = "$tar_dir/$tar_file";
			$out .= '.tmp';

			# FIXME I should really order parts manually!
			system("cat $in/part* > $out && rm -Rf $in && mv $out $in") == 0
				or die "can't join $in: $?";
		}

		# archive is larger than MaxArchiveSize and not split yet:
		# split it into numbered parts inside a directory
		if ($size > $max_size && ! -d "$tar_dir/$tar_file") {
			print " split/$parts";

			my $in = my $out = "$tar_dir/$tar_file";
			$out .= '.tmp';

			rename($in, $out) or die "can't rename $in: $!";
			mkdir($in) or die "can't mkdir $in: $!";

			my $suffix_len = length("$parts");

			system("split -d -b $max_size -a $suffix_len $out $in/part") == 0
				or die "can't split $out: $?";

			unlink($out) or die "can't unlink $out: $!";
		}

		$sth_inc_size->execute($size, $parts, $row->{'backup_id'});
		$sth_inc_deleted->execute(0, $row->{'backup_id'});

		printf(" %1.2f MB", ($size / 1024 / 1024));

	} else {
		$sth_inc_deleted->execute(1, $row->{'backup_id'});
		unlink("$tar_dir/$tar_file") or die "can't delete $tar_dir/$tar_file: $!\n";
		print " EMPTY";
	}

	print ", dur: ", fmt_time(time() - $t), "\n";

	$dbh->commit;
}

undef $sth;
$dbh->disconnect;