* Cleaned up warning message for restore using rsync when checksum
caching is on, but when file didn't have cached checksums.
+* Fixed BackupPC_archiveHost to support par2 (par2cmdline).
+ Patch submitted by Jaco Bongers and adapted by Josh Marshall.
+
+* Improved stat() usage in BackupPC_nightly, plus some other cleanup,
+ giving a significant performance improvement. Patch submitted by
+ Wayne Scott.
+
+* Allow several BackupPC_nightly processes to run in parallel based
+ on new $Conf{MaxBackupPCNightlyJobs} setting. This speeds up the
+ traversal of the pool, reducing the overall run time for
+ BackupPC_nightly.
+
+* Allow BackupPC_nightly to split the pool traversal across several
+ nightly runs. This improves the running time per night, at the expense
+ of a slight increase in disk storage as unused pool files might not
+ be deleted for a couple of days. Controlled by the new config setting
+ $Conf{BackupPCNightlyPeriod}.
+
#------------------------------------------------------------------------
# Version 2.1.0beta0, 20 Mar 2004
#------------------------------------------------------------------------
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
# Misc variables
#
my($RunNightlyWhenIdle, $FirstWakeup, $CmdJob, $ServerInetPort);
+my($BackupPCNightlyJobs, $BackupPCNightlyLock);
#
# Complete the rest of the initialization
$CmdQueueOn{$bpc->trashJob} = 1;
}
if ( keys(%Jobs) == $trashCleanRunning && $RunNightlyWhenIdle == 1 ) {
- push(@CmdQueue, {
- host => $bpc->adminJob,
- user => "BackupPC",
- reqTime => time,
- cmd => ["$BinDir/BackupPC_nightly"],
- });
- $CmdQueueOn{$bpc->adminJob} = 1;
- $RunNightlyWhenIdle = 2;
+
+ #
+ # Queue multiple nightly jobs based on the configuration
+ #
+ $Conf{MaxBackupPCNightlyJobs} = 1
+ if ( $Conf{MaxBackupPCNightlyJobs} <= 0 );
+ $Conf{BackupPCNightlyPeriod} = 1
+ if ( $Conf{BackupPCNightlyPeriod} <= 0 );
+ #
+ # Decide what subset of the 16 top-level directories 0..9a..f
+ # we run BackupPC_nightly on, based on $Conf{BackupPCNightlyPeriod}.
+ # If $Conf{BackupPCNightlyPeriod} == 1 then we run 0..15 every
+ # time. If $Conf{BackupPCNightlyPeriod} == 2 then we run
+ 0..7 one night and 8..f the next night. And so on.
+ #
+ # $Info{NightlyPhase} counts which night, from 0 to
+ # $Conf{BackupPCNightlyPeriod} - 1.
+ #
+ my $start = int($Info{NightlyPhase} * 16
+ / $Conf{BackupPCNightlyPeriod});
+ my $end = int(($Info{NightlyPhase} + 1) * 16
+ / $Conf{BackupPCNightlyPeriod});
+ $end = $start + 1 if ( $end <= $start );
+ $Info{NightlyPhase}++;
+ $Info{NightlyPhase} = 0 if ( $end >= 16 );
+
+ #
+ # Zero out the data we expect to get from BackupPC_nightly.
+ # In the future if we want to split BackupPC_nightly over
+ # more than one night we will only zero out the portion
+ # that we are running right now.
+ #
+ for my $p ( qw(pool cpool) ) {
+ for ( my $i = $start ; $i < $end ; $i++ ) {
+ $Info{pool}{$p}[$i]{FileCnt} = 0;
+ $Info{pool}{$p}[$i]{DirCnt} = 0;
+ $Info{pool}{$p}[$i]{Kb} = 0;
+ $Info{pool}{$p}[$i]{Kb2} = 0;
+ $Info{pool}{$p}[$i]{KbRm} = 0;
+ $Info{pool}{$p}[$i]{FileCntRm} = 0;
+ $Info{pool}{$p}[$i]{FileCntRep} = 0;
+ $Info{pool}{$p}[$i]{FileRepMax} = 0;
+ $Info{pool}{$p}[$i]{FileCntRename} = 0;
+ $Info{pool}{$p}[$i]{FileLinkMax} = 0;
+ $Info{pool}{$p}[$i]{Time} = 0;
+ }
+ }
+ print(LOG $bpc->timeStamp,
+ sprintf("Running %d BackupPC_nightly jobs from %d..%d"
+ . " (out of 0..15)\n",
+ $Conf{MaxBackupPCNightlyJobs}, $start, $end - 1));
+
+ #
+ # Now queue the $Conf{MaxBackupPCNightlyJobs} jobs.
+ # The granularity on start and end is now 0..256.
+ #
+ $start *= 16;
+ $end *= 16;
+ my $start0 = $start;
+ for ( my $i = 0 ; $i < $Conf{MaxBackupPCNightlyJobs} ; $i++ ) {
+ #
+ # The first nightly job gets the -m option (does email, log aging).
+ # All jobs get the start and end options from 0..255 telling
+ # them which parts of the pool to traverse.
+ #
+ my $cmd = ["$BinDir/BackupPC_nightly"];
+ push(@$cmd, "-m") if ( $i == 0 );
+ push(@$cmd, $start);
+ $start = $start0 + int(($end - $start0)
+ * ($i + 1) / $Conf{MaxBackupPCNightlyJobs});
+ push(@$cmd, $start - 1);
+
+ my $job = $bpc->adminJob($i);
+ unshift(@CmdQueue, {
+ host => $job,
+ user => "BackupPC",
+ reqTime => time,
+ cmd => $cmd,
+ });
+ $CmdQueueOn{$job} = 1;
+ }
+ $RunNightlyWhenIdle = 2;
+
}
}
sub Main_TryToRun_CmdQueue
{
my($req, $host);
- if ( $CmdJob eq "" && @CmdQueue > 0 && $RunNightlyWhenIdle != 1 ) {
+
+ while ( $CmdJob eq "" && @CmdQueue > 0 && $RunNightlyWhenIdle != 1
+ || @CmdQueue > 0 && $RunNightlyWhenIdle == 2
+ && $bpc->isAdminJob($CmdQueue[0]->{host})
+ ) {
local(*FH);
$req = pop(@CmdQueue);
vec($FDread, $Jobs{$host}{fn}, 1) = 1;
$Jobs{$host}{startTime} = time;
$Jobs{$host}{reqTime} = $req->{reqTime};
- $cmd = join(" ", @$cmd);
+ $cmd = $bpc->execCmd2ShellCmd(@$cmd);
$Jobs{$host}{cmd} = $cmd;
+ $Jobs{$host}{user} = $req->{user};
$Jobs{$host}{type} = $Status{$host}{type};
$Status{$host}{state} = "Status_link_running";
$Status{$host}{activeJob} = 1;
$CmdJob = $host if ( $host ne $bpc->trashJob );
$cmd =~ s/$BinDir\///g;
print(LOG $bpc->timeStamp, "Running $cmd (pid=$pid)\n");
+ if ( $cmd =~ /^BackupPC_nightly\s/ ) {
+ $BackupPCNightlyJobs++;
+ $BackupPCNightlyLock++;
+ }
}
}
#
# CmdJob and trashClean don't count towards MaxBackups / MaxUserBackups
#
- $nJobs-- if ( $CmdJob ne "" );
+ $nJobs -= $BackupPCNightlyJobs if ( $CmdJob ne "" );
$nJobs-- if ( defined($Jobs{$bpc->trashJob} ) );
if ( $nJobs < $Conf{MaxBackups} + $Conf{MaxUserBackups}
&& @UserQueue > 0 ) {
$Jobs{$host}{startTime} = time;
$Jobs{$host}{reqTime} = $req->{reqTime};
$Jobs{$host}{userReq} = $req->{userReq};
- $Jobs{$host}{cmd} = join(" ", $progName, @args);
+ $Jobs{$host}{cmd} = $bpc->execCmd2ShellCmd($progName, @args);
$Jobs{$host}{user} = $user;
$Jobs{$host}{type} = $type;
$Status{$host}{userReq} = $req->{userReq}
}
} elsif ( $mesg =~ /^log\s+(.*)/ ) {
print(LOG $bpc->timeStamp, "$1\n");
- } elsif ( $mesg =~ /^BackupPC_stats = (.*)/ ) {
- my @f = split(/,/, $1);
- $Info{"$f[0]FileCnt"} = $f[1];
- $Info{"$f[0]DirCnt"} = $f[2];
- $Info{"$f[0]Kb"} = $f[3];
- $Info{"$f[0]Kb2"} = $f[4];
- $Info{"$f[0]KbRm"} = $f[5];
- $Info{"$f[0]FileCntRm"} = $f[6];
- $Info{"$f[0]FileCntRep"} = $f[7];
- $Info{"$f[0]FileRepMax"} = $f[8];
- $Info{"$f[0]FileCntRename"} = $f[9];
- $Info{"$f[0]FileLinkMax"} = $f[10];
- $Info{"$f[0]Time"} = time;
- printf(LOG "%s%s nightly clean removed %d files of"
- . " size %.2fGB\n",
- $bpc->timeStamp, ucfirst($f[0]),
- $Info{"$f[0]FileCntRm"},
- $Info{"$f[0]KbRm"} / (1000 * 1024));
- printf(LOG "%s%s is %.2fGB, %d files (%d repeated, "
- . "%d max chain, %d max links), %d directories\n",
- $bpc->timeStamp, ucfirst($f[0]),
- $Info{"$f[0]Kb"} / (1000 * 1024),
- $Info{"$f[0]FileCnt"}, $Info{"$f[0]FileCntRep"},
- $Info{"$f[0]FileRepMax"},
- $Info{"$f[0]FileLinkMax"}, $Info{"$f[0]DirCnt"});
+ } elsif ( $mesg =~ /^BackupPC_stats (\d+) = (.*)/ ) {
+ my $chunk = int($1 / 16);
+ my @f = split(/,/, $2);
+ $Info{pool}{$f[0]}[$chunk]{FileCnt} += $f[1];
+ $Info{pool}{$f[0]}[$chunk]{DirCnt} += $f[2];
+ $Info{pool}{$f[0]}[$chunk]{Kb} += $f[3];
+ $Info{pool}{$f[0]}[$chunk]{Kb2} += $f[4];
+ $Info{pool}{$f[0]}[$chunk]{KbRm} += $f[5];
+ $Info{pool}{$f[0]}[$chunk]{FileCntRm} += $f[6];
+ $Info{pool}{$f[0]}[$chunk]{FileCntRep} += $f[7];
+ $Info{pool}{$f[0]}[$chunk]{FileRepMax} = $f[8]
+ if ( $Info{pool}{$f[0]}[$chunk]{FileRepMax} < $f[8] );
+ $Info{pool}{$f[0]}[$chunk]{FileCntRename} += $f[9];
+ $Info{pool}{$f[0]}[$chunk]{FileLinkMax} = $f[10]
+ if ( $Info{pool}{$f[0]}[$chunk]{FileLinkMax} < $f[10] );
+ $Info{pool}{$f[0]}[$chunk]{Time} = time;
} elsif ( $mesg =~ /^BackupPC_nightly lock_off/ ) {
- $RunNightlyWhenIdle = 0;
+ $BackupPCNightlyLock--;
+ if ( $BackupPCNightlyLock == 0 ) {
+ #
+ # This means the last BackupPC_nightly is done with
+ # the pool clean, so it's to start running regular
+ # backups again.
+ #
+ $RunNightlyWhenIdle = 0;
+ }
} elsif ( $mesg =~ /^processState\s+(.+)/ ) {
$Jobs{$host}{processState} = $1;
} elsif ( $mesg =~ /^link\s+(.+)/ ) {
if ( $nbytes <= 0 ) {
close($Jobs{$host}{fh});
vec($FDread, $Jobs{$host}{fn}, 1) = 0;
- if ( $CmdJob eq $host ) {
+ if ( $CmdJob eq $host || $bpc->isAdminJob($host) ) {
my $cmd = $Jobs{$host}{cmd};
$cmd =~ s/$BinDir\///g;
print(LOG $bpc->timeStamp, "Finished $host ($cmd)\n");
$Status{$host}{state} = "Status_idle";
$Status{$host}{endTime} = time;
- $CmdJob = "";
- $RunNightlyWhenIdle = 0 if ( $cmd eq "BackupPC_nightly"
- && $RunNightlyWhenIdle );
+ if ( $cmd =~ /^BackupPC_nightly\s/ ) {
+ $BackupPCNightlyJobs--;
+ #print(LOG $bpc->timeStamp, "BackupPC_nightly done; now"
+ # . " have $BackupPCNightlyJobs running\n");
+ if ( $BackupPCNightlyJobs <= 0 ) {
+ $BackupPCNightlyJobs = 0;
+ $RunNightlyWhenIdle = 0;
+ $CmdJob = "";
+ #
+ # Combine the 16 per-directory results
+ #
+ for my $p ( qw(pool cpool) ) {
+ $Info{"${p}FileCnt"} = 0;
+ $Info{"${p}DirCnt"} = 0;
+ $Info{"${p}Kb"} = 0;
+ $Info{"${p}Kb2"} = 0;
+ $Info{"${p}KbRm"} = 0;
+ $Info{"${p}FileCntRm"} = 0;
+ $Info{"${p}FileCntRep"} = 0;
+ $Info{"${p}FileRepMax"} = 0;
+ $Info{"${p}FileCntRename"} = 0;
+ $Info{"${p}FileLinkMax"} = 0;
+ $Info{"${p}Time"} = 0;
+ for ( my $i = 0 ; $i < 16 ; $i++ ) {
+ $Info{"${p}FileCnt"}
+ += $Info{pool}{$p}[$i]{FileCnt};
+ $Info{"${p}DirCnt"}
+ += $Info{pool}{$p}[$i]{DirCnt};
+ $Info{"${p}Kb"}
+ += $Info{pool}{$p}[$i]{Kb};
+ $Info{"${p}Kb2"}
+ += $Info{pool}{$p}[$i]{Kb2};
+ $Info{"${p}KbRm"}
+ += $Info{pool}{$p}[$i]{KbRm};
+ $Info{"${p}FileCntRm"}
+ += $Info{pool}{$p}[$i]{FileCntRm};
+ $Info{"${p}FileCntRep"}
+ += $Info{pool}{$p}[$i]{FileCntRep};
+ $Info{"${p}FileRepMax"}
+ = $Info{pool}{$p}[$i]{FileRepMax}
+ if ( $Info{"${p}FileRepMax"} <
+ $Info{pool}{$p}[$i]{FileRepMax} );
+ $Info{"${p}FileCntRename"}
+ += $Info{pool}{$p}[$i]{FileCntRename};
+ $Info{"${p}FileLinkMax"}
+ = $Info{pool}{$p}[$i]{FileLinkMax}
+ if ( $Info{"${p}FileLinkMax"} <
+ $Info{pool}{$p}[$i]{FileLinkMax} );
+ $Info{"${p}Time"} = $Info{pool}{$p}[$i]{Time}
+ if ( $Info{"${p}Time"} <
+ $Info{pool}{$p}[$i]{Time} );
+ }
+ printf(LOG "%s%s nightly clean removed %d files of"
+ . " size %.2fGB\n",
+ $bpc->timeStamp, ucfirst($p),
+ $Info{"${p}FileCntRm"},
+ $Info{"${p}KbRm"} / (1000 * 1024));
+ printf(LOG "%s%s is %.2fGB, %d files (%d repeated, "
+ . "%d max chain, %d max links), %d directories\n",
+ $bpc->timeStamp, ucfirst($p),
+ $Info{"${p}Kb"} / (1000 * 1024),
+ $Info{"${p}FileCnt"}, $Info{"${p}FileCntRep"},
+ $Info{"${p}FileRepMax"},
+ $Info{"${p}FileLinkMax"}, $Info{"${p}DirCnt"});
+ }
+ }
+ } else {
+ $CmdJob = "";
+ }
} else {
#
# Queue BackupPC_link to complete the backup
}
} elsif ( $cmd =~ /^backup all$/ ) {
QueueAllPCs();
+ } elsif ( $cmd =~ /^BackupPC_nightly run$/ ) {
+ $RunNightlyWhenIdle = 1;
} elsif ( $cmd =~ /^backup (\S+)\s+(\S+)\s+(\S+)\s+(\S+)/ ) {
my $hostIP = $1;
$host = $2;
}
foreach my $host ( sort(keys(%Status)) ) {
next if ( $host eq $bpc->trashJob
- || $host eq $bpc->adminJob
+ || $bpc->isAdminJob($host)
|| defined($Hosts->{$host})
|| defined($Jobs{$host})
|| $BgQueueOn{$host}
# Usage: BackupPC_archive <user> <archiveclient> <reqFileName>
#
# AUTHOR
-# Craig Barratt <cbarratt@users.sourceforge.net>
+# Josh Marshall
#
# COPYRIGHT
-# Copyright (C) 2001-2003 Craig Barratt
+# Copyright (C) 2001-2004 Craig Barratt
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
# AUTHOR
# Craig Barratt <cbarratt@users.sourceforge.net>
+# Josh Marshall
#
# COPYRIGHT
-# Copyright (C) 2001-2003 Craig Barratt
+# Copyright (C) 2001-2004 Craig Barratt
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
# ie: not a tape device).
#
if ( -d $outLoc && -x $parPath ) {
- print("Running $parPath to create parity files\n");
- $ret = system("$parPath a -n $parfile $outLoc/$host.$bkupNum.tar$fileExt.par $outLoc/$host.$bkupNum.tar$fileExt.*");
- if ( $ret ) {
- print("Error: $parPath failed\n");
- exit(1);
+ if ( $parfile != 0 ) {
+ print("Running $parPath to create parity files\n");
+ $ret = system("$parPath c -r$parfile $outLoc/$host.$bkupNum.tar$fileExt.par2 $outLoc/$host.$bkupNum.tar$fileExt.*");
+ if ( $ret ) {
+ print("Error: $parPath failed\n");
+ exit(1);
+ }
}
}
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
# BackupPC_nightly: Nightly cleanup & statistics script.
#
# DESCRIPTION
+#
# BackupPC_nightly performs several administrative tasks:
#
-# - monthly aging of per-PC log files
+# - monthly aging of per-PC log files (only with -m option)
+#
+# - pruning files from pool no longer used (ie: those with only one
+# hard link).
+#
+# - sending email to users and administrators (only with -m option)
#
-# - pruning files from pool no longer used (ie: those with only one
-# hard link).
+# Usage: BackupPC_nightly [-m] poolRangeStart poolRangeEnd
#
-# - sending email to users and administrators.
+# Flags:
+#
+# -m Do monthly aging of per-PC log files and sending of email.
+# Otherwise, BackupPC_nightly just does pool pruning.
+#
+# The poolRangeStart and poolRangeEnd arguments are integers from 0 to 255.
+# These specify which parts of the pool to process. There are 256 2nd-level
+# directories in the pool (0/0, 0/1, ..., f/e, f/f). BackupPC_nightly
+# processes the given subset of this list (0 means 0/0, 255 means f/f).
+# Therefore, arguments of 0 255 process the entire pool, 0 127 does
+# the first half (ie: 0/0 through 7/f), 128 255 does the other half
+# (eg: 8/0 through f/f) and 0 15 does just the first 1/16 of the pool
+# (ie: 0/0 through 0/f).
#
# AUTHOR
# Craig Barratt <cbarratt@users.sourceforge.net>
#
# COPYRIGHT
-# Copyright (C) 2001-2003 Craig Barratt
+# Copyright (C) 2001-2004 Craig Barratt
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
use lib "/usr/local/BackupPC/lib";
use BackupPC::Lib;
use BackupPC::FileZIO;
+use Getopt::Std;
use File::Find;
use File::Path;
my $TopDir = $bpc->TopDir();
my $BinDir = $bpc->BinDir();
my %Conf = $bpc->Conf();
+my(%Status, %Info, %Jobs, @BgQueue, @UserQueue, @CmdQueue);
$bpc->ChildInit();
-my $err = $bpc->ServerConnect($Conf{ServerHost}, $Conf{ServerPort});
-if ( $err ) {
- print("Can't connect to server ($err)\n");
+my %opts;
+if ( !getopts("m", \%opts) || @ARGV != 2 ) {
+ print("usage: $0 [-m] poolRangeStart poolRangeEnd\n");
exit(1);
}
-my $reply = $bpc->ServerMesg("status hosts");
-$reply = $1 if ( $reply =~ /(.*)/s );
-my(%Status, %Info, %Jobs, @BgQueue, @UserQueue, @CmdQueue);
-eval($reply);
+if ( $ARGV[0] !~ /^(\d+)$/ || $1 > 255 ) {
+ print("$0: bad poolRangeStart '$ARGV[0]'\n");
+ exit(1);
+}
+my $poolRangeStart = $1;
+if ( $ARGV[1] !~ /^(\d+)$/ || $1 > 255 ) {
+ print("$0: bad poolRangeEnd '$ARGV[1]'\n");
+ exit(1);
+}
+my $poolRangeEnd = $1;
+
+if ( $opts{m} ) {
+ my $err = $bpc->ServerConnect($Conf{ServerHost}, $Conf{ServerPort});
+ if ( $err ) {
+ print("Can't connect to server ($err)\n");
+ exit(1);
+ }
+ my $reply = $bpc->ServerMesg("status hosts");
+ $reply = $1 if ( $reply =~ /(.*)/s );
+ eval($reply);
+}
###########################################################################
# When BackupPC_nightly starts, BackupPC will not run any simultaneous
# BackupPC_dump commands. We first do things that contend with
# BackupPC_dump, eg: aging per-PC log files etc.
###########################################################################
-
-#
-# Do per-PC log file aging
-#
-my($sec,$min,$hour,$mday,$mon,$year,$wday,$yday,$isdst) = localtime(time);
-if ( $mday == 1 ) {
- foreach my $host ( keys(%Status) ) {
- my $lastLog = $Conf{MaxOldPerPCLogFiles} - 1;
- unlink("$TopDir/pc/$host/LOG.$lastLog")
- if ( -f "$TopDir/pc/$host/LOG.$lastLog" );
- unlink("$TopDir/pc/$host/LOG.$lastLog.z")
- if ( -f "$TopDir/pc/$host/LOG.$lastLog.z" );
- for ( my $i = $lastLog - 1 ; $i >= 0 ; $i-- ) {
- my $j = $i + 1;
- if ( -f "$TopDir/pc/$host/LOG.$i" ) {
- rename("$TopDir/pc/$host/LOG.$i", "$TopDir/pc/$host/LOG.$j");
- } elsif ( -f "$TopDir/pc/$host/LOG.$i.z" ) {
- rename("$TopDir/pc/$host/LOG.$i.z",
- "$TopDir/pc/$host/LOG.$j.z");
- }
- }
- #
- # Compress the log file LOG -> LOG.0.z (if enabled).
- # Otherwise, just rename LOG -> LOG.0.
- #
- BackupPC::FileZIO->compressCopy("$TopDir/pc/$host/LOG",
- "$TopDir/pc/$host/LOG.0.z",
- "$TopDir/pc/$host/LOG.0",
- $Conf{CompressLevel}, 1);
- open(LOG, ">", "$TopDir/pc/$host/LOG") && close(LOG);
- }
-}
+doPerPCLogFileAging() if ( $opts{m} );
###########################################################################
# Get statistics on the pool, and remove files that have only one link.
# contiguous)
my %FixList; # list of paths that need to be renamed to avoid
# new holes
+my @hexChars = qw(0 1 2 3 4 5 6 7 8 9 a b c d e f);
+
for my $pool ( qw(pool cpool) ) {
- $fileCnt = 0;
- $dirCnt = 0;
- $blkCnt = 0;
- $fileCntRm = 0;
- $blkCntRm = 0;
- $blkCnt2 = 0;
- $fileCntRep = 0;
- $fileRepMax = 0;
- $fileLinkMax = 0;
- $fileCntRename = 0;
- %FixList = ();
- find({wanted => \&GetPoolStats, no_chdir => 1}, "$TopDir/$pool");
- my $kb = $blkCnt / 2;
- my $kbRm = $blkCntRm / 2;
- my $kb2 = $blkCnt2 / 2;
-
- #
- # Now make sure that files with repeated checksums are still
- # sequentially numbered
- #
- foreach my $name ( sort(keys(%FixList)) ) {
- my $rmCnt = $FixList{$name} + 1;
- my $new = -1;
- for ( my $old = -1 ; ; $old++ ) {
- my $oldName = $name;
- $oldName .= "_$old" if ( $old >= 0 );
- if ( !-f $oldName ) {
- #
- # We know we are done when we have missed at least
- # the number of files that were removed from this
- # base name, plus a couple just to be sure
- #
- last if ( $rmCnt-- <= 0 );
- next;
+ for ( my $i = $poolRangeStart ; $i <= $poolRangeEnd ; $i++ ) {
+ my $dir = "$hexChars[int($i / 16)]/$hexChars[$i % 16]";
+ # print("Doing $pool/$dir\n") if ( ($i % 16) == 0 );
+ $fileCnt = 0;
+ $dirCnt = 0;
+ $blkCnt = 0;
+ $fileCntRm = 0;
+ $blkCntRm = 0;
+ $blkCnt2 = 0;
+ $fileCntRep = 0;
+ $fileRepMax = 0;
+ $fileLinkMax = 0;
+ $fileCntRename = 0;
+ %FixList = ();
+ find({wanted => \&GetPoolStats}, "$TopDir/$pool/$dir");
+ my $kb = $blkCnt / 2;
+ my $kbRm = $blkCntRm / 2;
+ my $kb2 = $blkCnt2 / 2;
+
+ #
+ # Main BackupPC_nightly counts the top-level directory
+ #
+ $dirCnt++ if ( $opts{m} && -d "$TopDir/$pool" && $i == 0 );
+
+ #
+ # Also count the next level directories
+ #
+ $dirCnt++ if ( ($i % 16) == 0
+ && -d "$TopDir/$pool/$hexChars[int($i / 16)]" );
+
+ #
+ # Now make sure that files with repeated checksums are still
+ # sequentially numbered
+ #
+ foreach my $name ( sort(keys(%FixList)) ) {
+ my $rmCnt = $FixList{$name} + 1;
+ my $new = -1;
+ for ( my $old = -1 ; ; $old++ ) {
+ my $oldName = $name;
+ $oldName .= "_$old" if ( $old >= 0 );
+ if ( !-f $oldName ) {
+ #
+ # We know we are done when we have missed at least
+ # the number of files that were removed from this
+ # base name, plus a couple just to be sure
+ #
+ last if ( $rmCnt-- <= 0 );
+ next;
+ }
+ my $newName = $name;
+ $newName .= "_$new" if ( $new >= 0 );
+ $new++;
+ next if ( $oldName eq $newName );
+ rename($oldName, $newName);
+ $fileCntRename++;
}
- my $newName = $name;
- $newName .= "_$new" if ( $new >= 0 );
- $new++;
- next if ( $oldName eq $newName );
- rename($oldName, $newName);
- $fileCntRename++;
}
+ print("BackupPC_stats $i = $pool,$fileCnt,$dirCnt,$kb,$kb2,$kbRm,"
+ . "$fileCntRm,$fileCntRep,$fileRepMax,"
+ . "$fileCntRename,$fileLinkMax\n");
}
- print("BackupPC_stats = $pool,$fileCnt,$dirCnt,$kb,$kb2,$kbRm,$fileCntRm,"
- . "$fileCntRep,$fileRepMax,$fileCntRename,"
- . "$fileLinkMax\n");
}
###########################################################################
###########################################################################
# Send email
###########################################################################
-system("$BinDir/BackupPC_sendEmail");
+if ( $opts{m} ) {
+ print("log BackupPC_nightly now running BackupPC_sendEmail\n");
+ system("$BinDir/BackupPC_sendEmail")
+}
+
+#
+# Do per-PC log file aging
+#
+sub doPerPCLogFileAging
+{
+ my($sec,$min,$hour,$mday,$mon,$year,$wday,$yday,$isdst) = localtime(time);
+ if ( $mday == 1 ) {
+ foreach my $host ( keys(%Status) ) {
+ my $lastLog = $Conf{MaxOldPerPCLogFiles} - 1;
+ unlink("$TopDir/pc/$host/LOG.$lastLog")
+ if ( -f "$TopDir/pc/$host/LOG.$lastLog" );
+ unlink("$TopDir/pc/$host/LOG.$lastLog.z")
+ if ( -f "$TopDir/pc/$host/LOG.$lastLog.z" );
+ for ( my $i = $lastLog - 1 ; $i >= 0 ; $i-- ) {
+ my $j = $i + 1;
+ if ( -f "$TopDir/pc/$host/LOG.$i" ) {
+ rename("$TopDir/pc/$host/LOG.$i",
+ "$TopDir/pc/$host/LOG.$j");
+ } elsif ( -f "$TopDir/pc/$host/LOG.$i.z" ) {
+ rename("$TopDir/pc/$host/LOG.$i.z",
+ "$TopDir/pc/$host/LOG.$j.z");
+ }
+ }
+ #
+ # Compress the log file LOG -> LOG.0.z (if enabled).
+ # Otherwise, just rename LOG -> LOG.0.
+ #
+ BackupPC::FileZIO->compressCopy("$TopDir/pc/$host/LOG",
+ "$TopDir/pc/$host/LOG.0.z",
+ "$TopDir/pc/$host/LOG.0",
+ $Conf{CompressLevel}, 1);
+ open(LOG, ">", "$TopDir/pc/$host/LOG") && close(LOG);
+ }
+ }
+}
sub GetPoolStats
{
- my($name) = $File::Find::name;
- my($baseName) = "";
- my(@s);
+ my($nlinks, $nblocks) = (lstat($_))[3, 12];
- return if ( !-d && !-f );
- $dirCnt += -d;
- $name = $1 if ( $name =~ /(.*)/ );
- @s = stat($name);
- if ( $name =~ /(.*)_(\d+)$/ ) {
- $baseName = $1;
- if ( $s[3] != 1 ) {
- $fileRepMax = $2 + 1 if ( $fileRepMax <= $2 );
- $fileCntRep++;
- }
+ if ( -d _ ) {
+ $dirCnt++;
+ return;
+ } elsif ( ! -f _ ) {
+ return;
}
- if ( -f && $s[3] == 1 ) {
- $blkCntRm += $s[12];
+ if ( $nlinks == 1 ) {
+ $blkCntRm += $nblocks;
$fileCntRm++;
- unlink($name);
- #
- # We must keep repeated files numbered sequential (ie: files
- # that have the same checksum are appended with _0, _1 etc).
- # There are two cases: we remove the base file xxxx, but xxxx_0
- # exists, or we remove any file of the form xxxx_nnn. We remember
- # the base name and fix it up later (not in the middle of find).
- #
- $baseName = $name if ( $baseName eq "" );
- $FixList{$baseName}++;
+ unlink($_);
+ #
+ # We must keep repeated files numbered sequential (ie: files
+ # that have the same checksum are appended with _0, _1 etc).
+ # There are two cases: we remove the base file xxxx, but xxxx_0
+ # exists, or we remove any file of the form xxxx_nnn. We remember
+ # the base name and fix it up later (not in the middle of find).
+ #
+ my($baseName);
+ ($baseName = $File::Find::name) =~ s/_\d+$//;
+ $FixList{$baseName}++;
} else {
- $fileCnt += -f;
- $blkCnt += $s[12];
- $blkCnt2 += $s[12] if ( -f && $s[3] == 2 );
- $fileLinkMax = $s[3] if ( $fileLinkMax < $s[3] );
+ if ( /_(\d+)$/ ) {
+ $fileRepMax = $1 + 1 if ( $fileRepMax <= $1 );
+ $fileCntRep++;
+ }
+ $fileCnt += 1;
+ $blkCnt += $nblocks;
+ $blkCnt2 += $nblocks if ( $nlinks == 2 );
+ $fileLinkMax = $nlinks if ( $fileLinkMax < $nlinks );
}
}
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
$Conf{MaxPendingCmds} = 10;
+#
+# How many BackupPC_nightly processes to run in parallel.
+#
+# Each night, at the first wakeup listed in $Conf{WakeupSchedule},
+# BackupPC_nightly is run. Its job is to remove unneeded files
+# in the pool, ie: files that only have one link. To avoid race
+# conditions, BackupPC_nightly runs only when there are no backups
+# running, and no backups will start while it runs.
+#
+# So to reduce the elapsed time, you might want to increase this
+# setting to run several BackupPC_nightly processes in parallel
+# (eg: 4, or even 8).
+#
+$Conf{MaxBackupPCNightlyJobs} = 2;
+
+#
+# How many days (runs) it takes BackupPC_nightly to traverse the
+# entire pool. Normally this is 1, which means every night it runs,
+# it does traverse the entire pool removing unused pool files.
+#
+# Other valid values are 2, 4, 8, 16. This causes BackupPC_nightly to
+# traverse 1/2, 1/4, 1/8 or 1/16th of the pool each night, meaning it
+# takes 2, 4, 8 or 16 days to completely traverse the pool. The
+# advantage is that each night the running time of BackupPC_nightly
+# is reduced roughly in proportion, since the total job is split
+# over multiple days. The disadvantage is that unused pool files
+# take longer to get deleted, which will slightly increase disk
+# usage.
+#
+# Note that even when $Conf{BackupPCNightlyPeriod} > 1, BackupPC_nightly
+# still runs every night. It just does less work each time it runs.
+#
+# Examples:
+#
+# $Conf{BackupPCNightlyPeriod} = 1; # entire pool is checked every night
+#
+# $Conf{BackupPCNightlyPeriod} = 2; # two days to complete pool check
+# # (different half each night)
+#
+# $Conf{BackupPCNightlyPeriod} = 4; # four days to complete pool check
+# # (different quarter each night)
+#
+$Conf{BackupPCNightlyPeriod} = 1;
+
#
# Maximum number of log files we keep around in log directory.
# These files are aged nightly. A setting of 14 means the log
#
$Conf{SplitPath} = '/usr/bin/split';
-$Conf{ParPath} = '/usr/bin/par';
+$Conf{ParPath} = '/usr/bin/par2';
$Conf{CatPath} = '/bin/cat';
$Conf{GzipPath} = '/bin/gzip';
$Conf{Bzip2Path} = '/usr/bin/bzip2';
#
# Archive Parity Files
#
-# The number of Parity Files to generate.
-# Uses the commandline par available from
+# The amount of Parity data to generate, as a percentage
+# of the archive size.
+# Uses the commandline par2 (par2cmdline) available from
# http://parchive.sourceforge.net
#
# Only useful for file dumps.
# $Installdir The installation directory of BackupPC
# $tarCreatePath The path to BackupPC_tarCreate
# $splitpath The path to the split program
-# $parpath The path to the par program
+# $parpath The path to the par2 program
# $host The host to archive
# $backupnumber The backup number of the host to archive
# $compression The path to the compression program
# $compext The extension assigned to the compression type
# $splitsize The number of bytes to split archives into
# $archiveloc The location to put the archive
-# $parfile The number of par files to create
+# $parfile The amount of parity data to create (percentage)
#
$Conf{ArchiveClientCmd} = '$Installdir/bin/BackupPC_archiveHost'
. ' $tarCreatePath $splitpath $parpath $host $backupnumber'
# $HostList list of hosts being archived
# $BackupList list of backup numbers for the hosts being archived
# $archiveloc location where the archive is sent to
-# $parfile number of par files being generated
+# $parfile amount of parity data being generated (percentage)
# $compression compression program being used (eg: cat, gzip, bzip2)
# $compext extension used for compression type (eg: raw, gz, bz2)
# $splitsize size of the files that the archive creates
sendmail => "SendmailPath",
hostname => "HostnamePath",
split => "SplitPath",
- 'parchive/par' => "ParPath",
+ par2 => "ParPath",
cat => "CatPath",
gzip => "GzipPath",
bzip2 => "Bzip2Path",
=item *
+Develop a FAQ and move some significant parts of this document
+to the FAQ (eg: ssh setup, this roadmap etc). Volunteers?
+
+=item *
+
Adding hardlink support to rsync.
=item *
Currently smbclient incrementals only depend upon mtime, so
deleted files or renamed files are not detected. FileSys::SmbClient
would also allow resuming of incomplete full backups in the
-same manner as rsync will.
+same manner as rsync will. (I'm not sure if FileSys::SmbClient
+has been updated for samba 3.x.)
=item *
-Support --listed-incremental or --incremental for tar,
-so that incrementals will depend upon any attribute change (eg: exist,
-mtime, file size, uid, gid), rather than just mtime. This will allow
-tar to be to as capable as FileSys::SmbClient and rsync.
+Possibly support --listed-incremental or --incremental for tar, so that
+incrementals will depend upon any attribute change (eg: exist, mtime,
+file size, uid, gid), rather than just mtime. This will allow tar to
+be as capable as FileSys::SmbClient and rsync.
=item *
=item *
+Improve the warning messages about locked files on WinXX machines,
+so that more file types than just outlook pst files will produce
+warning emails (and configuration settings for specifying the
+file extensions that produce email warnings).
+
+=item *
+
+Long term, support bare metal restore. For *nix machines there's
+not a lot to do (although rsync needs hardlink support). For
+WinXX machines the file locking problem has to get resolved.
+Plus ACL save/restore would need to be supported. This is
+really long term.
+
+=item *
+
+Support client pull for restores. For example, BackupPC could
+emulate an rsync server. That way you could boot a knoppix cd,
+and you wouldn't need perl, BackupPC, ssh setup or anything on
+the client to restore. You would just run an rsync command like:
+
+ rsync -aH BackupPCServer::moduleName /path/to/empty/disk
+
+ModuleName could contain the client name, share name and backup number.
+There would have to be some way of specifying the password; perhaps
+the CGI could be used to "turn on" rsyncd for a specific client and
+a specific time period (eg: only listens for X minutes, only serves
+up a specific client backup, accepts connections from a specific IP).
+BackupPC listens for and serves the request; it's not a real rsyncd on
+the server.
+
+=item *
+
+Add support for wget as an XferMethod. This would allow ftp and http
+files to be backed up. This might be useful, for example, for backing
+up the configuration of a router (via http), so that you have a
+backup copy of all the router setup screens in case the router
+fails.
+
+=item *
+
+Possibly support client push for backups, in addition to the
+existing server pull. This would be helpful for clients behind
+firewalls who can connect to the server, but the server cannot
+see the client.
+
+=item *
+
More speculative: Storing binary file deltas (in fact, reverse deltas)
for files that have the same name as a previous backup, but that aren't
already in the pool. This will save storage for things like mailbox
is only run when there are no BackupPC_dump or BackupPC_link processes
running.
+To improve the running time for BackupPC_nightly, several
+BackupPC_nightly processes can be run concurrently, based on
+the $Conf{MaxBackupPCNightlyJobs} setting.
+
+If BackupPC_nightly still takes too long to run,
+$Conf{BackupPCNightlyPeriod} can be used to split
+BackupPC_nightly's pool traversal across multiple nights,
+proportionally reducing its runtime each night.
+
=back
BackupPC also listens for TCP connections on $Conf{ServerPort}, which
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
eval($reply);
# ignore status related to admin and trashClean jobs
if ( $status =~ /\bhosts\b/ ) {
- delete($Status{$bpc->adminJob});
+ foreach my $host ( grep(/admin/, keys(%Status)) ) {
+ delete($Status{$host}) if ( $bpc->isAdminJob($host) );
+ }
delete($Status{$bpc->trashJob});
}
}
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
while ( @CmdQueue ) {
my $req = pop(@CmdQueue);
my $reqTime = timeStamp2($req->{reqTime});
- (my $cmd = $req->{cmd}[0]) =~ s/$BinDir\///;
+ (my $cmd = $bpc->execCmd2ShellCmd(@{$req->{cmd}})) =~ s/$BinDir\///;
$strCmd .= <<EOF;
<tr><td> ${HostLink($req->{host})} </td>
<td align="center"> $reqTime </td>
<td align="center"> $req->{user} </td>
- <td> $cmd $req->{cmd}[0] </td></tr>
+ <td> $cmd </td></tr>
EOF
}
my $content = eval ( "qq{$Lang->{Backup_Queue_Summary}}");
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
$Lang{BackupPC_Archive2_parity} = <<EOF;
<tr>
- <td>Anzahl Parität-Dateien</td>
+ <td>Prozentsatz Paritätsdaten (0 = keine, 5 = Standard)</td>
<td><input type="numeric" value="\$ArchivePar" name="par"></td>
</tr>
EOF
$Lang{BackupPC_Archive2_parity} = <<EOF;
<tr>
- <td>Number of Parity Files</td>
+ <td>Percentage of Parity Data (0 = disable, 5 = typical)</td>
<td><input type="numeric" value="\$ArchivePar" name="par"></td>
</tr>
EOF
$Lang{BackupPC_Archive2_parity} = <<EOF;
<tr>
- <td>Nombre de fichiers de parité</td>
+ <td>Pourcentage des données de parité (0 = désactivé, 5 = typique)</td>
<td><input type="numeric" value="\$ArchivePar" name="par"></td>
</tr>
EOF
#!/bin/perl
#
-# $Id: it.pm,v 1.5 2004/03/29 19:05:24 cbarratt Exp $
+# $Id: it.pm,v 1.6 2004/04/10 06:52:47 cbarratt Exp $
#
# Italian i18n file
#
$Lang{BackupPC_Archive2_parity} = <<EOF;
<tr>
- <td>Numero di file di parità</td>
+ <td>ENG Percentage of Parity Data (0 = disable, 5 = typical)</td>
<td><input type="numeric" value="\$ArchivePar" name="par"></td>
</tr>
EOF
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
TopDir => $topDir || '/data/BackupPC',
BinDir => $installDir || '/usr/local/BackupPC',
LibDir => $installDir || '/usr/local/BackupPC',
- Version => '2.1.0beta0',
+ Version => '2.1.0beta1',
BackupFields => [qw(
num type startTime endTime
nFiles size nFilesExist sizeExist nFilesNew sizeNew
sub adminJob
{
- return " admin ";
+ my($bpc, $num) = @_;
+ return " admin " if ( !$num );
+ return " admin$num ";
+}
+
+sub isAdminJob
+{
+ my($bpc, $str) = @_;
+ return $str =~ /^ admin/;
}
sub trashJob
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
#
#========================================================================
#
-# Version 2.1.0beta0, released 20 Mar 2004.
+# Version 2.1.0beta1, released 9 Apr 2004.
#
# See http://backuppc.sourceforge.net.
#
umask(0022);
-my $Version = "2.1.0beta0_CVS";
-my $ReleaseDate = "29 Mar 2004";
+my $Version = "2.1.0beta1";
+my $ReleaseDate = "9 Apr 2004";
my $DistDir = "dist/BackupPC-$Version";
my @PerlSrc = qw(