#!/bin/perl -T
#============================================================= -*-perl-*-
#
# BackupPC_nightly: Nightly cleanup & statistics script.
#
# DESCRIPTION
#   BackupPC_nightly performs several administrative tasks:
#
#     - monthly aging of per-PC log files
#
#     - pruning files from pool no longer used (ie: those with only one
#       hard link).
#
#     - sending email to users and administrators.
#
# AUTHOR
#   Craig Barratt
#
# COPYRIGHT
#   Copyright (C) 2001-2003  Craig Barratt
#
#   This program is free software; you can redistribute it and/or modify
#   it under the terms of the GNU General Public License as published by
#   the Free Software Foundation; either version 2 of the License, or
#   (at your option) any later version.
#
#   This program is distributed in the hope that it will be useful,
#   but WITHOUT ANY WARRANTY; without even the implied warranty of
#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#   GNU General Public License for more details.
#
#   You should have received a copy of the GNU General Public License
#   along with this program; if not, write to the Free Software
#   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
#========================================================================
#
# Version 2.1.0_CVS, released 3 Jul 2003.
#
# See http://backuppc.sourceforge.net.
#
#========================================================================

use strict;
no  utf8;
use lib "/usr/local/BackupPC/lib";
use BackupPC::Lib;
use BackupPC::FileZIO;
use File::Find;
use File::Path;
use Data::Dumper;

die("BackupPC::Lib->new failed\n") if ( !(my $bpc = BackupPC::Lib->new) );
my $TopDir = $bpc->TopDir();
my $BinDir = $bpc->BinDir();
my %Conf   = $bpc->Conf();

$bpc->ChildInit();

my $err = $bpc->ServerConnect($Conf{ServerHost}, $Conf{ServerPort});
if ( $err ) {
    print("Can't connect to server ($err)\n");
    exit(1);
}
#
# The server replies with a Perl image of its status; untaint it
# (we run with -T) and eval it to populate %Status and friends.
#
my $reply = $bpc->ServerMesg("status hosts");
$reply = $1 if ( $reply =~ /(.*)/s );
my(%Status, %Info, %Jobs, @BgQueue, @UserQueue, @CmdQueue);
eval($reply);

###########################################################################
# When BackupPC_nightly starts, BackupPC will not run any simultaneous
# BackupPC_dump commands.  We first do things that contend with
# BackupPC_dump, eg: aging per-PC log files etc.
###########################################################################

#
# Do per-PC log file aging
#
my($sec,$min,$hour,$mday,$mon,$year,$wday,$yday,$isdst) = localtime(time);
if ( $mday == 1 ) {
    foreach my $host ( keys(%Status) ) {
        my $lastLog = $Conf{MaxOldPerPCLogFiles} - 1;
        unlink("$TopDir/pc/$host/LOG.$lastLog")
                if ( -f "$TopDir/pc/$host/LOG.$lastLog" );
        unlink("$TopDir/pc/$host/LOG.$lastLog.z")
                if ( -f "$TopDir/pc/$host/LOG.$lastLog.z" );
        for ( my $i = $lastLog - 1 ; $i >= 0 ; $i-- ) {
            my $j = $i + 1;
            if ( -f "$TopDir/pc/$host/LOG.$i" ) {
                rename("$TopDir/pc/$host/LOG.$i", "$TopDir/pc/$host/LOG.$j");
            } elsif ( -f "$TopDir/pc/$host/LOG.$i.z" ) {
                rename("$TopDir/pc/$host/LOG.$i.z",
                       "$TopDir/pc/$host/LOG.$j.z");
            }
        }
        #
        # Compress the log file LOG -> LOG.0.z (if enabled).
        # Otherwise, just rename LOG -> LOG.0.
        #
        BackupPC::FileZIO->compressCopy("$TopDir/pc/$host/LOG",
                                        "$TopDir/pc/$host/LOG.0.z",
                                        "$TopDir/pc/$host/LOG.0",
                                        $Conf{CompressLevel}, 1);
        open(LOG, ">", "$TopDir/pc/$host/LOG") && close(LOG);
    }
}

###########################################################################
# Get statistics on the pool, and remove files that have only one link.
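#
# In BackupPC's scheme each backup tree hardlinks its files into the
# pool, so a pool file whose link count has fallen back to 1 is no
# longer referenced by any backup and can safely be unlinked.  Removing
# such files can leave holes in the _nnn suffix chains used for repeated
# md5 checksums; the %FixList pass below renumbers those chains (see the
# worked example at the end of this file).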
###########################################################################
my $fileCnt;       # total number of files
my $dirCnt;        # total number of directories
my $blkCnt;        # total block size of files
my $fileCntRm;     # total number of removed files
my $blkCntRm;      # total block size of removed files
my $blkCnt2;       # total block size of files with just 2 links
                   # (ie: files that only occur once among all backups)
my $fileCntRep;    # total number of file names containing "_", ie: files
                   # that have repeated md5 checksums
my $fileRepMax;    # worst case number of files that have repeated checksums
                   # (ie: max(nnn+1) for all names xxxxxxxxxxxxxxxx_nnn)
my $fileLinkMax;   # maximum number of hardlinks on a pool file
my $fileCntRename; # number of renamed files (to keep file numbering
                   # contiguous)
my %FixList;       # list of paths that need to be renamed to avoid
                   # new holes

for my $pool ( qw(pool cpool) ) {
    $fileCnt       = 0;
    $dirCnt        = 0;
    $blkCnt        = 0;
    $fileCntRm     = 0;
    $blkCntRm      = 0;
    $blkCnt2       = 0;
    $fileCntRep    = 0;
    $fileRepMax    = 0;
    $fileLinkMax   = 0;
    $fileCntRename = 0;
    %FixList       = ();
    find({wanted => \&GetPoolStats, no_chdir => 1}, "$TopDir/$pool");
    #
    # stat() reports 512-byte blocks, so divide by 2 to get KB.
    #
    my $kb   = $blkCnt / 2;
    my $kbRm = $blkCntRm / 2;
    my $kb2  = $blkCnt2 / 2;

    #
    # Now make sure that files with repeated checksums are still
    # sequentially numbered
    #
    foreach my $name ( sort(keys(%FixList)) ) {
        my $rmCnt = $FixList{$name} + 1;
        my $new = -1;
        for ( my $old = -1 ; ; $old++ ) {
            my $oldName = $name;
            $oldName .= "_$old" if ( $old >= 0 );
            if ( !-f $oldName ) {
                #
                # We know we are done when we have missed at least
                # the number of files that were removed from this
                # base name, plus a couple just to be sure
                #
                last if ( $rmCnt-- <= 0 );
                next;
            }
            my $newName = $name;
            $newName .= "_$new" if ( $new >= 0 );
            $new++;
            next if ( $oldName eq $newName );
            rename($oldName, $newName);
            $fileCntRename++;
        }
    }
    print("BackupPC_stats = $pool,$fileCnt,$dirCnt,$kb,$kb2,$kbRm,$fileCntRm,"
        . "$fileCntRep,$fileRepMax,$fileCntRename,"
        . "$fileLinkMax\n");
}

###########################################################################
# Tell BackupPC that it is now ok to start running BackupPC_dump
# commands.  We are guaranteed that no BackupPC_link commands will
# run since only a single CmdQueue command runs at a time, and
# that means we are safe.
###########################################################################
printf("BackupPC_nightly lock_off\n");

###########################################################################
# Send email
###########################################################################
system("$BinDir/BackupPC_sendEmail");

sub GetPoolStats {
    my($name) = $File::Find::name;
    my($baseName) = "";
    my(@s);

    return if ( !-d && !-f );
    $dirCnt += -d;
    $name = $1 if ( $name =~ /(.*)/ );      # untaint $name (we run with -T)
    @s = stat($name);
    if ( $name =~ /(.*)_(\d+)$/ ) {
        $baseName = $1;
        if ( $s[3] != 1 ) {
            $fileRepMax = $2 + 1 if ( $fileRepMax <= $2 );
            $fileCntRep++;
        }
    }
    if ( -f && $s[3] == 1 ) {
        $blkCntRm += $s[12];
        $fileCntRm++;
        unlink($name);
        #
        # We must keep repeated files numbered sequentially (ie: files
        # that have the same checksum are appended with _0, _1 etc).
        # There are two cases: we remove the base file xxxx, but xxxx_0
        # exists, or we remove any file of the form xxxx_nnn.  We remember
        # the base name and fix it up later (not in the middle of find).
        #
        $baseName = $name if ( $baseName eq "" );
        $FixList{$baseName}++;
    } else {
        $fileCnt += -f;
        $blkCnt  += $s[12];
        $blkCnt2 += $s[12] if ( -f && $s[3] == 2 );
        $fileLinkMax = $s[3] if ( $fileLinkMax < $s[3] );
    }
}
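
#
# Worked example of the renumbering above (using the xxxx naming from
# the comments): suppose the pool holds xxxx, xxxx_0 and xxxx_1, and
# GetPoolStats unlinks xxxx_0 because its link count dropped to 1.
# $FixList{xxxx} becomes 1, and the fix-up loop then renames xxxx_1 to
# xxxx_0, keeping the collision suffixes contiguous so that new
# collisions always append at the end of the chain.
#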