X-Git-Url: http://git.rot13.org/?p=BackupPC.git;a=blobdiff_plain;f=lib%2FBackupPC%2FSearch.pm;h=f2dac44032b8a661042a284e18dbeedc25c63b8e;hp=41f4e764f75b81169e3d7b2d33181e3e4f00d5f1;hb=1f12f5ba25530a04f3a5e7b86c3ac88d9526fe1d;hpb=d060aa5c2807ff541ac5c83eacfed101ed82c391

diff --git a/lib/BackupPC/Search.pm b/lib/BackupPC/Search.pm
index 41f4e76..f2dac44 100644
--- a/lib/BackupPC/Search.pm
+++ b/lib/BackupPC/Search.pm
@@ -12,18 +12,22 @@ use XML::Writer;
 use IO::File;
 use Data::Dump qw(dump);
 
+require Exporter;
+our @ISA=qw(Exporter);
+our @EXPORT=qw(unit);
+
 my $on_page = 100;
 my $pager_pages = 10;
 
-my $dsn = $Conf{SearchDSN};
-my $db_user = $Conf{SearchUser} || '';
+my $dbh;
 
-our $search_module;
+my $bpc = BackupPC::Lib->new || die;
+$bpc->ConfigRead('_search_archive');
+my %Conf = $bpc->Conf();
 
 sub search_module {
 
-    $search_module = "BackupPC::Search::Estraier";
-    $search_module = "BackupPC::Search::KinoSearch" if $ENV{KINO};
+    my $search_module = $Conf{SearchModule} || die "search is disabled";
     eval "use $search_module";
     if ( $@ ) {
         warn "ERROR: $search_module: $!";
@@ -31,16 +35,13 @@ sub search_module {
         warn "# using $search_module for full-text search";
     }
 
-    my $bpc = BackupPC::Lib->new || die;
-    my %Conf = $bpc->Conf();
-
     return $search_module->new( %Conf );
 }
 
 my $dbh;
 
 sub get_dbh {
-    $dbh ||= DBI->connect($dsn, $db_user, "", { RaiseError => 1, AutoCommit => 1 } );
+    $dbh ||= DBI->connect($Conf{SearchDSN}, $Conf{SearchUser}, "", { RaiseError => 1, AutoCommit => 1 } );
     return $dbh;
 }
 
@@ -149,12 +150,12 @@ my $sort_def = {
     search => {
         default => 'date_a',
         sql => {
-            share_d => 'shares.name DESC',
-            share_a => 'shares.name ASC',
-            path_d => 'files.path DESC',
-            path_a => 'files.path ASC',
-            num_d => 'files.backupnum DESC',
-            num_a => 'files.backupnum ASC',
+            sname_d => 'shares.name DESC',
+            sname_a => 'shares.name ASC',
+            filepath_d => 'files.path DESC',
+            filepath_a => 'files.path ASC',
+            backupnum_d => 'files.backupnum DESC',
+            backupnum_a => 'files.backupnum ASC',
             size_d => 'files.size DESC',
             size_a => 'files.size ASC',
             date_d => 'files.date DESC',
@@ -163,8 +164,8 @@ },
     burn => {
         default => 'date_a',
         sql => {
-            share_d => 'host DESC, share DESC',
-            share_a => 'host ASC, share ASC',
+            sname_d => 'host DESC, share DESC',
+            sname_a => 'host ASC, share ASC',
             num_d => 'backupnum DESC',
             num_a => 'backupnum ASC',
             date_d => 'date DESC',
@@ -303,10 +304,9 @@ sub getGzipName($$$)
 
 sub get_tgz_size_by_name($) {
     my $name = shift;
-    my $tgz = $Conf{InstallDir}.'/'.$Conf{GzipTempDir}.'/'.$name;
+    my $tgz = $Conf{GzipTempDir}.'/'.$name;
     my $size = -1;
 
-    my $Dir = $Conf{InstallDir}."/data/log";
     $|=1;
     if (-f "${tgz}.tar.gz") {
         $size = (stat("${tgz}.tar.gz"))[7];
@@ -377,7 +377,7 @@ sub getGzipSize($$)
 
 sub getVolumes($) {
     my $id = shift;
-    my $max_archive_size = $Conf{MaxArchiveSize} || die "no MaxArchiveSize";
+    my $max_archive_size = $Conf{ArchiveMediaSize} || die "no ArchiveMediaSize";
 
     my $sth = $dbh->prepare(qq{
         select
@@ -453,13 +453,11 @@ print STDERR "## sort=". ($param->{'sort'} || 'no sort param') . " burn sql orde
         $row->{'age'} = sprintf("%0.1f", ( $row->{'age'} / 86400 ) );
         #$row->{'age'} = sprintf("%0.1f", ( (time() - $row->{'date'}) / 86400 ) );
 
-        my $max_archive_size = $Conf{MaxArchiveSize} || die "no MaxArchiveSize";
+        my $max_archive_size = $Conf{ArchiveMediaSize} || die "no ArchiveMediaSize";
         if ($row->{size} > $max_archive_size) {
             ($row->{volumes}, $row->{inc_size_calc}) = getVolumes($row->{id});
         }
 
-        $row->{size} = sprintf("%0.2f", $row->{size} / 1024 / 1024);
-
         # do some cluster calculation (approximate)
         $row->{inc_size} = int(( ($row->{inc_size} + 1023 ) / 2 ) * 2);
         $row->{inc_size_calc} ||= $row->{inc_size};
@@ -473,8 +471,8 @@ sub displayBackupsGrid($) {
 
     my $param = shift;
 
-    my $max_archive_size = $Conf{MaxArchiveSize} || die "no MaxArchiveSize";
-    my $max_archive_file_size = $Conf{MaxArchiveFileSize} || die "no MaxFileInSize";
+    my $max_archive_size = $Conf{ArchiveMediaSize} || die "no ArchiveMediaSize";
+    my $max_archive_file_size = $Conf{ArchiveChunkSize} || die "no MaxFileInSize";
 
     my $retHTML .= q{
@@ -832,8 +830,8 @@ EOF3
     } .
     sort_header($param, 'Date', 'date', 'center') .
     sort_header($param, 'Age/days', 'age', 'center') .
-    sort_header($param, 'Size/Mb', 'size', 'center') .
-    sort_header($param, 'gzip size/Kb', 'incsize', 'center') .
+    sort_header($param, 'Size', 'size', 'center') .
+    sort_header($param, 'gzip size', 'incsize', 'center') .
     qq{
         medias
     };
@@ -871,8 +869,8 @@
         '' . $backup->{'type'} . '' .
         '' . epoch_to_iso( $backup->{'date'} ) . '' .
         '' . $backup->{'age'} . '' .
-        '' . $backup->{'size'} . '' .
-        '' . sprintf("%0.1f", $backup->{'inc_size'} / 1024 ) .
+        '' . unit($backup->{'size'}) . '' .
+        '' . unit($backup->{'inc_size'}) .
         '' .
         '' . '' .
         '' . ( qq{media} x $backup->{volumes} ) . '' .
@@ -967,9 +965,9 @@ sub displayGrid($) {
     }
 
     $retHTML .=
-        sort_header($param, 'Share', 'share', 'center') .
-        sort_header($param, 'Type and Name', 'path', 'center') .
-        sort_header($param, '#', 'num', 'center') .
+        sort_header($param, 'Share', 'sname', 'center') .
+        sort_header($param, 'Type and Name', 'filepath', 'center') .
+        sort_header($param, '#', 'backupnum', 'center') .
         sort_header($param, 'Size', 'size', 'center') .
         sort_header($param, 'Date', 'date', 'center');
 
@@ -1122,4 +1120,24 @@ sub displayGrid($) {
     return $retHTML;
 }
 
+my @units = qw/b k M G/;
+sub unit {
+    my $v = shift;
+
+    my $o = 0;
+
+    while ( ( $v / 10000 ) >= 1 ) {
+        $o++;
+        $v /= 1024;
+    }
+
+    if ( $v >= 1 ) {
+        return sprintf("%d%s", $v, $units[$o]);
+    } elsif ( $v == 0 ) {
+        return 0;
+    } else {
+        return sprintf("%.1f%s", $v, $units[$o]);
+    }
+}
+
 1;
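
The unit() helper added at the end of the diff is exported by default (our @EXPORT=qw(unit)), so callers such as the CGI grid code can format sizes without repeating the sprintf/1024 arithmetic. A minimal usage sketch, assuming a working BackupPC installation: the module now creates a BackupPC::Lib object and reads the _search_archive configuration at load time, so it cannot be loaded without one.

#!/usr/bin/perl
use strict;
use warnings;
use BackupPC::Search;   # imports unit() via @EXPORT

# unit() keeps dividing by 1024 while the value is still >= 10000,
# then prints it with the matching suffix from qw/b k M G/.
print unit(0), "\n";            # 0
print unit(2048), "\n";         # "2048b" -- below the 10000 threshold, so no scaling
print unit(123_456_789), "\n";  # "117M"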
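The patch also stops hard-coding the search backend and the database DSN: both now come from %Conf, loaded via $bpc->ConfigRead('_search_archive'), alongside the renamed archive settings. Purely as an illustration, a configuration fragment for this could look like the sketch below; the key names are taken from the diff, but the values, their units and the file location are assumptions, not part of the change.

# _search_archive configuration sketch -- all values are placeholders
$Conf{SearchModule}     = 'BackupPC::Search::Estraier';  # or another BackupPC::Search:: backend
$Conf{SearchDSN}        = 'dbi:Pg:dbname=backuppc';      # DBI data source handed to DBI->connect
$Conf{SearchUser}       = 'backuppc';                    # database user for the search database
$Conf{ArchiveMediaSize} = 4400 * 1024 * 1024;            # size limit of one archive medium (placeholder)
$Conf{ArchiveChunkSize} = 100 * 1024 * 1024;             # per-file size limit inside an archive (placeholder)
$Conf{GzipTempDir}      = '/var/lib/backuppc/temp';      # directory holding the generated .tar.gz archives (placeholder)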