#!/usr/bin/perl
use strict;
-#use lib "/data/backuppc/lib";
-use lib "/usr/share/backuppc/lib";
+use lib "/usr/local/BackupPC/lib";
use DBI;
use BackupPC::Lib;
use Cwd qw/abs_path/;
use Data::Dump qw(dump);
+our $search_module;
+BEGIN {
+ $search_module = "BackupPC::Search::Estraier";
+ $search_module = "BackupPC::Search::KinoSearch" if $ENV{KINO};
+ eval "use $search_module";
+ if ( $@ ) {
+		warn "ERROR: $search_module: $@";
+ } else {
+ warn "# using $search_module for full-text search";
+ }
+}
+
use constant BPC_FTYPE_DIR => 5;
use constant EST_CHUNK => 4096;
return strftime($t_fmt,localtime());
}
-my $hest_node;
-
sub hest_update {
my ($host_id, $share_id, $num) = @_;
my $skip_check = $opt{j} && print STDERR "Skipping check for existing files -- this should be used only with initital import\n";
- unless ($index_node_url && $index_node_url =~ m#^http://#) {
- print STDERR "HyperEstraier support not enabled or index node invalid\n" if ($debug);
- $index_node_url = 0;
- return;
- }
-
- print curr_time," updating Hyper Estraier:";
+ print curr_time," updating fulltext:";
my $t = time();
my $offset = 0;
my $added = 0;
- if ($index_node_url) {
- print " opening index $index_node_url";
- $hest_node ||= Search::Estraier::Node->new(
- url => $index_node_url,
- user => 'admin',
- passwd => 'admin',
- croak_on_error => 1,
- );
- print " via node URL";
- }
+ my $search = $search_module->new( $index_node_url );
my $results = 0;
}
while (my $row = $sth->fetchrow_hashref()) {
-
- my $uri = $row->{hname} . ':' . $row->{sname} . '#' . $row->{backupnum} . ' ' . $row->{filepath};
- if (! $skip_check && $hest_node) {
- my $id = $hest_node->uri_to_id($uri);
- next if ($id && $id == -1);
- }
-
- # create a document object
- my $doc = Search::Estraier::Document->new;
-
- # add attributes to the document object
- $doc->add_attr('@uri', $uri);
-
- foreach my $c (@{ $sth->{NAME} }) {
- print STDERR "attr $c = $row->{$c}\n" if ($debug > 2);
- $doc->add_attr($c, $row->{$c}) if (defined($row->{$c}));
- }
-
- #$doc->add_attr('@cdate', fmt_date($row->{'date'}));
-
- # add the body text to the document object
- my $path = $row->{'filepath'};
- $doc->add_text($path);
- $path =~ s/(.)/$1 /g;
- $doc->add_hidden_text($path);
-
- print STDERR $doc->dump_draft,"\n" if ($debug > 1);
-
- # register the document object to the database
- $hest_node->put_doc($doc) if ($hest_node);
-
+ next if $search->exists( $row );
+ $search->add_doc( $row );
$added++;
}
date timestamp default now(),
primary key(id)
);
+
+ -- report backups and corresponding dvd
+
+ create view backups_on_dvds as
+ select
+ backups.id as id,
+ hosts.name || ':' || shares.name as share,
+ backups.num as num,
+ backups.type as type,
+ abstime(backups.date) as backup_date,
+ backups.size as size,
+ backups.inc_size as gzip_size,
+ archive.id as archive_id,
+ archive.dvd_nr
+ from backups
+ join shares on backups.shareid=shares.id
+ join hosts on shares.hostid = hosts.id
+ left outer join archive_backup on backups.id = archive_backup.backup_id
+ left outer join archive on archive_backup.archive_id = archive.id
+ where backups.parts > 0 and size > 0
+ order by backups.date
+ ;
});
print "creating indexes: ";
$host_nr++;
# get backups for a host
my @backups = $bpc->BackupInfoRead($hostname);
-warn "XXXX ",dump(@backups);
my $incs = scalar @backups;
my $host_header = sprintf("host %s [%d/%d]: %d increments\n",
);
print $share_header unless ($opt{q});
- my $files = BackupPC::View->new($bpc, $hostname, \@backups, 1);
+ my $files = BackupPC::View->new($bpc, $hostname, \@backups, { only_first => 1 });
+
foreach my $share ($files->shareList($backupNum)) {
my $t = time();