use Cwd qw/abs_path/;
use File::Temp qw/tempdir/;
-use Data::Dumper;
use lib './lib';
-use WebPAC::Lookup;
-use WebPAC::Input::ISIS;
+use WebPAC::Common 0.02;
+use WebPAC::Lookup 0.03;
+use WebPAC::Input 0.07;
use WebPAC::Store 0.03;
-use WebPAC::Normalize::XML;
+use WebPAC::Normalize 0.11;
use WebPAC::Output::TT;
-use WebPAC::Output::Estraier 0.02;
+use WebPAC::Validate;
+use WebPAC::Output::MARC;
use YAML qw/LoadFile/;
-use LWP::Simple;
+use Getopt::Long;
+use File::Path;
+use Time::HiRes qw/time/;
+use File::Slurp;
+use Data::Dump qw/dump/;
+use Storable qw/dclone/;
-my $limit = shift @ARGV;
+use Proc::Queue size => 1;
+use POSIX ":sys_wait_h"; # imports WNOHANG
-my $config = LoadFile('conf/config.yml');
+=head1 NAME
-print "config = ",Dumper($config);
+run.pl - start WebPAC indexing
+
+B<this command will probably go away. Don't get used to it!>
+
+Options:
+
+=over 4
+
+=item --offset 42
+
+start loading (all) databases at offset 42
+
+=item --limit 100
+
+limit loading to 100 records
+
+=item --clean
+
+remove database and Hyper Estraier index before indexing
+
+=item --only=database_name/input_filter
+
+reindex just a single database (legacy name is --one)
+
+C</input_filter> is optional part which can be C<name>
+or C<type> from input
+
+=item --config conf/config.yml
+
+path to YAML configuration file
+
+=item --stats
+
+disable indexing and dump statistics about field and subfield
+usage for each input
+
+=item --validate path/to/validation_file
+
+turn on extra validation of input records, see L<WebPAC::Validate>
+
+=item --marc-normalize conf/normalize/mapping.pl
+
+This option specifies normalisation file for MARC creation
+
+=item --marc-output out/marc/test.marc
+
+Optional path to output file
+
+=item --marc-lint
+
+By default turned on if C<--marc-normalize> is used. You can disable lint
+messages with C<--no-marc-lint>.
+
+=item --marc-dump
+
+Force dump of input and MARC record for debugging.
+
+=item --parallel 4
+
+Run databases in parallel (approximately the same as the number of
+processors in the machine if you want to use full load)
+
+=item --only-links
+
+Create just links
+
+=item --merge
+
+Create merged index of databases which have links
+
+=back
+
+=cut
+
+# defaults for the command-line options documented in the POD above
+my $offset;
+my $limit;
+
+my $clean = 0;
+my $config = 'conf/config.yml';
+my $debug = 0;
+my $only_filter;
+my $stats = 0;
+my $validate_path;
+my ($marc_normalize, $marc_output);
+my $marc_lint = 1;
+my $marc_dump = 0;
+my $parallel = 0;
+my $only_links = 0;
+my $merge = 0;
+
+# NOTE(review): indirect-object syntax; presumably equivalent to
+# WebPAC::Common->new()->_get_logger() -- confirm before restyling
+my $log = _new WebPAC::Common()->_get_logger();
+
+# prefer a per-host configuration file conf/<short hostname>.yml when present
+my $hostname = `hostname`;
+chomp($hostname);
+$hostname =~ s/\..+$//;
+if (-e "conf/$hostname.yml") {
+ $config = "conf/$hostname.yml";
+ $log->info("using host configuration file: $config");
+}
+
+# parse command-line options (documented in the POD at the top of this file)
+GetOptions(
+ "limit=i" => \$limit,
+ "offset=i" => \$offset,
+ "clean" => \$clean,
+ "one=s" => \$only_filter,
+ "only=s" => \$only_filter,
+ # was "config" (boolean flag): "--config PATH" set $config to 1 and
+ # LoadFile(1) below failed; "=s" makes it take the path argument
+ "config=s" => \$config,
+ "debug+" => \$debug,
+ "stats" => \$stats,
+ "validate=s" => \$validate_path,
+ "marc-normalize=s" => \$marc_normalize,
+ "marc-output=s" => \$marc_output,
+ "marc-lint!" => \$marc_lint,
+ "marc-dump!" => \$marc_dump,
+ "parallel=i" => \$parallel,
+ "only-links!" => \$only_links,
+ "merge" => \$merge,
+);
+
+# load YAML configuration (path may come from --config or the per-host file)
+$config = LoadFile($config);
+
+#print "config = ",dump($config) if ($debug);
die "no databases in config file!\n" unless ($config->{databases});
+$log->info( "-" x 79 );
+
+
+# with --merge, indexing work is replaced by an estcmd batch script which
+# is collected here and executed at the very end of this program
+my $estcmd_fh;
+my $estcmd_path = './estcmd-merge.sh';
+if ($merge) {
+ open($estcmd_fh, '>', $estcmd_path) || $log->logdie("can't open $estcmd_path: $!");
+ print $estcmd_fh 'cd /data/estraier/_node/ || exit 1',$/;
+ print $estcmd_fh 'sudo /etc/init.d/hyperestraier stop',$/;
+ $log->info("created merge batch file $estcmd_path");
+}
+
+
+# optional record validation, enabled by --validate (see WebPAC::Validate)
+my $validate;
+$validate = new WebPAC::Validate(
+ path => $validate_path,
+) if ($validate_path);
+
+
+# pick the indexing engine; --stats and --marc-normalize both disable it
+my $use_indexer = $config->{use_indexer} || 'hyperestraier';
+if ($stats) {
+ $log->debug("option --stats disables update of indexing engine...");
+ $use_indexer = undef;
+} else {
+ $log->info("using $use_indexer indexing engine...");
+}
+
+# disable indexing when creating marc
+$use_indexer = undef if ($marc_normalize);
+
my $total_rows = 0;
+my $start_t = time();
+
+# deferred coderefs which create inter-database links after indexing
+my @links;
+
+if ($parallel) {
+ $log->info("Using $parallel processes for speedup");
+ Proc::Queue::size($parallel);
+}
while (my ($database, $db_config) = each %{ $config->{databases} }) {
+ # --only=database/input_filter: skip databases which don't match
+ # NOTE(review): "my (...) = ... if (cond)" has undefined behaviour in
+ # Perl when the condition is false -- consider splitting declaration
+ # and conditional assignment
+ my ($only_database,$only_input) = split(m#/#, $only_filter) if ($only_filter);
+ next if ($only_database && $database !~ m/$only_database/i);
+
+ # with --parallel, a child process indexes this database while the
+ # parent continues to fork the next one (Proc::Queue caps concurrency)
+ if ($parallel) {
+ my $f=fork;
+ if(defined ($f) and $f==0) {
+ $log->info("Created processes $$ for speedup");
+ } else {
+ next;
+ }
+ }
+
+ # create the output object for the selected indexing engine
+ my $indexer;
+ if ($use_indexer) {
+ my $indexer_config = $config->{$use_indexer} || $log->logdie("can't find '$use_indexer' part in configuration");
+ $indexer_config->{database} = $database;
+ $indexer_config->{clean} = $clean;
+ $indexer_config->{label} = $db_config->{name};
+
+ # force clean if database has links
+ $indexer_config->{clean} = 1 if ($db_config->{links});
+
+ if ($use_indexer eq 'hyperestraier') {
+
+ # open Hyper Estraier database
+ use WebPAC::Output::Estraier '0.10';
+ $indexer = new WebPAC::Output::Estraier( %{ $indexer_config } );
+
+ } elsif ($use_indexer eq 'kinosearch') {
+
+ # open KinoSearch
+ use WebPAC::Output::KinoSearch;
+ $indexer_config->{clean} = 1 unless (-e $indexer_config->{index_path});
+ $indexer = new WebPAC::Output::KinoSearch( %{ $indexer_config } );
+
+ } else {
+ $log->logdie("unknown use_indexer: $use_indexer");
+ }
+
+ # was: $log->logide(...) -- a typo which would itself die with
+ # "Can't locate object method" instead of the intended message
+ $log->logdie("can't continue without valid indexer") unless ($indexer);
+ }
+
+
+ #
+ # store Hyper Estraier links to other databases
+ #
+ if (ref($db_config->{links}) eq 'ARRAY' && $use_indexer) {
+ foreach my $link (@{ $db_config->{links} }) {
+ if ($use_indexer eq 'hyperestraier') {
+ if ($merge) {
+ print $estcmd_fh 'sudo -u www-data estcmd merge ' . $database . ' ' . $link->{to},$/;
+ } else {
+ # defer link creation until all databases are indexed
+ $log->info("saving link $database -> $link->{to} [$link->{credit}]");
+ push @links, sub {
+ $log->info("adding link $database -> $link->{to} [$link->{credit}]");
+ $indexer->add_link(
+ from => $database,
+ to => $link->{to},
+ credit => $link->{credit},
+ );
+ };
+ }
+ } else {
+ $log->warn("NOT IMPLEMENTED WITH $use_indexer: adding link $database -> $link->{to} [$link->{credit}]");
+ }
+ }
+ }
+ next if ($only_links);
+
+
+ #
+ # now WebPAC::Store
+ #
 my $abs_path = abs_path($0);
 $abs_path =~ s#/[^/]*$#/#;
 my $db_path = $config->{webpac}->{db_path} . '/' . $database;
+ # --clean wipes the on-disk database before reindexing
+ if ($clean) {
+ $log->info("creating new database '$database' in $db_path");
+ rmtree( $db_path ) || $log->warn("can't remove $db_path: $!");
+ } else {
+ $log->info("working on database '$database' in $db_path");
+ }
+
 my $db = new WebPAC::Store(
 path => $db_path,
 database => $database,
- debug => 1,
+ debug => $debug,
 );
- my $log = $db->_get_logger;
- $log->info("working on $database in $db_path");
-
- my $est_config = $config->{hyperestraier} || $log->logdie("can't find 'hyperestraier' part in confguration");
- $est_config->{database} = $database;
-
- $log->info("using HyperEstraier URL $est_config->{masterurl}");
-
- my $est = new WebPAC::Output::Estraier(
- %{ $est_config },
- );
 #
 # now, iterate through input formats
 my @inputs;
 if (ref($db_config->{input}) eq 'ARRAY') {
 @inputs = @{ $db_config->{input} };
- } else {
+ } elsif ($db_config->{input}) {
 push @inputs, $db_config->{input};
+ } else {
+ $log->info("database $database doesn't have inputs defined");
 }
+ # input types supported by this installation (from configuration)
+ my @supported_inputs = keys %{ $config->{webpac}->{inputs} };
+
 foreach my $input (@inputs) {
+ # --only=db/input_filter: skip inputs not matching name or type
+ next if ($only_input && ($input->{name} !~ m#$only_input#i && $input->{type} !~ m#$only_input#i));
+
 my $type = lc($input->{type});
- die "I know only how to handle input type isis, not '$type'!\n" unless ($type eq 'isis');
+ die "I know only how to handle input types ", join(",", @supported_inputs), " not '$type'!\n" unless (grep(/$type/, @supported_inputs));
- my $lookup = new WebPAC::Lookup(
- lookup_file => $input->{lookup},
- );
+ # optional lookup; remember the file path BEFORE deleting it from
+ # $input, so the log message below can still report it (previously
+ # $input->{lookup} was read after the delete and never printed)
+ my $lookup;
+ my $lookup_file = $input->{lookup};
+ if ($lookup_file) {
+ $lookup = new WebPAC::Lookup(
+ lookup_file => $lookup_file,
+ );
+ delete( $input->{lookup} );
+ }
- $log->info("working on input $input->{path} [$input->{type}]");
+ my $input_module = $config->{webpac}->{inputs}->{$type};
+
+ $log->info("working on input '$input->{name}' in $input->{path} [type: $input->{type}] using $input_module",
+ $lookup_file ? "lookup '$lookup_file'" : ""
+ );
- my $isis = new WebPAC::Input::ISIS(
- code_page => $config->{webpac}->{webpac_encoding},
- limit_mfn => $input->{limit},
- lookup => $lookup,
+ my $input_db = new WebPAC::Input(
+ module => $input_module,
+ encoding => $config->{webpac}->{webpac_encoding},
+ limit => $limit || $input->{limit},
+ offset => $offset,
+ lookup_coderef => sub {
+ my $rec = shift || return;
+ # guard: no-op when this input has no lookup configured
+ $lookup->add( $rec ) if ($lookup);
+ },
+ recode => $input->{recode},
+ stats => $stats,
+ modify_records => $input->{modify_records},
 );
+ # was: unless ($input) -- $input is the config hashref and always
+ # true, so the check could never fire; test the created object
+ $log->logdie("can't create input using $input_module") unless ($input_db);
- my $maxmfn = $isis->open(
- filename => $input->{path},
+ my $maxmfn = $input_db->open(
+ path => $input->{path},
 code_page => $input->{encoding}, # database encoding
+ %{ $input },
 );
-$log->info( Dumper($lookup->{_lookup_data}) );
+ # an input may declare one or more normalisation rule files
+ my @norm_array = ref($input->{normalize}) eq 'ARRAY' ?
+ @{ $input->{normalize} } : ( $input->{normalize} );
- my $n = new WebPAC::Normalize::XML(
- # filter => { 'foo' => sub { shift } },
- db => $db,
- lookup_regex => $lookup->regex,
- lookup => $lookup,
- prefix => $input->{name},
- );
+ # --marc-normalize replaces the configured rules and adds MARC output
+ if ($marc_normalize) {
+ @norm_array = ( {
+ path => $marc_normalize,
+ output => $marc_output || 'out/marc/' . $database . '-' . $input->{name} . '.marc',
+ } );
+ }
+ foreach my $normalize (@norm_array) {
- for ( 0 ... $isis->size ) {
+ my $normalize_path = $normalize->{path} || $log->logdie("can't find normalize path in config");
- my $row = $isis->fetch || next;
+ $log->logdie("Found '$normalize_path' as normalization file which isn't supported any more!") unless ( $normalize_path =~ m!\.pl$!i );
- my $mfn = $row->{'000'}->[0] || die "can't find MFN";
+ my $rules = read_file( $normalize_path ) or die "can't open $normalize_path: $!";
- my $ds = $n->data_structure($row);
+ $log->info("Using $normalize_path for normalization...");
- $est->add(
- id => $input->{name} . "#" . $mfn,
- ds => $ds,
- type => $config->{hyperestraier}->{type},
- );
+ # open optional MARC output; NOTE: the previous form
+ # "my $marc = new ... if ($cond);" is undefined behaviour in Perl
+ # and could retain the handle from an earlier loop iteration, so
+ # declare first and assign conditionally
+ my $marc;
+ if ($normalize->{output}) {
+ $marc = new WebPAC::Output::MARC(
+ path => $normalize->{output},
+ lint => $marc_lint,
+ dump => $marc_dump,
+ );
+ }
+
+ # reset position in database
+ $input_db->seek(1);
+
+ foreach my $pos ( 0 ... $input_db->size ) {
+
+ my $row = $input_db->fetch || next;
+
+ my $mfn = $row->{'000'}->[0];
+
+ # fall back to the record position when MFN is missing or non-numeric
+ if (! $mfn || $mfn !~ m#^\d+$#) {
+ $log->warn("record $pos doesn't have valid MFN but '$mfn', using $pos");
+ $mfn = $pos;
+ push @{ $row->{'000'} }, $pos;
+ }
+
+
+ if ($validate) {
+ my @errors = $validate->validate_errors( $row );
+ $log->error( "MFN $mfn validation errors:\n", join("\n", @errors) ) if (@errors);
+ }
+
+ # pass a deep copy of the database config to normalisation
+ my $ds_config = dclone($db_config);
+
+ # default values -> database key
+ $ds_config->{_} = $database;
+
+ # current mfn
+ $ds_config->{_mfn} = $mfn;
+
+ # attach current input
+ $ds_config->{input} = $input;
+
+ my $ds = WebPAC::Normalize::data_structure(
+ row => $row,
+ rules => $rules,
+ lookup => $lookup ? $lookup->lookup_hash : undef,
+ config => $ds_config,
+ marc_encoding => 'utf-8',
+ );
+
+ $db->save_ds(
+ id => $mfn,
+ ds => $ds,
+ prefix => $input->{name},
+ ) if ($ds && !$stats);
+
+ $indexer->add(
+ id => $input->{name} . "/" . $mfn,
+ ds => $ds,
+ type => $config->{$use_indexer}->{type},
+ ) if ($indexer && $ds);
+
+ if ($marc) {
+ my $i = 0;
+
+ # one input record may produce several MARC records
+ while (my $fields = WebPAC::Normalize::_get_marc_fields( fetch_next => 1 ) ) {
+ $marc->add(
+ id => $mfn . ( $i ? "/$i" : '' ),
+ fields => $fields,
+ leader => WebPAC::Normalize::marc_leader(),
+ row => $row,
+ );
+ $i++;
+ }
+
+ $log->info("Created $i instances of MFN $mfn\n") if ($i > 1);
+ }
+
+ $total_rows++;
+ }
+
+ $log->info("statistics of fields usage:\n", $input_db->stats) if ($stats);
+
+ # close MARC file
+ $marc->finish if ($marc);
- $total_rows++;
 }
- };
+ }
+
+ # finish() may not exist on every indexer, hence can() + eval
+ eval { $indexer->finish } if ($indexer && $indexer->can('finish'));
+
+ my $dt = time() - $start_t;
+ $log->info("$total_rows records ", $indexer ? "indexed " : "",
+ sprintf("in %.2f sec [%.2f rec/sec]",
+ $dt, ($total_rows / $dt)
+ )
+ );
+
+
+ # end forked process
+ if ($parallel) {
+ $log->info("parallel process $$ finished");
+ exit(0);
+ }
- $log->info("$total_rows records indexed");
}
+if ($parallel) {
+ # wait all children to finish
+ sleep(1) while wait != -1;
+ $log->info("all parallel processes finished");
+}
+
+#
+# handle links or merge after indexing
+#
+
+if ($merge) {
+ print $estcmd_fh 'sudo /etc/init.d/hyperestraier start',$/;
+ close($estcmd_fh);
+ # parenthesized chmod: with "chmod 0700, $path || warn" the || bound
+ # to $path, so a chmod failure was silently ignored
+ chmod(0700, $estcmd_path) || $log->warn("can't chmod 0700 $estcmd_path: $!");
+ system $estcmd_path;
+} else {
+ foreach my $link (@links) {
+ # use Data::Dump's dump(): "use Data::Dumper" is removed at the top
+ # of this file, so Dumper() would be an undefined subroutine here
+ $log->logdie("coderef in link ", dump($link), " is ", ref($link), " and not CODE") unless (ref($link) eq 'CODE');
+ $link->();
+ }
+}