use Cwd qw/abs_path/;
use File::Temp qw/tempdir/;
-use Data::Dumper;
use lib './lib';
use WebPAC::Common 0.02;
-use WebPAC::Lookup;
-use WebPAC::Input 0.03;
-use WebPAC::Store 0.03;
-use WebPAC::Normalize::XML;
-use WebPAC::Normalize::Set;
+use WebPAC::Parser 0.08;
+use WebPAC::Input 0.16;
+use WebPAC::Store 0.14;
+use WebPAC::Normalize 0.22;
use WebPAC::Output::TT;
-use YAML qw/LoadFile/;
+use WebPAC::Validate 0.06;
+use WebPAC::Output::MARC;
+use WebPAC::Config;
use Getopt::Long;
use File::Path;
use Time::HiRes qw/time/;
use File::Slurp;
+use Data::Dump qw/dump/;
+use Storable qw/dclone/;
+use Pod::Usage qw/pod2usage/;
+
+use Proc::Queue size => 1;
+use POSIX ":sys_wait_h"; # imports WNOHANG
=head1 NAME
B<this command will probably go away. Don't get used to it!>
-Options:
+=head1 OPTIONS
=over 4
remove database and Hyper Estraier index before indexing
-=item --only=database_name
+=item --only=database_name/input_filter
reindex just single database (legacy name is --one)
+C</input_filter> is an optional part which can be C<name>
+or C<type> from input
+
=item --config conf/config.yml
path to YAML configuration file
-=item --force-set
+=item --stats
-force conversion C<< normalize->path >> in C<config.yml> from
-C<.xml> to C<.pl>
+disable indexing, modify_* in configuration and dump statistics about field
+and subfield usage for each input
-=item --stats
+=item --validate path/to/validation_file
+
+turn on extra validation of input records, see L<WebPAC::Validate>
+
+=item --marc-generate
+
+Generate MARC file. This will automatically be on if file contains C<marc*> directives.
+You can use this option as C<--no-marc-generate> to disable MARC generation.
+
+=item --marc-lint
+
+By default turned on if normalisation file has C<marc*> directives. You can disable lint
+messages with C<--no-marc-lint>.
+
+=item --marc-dump
+
+Force dump of input and MARC record for debugging.
+
+=item --parallel 4
+
+Run databases in parallel (approximately the same as the number of processors
+in the machine if you want to use full load)
+
+=item --only-links
+
+Create just links
+
+=item --merge
-disable indexing and dump statistics about field and subfield
-usage for each input
+Create merged index of databases which have links
=back
my $limit;
my $clean = 0;
-my $config = 'conf/config.yml';
+my $config_path;
my $debug = 0;
-my $only_db_name;
-my $force_set = 0;
+my $only_filter;
my $stats = 0;
+my $validate_path;
+my $marc_generate = 1;
+my $marc_lint = 1;
+my $marc_dump = 0;
+my $parallel = 0;
+my $only_links = 0;
+my $merge = 0;
+my $help;
+
+my $log = _new WebPAC::Common()->_get_logger();
GetOptions(
"limit=i" => \$limit,
"offset=i" => \$offset,
"clean" => \$clean,
- "one=s" => \$only_db_name,
- "only=s" => \$only_db_name,
- "config" => \$config,
- "debug" => \$debug,
- "force-set" => \$force_set,
+ "one=s" => \$only_filter,
+ "only=s" => \$only_filter,
+ "config" => \$config_path,
+ "debug+" => \$debug,
"stats" => \$stats,
+ "validate=s" => \$validate_path,
+ "marc-generate!" => \$marc_generate,
+ "marc-lint!" => \$marc_lint,
+ "marc-dump!" => \$marc_dump,
+ "parallel=i" => \$parallel,
+ "only-links!" => \$only_links,
+ "merge" => \$merge,
+ "help" => \$help,
);
-$config = LoadFile($config);
+pod2usage(-verbose => 2) if ($help);
-print "config = ",Dumper($config) if ($debug);
+my $config = new WebPAC::Config( path => $config_path );
-die "no databases in config file!\n" unless ($config->{databases});
+#print "config = ",dump($config) if ($debug);
+
+die "no databases in config file!\n" unless ($config->databases);
-my $log = _new WebPAC::Common()->_get_logger();
$log->info( "-" x 79 );
-my $use_indexer = $config->{use_indexer} || 'hyperestraier';
+my $log_file = 'log';
+
+if (-e $log_file ) { # && -s $log_file > 5 * 1024 * 1024) {
+ $log->info("moved old log with ", -s $log_file, " bytes to '${log_file}.old'");
+ rename $log_file, "${log_file}.old" || $log->logwarn("can't rename $log_file to ${log_file}.old: $!");
+}
+
+my $estcmd_fh;
+my $estcmd_path = './estcmd-merge.sh';
+if ($merge) {
+ open($estcmd_fh, '>', $estcmd_path) || $log->logdie("can't open $estcmd_path: $!");
+ print $estcmd_fh 'cd /data/estraier/_node/ || exit 1',$/;
+ print $estcmd_fh 'sudo /etc/init.d/hyperestraier stop',$/;
+ $log->info("created merge batch file $estcmd_path");
+}
+
+
+my $validate;
+$validate = new WebPAC::Validate(
+ path => $validate_path,
+) if ($validate_path);
+
+
+my $use_indexer = $config->use_indexer;
+$stats ||= $validate;
if ($stats) {
- $log->debug("option --stats disables update of indexing engine...");
+ $log->debug("disabled indexing for stats collection");
$use_indexer = undef;
} else {
$log->info("using $use_indexer indexing engine...");
}
+# parse normalize files and create source files for lookup and normalization
+
+my $parser = new WebPAC::Parser( config => $config );
+
my $total_rows = 0;
my $start_t = time();
-while (my ($database, $db_config) = each %{ $config->{databases} }) {
+my @links;
- next if ($only_db_name && $database !~ m/$only_db_name/i);
+if ($parallel) {
+ $log->info("Using $parallel processes for speedup");
+ Proc::Queue::size($parallel);
+}
+
+sub create_ds_config {
+ my ($db_config, $database, $input, $mfn) = @_;
+ my $c = dclone( $db_config );
+ $c->{_} = $database || $log->logconfess("need database");
+ $c->{_mfn} = $mfn || $log->logconfess("need mfn");
+ $c->{input} = $input || $log->logconfess("need input");
+ return $c;
+}
+
+while (my ($database, $db_config) = each %{ $config->databases }) {
+
+ my ($only_database,$only_input) = split(m#/#, $only_filter) if ($only_filter);
+ next if ($only_database && $database !~ m/$only_database/i);
+
+ if ($parallel) {
+ my $f=fork;
+ if(defined ($f) and $f==0) {
+ $log->info("Created processes $$ for speedup");
+ } else {
+ next;
+ }
+ }
my $indexer;
+ if ($use_indexer && $parser->have_rules( 'search', $database )) {
+
+ my $cfg_name = $use_indexer;
+ $cfg_name =~ s/\-.*$//;
- if ($use_indexer) {
- my $indexer_config = $config->{$use_indexer} || $log->logdie("can't find '$use_indexer' part in confguration");
+ my $indexer_config = $config->get( $cfg_name ) || $log->logdie("can't find '$cfg_name' part in confguration");
$indexer_config->{database} = $database;
$indexer_config->{clean} = $clean;
$indexer_config->{label} = $db_config->{name};
+ # force clean if database has links
+ $indexer_config->{clean} = 1 if ($db_config->{links});
+
if ($use_indexer eq 'hyperestraier') {
# open Hyper Estraier database
use WebPAC::Output::Estraier '0.10';
$indexer = new WebPAC::Output::Estraier( %{ $indexer_config } );
+ } elsif ($use_indexer eq 'hyperestraier-native') {
+
+ # open Hyper Estraier database
+ use WebPAC::Output::EstraierNative;
+ $indexer = new WebPAC::Output::EstraierNative( %{ $indexer_config } );
+
} elsif ($use_indexer eq 'kinosearch') {
# open KinoSearch
}
+ #
+ # store Hyper Estraier links to other databases
+ #
+ if (ref($db_config->{links}) eq 'ARRAY' && $use_indexer) {
+ foreach my $link (@{ $db_config->{links} }) {
+ if ($use_indexer eq 'hyperestraier') {
+ if ($merge) {
+ print $estcmd_fh 'sudo -u www-data estcmd merge ' . $database . ' ' . $link->{to},$/;
+ } else {
+ $log->info("saving link $database -> $link->{to} [$link->{credit}]");
+ push @links, sub {
+ $log->info("adding link $database -> $link->{to} [$link->{credit}]");
+ $indexer->add_link(
+ from => $database,
+ to => $link->{to},
+ credit => $link->{credit},
+ );
+ };
+ }
+ } else {
+ $log->warn("NOT IMPLEMENTED WITH $use_indexer: adding link $database -> $link->{to} [$link->{credit}]");
+ }
+ }
+ }
+ next if ($only_links);
+
+
#
# now WebPAC::Store
#
my $abs_path = abs_path($0);
$abs_path =~ s#/[^/]*$#/#;
- my $db_path = $config->{webpac}->{db_path} . '/' . $database;
+ my $db_path = $config->webpac('db_path');
if ($clean) {
- $log->info("creating new database $database in $db_path");
+ $log->info("creating new database '$database' in $db_path");
rmtree( $db_path ) || $log->warn("can't remove $db_path: $!");
} else {
- $log->debug("working on $database in $db_path");
+ $log->info("working on database '$database' in $db_path");
}
- my $db = new WebPAC::Store(
+ my $store = new WebPAC::Store(
path => $db_path,
- database => $database,
debug => $debug,
);
$log->info("database $database doesn't have inputs defined");
}
- my @supported_inputs = keys %{ $config->{webpac}->{inputs} };
-
foreach my $input (@inputs) {
+ my $input_name = $input->{name} || $log->logdie("input without a name isn't valid: ",dump($input));
+
+ next if ($only_input && ($input_name !~ m#$only_input#i && $input->{type} !~ m#$only_input#i));
+
my $type = lc($input->{type});
- die "I know only how to handle input types ", join(",", @supported_inputs), " not '$type'!\n" unless (grep(/$type/, @supported_inputs));
+ die "I know only how to handle input types ", join(",", $config->webpac('inputs') ), " not '$type'!\n" unless (grep(/$type/, $config->webpac('inputs')));
- my $lookup = new WebPAC::Lookup(
- lookup_file => $input->{lookup},
- );
+ my $input_module = $config->webpac('inputs')->{$type};
- my $input_module = $config->{webpac}->{inputs}->{$type};
+ my @lookups = $parser->have_lookup_create($database, $input);
- $log->info("working on input '$input->{path}' [$input->{type}] using $input_module lookup '$input->{lookup}'");
+ $log->info("working on input '$input_name' in $input->{path} [type: $input->{type}] using $input_module",
+ @lookups ? " creating lookups: ".join(", ", @lookups) : ""
+ );
+
+ if ($stats) {
+ # disable modification of records if --stats is in use
+ delete($input->{modify_records});
+ delete($input->{modify_file});
+ }
my $input_db = new WebPAC::Input(
module => $input_module,
- code_page => $config->{webpac}->{webpac_encoding},
+ encoding => $config->webpac('webpac_encoding'),
limit => $limit || $input->{limit},
offset => $offset,
- lookup => $lookup,
recode => $input->{recode},
stats => $stats,
+ modify_records => $input->{modify_records},
+ modify_file => $input->{modify_file},
);
$log->logdie("can't create input using $input_module") unless ($input);
+ if (defined( $input->{lookup} )) {
+ $log->warn("$database/$input_name has depriciated lookup definition, removing it...");
+ delete( $input->{lookup} );
+ }
+
+ my $lookup_coderef;
+
+ if (@lookups) {
+
+ my $rules = $parser->lookup_create_rules($database, $input) || $log->logdie("no rules found for $database/$input");
+
+ $lookup_coderef = sub {
+ my $rec = shift || die "need rec!";
+ my $mfn = $rec->{'000'}->[0] || die "need mfn in 000";
+
+ WebPAC::Normalize::data_structure(
+ row => $rec,
+ rules => $rules,
+ config => create_ds_config( $db_config, $database, $input, $mfn ),
+ );
+
+ #warn "current lookup: ", dump(WebPAC::Normalize::_get_lookup());
+ };
+
+ WebPAC::Normalize::_set_lookup( undef );
+
+ $log->debug("created lookup_coderef using:\n$rules");
+
+ };
+
+ my $lookup_jar;
+
my $maxmfn = $input_db->open(
path => $input->{path},
code_page => $input->{encoding}, # database encoding
- );
+ lookup_coderef => $lookup_coderef,
+ lookup => $lookup_jar,
+ %{ $input },
+ load_row => sub {
+ my $a = shift;
+ return $store->load_row(
+ database => $database,
+ input => $input_name,
+ id => $a->{id},
+ );
+ },
+ save_row => sub {
+ my $a = shift;
+ return $store->save_row(
+ database => $database,
+ input => $input_name,
+ id => $a->{id},
+ row => $a->{row},
+ );
+ },
- my $n = new WebPAC::Normalize::XML(
- # filter => { 'foo' => sub { shift } },
- db => $db,
- lookup_regex => $lookup->regex,
- lookup => $lookup,
- prefix => $input->{name},
);
- my $rules;
- my $normalize_path = $input->{normalize}->{path};
+ my $lookup_data = WebPAC::Normalize::_get_lookup();
- if ($force_set) {
- my $new_norm_path = $normalize_path;
- $new_norm_path =~ s/\.xml$/.pl/;
- if (-e $new_norm_path) {
- $log->debug("--force-set replaced $normalize_path with $new_norm_path");
- $normalize_path = $new_norm_path;
- } else {
- $log->debug("--force-set failed on $new_norm_path, fallback to $normalize_path");
+ if (defined( $lookup_data->{$database}->{$input_name} )) {
+ $log->debug("created following lookups: ", sub { dump( $lookup_data ) } );
+
+ foreach my $key (keys %{ $lookup_data->{$database}->{$input_name} }) {
+ $store->save_lookup(
+ database => $database,
+ input => $input_name,
+ key => $key,
+ data => $lookup_data->{$database}->{$input_name}->{$key},
+ );
}
}
- if ($normalize_path =~ m/\.xml$/i) {
- $n->open(
- tag => $input->{normalize}->{tag},
- xml_file => $normalize_path,
- );
- } elsif ($normalize_path =~ m/\.(?:yml|yaml)$/i) {
- $n->open_yaml(
- path => $normalize_path,
- tag => $input->{normalize}->{tag},
+ my $report_fh;
+ if ($stats || $validate) {
+ my $path = "out/report/${database}-${input_name}.txt";
+ open($report_fh, '>', $path) || $log->logdie("can't open $path: $!");
+
+ print $report_fh "Report for database '$database' input '$input_name' records ",
+ $offset || 1, "-", $limit || $input->{limit} || $maxmfn, "\n\n";
+ $log->info("Generating report file $path");
+ }
+
+ my $marc;
+ if ($marc_generate && $parser->have_rules( 'marc', $database, $input_name )) {
+ $marc = new WebPAC::Output::MARC(
+ path => "out/marc/${database}-${input_name}.marc",
+ lint => $marc_lint,
+ dump => $marc_dump,
);
- } elsif ($normalize_path =~ m/\.(?:pl)$/i) {
- $n = undef;
- $log->info("using WebPAC::Normalize::Set to process $normalize_path");
- $rules = read_file( $normalize_path ) or die "can't open $normalize_path: $!";
}
+ my $rules = $parser->normalize_rules($database,$input_name) || $log->logdie("no normalize rules found for $database/$input_name");
+ $log->debug("parsed normalize rules:\n$rules");
+
+ # reset position in database
+ $input_db->seek(1);
+
+ # generate name of config key for indexer (strip everything after -)
+ my $indexer_config = $use_indexer;
+ $indexer_config =~ s/^(\w+)-?.*$/$1/g if ($indexer_config);
+
+ my $lookup_hash;
+ my $depends = $parser->depends($database,$input_name);
+
+ if ($depends) {
+ $log->debug("$database/$input_name depends on: ", dump($depends)) if ($depends);
+ $log->logdie("parser->depends didn't return HASH") unless (ref($depends) eq 'HASH');
+
+ foreach my $db (keys %$depends) {
+ foreach my $i (keys %{$depends->{$db}}) {
+ foreach my $k (keys %{$depends->{$db}->{$i}}) {
+ my $t = time();
+ $log->debug("loading lookup $db/$i");
+ $lookup_hash->{$db}->{$i}->{$k} = $store->load_lookup(
+ database => $db,
+ input => $i,
+ key => $k,
+ );
+ $log->debug(sprintf("lookup $db/$i took %.2fs", time() - $t));
+ }
+ }
+ }
+
+ $log->debug("lookup_hash = ", sub { dump( $lookup_hash ) });
+ }
+
+
foreach my $pos ( 0 ... $input_db->size ) {
my $row = $input_db->fetch || next;
+ $total_rows++;
+
my $mfn = $row->{'000'}->[0];
if (! $mfn || $mfn !~ m#^\d+$#) {
push @{ $row->{'000'} }, $pos;
}
-
- my $ds;
- if ($n) {
- $ds = $n->data_structure($row);
- } else {
- $ds = WebPAC::Normalize::Set::data_structure(
- row => $row,
- rules => $rules,
- lookup => $lookup->lookup_hash,
- );
- $db->save_ds(
- id => $mfn,
- ds => $ds,
- prefix => $input->{name},
- ) if ($ds && !$stats);
+ if ($validate) {
+ if ( my $errors = $validate->validate_rec( $row, $input_db->dump_ascii ) ) {
+ $log->error( "MFN $mfn validation error:\n",
+ $validate->report_error( $errors )
+ );
+ }
+ next; # validation doesn't create any output
}
+ my $ds = WebPAC::Normalize::data_structure(
+ row => $row,
+ rules => $rules,
+ lookup => $lookup_hash,
+ config => create_ds_config( $db_config, $database, $input, $mfn ),
+ marc_encoding => 'utf-8',
+ load_row_coderef => sub {
+ my ($database,$input,$mfn) = @_;
+ return $store->load_row(
+ database => $database,
+ input => $input,
+ id => $mfn,
+ );
+ },
+ );
+
+ $log->debug("ds = ", sub { dump($ds) }) if ($ds);
+
+ $store->save_ds(
+ database => $database,
+ input => $input_name,
+ id => $mfn,
+ ds => $ds,
+ ) if ($ds && !$stats);
+
$indexer->add(
- id => $input->{name} . "/" . $mfn,
+ id => "${input_name}/${mfn}",
ds => $ds,
- type => $config->{$use_indexer}->{type},
- ) if ($indexer);
+ type => $config->get($indexer_config)->{type},
+ ) if ($indexer && $ds);
+
+ if ($marc) {
+ my $i = 0;
+
+ while (my $fields = WebPAC::Normalize::_get_marc_fields( fetch_next => 1 ) ) {
+ $marc->add(
+ id => $mfn . ( $i ? "/$i" : '' ),
+ fields => $fields,
+ leader => WebPAC::Normalize::_get_marc_leader(),
+ row => $row,
+ );
+ $i++;
+ }
+
+ $log->info("Created $i instances of MFN $mfn\n") if ($i > 1);
+ }
+ }
- $total_rows++;
+ if ($validate) {
+ my $errors = $validate->report;
+ if ($errors) {
+ $log->info("validation errors:\n$errors\n" );
+ print $report_fh "$errors\n" if ($report_fh);
+ }
+ }
+
+ if ($stats) {
+ my $s = $input_db->stats;
+ $log->info("statistics of fields usage:\n$s");
+ print $report_fh "Statistics of fields usage:\n$s" if ($report_fh);
}
- $log->info("statistics of fields usage:\n", $input_db->stats) if ($stats);
+ # close MARC file
+ $marc->finish if ($marc);
+
+ # close report
+ close($report_fh) if ($report_fh)
- };
+ }
eval { $indexer->finish } if ($indexer && $indexer->can('finish'));
my $dt = time() - $start_t;
- $log->info("$total_rows records indexed in " .
- sprintf("%.2f sec [%.2f rec/sec]",
+ $log->info("$total_rows records ", $indexer ? "indexed " : "",
+ sprintf("in %.2f sec [%.2f rec/sec]",
$dt, ($total_rows / $dt)
)
);
- #
- # add Hyper Estraier links to other databases
- #
- if (ref($db_config->{links}) eq 'ARRAY') {
- foreach my $link (@{ $db_config->{links} }) {
- if ($use_indexer eq 'hyperestraier') {
- $log->info("adding link $database -> $link->{to} [$link->{credit}]");
- $indexer->add_link(
- from => $database,
- to => $link->{to},
- credit => $link->{credit},
- );
- } else {
- $log->warn("NOT IMPLEMENTED WITH $use_indexer: adding link $database -> $link->{to} [$link->{credit}]");
- }
- }
+
+ # end forked process
+ if ($parallel) {
+ $log->info("parallel process $$ finished");
+ exit(0);
}
}
+if ($parallel) {
+ # wait all children to finish
+ sleep(1) while wait != -1;
+ $log->info("all parallel processes finished");
+}
+
+#
+# handle links or merge after indexing
+#
+
+if ($merge) {
+ print $estcmd_fh 'sudo /etc/init.d/hyperestraier start',$/;
+ close($estcmd_fh);
+ chmod 0700, $estcmd_path || $log->warn("can't chmod 0700 $estcmd_path: $!");
+ system $estcmd_path;
+} else {
+ foreach my $link (@links) {
+ $log->logdie("coderef in link ", Dumper($link), " is ", ref($link), " and not CODE") unless (ref($link) eq 'CODE');
+ $link->();
+ }
+}