use Data::Dump qw/dump/;
use Storable qw/dclone/;
+use Proc::Queue size => 1;
+use POSIX ":sys_wait_h"; # imports WNOHANG
+
=head1 NAME
run.pl - start WebPAC indexing
Force dump of input and MARC record for debugging.
+=item --parallel 4
+
Run databases in parallel (approximately the same as the number of
processors in the machine if you want to use the full load)
+
=back
=cut
# Command-line option defaults.
my $marc_lint = 1;    # MARC::Lint checks are on unless --no-marc-lint
my $marc_dump = 0;    # dump MARC records only when --marc-dump is given
my $parallel  = 0;    # 0 = sequential; N = index up to N databases at once

GetOptions(
	'limit=i'       => \$limit,
	'offset=i'      => \$offset,
	'marc-output=s' => \$marc_output,
	'marc-lint!'    => \$marc_lint,
	'marc-dump!'    => \$marc_dump,
	'parallel=i'    => \$parallel,
);

$config = LoadFile($config);

my @links;
my $indexer;

# Cap how many children Proc::Queue lets us fork at the same time.
if ($parallel) {
	$log->info("Using $parallel processes for speedup");
	Proc::Queue::size($parallel);
}
+
# Iterate over every configured database. With --parallel N, fork one child
# per database (Proc::Queue throttles the number of live children); the child
# does the indexing and exits at the bottom of the loop body, while the
# parent immediately moves on to the next database.
while (my ($database, $db_config) = each %{ $config->{databases} }) {

	# NOTE(review): the original used "my (...) = ... if (...)"; the
	# behaviour of "my" with a statement modifier is undefined in Perl,
	# so declare first and assign conditionally.
	my ($only_database, $only_input);
	($only_database, $only_input) = split(m#/#, $only_filter) if ($only_filter);
	next if ($only_database && $database !~ m/$only_database/i);

	if ($parallel) {
		my $f = fork;
		# a failed fork used to fall into the parent branch and silently
		# skip this database -- fail loudly instead
		$log->logdie("can't fork: $!") unless defined $f;
		if ($f == 0) {
			# child: fall through and index this database
			$log->info("Created process $$ for speedup");
		} else {
			# parent: the child owns this database, move to the next one
			next;
		}
	}

	if ($use_indexer) {
		my $indexer_config = $config->{$use_indexer} || $log->logdie("can't find '$use_indexer' part in configuration");
		$indexer_config->{database} = $database;
	}

	# end forked process: the child must exit here so it never falls
	# through into the parent's post-loop code below
	if ($parallel) {
		$log->info("parallel process $$ finished");
		exit(0);
	}
}
+
# Reap every forked child before touching shared post-processing state.
if ($parallel) {
	# wait() already blocks until a child exits and returns -1 once none
	# remain, so the original "sleep(1) while ..." only added a needless
	# one-second pause after each reaped child.
	1 while wait != -1;
	$log->info("all parallel processes finished");
}
# Register every collected inter-database link with the indexer.
for my $link (@links) {
	$log->info("adding link $link->{from} -> $link->{to} [$link->{credit}]");
	$indexer->add_link( %$link );
}
+