# --- Configuration ----------------------------------------------------------
# NOTE(review): every line below carries a stray leading number — this chunk
# is a numbered listing with gaps; intervening original lines are not visible.
# Route all DBI traffic through a DBD::Gofer stream proxy over ssh to the
# remote Koha host, so the local DSN below is resolved on koha.ffzg.hr.
4 $ENV{DBI_AUTOPROXY}='dbi:Gofer:transport=stream;url=ssh:dpavlin@koha.ffzg.hr';
10 use Data::Dump qw/dump/;
# Any command-line argument enables debug mode.
# NOTE(review): $debug is not referenced in the visible lines — presumably
# used in the missing parts of the file; confirm before removing.
14 my $debug = @ARGV ? 1 : 0;
# Only the DSN is assigned; $user and $password remain undef (anonymous /
# credential-less connect — the warn below will interpolate them as empty).
16 our ( $dbi, $user, $password ) = ( "DBI:mysql:database=test" );
# Target MongoDB database and collection names.
17 our ( $database, $collection ) = ( 'test', 'test' );
# Source table with its primary key, and the secondary table joined to it
# via a same-named foreign key (biblio 1..N biblioitems).
18 our ( $table, $pk ) = ( 'biblio' => 'biblionumber' );
19 our ( $table2, $fk ) = ( 'biblioitems' => 'biblionumber' );
# Page size for fetching join-table rows in batches (see join paging below).
22 my $join_limit = 10000;
26 warn "# $dbi $user -> $database $collection $table.$pk<->$table2.$fk\n";
# --- Connections ------------------------------------------------------------
# MongoDB::Connection->new with no arguments connects to the default host
# (localhost:27017 per the driver's defaults).
28 my $conn = MongoDB::Connection->new;
29 my $db = $conn->get_database( $database );
30 my $coll = $db->get_collection( $collection );
# DBI handle to the (proxied) MySQL source. The attribute hash opened here
# is truncated in this view — its remaining options and the closing `});`
# are in lines not shown; only the commented-out utf8 flag is visible.
31 my $dbh = DBI->connect($dbi,$user,$password, {
33 # mysql_enable_utf8 => 1,
# --- Resume point -----------------------------------------------------------
# Find the highest _id already imported, so the script can resume an
# interrupted import incrementally. Mongo-shell equivalent kept for reference:
38 # db.items.find().sort({_id:-1}).limit(1);
39 my $last = $coll->query()->sort({ '_id' => -1 })->next;
# Empty collection => $last is undef and $last->{_id} yields undef, so
# default to 0 and import everything from the beginning.
41 my $last_id = $last->{_id} || 0;
43 print "import $table.$pk > $last_id from $dbi\n";
# --- Main table select ------------------------------------------------------
# Prepare the SELECT over $table; the SQL text inside qq{...} (original lines
# 46-54) is not visible in this chunk. The single bind value below implies it
# filters on $pk > ? — TODO confirm against the full source.
45 my $sth = $dbh->prepare(qq{
55 $sth->execute( $last_id );
56 warn "# $table columns ",dump( $sth->{NAME} );
# NOTE(review): $sth->rows after a SELECT is driver-dependent in DBI; with
# DBD::mysql it is only meaningful when the full result set is buffered
# client-side — verify the connect/prepare attributes in the missing lines.
57 print "import ",$sth->rows," from $table\n";
# --- Join table select (paged) ----------------------------------------------
# Prepare/execute one page of $table2 rows. $sth_join and $join_offset are
# assigned without `my` here — presumably declared (or this code is wrapped
# in a sub that is re-entered per page) in lines not visible; confirm.
# The SQL inside qq{...} (original lines 65-72) is not shown; the offset is
# printed below, so the query presumably uses $join_offset/$join_limit for
# LIMIT/OFFSET paging — TODO confirm.
64 $sth_join = $dbh->prepare(qq{
73 print STDERR "$join_offset";
74 $sth_join->execute( $last_id );
75 warn "# $table2 columns ",dump( $sth_join->{NAME} );
76 print "join ",$sth_join->rows," from $table2 offset $join_offset limit $join_limit\n";
# A full page (rows == limit) means more pages may follow; a short page
# means this was the last one.
77 $join_more = $sth_join->rows == $join_limit ? 1 : 0;
# Fetch the next join-table row; when the current page is exhausted but a
# further page is expected, advance the offset and fetch again.
83 $row_join = $sth_join->fetchrow_hashref();
84 if ( ! $row_join && $join_more ) {
85 $join_offset += $join_limit;
# NOTE(review): original line 86 is missing from this view — presumably it
# re-prepares/re-executes $sth_join at the new offset before this re-fetch;
# without that, the fetch below would return nothing. Confirm in full source.
87 $row_join = $sth_join->fetchrow_hashref();
# Interior of guess_types() (the `sub` line is outside this view): for every
# column whose value is a plain string of digits, evaluate it in numeric
# context. `$row->{$_} * 1` does not reassign the value — it relies on the
# side effect that numeric evaluation caches an IV/NV in the scalar, which
# the MongoDB driver then serializes as a number instead of a string.
# NOTE(review): map in void context for side effects; a plain for-loop with
# `$row->{$_} += 0` would be clearer — but do not change without checking
# how the driver version in use detects numbers.
94 map { $row->{$_} * 1 } grep { defined $row->{$_} && $row->{$_} =~ /^\d+$/ } keys %$row;
# --- Main merge-join import loop --------------------------------------------
# For each $table row, attach all matching $table2 rows as an embedded array
# and insert the combined document into MongoDB. This is a merge join: it
# only works if both SELECTs are ordered by $pk/$fk ascending — the ORDER BY
# clauses live in the SQL not visible here; TODO confirm.
# (Loop/closing braces for the inner whiles fall in lines not shown.)
101 while (my $row = $sth->fetchrow_hashref() ) {
# Skip orphaned join rows whose fk sorts before the current pk
# (numeric comparison — both keys are integer ids per the digit checks
# in guess_types).
103 while ( $row_join && $row_join->{$fk} < $row->{$pk} ) {
# Collect every join row matching the current pk under the table2 name,
# numifying its digit-only fields first.
107 while ( $row_join && $row_join->{$fk} == $row->{$pk} ) {
108 push @{ $row->{ $table2 } }, guess_types($row_join);
# Insert the assembled document (numified top-level fields + embedded
# $table2 array) into the target collection.
113 $coll->insert( guess_types($row) );