# NOTE(review): this excerpt is non-contiguous (original line numbers are
# embedded in each line); the top of the file -- strict/warnings, `use DBI`
# -- is not visible here.  Confirm `use strict; use warnings;` exist above.
7 use Data::Dump qw(dump);
# Source Koha database.  NOTE(review): 'dbi:oursql:' is an unusual DBD
# driver name (the conventional MySQL driver is DBD::mysql) -- confirm
# DBD::oursql is actually installed, or this connect will fail.
9 our $koha_dsn = 'dbi:oursql:dbname=koha';
10 our $koha_user = 'kohaadmin';
11 our $koha_passwd = '';
# Target PostgreSQL database that will hold the geo_* tables.
13 our $geo_dsn = 'dbi:Pg:dbname=koha';
# Shared DBI attributes: RaiseError makes DBI die on any database error
# (which is why later per-statement evals are used as "ignore failure").
17 our $opts = { RaiseError => 1 }; #, AutoCommit => 0, pg_enable_utf8 => 1, oursql_enable_utf8 => 1 };
# Site-local overrides for the settings above.  NOTE(review): $geo_user and
# $geo_passwd used below are not declared in this excerpt -- presumably the
# required file defines them; verify, or the connect gets undefs.
19 require '/srv/koha-config.pl';
# Connect to source (Koha) and destination (PostgreSQL) databases.
21 my $k_dbh = DBI->connect($koha_dsn, $koha_user, $koha_passwd, $opts) || die $DBI::errstr;
22 my $g_dbh = DBI->connect($geo_dsn, $geo_user, $geo_passwd, $opts) || die $DBI::errstr;
# Interior of a table-copy routine (its `sub` header, the $offset
# initialization, and the enclosing batch loop are missing from this
# excerpt).  Copies all rows of $table from Koha into PostgreSQL in
# batches of 1000, ordered by the primary-key column $pk.
25 my ( $table, $pk ) = @_;
# Start from a clean slate on the PostgreSQL side.
27 warn "# clean $table";
28 $g_dbh->do( "delete from $table" );
# NOTE(review): $table/$pk are interpolated directly into SQL -- safe only
# while callers pass hard-coded identifiers (as the commented-out calls
# below do); never feed user input through here.
32 my $sql = "select * from $table order by $pk";
34 warn "# import $table";
# LIMIT/OFFSET paging; relies on `order by $pk` being a stable ordering so
# batches do not overlap or skip rows.
38 my $sth = $k_dbh->prepare( "$sql limit 1000 offset $offset" );
39 print STDERR "$table [$offset] ";
# NOTE(review): per the DBI docs, rows() is generally unreliable for
# SELECT statements until all rows have been fetched -- confirm the driver
# returns a usable count here.  Setting $offset to 0 presumably signals
# the (unseen) enclosing loop to terminate, per the inline "exit" note.
42 $offset = 0 if ! $sth->rows; # exit
# Build a matching INSERT from the source statement's lowercased column
# names, with one placeholder per column.
44 my @cols = @{ $sth->{NAME_lc} };
45 my $sql_insert = "insert into $table (" . join(',',@cols) . ") values (" . join(',', map { '?' } @cols ) . ")";
46 my $sth_insert = $g_dbh->prepare( $sql_insert );
48 while( my $row = $sth->fetchrow_arrayref ) {
# Best-effort insert: eval swallows per-row failures so one bad row does
# not abort the whole import (RaiseError is on).  NOTE(review): $@ is
# never inspected, so failed rows vanish silently -- consider at least
# counting or warning on $@.
49 eval { $sth_insert->execute( @$row ) };
# Progress indicator every 100 rows.
51 print STDERR "$offset " if $offset % 100 == 0;
59 #fetch_table 'biblio' => 'biblionumber' ;
60 #fetch_table 'biblioitems' => 'biblioitemnumber' ;
# --- Build geo_biblioitems: (biblioitemnumber, biblionumber, city) ---
64 warn "# drop geo_biblioitems";
# eval: ignore the failure when the table does not exist yet (RaiseError
# would otherwise kill the script on the first run).
65 eval { $g_dbh->do(qq{ drop table geo_biblioitems }) };
67 warn "# create geo_biblioitems";
# The CREATE TABLE body below is quoted SQL inside an unseen
# $g_dbh->do(qq{ ... }) wrapper (its opening/closing lines are missing
# from this excerpt); no comments are inserted into it for that reason.
69 create table geo_biblioitems (
70 biblioitemnumber integer not null references biblioitems(biblioitemnumber),
71 biblionumber integer not null references biblio(biblionumber),
# Three placeholders: biblioitemnumber, biblionumber, and the city string
# extracted from MARC 260$a (see the execute() call further down).
75 my $sth_insert = $g_dbh->prepare(qq{
76 insert into geo_biblioitems values (?,?,?)
# NOTE(review): typo in the diagnostic message ("bibiloitems") -- harmless
# since it is only a progress warn, but worth fixing for greppability.
79 warn "# select bibiloitems";
80 my $sth = $g_dbh->prepare(qq{
82 biblioitemnumber, biblionumber, isbn, issn, marcxml
# NOTE(review): for SELECTs, DBI's rows() may return -1 / an unreliable
# value before all rows are fetched -- confirm DBD::Pg gives a real count.
87 warn $sth->rows, " rows\n";
# Body of a debug helper (its `sub warn_dump` header is missing from this
# excerpt): dumps all arguments to STDERR, but only when the DEBUG
# environment variable is set.
90 warn dump @_,$/ if $ENV{DEBUG};
# Walk every biblioitem row, parse its MARCXML, and extract the place of
# publication (MARC field 260, subfield a) into $row->{city}, then record
# it in geo_biblioitems.
95 while ( my $row = $sth->fetchrow_hashref ) {
# ForceArray guarantees datafield/subfield are always arrayrefs even when
# a record has only one of them; marcxml is deleted from $row once parsed
# so later dumps stay readable.
96 my $xml = XMLin( delete $row->{marcxml}, ForceArray => [ 'datafield', 'subfield' ] );
98 warn_dump($row, $xml);
# MARC 260 = publication/distribution info; 260$a is the place.
100 my @tag_260 = grep { $_->{tag} eq '260' } @{ $xml->{datafield} };
# Records without a 260 field carry no place info -- skip them.
102 next unless @tag_260;
104 warn_dump @tag_260 if $ENV{DEBUG};
# Flatten the first 260 field's subfields into tag_260_<code> keys.
# NOTE(review): only the first 260 occurrence is used; confirm repeated
# 260 fields can safely be ignored for this data set.
106 foreach my $sf ( @{ $tag_260[0]->{subfield} } ) {
107 $row->{ 'tag_260_' . $sf->{code} } = $sf->{content};
# Normalize the raw 260$a value into a plain city name.
# NOTE(review): if subfield a is absent, tag_260_a is undef and the
# substitutions below emit "uninitialized" warnings and insert NULL city
# -- confirm that is acceptable.
110 $row->{city} = $row->{tag_260_a};
# strip single/double quotes
111 $row->{city} =~ s/['"]+//g;
# drop trailing "[etc.]"-style qualifiers and anything after them
112 $row->{city} =~ s/\s*\[etc.*\].*$//;
# drop the trailing ISBD ":" separator (before publisher name)
113 $row->{city} =~ s/\s*[:]\s*$//;
# remove any remaining square brackets
114 $row->{city} =~ s/[\[\]]+//g;
# Progress line every 100 processed records.
118 warn "# $i ", $row->{city}, $/ if $i++ % 100 == 0;
120 $sth_insert->execute( $row->{biblioitemnumber}, $row->{biblionumber}, $row->{city} );
# --- geo_city: geocoding cache keyed by the raw Koha city string ---
# The CREATE TABLE body below is quoted SQL from an unseen
# $g_dbh->do(qq{ ... }); its remaining columns and closing lines are
# missing from this excerpt (the INSERT below implies 8 columns total).
129 create table geo_city (
130 city_koha text not null primary key,
142 my $sth_insert = $g_dbh->prepare(qq{insert into geo_city values (?,?,?,?,?,?,?,?)});
# Select only cities not yet cached in geo_city, most frequent first, so
# the highest-impact cities are geocoded before any API limit is reached.
144 my $sth = $g_dbh->prepare(qq{
145 select count(*),city from geo_biblioitems where city not in (select city_koha from geo_city) group by city order by count(city) desc
# NOTE(review): no execute() is visible between prepare and this rows()
# call (it may be on a missing line); also, per DBI docs, rows() is
# unreliable for SELECTs until all rows are fetched.
149 warn $sth->rows, " cities to geolocate";
# Yahoo PlaceFinder geocoder.  NOTE(review): the appid is a hard-coded
# API credential in source -- it belongs in /srv/koha-config.pl alongside
# the database passwords.
151 use Geo::Coder::PlaceFinder;
153 my $geocoder = Geo::Coder::PlaceFinder->new( appid => 'eQWEGC58' );
# Geocode each uncached city and store the result in geo_city.
155 while( my $row = $sth->fetchrow_hashref ) {
# NOTE(review): no error handling -- a failed/empty lookup leaves
# $location without keys and the insert below stores NULLs; also no
# rate-limit throttling between requests.  Confirm both are intended.
156 my $location = $geocoder->geocode(location => $row->{city});
157 warn dump($location);
# The leading parameters of this execute() are on lines missing from the
# excerpt (presumably $row->{city} as city_koha plus one more column).
158 $sth_insert->execute(
161 , $location->{country}
162 , $location->{county}
163 , $location->{latitude}
164 , $location->{longitude}
165 , $location->{radius}
# --- geo_count: per-city aggregate joined with geocoded coordinates ---
# Drop any previous version; eval ignores "table does not exist".
170 eval { $g_dbh->do(qq{ drop table geo_count }) };
# The SELECT tail below is quoted SQL from an unseen statement (its
# CREATE TABLE ... AS / opening lines are missing from this excerpt);
# it joins extracted cities to their geocoded rows, skipping one-character
# (effectively empty) normalized city names.
181 join geo_city on city_koha = geo_biblioitems.city
182 where length(geo_city.city) > 1
183 group by geo_city.city, country, lat, lng, city_koha
184 order by count(biblionumber) desc