# Dump "published" reblog items out of a local MySQL database.
# NOTE(review): `use strict; use warnings;` and `use DBI;` are not visible in
# this excerpt — confirm they appear earlier in the file (DBI->connect below
# requires DBI to be loaded).
5 use Data::Dump qw/dump/;
# Any command-line argument switches on debug output.
9 my $debug = @ARGV ? 1 : 0;
11 my $database = 'reblog';
# DSN for DBD::mysql; port 13306 is a local forward of the remote server
# (see the XXX marker below).
13 my $dbi = "DBI:mysql:database=$database";
14 $dbi .= ";host=127.0.0.1;port=13306"; # XXX over ssh
# Anonymous login; RaiseError => 1 makes every DBI call die on failure instead
# of returning undef, so no per-call error checks are needed below.
16 my $dbh = DBI->connect($dbi,"","",{ RaiseError => 1 });
19 create temporary table published_items as
25 label = 'published' and
36 f.title as feed_title,
38 f.description as feed_description
40 join published_items p on i.id = p.item_id
41 join feeds f on i.feed_id = f.id
49 items_userdata.item_id,
53 join published_items p
54 on items_userdata.item_id = p.item_id and label='tags'
56 items_userdata.item_id > ?
57 order by items_userdata.item_id asc
# Resume position: only items with id > $last_row are fetched.
# NOTE(review): hard-coded to 0 (full re-import) — the FIXME suggests this
# should come from persisted state; as written, the debug override on the next
# line is a no-op because the value is already 0.
60 my $last_row = 0; # FIXME
61 $last_row = 0 if $debug;
63 print "Fetching items from $dbi id > $last_row\n";
# Main item query ($sql is built in a heredoc outside this excerpt; it takes
# one bind parameter — presumably the item id lower bound).
65 my $sth = $dbh->prepare($sql);
66 $sth->execute( $last_row );
# Column names exactly as returned by the driver, in select-list order.
68 my @columns = @{ $sth->{NAME} };
69 warn dump( @columns );
# NOTE(review): grep's $_ aliases the elements of @columns, so the s///
# both collects the feed_* names (prefix stripped) into @feed AND strips the
# prefix from @columns in place. The later `delete $row->{"feed_$_"}` relies
# on @feed holding the stripped names, but @columns is mutated as a side
# effect — confirm that is intended.
71 my @feed = grep { s/^feed_// } @columns;
# NOTE(review): for a SELECT on DBD::mysql, $sth->rows is generally not
# reliable until all rows have been fetched — this count may print -1 or 0;
# verify against the driver configuration (mysql_store_result).
73 print "found ",$sth->rows," items to process...\n";
# Companion tag query ($sql_tags, also built outside this excerpt), ordered by
# item_id so it can be merge-joined with the item stream below.
75 my $sth_tags = $dbh->prepare($sql_tags);
76 $sth_tags->execute( $last_row );
77 print "found ",$sth_tags->rows, " tags found...\n";
83 while (my $row = $sth->fetchrow_hashref() ) {
84 my $_id = $row->{_id} || "c$count";
89 $feed->{$_} = delete $row->{ "feed_$_" } foreach @feed;
92 while ( $row_tags && $row_tags->{item_id} < $row->{item_id} ) {
93 $row_tags = $sth_tags->fetchrow_hashref();
94 warn "## got tags: ",dump( $row_tags ) if $debug;
95 $row->{tags} = [ split(/\s+/, $row_tags->{tags} ) ];