# NOTE(review): this file is a non-contiguous excerpt of a larger Perl
# script — the leading integer on each line belongs to the pasted text,
# and the SQL text, statement-variable declarations ($sql, $sql_tags),
# $feed/$count initialization, and the end of the fetch loop are elided.
# Comments below describe only what the visible lines establish.
#
# Purpose (from visible code): read published items from the `reblog`
# MySQL database, fold feed_* columns into a nested `feed` hash, attach
# space-separated tags, and print each row as one JSON object per line.
#
# FIXME: no `use strict; use warnings;` visible — confirm they appear in
# the elided header lines.
5 use Data::Dump qw/dump/;
# DBI->connect and encode_json are used below but their `use` lines are
# not visible — presumably `use DBI;` and a JSON module appear earlier.
10 my $debug = $ENV{DEBUG} || 0;
12 my $database = 'reblog';
# CLI args. NOTE(review): $offset is only interpolated into the warn
# message below — confirm it is actually used in the elided SQL.
14 my ( $offset, $limit ) = @ARGV;
16 $limit ||= $ENV{LIMIT} || 1000;
# DSN targets a local tunnel endpoint rather than the DB host directly.
18 my $dbi = "DBI:mysql:database=$database";
19 $dbi .= ";host=127.0.0.1;port=13306"; # XXX over ssh
# Empty user/password — relies on tunnel/server configuration for auth;
# RaiseError means DB errors die instead of returning undef.
21 my $dbh = DBI->connect($dbi,"","",{ RaiseError => 1 });
23 warn "# published_items";
# Builds a temporary table of item ids whose userdata label is
# 'published' (statement text partially elided).
26 create temporary table published_items as
32 label = 'published' and
# Main SELECT: item columns plus feed columns aliased with a `feed_`
# prefix so they can be split back out into a nested structure below.
52 f.title as feed_title,
54 f.description as feed_description
56 join published_items p on i.id = p.item_id
57 join feeds f on i.feed_id = f.id
# Secondary statement ($sql_tags, declaration elided): tags per item id,
# ordered ascending so it can be merge-joined with the main result set.
66 items_userdata.item_id,
70 join published_items p
71 on items_userdata.item_id = p.item_id and label='tags'
73 items_userdata.item_id > ?
74 order by items_userdata.item_id asc
# Resume point is hard-coded to 0 — acknowledged by the author's FIXME;
# the line below makes the $debug assignment a no-op.
77 my $last_row = 0; # FIXME
78 $last_row = 0 if $debug;
80 warn "Fetching items from $dbi id > $last_row [$offset $limit]\n";
82 my $sth = $dbh->prepare($sql);
83 $sth->execute( $last_row );
# Column names as returned by the driver, in SELECT order.
85 my @columns = @{ $sth->{NAME} };
86 warn "# columns ",dump( @columns );
# grep with s/// both filters to the feed_* columns AND strips the
# prefix; note s/// aliases $_, so this also mutates @columns in place.
88 my @feed = grep { s/^feed_// } @columns;
# NOTE(review): per DBI docs, rows() is generally unreliable for SELECT
# until all rows are fetched (DBD::mysql is an exception only when
# mysql_store_result is in effect) — verify this count is meaningful.
90 warn "found ",$sth->rows," items to process...\n";
92 my $sth_tags = $dbh->prepare($sql_tags);
93 $sth_tags->execute( $last_row );
# FIXME: message reads "found N tags found" — redundant wording.
94 warn "found ",$sth_tags->rows, " tags found...\n";
# Prime the merge-join with the first tags row.
98 my $row_tags = $sth_tags->fetchrow_hashref();
100 while (my $row = $sth->fetchrow_hashref() ) {
# $count is not declared in the visible lines — presumably initialized
# in an elided line; used to synthesize an id when _id is missing/falsy.
101 my $_id = $row->{_id} || "c$count";
# Move feed_* columns out of the flat row into a nested `feed` hash
# ($feed itself is initialized on an elided line).
106 $feed->{$_} = delete $row->{ "feed_$_" } foreach @feed;
107 $row->{feed} = $feed;
# Merge-join: advance the tags cursor while it lags behind the current
# item. NOTE(review): tags are assigned *inside* the advance loop, so a
# row_tags whose item_id is still < the current item's appears to get
# attached to $row — looks like an off-by-one in the merge; confirm
# against the elided lines 113-119 before changing anything.
109 while ( $row_tags && $row_tags->{item_id} < $row->{item_id} ) {
110 if ( $row_tags = $sth_tags->fetchrow_hashref() ) {
111 warn "## got tags: ",dump( $row_tags ) if $debug;
# Tags arrive as one space-separated string; split into an array ref so
# they serialize as a JSON array.
112 $row->{tags} = [ split(/\s+/, $row_tags->{tags} ) ];
# One JSON object per line ($/ is the input record separator, "\n" by
# default — NOTE(review): $\ would be the output record separator).
120 print encode_json($row),$/;