X-Git-Url: http://git.rot13.org/?p=BackupPC.git;a=blobdiff_plain;f=lib%2FBackupPC%2FSearch.pm;h=7f75add02724ef493c4ae670a30fd816ac255ece;hp=38837c08ee7d1ee07e00d163598ffa0fa5567c9d;hb=ef9301b407599b033433abaa9d43ad6294bb663b;hpb=4271e4fc3abee4aaa0326cf76d3dc9ad36a3ffe4

diff --git a/lib/BackupPC/Search.pm b/lib/BackupPC/Search.pm
index 38837c0..7f75add 100644
--- a/lib/BackupPC/Search.pm
+++ b/lib/BackupPC/Search.pm
@@ -227,24 +227,25 @@ sub getFiles($) {
 
 	my $order = getSort('search', 'sql', $param->{'sort'});
 
+	# XXX LIMIT $on_page doesn't work since we don't get correct number of results
 	my $sql_order = qq{
 		ORDER BY $order
-		LIMIT $on_page
 		OFFSET ?
 	};
 
 	my $sql_results = qq{ select $sql_cols $sql_from $sql_where $sql_order };
 	my $sth = $dbh->prepare($sql_results);
-	$sth->execute( $offset );
+	my $rows = $sth->execute( $offset );
 
 	my @ret;
-
+
 	while (my $row = $sth->fetchrow_hashref()) {
 		push @ret, $row;
+		last if $#ret + 1 >= $on_page;
 	}
 
 	$sth->finish();
-	return ($sth->rows, \@ret);
+	return ($rows, \@ret);
 }
 
 sub getFilesHyperEstraier($) {
@@ -353,6 +354,45 @@ sub getGzipSize($$)
 	);
 }
 
+sub host_backup_nums {
+	my $host = shift;
+	my $sth = get_dbh->prepare(qq{
+		select
+			hosts.name as host, -- FIXME for debug
+			backups.num as num,
+			inc_size,
+			size,
+			inc_deleted
+		from backups
+		join hosts on hosts.id = hostid
+		where hosts.name = ?
+	});
+	$sth->execute($host);
+	# and inc_size < 0 and size > 0 and not inc_deleted
+
+	my $all_backup_numbers;
+	# pre-seed with on disk backups
+	$all_backup_numbers->{ $_->{num} }++ foreach $bpc->BackupInfoRead($host);
+
+	while( my $row = $sth->fetchrow_hashref ) {
+warn "# row ",dump $row;
+		$all_backup_numbers->{ $row->{num} } =
+			$row->{inc_deleted} ? 0 :
+			$row->{size} == 0 ? 0 :
+			$row->{inc_size} > 0 ? 0 :
+			$row->{size} > 0 ? 1 :
+			0;
+	}
+
+warn "# host $host all_backup_numbers = ",dump($all_backup_numbers);
+	my @backup_nums =
+		sort { $a <=> $b }
+		grep { $all_backup_numbers->{$_} }
+		keys %$all_backup_numbers;
+
+	return @backup_nums;
+}
+
 sub getBackupsNotBurned($) {
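
A rough usage sketch, not part of the patch above: host_backup_nums() takes a host name and returns the sorted backup numbers whose rows in the backups table have size > 0, no inc_size filled in yet, and inc_deleted false, i.e. backups that still look worth processing. A hypothetical caller walking all configured hosts might look like the snippet below; it assumes an initialized BackupPC::Lib object in $bpc and that BackupPC::Search has been set up with a database handle, and HostInfoRead() is the stock BackupPC::Lib accessor for the hosts file.

	# hypothetical caller, for illustration only
	foreach my $host ( sort keys %{ $bpc->HostInfoRead() } ) {
		my @nums = BackupPC::Search::host_backup_nums($host);
		next unless @nums;
		print "$host: backups still pending: @nums\n";
	}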