// IndexControlRWIs_p.java
// -----------------------
// (C) 2004-2007 by Michael Peter Christen; mc@yacy.net, Frankfurt a. M., Germany
// first published 2004 on http://yacy.net
//
// This is a part of YaCy, a peer-to-peer based web search engine
//
// $LastChangedDate: 2007-11-14 01:15:28 +0000 (Mi, 14 Nov 2007) $
// $LastChangedRevision: 4216 $
// $LastChangedBy: orbiter $
//
// LICENSE
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
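
// This class backs the IndexControlRWIs_p.html admin page. Under the usual YaCy
// servlet naming convention a request such as (hypothetical example)
//   http://localhost:8080/IndexControlRWIs_p.html?keystring=yacy&keystringsearch=
// arrives here with the query parameters wrapped in the 'post' serverObjects.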
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

import de.anomic.data.listManager;
import de.anomic.http.httpRequestHeader;
import de.anomic.kelondro.order.Bitfield;
import de.anomic.kelondro.text.AbstractBlacklist;
import de.anomic.kelondro.text.MetadataRowContainer;
import de.anomic.kelondro.text.Reference;
import de.anomic.kelondro.text.ReferenceContainer;
import de.anomic.kelondro.text.ReferenceContainerCache;
import de.anomic.kelondro.text.ReferenceRow;
import de.anomic.kelondro.text.Word;
import de.anomic.plasma.plasmaSearchAPI;
import de.anomic.plasma.plasmaSearchEvent;
import de.anomic.plasma.plasmaSearchRankingProcess;
import de.anomic.plasma.plasmaSwitchboard;
import de.anomic.plasma.plasmaWordIndex;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
import de.anomic.yacy.yacyClient;
import de.anomic.yacy.yacySeed;
import de.anomic.yacy.yacyURL;
public class IndexControlRWIs_p {

    public static serverObjects respond(final httpRequestHeader header, final serverObjects post, final serverSwitch<?> env) {
        // return variable that accumulates replacements
        final plasmaSwitchboard sb = (plasmaSwitchboard) env;
        final serverObjects prop = new serverObjects();
        prop.putHTML("keystring", "");
        prop.put("keyhash", "");
        prop.put("result", "");

        // switch off all optional forms/lists
        prop.put("searchresult", 0);
        prop.put("keyhashsimilar", 0);
        prop.put("genUrlList", 0);

        // clean up all search events
        plasmaSearchEvent.cleanupEvents(true);
        if (post != null) {
            // default values
            final String keystring = post.get("keystring", "").trim();
            String keyhash = post.get("keyhash", "").trim();
            prop.putHTML("keystring", keystring);
            prop.putHTML("keyhash", keyhash);

            // read values from checkboxes
            String[] urlx = post.getAll("urlhx.*");
            final boolean delurl = post.containsKey("delurl");
            final boolean delurlref = post.containsKey("delurlref");
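
            // "delurl" also removes the selected URLs from the URL metadata;
            // "delurlref" additionally removes all word references pointing to them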
            if (post.containsKey("keystringsearch")) {
                keyhash = Word.word2hash(keystring);
                prop.put("keyhash", keyhash);
                final plasmaSearchRankingProcess ranking = plasmaSearchAPI.genSearchresult(prop, sb, keyhash, null);
                if (ranking.filteredCount() == 0) {
                    prop.put("searchresult", 1);
                    prop.putHTML("searchresult_word", keystring);
                }
            }
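
            // search by hash only; the word itself cannot be recovered from the hash,
            // so it is only shown when the keystring field still matches the hash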
            if (post.containsKey("keyhashsearch")) {
                if (keystring.length() == 0 || !Word.word2hash(keystring).equals(keyhash)) {
                    prop.put("keystring", "<not possible to compute word from hash>");
                }
                final plasmaSearchRankingProcess ranking = plasmaSearchAPI.genSearchresult(prop, sb, keyhash, null);
                if (ranking.filteredCount() == 0) {
                    prop.put("searchresult", 2);
                    prop.putHTML("searchresult_wordhash", keyhash);
                }
            }
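
            // a confirmed complete deletion wipes the whole RWI index together with
            // the crawl queues, the crawl stacker and the cached robots.txt entries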
            // delete everything
            if (post.containsKey("deletecomplete") && post.containsKey("confirmDelete")) {
                sb.webIndex.clear();
                sb.crawlQueues.clear();
                sb.crawlStacker.clear();
                try {
                    sb.robots.clear();
                } catch (final IOException e) {
                    e.printStackTrace();
                }
                post.remove("deletecomplete");
            }
            // delete word
            if (post.containsKey("keyhashdeleteall")) try {
                if (delurl || delurlref) {
                    // generate an urlx array
                    ReferenceContainer index = sb.webIndex.index().get(keyhash, null);
                    final Iterator<ReferenceRow> en = index.entries();
                    int i = 0;
                    urlx = new String[index.size()];
                    while (en.hasNext()) {
                        urlx[i++] = en.next().urlHash();
                    }
                    index = null;
                }
                if (delurlref) {
                    for (int i = 0; i < urlx.length; i++) sb.removeAllUrlReferences(urlx[i], true);
                }
                if (delurl || delurlref) {
                    for (int i = 0; i < urlx.length; i++) {
                        sb.urlRemove(urlx[i]);
                    }
                }
                sb.webIndex.index().delete(keyhash);
                post.remove("keyhashdeleteall");
                post.put("urllist", "generated");
            } catch (final IOException e) {
                e.printStackTrace();
            }
            // delete selected URLs
            if (post.containsKey("keyhashdelete")) try {
                if (delurlref) {
                    for (int i = 0; i < urlx.length; i++) sb.removeAllUrlReferences(urlx[i], true);
                }
                if (delurl || delurlref) {
                    for (int i = 0; i < urlx.length; i++) {
                        sb.urlRemove(urlx[i]);
                    }
                }
                final Set<String> urlHashes = new HashSet<String>();
                for (int i = 0; i < urlx.length; i++) urlHashes.add(urlx[i]);
                sb.webIndex.index().remove(keyhash, urlHashes);
                // this shall lead to a presentation of the list; so handle that the remaining program
                // thinks that it was called for a list presentation
                post.remove("keyhashdelete");
                post.put("urllist", "generated");
            } catch (final IOException e) {
                e.printStackTrace();
            }
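
            // list the URLs referenced by the word behind keyhash, optionally
            // restricted by the flag bits compiled from the posted checkboxes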
            if (post.containsKey("urllist")) {
                if (keystring.length() == 0 || !Word.word2hash(keystring).equals(keyhash)) {
                    prop.put("keystring", "<not possible to compute word from hash>");
                }
                final Bitfield flags = plasmaSearchAPI.compileFlags(post);
                final int count = (post.get("lines", "all").equals("all")) ? -1 : post.getInt("lines", -1);
                final plasmaSearchRankingProcess ranking = plasmaSearchAPI.genSearchresult(prop, sb, keyhash, flags);
                plasmaSearchAPI.genURLList(prop, keyhash, keystring, ranking, flags, count);
            }
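
            // index transfer: ship one complete word index container to a remote peer;
            // references whose URL metadata is missing locally are skipped and counted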
            // transfer to other peer
            if (post.containsKey("keyhashtransfer")) try {
                if (keystring.length() == 0 || !Word.word2hash(keystring).equals(keyhash)) {
                    prop.put("keystring", "<not possible to compute word from hash>");
                }

                // find host & peer
                String host = post.get("host", ""); // get host from input field
                yacySeed seed = null;
                if (host.length() != 0) {
                    if (host.length() == 12) {
                        // the host string is a peer hash
                        seed = sb.webIndex.peers().getConnected(host);
                    } else {
                        // the host string can be a host name
                        seed = sb.webIndex.peers().lookupByName(host);
                    }
                } else {
                    host = post.get("hostHash", ""); // if input field is empty, get from select box
                    seed = sb.webIndex.peers().getConnected(host);
                }

                // prepare index
                ReferenceContainer index;
                final long starttime = System.currentTimeMillis();
                index = sb.webIndex.index().get(keyhash, null);

                // build urlCache
                final Iterator<ReferenceRow> urlIter = index.entries();
                final HashMap<String, MetadataRowContainer> knownURLs = new HashMap<String, MetadataRowContainer>();
                final HashSet<String> unknownURLEntries = new HashSet<String>();
                Reference iEntry;
                MetadataRowContainer lurl;
                while (urlIter.hasNext()) {
                    iEntry = urlIter.next();
                    lurl = sb.webIndex.metadata().load(iEntry.urlHash(), null, 0);
                    if (lurl == null) {
                        unknownURLEntries.add(iEntry.urlHash());
                        urlIter.remove();
                    } else {
                        knownURLs.put(iEntry.urlHash(), lurl);
                    }
                }
                // make an indexContainerCache
                ReferenceContainerCache icc = new ReferenceContainerCache(index.rowdef, plasmaWordIndex.wordOrder);
                icc.add(index);
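
                // the single container is wrapped into a fresh in-memory container
                // cache because the transfer call below consumes a cache, not a container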
                // transport to other peer
                final String gzipBody = sb.getConfig("indexControl.gzipBody", "false");
                final int timeout = (int) sb.getConfigLong("indexControl.timeout", 60000);
                final String error = yacyClient.transferIndex(
                        seed,
                        icc,
                        knownURLs,
                        "true".equalsIgnoreCase(gzipBody),
                        timeout);
                prop.put("result", (error == null) ? ("Successfully transferred " + knownURLs.size() + " words in " + ((System.currentTimeMillis() - starttime) / 1000) + " seconds, " + unknownURLEntries.size() + " URLs not found") : "error: " + error);
                index = null;
            } catch (final IOException e) {
                e.printStackTrace();
            }
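
            // the hash browser below walks the index in hash order, starting at keyhash,
            // and renders at most 256 neighboring word hashes as an 8-column table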
            // generate list
            if (post.containsKey("keyhashsimilar")) try {
                final Iterator<ReferenceContainer> containerIt = sb.webIndex.index().references(keyhash, true, 256, false).iterator();
                ReferenceContainer container;
                int i = 0;
                int rows = 0, cols = 0;
                prop.put("keyhashsimilar", "1");
                while (containerIt.hasNext() && i < 256) {
                    container = containerIt.next();
                    prop.put("keyhashsimilar_rows_" + rows + "_cols_" + cols + "_wordHash", container.getWordHash());
                    cols++;
                    if (cols == 8) {
                        prop.put("keyhashsimilar_rows_" + rows + "_cols", cols);
                        cols = 0;
                        rows++;
                    }
                    i++;
                }
                prop.put("keyhashsimilar_rows_" + rows + "_cols", cols);
                prop.put("keyhashsimilar_rows", rows + 1);
                prop.put("result", "");
            } catch (final IOException e) {
                e.printStackTrace();
            }
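
            // blacklist support: the selected URLs are deleted from the URL metadata and
            // appended to the chosen blacklist file, either as exact host/path entries
            // ("blacklisturls") or as whole-host patterns ("blacklistdomains")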
            if (post.containsKey("blacklist")) {
                final String blacklist = post.get("blacklist", "");
                final Set<String> urlHashes = new HashSet<String>();
                if (post.containsKey("blacklisturls")) {
                    PrintWriter pw;
                    try {
                        final String[] supportedBlacklistTypes = env.getConfig("BlackLists.types", "").split(",");
                        pw = new PrintWriter(new FileWriter(new File(listManager.listsPath, blacklist), true));
                        yacyURL url;
                        for (int i = 0; i < urlx.length; i++) {
                            urlHashes.add(urlx[i]);
                            final MetadataRowContainer e = sb.webIndex.metadata().load(urlx[i], null, 0);
                            sb.webIndex.metadata().remove(urlx[i]);
                            if (e != null) {
                                url = e.metadata().url();
                                pw.println(url.getHost() + "/" + url.getFile());
                                for (int blTypes = 0; blTypes < supportedBlacklistTypes.length; blTypes++) {
                                    if (listManager.listSetContains(supportedBlacklistTypes[blTypes] + ".BlackLists", blacklist)) {
                                        plasmaSwitchboard.urlBlacklist.add(
                                                supportedBlacklistTypes[blTypes],
                                                url.getHost(),
                                                url.getFile());
                                    }
                                }
                            }
                        }
                        pw.close();
                    } catch (final IOException e) {
                    }
                }
                if (post.containsKey("blacklistdomains")) {
                    PrintWriter pw;
                    try {
                        final String[] supportedBlacklistTypes = AbstractBlacklist.BLACKLIST_TYPES_STRING.split(",");
                        pw = new PrintWriter(new FileWriter(new File(listManager.listsPath, blacklist), true));
                        yacyURL url;
                        for (int i = 0; i < urlx.length; i++) {
                            urlHashes.add(urlx[i]);
                            final MetadataRowContainer e = sb.webIndex.metadata().load(urlx[i], null, 0);
                            sb.webIndex.metadata().remove(urlx[i]);
                            if (e != null) {
                                url = e.metadata().url();
                                pw.println(url.getHost() + "/.*");
                                for (int blTypes = 0; blTypes < supportedBlacklistTypes.length; blTypes++) {
                                    if (listManager.listSetContains(supportedBlacklistTypes[blTypes] + ".BlackLists", blacklist)) {
                                        plasmaSwitchboard.urlBlacklist.add(
                                                supportedBlacklistTypes[blTypes],
                                                url.getHost(), ".*");
                                    }
                                }
                            }
                        }
                        pw.close();
                    } catch (final IOException e) {
                    }
                }
                try {
                    sb.webIndex.index().remove(keyhash, urlHashes);
                } catch (final IOException e) {
                    e.printStackTrace();
                }
            }
            if (prop.getInt("searchresult", 0) == 3) plasmaSearchAPI.listHosts(prop, keyhash, sb);
        }

        // insert constants
        prop.putNum("wcount", sb.webIndex.index().size());

        // return rewrite properties
        return prop;
    }
}