// search.java
// -----------------------
// part of the AnomicHTTPD caching proxy
// (C) by Michael Peter Christen; mc@anomic.de
// first published on http://www.anomic.de
// Frankfurt, Germany, 2004
//
// $LastChangedDate$
// $LastChangedRevision$
// $LastChangedBy$
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
//
// Using this software in any meaning (reading, learning, copying, compiling,
// running) means that you agree that the Author(s) is (are) not responsible
// for cost, loss of data or any harm that may be caused directly or indirectly
// by usage of this software or this documentation. The usage of this software
// is on your own risk. The installation and usage (starting/running) of this
// software may allow other people or applications to access your computer and
// any attached devices and is highly dependent on the configuration of the
// software which must be done by the user of the software; the author(s) is
// (are) also not responsible for proper configuration and usage of the
// software, even if provoked by documentation provided together with
// the software.
//
// Any changes to this file according to the GPL as documented in the file
// gpl.txt aside this file in the shipment you received can be done to the
// lines that follow this copyright notice here, but changes must not be
// done inside the copyright notice above. A re-distribution must contain
// the intact and unchanged copyright notice.
// Contributions and changes to the program code must be marked as such.

// You must compile this file with
// javac -classpath .:../../Classes search.java
// if the shell's current path is htroot/yacy
import java.util.HashMap ;
2006-09-06 19:51:28 +02:00
import java.util.Iterator ;
2006-09-11 00:36:47 +02:00
import java.util.Map ;
2006-09-06 19:51:28 +02:00
import java.util.Set ;
2007-01-15 02:50:57 +01:00
import java.util.TreeSet ;
2006-09-06 19:51:28 +02:00
2005-05-05 07:36:42 +02:00
import de.anomic.http.httpHeader ;
2006-11-23 03:16:30 +01:00
import de.anomic.kelondro.kelondroBitfield ;
2006-09-06 19:51:28 +02:00
import de.anomic.index.indexContainer ;
2007-01-19 02:45:29 +01:00
import de.anomic.net.natLib ;
2006-11-10 02:13:33 +01:00
import de.anomic.plasma.plasmaURL ;
2006-11-08 17:17:47 +01:00
import de.anomic.index.indexURLEntry ;
2006-11-28 16:00:15 +01:00
import de.anomic.plasma.plasmaCondenser ;
2006-01-20 16:14:21 +01:00
import de.anomic.plasma.plasmaSearchEvent ;
2006-09-30 00:27:20 +02:00
import de.anomic.plasma.plasmaSearchQuery ;
2006-02-05 00:51:00 +01:00
import de.anomic.plasma.plasmaSearchRankingProfile ;
2007-01-19 01:38:03 +01:00
import de.anomic.plasma.plasmaSearchPostOrder ;
2006-02-05 00:51:00 +01:00
import de.anomic.plasma.plasmaSearchTimingProfile ;
2006-01-20 16:14:21 +01:00
import de.anomic.plasma.plasmaSnippetCache ;
2005-05-05 07:36:42 +02:00
import de.anomic.plasma.plasmaSwitchboard ;
2006-01-20 16:14:21 +01:00
import de.anomic.server.serverCore ;
2005-05-05 07:36:42 +02:00
import de.anomic.server.serverObjects ;
import de.anomic.server.serverSwitch ;
import de.anomic.yacy.yacyCore ;
2006-09-12 02:42:42 +02:00
import de.anomic.yacy.yacyDHTAction ;
2005-05-05 07:36:42 +02:00
import de.anomic.yacy.yacySeed ;
2006-12-20 16:44:29 +01:00
import de.anomic.tools.crypt ;
2005-04-07 21:19:42 +02:00
2005-10-05 12:45:33 +02:00
public final class search {
2005-04-07 21:19:42 +02:00
2005-10-05 15:14:18 +02:00
public static serverObjects respond ( httpHeader header , serverObjects post , serverSwitch ss ) {
2005-10-07 17:04:03 +02:00
if ( post = = null | | ss = = null ) { return null ; }
2005-04-07 21:19:42 +02:00
2005-10-05 15:14:18 +02:00
// return variable that accumulates replacements
final plasmaSwitchboard sb = ( plasmaSwitchboard ) ss ;
2006-01-20 16:14:21 +01:00
2005-10-05 15:14:18 +02:00
//System.out.println("yacy: search received request = " + post.toString());
2005-04-07 21:19:42 +02:00
2005-12-07 00:51:29 +01:00
final String oseed = post . get ( " myseed " , " " ) ; // complete seed of the requesting peer
// final String youare = post.get("youare", ""); // seed hash of the target peer, used for testing network stability
final String key = post . get ( " key " , " " ) ; // transmission key for response
2006-09-11 00:36:47 +02:00
final String query = post . get ( " query " , " " ) ; // a string of word hashes that shall be searched and combined
2006-10-10 14:22:16 +02:00
String urls = post . get ( " urls " , " " ) ; // a string of url hashes that are preselected for the search: no other may be returned
String abstracts = post . get ( " abstracts " , " " ) ; // a string of word hashes for abstracts that shall be generated, or 'auto' (for maxcount-word), or '' (for none)
2005-12-07 00:51:29 +01:00
// final String fwdep = post.get("fwdep", ""); // forward depth. if "0" then peer may NOT ask another peer for more results
// final String fwden = post.get("fwden", ""); // forward deny, a list of seed hashes. They may NOT be target of forward hopping
final long duetime = post . getLong ( " duetime " , 3000 ) ;
final int count = post . getInt ( " count " , 10 ) ; // maximum number of wanted results
2006-01-30 01:42:38 +01:00
final int maxdist = post . getInt ( " maxdist " , Integer . MAX_VALUE ) ;
2006-04-14 01:19:36 +02:00
final String prefer = post . get ( " prefer " , " " ) ;
2006-12-01 17:21:17 +01:00
final String contentdom = post . get ( " contentdom " , " text " ) ;
2006-04-20 12:15:00 +02:00
final String filter = post . get ( " filter " , " .* " ) ;
2007-02-01 14:27:23 +01:00
final int partitions = post . getInt ( " partitions " , 30 ) ;
2006-12-20 16:44:29 +01:00
String profile = post . get ( " profile " , " " ) ; // remote profile hand-over
if ( profile . length ( ) > 0 ) profile = crypt . simpleDecode ( profile , null ) ;
2006-10-02 03:15:02 +02:00
final boolean includesnippet = post . get ( " includesnippet " , " false " ) . equals ( " true " ) ;
2006-11-23 16:47:19 +01:00
final kelondroBitfield constraint = new kelondroBitfield ( 4 , post . get ( " constraint " , " ______ " ) ) ;
2005-12-05 10:13:13 +01:00
// final boolean global = ((String) post.get("resource", "global")).equals("global"); // if true, then result may consist of answers from other peers
2005-10-17 17:46:12 +02:00
// Date remoteTime = yacyCore.parseUniversalDate((String) post.get(yacySeed.MYTIME)); // read remote time
2006-01-20 16:14:21 +01:00
2006-10-10 14:22:16 +02:00
// test:
// http://localhost:8080/yacy/search.html?query=4galTpdpDM5Q (search for linux)
// http://localhost:8080/yacy/search.html?query=gh8DKIhGKXws (search for book)
// http://localhost:8080/yacy/search.html?query=4galTpdpDM5Qgh8DKIhGKXws&abstracts=auto (search for linux and book, generate abstract automatically)
// http://localhost:8080/yacy/search.html?query=&abstracts=4galTpdpDM5Q (only abstracts for linux)
2006-01-20 16:14:21 +01:00
// tell all threads to do nothing for a specific time
sb . intermissionAllThreads ( 2 * duetime ) ;
2006-10-10 14:22:16 +02:00
Set abstractSet = ( ( abstracts . length ( ) = = 0 ) | | ( abstracts . equals ( " auto " ) ) ) ? null : plasmaSearchQuery . hashes2Set ( abstracts ) ;
2006-01-20 16:14:21 +01:00
// store accessing peer
2005-04-07 21:19:42 +02:00
if ( yacyCore . seedDB = = null ) {
2005-08-30 23:32:59 +02:00
yacyCore . log . logSevere ( " yacy.search: seed cache not initialized " ) ;
2005-04-07 21:19:42 +02:00
} else {
2006-04-06 18:28:28 +02:00
yacyCore . peerActions . peerArrival ( yacySeed . genRemoteSeed ( oseed , key , true ) , true ) ;
2005-04-07 21:19:42 +02:00
}
2006-01-20 16:14:21 +01:00
// prepare search
2006-09-13 19:13:28 +02:00
final Set keyhashes = plasmaSearchQuery . hashes2Set ( query ) ;
2005-10-05 15:14:18 +02:00
final long timestamp = System . currentTimeMillis ( ) ;
2006-01-20 16:14:21 +01:00
serverObjects prop = new serverObjects ( ) ;
2006-09-11 00:36:47 +02:00
2006-10-10 14:22:16 +02:00
// prepare an abstract result
StringBuffer indexabstract = new StringBuffer ( ) ;
2006-09-14 00:19:34 +02:00
int joincount = 0 ;
2007-01-19 01:38:03 +01:00
plasmaSearchPostOrder acc = null ;
2006-10-10 14:22:16 +02:00
plasmaSearchQuery squery = null ;
2007-01-15 02:50:57 +01:00
plasmaSearchEvent theSearch = null ;
2006-10-10 14:22:16 +02:00
if ( ( query . length ( ) = = 0 ) & & ( abstractSet ! = null ) ) {
// this is _not_ a normal search, only a request for index abstracts
2006-12-01 17:21:17 +01:00
squery = new plasmaSearchQuery ( abstractSet , maxdist , prefer , plasmaSearchQuery . contentdomParser ( contentdom ) , count , duetime , filter , plasmaSearchQuery . catchall_constraint ) ;
2006-10-10 14:22:16 +02:00
squery . domType = plasmaSearchQuery . SEARCHDOM_LOCAL ;
2007-01-15 17:03:00 +01:00
yacyCore . log . logInfo ( " INIT HASH SEARCH (abstracts only): " + plasmaSearchQuery . anonymizedQueryHashes ( squery . queryHashes ) + " - " + squery . wantedResults + " links " ) ;
2006-10-10 14:22:16 +02:00
// prepare a search profile
2006-12-20 16:44:29 +01:00
plasmaSearchRankingProfile rankingProfile = ( profile . length ( ) = = 0 ) ? new plasmaSearchRankingProfile ( contentdom ) : new plasmaSearchRankingProfile ( " " , profile ) ;
2006-10-10 14:22:16 +02:00
plasmaSearchTimingProfile localTiming = new plasmaSearchTimingProfile ( squery . maximumTime , squery . wantedResults ) ;
plasmaSearchTimingProfile remoteTiming = null ;
2007-01-15 02:50:57 +01:00
theSearch = new plasmaSearchEvent ( squery , rankingProfile , localTiming , remoteTiming , true , yacyCore . log , sb . wordIndex , sb . wordIndex . loadedURL , sb . snippetCache ) ;
2006-10-10 14:22:16 +02:00
Map containers = theSearch . localSearchContainers ( plasmaSearchQuery . hashes2Set ( urls ) ) ;
if ( containers ! = null ) {
Iterator ci = containers . entrySet ( ) . iterator ( ) ;
Map . Entry entry ;
String wordhash ;
while ( ci . hasNext ( ) ) {
entry = ( Map . Entry ) ci . next ( ) ;
wordhash = ( String ) entry . getKey ( ) ;
indexContainer container = ( indexContainer ) entry . getValue ( ) ;
2006-11-10 02:13:33 +01:00
indexabstract . append ( " indexabstract. " + wordhash + " = " ) . append ( plasmaURL . compressIndex ( container , null , 1000 ) . toString ( ) ) . append ( serverCore . crlfString ) ;
2006-09-12 02:42:42 +02:00
}
2006-09-06 19:51:28 +02:00
}
2006-09-14 00:19:34 +02:00
2007-01-16 15:07:54 +01:00
prop . putASIS ( " indexcount " , " " ) ;
2006-10-10 14:22:16 +02:00
prop . put ( " joincount " , 0 ) ;
} else {
// retrieve index containers from search request
2006-12-01 17:21:17 +01:00
squery = new plasmaSearchQuery ( keyhashes , maxdist , prefer , plasmaSearchQuery . contentdomParser ( contentdom ) , count , duetime , filter , constraint ) ;
2006-10-10 14:22:16 +02:00
squery . domType = plasmaSearchQuery . SEARCHDOM_LOCAL ;
2007-01-15 17:03:00 +01:00
yacyCore . log . logInfo ( " INIT HASH SEARCH (query- " + abstracts + " ): " + plasmaSearchQuery . anonymizedQueryHashes ( squery . queryHashes ) + " - " + squery . wantedResults + " links " ) ;
2006-10-10 14:22:16 +02:00
// prepare a search profile
2006-12-20 16:44:29 +01:00
plasmaSearchRankingProfile rankingProfile = ( profile . length ( ) = = 0 ) ? new plasmaSearchRankingProfile ( contentdom ) : new plasmaSearchRankingProfile ( " " , profile ) ;
2006-10-10 14:22:16 +02:00
plasmaSearchTimingProfile localTiming = new plasmaSearchTimingProfile ( squery . maximumTime , squery . wantedResults ) ;
plasmaSearchTimingProfile remoteTiming = null ;
2007-01-15 02:50:57 +01:00
theSearch = new plasmaSearchEvent ( squery ,
2006-10-10 14:22:16 +02:00
rankingProfile , localTiming , remoteTiming , true ,
2006-12-05 03:47:51 +01:00
yacyCore . log , sb . wordIndex , sb . wordIndex . loadedURL ,
2006-10-10 14:22:16 +02:00
sb . snippetCache ) ;
Map containers = theSearch . localSearchContainers ( plasmaSearchQuery . hashes2Set ( urls ) ) ;
// set statistic details of search result and find best result index set
if ( containers = = null ) {
2007-01-16 15:07:54 +01:00
prop . putASIS ( " indexcount " , " " ) ;
prop . putASIS ( " joincount " , " 0 " ) ;
2006-09-14 00:19:34 +02:00
} else {
2006-10-10 14:22:16 +02:00
Iterator ci = containers . entrySet ( ) . iterator ( ) ;
StringBuffer indexcount = new StringBuffer ( ) ;
Map . Entry entry ;
int maxcount = - 1 ;
double mindhtdistance = 1 . 1 , d ;
String wordhash ;
String maxcounthash = null , neardhthash = null ;
while ( ci . hasNext ( ) ) {
entry = ( Map . Entry ) ci . next ( ) ;
wordhash = ( String ) entry . getKey ( ) ;
indexContainer container = ( indexContainer ) entry . getValue ( ) ;
if ( container . size ( ) > maxcount ) {
maxcounthash = wordhash ;
maxcount = container . size ( ) ;
}
d = yacyDHTAction . dhtDistance ( yacyCore . seedDB . mySeed . hash , wordhash ) ;
if ( d < mindhtdistance ) {
mindhtdistance = d ;
neardhthash = wordhash ;
}
indexcount . append ( " indexcount. " ) . append ( container . getWordHash ( ) ) . append ( '=' ) . append ( Integer . toString ( container . size ( ) ) ) . append ( serverCore . crlfString ) ;
if ( ( abstractSet ! = null ) & & ( abstractSet . contains ( wordhash ) ) ) {
2006-11-10 02:13:33 +01:00
indexabstract . append ( " indexabstract. " + wordhash + " = " ) . append ( plasmaURL . compressIndex ( container , null , 1000 ) . toString ( ) ) . append ( serverCore . crlfString ) ;
2006-10-10 14:22:16 +02:00
}
}
2007-01-16 15:07:54 +01:00
prop . putASIS ( " indexcount " , new String ( indexcount ) ) ;
2006-10-10 14:22:16 +02:00
// join and order the result
indexContainer localResults = theSearch . localSearchJoin ( containers . values ( ) ) ;
2006-11-05 03:10:40 +01:00
if ( localResults = = null ) {
joincount = 0 ;
prop . put ( " joincount " , 0 ) ;
acc = null ;
} else {
joincount = localResults . size ( ) ;
2007-01-16 15:07:54 +01:00
prop . putASIS ( " joincount " , Integer . toString ( joincount ) ) ;
2006-11-05 03:10:40 +01:00
acc = theSearch . orderFinal ( localResults ) ;
}
2006-10-10 14:22:16 +02:00
// generate compressed index for maxcounthash
// this is not needed if the search is restricted to specific
// urls, because it is a re-search
if ( ( maxcounthash = = null ) | | ( urls . length ( ) ! = 0 ) | | ( keyhashes . size ( ) = = 1 ) | | ( abstracts . length ( ) = = 0 ) ) {
2007-01-16 15:07:54 +01:00
prop . putASIS ( " indexabstract " , " " ) ;
2006-10-10 14:22:16 +02:00
} else if ( abstracts . equals ( " auto " ) ) {
2006-11-10 02:13:33 +01:00
indexabstract . append ( " indexabstract. " + maxcounthash + " = " ) . append ( plasmaURL . compressIndex ( ( ( indexContainer ) containers . get ( maxcounthash ) ) , localResults , 1000 ) . toString ( ) ) . append ( serverCore . crlfString ) ;
2006-10-10 14:22:16 +02:00
if ( ( neardhthash ! = null )
& & ( ! ( neardhthash . equals ( maxcounthash ) ) ) ) {
2006-11-10 02:13:33 +01:00
indexabstract . append ( " indexabstract. " + neardhthash + " = " ) . append ( plasmaURL . compressIndex ( ( ( indexContainer ) containers . get ( neardhthash ) ) , localResults , 1000 ) . toString ( ) ) . append ( serverCore . crlfString ) ;
2006-10-10 14:22:16 +02:00
}
//System.out.println("DEBUG-ABSTRACTGENERATION: maxcounthash = " + maxcounthash);
//System.out.println("DEBUG-ABSTRACTGENERATION: neardhthash = "+ neardhthash);
//yacyCore.log.logFine("DEBUG HASH SEARCH: " + indexabstract);
2006-09-14 00:19:34 +02:00
}
2006-09-12 02:42:42 +02:00
}
2007-02-01 14:27:23 +01:00
if ( partitions > 0 ) sb . requestedQueries = sb . requestedQueries + 1d / ( double ) partitions ; // increase query counter
2006-09-11 00:36:47 +02:00
}
2007-02-06 17:26:56 +01:00
prop . putASIS ( " indexabstract " , new String ( indexabstract ) ) ;
2006-09-06 19:51:28 +02:00
2007-01-15 02:50:57 +01:00
// prepare search statistics
Long trackerHandle = new Long ( System . currentTimeMillis ( ) ) ;
HashMap searchProfile = theSearch . resultProfile ( ) ;
2007-01-18 01:26:16 +01:00
searchProfile . put ( " resulttime " , new Long ( System . currentTimeMillis ( ) - timestamp ) ) ;
searchProfile . put ( " resultcount " , new Integer ( joincount ) ) ;
2007-01-15 02:50:57 +01:00
String client = ( String ) header . get ( " CLIENTIP " ) ;
searchProfile . put ( " host " , client ) ;
2007-01-19 02:45:29 +01:00
yacySeed remotepeer = yacyCore . seedDB . lookupByIP ( natLib . getInetAddress ( client ) , true , false , false ) ;
searchProfile . put ( " peername " , ( remotepeer = = null ) ? " unknown " : remotepeer . getName ( ) ) ;
2007-02-08 11:42:35 +01:00
searchProfile . put ( " time " , trackerHandle ) ;
sb . remoteSearches . add ( searchProfile ) ;
2007-01-15 02:50:57 +01:00
TreeSet handles = ( TreeSet ) sb . remoteSearchTracker . get ( client ) ;
if ( handles = = null ) handles = new TreeSet ( ) ;
handles . add ( trackerHandle ) ;
sb . remoteSearchTracker . put ( client , handles ) ;
2006-09-11 00:36:47 +02:00
// prepare result
2006-09-06 19:51:28 +02:00
if ( ( joincount = = 0 ) | | ( acc = = null ) ) {
2006-09-11 00:36:47 +02:00
// no results
2007-01-16 15:07:54 +01:00
prop . putASIS ( " links " , " " ) ;
prop . putASIS ( " linkcount " , " 0 " ) ;
prop . putASIS ( " references " , " " ) ;
2006-09-11 00:36:47 +02:00
2006-01-20 16:14:21 +01:00
} else {
2006-09-06 19:51:28 +02:00
// result is a List of urlEntry elements
2006-01-20 16:14:21 +01:00
int i = 0 ;
StringBuffer links = new StringBuffer ( ) ;
2006-12-11 02:31:23 +01:00
String resource = null ;
2006-11-08 17:17:47 +01:00
indexURLEntry urlentry ;
2006-12-11 02:31:23 +01:00
plasmaSnippetCache . TextSnippet snippet ;
2006-01-20 16:14:21 +01:00
while ( ( acc . hasMoreElements ( ) ) & & ( i < squery . wantedResults ) ) {
2006-11-08 17:17:47 +01:00
urlentry = ( indexURLEntry ) acc . nextElement ( ) ;
2006-10-02 03:15:02 +02:00
if ( includesnippet ) {
2006-12-11 02:31:23 +01:00
snippet = sb . snippetCache . retrieveTextSnippet ( urlentry . comp ( ) . url ( ) , squery . queryHashes , false , urlentry . flags ( ) . get ( plasmaCondenser . flag_cat_indexof ) , 260 , 1000 ) ;
2006-10-02 03:15:02 +02:00
} else {
snippet = null ;
}
2006-12-11 02:31:23 +01:00
if ( ( snippet ! = null ) & & ( snippet . exists ( ) ) ) {
resource = urlentry . toString ( snippet . getLineRaw ( ) ) ;
2006-01-20 16:14:21 +01:00
} else {
2006-12-11 02:31:23 +01:00
resource = urlentry . toString ( ) ;
}
if ( resource ! = null ) {
links . append ( " resource " ) . append ( i ) . append ( '=' ) . append ( resource ) . append ( serverCore . crlfString ) ;
i + + ;
2006-01-20 16:14:21 +01:00
}
}
2007-01-16 15:07:54 +01:00
prop . putASIS ( " links " , new String ( links ) ) ;
prop . putASIS ( " linkcount " , Integer . toString ( i ) ) ;
2006-01-20 16:14:21 +01:00
// prepare reference hints
Object [ ] ws = acc . getReferences ( 16 ) ;
StringBuffer refstr = new StringBuffer ( ) ;
for ( int j = 0 ; j < ws . length ; j + + )
refstr . append ( " , " ) . append ( ( String ) ws [ j ] ) ;
2007-02-06 17:26:56 +01:00
prop . putASIS ( " references " , ( refstr . length ( ) > 0 ) ? refstr . substring ( 1 ) : new String ( refstr ) ) ;
2006-01-20 16:14:21 +01:00
}
2005-10-12 14:28:49 +02:00
2006-09-06 19:51:28 +02:00
// add information about forward peers
2007-01-16 15:07:54 +01:00
prop . putASIS ( " fwhop " , " " ) ; // hops (depth) of forwards that had been performed to construct this result
prop . putASIS ( " fwsrc " , " " ) ; // peers that helped to construct this result
prop . putASIS ( " fwrec " , " " ) ; // peers that would have helped to construct this result (recommendations)
2006-09-06 19:51:28 +02:00
2006-01-20 16:14:21 +01:00
// log
2007-01-18 01:26:16 +01:00
yacyCore . log . logInfo ( " EXIT HASH SEARCH: " + plasmaSearchQuery . anonymizedQueryHashes ( squery . queryHashes ) + " - " + joincount + " links found, " + prop . get ( " linkcount " , " ? " ) + " links selected, " + ( ( System . currentTimeMillis ( ) - timestamp ) / 1000 ) + " seconds " ) ;
2006-01-20 16:14:21 +01:00
2007-01-16 15:07:54 +01:00
prop . putASIS ( " searchtime " , Long . toString ( System . currentTimeMillis ( ) - timestamp ) ) ;
2005-10-05 15:14:18 +02:00
final int links = Integer . parseInt ( prop . get ( " linkcount " , " 0 " ) ) ;
2005-04-07 21:19:42 +02:00
yacyCore . seedDB . mySeed . incSI ( links ) ;
yacyCore . seedDB . mySeed . incSU ( links ) ;
2005-10-07 17:04:03 +02:00
return prop ;
2005-04-07 21:19:42 +02:00
}
2007-01-16 15:07:54 +01:00
}