// crawlReceipt.java
// -----------------------
// part of the AnomicHTTPD caching proxy
// (C) by Michael Peter Christen; mc@yacy.net
// first published on http://www.anomic.de
// Frankfurt, Germany, 2004
//
// $LastChangedDate$
// $LastChangedRevision$
// $LastChangedBy$
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
// You must compile this file with
// javac -classpath .:../classes crawlReceipt.java

import java.io.IOException;

import net.yacy.cora.document.encoding.ASCII;
import net.yacy.cora.federate.solr.FailCategory;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.ResultURLs;
import net.yacy.crawler.data.ResultURLs.EventOrigin;
import net.yacy.kelondro.data.meta.URIMetadataNode;
import net.yacy.peers.Protocol;
import net.yacy.peers.Seed;
import net.yacy.repository.Blacklist.BlacklistType;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
import net.yacy.utils.crypt;

public final class crawlReceipt {
    /*
     * this is used to respond on a remote crawling request
     */

    public static serverObjects respond(@SuppressWarnings("unused") final RequestHeader header, final serverObjects post, final serverSwitch env) {

        // return variable that accumulates replacements
        final Switchboard sb = (Switchboard) env;
        final serverObjects prop = new serverObjects();
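
        // only authenticated peer-to-peer requests are processed; all other
        // callers receive an empty response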
        if ((post == null) || (env == null) || !Protocol.authentifyRequest(post, env)) {
            return prop;
        }

        final ConcurrentLog log = sb.getLog();

        //int proxyPrefetchDepth = Integer.parseInt(env.getConfig("proxyPrefetchDepth", "0"));
        //int crawlingDepth = Integer.parseInt(env.getConfig("crawlingDepth", "0"));

        // request values
        final String iam = post.get("iam", "");       // seed hash of requester
        final String youare = post.get("youare", ""); // seed hash of the target peer, needed for network stability
        final String result = post.get("result", ""); // the result; either "ok" or "fail"
        final String reason = post.get("reason", ""); // the reason for that result
        //String words = post.get("wordh", "");       // priority word hashes
        final String propStr = crypt.simpleDecode(post.get("lurlEntry", ""));
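
        // the lurlEntry parameter is transported in encoded form; simpleDecode
        // returns null when it cannot be decoded, which is treated further down
        // as "error with url / wrong key"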

        /*
         the result can have one of the following values:
         negative cases, no retry
           unavailable - the resource is not available (a broken link); not found or interrupted
           exception   - an exception occurred
           robot       - a robot-file has denied to crawl that resource

         negative cases, retry possible
           rejected    - the peer has rejected to load the resource
           dequeue     - peer too busy - rejected to crawl

         positive cases with crawling
           fill        - the resource was loaded and processed
           update      - the resource was already in database but re-loaded and processed

         positive cases without crawling
           known       - the resource is already in database, believed to be fresh and not reloaded
           stale       - the resource was reloaded but not processed because source had no changes
        */
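
        // illustrative example (hypothetical values): a receipt for a successfully
        // loaded page could arrive as
        //   iam=aaaaaaaaaaaa&youare=bbbbbbbbbbbb&result=fill&reason=ok&lurlEntry=<encoded metadata>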

        final Seed otherPeer = sb.peers.get(iam);
        final String otherPeerName = iam + ":" + ((otherPeer == null) ? "NULL" : (otherPeer.getName() + "/" + otherPeer.getVersion()));
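
        // every exit path below sets a "delay" value; this appears to be the
        // number of seconds the remote peer should wait before sending the next
        // receipt (large values back off peers whose receipts we cannot accept)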

        if ((sb.peers.mySeed() == null) || (!(sb.peers.mySeed().hash.equals(youare)))) {
            // no yacy connection / unknown peers
            prop.put("delay", "3600");
            return prop;
        }

        if (propStr == null) {
            // error with url / wrong key
            prop.put("delay", "3600");
            return prop;
        }

        if ((sb.isRobinsonMode()) && (!sb.isInMyCluster(otherPeer))) {
            // we reject urls that are from outside our cluster
            prop.put("delay", "9999");
            return prop;
        }

        // generating a new loaded URL entry
        final URIMetadataNode entry = URIMetadataNode.importEntry(propStr, "dht");
        if (entry == null) {
            if (log.isWarn()) log.warn("crawlReceipt: RECEIVED wrong RECEIPT (entry null) from peer " + iam + "\n\tURL properties: " + propStr);
            prop.put("delay", "3600");
            return prop;
        }

        if (entry.url() == null) {
            if (log.isWarn()) log.warn("crawlReceipt: RECEIVED wrong RECEIPT (url null) for hash " + ASCII.String(entry.hash()) + " from peer " + iam + "\n\tURL properties: " + propStr);
            prop.put("delay", "3600");
            return prop;
        }

        // check if the entry is in our network domain
        final String urlRejectReason = sb.crawlStacker.urlInAcceptedDomain(entry.url());
        if (urlRejectReason != null) {
            log.warn("crawlReceipt: RECEIVED wrong RECEIPT (" + urlRejectReason + ") for hash " + ASCII.String(entry.hash()) + " from peer " + iam + "\n\tURL properties: " + propStr);
            prop.put("delay", "9999");
            return prop;
        }

        // Check URL against DHT blacklist
        if (Switchboard.urlBlacklist.isListed(BlacklistType.DHT, entry.url())) {
            // URL is blacklisted
            log.warn("crawlReceipt: RECEIVED wrong RECEIPT (URL is blacklisted) for URL " + ASCII.String(entry.hash()) + ":" + entry.url().toNormalform(false) + " from peer " + iam);
            prop.put("delay", "9999");
            return prop;
        }
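
        // note that only a "fill" result delivers a URL entry that is stored in
        // the index; every other result code falls through and is recorded as an
        // error for the delegated URL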

        if ("fill".equals(result) && sb.crawlQueues.delegatedURL != null) try {
            // put new entry into database
            sb.index.fulltext().putMetadata(entry);
            ResultURLs.stack(ASCII.String(entry.url().hash()), entry.url().getHost(), youare.getBytes(), iam.getBytes(), EventOrigin.REMOTE_RECEIPTS);
            sb.crawlQueues.delegatedURL.remove(entry.hash()); // the delegated work has been done
            if (log.isInfo()) log.info("crawlReceipt: RECEIVED RECEIPT from " + otherPeerName + " for URL " + ASCII.String(entry.hash()) + ":" + entry.url().toNormalform(false));

            // ready for more
            prop.put("delay", "10");
            return prop;
        } catch (final IOException e) {
            ConcurrentLog.logException(e);
            prop.put("delay", "3600");
            return prop;
        }

        if (sb.crawlQueues.delegatedURL != null) { // the delegated work is transformed into an error case
            sb.crawlQueues.delegatedURL.remove(entry.hash());
            sb.crawlQueues.errorURL.push(entry.url(), 997, null, FailCategory.FINAL_LOAD_CONTEXT, result + ":" + reason, -1);
        }
        //switchboard.noticeURL.remove(receivedUrlhash);
        prop.put("delay", "3600");
        return prop;

        // return rewrite properties
    }
}