//HTTPLoader.java
//------------------------
//part of YaCy
//(C) by Michael Peter Christen; mc@yacy.net
//first published on http://www.anomic.de
//Frankfurt, Germany, 2006
//
// $LastChangedDate$
// $LastChangedRevision$
// $LastChangedBy$
//
//This program is free software; you can redistribute it and/or modify
//it under the terms of the GNU General Public License as published by
//the Free Software Foundation; either version 2 of the License, or
//(at your option) any later version.
//
//This program is distributed in the hope that it will be useful,
//but WITHOUT ANY WARRANTY; without even the implied warranty of
//MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//GNU General Public License for more details.
//
//You should have received a copy of the GNU General Public License
//along with this program; if not, write to the Free Software
//Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

package de.anomic.crawler.retrieval;

import java.io.IOException;
import java.util.Date;

import net.yacy.document.Parser;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;

import de.anomic.crawler.Latency;
import de.anomic.data.Blacklist;
import de.anomic.http.client.Client;
import de.anomic.http.server.HeaderFramework;
import de.anomic.http.server.RequestHeader;
import de.anomic.http.server.ResponseContainer;
import de.anomic.search.Segments;
import de.anomic.search.Switchboard;
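
/**
 * Loader for http(s) resources used by the YaCy crawler. It fetches a URL
 * with the crawler user agent, applies blacklist, file-extension, mime-type
 * and file-size checks, and follows redirects up to a bounded retry count.
 *
 * A minimal usage sketch (assuming an already initialized Switchboard sb,
 * a Log instance and a prepared crawler Request):
 * <pre>
 *   final HTTPLoader loader = new HTTPLoader(sb, log);
 *   final Response response = loader.load(request, true); // parseable content only
 * </pre>
 */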
public final class HTTPLoader {

    private static final String DEFAULT_ENCODING = "gzip,deflate";
    private static final String DEFAULT_LANGUAGE = "en-us,en;q=0.5";
    private static final String DEFAULT_CHARSET = "ISO-8859-1,utf-8;q=0.7,*;q=0.7";
    private static final long DEFAULT_MAXFILESIZE = 1024 * 1024 * 10;
    public static final int DEFAULT_CRAWLING_RETRY_COUNT = 5;

    public static final String crawlerUserAgent = "yacybot (" + Client.getSystemOST() + ") http://yacy.net/bot.html";
    public static final String yacyUserAgent = "yacy (" + Client.getSystemOST() + ") yacy.net";

    /**
     * The socket timeout that should be used
     */
    private final int socketTimeout;

    /**
     * The maximum allowed file size (now read from the
     * crawler.http.maxFileSize configuration at load time)
     */
    //private long maxFileSize = -1;

    //private String acceptEncoding;
    //private String acceptLanguage;
    //private String acceptCharset;

    private final Switchboard sb;
    private final Log log;

    public HTTPLoader(final Switchboard sb, final Log theLog) {
        this.sb = sb;
        this.log = theLog;

        // refreshing timeout value
        this.socketTimeout = (int) sb.getConfigLong("crawler.clientTimeout", 10000);
    }
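
    /**
     * Loads a resource using the default redirection retry budget and
     * records the measured load time for the target host via Latency.update.
     */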
    public Response load(final Request entry, final boolean acceptOnlyParseable) throws IOException {
        final long start = System.currentTimeMillis();
        final Response doc = load(entry, acceptOnlyParseable, DEFAULT_CRAWLING_RETRY_COUNT);
        Latency.update(entry.url().hash().substring(6), entry.url().getHost(), System.currentTimeMillis() - start);
        return doc;
    }
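
    /**
     * Performs the actual fetch. Redirects are handled by recursion; the
     * retryCount argument bounds the number of redirect hops, and a negative
     * value aborts the request with an error entry in the crawler queues.
     */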
    private Response load(final Request request, final boolean acceptOnlyParseable, final int retryCount) throws IOException {

        if (retryCount < 0) {
            sb.crawlQueues.errorURL.newEntry(request, sb.peers.mySeed().hash, new Date(), 1, "redirection counter exceeded").store();
            throw new IOException("Redirection counter exceeded for URL " + request.url().toString() + ". Processing aborted.");
        }

        final String host = request.url().getHost();
        if (host == null || host.length() < 2) throw new IOException("host is not well-formed: '" + host + "'");
        final String path = request.url().getFile();
        int port = request.url().getPort();
        final boolean ssl = request.url().getProtocol().equals("https");
        if (port < 0) port = (ssl) ? 443 : 80;

        // reject the file if the extension indicates an unsupported file type
        if (acceptOnlyParseable) {
            final String supportError = Parser.supportsExtension(request.url());
            if (supportError != null) {
                sb.crawlQueues.errorURL.newEntry(request, sb.peers.mySeed().hash, new Date(), 1, supportError);
                throw new IOException("REJECTED WRONG EXTENSION TYPE: " + supportError);
            }
        }

        // check if url is in blacklist
        final String hostlow = host.toLowerCase();
        if (Switchboard.urlBlacklist.isListed(Blacklist.BLACKLIST_CRAWLER, hostlow, path)) {
            sb.crawlQueues.errorURL.newEntry(request, sb.peers.mySeed().hash, new Date(), 1, "url in blacklist").store();
            throw new IOException("CRAWLER Rejecting URL '" + request.url().toString() + "'. URL is in blacklist.");
        }

        // take a file from the net
        Response response = null;
        final long maxFileSize = sb.getConfigLong("crawler.http.maxFileSize", DEFAULT_MAXFILESIZE);

        // create a request header
        final RequestHeader requestHeader = new RequestHeader();
        requestHeader.put(HeaderFramework.USER_AGENT, crawlerUserAgent);
        DigestURI refererURL = null;
        if (request.referrerhash() != null) refererURL = sb.getURL(Segments.Process.LOCALCRAWLING, request.referrerhash());
        if (refererURL != null) requestHeader.put(RequestHeader.REFERER, refererURL.toNormalform(true, true));
        requestHeader.put(HeaderFramework.ACCEPT_LANGUAGE, sb.getConfig("crawler.http.acceptLanguage", DEFAULT_LANGUAGE));
        requestHeader.put(HeaderFramework.ACCEPT_CHARSET, sb.getConfig("crawler.http.acceptCharset", DEFAULT_CHARSET));
        requestHeader.put(HeaderFramework.ACCEPT_ENCODING, sb.getConfig("crawler.http.acceptEncoding", DEFAULT_ENCODING));

        // HTTP-Client
        final Client client = new Client(socketTimeout, requestHeader);
        ResponseContainer res = null;
        try {
            // send request
            res = client.GET(request.url().toString(), maxFileSize);
            // FIXME: 30*-handling (bottom) is never reached
            // we always get the final content because httpClient.followRedirects = true

            if (res.getStatusCode() == 200 || res.getStatusCode() == 203) {
                // the transfer is ok

                if (acceptOnlyParseable) {
                    // if the response does not have the right mime type then reject the file
                    final String supportError = Parser.supports(request.url(), res.getResponseHeader().mime());
                    if (supportError != null) {
                        sb.crawlQueues.errorURL.newEntry(request, sb.peers.mySeed().hash, new Date(), 1, supportError);
                        throw new IOException("REJECTED WRONG MIME TYPE: " + supportError);
                    }
                }

                // we write the new cache entry to file system directly
                res.setAccountingName("CRAWLER");
                final byte[] responseBody = res.getData();
                final long contentLength = responseBody.length;
                // check length again in case it was not possible to get the length before loading
                if (maxFileSize > 0 && contentLength > maxFileSize) {
                    sb.crawlQueues.errorURL.newEntry(request, sb.peers.mySeed().hash, new Date(), 1, "file size limit exceeded");
                    throw new IOException("REJECTED URL " + request.url() + " because file size '" + contentLength + "' exceeds max filesize limit of " + maxFileSize + " bytes. (GET)");
                }

                // create a new cache entry
                response = new Response(
                        request,
                        requestHeader,
                        res.getResponseHeader(),
                        res.getStatusLine(),
                        sb.crawler.profilesActiveCrawls.getEntry(request.profileHandle()),
                        responseBody
                );

                return response;
            } else if (res.getStatusLine().startsWith("30")) {
                if (res.getResponseHeader().containsKey(HeaderFramework.LOCATION)) {
                    // getting redirection URL
                    String redirectionUrlString = res.getResponseHeader().get(HeaderFramework.LOCATION);
                    redirectionUrlString = redirectionUrlString.trim();
                    if (redirectionUrlString.length() == 0) {
                        sb.crawlQueues.errorURL.newEntry(request, sb.peers.mySeed().hash, new Date(), 1, "redirection header empty");
                        throw new IOException("CRAWLER Redirection of URL=" + request.url().toString() + " aborted. Location header is empty.");
                    }

                    // normalizing URL
                    final DigestURI redirectionUrl = DigestURI.newURL(request.url(), redirectionUrlString);

                    // restart crawling with new url
                    this.log.logInfo("CRAWLER Redirection detected ('" + res.getStatusLine() + "') for URL " + request.url().toString());
                    this.log.logInfo("CRAWLER ..Redirecting request to: " + redirectionUrl);

                    // if we are already doing a shutdown we don't need to retry crawling
                    if (Thread.currentThread().isInterrupted()) {
                        sb.crawlQueues.errorURL.newEntry(request, sb.peers.mySeed().hash, new Date(), 1, "server shutdown");
                        throw new IOException("CRAWLER Retry of URL=" + request.url().toString() + " aborted because of server shutdown.");
                    }
                    // generating url hash
                    final String urlhash = redirectionUrl.hash();
                    // check if the url was already indexed
                    final String dbname = sb.urlExists(Segments.Process.LOCALCRAWLING, urlhash);
                    if (dbname != null) {
                        sb.crawlQueues.errorURL.newEntry(request, sb.peers.mySeed().hash, new Date(), 1, "redirection to double content");
                        throw new IOException("CRAWLER Redirection of URL=" + request.url().toString() + " ignored. The url appears already in db " + dbname);
                    }
                    // retry crawling with new url
                    request.redirectURL(redirectionUrl);
                    return load(request, acceptOnlyParseable, retryCount - 1);
                }
            } else {
                // if the response does not have the right status code then reject the file
                sb.crawlQueues.errorURL.newEntry(request, sb.peers.mySeed().hash, new Date(), 1, "wrong http status code (" + res.getStatusCode() + ")");
                throw new IOException("REJECTED WRONG STATUS TYPE '" + res.getStatusLine() + "' for URL " + request.url().toString());
            }
        } finally {
            if (res != null) {
                // release connection
                res.closeStream();
            }
        }
        return response;
    }
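
    /**
     * Static variant of the loader: same fetch and redirect handling, but
     * with fixed header defaults, a fixed timeout, no file-size limit, and
     * none of the error-queue or latency bookkeeping of the instance method.
     */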
    public static Response load(final Request request) throws IOException {
        return load(request, 3);
    }
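
    /**
     * Note: this static path allows fewer redirect hops (3) than the
     * instance loader's DEFAULT_CRAWLING_RETRY_COUNT (5).
     */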
    private static Response load(final Request request, final int retryCount) throws IOException {
        if (retryCount < 0) {
            throw new IOException("Redirection counter exceeded for URL " + request.url().toString() + ". Processing aborted.");
        }
        final String host = request.url().getHost();
        if (host == null || host.length() < 2) throw new IOException("host is not well-formed: '" + host + "'");
        final String path = request.url().getFile();
        int port = request.url().getPort();
        final boolean ssl = request.url().getProtocol().equals("https");
        if (port < 0) port = (ssl) ? 443 : 80;
        // check if url is in blacklist
        final String hostlow = host.toLowerCase();
        if (Switchboard.urlBlacklist != null && Switchboard.urlBlacklist.isListed(Blacklist.BLACKLIST_CRAWLER, hostlow, path)) {
            throw new IOException("CRAWLER Rejecting URL '" + request.url().toString() + "'. URL is in blacklist.");
        }
        // take a file from the net
        Response response = null;
        // create a request header
        final RequestHeader requestHeader = new RequestHeader();
        requestHeader.put(HeaderFramework.USER_AGENT, crawlerUserAgent);
        requestHeader.put(HeaderFramework.ACCEPT_LANGUAGE, DEFAULT_LANGUAGE);
        requestHeader.put(HeaderFramework.ACCEPT_CHARSET, DEFAULT_CHARSET);
        requestHeader.put(HeaderFramework.ACCEPT_ENCODING, DEFAULT_ENCODING);
        // HTTP-Client
        final Client client = new Client(20000, requestHeader);
        ResponseContainer res = null;
        try {
            // send request
            res = client.GET(request.url().toString(), Long.MAX_VALUE);
            // FIXME: 30*-handling (bottom) is never reached
            // we always get the final content because httpClient.followRedirects = true
            if (res.getStatusCode() == 200 || res.getStatusCode() == 203) {
                // the transfer is ok
                // we write the new cache entry to file system directly
                res.setAccountingName("CRAWLER");
                final byte[] responseBody = res.getData();
                // create a new cache entry
                response = new Response(
                        request,
                        requestHeader,
                        res.getResponseHeader(),
                        res.getStatusLine(),
                        null,
                        responseBody
                );
                return response;
            } else if (res.getStatusLine().startsWith("30")) {
                if (res.getResponseHeader().containsKey(HeaderFramework.LOCATION)) {
                    // getting redirection URL
                    String redirectionUrlString = res.getResponseHeader().get(HeaderFramework.LOCATION);
                    redirectionUrlString = redirectionUrlString.trim();
                    if (redirectionUrlString.length() == 0) {
                        throw new IOException("CRAWLER Redirection of URL=" + request.url().toString() + " aborted. Location header is empty.");
                    }
                    // normalizing URL
                    final DigestURI redirectionUrl = DigestURI.newURL(request.url(), redirectionUrlString);
                    // if we are already doing a shutdown we don't need to retry crawling
                    if (Thread.currentThread().isInterrupted()) {
                        throw new IOException("CRAWLER Retry of URL=" + request.url().toString() + " aborted because of server shutdown.");
                    }
                    // retry crawling with new url
                    request.redirectURL(redirectionUrl);
                    return load(request, retryCount - 1);
                }
            } else {
                // if the response does not have the right status code then reject the file
                throw new IOException("REJECTED WRONG STATUS TYPE '" + res.getStatusLine() + "' for URL " + request.url().toString());
            }
        } finally {
            if (res != null) {
                // release connection
                res.closeStream();
            }
        }
        return response;
    }
}