//plasmaCrawlRobotsTxt.java
//-------------------------------------
//part of YACY
//(C) by Michael Peter Christen; mc@yacy.net
//first published on http://www.anomic.de
//Frankfurt, Germany, 2004
//
//This file is contributed by Martin Thelian
// [MC] moved some methods from robotsParser file that had been created by Alexander Schier to this class
//last major change: $LastChangedDate$ by $LastChangedBy$
//Revision: $LastChangedRevision$
//
//
//This program is free software; you can redistribute it and/or modify
//it under the terms of the GNU General Public License as published by
//the Free Software Foundation; either version 2 of the License, or
//(at your option) any later version.
//
//This program is distributed in the hope that it will be useful,
//but WITHOUT ANY WARRANTY; without even the implied warranty of
//MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//GNU General Public License for more details.
//
//You should have received a copy of the GNU General Public License
//along with this program; if not, write to the Free Software
//Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package de.anomic.crawler;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import net.yacy.kelondro.blob.Heap;
import net.yacy.kelondro.blob.MapView;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.order.NaturalOrder;
import net.yacy.kelondro.util.ByteBuffer;
import net.yacy.kelondro.util.DateFormatter;
import net.yacy.kelondro.util.FileUtils;
import net.yacy.kelondro.util.kelondroException;

import de.anomic.crawler.retrieval.HTTPLoader;
import de.anomic.http.client.Client;
import de.anomic.http.server.HeaderFramework;
import de.anomic.http.server.RequestHeader;
import de.anomic.http.server.ResponseContainer;
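
/**
 * Storage and lookup of robots.txt exclusion data. Entries are cached per
 * host:port key in a kelondro BLOB-backed map (one RobotsEntry per host) and
 * are re-fetched from the origin server when they are older than seven days.
 * Concurrent downloads for the same host are serialized via per-host
 * synchronization objects.
 *
 * A minimal usage sketch (the file path and URL below are illustrative only):
 *
 *   RobotsTxt robots = new RobotsTxt(new File("DATA/PLASMADB/crawlRobotsTxt.heap"));
 *   DigestURI url = new DigestURI("http://example.net/some/path", null);
 *   if (!robots.isDisallowed(url)) {
 *       // the crawler may fetch this URL; respect the requested crawl delay
 *       long delay = robots.crawlDelayMillis(url);
 *   }
 */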
public class RobotsTxt {

    public static final String ROBOTS_DB_PATH_SEPARATOR = ";";
    private static final Log log = new Log("ROBOTS");

    MapView robotsTable;
    private final File robotsTableFile;
    private final ConcurrentHashMap<String, DomSync> syncObjects;
    //private static final HashSet<String> loadedRobots = new HashSet<String>(); // only for debugging

    private static class DomSync {
        public DomSync() {}
    }

    public RobotsTxt(final File robotsTableFile) {
        this.robotsTableFile = robotsTableFile;
        robotsTableFile.getParentFile().mkdirs();
        Heap blob = null;
        try {
            blob = new Heap(robotsTableFile, 64, NaturalOrder.naturalOrder, 1024 * 1024);
        } catch (final IOException e) {
            Log.logException(e);
        }
        robotsTable = new MapView(blob, 100, '_');
        syncObjects = new ConcurrentHashMap<String, DomSync>();
    }

    private void resetDatabase() {
        // deletes the robots.txt database and creates a new one
        if (robotsTable != null) robotsTable.close();
        FileUtils.deletedelete(robotsTableFile);
        robotsTableFile.getParentFile().mkdirs();
        Heap blob = null;
        try {
            blob = new Heap(robotsTableFile, 64, NaturalOrder.naturalOrder, 1024 * 1024);
        } catch (final IOException e) {
            Log.logException(e);
        }
        robotsTable = new MapView(blob, 100, '_');
        syncObjects.clear();
    }

    public void clear() throws IOException {
        this.robotsTable.clear();
    }

    public void close() {
        this.robotsTable.close();
    }

    public int size() {
        return this.robotsTable.size();
    }
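
    /**
     * Returns the robots.txt entry for the given host:port key. If no entry is
     * cached, or the cached entry is older than seven days, and
     * fetchOnlineIfNotAvailableOrNotFresh is true, the robots.txt is fetched
     * from the host and the cache is updated. Per-host synchronization objects
     * ensure that concurrent threads do not download the same file twice.
     */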
    private RobotsEntry getEntry(final String urlHostPort, final boolean fetchOnlineIfNotAvailableOrNotFresh) {
        // this method returns a non-null value whenever fetchOnlineIfNotAvailableOrNotFresh is true
        RobotsEntry robotsTxt4Host = null;
        try {
            final Map<String, String> record = this.robotsTable.get(urlHostPort);
            if (record != null) robotsTxt4Host = new RobotsEntry(urlHostPort, record);
        } catch (final kelondroException e) {
            resetDatabase();
        } catch (final IOException e) {
            resetDatabase();
        }

        if (fetchOnlineIfNotAvailableOrNotFresh && (
                robotsTxt4Host == null ||
                robotsTxt4Host.getLoadedDate() == null ||
                System.currentTimeMillis() - robotsTxt4Host.getLoadedDate().getTime() > 7 * 24 * 60 * 60 * 1000
            )) {

            // make or get a synchronization object
            DomSync syncObj = this.syncObjects.get(urlHostPort);
            if (syncObj == null) {
                syncObj = new DomSync();
                this.syncObjects.put(urlHostPort, syncObj);
            }

            // we can now synchronize for each host separately
            synchronized (syncObj) {
                // if we have not found any data or the data is older than 7 days, we need to load it from the remote server
                // check the robots table again for all threads that come here because they waited for another one
                // to complete a download
                try {
                    final Map<String, String> record = this.robotsTable.get(urlHostPort);
                    if (record != null) robotsTxt4Host = new RobotsEntry(urlHostPort, record);
                } catch (final kelondroException e) {
                    resetDatabase();
                } catch (final IOException e) {
                    resetDatabase();
                }
                // an entry loaded within the last day (e.g. by the thread we just waited for) is fresh enough
                if (robotsTxt4Host != null &&
                    robotsTxt4Host.getLoadedDate() != null &&
                    System.currentTimeMillis() - robotsTxt4Host.getLoadedDate().getTime() <= 1 * 24 * 60 * 60 * 1000) {
                    return robotsTxt4Host;
                }

                // generating the proper URL to download the robots.txt
                DigestURI robotsURL = null;
                try {
                    robotsURL = new DigestURI("http://" + urlHostPort + "/robots.txt", null);
                } catch (final MalformedURLException e) {
                    log.logSevere("Unable to generate robots.txt URL for host:port '" + urlHostPort + "'.");
                    robotsURL = null;
                }

                Object[] result = null;
                if (robotsURL != null) {
                    if (log.isFine()) log.logFine("Trying to download the robots.txt file from URL '" + robotsURL + "'.");
                    try {
                        result = downloadRobotsTxt(robotsURL, 5, robotsTxt4Host);
                    } catch (final Exception e) {
                        result = null;
                    }
                }
                /*
                assert !loadedRobots.contains(robotsURL.toNormalform(false, false)) :
                        "robots-url=" + robotsURL.toString() +
                        ", robots=" + ((result == null || result[DOWNLOAD_ROBOTS_TXT] == null) ? "NULL" : new String((byte[]) result[DOWNLOAD_ROBOTS_TXT])) +
                        ", robotsTxt4Host=" + ((robotsTxt4Host == null) ? "NULL" : robotsTxt4Host.getLoadedDate().toString());
                loadedRobots.add(robotsURL.toNormalform(false, false));
                */

                if (result == null) {
                    // no robots.txt available, make an entry to prevent that the robots loading is done twice
                    if (robotsTxt4Host == null) {
                        // generate an artificial entry
                        robotsTxt4Host = new RobotsEntry(
                                urlHostPort,
                                new ArrayList<String>(),
                                new ArrayList<String>(),
                                new Date(),
                                new Date(),
                                null,
                                null,
                                Integer.valueOf(0));
                    } else {
                        robotsTxt4Host.setLoadedDate(new Date());
                    }

                    // store the data into the robots DB
                    int sz = this.robotsTable.size();
                    addEntry(robotsTxt4Host);
                    if (this.robotsTable.size() <= sz) {
                        Log.logSevere("RobotsTxt", "new entry in robots.txt table failed, resetting database");
                        this.resetDatabase();
                        addEntry(robotsTxt4Host);
                    }
                } else {
                    final robotsParser parserResult = new robotsParser((byte[]) result[DOWNLOAD_ROBOTS_TXT]);
                    ArrayList<String> denyPath = parserResult.denyList();
                    if (((Boolean) result[DOWNLOAD_ACCESS_RESTRICTED]).booleanValue()) {
                        // access was completely restricted (401/403): deny everything
                        denyPath = new ArrayList<String>();
                        denyPath.add("/");
                    }

                    // store the data into the robots DB
                    robotsTxt4Host = addEntry(
                            urlHostPort,
                            parserResult.allowList(),
                            denyPath,
                            new Date(),
                            (Date) result[DOWNLOAD_MODDATE],
                            (String) result[DOWNLOAD_ETAG],
                            parserResult.sitemap(),
                            parserResult.crawlDelayMillis());
                }
            }
        }
        return robotsTxt4Host;
    }
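
    /**
     * Returns the Crawl-delay value (in milliseconds) that the robots.txt of
     * the given URL's host requests, fetching the robots.txt first if needed.
     */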
    public long crawlDelayMillis(final DigestURI theURL) {
        final String urlHostPort = getHostPort(theURL);
        final RobotsEntry robotsEntry = getEntry(urlHostPort, true);
        return robotsEntry.getCrawlDelayMillis();
    }
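
    /**
     * Creates a RobotsEntry from parsed robots.txt data and persists it.
     */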
    private RobotsEntry addEntry(
            final String hostName,
            final ArrayList<String> allowPathList,
            final ArrayList<String> denyPathList,
            final Date loadedDate,
            final Date modDate,
            final String eTag,
            final String sitemap,
            final long crawlDelayMillis
    ) {
        final RobotsEntry entry = new RobotsEntry(
                hostName, allowPathList, denyPathList, loadedDate, modDate,
                eTag, sitemap, crawlDelayMillis);
        addEntry(entry);
        return entry;
    }

    private String addEntry(final RobotsEntry entry) {
        // writes a new page and returns the key
        try {
            this.robotsTable.put(entry.hostName, entry.mem);
            return entry.hostName;
        } catch (final Exception e) {
            Log.logException(e);
            return null;
        }
    }

    // methods that had been in robotsParser.java:
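
    // The following constants index the Object[] returned by downloadRobotsTxt:
    // { Boolean accessRestricted, byte[] robotsTxt, String eTag, Date modDate }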
    public static final int DOWNLOAD_ACCESS_RESTRICTED = 0;
    public static final int DOWNLOAD_ROBOTS_TXT = 1;
    public static final int DOWNLOAD_ETAG = 2;
    public static final int DOWNLOAD_MODDATE = 3;
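
    /**
     * Builds the lowercased "host:port" key used for robots table lookups;
     * default ports (80 for http, 443 for https) are made explicit.
     */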
    private static final String getHostPort(final DigestURI theURL) {
        String urlHostPort = null;
        final int port = getPort(theURL);
        urlHostPort = theURL.getHost() + ":" + port;
        urlHostPort = urlHostPort.toLowerCase().intern();
        return urlHostPort;
    }

    private static final int getPort(final DigestURI theURL) {
        int port = theURL.getPort();
        if (port == -1) {
            if (theURL.getProtocol().equalsIgnoreCase("http")) {
                port = 80;
            } else if (theURL.getProtocol().equalsIgnoreCase("https")) {
                port = 443;
            }
        }
        return port;
    }
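
    /**
     * Returns the sitemap URL declared in the host's robots.txt, or null if
     * none is declared or the declared URL is malformed.
     */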
    public DigestURI getSitemapURL(final DigestURI theURL) {
        if (theURL == null) throw new IllegalArgumentException();
        DigestURI sitemapURL = null;

        // generating the hostname:port string needed to do a DB lookup
        final String urlHostPort = getHostPort(theURL);
        final RobotsEntry robotsTxt4Host = this.getEntry(urlHostPort, true);

        try {
            final String sitemapUrlStr = robotsTxt4Host.getSitemap();
            if (sitemapUrlStr != null) sitemapURL = new DigestURI(sitemapUrlStr, null);
        } catch (final MalformedURLException e) { /* ignore this */ }

        return sitemapURL;
    }
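
    /**
     * Returns the Crawl-delay value (in milliseconds) for the given URL's
     * host, or null if none could be determined.
     */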
    public Long getCrawlDelayMillis(final DigestURI theURL) {
        if (theURL == null) throw new IllegalArgumentException();
        Long crawlDelay = null;

        // generating the hostname:port string needed to do a DB lookup
        final String urlHostPort = getHostPort(theURL);
        final RobotsEntry robotsTxt4Host = getEntry(urlHostPort, true);

        try {
            crawlDelay = robotsTxt4Host.getCrawlDelayMillis();
        } catch (final NumberFormatException e) { /* ignore this */ }

        return crawlDelay;
    }
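
    /**
     * Checks whether the given URL is excluded by its host's robots.txt,
     * downloading the robots.txt first if no fresh entry is cached.
     */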
    public boolean isDisallowed(final DigestURI nexturl) {
        if (nexturl == null) throw new IllegalArgumentException();

        // generating the hostname:port string needed to do a DB lookup
        final String urlHostPort = getHostPort(nexturl);
        final RobotsEntry robotsTxt4Host = getEntry(urlHostPort, true);
        return robotsTxt4Host.isDisallowed(nexturl.getFile());
    }
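
    /**
     * Downloads a robots.txt file, recursively following redirects until
     * redirectionCount is exhausted. Returns null if the file is unchanged
     * (matching ETag or HTTP 304); otherwise returns an Object[] indexed by
     * the DOWNLOAD_* constants above.
     */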
    private static Object[] downloadRobotsTxt(final DigestURI robotsURL, int redirectionCount, final RobotsEntry entry) throws Exception {
        // the result array needs four slots so that callers can safely access DOWNLOAD_MODDATE (index 3)
        if (redirectionCount < 0) return new Object[] { Boolean.FALSE, null, null, null };
        redirectionCount--;
        boolean accessCompletelyRestricted = false;
        byte[] robotsTxt = null;
        long downloadStart, downloadEnd;
        String eTag = null, oldEtag = null;
        Date lastMod = null;
        downloadStart = System.currentTimeMillis();

        final RequestHeader reqHeaders = new RequestHeader();

        // add yacybot user agent
        reqHeaders.put(HeaderFramework.USER_AGENT, HTTPLoader.crawlerUserAgent);

        // adding referer
        reqHeaders.put(RequestHeader.REFERER, (DigestURI.newURL(robotsURL, "/")).toNormalform(true, true));

        // if we previously have downloaded this robots.txt then we can set the if-modified-since header
        if (entry != null) {
            oldEtag = entry.getETag();
            final Date modDate = entry.getModDate();
            if (modDate != null) reqHeaders.put(RequestHeader.IF_MODIFIED_SINCE, DateFormatter.formatRFC1123(modDate));
        }

        // setup http-client
        //TODO: adding traffic statistics for robots downloads?
        final Client client = new Client(10000, reqHeaders);
        ResponseContainer res = null;
        try {
            // sending the get request
            res = client.GET(robotsURL.toString());

            // check for interruption
            if (Thread.currentThread().isInterrupted()) throw new InterruptedException("Shutdown in progress.");

            // check the response status
            if (res.getStatusLine().startsWith("2")) {
                if (!res.getResponseHeader().mime().startsWith("text/plain")) {
                    robotsTxt = null;
                    if (log.isFinest()) log.logFinest("robots.txt from URL '" + robotsURL + "' has wrong mimetype '" + res.getResponseHeader().mime() + "'.");
                } else {
                    // getting some metadata
                    eTag = res.getResponseHeader().containsKey(HeaderFramework.ETAG) ? (res.getResponseHeader().get(HeaderFramework.ETAG)).trim() : null;
                    lastMod = res.getResponseHeader().lastModified();

                    // if the robots.txt file was not changed we break here
                    if ((eTag != null) && (oldEtag != null) && (eTag.equals(oldEtag))) {
                        if (log.isFinest()) log.logFinest("robots.txt from URL '" + robotsURL + "' was not modified. Abort downloading of new version.");
                        return null;
                    }

                    // downloading the content
                    final ByteBuffer sbb = new ByteBuffer();
                    try {
                        FileUtils.copyToStream(new BufferedInputStream(res.getDataAsStream()), new BufferedOutputStream(sbb));
                    } finally {
                        res.closeStream();
                    }
                    robotsTxt = sbb.getBytes();

                    downloadEnd = System.currentTimeMillis();
                    if (log.isFinest()) log.logFinest("robots.txt successfully loaded from URL '" + robotsURL + "' in " + (downloadEnd - downloadStart) + " ms.");
                }
            } else if (res.getStatusCode() == 304) {
                return null;
            } else if (res.getStatusLine().startsWith("3")) {
                // getting the redirection URL
                String redirectionUrlString = res.getResponseHeader().get(HeaderFramework.LOCATION);
                if (redirectionUrlString == null) {
                    if (log.isFinest()) log.logFinest("robots.txt could not be downloaded from URL '" + robotsURL + "' because of a missing redirection header. [" + res.getStatusLine() + "].");
                    robotsTxt = null;
                } else {
                    redirectionUrlString = redirectionUrlString.trim();

                    // generating the new URL object
                    final DigestURI redirectionUrl = DigestURI.newURL(robotsURL, redirectionUrlString);

                    // following the redirection
                    if (log.isFinest()) log.logFinest("Redirection detected for robots.txt with URL '" + robotsURL + "'." +
                            "\nRedirecting request to: " + redirectionUrl);
                    return downloadRobotsTxt(redirectionUrl, redirectionCount, entry);
                }
            } else if (res.getStatusCode() == 401 || res.getStatusCode() == 403) {
                // access to the whole domain is restricted: treat everything as disallowed
                accessCompletelyRestricted = true;
                if (log.isFinest()) log.logFinest("Access to robots.txt not allowed on URL '" + robotsURL + "'.");
            } else {
                if (log.isFinest()) log.logFinest("robots.txt could not be downloaded from URL '" + robotsURL + "'. [" + res.getStatusLine() + "].");
                robotsTxt = null;
            }
        } finally {
            if (res != null) {
                // release the connection
                res.closeStream();
            }
        }
        return new Object[] { Boolean.valueOf(accessCompletelyRestricted), robotsTxt, eTag, lastMod };
    }

}