// CrawlStacker.java
// -----------------------
// part of YaCy
// (C) by Michael Peter Christen; mc@yacy.net
// first published on http://www.anomic.de
// Frankfurt, Germany, 2005
//
// This file was contributed by Martin Thelian
// ([MC] removed all multithreading and thread pools, this is not necessary here; complete renovation 2007)
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

package net.yacy.crawler;

import java.io.IOException;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.atomic.AtomicInteger;

import net.yacy.contentcontrol.ContentControlFilterUpdateThread;
import net.yacy.cora.document.analysis.Classification.ContentDomain;
import net.yacy.cora.document.encoding.ASCII;
import net.yacy.cora.document.encoding.UTF8;
import net.yacy.cora.document.id.AnchorURL;
import net.yacy.cora.document.id.DigestURL;
import net.yacy.cora.document.id.MultiProtocolURL;
import net.yacy.cora.federate.solr.FailCategory;
import net.yacy.cora.order.Base64Order;
import net.yacy.cora.protocol.Domains;
import net.yacy.cora.protocol.ftp.FTPClient;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.CrawlProfile;
import net.yacy.crawler.data.CrawlQueues;
import net.yacy.crawler.data.NoticedURL;
import net.yacy.crawler.retrieval.FTPLoader;
import net.yacy.crawler.retrieval.HTTPLoader;
import net.yacy.crawler.retrieval.Request;
import net.yacy.crawler.retrieval.SMBLoader;
import net.yacy.crawler.robots.RobotsTxt;
import net.yacy.document.TextParser;
import net.yacy.kelondro.data.citation.CitationReference;
import net.yacy.kelondro.workflow.WorkflowProcessor;
import net.yacy.peers.SeedDB;
import net.yacy.repository.Blacklist.BlacklistType;
import net.yacy.repository.FilterEngine;
import net.yacy.search.Switchboard;
import net.yacy.search.index.Segment;
import net.yacy.search.schema.CollectionConfiguration;

public final class CrawlStacker {

    public static String ERROR_NO_MATCH_MUST_MATCH_FILTER = "url does not match must-match filter ";
    public static String ERROR_MATCH_WITH_MUST_NOT_MATCH_FILTER = "url matches must-not-match filter ";

    private final static ConcurrentLog log = new ConcurrentLog("STACKCRAWL");

    private final RobotsTxt robots;
    private final WorkflowProcessor<Request> requestQueue;
    public final CrawlQueues nextQueue;
    private final CrawlSwitchboard crawler;
    private final Segment indexSegment;
    private final SeedDB peers;
    private final boolean acceptLocalURLs, acceptGlobalURLs;
    private final FilterEngine domainList;

    // this is the process that checks urls for double-occurrences and for allowance/disallowance by robots.txt
    public CrawlStacker(
            final RobotsTxt robots,
            final CrawlQueues cq,
            final CrawlSwitchboard cs,
            final Segment indexSegment,
            final SeedDB peers,
            final boolean acceptLocalURLs,
            final boolean acceptGlobalURLs,
            final FilterEngine domainList) {
        this.robots = robots;
        this.nextQueue = cq;
        this.crawler = cs;
        this.indexSegment = indexSegment;
        this.peers = peers;
        this.acceptLocalURLs = acceptLocalURLs;
        this.acceptGlobalURLs = acceptGlobalURLs;
        this.domainList = domainList;
        this.requestQueue = new WorkflowProcessor<Request>(
                "CrawlStacker",
                "This process checks new urls before they are enqueued into the balancer (proper, double-check, correct domain, filter)",
                new String[]{"Balancer"}, this, "job", 10000, null,
                WorkflowProcessor.availableCPU);
        CrawlStacker.log.info("STACKCRAWL thread initialized.");
    }
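
    /**
     * @return the number of requests currently waiting in the stacker queue
     */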
    public int size() {
        return this.requestQueue.getQueueSize();
    }
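
    /**
     * @return true if no request is waiting in the stacker queue
     */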
    public boolean isEmpty() {
        return this.requestQueue.queueIsEmpty();
    }
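
    /**
     * remove all entries from the stacker queue
     */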
    public void clear() {
        this.requestQueue.clear();
    }
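
    /**
     * announce a shutdown: the request queue is signalled to shut down
     * so that remaining job entries can be flushed
     */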
    public void announceClose() {
        CrawlStacker.log.info("Flushing remaining " + size() + " crawl stacker job entries.");
        this.requestQueue.shutdown();
    }
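
    /**
     * shut down the stacker and clear the request queue
     */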
    public synchronized void close() {
        CrawlStacker.log.info("Shutdown. waiting for remaining " + size() + " crawl stacker job entries. please wait.");
        this.requestQueue.shutdown();

        CrawlStacker.log.info("Shutdown. Closing stackCrawl queue.");

        clear();
    }
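
    /**
     * worker method for the WorkflowProcessor: check one request and either
     * push it onto a crawl stack or store a rejection reason into the error URL db
     * @param entry the crawl request to be checked
     * @return always null because requests are not passed on to another processor
     */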
    public Request job(final Request entry) {
        // this is the method that is called by the busy thread from outside
        if (entry == null) return null;

        // record the link graph for this request; this can be overwritten, replaced and enhanced by an index writing process in Segment.storeDocument
        byte[] anchorhash = entry.url().hash();
        if (entry.referrerhash() != null) {
            if (this.indexSegment.connectedCitation()) try {
                this.indexSegment.urlCitation().add(anchorhash, new CitationReference(entry.referrerhash(), entry.appdate().getTime()));
            } catch (final Exception e) {
                ConcurrentLog.logException(e);
            }
            // TODO: write to webgraph??
        }

        try {
            final String rejectReason = stackCrawl(entry);
            // if the url was rejected we store it into the error URL db
            if (rejectReason != null && !rejectReason.startsWith("double in")) {
                final CrawlProfile profile = this.crawler.get(UTF8.getBytes(entry.profileHandle()));
                this.nextQueue.errorURL.push(entry.url(), profile, FailCategory.FINAL_LOAD_CONTEXT, rejectReason, -1);
            }
        } catch (final Exception e) {
            CrawlStacker.log.warn("Error while processing stackCrawl entry.\nEntry: " + entry.toString() + "\nError: " + e.toString(), e);
            return null;
        }
        return null;
    }
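
    /**
     * enqueue a crawl request into the stacker queue; the request is then checked
     * asynchronously by the job() worker before it reaches the balancer
     * @param entry the crawl request
     */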
    public void enqueueEntry(final Request entry) {

        // DEBUG
        if (CrawlStacker.log.isFinest()) CrawlStacker.log.finest("ENQUEUE " + entry.url() + ", referer=" + entry.referrerhash() + ", initiator=" + ((entry.initiator() == null) ? "" : ASCII.String(entry.initiator())) + ", name=" + entry.name() + ", appdate=" + entry.appdate() + ", depth=" + entry.depth());
        this.requestQueue.enQueue(entry);
    }
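
    /**
     * enqueue a list of hyperlinks in a separate thread; old index and queue
     * entries for these urls are removed first to force a re-load
     * @param initiator the hash of the initiator peer
     * @param profileHandle the handle of the crawl profile
     * @param hyperlinks the urls to be crawled
     */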
    public void enqueueEntriesAsynchronous(final byte[] initiator, final String profileHandle, final List<AnchorURL> hyperlinks) {
        new Thread() {
            @Override
            public void run() {
                Thread.currentThread().setName("enqueueEntriesAsynchronous");
                enqueueEntries(initiator, profileHandle, hyperlinks, true);
            }
        }.start();
    }
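
    /**
     * enqueue a list of hyperlinks; ftp urls are expanded into a crawl of the whole ftp site
     * @param initiator the hash of the initiator peer
     * @param profileHandle the handle of the crawl profile
     * @param hyperlinks the urls to be crawled
     * @param replace if true, delete old index and queue entries for these urls to force a re-load
     */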
    private void enqueueEntries(final byte[] initiator, final String profileHandle, final List<AnchorURL> hyperlinks, final boolean replace) {
        if (replace) {
            // delete old entries, if they exist, to force a re-load of the url (that's wanted here)
            Set<String> hosthashes = new HashSet<String>();
            for (final AnchorURL url: hyperlinks) {
                if (url == null) continue;
                final byte[] urlhash = url.hash();
                // the host hash is the 6-byte suffix of the 12-byte url hash
                byte[] hosthash = new byte[6];
                System.arraycopy(urlhash, 6, hosthash, 0, 6);
                hosthashes.add(ASCII.String(hosthash));
            }
            this.nextQueue.errorURL.removeHosts(hosthashes);
        }
        for (final AnchorURL url: hyperlinks) {
            if (url == null) continue;

            // delete the old entry, if it exists, to force a re-load of the url (that's wanted here)
            final byte[] urlhash = url.hash();
            if (replace) {
                this.indexSegment.fulltext().remove(urlhash);
                String u = url.toNormalform(true);
                if (u.endsWith("/")) {
                    u = u + "index.html";
                } else if (!u.contains(".")) {
                    u = u + "/index.html";
                }
                try {
                    final byte[] uh = new DigestURL(u).hash();
                    this.indexSegment.fulltext().remove(uh);
                    this.nextQueue.noticeURL.removeByURLHash(uh);
                } catch (final MalformedURLException e1) {}
            }

            if (url.getProtocol().equals("ftp")) {
                // put the whole ftp site on the crawl stack
                String userInfo = url.getUserInfo();
                int p = userInfo == null ? -1 : userInfo.indexOf(':');
                // guard against user info without a ':' separator (substring would fail for p == -1)
                String user = userInfo == null ? FTPClient.ANONYMOUS : (p == -1 ? userInfo : userInfo.substring(0, p));
                String pw = userInfo == null || p == -1 ? "anomic" : userInfo.substring(p + 1);
                enqueueEntriesFTP(initiator, profileHandle, url.getHost(), url.getPort(), user, pw, replace);
            } else {
                // put entry on crawl stack
                enqueueEntry(new Request(
                        initiator,
                        url,
                        null,
                        url.getNameProperty(),
                        new Date(),
                        profileHandle,
                        0,
                        0,
                        0,
                        0
                        ));
            }
        }
    }
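
    /**
     * crawl the whole content of an ftp site by listing the site and enqueueing every entry
     * @param initiator the hash of the initiator peer
     * @param profileHandle the handle of the crawl profile
     * @param host the ftp host
     * @param port the ftp port
     * @param user the ftp user name
     * @param pw the ftp password
     * @param replace if true, delete old index and queue entries to force a re-load
     */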
    public void enqueueEntriesFTP(final byte[] initiator, final String profileHandle, final String host, final int port, final String user, final String pw, final boolean replace) {
        final CrawlQueues cq = this.nextQueue;
        new Thread() {
            @Override
            public void run() {
                Thread.currentThread().setName("enqueueEntriesFTP");
                BlockingQueue<FTPClient.entryInfo> queue;
                try {
                    queue = FTPClient.sitelist(host, port, user, pw);
                    FTPClient.entryInfo entry;
                    while ((entry = queue.take()) != FTPClient.POISON_entryInfo) {

                        // delete the old entry, if it exists, to force a re-load of the url (that's wanted here)
                        DigestURL url = null;
                        try {
                            url = new DigestURL("ftp://" + user + ":" + pw + "@" + host + (port == 21 ? "" : ":" + port) + MultiProtocolURL.escape(entry.name));
                        } catch (final MalformedURLException e) {
                            continue;
                        }
                        final byte[] urlhash = url.hash();
                        if (replace) {
                            CrawlStacker.this.indexSegment.fulltext().remove(urlhash);
                            cq.noticeURL.removeByURLHash(urlhash);
                        }

                        // put entry on crawl stack
                        enqueueEntry(new Request(
                                initiator,
                                url,
                                null,
                                MultiProtocolURL.unescape(entry.name),
                                entry.date,
                                profileHandle,
                                0,
                                0,
                                0,
                                entry.size
                                ));
                    }
                } catch (final IOException e1) {
                    ConcurrentLog.logException(e1);
                } catch (final InterruptedException e) {
                    // queue processing was interrupted; terminate this thread
                }
            }
        }.start();
    }

    /**
     * simple method to add one url as a crawl job
     * @param url
     * @return null if successful, a reason string if not successful
     */
    public String stackSimpleCrawl(final DigestURL url) {
        final CrawlProfile pe = this.crawler.defaultSurrogateProfile;
        return stackCrawl(new Request(
                this.peers.mySeed().hash.getBytes(),
                url,
                null,
                "CRAWLING-ROOT",
                new Date(),
                pe.handle(),
                0,
                0,
                0,
                0
                ));
    }
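
    /* A minimal usage sketch (hypothetical caller code, not part of this class;
     * "sb" stands for an already initialized Switchboard instance):
     *
     *   final String rejectReason = sb.crawlStacker.stackSimpleCrawl(new DigestURL("http://example.org/"));
     *   if (rejectReason != null) ConcurrentLog.warn("EXAMPLE", "url was not stacked: " + rejectReason);
     *
     * Note that the DigestURL(String) constructor may throw a MalformedURLException
     * that the caller must handle.
     */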

    /**
     * stacks a crawl item. The position can also be remote
     * @param entry
     * @return null if successful, a reason string if not successful
     */
    public String stackCrawl(final Request entry) {
        //this.log.logFinest("stackCrawl: nexturlString='" + nexturlString + "'");

        byte[] handle = UTF8.getBytes(entry.profileHandle());
        final CrawlProfile profile = this.crawler.get(handle);
        String error;
        if (profile == null) {
            error = "LOST STACKER PROFILE HANDLE '" + entry.profileHandle() + "' for URL " + entry.url();
            CrawlStacker.log.warn(error);
            return error;
        }

        error = checkAcceptanceChangeable(entry.url(), profile, entry.depth());
        if (error != null) return error;
        error = checkAcceptanceInitially(entry.url(), profile);
        if (error != null) return error;

        // store information
        final boolean local = Base64Order.enhancedCoder.equal(entry.initiator(), UTF8.getBytes(this.peers.mySeed().hash));
        final boolean proxy = (entry.initiator() == null || entry.initiator().length == 0 || ASCII.String(entry.initiator()).equals("------------")) && profile.handle().equals(this.crawler.defaultProxyProfile.handle());
        final boolean remote = profile.handle().equals(this.crawler.defaultRemoteProfile.handle());
        final boolean global =
            (profile.remoteIndexing()) /* granted */ &&
            (entry.depth() == profile.depth()) /* leaf node */ &&
            //(initiatorHash.equals(yacyCore.seedDB.mySeed.hash)) /* not proxy */ &&
            (
                (this.peers.mySeed().isSenior()) ||
                (this.peers.mySeed().isPrincipal())
            ) /* qualified */;

        if (!local && !global && !remote && !proxy) {
            error = "URL '" + entry.url().toString() + "' cannot be crawled. initiator = " + ((entry.initiator() == null) ? "" : ASCII.String(entry.initiator())) + ", profile.handle = " + profile.handle();
            CrawlStacker.log.severe(error);
            return error;
        }

        long maxFileSize = Long.MAX_VALUE;
        if (!entry.isEmpty()) {
            final String protocol = entry.url().getProtocol();
            if (protocol.equals("http") || protocol.equals("https")) maxFileSize = Switchboard.getSwitchboard().getConfigLong("crawler.http.maxFileSize", HTTPLoader.DEFAULT_MAXFILESIZE);
            if (protocol.equals("ftp")) maxFileSize = Switchboard.getSwitchboard().getConfigLong("crawler.ftp.maxFileSize", FTPLoader.DEFAULT_MAXFILESIZE);
            if (protocol.equals("smb")) maxFileSize = Switchboard.getSwitchboard().getConfigLong("crawler.smb.maxFileSize", SMBLoader.DEFAULT_MAXFILESIZE);
        }

        // check availability of parser and maxfilesize
        String warning = null;
        ContentDomain contentDomain = entry.url().getContentDomainFromExt();
        if ((maxFileSize >= 0 && entry.size() > maxFileSize) ||
            contentDomain == ContentDomain.APP ||
            (contentDomain == ContentDomain.IMAGE && TextParser.supportsExtension(entry.url()) != null) ||
            contentDomain == ContentDomain.AUDIO ||
            contentDomain == ContentDomain.VIDEO ||
            contentDomain == ContentDomain.CTRL) {
            warning = this.nextQueue.noticeURL.push(NoticedURL.StackType.NOLOAD, entry, profile, this.robots);
            //if (warning != null && this.log.isFine()) this.log.logFine("CrawlStacker.stackCrawl of URL " + entry.url().toNormalform(true, false) + " - not pushed: " + warning);
            return null;
        }

        if (global) {
            // it may be possible that global == true and local == true, so do not check an error case against it
            if (proxy) CrawlStacker.log.warn("URL '" + entry.url().toString() + "' has conflicting initiator properties: global = true, proxy = true, initiator = proxy" + ", profile.handle = " + profile.handle());
            if (remote) CrawlStacker.log.warn("URL '" + entry.url().toString() + "' has conflicting initiator properties: global = true, remote = true, initiator = " + ASCII.String(entry.initiator()) + ", profile.handle = " + profile.handle());
            warning = this.nextQueue.noticeURL.push(NoticedURL.StackType.GLOBAL, entry, profile, this.robots);
        } else if (local) {
            if (proxy) CrawlStacker.log.warn("URL '" + entry.url().toString() + "' has conflicting initiator properties: local = true, proxy = true, initiator = proxy" + ", profile.handle = " + profile.handle());
            if (remote) CrawlStacker.log.warn("URL '" + entry.url().toString() + "' has conflicting initiator properties: local = true, remote = true, initiator = " + ASCII.String(entry.initiator()) + ", profile.handle = " + profile.handle());
            warning = this.nextQueue.noticeURL.push(NoticedURL.StackType.LOCAL, entry, profile, this.robots);
        } else if (proxy) {
            if (remote) CrawlStacker.log.warn("URL '" + entry.url().toString() + "' has conflicting initiator properties: proxy = true, remote = true, initiator = " + ASCII.String(entry.initiator()) + ", profile.handle = " + profile.handle());
            warning = this.nextQueue.noticeURL.push(NoticedURL.StackType.LOCAL, entry, profile, this.robots);
        } else if (remote) {
            warning = this.nextQueue.noticeURL.push(NoticedURL.StackType.REMOTE, entry, profile, this.robots);
        }
        if (warning != null && CrawlStacker.log.isFine()) CrawlStacker.log.fine("CrawlStacker.stackCrawl of URL " + entry.url().toNormalform(true) + " - not pushed: " + warning);

        return null;
    }

    /**
     * Test if a url shall be accepted for crawling using attributes that stay consistent for the whole crawl.
     * These tests are incomplete and must be followed by a checkAcceptanceChangeable test.
     * @param url
     * @param profile
     * @return null if the url is accepted, otherwise an error string describing why it was rejected
     */
    public String checkAcceptanceInitially(final DigestURL url, final CrawlProfile profile) {

        final String urlstring = url.toString();

        // check if the url is double registered
        String urlhash = ASCII.String(url.hash());
        final HarvestProcess dbocc = this.nextQueue.exists(url.hash()); // returns the name of the queue if the entry exists
        final Date oldDate = this.indexSegment.fulltext().getLoadDate(urlhash); // TODO: combine the exists-query with this one
        if (oldDate == null) {
            if (dbocc != null) {
                // do double-check
                if (dbocc == HarvestProcess.ERRORS) {
                    final CollectionConfiguration.FailDoc errorEntry = this.nextQueue.errorURL.get(urlhash);
                    return "double in: errors (" + errorEntry.getFailReason() + ")";
                }
                return "double in: " + dbocc.toString();
            }
        } else {
            final boolean recrawl = profile.recrawlIfOlder() > oldDate.getTime();
            if (recrawl) {
                if (CrawlStacker.log.isInfo())
                    CrawlStacker.log.info("RE-CRAWL of URL '" + urlstring + "': this url was crawled " +
                        ((System.currentTimeMillis() - oldDate.getTime()) / 60000 / 60 / 24) + " days ago.");
            } else {
                if (dbocc == null) {
                    return "double in: LURL-DB, oldDate = " + oldDate.toString();
                }
                if (dbocc == HarvestProcess.ERRORS) {
                    final CollectionConfiguration.FailDoc errorEntry = this.nextQueue.errorURL.get(urlhash);
                    if (CrawlStacker.log.isInfo()) CrawlStacker.log.info("URL '" + urlstring + "' is double registered in '" + dbocc.toString() + "', previous cause: " + errorEntry.getFailReason());
                    return "double in: errors (" + errorEntry.getFailReason() + "), oldDate = " + oldDate.toString();
                }
                if (CrawlStacker.log.isInfo()) CrawlStacker.log.info("URL '" + urlstring + "' is double registered in '" + dbocc.toString() + "'.");
                return "double in: " + dbocc.toString() + ", oldDate = " + oldDate.toString();
            }
        }

        // deny urls that exceed the allowed number of occurrences per domain
        final int maxAllowedPagesPerDomain = profile.domMaxPages();
        if (maxAllowedPagesPerDomain < Integer.MAX_VALUE && maxAllowedPagesPerDomain > 0) {
            final AtomicInteger dp = profile.getCount(url.getHost());
            if (dp != null && dp.get() >= maxAllowedPagesPerDomain) {
                if (CrawlStacker.log.isFine()) CrawlStacker.log.fine("URL '" + urlstring + "' appeared too often in crawl stack, a maximum of " + maxAllowedPagesPerDomain + " is allowed.");
                return "crawl stack domain counter exceeded (test by profile)";
            }

            /*
            if (ResultURLs.domainCount(EventOrigin.LOCAL_CRAWLING, url.getHost()) >= maxAllowedPagesPerDomain) {
                if (this.log.isFine()) this.log.fine("URL '" + urlstring + "' appeared too often in result stack, a maximum of " + maxAllowedPagesPerDomain + " is allowed.");
                return "result stack domain counter exceeded (test by domainCount)";
            }
            */
        }

        return null;
    }

    /**
     * Test if a url shall be accepted using attributes that are defined by a crawl start but can be changed during a crawl.
     * @param url
     * @param profile
     * @param depth
     * @return null if the url is accepted, otherwise an error string describing why it was rejected
     */
    public String checkAcceptanceChangeable(final DigestURL url, final CrawlProfile profile, final int depth) {

        // check if the protocol is supported
        final String urlProtocol = url.getProtocol();
        final String urlstring = url.toString();
        if (!Switchboard.getSwitchboard().loader.isSupportedProtocol(urlProtocol)) {
            CrawlStacker.log.severe("Unsupported protocol in URL '" + urlstring + "'.");
            return "unsupported protocol";
        }

        // check if the ip is a local ip address
        final String urlRejectReason = urlInAcceptedDomain(url);
        if (urlRejectReason != null) {
            if (CrawlStacker.log.isFine()) CrawlStacker.log.fine("denied_(" + urlRejectReason + ")");
            return "denied_(" + urlRejectReason + ")";
        }

        // check blacklist
        if (Switchboard.urlBlacklist.isListed(BlacklistType.CRAWLER, url)) {
            CrawlStacker.log.fine("URL '" + urlstring + "' is in blacklist.");
            return "url in blacklist";
        }

        // filter with must-match for URLs
        if ((depth > 0) && !profile.urlMustMatchPattern().matcher(urlstring).matches()) {
            if (CrawlStacker.log.isFine()) CrawlStacker.log.fine("URL '" + urlstring + "' does not match must-match crawling filter '" + profile.urlMustMatchPattern().toString() + "'.");
            return ERROR_NO_MATCH_MUST_MATCH_FILTER + profile.urlMustMatchPattern().toString();
        }

        // filter with must-not-match for URLs
        if ((depth > 0) && profile.urlMustNotMatchPattern().matcher(urlstring).matches()) {
            if (CrawlStacker.log.isFine()) CrawlStacker.log.fine("URL '" + urlstring + "' matches must-not-match crawling filter '" + profile.urlMustNotMatchPattern().toString() + "'.");
            return ERROR_MATCH_WITH_MUST_NOT_MATCH_FILTER + profile.urlMustNotMatchPattern().toString();
        }

        // deny cgi
        if (url.isIndividual() && !profile.crawlingQ()) { // TODO: make special property for crawlingIndividual
            if (CrawlStacker.log.isFine()) CrawlStacker.log.fine("URL '" + urlstring + "' is CGI URL.");
            return "individual url (sessionid etc) not wanted";
        }

        // deny post properties
        if (url.isPOST() && !profile.crawlingQ()) {
            if (CrawlStacker.log.isFine()) CrawlStacker.log.fine("URL '" + urlstring + "' is post URL.");
            return "post url not allowed";
        }

        // the following filters use a DNS lookup to check if the url matches with an IP filter;
        // this is expensive, so these filters are checked at the end of all other tests

        // filter with must-match for IPs
        if ((depth > 0) && profile.ipMustMatchPattern() != CrawlProfile.MATCH_ALL_PATTERN && url.getHost() != null && !profile.ipMustMatchPattern().matcher(url.getInetAddress().getHostAddress()).matches()) {
            if (CrawlStacker.log.isFine()) CrawlStacker.log.fine("IP " + url.getInetAddress().getHostAddress() + " of URL '" + urlstring + "' does not match must-match crawling filter '" + profile.ipMustMatchPattern().toString() + "'.");
            return "ip " + url.getInetAddress().getHostAddress() + " of url does not match must-match filter";
        }

        // filter with must-not-match for IPs
        if ((depth > 0) && profile.ipMustNotMatchPattern() != CrawlProfile.MATCH_NEVER_PATTERN && url.getHost() != null && profile.ipMustNotMatchPattern().matcher(url.getInetAddress().getHostAddress()).matches()) {
            if (CrawlStacker.log.isFine()) CrawlStacker.log.fine("IP " + url.getInetAddress().getHostAddress() + " of URL '" + urlstring + "' matches must-not-match crawling filter '" + profile.ipMustNotMatchPattern().toString() + "'.");
            return "ip " + url.getInetAddress().getHostAddress() + " of url matches must-not-match filter";
        }

        // filter with must-match for country codes
        final String[] countryMatchList = profile.countryMustMatchList();
        if (depth > 0 && countryMatchList != null && countryMatchList.length > 0) {
            final Locale locale = url.getLocale();
            if (locale != null) {
                final String c0 = locale.getCountry();
                boolean granted = false;
                matchloop: for (final String c: countryMatchList) {
                    if (c0.equals(c)) {
                        granted = true;
                        break matchloop;
                    }
                }
                if (!granted) {
                    if (CrawlStacker.log.isFine()) CrawlStacker.log.fine("country " + c0 + " of URL '" + urlstring + "' does not match the must-match filter for countries.");
                    return "country " + c0 + " of url does not match must-match filter for countries";
                }
            }
        }

        return null;
    }

    /**
     * Test if a url can be used for crawling/indexing.
     * This mainly checks if the url is in the declared domain (local/global)
     * @param url
     * @return null if the url can be accepted, a string containing a rejection reason if the url cannot be accepted
     */
    public String urlInAcceptedDomain(final DigestURL url) {
        // returns null if the url can be accepted according to network.unit.domain
        if (url == null) return "url is null";

        // check domainList from network-definition
        if (this.domainList != null) {
            if (!this.domainList.isListed(url, null)) {
                return "the url '" + url + "' is not in domainList of this network";
            }
        }

        if (Switchboard.getSwitchboard().getConfigBool("contentcontrol.enabled", false)) {
            if (!Switchboard.getSwitchboard().getConfig("contentcontrol.mandatoryfilterlist", "").equals("")) {
                FilterEngine f = ContentControlFilterUpdateThread.getNetworkFilter();
                if (f != null) {
                    if (!f.isListed(url, null)) {
                        return "the url '" + url + "' does not belong to the network mandatory filter list";
                    }
                }
            }
        }

        final boolean local = url.isLocal();
        if (this.acceptLocalURLs && local) return null;
        if (this.acceptGlobalURLs && !local) return null;
        final String host = url.getHost();
        if (host == null) return "url.host is null";

        // check if this is a local address and we are allowed to index local pages:
        //boolean local = hostAddress.isSiteLocalAddress() || hostAddress.isLoopbackAddress();
        //assert local == yacyURL.isLocalDomain(url.hash()); // TODO: remove the dnsResolve above!
        final InetAddress ia = Domains.dnsResolve(host);
        return (local) ?
            ("the host '" + host + "' is local, but local addresses are not accepted: " + ((ia == null) ? "null" : ia.getHostAddress())) :
            ("the host '" + host + "' is global, but global addresses are not accepted: " + ((ia == null) ? "null" : ia.getHostAddress()));
    }
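
    /**
     * Test if a url hash is within the accepted domain (local/global) without a DNS lookup.
     * @param urlhash
     * @return null if the url hash can be accepted, a string containing a rejection reason otherwise
     */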
    public String urlInAcceptedDomainHash(final byte[] urlhash) {
        // returns null if the url hash can be accepted according to network.unit.domain
        if (urlhash == null) return "url is null";
        // check if this is a local address and we are allowed to index local pages:
        final boolean local = DigestURL.isLocal(urlhash);
        if (this.acceptLocalURLs && local) return null;
        if (this.acceptGlobalURLs && !local) return null;
        return (local) ?
            ("the urlhash '" + ASCII.String(urlhash) + "' is local, but local addresses are not accepted") :
            ("the urlhash '" + ASCII.String(urlhash) + "' is global, but global addresses are not accepted");
    }
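
    /**
     * @return true if this stacker accepts urls from the local (intranet) domain
     */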
    public boolean acceptLocalURLs() {
        return this.acceptLocalURLs;
    }
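
    /**
     * @return true if this stacker accepts urls from the global (internet) domain
     */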
    public boolean acceptGlobalURLs() {
        return this.acceptGlobalURLs;
    }
}