// Crawler_p.java
// (C) 2006 by Michael Peter Christen; mc@yacy.net, Frankfurt a. M., Germany
// first published 18.12.2006 on http://www.anomic.de
// this file was created using an implementation from IndexCreate_p.java, published 02.12.2004
//
// This is a part of YaCy, a peer-to-peer based web search engine
//
// $LastChangedDate$
// $LastChangedRevision$
// $LastChangedBy$
//
// LICENSE
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

import java.io.File;
import java.io.FileInputStream;
import java.io.Writer;
import java.net.MalformedURLException;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;

import net.yacy.cora.document.MultiProtocolURI;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.services.federated.yacy.CacheStrategy;
import net.yacy.document.parser.html.ContentScraper;
import net.yacy.document.parser.html.TransformerWriter;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.util.FileUtils;
import net.yacy.peers.NewsPool;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
import net.yacy.search.index.Segment;
import net.yacy.search.index.Segments;

import de.anomic.crawler.CrawlProfile;
import de.anomic.crawler.SitemapImporter;
import de.anomic.crawler.ZURL.FailCategory;
import de.anomic.crawler.retrieval.Request;
import de.anomic.data.BookmarkHelper;
import de.anomic.data.BookmarksDB;
import de.anomic.data.ListManager;
import de.anomic.data.WorkTables;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;

public class Crawler_p {

    // this servlet does NOT create the Crawler servlet page content!
    // this servlet starts a web crawl. The interface for entering the web crawl parameters is in IndexCreate_p.html

    public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
        final Switchboard sb = (Switchboard) env;

        // "prop" is the return variable that accumulates template replacements;
        // these are the initial values for the AJAX elements (without JavaScript)
        final serverObjects prop = new serverObjects();
        prop.put("rejected", 0);
        prop.put("urlpublictextSize", 0);
        prop.put("rwipublictextSize", 0);
        prop.put("list", "0");
        prop.put("loaderSize", 0);
        prop.put("loaderMax", 0);
        prop.put("list-loader", 0);
        prop.put("localCrawlSize", sb.crawlQueues.coreCrawlJobSize());
        prop.put("localCrawlState", "");
        prop.put("limitCrawlSize", sb.crawlQueues.limitCrawlJobSize());
        prop.put("limitCrawlState", "");
        prop.put("remoteCrawlSize", sb.crawlQueues.remoteTriggeredCrawlJobSize());
        prop.put("remoteCrawlState", "");
        prop.put("list-remote", 0);
        prop.put("forwardToCrawlStart", "0");

        // get segment
        Segment indexSegment = null;
        if (post != null && post.containsKey("segment")) {
            final String segmentName = post.get("segment");
            if (sb.indexSegments.segmentExist(segmentName)) {
                indexSegment = sb.indexSegments.segment(segmentName);
            }
        } else {
            // take default segment
            indexSegment = sb.indexSegments.segment(Segments.Process.PUBLIC);
        }

        prop.put("info", "0");

        if (post != null && post.containsKey("continue")) {
            // continue queue
            final String queue = post.get("continue", "");
            if ("localcrawler".equals(queue)) {
                sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
            } else if ("remotecrawler".equals(queue)) {
                sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL);
            }
        }

        if (post != null && post.containsKey("pause")) {
            // pause queue
            final String queue = post.get("pause", "");
            if ("localcrawler".equals(queue)) {
                sb.pauseCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
            } else if ("remotecrawler".equals(queue)) {
                sb.pauseCrawlJob(SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL);
            }
        }
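
        // Illustrative usage (hypothetical requests against this servlet):
        //   Crawler_p.html?pause=localcrawler      pauses the local crawler queue
        //   Crawler_p.html?continue=localcrawler   resumes it again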

        if (post != null && post.containsKey("crawlingstart")) {
            // init crawl
            if (sb.peers == null) {
                prop.put("info", "3");
            } else {
                String crawlingStart = post.get("crawlingURL", "").trim(); // the crawljob start url
                // add the prefix http:// if necessary
                int pos = crawlingStart.indexOf("://");
                if (pos == -1) {
                    if (crawlingStart.startsWith("www")) crawlingStart = "http://" + crawlingStart;
                    if (crawlingStart.startsWith("ftp")) crawlingStart = "ftp://" + crawlingStart;
                }
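                // Example (illustrative): an entry like "www.example.org/docs" contains
                // no "://" and is completed to "http://www.example.org/docs"; an entry
                // starting with "ftp", e.g. "ftp.example.org", becomes "ftp://ftp.example.org".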

                // remove crawlingFileContent before we record the call
                final String crawlingFileName = post.get("crawlingFile");
                final File crawlingFile = (crawlingFileName != null && crawlingFileName.length() > 0) ? new File(crawlingFileName) : null;
                if (crawlingFile != null && crawlingFile.exists()) {
                    post.remove("crawlingFile$file");
                }

                // normalize URL
                DigestURI crawlingStartURL = null;
                if (crawlingFile == null) try { crawlingStartURL = new DigestURI(crawlingStart); } catch (final MalformedURLException e1) { Log.logException(e1); }
                crawlingStart = (crawlingStartURL == null) ? null : crawlingStartURL.toNormalform(true, true);

                // set new properties
                final boolean fullDomain = "domain".equals(post.get("range", "wide"));  // special property in simple crawl start
                final boolean subPath = "subpath".equals(post.get("range", "wide"));    // special property in simple crawl start

                // set the crawl filter
                String newcrawlingMustMatch = post.get("mustmatch", CrawlProfile.MATCH_ALL_STRING);
                final String newcrawlingMustNotMatch = post.get("mustnotmatch", CrawlProfile.MATCH_NEVER_STRING);
                if (newcrawlingMustMatch.length() < 2) newcrawlingMustMatch = CrawlProfile.MATCH_ALL_STRING; // avoid that all urls are filtered out if bad value was submitted
                String ipMustMatch = post.get("ipMustmatch", CrawlProfile.MATCH_ALL_STRING);
                final String ipMustNotMatch = post.get("ipMustnotmatch", CrawlProfile.MATCH_NEVER_STRING);
                if (ipMustMatch.length() < 2) ipMustMatch = CrawlProfile.MATCH_ALL_STRING;
                final String countryMustMatch = post.getBoolean("countryMustMatchSwitch", false) ? post.get("countryMustMatchList", "") : "";
                sb.setConfig("crawlingIPMustMatch", ipMustMatch);
                sb.setConfig("crawlingIPMustNotMatch", ipMustNotMatch);
                if (countryMustMatch.length() > 0) sb.setConfig("crawlingCountryMustMatch", countryMustMatch);

                // special cases:
                if (crawlingStartURL != null && fullDomain) {
                    if (crawlingStartURL.isFile()) {
                        newcrawlingMustMatch = "file://" + crawlingStartURL.getPath() + ".*";
                    } else if (crawlingStartURL.isSMB()) {
                        newcrawlingMustMatch = "smb://.*" + crawlingStartURL.getHost() + ".*" + crawlingStartURL.getPath() + ".*";
                    } else if (crawlingStartURL.isFTP()) {
                        newcrawlingMustMatch = "ftp://.*" + crawlingStartURL.getHost() + ".*" + crawlingStartURL.getPath() + ".*";
                    } else {
                        newcrawlingMustMatch = ".*" + crawlingStartURL.getHost() + ".*";
                    }
                }
                if (crawlingStart != null && subPath && (pos = crawlingStart.lastIndexOf('/')) > 0) {
                    newcrawlingMustMatch = crawlingStart.substring(0, pos + 1) + ".*";
                }
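                // Example (illustrative): for a start URL http://www.example.org/wiki/page.html,
                // the "domain" range yields the filter ".*www.example.org.*", while the
                // "subpath" range yields "http://www.example.org/wiki/.*".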

                final boolean crawlOrder = post.get("crawlOrder", "off").equals("on");
                env.setConfig("crawlOrder", crawlOrder);

                int newcrawlingdepth = post.getInt("crawlingDepth", 8);
                env.setConfig("crawlingDepth", Integer.toString(newcrawlingdepth));
                if (crawlOrder && newcrawlingdepth > 8) newcrawlingdepth = 8;

                final boolean directDocByURL = "on".equals(post.get("directDocByURL", "on"));
                env.setConfig("crawlingDirectDocByURL", directDocByURL);

                // recrawl
                final String recrawl = post.get("recrawl", "nodoubles"); // nodoubles, reload, scheduler
                boolean crawlingIfOlderCheck = "on".equals(post.get("crawlingIfOlderCheck", "off"));
                int crawlingIfOlderNumber = post.getInt("crawlingIfOlderNumber", -1);
                String crawlingIfOlderUnit = post.get("crawlingIfOlderUnit", "year"); // year, month, day, hour
                int repeat_time = post.getInt("repeat_time", -1);
                final String repeat_unit = post.get("repeat_unit", "seldays"); // selminutes, selhours, seldays

                if ("scheduler".equals(recrawl) && repeat_time > 0) {
                    // set crawlingIfOlder attributes that are appropriate for scheduled crawling
                    crawlingIfOlderCheck = true;
                    crawlingIfOlderNumber = "selminutes".equals(repeat_unit) ? 1 : "selhours".equals(repeat_unit) ? repeat_time / 2 : repeat_time * 12;
                    crawlingIfOlderUnit = "hour";
                } else if ("reload".equals(recrawl)) {
                    repeat_time = -1;
                    crawlingIfOlderCheck = true;
                } else if ("nodoubles".equals(recrawl)) {
                    repeat_time = -1;
                    crawlingIfOlderCheck = false;
                }
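                // Example (illustrative): a scheduler repeating every 2 days
                // (repeat_time = 2, repeat_unit = "seldays") sets the recrawl age to
                // 2 * 12 = 24 hours, i.e. half of the repeat interval.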
                final long crawlingIfOlder = recrawlIfOlderC(crawlingIfOlderCheck, crawlingIfOlderNumber, crawlingIfOlderUnit);
                env.setConfig("crawlingIfOlder", crawlingIfOlder);

                // store this call as api call
                if (repeat_time > 0) {
                    // store as scheduled api call
                    sb.tables.recordAPICall(post, "Crawler_p.html", WorkTables.TABLE_API_TYPE_CRAWLER, "crawl start for " + ((crawlingStart == null) ? post.get("crawlingFile", "") : crawlingStart), repeat_time, repeat_unit.substring(3)); // substring(3) strips the "sel" prefix: "seldays" -> "days"
                } else {
                    // store just a protocol
                    sb.tables.recordAPICall(post, "Crawler_p.html", WorkTables.TABLE_API_TYPE_CRAWLER, "crawl start for " + ((crawlingStart == null) ? post.get("crawlingFile", "") : crawlingStart));
                }

                final boolean crawlingDomMaxCheck = "on".equals(post.get("crawlingDomMaxCheck", "off"));
                final int crawlingDomMaxPages = (crawlingDomMaxCheck) ? post.getInt("crawlingDomMaxPages", -1) : -1;
                env.setConfig("crawlingDomMaxPages", Integer.toString(crawlingDomMaxPages));

                final boolean crawlingQ = "on".equals(post.get("crawlingQ", "off"));
                env.setConfig("crawlingQ", crawlingQ);

                final boolean indexText = "on".equals(post.get("indexText", "on"));
                env.setConfig("indexText", indexText);

                final boolean indexMedia = "on".equals(post.get("indexMedia", "on"));
                env.setConfig("indexMedia", indexMedia);

                boolean storeHTCache = "on".equals(post.get("storeHTCache", "on"));
                if (crawlingStartURL != null && (crawlingStartURL.isFile() || crawlingStartURL.isSMB())) storeHTCache = false;
                env.setConfig("storeHTCache", storeHTCache);

                CacheStrategy cachePolicy = CacheStrategy.parse(post.get("cachePolicy", "iffresh"));
                if (cachePolicy == null) cachePolicy = CacheStrategy.IFFRESH;

                final boolean xsstopw = "on".equals(post.get("xsstopw", "off"));
                env.setConfig("xsstopw", xsstopw);

                final boolean xdstopw = "on".equals(post.get("xdstopw", "off"));
                env.setConfig("xdstopw", xdstopw);

                final boolean xpstopw = "on".equals(post.get("xpstopw", "off"));
                env.setConfig("xpstopw", xpstopw);

                final String crawlingMode = post.get("crawlingMode", "url");
                if (crawlingStart != null && crawlingStart.startsWith("ftp")) {
                    try {
                        // check if the crawl filter works correctly
                        Pattern.compile(newcrawlingMustMatch);
                        final CrawlProfile profile = new CrawlProfile(
                                crawlingStart,
                                crawlingStartURL,
                                newcrawlingMustMatch,
                                newcrawlingMustNotMatch,
                                ipMustMatch,
                                ipMustNotMatch,
                                countryMustMatch,
                                newcrawlingdepth,
                                directDocByURL,
                                crawlingIfOlder,
                                crawlingDomMaxPages,
                                crawlingQ,
                                indexText,
                                indexMedia,
                                storeHTCache,
                                crawlOrder,
                                xsstopw,
                                xdstopw,
                                xpstopw,
                                cachePolicy);
                        sb.crawler.putActive(profile.handle().getBytes(), profile);
                        sb.pauseCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
                        final DigestURI url = crawlingStartURL;
                        sb.crawlStacker.enqueueEntriesFTP(sb.peers.mySeed().hash.getBytes(), profile.handle(), url.getHost(), url.getPort(), false);
                    } catch (final PatternSyntaxException e) {
                        prop.put("info", "4"); // crawlfilter does not match url
                        prop.putHTML("info_newcrawlingfilter", newcrawlingMustMatch);
                        prop.putHTML("info_error", e.getMessage());
                    } catch (final Exception e) {
                        // unexpected error
                        prop.put("info", "7"); // Error with file
                        prop.putHTML("info_crawlingStart", crawlingStart);
                        prop.putHTML("info_error", e.getMessage());
                        Log.logException(e);
                    }
                    sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
                } else if ("url".equals(crawlingMode)) {

                    // check if pattern matches
                    if ((crawlingStart == null || crawlingStartURL == null) /* || (!(crawlingStart.matches(newcrawlingfilter))) */) {
                        // print error message
                        prop.put("info", "4"); // crawlfilter does not match url
                        prop.putHTML("info_newcrawlingfilter", newcrawlingMustMatch);
                        prop.putHTML("info_crawlingStart", crawlingStart);
                    } else try {

                        // check if the crawl filter works correctly
                        Pattern.compile(newcrawlingMustMatch);

                        // stack request
                        // first delete old entry, if exists
                        final DigestURI url = new DigestURI(crawlingStart);
                        final byte[] urlhash = url.hash();
                        indexSegment.urlMetadata().remove(urlhash);
                        sb.crawlQueues.noticeURL.removeByURLHash(urlhash);
                        sb.crawlQueues.errorURL.remove(urlhash);

                        // stack url
                        sb.crawler.removePassive(crawlingStartURL.hash()); // if there is an old entry, delete it
                        final CrawlProfile pe = new CrawlProfile(
                                (crawlingStartURL.getHost() == null) ? crawlingStartURL.toNormalform(true, false) : crawlingStartURL.getHost(),
                                crawlingStartURL,
                                newcrawlingMustMatch,
                                newcrawlingMustNotMatch,
                                ipMustMatch,
                                ipMustNotMatch,
                                countryMustMatch,
                                newcrawlingdepth,
                                directDocByURL,
                                crawlingIfOlder,
                                crawlingDomMaxPages,
                                crawlingQ,
                                indexText,
                                indexMedia,
                                storeHTCache,
                                crawlOrder,
                                xsstopw,
                                xdstopw,
                                xpstopw,
                                cachePolicy);
                        sb.crawler.putActive(pe.handle().getBytes(), pe);
                        final String reasonString = sb.crawlStacker.stackCrawl(new Request(
                                sb.peers.mySeed().hash.getBytes(),
                                url,
                                null,
                                "CRAWLING-ROOT",
                                new Date(),
                                pe.handle(),
                                0,
                                0,
                                0,
                                0));

                        if (reasonString == null) {
                            // create a bookmark from crawl start url
                            final Set<String> tags = ListManager.string2set(BookmarkHelper.cleanTagsString(post.get("bookmarkFolder", "/crawlStart")));
                            tags.add("crawlStart");
                            if ("on".equals(post.get("createBookmark", "off"))) {
                                final BookmarksDB.Bookmark bookmark = sb.bookmarksDB.createBookmark(crawlingStart, "admin");
                                if (bookmark != null) {
                                    bookmark.setProperty(BookmarksDB.Bookmark.BOOKMARK_TITLE, post.get("bookmarkTitle", crawlingStart));
                                    bookmark.setOwner("admin");
                                    bookmark.setPublic(false);
                                    bookmark.setTags(tags, true);
                                    sb.bookmarksDB.saveBookmark(bookmark);
                                }
                            }
                            // liftoff!
                            prop.put("info", "8"); // start msg
                            prop.putHTML("info_crawlingURL", post.get("crawlingURL"));

                            // generate a YaCyNews if the global flag was set
                            if (!sb.isRobinsonMode() && crawlOrder) {
                                final Map<String, String> m = new HashMap<String, String>(pe); // must be cloned
                                m.remove("specificDepth");
                                m.remove("indexText");
                                m.remove("indexMedia");
                                m.remove("remoteIndexing");
                                m.remove("xsstopw");
                                m.remove("xpstopw");
                                m.remove("xdstopw");
                                m.remove("storeTXCache");
                                m.remove("storeHTCache");
                                m.remove("generalFilter");
                                m.remove("specificFilter");
                                m.put("intention", post.get("intention", "").replace(',', '/'));
                                sb.peers.newsPool.publishMyNews(sb.peers.mySeed(), NewsPool.CATEGORY_CRAWL_START, m);
                            }

                        } else {
                            prop.put("info", "5"); // Crawling failed
                            prop.putHTML("info_crawlingURL", post.get("crawlingURL"));
                            prop.putHTML("info_reasonString", reasonString);

                            sb.crawlQueues.errorURL.push(
                                    new Request(
                                            sb.peers.mySeed().hash.getBytes(),
                                            crawlingStartURL,
                                            null,
                                            "",
                                            new Date(),
                                            pe.handle(),
                                            0,
                                            0,
                                            0,
                                            0),
                                    sb.peers.mySeed().hash.getBytes(),
                                    new Date(),
                                    1,
                                    FailCategory.FINAL_LOAD_CONTEXT,
                                    reasonString, -1);
                        }
                    } catch (final PatternSyntaxException e) {
                        prop.put("info", "4"); // crawlfilter does not match url
                        prop.putHTML("info_newcrawlingfilter", newcrawlingMustMatch);
                        prop.putHTML("info_error", e.getMessage());
                    } catch (final Exception e) {
                        // unexpected error
                        prop.put("info", "6"); // Error with url
                        prop.putHTML("info_crawlingStart", crawlingStart);
                        prop.putHTML("info_error", e.getMessage());
                        Log.logException(e);
                    }

                } else if ("file".equals(crawlingMode)) {
                    if (post.containsKey("crawlingFile")) {
                        final String crawlingFileContent = post.get("crawlingFile$file", "");
                        try {
                            // check if the crawl filter works correctly
                            Pattern.compile(newcrawlingMustMatch);
                            final ContentScraper scraper = new ContentScraper(new DigestURI(crawlingFile));
                            final Writer writer = new TransformerWriter(null, null, scraper, null, false);
                            if (crawlingFile != null && crawlingFile.exists()) {
                                FileUtils.copy(new FileInputStream(crawlingFile), writer);
                            } else {
                                FileUtils.copy(crawlingFileContent, writer);
                            }
                            writer.close();

                            // get links and generate filter
                            final Map<MultiProtocolURI, Properties> hyperlinks = scraper.getAnchors();
                            if (fullDomain && newcrawlingdepth > 0) newcrawlingMustMatch = siteFilter(hyperlinks.keySet());

                            final DigestURI crawlURL = new DigestURI("file://" + crawlingFile.toString());
                            final CrawlProfile profile = new CrawlProfile(
                                    crawlingFileName,
                                    crawlURL,
                                    newcrawlingMustMatch,
                                    CrawlProfile.MATCH_NEVER_STRING,
                                    ipMustMatch,
                                    ipMustNotMatch,
                                    countryMustMatch,
                                    newcrawlingdepth,
                                    false,
                                    crawlingIfOlder,
                                    crawlingDomMaxPages,
                                    crawlingQ,
                                    indexText,
                                    indexMedia,
                                    storeHTCache,
                                    crawlOrder,
                                    xsstopw,
                                    xdstopw,
                                    xpstopw,
                                    cachePolicy);
                            sb.crawler.putActive(profile.handle().getBytes(), profile);
                            sb.pauseCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
                            sb.crawlStacker.enqueueEntriesAsynchronous(sb.peers.mySeed().hash.getBytes(), profile.handle(), hyperlinks, true);
                        } catch (final PatternSyntaxException e) {
                            prop.put("info", "4"); // crawlfilter does not match url
                            prop.putHTML("info_newcrawlingfilter", newcrawlingMustMatch);
                            prop.putHTML("info_error", e.getMessage());
                        } catch (final Exception e) {
                            // unexpected error
                            prop.put("info", "7"); // Error with file
                            prop.putHTML("info_crawlingStart", crawlingFileName);
                            prop.putHTML("info_error", e.getMessage());
                            Log.logException(e);
                        }
                        sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
                    }
                } else if ("sitemap".equals(crawlingMode)) {
                    final String sitemapURLStr = post.get("sitemapURL", "");
                    try {
                        final DigestURI sitemapURL = new DigestURI(sitemapURLStr);
                        final CrawlProfile pe = new CrawlProfile(
                                sitemapURLStr,
                                sitemapURL,
                                CrawlProfile.MATCH_ALL_STRING,
                                CrawlProfile.MATCH_NEVER_STRING,
                                ipMustMatch,
                                ipMustNotMatch,
                                countryMustMatch,
                                0,
                                false,
                                crawlingIfOlder,
                                crawlingDomMaxPages,
                                true,
                                indexText,
                                indexMedia,
                                storeHTCache,
                                crawlOrder,
                                xsstopw,
                                xdstopw,
                                xpstopw,
                                cachePolicy);
                        sb.crawler.putActive(pe.handle().getBytes(), pe);
                        final SitemapImporter importer = new SitemapImporter(sb, sitemapURL, pe);
                        importer.start();
                    } catch (final Exception e) {
                        // unexpected error
                        prop.put("info", "6"); // Error with url
                        prop.putHTML("info_crawlingStart", sitemapURLStr);
                        prop.putHTML("info_error", e.getMessage());
                        Log.logException(e);
                    }
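                    // Illustrative: in sitemap mode the profile is created with depth 0 and
                    // the started SitemapImporter stacks each sitemap entry itself, so no
                    // link extraction happens in this servlet.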
                } else if ("sitelist".equals(crawlingMode)) {
                    try {
                        final DigestURI sitelistURL = new DigestURI(crawlingStart);
                        // download document
                        final ContentScraper scraper = sb.loader.parseResource(sitelistURL, CacheStrategy.IFFRESH);
                        // String title = scraper.getTitle();
                        // String description = scraper.getDescription();

                        // get links and generate filter
                        final Map<MultiProtocolURI, Properties> hyperlinks = scraper.getAnchors();
                        if (fullDomain && newcrawlingdepth > 0) newcrawlingMustMatch = siteFilter(hyperlinks.keySet());

                        // put links onto crawl queue
                        final CrawlProfile profile = new CrawlProfile(
                                sitelistURL.getHost(),
                                sitelistURL,
                                newcrawlingMustMatch,
                                CrawlProfile.MATCH_NEVER_STRING,
                                ipMustMatch,
                                ipMustNotMatch,
                                countryMustMatch,
                                newcrawlingdepth,
                                directDocByURL,
                                crawlingIfOlder,
                                crawlingDomMaxPages,
                                crawlingQ,
                                indexText,
                                indexMedia,
                                storeHTCache,
                                crawlOrder,
                                xsstopw,
                                xdstopw,
                                xpstopw,
                                cachePolicy);
                        sb.crawler.putActive(profile.handle().getBytes(), profile);
                        sb.pauseCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
                        final Iterator<Map.Entry<MultiProtocolURI, Properties>> linkiterator = hyperlinks.entrySet().iterator();
                        DigestURI nexturl;
                        while (linkiterator.hasNext()) {
                            final Map.Entry<MultiProtocolURI, Properties> e = linkiterator.next();
                            if (e.getKey() == null) continue;
                            nexturl = new DigestURI(e.getKey());
                            // remove the url from the database to be prepared to crawl it again
                            final byte[] urlhash = nexturl.hash();
                            indexSegment.urlMetadata().remove(urlhash);
                            sb.crawlQueues.noticeURL.removeByURLHash(urlhash);
                            sb.crawlQueues.errorURL.remove(urlhash);
                            sb.crawlStacker.enqueueEntry(new Request(
                                    sb.peers.mySeed().hash.getBytes(),
                                    nexturl,
                                    null,
                                    e.getValue().getProperty("name", ""),
                                    new Date(),
                                    profile.handle(),
                                    0,
                                    0,
                                    0,
                                    0));
                        }
                    } catch (final Exception e) {
                        // unexpected error
                        prop.put("info", "6"); // Error with url
                        prop.putHTML("info_crawlingStart", crawlingStart);
                        prop.putHTML("info_error", e.getMessage());
                        Log.logException(e);
                    }
                }
            }
        }

        if (post != null && post.containsKey("crawlingPerformance")) {
            setPerformance(sb, post);
        }

        // performance settings
        final long LCbusySleep = env.getConfigLong(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL_BUSYSLEEP, 1000L);
        final int LCppm = (int) (60000L / Math.max(1, LCbusySleep)); // busy-sleep (ms) -> pages per minute
        prop.put("crawlingSpeedMaxChecked", (LCppm >= 30000) ? "1" : "0");
        prop.put("crawlingSpeedCustChecked", ((LCppm > 10) && (LCppm < 30000)) ? "1" : "0");
        prop.put("crawlingSpeedMinChecked", (LCppm <= 10) ? "1" : "0");
        prop.put("customPPMdefault", Integer.toString(LCppm));
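
        // Example (illustrative): the default busy-sleep of 1000 ms yields
        // 60000 / 1000 = 60 PPM, which checks the "custom" speed option above.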

        // return rewrite properties
        return prop;
    }

    private static long recrawlIfOlderC(final boolean recrawlIfOlderCheck, final int recrawlIfOlderNumber, final String crawlingIfOlderUnit) {
        if (!recrawlIfOlderCheck) return 0L;
        if ("year".equals(crawlingIfOlderUnit)) return System.currentTimeMillis() - (long) recrawlIfOlderNumber * 1000L * 60L * 60L * 24L * 365L;
        if ("month".equals(crawlingIfOlderUnit)) return System.currentTimeMillis() - (long) recrawlIfOlderNumber * 1000L * 60L * 60L * 24L * 30L;
        if ("day".equals(crawlingIfOlderUnit)) return System.currentTimeMillis() - (long) recrawlIfOlderNumber * 1000L * 60L * 60L * 24L;
        if ("hour".equals(crawlingIfOlderUnit)) return System.currentTimeMillis() - (long) recrawlIfOlderNumber * 1000L * 60L * 60L;
        return System.currentTimeMillis() - (long) recrawlIfOlderNumber;
    }
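
    // Example (illustrative): recrawlIfOlderC(true, 7, "day") returns the epoch
    // millisecond timestamp of seven days ago; documents last fetched before that
    // point count as stale and are re-fetched.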

    private static void setPerformance(final Switchboard sb, final serverObjects post) {
        final String crawlingPerformance = post.get("crawlingPerformance", "custom");
        final long LCbusySleep = sb.getConfigLong(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL_BUSYSLEEP, 1000L);
        int wantedPPM = (LCbusySleep == 0) ? 30000 : (int) (60000L / LCbusySleep);
        try {
            wantedPPM = post.getInt("customPPM", wantedPPM);
        } catch (final NumberFormatException e) { }
        if ("minimum".equals(crawlingPerformance.toLowerCase())) wantedPPM = 10;
        if ("maximum".equals(crawlingPerformance.toLowerCase())) wantedPPM = 30000;
        sb.setPerformance(wantedPPM);
    }

    private static String siteFilter(final Set<MultiProtocolURI> uris) {
        final StringBuilder filter = new StringBuilder();
        final Set<String> filterSet = new HashSet<String>();
        for (final MultiProtocolURI uri : uris) {
            filterSet.add(new StringBuilder().append(uri.getProtocol()).append("://").append(uri.getHost()).append(".*").toString());
            if (!uri.getHost().startsWith("www.")) {
                filterSet.add(new StringBuilder().append(uri.getProtocol()).append("://www.").append(uri.getHost()).append(".*").toString());
            }
        }
        for (final String element : filterSet) {
            filter.append('|').append(element);
        }
        return filter.length() > 0 ? filter.substring(1) : "";
    }
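
    // Example (illustrative): for a single anchor http://example.net/page.html,
    // siteFilter yields "http://example.net.*|http://www.example.net.*" (set order
    // may vary), a disjunctive regex confining the crawl to that host with and
    // without the "www." prefix.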
}