2006-11-30 08:33:54 +01:00
//LogParserPLASMA.java
//-------------------------------------
//part of YACY
2008-07-20 19:14:51 +02:00
//(C) by Michael Peter Christen; mc@yacy.net
2006-11-30 08:33:54 +01:00
//first published on http://www.anomic.de
//Frankfurt, Germany, 2004
//
//This file is contributed by Matthias Soehnholz
//last major change: $LastChangedDate$ by $LastChangedBy$
//Revision: $LastChangedRevision$
//
//This program is free software; you can redistribute it and/or modify
//it under the terms of the GNU General Public License as published by
//the Free Software Foundation; either version 2 of the License, or
//(at your option) any later version.
//
//This program is distributed in the hope that it will be useful,
//but WITHOUT ANY WARRANTY; without even the implied warranty of
//MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//GNU General Public License for more details.
//
//You should have received a copy of the GNU General Public License
//along with this program; if not, write to the Free Software
//Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
2006-11-17 12:49:21 +01:00
package de.anomic.server.logging.logParsers ;
import java.util.HashSet ;
2007-01-11 10:18:10 +01:00
import java.util.Hashtable ;
2006-11-17 12:49:21 +01:00
import java.util.regex.Matcher ;
import java.util.regex.Pattern ;
public class LogParserPLASMA implements LogParser {
2007-01-11 10:18:10 +01:00
2007-01-16 17:13:21 +01:00
/** the version of the LogParser - <strong>Double</strong>*/
2007-01-16 06:51:39 +01:00
public static final String PARSER_VERSION = " version " ;
2007-01-16 17:13:21 +01:00
/** the amount of URLs received during DHT - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String URLS_RECEIVED = " urlSum " ;
2007-01-16 17:13:21 +01:00
/** the amount of URLs requested during DHT - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String URLS_REQUESTED = " urlReqSum " ;
2007-01-16 17:13:21 +01:00
/** the amount of URLs blocked during DHT because they match the peer's blacklist - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String URLS_BLOCKED = " blockedURLSum " ;
2007-01-16 17:13:21 +01:00
/** the amount of words received during DHT - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String WORDS_RECEIVED = " wordsSum " ;
2007-01-16 17:13:21 +01:00
/** the amount of RWIs received during DHT - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String RWIS_RECEIVED = " rwiSum " ;
2007-01-16 17:13:21 +01:00
/** the amount of RWIs blocked during DHT because their entries match the peer's blacklist - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String RWIS_BLOCKED = " blockedRWISum " ;
2007-01-16 17:13:21 +01:00
/** total time receiving RWIs during DHT in milli seconds - <strong>Long</strong> */
2007-01-16 06:51:39 +01:00
public static final String RWIS_RECEIVED_TIME = " rwiTimeSum " ;
2007-01-16 17:13:21 +01:00
/** total time receiving URLs during DHT in milli seconds - <strong>Long</strong> */
2007-01-16 06:51:39 +01:00
public static final String URLS_RECEIVED_TIME = " urlTimeSum " ;
2007-01-16 17:13:21 +01:00
/** the traffic sent during DHT in bytes - <strong>Long</strong> */
2007-01-16 06:51:39 +01:00
public static final String DHT_TRAFFIC_SENT = " DHTSendTraffic " ;
2007-01-16 17:13:21 +01:00
/** the amount of URLs requested by other peers and sent by this one - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String DHT_URLS_SENT = " DHTSendURLs " ;
2007-01-16 17:13:21 +01:00
/** the amount of rejected DHT transfers from other peers (i.e. because this peer was busy) - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String DHT_REJECTED = " RWIRejectCount " ;
2007-01-16 17:13:21 +01:00
/** the peer-names from whose DHT transfers were rejected - <strong>HasSet</strong> */
2007-01-16 06:51:39 +01:00
public static final String DHT_REJECTED_PEERS_NAME = " DHTRejectPeerNames " ;
2007-01-16 17:13:21 +01:00
/** the peer-hashes from whose DHT transfers were rejected - <strong>HasSet</strong> */
2007-01-16 06:51:39 +01:00
public static final String DHT_REJECTED_PEERS_HASH = " DHTRejectPeerHashs " ;
2007-01-16 17:13:21 +01:00
/** the peer-names this peer sent DHT chunks to - <strong>HasSet</strong> */
2007-01-16 06:51:39 +01:00
public static final String DHT_SENT_PEERS_NAME = " DHTPeerNames " ;
2007-01-16 17:13:21 +01:00
/** the peer-hashes this peer sent DHT chunks to - <strong>HasSet</strong> */
2007-01-16 06:51:39 +01:00
public static final String DHT_SENT_PEERS_HASH = " DHTPeerHashs " ;
2007-01-16 17:13:21 +01:00
/** total amount of selected peers for index distribution - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String DHT_SELECTED = " DHTSelectionTargetCount " ;
2007-01-16 17:13:21 +01:00
/** total amount of words selected for index distribution - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String DHT_WORDS_SELECTED = " DHTSelectionWordsCount " ;
2007-01-16 17:13:21 +01:00
/** total time selecting words for index distribution - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String DHT_WORDS_SELECTED_TIME = " DHTSelectionWordsTimeCount " ;
2007-01-16 17:13:21 +01:00
/** the minimal DHT distance during peer-selection for index distribution - <strong>Double</strong> */
2007-01-16 06:51:39 +01:00
public static final String DHT_DISTANCE_MIN = " minDHTDist " ;
2007-01-16 17:13:21 +01:00
/** the maximal DHT distance during peer-selection for index distribution - <strong>Double</strong> */
2007-01-16 06:51:39 +01:00
public static final String DHT_DISTANCE_MAX = " maxDHTDist " ;
2007-01-16 17:13:21 +01:00
/** the average DHT distance during peer-selection for index distribution - <strong>Double</strong> */
2007-01-16 06:51:39 +01:00
public static final String DHT_DISTANCE_AVERAGE = " avgDHTDist " ;
2007-01-16 17:13:21 +01:00
/** how many times remote peers were too busy to accept the index transfer - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String PEERS_BUSY = " busyPeerCount " ;
2007-01-16 17:13:21 +01:00
/** how many times not enough peers for index distribution were found - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String PEERS_TOO_LESS = " notEnoughDHTPeers " ;
2007-01-16 17:13:21 +01:00
/** how many times the index distribution failed (i.e. due to time-out or other reasons) - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String DHT_SENT_FAILED = " failedIndexDistributionCount " ;
2007-01-16 17:13:21 +01:00
/** how many times the error "<code>tried to create left child-node twice</code>" occured - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String ERROR_CHILD_TWICE_LEFT = " leftChildTwiceCount " ;
2007-01-16 17:13:21 +01:00
/** how many times the error "<code>tried to create right child-node twice</code>" occured - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String ERROR_CHILD_TWICE_RIGHT = " rightChildTwiceCount " ;
2007-01-16 17:13:21 +01:00
/** how many ranking distributions were executed successfully - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String RANKING_DIST = " rankingDistributionCount " ;
2007-01-16 17:13:21 +01:00
/** total time the ranking distributions took - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String RANKING_DIST_TIME = " rankingDistributionTime " ;
2007-01-16 17:13:21 +01:00
/** how many ranking distributions failed - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String RANKING_DIST_FAILED = " rankingDistributionFailCount " ;
2007-01-16 17:13:21 +01:00
/** how many times the error "<code>Malformed URL</code>" occured - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String ERROR_MALFORMED_URL = " malformedURLCount " ;
2007-01-16 17:13:21 +01:00
/** the amount of indexed sites - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String INDEXED_SITES = " indexedSites " ;
2007-01-16 17:13:21 +01:00
/** total amount of indexed words - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String INDEXED_WORDS = " indexedWords " ;
2007-01-16 17:13:21 +01:00
/** total size of all indexed sites - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String INDEXED_SITES_SIZE = " indexedSiteSizeSum " ;
2007-01-16 17:13:21 +01:00
/** total amount of indexed anchors - <strong>Integer</strong> */
2007-01-16 06:51:39 +01:00
public static final String INDEXED_ANCHORS = " indexedAnchors " ;
2008-06-24 09:01:04 +02:00
// /** total time needed for stacking the site of an indexing - <strong>Integer</strong> */
// public static final String INDEXED_STACK_TIME = "indexedStackingTime";
//
// /** total time needed for parsing during indexing - <strong>Integer</strong> */
// public static final String INDEXED_PARSE_TIME = "indexedParsingTime";
//
// /** total time needed for the actual indexing during indexing - <strong>Integer</strong> */
// public static final String INDEXED_INDEX_TIME = "indexedIndexingTime";
//
// /** total time needed for storing the results of an indexing - <strong>Integer</strong> */
// public static final String INDEXED_STORE_TIME = "indexedStorageTime";
/** total time needed for storing the results of a link indexing - <strong>Integer</strong> */
public static final String INDEXED_LINKSTORE_TIME = " indexedLinkStorageTime " ;
/** total time needed for storing the results of a word indexing - <strong>Integer</strong> */
public static final String INDEXED_INDEXSTORE_TIME = " indexedIndexStorageTime " ;
2007-01-16 06:51:39 +01:00
2007-01-19 12:10:57 +01:00
/** accumulated time needed to parse the log entries up to now (in ms)*/
2007-01-17 18:01:20 +01:00
public static final String TOTAL_PARSER_TIME = " totalParserTime " ;
2008-06-24 09:01:04 +02:00
/** times the parser was called, respectively amount of independent log-lines */
2007-01-17 18:01:20 +01:00
public static final String TOTAL_PARSER_RUNS = " totalParserRuns " ;
2007-01-17 14:35:33 +01:00
2007-01-16 06:51:39 +01:00
2008-08-06 21:43:12 +02:00
private static final double parserVersion = 0 . 1 ;
private static final String parserType = " PLASMA " ;
2007-01-11 10:18:10 +01:00
2006-11-17 12:49:21 +01:00
//RegExp for LogLevel I
2008-06-26 09:12:03 +02:00
private static final Pattern i1 = Pattern . compile ( " Received ( \\ d*) URLs from peer [ \\ w-_]{12}:[ \\ w-_]*/[ \\ w.-]* in ( \\ d*) ms, blocked ( \\ d*) URLs " ) ;
2007-01-16 19:42:34 +01:00
private static final Pattern i2 = Pattern . compile ( " Received ( \\ d*) Entries ( \\ d*) Words \\ [[ \\ w-_]{12} .. [ \\ w-_]{12} \\ ]/[ \\ w.-]* from [ \\ w-_]{12}:[ \\ w-_]*/[ \\ w.-]*, processed in ( \\ d*) milliseconds, requesting ( \\ d*)/( \\ d*) URLs, blocked ( \\ d*) RWIs " ) ;
private static final Pattern i2_2 = Pattern . compile ( " Received ( \\ d*) Entries ( \\ d*) Words \\ [[ \\ w-_]{12} .. [ \\ w-_]{12} \\ ]/[ \\ w.-]* from [ \\ w-_]{12}:[ \\ w-_]*, processed in ( \\ d*) milliseconds, requesting ( \\ d*)/( \\ d*) URLs, blocked ( \\ d*) RWIs " ) ;
private static final Pattern i3 = Pattern . compile ( " Index transfer of ( \\ d*) words \\ [[ \\ w-_]{12} .. [ \\ w-_]{12} \\ ] to peer ([ \\ w-_]*):([ \\ w-_]{12}) in ( \\ d*) seconds successful \\ (( \\ d*) words/s, ( \\ d*) Bytes \\ ) " ) ;
private static final Pattern i4 = Pattern . compile ( " Index transfer of ( \\ d*) entries ( \\ d*) words \\ [[ \\ w-_]{12} .. [ \\ w-_]{12} \\ ] and ( \\ d*) URLs to peer ([ \\ w-_]*):([ \\ w-_]{12}) in ( \\ d*) seconds successful \\ (( \\ d*) words/s, ( \\ d*) Bytes \\ ) " ) ;
2008-06-25 10:15:07 +02:00
private static final Pattern i5 = Pattern . compile ( " Selected \\ w* DHT target peer ([ \\ w-_]*):([ \\ w-_]{12}), distance2first = ([ \\ w.-]*), distance2last = ([ \\ w.-]*) " ) ;
private static final Pattern i6 = Pattern . compile ( " Rejecting RWIs from peer ([ \\ w-_]{12}):([ \\ w-_]*)/([ \\ w.]*). ([ \\ w. ]*) " ) ;
2007-01-16 19:42:34 +01:00
private static final Pattern i7 = Pattern . compile ( " DHT distribution: transfer to peer [ \\ w-]* finished. " ) ;
private static final Pattern i8 = Pattern . compile ( " Index selection of ( \\ d*) words \\ [[ \\ w-_]{12} .. [ \\ w-_]{12} \\ ] in ( \\ d*) seconds " ) ;
2008-06-26 09:12:03 +02:00
private static final Pattern i9 = Pattern . compile ( " RankingDistribution - transmitted file [ \\ w \\ s-:. \\ \\ ]* to [ \\ w.]*: \\ d* successfully in ( \\ d)* seconds " ) ;
2007-01-16 19:42:34 +01:00
private static final Pattern i10 = Pattern . compile ( " RankingDistribution - error transmitting file " ) ;
private static final Pattern i11 = Pattern . compile ( " Peer [ \\ w-_]*:[ \\ w-_]{12} is busy \\ . Waiting \\ d* ms \\ . " ) ;
2007-01-11 10:18:10 +01:00
//private static Pattern i12 = Pattern.compile("\\*Indexed \\d* words in URL [\\w:.&/%-~$\u00A7@=]* \\[[\\w-_]{12}\\]");
2007-01-16 19:42:34 +01:00
private static final Pattern i13 = Pattern . compile ( " WROTE HEADER for |LOCALCRAWL \\ [ \\ d*, \\ d*, \\ d*, \\ d* \\ ]|REJECTED WRONG STATUS TYPE " ) ;
2006-11-17 12:49:21 +01:00
//RegExp for LogLevel W
2007-01-16 19:42:34 +01:00
private static final Pattern w1 = Pattern . compile ( " found not enough \\ ( \\ d* \\ ) peers for distribution " ) ;
private static final Pattern w2 = Pattern . compile ( " Transfer to peer ([ \\ w-_]*):([ \\ w-_]{12}) failed:'( \\ w*)' " ) ;
2006-11-17 12:49:21 +01:00
//RegExp for LogLevel E
2007-01-16 19:42:34 +01:00
private static final Pattern e1 = Pattern . compile ( " INTERNAL ERROR AT plasmaCrawlLURL:store:de.anomic.kelondro.kelondroException: tried to create ( \\ w*) node twice in db " ) ;
private static final Pattern e2 = Pattern . compile ( " INTERNAL ERROR [ \\ w./: ]* java.net.MalformedURLException " ) ;
2006-11-17 12:49:21 +01:00
private Matcher m ;
//RegExp for advancedParser
//private Pattern adv1 = Pattern.compile("\\*Indexed (\\d*) words in URL [\\w:.&?/%-=]* \\[[\\w-_]{12}\\]\\n\\tDescription: ([\\w- ]*)\\n\\tMimeType: ([\\w-_/]*) \\| Size: (\\d*) bytes \\| Anchors: (\\d*)\\n\\tStackingTime: (\\d*) ms \\| ParsingTime: (\\d*) ms \\| IndexingTime: (\\d*) ms \\| StorageTime: (\\d*) ms");
2007-01-16 19:42:34 +01:00
private static Pattern adv1 = Pattern . compile (
" \\ *Indexed ( \\ d+) words in URL [ \\ w:.&/%-~;$ \ u00A7@=]* \\ [[ \\ w_-]{12} \\ ] \\ r? \\ n? " +
" \\ tDescription: +([ \\ w- \\ .,:!=' \" |/+@ \\ ( \\ ) \\ t]*) \\ r? \\ n? " +
" \\ tMimeType: ([ \\ w_~/-]*) \\ | Charset: ([ \\ w-]*) \\ | Size: ( \\ d+) bytes \\ | Anchors: ( \\ d+) \\ r? \\ n? " +
2008-06-24 09:01:04 +02:00
" \\ tLinkStorageTime: ( \\ d+) ms \\ | indexStorageTime: ( \\ d+) ms " ) ;
//"\\tStackingTime:[ ]*(\\d+) ms \\| ParsingTime:[ ]*(\\d+) ms \\| IndexingTime: (\\d+) ms \\| StorageTime: (\\d+) ms");
2006-11-17 12:49:21 +01:00
private int urlSum = 0 ;
private int urlReqSum = 0 ;
private int blockedURLSum = 0 ;
private int wordsSum = 0 ;
private int rwiSum = 0 ;
private int blockedRWISum = 0 ;
private long urlTimeSum = 0 ;
private long rwiTimeSum = 0 ;
private long DHTSendTraffic = 0 ;
private int DHTSendURLs = 0 ;
private int RWIRejectCount = 0 ;
2008-08-02 14:12:04 +02:00
private final HashSet < String > RWIRejectPeerNames = new HashSet < String > ( ) ;
private final HashSet < String > RWIRejectPeerHashs = new HashSet < String > ( ) ;
private final HashSet < String > DHTPeerNames = new HashSet < String > ( ) ;
private final HashSet < String > DHTPeerHashs = new HashSet < String > ( ) ;
2006-11-17 12:49:21 +01:00
private int DHTSelectionTargetCount = 0 ;
private int DHTSelectionWordsCount = 0 ;
private int DHTSelectionWordsTimeCount = 0 ;
private double minDHTDist = 1 ;
private double maxDHTDist = 0 ;
private double avgDHTDist = 0 ;
private int busyPeerCount = 0 ;
private int notEnoughDHTPeers = 0 ;
private int failedIndexDistributionCount = 0 ;
private int leftChildTwiceCount = 0 ;
private int rightChildTwiceCount = 0 ;
private int rankingDistributionCount = 0 ;
private int rankingDistributionTime = 0 ;
private int rankingDistributionFailCount = 0 ;
private int malformedURLCount = 0 ;
private int indexedSites = 0 ;
private int indexedWordSum = 0 ;
private int indexedSiteSizeSum = 0 ;
private int indexedAnchorsCount = 0 ;
2008-06-24 09:01:04 +02:00
private int indexedLinkStorageTime = 0 ;
private int indexedIndexStorageTime = 0 ;
// private int indexedStackingTime = 0;
// private int indexedParsingTime = 0;
// private int indexedIndexingTime = 0;
// private int indexedStorageTime = 0;
2007-01-17 14:35:33 +01:00
private long totalParserTime = 0 ;
private int totalParserRuns = 0 ;
2006-11-17 12:49:21 +01:00
2008-08-02 14:12:04 +02:00
public int parse ( final String logLevel , final String logLine ) {
final long start = System . currentTimeMillis ( ) ;
2006-11-17 12:49:21 +01:00
if ( logLevel . equals ( " INFO " ) ) {
m = i1 . matcher ( logLine ) ;
2007-01-21 15:45:52 +01:00
if ( m . find ( ) & & m . groupCount ( ) > = 3 ) {
2006-11-17 12:49:21 +01:00
//System.out.println(m.group(1) + " " + m.group(2) + " " + m.group(3));
urlSum + = Integer . parseInt ( m . group ( 1 ) ) ;
urlTimeSum + = Integer . parseInt ( m . group ( 2 ) ) ;
blockedURLSum + = Integer . parseInt ( m . group ( 3 ) ) ;
2007-01-19 12:10:57 +01:00
totalParserTime + = ( System . currentTimeMillis ( ) - start ) ;
2007-01-19 10:25:04 +01:00
totalParserRuns + + ;
2006-11-17 12:49:21 +01:00
return 0 ;
}
m = i2 . matcher ( logLine ) ;
2007-01-21 15:45:52 +01:00
if ( m . find ( ) & & m . groupCount ( ) > = 6 ) {
2006-11-17 12:49:21 +01:00
rwiSum + = Integer . parseInt ( m . group ( 1 ) ) ;
wordsSum + = Integer . parseInt ( m . group ( 2 ) ) ;
rwiTimeSum + = Integer . parseInt ( m . group ( 3 ) ) ;
urlReqSum + = Integer . parseInt ( m . group ( 4 ) ) ;
blockedRWISum + = Integer . parseInt ( m . group ( 6 ) ) ;
2007-01-19 12:10:57 +01:00
totalParserTime + = ( System . currentTimeMillis ( ) - start ) ;
2007-01-19 10:25:04 +01:00
totalParserRuns + + ;
2006-11-17 12:49:21 +01:00
return 0 ;
}
m = i2_2 . matcher ( logLine ) ;
2007-01-21 15:45:52 +01:00
if ( m . find ( ) & & m . groupCount ( ) > = 6 ) {
2006-11-17 12:49:21 +01:00
rwiSum + = Integer . parseInt ( m . group ( 1 ) ) ;
wordsSum + = Integer . parseInt ( m . group ( 2 ) ) ;
rwiTimeSum + = Integer . parseInt ( m . group ( 3 ) ) ;
urlReqSum + = Integer . parseInt ( m . group ( 4 ) ) ;
blockedRWISum + = Integer . parseInt ( m . group ( 6 ) ) ;
2007-01-19 12:10:57 +01:00
totalParserTime + = ( System . currentTimeMillis ( ) - start ) ;
2007-01-19 10:25:04 +01:00
totalParserRuns + + ;
2006-11-17 12:49:21 +01:00
return 0 ;
}
m = i3 . matcher ( logLine ) ;
2007-01-21 15:45:52 +01:00
if ( m . find ( ) & & m . groupCount ( ) > = 6 ) {
2006-11-17 12:49:21 +01:00
DHTSendTraffic + = Integer . parseInt ( m . group ( 6 ) ) ;
DHTPeerNames . add ( m . group ( 2 ) ) ;
DHTPeerHashs . add ( m . group ( 3 ) ) ;
2007-01-19 12:10:57 +01:00
totalParserTime + = ( System . currentTimeMillis ( ) - start ) ;
2007-01-19 10:25:04 +01:00
totalParserRuns + + ;
2006-11-17 12:49:21 +01:00
return 0 ;
}
m = i4 . matcher ( logLine ) ;
2007-01-21 15:45:52 +01:00
if ( m . find ( ) & & m . groupCount ( ) > = 8 ) {
2006-11-17 12:49:21 +01:00
DHTSendTraffic + = Integer . parseInt ( m . group ( 8 ) ) ;
DHTSendURLs + = Integer . parseInt ( m . group ( 3 ) ) ;
DHTPeerNames . add ( m . group ( 4 ) ) ;
DHTPeerHashs . add ( m . group ( 5 ) ) ;
2007-01-19 12:10:57 +01:00
totalParserTime + = ( System . currentTimeMillis ( ) - start ) ;
2007-01-19 10:25:04 +01:00
totalParserRuns + + ;
2006-11-17 12:49:21 +01:00
return 0 ;
}
m = i5 . matcher ( logLine ) ;
2008-06-25 10:15:07 +02:00
if ( m . find ( ) & & m . groupCount ( ) > = 4 ) {
minDHTDist = Math . min ( minDHTDist , Math . min ( Double . parseDouble ( m . group ( 3 ) ) , Double . parseDouble ( m . group ( 4 ) ) ) ) ;
maxDHTDist = Math . max ( maxDHTDist , Math . max ( Double . parseDouble ( m . group ( 3 ) ) , Double . parseDouble ( m . group ( 4 ) ) ) ) ;
2006-11-17 12:49:21 +01:00
avgDHTDist + = Double . parseDouble ( m . group ( 3 ) ) ;
DHTSelectionTargetCount + + ;
2007-01-19 12:10:57 +01:00
totalParserTime + = ( System . currentTimeMillis ( ) - start ) ;
2007-01-19 10:25:04 +01:00
totalParserRuns + + ;
2006-11-17 12:49:21 +01:00
return 0 ;
}
m = i6 . matcher ( logLine ) ;
2007-01-21 15:45:52 +01:00
if ( m . find ( ) & & m . groupCount ( ) > = 2 ) {
2006-11-17 12:49:21 +01:00
RWIRejectPeerNames . add ( m . group ( 2 ) ) ;
RWIRejectPeerHashs . add ( m . group ( 1 ) ) ;
RWIRejectCount + + ;
2007-01-19 12:10:57 +01:00
totalParserTime + = ( System . currentTimeMillis ( ) - start ) ;
2007-01-19 10:25:04 +01:00
totalParserRuns + + ;
2006-11-17 12:49:21 +01:00
return 0 ;
}
m = i7 . matcher ( logLine ) ;
if ( m . find ( ) ) {
2007-01-19 12:10:57 +01:00
totalParserTime + = ( System . currentTimeMillis ( ) - start ) ;
2007-01-19 10:25:04 +01:00
totalParserRuns + + ;
2006-11-17 12:49:21 +01:00
return 0 ;
}
m = i8 . matcher ( logLine ) ;
2007-01-21 15:45:52 +01:00
if ( m . find ( ) & & m . groupCount ( ) > = 2 ) {
2006-11-17 12:49:21 +01:00
DHTSelectionWordsCount + = Double . parseDouble ( m . group ( 1 ) ) ;
DHTSelectionWordsTimeCount + = Double . parseDouble ( m . group ( 2 ) ) ;
2007-01-19 12:10:57 +01:00
totalParserTime + = ( System . currentTimeMillis ( ) - start ) ;
2007-01-19 10:25:04 +01:00
totalParserRuns + + ;
2006-11-17 12:49:21 +01:00
return 0 ;
}
m = i9 . matcher ( logLine ) ;
2007-01-21 15:45:52 +01:00
if ( m . find ( ) & & m . groupCount ( ) > = 1 ) {
2006-11-17 12:49:21 +01:00
rankingDistributionCount + + ;
rankingDistributionTime + = Integer . parseInt ( m . group ( 1 ) ) ;
2007-01-19 12:10:57 +01:00
totalParserTime + = ( System . currentTimeMillis ( ) - start ) ;
2007-01-19 10:25:04 +01:00
totalParserRuns + + ;
2006-11-17 12:49:21 +01:00
return 0 ;
}
m = i10 . matcher ( logLine ) ;
if ( m . find ( ) ) {
rankingDistributionFailCount + + ;
2007-01-19 12:10:57 +01:00
totalParserTime + = ( System . currentTimeMillis ( ) - start ) ;
2007-01-19 10:25:04 +01:00
totalParserRuns + + ;
2006-11-17 12:49:21 +01:00
return 0 ;
}
m = i11 . matcher ( logLine ) ;
if ( m . find ( ) ) {
busyPeerCount + + ;
2007-01-19 12:10:57 +01:00
totalParserTime + = ( System . currentTimeMillis ( ) - start ) ;
2007-01-19 10:25:04 +01:00
totalParserRuns + + ;
2006-11-17 12:49:21 +01:00
return 0 ;
}
// m = i12.matcher (logLine);
//
// if (m.find ()) {
// return 3;
// }
m = i13 . matcher ( logLine ) ;
if ( m . find ( ) ) {
2007-01-19 12:10:57 +01:00
totalParserTime + = ( System . currentTimeMillis ( ) - start ) ;
2007-01-19 10:25:04 +01:00
totalParserRuns + + ;
2006-11-17 12:49:21 +01:00
return 0 ;
}
m = adv1 . matcher ( logLine ) ;
2008-06-24 09:01:04 +02:00
if ( m . find ( ) & & m . groupCount ( ) > = 8 ) {
2006-11-17 12:49:21 +01:00
indexedSites + + ;
indexedWordSum + = Integer . parseInt ( m . group ( 1 ) ) ;
2007-01-16 19:42:34 +01:00
indexedSiteSizeSum + = Integer . parseInt ( m . group ( 5 ) ) ;
indexedAnchorsCount + = Integer . parseInt ( m . group ( 6 ) ) ;
2008-06-24 09:01:04 +02:00
indexedLinkStorageTime + = Integer . parseInt ( m . group ( 7 ) ) ;
indexedIndexStorageTime + = Integer . parseInt ( m . group ( 8 ) ) ;
// indexedStackingTime += Integer.parseInt(m.group(7));
// indexedParsingTime += Integer.parseInt(m.group(8));
// indexedIndexingTime += Integer.parseInt(m.group(9));
// indexedStorageTime += Integer.parseInt(m.group(10));
2007-01-19 12:10:57 +01:00
totalParserTime + = ( System . currentTimeMillis ( ) - start ) ;
2007-01-19 10:25:04 +01:00
totalParserRuns + + ;
return 0 ;
2006-11-17 12:49:21 +01:00
}
} else if ( logLevel . equals ( " WARNING " ) ) {
m = w1 . matcher ( logLine ) ;
if ( m . find ( ) ) {
notEnoughDHTPeers + + ;
2007-01-19 12:10:57 +01:00
totalParserTime + = ( System . currentTimeMillis ( ) - start ) ;
2007-01-19 10:25:04 +01:00
totalParserRuns + + ;
2006-11-17 12:49:21 +01:00
return 0 ;
}
m = w2 . matcher ( logLine ) ;
if ( m . find ( ) ) {
failedIndexDistributionCount + + ;
2007-01-19 12:10:57 +01:00
totalParserTime + = ( System . currentTimeMillis ( ) - start ) ;
2007-01-19 10:25:04 +01:00
totalParserRuns + + ;
2006-11-17 12:49:21 +01:00
return 0 ;
}
} else if ( logLevel . equals ( " SEVERE " ) ) {
m = e1 . matcher ( logLine ) ;
2007-01-21 15:45:52 +01:00
if ( m . find ( ) & & m . groupCount ( ) > = 1 ) {
2006-11-17 12:49:21 +01:00
if ( m . group ( 1 ) . equals ( " leftchild " ) ) leftChildTwiceCount + + ;
else if ( m . group ( 1 ) . equals ( " rightchild " ) ) rightChildTwiceCount + + ;
2007-01-19 12:10:57 +01:00
totalParserTime + = ( System . currentTimeMillis ( ) - start ) ;
2007-01-19 10:25:04 +01:00
totalParserRuns + + ;
2006-11-17 12:49:21 +01:00
return 0 ;
}
m = e2 . matcher ( logLine ) ;
if ( m . find ( ) ) {
malformedURLCount + + ;
2007-01-19 12:10:57 +01:00
totalParserTime + = ( System . currentTimeMillis ( ) - start ) ;
2007-01-19 10:25:04 +01:00
totalParserRuns + + ;
2006-11-17 12:49:21 +01:00
return 0 ;
}
}
2007-01-19 12:10:57 +01:00
totalParserTime + = ( System . currentTimeMillis ( ) - start ) ;
2007-01-17 14:35:33 +01:00
totalParserRuns + + ;
2006-11-17 12:49:21 +01:00
return - 1 ;
}
2008-01-24 00:08:32 +01:00
public Hashtable < String , Object > getResults ( ) {
2008-08-02 14:12:04 +02:00
final Hashtable < String , Object > results = new Hashtable < String , Object > ( ) ;
2008-08-06 21:43:12 +02:00
results . put ( PARSER_VERSION , Double . valueOf ( parserVersion ) ) ;
results . put ( URLS_RECEIVED , Integer . valueOf ( urlSum ) ) ;
results . put ( URLS_REQUESTED , Integer . valueOf ( urlReqSum ) ) ;
results . put ( URLS_BLOCKED , Integer . valueOf ( blockedURLSum ) ) ;
results . put ( WORDS_RECEIVED , Integer . valueOf ( wordsSum ) ) ;
results . put ( RWIS_RECEIVED , Integer . valueOf ( rwiSum ) ) ;
results . put ( RWIS_BLOCKED , Integer . valueOf ( blockedRWISum ) ) ;
results . put ( URLS_RECEIVED_TIME , Long . valueOf ( urlTimeSum ) ) ;
results . put ( RWIS_RECEIVED_TIME , Long . valueOf ( rwiTimeSum ) ) ;
results . put ( DHT_TRAFFIC_SENT , Long . valueOf ( DHTSendTraffic ) ) ;
results . put ( DHT_URLS_SENT , Integer . valueOf ( DHTSendURLs ) ) ;
results . put ( DHT_REJECTED , Integer . valueOf ( RWIRejectCount ) ) ;
2007-01-16 06:51:39 +01:00
results . put ( DHT_REJECTED_PEERS_NAME , RWIRejectPeerNames ) ;
results . put ( DHT_REJECTED_PEERS_HASH , RWIRejectPeerHashs ) ;
results . put ( DHT_SENT_PEERS_NAME , DHTPeerNames ) ;
results . put ( DHT_SENT_PEERS_HASH , DHTPeerHashs ) ;
2008-08-06 21:43:12 +02:00
results . put ( DHT_SELECTED , Integer . valueOf ( DHTSelectionTargetCount ) ) ;
results . put ( DHT_WORDS_SELECTED , Integer . valueOf ( DHTSelectionWordsCount ) ) ;
results . put ( DHT_WORDS_SELECTED_TIME , Integer . valueOf ( DHTSelectionWordsTimeCount ) ) ;
results . put ( DHT_DISTANCE_MIN , Double . valueOf ( minDHTDist ) ) ;
results . put ( DHT_DISTANCE_MAX , Double . valueOf ( maxDHTDist ) ) ;
results . put ( DHT_DISTANCE_AVERAGE , Double . valueOf ( avgDHTDist / DHTSelectionTargetCount ) ) ;
results . put ( PEERS_BUSY , Integer . valueOf ( busyPeerCount ) ) ;
results . put ( PEERS_TOO_LESS , Integer . valueOf ( notEnoughDHTPeers ) ) ;
results . put ( DHT_SENT_FAILED , Integer . valueOf ( failedIndexDistributionCount ) ) ;
results . put ( ERROR_CHILD_TWICE_LEFT , Integer . valueOf ( leftChildTwiceCount ) ) ;
results . put ( ERROR_CHILD_TWICE_RIGHT , Integer . valueOf ( rightChildTwiceCount ) ) ;
results . put ( RANKING_DIST , Integer . valueOf ( rankingDistributionCount ) ) ;
results . put ( RANKING_DIST_TIME , Integer . valueOf ( rankingDistributionTime ) ) ;
results . put ( RANKING_DIST_FAILED , Integer . valueOf ( rankingDistributionFailCount ) ) ;
results . put ( ERROR_MALFORMED_URL , Integer . valueOf ( malformedURLCount ) ) ;
results . put ( INDEXED_SITES , Integer . valueOf ( indexedSites ) ) ;
results . put ( INDEXED_WORDS , Integer . valueOf ( indexedWordSum ) ) ;
results . put ( INDEXED_SITES_SIZE , Integer . valueOf ( indexedSiteSizeSum ) ) ;
results . put ( INDEXED_ANCHORS , Integer . valueOf ( indexedAnchorsCount ) ) ;
2008-06-24 09:01:04 +02:00
// results.put(INDEXED_STACK_TIME , new Integer(indexedStackingTime));
// results.put(INDEXED_PARSE_TIME , new Integer(indexedParsingTime));
// results.put(INDEXED_INDEX_TIME , new Integer(indexedIndexingTime));
// results.put(INDEXED_STORE_TIME , new Integer(indexedStorageTime));
2008-08-06 21:43:12 +02:00
results . put ( INDEXED_LINKSTORE_TIME , Integer . valueOf ( indexedLinkStorageTime ) ) ;
results . put ( INDEXED_INDEXSTORE_TIME , Integer . valueOf ( indexedIndexStorageTime ) ) ;
results . put ( TOTAL_PARSER_TIME , Long . valueOf ( totalParserTime ) ) ;
results . put ( TOTAL_PARSER_RUNS , Integer . valueOf ( totalParserRuns ) ) ;
2007-01-12 11:39:22 +01:00
return results ;
2007-01-11 10:18:10 +01:00
}
public String getParserType ( ) {
return parserType ;
}
public double getParserVersion ( ) {
return parserVersion ;
}
2006-11-17 12:49:21 +01:00
public void printResults ( ) {
if ( rankingDistributionCount = = 0 ) rankingDistributionCount = 1 ;
if ( DHTSelectionWordsTimeCount = = 0 ) DHTSelectionWordsTimeCount = 1 ;
if ( indexedSites ! = 0 ) indexedSites + + ;
2008-06-24 09:01:04 +02:00
System . out . println ( " INDEXER: Indexed " + indexedSites + " sites in " + ( indexedLinkStorageTime + indexedIndexStorageTime ) + " milliseconds. " ) ;
2006-11-17 12:49:21 +01:00
System . out . println ( " INDEXER: Indexed " + indexedWordSum + " words on " + indexedSites + " sites. (avg. words per site: " + ( indexedWordSum / indexedSites ) + " ). " ) ;
System . out . println ( " INDEXER: Total Size of indexed sites: " + indexedSiteSizeSum + " bytes (avg. size per site: " + ( indexedSiteSizeSum / indexedSites ) + " bytes). " ) ;
System . out . println ( " INDEXER: Total Number of Anchors found: " + indexedAnchorsCount + " (avg. Anchors per site: " + ( indexedAnchorsCount / indexedSites ) + " ). " ) ;
2008-06-24 09:01:04 +02:00
System . out . println ( " INDEXER: Total LinkStorageTime: " + indexedLinkStorageTime + " milliseconds (avg. StorageTime: " + ( indexedLinkStorageTime / indexedSites ) + " milliseconds). " ) ;
System . out . println ( " INDEXER: Total indexStorageTime: " + indexedIndexStorageTime + " milliseconds (avg. StorageTime: " + ( indexedIndexStorageTime / indexedSites ) + " milliseconds). " ) ;
// System.out.println("INDEXER: Total StackingTime: " + indexedStackingTime + " milliseconds (avg. StackingTime: " + (indexedStackingTime / indexedSites) + " milliseconds).");
// System.out.println("INDEXER: Total ParsingTime: " + indexedParsingTime + " milliseconds (avg. ParsingTime: " + (indexedParsingTime / indexedSites) + " milliseconds).");
// System.out.println("INDEXER: Total IndexingTime: " + indexedIndexingTime + " milliseconds (avg. IndexingTime: " + (indexedIndexingTime / indexedSites) + " milliseconds).");
// System.out.println("INDEXER: Total StorageTime: " + indexedStorageTime + " milliseconds (avg. StorageTime: " + (indexedStorageTime / indexedSites) + " milliseconds).");
2006-11-17 12:49:21 +01:00
if ( urlSum ! = 0 ) urlSum + + ;
System . out . println ( " DHT: Recieved " + urlSum + " Urls in " + urlTimeSum + " ms. Blocked " + blockedURLSum + " URLs. " ) ;
System . out . println ( " DHT: " + urlTimeSum / urlSum + " milliseconds per URL. " ) ;
if ( rwiSum ! = 0 ) rwiSum + + ;
System . out . println ( " DHT: Recieved " + rwiSum + " RWIs from " + wordsSum + " Words in " + rwiTimeSum + " ms. " + urlReqSum + " requested URLs. " ) ;
System . out . println ( " DHT: Blocked " + blockedRWISum + " RWIs before requesting URLs, because URL-Hash was blacklisted. " ) ;
System . out . println ( " DHT: " + rwiTimeSum / rwiSum + " milliseconds per RWI. " ) ;
System . out . println ( " DHT: Rejected " + RWIRejectCount + " Indextransfers from " + RWIRejectPeerNames . size ( ) + " PeerNames with " + RWIRejectPeerHashs . size ( ) + " PeerHashs. " ) ;
2008-08-06 21:43:12 +02:00
System . out . println ( " DHT: " + DHTSendTraffic / ( 1024 * 1024l ) + " MegaBytes ( " + DHTSendTraffic + " Bytes) of DHT-Transfertraffic. " ) ;
2006-11-17 12:49:21 +01:00
System . out . println ( " DHT: Sended " + DHTSendURLs + " URLs via DHT. " ) ;
System . out . println ( " DHT: DHT Transfers send to " + DHTPeerNames . size ( ) + " Peernames with " + DHTPeerHashs . size ( ) + " Peerhashs. " ) ;
System . out . println ( " DHT: Totally selected " + DHTSelectionWordsCount + " words in " + DHTSelectionWordsTimeCount + " seconds ( " + ( float ) DHTSelectionWordsCount / DHTSelectionWordsTimeCount + " words/s) " ) ;
2008-06-06 18:01:27 +02:00
System . out . println ( " DHT: Selected " + DHTSelectionTargetCount + " possible DHT Targets (min. Distance: " + minDHTDist + " max. Distance: " + maxDHTDist + " avg. Distance: " + ( avgDHTDist / DHTSelectionTargetCount ) ) ;
2006-11-17 12:49:21 +01:00
System . out . println ( " DHT: " + busyPeerCount + " times a targetpeer was too busy to accept a transfer. " ) ;
System . out . println ( " DHT: " + notEnoughDHTPeers + " times there were not enought targetpeers for the selected DHTChunk " ) ;
System . out . println ( " DHT: IndexDistribution failed " + failedIndexDistributionCount + " times. " ) ;
System . out . println ( " RANKING: Transmitted " + rankingDistributionCount + " Rankingfiles in " + rankingDistributionTime + " seconds ( " + rankingDistributionTime / rankingDistributionCount + " seconds/file) " ) ;
System . out . println ( " RANKING: RankingDistribution failed " + rankingDistributionFailCount + " times. " ) ;
if ( leftChildTwiceCount ! = 0 )
System . out . println ( " ERRORS: tried " + leftChildTwiceCount + " times to create leftchild node twice in db " ) ;
if ( rightChildTwiceCount ! = 0 )
System . out . println ( " ERRORS: tried " + rightChildTwiceCount + " times to create rightchild node twice in db " ) ;
if ( malformedURLCount ! = 0 )
System . out . println ( " ERRORS: " + malformedURLCount + " MalformedURLExceptions accord. " ) ;
}
}