yacy_search_server/htroot/yacy/transferRWI.java
orbiter 536e77e8b7 modifications towards a single database operation to read/write http header and cached file at once:
- removed distinction between header file types for http and ftp; ftp is simulated by using http properties
- removed all old resourceInfo classes that handled this distinction
- introduced a new distinction between http request and http response objects
- unified new response objects with two other object types that had been introduced elsewhere
- changed all servlet call methods to use the new http request header object type
- divided static object keys for http header properties into request and response types
- refactoring here and there (a large number of type changes and many methods merged/moved)


git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@5079 6c8d7289-2bf4-0310-a012-ef5d649a1542
2008-08-25 18:11:47 +00:00


// transferRWI.java
// -----------------------
// part of the AnomicHTTPD caching proxy
// (C) by Michael Peter Christen; mc@yacy.net
// first published on http://www.anomic.de
// Frankfurt, Germany, 2004, 2005
//
// $LastChangedDate$
// $LastChangedRevision$
// $LastChangedBy$
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
// You must compile this file with
// javac -classpath .:../classes transferRWI.java

import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import de.anomic.http.httpRequestHeader;
import de.anomic.index.indexRWIRowEntry;
import de.anomic.index.indexReferenceBlacklist;
import de.anomic.plasma.plasmaSwitchboard;
import de.anomic.server.serverCore;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
import de.anomic.tools.nxTools;
import de.anomic.xml.RSSFeed;
import de.anomic.xml.RSSMessage;
import de.anomic.yacy.yacyCore;
import de.anomic.yacy.yacyDHTAction;
import de.anomic.yacy.yacyNetwork;
import de.anomic.yacy.yacySeed;

public final class transferRWI {
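// This servlet is the receiving side of a DHT index transfer: a remote peer
// posts reverse word index (RWI) entries which are parsed and added to the
// local index. The response carries a result code, a suggested pause time and
// the list of URL hashes that are still unknown to this peer.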
public static serverObjects respond(final httpRequestHeader header, final serverObjects post, final serverSwitch<?> env) throws InterruptedException {
// return variable that accumulates replacements
final plasmaSwitchboard sb = (plasmaSwitchboard) env;
final serverObjects prop = new serverObjects();
if ((post == null) || (env == null)) return prop;
if (!yacyNetwork.authentifyRequest(post, env)) return prop;
if (!post.containsKey("wordc")) return prop;
if (!post.containsKey("entryc")) return prop;
// request values
final String iam = post.get("iam", ""); // seed hash of requester
final String youare = post.get("youare", ""); // seed hash of the target peer, needed for network stability
// final String key = (String) post.get("key", ""); // transmission key
final int wordc = post.getInt("wordc", 0); // number of different words
final int entryc = post.getInt("entryc", 0); // number of entries in indexes
byte[] indexes = post.get("indexes", "").getBytes(); // the indexes, as list of word entries
boolean granted = sb.getConfig("allowReceiveIndex", "false").equals("true");
final boolean blockBlacklist = sb.getConfig("indexReceiveBlockBlacklist", "false").equals("true");
final boolean checkLimit = sb.getConfigBool("indexDistribution.transferRWIReceiptLimitEnabled", true);
final long cachelimit = sb.getConfigLong("indexDistribution.dhtReceiptLimit", 10000);
final yacySeed otherPeer = sb.webIndex.seedDB.get(iam);
final String otherPeerName = iam + ":" + ((otherPeer == null) ? "NULL" : (otherPeer.getName() + "/" + otherPeer.getVersion()));
// response values
String result = "ok";
final StringBuffer unknownURLs = new StringBuffer();
int pause = 10000;
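// check whether this transfer can be accepted; if not, answer with an error code and a suggested pause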
if ((youare == null) || (!youare.equals(sb.webIndex.seedDB.mySeed().hash))) {
sb.getLog().logInfo("Rejecting RWIs from peer " + otherPeerName + ". Wrong target. Wanted peer=" + youare + ", iam=" + sb.webIndex.seedDB.mySeed().hash);
result = "wrong_target";
pause = 0;
} else if ((!granted) || (sb.isRobinsonMode())) {
// we don't want to receive indexes
sb.getLog().logInfo("Rejecting RWIs from peer " + otherPeerName + ". Not granted.");
result = "not_granted";
pause = 0;
} else if (checkLimit && sb.webIndex.dhtInCacheSize() > cachelimit) {
// we are too busy to receive indexes
sb.getLog().logInfo("Rejecting RWIs from peer " + otherPeerName + ". We are too busy (buffersize=" + sb.webIndex.dhtInCacheSize() + ").");
granted = false; // don't accept more words if there are too many words to flush
result = "busy";
pause = 60000;
} /* else if ((checkLimit && sb.wordIndex.dhtOutCacheSize() > sb.getConfigLong(plasmaSwitchboard.WORDCACHE_MAX_COUNT, 20000)) || ((sb.wordIndex.busyCacheFlush) && (!shortCacheFlush))) {
// we are too busy flushing the ramCache to receive indexes
sb.getLog().logInfo("Rejecting RWIs from peer " + otherPeerName + ". We are too busy (wordcachesize=" + sb.wordIndex.dhtOutCacheSize() + ").");
granted = false; // don't accept more words if there are too many words to flush
result = "busy";
pause = 300000;
} */ else {
// we want and can receive indexes
// log the value status (added to track down an outOfMemory error)
sb.getLog().logFine("Processing " + indexes.length + " bytes / " + wordc + " words / " + entryc + " entries from " + otherPeerName);
final long startProcess = System.currentTimeMillis();
// decode the request: split the posted bytes into one string per RWI entry
final List<String> v = nxTools.strings(indexes, null);
// free memory
indexes = null;
// the value-vector should now have the same length as entryc
if (v.size() != entryc) sb.getLog().logSevere("ERROR WITH ENTRY COUNTER: v=" + v.size() + ", entryc=" + entryc);
// now parse the Strings in the value-vector and write index entries
String estring;
int p;
String wordHash;
String urlHash;
indexRWIRowEntry iEntry;
final HashSet<String> unknownURL = new HashSet<String>();
final HashSet<String> knownURL = new HashSet<String>();
final String[] wordhashes = new String[v.size()];
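// counters: received = accepted entries, blocked = rejected (malformed or blacklisted) entries,
// receivedURL = entries whose referenced URL was checked against the local URL database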
int received = 0;
int blocked = 0;
int receivedURL = 0;
final Iterator<String> i = v.iterator();
while (i.hasNext()) {
serverCore.checkInterruption();
estring = i.next();
// check if RWI entry is well-formed
p = estring.indexOf("{");
if ((p < 0) || (estring.indexOf("x=") < 0)) {
blocked++;
continue;
}
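// each entry has the form <wordHash>{<properties>}: the prefix up to '{' is the word hash,
// the remainder is parsed into an index row entry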
wordHash = estring.substring(0, p);
wordhashes[received] = wordHash;
iEntry = new indexRWIRowEntry(estring.substring(p));
urlHash = iEntry.urlHash();
// block blacklisted entries
if ((blockBlacklist) && (plasmaSwitchboard.urlBlacklist.hashInBlacklistedCache(indexReferenceBlacklist.BLACKLIST_DHT, urlHash))) {
yacyCore.log.logFine("transferRWI: blocked blacklisted URLHash '" + urlHash + "' from peer " + otherPeerName);
blocked++;
continue;
}
// learn entry
sb.webIndex.addEntry(wordHash, iEntry, System.currentTimeMillis(), true);
serverCore.checkInterruption();
// check if we need to ask for the corresponding URL
if (!(knownURL.contains(urlHash)||unknownURL.contains(urlHash))) try {
if (sb.webIndex.existsURL(urlHash)) {
knownURL.add(urlHash);
} else {
unknownURL.add(urlHash);
}
receivedURL++;
} catch (final Exception ex) {
sb.getLog().logWarning(
"transferRWI: DB-Error while trying to determine if URL with hash '" +
urlHash + "' is known.", ex);
}
received++;
}
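// count the received entries in the statistics of the own seed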
sb.webIndex.seedDB.mySeed().incRI(received);
// finally compose the unknownURL hash list
final Iterator<String> it = unknownURL.iterator();
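// reserve 13 characters per unknown URL: a separating comma plus the 12-character URL hash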
unknownURLs.ensureCapacity(unknownURL.size()*13);
while (it.hasNext()) {
unknownURLs.append(",").append(it.next());
}
if (unknownURLs.length() > 0) { unknownURLs.delete(0, 1); }
if ((wordhashes.length == 0) || (received == 0)) {
sb.getLog().logInfo("Received 0 RWIs from " + otherPeerName + ", processed in " + (System.currentTimeMillis() - startProcess) + " milliseconds, requesting " + unknownURL.size() + " URLs, blocked " + blocked + " RWIs");
} else {
final double avdist = (yacyDHTAction.dhtDistance(sb.webIndex.seedDB.mySeed().hash, wordhashes[0]) + yacyDHTAction.dhtDistance(sb.webIndex.seedDB.mySeed().hash, wordhashes[received - 1])) / 2.0;
sb.getLog().logInfo("Received " + received + " Entries " + wordc + " Words [" + wordhashes[0] + " .. " + wordhashes[received - 1] + "]/" + avdist + " from " + otherPeerName + ", processed in " + (System.currentTimeMillis() - startProcess) + " milliseconds, requesting " + unknownURL.size() + "/" + receivedURL + " URLs, blocked " + blocked + " RWIs");
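// additionally publish the event in the local INDEXRECEIVE RSS feed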
RSSFeed.channels(RSSFeed.INDEXRECEIVE).addMessage(new RSSMessage("Received " + received + " RWIs [" + wordhashes[0] + " .. " + wordhashes[received - 1] + "]/" + avdist + " from " + otherPeerName + ", requesting " + unknownURL.size() + " URLs, blocked " + blocked, "", ""));
}
result = "ok";
if (checkLimit) {
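// ask the sender to pause before the next transfer; the pause grows with the number of entries waiting in the DHT-in cache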
pause = (sb.webIndex.dhtInCacheSize() < 500) ? 0 : sb.webIndex.dhtInCacheSize(); // estimation of necessary pause time
}
}
prop.put("unknownURL", unknownURLs.toString());
prop.put("result", result);
prop.put("pause", pause);
// return rewrite properties
return prop;
}
}