some last-minute performance hacks

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@8101 6c8d7289-2bf4-0310-a012-ef5d649a1542
orbiter 2011-11-25 11:23:52 +00:00
parent dd1482aaf5
commit 5a55397f99
77 changed files with 1526 additions and 1542 deletions
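
Nearly all of the changes below apply the same handful of micro-optimizations: single-character searches move from the String overload indexOf("x") to the char overload indexOf('x', 0) with an explicit start index, local variables and caught exceptions are declared final, imports are regrouped, plus a number of whitespace-only edits. Below is a minimal, self-contained sketch of the indexOf pattern, mirroring the host:port parsing found in several of the changed servlets; the class name and input value are hypothetical and not taken from the commit.

public class IndexOfHackSketch {
    public static void main(final String[] args) {
        final String hostSocket = "192.168.1.10:8090";       // hypothetical input, not from the diff
        // before: hostSocket.indexOf(":")    -- String overload, matches a one-character pattern
        // after:  hostSocket.indexOf(':', 0) -- char overload with explicit fromIndex, as used throughout this commit
        final int pos = hostSocket.indexOf(':', 0);
        final String host = (pos == -1) ? hostSocket : hostSocket.substring(0, pos);
        final int port = (pos == -1) ? 80 : Integer.parseInt(hostSocket.substring(pos + 1));
        System.out.println(host + " / " + port);              // prints 192.168.1.10 / 8090
    }
}

Whether the char overload is measurably faster depends on the JVM and the call site; the diff applies it uniformly across the servlets rather than at isolated hot spots, in keeping with the "last-minute performance hacks" description above.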

debian/copyright vendored (4 changed lines)
View File

@ -3,9 +3,9 @@ Wed, 28 May 2008 19:15:45 +0200.
It was downloaded from <http://yacy.net>
Upstream Author: Michael Peter Christen <mc@anomic.de>
Upstream Author: Michael Peter Christen <mc@yacy.net>
Copyright: (C) by Michael Peter Christen; mc@anomic.de
Copyright: (C) by Michael Peter Christen; mc@yacy.net
License:
All parts of the software known as YaCy are covered under the

View File

@ -1,4 +1,4 @@
// BlacklistCleaner_p.java
// BlacklistCleaner_p.java
// -----------------------
// part of YaCy
// (C) by Michael Peter Christen; mc@yacy.net
@ -40,14 +40,10 @@ import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import de.anomic.data.ListManager;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
import java.util.Set;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.util.FileUtils;
@ -55,23 +51,26 @@ import net.yacy.repository.Blacklist;
import net.yacy.repository.Blacklist.BlacklistError;
import net.yacy.search.Switchboard;
import net.yacy.search.query.SearchEventCache;
import de.anomic.data.ListManager;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
public class BlacklistCleaner_p {
private static final String RESULTS = "results_";
private static final String DISABLED = "disabled_";
private static final String BLACKLISTS = "blacklists_";
private static final String ENTRIES = "entries_";
private final static String BLACKLIST_FILENAME_FILTER = "^.*\\.black$";
public static final Class<?>[] supportedBLEngines = {
Blacklist.class
};
public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
final serverObjects prop = new serverObjects();
// initialize the list manager
ListManager.switchboard = (Switchboard) env;
ListManager.listsPath = new File(env.getDataPath(), env.getConfig("listManager.listsPath", "DATA/LISTS"));
@ -79,7 +78,7 @@ public class BlacklistCleaner_p {
// get the list of supported blacklist types
final String supportedBlacklistTypesStr = Blacklist.BLACKLIST_TYPES_STRING;
final String[] supportedBlacklistTypes = supportedBlacklistTypesStr.split(",");
final String[] supportedBlacklistTypes = supportedBlacklistTypesStr.split(",");
prop.put(DISABLED+"checked", "1");
@ -131,7 +130,7 @@ public class BlacklistCleaner_p {
prop.put("results", "0");
putBlacklists(prop, FileUtils.getDirListing(ListManager.listsPath, BLACKLIST_FILENAME_FILTER), blacklistToUse);
}
return prop;
}
@ -147,7 +146,7 @@ public class BlacklistCleaner_p {
for (int i=0; i < supportedBLEngines.length && !supported; i++) {
supported |= (Switchboard.urlBlacklist.getClass() == supportedBLEngines[i]);
}
if (supported) {
if (!lists.isEmpty()) {
prop.put("disabled", "0");
@ -226,7 +225,7 @@ public class BlacklistCleaner_p {
}
}
}
return r.toArray(new String[r.size()]);
}
@ -242,7 +241,7 @@ public class BlacklistCleaner_p {
private static Map<String, BlacklistError> getIllegalEntries(final String blacklistToUse, final Blacklist blEngine, final boolean allowRegex) {
final Map<String, BlacklistError> illegalEntries = new HashMap<String, BlacklistError>();
final Set<String> legalEntries = new HashSet<String>();
final List<String> list = FileUtils.getListArray(new File(ListManager.listsPath, blacklistToUse));
final Map<String, String> properties= new HashMap<String, String>();
properties.put("allowRegex", String.valueOf(allowRegex));
@ -251,7 +250,7 @@ public class BlacklistCleaner_p {
for (String element : list) {
element = element.trim();
// check for double-occurance
if (legalEntries.contains(element)) {
illegalEntries.put(element, BlacklistError.DOUBLE_OCCURANCE);
@ -279,37 +278,37 @@ public class BlacklistCleaner_p {
private static int removeEntries(final String blacklistToUse, final String[] supportedBlacklistTypes, final String[] entries) {
// load blacklist data from file
final List<String> list = FileUtils.getListArray(new File(ListManager.listsPath, blacklistToUse));
boolean listChanged = false;
// delete the old entry from file
for (final String entry : entries) {
String s = entry;
if (list != null){
// get rid of escape characters which make it impossible to
// properly use contains()
if (s.contains("\\\\")) {
s = s.replaceAll(Pattern.quote("\\\\"), Matcher.quoteReplacement("\\"));
}
if (list.contains(s)) {
listChanged = list.remove(s);
}
}
// remove the entry from the running blacklist engine
for (final String supportedBlacklistType : supportedBlacklistTypes) {
if (ListManager.listSetContains(supportedBlacklistType + ".BlackLists", blacklistToUse)) {
final String host = (s.indexOf('/') == -1) ? s : s.substring(0, s.indexOf('/'));
final String path = (s.indexOf('/') == -1) ? ".*" : s.substring(s.indexOf('/') + 1);
final String host = (s.indexOf('/',0) == -1) ? s : s.substring(0, s.indexOf('/',0));
final String path = (s.indexOf('/',0) == -1) ? ".*" : s.substring(s.indexOf('/',0) + 1);
try {
Switchboard.urlBlacklist.remove(supportedBlacklistType, host, path);
} catch (final RuntimeException e) {
Log.logSevere("BLACKLIST-CLEANER", e.getMessage() + ": " + host + "/" + path);
}
}
}
}
SearchEventCache.cleanupEvents(true);
}
@ -338,7 +337,7 @@ public class BlacklistCleaner_p {
pw = new PrintWriter(new FileWriter(new File(ListManager.listsPath, blacklistToUse), true));
String host, path;
for (final String n : newEntry) {
int pos = n.indexOf('/');
final int pos = n.indexOf('/',0);
if (pos < 0) {
host = n;
path = ".*";

View File

@ -1,4 +1,4 @@
// Blacklist_p.java
// Blacklist_p.java
// -----------------------
// part of YaCy
// (C) by Michael Peter Christen; mc@yacy.net
@ -45,9 +45,8 @@ import net.yacy.kelondro.util.FileUtils;
import net.yacy.repository.Blacklist;
import net.yacy.search.Switchboard;
import net.yacy.search.query.SearchEventCache;
import de.anomic.data.WorkTables;
import de.anomic.data.ListManager;
import de.anomic.data.WorkTables;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
@ -57,30 +56,30 @@ public class Blacklist_p {
private final static String BLACKLIST = "blackLists_";
private final static String BLACKLIST_MOVE = "blackListsMove_";
private final static String BLACKLIST_SHARED = "BlackLists.Shared";
public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
// initialize the list manager
ListManager.switchboard = (Switchboard) env;
ListManager.listsPath = new File(ListManager.switchboard.getDataPath(),ListManager.switchboard.getConfig("listManager.listsPath", "DATA/LISTS"));
// get the list of supported blacklist types
final String supportedBlacklistTypesStr = Blacklist.BLACKLIST_TYPES_STRING;
final String[] supportedBlacklistTypes = supportedBlacklistTypesStr.split(",");
// load all blacklist files located in the directory
List<String> dirlist = FileUtils.getDirListing(ListManager.listsPath, Blacklist.BLACKLIST_FILENAME_FILTER);
String blacklistToUse = null;
final serverObjects prop = new serverObjects();
prop.putHTML("blacklistEngine", Switchboard.urlBlacklist.getEngineInfo());
// do all post operations
if (post != null) {
final String action = post.get("action", "");
if(post.containsKey("testList")) {
prop.put("testlist", "1");
String urlstring = post.get("testurl", "");
@ -131,13 +130,13 @@ public class Blacklist_p {
/* ===========================================================
* Creation of a new blacklist
* =========================================================== */
blacklistToUse = post.get("newListName", "").trim();
if (blacklistToUse.length() == 0) {
prop.put("LOCATION","");
return prop;
}
}
// Check if blacklist name only consists of "legal" characters.
// This is mainly done to prevent files from being written to other directories
// than the LISTS directory.
@ -146,7 +145,7 @@ public class Blacklist_p {
prop.putHTML("error_name", blacklistToUse);
blacklistToUse = null;
} else {
if (!blacklistToUse.endsWith(".black")) {
blacklistToUse += ".black";
}
@ -162,29 +161,29 @@ public class Blacklist_p {
// activate it for all known blacklist types
for (final String supportedBlacklistType : supportedBlacklistTypes) {
ListManager.updateListSet(supportedBlacklistType + ".BlackLists", blacklistToUse);
}
}
} catch (final IOException e) {/* */}
} else {
prop.put("error", 2);
prop.putHTML("error_name", blacklistToUse);
blacklistToUse = null;
}
// reload Blacklists
dirlist = FileUtils.getDirListing(ListManager.listsPath, Blacklist.BLACKLIST_FILENAME_FILTER);
}
} else if (post.containsKey("deleteList")) {
/* ===========================================================
* Delete a blacklist
* =========================================================== */
* =========================================================== */
blacklistToUse = post.get("selectedListName");
if (blacklistToUse == null || blacklistToUse.length() == 0) {
prop.put("LOCATION","");
return prop;
}
}
final File blackListFile = new File(ListManager.listsPath, blacklistToUse);
if(!blackListFile.delete()) {
Log.logWarning("Blacklist", "file "+ blackListFile +" could not be deleted!");
@ -192,12 +191,12 @@ public class Blacklist_p {
for (final String supportedBlacklistType : supportedBlacklistTypes) {
ListManager.removeFromListSet(supportedBlacklistType + ".BlackLists",blacklistToUse);
}
}
// remove it from the shared list
ListManager.removeFromListSet(BLACKLIST_SHARED, blacklistToUse);
blacklistToUse = null;
// reload Blacklists
dirlist = FileUtils.getDirListing(ListManager.listsPath, Blacklist.BLACKLIST_FILENAME_FILTER);
@ -205,36 +204,36 @@ public class Blacklist_p {
/* ===========================================================
* Activate/Deactivate a blacklist
* =========================================================== */
* =========================================================== */
blacklistToUse = post.get("selectedListName", "").trim();
if (blacklistToUse == null || blacklistToUse.length() == 0) {
prop.put("LOCATION", "");
return prop;
}
}
for (final String supportedBlacklistType : supportedBlacklistTypes) {
if (post.containsKey("activateList4" + supportedBlacklistType)) {
ListManager.updateListSet(supportedBlacklistType + ".BlackLists",blacklistToUse);
} else {
ListManager.removeFromListSet(supportedBlacklistType + ".BlackLists",blacklistToUse);
}
}
}
}
ListManager.reloadBlacklists();
} else if (post.containsKey("shareList")) {
/* ===========================================================
* Share a blacklist
* =========================================================== */
* =========================================================== */
blacklistToUse = post.get("selectedListName", "").trim();
if (blacklistToUse == null || blacklistToUse.length() == 0) {
prop.put("LOCATION", "");
return prop;
}
}
if (ListManager.listSetContains(BLACKLIST_SHARED, blacklistToUse)) {
// Remove from shared BlackLists
ListManager.removeFromListSet(BLACKLIST_SHARED, blacklistToUse);
@ -242,15 +241,15 @@ public class Blacklist_p {
ListManager.updateListSet(BLACKLIST_SHARED, blacklistToUse);
}
} else if ("deleteBlacklistEntry".equals(action)) {
/* ===========================================================
* Delete an entry from a blacklist
* =========================================================== */
blacklistToUse = post.get("currentBlacklist", "").trim();
final String[] selectedBlacklistEntries = post.getAll("selectedEntry.*");
if (selectedBlacklistEntries.length > 0) {
String temp = null;
for (final String selectedBlacklistEntry : selectedBlacklistEntries) {
@ -263,35 +262,35 @@ public class Blacklist_p {
ListManager.reloadBlacklists();
} else if (post.containsKey("addBlacklistEntry")) {
/* ===========================================================
* Add new entry to blacklist
* =========================================================== */
blacklistToUse = post.get("currentBlacklist", "").trim();
final String blentry = post.get("newEntry", "").trim();
// store this call as api call
ListManager.switchboard.tables.recordAPICall(post, "Blacklist_p.html", WorkTables.TABLE_API_TYPE_CONFIGURATION, "add to blacklist: " + blentry);
final String temp = addBlacklistEntry(blacklistToUse, blentry, header, supportedBlacklistTypes);
if (temp != null) {
prop.put("LOCATION", temp);
return prop;
}
ListManager.reloadBlacklists();
} else if ("moveBlacklistEntry".equals(action)) {
/* ===========================================================
* Move an entry from one blacklist to another
* =========================================================== */
blacklistToUse = post.get("currentBlacklist", "").trim();
final String targetBlacklist = post.get("targetBlacklist");
final String[] selectedBlacklistEntries = post.getAll("selectedEntry.*");
if (selectedBlacklistEntries != null &&
selectedBlacklistEntries.length > 0 &&
targetBlacklist != null &&
@ -307,32 +306,32 @@ public class Blacklist_p {
if ((temp = deleteBlacklistEntry(blacklistToUse, selectedBlacklistEntry, header, supportedBlacklistTypes)) != null) {
prop.put("LOCATION", temp);
return prop;
}
}
}
ListManager.reloadBlacklists();
} else if ("editBlacklistEntry".equals(action)) {
/* ===========================================================
* Edit entry of a blacklist
* =========================================================== */
blacklistToUse = post.get("currentBlacklist", "").trim();
final String[] editedBlacklistEntries = post.getAll("editedBlacklistEntry.*");
// if edited entry has been posted, save changes
if (editedBlacklistEntries.length > 0) {
final String[] selectedBlacklistEntries = post.getAll("selectedBlacklistEntry.*");
if (selectedBlacklistEntries.length != editedBlacklistEntries.length) {
prop.put("LOCATION", "");
return prop;
}
String temp = null;
for (int i = 0; i < selectedBlacklistEntries.length; i++) {
@ -352,7 +351,7 @@ public class Blacklist_p {
}
ListManager.reloadBlacklists();
prop.putHTML(DISABLED + EDIT + "currentBlacklist", blacklistToUse);
// else return entry to be edited
} else {
final String[] selectedEntries = post.getAll("selectedEntry.*");
@ -362,7 +361,7 @@ public class Blacklist_p {
prop.put(DISABLED + EDIT + "editList_" + i + "_count", i);
}
prop.putHTML(DISABLED + EDIT + "currentBlacklist", blacklistToUse);
prop.put(DISABLED + "edit", "1");
prop.put(DISABLED + "edit", "1");
prop.put(DISABLED + EDIT + "editList", selectedEntries.length);
}
}
@ -381,11 +380,11 @@ public class Blacklist_p {
if (blacklistToUse != null) {
int entryCount = 0;
final List<String> list = FileUtils.getListArray(new File(ListManager.listsPath, blacklistToUse));
// sort them
final String[] sortedlist = new String[list.size()];
Arrays.sort(list.toArray(sortedlist));
// display them
boolean dark = true;
int offset = 0;
@ -405,7 +404,7 @@ public class Blacklist_p {
for (int j = offset; j < to; ++j){
final String nextEntry = sortedlist[j];
if (nextEntry.length() == 0) continue;
if (nextEntry.charAt(0) == '#') continue;
prop.put(DISABLED + EDIT + "Itemlist_" + entryCount + "_dark", dark ? "1" : "0");
@ -449,7 +448,7 @@ public class Blacklist_p {
prop.put(DISABLED + EDIT + "subListOffset", entryCount);
// create selection of list size
int[] sizes = {10,25,50,100,250,-1};
final int[] sizes = {10,25,50,100,250,-1};
for (int i = 0; i < sizes.length; i++) {
prop.put(DISABLED + EDIT + "subListSize_" + i + "_value", sizes[i]);
if (sizes[i] == -1) {
@ -463,13 +462,13 @@ public class Blacklist_p {
}
prop.put(DISABLED + EDIT + "subListSize", sizes.length);
}
// List BlackLists
int blacklistCount = 0;
int blacklistMoveCount = 0;
if (dirlist != null) {
for (String element : dirlist) {
for (final String element : dirlist) {
prop.putXML(DISABLED + BLACKLIST + blacklistCount + "_name", element);
prop.put(DISABLED + BLACKLIST + blacklistCount + "_selected", "0");
@ -487,7 +486,7 @@ public class Blacklist_p {
prop.putXML(DISABLED + EDIT + BLACKLIST_MOVE + blacklistMoveCount + "_name", element);
blacklistMoveCount++;
}
if (ListManager.listSetContains(BLACKLIST_SHARED, element)) {
prop.put(DISABLED + BLACKLIST + blacklistCount + "_shared", "1");
} else {
@ -495,27 +494,27 @@ public class Blacklist_p {
}
int activeCount = 0;
for (int blTypes=0; blTypes < supportedBlacklistTypes.length; blTypes++) {
if (ListManager.listSetContains(supportedBlacklistTypes[blTypes] + ".BlackLists", element)) {
prop.putHTML(DISABLED + BLACKLIST + blacklistCount + "_active_" + activeCount + "_blTypeName", supportedBlacklistTypes[blTypes]);
for (final String supportedBlacklistType : supportedBlacklistTypes) {
if (ListManager.listSetContains(supportedBlacklistType + ".BlackLists", element)) {
prop.putHTML(DISABLED + BLACKLIST + blacklistCount + "_active_" + activeCount + "_blTypeName", supportedBlacklistType);
activeCount++;
}
}
}
}
prop.put(DISABLED + BLACKLIST + blacklistCount + "_active", activeCount);
blacklistCount++;
}
}
prop.put(DISABLED + "blackLists", blacklistCount);
prop.put(DISABLED + EDIT + "blackListsMove", blacklistMoveCount);
prop.putXML(DISABLED + "currentBlacklist", (blacklistToUse==null) ? "" : blacklistToUse);
prop.putXML(DISABLED + EDIT + "currentBlacklist", (blacklistToUse==null) ? "" : blacklistToUse);
prop.put("disabled", (blacklistToUse == null) ? "1" : "0");
return prop;
}
/**
* This method adds a new entry to the chosen blacklist.
* @param blacklistToUse the name of the blacklist the entry is to be added to
@ -526,7 +525,7 @@ public class Blacklist_p {
*/
private static String addBlacklistEntry(
final String blacklistToUse,
String newEntry,
final String newEntry,
final RequestHeader header,
final String[] supportedBlacklistTypes) {
@ -542,7 +541,7 @@ public class Blacklist_p {
SearchEventCache.cleanupEvents(true);
return null;
}
/**
* This method deletes a blacklist entry.
@ -554,7 +553,7 @@ public class Blacklist_p {
*/
private static String deleteBlacklistEntry(
final String blacklistToUse,
String oldEntry,
final String oldEntry,
final RequestHeader header,
final String[] supportedBlacklistTypes) {
@ -580,7 +579,7 @@ public class Blacklist_p {
private static void deleteBlacklistEntry(
final File listsPath,
final String blacklistToUse,
String oldEntry,
String oldEntry,
final String[] supportedBlacklistTypes) {
// load blacklist data from file
@ -598,7 +597,7 @@ public class Blacklist_p {
}
// remove the entry from the running blacklist engine
int pos = oldEntry.indexOf('/');
int pos = oldEntry.indexOf('/',0);
if (pos < 0) {
// add default empty path pattern
pos = oldEntry.length();
@ -612,7 +611,7 @@ public class Blacklist_p {
}
/**
* This method adds a new entry to the chosen blacklist.
* @param blacklistToUse the name of the blacklist the entry is to be added to
@ -633,7 +632,7 @@ public class Blacklist_p {
newEntry = newEntry.substring(8);
}
int pos = newEntry.indexOf('/');
int pos = newEntry.indexOf('/',0);
if (pos < 0) {
// add default empty path pattern
pos = newEntry.length();

View File

@ -141,7 +141,7 @@ public class ConfigBasic {
String host = null;
if (header.containsKey(HeaderFramework.HOST)) {
host = header.get(HeaderFramework.HOST);
final int idx = host.indexOf(':');
final int idx = host.indexOf(':',0);
if (idx != -1) host = host.substring(0,idx);
} else {
host = Domains.myPublicLocalIP().getHostAddress();

View File

@ -10,7 +10,7 @@
// $LastChangedBy$
//
// LICENSE
//
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
@ -36,7 +36,6 @@ import net.yacy.kelondro.util.FileUtils;
import net.yacy.kelondro.util.MapTools;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
import de.anomic.data.WorkTables;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
@ -44,20 +43,20 @@ import de.anomic.server.serverSwitch;
public class ConfigNetwork_p {
public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) throws FileNotFoundException, IOException {
final Switchboard sb = (Switchboard) env;
final serverObjects prop = new serverObjects();
int commit = 0;
// load all options for network definitions
final File networkBootstrapLocationsFile = new File(new File(sb.getAppPath(), "defaults"), "yacy.networks");
final Set<String> networkBootstrapLocations = FileUtils.loadList(networkBootstrapLocationsFile);
if (post != null) {
// store this call as api call
sb.tables.recordAPICall(post, "ConfigNetwork_p.html", WorkTables.TABLE_API_TYPE_CONFIGURATION, "network settings");
if (post.containsKey("changeNetwork")) {
final String networkDefinition = post.get("networkDefinition", "defaults/yacy.network.freeworld.unit");
if (networkDefinition.equals(sb.getConfig("network.unit.definition", ""))) {
@ -69,9 +68,9 @@ public class ConfigNetwork_p {
sb.switchNetwork(networkDefinition);
}
}
if (post.containsKey("save")) {
// DHT control
boolean indexDistribute = "on".equals(post.get("indexDistribute", ""));
boolean indexReceive = "on".equals(post.get("indexReceive", ""));
@ -93,25 +92,25 @@ public class ConfigNetwork_p {
commit = 1;
}
}
if (indexDistribute) {
sb.setConfig(SwitchboardConstants.INDEX_DIST_ALLOW, true);
} else {
sb.setConfig(SwitchboardConstants.INDEX_DIST_ALLOW, false);
}
if ("on".equals(post.get("indexDistributeWhileCrawling",""))) {
sb.setConfig(SwitchboardConstants.INDEX_DIST_ALLOW_WHILE_CRAWLING, true);
} else {
sb.setConfig(SwitchboardConstants.INDEX_DIST_ALLOW_WHILE_CRAWLING, false);
}
if ("on".equals(post.get("indexDistributeWhileIndexing",""))) {
sb.setConfig(SwitchboardConstants.INDEX_DIST_ALLOW_WHILE_INDEXING, true);
} else {
sb.setConfig(SwitchboardConstants.INDEX_DIST_ALLOW_WHILE_INDEXING, false);
}
if (indexReceive) {
sb.setConfig(SwitchboardConstants.INDEX_RECEIVE_ALLOW, true);
sb.peers.mySeed().setFlagAcceptRemoteIndex(true);
@ -120,35 +119,35 @@ public class ConfigNetwork_p {
sb.peers.mySeed().setFlagAcceptRemoteIndex(false);
sb.setConfig(SwitchboardConstants.INDEX_RECEIVE_AUTODISABLED, false);
}
if ("on".equals(post.get("indexReceiveBlockBlacklist", ""))) {
sb.setConfig("indexReceiveBlockBlacklist", true);
} else {
sb.setConfig("indexReceiveBlockBlacklist", false);
}
if (post.containsKey("peertags")) {
sb.peers.mySeed().setPeerTags(MapTools.string2set(normalizedList(post.get("peertags")), ","));
}
sb.setConfig("cluster.mode", post.get("cluster.mode", "publicpeer"));
sb.setConfig("cluster.peers.ipport", checkIPPortList(post.get("cluster.peers.ipport", "")));
sb.setConfig("cluster.peers.yacydomain", checkYaCyDomainList(post.get("cluster.peers.yacydomain", "")));
// update the cluster hash set
sb.clusterhashes = sb.peers.clusterHashes(sb.getConfig("cluster.peers.yacydomain", ""));
}
}
}
// write answer code
prop.put("commit", commit);
// write remote crawl request settings
prop.put("crawlResponse", sb.getConfigBool("crawlResponse", false) ? "1" : "0");
final long RTCbusySleep = Math.max(1, env.getConfigInt(SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL_BUSYSLEEP, 100));
final int RTCppm = (int) (60000L / RTCbusySleep);
prop.put("acceptCrawlLimit", RTCppm);
final boolean indexDistribute = sb.getConfigBool(SwitchboardConstants.INDEX_DIST_ALLOW, true);
final boolean indexReceive = sb.getConfigBool(SwitchboardConstants.INDEX_RECEIVE_ALLOW, true);
prop.put("indexDistributeChecked", (indexDistribute) ? "1" : "0");
@ -164,7 +163,7 @@ public class ConfigNetwork_p {
// set seed information directly
sb.peers.mySeed().setFlagAcceptRemoteCrawl(sb.getConfigBool("crawlResponse", false));
sb.peers.mySeed().setFlagAcceptRemoteIndex(indexReceive);
// set p2p/robinson mode flags and values
prop.put("p2p.checked", (indexDistribute || indexReceive) ? "1" : "0");
prop.put("robinson.checked", (indexDistribute || indexReceive) ? "0" : "1");
@ -177,15 +176,15 @@ public class ConfigNetwork_p {
if (hashes.length() > 2) {
hashes = hashes.delete(0, 2);
}
prop.put("cluster.peers.yacydomain.hashes", hashes.toString());
// set p2p mode flags
prop.put("privatepeerChecked", ("privatepeer".equals(sb.getConfig("cluster.mode", ""))) ? "1" : "0");
prop.put("privateclusterChecked", ("privatecluster".equals(sb.getConfig("cluster.mode", ""))) ? "1" : "0");
prop.put("publicclusterChecked", ("publiccluster".equals(sb.getConfig("cluster.mode", ""))) ? "1" : "0");
prop.put("publicpeerChecked", ("publicpeer".equals(sb.getConfig("cluster.mode", ""))) ? "1" : "0");
// set network configuration
prop.putHTML("network.unit.definition", sb.getConfig("network.unit.definition", ""));
prop.putHTML("network.unit.name", sb.getConfig(SwitchboardConstants.NETWORK_NAME, ""));
@ -198,10 +197,10 @@ public class ConfigNetwork_p {
prop.put("networks_" + c++ + "_network", s);
}
prop.put("networks", c);
return prop;
}
private static String normalizedList(String input) {
input = input.replace(' ', ',');
input = input.replace(' ', ';');
@ -214,28 +213,28 @@ public class ConfigNetwork_p {
}
return input;
}
private static String checkYaCyDomainList(final String input) {
final String[] array = normalizedList(input).split(",");
final StringBuilder output = new StringBuilder();
for (final String element : array) {
if ((element.endsWith(".yacyh")) || (element.endsWith(".yacy")) ||
(element.indexOf(".yacyh=") > 0) || (element.indexOf(".yacy=") > 0)) {
(element.indexOf(".yacyh=",0) > 0) || (element.indexOf(".yacy=",0) > 0)) {
output.append(",").append(element);
}
}
if (output.length() == 0) {
return input;
}
return output.delete(0, 1).toString();
}
private static String checkIPPortList(final String input) {
final String[] array = normalizedList(input).split(",");
StringBuilder output = new StringBuilder();
final StringBuilder output = new StringBuilder();
for (final String element :array) {
if (element.indexOf(':') >= 9) {
if (element.indexOf(':',0) >= 9) {
output.append(",").append(element);
}
}

View File

@ -147,10 +147,10 @@ public class ConfigPortal {
prop.put("search.result.show.parser", sb.getConfigBool("search.result.show.parser", false) ? 1 : 0);
prop.put("search.result.show.pictures", sb.getConfigBool("search.result.show.pictures", false) ? 1 : 0);
prop.put("search.navigation.hosts", sb.getConfig("search.navigation", "").indexOf("hosts") >= 0 ? 1 : 0);
prop.put("search.navigation.authors", sb.getConfig("search.navigation", "").indexOf("authors") >= 0 ? 1 : 0);
prop.put("search.navigation.namespace", sb.getConfig("search.navigation", "").indexOf("namespace") >= 0 ? 1 : 0);
prop.put("search.navigation.topics", sb.getConfig("search.navigation", "").indexOf("topics") >= 0 ? 1 : 0);
prop.put("search.navigation.hosts", sb.getConfig("search.navigation", "").indexOf("hosts",0) >= 0 ? 1 : 0);
prop.put("search.navigation.authors", sb.getConfig("search.navigation", "").indexOf("authors",0) >= 0 ? 1 : 0);
prop.put("search.navigation.namespace", sb.getConfig("search.navigation", "").indexOf("namespace",0) >= 0 ? 1 : 0);
prop.put("search.navigation.topics", sb.getConfig("search.navigation", "").indexOf("topics",0) >= 0 ? 1 : 0);
prop.put("search.verify.nocache", sb.getConfig("search.verify", "").equals("nocache") ? 1 : 0);
prop.put("search.verify.iffresh", sb.getConfig("search.verify", "").equals("iffresh") ? 1 : 0);

View File

@ -7,12 +7,12 @@
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program in the file lgpl21.txt
* If not, see <http://www.gnu.org/licenses/>.
@ -38,33 +38,32 @@ import net.yacy.kelondro.order.Base64Order;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
import net.yacy.search.query.SearchEventCache;
import de.anomic.data.WorkTables;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
public class CrawlStartScanner_p {
private final static int CONCURRENT_RUNNER = 100;
public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
final serverObjects prop = new serverObjects();
final Switchboard sb = (Switchboard)env;
// clean up all search events
SearchEventCache.cleanupEvents(true);
prop.put("noserverdetected", 0);
prop.put("hosts", "");
prop.put("intranet.checked", sb.isIntranetMode() ? 1 : 0);
int timeout = sb.isIntranetMode() ? 200 : 3000;
timeout = post == null ? timeout : post.getInt("timeout", timeout);
// make a scanhosts entry
String hosts = post == null ? "" : post.get("scanhosts", "");
Set<InetAddress> ips = Domains.myIntranetIPs();
final Set<InetAddress> ips = Domains.myIntranetIPs();
prop.put("intranethosts", ips.toString());
prop.put("intranetHint", sb.isIntranetMode() ? 0 : 1);
if (hosts.length() == 0) {
@ -79,7 +78,7 @@ public class CrawlStartScanner_p {
if (ip != null) hosts = ip.getHostAddress();
}
prop.put("scanhosts", hosts);
// parse post requests
if (post != null) {
int repeat_time = 0;
@ -94,9 +93,9 @@ public class CrawlStartScanner_p {
if (repeat_unit.equals("selhours")) validTime = repeat_time * 60 * 60 * 1000;
if (repeat_unit.equals("seldays")) validTime = repeat_time * 24 * 60 * 60 * 1000;
}
boolean bigrange = post.getBoolean("bigrange", false);
final boolean bigrange = post.getBoolean("bigrange", false);
// case: an IP range was given; scan the range for services and display result
if (post.containsKey("scan") && "hosts".equals(post.get("source", ""))) {
final Set<InetAddress> ia = new HashSet<InetAddress>();
@ -105,7 +104,7 @@ public class CrawlStartScanner_p {
if (host.startsWith("https://")) host = host.substring(8);
if (host.startsWith("ftp://")) host = host.substring(6);
if (host.startsWith("smb://")) host = host.substring(6);
int p = host.indexOf('/');
final int p = host.indexOf('/',0);
if (p >= 0) host = host.substring(0, p);
ia.add(Domains.dnsResolve(host));
}
@ -122,7 +121,7 @@ public class CrawlStartScanner_p {
Scanner.scancacheReplace(scanner, validTime);
}
}
if (post.containsKey("scan") && "intranet".equals(post.get("source", ""))) {
final Scanner scanner = new Scanner(Domains.myIntranetIPs(), CONCURRENT_RUNNER, timeout);
if ("on".equals(post.get("scanftp", ""))) scanner.addFTP(bigrange);
@ -137,27 +136,27 @@ public class CrawlStartScanner_p {
Scanner.scancacheReplace(scanner, validTime);
}
}
// check crawl request
if (post.containsKey("crawl")) {
// make a pk/url mapping
final Iterator<Map.Entry<Scanner.Service, Scanner.Access>> se = Scanner.scancacheEntries();
final Map<byte[], DigestURI> pkmap = new TreeMap<byte[], DigestURI>(Base64Order.enhancedCoder);
while (se.hasNext()) {
Scanner.Service u = se.next().getKey();
final Scanner.Service u = se.next().getKey();
DigestURI uu;
try {
uu = new DigestURI(u.url());
pkmap.put(uu.hash(), uu);
} catch (MalformedURLException e) {
} catch (final MalformedURLException e) {
Log.logException(e);
}
}
// search for crawl start requests in this mapping
for (final Map.Entry<String, String> entry: post.entrySet()) {
if (entry.getValue().startsWith("mark_")) {
byte [] pk = entry.getValue().substring(5).getBytes();
DigestURI url = pkmap.get(pk);
final byte [] pk = entry.getValue().substring(5).getBytes();
final DigestURI url = pkmap.get(pk);
if (url != null) {
String path = "/Crawler_p.html?createBookmark=off&xsstopw=off&crawlingDomMaxPages=10000&intention=&range=domain&indexMedia=on&recrawl=nodoubles&xdstopw=off&storeHTCache=on&sitemapURL=&repeat_time=7&crawlingQ=on&cachePolicy=iffresh&indexText=on&crawlingMode=url&mustnotmatch=&crawlingDomFilterDepth=1&crawlingDomFilterCheck=off&crawlingstart=Start%20New%20Crawl&xpstopw=off&repeat_unit=seldays&crawlingDepth=99";
path += "&crawlingURL=" + url.toNormalform(true, false);
@ -166,21 +165,21 @@ public class CrawlStartScanner_p {
}
}
}
// check scheduler
if ("scheduler".equals(post.get("rescan", ""))) {
// store this call as api call
if (repeat_time > 0) {
// store as scheduled api call
sb.tables.recordAPICall(post, "CrawlStartScanner_p.html", WorkTables.TABLE_API_TYPE_CRAWLER, "network scanner for hosts: " + hosts, repeat_time, repeat_unit.substring(3));
}
// execute the scan results
if (Scanner.scancacheSize() > 0) {
// make a comment cache
final Map<byte[], String> apiCommentCache = WorkTables.commentCache(sb);
String urlString;
DigestURI u;
try {
@ -196,17 +195,17 @@ public class CrawlStartScanner_p {
path += "&crawlingURL=" + urlString;
WorkTables.execAPICall("localhost", (int) sb.getConfigLong("port", 8090), sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, ""), path, u.hash());
}
} catch (MalformedURLException e) {
} catch (final MalformedURLException e) {
Log.logException(e);
}
}
} catch (ConcurrentModificationException e) {}
} catch (final ConcurrentModificationException e) {}
}
}
}
return prop;
}
}

View File

@ -131,7 +131,7 @@ public class Crawler_p {
} else {
String crawlingStart = post.get("crawlingURL","").trim(); // the crawljob start url
// add the prefix http:// if necessary
int pos = crawlingStart.indexOf("://");
int pos = crawlingStart.indexOf("://",0);
if (pos == -1) {
if (crawlingStart.startsWith("www")) crawlingStart = "http://" + crawlingStart;
if (crawlingStart.startsWith("ftp")) crawlingStart = "ftp://" + crawlingStart;

View File

@ -260,7 +260,7 @@ public class IndexControlURLs_p {
// extend export file name
String s = post.get("exportfile", "");
if (s.indexOf('.') < 0) {
if (s.indexOf('.',0) < 0) {
if (format == 0) s = s + ".txt";
if (format == 1) s = s + ".html";
if (format == 2) s = s + ".xml";

View File

@ -25,8 +25,8 @@
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Map;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.TreeSet;
@ -38,7 +38,6 @@ import net.yacy.document.importer.ResumptionToken;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import de.anomic.data.WorkTables;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
@ -52,66 +51,66 @@ public class IndexImportOAIPMH_p {
prop.put("import-one", 0);
prop.put("status", 0);
prop.put("defaulturl", "");
int jobcount = OAIPMHImporter.runningJobs.size() + OAIPMHImporter.startedJobs.size() + OAIPMHImporter.finishedJobs.size();
final int jobcount = OAIPMHImporter.runningJobs.size() + OAIPMHImporter.startedJobs.size() + OAIPMHImporter.finishedJobs.size();
prop.put("iframetype", (jobcount == 0) ? 2 : 1);
prop.put("optiongetlist", (jobcount == 0) ? 0 : 1);
if (post != null) {
if (post.containsKey("urlstartone")) {
String oaipmhurl = post.get("urlstartone");
if (oaipmhurl.indexOf("?") < 0) oaipmhurl = oaipmhurl + "?verb=ListRecords&metadataPrefix=oai_dc";
if (oaipmhurl.indexOf('?',0) < 0) oaipmhurl = oaipmhurl + "?verb=ListRecords&metadataPrefix=oai_dc";
DigestURI url = null;
try {
url = new DigestURI(oaipmhurl);
OAIPMHLoader r = new OAIPMHLoader(sb.loader, url, sb.surrogatesInPath, "oaipmh-one");
ResumptionToken rt = r.getResumptionToken();
final OAIPMHLoader r = new OAIPMHLoader(sb.loader, url, sb.surrogatesInPath, "oaipmh-one");
final ResumptionToken rt = r.getResumptionToken();
prop.put("import-one", 1);
prop.put("import-one_count", (rt == null) ? "not available" : Integer.toString(rt.getRecordCounter()));
prop.put("import-one_source", r.source());
prop.put("import-one_rt", r.getResumptionToken().toString());
// set next default url
try {
DigestURI nexturl = (rt == null) ? null : rt.resumptionURL();
final DigestURI nexturl = (rt == null) ? null : rt.resumptionURL();
if (rt != null) prop.put("defaulturl", (nexturl == null) ? "" : nexturl.toNormalform(true, false));
} catch (MalformedURLException e) {
} catch (final MalformedURLException e) {
prop.put("defaulturl", e.getMessage());
} catch (IOException e) {
} catch (final IOException e) {
// reached end of resumption
prop.put("defaulturl", e.getMessage());
}
} catch (MalformedURLException e) {
} catch (final MalformedURLException e) {
Log.logException(e);
prop.put("import-one", 2);
prop.put("import-one_error", e.getMessage());
} catch (IOException e) {
} catch (final IOException e) {
Log.logException(e);
prop.put("import-one", 2);
prop.put("import-one_error", e.getMessage());
}
}
if (post.get("urlstart", "").length() > 0) {
String oaipmhurl = post.get("urlstart", "");
final String oaipmhurl = post.get("urlstart", "");
sb.tables.recordAPICall(post, "IndexImportOAIPMH_p.html", WorkTables.TABLE_API_TYPE_CRAWLER, "OAI-PMH import for " + oaipmhurl);
DigestURI url = null;
try {
url = new DigestURI(oaipmhurl);
OAIPMHImporter job = new OAIPMHImporter(sb.loader, url);
final OAIPMHImporter job = new OAIPMHImporter(sb.loader, url);
job.start();
prop.put("status", 1);
prop.put("optiongetlist", 1);
prop.put("iframetype", 1);
} catch (MalformedURLException e) {
} catch (final MalformedURLException e) {
Log.logException(e);
prop.put("status", 2);
prop.put("status_message", e.getMessage());
}
}
if (post.get("loadrows", "").length() > 0) {
// create a time-ordered list of events to execute
Set<String> sources = new TreeSet<String>();
final Set<String> sources = new TreeSet<String>();
for (final Map.Entry<String, String> entry: post.entrySet()) {
if (entry.getValue().startsWith("mark_")) {
sources.add(entry.getValue().substring(5));
@ -120,26 +119,26 @@ public class IndexImportOAIPMH_p {
prop.put("status", 1);
prop.put("optiongetlist", 1);
prop.put("iframetype", 1);
// prepare the set for random read from it (to protect the servers at the beginning of the list)
List<String> sourceList = new ArrayList<String>(sources.size());
for (String oaipmhurl: sources) sourceList.add(oaipmhurl);
Random r = new Random(System.currentTimeMillis());
final List<String> sourceList = new ArrayList<String>(sources.size());
for (final String oaipmhurl: sources) sourceList.add(oaipmhurl);
final Random r = new Random(System.currentTimeMillis());
// start jobs for the sources
DigestURI url = null;
while (sourceList.size() > 0) {
String oaipmhurl = sourceList.remove(r.nextInt(sourceList.size()));
final String oaipmhurl = sourceList.remove(r.nextInt(sourceList.size()));
try {
url = new DigestURI(oaipmhurl);
OAIPMHImporter job = new OAIPMHImporter(sb.loader, url);
final OAIPMHImporter job = new OAIPMHImporter(sb.loader, url);
job.start();
} catch (MalformedURLException e) {
} catch (final MalformedURLException e) {
Log.logException(e);
}
}
}
if (post.containsKey("getlist")) {
prop.put("iframetype", 2);
}

View File

@ -106,7 +106,7 @@ public class PerformanceMemory_p {
while (i.hasNext()) {
filename = i.next();
mapx = Table.memoryStats(filename);
prop.put("EcoList_" + c + "_tableIndexPath", ((p = filename.indexOf("DATA")) < 0) ? filename : filename.substring(p));
prop.put("EcoList_" + c + "_tableIndexPath", ((p = filename.indexOf("DATA",0)) < 0) ? filename : filename.substring(p));
prop.putNum("EcoList_" + c + "_tableSize", mapx.get(Table.StatKeys.tableSize));
assert mapx.get(Table.StatKeys.tableKeyMem) != null : mapx;
@ -140,7 +140,7 @@ public class PerformanceMemory_p {
}
filename = oie.getKey();
cache = oie.getValue();
prop.put("indexcache_" + c + "_Name", ((p = filename.indexOf("DATA")) < 0) ? filename : filename.substring(p));
prop.put("indexcache_" + c + "_Name", ((p = filename.indexOf("DATA",0)) < 0) ? filename : filename.substring(p));
hitmem = cache.mem();
totalhitmem += hitmem;
@ -163,7 +163,7 @@ public class PerformanceMemory_p {
while (i.hasNext()) {
filename = i.next();
mapy = Cache.memoryStats(filename);
prop.put("ObjectList_" + c + "_objectCachePath", ((p = filename.indexOf("DATA")) < 0) ? filename : filename.substring(p));
prop.put("ObjectList_" + c + "_objectCachePath", ((p = filename.indexOf("DATA",0)) < 0) ? filename : filename.substring(p));
// hit cache
hitmem = Long.parseLong(mapy.get(Cache.StatKeys.objectHitMem));

View File

@ -81,7 +81,7 @@ public class QuickCrawlLink_p {
//String host = hostSocket;
int port = 80;
final int pos = hostSocket.indexOf(":");
final int pos = hostSocket.indexOf(':',0);
if (pos != -1) {
port = Integer.parseInt(hostSocket.substring(pos + 1));
//host = hostSocket.substring(0, pos);

View File

@ -111,7 +111,7 @@ public class Ranking_p {
for (final Entry<String, String> entry: map.entrySet()) {
key = entry.getKey();
description = rankingParameters.get(key.substring(prefix.length()));
p = description.indexOf(';');
p = description.indexOf(';',0);
if (p >= 0) {
name = description.substring(0, p);
info = description.substring(p + 1);

View File

@ -209,8 +209,8 @@ public class SettingsAck_p {
if (staticIP.length() > 8) { staticIP = staticIP.substring(8); } else { staticIP = ""; }
}
// TODO IPv6 support!
if (staticIP.indexOf(":") > 0) {
staticIP = staticIP.substring(0, staticIP.indexOf(":"));
if (staticIP.indexOf(':',0) > 0) {
staticIP = staticIP.substring(0, staticIP.indexOf(':',0));
}
if (staticIP.length() == 0) {
serverCore.useStaticIP = false;

View File

@ -1,4 +1,4 @@
// Status.java
// Status.java
// -----------------------
// part of YaCy
// (C) by Michael Peter Christen; mc@yacy.net
@ -42,7 +42,6 @@ import net.yacy.peers.Seed;
import net.yacy.peers.operation.yacyBuildProperties;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
import de.anomic.server.serverCore;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
@ -60,13 +59,13 @@ public class Status {
// check if the basic configuration was accessed before and forward
prop.put("forwardToConfigBasic", 0);
if ((post == null || !post.containsKey("noforward")) &&
sb.getConfig("server.servlets.submitted", "").indexOf("ConfigBasic.html") < 0 &&
sb.getConfig("server.servlets.submitted", "").indexOf("ConfigBasic.html",0) < 0 &&
Seed.isDefaultPeerName(sb.peers.mySeed().getName())) {
// forward to ConfigBasic
prop.put("forwardToConfigBasic", 1);
}
if (post != null) post.remove("noforward");
if (post != null && post.size() > 0) {
if (sb.adminAuthenticated(header) < 2) {
prop.put("AUTHENTICATE","admin log-in");
@ -104,7 +103,7 @@ public class Status {
sb.setConfig("trayIcon", trigger_enabled);
redirect = true;
}
if (redirect) {
prop.put("LOCATION","");
return prop;
@ -118,7 +117,7 @@ public class Status {
if (adminaccess) {
prop.put("showPrivateTable", "1");
prop.put("privateStatusTable", "Status_p.inc");
} else {
} else {
prop.put("showPrivateTable", "0");
prop.put("privateStatusTable", "");
}
@ -147,26 +146,26 @@ public class Status {
prop.put("warningMemoryLow", "1");
prop.put("warningMemoryLow_minSpace", minFree);
}
}
// version information
//final String versionstring = yacyVersion.combined2prettyVersion(sb.getConfig("version","0.1"));
final String versionstring = yacyBuildProperties.getVersion() + "/" + yacyBuildProperties.getSVNRevision();
prop.put("versionpp", versionstring);
// place some more hints
if ((adminaccess) && (sb.getThread(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL).getJobCount() == 0)) {
prop.put("hintCrawlStart", "1");
}
if ((adminaccess) && (sb.getThread(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL).getJobCount() > 500)) {
prop.put("hintCrawlMonitor", "1");
}
// hostname and port
final String extendedPortString = sb.getConfig("port", "8090");
final int pos = extendedPortString.indexOf(":");
final int pos = extendedPortString.indexOf(':',0);
prop.put("port",serverCore.getPortNr(extendedPortString));
if (pos != -1) {
prop.put("extPortFormat", "1");
@ -174,9 +173,9 @@ public class Status {
} else {
prop.put("extPortFormat", "0");
}
InetAddress hostIP = Domains.myPublicLocalIP();
final InetAddress hostIP = Domains.myPublicLocalIP();
prop.put("host", hostIP != null ? hostIP.getHostAddress() : "Unkown IP");
// ssl support
prop.put("sslSupport",sb.getConfig("keyStore", "").length() == 0 ? "0" : "1");
@ -233,7 +232,7 @@ public class Status {
}
prop.putHTML("peerName", thisName);
prop.put("hash", thisHash);
final String seedUploadMethod = sb.getConfig("seedUploadMethod", "");
if (!"none".equalsIgnoreCase(seedUploadMethod) ||
("".equals(seedUploadMethod) && (sb.getConfig("seedFTPPassword", "").length() > 0 ||
@ -262,7 +261,7 @@ public class Status {
} else {
prop.put(SEEDSERVER, "0"); // disabled
}
if (sb.peers != null && sb.peers.sizeConnected() > 0){
prop.put("otherPeers", "1");
prop.putNum("otherPeers_num", sb.peers.sizeConnected());
@ -275,7 +274,7 @@ public class Status {
} else {
prop.put("popup", "1");
}
if (!OS.isWindows) {
prop.put("tray", "2");
} else if (!sb.getConfigBool("trayIcon", false)) {
@ -299,21 +298,21 @@ public class Status {
final serverCore httpd = (serverCore) sb.getThread("10_httpd");
prop.putNum("connectionsActive", httpd.getJobCount());
prop.putNum("connectionsMax", httpd.getMaxSessionCount());
// Queue information
final int loaderJobCount = sb.crawlQueues.workerSize();
final int loaderMaxCount = sb.getConfigInt(SwitchboardConstants.CRAWLER_THREADS_ACTIVE_MAX, 10);
final int loaderPercent = (loaderMaxCount == 0) ? 0 : loaderJobCount * 100 / loaderMaxCount;
prop.putNum("loaderQueueSize", loaderJobCount);
prop.putNum("loaderQueueMax", loaderMaxCount);
prop.putNum("loaderQueueMax", loaderMaxCount);
prop.put("loaderQueuePercent", (loaderPercent>100) ? 100 : loaderPercent);
prop.putNum("localCrawlQueueSize", sb.getThread(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL).getJobCount());
prop.put("localCrawlPaused",sb.crawlJobIsPaused(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL) ? "1" : "0");
prop.putNum("remoteTriggeredCrawlQueueSize", sb.getThread(SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL).getJobCount());
prop.put("remoteTriggeredCrawlPaused",sb.crawlJobIsPaused(SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL) ? "1" : "0");
prop.putNum("stackCrawlQueueSize", sb.crawlStacker.size());
// return rewrite properties

View File

@ -33,7 +33,7 @@ public class Table_YMark_p {
// get the user name for the selected table
String bmk_user = null;
if (table != null)
bmk_user = table.substring(0,table.indexOf('_'));
bmk_user = table.substring(0,table.indexOf('_',0));
// currently selected table
prop.put("showselection_table", table);

View File

@ -358,9 +358,9 @@ public class ViewFile {
words = words.substring(1, words.length() - 1);
}
words = UTF8.decodeURL(words);
if (words.indexOf(' ') >= 0) return words.split(" ");
if (words.indexOf(',') >= 0) return words.split(",");
if (words.indexOf('+') >= 0) return words.split("\\+");
if (words.indexOf(' ',0) >= 0) return words.split(" ");
if (words.indexOf(',',0) >= 0) return words.split(",");
if (words.indexOf('+',0) >= 0) return words.split("\\+");
w = new String[1];
w[0] = words;
return w;

View File

@ -1,4 +1,4 @@
//YaCySearchPluginFF.pac
//YaCySearchPluginFF.pac
//-----------------------
//part of YaCy
//(C) by Michael Peter Christen; mc@yacy.net
@ -34,7 +34,7 @@ import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
public class YaCySearchPluginFF {
/**
* @param header the complete HTTP header of the request
* @param post any arguments for this servlet, the request carried with (GET as well as POST)
@ -44,23 +44,23 @@ public class YaCySearchPluginFF {
public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
final Switchboard sb = (Switchboard) env;
final serverObjects prop = new serverObjects();
// getting the http host header
final String hostSocket = header.get(HeaderFramework.CONNECTION_PROP_HOST);
String host = hostSocket;
int port = 80;
final int pos = hostSocket.indexOf(":");
final int pos = hostSocket.indexOf(':',0);
if (pos != -1) {
port = Integer.parseInt(hostSocket.substring(pos + 1));
host = hostSocket.substring(0, pos);
}
}
prop.put("host", host);
prop.put("port", port);
prop.putHTML("name", sb.peers.mySeed().getName());
return prop;
}
}

View File

@ -61,7 +61,7 @@ public class getpageinfo_p {
!url.startsWith("file://")) {
url = "http://" + url;
}
if (actions.indexOf("title") >= 0) {
if (actions.indexOf("title",0) >= 0) {
DigestURI u = null;
try {
u = new DigestURI(url);
@ -117,7 +117,7 @@ public class getpageinfo_p {
prop.putXML("filter", filter.length() > 0 ? filter.substring(1) : ".*");
}
}
if (actions.indexOf("robots") >= 0) {
if (actions.indexOf("robots",0) >= 0) {
try {
final DigestURI theURL = new DigestURI(url);
@ -139,7 +139,7 @@ public class getpageinfo_p {
Log.logException(e);
}
}
if (actions.indexOf("oai") >= 0) {
if (actions.indexOf("oai",0) >= 0) {
try {
final DigestURI theURL = new DigestURI(url
+ "?verb=Identify");

View File

@ -33,7 +33,7 @@ import de.anomic.server.serverSwitch;
public class import_ymark {
public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
final Switchboard sb = (Switchboard) env;
final serverObjects prop = new serverObjects();
@ -41,23 +41,23 @@ public class import_ymark {
final boolean isAdmin = (sb.verifyAuthentication(header, true));
final boolean isAuthUser = user!= null && user.hasRight(UserDB.AccessRight.BOOKMARK_RIGHT);
final int queueSize = 200;
Thread t;
YMarkEntry bmk;
// String root = YMarkEntry.FOLDERS_IMPORTED;
String root = "";
ByteArrayInputStream stream = null;
if(isAdmin || isAuthUser) {
String bmk_user = (isAuthUser ? user.getUserName() : YMarkTables.USER_ADMIN);
String bmk_user = (isAuthUser ? user.getUserName() : YMarkTables.USER_ADMIN);
final ArrayBlockingQueue<String> autoTaggingQueue = new ArrayBlockingQueue<String>(10*queueSize);
boolean autotag = false;
boolean merge = false;
boolean merge = false;
boolean empty = false;
if(post.containsKey("autotag") && !post.get("autotag", "off").equals("off")) {
if(post.containsKey("autotag") && !post.get("autotag", "off").equals("off")) {
autotag = true;
if(post.get("autotag").equals("merge")) {
if(post.get("autotag").equals("merge")) {
merge = true;
}
if(post.get("autotag").equals("empty")) {
@ -66,9 +66,9 @@ public class import_ymark {
t = new Thread(new YMarkAutoTagger(autoTaggingQueue, sb.loader, sb.tables.bookmarks, bmk_user, merge),"YMarks - autoTagger");
t.start();
}
if(isAdmin && post.containsKey("table") && post.get("table").length() > 0) {
bmk_user = post.get("table").substring(0, post.get("table").indexOf('_'));
bmk_user = post.get("table").substring(0, post.get("table").indexOf('_',0));
}
if(post.containsKey("redirect") && post.get("redirect").length() > 0) {
prop.put("redirect_url", post.get("redirect"));
@ -83,11 +83,11 @@ public class import_ymark {
SurrogateReader surrogateReader;
try {
surrogateReader = new SurrogateReader(stream, queueSize);
} catch (IOException e) {
} catch (final IOException e) {
//TODO: display an error message
Log.logException(e);
prop.put("status", "0");
return prop;
return prop;
}
t = new Thread(surrogateReader, "YMarks - Surrogate Reader");
t.start();
@ -99,11 +99,11 @@ public class import_ymark {
InputStreamReader reader = null;
try {
reader = new InputStreamReader(stream,"UTF-8");
} catch (UnsupportedEncodingException e1) {
} catch (final UnsupportedEncodingException e1) {
//TODO: display an error message
Log.logException(e1);
prop.put("status", "0");
return prop;
return prop;
}
if(post.get("importer").equals("html") && reader != null) {
final YMarkHTMLImporter htmlImporter = new YMarkHTMLImporter(reader, queueSize, root);
@ -112,13 +112,13 @@ public class import_ymark {
while ((bmk = htmlImporter.take()) != YMarkEntry.POISON) {
putBookmark(sb.tables.bookmarks, bmk_user, bmk, autoTaggingQueue, autotag, empty);
}
prop.put("status", "1");
prop.put("status", "1");
} else if(post.get("importer").equals("xbel") && reader != null) {
final YMarkXBELImporter xbelImporter;
final YMarkXBELImporter xbelImporter;
try {
//TODO: make RootFold
//TODO: make RootFold
xbelImporter = new YMarkXBELImporter(reader, queueSize, root);
} catch (SAXException e) {
} catch (final SAXException e) {
//TODO: display an error message
Log.logException(e);
prop.put("status", "0");
@ -140,7 +140,7 @@ public class import_ymark {
}
prop.put("status", "1");
}
}
}
} else if(post.containsKey("importer") && post.get("importer").equals("crawls")) {
try {
final Pattern pattern = Pattern.compile("^crawl start for.*");
@ -150,24 +150,24 @@ public class import_ymark {
row = APIcalls.next();
if(row.get(WorkTables.TABLE_API_COL_TYPE, "").equals("crawler")) {
final String url = row.get(WorkTables.TABLE_API_COL_COMMENT, "").substring(16);
sb.tables.bookmarks.createBookmark(sb.loader, url, bmk_user, autotag, "crawlStart", "/Crawl Start");
sb.tables.bookmarks.createBookmark(sb.loader, url, bmk_user, autotag, "crawlStart", "/Crawl Start");
}
}
prop.put("status", "1");
} catch (IOException e) {
} catch (final IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (RowSpaceExceededException e) {
} catch (final RowSpaceExceededException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (Failure e) {
} catch (final Failure e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} else if(post.containsKey("importer") && post.get("importer").equals("bmks")) {
final Iterator<String> bit=sb.bookmarksDB.getBookmarksIterator(isAdmin);
BookmarksDB.Bookmark bookmark;
while(bit.hasNext()){
while(bit.hasNext()){
bookmark=sb.bookmarksDB.getBookmark(bit.next());
final YMarkEntry bmk_entry = new YMarkEntry(false);
bmk_entry.put(YMarkEntry.BOOKMARK.URL.key(), bookmark.getUrl());
@ -184,53 +184,53 @@ public class import_ymark {
}
sb.tables.bookmarks.addBookmark(bmk_user, bmk_entry, merge, true);
prop.put("status", "1");
} catch (MalformedURLException e) {
} catch (final MalformedURLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
} catch (final IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (RowSpaceExceededException e) {
} catch (final RowSpaceExceededException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
}
if(post.containsKey("autotag") && !post.get("autotag", "off").equals("off")) {
try {
autoTaggingQueue.put(YMarkAutoTagger.POISON);
Log.logInfo(YMarkTables.BOOKMARKS_LOG, "Importer inserted poison pill in autoTagging queue");
} catch (InterruptedException e) {
Log.logInfo(YMarkTables.BOOKMARKS_LOG, "Importer inserted poison pill in autoTagging queue");
} catch (final InterruptedException e) {
Log.logException(e);
}
}
}
}
} else {
prop.put(YMarkTables.USER_AUTHENTICATE,YMarkTables.USER_AUTHENTICATE_MSG);
}
}
// return rewrite properties
return prop;
}
public static void putBookmark(final YMarkTables ymarks, final String bmk_user, final YMarkEntry bmk,
public static void putBookmark(final YMarkTables ymarks, final String bmk_user, final YMarkEntry bmk,
final ArrayBlockingQueue<String> autoTaggingQueue, final boolean autotag, final boolean empty) {
try {
String url = bmk.get(YMarkEntry.BOOKMARK.URL.key());
final String url = bmk.get(YMarkEntry.BOOKMARK.URL.key());
// other protocols could cause problems
if(url != null && url.startsWith("http")) {
ymarks.addBookmark(bmk_user, bmk, true, true);
ymarks.addBookmark(bmk_user, bmk, true, true);
if(autotag) {
if(!empty) {
autoTaggingQueue.put(url);
} else if(!bmk.containsKey(YMarkEntry.BOOKMARK.TAGS.key()) || bmk.get(YMarkEntry.BOOKMARK.TAGS.key()).equals(YMarkEntry.BOOKMARK.TAGS.deflt())) {
autoTaggingQueue.put(url);
}
}
}
}
}
} catch (IOException e) {
Log.logException(e);
} catch (RowSpaceExceededException e) {
} catch (final IOException e) {
Log.logException(e);
} catch (InterruptedException e) {
} catch (final RowSpaceExceededException e) {
Log.logException(e);
} catch (final InterruptedException e) {
Log.logException(e);
}
}

View File

@ -1,4 +1,4 @@
//autoconfig.pac
//autoconfig.pac
//-----------------------
//part of YaCy
//(C) by Michael Peter Christen; mc@yacy.net
@ -34,38 +34,38 @@ import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
public class autoconfig {
// http://web.archive.org/web/20071011034022/http://wp.netscape.com/eng/mozilla/2.0/relnotes/demo/proxy-live.html
/**
* Generates a proxy-autoconfig-file (application/x-ns-proxy-autoconfig)
* See: <a href="http://wp.netscape.com/eng/mozilla/2.0/relnotes/demo/proxy-live.html">Proxy Auto-Config File Format</a>
* Generates a proxy-autoconfig-file (application/x-ns-proxy-autoconfig)
* See: <a href="http://wp.netscape.com/eng/mozilla/2.0/relnotes/demo/proxy-live.html">Proxy Auto-Config File Format</a>
* @param header the complete HTTP header of the request
* @param post any arguments for this servlet, the request carried with (GET as well as POST)
* @param env the serverSwitch object holding all runtime-data
* @return the rewrite-properties for the template
*/
public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
final serverObjects prop = new serverObjects();
final boolean yacyonly = env.getConfigBool(SwitchboardConstants.PROXY_YACY_ONLY, false);
// get the http host header
final String hostSocket = header.get(HeaderFramework.CONNECTION_PROP_HOST);
String host = hostSocket;
int port = 80;
final int pos = hostSocket.indexOf(":");
final int pos = hostSocket.indexOf(':',0);
if (pos != -1) {
port = Integer.parseInt(hostSocket.substring(pos + 1));
host = hostSocket.substring(0, pos);
}
prop.put("yacy", yacyonly ? "0" : "1");
prop.put("yacy_host", host);
prop.put("yacy_port", port);
return prop;
}
}
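For readers unfamiliar with the servlet above: before filling the PAC template it only has to split the incoming Host header into host and port. A minimal, self-contained sketch of that split (class name and sample header value are illustrative, not part of the commit):

// Everything before the first ':' is the host, everything after it the port,
// with 80 as the default when no port is given.
public class HostPortSketch {
    public static void main(final String[] args) {
        final String hostSocket = "peer.example.net:8090"; // hypothetical Host header
        String host = hostSocket;
        int port = 80;
        final int pos = hostSocket.indexOf(':', 0);
        if (pos != -1) {
            port = Integer.parseInt(hostSocket.substring(pos + 1));
            host = hostSocket.substring(0, pos);
        }
        System.out.println(host + " / " + port); // -> peer.example.net / 8090
    }
}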

View File

@ -9,7 +9,7 @@
// $LastChangedBy$
//
// LICENSE
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
@ -31,48 +31,47 @@ import net.yacy.cora.document.UTF8;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.document.importer.MediawikiImporter;
import net.yacy.search.Switchboard;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
public class mediawiki_p {
//http://localhost:8090/mediawiki_p.html?dump=wikipedia.de.xml&title=Kartoffel
public static serverObjects respond(final RequestHeader header, serverObjects post, final serverSwitch env) throws IOException {
public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) throws IOException {
final Switchboard sb = (Switchboard) env;
final serverObjects prop = new serverObjects();
prop.put("title", "");
prop.put("page", "");
if (post == null) {
return post;
}
String dump = post.get("dump", null);
String title = post.get("title", null);
final String dump = post.get("dump", null);
final String title = post.get("title", null);
if (dump == null || title == null) return post;
File dumpFile = new File(sb.getDataPath(), "DATA/HTCACHE/mediawiki/" + dump);
final File dumpFile = new File(sb.getDataPath(), "DATA/HTCACHE/mediawiki/" + dump);
if (!dumpFile.exists()) return post;
MediawikiImporter.checkIndex(dumpFile);
MediawikiImporter.wikisourcerecord w = MediawikiImporter.find(title.replaceAll(" ", "_"), MediawikiImporter.idxFromMediawikiXML(dumpFile));
final MediawikiImporter.wikisourcerecord w = MediawikiImporter.find(title.replaceAll(" ", "_"), MediawikiImporter.idxFromMediawikiXML(dumpFile));
if (w == null) {
return post;
}
String page = UTF8.String(MediawikiImporter.read(dumpFile, w.start, (int) (w.end - w.start)));
int p = page.indexOf("<text");
int p = page.indexOf("<text",0);
if (p < 0) return prop;
p = page.indexOf('>', p);
if (p < 0) return prop;
p++;
int q = page.lastIndexOf("</text>");
final int q = page.lastIndexOf("</text>");
if (q < 0) return prop;
page = page.substring(p, q);
prop.putHTML("title", title);
prop.putWiki(sb.peers.mySeed().getClusterAddress(), "page", page);
return prop;
}
}

View File

@ -9,7 +9,7 @@
// $LastChangedBy$
//
// LICENSE
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
@ -38,21 +38,21 @@ public class opensearchdescription {
// generate message content for open search description
String promoteSearchPageGreeting = env.getConfig(SwitchboardConstants.GREETING, "");
if (env.getConfigBool(SwitchboardConstants.GREETING_NETWORK_NAME, false)) promoteSearchPageGreeting = env.getConfig("network.unit.description", "");
String thisaddress = header.get("Host", "127.0.0.1");
if (thisaddress.indexOf(':') == -1) thisaddress += ":" + serverCore.getPortNr(env.getConfig("port", "8090"));
if (thisaddress.indexOf(':',0) == -1) thisaddress += ":" + serverCore.getPortNr(env.getConfig("port", "8090"));
int compareyacy = 0;
if (post != null && post.getBoolean("compare_yacy", false))
compareyacy = 1;
final serverObjects prop = new serverObjects();
prop.put("compareyacy", compareyacy);
prop.putXML("compareyacy_thisaddress", thisaddress);
prop.putXML("thisaddress", thisaddress);
prop.putXML("SearchPageGreeting", promoteSearchPageGreeting);
prop.putXML("clientname", sb.peers.mySeed().getName());
// return rewrite properties
return prop;
}

View File

@ -119,14 +119,14 @@ public void processRequestResponse(int steps) {
</domain>
*/
int p = line.indexOf("<domain");
int p = line.indexOf("<domain",0);
if (p >= 0) {
//println("domain :" + line.substring(p + 8, line.length() - 1).trim());
processDomain(parseProps(line.substring(p + 8, line.length() - 1).trim()));
}
p = line.indexOf("<reference");
p = line.indexOf("<reference",0);
if (p >= 0) {
int q = line.indexOf("</reference>");
int q = line.indexOf("</reference>",0);
if (q > 0) {
int r = line.lastIndexOf('>', q);
if (r > 0) {
@ -192,7 +192,7 @@ public HashMap parseProps(String s) {
int p;
String z;
for (int i = 0; i < l.length; i++) {
p = l[i].indexOf('=');
p = l[i].indexOf('=',0);
if (p > 0) {
z = l[i].substring(p + 1).trim();
if (z.length() > 0 && z.charAt(0) == '"') z = z.substring(1);

View File

@ -1,4 +1,4 @@
//sharedBlacklist_p.java
//-----------------------
//part of the AnomicHTTPProxy
//(C) by Michael Peter Christen; mc@yacy.net
@ -40,11 +40,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Set;
import de.anomic.data.ListManager;
import de.anomic.data.list.ListAccumulator;
import de.anomic.data.list.XMLBlacklistImporter;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
import net.yacy.cora.document.UTF8;
import net.yacy.cora.protocol.ClientIdentification;
import net.yacy.cora.protocol.RequestHeader;
@ -58,6 +53,12 @@ import net.yacy.search.query.SearchEventCache;
import org.xml.sax.SAXException;
import de.anomic.data.ListManager;
import de.anomic.data.list.ListAccumulator;
import de.anomic.data.list.XMLBlacklistImporter;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
public class sharedBlacklist_p {
public static final int STATUS_NONE = 0;
@ -69,7 +70,7 @@ public class sharedBlacklist_p {
public static final int STATUS_PARSE_ERROR = 6;
private final static String BLACKLIST_FILENAME_FILTER = "^.*\\.black$";
public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
final Switchboard sb = (Switchboard) env;
// return variable that accumulates replacements
@ -82,48 +83,48 @@ public class sharedBlacklist_p {
}else{
selectedBlacklistName = "shared.black";
}
prop.putHTML("currentBlacklist", selectedBlacklistName);
prop.putHTML("page_target", selectedBlacklistName);
if (post != null) {
// initialize the list manager
ListManager.switchboard = (Switchboard) env;
ListManager.listsPath = new File(ListManager.switchboard.getDataPath(),ListManager.switchboard.getConfig("listManager.listsPath", "DATA/LISTS"));
// loading all blacklist files located in the directory
final List<String> dirlist = FileUtils.getDirListing(ListManager.listsPath, BLACKLIST_FILENAME_FILTER);
// List BlackLists
int blacklistCount = 0;
if (dirlist != null) {
for (String element : dirlist) {
for (final String element : dirlist) {
prop.putXML("page_blackLists_" + blacklistCount + "_name", element);
blacklistCount++;
}
}
prop.put("page_blackLists", blacklistCount);
Iterator<String> otherBlacklist = null;
ListAccumulator otherBlacklists = null;
if (post.containsKey("hash")) {
/* ======================================================
* Import blacklist from other peer
* ====================================================== */
// get the source peer hash
final String hash = post.get("hash");
// generate the download URL
String downloadURLOld = null;
if( sb.peers != null ){ //no nullpointer error..
final Seed seed = sb.peers.getConnected(hash);
if (seed != null) {
final String IP = seed.getIP();
final String Port = seed.get(Seed.PORT, "8090");
final String peerName = seed.get(Seed.NAME, "<" + IP + ":" + Port + ">");
prop.putHTML("page_source", peerName);
@ -138,12 +139,12 @@ public class sharedBlacklist_p {
prop.putHTML("status_name", hash);
prop.put("page", "1");
}
if (downloadURLOld != null) {
// download the blacklist
try {
// get List
DigestURI u = new DigestURI(downloadURLOld);
final DigestURI u = new DigestURI(downloadURLOld);
otherBlacklist = FileUtils.strings(u.get(ClientIdentification.getUserAgent(), 10000));
} catch (final Exception e) {
@ -156,7 +157,7 @@ public class sharedBlacklist_p {
/* ======================================================
* Download the blacklist from URL
* ====================================================== */
final String downloadURL = post.get("url");
prop.putHTML("page_source", downloadURL);
@ -182,9 +183,9 @@ public class sharedBlacklist_p {
if (fileString != null) {
try {
otherBlacklists = new XMLBlacklistImporter().parse(new StringReader(fileString));
} catch (IOException ex) {
} catch (final IOException ex) {
prop.put("status", STATUS_FILE_ERROR);
} catch (SAXException ex) {
} catch (final SAXException ex) {
prop.put("status", STATUS_PARSE_ERROR);
}
}
@ -205,46 +206,46 @@ public class sharedBlacklist_p {
/* ======================================================
* Add loaded items into blacklist file
* ====================================================== */
prop.put("page", "1"); //result page
prop.put("status", STATUS_ENTRIES_ADDED); //list of added Entries
int count = 0;//counter of added entries
PrintWriter pw = null;
try {
// open the blacklist file
pw = new PrintWriter(new FileWriter(new File(ListManager.listsPath, selectedBlacklistName), true));
// loop through the received entry list
final int num = post.getInt("num", 0);
for(int i = 0; i < num; i++){
if( post.containsKey("item" + i) ){
String newItem = post.get("item" + i);
//This should not be needed...
if ( newItem.startsWith("http://") ){
newItem = newItem.substring(7);
}
// separate the newItem into host and path
int pos = newItem.indexOf("/");
int pos = newItem.indexOf('/',0);
if (pos < 0) {
// add default empty path pattern
pos = newItem.length();
newItem = newItem + "/.*";
}
// append the item to the file
pw.println(newItem);
count++;
if (Switchboard.urlBlacklist != null) {
final String supportedBlacklistTypesStr = Blacklist.BLACKLIST_TYPES_STRING;
final String[] supportedBlacklistTypes = supportedBlacklistTypesStr.split(",");
for (int blTypes=0; blTypes < supportedBlacklistTypes.length; blTypes++) {
if (ListManager.listSetContains(supportedBlacklistTypes[blTypes] + ".BlackLists",selectedBlacklistName)) {
Switchboard.urlBlacklist.add(supportedBlacklistTypes[blTypes],newItem.substring(0, pos), newItem.substring(pos + 1));
for (final String supportedBlacklistType : supportedBlacklistTypes) {
if (ListManager.listSetContains(supportedBlacklistType + ".BlackLists",selectedBlacklistName)) {
Switchboard.urlBlacklist.add(supportedBlacklistType,newItem.substring(0, pos), newItem.substring(pos + 1));
}
}
SearchEventCache.cleanupEvents(true);
@ -265,12 +266,12 @@ public class sharedBlacklist_p {
prop.put("LOCATION","Blacklist_p.html?selectedListName=" + CharacterCoding.unicode2html(selectedBlacklistName, true) + "&selectList=select");
return prop;
}
// generate the html list
if (otherBlacklist != null) {
// loading the current blacklist content
final Set<String> Blacklist = new HashSet<String>(FileUtils.getListArray(new File(ListManager.listsPath, selectedBlacklistName)));
int count = 0;
while (otherBlacklist.hasNext()) {
final String tmp = otherBlacklist.next();
@ -287,18 +288,18 @@ public class sharedBlacklist_p {
prop.put("page", "0");
} else if (otherBlacklists != null) {
List<List<String>> entries = otherBlacklists.getEntryLists();
final List<List<String>> entries = otherBlacklists.getEntryLists();
//List<Map<String,String>> properties = otherBlacklists.getPropertyMaps();
int count = 0;
for(List<String> list : entries) {
for(final List<String> list : entries) {
// sort the loaded blacklist
final String[] sortedlist = list.toArray(new String[list.size()]);
Arrays.sort(sortedlist);
for(int i = 0; i < sortedlist.length; i++){
final String tmp = sortedlist[i];
for (final String element : sortedlist) {
final String tmp = element;
if(!tmp.equals("")){
//newBlacklist.add(tmp);
prop.put("page_urllist_" + count + "_dark", count % 2 == 0 ? "0" : "1");
@ -315,7 +316,7 @@ public class sharedBlacklist_p {
prop.put("page", "0");
}
} else {
prop.put("page", "1");
prop.put("status", "5");//Wrong Invocation

View File

@ -161,8 +161,8 @@ public final class transferRWI {
if (count > 1000) break; // protection against flooding
// check if RWI entry is well-formed
p = estring.indexOf('{');
if (p < 0 || estring.indexOf("x=") < 0 || !(estring.indexOf("[B@") < 0)) {
p = estring.indexOf('{',0);
if (p < 0 || estring.indexOf("x=",0) < 0 || !(estring.indexOf("[B@",0) < 0)) {
blocked++;
continue;
}
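The hunk above shows the pattern that recurs throughout this commit's "last-minute performance hacks": single-character String searches are swapped for the char overload of indexOf with an explicit start index, which skips the String-based matching path. A tiny illustrative sketch (sample string and class name are hypothetical, not from the commit); both calls return the same index:

// Both calls locate the same character; the char overload avoids matching
// a one-character String needle on every invocation.
public class IndexOfSketch {
    public static void main(final String[] args) {
        final String estring = "abc{x=1}";               // hypothetical RWI entry string
        final int viaString = estring.indexOf("{");      // old form
        final int viaChar   = estring.indexOf('{', 0);   // new form used throughout this commit
        System.out.println(viaString + " == " + viaChar); // -> 3 == 3
    }
}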

View File

@ -197,7 +197,7 @@ public class yacysearch {
}
String prefermask = (post == null) ? "" : post.get("prefermaskfilter", "");
if (!prefermask.isEmpty() && prefermask.indexOf(".*") < 0) {
if (!prefermask.isEmpty() && prefermask.indexOf(".*",0) < 0) {
prefermask = ".*" + prefermask + ".*";
}
@ -289,42 +289,42 @@ public class yacysearch {
final RankingProfile ranking = sb.getRanking();
if (querystring.indexOf("/near") >= 0) {
if (querystring.indexOf("/near",0) >= 0) {
querystring = querystring.replace("/near", "");
ranking.coeff_worddistance = RankingProfile.COEFF_MAX;
}
if (querystring.indexOf("/date") >= 0) {
if (querystring.indexOf("/date",0) >= 0) {
querystring = querystring.replace("/date", "");
ranking.coeff_date = RankingProfile.COEFF_MAX;
}
if (querystring.indexOf("/http") >= 0) {
if (querystring.indexOf("/http",0) >= 0) {
querystring = querystring.replace("/http", "");
urlmask = "https?://.*";
}
if (querystring.indexOf("/https") >= 0) {
if (querystring.indexOf("/https",0) >= 0) {
querystring = querystring.replace("/https", "");
urlmask = "https?://.*";
}
if (querystring.indexOf("/ftp") >= 0) {
if (querystring.indexOf("/ftp",0) >= 0) {
querystring = querystring.replace("/ftp", "");
urlmask = "ftp://.*";
}
if (querystring.indexOf("/smb") >= 0) {
if (querystring.indexOf("/smb",0) >= 0) {
querystring = querystring.replace("/smb", "");
urlmask = "smb://.*";
}
if (querystring.indexOf("/file") >= 0) {
if (querystring.indexOf("/file",0) >= 0) {
querystring = querystring.replace("/file", "");
urlmask = "file://.*";
}
if (querystring.indexOf("/location") >= 0) {
if (querystring.indexOf("/location",0) >= 0) {
querystring = querystring.replace("/location", "");
if (constraint == null) {
constraint = new Bitfield(4);
}
constraint.set(Condenser.flag_cat_haslocation, true);
}
final int lrp = querystring.indexOf("/language/");
final int lrp = querystring.indexOf("/language/",0);
String lr = "";
if (lrp >= 0) {
if (querystring.length() >= (lrp + 11)) {
@ -334,7 +334,7 @@ public class yacysearch {
querystring = querystring.replace("/language/" + lr, "");
lr = lr.toLowerCase();
}
final int inurl = querystring.indexOf("inurl:");
final int inurl = querystring.indexOf("inurl:",0);
if (inurl >= 0) {
int ftb = querystring.indexOf(' ', inurl);
if (ftb == -1) {
@ -346,7 +346,7 @@ public class yacysearch {
urlmask = ".*" + urlstr + ".*";
}
}
final int filetype = querystring.indexOf("filetype:");
final int filetype = querystring.indexOf("filetype:",0);
if (filetype >= 0) {
int ftb = querystring.indexOf(' ', filetype);
if (ftb == -1) {
@ -375,7 +375,7 @@ public class yacysearch {
} else urlmask = ".*" + tenant + urlmask;
}
}
final int site = querystring.indexOf("site:");
final int site = querystring.indexOf("site:",0);
String sitehash = null;
String sitehost = null;
if (site >= 0) {
@ -394,17 +394,17 @@ public class yacysearch {
sitehash = DigestURI.hosthash(sitehost);
}
final int heuristicScroogle = querystring.indexOf("heuristic:scroogle");
final int heuristicScroogle = querystring.indexOf("heuristic:scroogle",0);
if (heuristicScroogle >= 0) {
querystring = querystring.replace("heuristic:scroogle", "");
}
final int heuristicBlekko = querystring.indexOf("heuristic:blekko");
final int heuristicBlekko = querystring.indexOf("heuristic:blekko",0);
if (heuristicBlekko >= 0) {
querystring = querystring.replace("heuristic:blekko", "");
}
final int authori = querystring.indexOf("author:");
final int authori = querystring.indexOf("author:",0);
String authorhash = null;
if (authori >= 0) {
// check if the author was given with single quotes or without
@ -427,7 +427,7 @@ public class yacysearch {
}
authorhash = ASCII.String(Word.word2hash(author));
}
final int tld = querystring.indexOf("tld:");
final int tld = querystring.indexOf("tld:",0);
if (tld >= 0) {
int ftb = querystring.indexOf(' ', tld);
if (ftb == -1) {
@ -438,7 +438,7 @@ public class yacysearch {
while (domain.length() > 0 && domain.charAt(0) == '.') {
domain = domain.substring(1);
}
if (domain.indexOf('.') < 0) {
if (domain.indexOf('.',0) < 0) {
domain = "\\." + domain;
} // is tld
if (domain.length() > 0) {
@ -473,7 +473,7 @@ public class yacysearch {
// the query
final TreeSet<String>[] query = QueryParams.cleanQuery(querystring.trim()); // converts also umlaute
final int maxDistance = (querystring.indexOf('"') >= 0) ? query.length - 1 : Integer.MAX_VALUE;
final int maxDistance = (querystring.indexOf('"',0) >= 0) ? query.length - 1 : Integer.MAX_VALUE;
// filter out stopwords
final SortedSet<String> filtered = SetTools.joinConstructive(query[0], Switchboard.stopwords);
@ -806,7 +806,7 @@ public class yacysearch {
// adding some additional properties needed for the rss feed
String hostName = header.get("Host", "localhost");
if (hostName.indexOf(':') == -1) {
if (hostName.indexOf(':',0) == -1) {
hostName += ":" + serverCore.getPortNr(env.getConfig("port", "8090"));
}
prop.put("searchBaseURL", "http://" + hostName + "/yacysearch.html");

View File

@ -17,6 +17,7 @@
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
@ -35,18 +36,16 @@ import de.anomic.server.serverCore;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
import java.util.Date;
public class yacysearch_location {
private static final String space = " ";
public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
final Switchboard sb = (Switchboard) env;
final serverObjects prop = new serverObjects();
prop.put("kml", 0);
if (header.get(HeaderFramework.CONNECTION_PROP_EXT, "").equals("kml") ||
header.get(HeaderFramework.CONNECTION_PROP_EXT, "").equals("xml") ||
header.get(HeaderFramework.CONNECTION_PROP_EXT, "").equals("rss")
@ -54,25 +53,25 @@ public class yacysearch_location {
// generate a kml output page
prop.put("kml", 1);
if (post == null) return prop;
String query = post.get("query", "");
boolean search_query = post.get("dom", "").indexOf("query") >= 0;
boolean metatag = post.get("dom", "").indexOf("metatag") >= 0;
boolean alltext = post.get("dom", "").indexOf("alltext") >= 0;
boolean search_title = alltext || post.get("dom", "").indexOf("title") >= 0;
boolean search_publisher = alltext || post.get("dom", "").indexOf("publisher") >= 0;
boolean search_creator = alltext || post.get("dom", "").indexOf("creator") >= 0;
boolean search_subject = alltext || post.get("dom", "").indexOf("subject") >= 0;
long maximumTime = post.getLong("maximumTime", 5000);
int maximumRecords = post.getInt("maximumRecords", 3000);
final String query = post.get("query", "");
final boolean search_query = post.get("dom", "").indexOf("query",0) >= 0;
final boolean metatag = post.get("dom", "").indexOf("metatag",0) >= 0;
final boolean alltext = post.get("dom", "").indexOf("alltext",0) >= 0;
final boolean search_title = alltext || post.get("dom", "").indexOf("title",0) >= 0;
final boolean search_publisher = alltext || post.get("dom", "").indexOf("publisher",0) >= 0;
final boolean search_creator = alltext || post.get("dom", "").indexOf("creator",0) >= 0;
final boolean search_subject = alltext || post.get("dom", "").indexOf("subject",0) >= 0;
final long maximumTime = post.getLong("maximumTime", 5000);
final int maximumRecords = post.getInt("maximumRecords", 3000);
//i.e. http://localhost:8090/yacysearch_location.kml?query=berlin&maximumTime=2000&maximumRecords=100
int placemarkCounter = 0;
if (search_query) {
Set<Location> locations = LibraryProvider.geoLoc.find(query, true);
for (String qp: query.split(" ")) {
final Set<Location> locations = LibraryProvider.geoLoc.find(query, true);
for (final String qp: query.split(" ")) {
locations.addAll(LibraryProvider.geoLoc.find(qp, true));
}
for (Location location: locations) {
for (final Location location: locations) {
// write for all locations a point to this message
prop.put("kml_placemark_" + placemarkCounter + "_location", location.getName());
prop.put("kml_placemark_" + placemarkCounter + "_name", location.getName());
@ -88,41 +87,41 @@ public class yacysearch_location {
placemarkCounter++;
}
}
if (metatag || search_title || search_publisher || search_creator || search_subject) try {
// get a queue of search results
String rssSearchServiceURL = "http://127.0.0.1:" + sb.getConfig("port", "8090") + "/yacysearch.rss";
BlockingQueue<RSSMessage> results = new LinkedBlockingQueue<RSSMessage>();
final String rssSearchServiceURL = "http://127.0.0.1:" + sb.getConfig("port", "8090") + "/yacysearch.rss";
final BlockingQueue<RSSMessage> results = new LinkedBlockingQueue<RSSMessage>();
SRURSSConnector.searchSRURSS(results, rssSearchServiceURL, query, maximumTime, Integer.MAX_VALUE, null, false, null);
// take the results and compute some locations
RSSMessage message;
loop: while ((message = results.poll(maximumTime, TimeUnit.MILLISECONDS)) != RSSMessage.POISON) {
// find all associated locations
Set<Location> locations = new HashSet<Location>();
StringBuilder words = new StringBuilder(120);
final Set<Location> locations = new HashSet<Location>();
final StringBuilder words = new StringBuilder(120);
if (search_title) words.append(message.getTitle().trim()).append(space);
if (search_publisher) words.append(message.getCopyright().trim()).append(space);
if (search_creator) words.append(message.getAuthor().trim()).append(space);
String subject = "";
assert message != null;
assert message.getSubject() != null;
for (String s: message.getSubject()) subject += s.trim() + space;
for (final String s: message.getSubject()) subject += s.trim() + space;
if (search_subject) words.append(subject).append(space);
String[] wordlist = words.toString().trim().split(space);
for (String word: wordlist) if (word.length() >= 3) locations.addAll(LibraryProvider.geoLoc.find(word, true));
final String[] wordlist = words.toString().trim().split(space);
for (final String word: wordlist) if (word.length() >= 3) locations.addAll(LibraryProvider.geoLoc.find(word, true));
for (int i = 0; i < wordlist.length - 1; i++) locations.addAll(LibraryProvider.geoLoc.find(wordlist[i] + space + wordlist[i + 1], true));
for (int i = 0; i < wordlist.length - 2; i++) locations.addAll(LibraryProvider.geoLoc.find(wordlist[i] + space + wordlist[i + 1] + space + wordlist[i + 2], true));
// add locations from metatag
if (metatag) {
if (message.getLat() != 0.0f && message.getLon() != 0.0f) {
locations.add(new Location(message.getLon(), message.getLat(), message.getTitle().trim()));
}
}
for (Location location: locations) {
for (final Location location: locations) {
// write for all locations a point to this message
prop.put("kml_placemark_" + placemarkCounter + "_location", location.getName());
prop.put("kml_placemark_" + placemarkCounter + "_name", message.getTitle());
@ -139,7 +138,7 @@ public class yacysearch_location {
if (placemarkCounter >= maximumRecords) break loop;
}
}
} catch (InterruptedException e) {}
} catch (final InterruptedException e) {}
prop.put("kml_placemark", placemarkCounter);
}
if (header.get(HeaderFramework.CONNECTION_PROP_EXT, "").equals("rss")) {
@ -147,7 +146,7 @@ public class yacysearch_location {
String promoteSearchPageGreeting = env.getConfig(SwitchboardConstants.GREETING, "");
if (env.getConfigBool(SwitchboardConstants.GREETING_NETWORK_NAME, false)) promoteSearchPageGreeting = env.getConfig("network.unit.description", "");
String hostName = header.get("Host", "localhost");
if (hostName.indexOf(':') == -1) hostName += ":" + serverCore.getPortNr(env.getConfig("port", "8090"));
if (hostName.indexOf(':',0) == -1) hostName += ":" + serverCore.getPortNr(env.getConfig("port", "8090"));
final String originalquerystring = (post == null) ? "" : post.get("query", post.get("search", "")).trim(); // SRU compliance
final boolean global = post.get("kml_resource", "local").equals("global");
@ -174,9 +173,9 @@ public class yacysearch_location {
prop.put("initsearch_query", post.get("query"));
}
}
// return rewrite properties
return prop;
}
}
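The lookup loops above probe the gazetteer with every single word, every word pair and every word triple taken from the harvested title, publisher, creator and subject text. A self-contained sketch of that n-gram expansion (names and sample text are illustrative, not from YaCy):

// Builds all 1-, 2- and 3-word windows, mirroring the lookup loops above;
// single words shorter than three characters are skipped.
import java.util.ArrayList;
import java.util.List;

public class NGramSketch {
    static List<String> windows(final String text) {
        final String[] w = text.trim().split(" ");
        final List<String> out = new ArrayList<String>();
        for (final String word : w) if (word.length() >= 3) out.add(word);
        for (int i = 0; i < w.length - 1; i++) out.add(w[i] + " " + w[i + 1]);
        for (int i = 0; i < w.length - 2; i++) out.add(w[i] + " " + w[i + 1] + " " + w[i + 2]);
        return out;
    }

    public static void main(final String[] args) {
        System.out.println(windows("Frankfurt am Main"));
        // -> [Frankfurt, Main, Frankfurt am, am Main, Frankfurt am Main]
    }
}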

View File

@ -124,7 +124,7 @@ public class yacysearchtrailer {
while (i < 20 && navigatorIterator.hasNext()) {
name = navigatorIterator.next().trim();
count = authorNavigator.get(name);
anav = (name.indexOf(' ') < 0) ? "author:" + name : "author:'" + name.replace(" ", "+") + "'";
anav = (name.indexOf(' ',0) < 0) ? "author:" + name : "author:'" + name.replace(" ", "+") + "'";
prop.put(fileType, "nav-authors_element_" + i + "_name", name);
prop.put("nav-authors_element_" + i + "_url", "<a href=\"" + QueryParams.navurl("html", 0, theQuery, theQuery.queryStringForUrl() + "+" + anav, theQuery.urlMask.toString(), theQuery.navigators).toString() + "\">" + name + " (" + count + ")</a>");
prop.putJSON("nav-authors_element_" + i + "_url-json", QueryParams.navurl("json", 0, theQuery, theQuery.queryStringForUrl() + "+" + anav, theQuery.urlMask.toString(), theQuery.navigators).toString());

View File

@ -50,8 +50,8 @@ import net.yacy.peers.Protocol;
import net.yacy.peers.Seed;
import net.yacy.peers.dht.PeerSelection;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
import net.yacy.search.Switchboard.indexingQueueEntry;
import net.yacy.search.SwitchboardConstants;
import net.yacy.search.index.Segments;
import de.anomic.crawler.NoticedURL.StackType;
import de.anomic.crawler.ZURL.FailCategory;
@ -271,7 +271,7 @@ public class CrawlQueues {
return true;
} catch (final IOException e) {
this.log.logSevere(stats + ": CANNOT FETCH ENTRY: " + e.getMessage(), e);
if (e.getMessage().indexOf("hash is null") > 0) this.noticeURL.clear(NoticedURL.StackType.CORE);
if (e.getMessage().indexOf("hash is null",0) > 0) this.noticeURL.clear(NoticedURL.StackType.CORE);
}
}
return true;
@ -532,7 +532,7 @@ public class CrawlQueues {
return true;
} catch (final IOException e) {
this.log.logSevere(stats + ": CANNOT FETCH ENTRY: " + e.getMessage(), e);
if (e.getMessage().indexOf("hash is null") > 0) this.noticeURL.clear(NoticedURL.StackType.REMOTE);
if (e.getMessage().indexOf("hash is null",0) > 0) this.noticeURL.clear(NoticedURL.StackType.REMOTE);
return true;
}
}

View File

@ -1,4 +1,4 @@
// FTPLoader.java
// -------------------------------------
// part of YACY
// (C) by Michael Peter Christen; mc@yacy.net
@ -49,7 +49,7 @@ import de.anomic.crawler.ZURL.FailCategory;
public class FTPLoader {
public static final long DEFAULT_MAXFILESIZE = 1024 * 1024 * 10;
private final Switchboard sb;
private final Log log;
private final long maxFileSize;
@ -62,13 +62,13 @@ public class FTPLoader {
/**
* Loads the entry from a ftp-server
*
* @param request
* @return
*/
public Response load(final Request request, boolean acceptOnlyParseable) throws IOException {
long start = System.currentTimeMillis();
public Response load(final Request request, final boolean acceptOnlyParseable) throws IOException {
final long start = System.currentTimeMillis();
final DigestURI entryUrl = request.url();
final String fullPath = getPath(entryUrl);
@ -97,7 +97,7 @@ public class FTPLoader {
// create new ftp client
final FTPClient ftpClient = new FTPClient();
// get a connection
if (openConnection(ftpClient, entryUrl)) {
// test if the specified file is a directory
@ -113,23 +113,23 @@ public class FTPLoader {
if (file.length() == 0) {
// directory -> get list of files
RequestHeader requestHeader = new RequestHeader();
final RequestHeader requestHeader = new RequestHeader();
if (request.referrerhash() != null) {
DigestURI u = sb.getURL(Segments.Process.LOCALCRAWLING, request.referrerhash());
final DigestURI u = this.sb.getURL(Segments.Process.LOCALCRAWLING, request.referrerhash());
if (u != null) requestHeader.put(RequestHeader.REFERER, u.toNormalform(true, false));
}
StringBuilder dirList = ftpClient.dirhtml(path);
final StringBuilder dirList = ftpClient.dirhtml(path);
if (dirList == null) {
response = null;
} else {
ResponseHeader responseHeader = new ResponseHeader();
final ResponseHeader responseHeader = new ResponseHeader();
responseHeader.put(HeaderFramework.LAST_MODIFIED, HeaderFramework.formatRFC1123(new Date()));
responseHeader.put(HeaderFramework.CONTENT_TYPE, "text/html");
final CrawlProfile profile = sb.crawler.getActive(request.profileHandle().getBytes());
final CrawlProfile profile = this.sb.crawler.getActive(request.profileHandle().getBytes());
response = new Response(
request,
requestHeader,
responseHeader,
"200",
@ -153,10 +153,10 @@ public class FTPLoader {
if (berr.size() > 0 || response == null) {
// some error logging
final String detail = (berr.size() > 0) ? "Errorlog: " + berr.toString() : "";
sb.crawlQueues.errorURL.push(request, sb.peers.mySeed().hash.getBytes(), new Date(), 1, FailCategory.TEMPORARY_NETWORK_FAILURE, " ftp server download, " + detail, -1);
this.sb.crawlQueues.errorURL.push(request, this.sb.peers.mySeed().hash.getBytes(), new Date(), 1, FailCategory.TEMPORARY_NETWORK_FAILURE, " ftp server download, " + detail, -1);
throw new IOException("FTPLoader: Unable to download URL '" + request.url().toString() + "': " + detail);
}
Latency.update(request.url(), System.currentTimeMillis() - start);
return response;
}
@ -178,7 +178,7 @@ public class FTPLoader {
final String userInfo = entryUrl.getUserInfo();
String userName = "anonymous", userPwd = "anonymous";
if (userInfo != null) {
final int pos = userInfo.indexOf(":");
final int pos = userInfo.indexOf(':',0);
if (pos != -1) {
userName = userInfo.substring(0, pos);
userPwd = userInfo.substring(pos + 1);
@ -210,7 +210,7 @@ public class FTPLoader {
return true;
}
private Response getFile(final FTPClient ftpClient, final Request request, boolean acceptOnlyParseable) throws IOException {
private Response getFile(final FTPClient ftpClient, final Request request, final boolean acceptOnlyParseable) throws IOException {
// determine the mimetype of the resource
final DigestURI url = request.url();
final String mime = TextParser.mimeOf(url);
@ -218,37 +218,37 @@ public class FTPLoader {
// determine the file date
final Date fileDate = ftpClient.entryDate(path);
// create response header
RequestHeader requestHeader = new RequestHeader();
final RequestHeader requestHeader = new RequestHeader();
if (request.referrerhash() != null) {
DigestURI refurl = sb.getURL(Segments.Process.LOCALCRAWLING, request.referrerhash());
final DigestURI refurl = this.sb.getURL(Segments.Process.LOCALCRAWLING, request.referrerhash());
if (refurl != null) requestHeader.put(RequestHeader.REFERER, refurl.toNormalform(true, false));
}
ResponseHeader responseHeader = new ResponseHeader();
final ResponseHeader responseHeader = new ResponseHeader();
responseHeader.put(HeaderFramework.LAST_MODIFIED, HeaderFramework.formatRFC1123(fileDate));
responseHeader.put(HeaderFramework.CONTENT_TYPE, mime);
// if the mimetype and file extension is supported we start to download the file
final long size = ftpClient.fileSize(path);
responseHeader.put(HeaderFramework.CONTENT_LENGTH, String.valueOf(size));
String parserError = null;
if ((acceptOnlyParseable && (parserError = TextParser.supports(url, mime)) != null) ||
(size > maxFileSize && maxFileSize >= 0)) {
(size > this.maxFileSize && this.maxFileSize >= 0)) {
// we know that we cannot process that file before loading
// only the metadata is returned
if (parserError != null) {
log.logInfo("No parser available in FTP crawler: '" + parserError + "' for URL " + request.url().toString() + ": parsing only metadata");
this.log.logInfo("No parser available in FTP crawler: '" + parserError + "' for URL " + request.url().toString() + ": parsing only metadata");
} else {
log.logInfo("Too big file in FTP crawler with size = " + size + " Bytes for URL " + request.url().toString() + ": parsing only metadata");
this.log.logInfo("Too big file in FTP crawler with size = " + size + " Bytes for URL " + request.url().toString() + ": parsing only metadata");
}
// create response with metadata only
responseHeader.put(HeaderFramework.CONTENT_TYPE, "text/plain");
final CrawlProfile profile = sb.crawler.getActive(request.profileHandle().getBytes());
Response response = new Response(
request,
final CrawlProfile profile = this.sb.crawler.getActive(request.profileHandle().getBytes());
final Response response = new Response(
request,
requestHeader,
responseHeader,
"200",
@ -256,14 +256,14 @@ public class FTPLoader {
null);
return response;
}
// download the remote file
byte[] b = ftpClient.get(path);
final byte[] b = ftpClient.get(path);
// create a response
final CrawlProfile profile = sb.crawler.getActive(request.profileHandle().getBytes());
Response response = new Response(
request,
final CrawlProfile profile = this.sb.crawler.getActive(request.profileHandle().getBytes());
final Response response = new Response(
request,
requestHeader,
responseHeader,
"200",
@ -274,7 +274,7 @@ public class FTPLoader {
/**
* gets path suitable for FTP (url-decoded, double-quotes escaped)
*
* @param entryUrl
* @return
*/
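The body of getPath is outside this hunk; going only by the javadoc above, a plausible shape of the method is sketched below. The decoding and quote-escaping details are assumptions, not code from the commit:

// Assumption, not commit code: url-decode the path and escape double quotes.
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;

public class FtpPathSketch {
    static String getPath(final String rawPath) throws UnsupportedEncodingException {
        return URLDecoder.decode(rawPath, "UTF-8").replace("\"", "\"\"");
    }

    public static void main(final String[] args) throws UnsupportedEncodingException {
        System.out.println(getPath("/pub/my%20file%22name")); // -> /pub/my file""name
    }
}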

View File

@ -1,4 +1,4 @@
// BlogBoardComments.java
// -------------------------------------
// (C) by Michael Peter Christen; mc@yacy.net
// first published on http://www.anomic.de
@ -60,7 +60,7 @@ import org.xml.sax.SAXException;
import de.anomic.data.wiki.WikiBoard;
public class BlogBoardComments {
private final static int KEY_LENGTH = 64;
private final static String DATE_FORMAT = "yyyyMMddHHmmss";
@ -68,29 +68,29 @@ public class BlogBoardComments {
static {
SIMPLE_DATE_FORMATTER.setTimeZone(TimeZone.getTimeZone("GMT"));
}
private MapHeap database = null;
public BlogBoardComments(final File actpath) throws IOException {
new File(actpath.getParent()).mkdir();
//database = new MapView(BLOBTree.toHeap(actpath, true, true, keyLength, recordSize, '_', NaturalOrder.naturalOrder, newFile), 500, '_');
database = new MapHeap(actpath, KEY_LENGTH, NaturalOrder.naturalOrder, 1024 * 64, 500, '_');
this.database = new MapHeap(actpath, KEY_LENGTH, NaturalOrder.naturalOrder, 1024 * 64, 500, '_');
}
public int size() {
return database.size();
return this.database.size();
}
public void close() {
database.close();
this.database.close();
}
static String dateString(final Date date) {
synchronized (SIMPLE_DATE_FORMATTER) {
return SIMPLE_DATE_FORMATTER.format(date);
}
}
private static String normalize(final String key) {
return (key == null) ? "null" : key.trim().toLowerCase();
}
@ -100,7 +100,7 @@ public class BlogBoardComments {
}
String ret = key.trim().toLowerCase();
int p;
while ((p = ret.indexOf(" ")) >= 0)
while ((p = ret.indexOf(' ',0)) >= 0)
ret = ret.substring(0, p) + "%20" + key.substring(p +1);
return ret;
}
@ -116,7 +116,7 @@ public class BlogBoardComments {
public String write(final CommentEntry page) {
// writes a new page and returns key
try {
database.insert(UTF8.getBytes(page.key), page.record);
this.database.insert(UTF8.getBytes(page.key), page.record);
return page.key;
} catch (final Exception e) {
Log.logException(e);
@ -125,7 +125,7 @@ public class BlogBoardComments {
}
public CommentEntry read(final String key) {
//System.out.println("DEBUG: read from blogBoardComments");
return read(key, database);
return read(key, this.database);
}
private CommentEntry read(final String key, final MapHeap base) {
@ -137,7 +137,7 @@ public class BlogBoardComments {
} catch (final IOException e) {
Log.logException(e);
record = null;
} catch (RowSpaceExceededException e) {
} catch (final RowSpaceExceededException e) {
Log.logException(e);
record = null;
}
@ -157,20 +157,20 @@ public class BlogBoardComments {
catch (final ParserConfigurationException e) {}
catch (final SAXException e) {}
catch (final IOException e) {}
return ret;
}
private boolean parseXMLimport(final Document doc) {
if(!"blog".equals(doc.getDocumentElement().getTagName())) {
return false;
}
final NodeList items = doc.getDocumentElement().getElementsByTagName("item");
if(items.getLength() == 0) {
return false;
}
for(int i = 0, n = items.getLength(); i < n; ++i) {
String key = null, ip = null, StrSubject = null, StrAuthor = null, StrPage = null, StrDate = null;
Date date = null;
@ -197,55 +197,55 @@ public class BlogBoardComments {
StrPage = currentNode.getFirstChild().getNodeValue();
}
}
try {
date = SIMPLE_DATE_FORMATTER.parse(StrDate);
} catch (final ParseException ex) {
date = new Date();
}
if (key == null || ip == null || StrSubject == null || StrAuthor == null || StrPage == null || date == null)
return false;
byte[] subject,author,page;
subject = UTF8.getBytes(StrSubject);
author = UTF8.getBytes(StrAuthor);
page = UTF8.getBytes(StrPage);
write (newEntry(key, subject, author, ip, date, page));
}
return true;
}
public void delete(final String key) {
try {
database.delete(UTF8.getBytes(normalize(key)));
this.database.delete(UTF8.getBytes(normalize(key)));
}
catch (final IOException e) { }
}
public Iterator<byte[]> keys(final boolean up) throws IOException {
return database.keys(up, false);
return this.database.keys(up, false);
}
public static class CommentEntry {
String key;
Map<String, String> record;
public CommentEntry(final String nkey, final byte[] subject, final byte[] author, final String ip, final Date date, final byte[] page) {
record = new HashMap<String, String>();
this.record = new HashMap<String, String>();
setKey(nkey);
setDate(date);
setSubject(subject);
setAuthor(author);
setIp(ip);
setPage(page);
WikiBoard.setAuthor(ip, UTF8.String(author));
}
CommentEntry(final String key, final Map<String, String> record) {
this.key = key;
this.record = record;
@ -253,36 +253,36 @@ public class BlogBoardComments {
this.record.put("comments", ListManager.collection2string(new ArrayList<String>()));
}
}
public String getKey() {
return key;
return this.key;
}
private void setKey(final String var) {
key = var.substring(0, Math.min(var.length(), KEY_LENGTH));
this.key = var.substring(0, Math.min(var.length(), KEY_LENGTH));
}
private void setSubject(final byte[] subject) {
if (subject == null)
record.put("subject","");
else
record.put("subject", Base64Order.enhancedCoder.encode(subject));
if (subject == null)
this.record.put("subject","");
else
this.record.put("subject", Base64Order.enhancedCoder.encode(subject));
}
public byte[] getSubject() {
final String subject = record.get("subject");
final String subject = this.record.get("subject");
if (subject == null) return new byte[0];
final byte[] subject_bytes = Base64Order.enhancedCoder.decode(subject);
if (subject_bytes == null) return new byte[0];
return subject_bytes;
}
private void setDate(Date date) {
if(date == null)
date = new Date();
record.put("date", dateString(date));
if(date == null)
date = new Date();
this.record.put("date", dateString(date));
}
public Date getDate() {
try {
final String date = record.get("date");
final String date = this.record.get("date");
if (date == null) {
if (Log.isFinest("Blog")) Log.logFinest("Blog", "ERROR: date field missing in blogBoard");
return new Date();
@ -294,9 +294,9 @@ public class BlogBoardComments {
return new Date();
}
}
public String getTimestamp() {
final String timestamp = record.get("date");
final String timestamp = this.record.get("date");
if (timestamp == null) {
if (Log.isFinest("Blog")) Log.logFinest("Blog", "ERROR: date field missing in blogBoard");
return dateString(new Date());
@ -304,59 +304,59 @@ public class BlogBoardComments {
return timestamp;
}
private void setAuthor(final byte[] author) {
if (author == null)
record.put("author","");
else
record.put("author", Base64Order.enhancedCoder.encode(author));
if (author == null)
this.record.put("author","");
else
this.record.put("author", Base64Order.enhancedCoder.encode(author));
}
public byte[] getAuthor() {
final String author = record.get("author");
if (author == null)
final String author = this.record.get("author");
if (author == null)
return new byte[0];
final byte[] author_byte = Base64Order.enhancedCoder.decode(author);
if (author_byte == null)
return new byte[0];
return author_byte;
}
private void setIp(String ip) {
if ((ip == null) || (ip.length() == 0))
ip = "";
record.put("ip", ip);
this.record.put("ip", ip);
}
public String getIp() {
final String ip = record.get("ip");
if (ip == null)
final String ip = this.record.get("ip");
if (ip == null)
return "127.0.0.1";
return ip;
}
private void setPage(final byte[] page) {
if (page == null)
record.put("page", "");
else
record.put("page", Base64Order.enhancedCoder.encode(page));
if (page == null)
this.record.put("page", "");
else
this.record.put("page", Base64Order.enhancedCoder.encode(page));
}
public byte[] getPage() {
final String page = record.get("page");
if (page == null)
final String page = this.record.get("page");
if (page == null)
return new byte[0];
final byte[] page_byte = Base64Order.enhancedCoder.decode(page);
if (page_byte == null)
return new byte[0];
return page_byte;
}
}
/**
* Is the comment allowed?
* this is possible for moderated blog entry only and means
* the administrator has explicitly allowed the comment.
* @return
*/
public boolean isAllowed() {
return "true".equals(record.get("moderated"));
}
return "true".equals(this.record.get("moderated"));
}
public void allow() {
record.put("moderated", "true");
}
this.record.put("moderated", "true");
}
}
}
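For context on the field accessors above: each comment is held as a flat String-to-String record inside a MapHeap, so binary fields such as subject, author and page are Base64-encoded on write and decoded on read. A standalone sketch of that round trip, using java.util.Base64 only as a stand-in for YaCy's Base64Order coder:

// Encode a binary field into the flat record and decode it again.
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.HashMap;
import java.util.Map;

public class CommentRecordSketch {
    public static void main(final String[] args) {
        final Map<String, String> record = new HashMap<String, String>();
        // write side: binary payload -> Base64 string in the flat record
        final byte[] subject = "Hello blog".getBytes(StandardCharsets.UTF_8);
        record.put("subject", Base64.getEncoder().encodeToString(subject));
        // read side: Base64 string -> binary payload again
        final byte[] back = Base64.getDecoder().decode(record.get("subject"));
        System.out.println(new String(back, StandardCharsets.UTF_8)); // -> Hello blog
    }
}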

View File

@ -34,7 +34,6 @@ import java.util.Map;
import java.util.regex.Pattern;
import net.yacy.document.parser.html.CharacterCoding;
import de.anomic.server.serverCore;
/** Provides methods to handle texts that have been posted in the yacyWiki or other
@ -43,7 +42,7 @@ import de.anomic.server.serverCore;
* @author Alexander Schier [AS], Franz Brausze [FB], Marc Nause [MN]
*/
public class WikiCode extends AbstractWikiParser implements WikiParser {
private static final String EMPTY = "";
private static final String PIPE_ESCAPED = "&#124;";
private static final Pattern REGEX_NOT_CHAR_NUM_OR_UNDERSCORE_PATTERN = Pattern.compile("[^a-zA-Z0-9_]");
@ -82,8 +81,8 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
this.openWiki = openWiki;
this.closeWiki = closeWiki;
openWikiLength = openWiki.length();
closeWikiLength = closeWiki.length();
this.openWikiLength = openWiki.length();
this.closeWikiLength = closeWiki.length();
}
Tags(final String wiki, final String openHTML, final String closeHTML) {
@ -178,10 +177,10 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
private final String cellDivider = PIPE_ESCAPED + PIPE_ESCAPED; // ||
private final String tableEnd = PIPE_ESCAPED + "&#125;"; // |}
private final String attribDivider = PIPE_ESCAPED; // |
private final int lenTableStart = tableStart.length();
private final int lenCellDivider = cellDivider.length();
private final int lenTableEnd = tableEnd.length();
private final int lenAttribDivider = attribDivider.length();
private final int lenTableStart = this.tableStart.length();
private final int lenCellDivider = this.cellDivider.length();
private final int lenTableEnd = this.tableEnd.length();
private final int lenAttribDivider = this.attribDivider.length();
private enum ListType {
ORDERED, UNORDERED;
@ -192,8 +191,8 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
private String defListLevel = EMPTY;
private boolean processingCell = false; //needed for prevention of double-execution of replaceHTML
private boolean processingDefList = false; //needed for definition lists
private boolean escape = false; //needed for escape
private boolean escaped = false; //needed for <pre> not getting in the way
private final boolean escape = false; //needed for escape
private final boolean escaped = false; //needed for <pre> not getting in the way
private boolean newRowStart = false; //needed for the first row not to be empty
private boolean noList = false; //needed for handling of [= and <pre> in lists
private boolean processingPreformattedText = false; //needed for preformatted text
@ -220,7 +219,7 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
out.append(processLineOfWikiCode(hostport, line)).append(serverCore.CRLF_STRING);
}
out.insert(0, createTableOfContents());
tableOfContents.clear();
this.tableOfContents.clear();
return out.toString();
}
@ -235,7 +234,7 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
if ((direlem = input.substring(firstPosition + tags.openWikiLength, secondPosition)) != null) {
//counting double headlines
int doubles = 0;
final Iterator<String> iterator = tableOfContents.iterator();
final Iterator<String> iterator = this.tableOfContents.iterator();
String element;
while (iterator.hasNext()) {
element = iterator.next();
@ -262,7 +261,7 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
//add headlines to list of headlines (so TOC can be created)
if (Arrays.binarySearch(HEADLINE_TAGS, tags.openWiki) >= 0) {
tableOfContents.add((tags.openWikiLength - 1) + direlem);
this.tableOfContents.add((tags.openWikiLength - 1) + direlem);
}
}
}
@ -277,61 +276,61 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
final StringBuilder out = new StringBuilder();
if (line.startsWith(tableStart) && !processingTable) {
processingTable = true;
newRowStart = true;
if (line.startsWith(this.tableStart) && !this.processingTable) {
this.processingTable = true;
this.newRowStart = true;
out.append("<table");
if (line.trim().length() > lenTableStart) {
out.append(filterTableProperties(line.substring(lenTableStart).trim()));
if (line.trim().length() > this.lenTableStart) {
out.append(filterTableProperties(line.substring(this.lenTableStart).trim()));
}
out.append(">");
} else if (line.startsWith(newLine) && processingTable) { // new row
if (!newRowStart) {
} else if (line.startsWith(this.newLine) && this.processingTable) { // new row
if (!this.newRowStart) {
out.append("\t</tr>\n");
} else {
newRowStart = false;
this.newRowStart = false;
}
out.append("\t<tr>");
} else if (line.startsWith(cellDivider) && processingTable) {
} else if (line.startsWith(this.cellDivider) && this.processingTable) {
out.append("\t\t<td");
final int cellEnd = (line.indexOf(cellDivider, lenCellDivider) > 0) ? (line.indexOf(cellDivider, lenCellDivider)) : (line.length());
int propEnd = line.indexOf(attribDivider, lenCellDivider);
final int occImage = line.indexOf("[[Image:", lenCellDivider);
final int occEscape = line.indexOf("[=", lenCellDivider);
final int cellEnd = (line.indexOf(this.cellDivider, this.lenCellDivider) > 0) ? (line.indexOf(this.cellDivider, this.lenCellDivider)) : (line.length());
int propEnd = line.indexOf(this.attribDivider, this.lenCellDivider);
final int occImage = line.indexOf("[[Image:", this.lenCellDivider);
final int occEscape = line.indexOf("[=", this.lenCellDivider);
//If resultOf("[[Image:") is less than propEnd, that means that there is no
//property for this cell, only an image. Without this, YaCy could get confused
//by a | in [[Image:picture.png|alt-text]] or [[Image:picture.png|alt-text]]
//Same for [= (part of [= =])
if ((propEnd > lenCellDivider) && ((occImage > propEnd) || (occImage < 0)) && ((occEscape > propEnd) || (occEscape < 0))) {
propEnd = line.indexOf(attribDivider, lenCellDivider) + lenAttribDivider;
if ((propEnd > this.lenCellDivider) && ((occImage > propEnd) || (occImage < 0)) && ((occEscape > propEnd) || (occEscape < 0))) {
propEnd = line.indexOf(this.attribDivider, this.lenCellDivider) + this.lenAttribDivider;
} else {
propEnd = cellEnd;
}
// both point at same place => new line
if (propEnd == cellEnd) {
propEnd = lenCellDivider;
propEnd = this.lenCellDivider;
} else {
out.append(filterTableProperties(line.substring(lenCellDivider, propEnd - lenAttribDivider).trim()));
out.append(filterTableProperties(line.substring(this.lenCellDivider, propEnd - this.lenAttribDivider).trim()));
}
// quick&dirty fix [MN]
if (propEnd > cellEnd) {
propEnd = lenCellDivider;
propEnd = this.lenCellDivider;
}
processingTable = false;
processingCell = true;
this.processingTable = false;
this.processingCell = true;
out.append(">");
out.append(processTable(line.substring(propEnd, cellEnd).trim()));
out.append("</td>");
processingTable = true;
processingCell = false;
this.processingTable = true;
this.processingCell = false;
if (cellEnd < line.length()) {
out.append("\n");
out.append(processTable(line.substring(cellEnd)));
}
} else if (line.startsWith(tableEnd) && (processingTable)) { // Table end
processingTable = false;
} else if (line.startsWith(this.tableEnd) && (this.processingTable)) { // Table end
this.processingTable = false;
out.append("\t</tr>\n</table>");
out.append(line.substring(lenTableEnd));
out.append(line.substring(this.lenTableEnd));
} else {
out.append(line);
}
@ -395,7 +394,7 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
* @param line line of text to be transformed from wiki code to HTML
* @return HTML fragment
*/
private String processUnorderedList(String line) {
private String processUnorderedList(final String line) {
return processList(line, ListType.UNORDERED);
}
@ -410,7 +409,7 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
final String ret;
if (!noList) { //lists only get processed if not forbidden (see code for [= and <pre>).
if (!this.noList) { //lists only get processed if not forbidden (see code for [= and <pre>).
String listLevel;
final String htmlOpenList;
@ -418,12 +417,12 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
final char symbol;
if (ListType.ORDERED.equals(listType)) {
listLevel = orderedListLevel;
listLevel = this.orderedListLevel;
symbol = '#';
htmlOpenList = HTML_OPEN_ORDERED_LIST;
htmlCloseList = HTML_CLOSE_ORDERED_LIST;
} else if (ListType.UNORDERED.equals(listType)) {
listLevel = unorderedListLevel;
listLevel = this.unorderedListLevel;
symbol = ASTERISK;
htmlOpenList = HTML_OPEN_UNORDERED_LIST;
htmlCloseList = HTML_CLOSE_UNORDERED_LIST;
@ -456,7 +455,7 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
i--;
}
listLevel = listLevel.substring(0, i);
final int startOfContent = listLevel.length();
if (startOfContent > 0) {
@ -474,9 +473,9 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
}
if (ListType.ORDERED.equals(listType)) {
orderedListLevel = listLevel;
this.orderedListLevel = listLevel;
} else if (ListType.UNORDERED.equals(listType)) {
unorderedListLevel = listLevel;
this.unorderedListLevel = listLevel;
}
} else {
ret = line;
@ -492,12 +491,12 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
private String processDefinitionList(final String line) {
final String ret;
if (!noList) { //lists only get processed if not forbidden (see code for [= and <pre>). [MN]
if (!this.noList) { //lists only get processed if not forbidden (see code for [= and <pre>). [MN]
if (line.startsWith(defListLevel + ";")) { //more semicolons
final String copyOfLine = line.substring(defListLevel.length() + 1);
if (line.startsWith(this.defListLevel + ";")) { //more semicolons
final String copyOfLine = line.substring(this.defListLevel.length() + 1);
final int positionOfOpeningTag;
if ((positionOfOpeningTag = copyOfLine.indexOf(":")) > 0) {
if ((positionOfOpeningTag = copyOfLine.indexOf(':',0)) > 0) {
final String definitionItem = copyOfLine.substring(0, positionOfOpeningTag);
final String definitionDescription = copyOfLine.substring(positionOfOpeningTag + 1);
final StringBuilder stringBuilder = new StringBuilder();
@ -507,16 +506,16 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
stringBuilder.append(HTML_CLOSE_DEFINITION_ITEM);
stringBuilder.append(HTML_OPEN_DEFINITION_DESCRIPTION);
stringBuilder.append(definitionDescription);
processingDefList = true;
this.processingDefList = true;
ret = stringBuilder.toString();
} else {
ret = line;
}
defListLevel += ";";
} else if (!defListLevel.isEmpty() && line.startsWith(defListLevel)) { //equal number of semicolons
final String copyOfLine = line.substring(defListLevel.length());
this.defListLevel += ";";
} else if (!this.defListLevel.isEmpty() && line.startsWith(this.defListLevel)) { //equal number of semicolons
final String copyOfLine = line.substring(this.defListLevel.length());
final int positionOfOpeningTag;
if ((positionOfOpeningTag = copyOfLine.indexOf(":")) > 0) {
if ((positionOfOpeningTag = copyOfLine.indexOf(':',0)) > 0) {
final String definitionItem = copyOfLine.substring(0, positionOfOpeningTag);
final String definitionDescription = copyOfLine.substring(positionOfOpeningTag + 1);
final StringBuilder stringBuilder = new StringBuilder();
@ -525,23 +524,23 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
stringBuilder.append(HTML_CLOSE_DEFINITION_ITEM);
stringBuilder.append(HTML_OPEN_DEFINITION_DESCRIPTION);
stringBuilder.append(definitionDescription);
processingDefList = true;
this.processingDefList = true;
ret = stringBuilder.toString();
} else {
ret = line;
}
} else if (!defListLevel.isEmpty()) { //less semicolons
int i = defListLevel.length();
} else if (!this.defListLevel.isEmpty()) { //less semicolons
int i = this.defListLevel.length();
String tmp = EMPTY;
while (!line.startsWith(defListLevel.substring(0, i))) {
while (!line.startsWith(this.defListLevel.substring(0, i))) {
tmp = HTML_CLOSE_DEFINITION_DESCRIPTION + HTML_CLOSE_DEFINITION_LIST;
i--;
}
defListLevel = defListLevel.substring(0, i);
int positionOfOpeningTag = defListLevel.length();
if (!defListLevel.isEmpty()) {
this.defListLevel = this.defListLevel.substring(0, i);
int positionOfOpeningTag = this.defListLevel.length();
if (!this.defListLevel.isEmpty()) {
final String copyOfLine = line.substring(positionOfOpeningTag);
if ((positionOfOpeningTag = copyOfLine.indexOf(":")) > 0) {
if ((positionOfOpeningTag = copyOfLine.indexOf(':',0)) > 0) {
final String definitionItem = copyOfLine.substring(0, positionOfOpeningTag);
final String definitionDescription = copyOfLine.substring(positionOfOpeningTag + 1);
final StringBuilder stringBuilder = new StringBuilder();
@ -551,7 +550,7 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
stringBuilder.append(HTML_CLOSE_DEFINITION_ITEM);
stringBuilder.append(HTML_OPEN_DEFINITION_DESCRIPTION);
stringBuilder.append(definitionDescription);
processingDefList = true;
this.processingDefList = true;
ret = stringBuilder.toString();
} else {
ret = line;
@ -631,7 +630,7 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
// using the wikicode [[Image:share/yacy.gif]]
// or an image DATA/HTDOCS/grafics/kaskelix.jpg with [[Image:grafics/kaskelix.jpg]]
// you are free to use other sub-paths of DATA/HTDOCS
if (kl.indexOf("://") < 1) {
if (kl.indexOf("://",0) < 1) {
kl = "http://" + hostport + "/" + kl;
}
@ -656,7 +655,7 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
break;
}
kl = line.substring(positionOfOpeningTag + LEN_WIKI_OPEN_EXTERNAL_LINK, positionOfClosingTag);
if ((p = kl.indexOf(" ")) > 0) {
if ((p = kl.indexOf(' ',0)) > 0) {
kv = kl.substring(p + 1);
kl = kl.substring(0, p);
} // No text for the link? -> <a href="http://www.url.com/">http://www.url.com/</a>
@ -668,7 +667,7 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
// using the wikicode [share/page.html]
// or a file DATA/HTDOCS/www/page.html with [www/page.html]
// you are free to use other sub-paths of DATA/HTDOCS
if (kl.indexOf("://") < 1) {
if (kl.indexOf("://",0) < 1) {
kl = "http://" + hostport + "/" + kl;
}
line = line.substring(0, positionOfOpeningTag) + "<a class=\"extern\" href=\"" + kl + "\">" + kv + "</a>" + line.substring(positionOfClosingTag + LEN_WIKI_CLOSE_EXTERNAL_LINK);
@ -683,7 +682,7 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
* @return HTML fragment
*/
private String processPreformattedText(final String hostport, String line) {
if (!escaped) {
if (!this.escaped) {
final int positionOfOpeningTag = line.indexOf(WIKI_OPEN_PRE_ESCAPED);
final int positionOfClosingTag = line.indexOf(WIKI_CLOSE_PRE_ESCAPED);
//both <pre> and </pre> in the same line
@ -699,48 +698,48 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
line = line.replaceAll("!pre!!", "!pre!");
} //handles cases like <pre><pre> </pre></pre> <pre> </pre> that would cause an exception otherwise
else {
processingPreformattedText = true;
this.processingPreformattedText = true;
final String temp1 = processLineOfWikiCode(hostport, line.substring(0, positionOfOpeningTag - 1).replaceAll("!tmp!", "!tmp!!") + "!tmp!txt!");
noList = true;
this.noList = true;
final String temp2 = processLineOfWikiCode(hostport, line.substring(positionOfOpeningTag));
noList = false;
this.noList = false;
line = temp1.replaceAll("!tmp!txt!", temp2);
line = line.replaceAll("!tmp!!", "!tmp!");
processingPreformattedText = false;
this.processingPreformattedText = false;
}
} //start <pre>
else if (positionOfOpeningTag >= 0 && !preformattedSpanning) {
processingPreformattedText = true; //prevent surplus line breaks
else if (positionOfOpeningTag >= 0 && !this.preformattedSpanning) {
this.processingPreformattedText = true; //prevent surplus line breaks
final StringBuilder openBlockQuoteTags = new StringBuilder(); //gets filled with <blockquote>s as needed
String preformattedText = "<pre style=\"border:dotted;border-width:thin;\">" + line.substring(positionOfOpeningTag + LEN_WIKI_OPEN_PRE_ESCAPED);
preformattedText = preformattedText.replaceAll("!pre!", "!pre!!");
//taking care of indented lines
while (preindented < positionOfOpeningTag && positionOfOpeningTag < line.length() &&
line.substring(preindented, positionOfOpeningTag).charAt(0) == WIKI_INDENTION) {
preindented++;
while (this.preindented < positionOfOpeningTag && positionOfOpeningTag < line.length() &&
line.substring(this.preindented, positionOfOpeningTag).charAt(0) == WIKI_INDENTION) {
this.preindented++;
openBlockQuoteTags.append(HTML_OPEN_BLOCKQUOTE);
}
line = processLineOfWikiCode(hostport, line.substring(preindented, positionOfOpeningTag).replaceAll("!pre!", "!pre!!") + "!pre!txt!");
line = processLineOfWikiCode(hostport, line.substring(this.preindented, positionOfOpeningTag).replaceAll("!pre!", "!pre!!") + "!pre!txt!");
line = openBlockQuoteTags + line.replace("!pre!txt!", preformattedText);
line = line.replaceAll("!pre!!", "!pre!");
preformattedSpanning = true;
this.preformattedSpanning = true;
} //end </pre>
else if (positionOfClosingTag >= 0 && preformattedSpanning) {
preformattedSpanning = false;
else if (positionOfClosingTag >= 0 && this.preformattedSpanning) {
this.preformattedSpanning = false;
final StringBuilder endBlockQuoteTags = new StringBuilder(); //gets filled with </blockquote>s as needed
String preformattedText = line.substring(0, positionOfClosingTag) + "</pre>";
preformattedText = preformattedText.replaceAll("!pre!", "!pre!!");
//taking care of indented lines
while (preindented > 0) {
while (this.preindented > 0) {
endBlockQuoteTags.append(HTML_CLOSE_BLOCKQUOTE);
preindented--;
this.preindented--;
}
line = processLineOfWikiCode(hostport, "!pre!txt!" + line.substring(positionOfClosingTag + LEN_WIKI_CLOSE_PRE_ESCAPED).replaceAll("!pre!", "!pre!!"));
line = line.replace("!pre!txt!", preformattedText) + endBlockQuoteTags;
line = line.replaceAll("!pre!!", "!pre!");
processingPreformattedText = false;
this.processingPreformattedText = false;
} //Getting rid of surplus </pre>
else if (positionOfOpeningTag >= 0 && !preformattedSpanning) {
else if (positionOfOpeningTag >= 0 && !this.preformattedSpanning) {
int posTag;
while ((posTag = line.indexOf(WIKI_CLOSE_PRE_ESCAPED)) >= 0) {
line = line.substring(0, posTag) + line.substring(posTag + LEN_WIKI_CLOSE_PRE_ESCAPED);
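
Editor's note: the preformatted-text handling above relies on a placeholder trick: literal occurrences of the marker "!pre!" in the surrounding text are doubled so that the single marker "!pre!txt!" can be substituted with the protected block, then the doubling is undone. A small sketch of just that trick (hypothetical PlaceholderSketch class; the recursive wiki processing that the real method applies is left out):

// Sketch of the escape/substitute/unescape pattern used for <pre> blocks above.
public final class PlaceholderSketch {
    public static String substitute(final String surroundingText, final String preformattedBlock) {
        String line = surroundingText.replaceAll("!pre!", "!pre!!") + "!pre!txt!";
        line = line.replace("!pre!txt!", preformattedBlock);   // inject the protected block
        return line.replaceAll("!pre!!", "!pre!");              // restore literal markers
    }

    public static void main(final String[] args) {
        System.out.println(substitute("text with a literal !pre! marker ", "<pre>int x = 1;</pre>"));
        // prints: text with a literal !pre! marker <pre>int x = 1;</pre>
    }
}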
@ -767,23 +766,23 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
int level6 = 0;
int doubles = 0;
String anchorext = EMPTY;
if ((s = tableOfContents.size()) > 2) {
if ((s = this.tableOfContents.size()) > 2) {
directory.append("<table><tr><td><div class=\"WikiTOCBox\">\n");
for (int i = 0; i < s; i++) {
if (i >= tableOfContents.size()) {
if (i >= this.tableOfContents.size()) {
break;
}
element = tableOfContents.get(i);
element = this.tableOfContents.get(i);
if (element == null) {
continue;
}
//counting double headlines
doubles = 0;
for (int j = 0; j < i; j++) {
if (j >= tableOfContents.size()) {
if (j >= this.tableOfContents.size()) {
break;
}
final String d = tableOfContents.get(j);
final String d = this.tableOfContents.get(j);
if (d == null || d.isEmpty()) {
continue;
}
@ -801,7 +800,7 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
final char l = element.charAt(0);
String temp = "";
if (Arrays.binarySearch(HEADLINE_LEVEL, l) >= 0 && !element.isEmpty()) {
switch (l) {
case SIX: {
@ -911,7 +910,7 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
* @return String with replaced tags.
*/
private String tagReplace(final String input, final Tags tags) {
String direlem = null; //string to keep headlines until they get added to List dirElements
final String direlem = null; //string to keep headlines until they get added to List dirElements
final StringBuilder stringBuilder = new StringBuilder(input);
@ -920,12 +919,12 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
//replace pattern if a pair of the pattern can be found in the line
while (((firstPosition = stringBuilder.indexOf(tags.openWiki, secondPosition)) >= 0) &&
((secondPosition = stringBuilder.indexOf(tags.closeWiki, firstPosition + tags.openWikiLength)) >= 0)) {
//extra treatment for headlines
if (Arrays.binarySearch(HEADLINE_TAGS, tags.openWiki) >= 0) {
processHeadline(stringBuilder, firstPosition, tags, secondPosition, direlem);
} else {
int oldLength = stringBuilder.length();
final int oldLength = stringBuilder.length();
stringBuilder.replace(firstPosition, firstPosition + tags.openWikiLength, tags.openHTML);
secondPosition += stringBuilder.length() - oldLength;
stringBuilder.replace(secondPosition, secondPosition + tags.closeWikiLength, tags.closeHTML);
@ -943,15 +942,15 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
private String processLineOfWikiCode(final String hostport, String line) {
//If HTML has not been replaced yet (can happen if method gets called in recursion), replace now!
line = processMetadata(line);
if ((!replacedHtmlAlready || preformattedSpanning) && line.indexOf(WIKI_CLOSE_PRE_ESCAPED) < 0) {
if ((!this.replacedHtmlAlready || this.preformattedSpanning) && line.indexOf(WIKI_CLOSE_PRE_ESCAPED) < 0) {
line = CharacterCoding.unicode2html(line, true);
replacedHtmlAlready = true;
this.replacedHtmlAlready = true;
}
//check if line contains preformatted symbols or if we are in a preformatted sequence already.
if ((line.indexOf(WIKI_OPEN_PRE_ESCAPED) >= 0) ||
(line.indexOf(WIKI_CLOSE_PRE_ESCAPED) >= 0) ||
preformattedSpanning) {
this.preformattedSpanning) {
line = processPreformattedText(hostport, line);
} else {
@ -991,7 +990,7 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
line = tagReplace(line, Tags.STRIKE);
line = tagReplace(line, Tags.UNDERLINE);
line = processUnorderedList(line);
line = processOrderedList(line);
line = processDefinitionList(line);
@ -1000,10 +999,10 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
}
if (!processingPreformattedText) {
replacedHtmlAlready = false;
if (!this.processingPreformattedText) {
this.replacedHtmlAlready = false;
}
if (!(line.endsWith(HTML_CLOSE_LIST_ELEMENT) || processingDefList || escape || processingPreformattedText || processingTable || processingCell)) {
if (!(line.endsWith(HTML_CLOSE_LIST_ELEMENT) || this.processingDefList || this.escape || this.processingPreformattedText || this.processingTable || this.processingCell)) {
line += "<br />";
}
return line;
@ -1014,7 +1013,7 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
int p, q, s = 0;
while ((p = line.indexOf(WIKI_OPEN_METADATA, s)) >= 0 && (q = line.indexOf(WIKI_CLOSE_METADATA, p + 1)) >= 0) {
s = q; // continue with next position
String a = line.substring(p + LEN_WIKI_OPEN_METADATA, q);
final String a = line.substring(p + LEN_WIKI_OPEN_METADATA, q);
if (a.toLowerCase().startsWith("coordinate")) {
// parse Geographical Coordinates as described in
// http://en.wikipedia.org/wiki/Wikipedia:Manual_of_Style_%28dates_and_numbers%29#Geographical_coordinates
@ -1026,24 +1025,24 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
// and if passed through this parser:
// {{Coordinate |NS 45/37/43.0/N |EW. 07/58/41.0/E |type=landmark |region=IT-BI}} ## means: degree/minute/second
// {{Coordinate |NS 51.48994 |EW. 7.33249 |type=landmark |region=DE-NW}}
String b[] = a.split("\\|");
final String b[] = a.split("\\|");
float lon = 0.0f, lat = 0.0f;
float lonm = 0.0f, latm = 0.0f;
String lono = "E", lato = "N";
String name = "";
for (String c: b) {
for (final String c: b) {
if (c.toLowerCase().startsWith("name=")) {
name = c.substring(5);
}
if (c.toUpperCase().startsWith("NS=")) {
String d[] = c.substring(3).split("/");
final String d[] = c.substring(3).split("/");
if (d.length == 1) {float l = Float.parseFloat(d[0]); if (l < 0) {lato = "S"; l = -l;} lat = (float) Math.floor(l); latm = 60.0f * (l - lat);}
else if (d.length == 2) {lat = Float.parseFloat(d[0]); latm = Float.parseFloat(d[1]);}
else if (d.length == 3) {lat = Float.parseFloat(d[0]); latm = Float.parseFloat(d[1]) + Float.parseFloat(d[2]) / 60.0f;}
if (d[d.length-1].toUpperCase().equals("S")) {}
}
if (c.toUpperCase().startsWith("EW=")) {
String d[] = c.substring(3).split("/");
final String d[] = c.substring(3).split("/");
if (d.length == 1) {float l = Float.parseFloat(d[0]); if (l < 0) {lono = "W"; l = -l;} lon = (float) Math.floor(l); lonm = 60.0f * (l - lon);}
else if (d.length == 2) {lon = Float.parseFloat(d[0]); lonm = Float.parseFloat(d[1]);}
else if (d.length == 3) {lon = Float.parseFloat(d[0]); lonm = Float.parseFloat(d[1]) + Float.parseFloat(d[2]) / 60.0f;}
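
Editor's note: the metadata hunk above converts the NS=/EW= fields of the Wikipedia {{Coordinate ...}} template, given either as decimal degrees or as degree/minute(/second) triples, into whole degrees plus decimal minutes. A standalone sketch of that conversion for one field (hypothetical CoordinateFieldSketch class, mirroring the branches shown above):

// Sketch of one NS= or EW= field: "45/37/43.0" (deg/min/sec) or "51.48994" (decimal degrees).
public final class CoordinateFieldSketch {
    /** returns {degrees, minutes} for one coordinate field */
    public static float[] degreesMinutes(final String field) {
        final String[] d = field.split("/");
        float deg = 0.0f, min = 0.0f;
        if (d.length == 1) {
            float l = Float.parseFloat(d[0]);
            if (l < 0) l = -l;                      // the sign is carried by the N/S or E/W letter
            deg = (float) Math.floor(l);
            min = 60.0f * (l - deg);
        } else if (d.length == 2) {
            deg = Float.parseFloat(d[0]);
            min = Float.parseFloat(d[1]);
        } else if (d.length >= 3) {
            deg = Float.parseFloat(d[0]);
            min = Float.parseFloat(d[1]) + Float.parseFloat(d[2]) / 60.0f;
        }
        return new float[] {deg, min};
    }

    public static void main(final String[] args) {
        final float[] dm = degreesMinutes("45/37/43.0");
        System.out.println(dm[0] + " degrees, " + dm[1] + " minutes"); // 45.0 degrees, ~37.72 minutes
    }
}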
@ -1060,29 +1059,29 @@ public class WikiCode extends AbstractWikiParser implements WikiParser {
}
return line;
}
private class TableOfContent {
private final List<String> toc = new ArrayList<String>(); // needs to be list which ensures order
int size() {
return toc.size();
return this.toc.size();
}
String get(final int index) {
return toc.get(index);
return this.toc.get(index);
}
synchronized boolean add(final String element) {
return toc.add(element);
return this.toc.add(element);
}
Iterator<String> iterator() {
return toc.iterator();
return this.toc.iterator();
}
void clear() {
toc.clear();
this.toc.clear();
}
}
}

View File

@ -259,7 +259,7 @@ public final class HTTPDFileHandler {
String clientIP = (String) conProp.get(HeaderFramework.CONNECTION_PROP_CLIENTIP); if (clientIP == null) clientIP = "unknown-host";
// check hack attacks in path
if (path.indexOf("..") >= 0) {
if (path.indexOf("..",0) >= 0) {
HTTPDemon.sendRespondError(conProp,out,4,403,null,"Access not allowed",null);
return;
}
@ -267,7 +267,7 @@ public final class HTTPDFileHandler {
path = UTF8.decodeURL(path);
// check against hack attacks in path
if (path.indexOf("..") >= 0) {
if (path.indexOf("..",0) >= 0) {
HTTPDemon.sendRespondError(conProp,out,4,403,null,"Access not allowed",null);
return;
}
@ -293,7 +293,7 @@ public final class HTTPDFileHandler {
// (the alternative is that we deliver a 401 to request authorization)
// -1- the page is not protected; or
final boolean protectedPage = path.indexOf("_p.") > 0;
final boolean protectedPage = path.indexOf("_p.",0) > 0;
boolean accessGranted = !protectedPage;
// -2- a password is not configured; or
@ -398,7 +398,7 @@ public final class HTTPDFileHandler {
String val;
while (e.hasNext()) {
val = e.next();
if ((val != null) && (val.indexOf("<script") >= 0) && !path.equals("/Crawler_p.html")) {
if ((val != null) && (val.indexOf("<script",0) >= 0) && !path.equals("/Crawler_p.html")) {
// deny request
HTTPDemon.sendRespondError(conProp,out,4,403,null,"bad post values",null);
return;
@ -731,7 +731,7 @@ public final class HTTPDFileHandler {
}
final String cgiReturn = StringBuilder.toString();
int indexOfDelimiter = cgiReturn.indexOf("\n\n");
int indexOfDelimiter = cgiReturn.indexOf("\n\n",0);
String[] cgiHeader = new String[0];
if (indexOfDelimiter > -1) {
cgiHeader = cgiReturn.substring(0, indexOfDelimiter).split("\n");

View File

@ -1006,7 +1006,7 @@ public final class HTTPDProxyHandler {
}
// only gzip-encoding is supported, remove other encodings (e.g. deflate)
if ((requestHeader.get(HeaderFramework.ACCEPT_ENCODING,"")).indexOf("gzip") != -1) {
if ((requestHeader.get(HeaderFramework.ACCEPT_ENCODING,"")).indexOf("gzip",0) != -1) {
requestHeader.put(HeaderFramework.ACCEPT_ENCODING, "gzip");
} else {
requestHeader.put(HeaderFramework.ACCEPT_ENCODING, "");
@ -1363,13 +1363,13 @@ public final class HTTPDProxyHandler {
errorMessage = "Unable to establish a connection to the destination host. Connect timed out.";
} else {
final String exceptionMsg = e.getMessage();
if ((exceptionMsg != null) && (exceptionMsg.indexOf("Corrupt GZIP trailer") >= 0)) {
if ((exceptionMsg != null) && (exceptionMsg.indexOf("Corrupt GZIP trailer",0) >= 0)) {
// just do nothing, we leave it this way
if (log.isFine()) log.logFine("ignoring bad gzip trail for URL " + url + " (" + e.getMessage() + ")");
forceConnectionClose(conProp);
} else if ((exceptionMsg != null) && (exceptionMsg.indexOf("Connection reset")>= 0)) {
} else if ((exceptionMsg != null) && (exceptionMsg.indexOf("Connection reset",0)>= 0)) {
errorMessage = "Connection reset";
} else if ((exceptionMsg != null) && (exceptionMsg.indexOf("unknown host")>=0)) {
} else if ((exceptionMsg != null) && (exceptionMsg.indexOf("unknown host",0)>=0)) {
try {
detailedErrorMsgMap = unknownHostHandling(conProp);
httpStatusText = "Unknown Host";
@ -1380,10 +1380,10 @@ public final class HTTPDProxyHandler {
}
} else if ((exceptionMsg != null) &&
(
(exceptionMsg.indexOf("socket write error")>=0) ||
(exceptionMsg.indexOf("Read timed out") >= 0) ||
(exceptionMsg.indexOf("Broken pipe") >= 0) ||
(exceptionMsg.indexOf("server has closed connection") >= 0)
(exceptionMsg.indexOf("socket write error",0)>=0) ||
(exceptionMsg.indexOf("Read timed out",0) >= 0) ||
(exceptionMsg.indexOf("Broken pipe",0) >= 0) ||
(exceptionMsg.indexOf("server has closed connection",0) >= 0)
)) {
errorMessage = exceptionMsg;
Log.logException(e);

View File

@ -208,7 +208,7 @@ public final class HTTPDemon implements serverHandler, Cloneable {
// is always (but not only) called if an IO-dependent exception occurs.
log.logSevere("Unexpected Error. " + e.getClass().getName(),e);
final String message = e.getMessage();
if (message != null && message.indexOf("heap space") > 0) Log.logException(e);
if (message != null && message.indexOf("heap space",0) > 0) Log.logException(e);
return "501 Exception occurred: " + message;
}
@ -233,8 +233,8 @@ public final class HTTPDemon implements serverHandler, Cloneable {
// persistent by default, but closed with the "Connection: close"
// property.
boolean persistent = !(httpVersion.equals(HeaderFramework.HTTP_VERSION_0_9) || httpVersion.equals(HeaderFramework.HTTP_VERSION_1_0));
if ((header.get(RequestHeader.CONNECTION, "keep-alive")).toLowerCase().indexOf("close") != -1 ||
(header.get(RequestHeader.PROXY_CONNECTION, "keep-alive")).toLowerCase().indexOf("close") != -1) {
if ((header.get(RequestHeader.CONNECTION, "keep-alive")).toLowerCase().indexOf("close",0) != -1 ||
(header.get(RequestHeader.PROXY_CONNECTION, "keep-alive")).toLowerCase().indexOf("close",0) != -1) {
persistent = false;
}
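
Editor's note: the hunk above decides connection persistence: HTTP/0.9 and HTTP/1.0 default to non-persistent, and a "close" token in either Connection or Proxy-Connection forces the connection closed. A self-contained sketch of that decision (hypothetical KeepAliveSketch class; plain string literals stand in for the HeaderFramework constants):

// Sketch of the keep-alive decision shown above.
public final class KeepAliveSketch {
    public static boolean persistent(final String httpVersion,
                                     final String connectionHeader,
                                     final String proxyConnectionHeader) {
        boolean persistent = !("HTTP/0.9".equals(httpVersion) || "HTTP/1.0".equals(httpVersion));
        final String c = connectionHeader == null ? "keep-alive" : connectionHeader;
        final String p = proxyConnectionHeader == null ? "keep-alive" : proxyConnectionHeader;
        if (c.toLowerCase().indexOf("close", 0) != -1 || p.toLowerCase().indexOf("close", 0) != -1) {
            persistent = false;                     // explicit "Connection: close" wins
        }
        return persistent;
    }

    public static void main(final String[] args) {
        System.out.println(persistent("HTTP/1.1", null, null));          // true
        System.out.println(persistent("HTTP/1.1", "close", null));       // false
        System.out.println(persistent("HTTP/1.0", "keep-alive", null));  // false
    }
}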

View File

@ -1,4 +1,4 @@
// serverClassLoader.java
// serverClassLoader.java
// -----------------------
// (C) by Michael Peter Christen; mc@yacy.net
// first published on http://www.anomic.de
@ -43,7 +43,7 @@ public final class serverClassLoader extends ClassLoader {
public serverClassLoader(final ClassLoader parent) {
super(parent);
classes = new ConcurrentHashMap<File, Class<?>>(100);
this.classes = new ConcurrentHashMap<File, Class<?>>(100);
}
public Package[] packages() {
@ -54,8 +54,8 @@ public final class serverClassLoader extends ClassLoader {
// take the class out of the cache, denoted by the class file
Class<?> c = this.classes.get(classfile);
if (c != null) return c;
final int p = classfile.getName().indexOf(".");
final int p = classfile.getName().indexOf('.',0);
if (p < 0) throw new ClassNotFoundException("wrong class name: " + classfile.getName());
final String classname = classfile.getName().substring(0, p);

View File

@ -725,8 +725,8 @@ public final class serverCore extends AbstractBusyThread implements BusyThread {
// now we need to initialize the session
if (this.commandCounter == 0) {
// first we need to determine the proper protocol handler
if (this.request.indexOf("HTTP") >= 0) reqProtocol = "HTTP";
else reqProtocol = null;
if (this.request.indexOf("HTTP",0) >= 0) reqProtocol = "HTTP";
else reqProtocol = null;
if (this.request == null) break;
if (reqProtocol != null && reqProtocol.equals("HTTP")) {

View File

@ -1,4 +1,4 @@
// nxTools.java
// nxTools.java
// -------------------------------------
// (C) by Michael Peter Christen; mc@yacy.net
// first published on http://www.anomic.de
@ -71,7 +71,7 @@ public class nxTools {
}
return result;
}
public static ArrayList<String> grep(final ArrayList<String> list, final int afterContext, final String pattern) {
final Iterator<String> i = list.iterator();
int ac = 0;
@ -88,13 +88,13 @@ public class nxTools {
ac--;
}
return result;
}
}
public static String tail1(final Vector<String> list) {
if (list == null || list.isEmpty()) return "";
return list.lastElement();
}
public static String tail1(final ArrayList<String> list) {
if (list == null || list.isEmpty()) return "";
return list.get(list.size()-1);
@ -132,7 +132,7 @@ public class nxTools {
}
return theLine;
}
/**
* This function shortens URL strings<br>
*
@ -140,7 +140,7 @@ public class nxTools {
* <dl><dt>normal domain:</dt><dd>http://domain.net/leftpath..rightpath</dd>
* <dt>long domain:</dt><dd>http://very_very_long_domain.net/le..</dd></dl>
* @param url String like a URL
* @param len
* @param len
* @return the shorten or the old String
*/
public static String shortenURLString(final String url, final int len) {
@ -149,7 +149,7 @@ public class nxTools {
int urlLen = url.length();
if (urlLen > len) {
int cpos;
cpos = url.indexOf("://");
cpos = url.indexOf("://",0);
if (cpos >= 0) {
cpos = url.indexOf("/", cpos + 3);
if (cpos < 0) { // very crazy domain or very short len
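
Editor's note: the javadoc above describes the intended output of shortenURLString (keep the host, elide part of the path). A rough approximation of that behaviour, not the exact nxTools output (hypothetical ShortenUrlSketch class):

// Approximate sketch: keep scheme+host, elide the middle of the path to fit "len".
public final class ShortenUrlSketch {
    public static String shorten(final String url, final int len) {
        if (url == null || url.length() <= len) return url;
        final int hostEnd = url.indexOf('/', url.indexOf("://") + 3);
        if (hostEnd < 0 || hostEnd > len - 2) {
            return url.substring(0, Math.max(0, len - 2)) + "..";   // very long domain
        }
        final int keep = len - 2 - hostEnd;                          // room left for the path
        return url.substring(0, hostEnd + Math.max(1, keep / 2)) + ".."
             + url.substring(url.length() - keep / 2);               // normal domain
    }

    public static void main(final String[] args) {
        System.out.println(shorten("http://domain.net/a/very/long/left/path/and/right/path.html", 40));
    }
}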

View File

@ -1,4 +1,4 @@
// whois.java
// whois.java
// -------------------------------------
// (C) by Michael Peter Christen; mc@yacy.net
// first published on http://www.anomic.de
@ -39,7 +39,7 @@ public class whois {
int pos;
final Properties result = new Properties();
while ((line = br.readLine()) != null) {
pos = line.indexOf(":");
pos = line.indexOf(':',0);
if (pos > 0) {
key = line.substring(0, pos).trim().toLowerCase();
value = line.substring(pos + 1).trim();
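
Editor's note: the whois hunk above splits each response line at the first ':' into a lowercased key and a trimmed value. A self-contained sketch of that loop, fed from a canned string instead of a live whois connection (hypothetical WhoisParseSketch class):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.Properties;

// Sketch of the key/value splitting shown above.
public final class WhoisParseSketch {
    public static Properties parse(final BufferedReader br) throws IOException {
        final Properties result = new Properties();
        String line;
        while ((line = br.readLine()) != null) {
            final int pos = line.indexOf(':');
            if (pos > 0) {
                result.put(line.substring(0, pos).trim().toLowerCase(),
                           line.substring(pos + 1).trim());
            }
        }
        return result;
    }

    public static void main(final String[] args) throws IOException {
        final Properties p = parse(new BufferedReader(new StringReader(
                "Domain Name: yacy.net\nRegistrar: example registrar")));
        System.out.println(p.getProperty("domain name")); // yacy.net
    }
}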

View File

@ -629,7 +629,7 @@ public class MultiProtocolURI implements Serializable, Comparable<MultiProtocolU
q = q.substring(p + 1);
continue;
}
final int p = q.toLowerCase().indexOf("&" + sid.toLowerCase() + "=");
final int p = q.toLowerCase().indexOf("&" + sid.toLowerCase() + "=",0);
if (p < 0) continue;
final int p1 = q.indexOf('&', p+1);
if (p1 < 0) {
@ -757,7 +757,7 @@ public class MultiProtocolURI implements Serializable, Comparable<MultiProtocolU
// remove all double-spaces
int p;
while ((p = t.indexOf(" ")) >= 0) t = t.substring(0, p) + t.substring(p + 1);
while ((p = t.indexOf(" ",0)) >= 0) t = t.substring(0, p) + t.substring(p + 1);
// split the string into tokens and add all camel-case splitting
final String[] u = t.split(" ");
@ -892,32 +892,32 @@ public class MultiProtocolURI implements Serializable, Comparable<MultiProtocolU
public final boolean isCGI() {
final String ls = unescape(this.path.toLowerCase());
return ls.indexOf(".cgi") >= 0 ||
ls.indexOf(".exe") >= 0;
return ls.indexOf(".cgi",0) >= 0 ||
ls.indexOf(".exe",0) >= 0;
}
public final boolean isIndividual() {
final String q = unescape(this.path.toLowerCase());
for (final String sid: sessionIDnames.keySet()) {
if (q.startsWith(sid.toLowerCase() + "=")) return true;
final int p = q.indexOf("&" + sid.toLowerCase() + "=");
final int p = q.indexOf("&" + sid.toLowerCase() + "=",0);
if (p >= 0) return true;
}
int pos;
return
((pos = q.indexOf("sid")) > 0 &&
((pos = q.indexOf("sid",0)) > 0 &&
(q.charAt(--pos) == '?' || q.charAt(pos) == '&' || q.charAt(pos) == ';') &&
(pos += 5) < q.length() &&
(q.charAt(pos) != '&' && q.charAt(--pos) == '=')
) ||
((pos = q.indexOf("sessionid")) > 0 &&
((pos = q.indexOf("sessionid",0)) > 0 &&
(pos += 10) < q.length() &&
(q.charAt(pos) != '&' &&
(q.charAt(--pos) == '=' || q.charAt(pos) == '/'))
) ||
((pos = q.indexOf("phpsessid")) > 0 &&
((pos = q.indexOf("phpsessid",0)) > 0 &&
(pos += 10) < q.length() &&
(q.charAt(pos) != '&' &&
(q.charAt(--pos) == '=' || q.charAt(pos) == '/')));
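
Editor's note: isIndividual() above flags URLs that look session-specific by scanning the query for "sid", "sessionid" and "phpsessid" with hand-rolled delimiter checks. A simplified sketch of the same idea using a regular expression (hypothetical SessionIdSketch class; the exact parameter set and delimiter rules of the original are assumed, not reproduced):

import java.util.regex.Pattern;

// Simplified session-ID detection in a URL query string.
public final class SessionIdSketch {
    private static final Pattern SESSION_PARAM = Pattern.compile(
            "(?i)(?:^|[?&;])(?:sid|sessionid|phpsessid)=[^&]+");

    public static boolean looksSessionSpecific(final String query) {
        return query != null && SESSION_PARAM.matcher(query).find();
    }

    public static void main(final String[] args) {
        System.out.println(looksSessionSpecific("page=3&phpsessid=a1b2c3")); // true
        System.out.println(looksSessionSpecific("page=3&lang=en"));          // false
    }
}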
@ -1148,7 +1148,7 @@ public class MultiProtocolURI implements Serializable, Comparable<MultiProtocolU
private static final String splitrex = " |/|\\(|\\)|-|\\:|_|\\.|,|\\?|!|'|" + '"';
public static final Pattern splitpattern = Pattern.compile(splitrex);
public static String[] urlComps(String normalizedURL) {
final int p = normalizedURL.indexOf("//");
final int p = normalizedURL.indexOf("//",0);
if (p > 0) normalizedURL = normalizedURL.substring(p + 2);
return splitpattern.split(normalizedURL.toLowerCase()); // word components of the url
}

View File

@ -138,9 +138,9 @@ public class RSSReader extends DefaultHandler {
} while(!end.contains("</"));
Type type = Type.none;
if (end.indexOf("rss") > 0) type = Type.rss;
if (end.indexOf("feed") > 0) type = Type.atom;
if (end.indexOf("rdf") > 0) type = Type.rdf;
if (end.indexOf("rss",0) > 0) type = Type.rss;
if (end.indexOf("feed",0) > 0) type = Type.atom;
if (end.indexOf("rdf",0) > 0) type = Type.rdf;
return type;
}
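
Editor's note: the RSSReader hunk above sniffs the feed type from the tail of the document (the closing root element) by looking for "rss", "feed" (Atom) or "rdf". A tiny sketch of that check on a plain string (hypothetical FeedTypeSketch class; reading the tail of the byte array is omitted):

// Sketch of the feed-type sniffing shown above.
public final class FeedTypeSketch {
    public enum Type { rss, atom, rdf, none }

    public static Type sniff(final String tail) {
        Type type = Type.none;
        if (tail.indexOf("rss", 0) > 0) type = Type.rss;
        if (tail.indexOf("feed", 0) > 0) type = Type.atom;
        if (tail.indexOf("rdf", 0) > 0) type = Type.rdf;
        return type;
    }

    public static void main(final String[] args) {
        System.out.println(sniff("</channel></rss>")); // rss
        System.out.println(sniff("</feed>"));          // atom
    }
}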

View File

@ -765,7 +765,7 @@ public class Domains {
b0 != 127 && // loopback
(b0 != 172 || b1 < 16 || b1 > 31) && // class B reserved
(b0 != 192 || b1 != 168) && // class C reserved
(a.getHostAddress().indexOf(":") < 0))
(a.getHostAddress().indexOf(':',0) < 0))
return a;
}
// there is only a local address
@ -775,24 +775,24 @@ public class Domains {
final InetAddress localHostAddress = InetAddress.getLocalHost();
if (localHostAddress != null &&
(0Xff & localHostAddress.getAddress()[0]) != 127 &&
localHostAddress.getHostAddress().indexOf(":") < 0) return localHostAddress;
localHostAddress.getHostAddress().indexOf(':',0) < 0) return localHostAddress;
} catch (final UnknownHostException e) {
}
// we filter out the loopback address 127.0.0.1 and all addresses without a name
for (final InetAddress a: localHostAddresses) {
if ((0Xff & a.getAddress()[0]) != 127 &&
a.getHostAddress().indexOf(":") < 0 &&
a.getHostAddress().indexOf(':',0) < 0 &&
a.getHostName() != null &&
!a.getHostName().isEmpty()) return a;
}
// if no address has a name, then take any other than the loopback
for (final InetAddress a: localHostAddresses) {
if ((0Xff & a.getAddress()[0]) != 127 &&
a.getHostAddress().indexOf(":") < 0) return a;
a.getHostAddress().indexOf(':',0) < 0) return a;
}
// if all fails, give back whatever we have
for (final InetAddress a: localHostAddresses) {
if (a.getHostAddress().indexOf(":") < 0) return a;
if (a.getHostAddress().indexOf(':',0) < 0) return a;
}
// finally, just get any
return localHostAddresses.iterator().next();
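
Editor's note: the Domains hunk above picks a local address in order of preference (public IPv4, then getLocalHost if not loopback, then named non-loopback IPv4, and so on); the indexOf(':') test rejects IPv6 literals. A sketch of the "public IPv4" test only (hypothetical LocalIpFilterSketch class; the 10.0.0.0/8 check is an assumption, as that branch is not visible in this hunk):

import java.net.InetAddress;
import java.net.UnknownHostException;

// Sketch of the address filtering: reject IPv6, loopback and private ranges.
public final class LocalIpFilterSketch {
    public static boolean isPublicIPv4(final InetAddress a) {
        if (a.getHostAddress().indexOf(':', 0) >= 0) return false; // IPv6 literal
        final int b0 = 0xff & a.getAddress()[0];
        final int b1 = 0xff & a.getAddress()[1];
        return b0 != 10 &&                                // 10.0.0.0/8 (assumed, not shown above)
               b0 != 127 &&                               // loopback
               (b0 != 172 || b1 < 16 || b1 > 31) &&       // 172.16.0.0/12
               (b0 != 192 || b1 != 168);                  // 192.168.0.0/16
    }

    public static void main(final String[] args) throws UnknownHostException {
        System.out.println(isPublicIPv4(InetAddress.getByName("192.168.1.5")));   // false
        System.out.println(isPublicIPv4(InetAddress.getByName("93.184.216.34"))); // true
    }
}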

View File

@ -107,7 +107,7 @@ public class RequestHeader extends HeaderFramework {
public boolean acceptGzip() {
return ((containsKey(ACCEPT_ENCODING)) &&
((get(ACCEPT_ENCODING)).toUpperCase().indexOf("GZIP")) != -1);
((get(ACCEPT_ENCODING)).toUpperCase().indexOf("GZIP",0)) != -1);
}
public FileType fileType() {

View File

@ -7,12 +7,12 @@
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program in the file lgpl21.txt
* If not, see <http://www.gnu.org/licenses/>.
@ -35,7 +35,7 @@ import org.apache.log4j.Logger;
public class ResponseHeader extends HeaderFramework {
// response header properties
private static final long serialVersionUID = 0L;
private static Logger log = Logger.getLogger(ResponseHeader.class);
@ -43,69 +43,69 @@ public class ResponseHeader extends HeaderFramework {
super();
}
public ResponseHeader(Header[] headers) {
public ResponseHeader(final Header[] headers) {
super();
for (final Header h : headers) {
this.add(h.getName(), h.getValue());
add(h.getName(), h.getValue());
}
}
public ResponseHeader(final HashMap<String, String> reverseMappingCache) {
super(reverseMappingCache);
}
public ResponseHeader(final HashMap<String, String> reverseMappingCache, final Map<String, String> othermap) {
super(reverseMappingCache, othermap);
}
public Date date() {
Date d = headerDate(HeaderFramework.DATE);
final Date d = headerDate(HeaderFramework.DATE);
if (d == null) return new Date(); else return d;
}
public Date expires() {
return headerDate(EXPIRES);
}
public Date lastModified() {
Date d = headerDate(LAST_MODIFIED);
final Date d = headerDate(LAST_MODIFIED);
if (d == null) return date(); else return d;
}
public long age() {
final Date lm = lastModified();
final Date sd = date();
if (lm == null) return Long.MAX_VALUE;
return ((sd == null) ? new Date() : sd).getTime() - lm.getTime();
}
public boolean gzip() {
return ((containsKey(CONTENT_ENCODING)) &&
((get(CONTENT_ENCODING)).toUpperCase().startsWith("GZIP")));
}
public static Object[] parseResponseLine(final String respLine) {
if ((respLine == null) || (respLine.length() == 0)) {
return new Object[]{"HTTP/1.0",Integer.valueOf(500),"status line parse error"};
}
int p = respLine.indexOf(" ");
int p = respLine.indexOf(' ',0);
if (p < 0) {
return new Object[]{"HTTP/1.0",Integer.valueOf(500),"status line parse error"};
}
String httpVer, status, statusText;
Integer statusCode;
// the http version reported by the server
httpVer = respLine.substring(0,p);
// Status of the request, e.g. "200 OK"
status = respLine.substring(p + 1).trim(); // the status code plus reason-phrase
// splitting the status into statuscode and statustext
p = status.indexOf(" ");
p = status.indexOf(' ',0);
try {
statusCode = Integer.valueOf((p < 0) ? status.trim() : status.substring(0,p).trim());
statusText = (p < 0) ? "" : status.substring(p+1).trim();
@ -113,10 +113,10 @@ public class ResponseHeader extends HeaderFramework {
statusCode = Integer.valueOf(500);
statusText = status;
}
return new Object[]{httpVer,statusCode,statusText};
}
/**
* @param header
@ -135,15 +135,15 @@ public class ResponseHeader extends HeaderFramework {
// use system default
return Charset.defaultCharset();
}
} catch(IllegalCharsetNameException e) {
} catch(final IllegalCharsetNameException e) {
log.warn("Charset in header is illegal: '"+ charSetName +"'\n "+ toString() + "\n" + e.getMessage());
// use system default
return Charset.defaultCharset();
} catch (UnsupportedCharsetException e) {
} catch (final UnsupportedCharsetException e) {
log.warn("Charset in header is unsupported: '"+ charSetName +"'\n "+ toString() + "\n" + e.getMessage());
// use system default
return Charset.defaultCharset();
}
return Charset.forName(charSetName);
}
}
}

File diff suppressed because it is too large.

View File

@ -142,7 +142,7 @@ public class SolrScheme extends ConfigurationSet {
final String rel = p.getProperty("rel", ""); // the rel-attribute
final String text = p.getProperty("text", ""); // the text between the <a></a> tag
final String urls = url.toNormalform(false, false);
final int pr = urls.indexOf("://");
final int pr = urls.indexOf("://",0);
inboundlinksURLProtocol[c] = urls.substring(0, pr);
inboundlinksURLStub[c] = urls.substring(pr + 3);
inboundlinksName[c] = name.length() > 0 ? name : "";
@ -179,7 +179,7 @@ public class SolrScheme extends ConfigurationSet {
final String rel = p.getProperty("rel", ""); // the rel-attribute
final String text = p.getProperty("text", ""); // the text between the <a></a> tag
final String urls = url.toNormalform(false, false);
final int pr = urls.indexOf("://");
final int pr = urls.indexOf("://",0);
outboundlinksURLProtocol[c] = urls.substring(0, pr);
outboundlinksURLStub[c] = urls.substring(pr + 3);
outboundlinksName[c] = name.length() > 0 ? name : "";
@ -246,20 +246,20 @@ public class SolrScheme extends ConfigurationSet {
final String robots_meta = html.getMetas().get("robots");
// this tag may have values: all, index, noindex, nofollow
if (robots_meta != null) {
if (robots_meta.indexOf("all") >= 0) b += 1; // set bit 0
if (robots_meta.indexOf("index") == 0 || robots_meta.indexOf(" index") >= 0 || robots_meta.indexOf(",index") >= 0 ) b += 2; // set bit 1
if (robots_meta.indexOf("noindex") >= 0) b += 4; // set bit 2
if (robots_meta.indexOf("nofollow") >= 0) b += 8; // set bit 3
if (robots_meta.indexOf("all",0) >= 0) b += 1; // set bit 0
if (robots_meta.indexOf("index",0) == 0 || robots_meta.indexOf(" index",0) >= 0 || robots_meta.indexOf(",index",0) >= 0 ) b += 2; // set bit 1
if (robots_meta.indexOf("noindex",0) >= 0) b += 4; // set bit 2
if (robots_meta.indexOf("nofollow",0) >= 0) b += 8; // set bit 3
}
String x_robots_tag = header.get(HeaderFramework.X_ROBOTS_TAG, "");
if (x_robots_tag.length() == 0) x_robots_tag = header.get(HeaderFramework.X_ROBOTS, "");
// this tag may have values: noarchive, nosnippet, noindex, unavailable_after
if (x_robots_tag.length() > 0) {
if (x_robots_tag.indexOf("noarchive") >= 0) b += 256; // set bit 8
if (x_robots_tag.indexOf("nosnippet") >= 0) b += 512; // set bit 9
if (x_robots_tag.indexOf("noindex") >= 0) b += 1024; // set bit 10
if (x_robots_tag.indexOf("nofollow") >= 0) b += 2048; // set bit 11
if (x_robots_tag.indexOf("unavailable_after") >=0) b += 4096; // set bit 12
if (x_robots_tag.indexOf("noarchive",0) >= 0) b += 256; // set bit 8
if (x_robots_tag.indexOf("nosnippet",0) >= 0) b += 512; // set bit 9
if (x_robots_tag.indexOf("noindex",0) >= 0) b += 1024; // set bit 10
if (x_robots_tag.indexOf("nofollow",0) >= 0) b += 2048; // set bit 11
if (x_robots_tag.indexOf("unavailable_after",0) >=0) b += 4096; // set bit 12
}
addSolr(solrdoc, "robots_i", b);
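
Editor's note: the hunk above packs robots directives into the robots_i bit field: bits 0-3 come from the robots meta tag, bits 8-12 from the X-Robots-Tag header. A standalone sketch of the same packing (hypothetical RobotsBitsSketch class, not part of SolrScheme):

// Sketch of the robots_i bit packing, bit values taken from the hunk above.
public final class RobotsBitsSketch {
    public static int robotsBits(final String robotsMeta, final String xRobotsTag) {
        int b = 0;
        if (robotsMeta != null) {
            if (robotsMeta.indexOf("all", 0) >= 0) b += 1;        // bit 0
            if (robotsMeta.indexOf("index", 0) == 0
                    || robotsMeta.indexOf(" index", 0) >= 0
                    || robotsMeta.indexOf(",index", 0) >= 0) b += 2;  // bit 1
            if (robotsMeta.indexOf("noindex", 0) >= 0) b += 4;    // bit 2
            if (robotsMeta.indexOf("nofollow", 0) >= 0) b += 8;   // bit 3
        }
        if (xRobotsTag != null && xRobotsTag.length() > 0) {
            if (xRobotsTag.indexOf("noarchive", 0) >= 0) b += 256;          // bit 8
            if (xRobotsTag.indexOf("nosnippet", 0) >= 0) b += 512;          // bit 9
            if (xRobotsTag.indexOf("noindex", 0) >= 0) b += 1024;           // bit 10
            if (xRobotsTag.indexOf("nofollow", 0) >= 0) b += 2048;          // bit 11
            if (xRobotsTag.indexOf("unavailable_after", 0) >= 0) b += 4096; // bit 12
        }
        return b;
    }

    public static void main(final String[] args) {
        // "noindex, nofollow" sets only bits 2 and 3 -> 4 + 8 = 12
        // (the " index" and ",index" tests do not match because of the space after the comma)
        System.out.println(robotsBits("noindex, nofollow", ""));
    }
}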

View File

@ -194,7 +194,7 @@ public class SolrShardingConnector implements SolrConnector {
final InetAddress localhostExternAddress = Domains.myPublicLocalIP();
final String localhostExtern = localhostExternAddress == null ? "127.0.0.1" : localhostExternAddress.getHostAddress();
for (String u: this.urls) {
int p = u.indexOf("localhost"); if (p < 0) p = u.indexOf("127.0.0.1");
int p = u.indexOf("localhost",0); if (p < 0) p = u.indexOf("127.0.0.1",0);
if (p >= 0) u = u.substring(0, p) + localhostExtern + u.substring(p + 9);
urlAdmin[i++] = u + (u.endsWith("/") ? "admin/" : "/admin/");
}

View File

@ -362,7 +362,7 @@ public class SolrSingleConnector implements SolrConnector {
final InetAddress localhostExternAddress = Domains.myPublicLocalIP();
final String localhostExtern = localhostExternAddress == null ? "127.0.0.1" : localhostExternAddress.getHostAddress();
String u = this.solrurl;
int p = u.indexOf("localhost"); if (p < 0) p = u.indexOf("127.0.0.1");
int p = u.indexOf("localhost",0); if (p < 0) p = u.indexOf("127.0.0.1",0);
if (p >= 0) u = u.substring(0, p) + localhostExtern + u.substring(p + 9);
return u + (u.endsWith("/") ? "admin/" : "/admin/");
}

View File

@ -132,9 +132,9 @@ public class Document {
}
public Set<String> getContentLanguages() {
return this.languages;
return this.languages;
}
public String getFileExtension() {
return this.source.getFileExtension();
}
@ -415,8 +415,8 @@ dc_rights
for (final Map.Entry<MultiProtocolURI, Properties> entry: this.anchors.entrySet()) {
url = entry.getKey();
if (url == null) continue;
final boolean noindex = entry.getValue().getProperty("rel", "").toLowerCase().indexOf("noindex") >= 0;
final boolean nofollow = entry.getValue().getProperty("rel", "").toLowerCase().indexOf("nofollow") >= 0;
final boolean noindex = entry.getValue().getProperty("rel", "").toLowerCase().indexOf("noindex",0) >= 0;
final boolean nofollow = entry.getValue().getProperty("rel", "").toLowerCase().indexOf("nofollow",0) >= 0;
if ((thishost == null && url.getHost() == null) ||
((thishost != null && url.getHost() != null) &&
(url.getHost().endsWith(thishost) ||

View File

@ -233,7 +233,7 @@ public class LibraryProvider {
String w;
while ((line = reader.readLine()) != null) {
line = line.trim();
p = line.indexOf(" ");
p = line.indexOf(' ',0);
if (p > 0) {
//c = Integer.parseInt(line.substring(p + 1));
//if (c < 1) continue;

View File

@ -172,7 +172,7 @@ public class MediawikiImporter extends Thread implements Importer {
wikiparserrecord record;
int q;
while ((t = r.readLine()) != null) {
if ((p = t.indexOf("<base>")) >= 0 && (q = t.indexOf("</base>", p)) > 0) {
if ((p = t.indexOf("<base>",0)) >= 0 && (q = t.indexOf("</base>", p)) > 0) {
//urlStub = "http://" + lang + ".wikipedia.org/wiki/";
this.urlStub = t.substring(p + 6, q);
if (!this.urlStub.endsWith("/")) {
@ -236,9 +236,9 @@ public class MediawikiImporter extends Thread implements Importer {
page = false;
continue;
}
if ((p = t.indexOf("<title>")) >= 0) {
if ((p = t.indexOf("<title>",0)) >= 0) {
title = t.substring(p + 7);
q = title.indexOf("</title>");
q = title.indexOf("</title>",0);
if (q >= 0) title = title.substring(0, q);
continue;
}
@ -459,7 +459,7 @@ public class MediawikiImporter extends Thread implements Importer {
public wikisourcerecord(final byte[] chunk, final long start, final long end) {
String s;
s = UTF8.String(chunk);
final int t0 = s.indexOf("<title>");
final int t0 = s.indexOf("<title>",0);
if (t0 >= 0) {
final int t1 = s.indexOf("</title>", t0);
if (t1 >= 0) {
@ -596,7 +596,7 @@ public class MediawikiImporter extends Thread implements Importer {
if (s.indexOf(m) >= 0) {
// we found the record
//Log.logInfo("WIKITRANSLATION", "s = " + s);
int p = s.indexOf("start=\"");
int p = s.indexOf("start=\"",0);
if (p < 0) return null;
p += 7;
int q = s.indexOf('"', p + 1);

View File

@ -2,19 +2,19 @@
* ResumptionToken
* Copyright 2009 by Michael Peter Christen
* First released 31.10.2009 at http://yacy.net
*
*
* This is a part of YaCy, a peer-to-peer based web search engine
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program in the file lgpl21.txt
* If not, see <http://www.gnu.org/licenses/>.
@ -35,17 +35,17 @@ import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
import net.yacy.cora.date.ISO8601Formatter;
import net.yacy.cora.document.UTF8;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
public class ResumptionToken extends TreeMap<String, String> {
private static final long serialVersionUID = -8389462290545629792L;
// use a collator to relax when distinguishing between lowercase and uppercase letters
@ -54,21 +54,21 @@ public class ResumptionToken extends TreeMap<String, String> {
insensitiveCollator.setStrength(Collator.SECONDARY);
insensitiveCollator.setDecomposition(Collator.NO_DECOMPOSITION);
}
int recordCounter;
private DigestURI source;
public ResumptionToken(DigestURI source, final byte[] b) throws IOException {
private final DigestURI source;
public ResumptionToken(final DigestURI source, final byte[] b) throws IOException {
super((Collator) insensitiveCollator.clone());
this.source = source;
this.recordCounter = 0;
new Parser(b);
}
/*
public ResumptionToken(
DigestURI source,
DigestURI source,
Date expirationDate,
int completeListSize,
int cursor,
@ -82,9 +82,9 @@ public class ResumptionToken extends TreeMap<String, String> {
this.put("cursor", Integer.toString(cursor));
this.put("token", token);
}
public ResumptionToken(
DigestURI source,
DigestURI source,
String expirationDate,
int completeListSize,
int cursor,
@ -99,19 +99,19 @@ public class ResumptionToken extends TreeMap<String, String> {
this.put("token", token);
}
*/
/**
* truncate the given url at the '?'
* @param url
* @return a string containing the url up to and including the '?'
*/
public static String truncatedURL(DigestURI url) {
public static String truncatedURL(final DigestURI url) {
String u = url.toNormalform(true, true);
int i = u.indexOf('?');
final int i = u.indexOf('?');
if (i > 0) u = u.substring(0, i + 1);
return u;
}
/**
* while parsing the resumption token, also all records are counted
* @return the result from counting the records
@ -119,7 +119,7 @@ public class ResumptionToken extends TreeMap<String, String> {
public int getRecordCounter() {
return this.recordCounter;
}
/**
* compute a url that can be used to resume the retrieval from the OAI-PMH resource
* @param givenURL
@ -129,27 +129,27 @@ public class ResumptionToken extends TreeMap<String, String> {
public DigestURI resumptionURL() throws IOException {
// decide which kind of encoding strategy was used to get a resumptionToken:
String token = this.getToken();
final String token = getToken();
if (token == null) throw new IOException("end of resumption reached - token == null");
if (token.length() == 0) throw new IOException("end of resumption reached - token.length() == 0");
String url = truncatedURL(this.source);
final String url = truncatedURL(this.source);
// encoded state
if (token.indexOf("from=") >= 0) {
if (token.indexOf("from=",0) >= 0) {
return new DigestURI(url + "verb=ListRecords&" + token);
}
// cached result set
// can be detected with given expiration date
Date expiration = getExpirationDate();
final Date expiration = getExpirationDate();
if (expiration != null) {
if (expiration.before(new Date())) throw new IOException("the resumption is expired at " + ISO8601Formatter.FORMATTER.format(expiration) + " (now: " + ISO8601Formatter.FORMATTER.format());
// the resumption token is still fresh
}
String u = url + "verb=ListRecords&resumptionToken=" + escape(token);
final String u = url + "verb=ListRecords&resumptionToken=" + escape(token);
return new DigestURI(u);
}
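
Editor's note: resumptionURL() above rebuilds the OAI-PMH ListRecords request from the stored token: a token that carries "from=" is appended as-is, otherwise it is passed back as resumptionToken= after the optional expiration check. A sketch of that construction with plain Strings instead of DigestURI (hypothetical ResumptionUrlSketch class; the expiration check and URL escaping are omitted):

// Sketch of the resumption-URL construction; assumes the base URL already ends with '?'.
public final class ResumptionUrlSketch {
    public static String resumptionURL(final String truncatedUrl, final String token) {
        if (token == null || token.length() == 0)
            throw new IllegalStateException("end of resumption reached");
        if (token.indexOf("from=", 0) >= 0) {
            // encoded-state style token: it already carries the request parameters
            return truncatedUrl + "verb=ListRecords&" + token;
        }
        // cached-result-set style token: pass it back verbatim
        return truncatedUrl + "verb=ListRecords&resumptionToken=" + token;
    }

    public static void main(final String[] args) {
        System.out.println(resumptionURL("http://example.org/oai?", "from=2009-10-01&until=2009-10-31"));
        System.out.println(resumptionURL("http://example.org/oai?", "0/226/oai_dc"));
    }
}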
public static StringBuilder escape(final String s) {
final int len = s.length();
final StringBuilder sbuf = new StringBuilder(len + 10);
@ -181,7 +181,7 @@ public class ResumptionToken extends TreeMap<String, String> {
}
return sbuf;
}
/**
* an expiration date of a resumption token that addresses how long a cached set will
* stay in the cache of the oai-pmh server. See:
@ -189,16 +189,16 @@ public class ResumptionToken extends TreeMap<String, String> {
* @return
*/
public Date getExpirationDate() {
String d = this.get("expirationDate");
final String d = get("expirationDate");
if (d == null) return null;
try {
return ISO8601Formatter.FORMATTER.parse(d);
} catch (ParseException e) {
} catch (final ParseException e) {
Log.logException(e);
return new Date();
}
}
/**
* The completeListSize attribute provides a place where the estimated number of results
* in the complete list response may be announced. This is likely to be used for
@ -212,11 +212,11 @@ public class ResumptionToken extends TreeMap<String, String> {
* @return
*/
public int getCompleteListSize() {
String t = this.get("completeListSize");
final String t = get("completeListSize");
if (t == null) return 0;
return Integer.parseInt(t);
}
/**
* The cursor attribute is the number of results returned so far in the complete list response,
* thus it is always "0" in the first incomplete list response.
@ -226,11 +226,11 @@ public class ResumptionToken extends TreeMap<String, String> {
* @return
*/
public int getCursor() {
String t = this.get("cursor");
final String t = get("cursor");
if (t == null) return 0;
return Integer.parseInt(t);
}
/**
* get a token of the stateless transfer in case that no expiration date is given
* see:
@ -238,14 +238,14 @@ public class ResumptionToken extends TreeMap<String, String> {
* @return
*/
public String getToken() {
return this.get("token");
return get("token");
}
public String toString() {
return "source = " + this.source + ", expirationDate=" + ISO8601Formatter.FORMATTER.format(this.getExpirationDate()) + ", completeListSize=" + getCompleteListSize() +
", cursor=" + this.getCursor() + ", token=" + this.getToken();
return "source = " + this.source + ", expirationDate=" + ISO8601Formatter.FORMATTER.format(getExpirationDate()) + ", completeListSize=" + getCompleteListSize() +
", cursor=" + getCursor() + ", token=" + getToken();
}
// get a resumption token using a SAX xml parser from an input stream
private class Parser extends DefaultHandler {
@ -253,7 +253,7 @@ public class ResumptionToken extends TreeMap<String, String> {
private final StringBuilder buffer;
private boolean parsingValue;
private SAXParser saxParser;
private InputStream stream;
private final InputStream stream;
private Attributes atts;
public Parser(final byte[] b) throws IOException {
@ -265,25 +265,25 @@ public class ResumptionToken extends TreeMap<String, String> {
try {
this.saxParser = factory.newSAXParser();
this.saxParser.parse(this.stream, this);
} catch (SAXException e) {
} catch (final SAXException e) {
Log.logException(e);
Log.logWarning("ResumptionToken", "token was not parsed (1):\n" + UTF8.String(b));
} catch (IOException e) {
} catch (final IOException e) {
Log.logException(e);
Log.logWarning("ResumptionToken", "token was not parsed (2):\n" + UTF8.String(b));
} catch (ParserConfigurationException e) {
} catch (final ParserConfigurationException e) {
Log.logException(e);
Log.logWarning("ResumptionToken", "token was not parsed (3):\n" + UTF8.String(b));
throw new IOException(e.getMessage());
} finally {
try {
this.stream.close();
} catch (IOException e) {
} catch (final IOException e) {
Log.logException(e);
}
}
}
/*
<resumptionToken expirationDate="2009-10-31T22:52:14Z"
completeListSize="226"
@ -298,7 +298,7 @@ public class ResumptionToken extends TreeMap<String, String> {
public void startElement(final String uri, final String name, final String tag, final Attributes atts) throws SAXException {
if ("record".equals(tag)) {
recordCounter++;
ResumptionToken.this.recordCounter++;
}
if ("resumptionToken".equals(tag)) {
this.parsingValue = true;
@ -309,18 +309,18 @@ public class ResumptionToken extends TreeMap<String, String> {
public void endElement(final String uri, final String name, final String tag) {
if (tag == null) return;
if ("resumptionToken".equals(tag)) {
put("expirationDate", atts.getValue("expirationDate"));
put("completeListSize", atts.getValue("completeListSize"));
put("cursor", atts.getValue("cursor"));
put("token", buffer.toString());
put("expirationDate", this.atts.getValue("expirationDate"));
put("completeListSize", this.atts.getValue("completeListSize"));
put("cursor", this.atts.getValue("cursor"));
put("token", this.buffer.toString());
this.buffer.setLength(0);
this.parsingValue = false;
}
}
public void characters(final char ch[], final int start, final int length) {
if (parsingValue) {
buffer.append(ch, start, length);
if (this.parsingValue) {
this.buffer.append(ch, start, length);
}
}

View File

@ -676,7 +676,7 @@ public class ContentScraper extends AbstractScraper implements Scraper {
public boolean indexingDenied() {
final String s = this.metas.get("robots");
if (s == null) return false;
if (s.indexOf("noindex") >= 0) return true;
if (s.indexOf("noindex",0) >= 0) return true;
return false;
}

View File

@ -1,4 +1,4 @@
//swfParser.java
//swfParser.java
//------------------------
//part of YaCy
//(C) by Michael Peter Christen; mc@yacy.net
@ -39,25 +39,24 @@ import net.yacy.document.AbstractParser;
import net.yacy.document.Document;
import net.yacy.document.Parser;
import net.yacy.kelondro.logging.Log;
import pt.tumba.parser.swf.SWF2HTML;
public class swfParser extends AbstractParser implements Parser {
public swfParser() {
super("Adobe Flash Parser");
SUPPORTED_EXTENSIONS.add("swf");
SUPPORTED_MIME_TYPES.add("application/x-shockwave-flash");
SUPPORTED_MIME_TYPES.add("application/x-shockwave-flash2-preview");
SUPPORTED_MIME_TYPES.add("application/futuresplash");
SUPPORTED_MIME_TYPES.add("image/vnd.rn-realflash");
this.SUPPORTED_EXTENSIONS.add("swf");
this.SUPPORTED_MIME_TYPES.add("application/x-shockwave-flash");
this.SUPPORTED_MIME_TYPES.add("application/x-shockwave-flash2-preview");
this.SUPPORTED_MIME_TYPES.add("application/futuresplash");
this.SUPPORTED_MIME_TYPES.add("image/vnd.rn-realflash");
}
/*
* parses the source documents and returns a plasmaParserDocument containing
* all extracted information about the parsed document
*/
public Document[] parse(final MultiProtocolURI location, final String mimeType,
public Document[] parse(final MultiProtocolURI location, final String mimeType,
final String charset, final InputStream source)
throws Parser.Failure, InterruptedException
{
@ -67,11 +66,11 @@ public class swfParser extends AbstractParser implements Parser {
String contents = "";
try {
contents = swf2html.convertSWFToHTML(source);
} catch (NegativeArraySizeException e) {
} catch (final NegativeArraySizeException e) {
throw new Parser.Failure(e.getMessage(), location);
} catch (IOException e) {
} catch (final IOException e) {
throw new Parser.Failure(e.getMessage(), location);
} catch (Exception e) {
} catch (final Exception e) {
Log.logException(e);
throw new Parser.Failure(e.getMessage(), location);
}
@ -88,9 +87,9 @@ public class swfParser extends AbstractParser implements Parser {
int p0 = 0;
//getting rid of HTML-Tags
p0 = contents.indexOf("<html><body>");
p0 = contents.indexOf("<html><body>",0);
contents = contents.substring(p0+12);
p0 = contents.indexOf("</body></html>");
p0 = contents.indexOf("</body></html>",0);
contents = contents.substring(0,p0);
//extracting urls
@ -98,7 +97,7 @@ public class swfParser extends AbstractParser implements Parser {
urlEnd = contents.indexOf(linebreak,urlStart);
url = contents.substring(urlStart,urlEnd);
urlnr = Integer.toString(++urls).toString();
Properties p = new Properties();
final Properties p = new Properties();
p.put("name", urlnr);
anchors.put(new MultiProtocolURI(url), p);
contents = contents.substring(0,urlStart)+contents.substring(urlEnd);
@ -121,13 +120,13 @@ public class swfParser extends AbstractParser implements Parser {
"",
sections, // an array of section headlines
abstrct, // an abstract
0.0f, 0.0f,
0.0f, 0.0f,
UTF8.getBytes(contents), // the parsed document text
anchors, // a map of extracted anchors
null,
null,
false)}; // a treeset of image URLs
} catch (final Exception e) {
} catch (final Exception e) {
if (e instanceof InterruptedException) throw (InterruptedException) e;
// if an unexpected error occurs just log the error and raise a new Parser.Failure

View File

@ -1,4 +1,4 @@
//vcfParser.java
//vcfParser.java
//------------------------
//part of YaCy
//(C) by Michael Peter Christen; mc@yacy.net
@ -50,16 +50,16 @@ import net.yacy.kelondro.order.Base64Order;
*/
public class vcfParser extends AbstractParser implements Parser {
public vcfParser() {
public vcfParser() {
super("vCard Parser");
SUPPORTED_EXTENSIONS.add("vcf");
SUPPORTED_MIME_TYPES.add("text/x-vcard");
SUPPORTED_MIME_TYPES.add("application/vcard");
SUPPORTED_MIME_TYPES.add("application/x-versit");
SUPPORTED_MIME_TYPES.add("text/x-versit");
SUPPORTED_MIME_TYPES.add("text/x-vcalendar");
this.SUPPORTED_EXTENSIONS.add("vcf");
this.SUPPORTED_MIME_TYPES.add("text/x-vcard");
this.SUPPORTED_MIME_TYPES.add("application/vcard");
this.SUPPORTED_MIME_TYPES.add("application/x-versit");
this.SUPPORTED_MIME_TYPES.add("text/x-versit");
this.SUPPORTED_MIME_TYPES.add("text/x-vcalendar");
}
public Document[] parse(final MultiProtocolURI url, final String mimeType, final String charset, final InputStream source)
throws Parser.Failure, InterruptedException {
@ -69,54 +69,54 @@ public class vcfParser extends AbstractParser implements Parser {
final HashMap<String, String> parsedData = new HashMap<String, String>();
final HashMap<MultiProtocolURI, Properties> anchors = new HashMap<MultiProtocolURI, Properties>();
final LinkedList<String> parsedNames = new LinkedList<String>();
boolean useLastLine = false;
int lineNr = 0;
String line = null;
String line = null;
final BufferedReader inputReader = (charset!=null)
? new BufferedReader(new InputStreamReader(source,charset))
: new BufferedReader(new InputStreamReader(source));
while (true) {
// get the next line
if (!useLastLine) {
line = inputReader.readLine();
} else {
useLastLine = false;
}
if (line == null) break;
if (line == null) break;
else if (line.length() == 0) continue;
lineNr++;
final int pos = line.indexOf(":");
lineNr++;
final int pos = line.indexOf(':',0);
if (pos != -1) {
final String key = line.substring(0,pos).trim().toUpperCase();
String value = line.substring(pos+1).trim();
String encoding = null;
final String[] keyParts = key.split(";");
if (keyParts.length > 1) {
for (int i=0; i < keyParts.length; i++) {
if (keyParts[i].toUpperCase().startsWith("ENCODING")) {
encoding = keyParts[i].substring("ENCODING".length()+1);
} else if (keyParts[i].toUpperCase().startsWith("QUOTED-PRINTABLE")) {
for (final String keyPart : keyParts) {
if (keyPart.toUpperCase().startsWith("ENCODING")) {
encoding = keyPart.substring("ENCODING".length()+1);
} else if (keyPart.toUpperCase().startsWith("QUOTED-PRINTABLE")) {
encoding = "QUOTED-PRINTABLE";
} else if (keyParts[i].toUpperCase().startsWith("BASE64")) {
} else if (keyPart.toUpperCase().startsWith("BASE64")) {
encoding = "BASE64";
}
}
if (encoding != null) {
try {
if (encoding.equalsIgnoreCase("QUOTED-PRINTABLE")) {
// if the value has multiple lines ...
if (line.endsWith("=")) {
if (line.endsWith("=")) {
do {
value = value.substring(0,value.length()-1);
line = inputReader.readLine();
if (line == null) break;
value += line;
value += line;
} while (line.endsWith("="));
}
value = decodeQuotedPrintable(value);
@ -124,7 +124,7 @@ public class vcfParser extends AbstractParser implements Parser {
do {
line = inputReader.readLine();
if (line == null) break;
if (line.indexOf(":")!= -1) {
if (line.indexOf(':',0)!= -1) {
// we have detected an illegal block end of the base64 data
useLastLine = true;
}
@ -132,19 +132,19 @@ public class vcfParser extends AbstractParser implements Parser {
else break;
} while (line.length()!=0);
value = Base64Order.standardCoder.decodeString(value);
}
}
} catch (final Exception ey) {
// Encoding error: This could occur, e.g., if the base64 doesn't
// Encoding error: This could occur, e.g., if the base64 doesn't
// end with an empty newline
//
//
// We can simply ignore it.
}
}
}
}
if (key.equalsIgnoreCase("END")) {
String name = null, title = null;
// using the name of the current version as section headline
if (parsedData.containsKey("FN")) {
parsedNames.add(name = parsedData.get("FN"));
@ -153,19 +153,19 @@ public class vcfParser extends AbstractParser implements Parser {
} else {
parsedNames.add(name = "unknown name");
}
// getting the vcard title
if (parsedData.containsKey("TITLE")) {
parsedNames.add(title = parsedData.get("TITLE"));
}
if (parsedTitle.length() > 0) parsedTitle.append(", ");
parsedTitle.append((title==null)?name:name + " - " + title);
// looping through the properties and add there values to
// the text representation of the vCard
final Iterator<String> iter = parsedData.values().iterator();
final Iterator<String> iter = parsedData.values().iterator();
while (iter.hasNext()) {
value = iter.next();
parsedDataText.append(value).append("\r\n");
@ -175,11 +175,11 @@ public class vcfParser extends AbstractParser implements Parser {
} else if (key.toUpperCase().startsWith("URL")) {
try {
final MultiProtocolURI newURL = new MultiProtocolURI(value);
Properties p = new Properties();
final Properties p = new Properties();
p.put("name", newURL.toString());
anchors.put(newURL, p);
anchors.put(newURL, p);
//parsedData.put(key,value);
} catch (final MalformedURLException ex) {/* ignore this */}
} catch (final MalformedURLException ex) {/* ignore this */}
} else if (
!key.equalsIgnoreCase("BEGIN") &&
!key.equalsIgnoreCase("END") &&
@ -192,12 +192,12 @@ public class vcfParser extends AbstractParser implements Parser {
) {
// value = value.replaceAll(";","\t");
if ((value.length() > 0)) parsedData.put(key, value);
}
}
} else {
if (log.isFinest()) this.log.logFinest("Invalid data in vcf file" +
if (this.log.isFinest()) this.log.logFinest("Invalid data in vcf file" +
"\n\tURL: " + url +
"\n\tLine: " + line +
"\n\tLine: " + line +
"\n\tLine-Nr: " + lineNr);
}
}
@ -216,20 +216,20 @@ public class vcfParser extends AbstractParser implements Parser {
"", // the publisher
sections, // an array of section headlines
"vCard", // an abstract
0.0f, 0.0f,
0.0f, 0.0f,
text, // the parsed document text
anchors, // a map of extracted anchors
null,
null, // a treeset of image URLs
false)};
} catch (final Exception e) {
} catch (final Exception e) {
if (e instanceof InterruptedException) throw (InterruptedException) e;
if (e instanceof Parser.Failure) throw (Parser.Failure) e;
throw new Parser.Failure("Unexpected error while parsing vcf resource. " + e.getMessage(),url);
}
}
}
private String decodeQuotedPrintable(final String s) {
if (s == null) return null;
final byte[] b = UTF8.getBytes(s);
@ -251,5 +251,5 @@ public class vcfParser extends AbstractParser implements Parser {
}
return sb.toString();
}
}
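
Editor's note: vCard values marked ENCODING=QUOTED-PRINTABLE are decoded above by turning each "=XX" hex escape back into a byte. A minimal character-based sketch of that decoding (hypothetical QuotedPrintableSketch class; the parser itself works on UTF-8 bytes and handles soft line breaks, which this sketch does not):

// Minimal quoted-printable decoder: '=XX' hex escapes only.
public final class QuotedPrintableSketch {
    public static String decode(final String s) {
        if (s == null) return null;
        final StringBuilder sb = new StringBuilder(s.length());
        for (int i = 0; i < s.length(); i++) {
            final char c = s.charAt(i);
            if (c == '=' && i + 2 < s.length()) {
                sb.append((char) Integer.parseInt(s.substring(i + 1, i + 3), 16));
                i += 2;                              // skip the two hex digits
            } else {
                sb.append(c);
            }
        }
        return sb.toString();
    }

    public static void main(final String[] args) {
        // the two escape bytes come back as raw chars here; the real parser reassembles them as UTF-8
        System.out.println(decode("M=C3=BCller"));
    }
}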

View File

@ -9,7 +9,7 @@
// $LastChangedBy$
//
// LICENSE
//
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
@ -46,7 +46,7 @@ import org.xml.sax.helpers.DefaultHandler;
public class opensearchdescriptionReader extends DefaultHandler {
// statics for item generation and automatic categorization
static int guidcount = 0;
//private static final String recordTag = "OpenSearchDescription";
@ -87,14 +87,14 @@ public class opensearchdescriptionReader extends DefaultHandler {
<SyndicationRight>open</SyndicationRight>
</OpenSearchDescription>
*/
private static final HashSet<String> tags = new HashSet<String>();
static {
for (int i = 0; i < tagsDef.length; i++) {
tags.add(tagsDef[i]);
for (final String element : tagsDef) {
tags.add(element);
}
}
// class variables
private Item channel;
private final StringBuilder buffer;
@ -102,17 +102,17 @@ public class opensearchdescriptionReader extends DefaultHandler {
private final String imageURL;
private final ArrayList<String> itemsGUID; // a list of GUIDs, so the items can be retrieved by a specific order
private final HashMap<String, Item> items; // a guid:Item map
public opensearchdescriptionReader() {
itemsGUID = new ArrayList<String>();
items = new HashMap<String, Item>();
buffer = new StringBuilder();
channel = null;
parsingChannel = false;
imageURL = null;
this.itemsGUID = new ArrayList<String>();
this.items = new HashMap<String, Item>();
this.buffer = new StringBuilder();
this.channel = null;
this.parsingChannel = false;
this.imageURL = null;
}
public opensearchdescriptionReader(final String path) {
this();
try {
@ -123,7 +123,7 @@ public class opensearchdescriptionReader extends DefaultHandler {
Log.logException(e);
}
}
public opensearchdescriptionReader(final InputStream stream) {
this();
try {
@ -134,7 +134,7 @@ public class opensearchdescriptionReader extends DefaultHandler {
Log.logException(e);
}
}
public static opensearchdescriptionReader parse(final byte[] a) {
// check integrity of array
@ -151,14 +151,14 @@ public class opensearchdescriptionReader extends DefaultHandler {
return null;
}
final String end = UTF8.String(a, a.length - 10, 10);
if (end.indexOf("rss") < 0) {
if (end.indexOf("rss",0) < 0) {
Log.logWarning("opensearchdescriptionReader", "response incomplete");
return null;
}
// make input stream
final ByteArrayInputStream bais = new ByteArrayInputStream(a);
// parse stream
opensearchdescriptionReader reader = null;
try {
@ -174,8 +174,8 @@ public class opensearchdescriptionReader extends DefaultHandler {
@Override
public void startElement(final String uri, final String name, final String tag, final Attributes atts) throws SAXException {
if ("channel".equals(tag)) {
channel = new Item();
parsingChannel = true;
this.channel = new Item();
this.parsingChannel = true;
}
}
@ -183,54 +183,54 @@ public class opensearchdescriptionReader extends DefaultHandler {
public void endElement(final String uri, final String name, final String tag) {
if (tag == null) return;
if ("channel".equals(tag)) {
parsingChannel = false;
} else if (parsingChannel) {
final String value = buffer.toString().trim();
buffer.setLength(0);
if (tags.contains(tag)) channel.setValue(tag, value);
this.parsingChannel = false;
} else if (this.parsingChannel) {
final String value = this.buffer.toString().trim();
this.buffer.setLength(0);
if (tags.contains(tag)) this.channel.setValue(tag, value);
}
}
@Override
public void characters(final char ch[], final int start, final int length) {
if (parsingChannel) {
buffer.append(ch, start, length);
if (this.parsingChannel) {
this.buffer.append(ch, start, length);
}
}
public Item getChannel() {
return channel;
return this.channel;
}
public Item getItem(final int i) {
// retrieve item by order number
return getItem(itemsGUID.get(i));
return getItem(this.itemsGUID.get(i));
}
public Item getItem(final String guid) {
// retrieve item by guid
return items.get(guid);
return this.items.get(guid);
}
public int items() {
return items.size();
return this.items.size();
}
public String getImage() {
return this.imageURL;
}
public static class Item {
private final HashMap<String, String> map;
public Item() {
this.map = new HashMap<String, String>();
this.map.put("guid", Long.toHexString(System.currentTimeMillis()) + ":" + guidcount++);
}
public void setValue(final String name, final String value) {
map.put(name, value);
this.map.put(name, value);
}
}
}

View File

@ -10,7 +10,7 @@
// $LastChangedBy$
//
// LICENSE
//
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
@ -60,19 +60,19 @@ public class MapHeap implements Map<byte[], Map<String, String>> {
private ARC<byte[], Map<String, String>> cache;
private final char fillchar;
public MapHeap(
final File heapFile,
final int keylength,
final ByteOrder ordering,
int buffermax,
final int buffermax,
final int cachesize,
char fillchar) throws IOException {
final char fillchar) throws IOException {
this.blob = new Heap(heapFile, keylength, ordering, buffermax);
this.cache = new ConcurrentARC<byte[], Map<String, String>>(cachesize, Math.max(32, 4 * Runtime.getRuntime().availableProcessors()), ordering);
this.fillchar = fillchar;
}
/**
* ask for the length of the primary key
* @return the length of the key
@ -80,7 +80,7 @@ public class MapHeap implements Map<byte[], Map<String, String>> {
public int keylength() {
return this.blob.keylength();
}
/**
* clears the content of the database
* @throws IOException
@ -88,7 +88,7 @@ public class MapHeap implements Map<byte[], Map<String, String>> {
public synchronized void clear() {
try {
this.blob.clear();
} catch (IOException e) {
} catch (final IOException e) {
Log.logException(e);
}
this.cache.clear();
@ -97,7 +97,7 @@ public class MapHeap implements Map<byte[], Map<String, String>> {
private static String map2string(final Map<String, String> map, final String comment) {
final StringBuilder bb = new StringBuilder(map.size() * 40);
bb.append("# ").append(comment).append('\r').append('\n');
for (Map.Entry<String, String> entry: map.entrySet()) {
for (final Map.Entry<String, String> entry: map.entrySet()) {
if (entry.getValue() != null) {
bb.append(entry.getKey());
bb.append('=');
@ -109,7 +109,7 @@ public class MapHeap implements Map<byte[], Map<String, String>> {
return bb.toString();
}
private static Map<String, String> bytes2map(byte[] b) throws IOException, RowSpaceExceededException {
private static Map<String, String> bytes2map(final byte[] b) throws IOException, RowSpaceExceededException {
final BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(b)));
final Map<String, String> map = new ConcurrentHashMap<String, String>();
String line;
@ -119,17 +119,17 @@ public class MapHeap implements Map<byte[], Map<String, String>> {
line = line.trim();
if (line.equals("# EOF")) return map;
if ((line.length() == 0) || (line.charAt(0) == '#')) continue;
pos = line.indexOf("=");
pos = line.indexOf('=');
if (pos < 0) continue;
map.put(line.substring(0, pos), line.substring(pos + 1));
}
} catch (OutOfMemoryError e) {
} catch (final OutOfMemoryError e) {
throw new RowSpaceExceededException(0, "readLine probably uses too much RAM", e);
}
return map;
}
// use our own formatter to prevent concurrency locks with other processes
private final static GenericFormatter my_SHORT_SECOND_FORMATTER = new GenericFormatter(GenericFormatter.FORMAT_SHORT_SECOND, GenericFormatter.time_second);
@ -138,42 +138,42 @@ public class MapHeap implements Map<byte[], Map<String, String>> {
* @param key the primary key
* @param newMap
* @throws IOException
* @throws RowSpaceExceededException
* @throws RowSpaceExceededException
*/
public void insert(byte[] key, final Map<String, String> newMap) throws IOException, RowSpaceExceededException {
assert key != null;
assert key.length > 0;
assert newMap != null;
key = normalizeKey(key);
String s = map2string(newMap, "W" + my_SHORT_SECOND_FORMATTER.format() + " ");
final String s = map2string(newMap, "W" + my_SHORT_SECOND_FORMATTER.format() + " ");
assert s != null;
byte[] sb = UTF8.getBytes(s);
if (cache == null) {
final byte[] sb = UTF8.getBytes(s);
if (this.cache == null) {
// write entry
if (blob != null) blob.insert(key, sb);
if (this.blob != null) this.blob.insert(key, sb);
} else {
synchronized (this) {
// write entry
if (blob != null) blob.insert(key, sb);
if (this.blob != null) this.blob.insert(key, sb);
// write map to cache
if (MemoryControl.shortStatus()) {
cache.clear();
this.cache.clear();
} else {
cache.insert(key, newMap);
this.cache.insert(key, newMap);
}
}
}
}
public Map<String, String> put(byte[] key, final Map<String, String> newMap) {
public Map<String, String> put(final byte[] key, final Map<String, String> newMap) {
Map<String, String> v = null;
try {
v = this.get(key);
insert(key, newMap);
} catch (IOException e) {
} catch (final IOException e) {
Log.logException(e);
} catch (RowSpaceExceededException e) {
} catch (final RowSpaceExceededException e) {
Log.logException(e);
}
return v;
@ -188,39 +188,39 @@ public class MapHeap implements Map<byte[], Map<String, String>> {
// update elementCount
if (key == null) return;
key = normalizeKey(key);
synchronized (this) {
// remove from cache
if (cache != null) cache.remove(key);
if (this.cache != null) this.cache.remove(key);
// remove from file
blob.delete(key);
this.blob.delete(key);
}
}
public Map<String, String> remove(Object key) {
public Map<String, String> remove(final Object key) {
Map<String, String> v = null;
try {
v = this.get(key);
this.delete((byte[]) key);
} catch (IOException e) {
delete((byte[]) key);
} catch (final IOException e) {
Log.logException(e);
}
return v;
}
/**
* check if a specific key is in the database
* @param key the primary key
* @return
* @throws IOException
*/
public boolean containsKey(Object k) {
public boolean containsKey(final Object k) {
if (!(k instanceof byte[])) return false;
assert k != null;
if (cache == null) return false; // case may appear during shutdown
byte[] key = normalizeKey((byte[]) k);
if (this.cache == null) return false; // case may appear during shutdown
final byte[] key = normalizeKey((byte[]) k);
boolean h;
synchronized (this) {
h = this.cache.containsKey(key) || this.blob.containsKey(key);
@ -238,43 +238,43 @@ public class MapHeap implements Map<byte[], Map<String, String>> {
if (key == null) return null;
return get(key, true);
}
public Map<String, String> get(final Object key) {
if (key == null) return null;
try {
if (key instanceof byte[]) return get((byte[]) key);
if (key instanceof String) return get(UTF8.getBytes((String) key));
} catch (IOException e) {
} catch (final IOException e) {
Log.logException(e);
} catch (RowSpaceExceededException e) {
} catch (final RowSpaceExceededException e) {
Log.logException(e);
}
return null;
}
private byte[] normalizeKey(byte[] key) {
if (blob == null || key == null) return key;
if (key.length > blob.keylength()) {
byte[] b = new byte[blob.keylength()];
System.arraycopy(key, 0, b, 0, blob.keylength());
private byte[] normalizeKey(final byte[] key) {
if (this.blob == null || key == null) return key;
if (key.length > this.blob.keylength()) {
final byte[] b = new byte[this.blob.keylength()];
System.arraycopy(key, 0, b, 0, this.blob.keylength());
return b;
}
if (key.length < blob.keylength()) {
byte[] b = new byte[blob.keylength()];
if (key.length < this.blob.keylength()) {
final byte[] b = new byte[this.blob.keylength()];
System.arraycopy(key, 0, b, 0, key.length);
for (int i = key.length; i < b.length; i++) b[i] = (byte) fillchar;
for (int i = key.length; i < b.length; i++) b[i] = (byte) this.fillchar;
return b;
}
return key;
}
private byte[] removeFillchar(byte[] key) {
private byte[] removeFillchar(final byte[] key) {
if (key == null) return key;
int p = key.length - 1;
while (p >= 0 && key[p] == fillchar) p--;
while (p >= 0 && key[p] == this.fillchar) p--;
if (p == key.length - 1) return key;
// copy part of key into new byte[]
byte[] k = new byte[p + 1];
final byte[] k = new byte[p + 1];
System.arraycopy(key, 0, k, 0, k.length);
return k;
}
@ -282,51 +282,51 @@ public class MapHeap implements Map<byte[], Map<String, String>> {
protected Map<String, String> get(byte[] key, final boolean storeCache) throws IOException, RowSpaceExceededException {
// load map from cache
assert key != null;
if (cache == null) return null; // case may appear during shutdown
if (this.cache == null) return null; // case may appear during shutdown
key = normalizeKey(key);
if (MemoryControl.shortStatus()) {
cache.clear();
}
this.cache.clear();
}
// if we have the entry in the cache then just return that
Map<String, String> map = cache.get(key);
Map<String, String> map = this.cache.get(key);
if (map != null) return map;
// in all other cases we must look into the cache again within
// a synchronization in case that the entry was not in the cache but stored
// there while another process has taken it from the file system
if (storeCache) {
synchronized (this) {
map = cache.get(key);
map = this.cache.get(key);
if (map != null) return map;
// read object
final byte[] b = blob.get(key);
final byte[] b = this.blob.get(key);
if (b == null) return null;
try {
map = bytes2map(b);
} catch (RowSpaceExceededException e) {
} catch (final RowSpaceExceededException e) {
throw new IOException(e.getMessage());
}
// write map to cache
cache.insert(key, map);
this.cache.insert(key, map);
// return value
return map;
}
} else {
byte[] b;
synchronized (this) {
map = cache.get(key);
map = this.cache.get(key);
if (map != null) return map;
b = blob.get(key);
b = this.blob.get(key);
}
if (b == null) return null;
try {
return bytes2map(b);
} catch (RowSpaceExceededException e) {
} catch (final RowSpaceExceededException e) {
throw new IOException(e.getMessage());
}
}
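The comment in the get() path above explains the locking idea: the ARC cache is probed once without a lock, then probed again inside the synchronized block before the blob file is touched, so an entry loaded by a concurrent thread is not read from disk twice. A standalone sketch of that check-lock-check read, with an illustrative class name and a ConcurrentHashMap standing in for the ARC cache and the blob heap:

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    // minimal sketch of the check-lock-check read used in MapHeap.get():
    // probe the cache lock-free, re-check it under the lock, only then pay for the slow store
    public final class CachedStoreSketch {

        private final Map<String, String> cache = new ConcurrentHashMap<String, String>();

        public String get(final String key) {
            String value = this.cache.get(key);      // fast path, no lock
            if (value != null) return value;
            synchronized (this) {
                value = this.cache.get(key);         // a concurrent reader may have loaded it meanwhile
                if (value != null) return value;
                value = loadFromStore(key);          // slow path, reached at most once per key
                if (value != null) this.cache.put(key, value);
                return value;
            }
        }

        private String loadFromStore(final String key) {
            return "value-for-" + key;               // stands in for the blob lookup and bytes2map()
        }
    }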
@ -343,7 +343,7 @@ public class MapHeap implements Map<byte[], Map<String, String>> {
// simple enumeration of key names without special ordering
return new KeyIterator(up, rotating, null, null);
}
/**
* return an iteration of the keys in the map
* the keys in the map are de-normalized which means that the fill-character is removed
@ -363,39 +363,39 @@ public class MapHeap implements Map<byte[], Map<String, String>> {
final boolean up, rotating;
final byte[] firstKey, secondKey;
Iterator<byte[]> iterator;
public KeyIterator(final boolean up, final boolean rotating, final byte[] firstKey, final byte[] secondKey) throws IOException {
this.up = up;
this.rotating = rotating;
this.firstKey = firstKey;
this.secondKey = secondKey;
final CloneableIterator<byte[]> i = blob.keys(up, firstKey);
iterator = (rotating) ? new RotateIterator<byte[]>(i, secondKey, blob.size()) : i;
final CloneableIterator<byte[]> i = MapHeap.this.blob.keys(up, firstKey);
this.iterator = (rotating) ? new RotateIterator<byte[]>(i, secondKey, MapHeap.this.blob.size()) : i;
}
public byte[] next() {
return removeFillchar(iterator.next());
return removeFillchar(this.iterator.next());
}
@Override
public boolean hasNext() {
return iterator.hasNext();
return this.iterator.hasNext();
}
@Override
public void remove() {
iterator.remove();
this.iterator.remove();
}
@Override
public CloneableIterator<byte[]> clone(Object modifier) {
public CloneableIterator<byte[]> clone(final Object modifier) {
try {
return new KeyIterator(this.up, this.rotating, this.firstKey, this.secondKey);
} catch (IOException e) {
} catch (final IOException e) {
return null;
}
}
}
public synchronized MapIterator entries(final boolean up, final boolean rotating) throws IOException {
@ -411,24 +411,24 @@ public class MapHeap implements Map<byte[], Map<String, String>> {
* @return the number of entries in the table
*/
public synchronized int size() {
return (blob == null) ? 0 : blob.size();
return (this.blob == null) ? 0 : this.blob.size();
}
public synchronized boolean isEmpty() {
return (blob == null) ? true : blob.isEmpty();
return (this.blob == null) ? true : this.blob.isEmpty();
}
/**
* close the Map table
*/
public synchronized void close() {
cache = null;
this.cache = null;
// close file
if (blob != null) blob.close(true);
blob = null;
if (this.blob != null) this.blob.close(true);
this.blob = null;
}
@Override
public void finalize() {
close();
@ -447,13 +447,13 @@ public class MapHeap implements Map<byte[], Map<String, String>> {
}
public boolean hasNext() {
return (!(finish)) && (keyIterator.hasNext());
return (!(this.finish)) && (this.keyIterator.hasNext());
}
public Map<String, String> next() {
byte[] nextKey = keyIterator.next();
byte[] nextKey = this.keyIterator.next();
if (nextKey == null) {
finish = true;
this.finish = true;
return null;
}
nextKey = normalizeKey(nextKey); // the key must be normalized because the keyIterator may iterate over not-normalized keys
@ -462,10 +462,10 @@ public class MapHeap implements Map<byte[], Map<String, String>> {
if (obj == null) throw new kelondroException("no more elements available");
return obj;
} catch (final IOException e) {
finish = true;
this.finish = true;
return null;
} catch (final RowSpaceExceededException e) {
finish = true;
this.finish = true;
return null;
}
}
@ -475,24 +475,24 @@ public class MapHeap implements Map<byte[], Map<String, String>> {
}
} // class mapIterator
public void putAll(Map<? extends byte[], ? extends Map<String, String>> map) {
for (Map.Entry<? extends byte[], ? extends Map<String, String>> me: map.entrySet()) {
public void putAll(final Map<? extends byte[], ? extends Map<String, String>> map) {
for (final Map.Entry<? extends byte[], ? extends Map<String, String>> me: map.entrySet()) {
try {
this.insert(me.getKey(), me.getValue());
} catch (RowSpaceExceededException e) {
insert(me.getKey(), me.getValue());
} catch (final RowSpaceExceededException e) {
Log.logException(e);
} catch (IOException e) {
} catch (final IOException e) {
Log.logException(e);
}
}
}
public Set<byte[]> keySet() {
TreeSet<byte[]> set = new TreeSet<byte[]>(this.blob.ordering());
final TreeSet<byte[]> set = new TreeSet<byte[]>(this.blob.ordering());
try {
Iterator<byte[]> i = this.blob.keys(true, false);
final Iterator<byte[]> i = this.blob.keys(true, false);
while (i.hasNext()) set.add(i.next());
} catch (IOException e) {}
} catch (final IOException e) {}
return set;
}
@ -506,33 +506,33 @@ public class MapHeap implements Map<byte[], Map<String, String>> {
throw new UnsupportedOperationException();
}
public boolean containsValue(Object value) {
public boolean containsValue(final Object value) {
// this method shall not be used because it is not appropriate for this kind of data
throw new UnsupportedOperationException();
}
public static void main(String[] args) {
public static void main(final String[] args) {
// test the class
File f = new File("maptest");
final File f = new File("maptest");
if (f.exists()) FileUtils.deletedelete(f);
try {
// make map
MapHeap map = new MapHeap(f, 12, NaturalOrder.naturalOrder, 1024 * 1024, 1024, '_');
final MapHeap map = new MapHeap(f, 12, NaturalOrder.naturalOrder, 1024 * 1024, 1024, '_');
// put some values into the map
Map<String, String> m = new HashMap<String, String>();
final Map<String, String> m = new HashMap<String, String>();
m.put("k", "000"); map.insert("123".getBytes(), m);
m.put("k", "111"); map.insert("456".getBytes(), m);
m.put("k", "222"); map.insert("789".getBytes(), m);
// iterate over keys
Iterator<byte[]> i = map.keys(true, false);
final Iterator<byte[]> i = map.keys(true, false);
while (i.hasNext()) {
System.out.println("key: " + UTF8.String(i.next()));
}
// clean up
map.close();
} catch (IOException e) {
} catch (final IOException e) {
Log.logException(e);
} catch (RowSpaceExceededException e) {
} catch (final RowSpaceExceededException e) {
Log.logException(e);
}
}

View File

@ -7,7 +7,7 @@
// $LastChangedBy$
//
// LICENSE
//
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
@ -46,7 +46,7 @@ public final class BufferedRecords {
private final Records efs;
private final int maxEntries;
private final TreeMap<Long, byte[]> buffer;
public BufferedRecords(final Records efs, final int maxEntries) {
this.efs = efs;
this.maxEntries = maxEntries;
@ -59,24 +59,24 @@ public final class BufferedRecords {
* @throws IOException
*/
public synchronized void flushBuffer() throws IOException {
this.flushBuffer0();
if (efs != null) efs.flushBuffer();
flushBuffer0();
if (this.efs != null) this.efs.flushBuffer();
}
private final void flushBuffer0() throws IOException {
if (efs == null) return;
for (Map.Entry<Long, byte[]> entry: buffer.entrySet()) {
efs.put(entry.getKey().intValue(), entry.getValue(), 0);
if (this.efs == null) return;
for (final Map.Entry<Long, byte[]> entry: this.buffer.entrySet()) {
this.efs.put(entry.getKey().intValue(), entry.getValue(), 0);
}
buffer.clear();
this.buffer.clear();
}
public final synchronized long size() throws IOException {
return efs == null ? 0 : efs.size();
return this.efs == null ? 0 : this.efs.size();
}
public final File filename() {
return efs.filename();
return this.efs.filename();
}
public final synchronized void close() {
@ -85,69 +85,69 @@ public final class BufferedRecords {
} catch (final IOException e) {
Log.logException(e);
}
if (efs != null) efs.close();
if (this.efs != null) this.efs.close();
}
@Override
protected final synchronized void finalize() {
if (this.efs != null) this.close();
if (this.efs != null) close();
}
public final synchronized void get(final long index, final byte[] b, final int start) throws IOException {
Long idx = Long.valueOf(index);
public final void get(final long index, final byte[] b, final int start) throws IOException {
final Long idx = Long.valueOf(index);
final byte[] bb;
synchronized (this) {
assert b.length - start >= efs.recordsize;
bb = buffer.get(idx);
assert b.length - start >= this.efs.recordsize;
bb = this.buffer.get(idx);
if (bb == null) {
if (index >= size()) throw new IndexOutOfBoundsException("kelondroBufferedEcoFS.get(" + index + ") outside bounds (" + this.size() + ")");
efs.get(index, b, start);
if (index >= size()) throw new IndexOutOfBoundsException("kelondroBufferedEcoFS.get(" + index + ") outside bounds (" + size() + ")");
this.efs.get(index, b, start);
return;
}
}
System.arraycopy(bb, 0, b, start, efs.recordsize);
System.arraycopy(bb, 0, b, start, this.efs.recordsize);
}
public final synchronized void put(final long index, final byte[] b, final int start) throws IOException {
assert b.length - start >= efs.recordsize;
assert b.length - start >= this.efs.recordsize;
final long s = size();
if (index > s) throw new IndexOutOfBoundsException("kelondroBufferedEcoFS.put(" + index + ") outside bounds (" + this.size() + ")");
if (index > s) throw new IndexOutOfBoundsException("kelondroBufferedEcoFS.put(" + index + ") outside bounds (" + size() + ")");
if (index == s) {
efs.add(b, start);
this.efs.add(b, start);
} else {
final byte[] bb = new byte[efs.recordsize];
System.arraycopy(b, start, bb, 0, efs.recordsize);
buffer.put(Long.valueOf(index), bb);
if (buffer.size() > this.maxEntries) flushBuffer0();
final byte[] bb = new byte[this.efs.recordsize];
System.arraycopy(b, start, bb, 0, this.efs.recordsize);
this.buffer.put(Long.valueOf(index), bb);
if (this.buffer.size() > this.maxEntries) flushBuffer0();
}
}
public final synchronized void add(final byte[] b, final int start) throws IOException {
assert b.length - start >= efs.recordsize;
assert b.length - start >= this.efs.recordsize;
// index == size() == efs.size();
efs.add(b, start);
this.efs.add(b, start);
}
public final synchronized void cleanLast(final byte[] b, final int start) throws IOException {
assert b.length - start >= efs.recordsize;
final byte[] bb = buffer.remove(Long.valueOf(size() - 1));
assert b.length - start >= this.efs.recordsize;
final byte[] bb = this.buffer.remove(Long.valueOf(size() - 1));
if (bb == null) {
efs.cleanLast(b, start);
this.efs.cleanLast(b, start);
} else {
System.arraycopy(bb, 0, b, start, efs.recordsize);
efs.cleanLast();
System.arraycopy(bb, 0, b, start, this.efs.recordsize);
this.efs.cleanLast();
}
}
public final synchronized void cleanLast() throws IOException {
buffer.remove(Long.valueOf(size() - 1));
efs.cleanLast();
this.buffer.remove(Long.valueOf(size() - 1));
this.efs.cleanLast();
}
public final void deleteOnExit() {
efs.deleteOnExit();
this.efs.deleteOnExit();
}
/**
* main - writes some data and checks the table's size (with time measurement)
* @param args
@ -192,7 +192,7 @@ public final class BufferedRecords {
}
System.out.println("size() needs " + ((System.currentTimeMillis() - start) / 100) + " nanoseconds");
System.out.println("size = " + c);
t.close();
} catch (final IOException e) {
Log.logException(e);

View File

@ -185,7 +185,7 @@ public class ThreadDump extends HashMap<ThreadDump.StackTrace, List<String>> imp
state = null;
continue;
}
if (line.charAt(0) == '"' && (p = line.indexOf("\" prio=")) > 0) {
if (line.charAt(0) == '"' && (p = line.indexOf("\" prio=",0)) > 0) {
// start a new thread
thread = line.substring(1, p);
continue;
@ -378,7 +378,7 @@ public class ThreadDump extends HashMap<ThreadDump.StackTrace, List<String>> imp
// check if the thread is locked or holds a lock
if (entry.getKey().state != Thread.State.RUNNABLE) continue runf;
for (final String s: entry.getValue()) {
if (s.indexOf("locked <") > 0 || s.indexOf("waiting to lock") > 0) continue runf;
if (s.indexOf("locked <") > 0 || s.indexOf("waiting to lock",0) > 0) continue runf;
}
runner.add(entry);
}
@ -394,7 +394,7 @@ public class ThreadDump extends HashMap<ThreadDump.StackTrace, List<String>> imp
final Map<Lock, StackTrace> locks = new HashMap<Lock, StackTrace>();
for (final Map.Entry<StackTrace, List<String>> entry: entrySet()) {
for (final String s: entry.getValue()) {
if ((p = s.indexOf("locked <")) > 0) {
if ((p = s.indexOf("locked <",0)) > 0) {
locks.put(new Lock(s.substring(p + 8, s.indexOf('>'))), entry.getKey());
}
@ -413,7 +413,7 @@ public class ThreadDump extends HashMap<ThreadDump.StackTrace, List<String>> imp
final List<String> list = get(threadName);
if (list == null) return null;
for (final String s: list) {
if ((p = s.indexOf("<")) > 0 && s.indexOf("locked <") < 0) {
if ((p = s.indexOf('<',0)) > 0 && s.indexOf("locked <",0) < 0) {
return new Lock(s.substring(p + 1, s.indexOf('>')));
}
}
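A pattern that recurs in this file and in most of the files in this commit is the switch from s.indexOf(needle) to s.indexOf(needle, 0). The single-argument overload is specified to search from index 0, so both calls return the same position; going through the two-argument form directly presumably just avoids one level of delegation, and whether that is measurable depends on the JIT. A minimal check of the equivalence, with an illustrative thread-dump line:

    // sketch: String.indexOf(str) and String.indexOf(str, 0) return the same position,
    // so the rewrite changes the call path, not the behaviour
    public final class IndexOfSketch {
        public static void main(final String[] args) {
            final String line = "\"main\" prio=10 tid=0x08066000 nid=0x1 runnable";
            final int oneArg = line.indexOf("\" prio=");
            final int twoArg = line.indexOf("\" prio=", 0);
            System.out.println(oneArg + " == " + twoArg);   // prints "5 == 5"
            assert oneArg == twoArg;
        }
    }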

View File

@ -38,10 +38,6 @@ import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.TreeMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import net.yacy.cora.date.GenericFormatter;
import net.yacy.cora.ranking.Order;
@ -58,7 +54,6 @@ import net.yacy.kelondro.order.CloneableIterator;
import net.yacy.kelondro.order.MergeIterator;
import net.yacy.kelondro.order.StackIterator;
import net.yacy.kelondro.util.FileUtils;
import net.yacy.kelondro.util.NamePrefixThreadFactory;
public class SplitTable implements Index, Iterable<Row.Entry> {
@ -70,7 +65,7 @@ public class SplitTable implements Index, Iterable<Row.Entry> {
private static final int EcoFSBufferSize = 20;
// the thread pool for the keeperOf executor service
private ExecutorService executor;
//private ExecutorService executor;
private Map<String, Index> tables; // a map from a date string to a kelondroIndex object
private final Row rowdef;
@ -240,12 +235,14 @@ public class SplitTable implements Index, Iterable<Row.Entry> {
assert this.current == null || this.tables.get(this.current) != null : "this.current = " + this.current;
// init the thread pool for the keeperOf executor service
/*
this.executor = new ThreadPoolExecutor(
Math.max(this.tables.size(), Runtime.getRuntime().availableProcessors()) + 1,
Math.max(this.tables.size(), Runtime.getRuntime().availableProcessors()) + 1, 10,
TimeUnit.SECONDS,
new LinkedBlockingQueue<Runnable>(),
new NamePrefixThreadFactory(this.prefix));
*/
}
public void clear() throws IOException {
@ -400,7 +397,6 @@ public class SplitTable implements Index, Iterable<Row.Entry> {
}
}
private Index keeperOf(final byte[] key) {
if (key == null) return null;
if (this.tables == null) return null;
@ -516,12 +512,14 @@ public class SplitTable implements Index, Iterable<Row.Entry> {
public synchronized void close() {
if (this.tables == null) return;
/*
this.executor.shutdown();
try {
this.executor.awaitTermination(3, TimeUnit.SECONDS);
} catch (final InterruptedException e) {
}
this.executor = null;
*/
final Iterator<Index> i = this.tables.values().iterator();
while (i.hasNext()) {
i.next().close();

View File

@ -533,8 +533,8 @@ public class Table implements Index, Iterable<Row.Entry> {
if (rowb == null) return null;
final byte[] key = row.getPrimaryKeyBytes();
synchronized (this) {
assert this.file.size() == this.index.size() : "file.size() = " + this.file.size() + ", index.size() = " + this.index.size();
assert this.table == null || this.table.size() == this.index.size() : "table.size() = " + this.table.size() + ", index.size() = " + this.index.size();
//assert this.file.size() == this.index.size() : "file.size() = " + this.file.size() + ", index.size() = " + this.index.size();
//assert this.table == null || this.table.size() == this.index.size() : "table.size() = " + this.table.size() + ", index.size() = " + this.index.size();
final int i = (int) this.index.get(key);
if (i == -1) {
try {
@ -590,8 +590,8 @@ public class Table implements Index, Iterable<Row.Entry> {
if (rowb == null) return true;
final byte[] key = row.getPrimaryKeyBytes();
synchronized (this) {
assert this.file == null || this.file.size() == this.index.size() : "file.size() = " + this.file.size() + ", index.size() = " + this.index.size() + ", file = " + filename();
assert this.table == null || this.table.size() == this.index.size() : "table.size() = " + this.table.size() + ", index.size() = " + this.index.size() + ", file = " + filename();
//assert this.file == null || this.file.size() == this.index.size() : "file.size() = " + this.file.size() + ", index.size() = " + this.index.size() + ", file = " + filename();
//assert this.table == null || this.table.size() == this.index.size() : "table.size() = " + this.table.size() + ", index.size() = " + this.index.size() + ", file = " + filename();
final int i = (int) this.index.get(key);
if (i == -1) {
try {

View File

@ -1,4 +1,4 @@
// OS.java
// OS.java
// -------------------------------------------
// (C) by Michael Peter Christen; mc@yacy.net
// first published on http://www.anomic.de
@ -32,10 +32,9 @@ import java.util.List;
import java.util.Properties;
import java.util.Vector;
import de.anomic.server.serverCore;
import net.yacy.cora.document.UTF8;
import net.yacy.kelondro.logging.Log;
import de.anomic.server.serverCore;
public final class OS {
@ -48,7 +47,7 @@ public final class OS {
Windows, // all Windows 95/98/NT/2K/XP
Unknown; // any other system
}
// constants for file type identification (Mac only)
public static final String blankTypeString = "____";
@ -88,7 +87,7 @@ public final class OS {
if (isWindows) maxPathLength = 255; else maxPathLength = 65535;
}
/**
* finds the maximum possible heap (may cause high system load)
* @return heap in -Xmx<i>[heap]</i>m
@ -100,14 +99,14 @@ public final class OS {
while(!checkWin32Heap(maxmem)) maxmem -= 10;
return maxmem;
}
/**
* checks heap (may cause high system load)
* @param mem heap to check in -Xmx<i>[heap]</i>m
* @return true if possible
* @author [DW], 07.02.2009
*/
public static boolean checkWin32Heap(int mem){
public static boolean checkWin32Heap(final int mem){
String line = "";
final List<String> processArgs = new ArrayList<String>();
processArgs.add("java");
@ -118,7 +117,7 @@ public final class OS {
} catch (final IOException e) {
return false;
}
return (line.indexOf("space for object heap") > -1) ? false : true;
return (line.indexOf("space for object heap",0) > -1) ? false : true;
}
public static String infoString() {
@ -167,11 +166,11 @@ public final class OS {
* @return the PID of the current java process or -1 if the PID cannot be obtained
*/
public static int getPID() {
String pids = ManagementFactory.getRuntimeMXBean().getName();
int p = pids.indexOf('@');
final String pids = ManagementFactory.getRuntimeMXBean().getName();
final int p = pids.indexOf('@');
return p >= 0 ? Integer.parseInt(pids.substring(0, p)) : -1;
}
public static void execAsynchronous(final File scriptFile) throws IOException {
// runs a script as separate thread
String starterFileExtension = null;

View File

@ -7,7 +7,7 @@
// $LastChangedBy$
//
// LICENSE
//
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
@ -29,7 +29,6 @@ import java.lang.reflect.Method;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import net.yacy.kelondro.logging.Log;
@ -42,35 +41,35 @@ public class InstantBlockingThread<J extends WorkflowJob> extends AbstractBlocki
private static int handleCounter = 0;
public static int instantThreadCounter = 0;
public static final ConcurrentMap<Long, String> jobs = new ConcurrentHashMap<Long, String>();
public InstantBlockingThread(final Object env, final String jobExec, final WorkflowProcessor<J> manager) {
// jobExec is the name of a method of the object 'env' that executes the one-step-run
// jobCount is the name of a method that returns the size of the job
// set the manager of blocking queues for input and output
this.setManager(manager);
setManager(manager);
// define execution class
this.jobExecMethod = execMethod(env, jobExec);
this.environment = (env instanceof Class<?>) ? null : env;
this.setName(jobExecMethod.getClass().getName() + "." + jobExecMethod.getName() + "." + handleCounter++);
this.handle = Long.valueOf(System.currentTimeMillis() + this.getName().hashCode());
setName(this.jobExecMethod.getClass().getName() + "." + this.jobExecMethod.getName() + "." + handleCounter++);
this.handle = Long.valueOf(System.currentTimeMillis() + getName().hashCode());
}
public InstantBlockingThread(final Object env, final Method jobExecMethod, final WorkflowProcessor<J> manager) {
// jobExec is the name of a method of the object 'env' that executes the one-step-run
// jobCount is the name of a method that returns the size of the job
// set the manager of blocking queues for input and output
this.setManager(manager);
setManager(manager);
// define execution class
this.jobExecMethod = jobExecMethod;
this.environment = (env instanceof Class<?>) ? null : env;
this.setName(jobExecMethod.getClass().getName() + "." + jobExecMethod.getName() + "." + handleCounter++);
this.handle = Long.valueOf(System.currentTimeMillis() + this.getName().hashCode());
setName(jobExecMethod.getClass().getName() + "." + jobExecMethod.getName() + "." + handleCounter++);
this.handle = Long.valueOf(System.currentTimeMillis() + getName().hashCode());
}
protected static Method execMethod(final Object env, final String jobExec) {
final Class<?> theClass = (env instanceof Class<?>) ? (Class<?>) env : env.getClass();
try {
@ -84,11 +83,11 @@ public class InstantBlockingThread<J extends WorkflowJob> extends AbstractBlocki
throw new RuntimeException("serverInstantThread, wrong declaration of jobExec: " + e.getMessage());
}
}
public int getJobCount() {
return this.getManager().queueSize();
return getManager().queueSize();
}
@SuppressWarnings("unchecked")
public J job(final J next) throws Exception {
J out = null;
@ -99,41 +98,41 @@ public class InstantBlockingThread<J extends WorkflowJob> extends AbstractBlocki
} else if (next == WorkflowJob.poisonPill || next.status == WorkflowJob.STATUS_POISON) {
out = next;
} else {
long t = System.currentTimeMillis();
final long t = System.currentTimeMillis();
instantThreadCounter++;
//System.out.println("started job " + this.handle + ": " + this.getName());
jobs.put(this.handle, this.getName());
jobs.put(this.handle, getName());
try {
out = (J) jobExecMethod.invoke(environment, new Object[]{next});
out = (J) this.jobExecMethod.invoke(this.environment, new Object[]{next});
} catch (final IllegalAccessException e) {
Log.logSevere(BLOCKINGTHREAD, "Internal Error in serverInstantThread.job: " + e.getMessage());
Log.logSevere(BLOCKINGTHREAD, "shutting down thread '" + this.getName() + "'");
this.terminate(false);
Log.logSevere(BLOCKINGTHREAD, "shutting down thread '" + getName() + "'");
terminate(false);
} catch (final IllegalArgumentException e) {
Log.logSevere(BLOCKINGTHREAD, "Internal Error in serverInstantThread.job: " + e.getMessage());
Log.logSevere(BLOCKINGTHREAD, "shutting down thread '" + this.getName() + "'");
this.terminate(false);
Log.logSevere(BLOCKINGTHREAD, "shutting down thread '" + getName() + "'");
terminate(false);
} catch (final InvocationTargetException e) {
final String targetException = e.getTargetException().getMessage();
Log.logException(e.getTargetException());
Log.logException(e);
if ((targetException != null) &&
((targetException.indexOf("heap space") > 0) ||
(targetException.indexOf("NullPointerException") > 0))) {
((targetException.indexOf("heap space",0) > 0) ||
(targetException.indexOf("NullPointerException",0) > 0))) {
Log.logException(e.getTargetException());
}
Log.logSevere(BLOCKINGTHREAD, "Runtime Error in serverInstantThread.job, thread '" + this.getName() + "': " + e.getMessage() + "; target exception: " + targetException, e.getTargetException());
Log.logSevere(BLOCKINGTHREAD, "Runtime Error in serverInstantThread.job, thread '" + getName() + "': " + e.getMessage() + "; target exception: " + targetException, e.getTargetException());
} catch (final OutOfMemoryError e) {
Log.logSevere(BLOCKINGTHREAD, "OutOfMemory Error in serverInstantThread.job, thread '" + this.getName() + "': " + e.getMessage());
Log.logSevere(BLOCKINGTHREAD, "OutOfMemory Error in serverInstantThread.job, thread '" + getName() + "': " + e.getMessage());
Log.logException(e);
}
instantThreadCounter--;
jobs.remove(this.handle);
this.getManager().increaseJobTime(System.currentTimeMillis() - t);
getManager().increaseJobTime(System.currentTimeMillis() - t);
}
return out;
}
}

View File

@ -28,30 +28,29 @@ import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.TreeMap;
import net.yacy.kelondro.logging.Log;
public final class InstantBusyThread extends AbstractBusyThread implements BusyThread {
private Method jobExecMethod, jobCountMethod, freememExecMethod;
private final Object environment;
private final Long handle;
public static int instantThreadCounter = 0;
public static final TreeMap<Long, String> jobs = new TreeMap<Long, String>();
public InstantBusyThread(
final Object env,
final String jobExec,
final String jobCount,
final String freemem,
long minIdleSleep,
long maxIdleSleep,
long minBusySleep,
long maxBusySleep) {
final long minIdleSleep,
final long maxIdleSleep,
final long minBusySleep,
final long maxBusySleep) {
super(minIdleSleep, maxIdleSleep, minBusySleep, maxBusySleep);
// jobExec is the name of a method of the object 'env' that executes the one-step-run
// jobCount is the name of a method that returns the size of the job
// freemem is the name of a method that tries to free memory and returns void
@ -66,7 +65,7 @@ public final class InstantBusyThread extends AbstractBusyThread implements BusyT
this.jobCountMethod = null;
else
this.jobCountMethod = theClass.getMethod(jobCount, new Class[0]);
} catch (final NoSuchMethodException e) {
throw new RuntimeException("serverInstantThread, wrong declaration of jobCount: " + e.getMessage());
}
@ -75,19 +74,19 @@ public final class InstantBusyThread extends AbstractBusyThread implements BusyT
this.freememExecMethod = null;
else
this.freememExecMethod = theClass.getMethod(freemem, new Class[0]);
} catch (final NoSuchMethodException e) {
throw new RuntimeException("serverInstantThread, wrong declaration of freemem: " + e.getMessage());
}
this.environment = (env instanceof Class<?>) ? null : env;
this.setName(theClass.getName() + "." + jobExec);
this.handle = Long.valueOf(System.currentTimeMillis() + this.getName().hashCode());
setName(theClass.getName() + "." + jobExec);
this.handle = Long.valueOf(System.currentTimeMillis() + getName().hashCode());
}
public int getJobCount() {
if (this.jobCountMethod == null) return Integer.MAX_VALUE;
try {
final Object result = jobCountMethod.invoke(environment, new Object[0]);
final Object result = this.jobCountMethod.invoke(this.environment, new Object[0]);
if (result instanceof Integer)
return ((Integer) result).intValue();
else
@ -97,69 +96,69 @@ public final class InstantBusyThread extends AbstractBusyThread implements BusyT
} catch (final IllegalArgumentException e) {
return -1;
} catch (final InvocationTargetException e) {
Log.logSevere("BUSYTHREAD", "invocation serverInstantThread of thread '" + this.getName() + "': " + e.getMessage(), e);
Log.logSevere("BUSYTHREAD", "invocation serverInstantThread of thread '" + getName() + "': " + e.getMessage(), e);
return -1;
}
}
public boolean job() throws Exception {
instantThreadCounter++;
//System.out.println("started job " + this.handle + ": " + this.getName());
synchronized(jobs) {jobs.put(this.handle, this.getName());}
synchronized(jobs) {jobs.put(this.handle, getName());}
boolean jobHasDoneSomething = false;
try {
final Object result = jobExecMethod.invoke(environment, new Object[0]);
final Object result = this.jobExecMethod.invoke(this.environment, new Object[0]);
if (result == null) jobHasDoneSomething = true;
else if (result instanceof Boolean) jobHasDoneSomething = ((Boolean) result).booleanValue();
} catch (final IllegalAccessException e) {
Log.logSevere("BUSYTHREAD", "Internal Error in serverInstantThread.job: " + e.getMessage());
Log.logSevere("BUSYTHREAD", "shutting down thread '" + this.getName() + "'");
this.terminate(false);
Log.logSevere("BUSYTHREAD", "shutting down thread '" + getName() + "'");
terminate(false);
} catch (final IllegalArgumentException e) {
Log.logSevere("BUSYTHREAD", "Internal Error in serverInstantThread.job: " + e.getMessage());
Log.logSevere("BUSYTHREAD", "shutting down thread '" + this.getName() + "'");
this.terminate(false);
Log.logSevere("BUSYTHREAD", "shutting down thread '" + getName() + "'");
terminate(false);
} catch (final InvocationTargetException e) {
final String targetException = e.getTargetException().getMessage();
Log.logException(e);
Log.logException(e.getTargetException());
Log.logSevere("BUSYTHREAD", "Runtime Error in serverInstantThread.job, thread '" + this.getName() + "': " + e.getMessage() + "; target exception: " + targetException, e.getTargetException());
Log.logSevere("BUSYTHREAD", "Runtime Error in serverInstantThread.job, thread '" + getName() + "': " + e.getMessage() + "; target exception: " + targetException, e.getTargetException());
} catch (final OutOfMemoryError e) {
Log.logSevere("BUSYTHREAD", "OutOfMemory Error in serverInstantThread.job, thread '" + this.getName() + "': " + e.getMessage());
Log.logSevere("BUSYTHREAD", "OutOfMemory Error in serverInstantThread.job, thread '" + getName() + "': " + e.getMessage());
Log.logException(e);
freemem();
} catch (final Exception e) {
Log.logSevere("BUSYTHREAD", "Generic Exception, thread '" + this.getName() + "': " + e.getMessage());
Log.logSevere("BUSYTHREAD", "Generic Exception, thread '" + getName() + "': " + e.getMessage());
Log.logException(e);
}
instantThreadCounter--;
synchronized(jobs) {jobs.remove(this.handle);}
return jobHasDoneSomething;
}
public void freemem() {
if (freememExecMethod == null) return;
if (this.freememExecMethod == null) return;
try {
freememExecMethod.invoke(environment, new Object[0]);
this.freememExecMethod.invoke(this.environment, new Object[0]);
} catch (final IllegalAccessException e) {
Log.logSevere("BUSYTHREAD", "Internal Error in serverInstantThread.freemem: " + e.getMessage());
Log.logSevere("BUSYTHREAD", "shutting down thread '" + this.getName() + "'");
this.terminate(false);
Log.logSevere("BUSYTHREAD", "shutting down thread '" + getName() + "'");
terminate(false);
} catch (final IllegalArgumentException e) {
Log.logSevere("BUSYTHREAD", "Internal Error in serverInstantThread.freemem: " + e.getMessage());
Log.logSevere("BUSYTHREAD", "shutting down thread '" + this.getName() + "'");
this.terminate(false);
Log.logSevere("BUSYTHREAD", "shutting down thread '" + getName() + "'");
terminate(false);
} catch (final InvocationTargetException e) {
final String targetException = e.getTargetException().getMessage();
if (targetException.indexOf("heap space") > 0) Log.logException(e.getTargetException());
Log.logSevere("BUSYTHREAD", "Runtime Error in serverInstantThread.freemem, thread '" + this.getName() + "': " + e.getMessage() + "; target exception: " + targetException, e.getTargetException());
if (targetException.indexOf("heap space",0) > 0) Log.logException(e.getTargetException());
Log.logSevere("BUSYTHREAD", "Runtime Error in serverInstantThread.freemem, thread '" + getName() + "': " + e.getMessage() + "; target exception: " + targetException, e.getTargetException());
Log.logException(e.getTargetException());
} catch (final OutOfMemoryError e) {
Log.logSevere("BUSYTHREAD", "OutOfMemory Error in serverInstantThread.freemem, thread '" + this.getName() + "': " + e.getMessage());
Log.logSevere("BUSYTHREAD", "OutOfMemory Error in serverInstantThread.freemem, thread '" + getName() + "': " + e.getMessage());
Log.logException(e);
}
}
public static BusyThread oneTimeJob(final Object env, final String jobExec, final Log log, final long startupDelay) {
// start the job and execute it once as a background process
final BusyThread thread = new InstantBusyThread(
@ -171,16 +170,16 @@ public final class InstantBusyThread extends AbstractBusyThread implements BusyT
thread.start();
return thread;
}
public static WorkflowThread oneTimeJob(final Runnable thread, final long startupDelay) {
final Log log = new Log(thread.getClass().getName() + "/run");
log.setLevel(java.util.logging.Level.INFO);
return oneTimeJob(thread, "run", log, startupDelay);
}
public static WorkflowThread oneTimeJob(final Runnable thread, final long startupDelay, final int maxJobs) {
while (instantThreadCounter >= maxJobs) try {Thread.sleep(100);} catch (final InterruptedException e) {break;}
return oneTimeJob( thread, startupDelay);
}
}
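As the comments above state, the busy-thread class binds to its worker purely by method name: the worker needs public no-argument methods for the job step (boolean or void), the job count (an Integer return is expected) and the memory hook (void). A wiring sketch under those assumptions; CacheCleaner, its method names and the sleep values are made up for illustration, and the package path in the imports is assumed from this source tree:

    import net.yacy.kelondro.workflow.BusyThread;          // package path assumed from this tree
    import net.yacy.kelondro.workflow.InstantBusyThread;

    // illustrative worker: one step of work per busy cycle, a pending counter as the job count,
    // and a hook that is called when the thread hits an OutOfMemoryError
    public final class CacheCleaner {

        private int pending = 3;

        public boolean cleanupStep() {        // jobExec: false means "nothing done", so the thread may idle
            if (this.pending == 0) return false;
            this.pending--;
            return true;
        }

        public int pendingJobs() {            // jobCount: an Integer return value is expected
            return this.pending;
        }

        public void releaseMemory() {         // freemem: drop whatever can be rebuilt later
            this.pending = 0;
        }

        public static void main(final String[] args) {
            final BusyThread worker = new InstantBusyThread(
                    new CacheCleaner(),
                    "cleanupStep",            // resolved by name via reflection (getMethod)
                    "pendingJobs",
                    "releaseMemory",
                    2000, 10000,              // min/max idle sleep in ms
                    1000, 5000);              // min/max busy sleep in ms
            worker.start();
        }
    }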

View File

@ -647,9 +647,9 @@ public class Network {
logt = sb.peers.uploadSeedList(uploader, sb, sb.peers, seedURL);
if (logt != null) {
if (logt.indexOf("Error") >= 0) {
if (logt.indexOf("Error",0) >= 0) {
sb.peers.mySeed().put(Seed.PEERTYPE, prevStatus);
final String errorMsg = "SaveSeedList: seed upload failed using " + uploader.getClass().getName() + " (error): " + logt.substring(logt.indexOf("Error") + 6);
final String errorMsg = "SaveSeedList: seed upload failed using " + uploader.getClass().getName() + " (error): " + logt.substring(logt.indexOf("Error",0) + 6);
log.logSevere(errorMsg);
return errorMsg;
}

View File

@ -244,11 +244,9 @@ public class Seed implements Cloneable, Comparable<Seed>, Comparator<Seed> {
* @param id
* @return a checked name without "<" and ">"
*/
final static Pattern ltp = Pattern.compile("<");
final static Pattern gtp = Pattern.compile(">");
private final static Pattern tp = Pattern.compile("<|>");
private static String checkPeerName(String name) {
name = ltp.matcher(name).replaceAll("_");
name = gtp.matcher(name).replaceAll("_");
name = tp.matcher(name).replaceAll("_");
return name;
}
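The change above folds the two single-character patterns into one alternation, so checkPeerName scans the name once instead of twice. A standalone before/after check; the sample peer name is illustrative:

    import java.util.regex.Pattern;

    // sketch: one "<|>" alternation yields the same result as two sequential replaceAll passes
    public final class PeerNameSketch {
        private final static Pattern ltp = Pattern.compile("<");
        private final static Pattern gtp = Pattern.compile(">");
        private final static Pattern tp = Pattern.compile("<|>");

        public static void main(final String[] args) {
            final String name = "peer<one>";
            final String twoPasses = gtp.matcher(ltp.matcher(name).replaceAll("_")).replaceAll("_");
            final String onePass = tp.matcher(name).replaceAll("_");
            System.out.println(twoPasses + " == " + onePass);   // peer_one_ == peer_one_
            assert twoPasses.equals(onePass);
        }
    }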

View File

@ -9,7 +9,7 @@
// $LastChangedBy$
//
// LICENSE
//
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
@ -34,8 +34,8 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
@ -74,11 +74,11 @@ public class Blacklist {
}
public int getInt() {
return errorCode;
return this.errorCode;
}
public long getLong() {
return (long) errorCode;
return this.errorCode;
}
}
protected static final Set<String> BLACKLIST_TYPES = new HashSet<String>(Arrays.asList(new String[]{
@ -97,7 +97,7 @@ public class Blacklist {
public Blacklist(final File rootPath) {
this.setRootPath(rootPath);
setRootPath(rootPath);
// prepare the data structure
this.hostpaths_matchable = new HashMap<String, Map<String, List<String>>>();
@ -241,7 +241,7 @@ public class Blacklist {
blacklistMap.remove(host);
}
}
final Map<String, List<String>> blacklistMapNotMatch = getBlacklistMap(blacklistType, false);
hostList = blacklistMapNotMatch.get(host);
if (hostList != null) {
@ -300,7 +300,7 @@ public class Blacklist {
final String h =
((!isMatchable(host) && host.length() > 0 && host.charAt(0) == '*') ? "." + host : host).toLowerCase();
List<String> hostList = blacklistMap.get(h);
final List<String> hostList = blacklistMap.get(h);
if (hostList != null) {
ret = hostList.contains(path);
}
@ -322,7 +322,7 @@ public class Blacklist {
if (temp) {
try {
urlHashCache.put(url.hash());
} catch (RowSpaceExceededException e) {
} catch (final RowSpaceExceededException e) {
Log.logException(e);
}
}
@ -365,7 +365,7 @@ public class Blacklist {
if (!matched && (app = blacklistMapMatched.get(hostlow)) != null) {
for (int i = app.size() - 1; !matched && i > -1; i--) {
pp = app.get(i);
if (pp.indexOf("?*") > 0) {
if (pp.indexOf("?*",0) > 0) {
// prevent "Dangling meta character '*'" exception
Log.logWarning("Blacklist", "ignored blacklist path to prevent 'Dangling meta character' exception: " + pp);
continue;
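The guard above drops blacklist paths that contain "?*" before they ever reach Pattern.matches(), because such a path is not a valid regular expression: the '?' quantifies the preceding character and the following '*' is left with nothing to quantify. A minimal reproduction of the exception that the warning avoids; the sample path is illustrative:

    import java.util.regex.Pattern;
    import java.util.regex.PatternSyntaxException;

    // sketch: compiling a blacklist path that contains "?*" fails with the
    // "Dangling meta character" error that the check above works around
    public final class DanglingMetaSketch {
        public static void main(final String[] args) {
            try {
                Pattern.compile("index.php?*");
            } catch (final PatternSyntaxException e) {
                System.out.println(e.getDescription());   // Dangling meta character '*'
            }
        }
    }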

View File

@ -5,9 +5,9 @@ import java.io.IOException;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;
import java.util.Map.Entry;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
@ -17,15 +17,15 @@ import net.yacy.kelondro.logging.Log;
/**
* a URL filter engine for black and white lists
*
*
* @TODO precompile regular expressions
*
*/
public class FilterEngine {
/** size of URL cache */
protected static final int CACHE_SIZE = 100;
public static final int ERR_TWO_WILDCARDS_IN_HOST = 1;
public static final int ERR_SUBDOMAIN_XOR_WILDCARD = 2;
public static final int ERR_PATH_REGEX = 3;
@ -40,29 +40,29 @@ public class FilterEngine {
public String path;
public EnumSet<listTypes> types;
public FilterEntry(String path, EnumSet<listTypes>types) {
public FilterEntry(final String path, final EnumSet<listTypes>types) {
this.path = path;
this.types = types;
}
@Override
public int compareTo(FilterEntry fe) {
public int compareTo(final FilterEntry fe) {
return this.path.compareToIgnoreCase(fe.path);
}
}
protected HashARC<DigestURI, EnumSet<listTypes>> cachedUrlHashs = null;
protected Map<String, Set<FilterEntry>> hostpaths_matchable = null;
protected Map<String, Set<FilterEntry>> hostpaths_notmatchable = null;
public FilterEngine() {
// prepare the data structure
this.hostpaths_matchable = new HashMap<String, Set<FilterEntry>>();
this.hostpaths_notmatchable = new HashMap<String, Set<FilterEntry>>();
this.cachedUrlHashs = new HashARC<DigestURI, EnumSet<listTypes>>(CACHE_SIZE);
}
public void clear() {
this.cachedUrlHashs.clear();
this.hostpaths_matchable.clear();
@ -72,64 +72,64 @@ public class FilterEngine {
public int size() {
return this.hostpaths_matchable.size() + this.hostpaths_notmatchable.size();
}
public void add(String entry, EnumSet<listTypes> types) {
public void add(final String entry, final EnumSet<listTypes> types) {
assert entry != null;
int pos; // position between domain and path
if((pos = entry.indexOf('/')) > 0) {
String host = entry.substring(0, pos).trim().toLowerCase();
String path = entry.substring(pos + 1).trim();
final String path = entry.substring(pos + 1).trim();
// avoid PatternSyntaxException
if (!isMatchable(host) && host.length() > 0 && host.charAt(0) == '*')
host = "." + host;
if(isMatchable(host)) {
if (!hostpaths_matchable.containsKey(host))
hostpaths_matchable.put(host, new TreeSet<FilterEntry>());
hostpaths_matchable.get(host).add(new FilterEntry(path, types));
if (!this.hostpaths_matchable.containsKey(host))
this.hostpaths_matchable.put(host, new TreeSet<FilterEntry>());
this.hostpaths_matchable.get(host).add(new FilterEntry(path, types));
// TODO: update type, if there is an element
} else {
if (!hostpaths_notmatchable.containsKey(host))
hostpaths_notmatchable.put(host, new TreeSet<FilterEntry>());
hostpaths_notmatchable.get(host).add(new FilterEntry(path, types));
if (!this.hostpaths_notmatchable.containsKey(host))
this.hostpaths_notmatchable.put(host, new TreeSet<FilterEntry>());
this.hostpaths_notmatchable.get(host).add(new FilterEntry(path, types));
}
}
}
public void loadList(final BufferedReader in, EnumSet<listTypes> types) throws IOException {
public void loadList(final BufferedReader in, final EnumSet<listTypes> types) throws IOException {
String line;
while((line = in.readLine()) != null) {
line = line.trim();
if (line.length() > 0 && line.charAt(0) != '#')
this.add(line, types);
add(line, types);
}
}
public void removeAll(final String host) {
assert host != null;
this.hostpaths_matchable.remove(host);
this.hostpaths_notmatchable.remove(host);
}
public void remove(final String listType, final String host, final String path) {
}
public boolean isListed(final DigestURI url, final EnumSet<listTypes> type) {
// trivial answer
if (url.getHost() == null)
return false;
if(cachedUrlHashs.containsKey(url)) {
if(this.cachedUrlHashs.containsKey(url)) {
// Cache Hit
EnumSet<listTypes> e = cachedUrlHashs.get(url);
final EnumSet<listTypes> e = this.cachedUrlHashs.get(url);
return e.containsAll(type);
} else {
// Cache Miss
return isListed(url.getHost().toLowerCase(), url.getFile(), type);
}
}
public static boolean isMatchable (final String host) {
try {
if(Pattern.matches("^[a-z0-9.-]*$", host)) // simple Domain (yacy.net or www.yacy.net)
@ -145,7 +145,7 @@ public class FilterEngine {
return false;
}
public boolean isListed(final String host, String path, EnumSet<listTypes> type) {
public boolean isListed(final String host, String path, final EnumSet<listTypes> type) {
if (host == null) throw new NullPointerException();
if (path == null) throw new NullPointerException();
@ -153,9 +153,9 @@ public class FilterEngine {
Set<FilterEntry> app;
// try to match complete domain
if ((app = hostpaths_matchable.get(host)) != null) {
for(FilterEntry e: app) {
if (e.path.indexOf("?*") > 0) {
if ((app = this.hostpaths_matchable.get(host)) != null) {
for(final FilterEntry e: app) {
if (e.path.indexOf("?*",0) > 0) {
// prevent "Dangling meta character '*'" exception
Log.logWarning("FilterEngine", "ignored blacklist path to prevent 'Dangling meta character' exception: " + e);
continue;
@ -168,14 +168,14 @@ public class FilterEngine {
// [TL] while "." characters are found within the host string
int index = 0;
while ((index = host.indexOf('.', index + 1)) != -1) {
if ((app = hostpaths_matchable.get(host.substring(0, index + 1) + "*")) != null) {
for(FilterEntry e: app) {
if ((app = this.hostpaths_matchable.get(host.substring(0, index + 1) + "*")) != null) {
for(final FilterEntry e: app) {
if((e.path.equals("*")) || (path.matches(e.path)))
return true;
}
}
if ((app = hostpaths_matchable.get(host.substring(0, index))) != null) {
for(FilterEntry e: app) {
if ((app = this.hostpaths_matchable.get(host.substring(0, index))) != null) {
for(final FilterEntry e: app) {
if((e.path.equals("*")) || (path.matches(e.path)))
return true;
}
@ -183,15 +183,15 @@ public class FilterEngine {
}
index = host.length();
while ((index = host.lastIndexOf('.', index - 1)) != -1) {
if ((app = hostpaths_matchable.get("*" + host.substring(index, host.length()))) != null) {
for(FilterEntry e: app) {
if ((app = this.hostpaths_matchable.get("*" + host.substring(index, host.length()))) != null) {
for(final FilterEntry e: app) {
if((e.path.equals("*")) || (path.matches(e.path)))
return true;
}
}
if ((app = hostpaths_matchable.get(host.substring(index +1, host.length()))) != null) {
for(FilterEntry e: app) {
if ((app = this.hostpaths_matchable.get(host.substring(index +1, host.length()))) != null) {
for(final FilterEntry e: app) {
if((e.path.equals("*")) || (path.matches(e.path)))
return true;
}
@ -200,12 +200,12 @@ public class FilterEngine {
// loop over all regex entries
for(final Entry<String, Set<FilterEntry>> entry: hostpaths_notmatchable.entrySet()) {
for(final Entry<String, Set<FilterEntry>> entry: this.hostpaths_notmatchable.entrySet()) {
try {
if(Pattern.matches(entry.getKey(), host)) {
app = entry.getValue();
for(FilterEntry e: app) {
if(Pattern.matches(e.path, path))
for(final FilterEntry e: app) {
if(Pattern.matches(e.path, path))
return true;
}
}
@ -216,7 +216,7 @@ public class FilterEngine {
return false;
}
public int checkError(String element, Map<String, String> properties) {
public int checkError(final String element, final Map<String, String> properties) {
final boolean allowRegex = (properties != null) && properties.get("allowRegex").equalsIgnoreCase("true");
int slashPos;

View File

@ -2539,7 +2539,7 @@ public final class Switchboard extends serverSwitch {
@Override
public void run() {
String r = host;
if (r.indexOf("//") < 0) {
if (r.indexOf("//",0) < 0) {
r = "http://" + r;
}
@ -2577,7 +2577,7 @@ public final class Switchboard extends serverSwitch {
@Override
public void run() {
String query = searchEvent.getQuery().queryString(true);
final int meta = query.indexOf("heuristic:");
final int meta = query.indexOf("heuristic:",0);
if (meta >= 0) {
final int q = query.indexOf(' ', meta);
query = (q >= 0) ? query.substring(0, meta) + query.substring(q + 1) : query.substring(0, meta);
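The block above cuts a "heuristic:<term>" token out of the query string, together with the space that follows it when the token sits in the middle. A small worked sketch of that substring logic; the sample query is illustrative:

    // sketch: how the "heuristic:" meta token is removed from a query string
    public final class HeuristicStripSketch {
        public static void main(final String[] args) {
            String query = "yacy heuristic:scroogle p2p";
            final int meta = query.indexOf("heuristic:", 0);
            if (meta >= 0) {
                final int q = query.indexOf(' ', meta);
                query = (q >= 0) ? query.substring(0, meta) + query.substring(q + 1) : query.substring(0, meta);
            }
            System.out.println(query);   // prints "yacy p2p"
        }
    }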
@ -2600,7 +2600,7 @@ public final class Switchboard extends serverSwitch {
}
final Iterator<MultiProtocolURI> i = links.keySet().iterator();
while (i.hasNext()) {
if (i.next().toNormalform(false, false).indexOf("scroogle") >= 0) {
if (i.next().toNormalform(false, false).indexOf("scroogle",0) >= 0) {
i.remove();
}
}
@ -2619,7 +2619,7 @@ public final class Switchboard extends serverSwitch {
@Override
public void run() {
String query = searchEvent.getQuery().queryString(true);
final int meta = query.indexOf("heuristic:");
final int meta = query.indexOf("heuristic:",0);
if (meta >= 0) {
final int q = query.indexOf(' ', meta);
if (q >= 0) query = query.substring(0, meta) + query.substring(q + 1); else query = query.substring(0, meta);
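
Both Switchboard hunks above strip a "heuristic:" operator out of the query string before the heuristic search threads reuse it. A small standalone helper with the same logic (the method name is illustrative, not part of Switchboard):

    // Remove "heuristic:<name>" from a query string, keeping the rest of the query.
    public static String stripHeuristicOperator(final String queryString) {
        String query = queryString;
        final int meta = query.indexOf("heuristic:", 0);
        if (meta >= 0) {
            final int q = query.indexOf(' ', meta);
            query = (q >= 0) ? query.substring(0, meta) + query.substring(q + 1)
                             : query.substring(0, meta);
        }
        return query;
    }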


@ -109,7 +109,7 @@ public class DocumentIndex extends Segment {
}
}
} catch (final IOException e) {
if (e.getMessage().indexOf("cannot parse") < 0) Log.logException(e);
if (e.getMessage().indexOf("cannot parse",0) < 0) Log.logException(e);
DocumentIndex.this.callback.fail(f, e.getMessage());
}
} catch (final InterruptedException e) {}


@ -323,7 +323,7 @@ public final class MetadataRepository implements Iterable<byte[]> {
oldUrlStr = entry.getColUTF8(1).trim();
int pos = -1;
if ((pos = oldUrlStr.indexOf("://")) != -1) {
if ((pos = oldUrlStr.indexOf("://",0)) != -1) {
// trying to correct the url
final String newUrlStr = "http://" + oldUrlStr.substring(pos + 3);
final DigestURI newUrl = new DigestURI(newUrlStr);
@ -422,7 +422,7 @@ public final class MetadataRepository implements Iterable<byte[]> {
}
}
} catch (final RuntimeException e) {
if (e.getMessage() != null && e.getMessage().indexOf("not found in LURL") != -1) {
if (e.getMessage() != null && e.getMessage().indexOf("not found in LURL",0) != -1) {
Log.logWarning("URLDBCLEANER", "urlHash not found in LURL", e);
}
else {
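
The first MetadataRepository hunk repairs stored URLs: if the string contains a "://" separator, everything before it is dropped and an http scheme is forced. A sketch of that repair step on plain strings (DigestURI construction and the surrounding cleaner loop omitted):

    // Rewrite a broken stored URL as an http URL, as in MetadataRepository's cleaner.
    public static String repairUrl(final String oldUrlStr) {
        final int pos = oldUrlStr.indexOf("://", 0);
        if (pos != -1) {
            return "http://" + oldUrlStr.substring(pos + 3);
        }
        return oldUrlStr; // nothing to repair
    }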


@ -192,7 +192,7 @@ public final class RWIProcess extends Thread {
// iterate over normalized entries and select some that are better than currently stored
timer = System.currentTimeMillis();
final boolean nav_hosts = this.query.navigators.equals("all") || this.query.navigators.indexOf("hosts") >= 0;
final boolean nav_hosts = this.query.navigators.equals("all") || this.query.navigators.indexOf("hosts",0) >= 0;
// apply all constraints
try {
@ -616,14 +616,14 @@ public final class RWIProcess extends Thread {
}
public ScoreMap<String> getNamespaceNavigator() {
if (!this.query.navigators.equals("all") && this.query.navigators.indexOf("namespace") < 0) return new ClusteredScoreMap<String>();
if (!this.query.navigators.equals("all") && this.query.navigators.indexOf("namespace",0) < 0) return new ClusteredScoreMap<String>();
if (this.namespaceNavigator.sizeSmaller(2)) this.namespaceNavigator.clear(); // navigators with one entry are not useful
return this.namespaceNavigator;
}
public ScoreMap<String> getHostNavigator() {
final ScoreMap<String> result = new ConcurrentScoreMap<String>();
if (!this.query.navigators.equals("all") && this.query.navigators.indexOf("hosts") < 0) return result;
if (!this.query.navigators.equals("all") && this.query.navigators.indexOf("hosts",0) < 0) return result;
final Iterator<String> domhashs = this.hostNavigator.keys(false);
URIMetadataRow row;
@ -644,13 +644,13 @@ public final class RWIProcess extends Thread {
}
public ScoreMap<String> getProtocolNavigator() {
if (!this.query.navigators.equals("all") && this.query.navigators.indexOf("protocol") < 0) return new ClusteredScoreMap<String>();
if (!this.query.navigators.equals("all") && this.query.navigators.indexOf("protocol",0) < 0) return new ClusteredScoreMap<String>();
if (this.protocolNavigator.sizeSmaller(2)) this.protocolNavigator.clear(); // navigators with one entry are not useful
return this.protocolNavigator;
}
public ScoreMap<String> getFiletypeNavigator() {
if (!this.query.navigators.equals("all") && this.query.navigators.indexOf("filetype") < 0) return new ClusteredScoreMap<String>();
if (!this.query.navigators.equals("all") && this.query.navigators.indexOf("filetype",0) < 0) return new ClusteredScoreMap<String>();
if (this.filetypeNavigator.sizeSmaller(2)) this.filetypeNavigator.clear(); // navigators with one entry are not useful
return this.filetypeNavigator;
}
@ -667,7 +667,7 @@ public final class RWIProcess extends Thread {
// create a list of words that had been computed by statistics over all
// words that appeared in the url or the description of all urls
final ScoreMap<String> result = new ConcurrentScoreMap<String>();
if (!this.query.navigators.equals("all") && this.query.navigators.indexOf("topics") < 0) return result;
if (!this.query.navigators.equals("all") && this.query.navigators.indexOf("topics",0) < 0) return result;
if (this.ref.sizeSmaller(2)) this.ref.clear(); // navigators with one entry are not useful
final Map<String, Float> counts = new HashMap<String, Float>();
final Iterator<String> i = this.ref.keys(false);
@ -725,7 +725,7 @@ public final class RWIProcess extends Thread {
public ScoreMap<String> getAuthorNavigator() {
// create a list of words that had been computed by statistics over all
// words that appeared in the url or the description of all urls
if (!this.query.navigators.equals("all") && this.query.navigators.indexOf("authors") < 0) return new ConcurrentScoreMap<String>();
if (!this.query.navigators.equals("all") && this.query.navigators.indexOf("authors",0) < 0) return new ConcurrentScoreMap<String>();
if (this.authorNavigator.sizeSmaller(2)) this.authorNavigator.clear(); // navigators with one entry are not useful
return this.authorNavigator;
}
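
Every navigator getter in RWIProcess repeats the same guard: the navigator list is either "all" or must contain the navigator's name. The same check recurs in the SnippetProcess hunk below. A hypothetical helper (not part of the YaCy API) with identical semantics:

    // True if the given navigator is enabled in the comma-separated navigator list.
    public static boolean navigatorEnabled(final String navigators, final String name) {
        return navigators.equals("all") || navigators.indexOf(name, 0) >= 0;
    }

    // usage mirroring getHostNavigator():
    // if (!navigatorEnabled(this.query.navigators, "hosts")) return new ConcurrentScoreMap<String>();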


@ -371,7 +371,7 @@ public class SnippetProcess {
URIMetadataRow page;
ResultEntry resultEntry;
//final int fetchAhead = snippetMode == 0 ? 0 : 10;
final boolean nav_topics = SnippetProcess.this.query.navigators.equals("all") || SnippetProcess.this.query.navigators.indexOf("topics") >= 0;
final boolean nav_topics = SnippetProcess.this.query.navigators.equals("all") || SnippetProcess.this.query.navigators.indexOf("topics",0) >= 0;
try {
//System.out.println("DEPLOYED WORKER " + id + " FOR " + this.neededResults + " RESULTS, timeoutd = " + (this.timeout - System.currentTimeMillis()));
int loops = 0;


@ -70,7 +70,7 @@ public class MediaSnippet implements Comparable<MediaSnippet>, Comparator<MediaS
this.width = -1;
this.height = -1;
int p = 0;
if (attr != null && (p = attr.indexOf(" x ")) > 0) {
if (attr != null && (p = attr.indexOf(" x ",0)) > 0) {
this.width = Integer.parseInt(attr.substring(0, p).trim());
this.height = Integer.parseInt(attr.substring(p + 3).trim());
}
@ -189,7 +189,7 @@ public class MediaSnippet implements Comparable<MediaSnippet>, Comparator<MediaS
ientry = i.next();
url = new DigestURI(ientry.url());
final String u = url.toString();
if (u.indexOf(".ico") >= 0 || u.indexOf("favicon") >= 0) continue;
if (u.indexOf(".ico",0) >= 0 || u.indexOf("favicon",0) >= 0) continue;
if (ientry.height() > 0 && ientry.height() < 32) continue;
if (ientry.width() > 0 && ientry.width() < 32) continue;
desc = ientry.alt();
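
The MediaSnippet constructor hunk parses an attribute of the form "640 x 480" into width and height. Extracted as a standalone sketch (a hypothetical helper; the original stores the values in fields instead of returning them):

    // Parse "WIDTH x HEIGHT" attributes; returns {-1, -1} if the attribute is absent.
    public static int[] parseDimensions(final String attr) {
        int width = -1;
        int height = -1;
        int p = 0;
        if (attr != null && (p = attr.indexOf(" x ", 0)) > 0) {
            width = Integer.parseInt(attr.substring(0, p).trim());
            height = Integer.parseInt(attr.substring(p + 3).trim());
        }
        return new int[] { width, height };
    }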


@ -63,7 +63,7 @@ public class GraphPlotter {
}
public coordinate[] getBorder(final String name) {
final int p = name.indexOf("$");
final int p = name.indexOf('$',0);
if (p < 0) return null;
final coordinate from = getPoint(name.substring(0, p));
final coordinate to = getPoint(name.substring(p + 1));
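
The GraphPlotter change swaps the String overload of indexOf for the char overload with an explicit start index, one of the commit's "last-minute performance hacks": both return the same position, but the char variant avoids scanning for a String pattern. A tiny illustration:

    public class IndexOfOverloadDemo {
        public static void main(final String[] args) {
            final String name = "berlin$hamburg";   // illustrative border name
            final int p1 = name.indexOf("$");       // String overload
            final int p2 = name.indexOf('$', 0);    // char overload, explicit start index
            System.out.println(p1 + " == " + p2);   // both print 6
        }
    }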


@ -163,10 +163,10 @@ public class TarEntry implements TarConstants {
*
* @param name the entry name
*/
public TarEntry(String name) {
public TarEntry(final String name) {
this();
boolean isDir = name.endsWith("/");
final boolean isDir = name.endsWith("/");
this.devMajor = 0;
this.devMinor = 0;
@ -191,7 +191,7 @@ public class TarEntry implements TarConstants {
* @param name the entry name
* @param linkFlag the entry link flag.
*/
public TarEntry(String name, byte linkFlag) {
public TarEntry(final String name, final byte linkFlag) {
this(name);
this.linkFlag = linkFlag;
}
@ -202,13 +202,13 @@ public class TarEntry implements TarConstants {
*
* @param file The file that the entry represents.
*/
public TarEntry(File file) {
public TarEntry(final File file) {
this();
this.file = file;
String fileName = file.getPath();
String osname = System.getProperty("os.name").toLowerCase(Locale.US);
final String osname = System.getProperty("os.name").toLowerCase(Locale.US);
if (osname != null) {
@ -217,8 +217,8 @@ public class TarEntry implements TarConstants {
if (osname.startsWith("windows")) {
if (fileName.length() > 2) {
char ch1 = fileName.charAt(0);
char ch2 = fileName.charAt(1);
final char ch1 = fileName.charAt(0);
final char ch2 = fileName.charAt(1);
if (ch2 == ':'
&& ((ch1 >= 'a' && ch1 <= 'z')
@ -226,8 +226,8 @@ public class TarEntry implements TarConstants {
fileName = fileName.substring(2);
}
}
} else if (osname.indexOf("netware") > -1) {
int colon = fileName.indexOf(':');
} else if (osname.indexOf("netware",0) > -1) {
final int colon = fileName.indexOf(':');
if (colon != -1) {
fileName = fileName.substring(colon + 1);
}
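
The two TarEntry(File) hunks above normalize platform-specific paths before the name goes into the tar header: Windows drive letters ("C:\...") and NetWare volume prefixes ("VOL:...") are stripped. A standalone sketch of that normalization; the uppercase half of the drive-letter test is assumed, since the hunk cuts the condition off:

    // Strip Windows drive letters and NetWare volume prefixes from a path,
    // as done in the TarEntry(File) constructor.
    public static String normalizeTarName(final String path, final String osName) {
        String fileName = path;
        final String osname = osName.toLowerCase(java.util.Locale.US);
        if (osname.startsWith("windows")) {
            if (fileName.length() > 2) {
                final char ch1 = fileName.charAt(0);
                final char ch2 = fileName.charAt(1);
                // uppercase range assumed; the hunk boundary truncates the original condition
                if (ch2 == ':' && ((ch1 >= 'a' && ch1 <= 'z') || (ch1 >= 'A' && ch1 <= 'Z'))) {
                    fileName = fileName.substring(2); // drop the "C:"-style drive prefix
                }
            }
        } else if (osname.indexOf("netware", 0) > -1) {
            final int colon = fileName.indexOf(':');
            if (colon != -1) {
                fileName = fileName.substring(colon + 1); // drop the "VOL:"-style prefix
            }
        }
        return fileName;
    }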
@ -270,11 +270,11 @@ public class TarEntry implements TarConstants {
*
* @param headerBuf The header bytes from a tar archive entry.
*/
public TarEntry(byte[] headerBuf) {
public TarEntry(final byte[] headerBuf) {
this();
this.parseTarHeader(headerBuf);
parseTarHeader(headerBuf);
}
/**
* Determine if the two entries are equal. Equality is determined
* by the header names being equal.
@ -287,8 +287,8 @@ public class TarEntry implements TarConstants {
if (this == obj) return true;
if (obj == null) return false;
if (!(obj instanceof TarEntry)) return false;
TarEntry other = (TarEntry) obj;
return this.getName().equals(other.getName());
final TarEntry other = (TarEntry) obj;
return getName().equals(other.getName());
}
/**
@ -309,8 +309,8 @@ public class TarEntry implements TarConstants {
* @param desc Entry to be checked as a descendent of this.
* @return True if entry is a descendant of this.
*/
public boolean isDescendent(TarEntry desc) {
return desc.getName().startsWith(this.getName());
public boolean isDescendent(final TarEntry desc) {
return desc.getName().startsWith(getName());
}
/**
@ -327,7 +327,7 @@ public class TarEntry implements TarConstants {
*
* @param name This entry's new name.
*/
public void setName(String name) {
public void setName(final String name) {
this.name = new StringBuilder(name);
}
@ -336,7 +336,7 @@ public class TarEntry implements TarConstants {
*
* @param mode the mode for this entry
*/
public void setMode(int mode) {
public void setMode(final int mode) {
this.mode = mode;
}
@ -363,7 +363,7 @@ public class TarEntry implements TarConstants {
*
* @param userId This entry's new user id.
*/
public void setUserId(int userId) {
public void setUserId(final int userId) {
this.userId = userId;
}
@ -381,7 +381,7 @@ public class TarEntry implements TarConstants {
*
* @param groupId This entry's new group id.
*/
public void setGroupId(int groupId) {
public void setGroupId(final int groupId) {
this.groupId = groupId;
}
@ -399,7 +399,7 @@ public class TarEntry implements TarConstants {
*
* @param userName This entry's new user name.
*/
public void setUserName(String userName) {
public void setUserName(final String userName) {
this.userName = new StringBuilder(userName);
}
@ -417,7 +417,7 @@ public class TarEntry implements TarConstants {
*
* @param groupName This entry's new group name.
*/
public void setGroupName(String groupName) {
public void setGroupName(final String groupName) {
this.groupName = new StringBuilder(groupName);
}
@ -427,9 +427,9 @@ public class TarEntry implements TarConstants {
* @param userId This entry's new user id.
* @param groupId This entry's new group id.
*/
public void setIds(int userId, int groupId) {
this.setUserId(userId);
this.setGroupId(groupId);
public void setIds(final int userId, final int groupId) {
setUserId(userId);
setGroupId(groupId);
}
/**
@ -438,9 +438,9 @@ public class TarEntry implements TarConstants {
* @param userName This entry's new user name.
* @param groupName This entry's new group name.
*/
public void setNames(String userName, String groupName) {
this.setUserName(userName);
this.setGroupName(groupName);
public void setNames(final String userName, final String groupName) {
setUserName(userName);
setGroupName(groupName);
}
/**
@ -449,7 +449,7 @@ public class TarEntry implements TarConstants {
*
* @param time This entry's new modification time.
*/
public void setModTime(long time) {
public void setModTime(final long time) {
this.modTime = time / MILLIS_PER_SECOND;
}
@ -458,7 +458,7 @@ public class TarEntry implements TarConstants {
*
* @param time This entry's new modification time.
*/
public void setModTime(Date time) {
public void setModTime(final Date time) {
this.modTime = time.getTime() / MILLIS_PER_SECOND;
}
@ -503,7 +503,7 @@ public class TarEntry implements TarConstants {
*
* @param size This entry's new file size.
*/
public void setSize(long size) {
public void setSize(final long size) {
this.size = size;
}
@ -514,8 +514,8 @@ public class TarEntry implements TarConstants {
* @return true if this is a long name extension provided by GNU tar
*/
public boolean isGNULongNameEntry() {
return linkFlag == LF_GNUTYPE_LONGNAME
&& name.toString().equals(GNU_LONGLINK);
return this.linkFlag == LF_GNUTYPE_LONGNAME
&& this.name.toString().equals(GNU_LONGLINK);
}
/**
@ -532,7 +532,7 @@ public class TarEntry implements TarConstants {
return true;
}
if (this.getName().endsWith("/")) {
if (getName().endsWith("/")) {
return true;
}
@ -550,8 +550,8 @@ public class TarEntry implements TarConstants {
return new TarEntry[0];
}
String[] list = this.file.list();
TarEntry[] result = new TarEntry[list.length];
final String[] list = this.file.list();
final TarEntry[] result = new TarEntry[list.length];
for (int i = 0; i < list.length; ++i) {
result[i] = new TarEntry(new File(this.file, list[i]));
@ -565,7 +565,7 @@ public class TarEntry implements TarConstants {
*
* @param outbuf The tar entry header buffer to fill in.
*/
public void writeEntryHeader(byte[] outbuf) {
public void writeEntryHeader(final byte[] outbuf) {
int offset = 0;
offset = TarUtils.getNameBytes(this.name, outbuf, offset, NAMELEN);
@ -575,7 +575,7 @@ public class TarEntry implements TarConstants {
offset = TarUtils.getLongOctalBytes(this.size, outbuf, offset, SIZELEN);
offset = TarUtils.getLongOctalBytes(this.modTime, outbuf, offset, MODTIMELEN);
int csOffset = offset;
final int csOffset = offset;
for (int c = 0; c < CHKSUMLEN; ++c) {
outbuf[offset++] = (byte) ' ';
@ -593,7 +593,7 @@ public class TarEntry implements TarConstants {
outbuf[offset++] = 0;
}
long chk = TarUtils.computeCheckSum(outbuf);
final long chk = TarUtils.computeCheckSum(outbuf);
TarUtils.getCheckSumOctalBytes(chk, outbuf, csOffset, CHKSUMLEN);
}
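
The writeEntryHeader hunk shows the usual tar checksum dance: the checksum field is pre-filled with spaces, the sum over the whole header block is computed, and the result is written back in octal at the saved offset. A sketch of what TarUtils.computeCheckSum is expected to do here, assuming the conventional unsigned byte sum over the 512-byte header:

    // Unsigned byte sum over the header block, with the checksum field already space-filled.
    public static long tarHeaderChecksum(final byte[] header) {
        long sum = 0;
        for (final byte b : header) {
            sum += (b & 0xff);
        }
        return sum;
    }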
@ -603,7 +603,7 @@ public class TarEntry implements TarConstants {
*
* @param header The tar entry header buffer to get information from.
*/
public void parseTarHeader(byte[] header) {
public void parseTarHeader(final byte[] header) {
int offset = 0;
this.name = TarUtils.parseName(header, offset, NAMELEN);