git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@482 6c8d7289-2bf4-0310-a012-ef5d649a1542
This commit is contained in:
rramthun 2005-08-02 16:09:19 +00:00
parent 40da910f41
commit eacff63eda
9 changed files with 17 additions and 17 deletions

View File

@ -359,7 +359,7 @@ patterns and rapid bootstrapping. May help in future releases to speed up indexin
<li>Introduced global general time measurement for peer synchronization and balanced hello - round-robin.</li>
</ul>
<li>Enhanced proxy-proxy - mode: 'no-proxy' settings as list of patterns to exceptionally disallow usage of remote proxies.</li>
<li>Implemented multiple default-paths for urls pointing to directories.</li>
<li>Implemented multiple default-paths for URLs pointing to directories.</li>
<li>Re-design of front-end menu structure.</li>
<li>Integrated Interface for principal configuration in Settings page</li>
<li>Re-named the release: integrated YACY name to emphasize that the proxy is mutating into the YACY Search Engine Node</li>

View File

@ -165,7 +165,7 @@ public class IndexControl_p {
plasmaCrawlLURL.Entry entry = switchboard.urlPool.loadedURL.getEntry(urlhash);
URL url = entry.url();
if (url == null) {
prop.put("result", "No Entry for url hash " + urlhash + "; nothing deleted.");
prop.put("result", "No Entry for URL hash " + urlhash + "; nothing deleted.");
} else {
urlstring = htmlFilterContentScraper.urlNormalform(url);
prop.put("urlstring", "");
@ -240,7 +240,7 @@ public class IndexControl_p {
plasmaCrawlLURL.Entry entry = switchboard.urlPool.loadedURL.getEntry(urlhash);
URL url = entry.url();
if (url == null) {
prop.put("result", "No Entry for url hash " + urlhash);
prop.put("result", "No Entry for URL hash " + urlhash);
} else {
urlstring = url.toString();
prop.put("urlstring", urlstring);
@ -300,9 +300,9 @@ public class IndexControl_p {
}
public static String genUrlProfile(plasmaSwitchboard switchboard, plasmaCrawlLURL.Entry entry, String urlhash) {
if (entry == null) return "No entry found for url-hash " + urlhash;
if (entry == null) return "No entry found for URL-hash " + urlhash;
URL url = entry.url();
if (url == null) return "No entry found for url-hash " + urlhash;
if (url == null) return "No entry found for URL-hash " + urlhash;
String result = "<table>" +
"<tr><td class=\"small\">URL String</td><td class=\"tt\">" + htmlFilterContentScraper.urlNormalform(url) + "</td></tr>" +
"<tr><td class=\"small\">Hash</td><td class=\"tt\">" + urlhash + "</td></tr>" +

View File

@ -39,7 +39,7 @@
// the intact and unchanged copyright notice.
// Contributions and changes to the program code must be marked as such.
// Contains contributions from Alexandier Schier [AS]
// Contains contributions from Alexander Schier [AS]
// and Marc Nause [MN]
// You must compile this file with

View File

@ -119,7 +119,7 @@ public class crawlReceipt {
if (entry == null) {
switchboard.getLog().logError("RECEIVED wrong RECEIPT for hash " + urlhash + " from peer " + iam);
} else {
switchboard.getLog().logInfo("RECEIVED RECEIPT for url " + entry.url().toString());
switchboard.getLog().logInfo("RECEIVED RECEIPT for URL " + entry.url().toString());
}
// ready for more

View File

@ -1072,7 +1072,7 @@ do upload
try {
return wput(url, 5000, null, null, null, 0, props);
} catch (IOException e) {
serverLog.logError("HTTPC", "wput exception for url " + url + ": " + e.getMessage());
serverLog.logError("HTTPC", "wput exception for URL " + url + ": " + e.getMessage());
e.printStackTrace();
Vector ll = new Vector();
ll.add("503 " + e.getMessage());

View File

@ -500,7 +500,7 @@ public final class httpdProxyHandler extends httpdAbstractHandler implements htt
((ext == null) || (!(plasmaParser.mediaExtContains(ext)))) &&
(plasmaParser.realtimeParsableMimeTypesContains(res.responseHeader.mime()))) {
// make a transformer
this.theLogger.logDebug("create transformer for url " + url);
this.theLogger.logDebug("create transformer for URL " + url);
hfos = new htmlFilterOutputStream((gzippedOut != null) ? gzippedOut : ((chunkedOut != null)? chunkedOut : respond), null, transformer, (ext.length() == 0));
} else {
// simply pass through without parsing

View File

@ -386,11 +386,11 @@ public final class plasmaCrawlWorker extends Thread {
);
}
} else {
log.logInfo("Redirection counter exceeded for url " + url.toString() + ". Processing aborted.");
log.logInfo("Redirection counter exceeded for URL " + url.toString() + ". Processing aborted.");
}
}else {
// if the response has not the right response type then reject file
log.logInfo("REJECTED WRONG STATUS TYPE '" + res.status + "' for url " + url.toString());
log.logInfo("REJECTED WRONG STATUS TYPE '" + res.status + "' for URL " + url.toString());
// not processed any further
}
if (remote != null) remote.close();

View File

@ -438,7 +438,7 @@ public final class plasmaSearch {
}
// insert value
//System.out.println("Ranking " + ranking + " for url " + url.toString());
//System.out.println("Ranking " + ranking + " for URL " + url.toString());
pageAcc.put(serverCodings.encodeHex(ranking, 16) + indexEntry.getUrlHash(), page);
}
// flush memory

View File

@ -117,7 +117,7 @@ public class plasmaSnippetCache {
public result retrieve(URL url, Set queryhashes, boolean fetchOnline, int snippetMaxLength) {
// heise = "0OQUNU3JSs05"
if (queryhashes.size() == 0) {
//System.out.println("found no queryhashes for url retrieve " + url);
//System.out.println("found no queryhashes for URL retrieve " + url);
return new result(null, ERROR_NO_HASH_GIVEN, "no query hashes given");
}
String urlhash = plasmaURL.urlHash(url);
@ -127,7 +127,7 @@ public class plasmaSnippetCache {
String wordhashes = yacySearch.set2string(queryhashes);
String line = retrieveFromCache(wordhashes, urlhash);
if (line != null) {
//System.out.println("found snippet for url " + url + " in cache: " + line);
//System.out.println("found snippet for URL " + url + " in cache: " + line);
return new result(line, source, null);
}
@ -144,13 +144,13 @@ public class plasmaSnippetCache {
return new result(null, ERROR_SOURCE_LOADING, "error loading resource from web: " + e.getMessage());
}
if (resource == null) {
//System.out.println("cannot load document for url " + url);
//System.out.println("cannot load document for URL " + url);
return new result(null, ERROR_RESOURCE_LOADING, "error loading resource from web, cacheManager returned NULL");
}
plasmaParserDocument document = parseDocument(url, resource);
if (document == null) return new result(null, ERROR_PARSER_FAILED, "parser error/failed"); // cannot be parsed
//System.out.println("loaded document for url " + url);
//System.out.println("loaded document for URL " + url);
String[] sentences = document.getSentences();
//System.out.println("----" + url.toString()); for (int l = 0; l < sentences.length; l++) System.out.println(sentences[l]);
if ((sentences == null) || (sentences.length == 0)) {
@ -160,7 +160,7 @@ public class plasmaSnippetCache {
// we have found a parseable non-empty file: use the lines
line = computeSnippet(sentences, queryhashes, 8 + 6 * queryhashes.size(), snippetMaxLength);
//System.out.println("loaded snippet for url " + url + ": " + line);
//System.out.println("loaded snippet for URL " + url + ": " + line);
if (line == null) return new result(null, ERROR_NO_MATCH, "no matching snippet found");
if (line.length() > snippetMaxLength) line = line.substring(0, snippetMaxLength);