*) Better handling of robots.txt files with incorrect keywords

See: http://www.yacy-forum.de/viewtopic.php?p=12292#12292

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@1035 6c8d7289-2bf4-0310-a012-ef5d649a1542
theli 2005-11-06 06:01:08 +00:00
parent a1406f4617
commit f9fb284fb7
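The change below makes the directive keyword checks case-insensitive: instead of calling startsWith() on the raw line, the parser compares against an uppercased copy of the line, so robots.txt files that spell the keywords in an unexpected case (e.g. "user-agent:" or "DISALLOW:") are still recognized. The following standalone sketch (not the YaCy parser itself; class and method names are illustrative) shows the same matching technique:

// Illustrative sketch of case-insensitive robots.txt keyword matching,
// mirroring the approach used in this commit; not part of the YaCy sources.
public final class RobotsKeywordDemo {

    // true if the line starts with the given keyword, ignoring case
    static boolean startsWithIgnoreCase(String line, String keyword) {
        return line.toUpperCase().startsWith(keyword.toUpperCase());
    }

    public static void main(String[] args) {
        String[] lines = {
            "User-agent: *",
            "user-agent: yacy",    // lower-case keyword, not matched by the old case-sensitive check
            "DISALLOW: /private",  // upper-case keyword, not matched by the old case-sensitive check
            "Disallow: /tmp"
        };
        for (String line : lines) {
            line = line.trim();
            if (startsWithIgnoreCase(line, "User-agent:")) {
                System.out.println("user-agent rule: " + line);
            } else if (startsWithIgnoreCase(line, "Disallow:")) {
                System.out.println("disallow rule:   " + line);
            }
        }
    }
}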

@@ -98,16 +98,17 @@ public final class robotsParser{
     ArrayList deny = new ArrayList();
     int pos;
-    String line = null;
-    boolean rule4Yacy = false;
+    String line = null, lineUpper = null;
+    boolean rule4Yacy = false;
     while ((line = reader.readLine()) != null) {
         line = line.trim();
+        lineUpper = line.toUpperCase();
         if (line.length() == 0) {
             // we have reached the end of the rule block
             rule4Yacy = false;
         } else if (line.startsWith("#")) {
             // we can ignore this. Just a comment line
-        } else if ((!rule4Yacy) && (line.startsWith("User-agent:"))) {
+        } else if ((!rule4Yacy) && (lineUpper.startsWith("User-agent:".toUpperCase()))) {
             // cutting off comments at the line end
             pos = line.indexOf("#");
             if (pos != -1) {
@@ -120,7 +121,13 @@ public final class robotsParser{
                 String userAgent = line.substring(pos).trim();
                 rule4Yacy = (userAgent.equals("*") || (userAgent.toLowerCase().indexOf("yacy") >=0));
             }
-        } else if (line.startsWith("Disallow:") && rule4Yacy) {
+        } else if (lineUpper.startsWith("Disallow:".toUpperCase()) && rule4Yacy) {
             // cutting off comments at the line end
             pos = line.indexOf("#");
             if (pos != -1) {
                 line = line.substring(0,pos);
             }
             pos = line.indexOf(" ");
             if (pos != -1) {
                 // getting the path