mirror of
https://github.com/yacy/yacy_search_server.git
synced 2024-09-19 00:01:41 +02:00
added option to see ranking scores in surftipps
and some cleanups git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@2684 6c8d7289-2bf4-0310-a012-ef5d649a1542
This commit is contained in:
parent
7c0e6de366
commit
310f1c41cd
|
@ -54,6 +54,8 @@ public class Surftipps {
|
|||
int display = ((post == null) || (!authenticated)) ? 0 : post.getInt("display", 0);
|
||||
prop.put("display", display);
|
||||
|
||||
boolean showScore = ((post != null) && (post.containsKey("score")));
|
||||
|
||||
boolean surftippsOn = sb.getConfigBool("showSurftipps", true);
|
||||
if ((post != null) && (post.containsKey("surftipps"))) {
|
||||
if (!sb.verifyAuthentication(header, false)) {
|
||||
|
@ -134,12 +136,12 @@ public class Surftipps {
|
|||
e.printStackTrace();
|
||||
}
|
||||
prop.put("surftipps_results_" + i + "_recommend", (voted) ? 0 : 1);
|
||||
prop.put("surftipps_results_" + i + "_recommend_negativeVoteLink", "/Surftipps.html?voteNegative=" + urlhash + "&refid=" + refid + "&display=" + display); // for negative votes, we don't send around the bad url again, the hash is enough
|
||||
prop.put("surftipps_results_" + i + "_recommend_positiveVoteLink", "/Surftipps.html?votePositive=" + urlhash + "&refid=" + refid + "&url=" + crypt.simpleEncode(url,null,'b') + "&title=" + crypt.simpleEncode(title,null,'b') + "&description=" + crypt.simpleEncode(description,null,'b') + "&display=" + display);
|
||||
prop.put("surftipps_results_" + i + "_recommend_negativeVoteLink", "/Surftipps.html?voteNegative=" + urlhash + "&refid=" + refid + "&display=" + display + ((showScore) ? "&score=" : "")); // for negative votes, we don't send around the bad url again, the hash is enough
|
||||
prop.put("surftipps_results_" + i + "_recommend_positiveVoteLink", "/Surftipps.html?votePositive=" + urlhash + "&refid=" + refid + "&url=" + crypt.simpleEncode(url,null,'b') + "&title=" + crypt.simpleEncode(title,null,'b') + "&description=" + crypt.simpleEncode(description,null,'b') + "&display=" + display + ((showScore) ? "&score=" : ""));
|
||||
prop.put("surftipps_results_" + i + "_url", url);
|
||||
prop.put("surftipps_results_" + i + "_urlname", nxTools.shortenURLString(url, 60));
|
||||
prop.put("surftipps_results_" + i + "_urlhash", urlhash);
|
||||
prop.put("surftipps_results_" + i + "_title", title);
|
||||
prop.put("surftipps_results_" + i + "_title", (showScore) ? ("(" + ranking.getScore(urlhash) + ") " + title) : title);
|
||||
prop.put("surftipps_results_" + i + "_description", description);
|
||||
i++;
|
||||
|
||||
|
|
|
@ -47,6 +47,7 @@ import java.io.IOException;
|
|||
|
||||
public class httpdLimitExceededException extends IOException {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
private long limit;
|
||||
|
||||
public httpdLimitExceededException(String errorMsg, long limit) {
|
||||
|
|
|
@ -93,7 +93,7 @@ public class indexEntryAttribute {
|
|||
public static char docType(URL url) {
|
||||
String path = url.getPath();
|
||||
// serverLog.logFinest("PLASMA", "docType URL=" + path);
|
||||
char doctype = doctype = indexEntryAttribute.DT_UNKNOWN;
|
||||
char doctype = indexEntryAttribute.DT_UNKNOWN;
|
||||
if (path.endsWith(".gif")) { doctype = indexEntryAttribute.DT_IMAGE; }
|
||||
else if (path.endsWith(".jpg")) { doctype = indexEntryAttribute.DT_IMAGE; }
|
||||
else if (path.endsWith(".jpeg")) { doctype = indexEntryAttribute.DT_IMAGE; }
|
||||
|
|
|
@ -3,6 +3,9 @@ package de.anomic.plasma.crawler;
|
|||
import java.io.IOException;
|
||||
|
||||
public class plasmaCrawlerException extends IOException {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
public plasmaCrawlerException(String errorMsg) {
|
||||
super(errorMsg);
|
||||
}
|
||||
|
|
|
@ -225,7 +225,7 @@ public final class plasmaCrawlStacker {
|
|||
|
||||
plasmaCrawlProfile.entry profile = this.sb.profiles.getEntry(theMsg.profileHandle());
|
||||
if (profile == null) {
|
||||
String errorMsg = "LOST PROFILE HANDLE '" + theMsg.profileHandle() + "' (must be internal error) for URL " + theMsg.url();
|
||||
String errorMsg = "LOST PROFILE HANDLE '" + theMsg.profileHandle() + "' for URL " + theMsg.url();
|
||||
this.log.logSevere(errorMsg);
|
||||
throw new IllegalStateException(errorMsg);
|
||||
}
|
||||
|
|
|
@ -103,7 +103,6 @@
|
|||
|
||||
package de.anomic.plasma;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.lang.reflect.Constructor;
|
||||
|
@ -1227,12 +1226,12 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
|
|||
// System.out.println("DEBUG plasmaSwitchboard.processCrawling:
|
||||
// profileHandle = " + profileHandle + ", urlEntry.url = " + urlEntry.url());
|
||||
if (profileHandle == null) {
|
||||
log.logSevere(stats + ": NULL PROFILE HANDLE '" + urlEntry.profileHandle() + "' (must be internal error) for URL " + urlEntry.url());
|
||||
log.logSevere(stats + ": NULL PROFILE HANDLE '" + urlEntry.profileHandle() + "' for URL " + urlEntry.url());
|
||||
return true;
|
||||
}
|
||||
plasmaCrawlProfile.entry profile = profiles.getEntry(profileHandle);
|
||||
if (profile == null) {
|
||||
log.logSevere(stats + ": LOST PROFILE HANDLE '" + urlEntry.profileHandle() + "' (must be internal error) for URL " + urlEntry.url());
|
||||
log.logSevere(stats + ": LOST PROFILE HANDLE '" + urlEntry.profileHandle() + "' for URL " + urlEntry.url());
|
||||
return true;
|
||||
}
|
||||
log.logFine("LOCALCRAWL: URL=" + urlEntry.url() + ", initiator=" + urlEntry.initiator() + ", crawlOrder=" + ((profile.remoteIndexing()) ? "true" : "false") + ", depth=" + urlEntry.depth() + ", crawlDepth=" + profile.generalDepth() + ", filter=" + profile.generalFilter()
|
||||
|
@ -1293,7 +1292,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
|
|||
// profileHandle = " + profileHandle + ", urlEntry.url = " + urlEntry.url());
|
||||
plasmaCrawlProfile.entry profile = profiles.getEntry(profileHandle);
|
||||
if (profile == null) {
|
||||
log.logSevere(stats + ": LOST PROFILE HANDLE '" + urlEntry.profileHandle() + "' (must be internal error) for URL " + urlEntry.url());
|
||||
log.logSevere(stats + ": LOST PROFILE HANDLE '" + urlEntry.profileHandle() + "' for URL " + urlEntry.url());
|
||||
return true;
|
||||
}
|
||||
log.logFine("plasmaSwitchboard.limitCrawlTriggerJob: url=" + urlEntry.url() + ", initiator=" + urlEntry.initiator() + ", crawlOrder=" + ((profile.remoteIndexing()) ? "true" : "false") + ", depth=" + urlEntry.depth() + ", crawlDepth=" + profile.generalDepth() + ", filter="
|
||||
|
@ -1381,7 +1380,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
|
|||
plasmaCrawlProfile.entry profile = profiles.getEntry(profileHandle);
|
||||
|
||||
if (profile == null) {
|
||||
log.logSevere(stats + ": LOST PROFILE HANDLE '" + urlEntry.profileHandle() + "' (must be internal error) for URL " + urlEntry.url());
|
||||
log.logSevere(stats + ": LOST PROFILE HANDLE '" + urlEntry.profileHandle() + "' for URL " + urlEntry.url());
|
||||
return false;
|
||||
}
|
||||
log.logFine("plasmaSwitchboard.remoteTriggeredCrawlJob: url=" + urlEntry.url() + ", initiator=" + urlEntry.initiator() + ", crawlOrder=" + ((profile.remoteIndexing()) ? "true" : "false") + ", depth=" + urlEntry.depth() + ", crawlDepth=" + profile.generalDepth() + ", filter="
|
||||
|
|
|
@ -50,6 +50,8 @@ import org.apache.axis.Message;
|
|||
import de.anomic.http.httpHeader;
|
||||
|
||||
public class SoapException extends Exception {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
private int statusCode = 500;
|
||||
private String statusText = (String) httpHeader.http1_1.get(Integer.toString(this.statusCode));
|
||||
private Object errorMsg = this.statusText;
|
||||
|
|
Loading…
Reference in New Issue
Block a user