mirror of
https://github.com/yacy/yacy_search_server.git
synced 2024-09-21 00:00:13 +02:00
1d8d51075c
- removed the plasma package. The name of that package came from a very early pre-version of YaCy, even before YaCy was named AnomicHTTPProxy. The Proxy project introduced search for cache contents using class files that had been developed during the plasma project. Information from 2002 about plasma can be found here: http://web.archive.org/web/20020802110827/http://anomic.de/AnomicPlasma/index.html We still have one class that comes mostly unchanged from the plasma project, the Condenser class. But this is now part of the document package and all other classes in the plasma package can be assigned to other packages. - cleaned up the http package: better structure of that class and clean isolation of server and client classes. The old HTCache becomes part of the client sub-package of http. - because the plasmaSwitchboard is now part of the search package all servlets had to be touched to declare a different package source. git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@6232 6c8d7289-2bf4-0310-a012-ef5d649a1542
78 lines
3.0 KiB
Java
78 lines
3.0 KiB
Java
|
|
import java.util.Iterator;
|
|
|
|
import de.anomic.crawler.CrawlProfile.entry;
|
|
import de.anomic.http.metadata.RequestHeader;
|
|
import de.anomic.crawler.CrawlSwitchboard;
|
|
import de.anomic.search.Switchboard;
|
|
import de.anomic.server.serverObjects;
|
|
import de.anomic.server.serverSwitch;
|
|
|
|
|
|
public class WatchWebStructure_p {
|
|
public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
|
|
final Switchboard sb = (Switchboard) env;
|
|
final serverObjects prop = new serverObjects();
|
|
|
|
int width = 768;
|
|
int height = 576;
|
|
int depth = 3;
|
|
int nodes = 500; // maximum number of host nodes that are painted
|
|
int time = -1;
|
|
String host = "auto";
|
|
String besthost;
|
|
|
|
if (post != null) {
|
|
width = post.getInt("width", 768);
|
|
height = post.getInt("height", 576);
|
|
depth = post.getInt("depth", 3);
|
|
nodes = post.getInt("nodes", width * height * 100 / 768 / 576);
|
|
time = post.getInt("time", -1);
|
|
host = post.get("host", "auto");
|
|
}
|
|
|
|
if (host.equals("auto")) {
|
|
// try to find the host from the crawl profiles
|
|
final Iterator<entry> it = sb.crawler.profilesActiveCrawls.profiles(true);
|
|
entry e;
|
|
while (it.hasNext()) {
|
|
e = it.next();
|
|
if (e.name().equals(CrawlSwitchboard.CRAWL_PROFILE_PROXY) ||
|
|
e.name().equals(CrawlSwitchboard.CRAWL_PROFILE_REMOTE) ||
|
|
e.name().equals(CrawlSwitchboard.CRAWL_PROFILE_SNIPPET_LOCAL_TEXT) ||
|
|
e.name().equals(CrawlSwitchboard.CRAWL_PROFILE_SNIPPET_GLOBAL_TEXT) ||
|
|
e.name().equals(CrawlSwitchboard.CRAWL_PROFILE_SNIPPET_LOCAL_MEDIA) ||
|
|
e.name().equals(CrawlSwitchboard.CRAWL_PROFILE_SNIPPET_GLOBAL_MEDIA) ||
|
|
e.name().equals(CrawlSwitchboard.CRAWL_PROFILE_SURROGATE))
|
|
continue;
|
|
host = e.name();
|
|
break; // take the first one
|
|
}
|
|
}
|
|
|
|
// find start point
|
|
if ((host == null) || (host.length() == 0) || (host.equals("auto"))) {
|
|
// find domain with most references
|
|
besthost = sb.webStructure.hostWithMaxReferences();
|
|
} else {
|
|
besthost = host;
|
|
}
|
|
|
|
prop.putHTML("host", host);
|
|
prop.putHTML("besthost", besthost);
|
|
prop.put("depth", depth);
|
|
prop.put("depthi", Math.min(8, depth + 1));
|
|
prop.put("depthd", Math.max(0, depth - 1));
|
|
prop.put("nodes", nodes);
|
|
prop.put("nodesi", Math.min(1000, nodes + 100));
|
|
prop.put("nodesd", Math.max(100, nodes - 100));
|
|
prop.put("time", time);
|
|
prop.put("timei", (time > 9000) ? -1 : ((time < 0) ? -1 : Math.min(9999, time + 1000)));
|
|
prop.put("timed", (time < 0) ? 9000 : Math.max(1000, time - 1000));
|
|
prop.put("width", width);
|
|
prop.put("height", height);
|
|
|
|
return prop;
|
|
}
|
|
}
|