Refactoring of the wordIndex class

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@5709 6c8d7289-2bf4-0310-a012-ef5d649a1542
This commit is contained in:
orbiter 2009-03-13 10:34:51 +00:00
parent d49238a637
commit 14a1c33823
89 changed files with 474 additions and 520 deletions

View File

@ -231,7 +231,7 @@ public class AccessTracker_p {
prop.put("page_list_" + entCount + "_dark", ((dark) ? 1 : 0) ); dark =! dark;
prop.putHTML("page_list_" + entCount + "_host", host);
if (page == 5) {
final yacySeed remotepeer = sb.webIndex.seedDB.lookupByIP(natLib.getInetAddress(host), true, true, true);
final yacySeed remotepeer = sb.webIndex.peers().lookupByIP(natLib.getInetAddress(host), true, true, true);
prop.putHTML("page_list_" + entCount + "_peername", (remotepeer == null) ? "UNKNOWN" : remotepeer.getName());
}
prop.putNum("page_list_" + entCount + "_count", handles.size());

View File

@ -67,14 +67,14 @@ public class Banner {
double myqph = 0;
String type = "";
final String network = env.getConfig(plasmaSwitchboardConstants.NETWORK_NAME, "unspecified").toUpperCase();
final int peers = sb.webIndex.seedDB.sizeConnected() + 1; // the '+ 1': the own peer is not included in sizeConnected()
long nlinks = sb.webIndex.seedDB.countActiveURL();
long nwords = sb.webIndex.seedDB.countActiveRWI();
final double nqpm = sb.webIndex.seedDB.countActiveQPM();
long nppm = sb.webIndex.seedDB.countActivePPM();
final int peers = sb.webIndex.peers().sizeConnected() + 1; // the '+ 1': the own peer is not included in sizeConnected()
long nlinks = sb.webIndex.peers().countActiveURL();
long nwords = sb.webIndex.peers().countActiveRWI();
final double nqpm = sb.webIndex.peers().countActiveQPM();
long nppm = sb.webIndex.peers().countActivePPM();
double nqph = 0;
final yacySeed seed = sb.webIndex.seedDB.mySeed();
final yacySeed seed = sb.webIndex.peers().mySeed();
if (seed != null){
name = seed.get(yacySeed.NAME, "-").toUpperCase();
links = Long.parseLong(seed.get(yacySeed.LCOUNT, "0"));
@ -82,19 +82,19 @@ public class Banner {
myppm = seed.getPPM();
myqph = 60d * seed.getQPM();
if (sb.webIndex.seedDB.mySeed().isVirgin()) {
if (sb.webIndex.peers().mySeed().isVirgin()) {
type = "VIRGIN";
nqph = Math.round(6000d * nqpm) / 100d;
} else if(sb.webIndex.seedDB.mySeed().isJunior()) {
} else if(sb.webIndex.peers().mySeed().isJunior()) {
type = "JUNIOR";
nqph = Math.round(6000d * nqpm) / 100d;
} else if(sb.webIndex.seedDB.mySeed().isSenior()) {
} else if(sb.webIndex.peers().mySeed().isSenior()) {
type = "SENIOR";
nlinks = nlinks + links;
nwords = nwords + words;
nqph = Math.round(6000d * nqpm + 100d * myqph) / 100d;
nppm = nppm + myppm;
} else if(sb.webIndex.seedDB.mySeed().isPrincipal()) {
} else if(sb.webIndex.peers().mySeed().isPrincipal()) {
type = "PRINCIPAL";
nlinks = nlinks + links;
nwords = nwords + words;

View File

@ -64,11 +64,11 @@ public class BlacklistImpExp_p {
}
// List known hosts for BlackList retrieval
if (sb.webIndex.seedDB != null && sb.webIndex.seedDB.sizeConnected() > 0) { // no nullpointer error
if (sb.webIndex.peers() != null && sb.webIndex.peers().sizeConnected() > 0) { // no nullpointer error
int peerCount = 0;
try {
final TreeMap<String, String> hostList = new TreeMap<String, String>();
final Iterator<yacySeed> e = sb.webIndex.seedDB.seedsConnected(true, false, null, (float) 0.0);
final Iterator<yacySeed> e = sb.webIndex.peers().seedsConnected(true, false, null, (float) 0.0);
while (e.hasNext()) {
final yacySeed seed = e.next();
if (seed != null) hostList.put(seed.get(yacySeed.NAME, "nameless"),seed.hash);

View File

@ -68,7 +68,7 @@ public class Blog {
final boolean xml = (header.get(httpRequestHeader.CONNECTION_PROP_PATH)).endsWith(".xml");
final String address = sb.webIndex.seedDB.mySeed().getPublicAddress();
final String address = sb.webIndex.peers().mySeed().getPublicAddress();
if(hasRights) {
prop.put("mode_admin", "1");
@ -77,7 +77,7 @@ public class Blog {
}
if (post == null) {
prop.putHTML("peername", sb.webIndex.seedDB.mySeed().getName());
prop.putHTML("peername", sb.webIndex.peers().mySeed().getName());
prop.put("address", address);
return putBlogDefault(prop, sb, address, 0, 10, hasRights, xml);
}
@ -104,10 +104,10 @@ public class Blog {
StrAuthor = sb.blogDB.guessAuthor(ip);
if (StrAuthor == null || StrAuthor.length() == 0) {
if (sb.webIndex.seedDB.mySeed() == null) {
if (sb.webIndex.peers().mySeed() == null) {
StrAuthor = "anonymous";
} else {
StrAuthor = sb.webIndex.seedDB.mySeed().get("Name", "anonymous");
StrAuthor = sb.webIndex.peers().mySeed().get("Name", "anonymous");
}
}
}
@ -169,7 +169,7 @@ public class Blog {
map.put("page", pagename);
map.put("subject", StrSubject.replace(',', ' '));
map.put("author", StrAuthor.replace(',', ' '));
sb.webIndex.seedDB.newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.seedDB.mySeed(), yacyNewsPool.CATEGORY_BLOG_ADD, map));
sb.webIndex.peers().newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.peers().mySeed(), yacyNewsPool.CATEGORY_BLOG_ADD, map));
}
page = sb.blogDB.readBlogEntry(pagename); //maybe "if(page == null)"
@ -246,7 +246,7 @@ public class Blog {
if(pagename.equals(DEFAULT_PAGE)) {
// XXX: where are "peername" and "address" used in the template?
// XXX: "clientname" is already set to the peername, no need for a new setting
prop.putHTML("peername", sb.webIndex.seedDB.mySeed().getName());
prop.putHTML("peername", sb.webIndex.peers().mySeed().getName());
prop.put("address", address);
//index all entries
putBlogDefault(prop, sb, address, start, num, hasRights, xml);

View File

@ -91,11 +91,11 @@ public class BlogComments {
StrAuthor = sb.blogDB.guessAuthor(ip);
if (StrAuthor == null || StrAuthor.length() == 0) {
if (sb.webIndex.seedDB.mySeed() == null) {
if (sb.webIndex.peers().mySeed() == null) {
StrAuthor = "anonymous";
}
else {
StrAuthor = sb.webIndex.seedDB.mySeed().get("Name", "anonymous");
StrAuthor = sb.webIndex.peers().mySeed().get("Name", "anonymous");
}
}
}
@ -151,15 +151,15 @@ public class BlogComments {
sb.messageDB.write(msgEntry = sb.messageDB.newEntry(
"blogComment",
StrAuthor,
sb.webIndex.seedDB.mySeed().hash,
sb.webIndex.seedDB.mySeed().getName(), sb.webIndex.seedDB.mySeed().hash,
sb.webIndex.peers().mySeed().hash,
sb.webIndex.peers().mySeed().getName(), sb.webIndex.peers().mySeed().hash,
"new blog comment: " + new String(blogEntry.getSubject(),"UTF-8"), content));
} catch (final UnsupportedEncodingException e1) {
sb.messageDB.write(msgEntry = sb.messageDB.newEntry(
"blogComment",
StrAuthor,
sb.webIndex.seedDB.mySeed().hash,
sb.webIndex.seedDB.mySeed().getName(), sb.webIndex.seedDB.mySeed().hash,
sb.webIndex.peers().mySeed().hash,
sb.webIndex.peers().mySeed().getName(), sb.webIndex.peers().mySeed().hash,
"new blog comment: " + new String(blogEntry.getSubject()), content));
}
@ -337,7 +337,7 @@ public class BlogComments {
.append(sendMailTo)
.append("\nFrom: ")
.append("yacy@")
.append(sb.webIndex.seedDB.mySeed().getName())
.append(sb.webIndex.peers().mySeed().getName())
.append("\nSubject: [YaCy] ")
.append(msgEntry.subject().replace('\n', ' '))
.append("\nDate: ")

View File

@ -93,7 +93,7 @@ public class Bookmarks {
*/
// set peer address
final String address = sb.webIndex.seedDB.mySeed().getPublicAddress();
final String address = sb.webIndex.peers().mySeed().getPublicAddress();
prop.put("address", address);
//defaultvalues
@ -184,7 +184,7 @@ public class Bookmarks {
final bookmarksDB.Bookmark bookmark = sb.bookmarksDB.getBookmark(urlHash);
if (bookmark == null) {
// try to get the bookmark from the LURL database
final MetadataRowContainer urlentry = sb.webIndex.getURL(urlHash, null, 0);
final MetadataRowContainer urlentry = sb.webIndex.metadata().load(urlHash, null, 0);
plasmaParserDocument document = null;
if (urlentry != null) {
final URLMetadata metadata = urlentry.metadata();
@ -433,7 +433,7 @@ public class Bookmarks {
map.put("title", title.replace(',', ' '));
map.put("description", description.replace(',', ' '));
map.put("tags", tagsString.replace(',', ' '));
sb.webIndex.seedDB.newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.seedDB.mySeed(), yacyNewsPool.CATEGORY_BOOKMARK_ADD, map));
sb.webIndex.peers().newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.peers().mySeed(), yacyNewsPool.CATEGORY_BOOKMARK_ADD, map));
}
}

View File

@ -177,7 +177,7 @@ public class ConfigAppearance_p {
prop.put("popupStatus", 1);
}
String myaddress = sb.webIndex.seedDB.mySeed().getPublicAddress();
String myaddress = sb.webIndex.peers().mySeed().getPublicAddress();
if (myaddress == null) myaddress = "localhost:" + sb.getConfig("port", "8080");
prop.put("myaddress", myaddress);
return prop;

View File

@ -70,7 +70,7 @@ public class ConfigBasic {
// starting a peer ping
//boolean doPeerPing = false;
if ((sb.webIndex.seedDB.mySeed().isVirgin()) || (sb.webIndex.seedDB.mySeed().isJunior())) {
if ((sb.webIndex.peers().mySeed().isVirgin()) || (sb.webIndex.peers().mySeed().isJunior())) {
serverInstantBusyThread.oneTimeJob(sb.yc, "peerPing", null, 0);
//doPeerPing = true;
}
@ -90,7 +90,7 @@ public class ConfigBasic {
}
// check if peer name already exists
final yacySeed oldSeed = sb.webIndex.seedDB.lookupByName(peerName);
final yacySeed oldSeed = sb.webIndex.peers().lookupByName(peerName);
if ((oldSeed == null) && (!(env.getConfig("peerName", "").equals(peerName)))) {
// the name is new
final boolean nameOK = Pattern.compile("[A-Za-z0-9\\-_]{3,80}").matcher(peerName).matches();
@ -190,7 +190,7 @@ public class ConfigBasic {
// check if values are proper
final boolean properPassword = (sb.getConfig(httpd.ADMIN_ACCOUNT_B64MD5, "").length() > 0) || sb.getConfigBool("adminAccountForLocalhost", false);
final boolean properName = (env.getConfig("peerName","").length() >= 3) && (!(yacySeed.isDefaultPeerName(env.getConfig("peerName",""))));
final boolean properPort = (sb.webIndex.seedDB.mySeed().isSenior()) || (sb.webIndex.seedDB.mySeed().isPrincipal());
final boolean properPort = (sb.webIndex.peers().mySeed().isSenior()) || (sb.webIndex.peers().mySeed().isPrincipal());
if ((env.getConfig("defaultFiles", "").startsWith("ConfigBasic.html,"))) {
env.setConfig("defaultFiles", env.getConfig("defaultFiles", "").substring(17));

View File

@ -124,10 +124,10 @@ public class ConfigNetwork_p {
if (indexReceive) {
sb.setConfig(plasmaSwitchboardConstants.INDEX_RECEIVE_ALLOW, true);
sb.webIndex.seedDB.mySeed().setFlagAcceptRemoteIndex(true);
sb.webIndex.peers().mySeed().setFlagAcceptRemoteIndex(true);
} else {
sb.setConfig(plasmaSwitchboardConstants.INDEX_RECEIVE_ALLOW, false);
sb.webIndex.seedDB.mySeed().setFlagAcceptRemoteIndex(false);
sb.webIndex.peers().mySeed().setFlagAcceptRemoteIndex(false);
}
if (post.get("indexReceiveBlockBlacklist", "").equals("on")) {
@ -137,7 +137,7 @@ public class ConfigNetwork_p {
}
if (post.containsKey("peertags")) {
sb.webIndex.seedDB.mySeed().setPeerTags(serverCodings.string2set(normalizedList(post.get("peertags")), ","));
sb.webIndex.peers().mySeed().setPeerTags(serverCodings.string2set(normalizedList(post.get("peertags")), ","));
}
sb.setConfig("cluster.mode", post.get("cluster.mode", "publicpeer"));
@ -170,7 +170,7 @@ public class ConfigNetwork_p {
sb.setConfig("cluster.peers.yacydomain", checkYaCyDomainList(post.get("cluster.peers.yacydomain", "")));
// update the cluster hash set
sb.clusterhashes = sb.webIndex.seedDB.clusterHashes(sb.getConfig("cluster.peers.yacydomain", ""));
sb.clusterhashes = sb.webIndex.peers().clusterHashes(sb.getConfig("cluster.peers.yacydomain", ""));
}
}
@ -196,11 +196,11 @@ public class ConfigNetwork_p {
prop.put("indexReceiveChecked", (indexReceive) ? "1" : "0");
prop.put("indexReceiveBlockBlacklistChecked.on", (sb.getConfig("indexReceiveBlockBlacklist", "true").equals("true")) ? "1" : "0");
prop.put("indexReceiveBlockBlacklistChecked.off", (sb.getConfig("indexReceiveBlockBlacklist", "true").equals("true")) ? "0" : "1");
prop.putHTML("peertags", serverCodings.set2string(sb.webIndex.seedDB.mySeed().getPeerTags(), ",", false));
prop.putHTML("peertags", serverCodings.set2string(sb.webIndex.peers().mySeed().getPeerTags(), ",", false));
// set seed information directly
sb.webIndex.seedDB.mySeed().setFlagAcceptRemoteCrawl(sb.getConfigBool("crawlResponse", false));
sb.webIndex.seedDB.mySeed().setFlagAcceptRemoteIndex(indexReceive);
sb.webIndex.peers().mySeed().setFlagAcceptRemoteCrawl(sb.getConfigBool("crawlResponse", false));
sb.webIndex.peers().mySeed().setFlagAcceptRemoteIndex(indexReceive);
// set p2p/robinson mode flags and values
prop.put("p2p.checked", (indexDistribute || indexReceive) ? "1" : "0");

View File

@ -94,7 +94,7 @@ public class ConfigProfile_p {
// generate a news message
final Properties news = profile;
news.remove("comment");
sb.webIndex.seedDB.newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.seedDB.mySeed(), yacyNewsPool.CATEGORY_PROFILE_UPDATE, news));
sb.webIndex.peers().newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.peers().mySeed(), yacyNewsPool.CATEGORY_PROFILE_UPDATE, news));
//yacyCore.newsPool.publishMyNews(new yacyNewsRecord(yacyNewsRecord.CATEGORY_PROFILE_UPDATE, profile));
} catch(final IOException e) {
} finally {

View File

@ -44,7 +44,7 @@ public class ConfigRobotsTxt_p {
final servletProperties prop = new servletProperties();
final httpdRobotsTxtConfig rbc = ((plasmaSwitchboard)env).robotstxtConfig;
prop.put("clientname", sb.webIndex.seedDB.mySeed().getPublicAddress());
prop.put("clientname", sb.webIndex.peers().mySeed().getPublicAddress());
if (post != null) {
if (post.containsKey("save")) {

View File

@ -160,14 +160,14 @@ public final class Connections_p {
commandLine = urlRedir.getURL();
}
if ((dest != null) && (dest.equals(virtualHost))) dest = sb.webIndex.seedDB.mySeed().getName() + ".yacy";
if ((dest != null) && (dest.equals(virtualHost))) dest = sb.webIndex.peers().mySeed().getName() + ".yacy";
// determining if the source is a yacy host
yacySeed seed = null;
if (doNameLookup) {
seed = sb.webIndex.seedDB.lookupByIP(userAddress,true,false,false);
seed = sb.webIndex.peers().lookupByIP(userAddress,true,false,false);
if (seed != null) {
if ((seed.hash.equals(sb.webIndex.seedDB.mySeed().hash)) &&
if ((seed.hash.equals(sb.webIndex.peers().mySeed().hash)) &&
(!seed.get(yacySeed.PORT,"").equals(Integer.toString(userPort)))) {
seed = null;
}

View File

@ -105,7 +105,7 @@ public class CrawlResults {
final String hash = post.get("hash", null);
if (hash != null) {
// delete from database
sb.webIndex.removeURL(hash);
sb.webIndex.metadata().remove(hash);
}
}
@ -115,7 +115,7 @@ public class CrawlResults {
if (hashpart != null) {
// delete all urls for this domain from database
try {
sb.webIndex.deleteDomain(hashpart);
sb.webIndex.metadata().deleteDomain(hashpart);
sb.crawlResults.deleteDomain(tabletype, domain, hashpart);
} catch (IOException e) {
e.printStackTrace();
@ -179,7 +179,7 @@ public class CrawlResults {
executorHash = sb.crawlResults.getExecutorHash(tabletype, i);
urlHash = sb.crawlResults.getUrlHash(tabletype, i);
try {
urle = sb.webIndex.getURL(urlHash, null, 0);
urle = sb.webIndex.metadata().load(urlHash, null, 0);
if(urle == null) {
Log.logWarning("PLASMA", "CrawlResults: URL not in index for crawl result "+ i +" with hash "+ urlHash);
urlstr = null;
@ -190,8 +190,8 @@ public class CrawlResults {
urlstr = metadata.url().toNormalform(false, true);
urltxt = nxTools.shortenURLString(urlstr, 72); // shorten the string text like a URL
}
initiatorSeed = sb.webIndex.seedDB.getConnected(initiatorHash);
executorSeed = sb.webIndex.seedDB.getConnected(executorHash);
initiatorSeed = sb.webIndex.peers().getConnected(initiatorHash);
executorSeed = sb.webIndex.peers().getConnected(executorHash);
prop.put("table_indexed_" + cnt + "_dark", (dark) ? "1" : "0");
prop.put("table_indexed_" + cnt + "_feedbackpage", "CrawlResults.html");

View File

@ -39,7 +39,7 @@ public class CrawlStart_p {
final serverObjects prop = new serverObjects();
// define visible variables
String a = sb.webIndex.seedDB.mySeed().getPublicAddress();
String a = sb.webIndex.peers().mySeed().getPublicAddress();
boolean intranet = sb.getConfig(plasmaSwitchboardConstants.NETWORK_NAME, "").equals("intranet");
String repository = "http://" + ((a == null) ? "localhost:" + sb.getConfig("port", "8080") : a) + "/repository/";
prop.put("starturl", (intranet) ? repository : "http://");

View File

@ -43,7 +43,7 @@ public class IndexCleaner_p {
//prop.putHTML("bla", "post!=null");
if (post.get("action").equals("ustart")) {
if (urldbCleanerThread==null || !urldbCleanerThread.isAlive()) {
urldbCleanerThread = sb.webIndex.getURLCleaner(plasmaSwitchboard.urlBlacklist);
urldbCleanerThread = sb.webIndex.metadata().getBlacklistCleaner(plasmaSwitchboard.urlBlacklist);
urldbCleanerThread.start();
}
else {
@ -77,7 +77,7 @@ public class IndexCleaner_p {
//prop.put("bla", "post==null");
if (urldbCleanerThread!=null) {
prop.put("urldb", "1");
prop.putNum("urldb_percentUrls", ((double)urldbCleanerThread.totalSearchedUrls/sb.webIndex.countURL())*100);
prop.putNum("urldb_percentUrls", ((double)urldbCleanerThread.totalSearchedUrls/sb.webIndex.metadata().size())*100);
prop.putNum("urldb_blacklisted", urldbCleanerThread.blacklistedUrls);
prop.putNum("urldb_total", urldbCleanerThread.totalSearchedUrls);
prop.putHTML("urldb_lastBlacklistedUrl", urldbCleanerThread.lastBlacklistedUrl);

View File

@ -187,14 +187,14 @@ public class IndexControlRWIs_p {
if (host.length() != 0) {
if (host.length() == 12) {
// the host string is a peer hash
seed = sb.webIndex.seedDB.getConnected(host);
seed = sb.webIndex.peers().getConnected(host);
} else {
// the host string can be a host name
seed = sb.webIndex.seedDB.lookupByName(host);
seed = sb.webIndex.peers().lookupByName(host);
}
} else {
host = post.get("hostHash", ""); // if input field is empty, get from select box
seed = sb.webIndex.seedDB.getConnected(host);
seed = sb.webIndex.peers().getConnected(host);
}
// prepare index
@ -209,7 +209,7 @@ public class IndexControlRWIs_p {
MetadataRowContainer lurl;
while (urlIter.hasNext()) {
iEntry = urlIter.next();
lurl = sb.webIndex.getURL(iEntry.urlHash(), null, 0);
lurl = sb.webIndex.metadata().load(iEntry.urlHash(), null, 0);
if (lurl == null) {
unknownURLEntries.add(iEntry.urlHash());
urlIter.remove();
@ -269,8 +269,8 @@ public class IndexControlRWIs_p {
yacyURL url;
for (int i=0; i<urlx.length; i++) {
urlHashes.add(urlx[i]);
final MetadataRowContainer e = sb.webIndex.getURL(urlx[i], null, 0);
sb.webIndex.removeURL(urlx[i]);
final MetadataRowContainer e = sb.webIndex.metadata().load(urlx[i], null, 0);
sb.webIndex.metadata().remove(urlx[i]);
if (e != null) {
url = e.metadata().url();
pw.println(url.getHost() + "/" + url.getFile());
@ -297,8 +297,8 @@ public class IndexControlRWIs_p {
yacyURL url;
for (int i=0; i<urlx.length; i++) {
urlHashes.add(urlx[i]);
final MetadataRowContainer e = sb.webIndex.getURL(urlx[i], null, 0);
sb.webIndex.removeURL(urlx[i]);
final MetadataRowContainer e = sb.webIndex.metadata().load(urlx[i], null, 0);
sb.webIndex.metadata().remove(urlx[i]);
if (e != null) {
url = e.metadata().url();
pw.println(url.getHost() + "/.*");

View File

@ -53,7 +53,7 @@ public class IndexControlURLs_p {
prop.put("urlstring", "");
prop.put("urlhash", "");
prop.put("result", "");
prop.put("ucount", Integer.toString(sb.webIndex.countURL()));
prop.put("ucount", Integer.toString(sb.webIndex.metadata().size()));
prop.put("otherHosts", "");
prop.put("genUrlProfile", 0);
prop.put("statistics", 1);
@ -62,7 +62,7 @@ public class IndexControlURLs_p {
prop.put("reload", 0);
// show export messages
final MetadataRepository.Export export = sb.webIndex.exportURL();
final MetadataRepository.Export export = sb.webIndex.metadata().export();
if ((export != null) && (export.isAlive())) {
// there is currently a running export
prop.put("lurlexport", 2);
@ -116,7 +116,7 @@ public class IndexControlURLs_p {
}
if (post.containsKey("urlhashdelete")) {
final MetadataRowContainer entry = sb.webIndex.getURL(urlhash, null, 0);
final MetadataRowContainer entry = sb.webIndex.metadata().load(urlhash, null, 0);
if (entry == null) {
prop.putHTML("result", "No Entry for URL hash " + urlhash + "; nothing deleted.");
} else {
@ -150,7 +150,7 @@ public class IndexControlURLs_p {
final yacyURL url = new yacyURL(urlstring, null);
urlhash = url.hash();
prop.put("urlhash", urlhash);
final MetadataRowContainer entry = sb.webIndex.getURL(urlhash, null, 0);
final MetadataRowContainer entry = sb.webIndex.metadata().load(urlhash, null, 0);
if (entry == null) {
prop.putHTML("urlstring", "unknown url: " + urlstring);
prop.put("urlhash", "");
@ -167,7 +167,7 @@ public class IndexControlURLs_p {
}
if (post.containsKey("urlhashsearch")) {
final MetadataRowContainer entry = sb.webIndex.getURL(urlhash, null, 0);
final MetadataRowContainer entry = sb.webIndex.metadata().load(urlhash, null, 0);
if (entry == null) {
prop.putHTML("result", "No Entry for URL hash " + urlhash);
} else {
@ -182,7 +182,7 @@ public class IndexControlURLs_p {
// generate list
if (post.containsKey("urlhashsimilar")) {
try {
final Iterator<MetadataRowContainer> entryIt = new RotateIterator<MetadataRowContainer>(sb.webIndex.entriesURL(true, urlhash), new String(Base64Order.zero((urlhash == null ? 0 : urlhash.length()))), sb.webIndex.size());
final Iterator<MetadataRowContainer> entryIt = new RotateIterator<MetadataRowContainer>(sb.webIndex.metadata().entries(true, urlhash), new String(Base64Order.zero((urlhash == null ? 0 : urlhash.length()))), sb.webIndex.size());
final StringBuilder result = new StringBuilder("Sequential List of URL-Hashes:<br />");
MetadataRowContainer entry;
int i = 0;
@ -229,7 +229,7 @@ public class IndexControlURLs_p {
final File f = new File(s);
f.getParentFile().mkdirs();
final String filter = post.get("exportfilter", ".*");
final MetadataRepository.Export running = sb.webIndex.exportURL(f, filter, format, dom);
final MetadataRepository.Export running = sb.webIndex.metadata().export(f, filter, null, format, dom);
prop.put("lurlexport_exportfile", s);
prop.put("lurlexport_urlcount", running.count());
@ -242,7 +242,7 @@ public class IndexControlURLs_p {
if (post.containsKey("deletedomain")) {
String hp = post.get("hashpart");
try {
sb.webIndex.deleteDomain(hp);
sb.webIndex.metadata().deleteDomain(hp);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
@ -258,7 +258,7 @@ public class IndexControlURLs_p {
prop.put("statistics_lines", count);
int cnt = 0;
try {
statsiter = sb.webIndex.statistics(count);
statsiter = sb.webIndex.metadata().statistics(count);
boolean dark = true;
MetadataRepository.hostStat hs;
while (statsiter.hasNext() && cnt < count) {
@ -281,7 +281,7 @@ public class IndexControlURLs_p {
}
// insert constants
prop.putNum("ucount", sb.webIndex.countURL());
prop.putNum("ucount", sb.webIndex.metadata().size());
// return rewrite properties
return prop;
}
@ -294,7 +294,7 @@ public class IndexControlURLs_p {
return prop;
}
final URLMetadata metadata = entry.metadata();
final MetadataRowContainer le = ((entry.referrerHash() == null) || (entry.referrerHash().length() != yacySeedDB.commonHashLength)) ? null : switchboard.webIndex.getURL(entry.referrerHash(), null, 0);
final MetadataRowContainer le = ((entry.referrerHash() == null) || (entry.referrerHash().length() != yacySeedDB.commonHashLength)) ? null : switchboard.webIndex.metadata().load(entry.referrerHash(), null, 0);
if (metadata.url() == null) {
prop.put("genUrlProfile", "1");
prop.put("genUrlProfile_urlhash", urlhash);

View File

@ -116,7 +116,7 @@ public class IndexCreateIndexingQueue_p {
if ((pcentry != null)&&(pcentry.url() != null)) {
final long entrySize = pcentry.size();
totalSize += entrySize;
initiator = sb.webIndex.seedDB.getConnected(pcentry.initiator());
initiator = sb.webIndex.peers().getConnected(pcentry.initiator());
prop.put("indexing-queue_list_"+entryCount+"_dark", inProcess ? "2" : (dark ? "1" : "0"));
prop.putHTML("indexing-queue_list_"+entryCount+"_initiator", ((initiator == null) ? "proxy" : initiator.getName()));
prop.put("indexing-queue_list_"+entryCount+"_depth", pcentry.depth());
@ -163,8 +163,8 @@ public class IndexCreateIndexingQueue_p {
initiatorHash = entry.initiator();
executorHash = entry.executor();
initiatorSeed = sb.webIndex.seedDB.getConnected(initiatorHash);
executorSeed = sb.webIndex.seedDB.getConnected(executorHash);
initiatorSeed = sb.webIndex.peers().getConnected(initiatorHash);
executorSeed = sb.webIndex.peers().getConnected(executorHash);
prop.putHTML("rejected_list_"+j+"_initiator", ((initiatorSeed == null) ? "proxy" : initiatorSeed.getName()));
prop.putHTML("rejected_list_"+j+"_executor", ((executorSeed == null) ? "proxy" : executorSeed.getName()));
prop.putHTML("rejected_list_"+j+"_url", url.toNormalform(false, true));

View File

@ -50,7 +50,7 @@ public class IndexCreateLoaderQueue_p {
for (int i = 0; i < w.length; i++) {
if (w[i] == null) continue;
initiator = sb.webIndex.seedDB.getConnected(w[i].initiator());
initiator = sb.webIndex.peers().getConnected(w[i].initiator());
prop.put("loader-set_list_"+count+"_dark", dark ? "1" : "0");
prop.putHTML("loader-set_list_"+count+"_initiator", ((initiator == null) ? "proxy" : initiator.getName()));
prop.put("loader-set_list_"+count+"_depth", w[i].depth());

View File

@ -96,7 +96,7 @@ public class IndexCreateWWWGlobalQueue_p {
for (i = 0; (i < crawlerList.size()) && (showNum < showLimit); i++) {
urle = crawlerList.get(i);
if ((urle != null)&&(urle.url()!=null)) {
initiator = sb.webIndex.seedDB.getConnected(urle.initiator());
initiator = sb.webIndex.peers().getConnected(urle.initiator());
profileHandle = urle.profileHandle();
profileEntry = (profileHandle == null) ? null : sb.webIndex.profilesActiveCrawls.getEntry(profileHandle);
prop.put("crawler-queue_list_"+showNum+"_dark", dark ? "1" : "0");

View File

@ -164,7 +164,7 @@ public class IndexCreateWWWLocalQueue_p {
for (i = 0; (i < crawlerList.size()) && (showNum < showLimit); i++) {
urle = crawlerList.get(i);
if ((urle != null)&&(urle.url()!=null)) {
initiator = sb.webIndex.seedDB.getConnected(urle.initiator());
initiator = sb.webIndex.peers().getConnected(urle.initiator());
profileHandle = urle.profileHandle();
profileEntry = (profileHandle == null) ? null : sb.webIndex.profilesActiveCrawls.getEntry(profileHandle);
prop.put("crawler-queue_list_"+showNum+"_dark", dark ? "1" : "0");

View File

@ -96,7 +96,7 @@ public class IndexCreateWWWRemoteQueue_p {
for (i = 0; (i < crawlerList.size()) && (showNum < showLimit); i++) {
urle = crawlerList.get(i);
if (urle != null && urle.url() != null) {
initiator = sb.webIndex.seedDB.getConnected(urle.initiator());
initiator = sb.webIndex.peers().getConnected(urle.initiator());
profileHandle = urle.profileHandle();
profileEntry = (profileHandle == null) ? null : sb.webIndex.profilesActiveCrawls.getEntry(profileHandle);
prop.put("crawler-queue_list_" + showNum + "_dark", dark ? "1" : "0");

View File

@ -107,7 +107,7 @@ public final class IndexImport_p {
}
prop.putNum("wcount", switchboard.webIndex.size());
prop.putNum("ucount", switchboard.webIndex.countURL());
prop.putNum("ucount", switchboard.webIndex.metadata().size());
/*
* Loop over all currently running jobs

View File

@ -56,7 +56,7 @@ public class IndexShare_p {
prop.put("dtable", "");
prop.put("rtable", "");
prop.putNum("wcount", switchboard.webIndex.size());
prop.putNum("ucount", switchboard.webIndex.countURL());
prop.putNum("ucount", switchboard.webIndex.metadata().size());
return prop; // be save
}
@ -69,7 +69,7 @@ public class IndexShare_p {
// insert constants
prop.putNum("wcount", switchboard.webIndex.size());
prop.putNum("ucount", switchboard.webIndex.countURL());
prop.putNum("ucount", switchboard.webIndex.metadata().size());
// return rewrite properties
return prop;

View File

@ -67,14 +67,14 @@ public class MessageSend_p {
// open an editor page for the message
// first ask if the other peer is online, and also what kind of document it accepts
final HashMap<String, String> result = yacyClient.permissionMessage(sb.webIndex.seedDB, hash);
final HashMap<String, String> result = yacyClient.permissionMessage(sb.webIndex.peers(), hash);
//System.out.println("DEBUG: permission request result = " + result.toString());
String peerName;
yacySeed targetPeer = null;
if (hash.equals(sb.webIndex.seedDB.mySeed().hash)) {
peerName = sb.webIndex.seedDB.mySeed().get(yacySeed.NAME,"nameless");
if (hash.equals(sb.webIndex.peers().mySeed().hash)) {
peerName = sb.webIndex.peers().mySeed().get(yacySeed.NAME,"nameless");
} else {
targetPeer = sb.webIndex.seedDB.getConnected(hash);
targetPeer = sb.webIndex.peers().getConnected(hash);
if (targetPeer == null)
peerName = "nameless";
else
@ -88,7 +88,7 @@ public class MessageSend_p {
prop.put("mode_permission", "0");
if (targetPeer != null) {
sb.webIndex.seedDB.peerActions.peerDeparture(targetPeer, "peer responded upon message send request: " + response);
sb.webIndex.peers().peerActions.peerDeparture(targetPeer, "peer responded upon message send request: " + response);
}
} else {
prop.put("mode_permission", "1");
@ -131,7 +131,7 @@ public class MessageSend_p {
} catch (final UnsupportedEncodingException e) {
mb = message.getBytes();
}
final HashMap<String, String> result = yacyClient.postMessage(sb.webIndex.seedDB, hash, subject, mb);
final HashMap<String, String> result = yacyClient.postMessage(sb.webIndex.peers(), hash, subject, mb);
//message has been sent
prop.put("mode_status_response", result.get("response"));

View File

@ -55,18 +55,18 @@ public class Messages_p {
final serverObjects prop = new serverObjects();
// set peer address / name
final String peerAddress = sb.webIndex.seedDB.mySeed().getPublicAddress();
final String peerName = sb.webIndex.seedDB.mySeed().getName();
final String peerAddress = sb.webIndex.peers().mySeed().getPublicAddress();
final String peerName = sb.webIndex.peers().mySeed().getName();
prop.put("peerAddress", peerAddress);
prop.putXML("peerName", peerName);
// List known hosts for message sending (from Blacklist_p.java)
if (sb.webIndex.seedDB != null && sb.webIndex.seedDB.sizeConnected() > 0) {
if (sb.webIndex.peers() != null && sb.webIndex.peers().sizeConnected() > 0) {
prop.put("peersKnown", "1");
int peerCount = 0;
try {
final TreeMap<String, String> hostList = new TreeMap<String, String>();
final Iterator<yacySeed> e = sb.webIndex.seedDB.seedsConnected(true, false, null, (float) 0.0);
final Iterator<yacySeed> e = sb.webIndex.peers().seedsConnected(true, false, null, (float) 0.0);
while (e.hasNext()) {
final yacySeed seed = e.next();
if (seed != null) hostList.put(seed.get(yacySeed.NAME, "nameless"),seed.hash);

View File

@ -65,30 +65,30 @@ public class Network {
prop.putHTML("page_networkName", sb.getConfig(plasmaSwitchboardConstants.NETWORK_NAME, "unspecified"));
final boolean overview = (post == null) || (post.get("page", "0").equals("0"));
final String mySeedType = sb.webIndex.seedDB.mySeed().get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_VIRGIN);
final String mySeedType = sb.webIndex.peers().mySeed().get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_VIRGIN);
final boolean iAmActive = (mySeedType.equals(yacySeed.PEERTYPE_SENIOR) || mySeedType.equals(yacySeed.PEERTYPE_PRINCIPAL));
if (overview) {
long accActLinks = sb.webIndex.seedDB.countActiveURL();
long accActWords = sb.webIndex.seedDB.countActiveRWI();
final long accPassLinks = sb.webIndex.seedDB.countPassiveURL();
final long accPassWords = sb.webIndex.seedDB.countPassiveRWI();
long accPotLinks = sb.webIndex.seedDB.countPotentialURL();
long accPotWords = sb.webIndex.seedDB.countPotentialRWI();
long accActLinks = sb.webIndex.peers().countActiveURL();
long accActWords = sb.webIndex.peers().countActiveRWI();
final long accPassLinks = sb.webIndex.peers().countPassiveURL();
final long accPassWords = sb.webIndex.peers().countPassiveRWI();
long accPotLinks = sb.webIndex.peers().countPotentialURL();
long accPotWords = sb.webIndex.peers().countPotentialRWI();
int conCount = sb.webIndex.seedDB.sizeConnected();
final int disconCount = sb.webIndex.seedDB.sizeDisconnected();
int potCount = sb.webIndex.seedDB.sizePotential();
int conCount = sb.webIndex.peers().sizeConnected();
final int disconCount = sb.webIndex.peers().sizeDisconnected();
int potCount = sb.webIndex.peers().sizePotential();
// final boolean complete = ((post == null) ? false : post.get("links", "false").equals("true"));
final long otherppm = sb.webIndex.seedDB.countActivePPM();
final double otherqpm = sb.webIndex.seedDB.countActiveQPM();
final long otherppm = sb.webIndex.peers().countActivePPM();
final double otherqpm = sb.webIndex.peers().countActiveQPM();
long myppm = 0;
double myqph = 0d;
// create own peer info
final yacySeed seed = sb.webIndex.seedDB.mySeed();
if (sb.webIndex.seedDB.mySeed() != null){ //our Peer
final yacySeed seed = sb.webIndex.peers().mySeed();
if (sb.webIndex.peers().mySeed() != null){ //our Peer
// update seed info
sb.updateMySeed();
@ -104,17 +104,17 @@ public class Network {
// my-info
prop.putHTML("table_my-name", seed.get(yacySeed.NAME, "-") );
prop.put("table_my-hash", seed.hash );
if (sb.webIndex.seedDB.mySeed().isVirgin()) {
if (sb.webIndex.peers().mySeed().isVirgin()) {
prop.put("table_my-info", 0);
} else if(sb.webIndex.seedDB.mySeed().isJunior()) {
} else if(sb.webIndex.peers().mySeed().isJunior()) {
prop.put("table_my-info", 1);
accPotLinks += LCount;
accPotWords += ICount;
} else if(sb.webIndex.seedDB.mySeed().isSenior()) {
} else if(sb.webIndex.peers().mySeed().isSenior()) {
prop.put("table_my-info", 2);
accActLinks += LCount;
accActWords += ICount;
} else if(sb.webIndex.seedDB.mySeed().isPrincipal()) {
} else if(sb.webIndex.peers().mySeed().isPrincipal()) {
prop.put("table_my-info", 3);
accActLinks += LCount;
accActWords += ICount;
@ -185,13 +185,13 @@ public class Network {
yacySeed peer = new yacySeed(post.get("peerHash"),map);
sb.updateMySeed();
final int added = yacyClient.publishMySeed(sb.webIndex.seedDB.mySeed(), sb.webIndex.seedDB.peerActions, peer.getPublicAddress(), peer.hash);
final int added = yacyClient.publishMySeed(sb.webIndex.peers().mySeed(), sb.webIndex.peers().peerActions, peer.getPublicAddress(), peer.hash);
if (added <= 0) {
prop.put("table_comment",1);
prop.putHTML("table_comment_status","publish: disconnected peer '" + peer.getName() + "/" + post.get("peerHash") + "' from " + peer.getPublicAddress());
} else {
peer = sb.webIndex.seedDB.getConnected(peer.hash);
peer = sb.webIndex.peers().getConnected(peer.hash);
if (peer == null) {
prop.put("table_comment",1);
prop.putHTML("table_comment_status","publish: disconnected peer 'UNKNOWN/" + post.get("peerHash") + "' from UNKNOWN");
@ -217,14 +217,14 @@ public class Network {
final int page = (post == null ? 1 : Integer.parseInt(post.get("page", "1")));
final int maxCount = (post == null ? 300 : Integer.parseInt(post.get("maxCount", "300")));
int conCount = 0;
if (sb.webIndex.seedDB == null) {
if (sb.webIndex.peers() == null) {
prop.put("table", 0);//no remote senior/principal proxies known"
} else {
int size = 0;
switch (page) {
case 1 : size = sb.webIndex.seedDB.sizeConnected(); break;
case 2 : size = sb.webIndex.seedDB.sizeDisconnected(); break;
case 3 : size = sb.webIndex.seedDB.sizePotential(); break;
case 1 : size = sb.webIndex.peers().sizeConnected(); break;
case 2 : size = sb.webIndex.peers().sizeDisconnected(); break;
case 3 : size = sb.webIndex.peers().sizePotential(); break;
default: break;
}
if (size == 0) {
@ -233,7 +233,7 @@ public class Network {
// add temporary the own seed to the database
if (iAmActive) {
sb.updateMySeed();
sb.webIndex.seedDB.addConnected(sb.webIndex.seedDB.mySeed());
sb.webIndex.peers().addConnected(sb.webIndex.peers().mySeed());
}
// find updated Information using YaCyNews
@ -242,7 +242,7 @@ public class Network {
final HashMap<String, Map<String, String>> updatedBlog = new HashMap<String, Map<String, String>>();
final HashMap<String, String> isCrawling = new HashMap<String, String>();
yacyNewsRecord record;
final Iterator<yacyNewsRecord> recordIterator = sb.webIndex.seedDB.newsPool.recordIterator(yacyNewsPool.INCOMING_DB, true);
final Iterator<yacyNewsRecord> recordIterator = sb.webIndex.peers().newsPool.recordIterator(yacyNewsPool.INCOMING_DB, true);
while (recordIterator.hasNext()) {
record = recordIterator.next();
if (record == null) {
@ -265,9 +265,9 @@ public class Network {
final boolean order = (post != null && post.get("order", "down").equals("up"));
final String sort = (post == null ? null : post.get("sort", null));
switch (page) {
case 1 : e = sb.webIndex.seedDB.seedsSortedConnected(order, (sort == null ? yacySeed.LCOUNT : sort)); break;
case 2 : e = sb.webIndex.seedDB.seedsSortedDisconnected(order, (sort == null ? yacySeed.LASTSEEN : sort)); break;
case 3 : e = sb.webIndex.seedDB.seedsSortedPotential(order, (sort == null ? yacySeed.LASTSEEN : sort)); break;
case 1 : e = sb.webIndex.peers().seedsSortedConnected(order, (sort == null ? yacySeed.LCOUNT : sort)); break;
case 2 : e = sb.webIndex.peers().seedsSortedDisconnected(order, (sort == null ? yacySeed.LASTSEEN : sort)); break;
case 3 : e = sb.webIndex.peers().seedsSortedPotential(order, (sort == null ? yacySeed.LASTSEEN : sort)); break;
default: break;
}
String startURL;
@ -308,7 +308,7 @@ public class Network {
prop.put(STR_TABLE_LIST + conCount + "_updatedBlog", 0);
prop.put(STR_TABLE_LIST + conCount + "_isCrawling", 0);
if (conCount >= maxCount) { break; }
if (sb.webIndex.seedDB != null && sb.webIndex.seedDB.mySeed() != null && seed.hash != null && seed.hash.equals(sb.webIndex.seedDB.mySeed().hash)) {
if (sb.webIndex.peers() != null && sb.webIndex.peers().mySeed() != null && seed.hash != null && seed.hash.equals(sb.webIndex.peers().mySeed().hash)) {
prop.put(STR_TABLE_LIST + conCount + "_dark", 2);
} else {
prop.put(STR_TABLE_LIST + conCount + "_dark", ((dark) ? 1 : 0) ); dark=!dark;
@ -346,11 +346,11 @@ public class Network {
prop.putHTML(STR_TABLE_LIST + conCount + "_shortname", shortname);
prop.putHTML(STR_TABLE_LIST + conCount + "_fullname", seed.get(yacySeed.NAME, "deadlink"));
userAgent = null;
if (seed.hash != null && seed.hash.equals(sb.webIndex.seedDB.mySeed().hash)) {
if (seed.hash != null && seed.hash.equals(sb.webIndex.peers().mySeed().hash)) {
userAgent = HTTPLoader.yacyUserAgent;
location = httpClient.generateLocation();
} else {
userAgent = sb.webIndex.seedDB.peerActions.getUserAgent(seed.getIP());
userAgent = sb.webIndex.peers().peerActions.getUserAgent(seed.getIP());
location = parseLocationInUserAgent(userAgent);
}
prop.put(STR_TABLE_LIST + conCount + "_location", location);
@ -435,7 +435,7 @@ public class Network {
} // seed != null
} // while
}
if (iAmActive) { sb.webIndex.seedDB.removeMySeed(); }
if (iAmActive) { sb.webIndex.peers().removeMySeed(); }
prop.putNum("table_list", conCount);
prop.put("table", 1);
prop.putNum("table_num", conCount);

View File

@ -64,7 +64,7 @@ public class NetworkPicture {
if (passiveLimit > 1000000) passiveLimit = 1000000;
if (potentialLimit > 1000000) potentialLimit = 1000000;
if (maxCount > 1000) maxCount = 1000;
return plasmaGrafics.getNetworkPicture(sb.webIndex.seedDB, 10000, width, height, passiveLimit, potentialLimit, maxCount, corona, env.getConfig(plasmaSwitchboardConstants.NETWORK_NAME, "unspecified"), env.getConfig("network.unit.description", "unspecified"), bgcolor);
return plasmaGrafics.getNetworkPicture(sb.webIndex.peers(), 10000, width, height, passiveLimit, potentialLimit, maxCount, corona, env.getConfig(plasmaSwitchboardConstants.NETWORK_NAME, "unspecified"), env.getConfig("network.unit.description", "unspecified"), bgcolor);
}
}

View File

@ -62,7 +62,7 @@ public class News {
if ((check.startsWith("del_")) && (post.get(check, "off").equals("on"))) {
id = check.substring(4);
try {
sb.webIndex.seedDB.newsPool.moveOff(tableID, id);
sb.webIndex.peers().newsPool.moveOff(tableID, id);
} catch (final IOException ee) {ee.printStackTrace();}
}
}
@ -75,9 +75,9 @@ public class News {
}
try {
if ((tableID == yacyNewsPool.PROCESSED_DB) || (tableID == yacyNewsPool.PUBLISHED_DB)) {
sb.webIndex.seedDB.newsPool.clear(tableID);
sb.webIndex.peers().newsPool.clear(tableID);
} else {
sb.webIndex.seedDB.newsPool.moveOffAll(tableID);
sb.webIndex.peers().newsPool.moveOffAll(tableID);
}
} catch (final IOException e) {
e.printStackTrace();
@ -90,19 +90,19 @@ public class News {
// show overview
prop.put("table", "0");
prop.put("page", "0");
prop.putNum("table_insize", sb.webIndex.seedDB.newsPool.size(yacyNewsPool.INCOMING_DB));
prop.putNum("table_prsize", sb.webIndex.seedDB.newsPool.size(yacyNewsPool.PROCESSED_DB));
prop.putNum("table_ousize", sb.webIndex.seedDB.newsPool.size(yacyNewsPool.OUTGOING_DB));
prop.putNum("table_pusize", sb.webIndex.seedDB.newsPool.size(yacyNewsPool.PUBLISHED_DB));
prop.putNum("table_insize", sb.webIndex.peers().newsPool.size(yacyNewsPool.INCOMING_DB));
prop.putNum("table_prsize", sb.webIndex.peers().newsPool.size(yacyNewsPool.PROCESSED_DB));
prop.putNum("table_ousize", sb.webIndex.peers().newsPool.size(yacyNewsPool.OUTGOING_DB));
prop.putNum("table_pusize", sb.webIndex.peers().newsPool.size(yacyNewsPool.PUBLISHED_DB));
} else {
// generate table
prop.put("table", "1");
prop.put("page", tableID + 1);
prop.put("table_page", tableID + 1);
if (sb.webIndex.seedDB != null) {
final int maxCount = Math.min(1000, sb.webIndex.seedDB.newsPool.size(tableID));
final Iterator<yacyNewsRecord> recordIterator = sb.webIndex.seedDB.newsPool.recordIterator(tableID, false);
if (sb.webIndex.peers() != null) {
final int maxCount = Math.min(1000, sb.webIndex.peers().newsPool.size(tableID));
final Iterator<yacyNewsRecord> recordIterator = sb.webIndex.peers().newsPool.recordIterator(tableID, false);
yacyNewsRecord record;
yacySeed seed;
int i = 0;
@ -110,8 +110,8 @@ public class News {
record = recordIterator.next();
if (record == null) continue;
seed = sb.webIndex.seedDB.getConnected(record.originator());
if (seed == null) seed = sb.webIndex.seedDB.getDisconnected(record.originator());
seed = sb.webIndex.peers().getConnected(record.originator());
if (seed == null) seed = sb.webIndex.peers().getDisconnected(record.originator());
final String category = record.category();
prop.put("table_list_" + i + "_id", record.id());
prop.putHTML("table_list_" + i + "_ori", (seed == null) ? record.originator() : seed.getName());
@ -179,7 +179,7 @@ public class News {
}
// adding the peer address
prop.put("address", sb.webIndex.seedDB.mySeed().getPublicAddress());
prop.put("address", sb.webIndex.peers().mySeed().getPublicAddress());
// return rewrite properties
return prop;

View File

@ -41,7 +41,7 @@ public class PerformanceGraph {
final int width = post.getInt("width", 660);
final int height = post.getInt("height", 240);
return plasmaProfiling.performanceGraph(width, height, sb.webIndex.countURL() + " URLS / " + sb.webIndex.collectionsSize() + " WORDS IN COLLECTIONS / " + sb.webIndex.cacheSize() + " WORDS IN CACHE");
return plasmaProfiling.performanceGraph(width, height, sb.webIndex.metadata().size() + " URLS / " + sb.webIndex.collectionsSize() + " WORDS IN COLLECTIONS / " + sb.webIndex.cacheSize() + " WORDS IN CACHE");
}
}

View File

@ -287,7 +287,7 @@ public class PerformanceQueues_p {
prop.put("minimumGlobalDelta", switchboard.crawlQueues.noticeURL.getMinimumGlobalDelta());
// table cache settings
prop.putNum("urlCacheSize", switchboard.webIndex.getURLwriteCacheSize());
prop.putNum("urlCacheSize", switchboard.webIndex.metadata().writeCacheSize());
prop.putNum("wordCacheSize", switchboard.webIndex.indexCacheSize());
prop.putNum("wordCacheSizeKBytes", switchboard.webIndex.indexCacheSizeBytes()/1024);
prop.putNum("maxURLinCache", switchboard.webIndex.maxURLinCache());

View File

@ -122,7 +122,7 @@ public class QuickCrawlLink_p {
}
final String urlhash = crawlingStartURL.hash();
sb.webIndex.removeURL(urlhash);
sb.webIndex.metadata().remove(urlhash);
sb.crawlQueues.noticeURL.removeByURLHash(urlhash);
sb.crawlQueues.errorURL.remove(urlhash);
@ -160,7 +160,7 @@ public class QuickCrawlLink_p {
// stack URL
String reasonString = null;
reasonString = sb.crawlStacker.stackCrawl(new CrawlEntry(
sb.webIndex.seedDB.mySeed().hash,
sb.webIndex.peers().mySeed().hash,
crawlingStartURL,
null,
(title==null)?"CRAWLING-ROOT":title,

View File

@ -41,7 +41,7 @@ public class SearchEventPicture {
final plasmaSwitchboard sb = (plasmaSwitchboard) env;
final String eventID = (String) header.get("event", plasmaSearchEvent.lastEventID);
if (eventID == null) return null;
final ymageMatrix yp = plasmaGrafics.getSearchEventPicture(sb.webIndex.seedDB, eventID);
final ymageMatrix yp = plasmaGrafics.getSearchEventPicture(sb.webIndex.peers(), eventID);
if (yp == null) return new ymageMatrix(1, 1, ymageMatrix.MODE_SUB, "000000"); // empty image
return yp;

View File

@ -234,7 +234,7 @@ public class SettingsAck_p {
} else {
serverCore.useStaticIP = true;
}
if (yacySeed.isProperIP(staticIP) == null) sb.webIndex.seedDB.mySeed().setIP(staticIP);
if (yacySeed.isProperIP(staticIP) == null) sb.webIndex.peers().mySeed().setIP(staticIP);
env.setConfig("staticIP", staticIP);
// server access data
@ -352,7 +352,7 @@ public class SettingsAck_p {
// getting the currently used uploading method
final String oldSeedUploadMethod = env.getConfig("seedUploadMethod","none");
final String newSeedUploadMethod = post.get("seedUploadMethod");
final String oldSeedURLStr = sb.webIndex.seedDB.mySeed().get(yacySeed.SEEDLIST, "");
final String oldSeedURLStr = sb.webIndex.peers().mySeed().get(yacySeed.SEEDLIST, "");
final String newSeedURLStr = post.get("seedURL");
final boolean seedUrlChanged = !oldSeedURLStr.equals(newSeedURLStr);
@ -363,7 +363,7 @@ public class SettingsAck_p {
if (seedUrlChanged || uploadMethodChanged) {
env.setConfig("seedUploadMethod", newSeedUploadMethod);
sb.webIndex.seedDB.mySeed().put(yacySeed.SEEDLIST, newSeedURLStr);
sb.webIndex.peers().mySeed().put(yacySeed.SEEDLIST, newSeedURLStr);
// try an upload
String error;

View File

@ -205,7 +205,7 @@ public final class Settings_p {
}
// general settings
prop.put("seedURL", sb.webIndex.seedDB.mySeed().get(yacySeed.SEEDLIST, ""));
prop.put("seedURL", sb.webIndex.peers().mySeed().get(yacySeed.SEEDLIST, ""));
/*
* Message forwarding configuration

View File

@ -180,31 +180,31 @@ public class Status {
// peer information
String thisHash = "";
final String thisName = sb.getConfig("peerName", "<nameless>");
if (sb.webIndex.seedDB.mySeed() == null) {
if (sb.webIndex.peers().mySeed() == null) {
thisHash = "not assigned";
prop.put("peerAddress", "0"); // not assigned
prop.put("peerStatistics", "0"); // unknown
} else {
final long uptime = 60000 * Long.parseLong(sb.webIndex.seedDB.mySeed().get(yacySeed.UPTIME, "0"));
final long uptime = 60000 * Long.parseLong(sb.webIndex.peers().mySeed().get(yacySeed.UPTIME, "0"));
prop.put("peerStatistics", "1");
prop.put("peerStatistics_uptime", DateFormatter.formatInterval(uptime));
prop.putNum("peerStatistics_pagesperminute", sb.webIndex.seedDB.mySeed().getPPM());
prop.putNum("peerStatistics_queriesperhour", Math.round(6000d * sb.webIndex.seedDB.mySeed().getQPM()) / 100d);
prop.putNum("peerStatistics_links", sb.webIndex.seedDB.mySeed().getLinkCount());
prop.put("peerStatistics_words", Formatter.number(sb.webIndex.seedDB.mySeed().get(yacySeed.ICOUNT, "0")));
prop.putNum("peerStatistics_disconnects", sb.webIndex.seedDB.peerActions.disconnects);
prop.put("peerStatistics_connects", Formatter.number(sb.webIndex.seedDB.mySeed().get(yacySeed.CCOUNT, "0")));
thisHash = sb.webIndex.seedDB.mySeed().hash;
if (sb.webIndex.seedDB.mySeed().getPublicAddress() == null) {
prop.putNum("peerStatistics_pagesperminute", sb.webIndex.peers().mySeed().getPPM());
prop.putNum("peerStatistics_queriesperhour", Math.round(6000d * sb.webIndex.peers().mySeed().getQPM()) / 100d);
prop.putNum("peerStatistics_links", sb.webIndex.peers().mySeed().getLinkCount());
prop.put("peerStatistics_words", Formatter.number(sb.webIndex.peers().mySeed().get(yacySeed.ICOUNT, "0")));
prop.putNum("peerStatistics_disconnects", sb.webIndex.peers().peerActions.disconnects);
prop.put("peerStatistics_connects", Formatter.number(sb.webIndex.peers().mySeed().get(yacySeed.CCOUNT, "0")));
thisHash = sb.webIndex.peers().mySeed().hash;
if (sb.webIndex.peers().mySeed().getPublicAddress() == null) {
prop.put("peerAddress", "0"); // not assigned + instructions
prop.put("warningGoOnline", "1");
} else {
prop.put("peerAddress", "1"); // Address
prop.put("peerAddress_address", sb.webIndex.seedDB.mySeed().getPublicAddress());
prop.put("peerAddress_address", sb.webIndex.peers().mySeed().getPublicAddress());
prop.putXML("peerAddress_peername", sb.getConfig("peerName", "<nameless>").toLowerCase());
}
}
final String peerStatus = ((sb.webIndex.seedDB.mySeed() == null) ? yacySeed.PEERTYPE_VIRGIN : sb.webIndex.seedDB.mySeed().get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_VIRGIN));
final String peerStatus = ((sb.webIndex.peers().mySeed() == null) ? yacySeed.PEERTYPE_VIRGIN : sb.webIndex.peers().mySeed().get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_VIRGIN));
if (peerStatus.equals(yacySeed.PEERTYPE_VIRGIN) && sb.getConfig(plasmaSwitchboardConstants.NETWORK_NAME, "").equals("freeworld")) {
prop.put(PEERSTATUS, "0");
prop.put("urgentStatusVirgin", "1");
@ -217,7 +217,7 @@ public class Status {
} else if (peerStatus.equals(yacySeed.PEERTYPE_PRINCIPAL)) {
prop.put(PEERSTATUS, "3");
prop.put("hintStatusPrincipal", "1");
prop.put("hintStatusPrincipal_seedURL", sb.webIndex.seedDB.mySeed().get(yacySeed.SEEDLIST, "?"));
prop.put("hintStatusPrincipal_seedURL", sb.webIndex.peers().mySeed().get(yacySeed.SEEDLIST, "?"));
}
prop.putHTML("peerName", thisName);
prop.put("hash", thisHash);
@ -246,14 +246,14 @@ public class Status {
prop.putHTML("seedServer_seedFile", sb.getConfig("seedFilePath", ""));
}
prop.put("seedServer_lastUpload",
DateFormatter.formatInterval(System.currentTimeMillis() - sb.webIndex.seedDB.lastSeedUpload_timeStamp));
DateFormatter.formatInterval(System.currentTimeMillis() - sb.webIndex.peers().lastSeedUpload_timeStamp));
} else {
prop.put(SEEDSERVER, "0"); // disabled
}
if (sb.webIndex.seedDB != null && sb.webIndex.seedDB.sizeConnected() > 0){
if (sb.webIndex.peers() != null && sb.webIndex.peers().sizeConnected() > 0){
prop.put("otherPeers", "1");
prop.putNum("otherPeers_num", sb.webIndex.seedDB.sizeConnected());
prop.putNum("otherPeers_num", sb.webIndex.peers().sizeConnected());
}else{
prop.put("otherPeers", "0"); // not online
}

View File

@ -78,7 +78,7 @@ public class Supporter {
map.put("urlhash", hash);
map.put("vote", "negative");
map.put("refid", post.get("refid", ""));
sb.webIndex.seedDB.newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.seedDB.mySeed(), yacyNewsPool.CATEGORY_SURFTIPP_VOTE_ADD, map));
sb.webIndex.peers().newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.peers().mySeed(), yacyNewsPool.CATEGORY_SURFTIPP_VOTE_ADD, map));
}
if ((post != null) && ((hash = post.get("votePositive", null)) != null)) {
if (!sb.verifyAuthentication(header, false)) {
@ -94,7 +94,7 @@ public class Supporter {
map.put("vote", "positive");
map.put("refid", post.get("refid", ""));
map.put("comment", post.get("comment", ""));
sb.webIndex.seedDB.newsPool.publishMyNews(new yacyNewsRecord(sb.webIndex.seedDB.mySeed(), yacyNewsPool.CATEGORY_SURFTIPP_VOTE_ADD, map));
sb.webIndex.peers().newsPool.publishMyNews(new yacyNewsRecord(sb.webIndex.peers().mySeed(), yacyNewsPool.CATEGORY_SURFTIPP_VOTE_ADD, map));
}
// create Supporter
@ -131,8 +131,8 @@ public class Supporter {
description = row.getColString(2,"UTF-8");
if ((url == null) || (title == null) || (description == null)) continue;
refid = row.getColString(3, null);
voted = (sb.webIndex.seedDB.newsPool.getSpecific(yacyNewsPool.OUTGOING_DB, yacyNewsPool.CATEGORY_SURFTIPP_VOTE_ADD, "refid", refid) != null) ||
(sb.webIndex.seedDB.newsPool.getSpecific(yacyNewsPool.PUBLISHED_DB, yacyNewsPool.CATEGORY_SURFTIPP_VOTE_ADD, "refid", refid) != null);
voted = (sb.webIndex.peers().newsPool.getSpecific(yacyNewsPool.OUTGOING_DB, yacyNewsPool.CATEGORY_SURFTIPP_VOTE_ADD, "refid", refid) != null) ||
(sb.webIndex.peers().newsPool.getSpecific(yacyNewsPool.PUBLISHED_DB, yacyNewsPool.CATEGORY_SURFTIPP_VOTE_ADD, "refid", refid) != null);
prop.put("supporter_results_" + i + "_authorized", authenticated ? "1" : "0");
prop.put("supporter_results_" + i + "_authorized_recommend", voted ? "0" : "1");
@ -168,9 +168,9 @@ public class Supporter {
}
private static void accumulateVotes(final plasmaSwitchboard sb, final HashMap<String, Integer> negativeHashes, final HashMap<String, Integer> positiveHashes, final int dbtype) {
final int maxCount = Math.min(1000, sb.webIndex.seedDB.newsPool.size(dbtype));
final int maxCount = Math.min(1000, sb.webIndex.peers().newsPool.size(dbtype));
yacyNewsRecord record;
final Iterator<yacyNewsRecord> recordIterator = sb.webIndex.seedDB.newsPool.recordIterator(dbtype, true);
final Iterator<yacyNewsRecord> recordIterator = sb.webIndex.peers().newsPool.recordIterator(dbtype, true);
int j = 0;
while ((recordIterator.hasNext()) && (j++ < maxCount)) {
record = recordIterator.next();
@ -198,9 +198,9 @@ public class Supporter {
final plasmaSwitchboard sb,
final HashMap<String, Entry> Supporter, final ScoreCluster<String> ranking, final Row rowdef,
final HashMap<String, Integer> negativeHashes, final HashMap<String, Integer> positiveHashes, final int dbtype) {
final int maxCount = Math.min(1000, sb.webIndex.seedDB.newsPool.size(dbtype));
final int maxCount = Math.min(1000, sb.webIndex.peers().newsPool.size(dbtype));
yacyNewsRecord record;
final Iterator<yacyNewsRecord> recordIterator = sb.webIndex.seedDB.newsPool.recordIterator(dbtype, true);
final Iterator<yacyNewsRecord> recordIterator = sb.webIndex.peers().newsPool.recordIterator(dbtype, true);
int j = 0;
String url = "", urlhash;
Row.Entry entry;
@ -213,7 +213,7 @@ public class Supporter {
entry = null;
if ((record.category().equals(yacyNewsPool.CATEGORY_PROFILE_UPDATE)) &&
((seed = sb.webIndex.seedDB.getConnected(record.originator())) != null)) try {
((seed = sb.webIndex.peers().getConnected(record.originator())) != null)) try {
url = record.attribute("homepage", "");
if (url.length() < 12) continue;
entry = rowdef.newEntry(new byte[][]{
@ -226,7 +226,7 @@ public class Supporter {
} catch (final IOException e) {}
if ((record.category().equals(yacyNewsPool.CATEGORY_PROFILE_BROADCAST)) &&
((seed = sb.webIndex.seedDB.getConnected(record.originator())) != null)) try {
((seed = sb.webIndex.peers().getConnected(record.originator())) != null)) try {
url = record.attribute("homepage", "");
if (url.length() < 12) continue;
entry = rowdef.newEntry(new byte[][]{

View File

@ -86,7 +86,7 @@ public class Surftips {
map.put("urlhash", hash);
map.put("vote", "negative");
map.put("refid", post.get("refid", ""));
sb.webIndex.seedDB.newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.seedDB.mySeed(), yacyNewsPool.CATEGORY_SURFTIPP_VOTE_ADD, map));
sb.webIndex.peers().newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.peers().mySeed(), yacyNewsPool.CATEGORY_SURFTIPP_VOTE_ADD, map));
}
if ((post != null) && ((hash = post.get("votePositive", null)) != null)) {
if (!sb.verifyAuthentication(header, false)) {
@ -102,7 +102,7 @@ public class Surftips {
map.put("vote", "positive");
map.put("refid", post.get("refid", ""));
map.put("comment", post.get("comment", ""));
sb.webIndex.seedDB.newsPool.publishMyNews(new yacyNewsRecord(sb.webIndex.seedDB.mySeed(), yacyNewsPool.CATEGORY_SURFTIPP_VOTE_ADD, map));
sb.webIndex.peers().newsPool.publishMyNews(new yacyNewsRecord(sb.webIndex.peers().mySeed(), yacyNewsPool.CATEGORY_SURFTIPP_VOTE_ADD, map));
}
// create surftips
@ -140,8 +140,8 @@ public class Surftips {
description = row.getColString(2,"UTF-8");
if ((url == null) || (title == null) || (description == null)) continue;
refid = row.getColString(3, null);
voted = (sb.webIndex.seedDB.newsPool.getSpecific(yacyNewsPool.OUTGOING_DB, yacyNewsPool.CATEGORY_SURFTIPP_VOTE_ADD, "refid", refid) != null) ||
(sb.webIndex.seedDB.newsPool.getSpecific(yacyNewsPool.PUBLISHED_DB, yacyNewsPool.CATEGORY_SURFTIPP_VOTE_ADD, "refid", refid) != null);
voted = (sb.webIndex.peers().newsPool.getSpecific(yacyNewsPool.OUTGOING_DB, yacyNewsPool.CATEGORY_SURFTIPP_VOTE_ADD, "refid", refid) != null) ||
(sb.webIndex.peers().newsPool.getSpecific(yacyNewsPool.PUBLISHED_DB, yacyNewsPool.CATEGORY_SURFTIPP_VOTE_ADD, "refid", refid) != null);
prop.put("surftips_results_" + i + "_authorized", (authenticated) ? "1" : "0");
prop.put("surftips_results_" + i + "_authorized_recommend", (voted) ? "0" : "1");
@ -177,9 +177,9 @@ public class Surftips {
}
private static void accumulateVotes(final plasmaSwitchboard sb, final HashMap<String, Integer> negativeHashes, final HashMap<String, Integer> positiveHashes, final int dbtype) {
final int maxCount = Math.min(1000, sb.webIndex.seedDB.newsPool.size(dbtype));
final int maxCount = Math.min(1000, sb.webIndex.peers().newsPool.size(dbtype));
yacyNewsRecord record;
final Iterator<yacyNewsRecord> recordIterator = sb.webIndex.seedDB.newsPool.recordIterator(dbtype, true);
final Iterator<yacyNewsRecord> recordIterator = sb.webIndex.peers().newsPool.recordIterator(dbtype, true);
int j = 0;
while ((recordIterator.hasNext()) && (j++ < maxCount)) {
record = recordIterator.next();
@ -207,9 +207,9 @@ public class Surftips {
final plasmaSwitchboard sb,
final HashMap<String, Entry> surftips, final ScoreCluster<String> ranking, final Row rowdef,
final HashMap<String, Integer> negativeHashes, final HashMap<String, Integer> positiveHashes, final int dbtype) {
final int maxCount = Math.min(1000, sb.webIndex.seedDB.newsPool.size(dbtype));
final int maxCount = Math.min(1000, sb.webIndex.peers().newsPool.size(dbtype));
yacyNewsRecord record;
final Iterator<yacyNewsRecord> recordIterator = sb.webIndex.seedDB.newsPool.recordIterator(dbtype, true);
final Iterator<yacyNewsRecord> recordIterator = sb.webIndex.peers().newsPool.recordIterator(dbtype, true);
int j = 0;
String url = "", urlhash;
Row.Entry entry;
@ -271,8 +271,8 @@ public class Surftips {
} catch (final IOException e) {}
if (record.category().equals(yacyNewsPool.CATEGORY_WIKI_UPDATE)) try {
yacySeed seed = sb.webIndex.seedDB.getConnected(record.originator());
if (seed == null) seed = sb.webIndex.seedDB.getDisconnected(record.originator());
yacySeed seed = sb.webIndex.peers().getConnected(record.originator());
if (seed == null) seed = sb.webIndex.peers().getDisconnected(record.originator());
if (seed != null) {
url = "http://" + seed.getPublicAddress() + "/Wiki.html?page=" + record.attribute("page", "");
entry = rowdef.newEntry(new byte[][]{
@ -286,8 +286,8 @@ public class Surftips {
} catch (final IOException e) {}
if (record.category().equals(yacyNewsPool.CATEGORY_BLOG_ADD)) try {
yacySeed seed = sb.webIndex.seedDB.getConnected(record.originator());
if (seed == null) seed = sb.webIndex.seedDB.getDisconnected(record.originator());
yacySeed seed = sb.webIndex.peers().getConnected(record.originator());
if (seed == null) seed = sb.webIndex.peers().getDisconnected(record.originator());
if (seed != null) {
url = "http://" + seed.getPublicAddress() + "/Blog.html?page=" + record.attribute("page", "");
entry = rowdef.newEntry(new byte[][]{

View File

@ -96,7 +96,7 @@ public class ViewFile {
if (urlHash.length() > 0) {
// getting the urlEntry that belongs to the url hash
MetadataRowContainer urlEntry = null;
urlEntry = sb.webIndex.getURL(urlHash, null, 0);
urlEntry = sb.webIndex.metadata().load(urlHash, null, 0);
if (urlEntry == null) {
prop.put("error", "2");
prop.put("viewMode",VIEW_MODE_NO_TEXT);

View File

@ -58,7 +58,7 @@ public class ViewProfile {
prop.put("display", display);
final String hash = (post == null) ? null : (String) post.get("hash");
if ((hash == null) || (sb.webIndex.seedDB == null)) {
if ((hash == null) || (sb.webIndex.peers() == null)) {
// wrong access
prop.put("success", "0");
return prop;
@ -82,20 +82,20 @@ public class ViewProfile {
profile.putAll(p);
prop.put("success", "3"); // everything ok
prop.put("localremotepeer", "0");
prop.putHTML("success_peername", sb.webIndex.seedDB.mySeed().getName());
prop.put("success_peerhash", sb.webIndex.seedDB.mySeed().hash);
address = sb.webIndex.seedDB.mySeed().getPublicAddress();
prop.putHTML("success_peername", sb.webIndex.peers().mySeed().getName());
prop.put("success_peerhash", sb.webIndex.peers().mySeed().hash);
address = sb.webIndex.peers().mySeed().getPublicAddress();
} else {
// read the profile from remote peer
yacySeed seed = sb.webIndex.seedDB.getConnected(hash);
if (seed == null) seed = sb.webIndex.seedDB.getDisconnected(hash);
yacySeed seed = sb.webIndex.peers().getConnected(hash);
if (seed == null) seed = sb.webIndex.peers().getDisconnected(hash);
if (seed == null) {
prop.put("success", "1"); // peer unknown
} else {
// process news if existent
try {
final yacyNewsRecord record = sb.webIndex.seedDB.newsPool.getByOriginator(yacyNewsPool.INCOMING_DB, yacyNewsPool.CATEGORY_PROFILE_UPDATE, seed.hash);
if (record != null) sb.webIndex.seedDB.newsPool.moveOff(yacyNewsPool.INCOMING_DB, record.id());
final yacyNewsRecord record = sb.webIndex.peers().newsPool.getByOriginator(yacyNewsPool.INCOMING_DB, yacyNewsPool.CATEGORY_PROFILE_UPDATE, seed.hash);
if (record != null) sb.webIndex.peers().newsPool.moveOff(yacyNewsPool.INCOMING_DB, record.id());
} catch (final IOException e) {}
// try to get the profile from remote peer

View File

@ -119,7 +119,7 @@ public class WatchCrawler_p {
if (post.containsKey("crawlingstart")) {
// init crawl
if (sb.webIndex.seedDB == null) {
if (sb.webIndex.peers() == null) {
prop.put("info", "3");
} else {
// set new properties
@ -209,7 +209,7 @@ public class WatchCrawler_p {
// first delete old entry, if exists
final yacyURL url = new yacyURL(crawlingStart, null);
final String urlhash = url.hash();
sb.webIndex.removeURL(urlhash);
sb.webIndex.metadata().remove(urlhash);
sb.crawlQueues.noticeURL.removeByURLHash(urlhash);
sb.crawlQueues.errorURL.remove(urlhash);
@ -227,7 +227,7 @@ public class WatchCrawler_p {
indexText, indexMedia,
storeHTCache, true, crawlOrder, xsstopw, xdstopw, xpstopw);
final String reasonString = sb.crawlStacker.stackCrawl(new CrawlEntry(
sb.webIndex.seedDB.mySeed().hash,
sb.webIndex.peers().mySeed().hash,
url,
null,
"CRAWLING-ROOT",
@ -272,7 +272,7 @@ public class WatchCrawler_p {
m.remove("generalFilter");
m.remove("specificFilter");
m.put("intention", post.get("intention", "").replace(',', '/'));
sb.webIndex.seedDB.newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.seedDB.mySeed(), yacyNewsPool.CATEGORY_CRAWL_START, m));
sb.webIndex.peers().newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.peers().mySeed(), yacyNewsPool.CATEGORY_CRAWL_START, m));
}
} else {
prop.put("info", "5"); //Crawling failed
@ -281,7 +281,7 @@ public class WatchCrawler_p {
final ZURL.Entry ee = sb.crawlQueues.errorURL.newEntry(
new CrawlEntry(
sb.webIndex.seedDB.mySeed().hash,
sb.webIndex.peers().mySeed().hash,
crawlingStartURL,
"",
"",
@ -291,7 +291,7 @@ public class WatchCrawler_p {
0,
0,
0),
sb.webIndex.seedDB.mySeed().hash,
sb.webIndex.peers().mySeed().hash,
new Date(),
1,
reasonString);
@ -366,7 +366,7 @@ public class WatchCrawler_p {
// enqueuing the url for crawling
sb.crawlStacker.enqueueEntry(new CrawlEntry(
sb.webIndex.seedDB.mySeed().hash,
sb.webIndex.peers().mySeed().hash,
nexturl,
"",
e.getValue(),

View File

@ -73,8 +73,8 @@ public class Wiki {
if (author.equals("anonymous")) {
author = wikiBoard.guessAuthor(ip);
if (author == null) {
if (sb.webIndex.seedDB.mySeed() == null) author = "anonymous";
else author = sb.webIndex.seedDB.mySeed().get("Name", "anonymous");
if (sb.webIndex.peers().mySeed() == null) author = "anonymous";
else author = sb.webIndex.peers().mySeed().get("Name", "anonymous");
}
}
@ -116,7 +116,7 @@ public class Wiki {
map.put("page", pagename);
map.put("author", author.replace(',', ' '));
if (post.get("content", "").trim().length() > 0 && !page.page().equals(content))
sb.webIndex.seedDB.newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.seedDB.mySeed(), yacyNewsPool.CATEGORY_WIKI_UPDATE, map));
sb.webIndex.peers().newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.peers().mySeed(), yacyNewsPool.CATEGORY_WIKI_UPDATE, map));
page = newEntry;
prop.putHTML("LOCATION", "/Wiki.html?page=" + pagename);
}

View File

@ -54,7 +54,7 @@ public class YaCySearchPluginFF {
prop.put("host", host);
prop.put("port", port);
prop.putHTML("name", sb.webIndex.seedDB.mySeed().getName());
prop.putHTML("name", sb.webIndex.peers().mySeed().getName());
return prop;
}

View File

@ -50,8 +50,8 @@ public class get_bookmarks {
prop.putHTML("display_user", username);
// set peer address
prop.put("display_address", sb.webIndex.seedDB.mySeed().getPublicAddress());
prop.put("display_peer", sb.webIndex.seedDB.mySeed().getName());
prop.put("display_address", sb.webIndex.peers().mySeed().getPublicAddress());
prop.put("display_peer", sb.webIndex.peers().mySeed().getName());
int rp = MAXRESULTS; // items per page
int page = 1; // page

View File

@ -31,8 +31,8 @@ public class get_folders {
prop.putHTML("display_user", username);
// set peer address
prop.put("display_address", sb.webIndex.seedDB.mySeed().getPublicAddress());
prop.put("display_peer", sb.webIndex.seedDB.mySeed().getName());
prop.put("display_address", sb.webIndex.peers().mySeed().getPublicAddress());
prop.put("display_peer", sb.webIndex.peers().mySeed().getName());
String root = "/";
String[] foldername = null;

View File

@ -82,6 +82,6 @@ public class add_p {
map.put("title", title.replace(',', ' '));
map.put("description", description.replace(',', ' '));
map.put("tags", tagsString.replace(',', ' '));
sb.webIndex.seedDB.newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.seedDB.mySeed(), yacyNewsPool.CATEGORY_BOOKMARK_ADD, map));
sb.webIndex.peers().newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.peers().mySeed(), yacyNewsPool.CATEGORY_BOOKMARK_ADD, map));
}
}

View File

@ -41,7 +41,7 @@ public class queues_p {
//indexing queue
prop.putNum("indexingSize", sb.getThread(plasmaSwitchboardConstants.INDEXER).getJobCount() + sb.webIndex.queuePreStack.getActiveQueueSize());
prop.putNum("indexingMax", (int) sb.getConfigLong(plasmaSwitchboardConstants.INDEXER_SLOTS, 30));
prop.putNum("urlpublictextSize", sb.webIndex.countURL());
prop.putNum("urlpublictextSize", sb.webIndex.metadata().size());
prop.putNum("rwipublictextSize", sb.webIndex.size());
if ((sb.webIndex.queuePreStack.size() == 0) && (sb.webIndex.queuePreStack.getActiveQueueSize() == 0)) {
prop.put("list", "0"); //is empty
@ -75,7 +75,7 @@ public class queues_p {
if ((pcentry != null) && (pcentry.url() != null)) {
final long entrySize = pcentry.size();
totalSize += entrySize;
initiator = sb.webIndex.seedDB.getConnected(pcentry.initiator());
initiator = sb.webIndex.peers().getConnected(pcentry.initiator());
prop.put("list-indexing_"+i+"_profile", (pcentry.profile() != null) ? pcentry.profile().name() : "deleted");
prop.putHTML("list-indexing_"+i+"_initiator", ((initiator == null) ? "proxy" : initiator.getName()));
prop.put("list-indexing_"+i+"_depth", pcentry.depth());
@ -102,7 +102,7 @@ public class queues_p {
for (int i = 0; i < w.length; i++) {
if (w[i] == null) continue;
prop.put("list-loader_"+count+"_profile", w[i].profileHandle());
initiator = sb.webIndex.seedDB.getConnected(w[i].initiator());
initiator = sb.webIndex.peers().getConnected(w[i].initiator());
prop.putHTML("list-loader_"+count+"_initiator", ((initiator == null) ? "proxy" : initiator.getName()));
prop.put("list-loader_"+count+"_depth", w[i].depth());
prop.putXML("list-loader_"+count+"_url", w[i].url().toString());
@ -146,7 +146,7 @@ public class queues_p {
for (int i = 0; i < crawlerList.size(); i++) {
urle = crawlerList.get(i);
if ((urle != null) && (urle.url() != null)) {
initiator = sb.webIndex.seedDB.getConnected(urle.initiator());
initiator = sb.webIndex.peers().getConnected(urle.initiator());
prop.put(tableName + "_" + showNum + "_profile", urle.profileHandle());
prop.put(tableName + "_" + showNum + "_initiator", ((initiator == null) ? "proxy" : initiator.getName()));
prop.put(tableName + "_" + showNum + "_depth", urle.depth());

View File

@ -24,7 +24,7 @@ public class status_p {
final int cacheSize = sb.webIndex.indexCacheSize();
final long cacheMaxSize = sb.getConfigLong(plasmaSwitchboardConstants.WORDCACHE_MAX_COUNT, 10000);
prop.putNum("ppm", sb.currentPPM());
prop.putNum("qpm", sb.webIndex.seedDB.mySeed().getQPM());
prop.putNum("qpm", sb.webIndex.peers().mySeed().getQPM());
prop.putNum("wordCacheSize", sb.webIndex.indexCacheSize());
prop.putNum("wordCacheSize", cacheSize);
prop.putNum("wordCacheMaxSize", cacheMaxSize);

View File

@ -32,7 +32,7 @@ public class ynetSearch {
String searchaddress = post.get("url");
if (!searchaddress.startsWith("http://")) {
// a relative path .. this addresses the local peer
searchaddress = "http://" + switchboard.webIndex.seedDB.mySeed().getPublicAddress() + (searchaddress.startsWith("/") ? "" : "/") + searchaddress;
searchaddress = "http://" + switchboard.webIndex.peers().mySeed().getPublicAddress() + (searchaddress.startsWith("/") ? "" : "/") + searchaddress;
}
post.remove("url");
post.remove("login");

View File

@ -69,14 +69,14 @@ public class yacydoc {
}
if (urlhash == null || urlhash.length() == 0) return prop;
final MetadataRowContainer entry = sb.webIndex.getURL(urlhash, null, 0);
final MetadataRowContainer entry = sb.webIndex.metadata().load(urlhash, null, 0);
if (entry == null) return prop;
final URLMetadata metadata = entry.metadata();
if (metadata.url() == null) {
return prop;
}
final MetadataRowContainer le = ((entry.referrerHash() == null) || (entry.referrerHash().length() != yacySeedDB.commonHashLength)) ? null : sb.webIndex.getURL(entry.referrerHash(), null, 0);
final MetadataRowContainer le = ((entry.referrerHash() == null) || (entry.referrerHash().length() != yacySeedDB.commonHashLength)) ? null : sb.webIndex.metadata().load(entry.referrerHash(), null, 0);
prop.putXML("dc_title", metadata.dc_title());
prop.putXML("dc_creator", metadata.dc_creator());

View File

@ -51,7 +51,7 @@ public class opensearchdescription {
prop.putXML("compareyacy_thisaddress", thisaddress);
prop.putXML("thisaddress", thisaddress);
prop.putXML("SearchPageGreeting", promoteSearchPageGreeting);
prop.putXML("clientname", sb.webIndex.seedDB.mySeed().getName());
prop.putXML("clientname", sb.webIndex.peers().mySeed().getName());
// return rewrite properties
return prop;

View File

@ -53,8 +53,8 @@ public class rct_p {
if (post != null) {
if (post.containsKey("retrieve")) {
final String peerhash = post.get("peer", null);
final yacySeed seed = (peerhash == null) ? null : sb.webIndex.seedDB.getConnected(peerhash);
final RSSFeed feed = (seed == null) ? null : yacyClient.queryRemoteCrawlURLs(sb.webIndex.seedDB, seed, 20, 60000);
final yacySeed seed = (peerhash == null) ? null : sb.webIndex.peers().getConnected(peerhash);
final RSSFeed feed = (seed == null) ? null : yacyClient.queryRemoteCrawlURLs(sb.webIndex.peers(), seed, 20, 60000);
if (feed != null) {
for (final RSSMessage item: feed) {
//System.out.println("URL=" + item.getLink() + ", desc=" + item.getDescription() + ", pubDate=" + item.getPubDate());
@ -115,8 +115,8 @@ public class rct_p {
// list known hosts
yacySeed seed;
int hc = 0;
if (sb.webIndex.seedDB != null && sb.webIndex.seedDB.sizeConnected() > 0) {
final Iterator<yacySeed> e = PeerSelection.getProvidesRemoteCrawlURLs(sb.webIndex.seedDB);
if (sb.webIndex.peers() != null && sb.webIndex.peers().sizeConnected() > 0) {
final Iterator<yacySeed> e = PeerSelection.getProvidesRemoteCrawlURLs(sb.webIndex.peers());
while (e.hasNext()) {
seed = e.next();
if (seed != null) {

View File

@ -109,8 +109,8 @@ public class sharedBlacklist_p {
// generate the download URL
String downloadURLOld = null;
if( sb.webIndex.seedDB != null ){ //no nullpointer error..
final yacySeed seed = sb.webIndex.seedDB.getConnected(Hash);
if( sb.webIndex.peers() != null ){ //no nullpointer error..
final yacySeed seed = sb.webIndex.peers().getConnected(Hash);
if (seed != null) {
final String IP = seed.getIP();
final String Port = seed.get(yacySeed.PORT, "8080");

View File

@ -53,7 +53,7 @@ public class welcome {
prop.putHTML("peername", env.getConfig("peerName", "<nameless>"));
prop.putHTML("peerdomain", env.getConfig("peerName", "<nameless>").toLowerCase());
prop.putHTML("peeraddress", sb.webIndex.seedDB.mySeed().getPublicAddress());
prop.putHTML("peeraddress", sb.webIndex.peers().mySeed().getPublicAddress());
prop.put("hostname", serverDomains.myPublicIP());
try{
prop.put("hostip", InetAddress.getByName(serverDomains.myPublicIP()).getHostAddress());
@ -63,7 +63,7 @@ public class welcome {
prop.put("port", serverCore.getPortNr(env.getConfig("port","8080")));
prop.put("clientip", (String) header.get(httpRequestHeader.CONNECTION_PROP_CLIENTIP, ""));
final String peertype = (sb.webIndex.seedDB.mySeed() == null) ? yacySeed.PEERTYPE_JUNIOR : sb.webIndex.seedDB.mySeed().get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_VIRGIN);
final String peertype = (sb.webIndex.peers().mySeed() == null) ? yacySeed.PEERTYPE_JUNIOR : sb.webIndex.peers().mySeed().get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_VIRGIN);
final boolean senior = (peertype.equals(yacySeed.PEERTYPE_SENIOR)) || (peertype.equals(yacySeed.PEERTYPE_PRINCIPAL));
if (senior) { prop.put("couldcan", "can"); } else { prop.put("couldcan", "could"); }
if (senior) { prop.put("seniorinfo", "This peer runs in senior mode which means that your peer can be accessed using the addresses shown above."); } else { prop.putHTML("seniorinfo", "<b>Nobody can access your peer from the outside of your intranet. You must open your firewall and/or set a 'virtual server' in the settings of your router to enable access to the addresses as shown below.</b>"); }

View File

@ -92,10 +92,10 @@ public final class crawlReceipt {
*/
final yacySeed otherPeer = sb.webIndex.seedDB.get(iam);
final yacySeed otherPeer = sb.webIndex.peers().get(iam);
final String otherPeerName = iam + ":" + ((otherPeer == null) ? "NULL" : (otherPeer.getName() + "/" + otherPeer.getVersion()));
if ((sb.webIndex.seedDB.mySeed() == null) || (!(sb.webIndex.seedDB.mySeed().hash.equals(youare)))) {
if ((sb.webIndex.peers().mySeed() == null) || (!(sb.webIndex.peers().mySeed().hash.equals(youare)))) {
// no yacy connection / unknown peers
prop.put("delay", "3600");
return prop;
@ -137,7 +137,7 @@ public final class crawlReceipt {
if (result.equals("fill")) try {
// put new entry into database
sb.webIndex.putURL(entry);
sb.webIndex.metadata().store(entry);
sb.crawlResults.stack(entry, youare, iam, 1);
sb.crawlQueues.delegatedURL.remove(entry.hash()); // the delegated work has been done
log.logInfo("crawlReceipt: RECEIVED RECEIPT from " + otherPeerName + " for URL " + entry.hash() + ":" + metadata.url().toNormalform(false, true));

View File

@ -158,17 +158,17 @@ public final class hello {
remoteSeed.put(yacySeed.PEERTYPE, yacySeed.PEERTYPE_SENIOR);
}
// connect the seed
sb.webIndex.seedDB.peerActions.peerArrival(remoteSeed, true);
sb.webIndex.peers().peerActions.peerArrival(remoteSeed, true);
} else {
prop.put(yacySeed.YOURTYPE, yacySeed.PEERTYPE_JUNIOR);
remoteSeed.put(yacySeed.PEERTYPE, yacySeed.PEERTYPE_JUNIOR);
yacyCore.log.logInfo("hello: responded remote junior peer '" + remoteSeed.getName() + "' from " + reportedip);
// no connection here, instead store junior in connection cache
if ((remoteSeed.hash != null) && (remoteSeed.isProper(false) == null)) {
sb.webIndex.seedDB.peerActions.peerPing(remoteSeed);
sb.webIndex.peers().peerActions.peerPing(remoteSeed);
}
}
sb.webIndex.seedDB.peerActions.setUserAgent(clientip, userAgent);
sb.webIndex.peers().peerActions.setUserAgent(clientip, userAgent);
if (!(prop.get(yacySeed.YOURTYPE)).equals(reportedPeerType)) {
yacyCore.log.logInfo("hello: changing remote peer '" + remoteSeed.getName() +
"' [" + reportedip +
@ -179,15 +179,15 @@ public final class hello {
serverCore.checkInterruption();
final StringBuilder seeds = new StringBuilder(768);
// attach some more seeds, as requested
if (sb.webIndex.seedDB.sizeConnected() > 0) {
if (count > sb.webIndex.seedDB.sizeConnected()) { count = sb.webIndex.seedDB.sizeConnected(); }
if (sb.webIndex.peers().sizeConnected() > 0) {
if (count > sb.webIndex.peers().sizeConnected()) { count = sb.webIndex.peers().sizeConnected(); }
if (count > 100) { count = 100; }
// latest seeds
final Map<String, yacySeed> ySeeds = PeerSelection.seedsByAge(sb.webIndex.seedDB, true, count); // peerhash/yacySeed relation
final Map<String, yacySeed> ySeeds = PeerSelection.seedsByAge(sb.webIndex.peers(), true, count); // peerhash/yacySeed relation
// attach also my own seed
seeds.append("seed0=").append(sb.webIndex.seedDB.mySeed().genSeedStr(key)).append(serverCore.CRLF_STRING);
seeds.append("seed0=").append(sb.webIndex.peers().mySeed().genSeedStr(key)).append(serverCore.CRLF_STRING);
count = 1;
// attach other seeds
@ -211,7 +211,7 @@ public final class hello {
}
} else {
// attach also my own seed
seeds.append("seed0=").append(sb.webIndex.seedDB.mySeed().genSeedStr(key)).append(serverCore.CRLF_STRING);
seeds.append("seed0=").append(sb.webIndex.peers().mySeed().genSeedStr(key)).append(serverCore.CRLF_STRING);
}
prop.put("seedlist", seeds.toString());

View File

@ -57,7 +57,7 @@ public final class list {
String otherPeerName = null;
if (post.containsKey("iam")) {
final yacySeed bla = sb.webIndex.seedDB.get(post.get("iam", ""));
final yacySeed bla = sb.webIndex.peers().get(post.get("iam", ""));
if (bla != null) otherPeerName = bla.getName();
}
if (otherPeerName == null) otherPeerName = header.get(httpRequestHeader.CONNECTION_PROP_CLIENTIP);

View File

@ -73,7 +73,7 @@ public final class message {
final String youare = post.get("youare", ""); // seed hash of the target peer, needed for network stability
// check if we are the right target and requester has correct information about this peer
if ((sb.webIndex.seedDB.mySeed() == null) || (!(sb.webIndex.seedDB.mySeed().hash.equals(youare)))) {
if ((sb.webIndex.peers().mySeed() == null) || (!(sb.webIndex.peers().mySeed().hash.equals(youare)))) {
// this request has a wrong target
prop.put("response", "-1"); // request rejected
return prop;
@ -133,7 +133,7 @@ public final class message {
sb.messageDB.write(msgEntry = sb.messageDB.newEntry(
"remote",
otherSeed.get(yacySeed.NAME, "anonymous"), otherSeed.hash,
sb.webIndex.seedDB.mySeed().getName(), sb.webIndex.seedDB.mySeed().hash,
sb.webIndex.peers().mySeed().getName(), sb.webIndex.peers().mySeed().hash,
subject, mb));
messageForwardingViaEmail(sb, msgEntry);
@ -179,7 +179,7 @@ public final class message {
.append(sendMailTo)
.append("\nFrom: ")
.append("yacy@")
.append(sb.webIndex.seedDB.mySeed().getName())
.append(sb.webIndex.peers().mySeed().getName())
.append("\nSubject: [YaCy] ")
.append(msgEntry.subject().replace('\n', ' '))
.append("\nDate: ")

View File

@ -72,7 +72,7 @@ public final class query {
prop.put("mytime", DateFormatter.formatShortSecond());
// check if we are the right target and requester has correct information about this peer
if (sb.webIndex.seedDB.mySeed() == null || !sb.webIndex.seedDB.mySeed().hash.equals(youare)) {
if (sb.webIndex.peers().mySeed() == null || !sb.webIndex.peers().mySeed().hash.equals(youare)) {
// this request has a wrong target
prop.put("response", "-1"); // request rejected
return prop;
@ -94,7 +94,7 @@ public final class query {
if (obj.equals("lurlcount")) {
// return the number of all available l-url's
prop.put("response", sb.webIndex.countURL());
prop.put("response", sb.webIndex.metadata().size());
return prop;
}

View File

@ -157,10 +157,10 @@ public final class search {
// store accessing peer
final yacySeed remoteSeed = yacySeed.genRemoteSeed(oseed, key, false);
if (sb.webIndex.seedDB == null) {
if (sb.webIndex.peers() == null) {
yacyCore.log.logSevere("yacy.search: seed cache not initialized");
} else {
sb.webIndex.seedDB.peerActions.peerArrival(remoteSeed, true);
sb.webIndex.peers().peerActions.peerArrival(remoteSeed, true);
}
// prepare search
@ -315,7 +315,7 @@ public final class search {
prop.put("fwrec", ""); // peers that would have helped to construct this result (recommendations)
// prepare search statistics
theQuery.remotepeer = sb.webIndex.seedDB.lookupByIP(natLib.getInetAddress(client), true, false, false);
theQuery.remotepeer = sb.webIndex.peers().lookupByIP(natLib.getInetAddress(client), true, false, false);
theQuery.resultcount = (theSearch == null) ? 0 : theSearch.getRankingResult().getLocalResourceSize() + theSearch.getRankingResult().getRemoteResourceSize();
theQuery.searchtime = System.currentTimeMillis() - timestamp;
theQuery.urlretrievaltime = (theSearch == null) ? 0 : theSearch.getURLRetrievalTime();
@ -337,8 +337,8 @@ public final class search {
prop.put("searchtime", System.currentTimeMillis() - timestamp);
final int links = Integer.parseInt(prop.get("linkcount","0"));
sb.webIndex.seedDB.mySeed().incSI(links);
sb.webIndex.seedDB.mySeed().incSU(links);
sb.webIndex.peers().mySeed().incSI(links);
sb.webIndex.peers().mySeed().incSU(links);
return prop;
}

View File

@ -66,7 +66,7 @@ public final class transfer {
return prop;
}
final yacySeed otherseed = sb.webIndex.seedDB.get(otherpeer);
final yacySeed otherseed = sb.webIndex.peers().get(otherpeer);
if ((otherseed == null) || (filename.indexOf("..") >= 0)) {
// reject unknown peers: this does not appear fair, but anonymous senders are dangerous
// reject paths that contain '..' because they are dangerous
@ -88,7 +88,7 @@ public final class transfer {
final String access = Base64Order.enhancedCoder.encode(Digest.encodeMD5Raw(otherpeer + ":" + filename)) + ":" + Base64Order.enhancedCoder.encode(Digest.encodeMD5Raw("" + System.currentTimeMillis()));
prop.put("response", "ok");
prop.put("process_access", access);
prop.put("process_address", sb.webIndex.seedDB.mySeed().getPublicAddress());
prop.put("process_address", sb.webIndex.peers().mySeed().getPublicAddress());
prop.put("process_protocol", "http");
prop.put("process_path", ""); // currently empty; the store process will find a path
prop.put("process_maxsize", "-1"); // if response is too big we return the size of the file

View File

@ -83,7 +83,7 @@ public final class transferRWI {
boolean granted = sb.getConfig("allowReceiveIndex", "false").equals("true");
final boolean blockBlacklist = sb.getConfig("indexReceiveBlockBlacklist", "false").equals("true");
final long cachelimit = sb.getConfigLong(plasmaSwitchboardConstants.WORDCACHE_MAX_COUNT, 100000);
final yacySeed otherPeer = sb.webIndex.seedDB.get(iam);
final yacySeed otherPeer = sb.webIndex.peers().get(iam);
final String otherPeerName = iam + ":" + ((otherPeer == null) ? "NULL" : (otherPeer.getName() + "/" + otherPeer.getVersion()));
// response values
@ -91,8 +91,8 @@ public final class transferRWI {
String result = "ok";
final StringBuilder unknownURLs = new StringBuilder();
if ((youare == null) || (!youare.equals(sb.webIndex.seedDB.mySeed().hash))) {
sb.getLog().logInfo("Rejecting RWIs from peer " + otherPeerName + ". Wrong target. Wanted peer=" + youare + ", iam=" + sb.webIndex.seedDB.mySeed().hash);
if ((youare == null) || (!youare.equals(sb.webIndex.peers().mySeed().hash))) {
sb.getLog().logInfo("Rejecting RWIs from peer " + otherPeerName + ". Wrong target. Wanted peer=" + youare + ", iam=" + sb.webIndex.peers().mySeed().hash);
result = "wrong_target";
pause = 0;
} else if ((!granted) || (sb.isRobinsonMode())) {
@ -162,7 +162,7 @@ public final class transferRWI {
// check if we need to ask for the corresponding URL
if (!(knownURL.contains(urlHash)||unknownURL.contains(urlHash))) try {
if (sb.webIndex.existsURL(urlHash)) {
if (sb.webIndex.metadata().exists(urlHash)) {
knownURL.add(urlHash);
} else {
unknownURL.add(urlHash);
@ -175,7 +175,7 @@ public final class transferRWI {
}
received++;
}
sb.webIndex.seedDB.mySeed().incRI(received);
sb.webIndex.peers().mySeed().incRI(received);
// finally compose the unknownURL hash list
final Iterator<String> it = unknownURL.iterator();
@ -187,7 +187,7 @@ public final class transferRWI {
if ((wordhashes.length == 0) || (received == 0)) {
sb.getLog().logInfo("Received 0 RWIs from " + otherPeerName + ", processed in " + (System.currentTimeMillis() - startProcess) + " milliseconds, requesting " + unknownURL.size() + " URLs, blocked " + blocked + " RWIs");
} else {
final long avdist = (FlatWordPartitionScheme.std.dhtDistance(wordhashes[0], null, sb.webIndex.seedDB.mySeed()) + FlatWordPartitionScheme.std.dhtDistance(wordhashes[received - 1], null, sb.webIndex.seedDB.mySeed())) / 2;
final long avdist = (FlatWordPartitionScheme.std.dhtDistance(wordhashes[0], null, sb.webIndex.peers().mySeed()) + FlatWordPartitionScheme.std.dhtDistance(wordhashes[received - 1], null, sb.webIndex.peers().mySeed())) / 2;
sb.getLog().logInfo("Received " + received + " Entries " + wordc + " Words [" + wordhashes[0] + " .. " + wordhashes[received - 1] + "]/" + avdist + " from " + otherPeerName + ", processed in " + (System.currentTimeMillis() - startProcess) + " milliseconds, requesting " + unknownURL.size() + "/" + receivedURL + " URLs, blocked " + blocked + " RWIs");
RSSFeed.channels(RSSFeed.INDEXRECEIVE).addMessage(new RSSMessage("Received " + received + " RWIs [" + wordhashes[0] + " .. " + wordhashes[received - 1] + "]/" + avdist + " from " + otherPeerName + ", requesting " + unknownURL.size() + " URLs, blocked " + blocked, "", ""));
}

View File

@ -70,11 +70,11 @@ public final class transferURL {
String result = "";
String doublevalues = "0";
final yacySeed otherPeer = sb.webIndex.seedDB.get(iam);
final yacySeed otherPeer = sb.webIndex.peers().get(iam);
final String otherPeerName = iam + ":" + ((otherPeer == null) ? "NULL" : (otherPeer.getName() + "/" + otherPeer.getVersion()));
if ((youare == null) || (!youare.equals(sb.webIndex.seedDB.mySeed().hash))) {
sb.getLog().logInfo("Rejecting URLs from peer " + otherPeerName + ". Wrong target. Wanted peer=" + youare + ", iam=" + sb.webIndex.seedDB.mySeed().hash);
if ((youare == null) || (!youare.equals(sb.webIndex.peers().mySeed().hash))) {
sb.getLog().logInfo("Rejecting URLs from peer " + otherPeerName + ". Wrong target. Wanted peer=" + youare + ", iam=" + sb.webIndex.peers().mySeed().hash);
result = "wrong_target";
} else if ((!granted) || (sb.isRobinsonMode())) {
sb.getLog().logInfo("Rejecting URLs from peer " + otherPeerName + ". Not granted.");
@ -82,7 +82,7 @@ public final class transferURL {
} else {
int received = 0;
int blocked = 0;
final int sizeBefore = sb.webIndex.countURL();
final int sizeBefore = sb.webIndex.metadata().size();
// read the urls from the other properties and store
String urls;
MetadataRowContainer lEntry;
@ -139,7 +139,7 @@ public final class transferURL {
// write entry to database
try {
sb.webIndex.putURL(lEntry);
sb.webIndex.metadata().store(lEntry);
sb.crawlResults.stack(lEntry, iam, iam, 3);
if (yacyCore.log.isFine()) yacyCore.log.logFine("transferURL: received URL '" + metadata.url().toNormalform(false, true) + "' from peer " + otherPeerName);
received++;
@ -148,10 +148,10 @@ public final class transferURL {
}
}
sb.webIndex.seedDB.mySeed().incRU(received);
sb.webIndex.peers().mySeed().incRU(received);
// return rewrite properties
final int more = sb.webIndex.countURL() - sizeBefore;
final int more = sb.webIndex.metadata().size() - sizeBefore;
doublevalues = Integer.toString(received - more);
sb.getLog().logInfo("Received " + received + " URLs from peer " + otherPeerName + " in " + (System.currentTimeMillis() - start) + " ms, blocked " + blocked + " URLs");
RSSFeed.channels(RSSFeed.INDEXRECEIVE).addMessage(new RSSMessage("Received " + received + " URLs from peer " + otherPeerName + ", blocked " + blocked, "", ""));

View File

@ -46,7 +46,7 @@ public class urls {
// insert default values
final serverObjects prop = new serverObjects();
prop.put("iam", sb.webIndex.seedDB.mySeed().hash);
prop.put("iam", sb.webIndex.peers().mySeed().hash);
prop.put("response", "rejected - insufficient call parameters");
prop.put("channel_title", "");
prop.put("channel_description", "");
@ -82,7 +82,7 @@ public class urls {
sb.crawlQueues.delegatedURL.push(
sb.crawlQueues.delegatedURL.newEntry(
entry,
sb.webIndex.seedDB.mySeed().hash,
sb.webIndex.peers().mySeed().hash,
new Date(),
0,
"client=____________")
@ -113,7 +113,7 @@ public class urls {
URLMetadata metadata;
yacyURL referrer;
for (int i = 0; i < count; i++) {
entry = sb.webIndex.getURL(urlhashes.substring(12 * i, 12 * (i + 1)), null, 0);
entry = sb.webIndex.metadata().load(urlhashes.substring(12 * i, 12 * (i + 1)), null, 0);
if (entry == null) continue;
// find referrer, if there is one
referrer = sb.getURL(entry.referrerHash());

View File

@ -224,7 +224,7 @@ public class yacysearch {
// check available memory and clean up if necessary
if (!MemoryControl.request(8000000L, false)) {
sb.webIndex.clearCache();
sb.webIndex.metadata().clearCache();
plasmaSearchEvent.cleanupEvents(true);
}
@ -322,7 +322,7 @@ public class yacysearch {
map.put("urlhash", delHash);
map.put("vote", "negative");
map.put("refid", "");
sb.webIndex.seedDB.newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.seedDB.mySeed(), yacyNewsPool.CATEGORY_SURFTIPP_VOTE_ADD, map));
sb.webIndex.peers().newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.peers().mySeed(), yacyNewsPool.CATEGORY_SURFTIPP_VOTE_ADD, map));
}
// if a plus-button was hit, create new voting message
@ -332,7 +332,7 @@ public class yacysearch {
return prop;
}
final String recommendHash = post.get("recommendref", ""); // urlhash
final MetadataRowContainer urlentry = sb.webIndex.getURL(recommendHash, null, 0);
final MetadataRowContainer urlentry = sb.webIndex.metadata().load(recommendHash, null, 0);
if (urlentry != null) {
final URLMetadata metadata = urlentry.metadata();
plasmaParserDocument document;
@ -345,7 +345,7 @@ public class yacysearch {
map.put("description", document.dc_title().replace(',', ' '));
map.put("author", document.dc_creator());
map.put("tags", document.dc_subject(' '));
sb.webIndex.seedDB.newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.seedDB.mySeed(), yacyNewsPool.CATEGORY_SURFTIPP_ADD, map));
sb.webIndex.peers().newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.peers().mySeed(), yacyNewsPool.CATEGORY_SURFTIPP_ADD, map));
document.close();
}
}

View File

@ -108,7 +108,7 @@ public class yacysearchitem {
prop.put("content", 1); // switch on specific content
prop.put("content_authorized", authenticated ? "1" : "0");
prop.put("content_authorized_recommend", (sb.webIndex.seedDB.newsPool.getSpecific(yacyNewsPool.OUTGOING_DB, yacyNewsPool.CATEGORY_SURFTIPP_ADD, "url", result.urlstring()) == null) ? "1" : "0");
prop.put("content_authorized_recommend", (sb.webIndex.peers().newsPool.getSpecific(yacyNewsPool.OUTGOING_DB, yacyNewsPool.CATEGORY_SURFTIPP_ADD, "url", result.urlstring()) == null) ? "1" : "0");
prop.putHTML("content_authorized_recommend_deletelink", "/yacysearch.html?search=" + theQuery.queryString + "&Enter=Search&count=" + theQuery.displayResults() + "&offset=" + (theQuery.neededResults() - theQuery.displayResults()) + "&order=" + crypt.simpleEncode(theQuery.ranking.toExternalString()) + "&resource=local&time=3&deleteref=" + result.hash() + "&urlmaskfilter=.*");
prop.putHTML("content_authorized_recommend_recommendlink", "/yacysearch.html?search=" + theQuery.queryString + "&Enter=Search&count=" + theQuery.displayResults() + "&offset=" + (theQuery.neededResults() - theQuery.displayResults()) + "&order=" + crypt.simpleEncode(theQuery.ranking.toExternalString()) + "&resource=local&time=3&recommendref=" + result.hash() + "&urlmaskfilter=.*");
prop.put("content_authorized_urlhash", result.hash());

View File

@ -244,7 +244,7 @@ public class CrawlQueues {
+ ", crawlDepth=" + profile.depth()
+ ", must-match=" + profile.mustMatchPattern().toString()
+ ", must-not-match=" + profile.mustNotMatchPattern().toString()
+ ", permission=" + ((sb.webIndex.seedDB == null) ? "undefined" : (((sb.webIndex.seedDB.mySeed().isSenior()) || (sb.webIndex.seedDB.mySeed().isPrincipal())) ? "true" : "false")));
+ ", permission=" + ((sb.webIndex.peers() == null) ? "undefined" : (((sb.webIndex.peers().mySeed().isSenior()) || (sb.webIndex.peers().mySeed().isPrincipal())) ? "true" : "false")));
processLocalCrawling(urlEntry, stats);
} else {
@ -310,13 +310,13 @@ public class CrawlQueues {
public boolean remoteCrawlLoaderJob() {
// check if we are allowed to crawl urls provided by other peers
if (!sb.webIndex.seedDB.mySeed().getFlagAcceptRemoteCrawl()) {
if (!sb.webIndex.peers().mySeed().getFlagAcceptRemoteCrawl()) {
//this.log.logInfo("remoteCrawlLoaderJob: not done, we are not allowed to do that");
return false;
}
// check if we are a senior peer
if (!sb.webIndex.seedDB.mySeed().isActive()) {
if (!sb.webIndex.peers().mySeed().isActive()) {
//this.log.logInfo("remoteCrawlLoaderJob: not done, this should be a senior or principal peer");
return false;
}
@ -354,8 +354,8 @@ public class CrawlQueues {
// check if we have an entry in the provider list, otherwise fill the list
yacySeed seed;
if (remoteCrawlProviderHashes.size() == 0) {
if (sb.webIndex.seedDB != null && sb.webIndex.seedDB.sizeConnected() > 0) {
final Iterator<yacySeed> e = PeerSelection.getProvidesRemoteCrawlURLs(sb.webIndex.seedDB);
if (sb.webIndex.peers() != null && sb.webIndex.peers().sizeConnected() > 0) {
final Iterator<yacySeed> e = PeerSelection.getProvidesRemoteCrawlURLs(sb.webIndex.peers());
while (e.hasNext()) {
seed = e.next();
if (seed != null) {
@ -372,7 +372,7 @@ public class CrawlQueues {
while ((seed == null) && (remoteCrawlProviderHashes.size() > 0)) {
hash = remoteCrawlProviderHashes.remove(remoteCrawlProviderHashes.size() - 1);
if (hash == null) continue;
seed = sb.webIndex.seedDB.get(hash);
seed = sb.webIndex.peers().get(hash);
if (seed == null) continue;
// check if the peer is inside our cluster
if ((sb.isRobinsonMode()) && (!sb.isInMyCluster(seed))) {
@ -383,11 +383,11 @@ public class CrawlQueues {
if (seed == null) return false;
// we know a peer which should provide remote crawl entries. load them now.
final RSSFeed feed = yacyClient.queryRemoteCrawlURLs(sb.webIndex.seedDB, seed, 30, 60000);
final RSSFeed feed = yacyClient.queryRemoteCrawlURLs(sb.webIndex.peers(), seed, 30, 60000);
if (feed == null || feed.size() == 0) {
// something is wrong with this provider. To prevent that we get not stuck with this peer
// we remove it from the peer list
sb.webIndex.seedDB.peerActions.peerDeparture(seed, "no results from provided remote crawls");
sb.webIndex.peers().peerActions.peerDeparture(seed, "no results from provided remote crawls");
// ask another peer
return remoteCrawlLoaderJob();
}
@ -501,7 +501,7 @@ public class CrawlQueues {
) throws IOException {
final CrawlEntry centry = new CrawlEntry(
sb.webIndex.seedDB.mySeed().hash,
sb.webIndex.peers().mySeed().hash,
url,
"",
"",
@ -555,7 +555,7 @@ public class CrawlQueues {
if (log.isFine()) log.logFine("Crawling of URL '" + entry.url().toString() + "' disallowed by robots.txt.");
final ZURL.Entry eentry = errorURL.newEntry(
this.entry,
sb.webIndex.seedDB.mySeed().hash,
sb.webIndex.peers().mySeed().hash,
new Date(),
1,
"denied by robots.txt");
@ -568,7 +568,7 @@ public class CrawlQueues {
if (result != null) {
final ZURL.Entry eentry = errorURL.newEntry(
this.entry,
sb.webIndex.seedDB.mySeed().hash,
sb.webIndex.peers().mySeed().hash,
new Date(),
1,
"cannot load: " + result);
@ -581,7 +581,7 @@ public class CrawlQueues {
} catch (final Exception e) {
final ZURL.Entry eentry = errorURL.newEntry(
this.entry,
sb.webIndex.seedDB.mySeed().hash,
sb.webIndex.peers().mySeed().hash,
new Date(),
1,
e.getMessage() + " - in worker");

View File

@ -123,7 +123,7 @@ public final class CrawlStacker {
// if the url was rejected we store it into the error URL db
if (rejectReason != null) {
final ZURL.Entry ee = nextQueue.errorURL.newEntry(entry, wordIndex.seedDB.mySeed().hash, new Date(), 1, rejectReason);
final ZURL.Entry ee = nextQueue.errorURL.newEntry(entry, wordIndex.peers().mySeed().hash, new Date(), 1, rejectReason);
ee.store();
nextQueue.errorURL.push(ee);
}
@ -255,8 +255,8 @@ public final class CrawlStacker {
// check if the url is double registered
final String dbocc = nextQueue.urlExists(entry.url().hash());
if (dbocc != null || wordIndex.existsURL(entry.url().hash())) {
final MetadataRowContainer oldEntry = wordIndex.getURL(entry.url().hash(), null, 0);
if (dbocc != null || wordIndex.metadata().exists(entry.url().hash())) {
final MetadataRowContainer oldEntry = wordIndex.metadata().load(entry.url().hash(), null, 0);
final boolean recrawl = (oldEntry != null) && (profile.recrawlIfOlder() > oldEntry.loaddate().getTime());
// do double-check
if ((dbocc != null) && (!recrawl)) {
@ -278,7 +278,7 @@ public final class CrawlStacker {
}
// store information
final boolean local = entry.initiator().equals(wordIndex.seedDB.mySeed().hash);
final boolean local = entry.initiator().equals(wordIndex.peers().mySeed().hash);
final boolean proxy = (entry.initiator() == null || entry.initiator().equals("------------")) && profile.handle().equals(wordIndex.defaultProxyProfile.handle());
final boolean remote = profile.handle().equals(wordIndex.defaultRemoteProfile.handle());
final boolean global =
@ -286,8 +286,8 @@ public final class CrawlStacker {
(entry.depth() == profile.depth()) /* leaf node */ &&
//(initiatorHash.equals(yacyCore.seedDB.mySeed.hash)) /* not proxy */ &&
(
(wordIndex.seedDB.mySeed().isSenior()) ||
(wordIndex.seedDB.mySeed().isPrincipal())
(wordIndex.peers().mySeed().isSenior()) ||
(wordIndex.peers().mySeed().isPrincipal())
) /* qualified */;
if (!local && !global && !remote && !proxy) {

View File

@ -143,7 +143,7 @@ public class FTPLoader {
if (berr.size() > 0 || htCache == null) {
// some error logging
final String detail = (berr.size() > 0) ? "\n Errorlog: " + berr.toString() : "";
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.seedDB.mySeed().hash, new Date(), 1, "server download" + detail);
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.peers().mySeed().hash, new Date(), 1, "server download" + detail);
throw new IOException("FTPLoader: Unable to download URL " + entry.url().toString() + detail);
}
@ -240,13 +240,13 @@ public class FTPLoader {
htCache.setCacheArray(b);
} else {
log.logInfo("REJECTED TOO BIG FILE with size " + size + " Bytes for URL " + entry.url().toString());
sb.crawlQueues.errorURL.newEntry(entry, this.sb.webIndex.seedDB.mySeed().hash, new Date(), 1, "file size limit exceeded");
sb.crawlQueues.errorURL.newEntry(entry, this.sb.webIndex.peers().mySeed().hash, new Date(), 1, "file size limit exceeded");
throw new Exception("file size exceeds limit");
}
} else {
// if the response has not the right file type then reject file
log.logInfo("REJECTED WRONG MIME/EXT TYPE " + mimeType + " for URL " + entry.url().toString());
sb.crawlQueues.errorURL.newEntry(entry, this.sb.webIndex.seedDB.mySeed().hash, new Date(), 1, "wrong mime type or wrong extension");
sb.crawlQueues.errorURL.newEntry(entry, this.sb.webIndex.peers().mySeed().hash, new Date(), 1, "wrong mime type or wrong extension");
throw new Exception("response has not the right file type -> rejected");
}
return htCache;

View File

@ -105,7 +105,7 @@ public final class HTTPLoader {
private Document load(final CrawlEntry entry, final String parserMode, final int retryCount) throws IOException {
if (retryCount < 0) {
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.seedDB.mySeed().hash, new Date(), 1, "redirection counter exceeded").store();
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.peers().mySeed().hash, new Date(), 1, "redirection counter exceeded").store();
throw new IOException("Redirection counter exceeded for URL " + entry.url().toString() + ". Processing aborted.");
}
@ -119,7 +119,7 @@ public final class HTTPLoader {
// check if url is in blacklist
final String hostlow = host.toLowerCase();
if (plasmaSwitchboard.urlBlacklist.isListed(Blacklist.BLACKLIST_CRAWLER, hostlow, path)) {
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.seedDB.mySeed().hash, new Date(), 1, "url in blacklist").store();
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.peers().mySeed().hash, new Date(), 1, "url in blacklist").store();
throw new IOException("CRAWLER Rejecting URL '" + entry.url().toString() + "'. URL is in blacklist.");
}
@ -158,7 +158,7 @@ public final class HTTPLoader {
// get the content length and check if the length is allowed
long contentLength = res.getResponseHeader().getContentLength();
if (maxFileSize >= 0 && contentLength > maxFileSize) {
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.seedDB.mySeed().hash, new Date(), 1, "file size limit exceeded");
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.peers().mySeed().hash, new Date(), 1, "file size limit exceeded");
throw new IOException("REJECTED URL " + entry.url() + " because file size '" + contentLength + "' exceeds max filesize limit of " + maxFileSize + " bytes.");
}
@ -169,14 +169,14 @@ public final class HTTPLoader {
// check length again in case it was not possible to get the length before loading
if (maxFileSize >= 0 && contentLength > maxFileSize) {
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.seedDB.mySeed().hash, new Date(), 1, "file size limit exceeded");
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.peers().mySeed().hash, new Date(), 1, "file size limit exceeded");
throw new IOException("REJECTED URL " + entry.url() + " because file size '" + contentLength + "' exceeds max filesize limit of " + maxFileSize + " bytes.");
}
htCache.setCacheArray(responseBody);
} else {
// if the response has not the right file type then reject file
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.seedDB.mySeed().hash, new Date(), 1, "wrong mime type or wrong extension");
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.peers().mySeed().hash, new Date(), 1, "wrong mime type or wrong extension");
throw new IOException("REJECTED WRONG MIME/EXT TYPE " + res.getResponseHeader().mime() + " for URL " + entry.url().toString());
}
return htCache;
@ -198,7 +198,7 @@ public final class HTTPLoader {
redirectionUrlString = redirectionUrlString.trim();
if (redirectionUrlString.length() == 0) {
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.seedDB.mySeed().hash, new Date(), 1, "redirection header empy");
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.peers().mySeed().hash, new Date(), 1, "redirection header empy");
throw new IOException("CRAWLER Redirection of URL=" + entry.url().toString() + " aborted. Location header is empty.");
}
@ -211,7 +211,7 @@ public final class HTTPLoader {
// if we are already doing a shutdown we don't need to retry crawling
if (Thread.currentThread().isInterrupted()) {
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.seedDB.mySeed().hash, new Date(), 1, "server shutdown");
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.peers().mySeed().hash, new Date(), 1, "server shutdown");
throw new IOException("CRAWLER Retry of URL=" + entry.url().toString() + " aborted because of server shutdown.");
}
@ -221,7 +221,7 @@ public final class HTTPLoader {
// check if the url was already indexed
final String dbname = sb.urlExists(urlhash);
if (dbname != null) {
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.seedDB.mySeed().hash, new Date(), 1, "redirection to double content");
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.peers().mySeed().hash, new Date(), 1, "redirection to double content");
throw new IOException("CRAWLER Redirection of URL=" + entry.url().toString() + " ignored. The url appears already in db " + dbname);
}
@ -231,7 +231,7 @@ public final class HTTPLoader {
}
} else {
// if the response has not the right response type then reject file
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.seedDB.mySeed().hash, new Date(), 1, "wrong http status code " + res.getStatusCode() + ")");
sb.crawlQueues.errorURL.newEntry(entry, sb.webIndex.peers().mySeed().hash, new Date(), 1, "wrong http status code " + res.getStatusCode() + ")");
throw new IOException("REJECTED WRONG STATUS TYPE '" + res.getStatusLine() + "' for URL " + entry.url().toString());
}
/*

View File

@ -319,12 +319,12 @@ public class IndexingStack {
public yacySeed initiatorPeer() {
if ((initiator == null) || (initiator.length() == 0)) return null;
if (initiator.equals(wordIndex.seedDB.mySeed().hash)) {
if (initiator.equals(wordIndex.peers().mySeed().hash)) {
// normal crawling
return null;
}
// this was done for remote peer (a global crawl)
return wordIndex.seedDB.getConnected(initiator);
return wordIndex.peers().getConnected(initiator);
}
public int depth() {
@ -367,7 +367,7 @@ public class IndexingStack {
if (referrerURL == null) {
// FIXME the equals seems to be incorrect: String.equals(boolean)
if ((referrerHash == null) || ((initiator != null) && (referrerHash.equals(initiator.length() == 0)))) return null;
final MetadataRowContainer entry = wordIndex.getURL(referrerHash, null, 0);
final MetadataRowContainer entry = wordIndex.metadata().load(referrerHash, null, 0);
if (entry == null) referrerURL = null; else referrerURL = entry.metadata().url();
}
return referrerURL;
@ -394,7 +394,7 @@ public class IndexingStack {
if ((initiator == null) || initiator.length() == 0 || initiator.equals("------------")) {
// proxy-load
processCase = plasmaSwitchboardConstants.PROCESSCASE_4_PROXY_LOAD;
} else if (initiator.equals(wordIndex.seedDB.mySeed().hash)) {
} else if (initiator.equals(wordIndex.peers().mySeed().hash)) {
// normal crawling
processCase = plasmaSwitchboardConstants.PROCESSCASE_5_LOCAL_CRAWLING;
} else {

View File

@ -123,7 +123,7 @@ public final class ResourceObserver {
if (tmpDisksFree == LOW && sb.getConfigBool(plasmaSwitchboardConstants.INDEX_RECEIVE_ALLOW, false)) {
log.logInfo("disabling index receive");
sb.setConfig(plasmaSwitchboardConstants.INDEX_RECEIVE_ALLOW, false);
sb.webIndex.seedDB.mySeed().setFlagAcceptRemoteIndex(false);
sb.webIndex.peers().mySeed().setFlagAcceptRemoteIndex(false);
}
}
else {

View File

@ -260,7 +260,7 @@ public class SitemapParser extends DefaultHandler {
final String dbocc = this.sb.urlExists(nexturlhash);
if ((dbocc != null) && (dbocc.equalsIgnoreCase("loaded"))) {
// the url was already loaded. we need to check the date
final MetadataRowContainer oldEntry = this.sb.webIndex.getURL(nexturlhash, null, 0);
final MetadataRowContainer oldEntry = this.sb.webIndex.metadata().load(nexturlhash, null, 0);
if (oldEntry != null) {
final Date modDate = oldEntry.moddate();
// check if modDate is null
@ -272,7 +272,7 @@ public class SitemapParser extends DefaultHandler {
// URL needs to crawled
this.sb.crawlStacker.enqueueEntry(new CrawlEntry(
this.sb.webIndex.seedDB.mySeed().hash,
this.sb.webIndex.peers().mySeed().hash,
url,
null, // this.siteMapURL.toString(),
this.nextURL,

View File

@ -243,7 +243,7 @@ public class bookmarksDB {
Pattern.compile(newcrawlingMustMatch);
String urlhash = crawlingStartURL.hash();
sb.webIndex.removeURL(urlhash);
sb.webIndex.metadata().remove(urlhash);
sb.crawlQueues.noticeURL.removeByURLHash(urlhash);
sb.crawlQueues.errorURL.remove(urlhash);
@ -259,7 +259,7 @@ public class bookmarksDB {
indexText, indexMedia,
storeHTCache, true, crawlOrder, xsstopw, xdstopw, xpstopw);
sb.crawlStacker.enqueueEntry(new CrawlEntry(
sb.webIndex.seedDB.mySeed().hash,
sb.webIndex.peers().mySeed().hash,
crawlingStartURL,
null,
"CRAWLING-ROOT",
@ -287,7 +287,7 @@ public class bookmarksDB {
m.remove("generalFilter");
m.remove("specificFilter");
m.put("intention", "Automatic ReCrawl!");
sb.webIndex.seedDB.newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.seedDB.mySeed(), yacyNewsPool.CATEGORY_CRAWL_START, m));
sb.webIndex.peers().newsPool.publishMyNews(yacyNewsRecord.newRecord(sb.webIndex.peers().mySeed(), yacyNewsPool.CATEGORY_CRAWL_START, m));
}
} catch (MalformedURLException e1) {}
} // if

View File

@ -28,7 +28,7 @@ public abstract class abstractWikiParser implements wikiParser {
return transform(
new BufferedReader(new StringReader(content)),
content.length(),
sb.webIndex.seedDB.mySeed().getPublicAddress(),
sb.webIndex.peers().mySeed().getPublicAddress(),
sb);
} catch (final IOException e) {
return "internal error: " + e.getMessage();
@ -74,7 +74,7 @@ public abstract class abstractWikiParser implements wikiParser {
return transform(
new BufferedReader(new InputStreamReader(bais, encoding)),
content.length,
sb.webIndex.seedDB.mySeed().getPublicAddress(),
sb.webIndex.peers().mySeed().getPublicAddress(),
switchboard);
} catch (final IOException e) {
return "internal error: " + e.getMessage();

View File

@ -126,7 +126,7 @@ public class knwikiParser implements wikiParser {
tokens = new Token[] {
new SimpleToken('=', '=', new String[][] { null, { "h2" }, { "h3" }, { "h4" } }, true),
new SimpleToken('\'', '\'', new String[][] { null, { "i" }, { "b" }, null, { "b", "i" } }, false),
new LinkToken((publicAddress == null) ? sb.webIndex.seedDB.mySeed().getPublicAddress() : publicAddress, "Wiki.html?page=", sb),
new LinkToken((publicAddress == null) ? sb.webIndex.peers().mySeed().getPublicAddress() : publicAddress, "Wiki.html?page=", sb),
new ListToken('*', "ul"),
new ListToken('#', "ol"),
new ListToken(':', "blockquote", null),

View File

@ -787,7 +787,7 @@ public final class httpdFileHandler {
templatePatterns.put(servletProperties.PEER_STAT_VERSION, switchboard.getConfig("version", ""));
templatePatterns.put(servletProperties.PEER_STAT_UPTIME, ((System.currentTimeMillis() - serverCore.startupTime) / 1000) / 60); // uptime in minutes
templatePatterns.putHTML(servletProperties.PEER_STAT_CLIENTNAME, switchboard.getConfig("peerName", "anomic"));
templatePatterns.putHTML(servletProperties.PEER_STAT_CLIENTID, ((plasmaSwitchboard) switchboard).webIndex.seedDB.myID());
templatePatterns.putHTML(servletProperties.PEER_STAT_CLIENTID, ((plasmaSwitchboard) switchboard).webIndex.peers().myID());
templatePatterns.put(servletProperties.PEER_STAT_MYTIME, DateFormatter.formatShortSecond());
//System.out.println("respond props: " + ((tp == null) ? "null" : tp.toString())); // debug
} catch (final InvocationTargetException e) {

View File

@ -93,8 +93,8 @@ public class plasmaDbImporter extends AbstractImporter implements Importer {
try {
this.log.logInfo("Importing DB from '" + this.importWordIndex.getLocation(true).getAbsolutePath() + "'");
this.log.logInfo("Home word index contains " + homeWordIndex.size() + " words and " + homeWordIndex.countURL() + " URLs.");
this.log.logInfo("Import word index contains " + this.importWordIndex.size() + " words and " + this.importWordIndex.countURL() + " URLs.");
this.log.logInfo("Home word index contains " + homeWordIndex.size() + " words and " + homeWordIndex.metadata().size() + " URLs.");
this.log.logInfo("Import word index contains " + this.importWordIndex.size() + " words and " + this.importWordIndex.metadata().size() + " URLs.");
final HashSet<String> unknownUrlBuffer = new HashSet<String>();
final HashSet<String> importedUrlBuffer = new HashSet<String>();
@ -141,11 +141,11 @@ public class plasmaDbImporter extends AbstractImporter implements Importer {
// we need to import the url
// getting the url entry
final MetadataRowContainer urlEntry = this.importWordIndex.getURL(urlHash, null, 0);
final MetadataRowContainer urlEntry = this.importWordIndex.metadata().load(urlHash, null, 0);
if (urlEntry != null) {
/* write it into the home url db */
homeWordIndex.putURL(urlEntry);
homeWordIndex.metadata().store(urlEntry);
importedUrlBuffer.add(urlHash);
this.urlCounter++;
@ -212,8 +212,8 @@ public class plasmaDbImporter extends AbstractImporter implements Importer {
}
}
this.log.logInfo("Home word index contains " + homeWordIndex.size() + " words and " + homeWordIndex.countURL() + " URLs.");
this.log.logInfo("Import word index contains " + this.importWordIndex.size() + " words and " + this.importWordIndex.countURL() + " URLs.");
this.log.logInfo("Home word index contains " + homeWordIndex.size() + " words and " + homeWordIndex.metadata().size() + " URLs.");
this.log.logInfo("Import word index contains " + this.importWordIndex.size() + " words and " + this.importWordIndex.metadata().size() + " URLs.");
} catch (final Exception e) {
this.log.logSevere("Database import failed.",e);
e.printStackTrace();

View File

@ -73,7 +73,7 @@ public class plasmaSearchAPI {
yacySeed seed;
int hc = 0;
prop.put("searchresult_keyhash", startHash);
final Iterator<yacySeed> e = PeerSelection.getAcceptRemoteIndexSeeds(sb.webIndex.seedDB, startHash, sb.webIndex.seedDB.sizeConnected(), true);
final Iterator<yacySeed> e = PeerSelection.getAcceptRemoteIndexSeeds(sb.webIndex.peers(), startHash, sb.webIndex.peers().sizeConnected(), true);
while (e.hasNext()) {
seed = e.next();
if (seed != null) {

View File

@ -183,7 +183,7 @@ public final class plasmaSearchEvent {
IAmaxcounthash = wordhash;
maxcount = container.size();
}
l = FlatWordPartitionScheme.std.dhtDistance(wordhash, null, wordIndex.seedDB.mySeed());
l = FlatWordPartitionScheme.std.dhtDistance(wordhash, null, wordIndex.peers().mySeed());
if (l < mindhtdistance) {
// calculate the word hash that is closest to our dht position
mindhtdistance = l;
@ -345,7 +345,7 @@ public final class plasmaSearchEvent {
} else {
// problems with snippet fetch
registerFailure(page.hash(), "no text snippet for URL " + metadata.url());
if (!wordIndex.seedDB.mySeed().isVirgin()) plasmaSnippetCache.failConsequences(snippet, query.id(false));
if (!wordIndex.peers().mySeed().isVirgin()) plasmaSnippetCache.failConsequences(snippet, query.id(false));
return null;
}
} else {
@ -708,7 +708,7 @@ public final class plasmaSearchEvent {
Iterator<Map.Entry<String, String>> i1 = abstractJoin.entrySet().iterator();
Map.Entry<String, String> entry1;
String url, urls, peer, peers;
final String mypeerhash = wordIndex.seedDB.mySeed().hash;
final String mypeerhash = wordIndex.peers().mySeed().hash;
boolean mypeerinvolved = false;
int mypeercount;
while (i1.hasNext()) {
@ -819,7 +819,7 @@ public final class plasmaSearchEvent {
// translate host into current IP
int p = host.indexOf(".");
final String hash = yacySeed.hexHash2b64Hash(host.substring(p + 1, host.length() - 6));
final yacySeed seed = wordIndex.seedDB.getConnected(hash);
final yacySeed seed = wordIndex.peers().getConnected(hash);
final String filename = urlcomps.url().getFile();
String address = null;
if ((seed == null) || ((address = seed.getPublicAddress()) == null)) {
@ -831,7 +831,7 @@ public final class plasmaSearchEvent {
" " +
urlcomps.dc_title())).keySet(),
urlentry.hash());
wordIndex.removeURL(urlentry.hash()); // clean up
wordIndex.metadata().remove(urlentry.hash()); // clean up
throw new RuntimeException("index void");
}
alternative_urlstring = "http://" + address + "/" + host.substring(0, p) + filename;

View File

@ -295,7 +295,7 @@ public final class plasmaSearchRankingProcess {
if (((stack.size() == 0) && (size() == 0))) break;
final SortStack<ReferenceVars>.stackElement obrwi = bestRWI(skipDoubleDom);
if (obrwi == null) continue; // *** ? this happened and the thread was suspended silently. cause?
final MetadataRowContainer u = wordIndex.getURL(obrwi.element.urlHash(), obrwi.element, obrwi.weight.longValue());
final MetadataRowContainer u = wordIndex.metadata().load(obrwi.element.urlHash(), obrwi.element, obrwi.weight.longValue());
if (u != null) {
final URLMetadata metadata = u.metadata();
if (metadata.url() != null) this.handover.put(u.hash(), metadata.url().toNormalform(true, false)); // remember that we handed over this url

View File

@ -946,7 +946,7 @@ public class plasmaSnippetCache {
(snippet.getErrorCode() == ERROR_PARSER_FAILED) ||
(snippet.getErrorCode() == ERROR_PARSER_NO_LINES)) {
log.logInfo("error: '" + snippet.getError() + "', remove url = " + snippet.getUrl().toNormalform(false, true) + ", cause: " + snippet.getError());
plasmaSwitchboard.getSwitchboard().webIndex.removeURL(urlHash);
plasmaSwitchboard.getSwitchboard().webIndex.metadata().remove(urlHash);
final plasmaSearchEvent event = plasmaSearchEvent.getEvent(eventID);
assert plasmaSwitchboard.getSwitchboard() != null;
assert plasmaSwitchboard.getSwitchboard().webIndex != null;

View File

@ -330,8 +330,8 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
// init a DHT transmission dispatcher
this.dhtDispatcher = new Dispatcher(
webIndex,
webIndex.referenceURL,
webIndex.seedDB,
webIndex.metadata(),
webIndex.peers(),
true,
30000);
@ -430,7 +430,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
htCachePath = getConfigPath(plasmaSwitchboardConstants.HTCACHE_PATH, plasmaSwitchboardConstants.HTCACHE_PATH_DEFAULT);
this.log.logInfo("HTCACHE Path = " + htCachePath.getAbsolutePath());
final long maxCacheSize = 1024 * 1024 * Long.parseLong(getConfig(plasmaSwitchboardConstants.PROXY_CACHE_SIZE, "2")); // this is megabyte
plasmaHTCache.init(htCachePath, webIndex.seedDB.mySeed().hash, maxCacheSize);
plasmaHTCache.init(htCachePath, webIndex.peers().mySeed().hash, maxCacheSize);
// create the release download directory
releasePath = getConfigPath(plasmaSwitchboardConstants.RELEASE_PATH, plasmaSwitchboardConstants.RELEASE_PATH_DEFAULT);
@ -534,8 +534,8 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
CRDist1Target = kaskelix.de:8080,yacy.dyndns.org:8000,suma-lab.de:8080
**/
rankingOn = getConfig(plasmaSwitchboardConstants.RANKING_DIST_ON, "true").equals("true") && networkName.equals("freeworld");
rankingOwnDistribution = new plasmaRankingDistribution(log, webIndex.seedDB, new File(rankingPath, getConfig(plasmaSwitchboardConstants.RANKING_DIST_0_PATH, plasmaRankingDistribution.CR_OWN)), (int) getConfigLong(plasmaSwitchboardConstants.RANKING_DIST_0_METHOD, plasmaRankingDistribution.METHOD_ANYSENIOR), (int) getConfigLong(plasmaSwitchboardConstants.RANKING_DIST_0_METHOD, 0), getConfig(plasmaSwitchboardConstants.RANKING_DIST_0_TARGET, ""));
rankingOtherDistribution = new plasmaRankingDistribution(log, webIndex.seedDB, new File(rankingPath, getConfig(plasmaSwitchboardConstants.RANKING_DIST_1_PATH, plasmaRankingDistribution.CR_OTHER)), (int) getConfigLong(plasmaSwitchboardConstants.RANKING_DIST_1_METHOD, plasmaRankingDistribution.METHOD_MIXEDSENIOR), (int) getConfigLong(plasmaSwitchboardConstants.RANKING_DIST_1_METHOD, 30), getConfig(plasmaSwitchboardConstants.RANKING_DIST_1_TARGET, "kaskelix.de:8080,yacy.dyndns.org:8000"));
rankingOwnDistribution = new plasmaRankingDistribution(log, webIndex.peers(), new File(rankingPath, getConfig(plasmaSwitchboardConstants.RANKING_DIST_0_PATH, plasmaRankingDistribution.CR_OWN)), (int) getConfigLong(plasmaSwitchboardConstants.RANKING_DIST_0_METHOD, plasmaRankingDistribution.METHOD_ANYSENIOR), (int) getConfigLong(plasmaSwitchboardConstants.RANKING_DIST_0_METHOD, 0), getConfig(plasmaSwitchboardConstants.RANKING_DIST_0_TARGET, ""));
rankingOtherDistribution = new plasmaRankingDistribution(log, webIndex.peers(), new File(rankingPath, getConfig(plasmaSwitchboardConstants.RANKING_DIST_1_PATH, plasmaRankingDistribution.CR_OTHER)), (int) getConfigLong(plasmaSwitchboardConstants.RANKING_DIST_1_METHOD, plasmaRankingDistribution.METHOD_MIXEDSENIOR), (int) getConfigLong(plasmaSwitchboardConstants.RANKING_DIST_1_METHOD, 30), getConfig(plasmaSwitchboardConstants.RANKING_DIST_1_TARGET, "kaskelix.de:8080,yacy.dyndns.org:8000"));
// init nameCacheNoCachingList
serverDomains.setNoCachingPatterns(getConfig(plasmaSwitchboardConstants.HTTPC_NAME_CACHE_CACHING_PATTERNS_NO,""));
@ -573,7 +573,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
// before we do that, we wait some time until the seed list is loaded.
//while (((System.currentTimeMillis() - startedSeedListAquisition) < 8000) && (this.webIndex.seedDB.sizeConnected() == 0)) try {Thread.sleep(1000);} catch (final InterruptedException e) {}
try {Thread.sleep(1000);} catch (final InterruptedException e) {}
this.clusterhashes = this.webIndex.seedDB.clusterHashes(getConfig("cluster.peers.yacydomain", ""));
this.clusterhashes = this.webIndex.peers().clusterHashes(getConfig("cluster.peers.yacydomain", ""));
// deploy blocking threads
indexingStorageProcessor = new serverProcessor<indexingQueueEntry>(
@ -913,12 +913,12 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
// tests if hash occurrs in any database
// if it exists, the name of the database is returned,
// if it not exists, null is returned
if (webIndex.existsURL(hash)) return "loaded";
if (webIndex.metadata().exists(hash)) return "loaded";
return this.crawlQueues.urlExists(hash);
}
public void urlRemove(final String hash) {
webIndex.removeURL(hash);
webIndex.metadata().remove(hash);
crawlResults.remove(hash);
crawlQueues.urlRemove(hash);
}
@ -928,7 +928,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
if (urlhash.length() == 0) return null;
final yacyURL ne = crawlQueues.getURL(urlhash);
if (ne != null) return ne;
final MetadataRowContainer le = webIndex.getURL(urlhash, null, 0);
final MetadataRowContainer le = webIndex.metadata().load(urlhash, null, 0);
if (le != null) return le.metadata().url();
return null;
}
@ -1145,7 +1145,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
// flush some entries from the RAM cache
webIndex.flushCacheFor(5000);
// empty some caches
webIndex.clearCache();
webIndex.metadata().clearCache();
plasmaSearchEvent.cleanupEvents(true);
}
@ -1278,7 +1278,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
// clear caches if necessary
if (!MemoryControl.request(8000000L, false)) {
webIndex.clearCache();
webIndex.metadata().clearCache();
plasmaSearchEvent.cleanupEvents(true);
}
@ -1362,20 +1362,20 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
// clean up news
checkInterruption();
try {
if (this.log.isFine()) log.logFine("Cleaning Incoming News, " + this.webIndex.seedDB.newsPool.size(yacyNewsPool.INCOMING_DB) + " entries on stack");
if (this.webIndex.seedDB.newsPool.automaticProcess(webIndex.seedDB) > 0) hasDoneSomething = true;
if (this.log.isFine()) log.logFine("Cleaning Incoming News, " + this.webIndex.peers().newsPool.size(yacyNewsPool.INCOMING_DB) + " entries on stack");
if (this.webIndex.peers().newsPool.automaticProcess(webIndex.peers()) > 0) hasDoneSomething = true;
} catch (final IOException e) {}
if (getConfigBool("cleanup.deletionProcessedNews", true)) {
this.webIndex.seedDB.newsPool.clear(yacyNewsPool.PROCESSED_DB);
this.webIndex.peers().newsPool.clear(yacyNewsPool.PROCESSED_DB);
}
if (getConfigBool("cleanup.deletionPublishedNews", true)) {
this.webIndex.seedDB.newsPool.clear(yacyNewsPool.PUBLISHED_DB);
this.webIndex.peers().newsPool.clear(yacyNewsPool.PUBLISHED_DB);
}
// clean up seed-dbs
if(getConfigBool("routing.deleteOldSeeds.permission",true)) {
final long deleteOldSeedsTime = getConfigLong("routing.deleteOldSeeds.time",7)*24*3600000;
Iterator<yacySeed> e = this.webIndex.seedDB.seedsSortedDisconnected(true,yacySeed.LASTSEEN);
Iterator<yacySeed> e = this.webIndex.peers().seedsSortedDisconnected(true,yacySeed.LASTSEEN);
yacySeed seed = null;
final ArrayList<String> deleteQueue = new ArrayList<String>();
checkInterruption();
@ -1389,9 +1389,9 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
deleteQueue.add(seed.hash);
}
}
for(int i=0;i<deleteQueue.size();++i) this.webIndex.seedDB.removeDisconnected(deleteQueue.get(i));
for(int i=0;i<deleteQueue.size();++i) this.webIndex.peers().removeDisconnected(deleteQueue.get(i));
deleteQueue.clear();
e = this.webIndex.seedDB.seedsSortedPotential(true,yacySeed.LASTSEEN);
e = this.webIndex.peers().seedsSortedPotential(true,yacySeed.LASTSEEN);
checkInterruption();
//clean potential seeds
while(e.hasNext()) {
@ -1403,7 +1403,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
deleteQueue.add(seed.hash);
}
}
for (int i = 0; i < deleteQueue.size(); ++i) this.webIndex.seedDB.removePotential(deleteQueue.get(i));
for (int i = 0; i < deleteQueue.size(); ++i) this.webIndex.peers().removePotential(deleteQueue.get(i));
}
// check if update is available and
@ -1426,7 +1426,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
}
// initiate broadcast about peer startup to spread supporter url
if (this.webIndex.seedDB.newsPool.size(yacyNewsPool.OUTGOING_DB) == 0) {
if (this.webIndex.peers().newsPool.size(yacyNewsPool.OUTGOING_DB) == 0) {
// read profile
final Properties profile = new Properties();
FileInputStream fileIn = null;
@ -1441,12 +1441,12 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
if ((homepage != null) && (homepage.length() > 10)) {
final Properties news = new Properties();
news.put("homepage", profile.get("homepage"));
this.webIndex.seedDB.newsPool.publishMyNews(yacyNewsRecord.newRecord(webIndex.seedDB.mySeed(), yacyNewsPool.CATEGORY_PROFILE_BROADCAST, news));
this.webIndex.peers().newsPool.publishMyNews(yacyNewsRecord.newRecord(webIndex.peers().mySeed(), yacyNewsPool.CATEGORY_PROFILE_BROADCAST, news));
}
}
// update the cluster set
this.clusterhashes = this.webIndex.seedDB.clusterHashes(getConfig("cluster.peers.yacydomain", ""));
this.clusterhashes = this.webIndex.peers().clusterHashes(getConfig("cluster.peers.yacydomain", ""));
// after all clean up is done, check the resource usage
@ -1655,7 +1655,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
crawlResults.stack(
newEntry, // loaded url db entry
queueEntry.initiator(), // initiator peer hash
this.webIndex.seedDB.mySeed().hash, // executor peer hash
this.webIndex.peers().mySeed().hash, // executor peer hash
processCase // process case
);
@ -1697,7 +1697,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
this.reference = reference;
}
public void run() {
yacyClient.crawlReceipt(webIndex.seedDB.mySeed(), initiatorPeer, "crawl", "fill", "indexed", reference, "");
yacyClient.crawlReceipt(webIndex.peers().mySeed(), initiatorPeer, "crawl", "fill", "indexed", reference, "");
}
}
@ -1737,7 +1737,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
if (urlhash == null) return 0;
// determine the url string
final MetadataRowContainer entry = webIndex.getURL(urlhash, null, 0);
final MetadataRowContainer entry = webIndex.metadata().load(urlhash, null, 0);
if (entry == null) return 0;
final URLMetadata metadata = entry.metadata();
if (metadata.url() == null) return 0;
@ -1753,7 +1753,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
}
if (resource == null) {
// delete just the url entry
webIndex.removeURL(urlhash);
webIndex.metadata().remove(urlhash);
return 0;
} else {
resourceContent = (InputStream) resource[0];
@ -1775,7 +1775,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
if (words != null) count = webIndex.removeWordReferences(words, urlhash);
// finally delete the url entry itself
webIndex.removeURL(urlhash);
webIndex.metadata().remove(urlhash);
return count;
}
} catch (final ParserException e) {
@ -1868,16 +1868,16 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
}
public String dhtShallTransfer() {
if (this.webIndex.seedDB == null) {
if (this.webIndex.peers() == null) {
return "no DHT distribution: seedDB == null";
}
if (this.webIndex.seedDB.mySeed() == null) {
if (this.webIndex.peers().mySeed() == null) {
return "no DHT distribution: mySeed == null";
}
if (this.webIndex.seedDB.mySeed().isVirgin()) {
if (this.webIndex.peers().mySeed().isVirgin()) {
return "no DHT distribution: status is virgin";
}
if (this.webIndex.seedDB.noDHTActivity()) {
if (this.webIndex.peers().noDHTActivity()) {
return "no DHT distribution: network too small";
}
if (!this.getConfigBool("network.unit.dht", true)) {
@ -1886,8 +1886,8 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
if (getConfig(plasmaSwitchboardConstants.INDEX_DIST_ALLOW, "false").equalsIgnoreCase("false")) {
return "no DHT distribution: not enabled (ser setting)";
}
if (webIndex.countURL() < 10) {
return "no DHT distribution: loadedURL.size() = " + webIndex.countURL();
if (webIndex.metadata().size() < 10) {
return "no DHT distribution: loadedURL.size() = " + webIndex.metadata().size();
}
if (webIndex.size() < 100) {
return "no DHT distribution: not enough words - wordIndex.size() = " + webIndex.size();
@ -1908,14 +1908,14 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
return false;
}
boolean hasDoneSomething = false;
if (this.dhtDispatcher.cloudSize() > this.webIndex.seedDB.scheme.verticalPartitions() * 4) {
if (this.dhtDispatcher.cloudSize() > this.webIndex.peers().scheme.verticalPartitions() * 4) {
log.logInfo("dhtTransferJob: no selection, too many entries in transmission cloud: " + this.dhtDispatcher.cloudSize());
} else if (MemoryControl.available() < 1024*1024*25) {
log.logInfo("dhtTransferJob: no selection, too less memory available : " + (MemoryControl.available() / 1024 / 1024) + " MB");
} else {
String startHash = PeerSelection.selectTransferStart();
log.logInfo("dhtTransferJob: selected " + startHash + " as start hash");
String limitHash = PeerSelection.limitOver(this.webIndex.seedDB, startHash);
String limitHash = PeerSelection.limitOver(this.webIndex.peers(), startHash);
log.logInfo("dhtTransferJob: selected " + limitHash + " as limit hash");
try {
boolean enqueued = this.dhtDispatcher.selectContainersEnqueueToCloud(
@ -1979,28 +1979,28 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
// generate new peer name
setConfig("peerName", yacySeed.makeDefaultPeerName());
}
webIndex.seedDB.mySeed().put(yacySeed.NAME, getConfig("peerName", "nameless"));
webIndex.seedDB.mySeed().put(yacySeed.PORT, Integer.toString(serverCore.getPortNr(getConfig("port", "8080"))));
webIndex.peers().mySeed().put(yacySeed.NAME, getConfig("peerName", "nameless"));
webIndex.peers().mySeed().put(yacySeed.PORT, Integer.toString(serverCore.getPortNr(getConfig("port", "8080"))));
//the speed of indexing (pages/minute) of the peer
final long uptime = (System.currentTimeMillis() - serverCore.startupTime) / 1000;
webIndex.seedDB.mySeed().put(yacySeed.ISPEED, Integer.toString(currentPPM()));
webIndex.peers().mySeed().put(yacySeed.ISPEED, Integer.toString(currentPPM()));
totalQPM = requestedQueries * 60d / Math.max(uptime, 1d);
webIndex.seedDB.mySeed().put(yacySeed.RSPEED, Double.toString(totalQPM /*Math.max((float) requestcdiff, 0f) * 60f / Math.max((float) uptimediff, 1f)*/ ));
webIndex.peers().mySeed().put(yacySeed.RSPEED, Double.toString(totalQPM /*Math.max((float) requestcdiff, 0f) * 60f / Math.max((float) uptimediff, 1f)*/ ));
webIndex.seedDB.mySeed().put(yacySeed.UPTIME, Long.toString(uptime/60)); // the number of minutes that the peer is up in minutes/day (moving average MA30)
webIndex.seedDB.mySeed().put(yacySeed.LCOUNT, Integer.toString(webIndex.countURL())); // the number of links that the peer has stored (LURL's)
webIndex.seedDB.mySeed().put(yacySeed.NCOUNT, Integer.toString(crawlQueues.noticeURL.size())); // the number of links that the peer has noticed, but not loaded (NURL's)
webIndex.seedDB.mySeed().put(yacySeed.RCOUNT, Integer.toString(crawlQueues.noticeURL.stackSize(NoticedURL.STACK_TYPE_LIMIT))); // the number of links that the peer provides for remote crawling (ZURL's)
webIndex.seedDB.mySeed().put(yacySeed.ICOUNT, Integer.toString(webIndex.size())); // the minimum number of words that the peer has indexed (as it says)
webIndex.seedDB.mySeed().put(yacySeed.SCOUNT, Integer.toString(webIndex.seedDB.sizeConnected())); // the number of seeds that the peer has stored
webIndex.seedDB.mySeed().put(yacySeed.CCOUNT, Double.toString(((int) ((webIndex.seedDB.sizeConnected() + webIndex.seedDB.sizeDisconnected() + webIndex.seedDB.sizePotential()) * 60.0 / (uptime + 1.01)) * 100) / 100.0)); // the number of clients that the peer connects (as connects/hour)
webIndex.seedDB.mySeed().put(yacySeed.VERSION, getConfig("version", ""));
webIndex.seedDB.mySeed().setFlagDirectConnect(true);
webIndex.seedDB.mySeed().setLastSeenUTC();
webIndex.seedDB.mySeed().put(yacySeed.UTC, DateFormatter.UTCDiffString());
webIndex.seedDB.mySeed().setFlagAcceptRemoteCrawl(getConfig("crawlResponse", "").equals("true"));
webIndex.seedDB.mySeed().setFlagAcceptRemoteIndex(getConfig("allowReceiveIndex", "").equals("true"));
webIndex.peers().mySeed().put(yacySeed.UPTIME, Long.toString(uptime/60)); // the number of minutes that the peer is up in minutes/day (moving average MA30)
webIndex.peers().mySeed().put(yacySeed.LCOUNT, Integer.toString(webIndex.metadata().size())); // the number of links that the peer has stored (LURL's)
webIndex.peers().mySeed().put(yacySeed.NCOUNT, Integer.toString(crawlQueues.noticeURL.size())); // the number of links that the peer has noticed, but not loaded (NURL's)
webIndex.peers().mySeed().put(yacySeed.RCOUNT, Integer.toString(crawlQueues.noticeURL.stackSize(NoticedURL.STACK_TYPE_LIMIT))); // the number of links that the peer provides for remote crawling (ZURL's)
webIndex.peers().mySeed().put(yacySeed.ICOUNT, Integer.toString(webIndex.size())); // the minimum number of words that the peer has indexed (as it says)
webIndex.peers().mySeed().put(yacySeed.SCOUNT, Integer.toString(webIndex.peers().sizeConnected())); // the number of seeds that the peer has stored
webIndex.peers().mySeed().put(yacySeed.CCOUNT, Double.toString(((int) ((webIndex.peers().sizeConnected() + webIndex.peers().sizeDisconnected() + webIndex.peers().sizePotential()) * 60.0 / (uptime + 1.01)) * 100) / 100.0)); // the number of clients that the peer connects (as connects/hour)
webIndex.peers().mySeed().put(yacySeed.VERSION, getConfig("version", ""));
webIndex.peers().mySeed().setFlagDirectConnect(true);
webIndex.peers().mySeed().setLastSeenUTC();
webIndex.peers().mySeed().put(yacySeed.UTC, DateFormatter.UTCDiffString());
webIndex.peers().mySeed().setFlagAcceptRemoteCrawl(getConfig("crawlResponse", "").equals("true"));
webIndex.peers().mySeed().setFlagAcceptRemoteIndex(getConfig("allowReceiveIndex", "").equals("true"));
//mySeed.setFlagAcceptRemoteIndex(true);
}
@ -2013,7 +2013,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
ArrayList<String> seedList;
Iterator<String> enu;
int lc;
final int sc = webIndex.seedDB.sizeConnected();
final int sc = webIndex.peers().sizeConnected();
httpResponseHeader header;
yacyCore.log.logInfo("BOOTSTRAP: " + sc + " seeds known from previous run");
@ -2059,8 +2059,8 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
while (enu.hasNext()) {
ys = yacySeed.genRemoteSeed(enu.next(), null, false);
if ((ys != null) &&
((!webIndex.seedDB.mySeedIsDefined()) || !webIndex.seedDB.mySeed().hash.equals(ys.hash))) {
if (webIndex.seedDB.peerActions.connectPeer(ys, false)) lc++;
((!webIndex.peers().mySeedIsDefined()) || !webIndex.peers().mySeed().hash.equals(ys.hash))) {
if (webIndex.peers().peerActions.connectPeer(ys, false)) lc++;
//seedDB.writeMap(ys.hash, ys.getMap(), "init");
//System.out.println("BOOTSTRAP: received peer " + ys.get(yacySeed.NAME, "anonymous") + "/" + ys.getAddress());
//lc++;
@ -2078,7 +2078,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
}
}
}
yacyCore.log.logInfo("BOOTSTRAP: " + (webIndex.seedDB.sizeConnected() - sc) + " new seeds while bootstraping.");
yacyCore.log.logInfo("BOOTSTRAP: " + (webIndex.peers().sizeConnected() - sc) + " new seeds while bootstraping.");
}
public void checkInterruption() throws InterruptedException {

View File

@ -51,14 +51,12 @@ import de.anomic.kelondro.text.Index;
import de.anomic.kelondro.text.IndexCache;
import de.anomic.kelondro.text.IndexCollection;
import de.anomic.kelondro.text.MetadataRowContainer;
import de.anomic.kelondro.text.Reference;
import de.anomic.kelondro.text.ReferenceContainer;
import de.anomic.kelondro.text.ReferenceContainerOrder;
import de.anomic.kelondro.text.ReferenceRow;
import de.anomic.kelondro.text.MetadataRepository;
import de.anomic.kelondro.text.Word;
import de.anomic.kelondro.text.Blacklist;
import de.anomic.kelondro.text.MetadataRepository.Export;
import de.anomic.kelondro.util.MemoryControl;
import de.anomic.kelondro.util.kelondroException;
import de.anomic.kelondro.util.Log;
@ -98,8 +96,8 @@ public final class plasmaWordIndex implements Index {
private final IndexCache indexCache;
private final IndexCollection collections; // new database structure to replace AssortmentCluster and FileCluster
private final Log log;
public MetadataRepository referenceURL;
public final yacySeedDB seedDB;
private MetadataRepository metadata;
private final yacySeedDB peers;
private final File primaryRoot, secondaryRoot;
public IndexingStack queuePreStack;
public CrawlProfile profilesActiveCrawls, profilesPassiveCrawls;
@ -170,7 +168,7 @@ public final class plasmaWordIndex implements Index {
useCommons);
// create LURL-db
referenceURL = new MetadataRepository(new File(this.secondaryRoot, "TEXT"));
metadata = new MetadataRepository(new File(this.secondaryRoot, "TEXT"));
// make crawl profiles database and default profiles
this.queuesRoot = new File(this.primaryRoot, "QUEUES");
@ -233,7 +231,7 @@ public final class plasmaWordIndex implements Index {
final File mySeedFile = new File(networkRoot, yacySeedDB.DBFILE_OWN_SEED);
final File oldSeedFile = new File(new File(indexPrimaryRoot.getParentFile(), "YACYDB"), "mySeed.txt");
if (oldSeedFile.exists()) oldSeedFile.renameTo(mySeedFile);
seedDB = new yacySeedDB(
peers = new yacySeedDB(
networkRoot,
"seed.new.heap",
"seed.old.heap",
@ -243,8 +241,12 @@ public final class plasmaWordIndex implements Index {
partitionExponent);
}
public void clearCache() {
referenceURL.clearCache();
public MetadataRepository metadata() {
return this.metadata;
}
public yacySeedDB peers() {
return this.peers;
}
public void clear() {
@ -255,7 +257,7 @@ public final class plasmaWordIndex implements Index {
e.printStackTrace();
}
try {
referenceURL.clear();
metadata.clear();
} catch (final IOException e) {
e.printStackTrace();
}
@ -375,54 +377,6 @@ public final class plasmaWordIndex implements Index {
public File getLocation(final boolean primary) {
return (primary) ? this.primaryRoot : this.secondaryRoot;
}
public void putURL(final MetadataRowContainer entry) throws IOException {
this.referenceURL.store(entry);
}
public MetadataRowContainer getURL(final String urlHash, final Reference searchedWord, final long ranking) {
return this.referenceURL.load(urlHash, searchedWord, ranking);
}
public boolean removeURL(final String urlHash) {
return this.referenceURL.remove(urlHash);
}
public boolean existsURL(final String urlHash) {
return this.referenceURL.exists(urlHash);
}
public int countURL() {
return this.referenceURL.size();
}
public Export exportURL(final File f, final String filter, final int format, final boolean dom) {
return this.referenceURL.export(f, filter, null, format, dom);
}
public Export exportURL() {
return this.referenceURL.export();
}
public CloneableIterator<MetadataRowContainer> entriesURL(final boolean up, final String firstHash) throws IOException {
return this.referenceURL.entries(up, firstHash);
}
public Iterator<MetadataRepository.hostStat> statistics(int count) throws IOException {
return this.referenceURL.statistics(count);
}
public int deleteDomain(String urlfragment) throws IOException {
return this.referenceURL.deleteDomain(urlfragment);
}
public MetadataRepository.BlacklistCleaner getURLCleaner(final Blacklist blacklist) {
return this.referenceURL.getBlacklistCleaner(blacklist); // thread is not already started after this is called!
}
public int getURLwriteCacheSize() {
return this.referenceURL.writeCacheSize();
}
public int minMem() {
return 1024*1024 /* indexing overhead */ + indexCache.minMem() + collections.minMem();
@ -696,8 +650,8 @@ public final class plasmaWordIndex implements Index {
public void close() {
indexCache.close();
collections.close();
referenceURL.close();
seedDB.close();
metadata.close();
peers.close();
profilesActiveCrawls.close();
queuePreStack.close();
}
@ -866,7 +820,7 @@ public final class plasmaWordIndex implements Index {
);
// STORE URL TO LOADED-URL-DB
putURL(newEntry);
metadata.store(newEntry);
final long storageEndTime = System.currentTimeMillis();
@ -895,7 +849,7 @@ public final class plasmaWordIndex implements Index {
"Anchors: " + ((document.getAnchors() == null) ? 0 : document.getAnchors().size()) +
"\n\tLinkStorageTime: " + (storageEndTime - startTime) + " ms | " +
"indexStorageTime: " + (indexingEndTime - storageEndTime) + " ms");
RSSFeed.channels((entry.initiator().equals(seedDB.mySeed().hash)) ? RSSFeed.LOCALINDEXING : RSSFeed.REMOTEINDEXING).addMessage(new RSSMessage("Indexed web page", dc_title, entry.url().toNormalform(true, false)));
RSSFeed.channels((entry.initiator().equals(peers.mySeed().hash)) ? RSSFeed.LOCALINDEXING : RSSFeed.REMOTEINDEXING).addMessage(new RSSMessage("Indexed web page", dc_title, entry.url().toNormalform(true, false)));
}
// finished
@ -965,7 +919,7 @@ public final class plasmaWordIndex implements Index {
entry = containerIterator.next();
// System.out.println("Wordhash: "+wordHash+" UrlHash:
// "+entry.getUrlHash());
final MetadataRowContainer ue = referenceURL.load(entry.urlHash(), entry, 0);
final MetadataRowContainer ue = metadata.load(entry.urlHash(), entry, 0);
if (ue == null) {
urlHashs.add(entry.urlHash());
} else {

View File

@ -191,13 +191,13 @@ public class urlRedirectord implements serverHandler, Cloneable {
) {
// first delete old entry, if exists
final String urlhash = reqURL.hash();
sb.webIndex.removeURL(urlhash);
sb.webIndex.metadata().remove(urlhash);
sb.crawlQueues.noticeURL.removeByURLHash(urlhash);
sb.crawlQueues.errorURL.remove(urlhash);
// enqueuing URL for crawling
sb.crawlStacker.enqueueEntry(new CrawlEntry(
sb.webIndex.seedDB.mySeed().hash,
sb.webIndex.peers().mySeed().hash,
reqURL,
null,
"URL Redirector",

View File

@ -569,7 +569,7 @@ public final class yacyClient {
// passed all checks, store url
try {
wordIndex.putURL(urlEntry);
wordIndex.metadata().store(urlEntry);
crawlResults.stack(urlEntry, mySeed.hash, target.hash, 2);
} catch (final IOException e) {
yacyCore.log.logSevere("could not store search result", e);
@ -1074,7 +1074,7 @@ public final class yacyClient {
final plasmaSwitchboard sb = new plasmaSwitchboard(new File(args[0]), "httpProxy.init", "DATA/SETTINGS/yacy.conf", false);
/*final yacyCore core =*/ new yacyCore(sb);
sb.loadSeedLists();
final yacySeed target = sb.webIndex.seedDB.getConnected(args[1]);
final yacySeed target = sb.webIndex.peers().getConnected(args[1]);
final String wordhashe = Word.word2hash("test");
//System.out.println("permission=" + permissionMessage(args[1]));
@ -1082,9 +1082,9 @@ public final class yacyClient {
reqHeader.put(httpRequestHeader.USER_AGENT, HTTPLoader.crawlerUserAgent);
final byte[] content = httpClient.wget(
"http://" + target.getPublicAddress() + "/yacy/search.html" +
"?myseed=" + sb.webIndex.seedDB.mySeed().genSeedStr(null) +
"?myseed=" + sb.webIndex.peers().mySeed().genSeedStr(null) +
"&youare=" + target.hash + "&key=" +
"&myseed=" + sb.webIndex.seedDB.mySeed() .genSeedStr(null) +
"&myseed=" + sb.webIndex.peers().mySeed() .genSeedStr(null) +
"&count=10" +
"&resource=global" +
"&query=" + wordhashe +

View File

@ -103,7 +103,7 @@ public class yacyCore {
final String staticIP = sb.getConfig("staticIP", "");
if (staticIP.length() != 0 && yacySeed.isProperIP(staticIP) == null) {
serverCore.useStaticIP = true;
sb.webIndex.seedDB.mySeed().setIP(staticIP);
sb.webIndex.peers().mySeed().setIP(staticIP);
log.logInfo("staticIP set to "+ staticIP);
} else {
serverCore.useStaticIP = false;
@ -134,11 +134,11 @@ public class yacyCore {
yacyCore.log.logDebug("***DEBUG publishSeedList: I can reach myself");
*/
if ((sb.webIndex.seedDB.lastSeedUpload_myIP.equals(sb.webIndex.seedDB.mySeed().getIP())) &&
(sb.webIndex.seedDB.lastSeedUpload_seedDBSize == sb.webIndex.seedDB.sizeConnected()) &&
if ((sb.webIndex.peers().lastSeedUpload_myIP.equals(sb.webIndex.peers().mySeed().getIP())) &&
(sb.webIndex.peers().lastSeedUpload_seedDBSize == sb.webIndex.peers().sizeConnected()) &&
(canReachMyself()) &&
(System.currentTimeMillis() - sb.webIndex.seedDB.lastSeedUpload_timeStamp < 1000 * 60 * 60 * 24) &&
(sb.webIndex.seedDB.mySeed().isPrincipal())
(System.currentTimeMillis() - sb.webIndex.peers().lastSeedUpload_timeStamp < 1000 * 60 * 60 * 24) &&
(sb.webIndex.peers().mySeed().isPrincipal())
) {
if (log.isFine()) log.logFine("yacyCore.publishSeedList: not necessary to publish: oldIP is equal, sizeConnected is equal and I can reach myself under the old IP.");
return;
@ -185,29 +185,29 @@ public class yacyCore {
sb.updateMySeed();
// publish own seed to other peer, this can every peer, but makes only sense for senior peers
if (sb.webIndex.seedDB.sizeConnected() == 0) {
if (sb.webIndex.peers().sizeConnected() == 0) {
// reload the seed lists
sb.loadSeedLists();
log.logInfo("re-initialized seed list. received " + sb.webIndex.seedDB.sizeConnected() + " new peer(s)");
log.logInfo("re-initialized seed list. received " + sb.webIndex.peers().sizeConnected() + " new peer(s)");
}
final int newSeeds = publishMySeed(false);
if (newSeeds > 0) {
log.logInfo("received " + newSeeds + " new peer(s), know a total of " + sb.webIndex.seedDB.sizeConnected() + " different peers");
log.logInfo("received " + newSeeds + " new peer(s), know a total of " + sb.webIndex.peers().sizeConnected() + " different peers");
}
}
private boolean canReachMyself() { // TODO: check if this method is necessary - depending on the used router it will not work
// returns true if we can reach ourself under our known peer address
// if we cannot reach ourself, we call a forced publishMySeed and return false
final int urlc = yacyClient.queryUrlCount(sb.webIndex.seedDB.mySeed());
final int urlc = yacyClient.queryUrlCount(sb.webIndex.peers().mySeed());
if (urlc >= 0) {
sb.webIndex.seedDB.mySeed().setLastSeenUTC();
sb.webIndex.peers().mySeed().setLastSeenUTC();
return true;
}
log.logInfo("re-connect own seed");
final String oldAddress = sb.webIndex.seedDB.mySeed().getPublicAddress();
final String oldAddress = sb.webIndex.peers().mySeed().getPublicAddress();
/*final int newSeeds =*/ publishMySeed(true);
return (oldAddress != null && oldAddress.equals(sb.webIndex.seedDB.mySeed().getPublicAddress()));
return (oldAddress != null && oldAddress.equals(sb.webIndex.peers().mySeed().getPublicAddress()));
}
protected class publishThread extends Thread {
@ -230,24 +230,24 @@ public class yacyCore {
public final void run() {
try {
this.added = yacyClient.publishMySeed(sb.webIndex.seedDB.mySeed(), sb.webIndex.seedDB.peerActions, seed.getClusterAddress(), seed.hash);
this.added = yacyClient.publishMySeed(sb.webIndex.peers().mySeed(), sb.webIndex.peers().peerActions, seed.getClusterAddress(), seed.hash);
if (this.added < 0) {
// no or wrong response, delete that address
final String cause = "peer ping to peer resulted in error response (added < 0)";
log.logInfo("publish: disconnected " + this.seed.get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_SENIOR) + " peer '" + this.seed.getName() + "' from " + this.seed.getPublicAddress() + ": " + cause);
sb.webIndex.seedDB.peerActions.peerDeparture(this.seed, cause);
sb.webIndex.peers().peerActions.peerDeparture(this.seed, cause);
} else {
// success! we have published our peer to a senior peer
// update latest news from the other peer
log.logInfo("publish: handshaked " + this.seed.get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_SENIOR) + " peer '" + this.seed.getName() + "' at " + this.seed.getPublicAddress());
// check if seed's lastSeen has been updated
final yacySeed newSeed = sb.webIndex.seedDB.getConnected(this.seed.hash);
final yacySeed newSeed = sb.webIndex.peers().getConnected(this.seed.hash);
if (newSeed != null) {
if (!newSeed.isOnline()) {
if (log.isFine()) log.logFine("publish: recently handshaked " + this.seed.get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_SENIOR) +
" peer '" + this.seed.getName() + "' at " + this.seed.getPublicAddress() + " is not online." +
" Removing Peer from connected");
sb.webIndex.seedDB.peerActions.peerDeparture(newSeed, "peer not online");
sb.webIndex.peers().peerActions.peerDeparture(newSeed, "peer not online");
} else
if (newSeed.getLastSeenUTC() < (System.currentTimeMillis() - 10000)) {
// update last seed date
@ -256,14 +256,14 @@ public class yacyCore {
" peer '" + this.seed.getName() + "' at " + this.seed.getPublicAddress() + " with old LastSeen: '" +
DateFormatter.formatShortSecond(new Date(newSeed.getLastSeenUTC())) + "'");
newSeed.setLastSeenUTC();
sb.webIndex.seedDB.peerActions.peerArrival(newSeed, true);
sb.webIndex.peers().peerActions.peerArrival(newSeed, true);
} else {
if (log.isFine()) log.logFine("publish: recently handshaked " + this.seed.get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_SENIOR) +
" peer '" + this.seed.getName() + "' at " + this.seed.getPublicAddress() + " with old LastSeen: '" +
DateFormatter.formatShortSecond(new Date(newSeed.getLastSeenUTC())) + "', this is more recent: '" +
DateFormatter.formatShortSecond(new Date(this.seed.getLastSeenUTC())) + "'");
this.seed.setLastSeenUTC();
sb.webIndex.seedDB.peerActions.peerArrival(this.seed, true);
sb.webIndex.peers().peerActions.peerArrival(this.seed, true);
}
}
} else {
@ -300,13 +300,13 @@ public class yacyCore {
// init yacyHello-process
Map<String, yacySeed> seeds; // hash/yacySeed relation
int attempts = sb.webIndex.seedDB.sizeConnected();
int attempts = sb.webIndex.peers().sizeConnected();
// getting a list of peers to contact
if (sb.webIndex.seedDB.mySeed().get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_VIRGIN).equals(yacySeed.PEERTYPE_VIRGIN)) {
if (sb.webIndex.peers().mySeed().get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_VIRGIN).equals(yacySeed.PEERTYPE_VIRGIN)) {
if (attempts > PING_INITIAL) { attempts = PING_INITIAL; }
final Map<String, String> ch = plasmaSwitchboard.getSwitchboard().clusterhashes;
seeds = PeerSelection.seedsByAge(sb.webIndex.seedDB, true, attempts - ((ch == null) ? 0 : ch.size())); // best for fast connection
seeds = PeerSelection.seedsByAge(sb.webIndex.peers(), true, attempts - ((ch == null) ? 0 : ch.size())); // best for fast connection
// add also all peers from cluster if this is a public robinson cluster
if (ch != null) {
final Iterator<Map.Entry<String, String>> i = ch.entrySet().iterator();
@ -318,7 +318,7 @@ public class yacyCore {
hash = entry.getKey();
seed = seeds.get(hash);
if (seed == null) {
seed = sb.webIndex.seedDB.get(hash);
seed = sb.webIndex.peers().get(hash);
if (seed == null) continue;
}
seed.setAlternativeAddress(entry.getValue());
@ -333,7 +333,7 @@ public class yacyCore {
} else {
if (attempts > PING_MIN_RUNNING) { attempts = PING_MIN_RUNNING; }
}
seeds = PeerSelection.seedsByAge(sb.webIndex.seedDB, false, attempts); // best for seed list maintenance/cleaning
seeds = PeerSelection.seedsByAge(sb.webIndex.peers(), false, attempts); // best for seed list maintenance/cleaning
}
if ((seeds == null) || seeds.size() == 0) { return 0; }
@ -345,20 +345,20 @@ public class yacyCore {
// include a YaCyNews record to my seed
try {
final yacyNewsRecord record = sb.webIndex.seedDB.newsPool.myPublication();
final yacyNewsRecord record = sb.webIndex.peers().newsPool.myPublication();
if (record == null) {
sb.webIndex.seedDB.mySeed().put("news", "");
sb.webIndex.peers().mySeed().put("news", "");
} else {
sb.webIndex.seedDB.mySeed().put("news", de.anomic.tools.crypt.simpleEncode(record.toString()));
sb.webIndex.peers().mySeed().put("news", de.anomic.tools.crypt.simpleEncode(record.toString()));
}
} catch (final IOException e) {
log.logSevere("publishMySeed: problem with news encoding", e);
}
sb.webIndex.seedDB.mySeed().setUnusedFlags();
sb.webIndex.peers().mySeed().setUnusedFlags();
// include current citation-rank file count
sb.webIndex.seedDB.mySeed().put(yacySeed.CRWCNT, Integer.toString(sb.rankingOwnDistribution.size()));
sb.webIndex.seedDB.mySeed().put(yacySeed.CRTCNT, Integer.toString(sb.rankingOtherDistribution.size()));
sb.webIndex.peers().mySeed().put(yacySeed.CRWCNT, Integer.toString(sb.rankingOwnDistribution.size()));
sb.webIndex.peers().mySeed().put(yacySeed.CRTCNT, Integer.toString(sb.rankingOtherDistribution.size()));
int newSeeds = -1;
//if (seeds.length > 1) {
// holding a reference to all started threads
@ -381,7 +381,7 @@ public class yacyCore {
final String seederror = seed.isProper(false);
if ((address == null) || (seederror != null)) {
// we don't like that address, delete it
sb.webIndex.seedDB.peerActions.peerDeparture(seed, "peer ping to peer resulted in address = " + address + "; seederror = " + seederror);
sb.webIndex.peers().peerActions.peerDeparture(seed, "peer ping to peer resulted in address = " + address + "; seederror = " + seederror);
sync.P();
} else {
// starting a new publisher thread
@ -446,7 +446,7 @@ public class yacyCore {
if ((accessible >= PING_MIN_PEERSEEN) ||
(accessible >= notaccessible)) {
// We can be reached from a majority of other Peers
if (sb.webIndex.seedDB.mySeed().isPrincipal()) {
if (sb.webIndex.peers().mySeed().isPrincipal()) {
newPeerType = yacySeed.PEERTYPE_PRINCIPAL;
} else {
newPeerType = yacySeed.PEERTYPE_SENIOR;
@ -455,23 +455,23 @@ public class yacyCore {
// We cannot be reached from the outside
newPeerType = yacySeed.PEERTYPE_JUNIOR;
}
if (sb.webIndex.seedDB.mySeed().orVirgin().equals(newPeerType)) {
log.logInfo("PeerPing: myType is " + sb.webIndex.seedDB.mySeed().orVirgin());
if (sb.webIndex.peers().mySeed().orVirgin().equals(newPeerType)) {
log.logInfo("PeerPing: myType is " + sb.webIndex.peers().mySeed().orVirgin());
} else {
log.logInfo("PeerPing: changing myType from '" + sb.webIndex.seedDB.mySeed().orVirgin() + "' to '" + newPeerType + "'");
sb.webIndex.seedDB.mySeed().put(yacySeed.PEERTYPE, newPeerType);
log.logInfo("PeerPing: changing myType from '" + sb.webIndex.peers().mySeed().orVirgin() + "' to '" + newPeerType + "'");
sb.webIndex.peers().mySeed().put(yacySeed.PEERTYPE, newPeerType);
}
} else {
log.logInfo("PeerPing: No data, staying at myType: " + sb.webIndex.seedDB.mySeed().orVirgin());
log.logInfo("PeerPing: No data, staying at myType: " + sb.webIndex.peers().mySeed().orVirgin());
}
// success! we have published our peer to a senior peer
// update latest news from the other peer
// log.logInfo("publish: handshaked " + t.seed.get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_SENIOR) + " peer '" + t.seed.getName() + "' at " + t.seed.getAddress());
sb.webIndex.seedDB.saveMySeed();
sb.webIndex.peers().saveMySeed();
// if we have an address, we do nothing
if (sb.webIndex.seedDB.mySeed().isProper(true) == null && !force) { return 0; }
if (sb.webIndex.peers().mySeed().isProper(true) == null && !force) { return 0; }
if (newSeeds > 0) return newSeeds;
// still no success: ask own NAT or internet responder
@ -481,12 +481,12 @@ public class yacyCore {
//if (ip.equals("")) ip = natLib.retrieveIP(DI604use, DI604pw);
// yacyCore.log.logDebug("DEBUG: new IP=" + ip);
if (yacySeed.isProperIP(ip) == null) sb.webIndex.seedDB.mySeed().setIP(ip);
if (sb.webIndex.seedDB.mySeed().get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_JUNIOR).equals(yacySeed.PEERTYPE_JUNIOR)) // ???????????????
sb.webIndex.seedDB.mySeed().put(yacySeed.PEERTYPE, yacySeed.PEERTYPE_SENIOR); // to start bootstraping, we need to be recognised as PEERTYPE_SENIOR peer
if (yacySeed.isProperIP(ip) == null) sb.webIndex.peers().mySeed().setIP(ip);
if (sb.webIndex.peers().mySeed().get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_JUNIOR).equals(yacySeed.PEERTYPE_JUNIOR)) // ???????????????
sb.webIndex.peers().mySeed().put(yacySeed.PEERTYPE, yacySeed.PEERTYPE_SENIOR); // to start bootstraping, we need to be recognised as PEERTYPE_SENIOR peer
log.logInfo("publish: no recipient found, our address is " +
((sb.webIndex.seedDB.mySeed().getPublicAddress() == null) ? "unknown" : sb.webIndex.seedDB.mySeed().getPublicAddress()));
sb.webIndex.seedDB.saveMySeed();
((sb.webIndex.peers().mySeed().getPublicAddress() == null) ? "unknown" : sb.webIndex.peers().mySeed().getPublicAddress()));
sb.webIndex.peers().saveMySeed();
return 0;
} catch (final InterruptedException e) {
try {
@ -620,7 +620,7 @@ public class yacyCore {
String logt;
        // be sure that we have something to say
if (sb.webIndex.seedDB.mySeed().getPublicAddress() == null) {
if (sb.webIndex.peers().mySeed().getPublicAddress() == null) {
final String errorMsg = "We have no valid IP address until now";
log.logWarning("SaveSeedList: " + errorMsg);
return errorMsg;
@ -658,7 +658,7 @@ public class yacyCore {
// ensure that the seed file url is configured properly
yacyURL seedURL;
try {
final String seedURLStr = sb.webIndex.seedDB.mySeed().get(yacySeed.SEEDLIST, "");
final String seedURLStr = sb.webIndex.peers().mySeed().get(yacySeed.SEEDLIST, "");
if (seedURLStr.length() == 0) { throw new MalformedURLException("The seed-file url must not be empty."); }
if (!(
seedURLStr.toLowerCase().startsWith("http://") ||
@ -668,26 +668,26 @@ public class yacyCore {
}
seedURL = new yacyURL(seedURLStr, null);
} catch (final MalformedURLException e) {
final String errorMsg = "Malformed seed file URL '" + sb.webIndex.seedDB.mySeed().get(yacySeed.SEEDLIST, "") + "'. " + e.getMessage();
final String errorMsg = "Malformed seed file URL '" + sb.webIndex.peers().mySeed().get(yacySeed.SEEDLIST, "") + "'. " + e.getMessage();
log.logWarning("SaveSeedList: " + errorMsg);
return errorMsg;
}
// upload the seed-list using the configured uploader class
String prevStatus = sb.webIndex.seedDB.mySeed().get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_JUNIOR);
String prevStatus = sb.webIndex.peers().mySeed().get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_JUNIOR);
if (prevStatus.equals(yacySeed.PEERTYPE_PRINCIPAL)) { prevStatus = yacySeed.PEERTYPE_SENIOR; }
try {
sb.webIndex.seedDB.mySeed().put(yacySeed.PEERTYPE, yacySeed.PEERTYPE_PRINCIPAL); // this information shall also be uploaded
sb.webIndex.peers().mySeed().put(yacySeed.PEERTYPE, yacySeed.PEERTYPE_PRINCIPAL); // this information shall also be uploaded
if (log.isFine()) log.logFine("SaveSeedList: Using seed uploading method '" + seedUploadMethod + "' for seed-list uploading." +
"\n\tPrevious peerType is '" + sb.webIndex.seedDB.mySeed().get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_JUNIOR) + "'.");
"\n\tPrevious peerType is '" + sb.webIndex.peers().mySeed().get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_JUNIOR) + "'.");
// logt = seedDB.uploadCache(seedFTPServer, seedFTPAccount, seedFTPPassword, seedFTPPath, seedURL);
logt = sb.webIndex.seedDB.uploadCache(uploader, sb, sb.webIndex.seedDB, seedURL);
logt = sb.webIndex.peers().uploadCache(uploader, sb, sb.webIndex.peers(), seedURL);
if (logt != null) {
if (logt.indexOf("Error") >= 0) {
sb.webIndex.seedDB.mySeed().put(yacySeed.PEERTYPE, prevStatus);
sb.webIndex.peers().mySeed().put(yacySeed.PEERTYPE, prevStatus);
final String errorMsg = "SaveSeedList: seed upload failed using " + uploader.getClass().getName() + " (error): " + logt.substring(logt.indexOf("Error") + 6);
log.logSevere(errorMsg);
return errorMsg;
@ -699,16 +699,16 @@ public class yacyCore {
sb.setConfig("yacyStatus", yacySeed.PEERTYPE_PRINCIPAL);
return null;
} catch (final Exception e) {
sb.webIndex.seedDB.mySeed().put(yacySeed.PEERTYPE, prevStatus);
sb.webIndex.peers().mySeed().put(yacySeed.PEERTYPE, prevStatus);
sb.setConfig("yacyStatus", prevStatus);
final String errorMsg = "SaveSeedList: Seed upload failed (IO error): " + e.getMessage();
log.logInfo(errorMsg, e);
return errorMsg;
}
} finally {
sb.webIndex.seedDB.lastSeedUpload_seedDBSize = sb.webIndex.seedDB.sizeConnected();
sb.webIndex.seedDB.lastSeedUpload_timeStamp = System.currentTimeMillis();
sb.webIndex.seedDB.lastSeedUpload_myIP = sb.webIndex.seedDB.mySeed().getIP();
sb.webIndex.peers().lastSeedUpload_seedDBSize = sb.webIndex.peers().sizeConnected();
sb.webIndex.peers().lastSeedUpload_timeStamp = System.currentTimeMillis();
sb.webIndex.peers().lastSeedUpload_myIP = sb.webIndex.peers().mySeed().getIP();
}
}

View File

@ -79,7 +79,7 @@ public class yacyNetwork {
post.add(new DefaultCharsetStringPart("key", salt));
// just standard identification essentials
post.add(new DefaultCharsetStringPart("iam", sb.webIndex.seedDB.mySeed().hash));
post.add(new DefaultCharsetStringPart("iam", sb.webIndex.peers().mySeed().hash));
if (targetHash != null) post.add(new DefaultCharsetStringPart("youare", targetHash));
// time information for synchronization
@ -96,7 +96,7 @@ public class yacyNetwork {
if (authentificationMethod.equals("salted-magic-sim")) {
// generate an authentification essential using the salt, the iam-hash and the network magic
final String magic = sb.getConfig("network.unit.protocol.request.authentification.essentials", "");
final String md5 = Digest.encodeMD5Hex(salt + sb.webIndex.seedDB.mySeed().hash + magic);
final String md5 = Digest.encodeMD5Hex(salt + sb.webIndex.peers().mySeed().hash + magic);
post.add(new DefaultCharsetStringPart("magicmd5", md5));
}
}

View File

@ -114,7 +114,7 @@ public class yacySearch extends Thread {
public void run() {
this.urls = yacyClient.search(
wordIndex.seedDB.mySeed(),
wordIndex.peers().mySeed(),
wordhashes, excludehashes, urlhashes, prefer, filter, language, count, maxDistance, global, partitions,
targetPeer, wordIndex, crawlResults, containerCache, abstractCache,
blacklist, rankingProfile, constraint);
@ -123,8 +123,8 @@ public class yacySearch extends Thread {
final StringBuilder urllist = new StringBuilder(this.urls.length * 13);
for (int i = 0; i < this.urls.length; i++) urllist.append(this.urls[i]).append(' ');
yacyCore.log.logInfo("REMOTE SEARCH - remote peer " + targetPeer.hash + ":" + targetPeer.getName() + " contributed " + urls.length + " links for word hash " + wordhashes + ": " + new String(urllist));
wordIndex.seedDB.mySeed().incRI(urls.length);
wordIndex.seedDB.mySeed().incRU(urls.length);
wordIndex.peers().mySeed().incRI(urls.length);
wordIndex.peers().mySeed().incRU(urls.length);
} else {
yacyCore.log.logInfo("REMOTE SEARCH - no answer from remote peer " + targetPeer.hash + ":" + targetPeer.getName());
}
@ -266,11 +266,11 @@ public class yacySearch extends Thread {
final yacySeed[] targetPeers =
(clusterselection == null) ?
selectSearchTargets(
wordIndex.seedDB,
wordIndex.peers(),
plasmaSearchQuery.hashes2Set(wordhashes),
targets,
wordIndex.seedDB.redundancy())
: selectClusterPeers(wordIndex.seedDB, clusterselection);
wordIndex.peers().redundancy())
: selectClusterPeers(wordIndex.peers(), clusterselection);
if (targetPeers == null) return new yacySearch[0];
targets = targetPeers.length;
if (targets == 0) return new yacySearch[0];
@ -294,10 +294,10 @@ public class yacySearch extends Thread {
final plasmaSearchRankingProfile rankingProfile,
final Bitfield constraint, final TreeMap<String, String> clusterselection) {
// check own peer status
if (wordIndex.seedDB.mySeed() == null || wordIndex.seedDB.mySeed().getPublicAddress() == null) { return null; }
if (wordIndex.peers().mySeed() == null || wordIndex.peers().mySeed().getPublicAddress() == null) { return null; }
// prepare seed targets and threads
final yacySeed targetPeer = wordIndex.seedDB.getConnected(targethash);
final yacySeed targetPeer = wordIndex.peers().getConnected(targethash);
if (targetPeer == null || targetPeer.hash == null) return null;
if (clusterselection != null) targetPeer.setAlternativeAddress(clusterselection.get(targetPeer.hash));
final yacySearch searchThread = new yacySearch(wordhashes, excludehashes, urlhashes, "", "", "en", 0, 9999, true, 0, targetPeer,