Merge branch 'master' of git@gitorious.org:yacy/rc1.git

This commit is contained in:
Marc Nause 2013-02-05 21:09:41 +01:00
commit 27894d2c1a
14 changed files with 156 additions and 118 deletions

View File

@ -101,7 +101,7 @@ function updatepage(str) {
#(/hosts)#
#(files)#::
<fieldset><legend>Browser for #[path]#</legend>
<fieldset><legend>Browser for <a href="#[path]#" target="_blank">#[path]#</a></legend>
<p>documents stored for host: #[hostsize]#; documents stored for subpath: #[subpathloadsize]#; unloaded documents detected in subpath: #[subpathdetectedsize]# <!-- #(complete)#;<a href="/HostBrowser.html?complete=true&path=#[path]#">get complete list</a>::<a href="/HostBrowser.html?path=#[path]#">directory view</a>#(/complete)#-->
</p>
<table class="sortable" border="0" cellpadding="2" cellspacing="2" style="float:left">
@ -152,7 +152,7 @@ function updatepage(str) {
<fieldset><legend>Outbound Links, outgoing from #[host]# - Host List</legend>
#{list}#
<div style="float:left; padding:1px 5px 1px 5px;">
<div style="width:160px; text-align:left; float: left; white-space:nowrap; overflow:hidden;"><div id="info"><a href="/HostBrowser.html?path=#[link]#">#[host]#</a><span>browse #[host]#</span></div></div>
<div style="width:160px; text-align:left; float: left; white-space:nowrap; overflow:hidden;"><div id="info"><a href="/HostBrowser.html?path=#[link]#">#[host]#</a></div></div>
<div style="width:80px; text-align:right; float: left; white-space:nowrap; overflow:hidden;">#[count]# URLs</div>
</div>
#{/list}#
@ -168,7 +168,7 @@ function updatepage(str) {
<fieldset><legend>Inbound Links, incoming to #[host]# - Host List</legend>
#{list}#
<div style="float:left; padding:1px 5px 1px 5px;">
<div style="width:160px; text-align:left; float: left; white-space:nowrap; overflow:hidden;"><div id="info"><a href="/HostBrowser.html?path=#[host]#">#[host]#</a><span>browse #[host]#</span></div></div>
<div style="width:160px; text-align:left; float: left; white-space:nowrap; overflow:hidden;"><div id="info"><a href="/HostBrowser.html?path=#[host]#">#[host]#</a></div></div>
<div style="width:80px; text-align:right; float: left; white-space:nowrap; overflow:hidden;">#[count]# URLs</div>
</div>
#{/list}#

View File

@ -17,9 +17,11 @@
</legend>
<form id="ConfigForm" method="post" action="RemoteCrawl_p.html" enctype="multipart/form-data" accept-charset="UTF-8">
<dl>
#(disabled)#::<dt></dt><dd><span class="error">Your peer cannot accept remote crawls because you need senior or principal peer status for that!</span></dd>#(/disabled)#
<dt>
<dt>
<label for="crawlResponse">Accept Remote Crawl Requests</label>
<input type="checkbox" id="crawlResponse" name="crawlResponse" onclick="window.location.href='RemoteCrawl_p.html?#(crawlResponse)#crawlResponse=on::crawlResponse=off#(/crawlResponse)#'" #(crawlResponse)#::checked="checked" #(/crawlResponse)#/>
<input type="checkbox" #(disabled)#::disabled="disabled"#(/disabled)# id="crawlResponse" name="crawlResponse" onclick="window.location.href='RemoteCrawl_p.html?#(crawlResponse)#crawlResponse=on::crawlResponse=off#(/crawlResponse)#'" #(crawlResponse)#::checked="checked" #(/crawlResponse)#/>
</dt>
<dd>
Perform web indexing upon request of another peer.<br />

View File

@ -69,14 +69,16 @@ public class RemoteCrawl_p {
}
}
// set seed information directly
sb.peers.mySeed().setFlagAcceptRemoteCrawl(sb.getConfigBool("crawlResponse", false));
// write remote crawl request settings
prop.put("crawlResponse", sb.getConfigBool("crawlResponse", false) ? "1" : "0");
prop.put("disabled", !sb.peers.mySeed().isActive() && !sb.peers.mySeed().getFlagAcceptRemoteCrawl() ? 1 : 0);
prop.put("crawlResponse", sb.peers.mySeed().getFlagAcceptRemoteCrawl() ? 1 : 0);
long RTCbusySleep = Math.max(1, env.getConfigLong(SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL_BUSYSLEEP, 100));
final int RTCppm = (int) (60000L / RTCbusySleep);
prop.put("acceptCrawlLimit", RTCppm);
// set seed information directly
sb.peers.mySeed().setFlagAcceptRemoteCrawl(sb.getConfigBool("crawlResponse", false));
// -------------------------------------------------------------------------------------
// write network list

View File

@ -48,9 +48,7 @@ public class schema {
c++;
}
}
if (solrScheme.contains(YaCySchema.author)) {
addField(prop, c, YaCySchema.author_sxt);
}
//if (solrScheme.contains(YaCySchema.author)) {addField(prop, c, YaCySchema.author_sxt);}
prop.put("fields", c);
prop.put("copyFieldAuthor", solrScheme.contains(YaCySchema.author) ? 1 : 0);

View File

@ -309,6 +309,18 @@ legend {
color:white;
}
legend a:link {
color:white;
}
legend a:link:hover {
color:white;
}
legend a {
color:#[color_text]#;
}
form dt, dl.pairs dt {
background-color:#[color_tableitem]#;
font-weight:bold;

View File

@ -294,6 +294,14 @@ legend {
color:white;
}
legend a:link {
color:white;
}
legend a:link:hover {
color:white;
}
form dt, dl.pairs dt {
background-color:#DEE6F3;
font-weight:bold;

View File

@ -96,6 +96,7 @@ public class EmbeddedSolrConnector extends SolrServerConnector implements SolrCo
this.cores = new CoreContainer(storagePath.getAbsolutePath(), new File(solr_config, "solr.xml"));
}
this.defaultCoreName = this.cores.getDefaultCoreName();
Log.logInfo("EmbeddedSolrConnector", "detected default solr core: " + this.defaultCoreName);
this.defaultCore = this.cores.getCore(this.defaultCoreName); // should be "collection1"
if (this.defaultCore == null) {
// try again

View File

@ -26,6 +26,7 @@ import java.net.InetAddress;
import net.yacy.cora.document.MultiProtocolURI;
import net.yacy.cora.protocol.Domains;
import net.yacy.kelondro.logging.Log;
import org.apache.commons.httpclient.HttpException;
import org.apache.http.Header;
@ -45,9 +46,9 @@ import org.apache.http.impl.auth.BasicScheme;
import org.apache.http.impl.client.BasicAuthCache;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.PoolingClientConnectionManager;
import org.apache.http.protocol.HttpContext;
import org.apache.solr.client.solrj.ResponseParser;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.impl.XMLResponseParser;
import org.apache.solr.client.solrj.request.QueryRequest;
@ -90,7 +91,10 @@ public class RemoteSolrConnector extends SolrServerConnector implements SolrConn
}
HttpSolrServer s;
if (this.solraccount.length() > 0) {
this.client = new DefaultHttpClient() {
PoolingClientConnectionManager cm = new PoolingClientConnectionManager(); // try also: ThreadSafeClientConnManager
cm.setMaxTotal(100);
this.client = new DefaultHttpClient(cm) {
@Override
protected HttpContext createHttpContext() {
HttpContext context = super.createHttpContext();
@ -130,8 +134,11 @@ public class RemoteSolrConnector extends SolrServerConnector implements SolrConn
BasicCredentialsProvider credsProvider = new BasicCredentialsProvider();
credsProvider.setCredentials(new AuthScope(this.host, AuthScope.ANY_PORT), new UsernamePasswordCredentials(this.solraccount, this.solrpw));
this.client.setCredentialsProvider(credsProvider);
s = new HttpSolrServer("http://" + this.host + ":" + this.port + this.solrpath, this.client);
String p = "http://" + this.host + ":" + this.port + this.solrpath;
Log.logInfo("RemoteSolrConnector", "connecting Solr authenticated with url:" + p);
s = new HttpSolrServer(p, this.client);
} else {
Log.logInfo("RemoteSolrConnector", "connecting Solr with url:" + this.solrurl);
s = new HttpSolrServer(this.solrurl);
}
s.setAllowCompression(true);
@ -169,8 +176,6 @@ public class RemoteSolrConnector extends SolrServerConnector implements SolrConn
if (q != null) Thread.currentThread().setName(threadname);
return response;
} catch (SolrServerException e) {
throw new IOException(e.getMessage());
} catch (Throwable e) {
throw new IOException("Error executing query", e);
}

View File

@ -30,8 +30,10 @@ import net.yacy.cora.document.UTF8;
import net.yacy.cora.federate.solr.YaCySchema;
import net.yacy.cora.sorting.ClusteredScoreMap;
import net.yacy.cora.sorting.ReversibleScoreMap;
import net.yacy.kelondro.logging.Log;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException;
@ -101,10 +103,12 @@ public abstract class SolrServerConnector extends AbstractSolrConnector implemen
@Override
public synchronized void commit(final boolean softCommit) {
//if (this.server instanceof HttpSolrServer) ((HttpSolrServer) this.server).getHttpClient().getConnectionManager().closeExpiredConnections();
try {
this.server.commit(true, true, softCommit);
} catch (SolrServerException e) {
} catch (IOException e) {
if (this.server instanceof HttpSolrServer) ((HttpSolrServer) this.server).shutdown();
} catch (Throwable e) {
Log.logException(e);
}
}
@ -115,8 +119,8 @@ public abstract class SolrServerConnector extends AbstractSolrConnector implemen
public void optimize(int maxSegments) {
try {
this.server.optimize(true, true, maxSegments);
} catch (SolrServerException e) {
} catch (IOException e) {
} catch (Throwable e) {
Log.logException(e);
}
}
@ -125,10 +129,8 @@ public abstract class SolrServerConnector extends AbstractSolrConnector implemen
try {
if (this.server != null) synchronized (this.server) {this.server.commit(true, true, false);}
this.server = null;
} catch (SolrServerException e) {
log.warn(e);
} catch (IOException e) {
log.warn(e);
} catch (Throwable e) {
Log.logException(e);
}
}

View File

@ -121,7 +121,6 @@ import org.apache.solr.client.solrj.response.FacetField.Count;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.FacetParams;
@ -1035,8 +1034,6 @@ public final class Protocol
return -1; // we cannot query solr only with word hashes, there is no clear text string
}
event.addExpectedRemoteReferences(count);
QueryResponse rsp = null;
SolrDocumentList docList = null;
final SolrQuery solrQuery = event.query.solrQuery();
solrQuery.setStart(offset);
solrQuery.setRows(count);
@ -1061,16 +1058,15 @@ public final class Protocol
for (YaCySchema field: snippetFields) solrQuery.addHighlightField(field.getSolrFieldName());
boolean localsearch = target == null || target.equals(event.peers.mySeed());
SolrDocumentList docList = null;
QueryResponse rsp = null;
if (localsearch) {
// search the local index
try {
rsp = event.rankingProcess.getQuery().getSegment().fulltext().getSolr().query(solrQuery);
docList = rsp.getResults();
} catch (SolrException e) {
Network.log.logInfo("SEARCH failed (solr, 1), localpeer (" + e.getMessage() + ")", e);
return -1;
} catch (IOException e) {
Network.log.logInfo("SEARCH failed (solr, 2), localpeer (" + e.getMessage() + ")", e);
} catch (Throwable e) {
Network.log.logInfo("SEARCH failed (solr), localpeer (" + e.getMessage() + ")", e);
return -1;
}
} else {
@ -1080,8 +1076,8 @@ public final class Protocol
rsp = solrConnector.query(solrQuery);
docList = rsp.getResults();
// no need to close this here because that sends a commit to remote solr which is not wanted here
} catch (IOException e) {
Network.log.logInfo("SEARCH failed (solr), Peer: " +target.getName() + "/" + target.getPublicAddress() + " (" + e.getMessage() + ")", e);
} catch (Throwable e) {
Network.log.logInfo("SEARCH failed (solr), remote Peer: " +target.getName() + "/" + target.getPublicAddress() + " (" + e.getMessage() + ")", e);
return -1;
}
}
@ -1122,77 +1118,77 @@ public final class Protocol
// evaluate result
List<URIMetadataNode> container = new ArrayList<URIMetadataNode>();
if (docList.size() == 0) {
Network.log.logInfo("SEARCH (solr), returned 0 out of " + docList.getNumFound() + " documents from " + (target == null ? "shard" : ("peer " + target.hash + ":" + target.getName())) + " query = " + solrQuery.toString()) ;
} else {// create containers
Network.log.logInfo("SEARCH (solr), returned " + docList.size() + " out of " + docList.getNumFound() + " documents from " + (target == null ? "shard" : ("peer " + target.hash + ":" + target.getName()))) ;
int term = count;
for (final SolrDocument doc: docList) {
if ( term-- <= 0 ) {
break; // do not process more than requested (in case that evil peers fill us up with rubbish)
}
// get one single search result
if ( doc == null ) {
continue;
}
URIMetadataNode urlEntry = new URIMetadataNode(doc);
if ( blacklist.isListed(BlacklistType.SEARCH, urlEntry) ) {
if ( Network.log.isInfo() ) {
if (localsearch) {
Network.log.logInfo("local search (solr): filtered blacklisted url " + urlEntry.url());
} else {
Network.log.logInfo("remote search (solr): filtered blacklisted url " + urlEntry.url() + " from " + (target == null ? "shard" : ("peer " + target.hash + ":" + target.getName())));
}
}
continue; // block with blacklist
}
final String urlRejectReason = Switchboard.getSwitchboard().crawlStacker.urlInAcceptedDomain(urlEntry.url());
if ( urlRejectReason != null ) {
if ( Network.log.isInfo() ) {
if (localsearch) {
Network.log.logInfo("local search (solr): rejected url '" + urlEntry.url() + "' (" + urlRejectReason + ")");
} else {
Network.log.logInfo("remote search (solr): rejected url '" + urlEntry.url() + "' (" + urlRejectReason + ") from peer " + target.getName());
}
}
continue; // reject url outside of our domain
}
// passed all checks, store url
if (!localsearch) {
try {
event.query.getSegment().fulltext().putDocument(ClientUtils.toSolrInputDocument(doc));
ResultURLs.stack(
ASCII.String(urlEntry.url().hash()),
urlEntry.url().getHost(),
event.peers.mySeed().hash.getBytes(),
UTF8.getBytes(target.hash),
EventOrigin.QUERIES);
} catch ( final IOException e ) {
Network.log.logWarning("could not store search result", e);
continue; // db-error
}
}
// add the url entry to the word indexes
container.add(urlEntry);
}
if (localsearch) {
event.add(container, facets, snippets, true, "localpeer", (int) docList.getNumFound());
event.rankingProcess.addFinalize();
event.addExpectedRemoteReferences(-count);
Network.log.logInfo("local search (solr): localpeer sent " + container.get(0).size() + "/" + docList.size() + " references");
} else {
event.add(container, facets, snippets, false, target.getName() + "/" + target.hash, (int) docList.getNumFound());
event.rankingProcess.addFinalize();
event.addExpectedRemoteReferences(-count);
Network.log.logInfo("remote search (solr): peer " + target.getName() + " sent " + container.get(0).size() + "/" + docList.size() + " references");
}
if (docList == null || docList.size() == 0) {
Network.log.logInfo("SEARCH (solr), returned 0 out of 0 documents from " + (target == null ? "shard" : ("peer " + target.hash + ":" + target.getName())) + " query = " + solrQuery.toString()) ;
return 0;
}
Network.log.logInfo("SEARCH (solr), returned " + docList.size() + " out of " + docList.getNumFound() + " documents from " + (target == null ? "shard" : ("peer " + target.hash + ":" + target.getName())));
int term = count;
for (final SolrDocument doc: docList) {
if ( term-- <= 0 ) {
break; // do not process more than requested (in case that evil peers fill us up with rubbish)
}
// get one single search result
if ( doc == null ) {
continue;
}
URIMetadataNode urlEntry = new URIMetadataNode(doc);
if ( blacklist.isListed(BlacklistType.SEARCH, urlEntry) ) {
if ( Network.log.isInfo() ) {
if (localsearch) {
Network.log.logInfo("local search (solr): filtered blacklisted url " + urlEntry.url());
} else {
Network.log.logInfo("remote search (solr): filtered blacklisted url " + urlEntry.url() + " from " + (target == null ? "shard" : ("peer " + target.hash + ":" + target.getName())));
}
}
continue; // block with blacklist
}
final String urlRejectReason = Switchboard.getSwitchboard().crawlStacker.urlInAcceptedDomain(urlEntry.url());
if ( urlRejectReason != null ) {
if ( Network.log.isInfo() ) {
if (localsearch) {
Network.log.logInfo("local search (solr): rejected url '" + urlEntry.url() + "' (" + urlRejectReason + ")");
} else {
Network.log.logInfo("remote search (solr): rejected url '" + urlEntry.url() + "' (" + urlRejectReason + ") from peer " + target.getName());
}
}
continue; // reject url outside of our domain
}
// passed all checks, store url
if (!localsearch) {
try {
event.query.getSegment().fulltext().putDocument(ClientUtils.toSolrInputDocument(doc));
ResultURLs.stack(
ASCII.String(urlEntry.url().hash()),
urlEntry.url().getHost(),
event.peers.mySeed().hash.getBytes(),
UTF8.getBytes(target.hash),
EventOrigin.QUERIES);
} catch ( final IOException e ) {
Network.log.logWarning("could not store search result", e);
continue; // db-error
}
}
// add the url entry to the word indexes
container.add(urlEntry);
}
if (localsearch) {
event.add(container, facets, snippets, true, "localpeer", (int) docList.getNumFound());
event.rankingProcess.addFinalize();
event.addExpectedRemoteReferences(-count);
Network.log.logInfo("local search (solr): localpeer sent " + container.get(0).size() + "/" + docList.size() + " references");
} else {
event.add(container, facets, snippets, false, target.getName() + "/" + target.hash, (int) docList.getNumFound());
event.rankingProcess.addFinalize();
event.addExpectedRemoteReferences(-count);
Network.log.logInfo("remote search (solr): peer " + target.getName() + " sent " + container.get(0).size() + "/" + docList.size() + " references");
}
return docList.size();
}

View File

@ -3522,8 +3522,8 @@ public final class Switchboard extends serverSwitch {
this.peers.mySeed().setFlagDirectConnect(true);
this.peers.mySeed().setLastSeenUTC();
this.peers.mySeed().put(Seed.UTC, GenericFormatter.UTCDiffString());
this.peers.mySeed().setFlagAcceptRemoteCrawl(getConfig("crawlResponse", "").equals("true"));
this.peers.mySeed().setFlagAcceptRemoteIndex(getConfig("allowReceiveIndex", "").equals("true"));
this.peers.mySeed().setFlagAcceptRemoteCrawl(getConfigBool("crawlResponse", true));
this.peers.mySeed().setFlagAcceptRemoteIndex(getConfigBool("allowReceiveIndex", true));
//mySeed.setFlagAcceptRemoteIndex(true);
}

View File

@ -67,6 +67,7 @@ import net.yacy.kelondro.util.Bitfield;
import net.yacy.kelondro.util.SetTools;
import net.yacy.peers.Seed;
import net.yacy.search.index.Segment;
import net.yacy.search.index.SolrConfiguration;
import net.yacy.search.ranking.RankingProfile;
public final class QueryParams {
@ -83,11 +84,8 @@ public final class QueryParams {
}
}
private static final String[] defaultfacetfields = new String[]{
YaCySchema.host_s.getSolrFieldName(),
YaCySchema.url_protocol_s.getSolrFieldName(),
YaCySchema.url_file_ext_s.getSolrFieldName(),
YaCySchema.author_sxt.getSolrFieldName()};
private static final YaCySchema[] defaultfacetfields = new YaCySchema[]{
YaCySchema.host_s, YaCySchema.url_protocol_s, YaCySchema.url_file_ext_s, YaCySchema.author_sxt};
private static final int defaultmaxfacets = 30;
@ -146,6 +144,7 @@ public final class QueryParams {
public List<String> facetfields;
public int maxfacets;
private SolrQuery cachedQuery;
private SolrConfiguration solrScheme;
// the following values are filled during the search process as statistics for the search
public final AtomicInteger local_rwi_available; // the number of hits generated/ranked by the local search in rwi index
@ -158,7 +157,8 @@ public final class QueryParams {
public final SortedSet<byte[]> misses; // url hashes that had been sorted out because of constraints in postranking
public QueryParams(
final String query_original, final String query_words,
final String query_original,
final String query_words,
final int itemsPerPage,
final Bitfield constraint,
final Segment indexSegment,
@ -210,7 +210,12 @@ public final class QueryParams {
this.remote_available = new AtomicInteger(0); // the number of result contributions from all the remote peers
this.remote_peerCount = new AtomicInteger(0); // the number of remote peers that have contributed
this.misses = Collections.synchronizedSortedSet(new TreeSet<byte[]>(URIMetadataRow.rowdef.objectOrder));
this.facetfields = new ArrayList<String>(); for (String f: defaultfacetfields) facetfields.add(f);
this.facetfields = new ArrayList<String>();
this.solrScheme = indexSegment.fulltext().getSolrScheme();
for (YaCySchema f: defaultfacetfields) {
if (solrScheme.contains(f)) facetfields.add(f.getSolrFieldName());
}
for (Tagging v: LibraryProvider.autotagging.getVocabularies()) this.facetfields.add(YaCySchema.VOCABULARY_PREFIX + v.getName() + YaCySchema.VOCABULARY_SUFFIX);
this.maxfacets = defaultmaxfacets;
this.cachedQuery = null;
@ -312,7 +317,12 @@ public final class QueryParams {
this.remote_available = new AtomicInteger(0); // the number of result contributions from all the remote peers
this.remote_peerCount = new AtomicInteger(0); // the number of remote peers that have contributed
this.misses = Collections.synchronizedSortedSet(new TreeSet<byte[]>(URIMetadataRow.rowdef.objectOrder));
this.facetfields = new ArrayList<String>(); for (String f: defaultfacetfields) facetfields.add(f);
this.facetfields = new ArrayList<String>();
this.solrScheme = indexSegment.fulltext().getSolrScheme();
for (YaCySchema f: defaultfacetfields) {
if (solrScheme.contains(f)) facetfields.add(f.getSolrFieldName());
}
for (Tagging v: LibraryProvider.autotagging.getVocabularies()) this.facetfields.add(YaCySchema.VOCABULARY_PREFIX + v.getName() + YaCySchema.VOCABULARY_SUFFIX);
this.maxfacets = defaultmaxfacets;
this.cachedQuery = null;
@ -477,7 +487,7 @@ public final class QueryParams {
}
// add author facets
if (this.author != null && this.author.length() > 0) {
if (this.author != null && this.author.length() > 0 && this.solrScheme.contains(YaCySchema.author_sxt)) {
fq.append(" AND ").append(YaCySchema.author_sxt.getSolrFieldName()).append(":\"").append(this.author).append('\"');
}

View File

@ -197,6 +197,8 @@ public final class RankingProcess extends Thread {
@Override
public void run() {
if (query.getSegment().termIndex() == null) return; // nothing to do; this index is not used
// do a search
oneFeederStarted();

View File

@ -224,7 +224,7 @@ public final class SearchEvent {
// start a local RWI search concurrently
if (this.remote || this.peers.mySeed().getBirthdate() < noRobinsonLocalRWISearch) {
// we start the local search only if this peer is doing a remote search or when it is doing a local search and the peer is old
this.rankingProcess.start();
if (query.getSegment().connectedRWI()) this.rankingProcess.start();
}
if (this.remote) {
@ -270,7 +270,7 @@ public final class SearchEvent {
if ( generateAbstracts ) {
// we need the results now
try {
this.rankingProcess.join();
if (query.getSegment().connectedRWI()) this.rankingProcess.join();
} catch ( final Throwable e ) {
}
// compute index abstracts
@ -278,7 +278,7 @@ public final class SearchEvent {
int maxcount = -1;
long mindhtdistance = Long.MAX_VALUE, l;
byte[] wordhash;
assert this.rankingProcess.searchContainerMap() != null;
assert !query.getSegment().connectedRWI() || this.rankingProcess.searchContainerMap() != null;
if (this.rankingProcess.searchContainerMap() != null) {
for (final Map.Entry<byte[], ReferenceContainer<WordReference>> entry : this.rankingProcess.searchContainerMap().entrySet()) {
wordhash = entry.getKey();
@ -303,7 +303,7 @@ public final class SearchEvent {
// give process time to accumulate a certain amount of data
// before a reading process wants to get results from it
try {
this.rankingProcess.join(100);
if (query.getSegment().connectedRWI()) this.rankingProcess.join(100);
} catch ( final Throwable e ) {
}
// this will reduce the maximum waiting time until results are available to 100 milliseconds