- no more search time-out!

- fixed a bug introduced by the last commit

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@4430 6c8d7289-2bf4-0310-a012-ef5d649a1542
This commit is contained in:
orbiter 2008-02-02 23:53:39 +00:00
parent cd3e0d6f03
commit 7404256997
15 changed files with 35 additions and 64 deletions

View File

@ -3,7 +3,7 @@ javacSource=1.5
javacTarget=1.5 javacTarget=1.5
# Release Configuration # Release Configuration
releaseVersion=0.565 releaseVersion=0.566
stdReleaseFile=yacy_v${releaseVersion}_${DSTAMP}_${releaseNr}.tar.gz stdReleaseFile=yacy_v${releaseVersion}_${DSTAMP}_${releaseNr}.tar.gz
embReleaseFile=yacy_emb_v${releaseVersion}_${DSTAMP}_${releaseNr}.tar.gz embReleaseFile=yacy_emb_v${releaseVersion}_${DSTAMP}_${releaseNr}.tar.gz
proReleaseFile=yacy_pro_v${releaseVersion}_${DSTAMP}_${releaseNr}.tar.gz proReleaseFile=yacy_pro_v${releaseVersion}_${DSTAMP}_${releaseNr}.tar.gz

View File

@ -35,7 +35,6 @@
<input type="hidden" name="count" value="10" /> <input type="hidden" name="count" value="10" />
<input type="hidden" name="offset" value="0" /> <input type="hidden" name="offset" value="0" />
<input type="hidden" name="resource" value="global" /> <input type="hidden" name="resource" value="global" />
<input type="hidden" name="time" value="#[searchtime]#" />
<input type="hidden" name="urlmaskfilter" value=".*" /> <input type="hidden" name="urlmaskfilter" value=".*" />
<input type="hidden" name="prefermaskfilter" value="" /> <input type="hidden" name="prefermaskfilter" value="" />
<input type="hidden" name="indexof" value="off" /> <input type="hidden" name="indexof" value="off" />
@ -72,20 +71,6 @@
</td> </td>
</tr> </tr>
<tr> <tr>
<td><label for="time">Max. search time (seconds)</label>:</td>
<td>
<select id="time" name="time">
<option #(time-1)#::selected="selected"#(/time-1)#>1</option>
<option #(time-2)#::selected="selected"#(/time-2)#>2</option>
<option #(time-3)#::selected="selected"#(/time-3)#>3</option>
<option #(time-4)#::selected="selected"#(/time-4)#>4</option>
<option #(time-6)#::selected="selected"#(/time-6)#>6</option>
<option #(time-8)#::selected="selected"#(/time-8)#>8</option>
<option #(time-10)#::selected="selected"#(/time-10)#>10</option>
</select>
</td>
</tr>
<tr>
<td><label for="urlmaskfilter">URL mask</label>:</td> <td><label for="urlmaskfilter">URL mask</label>:</td>
<td>#(urlmaskoptions)# <td>#(urlmaskoptions)#
<input id="urlmaskfilter" name="urlmaskfilter" type="text" size="12" maxlength="80" value="#[urlmaskfilter]#" /> <input id="urlmaskfilter" name="urlmaskfilter" type="text" size="12" maxlength="80" value="#[urlmaskfilter]#" />

View File

@ -60,7 +60,6 @@ public class index {
final int searchoptions = (post == null) ? 0 : post.getInt("searchoptions", 0); final int searchoptions = (post == null) ? 0 : post.getInt("searchoptions", 0);
final String former = (post == null) ? "" : post.get("former", ""); final String former = (post == null) ? "" : post.get("former", "");
final int count = Math.min(100, (post == null) ? 10 : post.getInt("count", 10)); final int count = Math.min(100, (post == null) ? 10 : post.getInt("count", 10));
final int time = Math.min(60, (post == null) ? (int) sb.getConfigLong("network.unit.search.time", 3) : post.getInt("time", (int) sb.getConfigLong("network.unit.search.time", 3)));
final String urlmaskfilter = (post == null) ? ".*" : post.get("urlmaskfilter", ".*"); final String urlmaskfilter = (post == null) ? ".*" : post.get("urlmaskfilter", ".*");
final String prefermaskfilter = (post == null) ? "" : post.get("prefermaskfilter", ""); final String prefermaskfilter = (post == null) ? "" : post.get("prefermaskfilter", "");
final String constraint = (post == null) ? "" : post.get("constraint", ""); final String constraint = (post == null) ? "" : post.get("constraint", "");
@ -119,14 +118,6 @@ public class index {
prop.put("searchoptions_resource-global-disabled", (indexReceiveGranted && indexDistributeGranted) ? "0" : "1"); prop.put("searchoptions_resource-global-disabled", (indexReceiveGranted && indexDistributeGranted) ? "0" : "1");
prop.put("searchoptions_resource-global-disabled_reason", (indexReceiveGranted) ? "0" : (indexDistributeGranted ? "1" : "2")); prop.put("searchoptions_resource-global-disabled_reason", (indexReceiveGranted) ? "0" : (indexDistributeGranted ? "1" : "2"));
prop.put("searchoptions_resource-local", global ? "0" : "1"); prop.put("searchoptions_resource-local", global ? "0" : "1");
prop.put("searchoptions_searchtime", time);
prop.put("searchoptions_time-1", (time == 1) ? "1" : "0");
prop.put("searchoptions_time-2", (time == 2) ? "1" : "0");
prop.put("searchoptions_time-3", (time == 3) ? "1" : "0");
prop.put("searchoptions_time-4", (time == 4) ? "1" : "0");
prop.put("searchoptions_time-6", (time == 6) ? "1" : "0");
prop.put("searchoptions_time-8", (time == 8) ? "1" : "0");
prop.put("searchoptions_time-10", (time == 10) ? "1" : "0");
prop.put("searchoptions_urlmaskoptions", "0"); prop.put("searchoptions_urlmaskoptions", "0");
prop.putHTML("searchoptions_urlmaskoptions_urlmaskfilter", urlmaskfilter); prop.putHTML("searchoptions_urlmaskoptions_urlmaskfilter", urlmaskfilter);
prop.put("searchoptions_prefermaskoptions", "0"); prop.put("searchoptions_prefermaskoptions", "0");

View File

@ -77,7 +77,6 @@ public final class search {
String abstracts = post.get("abstracts", ""); // a string of word hashes for abstracts that shall be generated, or 'auto' (for maxcount-word), or '' (for none) String abstracts = post.get("abstracts", ""); // a string of word hashes for abstracts that shall be generated, or 'auto' (for maxcount-word), or '' (for none)
// final String fwdep = post.get("fwdep", ""); // forward depth. if "0" then peer may NOT ask another peer for more results // final String fwdep = post.get("fwdep", ""); // forward depth. if "0" then peer may NOT ask another peer for more results
// final String fwden = post.get("fwden", ""); // forward deny, a list of seed hashes. They may NOT be target of forward hopping // final String fwden = post.get("fwden", ""); // forward deny, a list of seed hashes. They may NOT be target of forward hopping
final long duetime= Math.min(60000, post.getLong("duetime", 3000));
final int count = Math.min(100, post.getInt("count", 10)); // maximum number of wanted results final int count = Math.min(100, post.getInt("count", 10)); // maximum number of wanted results
final int maxdist= post.getInt("maxdist", Integer.MAX_VALUE); final int maxdist= post.getInt("maxdist", Integer.MAX_VALUE);
final String prefer = post.get("prefer", ""); final String prefer = post.get("prefer", "");
@ -119,7 +118,7 @@ public final class search {
} }
// tell all threads to do nothing for a specific time // tell all threads to do nothing for a specific time
sb.intermissionAllThreads(2 * duetime); sb.intermissionAllThreads(3000);
TreeSet<String> abstractSet = ((abstracts.length() == 0) || (abstracts.equals("auto"))) ? null : plasmaSearchQuery.hashes2Set(abstracts); TreeSet<String> abstractSet = ((abstracts.length() == 0) || (abstracts.equals("auto"))) ? null : plasmaSearchQuery.hashes2Set(abstracts);
@ -147,7 +146,7 @@ public final class search {
long urlRetrievalAllTime = 0, snippetComputationAllTime = 0; long urlRetrievalAllTime = 0, snippetComputationAllTime = 0;
if ((query.length() == 0) && (abstractSet != null)) { if ((query.length() == 0) && (abstractSet != null)) {
// this is _not_ a normal search, only a request for index abstracts // this is _not_ a normal search, only a request for index abstracts
theQuery = new plasmaSearchQuery(null, abstractSet, new TreeSet<String>(kelondroBase64Order.enhancedComparator), rankingProfile, maxdist, prefer, plasmaSearchQuery.contentdomParser(contentdom), false, count, 0, duetime, filter, plasmaSearchQuery.SEARCHDOM_LOCAL, null, -1, null, false); theQuery = new plasmaSearchQuery(null, abstractSet, new TreeSet<String>(kelondroBase64Order.enhancedComparator), rankingProfile, maxdist, prefer, plasmaSearchQuery.contentdomParser(contentdom), false, count, 0, filter, plasmaSearchQuery.SEARCHDOM_LOCAL, null, -1, null, false);
theQuery.domType = plasmaSearchQuery.SEARCHDOM_LOCAL; theQuery.domType = plasmaSearchQuery.SEARCHDOM_LOCAL;
yacyCore.log.logInfo("INIT HASH SEARCH (abstracts only): " + plasmaSearchQuery.anonymizedQueryHashes(theQuery.queryHashes) + " - " + theQuery.displayResults() + " links"); yacyCore.log.logInfo("INIT HASH SEARCH (abstracts only): " + plasmaSearchQuery.anonymizedQueryHashes(theQuery.queryHashes) + " - " + theQuery.displayResults() + " links");
@ -173,7 +172,7 @@ public final class search {
} else { } else {
// retrieve index containers from search request // retrieve index containers from search request
theQuery = new plasmaSearchQuery(null, queryhashes, excludehashes, rankingProfile, maxdist, prefer, plasmaSearchQuery.contentdomParser(contentdom), false, count, 0, duetime, filter, plasmaSearchQuery.SEARCHDOM_LOCAL, null, -1, constraint, false); theQuery = new plasmaSearchQuery(null, queryhashes, excludehashes, rankingProfile, maxdist, prefer, plasmaSearchQuery.contentdomParser(contentdom), false, count, 0, filter, plasmaSearchQuery.SEARCHDOM_LOCAL, null, -1, constraint, false);
theQuery.domType = plasmaSearchQuery.SEARCHDOM_LOCAL; theQuery.domType = plasmaSearchQuery.SEARCHDOM_LOCAL;
yacyCore.log.logInfo("INIT HASH SEARCH (query-" + abstracts + "): " + plasmaSearchQuery.anonymizedQueryHashes(theQuery.queryHashes) + " - " + theQuery.displayResults() + " links"); yacyCore.log.logInfo("INIT HASH SEARCH (query-" + abstracts + "): " + plasmaSearchQuery.anonymizedQueryHashes(theQuery.queryHashes) + " - " + theQuery.displayResults() + " links");
@ -213,7 +212,7 @@ public final class search {
} else { } else {
joincount = theSearch.getRankingResult().getLocalResourceSize(); joincount = theSearch.getRankingResult().getLocalResourceSize();
prop.put("joincount", Integer.toString(joincount)); prop.put("joincount", Integer.toString(joincount));
accu = theSearch.completeResults(duetime); accu = theSearch.completeResults(3000);
} }
// generate compressed index for maxcounthash // generate compressed index for maxcounthash

View File

@ -120,7 +120,6 @@ public class yacysearch {
prop.put("input_count", "10"); prop.put("input_count", "10");
prop.put("input_offset", "0"); prop.put("input_offset", "0");
prop.put("input_resource", "global"); prop.put("input_resource", "global");
prop.put("input_time", sb.getConfigLong("network.unit.search.time", 3));
prop.put("input_urlmaskfilter", ".*"); prop.put("input_urlmaskfilter", ".*");
prop.put("input_prefermaskfilter", ""); prop.put("input_prefermaskfilter", "");
prop.put("input_indexof", "off"); prop.put("input_indexof", "off");
@ -154,7 +153,6 @@ public class yacysearch {
int offset = post.getInt("offset", 0); int offset = post.getInt("offset", 0);
boolean global = (post == null) ? true : post.get("resource", "global").equals("global"); boolean global = (post == null) ? true : post.get("resource", "global").equals("global");
final boolean indexof = post.get("indexof","").equals("on"); final boolean indexof = post.get("indexof","").equals("on");
final long searchtime = 1000 * post.getLong("time", (int) sb.getConfigLong("network.unit.search.time", 3));
String urlmask = ""; String urlmask = "";
if (post.containsKey("urlmask") && post.get("urlmask").equals("no")) { if (post.containsKey("urlmask") && post.get("urlmask").equals("no")) {
urlmask = ".*"; urlmask = ".*";
@ -267,7 +265,6 @@ public class yacysearch {
true, true,
itemsPerPage, itemsPerPage,
offset, offset,
searchtime,
urlmask, urlmask,
(clustersearch && globalsearch) ? plasmaSearchQuery.SEARCHDOM_CLUSTERALL : (clustersearch && globalsearch) ? plasmaSearchQuery.SEARCHDOM_CLUSTERALL :
((globalsearch) ? plasmaSearchQuery.SEARCHDOM_GLOBALDHT : plasmaSearchQuery.SEARCHDOM_LOCAL), ((globalsearch) ? plasmaSearchQuery.SEARCHDOM_GLOBALDHT : plasmaSearchQuery.SEARCHDOM_LOCAL),
@ -279,13 +276,13 @@ public class yacysearch {
String client = (String) header.get("CLIENTIP"); // the search client who initiated the search String client = (String) header.get("CLIENTIP"); // the search client who initiated the search
// tell all threads to do nothing for a specific time // tell all threads to do nothing for a specific time
sb.intermissionAllThreads(2 * theQuery.maximumTime); sb.intermissionAllThreads(10000);
// filter out words that appear in bluelist // filter out words that appear in bluelist
theQuery.filterOut(plasmaSwitchboard.blueList); theQuery.filterOut(plasmaSwitchboard.blueList);
// log // log
serverLog.logInfo("LOCAL_SEARCH", "INIT WORD SEARCH: " + theQuery.queryString + ":" + theQuery.queryHashes + " - " + theQuery.neededResults() + " links to be computed, " + theQuery.displayResults() + " lines to be displayed, " + (theQuery.maximumTime / 1000) + " seconds"); serverLog.logInfo("LOCAL_SEARCH", "INIT WORD SEARCH: " + theQuery.queryString + ":" + theQuery.queryHashes + " - " + theQuery.neededResults() + " links to be computed, " + theQuery.displayResults() + " lines to be displayed");
long timestamp = System.currentTimeMillis(); long timestamp = System.currentTimeMillis();
// create a new search event // create a new search event
@ -300,8 +297,6 @@ public class yacysearch {
serverLog.logFine("LOCAL_SEARCH", "SEARCH TIME AFTER RESULT PREPARATION: " + ((System.currentTimeMillis() - timestamp) / 1000) + " seconds"); serverLog.logFine("LOCAL_SEARCH", "SEARCH TIME AFTER RESULT PREPARATION: " + ((System.currentTimeMillis() - timestamp) / 1000) + " seconds");
// calc some more cross-reference // calc some more cross-reference
long remainingTime = theQuery.maximumTime - (System.currentTimeMillis() - timestamp);
if (remainingTime < 0) remainingTime = 1000;
serverLog.logFine("LOCAL_SEARCH", "SEARCH TIME AFTER XREF PREPARATION: " + ((System.currentTimeMillis() - timestamp) / 1000) + " seconds"); serverLog.logFine("LOCAL_SEARCH", "SEARCH TIME AFTER XREF PREPARATION: " + ((System.currentTimeMillis() - timestamp) / 1000) + " seconds");
// log // log
@ -406,7 +401,6 @@ public class yacysearch {
prop.put("input_count", itemsPerPage); prop.put("input_count", itemsPerPage);
prop.put("input_offset", offset); prop.put("input_offset", offset);
prop.put("input_resource", global ? "global" : "local"); prop.put("input_resource", global ? "global" : "local");
prop.put("input_time", searchtime / 1000);
prop.putHTML("input_urlmaskfilter", urlmask); prop.putHTML("input_urlmaskfilter", urlmask);
prop.putHTML("input_prefermaskfilter", prefermask); prop.putHTML("input_prefermaskfilter", prefermask);
prop.put("input_indexof", (indexof) ? "on" : "off"); prop.put("input_indexof", (indexof) ? "on" : "off");
@ -435,7 +429,6 @@ public class yacysearch {
"&amp;count="+ theQuery.displayResults() + "&amp;count="+ theQuery.displayResults() +
"&amp;offset=" + (page * theQuery.displayResults()) + "&amp;offset=" + (page * theQuery.displayResults()) +
"&amp;resource=" + theQuery.searchdom() + "&amp;resource=" + theQuery.searchdom() +
"&amp;time=" + (theQuery.maximumTime / 1000) +
"&amp;urlmaskfilter=" + theQuery.urlMask + "&amp;urlmaskfilter=" + theQuery.urlMask +
"&amp;prefermaskfilter=" + theQuery.prefer + "&amp;prefermaskfilter=" + theQuery.prefer +
"&amp;cat=href&amp;constraint=" + ((theQuery.constraint == null) ? "" : theQuery.constraint.exportB64()) + "&amp;cat=href&amp;constraint=" + ((theQuery.constraint == null) ? "" : theQuery.constraint.exportB64()) +

View File

@ -151,7 +151,6 @@ public class yacysearchitem {
prop.put("references_words_" + hintcount + "_offset", "0"); prop.put("references_words_" + hintcount + "_offset", "0");
prop.put("references_words_" + hintcount + "_contentdom", theQuery.contentdom()); prop.put("references_words_" + hintcount + "_contentdom", theQuery.contentdom());
prop.put("references_words_" + hintcount + "_resource", theQuery.searchdom()); prop.put("references_words_" + hintcount + "_resource", theQuery.searchdom());
prop.put("references_words_" + hintcount + "_time", (theQuery.maximumTime / 1000));
} }
prop.put("references_words", hintcount); prop.put("references_words", hintcount);
if (hintcount++ > MAX_TOPWORDS) { if (hintcount++ > MAX_TOPWORDS) {

View File

@ -169,7 +169,7 @@ public class indexCollectionRI implements indexRI {
} }
} }
public void addEntries(indexContainer newEntries, long creationTime, boolean dhtCase) { public void addEntries(indexContainer newEntries) {
try { try {
collectionIndex.merge(newEntries); collectionIndex.merge(newEntries);
} catch (kelondroOutOfLimitsException e) { } catch (kelondroOutOfLimitsException e) {

View File

@ -481,7 +481,7 @@ public final class indexRAMRI implements indexRI {
return delCount; return delCount;
} }
public synchronized void addEntries(indexContainer container, long updateTime, boolean dhtCase) { public synchronized void addEntries(indexContainer container) {
// this puts the entries into the cache, not into the assortment directly // this puts the entries into the cache, not into the assortment directly
int added = 0; int added = 0;
if ((container == null) || (container.size() == 0)) return; if ((container == null) || (container.size() == 0)) return;
@ -498,7 +498,7 @@ public final class indexRAMRI implements indexRI {
if (added > 0) { if (added > 0) {
cache.put(wordHash, entries); cache.put(wordHash, entries);
hashScore.addScore(wordHash, added); hashScore.addScore(wordHash, added);
hashDate.setScore(wordHash, intTime(updateTime)); hashDate.setScore(wordHash, intTime(System.currentTimeMillis()));
} }
entries = null; entries = null;
} }

View File

@ -44,7 +44,7 @@ public interface indexRI {
public indexContainer deleteContainer(String wordHash); public indexContainer deleteContainer(String wordHash);
public boolean removeEntry(String wordHash, String urlHash); public boolean removeEntry(String wordHash, String urlHash);
public int removeEntries(String wordHash, Set<String> urlHashes); public int removeEntries(String wordHash, Set<String> urlHashes);
public void addEntries(indexContainer newEntries, long creationTime, boolean dhtCase); public void addEntries(indexContainer newEntries);
public void close(); public void close();
} }

View File

@ -225,12 +225,12 @@ public class kelondroEcoFS {
assert b.length - start >= this.recordsize; assert b.length - start >= this.recordsize;
if (index > size()) throw new IndexOutOfBoundsException("kelondroEcoFS.put(" + index + ") outside bounds (" + this.size() + ")"); if (index > size()) throw new IndexOutOfBoundsException("kelondroEcoFS.put(" + index + ") outside bounds (" + this.size() + ")");
// check if this is an empty entry // check if this is an empty entry
/*
if (isClean(b , start, this.recordsize)) { if (isClean(b , start, this.recordsize)) {
clean(index); clean(index);
return; return;
} }
*/
// check if index is inside of cache // check if index is inside of cache
int p = inCache(index); int p = inCache(index);
int q = (p >= 0) ? -1 : inBuffer(index); int q = (p >= 0) ? -1 : inBuffer(index);
@ -306,7 +306,7 @@ public class kelondroEcoFS {
assert false; assert false;
return false; return false;
} }
/*
public synchronized void clean(long index, byte[] b, int start) throws IOException { public synchronized void clean(long index, byte[] b, int start) throws IOException {
// removes an entry by cleaning (writing zero bytes to the file) // removes an entry by cleaning (writing zero bytes to the file)
// the entry that had been at the specific place before is copied to the given array b // the entry that had been at the specific place before is copied to the given array b
@ -378,7 +378,7 @@ public class kelondroEcoFS {
raf.seek((long) index * (long) this.recordsize); raf.seek((long) index * (long) this.recordsize);
raf.write(zero, 0, this.recordsize); raf.write(zero, 0, this.recordsize);
} }
*/
public synchronized void cleanLast(byte[] b, int start) throws IOException { public synchronized void cleanLast(byte[] b, int start) throws IOException {
cleanLast0(b, start); cleanLast0(b, start);
long i; long i;

View File

@ -219,7 +219,7 @@ public class plasmaDbImporter extends AbstractImporter implements dbImporter {
if (isAborted()) break; if (isAborted()) break;
// importing entity container to home db // importing entity container to home db
if (newContainer.size() > 0) { homeWordIndex.addEntries(newContainer, System.currentTimeMillis(), false); } if (newContainer.size() > 0) { homeWordIndex.addEntries(newContainer); }
// delete complete index entity file // delete complete index entity file
this.importWordIndex.deleteContainer(this.wordHash); this.importWordIndex.deleteContainer(this.wordHash);

View File

@ -122,9 +122,7 @@ public final class plasmaSearchEvent {
// do a global search // do a global search
this.rankedCache = new plasmaSearchRankingProcess(wordIndex, query, 2, max_results_preparation); this.rankedCache = new plasmaSearchRankingProcess(wordIndex, query, 2, max_results_preparation);
int fetchpeers = (int) (query.maximumTime / 500L); // number of target peers; means 10 peers in 10 seconds int fetchpeers = 30;
if (fetchpeers > 50) fetchpeers = 50;
if (fetchpeers < 30) fetchpeers = 30;
// the result of the fetch is then in the rcGlobal // the result of the fetch is then in the rcGlobal
long timer = System.currentTimeMillis(); long timer = System.currentTimeMillis();
@ -194,7 +192,7 @@ public final class plasmaSearchEvent {
// start worker threads to fetch urls and snippets // start worker threads to fetch urls and snippets
this.workerThreads = new resultWorker[workerThreadCount]; this.workerThreads = new resultWorker[workerThreadCount];
for (int i = 0; i < workerThreadCount; i++) { for (int i = 0; i < workerThreadCount; i++) {
this.workerThreads[i] = new resultWorker(i, query.maximumTime * 3); this.workerThreads[i] = new resultWorker(i, 10000);
this.workerThreads[i].start(); this.workerThreads[i].start();
} }
} else { } else {
@ -475,7 +473,7 @@ public final class plasmaSearchEvent {
// start worker threads to fetch urls and snippets // start worker threads to fetch urls and snippets
event.workerThreads = new resultWorker[workerThreadCount]; event.workerThreads = new resultWorker[workerThreadCount];
for (int i = 0; i < workerThreadCount; i++) { for (int i = 0; i < workerThreadCount; i++) {
event.workerThreads[i] = event.deployWorker(i, 3 * query.maximumTime); event.workerThreads[i] = event.deployWorker(i, 10000);
} }
} }
@ -619,7 +617,7 @@ public final class plasmaSearchEvent {
public ArrayList<ResultEntry> completeResults(long waitingtime) { public ArrayList<ResultEntry> completeResults(long waitingtime) {
long timeout = System.currentTimeMillis() + waitingtime; long timeout = System.currentTimeMillis() + waitingtime;
while ((this.resultList.size() < query.neededResults()) && (anyWorkerAlive()) && (System.currentTimeMillis() < timeout)) { while ((this.resultList.size() < query.neededResults()) && (anyWorkerAlive()) && (System.currentTimeMillis() < timeout)) {
try {Thread.sleep(200);} catch (InterruptedException e) {} try {Thread.sleep(100);} catch (InterruptedException e) {}
//System.out.println("+++DEBUG-completeResults+++ sleeping " + 200); //System.out.println("+++DEBUG-completeResults+++ sleeping " + 200);
} }
return this.resultList; return this.resultList;

View File

@ -78,7 +78,6 @@ public final class plasmaSearchQuery {
private int linesPerPage, offset; private int linesPerPage, offset;
public String prefer; public String prefer;
public int contentdom; public int contentdom;
public long maximumTime;
public String urlMask; public String urlMask;
public int domType; public int domType;
public String domGroupName; public String domGroupName;
@ -110,7 +109,6 @@ public final class plasmaSearchQuery {
this.contentdom = CONTENTDOM_ALL; this.contentdom = CONTENTDOM_ALL;
this.linesPerPage = lines; this.linesPerPage = lines;
this.offset = 0; this.offset = 0;
this.maximumTime = 3000;
this.urlMask = ".*"; this.urlMask = ".*";
this.domType = SEARCHDOM_LOCAL; this.domType = SEARCHDOM_LOCAL;
this.domGroupName = ""; this.domGroupName = "";
@ -125,7 +123,7 @@ public plasmaSearchQuery(
plasmaSearchRankingProfile ranking, plasmaSearchRankingProfile ranking,
int maxDistance, String prefer, int contentdom, int maxDistance, String prefer, int contentdom,
boolean onlineSnippetFetch, boolean onlineSnippetFetch,
int lines, int offset, long maximumTime, String urlMask, int lines, int offset, String urlMask,
int domType, String domGroupName, int domMaxTargets, int domType, String domGroupName, int domMaxTargets,
kelondroBitfield constraint, boolean allofconstraint) { kelondroBitfield constraint, boolean allofconstraint) {
this.queryString = queryString; this.queryString = queryString;
@ -137,7 +135,7 @@ public plasmaSearchQuery(
this.contentdom = contentdom; this.contentdom = contentdom;
this.linesPerPage = lines; this.linesPerPage = lines;
this.offset = offset; this.offset = offset;
this.maximumTime = Math.min(6000, maximumTime); //this.maximumTime = Math.min(6000, maximumTime);
this.urlMask = urlMask; this.urlMask = urlMask;
this.domType = domType; this.domType = domType;
this.domGroupName = domGroupName; this.domGroupName = domGroupName;
@ -288,7 +286,7 @@ public plasmaSearchQuery(
r.put("queryhashes", queryHashes); r.put("queryhashes", queryHashes);
r.put("querystring", queryString); r.put("querystring", queryString);
r.put("querycount", new Integer(linesPerPage)); r.put("querycount", new Integer(linesPerPage));
r.put("querytime", new Long(maximumTime)); //r.put("querytime", new Long(maximumTime));
r.put("resultcount", new Integer(searchcount)); r.put("resultcount", new Integer(searchcount));
r.put("resulttime", new Long(searchtime)); r.put("resulttime", new Long(searchtime));
r.put("resulturltime", new Long(urlretrieval)); r.put("resulturltime", new Long(urlretrieval));

View File

@ -187,7 +187,11 @@ public final class plasmaWordIndex implements indexRI {
} }
} }
public void addEntries(indexContainer entries, long updateTime, boolean dhtInCase) { public void addEntries(indexContainer entries) {
addEntries(entries, false);
}
public void addEntries(indexContainer entries, boolean dhtInCase) {
assert (entries.row().objectsize == indexRWIRowEntry.urlEntryRow.objectsize); assert (entries.row().objectsize == indexRWIRowEntry.urlEntryRow.objectsize);
// set dhtInCase depending on wordHash // set dhtInCase depending on wordHash
@ -195,10 +199,10 @@ public final class plasmaWordIndex implements indexRI {
// add the entry // add the entry
if (dhtInCase) { if (dhtInCase) {
dhtInCache.addEntries(entries, updateTime, true); dhtInCache.addEntries(entries);
dhtFlushControl(this.dhtInCache); dhtFlushControl(this.dhtInCache);
} else { } else {
dhtOutCache.addEntries(entries, updateTime, false); dhtOutCache.addEntries(entries);
dhtFlushControl(this.dhtOutCache); dhtFlushControl(this.dhtOutCache);
} }
} }
@ -244,6 +248,9 @@ public final class plasmaWordIndex implements indexRI {
if (c != null) containerList.add(c); if (c != null) containerList.add(c);
} }
// flush the containers // flush the containers
for (int i = 0; i < containerList.size(); i++) {
collections.addEntries((indexContainer) containerList.get(i));
}
collections.addMultipleEntries(containerList); collections.addMultipleEntries(containerList);
//System.out.println("DEBUG-Finished flush of " + count + " entries from RAM to DB in " + (System.currentTimeMillis() - start) + " milliseconds"); //System.out.println("DEBUG-Finished flush of " + count + " entries from RAM to DB in " + (System.currentTimeMillis() - start) + " milliseconds");
busyCacheFlush = false; busyCacheFlush = false;

View File

@ -417,6 +417,7 @@ public final class yacyClient {
post.put("partitions", partitions); post.put("partitions", partitions);
post.put("query", wordhashes); post.put("query", wordhashes);
post.put("exclude", excludehashes); post.put("exclude", excludehashes);
post.put("duetime", 1000);
post.put("urls", urlhashes); post.put("urls", urlhashes);
post.put("prefer", prefer); post.put("prefer", prefer);
post.put("filter", filter); post.put("filter", filter);
@ -573,7 +574,7 @@ public final class yacyClient {
// insert the containers to the index // insert the containers to the index
for (int m = 0; m < words; m++) { for (int m = 0; m < words; m++) {
wordIndex.addEntries(container[m], System.currentTimeMillis(), true); wordIndex.addEntries(container[m], true);
} }
// read index abstract // read index abstract