This commit is contained in:
luc 2015-12-15 23:30:30 +01:00
commit a2c08402af
11 changed files with 58 additions and 423 deletions

View File

@ -659,7 +659,6 @@ public class IndexControlRWIs_p {
sb.index,
sb.getRanking(),
"",//userAgent
false,
0.0d, 0.0d, 0.0d,
new String[0]);
final SearchEvent theSearch = SearchEventCache.getEvent(query, sb.peers, sb.tables, null, false, sb.loader, Integer.MAX_VALUE, Long.MAX_VALUE);
@ -670,8 +669,7 @@ public class IndexControlRWIs_p {
} else {
prop.put("searchresult", 3);
prop.put("searchresult_allurl", theSearch.local_rwi_available.get());
prop
.put("searchresult_description", theSearch.flagCount()[WordReferenceRow.flag_app_dc_description]);
prop.put("searchresult_description", theSearch.flagCount()[WordReferenceRow.flag_app_dc_description]);
prop.put("searchresult_title", theSearch.flagCount()[WordReferenceRow.flag_app_dc_title]);
prop.put("searchresult_creator", theSearch.flagCount()[WordReferenceRow.flag_app_dc_creator]);
prop.put("searchresult_subject", theSearch.flagCount()[WordReferenceRow.flag_app_dc_subject]);

View File

@ -158,6 +158,7 @@
</tr>
</table>
<!-- this cache table wasn't used for years
<p><strong>Object Read Caches:</strong></p>
<table border="0">
<tr class="TableHeader" valign="bottom">
@ -184,7 +185,7 @@
<td>Write Unique</td>
<td>Write Double</td>
<td>Deletes</td>
<!-- <td>Flushes</td> -->
<td>Flushes</td>
</tr>
#{ObjectList}#
<tr class="TableCellLight">
@ -206,13 +207,13 @@
<td align="right">#[objectMissCacheWriteUnique]#</td>
<td align="right">#[objectMissCacheWriteDouble]#</td>
<td align="right">#[objectMissCacheDeletes]#</td>
<!-- <td align="right">#[objectMissCacheFlushes]#</td> -->
<td align="right">#[objectMissCacheFlushes]#</td>
</tr>
#{/ObjectList}#
<tr class="TableCellDark">
<td colspan="19">Total Mem: #[objectHitCacheTotalMem]# MB (hit), #[objectMissCacheTotalMem]# MB (miss); Stop Grow when less than #[objectCacheStopGrow]# MB available left; Start Shrink when less than #[objectCacheStartShrink]# MB available left</td>
</tr>
</table>
</table> -->
<!-- other cache sizes -->
<p><strong>Other Caching Structures:</strong></p>
@ -225,32 +226,6 @@
<td>Insert</td>
<td>Delete</td>
</tr>
<!-- CachedSolrConnector was replaced by ConcurrentUpdateSolrConnector
<tr class="TableCellLight">
<td>Solr-Cache/Hit</td>
<td>#[solrcacheHit.size]#</td>
<td>#[solrcacheHit.Hit]#</td>
<td>#[solrcacheHit.Miss]#</td>
<td>#[solrcacheHit.Insert]#</td>
<td>(ARC)</td>
</tr>
<tr class="TableCellLight">
<td>Solr-Cache/Miss</td>
<td>#[solrcacheMiss.size]#</td>
<td>#[solrcacheMiss.Hit]#</td>
<td>#[solrcacheMiss.Miss]#</td>
<td>#[solrcacheMiss.Insert]#</td>
<td>(ARC)</td>
</tr>
<tr class="TableCellLight">
<td>Solr-Cache/Document</td>
<td>#[solrcacheDocument.size]#</td>
<td>#[solrcacheDocument.Hit]#</td>
<td>#[solrcacheDocument.Miss]#</td>
<td>#[solrcacheDocument.Insert]#</td>
<td>(ARC)</td>
</tr>
-->
<tr class="TableCellLight">
<td>DNSCache/Hit</td>
<td>#[namecacheHit.size]#</td>

View File

@ -36,7 +36,6 @@ import org.apache.solr.search.SolrCache;
import net.yacy.cora.protocol.Domains;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.kelondro.index.Cache;
import net.yacy.kelondro.index.RAMIndex;
import net.yacy.kelondro.table.Table;
import net.yacy.kelondro.util.Formatter;
@ -183,67 +182,51 @@ public class PerformanceMemory_p {
prop.put("indexcache", c);
prop.putNum("indexcacheTotalMem", totalhitmem / (1024d * 1024d));
// write object cache table
i = Cache.filenames();
c = 0;
long missmem, totalmissmem = 0;
totalhitmem = 0;
Map<Cache.StatKeys, String> mapy;
while (i.hasNext()) {
filename = i.next();
mapy = Cache.memoryStats(filename);
prop.put("ObjectList_" + c + "_objectCachePath", ((p = filename.indexOf("DATA",0)) < 0) ? filename : filename.substring(p));
// hit cache
hitmem = Long.parseLong(mapy.get(Cache.StatKeys.objectHitMem));
totalhitmem += hitmem;
prop.put("ObjectList_" + c + "_objectHitChunkSize", mapy.get(Cache.StatKeys.objectHitChunkSize));
prop.putNum("ObjectList_" + c + "_objectHitCacheCount", mapy.get(Cache.StatKeys.objectHitCacheCount));
prop.put("ObjectList_" + c + "_objectHitCacheMem", Formatter.bytesToString(hitmem));
prop.putNum("ObjectList_" + c + "_objectHitCacheReadHit", mapy.get(Cache.StatKeys.objectHitCacheReadHit));
prop.putNum("ObjectList_" + c + "_objectHitCacheReadMiss", mapy.get(Cache.StatKeys.objectHitCacheReadMiss));
prop.putNum("ObjectList_" + c + "_objectHitCacheWriteUnique", mapy.get(Cache.StatKeys.objectHitCacheWriteUnique));
prop.putNum("ObjectList_" + c + "_objectHitCacheWriteDouble", mapy.get(Cache.StatKeys.objectHitCacheWriteDouble));
prop.putNum("ObjectList_" + c + "_objectHitCacheDeletes", mapy.get(Cache.StatKeys.objectHitCacheDeletes));
prop.putNum("ObjectList_" + c + "_objectHitCacheFlushes", mapy.get(Cache.StatKeys.objectHitCacheFlushes));
// miss cache
missmem = Long.parseLong(mapy.get(Cache.StatKeys.objectMissMem));
totalmissmem += missmem;
prop.put("ObjectList_" + c + "_objectMissChunkSize", mapy.get(Cache.StatKeys.objectMissChunkSize));
prop.putNum("ObjectList_" + c + "_objectMissCacheCount", mapy.get(Cache.StatKeys.objectMissCacheCount));
prop.putHTML("ObjectList_" + c + "_objectMissCacheMem", Formatter.bytesToString(missmem));
prop.putNum("ObjectList_" + c + "_objectMissCacheReadHit", mapy.get(Cache.StatKeys.objectMissCacheReadHit));
prop.putNum("ObjectList_" + c + "_objectMissCacheReadMiss", mapy.get(Cache.StatKeys.objectMissCacheReadMiss));
prop.putNum("ObjectList_" + c + "_objectMissCacheWriteUnique", mapy.get(Cache.StatKeys.objectMissCacheWriteUnique));
prop.putNum("ObjectList_" + c + "_objectMissCacheWriteDouble", mapy.get(Cache.StatKeys.objectMissCacheWriteDouble));
prop.putNum("ObjectList_" + c + "_objectMissCacheDeletes", mapy.get(Cache.StatKeys.objectMissCacheDeletes));
//prop.put("ObjectList_" + c + "_objectMissCacheFlushes", mapy.get(Cache.StatKeys.objectMissCacheFlushes));
c++;
}
prop.put("ObjectList", c);
prop.putNum("objectCacheStopGrow", Cache.getMemStopGrow() / (1024d * 1024d));
prop.putNum("objectCacheStartShrink", Cache.getMemStartShrink() / (1024d * 1024d));
prop.putNum("objectHitCacheTotalMem", totalhitmem / (1024d * 1024d));
prop.putNum("objectMissCacheTotalMem", totalmissmem / (1024d * 1024d));
// other caching structures
// final CachedSolrConnector solr = (CachedSolrConnector) Switchboard.getSwitchboard().index.fulltext().getDefaultConnector();
// prop.putNum("solrcacheHit.size", solr.nameCacheHitSize());
// prop.putNum("solrcacheHit.Hit", solr.hitCache_Hit);
// prop.putNum("solrcacheHit.Miss", solr.hitCache_Miss);
// prop.putNum("solrcacheHit.Insert", solr.hitCache_Insert);
//
// prop.putNum("solrcacheMiss.size", solr.nameCacheMissSize());
// prop.putNum("solrcacheMiss.Hit", solr.missCache_Hit);
// prop.putNum("solrcacheMiss.Miss", solr.missCache_Miss);
// prop.putNum("solrcacheMiss.Insert", solr.missCache_Insert);
//
// prop.putNum("solrcacheDocument.size", solr.nameCacheDocumentSize());
// prop.putNum("solrcacheDocument.Hit", solr.documentCache_Hit);
// prop.putNum("solrcacheDocument.Miss", solr.documentCache_Miss);
// prop.putNum("solrcacheDocument.Insert", solr.documentCache_Insert);
// this cache table wasn't used for years
// // write object cache table
// i = Cache.filenames();
// c = 0;
// long missmem, totalmissmem = 0;
// totalhitmem = 0;
// Map<Cache.StatKeys, String> mapy;
// while (i.hasNext()) {
// filename = i.next();
// mapy = Cache.memoryStats(filename);
// prop.put("ObjectList_" + c + "_objectCachePath", ((p = filename.indexOf("DATA",0)) < 0) ? filename : filename.substring(p));
//
// // hit cache
// hitmem = Long.parseLong(mapy.get(Cache.StatKeys.objectHitMem));
// totalhitmem += hitmem;
// prop.put("ObjectList_" + c + "_objectHitChunkSize", mapy.get(Cache.StatKeys.objectHitChunkSize));
// prop.putNum("ObjectList_" + c + "_objectHitCacheCount", mapy.get(Cache.StatKeys.objectHitCacheCount));
// prop.put("ObjectList_" + c + "_objectHitCacheMem", Formatter.bytesToString(hitmem));
// prop.putNum("ObjectList_" + c + "_objectHitCacheReadHit", mapy.get(Cache.StatKeys.objectHitCacheReadHit));
// prop.putNum("ObjectList_" + c + "_objectHitCacheReadMiss", mapy.get(Cache.StatKeys.objectHitCacheReadMiss));
// prop.putNum("ObjectList_" + c + "_objectHitCacheWriteUnique", mapy.get(Cache.StatKeys.objectHitCacheWriteUnique));
// prop.putNum("ObjectList_" + c + "_objectHitCacheWriteDouble", mapy.get(Cache.StatKeys.objectHitCacheWriteDouble));
// prop.putNum("ObjectList_" + c + "_objectHitCacheDeletes", mapy.get(Cache.StatKeys.objectHitCacheDeletes));
// prop.putNum("ObjectList_" + c + "_objectHitCacheFlushes", mapy.get(Cache.StatKeys.objectHitCacheFlushes));
//
// // miss cache
// missmem = Long.parseLong(mapy.get(Cache.StatKeys.objectMissMem));
// totalmissmem += missmem;
// prop.put("ObjectList_" + c + "_objectMissChunkSize", mapy.get(Cache.StatKeys.objectMissChunkSize));
// prop.putNum("ObjectList_" + c + "_objectMissCacheCount", mapy.get(Cache.StatKeys.objectMissCacheCount));
// prop.putHTML("ObjectList_" + c + "_objectMissCacheMem", Formatter.bytesToString(missmem));
// prop.putNum("ObjectList_" + c + "_objectMissCacheReadHit", mapy.get(Cache.StatKeys.objectMissCacheReadHit));
// prop.putNum("ObjectList_" + c + "_objectMissCacheReadMiss", mapy.get(Cache.StatKeys.objectMissCacheReadMiss));
// prop.putNum("ObjectList_" + c + "_objectMissCacheWriteUnique", mapy.get(Cache.StatKeys.objectMissCacheWriteUnique));
// prop.putNum("ObjectList_" + c + "_objectMissCacheWriteDouble", mapy.get(Cache.StatKeys.objectMissCacheWriteDouble));
// prop.putNum("ObjectList_" + c + "_objectMissCacheDeletes", mapy.get(Cache.StatKeys.objectMissCacheDeletes));
// //prop.put("ObjectList_" + c + "_objectMissCacheFlushes", mapy.get(Cache.StatKeys.objectMissCacheFlushes));
//
// c++;
// }
// prop.put("ObjectList", c);
// prop.putNum("objectCacheStopGrow", Cache.getMemStopGrow() / (1024d * 1024d));
// prop.putNum("objectCacheStartShrink", Cache.getMemStartShrink() / (1024d * 1024d));
// prop.putNum("objectHitCacheTotalMem", totalhitmem / (1024d * 1024d));
// prop.putNum("objectMissCacheTotalMem", totalmissmem / (1024d * 1024d));
prop.putNum("namecacheHit.size", Domains.nameCacheHitSize());
prop.putNum("namecacheHit.Hit", Domains.cacheHit_Hit);

View File

@ -251,7 +251,6 @@ public final class search {
indexSegment,
rankingProfile,
header.get(HeaderFramework.USER_AGENT, ""),
false,
0.0d,
0.0d,
0.0d,
@ -316,7 +315,6 @@ public final class search {
sb.index,
rankingProfile,
header.get(HeaderFramework.USER_AGENT, ""),
false,
0.0d,
0.0d,
0.0d,

View File

@ -669,9 +669,6 @@ public class yacysearch {
indexSegment,
ranking,
header.get(HeaderFramework.USER_AGENT, ""),
sb.getConfigBool(SwitchboardConstants.SEARCH_VERIFY_DELETE, false)
&& sb.getConfigBool(SwitchboardConstants.NETWORK_SEARCHVERIFY, false)
&& sb.peers.mySeed().getFlagAcceptRemoteIndex(),
lat, lon, rad,
sb.getConfigArray("search.navigation", ""));
EventTracker.delete(EventTracker.EClass.SEARCH);

View File

@ -213,7 +213,7 @@
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>templating-maven-plugin</artifactId>
<version>1.0-alpha-3</version>
<version>1.0.0</version>
<executions>
<execution>
<id>filter-src</id>

View File

@ -205,8 +205,7 @@ public class FederateSearchManager {
sb.index,
sb.getRanking(),
"",//userAgent
false,
0.0d, 0.0d, -1.0d,
0.0d, 0.0d, 0.0d,
new String[0]);
return query(query);

View File

@ -1,313 +0,0 @@
/**
* CachedSolrConnector
* Copyright 2013 by Michael Peter Christen
* First released 18.02.2013 at http://yacy.net
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program in the file lgpl21.txt
* If not, see <http://www.gnu.org/licenses/>.
*/
package net.yacy.cora.federate.solr.connector;
import java.io.IOException;
import java.util.Collection;
import java.util.LinkedHashMap;
import net.yacy.cora.sorting.ReversibleScoreMap;
import net.yacy.cora.storage.ARC;
import net.yacy.cora.storage.ConcurrentARC;
import net.yacy.kelondro.data.word.Word;
import net.yacy.kelondro.util.MemoryControl;
import net.yacy.search.schema.CollectionSchema;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.ModifiableSolrParams;
public class CachedSolrConnector extends AbstractSolrConnector implements SolrConnector {

    /** Sentinel stored in hitCache/missCache; only key presence is meaningful. */
    private final static Object EXIST = new Object();

    /** Backing connector; set to null after close(). */
    private SolrConnector solr;

    /** Caches whole documents, keyed by the id query string (see idQuery). */
    private final ARC<String, SolrDocument> documentCache;

    /** hitCache: ids known to exist; missCache: ids known to be absent. */
    public final ARC<String, Object> hitCache, missCache;

    public long documentCache_Hit = 0, documentCache_Miss = 0, documentCache_Insert = 0; // for statistics only; do not write
    public long hitCache_Hit = 0, hitCache_Miss = 0, hitCache_Insert = 0; // for statistics only; do not write
    public long missCache_Hit = 0, missCache_Miss = 0, missCache_Insert = 0; // for statistics only; do not write

    /** Build the Solr query string that selects a single document by its id. */
    private static final String idQuery(String id) {
        return CollectionSchema.id.getSolrFieldName() + ":\"" + id + "\"";
    }

    /**
     * Wrap a connector with ARC-based hit/miss/document caches.
     * @param c the connector to be wrapped (may later be closed via close())
     * @param hitCacheMax maximum number of entries in the hit cache
     * @param missCacheMax maximum number of entries in the miss cache
     * @param docCacheMax maximum number of entries in the document cache
     */
    public CachedSolrConnector(SolrConnector c, int hitCacheMax, int missCacheMax, int docCacheMax) {
        this.solr = c;
        int partitions = Runtime.getRuntime().availableProcessors() * 2;
        this.documentCache = new ConcurrentARC<String, SolrDocument>(docCacheMax, partitions);
        this.hitCache = new ConcurrentARC<String, Object>(hitCacheMax, partitions);
        this.missCache = new ConcurrentARC<String, Object>(missCacheMax, partitions);
    }

    @Override
    public int bufferSize() {
        return solr.bufferSize();
    }

    @Override
    public void clearCaches() {
        this.hitCache.clear();
        this.missCache.clear();
        this.documentCache.clear();
        if (this.solr != null) this.solr.commit(true);
    }

    @Override
    public boolean isClosed() {
        return this.solr == null || this.solr.isClosed();
    }

    @Override
    protected void finalize() throws Throwable {
        this.close();
    }

    @Override
    public synchronized void close() {
        this.clearCaches();
        if (this.solr != null) this.solr.close();
        this.solr = null;
    }

    /**
     * delete everything in the solr index
     * @throws IOException
     */
    @Override
    public void clear() throws IOException {
        this.clearCaches();
        if (this.solr != null) this.solr.clear();
    }

    /**
     * delete an entry from solr
     * @param id the url hash of the entry
     * @throws IOException
     */
    @Override
    public void deleteById(final String id) throws IOException {
        String q = idQuery(id);
        this.documentCache.remove(q);
        this.hitCache.remove(q);
        this.missCache.put(q, EXIST); // remember the deletion so future lookups short-circuit
        this.missCache_Insert++;
        if (this.solr != null) this.solr.deleteByQuery(q);
    }

    /**
     * delete a set of entries from solr; entries are identified by their url hash
     * @param ids a list of url hashes
     * @throws IOException
     */
    @Override
    public void deleteByIds(final Collection<String> ids) throws IOException {
        for (String id: ids) {
            String q = idQuery(id);
            this.documentCache.remove(q);
            this.hitCache.remove(q);
            this.missCache.put(q, EXIST);
            this.missCache_Insert++;
        }
        if (this.solr != null) this.solr.deleteByIds(ids);
    }

    @Override
    public void deleteByQuery(final String querystring) throws IOException {
        // an arbitrary query may match any cached entry, so all caches must be dropped
        this.clearCaches();
        this.solr.deleteByQuery(querystring);
    }

    /**
     * Fetch a single document, consulting the document/miss/hit caches first.
     * @param id the url hash of the document
     * @param fields the fields to load; empty means all fields (and enables document caching)
     * @return the document, or null if it does not exist
     * @throws IOException
     */
    @Override
    public SolrDocument getDocumentById(final String id, final String ... fields) throws IOException {
        assert id.length() == Word.commonHashLength : "wrong id: " + id;
        String q = idQuery(id);
        // the document cache holds full documents only, so it can serve only field-less requests
        SolrDocument doc = fields.length == 0 ? this.documentCache.get(q) : null;
        if (doc != null) {
            this.documentCache_Hit++;
            return doc;
        }
        this.documentCache_Miss++;
        if (this.missCache.containsKey(q)) {
            this.missCache_Hit++;
            return null;
        }
        this.missCache_Miss++;
        if (solr != null && ((doc = solr.getDocumentById(id, fields)) != null)) {
            addToCache(doc, fields.length == 0);
            return doc;
        }
        // check if there is a autocommit problem
        if (this.hitCache.containsKey(q)) {
            // the document should be there, therefore make a commit and check again
            if (solr != null && ((doc = solr.getDocumentById(id, fields)) != null)) {
                addToCache(doc, fields.length == 0);
            }
        }
        this.missCache.put(q, EXIST);
        this.missCache_Insert++;
        return null;
    }

    /**
     * add a Solr document
     * @param solrdoc
     * @throws IOException
     */
    @Override
    public void add(final SolrInputDocument solrdoc) throws IOException {
        String id = (String) solrdoc.getFieldValue(CollectionSchema.id.getSolrFieldName());
        assert id != null;
        if (id == null) return;
        String q = idQuery(id);
        SolrDocument doc = ClientUtils.toSolrDocument(solrdoc);
        addToCache(doc, true);
        this.documentCache.put(q, doc);
        this.documentCache_Insert++;
        if (this.solr != null) this.solr.add(solrdoc);
    }

    @Override
    public void add(final Collection<SolrInputDocument> solrdocs) throws IOException, SolrException {
        for (SolrInputDocument solrdoc: solrdocs) {
            String id = (String) solrdoc.getFieldValue(CollectionSchema.id.getSolrFieldName());
            assert id != null;
            if (id == null) continue;
            String q = idQuery(id);
            SolrDocument doc = ClientUtils.toSolrDocument(solrdoc);
            addToCache(doc, true);
            this.documentCache.put(q, doc);
            this.documentCache_Insert++;
        }
        if (this.solr != null) this.solr.add(solrdocs);
    }

    /**
     * get a query result from solr
     * to get all results set the query String to "*:*"
     * @param querystring
     * @throws IOException
     */
    @Override
    public SolrDocumentList getDocumentListByQuery(final String querystring, final String sort, final int offset, final int count, final String ... fields) throws IOException {
        // fast path: a single-document id lookup can be served by getDocumentById and its caches;
        // the length checks match id:"<12-char-hash>" (17 chars) or id:<12-char-hash> (15 chars)
        if (offset == 0 && count == 1 && querystring.startsWith("id:") &&
            ((querystring.length() == 17 && querystring.charAt(3) == '"' && querystring.charAt(16) == '"') ||
             querystring.length() == 15)) {
            final SolrDocumentList list = new SolrDocumentList();
            SolrDocument doc = getDocumentById(querystring.charAt(3) == '"' ? querystring.substring(4, querystring.length() - 1) : querystring.substring(3), fields);
            list.add(doc);
            // no addToCache(list) here because that was already handled in getDocumentById()
            return list;
        }
        if (this.solr != null) {
            SolrDocumentList list = this.solr.getDocumentListByQuery(querystring, sort, offset, count, fields);
            addToCache(list, fields.length == 0);
            return list;
        }
        // bugfix: the previous fallback dereferenced this.solr although it is provably null at
        // this point (guaranteed NullPointerException); return an empty result list instead
        return new SolrDocumentList();
    }

    @Override
    public QueryResponse getResponseByParams(ModifiableSolrParams query) throws IOException, SolrException {
        QueryResponse list = this.solr.getResponseByParams(query);
        return list;
    }

    @Override
    public SolrDocumentList getDocumentListByParams(ModifiableSolrParams params) throws IOException, SolrException {
        SolrDocumentList sdl = this.solr.getDocumentListByParams(params);
        return sdl;
    }

    @Override
    public long getCountByQuery(final String querystring) throws IOException {
        return this.solr.getCountByQuery(querystring);
    }

    @Override
    public LinkedHashMap<String, ReversibleScoreMap<String>> getFacets(final String query, final int maxresults, final String ... fields) throws IOException {
        return this.solr.getFacets(query, maxresults, fields);
    }

    /** Add all documents of a result list to the caches; drop caches first under memory pressure. */
    private void addToCache(SolrDocumentList list, boolean doccache) {
        if (MemoryControl.shortStatus()) clearCaches();
        for (final SolrDocument solrdoc: list) {
            addToCache(solrdoc, doccache);
        }
    }

    /** Mark a document as existing and optionally store it in the document cache. */
    private void addToCache(SolrDocument doc, boolean doccach) {
        String id = (String) doc.getFieldValue(CollectionSchema.id.getSolrFieldName());
        String q = idQuery(id);
        this.missCache.remove(q);
        this.hitCache.put(q, EXIST);
        this.hitCache_Insert++;
        if (doccach) {
            this.documentCache.put(q, doc);
            this.documentCache_Insert++;
        }
    }

    @Override
    public long getSize() {
        long s = this.solr.getSize();
        return Math.max(this.documentCache.size(), Math.max(this.hitCache.size(), s)); // this might be incorrect if there are other requests than "id:.." in the cache
    }

    public int nameCacheHitSize() {
        return this.hitCache.size();
    }

    public int nameCacheMissSize() {
        return this.missCache.size();
    }

    public int nameCacheDocumentSize() {
        return this.documentCache.size();
    }

    @Override
    public void commit(boolean softCommit) {
        this.solr.commit(softCommit);
    }

    @Override
    public void optimize(int maxSegments) {
        this.solr.optimize(maxSegments);
    }

    @Override
    public int getSegmentCount() {
        return this.solr.getSegmentCount();
    }
}

View File

@ -141,7 +141,6 @@ public final class QueryParams {
public int transmitcount; // number of results that had been shown to the user
public long searchtime, urlretrievaltime, snippetcomputationtime; // time to perform the search, to get all the urls, and to compute the snippets
public final String userAgent;
protected boolean filterfailurls;
protected double lat, lon, radius;
public LinkedHashSet<String> facetfields;
private SolrQuery cachedQuery;
@ -173,7 +172,6 @@ public final class QueryParams {
final Segment indexSegment,
final RankingProfile ranking,
final String userAgent,
final boolean filterfailurls,
final double lat,
final double lon,
final double radius,
@ -240,7 +238,6 @@ public final class QueryParams {
this.indexSegment = indexSegment;
this.userAgent = userAgent;
this.transmitcount = 0;
this.filterfailurls = filterfailurls;
// we normalize here the location and radius because that should cause a better caching
// and as surplus it will increase privacy
this.lat = Math.floor(lat * this.kmNormal) / this.kmNormal;

View File

@ -170,7 +170,8 @@ public class SearchEventCache {
// start a new event
Switchboard sb = Switchboard.getSwitchboard();
final boolean delete = sb == null || Switchboard.getSwitchboard().getConfigBool(SwitchboardConstants.SEARCH_VERIFY_DELETE, true);
final boolean delete = sb == null || Switchboard.getSwitchboard().getConfigBool(SwitchboardConstants.SEARCH_VERIFY_DELETE, true)
|| (sb.getConfigBool(SwitchboardConstants.NETWORK_SEARCHVERIFY, false) && sb.peers.mySeed().getFlagAcceptRemoteIndex());
final boolean addToLocalIdx = sb == null || Switchboard.getSwitchboard().getConfigBool(SwitchboardConstants.REMOTESEARCH_RESULT_STORE, true);
event = new SearchEvent(query, peers, workTables, preselectedPeerHashes, generateAbstracts, loader, remote_maxcount, remote_maxtime, delete, addToLocalIdx);
MemoryControl.request(100 * 1024 * 1024, false); // this may trigger a short memory status which causes a reducing of cache space of other threads

View File

@ -93,7 +93,7 @@ public class htmlParserTest extends TestCase {
// test link with inline html in text
// expectation to deliver pure text as it is possibly indexed in outboundlinks_anchortext_txt/inboundlinks_anchortext_txt
final AnchorURL url = new AnchorURL("http://localhost/");
final String mimetype = "text/html";
final String charset = "UTF-8";
final String testhtml = "<html><body>"
+ "<a href='x1.html'><span>testtext</span></a>" // "testtext"
+ "<a href=\"http://localhost/x2.html\"> <i id=\"home-icon\" class=\"img-sprite\"></i>Start</a>" // "Start"
@ -101,7 +101,7 @@ public class htmlParserTest extends TestCase {
+ "<figure><img width=\"550px\" title=\"image as exemple\" alt=\"image as exemple\" src=\"./img/my_image.png\"></figrue>" // + img width 550 (+html5 figure)
+ "</body></html>";
ContentScraper scraper = parseToScraper(url, mimetype, new VocabularyScraper(), 0, testhtml, 10);
ContentScraper scraper = parseToScraper(url, charset, new VocabularyScraper(), 0, testhtml, 10);
List<AnchorURL> anchorlist = scraper.getAnchors();
String linktxt = anchorlist.get(0).getTextProperty();
@ -126,7 +126,7 @@ public class htmlParserTest extends TestCase {
@Test
public void testParseToScraper_TagTest() throws Exception {
final AnchorURL url = new AnchorURL("http://localhost/");
final String mimetype = "text/html";
final String charset = "UTF-8";
final String textSource = "test text";
final String testhtml = "<html>"
+ "<head><style type=\"text/css\"> h1 { color: #ffffff; }</style></head>"
@ -134,7 +134,7 @@ public class htmlParserTest extends TestCase {
+ "<p>" + textSource + "</p>"
+ "</body></html>";
ContentScraper scraper = parseToScraper(url, mimetype, new VocabularyScraper(), 0, testhtml, 10);
ContentScraper scraper = parseToScraper(url, charset, new VocabularyScraper(), 0, testhtml, 10);
String txt = scraper.getText();
System.out.println("ScraperTagTest: [" + textSource + "] = [" + txt + "]");