Preset proper HashMap dimensions: this should prevent re-hashing and improve performance.

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@6511 6c8d7289-2bf4-0310-a012-ef5d649a1542
This commit is contained in:
orbiter 2009-12-02 14:01:19 +00:00
parent 4a5100789f
commit e34e63a039
21 changed files with 39 additions and 47 deletions

View File

@@ -77,7 +77,7 @@ public class add_p {
}
private static void publishNews(final String url, final String title, final String description, final String tagsString) {
// create a news message
final HashMap<String, String> map = new HashMap<String, String>();
final HashMap<String, String> map = new HashMap<String, String>(8);
map.put("url", url.replace(',', '|'));
map.put("title", title.replace(',', ' '));
map.put("description", description.replace(',', ' '));

View File

@@ -314,7 +314,7 @@ public class CrawlProfile {
final int cacheStrategy) {
if (name == null || name.length() == 0) throw new NullPointerException("name must not be null");
final String handle = (startURL == null) ? Base64Order.enhancedCoder.encode(Digest.encodeMD5Raw(Long.toString(System.currentTimeMillis()))).substring(0, Word.commonHashLength) : startURL.hash();
mem = new HashMap<String, String>();
mem = new HashMap<String, String>(40);
mem.put(HANDLE, handle);
mem.put(NAME, name);
mem.put(START_URL, (startURL == null) ? "" : startURL.toNormalform(true, false));

View File

@@ -59,10 +59,10 @@ public final class ResultURLs {
private final Map<EventOrigin, LinkedList<String>> resultStacks;
private final Map<EventOrigin, ScoreCluster<String>> resultDomains;
public ResultURLs() {
public ResultURLs(int initialStackCapacity) {
// init result stacks
resultStacks = new HashMap<EventOrigin, LinkedList<String>>();
resultDomains = new HashMap<EventOrigin, ScoreCluster<String>>();
resultStacks = new HashMap<EventOrigin, LinkedList<String>>(initialStackCapacity);
resultDomains = new HashMap<EventOrigin, ScoreCluster<String>>(initialStackCapacity);
for (EventOrigin origin: EventOrigin.values()) {
resultStacks.put(origin, new LinkedList<String>());
resultDomains.put(origin, new ScoreCluster<String>());
@@ -271,7 +271,7 @@ public final class ResultURLs {
* @param args
*/
public static void main(final String[] args) {
final ResultURLs results = new ResultURLs();
final ResultURLs results = new ResultURLs(10);
try {
final DigestURI url = new DigestURI("http", "www.yacy.net", 80, "/");
final URIMetadataRow urlRef = new URIMetadataRow(url, "YaCy Homepage", "", "", "", new Date(), new Date(), new Date(), "", new byte[] {}, 123, 42, '?', new Bitfield(), "de", 0, 0, 0, 0, 0, 0);

View File

@@ -99,14 +99,14 @@ public class RobotsEntry {
this.allowPathList = new LinkedList<String>();
this.denyPathList = new LinkedList<String>();
this.mem = new HashMap<String, String>(5);
this.mem = new HashMap<String, String>(10);
if (loadedDate != null) this.mem.put(LOADED_DATE,Long.toString(loadedDate.getTime()));
if (modDate != null) this.mem.put(MOD_DATE,Long.toString(modDate.getTime()));
if (eTag != null) this.mem.put(ETAG,eTag);
if (sitemap != null) this.mem.put(SITEMAP,sitemap);
if (crawlDelayMillis > 0) this.mem.put(CRAWL_DELAY_MILLIS, Long.toString(crawlDelayMillis));
if ((allowPathList != null)&&(allowPathList.size()>0)) {
if (allowPathList != null && !allowPathList.isEmpty()) {
this.allowPathList.addAll(allowPathList);
final StringBuilder pathListStr = new StringBuilder(allowPathList.size() * 30);
@@ -117,7 +117,7 @@ public class RobotsEntry {
this.mem.put(ALLOW_PATH_LIST,pathListStr.substring(0,pathListStr.length()-1));
}
if ((disallowPathList != null)&&(disallowPathList.size()>0)) {
if (disallowPathList != null && !disallowPathList.isEmpty()) {
this.denyPathList.addAll(disallowPathList);
final StringBuilder pathListStr = new StringBuilder(disallowPathList.size() * 30);

View File

@@ -90,10 +90,6 @@ public class ZURL implements Iterable<ZURL.Entry> {
this.stack = new ConcurrentLinkedQueue<String>();
}
public int size() {
return urlIndex.size() ;
}
public void clear() throws IOException {
if (urlIndex != null) urlIndex.clear();
if (stack != null) stack.clear();

View File

@@ -224,7 +224,7 @@ public class DidYouMean {
public void test(String s) throws InterruptedException {
Set<String> libr = LibraryProvider.dymLib.recommend(s);
libr.addAll(LibraryProvider.geoDB.recommend(s));
if (libr.size() != 0) createGen = false;
if (!libr.isEmpty()) createGen = false;
for (String t: libr) guessLib.put(t);
if (createGen) guessGen.put(s);
}

View File

@@ -121,7 +121,7 @@ public class RankingProfile {
this(ContentDomain.TEXT); // set defaults
if ((profile != null) && (profile.length() > 0)) {
//parse external form
final HashMap<String, Integer> coeff = new HashMap<String, Integer>();
final HashMap<String, Integer> coeff = new HashMap<String, Integer>(40);
final String[] elts = ((profile.startsWith("{") && (profile.endsWith("}"))) ? profile.substring(1, profile.length() - 1) : profile).split(",");
int p;
final int s = (prefix == null) ? 0 : prefix.length();
@@ -187,7 +187,7 @@ public class RankingProfile {
}
public Map<String, String> preToExternalMap(final String prefix) {
final Map<String, String> ext = new HashMap<String, String>();
final Map<String, String> ext = new HashMap<String, String>(40);
ext.put(prefix + DOMLENGTH, Integer.toString(coeff_domlength));
ext.put(prefix + YBR, Integer.toString(coeff_ybr));
ext.put(prefix + DATE, Integer.toString(coeff_date));

View File

@@ -75,8 +75,6 @@ public class ReferenceOrder {
public void run() {
BlockingQueue<WordReferenceVars> vars = WordReferenceVars.transform(container);
WordReferenceVars entryMin = null;
WordReferenceVars entryMax = null;
HashMap<String, Integer> doms0 = new HashMap<String, Integer>();
Integer int1 = 1;
@@ -88,8 +86,8 @@ public class ReferenceOrder {
while ((iEntry = vars.take()) != WordReferenceVars.poison) {
decodedEntries.put(iEntry);
// find min/max
if (entryMin == null) entryMin = iEntry.clone(); else entryMin.min(iEntry);
if (entryMax == null) entryMax = iEntry.clone(); else entryMax.max(iEntry);
if (min == null) min = iEntry.clone(); else min.min(iEntry);
if (max == null) max = iEntry.clone(); else max.max(iEntry);
// update domcount
dom = iEntry.metadataHash().substring(6);
count = doms0.get(dom);
@@ -113,8 +111,6 @@ public class ReferenceOrder {
} catch (Exception e) {
Log.logException(e);
} finally {
if (min == null) min = entryMin.clone(); else min.min(entryMin);
if (max == null) max = entryMax.clone(); else max.max(entryMax);
try {
decodedEntries.put(WordReferenceVars.poison);
} catch (InterruptedException e) {}

View File

@@ -331,7 +331,7 @@ public final class SearchEvent {
}
*/
final TreeMap<String, String> abstractJoin = (rcAbstracts.size() == query.queryHashes.size()) ? SetTools.joinConstructive(rcAbstracts.values(), true) : new TreeMap<String, String>();
if (abstractJoin.size() != 0) {
if (!abstractJoin.isEmpty()) {
//System.out.println("DEBUG-INDEXABSTRACT: index abstracts delivered " + abstractJoin.size() + " additional results for secondary search");
// generate query for secondary search
final TreeMap<String, String> secondarySearchURLs = new TreeMap<String, String>(); // a (peerhash:urlhash-liststring) mapping

View File

@@ -385,7 +385,7 @@ public final class Switchboard extends serverSwitch {
indexSegments.setSegment(Segments.Process.PUBLIC, getConfig(SwitchboardConstants.SEGMENT_PUBLIC, "default"));
// init crawl results monitor cache
crawlResults = new ResultURLs();
crawlResults = new ResultURLs(100);
// start yacy core
log.logConfig("Starting YaCy Protocol Core");

View File

@@ -37,12 +37,12 @@ public final class serverClassLoader extends ClassLoader {
public serverClassLoader() {
//super(ClassLoader.getSystemClassLoader());
super(Thread.currentThread().getContextClassLoader());
this.classes = new HashMap<File, Class<?>>();
this.classes = new HashMap<File, Class<?>>(100);
}
public serverClassLoader(final ClassLoader parent) {
super(parent);
classes = new HashMap<File, Class<?>>();
classes = new HashMap<File, Class<?>>(100);
}
public Package[] packages() {

View File

@@ -211,7 +211,7 @@ public class Dispatcher {
for (ReferenceContainer<WordReference> c: containers) {
container = this.segment.termIndex().delete(c.getTermHash());
if (this.log.isFine()) this.log.logFine("selected " + container.size() + " urls for word '" + c.getTermHashAsString() + "'");
if (container.size() != 0) rc.add(container);
if (!container.isEmpty()) rc.add(container);
}
}

View File

@@ -300,7 +300,7 @@ public final class Condenser {
int idx;
int wordInSentenceCounter = 1;
boolean comb_indexof = false, last_last = false, last_index = false;
final HashMap<StringBuilder, Phrase> sentences = new HashMap<StringBuilder, Phrase>();
final HashMap<StringBuilder, Phrase> sentences = new HashMap<StringBuilder, Phrase>(100);
// read source
final sievedWordsEnum wordenum = new sievedWordsEnum(is);

View File

@@ -175,10 +175,10 @@ public class CharacterCoding {
"\u00FF","&yuml;"
};
private final static HashMap<String, Character> html2unicode4xml = new HashMap<String, Character>();
private final static HashMap<String, Character> html2unicode4html = new HashMap<String, Character>();
private final static HashMap<Character, String> unicode2html4xml = new HashMap<Character, String>();
private final static HashMap<Character, String> unicode2html4html = new HashMap<Character, String>();
private final static HashMap<String, Character> html2unicode4xml = new HashMap<String, Character>(mapping4xml.length * 2);
private final static HashMap<String, Character> html2unicode4html = new HashMap<String, Character>(mapping4html.length * 2);
private final static HashMap<Character, String> unicode2html4xml = new HashMap<Character, String>(mapping4xml.length * 2);
private final static HashMap<Character, String> unicode2html4html = new HashMap<Character, String>(mapping4html.length * 2);
static {
Character c;
for (int i = 0; i < mapping4html.length; i += 2) {

View File

@@ -136,7 +136,7 @@ public final class Cache implements ObjectIndex, Iterable<Row.Entry> {
private final Map<String, String> memoryStats() {
// returns statistical data about this object
final HashMap<String, String> map = new HashMap<String, String>();
final HashMap<String, String> map = new HashMap<String, String>(20);
map.put("objectHitChunkSize", (readHitCache == null) ? "0" : Integer.toString(readHitCache.rowdef.objectsize));
map.put("objectHitCacheCount", (readHitCache == null) ? "0" : Integer.toString(readHitCache.size()));
map.put("objectHitMem", (readHitCache == null) ? "0" : Long.toString(readHitCache.rowdef.objectsize * readHitCache.size()));

View File

@@ -150,14 +150,14 @@ public final class ObjectArrayCache {
}
public final byte[] removeoneb() {
if ((index1 != null) && (index1.size() != 0)) {
if (index1 != null && !index1.isEmpty()) {
final Row.Entry indexentry = index1.removeOne();
assert (indexentry != null);
if (indexentry == null) return null;
//assert consistencyAnalysis0() : "consistency problem: " + consistencyAnalysis();
return indexentry.getColBytes(1);
}
if ((index0 != null) && (index0.size() != 0)) {
if (index0 != null && !index0.isEmpty()) {
final Row.Entry indexentry = index0.removeOne();
assert (indexentry != null);
if (indexentry == null) return null;

View File

@@ -190,34 +190,34 @@ public final class ObjectIndexCache implements ObjectIndex, Iterable<Row.Entry>
}
public final synchronized Row.Entry removeOne() {
if ((index1 != null) && (index1.size() != 0)) {
if (index1 != null && !index1.isEmpty()) {
return index1.removeOne();
}
if ((index0 != null) && (index0.size() != 0)) {
if (index0 != null && !index0.isEmpty()) {
return index0.removeOne();
}
return null;
}
public final synchronized int size() {
if ((index0 != null) && (index1 == null)) {
if (index0 != null && index1 == null) {
return index0.size();
}
if ((index0 == null) && (index1 != null)) {
if (index0 == null && index1 != null) {
return index1.size();
}
assert ((index0 != null) && (index1 != null));
assert (index0 != null && index1 != null);
return index0.size() + index1.size();
}
public final synchronized boolean isEmpty() {
if ((index0 != null) && (index1 == null)) {
if (index0 != null && index1 == null) {
return index0.isEmpty();
}
if ((index0 == null) && (index1 != null)) {
if (index0 == null && index1 != null) {
return index1.isEmpty();
}
assert ((index0 != null) && (index1 != null));
assert (index0 != null && index1 != null);
if (!index0.isEmpty()) return false;
if (!index1.isEmpty()) return false;
return true;

View File

@@ -50,7 +50,7 @@ public class TermSearch <ReferenceType extends Reference> {
new HashMap<byte[], ReferenceContainer<ReferenceType>>(0) :
base.searchConjunction(queryHashes, urlselection);
if ((inclusionContainers.size() != 0) &&
if (!inclusionContainers.isEmpty() &&
(inclusionContainers.size() < queryHashes.size()))
inclusionContainers = new HashMap<byte[], ReferenceContainer<ReferenceType>>(0); // prevent that only a subset is returned

View File

@@ -265,7 +265,7 @@ public class Table implements ObjectIndex, Iterable<Row.Entry> {
private final Map<String, String> memoryStats() {
// returns statistical data about this object
assert table == null || table.size() == index.size() : "table.size() = " + table.size() + ", index.size() = " + index.size();
final HashMap<String, String> map = new HashMap<String, String>();
final HashMap<String, String> map = new HashMap<String, String>(8);
map.put("tableSize", Integer.toString(index.size()));
map.put("tableKeyChunkSize", Integer.toString(index.row().objectsize));
map.put("tableKeyMem", Integer.toString(index.row().objectsize * index.size()));

View File

@@ -357,7 +357,7 @@ public class Domains {
"INT=International"
};
private static HashMap<String, Integer> TLDID = new HashMap<String, Integer>();
private static HashMap<String, Integer> TLDID = new HashMap<String, Integer>(32);
//private static HashMap<String, String> TLDName = new HashMap<String, String>();
private static void insertTLDProps(final String[] TLDList, final int id) {

View File

@@ -168,7 +168,7 @@ public class ISO639 {
"zh-Chinese",
"zu-Zulu"};
static HashMap<String, String> mapping = new HashMap<String, String>();
static HashMap<String, String> mapping = new HashMap<String, String>(codes.length);
static {
for (int i = 0; i < codes.length; i++) {