removed warnings

This commit is contained in:
Michael Peter Christen 2012-06-05 20:03:43 +02:00
parent 62ae9bbfda
commit 461a0ce052
10 changed files with 25 additions and 28 deletions

View File

@ -210,7 +210,6 @@ public class sharedBlacklist_p {
prop.put("page", "1"); //result page
prop.put("status", STATUS_ENTRIES_ADDED); //list of added Entries
int count = 0;//counter of added entries
PrintWriter pw = null;
try {
// open the blacklist file
@ -238,7 +237,6 @@ public class sharedBlacklist_p {
// append the item to the file
pw.println(newItem);
count++;
if (Switchboard.urlBlacklist != null) {
final String supportedBlacklistTypesStr = Blacklist.BLACKLIST_TYPES_STRING;
final String[] supportedBlacklistTypes = supportedBlacklistTypesStr.split(",");

View File

@ -34,7 +34,6 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Pattern;
import de.anomic.server.serverObjects;
import de.anomic.server.servletProperties;
import net.yacy.cora.document.ASCII;
import net.yacy.cora.document.MultiProtocolURI;

View File

@ -67,7 +67,6 @@ public final class CrawlStacker {
private final Log log = new Log("STACKCRAWL");
private final WorkflowProcessor<Request> fastQueue, slowQueue;
private long dnsMiss;
private final CrawlQueues nextQueue;
private final CrawlSwitchboard crawler;
private final Segment indexSegment;
@ -89,8 +88,6 @@ public final class CrawlStacker {
this.crawler = cs;
this.indexSegment = indexSegment;
this.peers = peers;
//this.dnsHit = 0;
this.dnsMiss = 0;
this.acceptLocalURLs = acceptLocalURLs;
this.acceptGlobalURLs = acceptGlobalURLs;
this.domainList = domainList;
@ -179,7 +176,6 @@ public final class CrawlStacker {
} else {
try {
this.slowQueue.enQueue(entry);
this.dnsMiss++;
} catch (final InterruptedException e) {
Log.logException(e);
}

View File

@ -25,6 +25,7 @@
package net.yacy.cora.sorting;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
@ -39,10 +40,11 @@ import java.util.concurrent.TimeUnit;
* If the queue gets larger that the given maxsize, then elements from the tail of the queue
* are drained (deleted).
*/
public class WeakPriorityBlockingQueue<E> {
public class WeakPriorityBlockingQueue<E> implements Serializable {
private final TreeSet<Element<E>> queue; // object within the stack, ordered using a TreeSet
private static final long serialVersionUID = 4573442576760691887L;
private final TreeSet<Element<E>> queue; // object within the stack, ordered using a TreeSet
private final Semaphore enqueued; // semaphore for elements in the stack
private final ArrayList<Element<E>> drained; // objects that had been on the stack but had been removed
protected int maxsize;
@ -274,7 +276,7 @@ public class WeakPriorityBlockingQueue<E> {
return this.drained.iterator();
}
public interface Element<E> {
public interface Element<E> extends Serializable {
public long getWeight();
public E getElement();
public boolean equals(Element<E> o);
@ -284,9 +286,11 @@ public class WeakPriorityBlockingQueue<E> {
public String toString();
}
protected abstract static class AbstractElement<E> implements Element<E> {
protected abstract static class AbstractElement<E> implements Element<E>, Serializable {
public long weight;
private static final long serialVersionUID = -7026597258248026566L;
public long weight;
public E element;
public long getWeight() {
@ -318,7 +322,9 @@ public class WeakPriorityBlockingQueue<E> {
*/
public static class NaturalElement<E> extends AbstractElement<E> implements Element<E>, Comparable<NaturalElement<E>>, Comparator<NaturalElement<E>> {
public NaturalElement(final E element, final long weight) {
private static final long serialVersionUID = 6816543012966928794L;
public NaturalElement(final E element, final long weight) {
this.element = element;
this.weight = weight;
}
@ -347,7 +353,9 @@ public class WeakPriorityBlockingQueue<E> {
*/
public static class ReverseElement<E> extends AbstractElement<E> implements Element<E>, Comparable<ReverseElement<E>>, Comparator<ReverseElement<E>> {
public ReverseElement(final E element, final long weight) {
private static final long serialVersionUID = -8166724491837508921L;
public ReverseElement(final E element, final long weight) {
this.element = element;
this.weight = weight;
}

View File

@ -59,7 +59,6 @@ public class ImportDump {
FileUtils.copy(dump, baos);
String s = UTF8.String(baos.toByteArray());
int batchSize = 0;
int p, q;
String t;
loop: while (s.length() > 0) {
@ -78,11 +77,9 @@ public class ImportDump {
s = s.substring(p);
//if (batchSize + t.length() >= maxBatch) {
statement.executeBatch();
batchSize = 0;
//}
System.out.println(t);
statement.addBatch(t);
batchSize += t.length();
}
statement.executeBatch();
} catch (SQLException e) {

View File

@ -27,6 +27,7 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.net.MalformedURLException;
import java.util.HashMap;
import java.util.Map;
@ -51,7 +52,7 @@ import org.xml.sax.helpers.DefaultHandler;
import de.anomic.crawler.retrieval.Response;
public class OAIListFriendsLoader {
public class OAIListFriendsLoader implements Serializable {
private static final long serialVersionUID = -8705115274655024604L;
@ -117,7 +118,7 @@ public class OAIListFriendsLoader {
}
// get a resumption token using a SAX xml parser from an input stream
private static class Parser extends DefaultHandler {
public static class Parser extends DefaultHandler {
// class variables
private final StringBuilder buffer;
@ -161,6 +162,10 @@ public class OAIListFriendsLoader {
<baseURL id="http://roar.eprints.org/id/eprint/1064">http://oai.repec.openlib.org/</baseURL>
</BaseURLs>
*/
public int getCounter() {
return this.recordCounter;
}
public void startElement(final String uri, final String name, final String tag, final Attributes atts) throws SAXException {
if ("baseURL".equals(tag)) {

View File

@ -25,6 +25,7 @@
package net.yacy.kelondro.blob;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
@ -38,7 +39,7 @@ import net.yacy.kelondro.order.NaturalOrder;
/**
* a mapping from a column name to maps with the value of the columns to the primary keys where the entry exist in the table
*/
public class MapColumnIndex {
public class MapColumnIndex implements Serializable {
private static final long serialVersionUID=-424741536889467566L;

View File

@ -44,12 +44,10 @@ import net.yacy.cora.order.ByteOrder;
public class ByteArray {
private final byte[] buffer;
private final int hash;
public ByteArray(final byte[] bb) {
this.buffer = bb;
this.hash = 0;
}
public int length() {

View File

@ -271,7 +271,6 @@ public final class RWIProcess extends Thread
|| pattern.equals("smb://.*")
|| pattern.equals("file://.*");
long remaining;
int count = 0;
pollloop: while ( true ) {
remaining = timeout - System.currentTimeMillis();
if (remaining <= 0) {
@ -287,8 +286,6 @@ public final class RWIProcess extends Thread
break pollloop;
}
assert (iEntry.urlhash().length == index.row().primaryKeyLength);
//if (iEntry.urlHash().length() != index.row().primaryKeyLength) continue;
count++;
// increase flag counts
for ( int j = 0; j < 32; j++ ) {

View File

@ -461,7 +461,6 @@ public class SnippetProcess {
final boolean nav_topics = SnippetProcess.this.query.navigators.equals("all") || SnippetProcess.this.query.navigators.indexOf("topics",0) >= 0;
try {
//System.out.println("DEPLOYED WORKER " + id + " FOR " + this.neededResults + " RESULTS, timeoutd = " + (this.timeout - System.currentTimeMillis()));
int loops = 0;
while (this.shallrun && System.currentTimeMillis() < this.timeout) {
//Log.logInfo("SnippetProcess", "***** timeleft = " + (this.timeout - System.currentTimeMillis()));
this.lastLifeSign = System.currentTimeMillis();
@ -509,7 +508,6 @@ public class SnippetProcess {
}
}
loops++;
resultEntry = fetchSnippet(page, solrContent, this.cacheStrategy); // does not fetch snippets if snippetMode == 0
if (resultEntry == null)
{