Now using synchronization in all close() methods to make sure all objects are 'closed' in an ordered way

Conflicts:
	source/de/anomic/http/server/ChunkedInputStream.java
	source/de/anomic/http/server/ChunkedOutputStream.java
	source/de/anomic/http/server/ContentLengthInputStream.java
	source/net/yacy/cora/protocol/Domains.java
	source/net/yacy/cora/services/federated/solr/SolrShardingConnector.java
	source/net/yacy/cora/services/federated/solr/SolrSingleConnector.java
	source/net/yacy/document/content/dao/PhpBB3Dao.java
	source/net/yacy/document/parser/html/AbstractTransformer.java
	source/net/yacy/kelondro/blob/BEncodedHeap.java
	source/net/yacy/kelondro/blob/HeapReader.java
	source/net/yacy/kelondro/index/RAMIndexCluster.java
	source/net/yacy/kelondro/io/ByteCountInputStream.java
	source/net/yacy/kelondro/logging/ConsoleOutErrHandler.java
	source/net/yacy/kelondro/table/SQLTable.java
This commit is contained in:
Roland 'Quix0r' Haeder 2012-05-14 07:41:55 +02:00 committed by Michael Peter Christen
parent 49cab2b85f
commit a093ccf5eb
59 changed files with 902 additions and 265 deletions
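The change applied across all these files follows one pattern: close() is declared synchronized so that concurrent shutdown paths serialize on the object, and the method stays idempotent so a second call is harmless. A minimal sketch of that idiom (the class and field names below are illustrative, not taken from the YaCy sources):

import java.io.Closeable;
import java.io.IOException;

// Illustrative sketch of the synchronized close() idiom used throughout this commit:
// the monitor serializes concurrent close() calls, the flag keeps the method idempotent.
public class OrderedResource implements Closeable {

    private final Closeable inner;   // e.g. a wrapped stream, table or index
    private boolean closed = false;

    public OrderedResource(final Closeable inner) {
        this.inner = inner;
    }

    @Override
    public synchronized void close() throws IOException {
        if (this.closed) return;     // a second caller returns immediately
        try {
            this.inner.close();      // release the wrapped resource first
        } finally {
            this.closed = true;      // mark closed even if the inner close failed
        }
    }
}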

View File

@ -98,7 +98,7 @@ public class CrawlQueues {
this.delegatedURL = new ZURL(this.sb.indexSegments.segment(PROCESS).getSolr(), newQueuePath, DELEGATED_DB_FILENAME, true, this.sb.useTailCache, this.sb.exceed134217727);
}
public void close() {
public synchronized void close() {
// wait for all workers to finish
for (final Loader w: this.workers.values()) {
w.interrupt();

View File

@ -121,7 +121,7 @@ public final class CrawlStacker {
this.slowQueue.announceShutdown();
}
public void close() {
public synchronized void close() {
this.log.logInfo("Shutdown. waiting for remaining " + size() + " crawl stacker job entries. please wait.");
this.fastQueue.announceShutdown();
this.slowQueue.announceShutdown();

View File

@ -541,7 +541,7 @@ public final class CrawlSwitchboard
return hasDoneSomething;
}
public void close() {
public synchronized void close() {
this.profilesActiveCrawlsCache.clear();
this.profilesActiveCrawls.close();
this.profilesPassiveCrawls.close();

View File

@ -91,7 +91,7 @@ public class NoticedURL {
this.noloadStack.clear();
}
public void close() {
public synchronized void close() {
Log.logInfo("NoticedURL", "CLOSING ALL STACKS");
if (this.coreStack != null) {
this.coreStack.close();

View File

@ -88,7 +88,7 @@ public class BlogBoard {
return database.containsKey(UTF8.getBytes(key));
}
public void close() {
public synchronized void close() {
database.close();
}

View File

@ -81,7 +81,7 @@ public class BlogBoardComments {
return this.database.size();
}
public void close() {
public synchronized void close() {
this.database.close();
}

View File

@ -53,7 +53,7 @@ public class BookmarkDate {
this.datesTable = new MapHeap(datesFile, 20, NaturalOrder.naturalOrder, 1024 * 64, 500, '_');
}
public void close() {
public synchronized void close() {
this.datesTable.close();
}

View File

@ -113,7 +113,7 @@ public class BookmarksDB {
// bookmarksDB's functions for 'destructing' the class
// -----------------------------------------------------
public void close(){
public synchronized void close(){
this.bookmarks.close();
this.tags.clear();
this.dates.close();

View File

@ -67,7 +67,7 @@ public class MessageBoard {
return database.size();
}
public void close() {
public synchronized void close() {
database.close();
}

View File

@ -78,7 +78,7 @@ public final class UserDB {
}
}
public void close() {
public synchronized void close() {
userTable.close();
}

View File

@ -100,7 +100,7 @@ public class WikiBoard {
/**
* Closes database files.
*/
public void close() {
public synchronized void close() {
datbase.close();
bkpbase.close();
}

View File

@ -62,7 +62,7 @@ import java.io.UnsupportedEncodingException;
*/
public class ChunkedInputStream extends InputStream {
/** The inputstream that we're wrapping */
private InputStream in;
private final InputStream in;
/** The chunk size */
private int chunkSize;
@ -110,22 +110,23 @@ public class ChunkedInputStream extends InputStream {
*
* @see HttpMethod#getResponseFooters()
*/
@Override
public int read() throws IOException {
if (closed) {
if (this.closed) {
throw new IOException("Attempted read from closed stream.");
}
if (eof) {
if (this.eof) {
return -1;
}
if (pos >= chunkSize) {
if (this.pos >= this.chunkSize) {
nextChunk();
if (eof) {
if (this.eof) {
return -1;
}
}
pos++;
return in.read();
this.pos++;
return this.in.read();
}
/**
@ -139,20 +140,21 @@ public class ChunkedInputStream extends InputStream {
* @see java.io.InputStream#read(byte[], int, int)
* @throws IOException if an IO problem occurs.
*/
@Override
public int read(byte[] b, int off, int len) throws IOException {
if (closed) throw new IOException("Attempted read from closed stream.");
if (eof) return -1;
if (this.closed) throw new IOException("Attempted read from closed stream.");
if (this.eof) return -1;
if (pos >= chunkSize) {
if (this.pos >= this.chunkSize) {
nextChunk();
if (eof) {
if (this.eof) {
return -1;
}
}
len = Math.min(len, chunkSize - pos);
int count = in.read(b, off, len);
pos += count;
len = Math.min(len, this.chunkSize - this.pos);
int count = this.in.read(b, off, len);
this.pos += count;
return count;
}
@ -164,6 +166,7 @@ public class ChunkedInputStream extends InputStream {
* @see java.io.InputStream#read(byte[])
* @throws IOException if an IO problem occurs.
*/
@Override
public int read(byte[] b) throws IOException {
return read(b, 0, b.length);
}
@ -173,9 +176,9 @@ public class ChunkedInputStream extends InputStream {
* @throws IOException If an IO error occurs.
*/
private void readCRLF() throws IOException {
int cr = in.read();
int cr = this.in.read();
if (cr != '\r') throw new IOException("CRLF expected at end of chunk: cr != " + cr);
int lf = in.read();
int lf = this.in.read();
if (lf != '\n') throw new IOException("CRLF expected at end of chunk: lf != " + lf);
}
@ -185,12 +188,12 @@ public class ChunkedInputStream extends InputStream {
* @throws IOException If an IO error occurs.
*/
private void nextChunk() throws IOException {
if (!bof) readCRLF();
chunkSize = getChunkSizeFromInputStream(in);
bof = false;
pos = 0;
if (chunkSize == 0) {
eof = true;
if (!this.bof) readCRLF();
this.chunkSize = getChunkSizeFromInputStream(this.in);
this.bof = false;
this.pos = 0;
if (this.chunkSize == 0) {
this.eof = true;
skipTrailerHeaders();
}
}
@ -307,7 +310,7 @@ public class ChunkedInputStream extends InputStream {
*/
private void skipTrailerHeaders() throws IOException {
for (; ;) {
String line = readLine(in, "US-ASCII");
String line = readLine(this.in, "US-ASCII");
if ((line == null) || (line.trim().length() < 1)) break;
}
}
@ -419,15 +422,16 @@ public class ChunkedInputStream extends InputStream {
* next response without scanning.
* @throws IOException If an IO problem occurs.
*/
public void close() throws IOException {
if (!closed) {
@Override
public synchronized void close() throws IOException {
if (!this.closed) {
try {
if (!eof) {
if (!this.eof) {
exhaustInputStream(this);
}
} finally {
eof = true;
closed = true;
this.eof = true;
this.closed = true;
}
}
}
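For context on what ChunkedInputStream decodes: each chunk of an HTTP chunked body is announced by a hexadecimal size line, followed by that many bytes of data and a CRLF, and a zero-size chunk ends the body, optionally followed by trailer headers. A stand-alone, simplified sketch of that decoding (no chunk extensions or trailers; not the YaCy implementation):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

// Simplified stand-alone sketch of chunked transfer decoding:
// hex size line, CRLF, chunk data, CRLF, repeated until a zero-size chunk.
public class ChunkedDecodeSketch {

    public static byte[] decode(final InputStream in) throws IOException {
        final ByteArrayOutputStream out = new ByteArrayOutputStream();
        while (true) {
            final int size = Integer.parseInt(readLine(in).trim(), 16); // chunk size in hex
            if (size == 0) break;                                       // last chunk
            for (int i = 0; i < size; i++) out.write(in.read());        // chunk payload
            readLine(in);                                               // consume trailing CRLF
        }
        return out.toByteArray();
    }

    private static String readLine(final InputStream in) throws IOException {
        final StringBuilder sb = new StringBuilder();
        int c;
        while ((c = in.read()) != -1 && c != '\n') if (c != '\r') sb.append((char) c);
        return sb.toString();
    }

    public static void main(final String[] args) throws IOException {
        final byte[] body = "5\r\nHello\r\n6\r\n World\r\n0\r\n\r\n".getBytes("US-ASCII");
        System.out.println(new String(decode(new ByteArrayInputStream(body)), "US-ASCII")); // Hello World
    }
}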

View File

@ -33,7 +33,6 @@ import net.yacy.cora.document.ASCII;
import net.yacy.cora.document.UTF8;
import net.yacy.kelondro.util.ByteBuffer;
import net.yacy.kelondro.util.FileUtils;
import de.anomic.server.serverCore;
public final class ChunkedOutputStream extends FilterOutputStream {
@ -43,7 +42,8 @@ public final class ChunkedOutputStream extends FilterOutputStream {
super(out);
}
public void close() throws IOException {
@Override
public synchronized void close() throws IOException {
if (!this.finished) this.finish();
this.out.close();
}
@ -58,6 +58,7 @@ public final class ChunkedOutputStream extends FilterOutputStream {
}
}
@Override
public void write(final byte[] b) throws IOException {
if (this.finished) throw new IOException("ChunkedOutputStream already finalized.");
if (b.length == 0) return;
@ -69,6 +70,7 @@ public final class ChunkedOutputStream extends FilterOutputStream {
this.out.flush();
}
@Override
public void write(final byte[] b, final int off, final int len) throws IOException {
if (this.finished) throw new IOException("ChunkedOutputStream already finalized.");
if (len == 0) return;
@ -98,11 +100,12 @@ public final class ChunkedOutputStream extends FilterOutputStream {
this.out.write(ASCII.getBytes(Integer.toHexString(len)));
this.out.write(serverCore.CRLF);
FileUtils.copy(b, out, len);
FileUtils.copy(b, this.out, len);
this.out.write(serverCore.CRLF);
this.out.flush();
}
@Override
public void write(final int b) throws IOException {
if (this.finished) throw new IOException("ChunkedOutputStream already finalized.");

View File

@ -67,7 +67,7 @@ public class ContentLengthInputStream extends InputStream {
* The maximum number of bytes that can be read from the stream. Subsequent
* read operations will return -1.
*/
private long contentLength;
private final long contentLength;
/** The current position */
private long pos = 0;
@ -102,14 +102,15 @@ public class ContentLengthInputStream extends InputStream {
* primed to parse the next response.</p>
* @throws IOException If an IO problem occurs.
*/
public void close() throws IOException {
if (!closed) {
@Override
public synchronized void close() throws IOException {
if (!this.closed) {
try {
ChunkedInputStream.exhaustInputStream(this);
} finally {
// close after above so that we don't throw an exception trying
// to read after closed!
closed = true;
this.closed = true;
}
}
}
@ -121,15 +122,16 @@ public class ContentLengthInputStream extends InputStream {
* @throws IOException If an IO problem occurs
* @see java.io.InputStream#read()
*/
@Override
public int read() throws IOException {
if (closed) {
if (this.closed) {
throw new IOException("Attempted read from closed stream.");
}
if (pos >= contentLength) {
if (this.pos >= this.contentLength) {
return -1;
}
pos++;
this.pos++;
return this.wrappedStream.read();
}
@ -145,20 +147,21 @@ public class ContentLengthInputStream extends InputStream {
*
* @throws java.io.IOException Should an error occur on the wrapped stream.
*/
@Override
public int read (byte[] b, int off, int len) throws java.io.IOException {
if (closed) {
if (this.closed) {
throw new IOException("Attempted read from closed stream.");
}
if (pos >= contentLength) {
if (this.pos >= this.contentLength) {
return -1;
}
if (pos + len > contentLength) {
len = (int) (contentLength - pos);
if (this.pos + len > this.contentLength) {
len = (int) (this.contentLength - this.pos);
}
int count = this.wrappedStream.read(b, off, len);
pos += count;
this.pos += count;
return count;
}
@ -170,6 +173,7 @@ public class ContentLengthInputStream extends InputStream {
* @throws IOException If an IO problem occurs
* @see java.io.InputStream#read(byte[])
*/
@Override
public int read(byte[] b) throws IOException {
return read(b, 0, b.length);
}
@ -182,20 +186,22 @@ public class ContentLengthInputStream extends InputStream {
* @throws IOException If an error occurs while skipping bytes.
* @see InputStream#skip(long)
*/
@Override
public long skip(long n) throws IOException {
// make sure we don't skip more bytes than are
// still available
long length = Math.min(n, contentLength - pos);
long length = Math.min(n, this.contentLength - this.pos);
// skip and keep track of the bytes actually skipped
length = this.wrappedStream.skip(length);
// only add the skipped bytes to the current position
// if bytes were actually skipped
if (length > 0) {
pos += length;
this.pos += length;
}
return length;
}
@Override
public int available() throws IOException {
if (this.closed) {
return 0;

View File

@ -560,7 +560,7 @@ public final class serverCore extends AbstractBusyThread implements BusyThread {
return this.stopped;
}
public void close() {
public synchronized void close() {
// closing the socket to the client
if (this.controlSocket != null) try {
this.controlSocket.close();

View File

@ -117,7 +117,7 @@ public class TripleStore {
return this.store.keyIterator();
}
public void close() {
public synchronized void close() {
this.store.close();
}

View File

@ -50,6 +50,7 @@ import net.yacy.cora.plugin.ClassProvider;
import net.yacy.cora.storage.ARC;
import net.yacy.cora.storage.ConcurrentARC;
import net.yacy.cora.storage.KeyList;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.util.MemoryControl;
public class Domains {
@ -468,8 +469,8 @@ public class Domains {
noLocalCheck = v;
}
public static void close() {
if (globalHosts != null) try {globalHosts.close();} catch (final IOException e) {}
public static synchronized void close() {
if (globalHosts != null) try {globalHosts.close();} catch (final IOException e) {Log.logException(e);}
}
/**

View File

@ -44,7 +44,7 @@ public class SolrRetryConnector implements SolrConnector {
}
@Override
public void close() {
public synchronized void close() {
this.solrConnector.close();
}

View File

@ -57,7 +57,7 @@ public class SolrShardingConnector implements SolrConnector {
}
@Override
public void close() {
public synchronized void close() {
for (final SolrConnector connector: this.connectors) connector.close();
}

View File

@ -115,7 +115,7 @@ public class SolrSingleConnector implements SolrConnector {
}
@Override
public void close() {
public synchronized void close() {
try {
this.server.commit();
} catch (SolrServerException e) {
@ -205,6 +205,7 @@ public class SolrSingleConnector implements SolrConnector {
}
}
@Override
public void add(final Collection<SolrDoc> solrdocs) throws IOException, SolrException {
ArrayList<SolrInputDocument> l = new ArrayList<SolrInputDocument>();
for (SolrDoc d: solrdocs) l.add(d);

View File

@ -91,7 +91,7 @@ public class KeyList implements Iterable<String> {
}
}
public void close() throws IOException {
public synchronized void close() throws IOException {
synchronized (this.raf) {
this.raf.close();
}

View File

@ -0,0 +1,91 @@
package net.yacy.cora.storage;
import java.io.File;
import java.io.IOException;
import java.util.AbstractMap;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
public class ZIPReader extends AbstractMap<String, ZipEntry> implements Map<String, ZipEntry>, Iterable<Map.Entry<String, ZipEntry>> {
private final Set<String> filenames;
private final ZipFile zipFile;
public ZIPReader(File file) throws IOException {
super();
if (!file.exists()) throw new IOException("ZIPReader can only be used for existing files");
this.zipFile = new ZipFile(file);
// read all entries
this.filenames = new HashSet<String>();
final Enumeration<? extends ZipEntry> e = this.zipFile.entries();
while (e.hasMoreElements()) {
ZipEntry z = e.nextElement();
this.filenames.add(z.getName());
}
}
@Override
public Iterator<java.util.Map.Entry<String, ZipEntry>> iterator() {
final Enumeration<? extends ZipEntry> e = this.zipFile.entries();
return new Iterator<java.util.Map.Entry<String, ZipEntry>>() {
@Override
public boolean hasNext() {
return e.hasMoreElements();
}
@Override
public java.util.Map.Entry<String, ZipEntry> next() {
ZipEntry z = e.nextElement();
return new AbstractMap.SimpleImmutableEntry<String, ZipEntry>(z.getName(), z);
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
@Override
public int size() {
return this.zipFile.size();
}
@Override
public boolean isEmpty() {
return this.zipFile.size() == 0;
}
@Override
public boolean containsKey(Object key) {
return this.filenames.contains(key);
}
@Override
public ZipEntry get(Object key) {
return this.zipFile.getEntry((String) key);
}
@Override
public Set<String> keySet() {
return this.filenames;
}
@Override
public Set<java.util.Map.Entry<String, ZipEntry>> entrySet() {
throw new UnsupportedOperationException();
}
public void close() throws IOException {
this.zipFile.close();
}
}

View File

@ -0,0 +1,62 @@
package net.yacy.cora.storage;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.AbstractMap;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
public class ZIPWriter extends AbstractMap<String, ZipEntry> implements Map<String, ZipEntry>, Iterable<Map.Entry<String, ZipEntry>> {
private final HashMap<String, ZipEntry> backup;
private final ZipOutputStream zos;
public ZIPWriter(File file) throws IOException {
super();
if (file.exists()) throw new IOException("ZIPWriter can only be used for new files");
this.backup = new HashMap<String, ZipEntry>();
this.zos = new ZipOutputStream(new FileOutputStream(file));
}
@Override
public ZipEntry put(String key, ZipEntry value) {
assert !this.backup.containsKey(key);
try {
this.zos.putNextEntry(value);
this.backup.put(key, value);
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
@Override
public ZipEntry get(Object key) {
return this.backup.get(key);
}
@Override
public Iterator<java.util.Map.Entry<String, ZipEntry>> iterator() {
return this.backup.entrySet().iterator();
}
@Override
public void clear() {
throw new UnsupportedOperationException();
}
@Override
public Set<java.util.Map.Entry<String, ZipEntry>> entrySet() {
return this.backup.entrySet();
}
public void close() throws IOException {
this.zos.close();
}
}
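A brief hypothetical usage sketch for the two new map-style ZIP wrappers above (the file name is illustrative; put() only opens the entry, and writing entry data is left out of this sketch):

import java.io.File;
import java.io.IOException;
import java.util.Map;
import java.util.zip.ZipEntry;

import net.yacy.cora.storage.ZIPReader;
import net.yacy.cora.storage.ZIPWriter;

// Hypothetical usage of the new ZIPWriter/ZIPReader wrappers added above.
public class ZipMapExample {

    public static void main(final String[] args) throws IOException {
        final File archive = new File("example.zip");        // illustrative path

        final ZIPWriter writer = new ZIPWriter(archive);     // refuses to reuse an existing file
        writer.put("readme.txt", new ZipEntry("readme.txt"));
        writer.close();                                       // flushes and closes the ZipOutputStream

        final ZIPReader reader = new ZIPReader(archive);     // requires an existing file
        for (final Map.Entry<String, ZipEntry> entry : reader) {
            System.out.println(entry.getKey());               // lists the archive entries
        }
        reader.close();
    }
}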

View File

@ -702,7 +702,7 @@ dc_rights
}
}
public void close() {
public synchronized void close() {
if (this.text == null) return;
// try close the output stream

View File

@ -142,7 +142,7 @@ public class SentenceReader implements Iterator<StringBuilder> {
throw new UnsupportedOperationException();
}
public void close() {
public synchronized void close() {
try {
raf.close();
} catch(IOException ioe) {

View File

@ -82,7 +82,7 @@ public class WordTokenizer implements Enumeration<StringBuilder> {
return r;
}
public void close() {
public synchronized void close() {
this.e.close();
}
@ -153,7 +153,7 @@ public class WordTokenizer implements Enumeration<StringBuilder> {
return r;
}
public void close() {
public synchronized void close() {
this.e.close();
}
}

View File

@ -85,7 +85,7 @@ public class DatabaseConnection {
}
}
public void close() {
public synchronized void close() {
if (connection != null) {
try {
connection.close();

View File

@ -100,7 +100,7 @@ public class ImportDump {
close();
}
public void close() {
public synchronized void close() {
this.conn.close();
}

View File

@ -66,16 +66,18 @@ public class PhpBB3Dao implements Dao {
this.users = new HashMap<Integer, String>();
}
@Override
protected void finalize() throws Throwable {
close();
}
@Override
public Date first() {
Statement stmt = null;
ResultSet rs = null;
try {
stmt = conn.statement();
rs = stmt.executeQuery("select min(post_time) from " + prefix + "posts");
stmt = this.conn.statement();
rs = stmt.executeQuery("select min(post_time) from " + this.prefix + "posts");
if (rs.next()) {
return new Date(rs.getLong(1) * 1000L);
}
@ -89,12 +91,13 @@ public class PhpBB3Dao implements Dao {
}
}
@Override
public Date latest() {
Statement stmt = null;
ResultSet rs = null;
try {
stmt = conn.statement();
rs = stmt.executeQuery("select max(post_time) from " + prefix + "posts");
stmt = this.conn.statement();
rs = stmt.executeQuery("select max(post_time) from " + this.prefix + "posts");
if (rs.next()) {
return new Date(rs.getLong(1) * 1000L);
}
@ -108,18 +111,21 @@ public class PhpBB3Dao implements Dao {
}
}
@Override
public int size() throws SQLException {
return this.conn.count(prefix + "posts");
return this.conn.count(this.prefix + "posts");
}
@Override
public DCEntry get(int item) {
return getOne("select * from " + prefix + "posts where post_id = " + item);
return getOne("select * from " + this.prefix + "posts where post_id = " + item);
}
@Override
public BlockingQueue<DCEntry> query(int from, int until, int queueSize) {
// define the sql query
final StringBuilder sql = new StringBuilder(256);
sql.append("select * from " + prefix + "posts where post_id >= ");
sql.append("select * from " + this.prefix + "posts where post_id >= ");
sql.append(from);
if (until > from) {
sql.append(" and post_id < ");
@ -131,10 +137,11 @@ public class PhpBB3Dao implements Dao {
return toQueue(sql, queueSize);
}
@Override
public BlockingQueue<DCEntry> query(Date from, int queueSize) {
// define the sql query
final StringBuilder sql = new StringBuilder(256);
sql.append("select * from " + prefix + "posts where post_time >= ");
sql.append("select * from " + this.prefix + "posts where post_time >= ");
sql.append(from.getTime() / 1000);
sql.append(" order by post_id");
@ -147,7 +154,7 @@ public class PhpBB3Dao implements Dao {
Statement stmt = null;
ResultSet rs = null;
try {
stmt = conn.statement();
stmt = this.conn.statement();
rs = stmt.executeQuery(sql);
if (rs.next()) {
try {
@ -170,11 +177,12 @@ public class PhpBB3Dao implements Dao {
// execute the query and push entries to a queue concurrently
final BlockingQueue<DCEntry> queue = new ArrayBlockingQueue<DCEntry>(queueSize);
Thread dbreader = new Thread() {
@Override
public void run() {
Statement stmt = null;
ResultSet rs = null;
try {
stmt = conn.statement();
stmt = PhpBB3Dao.this.conn.statement();
rs = stmt.executeQuery(sql.toString());
while (rs.next()) {
try {
@ -233,12 +241,12 @@ public class PhpBB3Dao implements Dao {
if (nick != null) return nick;
StringBuilder sql = new StringBuilder(256);
sql.append("select * from " + prefix + "users where user_id = ");
sql.append("select * from " + this.prefix + "users where user_id = ");
sql.append(poster_id);
Statement stmt = null;
ResultSet rs = null;
try {
stmt = conn.statement();
stmt = this.conn.statement();
rs = stmt.executeQuery(sql.toString());
if (rs.next()) nick = rs.getString("username");
if (nick == null) nick = "";
@ -253,6 +261,7 @@ public class PhpBB3Dao implements Dao {
}
}
@Override
public int writeSurrogates(
BlockingQueue<DCEntry> queue,
File targetdir,
@ -305,7 +314,8 @@ public class PhpBB3Dao implements Dao {
return 0;
}
public void close() {
@Override
public synchronized void close() {
this.conn.close();
}

View File

@ -564,7 +564,7 @@ public class MediawikiImporter extends Thread implements Importer {
return this.bb.getBytes();
}
public void close() {
public synchronized void close() {
try {
this.is.close();
} catch (final IOException e) {

View File

@ -0,0 +1,390 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* this parser was copied and modified to fit into YaCy from the apache tika project
*/
package net.yacy.document.parser;
import java.io.InputStream;
import net.yacy.cora.document.MultiProtocolURI;
import net.yacy.document.AbstractParser;
import net.yacy.document.Document;
import net.yacy.document.Parser;
import net.yacy.kelondro.util.MemoryControl;
import org.apache.poi.util.StringUtil;
public class dwgParser extends AbstractParser implements Parser {
private static final String HEADER_2000_PROPERTIES_MARKER_STR = "DWGPROPS COOKIE";
private static final byte[] HEADER_2000_PROPERTIES_MARKER = new byte[HEADER_2000_PROPERTIES_MARKER_STR.length()];
static {
StringUtil.putCompressedUnicode(
HEADER_2000_PROPERTIES_MARKER_STR,
HEADER_2000_PROPERTIES_MARKER, 0);
}
/**
* How far to skip after the last standard property, before
* we find any custom properties that might be there.
*/
private static final int CUSTOM_PROPERTIES_SKIP = 20;
public dwgParser() {
super("DWG (CAD Drawing) parser (very basic)");
this.SUPPORTED_EXTENSIONS.add("dwg");
this.SUPPORTED_MIME_TYPES.add("application/dwg");
this.SUPPORTED_MIME_TYPES.add("applications/vnd.dwg");
}
@Override
public Document[] parse(final MultiProtocolURI location, final String mimeType, final String charset, final InputStream source) throws Parser.Failure, InterruptedException {
// check memory for parser
if (!MemoryControl.request(200 * 1024 * 1024, true))
throw new Parser.Failure("Not enough Memory available for pdf parser: " + MemoryControl.available(), location);
return null;
// First up, which version of the format are we handling?
/*
byte[] header = new byte[128];
IOUtils.readFully(source, header);
String version = new String(header, 0, 6, "US-ASCII");
XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata);
xhtml.startDocument();
if (version.equals("AC1015")) {
metadata.set(Metadata.CONTENT_TYPE, TYPE.toString());
if (skipTo2000PropertyInfoSection(stream, header)) {
get2000Props(stream,metadata,xhtml);
}
} else if (version.equals("AC1018")) {
metadata.set(Metadata.CONTENT_TYPE, TYPE.toString());
if (skipToPropertyInfoSection(stream, header)) {
get2004Props(stream,metadata,xhtml);
}
} else if (version.equals("AC1021") || version.equals("AC1024")) {
metadata.set(Metadata.CONTENT_TYPE, TYPE.toString());
if (skipToPropertyInfoSection(stream, header)) {
get2007and2010Props(stream,metadata,xhtml);
}
} else {
throw new TikaException(
"Unsupported AutoCAD drawing version: " + version);
}
xhtml.endDocument();
String docTitle = null, docSubject = null, docAuthor = null, docPublisher = null, docKeywordStr = null;
if (info != null) {
docTitle = info.getTitle();
docSubject = info.getSubject();
docAuthor = info.getAuthor();
docPublisher = info.getProducer();
if (docPublisher == null || docPublisher.length() == 0) docPublisher = info.getCreator();
docKeywordStr = info.getKeywords();
}
if (docTitle == null || docTitle.length() == 0) {
docTitle = MultiProtocolURI.unescape(location.getFileName());
}
String[] docKeywords = null;
if (docKeywordStr != null) {
docKeywords = docKeywordStr.split(" |,");
}
if (docTitle == null) {
docTitle = docSubject;
}
byte[] contentBytes;
return new Document[]{new Document(
location,
mimeType,
"UTF-8",
this,
null,
docKeywords,
docTitle,
docAuthor,
docPublisher,
null,
null,
0.0f, 0.0f,
contentBytes,
null,
null,
null,
false)};
*/
}
/*
private void get2004Props(
InputStream stream, Metadata metadata, XHTMLContentHandler xhtml)
throws IOException, TikaException, SAXException {
// Standard properties
for (int i = 0; i < HEADER_PROPERTIES_ENTRIES.length; i++) {
String headerValue = read2004String(stream);
handleHeader(i, headerValue, metadata, xhtml);
}
// Custom properties
int customCount = skipToCustomProperties(stream);
for (int i = 0; i < customCount; i++) {
String propName = read2004String(stream);
String propValue = read2004String(stream);
if(propName.length() > 0 && propValue.length() > 0) {
metadata.add(propName, propValue);
}
}
}
private String read2004String(InputStream stream) throws IOException, TikaException {
int stringLen = EndianUtils.readUShortLE(stream);
byte[] stringData = new byte[stringLen];
IOUtils.readFully(stream, stringData);
// Often but not always null terminated
if (stringData[stringLen-1] == 0) {
stringLen--;
}
String value = StringUtil.getFromCompressedUnicode(stringData, 0, stringLen);
return value;
}
// Stored as UCS2, so 16 bit "unicode"
private void get2007and2010Props(
InputStream stream, Metadata metadata, XHTMLContentHandler xhtml)
throws IOException, TikaException, SAXException {
// Standard properties
for (int i = 0; i < HEADER_PROPERTIES_ENTRIES.length; i++) {
String headerValue = read2007and2010String(stream);
handleHeader(i, headerValue, metadata, xhtml);
}
// Custom properties
int customCount = skipToCustomProperties(stream);
for (int i = 0; i < customCount; i++) {
String propName = read2007and2010String(stream);
String propValue = read2007and2010String(stream);
if(propName.length() > 0 && propValue.length() > 0) {
metadata.add(propName, propValue);
}
}
}
private String read2007and2010String(InputStream stream) throws IOException, TikaException {
int stringLen = EndianUtils.readUShortLE(stream);
byte[] stringData = new byte[stringLen * 2];
IOUtils.readFully(stream, stringData);
String value = StringUtil.getFromUnicodeLE(stringData);
// Some strings are null terminated
if(value.charAt(value.length()-1) == 0) {
value = value.substring(0, value.length()-1);
}
return value;
}
private void get2000Props(
InputStream stream, Metadata metadata, XHTMLContentHandler xhtml)
throws IOException, TikaException, SAXException {
int propCount = 0;
while(propCount < 30) {
int propIdx = EndianUtils.readUShortLE(stream);
int length = EndianUtils.readUShortLE(stream);
int valueType = stream.read();
if(propIdx == 0x28) {
// This one seems not to follow the pattern
length = 0x19;
} else if(propIdx == 90) {
// We think this means the end of properties
break;
}
byte[] value = new byte[length];
IOUtils.readFully(stream, value);
if(valueType == 0x1e) {
// Normal string, good
String val = StringUtil.getFromCompressedUnicode(value, 0, length);
// Is it one we can look up by index?
if(propIdx < HEADER_2000_PROPERTIES_ENTRIES.length) {
metadata.add(HEADER_2000_PROPERTIES_ENTRIES[propIdx], val);
xhtml.element("p", val);
} else if(propIdx == 0x012c) {
int splitAt = val.indexOf('=');
if(splitAt > -1) {
String propName = val.substring(0, splitAt);
String propVal = val.substring(splitAt+1);
metadata.add(propName, propVal);
}
}
} else {
// No idea...
}
propCount++;
}
}
private void handleHeader(
int headerNumber, String value, Metadata metadata,
XHTMLContentHandler xhtml) throws SAXException {
if(value == null || value.length() == 0) {
return;
}
String headerProp = HEADER_PROPERTIES_ENTRIES[headerNumber];
if(headerProp != null) {
metadata.set(headerProp, value);
}
xhtml.element("p", value);
}
// Grab the offset, then skip there
private boolean skipToPropertyInfoSection(InputStream stream, byte[] header)
throws IOException, TikaException {
// The offset is stored in the header from 0x20 onwards
long offsetToSection = EndianUtils.getLongLE(header, 0x20);
long toSkip = offsetToSection - header.length;
if(offsetToSection == 0){
return false;
}
while (toSkip > 0) {
byte[] skip = new byte[Math.min((int) toSkip, 0x4000)];
IOUtils.readFully(stream, skip);
toSkip -= skip.length;
}
return true;
}
//We think it can be anywhere...
private boolean skipTo2000PropertyInfoSection(InputStream stream, byte[] header)
throws IOException {
int val = 0;
while(val != -1) {
val = stream.read();
if(val == HEADER_2000_PROPERTIES_MARKER[0]) {
boolean going = true;
for(int i=1; i<HEADER_2000_PROPERTIES_MARKER.length && going; i++) {
val = stream.read();
if(val != HEADER_2000_PROPERTIES_MARKER[i]) going = false;
}
if(going) {
// Bingo, found it
return true;
}
}
}
return false;
}
private int skipToCustomProperties(InputStream stream)
throws IOException, TikaException {
// There should be 4 zero bytes next
byte[] padding = new byte[4];
IOUtils.readFully(stream, padding);
if(padding[0] == 0 && padding[1] == 0 &&
padding[2] == 0 && padding[3] == 0) {
// Looks hopeful, skip on
padding = new byte[CUSTOM_PROPERTIES_SKIP];
IOUtils.readFully(stream, padding);
// We should now have the count
int count = EndianUtils.readUShortLE(stream);
// Sanity check it
if(count > 0 && count < 0x7f) {
// Looks plausible
return count;
} else {
// No properties / count is too high to trust
return 0;
}
} else {
// No padding. That probably means no custom props
return 0;
}
}
public static void main(final String[] args) {
if (args.length > 0 && args[0].length() > 0) {
// file
final File dwgFile = new File(args[0]);
if(dwgFile.canRead()) {
System.out.println(dwgFile.getAbsolutePath());
final long startTime = System.currentTimeMillis();
// parse
final AbstractParser parser = new dwgParser();
Document document = null;
try {
document = Document.mergeDocuments(null, "application/dwg", parser.parse(null, "application/dwg", null, new FileInputStream(dwgFile)));
} catch (final Parser.Failure e) {
System.err.println("Cannot parse file " + dwgFile.getAbsolutePath());
Log.logException(e);
} catch (final InterruptedException e) {
System.err.println("Interrupted while parsing!");
Log.logException(e);
} catch (final NoClassDefFoundError e) {
System.err.println("class not found: " + e.getMessage());
} catch (final FileNotFoundException e) {
Log.logException(e);
}
// statistics
System.out.println("\ttime elapsed: " + (System.currentTimeMillis() - startTime) + " ms");
// output
if (document == null) {
System.out.println("\t!!!Parsing without result!!!");
} else {
System.out.println("\tParsed text with " + document.getTextLength() + " chars of text and " + document.getAnchors().size() + " anchors");
try {
// write file
FileUtils.copy(document.getText(), new File("parsedPdf.txt"));
} catch (final IOException e) {
System.err.println("error saving parsed document");
Log.logException(e);
}
}
} else {
System.err.println("Cannot read file "+ dwgFile.getAbsolutePath());
}
} else {
System.out.println("Please give a filename as first argument.");
}
}
*/
}

View File

@ -37,15 +37,18 @@ public abstract class AbstractTransformer implements Transformer {
this.tags1 = tags1;
}
@Override
public boolean isTag0(final String tag) {
return tags0.contains(tag);
return this.tags0.contains(tag);
}
@Override
public boolean isTag1(final String tag) {
return tags1.contains(tag);
return this.tags1.contains(tag);
}
//the 'missing' method that shall be implemented:
@Override
public abstract char[] transformText(char[] text);
/* could be easily implemented as:
{
@ -54,18 +57,21 @@ public abstract class AbstractTransformer implements Transformer {
*/
// the other methods must take into account to construct the return value correctly
@Override
public char[] transformTag0(final String tagname, final Properties tagopts, final char quotechar) {
return TransformerWriter.genTag0(tagname, tagopts, quotechar);
}
@Override
public char[] transformTag1(final String tagname, final Properties tagopts, final char[] text, final char quotechar) {
return TransformerWriter.genTag1(tagname, tagopts, text, quotechar);
}
public void close() {
@Override
public synchronized void close() {
// free resources
tags0 = null;
tags1 = null;
this.tags0 = null;
this.tags1 = null;
}
}

View File

@ -139,7 +139,7 @@ public class ContentTransformer extends AbstractTransformer implements Transform
}
@Override
public void close() {
public synchronized void close() {
// free resources
super.close();
}

View File

@ -170,7 +170,7 @@ public class ScraperInputStream extends InputStream implements ScraperListener {
}
@Override
public void close() throws IOException {
public synchronized void close() throws IOException {
if (this.writer != null) this.writer.close();
}

View File

@ -522,7 +522,8 @@ public class BEncodedHeap implements MapStore {
* close the backend file. Should be called explicitly to ensure that all data waiting in IO write buffers
* are flushed
*/
public void close() {
@Override
public synchronized void close() {
int s = this.size();
File f = this.table.heapFile;
this.table.close();

View File

@ -154,7 +154,7 @@ public class BEncodedHeapShard extends AbstractMapStore implements MapStore {
}
@Override
public void close() {
public synchronized void close() {
if (this.shard == null) return;
final Iterator<MapStore> i = this.shard.values().iterator();

View File

@ -803,8 +803,8 @@ public class HeapReader {
}
}
public void close() {
if (this.is != null) try { this.is.close(); } catch (final IOException e) {}
public synchronized void close() {
if (this.is != null) try { this.is.close(); } catch (final IOException e) {Log.logException(e);}
this.is = null;
}

View File

@ -99,7 +99,7 @@ public class Tables implements Iterable<String> {
heap.close();
}
public void close() {
public synchronized void close() {
for (final BEncodedHeap heap: this.tables.values()) heap.close();
this.tables.clear();
}

View File

@ -111,7 +111,7 @@ public class BufferedObjectIndex implements Index, Iterable<Row.Entry> {
}
@Override
public void close() {
public synchronized void close() {
synchronized (this.backend) {
try {
flushBuffer();

View File

@ -434,7 +434,7 @@ public final class HandleMap implements Iterable<Row.Entry> {
return this.map;
}
public void close() {
public synchronized void close() {
this.map.close();
}
}

View File

@ -82,6 +82,7 @@ public final class RAMIndexCluster implements Index, Iterable<Row.Entry>, Clonea
return (int) ((this.rowdef.objectOrder.cardinal(row.bytes(), 0, row.getPrimaryKeyLength()) / 17) % (this.cluster.length));
}
@Override
public final byte[] smallestKey() {
final HandleSet keysort = new HandleSet(this.rowdef.primaryKeyLength, this.rowdef.objectOrder, this.cluster.length);
synchronized (this.cluster) {
@ -94,6 +95,7 @@ public final class RAMIndexCluster implements Index, Iterable<Row.Entry>, Clonea
return keysort.smallestKey();
}
@Override
public final byte[] largestKey() {
final HandleSet keysort = new HandleSet(this.rowdef.primaryKeyLength, this.rowdef.objectOrder, this.cluster.length);
synchronized (this.cluster) {
@ -115,6 +117,7 @@ public final class RAMIndexCluster implements Index, Iterable<Row.Entry>, Clonea
return r;
}
@Override
public final void addUnique(final Entry row) throws RowSpaceExceededException {
final int i = indexFor(row);
assert i >= 0 : "i = " + i;
@ -126,28 +129,37 @@ public final class RAMIndexCluster implements Index, Iterable<Row.Entry>, Clonea
for (final Entry row: rows) addUnique(row);
}
@Override
public final void clear() {
synchronized (this.cluster) {
for (final RAMIndex c: this.cluster) if (c != null) c.clear();
}
}
@Override
public final void close() {
clear();
synchronized (this.cluster) {
for (final RAMIndex c: this.cluster) if (c != null) c.close();
for (final RAMIndex c: this.cluster) {
if (c != null) {
//Log.logInfo("RAMIndexCluster", "Closing RAM index at " + c.getName() + " with " + c.size() + " entries ...");
c.close();
}
}
}
}
@Override
public final void deleteOnExit() {
// do nothing here
}
@Override
public final String filename() {
// we don't have a file name
return null;
}
@Override
public final Entry get(final byte[] key, final boolean forcecopy) {
final int i = indexFor(key);
if (i < 0) return null;
@ -156,8 +168,10 @@ public final class RAMIndexCluster implements Index, Iterable<Row.Entry>, Clonea
return r.get(key, forcecopy);
}
@Override
public Map<byte[], Row.Entry> get(final Collection<byte[]> keys, final boolean forcecopy) throws IOException, InterruptedException {
final Map<byte[], Row.Entry> map = new TreeMap<byte[], Row.Entry>(row().objectOrder);
Row.Entry entry;
for (final byte[] key: keys) {
entry = get(key, forcecopy);
@ -166,6 +180,7 @@ public final class RAMIndexCluster implements Index, Iterable<Row.Entry>, Clonea
return map;
}
@Override
public final boolean has(final byte[] key) {
final int i = indexFor(key);
if (i < 0) return false;
@ -174,6 +189,7 @@ public final class RAMIndexCluster implements Index, Iterable<Row.Entry>, Clonea
return r.has(key);
}
@Override
public final CloneableIterator<byte[]> keys(final boolean up, final byte[] firstKey) {
synchronized (this.cluster) {
final Collection<CloneableIterator<byte[]>> col = new ArrayList<CloneableIterator<byte[]>>();
@ -193,6 +209,7 @@ public final class RAMIndexCluster implements Index, Iterable<Row.Entry>, Clonea
* @throws IOException
* @throws RowSpaceExceededException
*/
@Override
public final boolean put(final Entry row) throws RowSpaceExceededException {
final int i = indexFor(row);
assert i >= 0 : "i = " + i;
@ -200,18 +217,21 @@ public final class RAMIndexCluster implements Index, Iterable<Row.Entry>, Clonea
return accessArray(i).put(row);
}
@Override
public final boolean delete(final byte[] key) {
final int i = indexFor(key);
if (i < 0) return false;
return accessArray(i).delete(key);
}
@Override
public final Entry remove(final byte[] key) {
final int i = indexFor(key);
if (i < 0) return null;
return accessArray(i).remove(key);
}
@Override
public final ArrayList<RowCollection> removeDoubles() throws RowSpaceExceededException {
final ArrayList<RowCollection> col = new ArrayList<RowCollection>();
synchronized (this.cluster) {
@ -225,6 +245,7 @@ public final class RAMIndexCluster implements Index, Iterable<Row.Entry>, Clonea
return col;
}
@Override
public final Entry removeOne() {
synchronized (this.cluster) {
for (int i = 0; i < this.cluster.length; i++) {
@ -238,6 +259,7 @@ public final class RAMIndexCluster implements Index, Iterable<Row.Entry>, Clonea
return null;
}
@Override
public List<Row.Entry> top(final int count) {
final List<Row.Entry> list = new ArrayList<Row.Entry>();
synchronized (this.cluster) {
@ -256,6 +278,7 @@ public final class RAMIndexCluster implements Index, Iterable<Row.Entry>, Clonea
return list;
}
@Override
public final Entry replace(final Entry row) throws RowSpaceExceededException {
final int i = indexFor(row);
assert i >= 0 : "i = " + i;
@ -263,10 +286,12 @@ public final class RAMIndexCluster implements Index, Iterable<Row.Entry>, Clonea
return accessArray(i).replace(row);
}
@Override
public final Row row() {
return this.rowdef;
}
@Override
@SuppressWarnings("unchecked")
public final CloneableIterator<Entry> rows(final boolean up, final byte[] firstKey) {
synchronized (this.cluster) {
@ -282,10 +307,12 @@ public final class RAMIndexCluster implements Index, Iterable<Row.Entry>, Clonea
}
}
@Override
public final CloneableIterator<Entry> rows() {
return rows(true, null);
}
@Override
public final int size() {
int c = 0;
synchronized (this.cluster) {
@ -294,6 +321,7 @@ public final class RAMIndexCluster implements Index, Iterable<Row.Entry>, Clonea
return c;
}
@Override
public long mem() {
long m = 0;
synchronized (this.cluster) {
@ -302,6 +330,7 @@ public final class RAMIndexCluster implements Index, Iterable<Row.Entry>, Clonea
return m;
}
@Override
public final boolean isEmpty() {
synchronized (this.cluster) {
for (final RAMIndex i: this.cluster) if (i != null && !i.isEmpty()) return false;
@ -309,6 +338,7 @@ public final class RAMIndexCluster implements Index, Iterable<Row.Entry>, Clonea
return true;
}
@Override
public final Iterator<Entry> iterator() {
return this.rows(true, null);
}

View File

@ -29,7 +29,6 @@ package net.yacy.kelondro.io;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
//import java.util.HashMap;
import net.yacy.kelondro.logging.Log;
@ -66,12 +65,14 @@ public final class ByteCountInputStream extends FilterInputStream {
this.byteCountAccountName = accountName;
}
@Override
public final int read(final byte[] b) throws IOException {
final int readCount = super.read(b);
if (readCount > 0) this.byteCount += readCount;
return readCount;
}
@Override
public final int read(final byte[] b, final int off, final int len) throws IOException {
try {
final int readCount = super.read(b, off, len);
@ -82,11 +83,13 @@ public final class ByteCountInputStream extends FilterInputStream {
}
}
@Override
public final int read() throws IOException {
this.byteCount++;
return super.read();
}
@Override
public final long skip(final long len) throws IOException {
final long skipCount = super.skip(len);
if (skipCount > 0) this.byteCount += skipCount;
@ -116,7 +119,8 @@ public final class ByteCountInputStream extends FilterInputStream {
// }
// }
public final void close() throws IOException {
@Override
public final synchronized void close() throws IOException {
try {
super.close();
} catch (OutOfMemoryError e) {

View File

@ -473,7 +473,7 @@ public final class CharBuffer extends Writer {
}
@Override
public void close() {
public synchronized void close() {
this.length = 0;
this.offset = 0;
this.buffer = null; // assist with garbage collection

View File

@ -123,6 +123,7 @@ public final class ConsoleOutErrHandler extends Handler {
}
@Override
public final void publish(final LogRecord record) {
if (!isLoggable(record)) return;
@ -134,7 +135,7 @@ public final class ConsoleOutErrHandler extends Handler {
record.setMessage(msg);
}
if (record.getLevel().intValue() >= splitLevel.intValue()) {
if (record.getLevel().intValue() >= this.splitLevel.intValue()) {
this.stdErrHandler.publish(record);
} else {
this.stdOutHandler.publish(record);
@ -142,12 +143,14 @@ public final class ConsoleOutErrHandler extends Handler {
flush();
}
@Override
public void flush() {
this.stdOutHandler.flush();
this.stdErrHandler.flush();
}
public void close() throws SecurityException {
@Override
public synchronized void close() throws SecurityException {
this.stdOutHandler.close();
this.stdErrHandler.close();
}

View File

@ -75,7 +75,7 @@ public final class ReferenceContainerArray<ReferenceType extends Reference> {
true);
}
public void close() {
public synchronized void close() {
this.array.close(true);
}

View File

@ -101,7 +101,7 @@ public final class ReferenceContainerCache<ReferenceType extends Reference> exte
}
@Override
public void close() {
public synchronized void close() {
this.cache = null;
}

View File

@ -83,7 +83,7 @@ public class ReferenceIterator <ReferenceType extends Reference> extends LookAhe
return null;
}
public void close() {
public synchronized void close() {
if (this.blobs != null) this.blobs.close();
this.blobs = null;
}

View File

@ -108,23 +108,28 @@ public class SQLTable implements Index, Iterable<Row.Entry> {
}
@Override
public long mem() {
return 0;
}
@Override
public byte[] smallestKey() {
return null;
}
@Override
public byte[] largestKey() {
return null;
}
@Override
public String filename() {
return "dbtest." + this.theDBConnection.hashCode();
}
public void close() {
@Override
public synchronized void close() {
if (this.theDBConnection != null) try {
this.theDBConnection.close();
} catch (final SQLException e) {
@ -133,6 +138,7 @@ public class SQLTable implements Index, Iterable<Row.Entry> {
this.theDBConnection = null;
}
@Override
public int size() {
int size = -1;
try {
@ -155,14 +161,17 @@ public class SQLTable implements Index, Iterable<Row.Entry> {
}
}
@Override
public boolean isEmpty() {
return size() == 0;
}
@Override
public Row row() {
return this.rowdef;
}
@Override
public boolean has(final byte[] key) {
try {
return (get(key, false) != null);
@ -171,10 +180,12 @@ public class SQLTable implements Index, Iterable<Row.Entry> {
}
}
@Override
public ArrayList<RowCollection> removeDoubles() {
return new ArrayList<RowCollection>();
}
@Override
public Row.Entry get(final byte[] key, final boolean forcecopy) throws IOException {
try {
final String sqlQuery = "SELECT value from test where hash = ?";
@ -199,6 +210,7 @@ public class SQLTable implements Index, Iterable<Row.Entry> {
}
}
@Override
public Map<byte[], Row.Entry> get(final Collection<byte[]> keys, final boolean forcecopy) throws IOException, InterruptedException {
final Map<byte[], Row.Entry> map = new TreeMap<byte[], Row.Entry>(row().objectOrder);
Row.Entry entry;
@ -209,6 +221,7 @@ public class SQLTable implements Index, Iterable<Row.Entry> {
return map;
}
@Override
public Row.Entry replace(final Row.Entry row) throws IOException {
try {
final Row.Entry oldEntry = remove(row.getPrimaryKeyBytes());
@ -231,6 +244,7 @@ public class SQLTable implements Index, Iterable<Row.Entry> {
}
}
@Override
public boolean put(final Row.Entry row) throws IOException {
try {
final String sqlQuery = "INSERT INTO test (" +
@ -252,6 +266,7 @@ public class SQLTable implements Index, Iterable<Row.Entry> {
}
}
@Override
public synchronized void addUnique(final Row.Entry row) throws IOException {
throw new UnsupportedOperationException();
}
@ -264,6 +279,7 @@ public class SQLTable implements Index, Iterable<Row.Entry> {
throw new UnsupportedOperationException();
}
@Override
public Row.Entry remove(final byte[] key) throws IOException {
PreparedStatement sqlStatement = null;
try {
@ -292,23 +308,28 @@ public class SQLTable implements Index, Iterable<Row.Entry> {
}
}
@Override
public boolean delete(final byte[] key) throws IOException {
return remove(key) != null;
}
@Override
public Row.Entry removeOne() {
return null;
}
@Override
public List<Row.Entry> top(final int count) throws IOException {
return null;
}
@Override
public CloneableIterator<Row.Entry> rows(final boolean up, final byte[] startKey) throws IOException {
// Objects are of type kelondroRow.Entry
return null;
}
@Override
public Iterator<Entry> iterator() {
try {
return rows();
@ -317,10 +338,12 @@ public class SQLTable implements Index, Iterable<Row.Entry> {
}
}
@Override
public CloneableIterator<Row.Entry> rows() throws IOException {
return null;
}
@Override
public CloneableIterator<byte[]> keys(final boolean up, final byte[] startKey) {
// Objects are of type byte[]
return null;
@ -362,10 +385,12 @@ public class SQLTable implements Index, Iterable<Row.Entry> {
return new int[]{0,0,0,0,0,0,0,0,0,0};
}
@Override
public void clear() {
// do nothing
}
@Override
public void deleteOnExit() {
// do nothing
}

View File

@ -161,7 +161,7 @@ public class XMLTables {
return null;
}
public void close() throws IOException {
public synchronized void close() throws IOException {
commit(true);
}

View File

@ -122,7 +122,7 @@ public class NewsDB {
}
}
public void close() {
public synchronized void close() {
if (this.news != null) this.news.close();
this.news = null;
}

View File

@ -98,7 +98,7 @@ public class NewsQueue implements Iterable<NewsDB.Record> {
}
}
public void close() {
public synchronized void close() {
if (this.queueStack != null) this.queueStack.close();
this.queueStack = null;
}

View File

@ -749,7 +749,7 @@ public class WebStructureGraph
}
}
public void close() {
public synchronized void close() {
// finish dns resolving queue
if ( this.publicRefDNSResolvingWorker.isAlive() ) {
log.logInfo("Waiting for the DNS Resolving Queue to terminate");

View File

@ -1551,7 +1551,7 @@ public final class Switchboard extends serverSwitch
return this.crawler.clear();
}
public void close() {
public synchronized void close() {
this.log.logConfig("SWITCHBOARD SHUTDOWN STEP 1: sending termination signal to managed threads:");
MemoryTracker.stopSystemProfiling();
terminateAllThreads(true);

View File

@ -243,7 +243,7 @@ public class DocumentIndex extends Segment
* close the index. This terminates all worker threads and then closes the segment.
*/
@Override
public void close() {
public synchronized void close() {
// send termination signal to worker threads
for ( @SuppressWarnings("unused")
final Worker element : this.worker ) {

View File

@ -303,7 +303,7 @@ public class Segment {
return refCount;
}
public void close() {
public synchronized void close() {
this.termIndex.close();
this.urlMetadata.close();
this.urlCitationIndex.close();

View File

@ -176,7 +176,7 @@ public class Segments implements Iterable<Segment> {
segment(this.process_assignment.get(process)).close();
}
public void close() {
public synchronized void close() {
if (this.segments != null) for (final Segment s: this.segments.values()) s.close();
this.segments = null;
}

View File

@ -436,7 +436,7 @@ public class TarBuffer {
* current block before closing.
* @throws IOException on error
*/
public void close() throws IOException {
public synchronized void close() throws IOException {
if (this.debug) {
System.err.println("TarBuffer.closeBuffer().");
}

View File

@ -111,7 +111,7 @@ public class TarInputStream extends FilterInputStream {
* @throws IOException on error
*/
@Override
public void close() throws IOException {
public synchronized void close() throws IOException {
this.buffer.close();
}

View File

@ -141,7 +141,7 @@ public class TarOutputStream extends FilterOutputStream {
* @throws IOException on error
*/
@Override
public void close() throws IOException {
public synchronized void close() throws IOException {
if (!closed) {
this.finish();
this.buffer.close();