fix for out-of-bounds exception in EcoFS chunk iterator

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@4657 6c8d7289-2bf4-0310-a012-ef5d649a1542
This commit is contained in:
orbiter 2008-04-06 22:28:17 +00:00
parent 41e9c5723c
commit 319144f4b2

View File

@@ -25,12 +25,12 @@
package de.anomic.kelondro;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
import java.util.Iterator;
@@ -529,7 +529,7 @@ public class kelondroEcoFS {
public static class ChunkIterator implements Iterator<byte[]> {
private int recordsize, chunksize;
private InputStream stream;
private DataInputStream stream;
/**
* create a ChunkIterator
@@ -545,7 +545,7 @@ public class kelondroEcoFS {
assert file.length() % recordsize == 0;
this.recordsize = recordsize;
this.chunksize = chunksize;
this.stream = new BufferedInputStream(new FileInputStream(file), 64 * 1024);
this.stream = new DataInputStream(new BufferedInputStream(new FileInputStream(file), 64 * 1024));
}
public boolean hasNext() {
@@ -562,11 +562,9 @@ public class kelondroEcoFS {
int r;
try {
// read the chunk
r = this.stream.read(chunk);
while (r < chunksize) {
r += this.stream.read(chunk, r, chunksize - r);
}
this.stream.readFully(chunk);
// skip remaining bytes
r = chunksize;
while (r < recordsize) {
r += this.stream.skip(recordsize - r);
}