apply new code formatter and save action

2019-11-24 10:20:43 +01:00
parent 5ea82c6a4c
commit 06b379494f
184 changed files with 13455 additions and 13489 deletions

BSFile.java

@@ -37,212 +37,213 @@ import org.slf4j.LoggerFactory;
*/
public class BSFile implements AutoCloseable {
    private static final Logger LOGGER = LoggerFactory.getLogger(BSFile.class);

    public static final int BLOCK_SIZE = 512;

    /*
     * The last disk block of this sequence. This is the block new values will be
     * appended to.
     */
    private BSFileDiskBlock buffer;

    private int offsetInBuffer = 0;

    private boolean dirty = false;

    private final long rootBlockOffset;

    private final DiskStorage diskStorage;

    private final BSFileDiskBlock rootDiskBlock;

    private final BSFileCustomizer customizer;

    BSFile(final long rootBlockOffset, final DiskStorage diskStorage, final BSFileCustomizer customizer) {
        this(new BSFileDiskBlock(diskStorage.getDiskBlock(rootBlockOffset, BLOCK_SIZE)), diskStorage, customizer);
    }

    BSFile(final BSFileDiskBlock rootDiskBlock, final DiskStorage diskStorage, final BSFileCustomizer customizer) {
        this.rootDiskBlock = rootDiskBlock;
        this.customizer = customizer;
        this.rootBlockOffset = rootDiskBlock.getBlockOffset();
        this.diskStorage = diskStorage;
        final long lastBlockNumber = rootDiskBlock.getLastBlockPointer();
        if (lastBlockNumber == rootBlockOffset || lastBlockNumber == 0) {
            buffer = rootDiskBlock;
        } else {
            buffer = new BSFileDiskBlock(diskStorage.getDiskBlock(lastBlockNumber, BLOCK_SIZE));
        }
        offsetInBuffer = determineWriteOffsetInExistingBuffer(buffer);
        customizer.init(buffer);
        LOGGER.trace("create bsFile={} lastBlockNumber={}", rootBlockOffset, lastBlockNumber);
    }

    private int determineWriteOffsetInExistingBuffer(final BSFileDiskBlock buffer) {
        final byte[] buf = buffer.getBuffer();
        int result = 0;
        while (result < buf.length && buf[result] != 0) {
            result++;
        }
        return result;
    }

    public static BSFile existingFile(final long blockNumber, final DiskStorage diskStorage,
            final BSFileCustomizer customizer) {
        return new BSFile(blockNumber, diskStorage, customizer);
    }

    public static BSFile newFile(final DiskStorage diskStorage, final BSFileCustomizer customizer) {
        final long rootBlockOffset = diskStorage.allocateBlock(BLOCK_SIZE);
        LOGGER.trace("create new bsFile={}", rootBlockOffset);
        return new BSFile(rootBlockOffset, diskStorage, customizer);
    }

    public void append(final long value1, final long value2) {
        final long val1 = customizer.preProcessWriteValue1(value1);
        final long val2 = customizer.preProcessWriteValue2(value2);
        final int bytesWritten = VariableByteEncoder.encodeInto(val1, val2, buffer.getBuffer(), offsetInBuffer);
        if (bytesWritten == 0) {
            flushFullBufferAndCreateNew();
            customizer.newBlock();
            append(value1, value2);
        }
        offsetInBuffer += bytesWritten;
        dirty = true;
    }

    public void append(final long value) {
        int bytesWritten = VariableByteEncoder.encodeInto(value, buffer.getBuffer(), offsetInBuffer);
        if (bytesWritten == 0) {
            flushFullBufferAndCreateNew();
            bytesWritten = VariableByteEncoder.encodeInto(value, buffer.getBuffer(), offsetInBuffer);
            assert bytesWritten > 0 : "after a flush the buffer is empty, so it should be possible to write a few bytes";
        }
        offsetInBuffer += bytesWritten;
        dirty = true;
    }

    private void flushFullBufferAndCreateNew() {
        final long newBlockOffset = diskStorage.allocateBlock(BLOCK_SIZE);
        if (buffer == rootDiskBlock) {
            // root block and current block are the same, so we need
            // to update only one
            buffer.setLastBlockOffset(newBlockOffset);
            buffer.setNextBlockOffset(newBlockOffset);
            buffer.writeAsync();
        } else {
            rootDiskBlock.writeLastBlockOffset(newBlockOffset);
            buffer.setNextBlockOffset(newBlockOffset);
            buffer.writeAsync();
        }
        // set the new buffer
        buffer = new BSFileDiskBlock(diskStorage.getDiskBlock(newBlockOffset, BLOCK_SIZE));
        offsetInBuffer = 0;
        dirty = false;
        LOGGER.trace("flushFullBufferAndCreateNew bsFile={} newBlock={}", rootBlockOffset, newBlockOffset);
    }

    public void flush() {
        LOGGER.trace("flush bsFile={} dirty={} file={}", rootBlockOffset, dirty,
                diskStorage.getRelativeDatabaseFileForLogging());
        if (dirty) {
            buffer.writeAsync();
        }
    }

    public Optional<Long> getLastValue() {
        final byte[] buf = buffer.getBuffer();
        final LongList bufferedLongs = VariableByteEncoder.decode(buf);
        final Optional<Long> result;
        if (bufferedLongs.isEmpty()) {
            result = Optional.empty();
        } else {
            final long lastValue = bufferedLongs.get(bufferedLongs.size() - 1);
            result = Optional.of(lastValue);
        }
        return result;
    }

    public Stream<LongList> streamOfLongLists() {
        final Iterator<LongList> iterator = new LongListIterator(rootBlockOffset, diskStorage);
        final Stream<LongList> stream = StreamSupport
                .stream(Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false);
        final Optional<Function<LongList, LongList>> mapper = customizer.getStreamMapper();
        if (mapper.isPresent()) {
            return stream.map(mapper.get());
        }
        return stream;
    }

    private static class LongListIterator implements Iterator<LongList> {

        private LongList next = null;
        private long nextBlockOffset;

        private final DiskStorage diskStorage;

        public LongListIterator(final long nextBlockNumber, final DiskStorage diskStorage) {
            this.nextBlockOffset = nextBlockNumber;
            this.diskStorage = diskStorage;
        }

        @Override
        public boolean hasNext() {
            return nextBlockOffset != BSFileDiskBlock.NO_NEXT_POINTER;
        }

        @Override
        public LongList next() {
            if (nextBlockOffset == BSFileDiskBlock.NO_NEXT_POINTER) {
                throw new NoSuchElementException();
            }
            final BSFileDiskBlock diskBlock = getDiskBlock(nextBlockOffset);
            nextBlockOffset = diskBlock.getNextBlockNumber();
            final byte[] buf = diskBlock.getBuffer();
            next = VariableByteEncoder.decode(buf);
            return next;
        }

        private BSFileDiskBlock getDiskBlock(final long blockOffset) {
            final DiskBlock diskBlock = diskStorage.getDiskBlock(blockOffset, BLOCK_SIZE);
            return new BSFileDiskBlock(diskBlock);
        }
    }

    public LongList asLongList() {
        final LongList result = new LongList();
        streamOfLongLists().forEachOrdered(result::addAll);
        return result;
    }

    public long getRootBlockOffset() {
        return rootBlockOffset;
    }

    @Override
    public void close() {
        flush();
    }
}
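
For orientation, a minimal usage sketch of BSFile (illustrative, not part of this commit; the method name and path are invented, and NullCustomizer is the no-op customizer shown further down in this diff):

// Hypothetical example: write two values into a fresh BSFile and read them back.
static LongList writeAndReadBack(final Path databaseFile) {
    try (DiskStorage diskStorage = new DiskStorage(databaseFile, null);
            BSFile bsFile = BSFile.newFile(diskStorage, NullCustomizer.INSTANCE)) {
        bsFile.append(42L);
        bsFile.append(43L);
        // streamOfLongLists() re-reads blocks from disk, so pending
        // appends must be flushed before reading them back
        bsFile.flush();
        return bsFile.asLongList(); // [42, 43]
    }
}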

BSFileCustomizer.java

@@ -6,13 +6,13 @@ import java.util.function.Function;
import org.lucares.collections.LongList;
public interface BSFileCustomizer {
    void init(BSFileDiskBlock lastDiskBlockOfStream);

    Optional<Function<LongList, LongList>> getStreamMapper();

    void newBlock();

    long preProcessWriteValue1(long value);

    long preProcessWriteValue2(long value);
}

BSFileDiskBlock.java

@@ -8,90 +8,90 @@ import org.lucares.utils.byteencoder.VariableByteEncoder;
class BSFileDiskBlock {
    protected static final int NEXT_POINTER_OFFSET = 0;
    public static final long NO_NEXT_POINTER = 0;
    private static final int LAST_BLOCK_POINTER_POSITION = 8;
    public static final long NO_LAST_BLOCK = 0;
    private static final int INT_SEQUENCE_OFFSET = 8 // next block pointer
            + 8; // last block pointer

    private final DiskBlock diskBlock;
    private long nextBlockOffset = 0;
    private long lastBlockOffset = 0;

    private byte[] buffer = null;

    public BSFileDiskBlock(final DiskBlock diskBlock) {
        this.diskBlock = diskBlock;
    }

    public byte[] getBuffer() {
        if (buffer == null) {
            final ByteBuffer byteBuffer = diskBlock.getByteBuffer();
            this.buffer = new byte[byteBuffer.capacity() - INT_SEQUENCE_OFFSET];
            byteBuffer.position(INT_SEQUENCE_OFFSET);
            byteBuffer.get(buffer);
        }
        return buffer;
    }

    public long getBlockOffset() {
        return diskBlock.getBlockOffset();
    }

    public void setNextBlockOffset(final long nextBlockOffset) {
        this.nextBlockOffset = nextBlockOffset;
    }

    public long getLastBlockPointer() {
        if (lastBlockOffset <= 0) {
            lastBlockOffset = diskBlock.getByteBuffer().getLong(LAST_BLOCK_POINTER_POSITION);
        }
        return lastBlockOffset;
    }

    public long getNextBlockNumber() {
        if (nextBlockOffset <= 0) {
            nextBlockOffset = diskBlock.getByteBuffer().getLong(NEXT_POINTER_OFFSET);
        }
        return nextBlockOffset;
    }

    public void setLastBlockOffset(final long lastBlockOffset) {
        this.lastBlockOffset = lastBlockOffset;
    }

    public void writeLastBlockOffset(final long lastBlockOffset) {
        this.lastBlockOffset = lastBlockOffset;
        diskBlock.getByteBuffer().putLong(LAST_BLOCK_POINTER_POSITION, lastBlockOffset);
    }

    private void writeBufferToByteBuffer() {
        diskBlock.getByteBuffer().position(INT_SEQUENCE_OFFSET);
        diskBlock.getByteBuffer().put(buffer);
    }

    private void writeBlockHeader() {
        diskBlock.getByteBuffer().putLong(NEXT_POINTER_OFFSET, nextBlockOffset);
        diskBlock.getByteBuffer().putLong(LAST_BLOCK_POINTER_POSITION, lastBlockOffset);
    }

    public void writeAsync() {
        writeBlockHeader();
        writeBufferToByteBuffer();
    }

    public void force() {
        diskBlock.force();
    }

    @Override
    public String toString() {
        final LongList bufferDecoded = VariableByteEncoder.decode(buffer);
        return "BSFileDiskBlock[bufferDecoded=" + bufferDecoded + "]";
    }
}
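
The constants above fully determine the on-disk block layout; summarized here for reference (derived from this file and BSFile.BLOCK_SIZE = 512):

// Layout of one 512-byte BSFile block, as implied by the offsets above:
//
//   bytes  0..7    next-block pointer   (NEXT_POINTER_OFFSET, 0 = no next block)
//   bytes  8..15   last-block pointer   (LAST_BLOCK_POINTER_POSITION, 0 = none)
//   bytes 16..511  variable-byte encoded payload (from INT_SEQUENCE_OFFSET = 16)
//
// getBuffer() therefore exposes a 496-byte payload array (512 - 16).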

LongStreamFile.java

@@ -8,41 +8,41 @@ import org.lucares.pdb.diskstorage.DiskStorage;
public class LongStreamFile implements AutoCloseable {
    private final BSFile bsFile;

    LongStreamFile(final BSFile bsFile) {
        this.bsFile = bsFile;
    }

    public static LongStreamFile existingFile(final long blockNumber, final DiskStorage diskStorage)
            throws IOException {
        final BSFile bsFile = BSFile.existingFile(blockNumber, diskStorage, NullCustomizer.INSTANCE);
        return new LongStreamFile(bsFile);
    }

    public static LongStreamFile newFile(final DiskStorage diskStorage) throws IOException {
        final BSFile bsFile = BSFile.newFile(diskStorage, NullCustomizer.INSTANCE);
        return new LongStreamFile(bsFile);
    }

    public void append(final long value) throws IOException {
        bsFile.append(value);
    }

    public Stream<LongList> streamOfLongLists() {
        return bsFile.streamOfLongLists();
    }

    public LongList asLongList() {
        final LongList result = new LongList();
        streamOfLongLists().forEachOrdered(result::addAll);
        return result;
    }

    @Override
    public void close() {
        bsFile.close();
    }
}
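
A short usage sketch (illustrative, not part of this commit): each LongList emitted by streamOfLongLists() holds the values decoded from one disk block, and the root block offset is assumed to have been recorded when the file was created.

// Hypothetical example: re-open an existing file and count its values block by block.
static long countValues(final DiskStorage diskStorage, final long rootBlockOffset) throws IOException {
    try (LongStreamFile file = LongStreamFile.existingFile(rootBlockOffset, diskStorage)) {
        return file.streamOfLongLists().mapToLong(LongList::size).sum();
    }
}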

NullCustomizer.java

@@ -7,31 +7,31 @@ import org.lucares.collections.LongList;
public class NullCustomizer implements BSFileCustomizer {
    public static final NullCustomizer INSTANCE = new NullCustomizer();

    @Override
    public void init(final BSFileDiskBlock lastDiskBlockOfStream) {
        // nothing to do - this is a NullObject
    }

    @Override
    public Optional<Function<LongList, LongList>> getStreamMapper() {
        // no mapper to return - this is a NullObject
        return Optional.empty();
    }

    @Override
    public void newBlock() {
        // nothing to do - this is a NullObject
    }

    @Override
    public long preProcessWriteValue1(final long value) {
        return value;
    }

    @Override
    public long preProcessWriteValue2(final long value) {
        return value;
    }
}

TimeSeriesCustomizer.java

@@ -8,71 +8,71 @@ import org.lucares.utils.byteencoder.VariableByteEncoder;
public class TimeSeriesCustomizer implements BSFileCustomizer {
    private static class TimeStampDeltaDecoder implements Function<LongList, LongList> {

        /**
         * Computes the inverse of the delta encoding applied in
         * {@link TimeSeriesCustomizer#preProcessWriteValue1(long)}.
         */
        @Override
        public LongList apply(final LongList t) {
            long lastTimeValue = 0;
            for (int i = 0; i < t.size(); i += 2) {
                lastTimeValue += t.get(i);
                t.set(i, lastTimeValue);
            }
            return t;
        }
    }

    private static final TimeStampDeltaDecoder TIME_DELTA_DECODER = new TimeStampDeltaDecoder();

    private long lastEpochMilli;

    @Override
    public void init(final BSFileDiskBlock lastDiskBlockOfStream) {
        lastEpochMilli = determineLastEpochMilli(lastDiskBlockOfStream);
    }

    private long determineLastEpochMilli(final BSFileDiskBlock diskBlock) {
        // get the time/value delta encoded longs
        final byte[] buf = diskBlock.getBuffer();
        LongList longList = VariableByteEncoder.decode(buf);
        final long result;
        if (longList.size() < 2) {
            // only new files have empty disk blocks
            // and empty disk blocks have time offset 0
            result = 0;
        } else {
            // decode the deltas to get the correct timestamps
            longList = TIME_DELTA_DECODER.apply(longList);
            // return the last timestamp
            result = longList.get(longList.size() - 2);
        }
        return result;
    }

    @Override
    public Optional<Function<LongList, LongList>> getStreamMapper() {
        return Optional.of(TIME_DELTA_DECODER);
    }

    @Override
    public void newBlock() {
        lastEpochMilli = 0;
    }

    @Override
    public long preProcessWriteValue1(final long epochMilli) {
        final long epochMilliDelta = epochMilli - lastEpochMilli;
        lastEpochMilli = epochMilli;
        return epochMilliDelta;
    }

    @Override
    public long preProcessWriteValue2(final long value) {
        return value;
    }
}
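
A worked example of the delta scheme above (numbers invented): preProcessWriteValue1 replaces each timestamp with its distance to the previous one, and TimeStampDeltaDecoder reverses that on read.

// Appending the pairs (t=1000, v=7), (t=1005, v=8), (t=1007, v=9) stores
//   [1000, 7, 5, 8, 2, 9]        // timestamp deltas, values unchanged
// TimeStampDeltaDecoder.apply() turns a decoded block back into
//   [1000, 7, 1005, 8, 1007, 9]  // original timestamps restored
// Small deltas need fewer bytes in VariableByteEncoder, which is the point
// of the scheme.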

TimeSeriesFile.java

@@ -8,52 +8,52 @@ import org.lucares.pdb.diskstorage.DiskStorage;
public class TimeSeriesFile implements AutoCloseable {
    private final BSFile bsFile;

    private TimeSeriesFile(final BSFile bsFile) {
        this.bsFile = bsFile;
    }

    public static TimeSeriesFile existingFile(final long blockNumber, final DiskStorage diskStorage) {
        final BSFile bsFile = BSFile.existingFile(blockNumber, diskStorage, new TimeSeriesCustomizer());
        return new TimeSeriesFile(bsFile);
    }

    public static TimeSeriesFile newFile(final DiskStorage diskStorage) {
        final BSFile bsFile = BSFile.newFile(diskStorage, new TimeSeriesCustomizer());
        return new TimeSeriesFile(bsFile);
    }

    public void appendTimeValue(final long epochMilli, final long value) {
        bsFile.append(epochMilli, value);
    }

    public Stream<LongList> streamOfLongLists() {
        return bsFile.streamOfLongLists();
    }

    public LongList asTimeValueLongList() {
        final LongList result = new LongList();
        streamOfLongLists().forEachOrdered(result::addAll);
        return result;
    }

    @Override
    public void close() {
        bsFile.close();
    }

    public long getRootBlockOffset() {
        return bsFile.getRootBlockOffset();
    }

    public Optional<Long> getLastValue() {
        return bsFile.getLastValue();
    }

    public void flush() {
        bsFile.flush();
    }
}
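
A usage sketch for the facade above (illustrative; the method name, path, timestamps, and values are invented). Appending in ascending time order keeps the stored deltas small:

static Optional<Long> writeTwoPoints(final Path databaseFile) {
    try (DiskStorage diskStorage = new DiskStorage(databaseFile, null);
            TimeSeriesFile ts = TimeSeriesFile.newFile(diskStorage)) {
        ts.appendTimeValue(1_574_585_000_000L, 21L);
        ts.appendTimeValue(1_574_585_001_000L, 42L);
        ts.flush();
        return ts.getLastValue(); // Optional[42]
    }
}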

DiskBlock.java

@@ -5,52 +5,52 @@ import java.nio.MappedByteBuffer;
public class DiskBlock {
    private byte[] buffer = null;
    private final long blockOffset;

    private final ByteBuffer byteBuffer;

    public DiskBlock(final long blockOffset, final ByteBuffer byteBuffer) {
        this.blockOffset = blockOffset;
        this.byteBuffer = byteBuffer;
    }

    public byte[] getBuffer() {
        if (buffer == null) {
            this.buffer = new byte[byteBuffer.capacity()];
            byteBuffer.get(buffer);
        }
        return buffer;
    }

    public ByteBuffer getByteBuffer() {
        return byteBuffer;
    }

    public long getBlockOffset() {
        return blockOffset;
    }

    private void writeBufferToByteBuffer() {
        byteBuffer.position(0);
        byteBuffer.put(buffer);
    }

    public void writeAsync() {
        writeBufferToByteBuffer();
    }

    public void force() {
        // some tests use HeapByteBuffer and don't support force
        if (byteBuffer instanceof MappedByteBuffer) {
            ((MappedByteBuffer) byteBuffer).force();
        }
    }

    @Override
    public String toString() {
        return "DiskBlock[" + blockOffset + "]";
    }
}

DiskStorage.java

@@ -14,273 +14,273 @@ import org.slf4j.LoggerFactory;
public class DiskStorage implements AutoCloseable {
    private static final Logger LOGGER = LoggerFactory.getLogger(DiskStorage.class);

    private static final long FREE_LIST_ROOT_OFFSET = 0;
    private static final long NO_POINTER = 0;
    private static final int FREE_LIST_NEXT_POINTER = 0;
    private static final int FREE_LIST_PREV_POINTER = 8;
    private static final int FREE_LIST_SIZE = 16;
    private static final int FREE_LIST_NODE_SIZE = 32;

    private final FileChannel fileChannel;

    private Path relativeDatabaseFileForLogging;

    public DiskStorage(final Path databaseFile, Path storageBasePath) {
        this.relativeDatabaseFileForLogging = storageBasePath != null ? storageBasePath.relativize(databaseFile)
                : databaseFile;
        try {
            Files.createDirectories(databaseFile.getParent());
            fileChannel = FileChannel.open(databaseFile, StandardOpenOption.READ, StandardOpenOption.WRITE,
                    StandardOpenOption.CREATE);
            initIfNew();
        } catch (final IOException e) {
            throw new DiskStorageException(e);
        }
    }

    private void initIfNew() throws IOException {
        if (fileChannel.size() == 0) {
            // file is new -> add root of the free list
            writeFreeListRootNodePosition(NO_POINTER);
        }
    }

    public DiskBlock getDiskBlock(final long blockOffset, final int blockSize) {
        try {
            LOGGER.trace("read block={} file={}", blockOffset, relativeDatabaseFileForLogging);
            final var byteBuffer = fileChannel.map(MapMode.READ_WRITE, blockOffset, blockSize);
            return new DiskBlock(blockOffset, byteBuffer);
        } catch (final IOException e) {
            throw new DiskStorageException(e);
        }
    }

    public Path getRelativeDatabaseFileForLogging() {
        return relativeDatabaseFileForLogging;
    }

    @Override
    public void close() {
        try {
            fileChannel.force(true);
            fileChannel.close();
        } catch (final IOException e) {
            throw new DiskStorageException(e);
        }
    }

    public synchronized long allocateBlock(final int blockSize) {
        if (blockSize < FREE_LIST_NODE_SIZE) {
            throw new IllegalArgumentException("The minimal allocation size is 32 byte.");
        }
        try {
            final var optionalFreeBlock = findFreeBlockWithSize(blockSize);
            if (optionalFreeBlock.isPresent()) {
                final FreeListNode freeBlock = optionalFreeBlock.get();
                removeBlockFromFreeList(freeBlock);
                clearBlock(freeBlock);
                return freeBlock.getOffset();
            } else {
                return allocateNewBlock(blockSize);
            }
        } catch (final IOException e) {
            throw new DiskStorageException(e);
        }
    }

    private long allocateNewBlock(final int blockSize) throws IOException {
        final var buffer = new byte[blockSize];
        final var src = ByteBuffer.wrap(buffer);
        // block numbers start with 1, so that the uninitialized value
        // (0) means 'no block'. That way we do not have to write
        // data to a newly created block, which reduces IO.
        final var blockOffset = fileChannel.size();
        fileChannel.write(src, fileChannel.size());
        return blockOffset;
    }

    public synchronized void free(final long blockOffset, final int blockSize) throws IOException {
        final var neighboringFreeListNode = getNeighboringFreeListNode(blockOffset);
        if (neighboringFreeListNode.isPresent()) {
            // insert new free node into the free list
            final var prev = neighboringFreeListNode.get();
            insertFreeListNode(prev, blockOffset, blockSize);
        } else {
            // add new free list node as the first node in the list
            insertFreeListNodeAsNewRoot(blockOffset, blockSize);
        }
    }

    private void insertFreeListNodeAsNewRoot(final long blockOffset, final int blockSize) throws IOException {
        final var freeListRootNodePosition = readFreeListRootNodePosition();
        if (freeListRootNodePosition > 0) {
            // there are free list nodes, but they are after the new node
            final var next = readFreeListNode(freeListRootNodePosition);
            final var newNode = new FreeListNode(blockOffset, blockSize);
            FreeListNode.link(newNode, next);
            writeFreeListNode(newNode, next);
            writeFreeListRootNodePosition(blockOffset);
        } else {
            // this is the first free list node
            final var newNode = new FreeListNode(blockOffset, blockSize);
            writeFreeListNode(newNode);
            writeFreeListRootNodePosition(blockOffset);
        }
    }

    private void insertFreeListNode(final FreeListNode prev, final long blockOffset, final int blockSize)
            throws IOException {
        final var newNode = new FreeListNode(blockOffset, blockSize);
        final var next = prev.hasNext() ? readFreeListNode(prev.getNext()) : null;
        FreeListNode.link(prev, newNode, next);
        writeFreeListNode(prev, newNode, next);
    }

    /**
     *
     * @param blockOffset the offset of the block that is about to be free'd
     * @return the free list node before the block
     * @throws IOException
     */
    private Optional<FreeListNode> getNeighboringFreeListNode(final long blockOffset) throws IOException {
        FreeListNode result = null;
        final long freeListRootNodePosition = readFreeListRootNodePosition();
        if (freeListRootNodePosition < blockOffset) {
            long nextFreeListNodeOffset = freeListRootNodePosition;
            while (nextFreeListNodeOffset > 0) {
                final var freeListNode = readFreeListNode(nextFreeListNodeOffset);
                if (freeListNode.getOffset() > blockOffset) {
                    break;
                }
                nextFreeListNodeOffset = freeListNode.getNext();
                result = freeListNode;
            }
        }
        return Optional.ofNullable(result);
    }

    private Optional<FreeListNode> findFreeBlockWithSize(final long blockSize) throws IOException {
        FreeListNode result = null;
        final long freeListRootNodePosition = readFreeListRootNodePosition();
        long nextFreeListNodeOffset = freeListRootNodePosition;
        while (nextFreeListNodeOffset > 0) {
            final var freeListNode = readFreeListNode(nextFreeListNodeOffset);
            if (freeListNode.getSize() == blockSize) {
                result = freeListNode;
                break;
            }
            nextFreeListNodeOffset = freeListNode.getNext();
        }
        return Optional.ofNullable(result);
    }

    private void clearBlock(final FreeListNode freeBlock) throws IOException {
        final var src = ByteBuffer.allocate(freeBlock.getSize());
        fileChannel.write(src, freeBlock.getOffset());
    }

    private void removeBlockFromFreeList(final FreeListNode freeBlock) throws IOException {
        if (freeBlock.getPrev() == 0) {
            writeFreeListRootNodePosition(freeBlock.getNext());
        }
        if (freeBlock.getNext() > 0) {
            final FreeListNode next = readFreeListNode(freeBlock.getNext());
            next.setPrev(freeBlock.getPrev());
            writeFreeListNode(next);
        }
        if (freeBlock.getPrev() > 0) {
            final FreeListNode prev = readFreeListNode(freeBlock.getPrev());
            prev.setNext(freeBlock.getNext());
            writeFreeListNode(prev);
        }
    }

    private FreeListNode readFreeListNode(final long freeListNodePosition) throws IOException {
        final var freeListNode = ByteBuffer.allocate(FREE_LIST_NODE_SIZE);
        fileChannel.read(freeListNode, freeListNodePosition);
        final long offset = freeListNodePosition;
        final long next = freeListNode.getLong(FREE_LIST_NEXT_POINTER);
        final long prev = freeListNode.getLong(FREE_LIST_PREV_POINTER);
        final int size = freeListNode.getInt(FREE_LIST_SIZE);
        return new FreeListNode(offset, next, prev, size);
    }

    private void writeFreeListNode(final FreeListNode... nodes) throws IOException {
        for (final FreeListNode node : nodes) {
            if (node != null) {
                final var src = ByteBuffer.allocate(FREE_LIST_NODE_SIZE);
                src.putLong(FREE_LIST_NEXT_POINTER, node.getNext());
                src.putLong(FREE_LIST_PREV_POINTER, node.getPrev());
                src.putInt(FREE_LIST_SIZE, node.getSize());
                fileChannel.write(src, node.getOffset());
            }
        }
    }

    private long readFreeListRootNodePosition() throws IOException {
        final var freeListFirstBlock = ByteBuffer.allocate(8);
        fileChannel.read(freeListFirstBlock, FREE_LIST_ROOT_OFFSET);
        return freeListFirstBlock.getLong(0);
    }

    private void writeFreeListRootNodePosition(final long freeListRootNodePosition) throws IOException {
        final var freeListFirstBlock = ByteBuffer.allocate(8);
        freeListFirstBlock.putLong(0, freeListRootNodePosition);
        fileChannel.write(freeListFirstBlock, FREE_LIST_ROOT_OFFSET);
    }

    public synchronized void ensureAlignmentForNewBlocks(final int alignment) {
        try {
            final long size = fileChannel.size();
            final int alignmentMismatch = Math.floorMod(size, alignment);
            if (alignmentMismatch != 0) {
                // The next allocated block would not be aligned. Therefore we allocate a
                // throw-away block.
                allocateNewBlock(alignment - alignmentMismatch);
            }
        } catch (final IOException e) {
            throw new DiskStorageException(e);
        }
    }

    public long size() {
        try {
            return fileChannel.size();
        } catch (final IOException e) {
            throw new DiskStorageException(e);
        }
    }

    public int minAllocationSize() {
        return FREE_LIST_NODE_SIZE;
    }
}
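
A behavioural sketch of the allocator (illustrative, not part of this commit): freed blocks are kept in an offset-ordered, doubly linked free list whose root pointer lives at file offset 0, and allocateBlock prefers an exact-size match from that list over growing the file.

// Hypothetical example of block reuse; 512 is well above the 32-byte minimum.
static void reuseFreedBlock(final Path databaseFile) throws IOException {
    try (DiskStorage storage = new DiskStorage(databaseFile, null)) {
        final long a = storage.allocateBlock(512); // grows the file
        final long b = storage.allocateBlock(512); // grows the file again
        storage.free(a, 512);                      // links a into the free list
        final long c = storage.allocateBlock(512); // exact-size match: c == a
    }
}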

DiskStorageException.java

@@ -2,18 +2,18 @@ package org.lucares.pdb.diskstorage;
public class DiskStorageException extends RuntimeException {
    private static final long serialVersionUID = 1683775743640383633L;

    public DiskStorageException(final String message, final Throwable cause) {
        super(message, cause);
    }

    public DiskStorageException(final String message) {
        super(message);
    }

    public DiskStorageException(final Throwable cause) {
        super(cause);
    }
}

FreeListNode.java

@@ -1,82 +1,82 @@
package org.lucares.pdb.diskstorage;
public class FreeListNode {
    private final long offset;
    private long next;
    private long prev;
    private int size;

    public FreeListNode(final long offset, final int size) {
        this.offset = offset;
        this.size = size;
    }

    public FreeListNode(final long offset, final long next, final long prev, final int size) {
        this.offset = offset;
        this.next = next;
        this.prev = prev;
        this.size = size;
    }

    public long getOffset() {
        return offset;
    }

    public long getNext() {
        return next;
    }

    public void setNext(final long next) {
        this.next = next;
    }

    public void setNext(final FreeListNode next) {
        this.next = next != null ? next.getOffset() : 0;
    }

    public long getPrev() {
        return prev;
    }

    public void setPrev(final long prev) {
        this.prev = prev;
    }

    public void setPrev(final FreeListNode prev) {
        this.prev = prev != null ? prev.getOffset() : 0;
    }

    public int getSize() {
        return size;
    }

    public void setSize(final int size) {
        this.size = size;
    }

    @Override
    public String toString() {
        return "FreeListNode [offset=" + offset + ", next=" + next + ", prev=" + prev + ", size=" + size + "]";
    }

    public boolean hasNext() {
        return next != 0;
    }

    public static void link(final FreeListNode prev, final FreeListNode next) {
        prev.setNext(next);
        next.setPrev(prev);
    }

    public static void link(final FreeListNode prev, final FreeListNode middle, final FreeListNode next) {
        if (prev != null) {
            prev.setNext(middle);
        }
        middle.setPrev(prev);
        middle.setNext(next);
        if (next != null) {
            // the node after `middle` must point back at `middle`
            next.setPrev(middle);
        }
    }
}
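
A small illustration of the three-argument link (offsets invented); after splicing, both neighbours point at the middle node:

final FreeListNode prev = new FreeListNode(100, 32);
final FreeListNode middle = new FreeListNode(200, 32);
final FreeListNode next = new FreeListNode(300, 32);
FreeListNode.link(prev, middle, next);
// prev.getNext()   == 200    middle.getPrev() == 100
// middle.getNext() == 300    next.getPrev()   == 200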

ByteArrayKey.java

@@ -3,77 +3,77 @@ package org.lucares.pdb.map;
import java.util.Arrays;
public final class ByteArrayKey implements Comparable<ByteArrayKey> {
    private final byte[] bytes;

    public ByteArrayKey(final byte[] bytes) {
        this.bytes = bytes;
    }

    @Override
    public int compareTo(final ByteArrayKey o) {
        return compare(bytes, o.bytes);
    }

    public static int compare(final byte[] key, final byte[] otherKey) {
        return Arrays.compare(key, otherKey);
    }

    public static boolean isPrefix(final byte[] key, final byte[] keyPrefix) {
        return compareKeyPrefix(key, keyPrefix) == 0;
    }

    /**
     * Same as {@link #compare(byte[], byte[])}, but returns 0 if {@code prefix} is
     * a prefix of the key. {@link #compare(byte[], byte[])} returns values &gt;0 in
     * that case, because the key is longer than the prefix.
     *
     * @param prefix the prefix
     * @return 0 if {@code prefix} is a prefix of the key, otherwise the value is
     *         defined by {@link #compare(byte[], byte[])}
     */
    public static int compareKeyPrefix(final byte[] key, final byte[] prefix) {
        int i = 0;
        while (i < key.length && i < prefix.length) {
            if (key[i] != prefix[i]) {
                return key[i] - prefix[i];
            }
            i++;
        }
        return key.length > prefix.length ? 0 : key.length - prefix.length;
    }

    public static boolean equal(final byte[] key, final byte[] otherKey) {
        return compare(key, otherKey) == 0;
    }

    @Override
    public String toString() {
        return Arrays.toString(bytes);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + Arrays.hashCode(bytes);
        return result;
    }

    @Override
    public boolean equals(final Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        final ByteArrayKey other = (ByteArrayKey) obj;
        if (!Arrays.equals(bytes, other.bytes))
            return false;
        return true;
    }
}
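
Worked examples of the prefix comparison above (byte arrays invented):

final byte[] key = {1, 2, 3, 4};
ByteArrayKey.compareKeyPrefix(key, new byte[] {1, 2}); // 0: {1, 2} is a prefix
ByteArrayKey.compareKeyPrefix(key, new byte[] {1, 3}); // -1: differs at index 1 (2 - 3)
ByteArrayKey.compareKeyPrefix(new byte[] {1, 2}, key); // -2: key shorter than the prefix
ByteArrayKey.isPrefix(key, key);                       // true: a key is its own prefix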

Empty.java

@@ -14,13 +14,13 @@ import org.lucares.pdb.map.PersistentMap.EncoderDecoder;
* {@link Empty} solves this by providing a single unmodifiable value.
*/
public final class Empty {
    public static final Empty INSTANCE = new Empty();

    private Empty() {
    }

    @Override
    public String toString() {
        return "<empty>";
    }
}

NodeEntry.java

@@ -9,158 +9,158 @@ import java.util.function.Predicate;
import org.lucares.utils.byteencoder.VariableByteEncoder;
class NodeEntry {
    enum ValueType {
        VALUE_INLINE((byte) 1), NODE_POINTER((byte) 2);

        private final byte b;

        ValueType(final byte b) {
            this.b = b;
        }

        static ValueType fromByte(final byte b) {
            for (final ValueType type : values()) {
                if (type.b == b) {
                    return type;
                }
            }
            throw new IllegalStateException("Cannot map byte " + b + " to a value type.");
        }

        public byte asByte() {
            return b;
        }
    }

    static final class KeyMatches implements Predicate<NodeEntry> {

        private final byte[] key;

        public KeyMatches(final byte[] key) {
            this.key = key;
        }

        @Override
        public boolean test(final NodeEntry t) {
            return Arrays.equals(key, t.getKey());
        }
    }

    private final ValueType type;
    private final byte[] key;
    private final byte[] value;

    public NodeEntry(final ValueType type, final byte[] key, final byte[] value) {
        this.type = type;
        this.key = key;
        this.value = value;
    }

    public ValueType getType() {
        return type;
    }

    public byte[] getKey() {
        return key;
    }

    public byte[] getValue() {
        return value;
    }

    public int size() {
        return 1 + key.length + value.length;
    }

    @Override
    public String toString() {
        final String valueAsString = isInnerNode() ? String.valueOf(VariableByteEncoder.decodeFirstValue(value))
                : new String(value, StandardCharsets.UTF_8);
        return "NodeEntry [type=" + type + ", key=" + new String(key, StandardCharsets.UTF_8) + ", value="
                + valueAsString + "]";
    }

    public <K, V> String toString(final Function<byte[], K> keyDecoder, final Function<byte[], V> valueDecoder) {
        final String valueAsString = isInnerNode() ? String.valueOf(VariableByteEncoder.decodeFirstValue(value))
                : String.valueOf(valueDecoder.apply(value));
        final String keyAsString;
        if (Arrays.equals(key, PersistentMap.MAX_KEY)) {
            keyAsString = "<<<MAX_KEY>>>";
        } else {
            keyAsString = String.valueOf(keyDecoder.apply(key));
        }
        return "NodeEntry [type=" + type + ", key=" + keyAsString + ", value=" + valueAsString + "]";
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + Arrays.hashCode(key);
        result = prime * result + ((type == null) ? 0 : type.hashCode());
        result = prime * result + Arrays.hashCode(value);
        return result;
    }

    @Override
    public boolean equals(final Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        final NodeEntry other = (NodeEntry) obj;
        if (!Arrays.equals(key, other.key))
            return false;
        if (type != other.type)
            return false;
        if (!Arrays.equals(value, other.value))
            return false;
        return true;
    }

    public static int neededBytes(final Collection<NodeEntry> entries) {
        return entries.stream().mapToInt(NodeEntry::size).sum();
    }

    public int compare(final byte[] otherKey) {
        return ByteArrayKey.compare(key, otherKey);
    }

    public boolean isPrefix(final byte[] keyPrefix) {
        return ByteArrayKey.compareKeyPrefix(key, keyPrefix) == 0;
    }

    /**
     * Same as {@link #compare(byte[])}, but returns 0 if {@code prefix} is a prefix
     * of the key. {@link #compare(byte[])} returns values &gt;0 in that case,
     * because the key is longer than the prefix.
     *
     * @param prefix the prefix
     * @return 0 if {@code prefix} is a prefix of the key, otherwise the value is
     *         defined by {@link #compare(byte[])}
     */
    public int compareKeyPrefix(final byte[] prefix) {
        return ByteArrayKey.compareKeyPrefix(key, prefix);
    }

    public boolean equal(final byte[] otherKey) {
        return compare(otherKey) == 0;
    }

    public boolean isDataNode() {
        return type == ValueType.VALUE_INLINE;
    }

    public boolean isInnerNode() {
        return type == ValueType.NODE_POINTER;
    }
}
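A small hedged sketch of how size() maps to the on-disk entry footprint (one type byte, then the key bytes, then the value bytes, as serialized by PersistentMapDiskNode further below). The literals and the harness are made up; only NodeEntry and ValueType come from the listing above.

// Hypothetical usage sketch, not part of this commit.
final byte[] key = "host1.cpu".getBytes(StandardCharsets.UTF_8);
final byte[] value = "42".getBytes(StandardCharsets.UTF_8);
final NodeEntry entry = new NodeEntry(ValueType.VALUE_INLINE, key, value);
// size() counts 1 type byte + 9 key bytes + 2 value bytes = 12
assert entry.size() == 1 + key.length + value.length;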


@@ -23,470 +23,470 @@ import org.slf4j.LoggerFactory;
public class PersistentMap<K, V> implements AutoCloseable {
private static final Logger LOGGER = LoggerFactory.getLogger(PersistentMap.class);
// the maximum key
static final byte[] MAX_KEY;
static {
MAX_KEY = new byte[20];
Arrays.fill(MAX_KEY, Byte.MAX_VALUE);
}
interface VisitorCallback {
void visit(PersistentMapDiskNode node, PersistentMapDiskNode parentNode, NodeEntry nodeEntry, int depth);
}
public interface EncoderDecoder<O> {
public byte[] encode(O object);
public O decode(byte[] bytes);
public default Function<byte[], O> asDecoder() {
return bytes -> this.decode(bytes);
}
public default Function<O, byte[]> asEncoder() {
return plain -> this.encode(plain);
}
public byte[] getEmptyValue();
}
private static final class StringCoder implements EncoderDecoder<String> {
@Override
public byte[] encode(final String object) {
return object.getBytes(StandardCharsets.UTF_8);
}
@Override
public String decode(final byte[] bytes) {
return bytes == null ? null : new String(bytes, StandardCharsets.UTF_8);
}
@Override
public byte[] getEmptyValue() {
return new byte[] { 0 };
}
}
private static final class LongCoder implements EncoderDecoder<Long> {
@Override
public byte[] encode(final Long object) {
return VariableByteEncoder.encode(object);
}
@Override
public Long decode(final byte[] bytes) {
return bytes == null ? null : VariableByteEncoder.decodeFirstValue(bytes);
}
@Override
public byte[] getEmptyValue() {
return new byte[] { 0 };
}
}
private static final class UUIDCoder implements EncoderDecoder<UUID> {
@Override
public byte[] encode(final UUID uuid) {
final long mostSignificantBits = uuid.getMostSignificantBits();
final long leastSignificantBits = uuid.getLeastSignificantBits();
return VariableByteEncoder.encode(mostSignificantBits, leastSignificantBits);
}
@Override
public UUID decode(final byte[] bytes) {
final LongList longs = VariableByteEncoder.decode(bytes);
final long mostSignificantBits = longs.get(0);
final long leastSignificantBits = longs.get(1);
return new UUID(mostSignificantBits, leastSignificantBits);
}
@Override
public byte[] getEmptyValue() {
return new byte[] { 0 };
}
}
private static final class EmptyCoder implements EncoderDecoder<Empty> {
private static final byte[] EMPTY_BYTE_ARRAY = new byte[0];
@Override
public byte[] encode(final Empty __) {
return EMPTY_BYTE_ARRAY;
}
@Override
public Empty decode(final byte[] bytes) {
Preconditions.checkTrue(bytes.length == 0, "expected an empty byte array");
return Empty.INSTANCE;
}
@Override
public byte[] getEmptyValue() {
return new byte[] {};
}
}
public static final EncoderDecoder<Long> LONG_CODER = new LongCoder();
public static final EncoderDecoder<UUID> UUID_ENCODER = new UUIDCoder();
public static final EncoderDecoder<String> STRING_CODER = new StringCoder();
public static final EncoderDecoder<Empty> EMPTY_ENCODER = new EmptyCoder();
static final int BLOCK_SIZE = 4096;
static final long NODE_OFFSET_TO_ROOT_NODE = 8;
private final DiskStorage diskStore;
private int maxEntriesInNode = Integer.MAX_VALUE;
private final EncoderDecoder<K> keyEncoder;
private final EncoderDecoder<V> valueEncoder;
private final LRUCache<Long, PersistentMapDiskNode> nodeCache = new LRUCache<>(10_000);
private final LRUCache<K, V> valueCache = new LRUCache<>(1_000);
public PersistentMap(final Path path, final Path storageBasePath, final EncoderDecoder<K> keyEncoder,
final EncoderDecoder<V> valueEncoder) {
this.diskStore = new DiskStorage(path, storageBasePath);
this.keyEncoder = keyEncoder;
this.valueEncoder = valueEncoder;
initIfNew();
}
@Override
public void close() {
diskStore.close();
}
public void setMaxEntriesInNode(final int maxEntriesInNode) {
this.maxEntriesInNode = maxEntriesInNode;
}
private void initIfNew() {
if (diskStore.size() < BLOCK_SIZE) {
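// 1. allocate the fixed block that stores the pointer to the root node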
final long nodeOffsetToRootNode = diskStore.allocateBlock(diskStore.minAllocationSize());
Preconditions.checkEqual(nodeOffsetToRootNode, NODE_OFFSET_TO_ROOT_NODE,
"The offset of the pointer to the root node must be at a well known location. "
+ "Otherwise we would not be able to find it in an already existing file.");
// 2. make sure new blocks are aligned to the block size (for faster disk IO)
diskStore.ensureAlignmentForNewBlocks(BLOCK_SIZE);
// 3. initialize an empty root node
final long blockOffset = diskStore.allocateBlock(BLOCK_SIZE);
final var rootNode = PersistentMapDiskNode.emptyRootNode(blockOffset);
writeNode(rootNode);
// 4. update pointer to root node
writeNodeOffsetOfRootNode(blockOffset);
// 5. insert a dummy entry with a 'maximum' key
putValue(MAX_KEY, valueEncoder.getEmptyValue());
}
}
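// Editorial sketch (not part of this commit) of the file layout initIfNew()
// produces on a fresh store, as I read the code above:
//
// offset 8 (NODE_OFFSET_TO_ROOT_NODE) : long pointer to the root node block
// BLOCK_SIZE-aligned offsets : 4096-byte node blocks
//
// The root initially holds only the MAX_KEY sentinel entry, so every
// ceilingEntry() lookup in getNodeEntryTo() can find an entry.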
public synchronized void putAllValues(final Map<K, V> map) {
for (final Entry<K, V> e : map.entrySet()) {
putValue(e.getKey(), e.getValue());
}
}
public synchronized V putValue(final K key, final V value) {
final V cachedValue = valueCache.get(key);
if (cachedValue != null && cachedValue == value) {
return value;
}
final byte[] encodedKey = keyEncoder.encode(key);
final byte[] encodedValue = valueEncoder.encode(value);
final byte[] encodedOldValue = putValue(encodedKey, encodedValue);
final V oldValue = encodedOldValue == null ? null : valueEncoder.decode(encodedOldValue);
valueCache.put(key, value);
return oldValue;
}
public synchronized V getValue(final K key) {
final V cachedValue = valueCache.get(key);
if (cachedValue != null) {
return cachedValue;
}
final byte[] encodedKey = keyEncoder.encode(key);
final byte[] foundValue = getValue(encodedKey);
final V result = foundValue == null ? null : valueEncoder.decode(foundValue);
valueCache.put(key, result);
return result;
}
private byte[] putValue(final byte[] key, final byte[] value) {
final long rootNodeOffset = readNodeOffsetOfRootNode();
final Stack<PersistentMapDiskNode> parents = new Stack<>();
return insert(parents, rootNodeOffset, key, value);
}
private byte[] getValue(final byte[] key) {
final long rootNodeOffset = readNodeOffsetOfRootNode();
final NodeEntry entry = findNodeEntry(rootNodeOffset, key);
return entry == null ? null : entry.getValue();
}
private byte[] insert(final Stack<PersistentMapDiskNode> parents, final long nodeOffset, final byte[] key,
final byte[] value) {
final PersistentMapDiskNode node = getNode(nodeOffset);
final NodeEntry entry = node.getNodeEntryTo(key);
if (entry == null || entry.isDataNode()) {
final byte[] oldValue;
if (entry == null) {
oldValue = null;
} else {
// found a NodeEntry that is either equal to key, or it is at the insertion
// point
final boolean entryIsForKey = entry.equal(key);
oldValue = entryIsForKey ? entry.getValue() : null;
// Early exit, if the oldValue equals the new value.
// We do not have to replace the value, because it would not change anything
// (just cause unnecessary write operations). But we return the oldValue so that
// the caller thinks we replaced the value.
if (Arrays.equals(oldValue, value)) {
return oldValue;
}
if (entryIsForKey) {
node.removeKey(key);
}
}
if (node.canAdd(key, value, maxEntriesInNode)) {
// insert in existing node
node.addKeyValue(key, value);
writeNode(node);
return oldValue;
} else {
// add new node
// 1. split current node into A and B
splitNode(parents, node);
// 2. insert the value
// start from the root, because we might have added a new root node
return putValue(key, value);
}
} else {
final long childNodeOffset = toNodeOffset(entry);
parents.add(node);
return insert(parents, childNodeOffset, key, value);
}
}
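// Editorial walk-through (not part of this commit): insert() descends from the
// root using getNodeEntryTo(), which returns the smallest entry whose key is
// >= the search key. NODE_POINTER entries lead to child blocks, VALUE_INLINE
// entries carry the value itself. When the target node cannot fit the new
// entry, splitNode() is called and the insert restarts from the (possibly new)
// root.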
private PersistentMapDiskNode splitNode(final Stack<PersistentMapDiskNode> parents,
final PersistentMapDiskNode node) {
// System.out.println("\n\npre split node: " + node + "\n");
final long newBlockOffset = diskStore.allocateBlock(BLOCK_SIZE);
final PersistentMapDiskNode newNode = node.split(newBlockOffset);
final PersistentMapDiskNode parent = parents.isEmpty() ? null : parents.pop();
if (parent != null) {
final byte[] newNodeKey = newNode.getTopNodeEntry().getKey();
if (parent.canAdd(newNodeKey, newBlockOffset, maxEntriesInNode)) {
parent.addKeyNodePointer(newNodeKey, newBlockOffset);
writeNode(parent);
writeNode(newNode);
writeNode(node);
return parent;
} else {
final PersistentMapDiskNode grandParentNode = splitNode(parents, parent);
final NodeEntry pointerToParentAfterSplit = grandParentNode.getNodeEntryTo(newNodeKey);
Preconditions.checkEqual(pointerToParentAfterSplit.isInnerNode(), true, "{0} must be a pointer to an inner node",
pointerToParentAfterSplit);
final long parentNodeOffset = toNodeOffset(pointerToParentAfterSplit); // the parent we have to add the
// newNode to
final PersistentMapDiskNode parentNode = getNode(parentNodeOffset);
parentNode.addKeyNodePointer(newNodeKey, newBlockOffset);
writeNode(parentNode);
writeNode(newNode);
writeNode(node);
return parentNode;
}
} else {
// has no parent -> create a new parent (the new parent will also be the new
// root)
final long newRootNodeOffset = diskStore.allocateBlock(BLOCK_SIZE);
final PersistentMapDiskNode rootNode = PersistentMapDiskNode.emptyRootNode(newRootNodeOffset);
final byte[] newNodeKey = newNode.getTopNodeEntry().getKey();
rootNode.addKeyNodePointer(newNodeKey, newBlockOffset);
final byte[] oldNodeKey = node.getTopNodeEntry().getKey();
rootNode.addKeyNodePointer(oldNodeKey, node.getNodeOffset());
writeNode(rootNode);
writeNode(newNode);
writeNode(node);
writeNodeOffsetOfRootNode(newRootNodeOffset);
return rootNode;
}
}
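// Editorial sketch of a split (derived from PersistentMapDiskNode.split(),
// not part of this commit): the lower half of the entries moves to a new
// block, the upper half stays in place, so existing pointers to this block
// remain valid:
//
// before: node@A [k1 k2 k3 k4]
// after : new@B [k1 k2] node@A [k3 k4]
//
// The parent gains a pointer keyed by k2, new@B's top key; if the parent is
// full, the split propagates upward and may create a new root node.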
private NodeEntry findNodeEntry(final long nodeOffset, final byte[] key) {
final PersistentMapDiskNode node = getNode(nodeOffset);
final var entry = node.getNodeEntryTo(key);
if (entry == null) {
return null;
} else if (entry.isDataNode()) {
if (entry.equal(key)) {
return entry;
} else {
return null;
}
} else {
final long childNodeOffset = toNodeOffset(entry);
return findNodeEntry(childNodeOffset, key);
}
}
private long toNodeOffset(final NodeEntry entry) {
Preconditions.checkEqual(entry.isInnerNode(), true);
return VariableByteEncoder.decodeFirstValue(entry.getValue());
}
private PersistentMapDiskNode getNode(final long nodeOffset) {
PersistentMapDiskNode node = nodeCache.get(nodeOffset);
if (node == null) {
final DiskBlock diskBlock = diskStore.getDiskBlock(nodeOffset, BLOCK_SIZE);
node = PersistentMapDiskNode.parse(nodeOffset, diskBlock);
nodeCache.put(nodeOffset, node);
}
return node;
}
private void writeNode(final PersistentMapDiskNode node) {
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("writing node {}", node.toString(keyEncoder.asDecoder(), valueEncoder.asDecoder()));
}
final long nodeOffset = node.getNodeOffset();
// final DiskBlock diskBlock = diskStore.getDiskBlock(nodeOffset, BLOCK_SIZE);
DiskBlock diskBlock = node.getDiskBlock();
if (diskBlock == null) {
diskBlock = diskStore.getDiskBlock(nodeOffset, BLOCK_SIZE);
}
}
final byte[] buffer = diskBlock.getBuffer();
final byte[] newBuffer = node.serialize();
System.arraycopy(newBuffer, 0, buffer, 0, buffer.length);
diskBlock.writeAsync();
// diskBlock.force(); // makes writing nodes slower by factor 800 (sic!)
}
public synchronized void print() {
visitNodeEntriesPreOrder((node, parentNode, nodeEntry, depth) -> {
final PrintStream writer = System.out;
final String children = "#" + node.getEntries().size();
writer.println(" ".repeat(depth) + "@" + node.getNodeOffset() + " " + children + " " + nodeEntry
.toString(b -> String.valueOf(keyEncoder.decode(b)), b -> String.valueOf(valueEncoder.decode(b))));
});
}
public synchronized void visitNodeEntriesPreOrder(final VisitorCallback visitor) {
final long rootNodeOffset = readNodeOffsetOfRootNode();
visitNodeEntriesPreOrderRecursively(rootNodeOffset, null, visitor, 0);
}
private void visitNodeEntriesPreOrderRecursively(final long nodeOffset, final PersistentMapDiskNode parentNode,
final VisitorCallback visitor, final int depth) {
final PersistentMapDiskNode node = getNode(nodeOffset);
for (final NodeEntry child : node.getEntries()) {
visitor.visit(node, parentNode, child, depth);
if (child.isInnerNode()) {
final long childNodeOffset = VariableByteEncoder.decodeFirstValue(child.getValue());
visitNodeEntriesPreOrderRecursively(childNodeOffset, node, visitor, depth + 1);
}
}
}
enum VisitByPrefixMode {
FIND, ITERATE
}
public synchronized void visitValues(final K keyPrefix, final Visitor<K, V> visitor) {
final byte[] encodedKeyPrefix = keyEncoder.encode(keyPrefix);
final long rootNodeOffset = readNodeOffsetOfRootNode();
iterateNodeEntryByPrefix(rootNodeOffset, encodedKeyPrefix, visitor);
}
private void iterateNodeEntryByPrefix(final long nodeOffset, final byte[] keyPrefix, final Visitor<K, V> visitor) {
final PersistentMapDiskNode node = getNode(nodeOffset);
// list of children that might contain a key with the keyPrefix
final List<NodeEntry> nodesForPrefix = node.getNodesByPrefix(keyPrefix);
for (final NodeEntry entry : nodesForPrefix) {
if (entry.isDataNode()) {
final int prefixCompareResult = entry.compareKeyPrefix(keyPrefix);
if (prefixCompareResult == 0) {
if (Arrays.equals(entry.getKey(), MAX_KEY)) {
continue;
}
final K key = keyEncoder.decode(entry.getKey());
final V value = valueEncoder.decode(entry.getValue());
visitor.visit(key, value);
// System.out.println("--> " + key + "=" + value);
} else if (prefixCompareResult > 0) {
break;
}
} else {
final long childNodeOffset = toNodeOffset(entry);
iterateNodeEntryByPrefix(childNodeOffset, keyPrefix, visitor);
}
}
}
private long readNodeOffsetOfRootNode() {
final DiskBlock diskBlock = diskStore.getDiskBlock(NODE_OFFSET_TO_ROOT_NODE, diskStore.minAllocationSize());
return diskBlock.getByteBuffer().getLong(0);
}
private void writeNodeOffsetOfRootNode(final long newNodeOffsetToRootNode) {
final DiskBlock diskBlock = diskStore.getDiskBlock(NODE_OFFSET_TO_ROOT_NODE, diskStore.minAllocationSize());
diskBlock.getByteBuffer().putLong(0, newNodeOffsetToRootNode);
diskBlock.force();
}
}
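A hedged end-to-end sketch of using the class above. The file paths are placeholders (java.nio.file.Paths assumed imported), and only calls shown in the listing (putValue, getValue, visitValues, close via try-with-resources) are used:

// Hypothetical usage sketch, not part of this commit; paths are placeholders.
try (final PersistentMap<String, Long> map = new PersistentMap<>(Paths.get("data/metrics.map"),
Paths.get("data"), PersistentMap.STRING_CODER, PersistentMap.LONG_CODER)) {
map.putValue("host1.cpu", 42L);
map.putValue("host1.mem", 7L);
final Long cpu = map.getValue("host1.cpu"); // -> 42
// visit every entry whose key starts with "host1."
map.visitValues("host1.", (key, value) -> System.out.println(key + "=" + value));
}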


@@ -42,256 +42,256 @@ import org.lucares.utils.byteencoder.VariableByteEncoder;
*/
public class PersistentMapDiskNode {
private final TreeMap<ByteArrayKey, NodeEntry> entries;
private final long nodeOffset;
private final DiskBlock diskBlock;
public PersistentMapDiskNode(final long nodeOffset, final Collection<NodeEntry> entries,
final DiskBlock diskBlock) {
this.nodeOffset = nodeOffset;
this.diskBlock = diskBlock;
this.entries = toMap(entries);
}
private static TreeMap<ByteArrayKey, NodeEntry> toMap(final Collection<NodeEntry> nodeEntries) {
final TreeMap<ByteArrayKey, NodeEntry> result = new TreeMap<>();
for (final NodeEntry nodeEntry : nodeEntries) {
result.put(new ByteArrayKey(nodeEntry.getKey()), nodeEntry);
}
return result;
}
public static PersistentMapDiskNode emptyRootNode(final long nodeOffset) {
return new PersistentMapDiskNode(nodeOffset, Collections.emptyList(), null);
}
public static PersistentMapDiskNode parse(final long nodeOffset, final DiskBlock diskBlock) {
final byte[] data = diskBlock.getBuffer();
if (data.length != PersistentMap.BLOCK_SIZE) {
throw new IllegalStateException(
"block size must be " + PersistentMap.BLOCK_SIZE + " but was " + data.length);
}
final LongList longs = VariableByteEncoder.decode(data);
final List<NodeEntry> entries = deserialize(longs, data);
return new PersistentMapDiskNode(nodeOffset, entries, diskBlock);
}
public static List<NodeEntry> deserialize(final LongList keyLengths, final byte[] buffer) {
final List<NodeEntry> entries = new ArrayList<>();
if (keyLengths.isEmpty() || keyLengths.get(0) == 0) {
// node is empty -> should only happen for the root node
} else {
final int numEntries = (int) keyLengths.get(0);
int offset = PersistentMap.BLOCK_SIZE;
for (int i = 0; i < numEntries; i++) {
final int keyLength = (int) keyLengths.get(i * 2 + 1);
final int valueLength = (int) keyLengths.get(i * 2 + 2);
final int valueOffset = offset - valueLength;
final int keyOffset = valueOffset - keyLength;
final int typeOffset = keyOffset - 1;
final byte typeByte = buffer[typeOffset];
final byte[] key = Arrays.copyOfRange(buffer, keyOffset, keyOffset + keyLength);
final byte[] value = Arrays.copyOfRange(buffer, valueOffset, valueOffset + valueLength);
final NodeEntry entry = new NodeEntry(ValueType.fromByte(typeByte), key, value);
entries.add(entry);
offset = typeOffset;
}
}
return entries;
}
public byte[] serialize() {
return serialize(entries);
}
public DiskBlock getDiskBlock() {
return diskBlock;
}
public long getNodeOffset() {
return nodeOffset;
}
public NodeEntry getNodeEntryTo(final byte[] key) {
final Entry<ByteArrayKey, NodeEntry> ceilingEntry = entries.ceilingEntry(new ByteArrayKey(key));
return ceilingEntry != null ? ceilingEntry.getValue() : null;
}
public List<NodeEntry> getNodesByPrefix(final byte[] keyPrefix) {
final List<NodeEntry> result = new ArrayList<>();
for (final NodeEntry nodeEntry : entries.values()) {
final int prefixCompareResult = nodeEntry.compareKeyPrefix(keyPrefix);
if (prefixCompareResult == 0) {
// add all entries where keyPrefix is a prefix of the key
result.add(nodeEntry);
} else if (prefixCompareResult > 0) {
// Only add the first entry where the keyPrefix is smaller (as defined by
// compareKeyPrefix) than the key.
// These are entries that might contain keys with the keyPrefix, but only the
// first of those can really have such keys.
result.add(nodeEntry);
break;
}
}
return result;
}
public void addKeyValue(final byte[] key, final byte[] value) {
addNode(ValueType.VALUE_INLINE, key, value);
}
public void addKeyNodePointer(final byte[] key, final long nodePointer) {
final byte[] value = VariableByteEncoder.encode(nodePointer);
addNode(ValueType.NODE_POINTER, key, value);
}
public void addNode(final ValueType valueType, final byte[] key, final byte[] value) {
final NodeEntry entry = new NodeEntry(valueType, key, value);
entries.put(new ByteArrayKey(key), entry);
}
public boolean canAdd(final byte[] key, final long nodeOffset, final int maxEntriesInNode) {
return canAdd(key, VariableByteEncoder.encode(nodeOffset), maxEntriesInNode);
}
public boolean canAdd(final byte[] key, final byte[] value, final int maxEntriesInNode) {
if (entries.size() > maxEntriesInNode) {
return false;
} else {
final NodeEntry entry = new NodeEntry(ValueType.VALUE_INLINE, key, value);
final List<NodeEntry> tmp = new ArrayList<>(entries.size() + 1);
tmp.addAll(entries.values());
tmp.add(entry);
// the +1 is for the null-byte terminator of the prefix
return neededBytesTotal(tmp) + 1 <= PersistentMap.BLOCK_SIZE;
}
}
public void removeKey(final byte[] key) {
entries.remove(new ByteArrayKey(key));
}
public List<NodeEntry> getEntries() {
return new ArrayList<>(entries.values());
}
public void clear() {
entries.clear();
}
@Override
public String toString() {
return "@" + nodeOffset + ": "
+ String.join("\n", entries.values().stream().map(NodeEntry::toString).collect(Collectors.toList()));
}
public <K, V> String toString(Function<byte[], K> keyDecoder, Function<byte[], V> valueDecoder) {
StringBuilder result = new StringBuilder();
result.append("@");
result.append(nodeOffset);
result.append(": ");
for (NodeEntry e : entries.values()) {
String s = e.toString(keyDecoder, valueDecoder);
result.append("\n");
result.append(s);
}
return result.toString();
}
public NodeEntry getTopNodeEntry() {
return entries.lastEntry().getValue();
}
public PersistentMapDiskNode split(final long newBlockOffset) {
final List<NodeEntry> entriesAsCollection = new ArrayList<>(entries.values());
final var leftEntries = new ArrayList<>(entriesAsCollection.subList(0, entriesAsCollection.size() / 2));
final var rightEntries = new ArrayList<>(
entriesAsCollection.subList(entriesAsCollection.size() / 2, entriesAsCollection.size()));
entries.clear();
entries.putAll(toMap(rightEntries));
return new PersistentMapDiskNode(newBlockOffset, leftEntries, null);
}
public static int neededBytesTotal(final List<NodeEntry> entries) {
final byte[] buffer = new byte[PersistentMap.BLOCK_SIZE];
final int usedBytes = serializePrefix(entries, buffer);
return usedBytes + NodeEntry.neededBytes(entries);
}
private static byte[] serialize(final Map<ByteArrayKey, NodeEntry> entries) {
final byte[] buffer = new byte[PersistentMap.BLOCK_SIZE];
final Collection<NodeEntry> entriesAsCollection = entries.values();
final int usedBytes = serializePrefix(entriesAsCollection, buffer);
// the +1 is for the null-byte terminator of the prefix
Preconditions.checkGreaterOrEqual(PersistentMap.BLOCK_SIZE,
usedBytes + 1 + NodeEntry.neededBytes(entriesAsCollection),
"The node is too big. It cannot be encoded into " + PersistentMap.BLOCK_SIZE + " bytes.");
serializeIntoFromTail(entriesAsCollection, buffer);
return buffer;
}
private static int serializePrefix(final Collection<NodeEntry> entries, final byte[] buffer) {
final LongList longs = serializeKeyLengths(entries);
final int usedBytes = VariableByteEncoder.encodeInto(longs, buffer, 0);
return usedBytes;
}
private static LongList serializeKeyLengths(final Collection<NodeEntry> entries) {
final var keyLengths = new LongList();
keyLengths.add(entries.size());
for (final NodeEntry nodeEntry : entries) {
keyLengths.add(nodeEntry.getKey().length);
keyLengths.add(nodeEntry.getValue().length);
}
return keyLengths;
}
private static void serializeIntoFromTail(final Collection<NodeEntry> entries, final byte[] buffer) {
int offset = buffer.length;
for (final var entry : entries) {
final byte[] valueBytes = entry.getValue();
final byte[] keyBytes = entry.getKey();
final int offsetValue = offset - valueBytes.length;
final int offsetKey = offsetValue - keyBytes.length;
final int offsetType = offsetKey - 1;
System.arraycopy(valueBytes, 0, buffer, offsetValue, valueBytes.length);
System.arraycopy(keyBytes, 0, buffer, offsetKey, keyBytes.length);
buffer[offsetType] = entry.getType().asByte();
offset = offsetType;
}
}
}
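My reading of the block format implied by serialize()/deserialize() above; a schematic sketch, not an authoritative spec:

offset 0 .. p        : varint prefix (numEntries, then keyLen/valueLen per entry)
byte p               : stays 0 — the null-byte terminator the "+1" in canAdd() accounts for
middle               : unused slack
tail .. BLOCK_SIZE   : entries packed back-to-front as [type byte][key][value];
                       the first (smallest) entry sits at the very end of the block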


@@ -1,5 +1,5 @@
package org.lucares.pdb.map;
public interface Visitor<K, V> {
void visit(K key, V value);
}
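Visitor has a single abstract method, so callers can pass a lambda; a tiny hypothetical example, not part of this commit:

// Hypothetical example.
final Visitor<String, Long> printer = (key, value) -> System.out.println(key + " -> " + value);
printer.visit("host1.cpu", 42L);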